diff --git a/.ocamlformat b/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/bin_attacker/.ocamlformat b/src/bin_attacker/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/bin_attacker/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/bin_attacker/attacker_minimal.ml b/src/bin_attacker/attacker_minimal.ml index a7d4323c95a106b279c168b7bbdbc6560bd9a79d..3da92121f7e6c32f5ffb083d84e08a7efc0e4b58 100644 --- a/src/bin_attacker/attacker_minimal.ml +++ b/src/bin_attacker/attacker_minimal.ml @@ -24,100 +24,117 @@ (*****************************************************************************) open Format -include Logging.Make(struct let name = "attacker" end) + +include Logging.Make (struct + let name = "attacker" +end) module Proto = Client_embedded_proto_alpha (* the genesis block and network *) -let genesis_block_hashed = Block_hash.of_b58check - "BLockGenesisGenesisGenesisGenesisGenesisGeneskvg68z" +let genesis_block_hashed = + Block_hash.of_b58check "BLockGenesisGenesisGenesisGenesisGenesisGeneskvg68z" + let network = Store.Net genesis_block_hashed + let network = Store.Chain_id.Id genesis_block_hashed (* the bootstrap accounts and actions like signing to do with them *) let source_account = List.nth Proto.Bootstrap_storage.accounts 4 + let destination_account = List.nth Proto.Bootstrap_storage.accounts 0 + let wrong_account = List.nth Proto.Bootstrap_storage.accounts 1 + let another_account = List.nth Proto.Bootstrap_storage.accounts 2 + let signed = Ed25519.append_signature source_account.secret_key + let signed_wrong = Ed25519.append_signature wrong_account.secret_key (* forge a block from a list of operations *) let block_forged ?prev ops = let from_int64 x = - [ MBytes.of_string Proto.Constants_repr.version_number ; - Proto.Fitness_repr.int64_to_bytes x ] in + [ MBytes.of_string Proto.Constants_repr.version_number; + Proto.Fitness_repr.int64_to_bytes x ] + in let pred = match prev with None -> genesis_block_hashed | Some x -> x in - let block ops = Store.Block_header.{ chain_id = network ; - predecessor = pred ; - timestamp = Systime_os.now () ; - fitness = from_int64 1L; - operations = ops } in + let block ops = + Store.Block_header. 
+ { chain_id = network; + predecessor = pred; + timestamp = Systime_os.now (); + fitness = from_int64 1L; + operations = ops } + in let open Proto in let generate_proof_of_work_nonce () = - Rand.generate - Proto.Alpha_context.Constants.proof_of_work_nonce_size in + Rand.generate Proto.Alpha_context.Constants.proof_of_work_nonce_size + in let generate_seed_nonce () = - match Proto.Nonce_storage.of_bytes @@ - Rand.generate - Proto.Alpha_context.Constants.nonce_length with - | Error _ -> assert false - | Ok nonce -> nonce in - Block_repr.forge_header (block ops) - Block_repr.{ - baking_slot = {level = Raw_level_repr.of_int32_exn 1l ; priority = 0l } ; - seed_nonce_hash = Proto.Nonce_storage.hash (generate_seed_nonce ()); - proof_of_work_nonce = generate_proof_of_work_nonce () ; - } + match + Proto.Nonce_storage.of_bytes + @@ Rand.generate Proto.Alpha_context.Constants.nonce_length + with + | Error _ -> + assert false + | Ok nonce -> + nonce + in + Block_repr.forge_header + (block ops) + Block_repr. + { baking_slot = {level = Raw_level_repr.of_int32_exn 1l; priority = 0l}; + seed_nonce_hash = Proto.Nonce_storage.hash (generate_seed_nonce ()); + proof_of_work_nonce = generate_proof_of_work_nonce () } (* forge a transaction *) let tx_forged ?dest amount fee = let open Proto.Operation_repr in let open Proto.Tez_repr in let open Proto.Contract_repr in - let trgt - = match dest with - None -> destination_account - | Some dest -> dest in + let trgt = + match dest with None -> destination_account | Some dest -> dest + in let src = source_account in - let tx = Transaction - { amount = of_cents_exn amount ; - parameters = None ; - destination = default_contract trgt.public_key_hash ; } in - let op = Sourced_operations - ( Manager_operations - { source = default_contract src.public_key_hash ; - public_key = Some src.public_key ; - fee = of_cents_exn fee ; - counter = 1l ; - operations = [tx] ; }) in - forge { chain_id = network } op + let tx = + Transaction + { amount = of_cents_exn amount; + parameters = None; + destination = default_contract trgt.public_key_hash } + in + let op = + Sourced_operations + (Manager_operations + { source = default_contract src.public_key_hash; + public_key = Some src.public_key; + fee = of_cents_exn fee; + counter = 1l; + operations = [tx] }) + in + forge {chain_id = network} op (* forge a list of proposals, california eat your heart out *) let props_forged period props = let open Proto.Operation_repr in let src = source_account in - let props = Proposals { - period = period ; - proposals = props } in - let op = Sourced_operations (Delegate_operations { - source = src.public_key ; - operations = [props] }) in - forge { chain_id = network } op + let props = Proposals {period; proposals = props} in + let op = + Sourced_operations + (Delegate_operations {source = src.public_key; operations = [props]}) + in + forge {chain_id = network} op (* "forge" a ballot *) let ballot_forged period prop vote = let open Proto.Operation_repr in let src = source_account in - let ballot = Ballot { - period = period ; - proposal = prop ; - ballot = vote - } in - let op = Sourced_operations (Delegate_operations { - source = src.public_key ; - operations = [ballot] }) in - forge { chain_id = network } op + let ballot = Ballot {period; proposal = prop; ballot = vote} in + let op = + Sourced_operations + (Delegate_operations {source = src.public_key; operations = [ballot]}) + in + forge {chain_id = network} op let identity = P2p_identity.generate Crypto_box.default_target @@ -125,7 +142,8 @@ let 
identity = P2p_identity.generate Crypto_box.default_target let try_action addr port action = let socket = Lwt_unix.socket PF_INET6 SOCK_STREAM 0 in let uaddr = Ipaddr_unix.V6.to_inet_addr addr in - Lwt_unix.connect socket (Lwt_unix.ADDR_INET (uaddr, port)) >>= fun () -> + Lwt_unix.connect socket (Lwt_unix.ADDR_INET (uaddr, port)) + >>= fun () -> let io_sched = P2p_io_scheduler.create ~read_buffer_size:(1 lsl 14) () in let conn = P2p_io_scheduler.register io_sched socket in P2p_connection.authenticate @@ -133,17 +151,21 @@ let try_action addr port action = ~incoming:false conn (addr, port) - identity Distributed_db.Raw.supported_versions >>=? fun (_, auth_fd) -> - P2p_connection.accept auth_fd Distributed_db.Raw.encoding >>= function - | Error _ -> failwith "Connection rejected by peer." + identity + Distributed_db.Raw.supported_versions + >>=? fun (_, auth_fd) -> + P2p_connection.accept auth_fd Distributed_db.Raw.encoding + >>= function + | Error _ -> + failwith "Connection rejected by peer." | Ok conn -> - action conn >>=? fun () -> - P2p_connection.close conn >>= fun () -> - return_unit + action conn + >>=? fun () -> P2p_connection.close conn >>= fun () -> return_unit let replicate n x = let rec replicate_acc acc n x = - if n <= 0 then acc else replicate_acc (x :: acc) (n-1) x in + if n <= 0 then acc else replicate_acc (x :: acc) (n - 1) x + in replicate_acc [] n x let send conn (msg : Distributed_db.Message.t) = @@ -151,20 +173,20 @@ let send conn (msg : Distributed_db.Message.t) = let request_block_times block_hash n conn = let open Block_hash in - lwt_log_notice - "requesting %a block %d times" - pp_short block_hash n >>= fun () -> + lwt_log_notice "requesting %a block %d times" pp_short block_hash n + >>= fun () -> let block_hashes = replicate n block_hash in send conn (Get_block_headers (network, block_hashes)) let request_op_times op_signed n conn = let open Operation_hash in let op_hash = hash_bytes [op_signed] in - lwt_log_notice "sending %a transaction" pp_short op_hash >>= fun () -> - send conn (Operation op_signed) >>=? fun () -> - lwt_log_notice - "requesting %a transaction %d times" - pp_short op_hash n >>= fun () -> + lwt_log_notice "sending %a transaction" pp_short op_hash + >>= fun () -> + send conn (Operation op_signed) + >>=? fun () -> + lwt_log_notice "requesting %a transaction %d times" pp_short op_hash n + >>= fun () -> let op_hashes = replicate n op_hash in send conn (Get_operations op_hashes) @@ -172,30 +194,40 @@ let send_block_size n conn = let bytes = MBytes.create n in let open Block_hash in lwt_log_notice - "propagating fake %d byte block %a" n pp_short (hash_bytes [bytes]) >>= fun () -> - send conn (Block bytes) + "propagating fake %d byte block %a" + n + pp_short + (hash_bytes [bytes]) + >>= fun () -> send conn (Block bytes) let send_protocol_size n conn = let bytes = MBytes.create n in let open Protocol_hash in lwt_log_notice "propagating fake %d byte protocol %a" - n pp_short (hash_bytes [bytes]) >>= fun () -> - send conn (Protocol bytes) + n + pp_short + (hash_bytes [bytes]) + >>= fun () -> send conn (Protocol bytes) let send_operation_size n conn = let op_faked = MBytes.create n in let op_hashed = Operation_hash.hash_bytes [op_faked] in lwt_log_notice "propagating fake %d byte operation %a" - n Operation_hash.pp_short op_hashed >>= fun () -> - send conn (Operation op_faked) >>=? fun () -> + n + Operation_hash.pp_short + op_hashed + >>= fun () -> + send conn (Operation op_faked) + >>=? 
fun () -> let block = signed (block_forged [op_hashed]) in let block_hashed = Block_hash.hash_bytes [block] in lwt_log_notice "propagating block %a with operation" - Block_hash.pp_short block_hashed >>= fun () -> - send conn (Block block) + Block_hash.pp_short + block_hashed + >>= fun () -> send conn (Block block) let send_operation_bad_signature () conn = let open Operation_hash in @@ -203,22 +235,27 @@ let send_operation_bad_signature () conn = let hashed_wrong_op = hash_bytes [signed_wrong_op] in lwt_log_notice "propagating operation %a with wrong signature" - pp_short hashed_wrong_op >>= fun () -> - send conn (Operation signed_wrong_op) >>=? fun () -> + pp_short + hashed_wrong_op + >>= fun () -> + send conn (Operation signed_wrong_op) + >>=? fun () -> let block = signed (block_forged [hashed_wrong_op]) in let block_hashed = Block_hash.hash_bytes [block] in lwt_log_notice "propagating block %a with operation" - Block_hash.pp_short block_hashed >>= fun () -> - send conn (Block block) + Block_hash.pp_short + block_hashed + >>= fun () -> send conn (Block block) let send_block_bad_signature () conn = let open Block_hash in let signed_wrong_block = signed_wrong (block_forged []) in lwt_log_notice "propagating block %a with wrong signature" - pp_short (hash_bytes [signed_wrong_block]) >>= fun () -> - send conn (Block signed_wrong_block) + pp_short + (hash_bytes [signed_wrong_block]) + >>= fun () -> send conn (Block signed_wrong_block) let double_spend () conn = let spend account = @@ -226,111 +263,121 @@ let double_spend () conn = let op_hashed = Operation_hash.hash_bytes [op_signed] in let block_signed = signed (block_forged [op_hashed]) in let block_hashed = Block_hash.hash_bytes [block_signed] in - lwt_log_notice - "propagating operation %a" - Operation_hash.pp_short op_hashed >>= fun () -> - send conn (Operation op_signed) >>=? fun () -> - lwt_log_notice - "propagating block %a" - Block_hash.pp_short block_hashed >>= fun () -> - send conn (Block block_signed) in - spend destination_account >>=? fun () -> - spend another_account + lwt_log_notice "propagating operation %a" Operation_hash.pp_short op_hashed + >>= fun () -> + send conn (Operation op_signed) + >>=? fun () -> + lwt_log_notice "propagating block %a" Block_hash.pp_short block_hashed + >>= fun () -> send conn (Block block_signed) + in + spend destination_account >>=? fun () -> spend another_account let long_chain n conn = - lwt_log_notice "propogating %d blocks" n >>= fun () -> + lwt_log_notice "propogating %d blocks" n + >>= fun () -> let prev_ref = ref genesis_block_hashed in let rec loop k = - if k < 1 then - return_unit + if k < 1 then return_unit else let block = signed (block_forged ~prev:!prev_ref []) in prev_ref := Block_hash.hash_bytes [block] ; - send conn (Block block) >>=? fun () -> - loop (k-1) in + send conn (Block block) >>=? fun () -> loop (k - 1) + in loop n let lots_transactions amount fee n conn = let signed_op = signed (tx_forged amount fee) in let rec loop k = - if k < 1 then - return_unit - else - send conn (Operation signed_op) >>=? fun () -> - loop (k-1) in + if k < 1 then return_unit + else send conn (Operation signed_op) >>=? fun () -> loop (k - 1) + in let ops = replicate n (Operation_hash.hash_bytes [signed_op]) in let signed_block = signed (block_forged ops) in - lwt_log_notice "propogating %d transactions" n >>= fun () -> - loop n >>=? fun () -> + lwt_log_notice "propogating %d transactions" n + >>= fun () -> + loop n + >>=? 
fun () -> lwt_log_notice "propagating block %a with wrong signature" - Block_hash.pp_short (Block_hash.hash_bytes [signed_block]) >>= fun () -> - send conn (Block signed_block) + Block_hash.pp_short + (Block_hash.hash_bytes [signed_block]) + >>= fun () -> send conn (Block signed_block) let main () = let addr = Ipaddr.V6.localhost in let port = 9732 in let run_action action = try_action addr port action in let run_cmd_unit lwt = - Arg.Unit begin fun () -> - Lwt_main.run begin - lwt () >>= function - | Ok () -> Lwt.return_unit - | Error err -> - lwt_log_error "Error: %a" pp_print_error err >>= fun () -> - Lwt.return_unit - end - end in + Arg.Unit + (fun () -> + Lwt_main.run + ( lwt () + >>= function + | Ok () -> + Lwt.return_unit + | Error err -> + lwt_log_error "Error: %a" pp_print_error err + >>= fun () -> Lwt.return_unit )) + in let run_cmd_int_suffix lwt = - Arg.String begin fun str -> - let last = str.[String.length str - 1] in - let init = String.sub str 0 (String.length str - 1) in - let n = - if last == 'k' || last == 'K' - then int_of_string init * 1 lsl 10 - else if last == 'm' || last == 'M' - then int_of_string init * 1 lsl 20 - else if last == 'g' || last == 'G' - then int_of_string init * 1 lsl 30 - else int_of_string str in - Lwt_main.run begin - lwt n >>= function - | Ok () -> Lwt.return_unit - | Error err -> - lwt_log_error "Error: %a" pp_print_error err >>= fun () -> - Lwt.return_unit - end - end in + Arg.String + (fun str -> + let last = str.[String.length str - 1] in + let init = String.sub str 0 (String.length str - 1) in + let n = + if last == 'k' || last == 'K' then int_of_string init * (1 lsl 10) + else if last == 'm' || last == 'M' then + int_of_string init * (1 lsl 20) + else if last == 'g' || last == 'G' then + int_of_string init * (1 lsl 30) + else int_of_string str + in + Lwt_main.run + ( lwt n + >>= function + | Ok () -> + Lwt.return_unit + | Error err -> + lwt_log_error "Error: %a" pp_print_error err + >>= fun () -> Lwt.return_unit )) + in let cmds = - [( "-1", - run_cmd_int_suffix (run_action << request_block_times genesis_block_hashed), - "[N {,K,M,G}] Attempt to request to download N {,kilo,mega,giga}blocks.") - ;( "-2", - run_cmd_int_suffix (run_action << request_op_times (signed (tx_forged 5L 1L))), - "[N {,K,M,G}] Attempt to request to download N {,kilo,mega,giga}ops.") - ;( "-3", - run_cmd_int_suffix (run_action << send_block_size), - "[N {,K,M,G}] Attempt to propagate an N {,kilo,mega,giga}byte fake block.") - ;( "-4", - run_cmd_int_suffix (run_action << send_operation_size), - "[N {,K,M,G}] Attempt to propagate an N {,kilo,mega,giga}byte fake operation.") - ;( "-5", - run_cmd_int_suffix (run_action << send_protocol_size), - "[N {,K,M,G}] Attempt to propagate an N {,kilo,mega,giga}byte fake protocol.") - ;( "-6", - run_cmd_unit (run_action << send_operation_bad_signature), - "Attempt to propagate a transaction with a bad signature.") - ;( "-7", - run_cmd_unit (run_action << send_block_bad_signature), - "Attempt to propagate a block with a bad signature.") - ;( "-8", - run_cmd_unit (run_action << double_spend), - "Attempt to send the same transaction in two blocks") - ; ( "-9", + [ ( "-1", + run_cmd_int_suffix + (run_action << request_block_times genesis_block_hashed), + "[N {,K,M,G}] Attempt to request to download N {,kilo,mega,giga}blocks." + ); + ( "-2", + run_cmd_int_suffix + (run_action << request_op_times (signed (tx_forged 5L 1L))), + "[N {,K,M,G}] Attempt to request to download N {,kilo,mega,giga}ops." 
+ ); + ( "-3", + run_cmd_int_suffix (run_action << send_block_size), + "[N {,K,M,G}] Attempt to propagate an N {,kilo,mega,giga}byte fake \ + block." ); + ( "-4", + run_cmd_int_suffix (run_action << send_operation_size), + "[N {,K,M,G}] Attempt to propagate an N {,kilo,mega,giga}byte fake \ + operation." ); + ( "-5", + run_cmd_int_suffix (run_action << send_protocol_size), + "[N {,K,M,G}] Attempt to propagate an N {,kilo,mega,giga}byte fake \ + protocol." ); + ( "-6", + run_cmd_unit (run_action << send_operation_bad_signature), + "Attempt to propagate a transaction with a bad signature." ); + ( "-7", + run_cmd_unit (run_action << send_block_bad_signature), + "Attempt to propagate a block with a bad signature." ); + ( "-8", + run_cmd_unit (run_action << double_spend), + "Attempt to send the same transaction in two blocks" ); + ( "-9", run_cmd_int_suffix (run_action << long_chain), - "[N {,K,M,G}] Attempt to send a chain of N {,kilo,mega,giga}blocks") - ; ( "-10", + "[N {,K,M,G}] Attempt to send a chain of N {,kilo,mega,giga}blocks" ); + ( "-10", run_cmd_int_suffix (run_action << lots_transactions 0L 0L), - "[N {,K,M,G}] Attempt to send N {,kilo,mega,giga}ops") - ] in + "[N {,K,M,G}] Attempt to send N {,kilo,mega,giga}ops" ) ] + in Arg.parse cmds print_endline "Tezos Evil Client" diff --git a/src/bin_attacker/attacker_minimal.mli b/src/bin_attacker/attacker_minimal.mli index 69783195e6230b9b6af711cbce35ed0e9709420d..fa412ec29d3ede6ae3a8c9bef8acfaba2491a44d 100644 --- a/src/bin_attacker/attacker_minimal.mli +++ b/src/bin_attacker/attacker_minimal.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val main: unit -> unit +val main : unit -> unit diff --git a/src/bin_client/.ocamlformat b/src/bin_client/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/bin_client/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/bin_client/client_protocols_commands.ml b/src/bin_client/client_protocols_commands.ml index b99393f3d61be369160c4958a2230e9f796165bc..fd47439e6f15426306f0ae316fed9c292c85a6c0 100644 --- a/src/bin_client/client_protocols_commands.ml +++ b/src/bin_client/client_protocols_commands.ml @@ -23,79 +23,100 @@ (* *) (*****************************************************************************) -let group = - { Clic.name = "protocols" ; - title = "Commands for managing protocols" } +let group = {Clic.name = "protocols"; title = "Commands for managing protocols"} let commands () = let open Clic in let check_dir _ dn = - if Sys.is_directory dn then - return dn - else - failwith "%s is not a directory" dn in + if Sys.is_directory dn then return dn + else failwith "%s is not a directory" dn + in let check_dir_parameter = parameter check_dir in - [ - - command ~group ~desc: "List protocols known by the node." + [ command + ~group + ~desc:"List protocols known by the node." no_options - (prefixes [ "list" ; "protocols" ] stop) + (prefixes ["list"; "protocols"] stop) (fun () (cctxt : #Client_context.full) -> - Shell_services.Protocol.list cctxt >>=? 
fun protos -> - Lwt_list.iter_s (fun ph -> cctxt#message "%a" Protocol_hash.pp ph) protos >>= fun () -> - return_unit - ); - - command ~group ~desc: "Inject a new protocol into the node." + Shell_services.Protocol.list cctxt + >>=? fun protos -> + Lwt_list.iter_s + (fun ph -> cctxt#message "%a" Protocol_hash.pp ph) + protos + >>= fun () -> return_unit); + command + ~group + ~desc:"Inject a new protocol into the node." no_options - (prefixes [ "inject" ; "protocol" ] - @@ param ~name:"dir" ~desc:"directory containing the sources of a protocol" check_dir_parameter - @@ stop) + ( prefixes ["inject"; "protocol"] + @@ param + ~name:"dir" + ~desc:"directory containing the sources of a protocol" + check_dir_parameter + @@ stop ) (fun () dirname (cctxt : #Client_context.full) -> - Lwt.catch - (fun () -> - Lwt_utils_unix.Protocol.read_dir dirname >>=? fun (_hash, proto) -> - Shell_services.Injection.protocol cctxt proto >>= function - | Ok hash -> - cctxt#message "Injected protocol %a successfully" Protocol_hash.pp hash >>= fun () -> - return_unit - | Error err -> - cctxt#error "Error while injecting protocol from %s: %a" - dirname Error_monad.pp_print_error err >>= fun () -> - return_unit) - (fun exn -> - cctxt#error "Error while injecting protocol from %s: %a" - dirname Error_monad.pp_print_error [Error_monad.Exn exn] >>= fun () -> - return_unit) - ); - - command ~group ~desc: "Dump a protocol from the node's record of protocol." + Lwt.catch + (fun () -> + Lwt_utils_unix.Protocol.read_dir dirname + >>=? fun (_hash, proto) -> + Shell_services.Injection.protocol cctxt proto + >>= function + | Ok hash -> + cctxt#message + "Injected protocol %a successfully" + Protocol_hash.pp + hash + >>= fun () -> return_unit + | Error err -> + cctxt#error + "Error while injecting protocol from %s: %a" + dirname + Error_monad.pp_print_error + err + >>= fun () -> return_unit) + (fun exn -> + cctxt#error + "Error while injecting protocol from %s: %a" + dirname + Error_monad.pp_print_error + [Error_monad.Exn exn] + >>= fun () -> return_unit)); + command + ~group + ~desc:"Dump a protocol from the node's record of protocol." no_options - (prefixes [ "dump" ; "protocol" ] - @@ Protocol_hash.param ~name:"protocol hash" ~desc:"" - @@ stop) + ( prefixes ["dump"; "protocol"] + @@ Protocol_hash.param ~name:"protocol hash" ~desc:"" + @@ stop ) (fun () ph (cctxt : #Client_context.full) -> - Shell_services.Protocol.contents cctxt ph >>=? fun proto -> - Lwt_utils_unix.Protocol.write_dir (Protocol_hash.to_short_b58check ph) ~hash:ph proto >>=? fun () -> - cctxt#message "Extracted protocol %a" Protocol_hash.pp_short ph >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Fetch a protocol from the network." + Shell_services.Protocol.contents cctxt ph + >>=? fun proto -> + Lwt_utils_unix.Protocol.write_dir + (Protocol_hash.to_short_b58check ph) + ~hash:ph + proto + >>=? fun () -> + cctxt#message "Extracted protocol %a" Protocol_hash.pp_short ph + >>= fun () -> return_unit); + command + ~group + ~desc:"Fetch a protocol from the network." no_options - (prefixes [ "fetch" ; "protocol" ] - @@ Protocol_hash.param ~name:"protocol hash" - @@ stop - ) + ( prefixes ["fetch"; "protocol"] + @@ Protocol_hash.param ~name:"protocol hash" + @@ stop ) (fun () hash (cctxt : #Client_context.full) -> - Shell_services.Protocol.fetch cctxt hash >>= function - | Ok () -> - cctxt#message "Protocol %a successfully fetched." 
- Protocol_hash.pp_short hash >>= fun () -> - return_unit - | Error err -> - cctxt#error "Error while fetching protocol: %a" - Error_monad.pp_print_error err >>= fun () -> - return_unit - ) - ] + Shell_services.Protocol.fetch cctxt hash + >>= function + | Ok () -> + cctxt#message + "Protocol %a successfully fetched." + Protocol_hash.pp_short + hash + >>= fun () -> return_unit + | Error err -> + cctxt#error + "Error while fetching protocol: %a" + Error_monad.pp_print_error + err + >>= fun () -> return_unit) ] diff --git a/src/bin_client/client_protocols_commands.mli b/src/bin_client/client_protocols_commands.mli index d50cc7538fbf04ef0ead4fba3aa1e197ad1f423f..bf2cb2fa6a10aaebad81920f9818a24c4c835b74 100644 --- a/src/bin_client/client_protocols_commands.mli +++ b/src/bin_client/client_protocols_commands.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val commands: unit -> Client_commands.command list +val commands : unit -> Client_commands.command list diff --git a/src/bin_client/client_rpc_commands.ml b/src/bin_client/client_rpc_commands.ml index 86ddb334b1a4b6cbe88597c8c33da27a81212001..78ee0458606308ec7c4fa3b22d54732a1fb618d0 100644 --- a/src/bin_client/client_rpc_commands.ml +++ b/src/bin_client/client_rpc_commands.ml @@ -34,46 +34,52 @@ open Json_schema exception Unsupported_construct type input = { - int : int -> int -> string option -> string list -> int Lwt.t ; - float : string option -> string list -> float Lwt.t ; - string : string option -> string list -> string Lwt.t ; - bool : string option -> string list -> bool Lwt.t ; - continue : string option -> string list -> bool Lwt.t ; - display : string -> unit Lwt.t ; + int : int -> int -> string option -> string list -> int Lwt.t; + float : string option -> string list -> float Lwt.t; + string : string option -> string list -> string Lwt.t; + bool : string option -> string list -> bool Lwt.t; + continue : string option -> string list -> bool Lwt.t; + display : string -> unit Lwt.t } (* generic JSON generation from a schema with callback for random or interactive filling *) -let fill_in ?(show_optionals=true) input schema = - let rec element path { title ; kind ; _ }= +let fill_in ?(show_optionals = true) input schema = + let rec element path {title; kind; _} = match kind with - | Integer { minimum ; maximum ; _ } -> + | Integer {minimum; maximum; _} -> let minimum = match minimum with - | None -> min_int - | Some (m, `Inclusive) -> int_of_float m - | Some (m, `Exclusive) -> int_of_float m + 1 in + | None -> + min_int + | Some (m, `Inclusive) -> + int_of_float m + | Some (m, `Exclusive) -> + int_of_float m + 1 + in let maximum = match maximum with - | None -> max_int - | Some (m, `Inclusive) -> int_of_float m - | Some (m, `Exclusive) -> int_of_float m - 1 in - input.int minimum maximum title path >>= fun i -> - Lwt.return (`Float (float i)) + | None -> + max_int + | Some (m, `Inclusive) -> + int_of_float m + | Some (m, `Exclusive) -> + int_of_float m - 1 + in + input.int minimum maximum title path + >>= fun i -> Lwt.return (`Float (float i)) | Number _ -> - input.float title path >>= fun f -> - Lwt.return (`Float f) + input.float title path >>= fun f -> Lwt.return (`Float f) | Boolean -> - input.bool title path >>= fun f -> - Lwt.return (`Bool f) + input.bool title path >>= fun f -> Lwt.return (`Bool f) | String _ -> - input.string title path >>= fun f -> - Lwt.return (`String f) + input.string title path >>= fun f -> Lwt.return (`String f) | Combine ((One_of | Any_of), 
elts) -> let nb = List.length elts in - input.int 0 (nb - 1) (Some "Select the schema to follow") path >>= fun n -> - element path (List.nth elts n) - | Combine ((All_of | Not), _) -> Lwt.fail Unsupported_construct + input.int 0 (nb - 1) (Some "Select the schema to follow") path + >>= fun n -> element path (List.nth elts n) + | Combine ((All_of | Not), _) -> + Lwt.fail Unsupported_construct | Def_ref name -> Lwt.return (`String (Json_query.json_pointer_of_path name)) | Id_ref _ | Ext_ref _ -> @@ -81,107 +87,124 @@ let fill_in ?(show_optionals=true) input schema = | Array (elts, _) -> let rec fill_loop acc n ls = match ls with - | [] -> Lwt.return acc + | [] -> + Lwt.return acc | elt :: elts -> - element (string_of_int n :: path) elt >>= fun json -> - fill_loop (json :: acc) (succ n) elts + element (string_of_int n :: path) elt + >>= fun json -> fill_loop (json :: acc) (succ n) elts in - fill_loop [] 0 elts >>= fun acc -> - Lwt.return (`A (List.rev acc)) - | Object { properties ; _ } -> + fill_loop [] 0 elts >>= fun acc -> Lwt.return (`A (List.rev acc)) + | Object {properties; _} -> let properties = - if show_optionals - then properties - else (List.filter (fun (_, _, b, _) -> b) properties) in + if show_optionals then properties + else List.filter (fun (_, _, b, _) -> b) properties + in let rec fill_loop acc ls = match ls with - | [] -> Lwt.return acc + | [] -> + Lwt.return acc | (n, elt, _, _) :: elts -> - element (n :: path) elt >>= fun json -> - fill_loop ((n, json) :: acc) elts + element (n :: path) elt + >>= fun json -> fill_loop ((n, json) :: acc) elts in - fill_loop [] properties >>= fun acc -> - Lwt.return (`O (List.rev acc)) + fill_loop [] properties >>= fun acc -> Lwt.return (`O (List.rev acc)) | Monomorphic_array (elt, specs) -> let rec fill_loop acc min n max = - if n > max then - Lwt.return acc + if n > max then Lwt.return acc else - element (string_of_int n :: path) elt >>= fun json -> - (if n < min then Lwt.return_true else input.continue title path) >>= function - | true -> fill_loop (json :: acc) min (succ n) max - | false -> Lwt.return (json :: acc) + element (string_of_int n :: path) elt + >>= fun json -> + (if n < min then Lwt.return_true else input.continue title path) + >>= function + | true -> + fill_loop (json :: acc) min (succ n) max + | false -> + Lwt.return (json :: acc) in let max = match specs.max_items with None -> max_int | Some m -> m in - fill_loop [] specs.min_items 0 max >>= fun acc -> - Lwt.return (`A (List.rev acc)) - | Any -> Lwt.fail Unsupported_construct - | Dummy -> Lwt.fail Unsupported_construct - | Null -> Lwt.return `Null + fill_loop [] specs.min_items 0 max + >>= fun acc -> Lwt.return (`A (List.rev acc)) + | Any -> + Lwt.fail Unsupported_construct + | Dummy -> + Lwt.fail Unsupported_construct + | Null -> + Lwt.return `Null in element [] (Json_schema.root schema) -let random_fill_in ?(show_optionals=true) schema = +let random_fill_in ?(show_optionals = true) schema = let display _ = Lwt.return_unit in let int min max _ _ = - let max = Int64.of_int max - and min = Int64.of_int min in + let max = Int64.of_int max and min = Int64.of_int min in let range = Int64.sub max min in let random_int64 = Int64.add (Random.int64 range) min in - Lwt.return (Int64.to_int random_int64) in + Lwt.return (Int64.to_int random_int64) + in let string _title _ = Lwt.return "" in let float _ _ = Lwt.return (Random.float infinity) in let bool _ _ = Lwt.return (Random.int 2 = 0) in let continue _ _ = Lwt.return (Random.int 4 = 0) in Lwt.catch (fun () -> - fill_in 
~show_optionals - { int ; float ; string ; bool ; display ; continue } - schema >>= fun json -> - Lwt.return_ok json) + fill_in + ~show_optionals + {int; float; string; bool; display; continue} + schema + >>= fun json -> Lwt.return_ok json) (fun e -> - let msg = Printf.sprintf "Fill-in failed %s\n%!" (Printexc.to_string e) in - Lwt.return_error msg) + let msg = + Printf.sprintf "Fill-in failed %s\n%!" (Printexc.to_string e) + in + Lwt.return_error msg) -let editor_fill_in ?(show_optionals=true) schema = +let editor_fill_in ?(show_optionals = true) schema = let tmp = Filename.temp_file "tezos_rpc_call_" ".json" in let rec init () = (* write a temp file with instructions *) - random_fill_in ~show_optionals schema >>= function - | Error msg -> Lwt.return_error msg + random_fill_in ~show_optionals schema + >>= function + | Error msg -> + Lwt.return_error msg | Ok json -> - Lwt_io.(with_file ~mode:Output tmp (fun fp -> - write_line fp (Data_encoding.Json.to_string json))) >>= fun () -> - edit () + Lwt_io.( + with_file ~mode:Output tmp (fun fp -> + write_line fp (Data_encoding.Json.to_string json))) + >>= fun () -> edit () and edit () = (* launch the user's editor on it *) let editor_cmd = let ed = - match Sys.getenv_opt "EDITOR", Sys.getenv_opt "VISUAL" with - | Some ed, _ -> ed - | None, Some ed -> ed - | None, None when Sys.win32 -> + match (Sys.getenv_opt "EDITOR", Sys.getenv_opt "VISUAL") with + | (Some ed, _) -> + ed + | (None, Some ed) -> + ed + | (None, None) when Sys.win32 -> (* TODO: I have no idea what I'm doing here *) "notepad.exe" | _ -> (* TODO: vi on MacOSX ? *) - "nano" in - Lwt_process.shell (ed ^ " " ^ tmp) in - (Lwt_process.open_process_none editor_cmd) # status >>= function + "nano" + in + Lwt_process.shell (ed ^ " " ^ tmp) + in + (Lwt_process.open_process_none editor_cmd)#status + >>= function | Unix.WEXITED 0 -> - reread () >>= fun json -> - delete () >>= fun () -> - Lwt.return json + reread () >>= fun json -> delete () >>= fun () -> Lwt.return json | Unix.WSIGNALED x | Unix.WSTOPPED x | Unix.WEXITED x -> let msg = Printf.sprintf "FAILED %d \n%!" x in - delete () >>= fun () -> - Lwt.return_error msg + delete () >>= fun () -> Lwt.return_error msg and reread () = (* finally reread the file *) - Lwt_io.(with_file ~mode:Input tmp (fun fp -> read fp)) >>= fun text -> + Lwt_io.(with_file ~mode:Input tmp (fun fp -> read fp)) + >>= fun text -> match Data_encoding.Json.from_string text with - | Ok r -> Lwt.return_ok r - | Error msg -> Lwt.return_error (Format.asprintf "bad input: %s" msg) + | Ok r -> + Lwt.return_ok r + | Error msg -> + Lwt.return_error (Format.asprintf "bad input: %s" msg) and delete () = (* and delete the temp file *) Lwt_unix.unlink tmp @@ -193,234 +216,260 @@ let editor_fill_in ?(show_optionals=true) schema = let rec count = let open RPC_description in function - | Empty -> 0 - | Dynamic _ -> 1 - | Static { services ; subdirs } -> + | Empty -> + 0 + | Dynamic _ -> + 1 + | Static {services; subdirs} -> let service = RPC_service.MethMap.cardinal services in let subdirs = match subdirs with - | None -> 0 + | None -> + 0 | Some (Suffixes subdirs) -> Resto.StringMap.fold (fun _ t r -> r + count t) subdirs 0 - | Some (Arg (_, subdir)) -> count subdir in + | Some (Arg (_, subdir)) -> + count subdir + in service + subdirs (*-- Commands ---------------------------------------------------------------*) let list url (cctxt : #Client_context.full) = let args = String.split '/' url in - RPC_description.describe cctxt - ~recurse:true args >>=? 
fun tree -> + RPC_description.describe cctxt ~recurse:true args + >>=? fun tree -> let open RPC_description in let collected_args = ref [] in let collect arg = if not (arg.RPC_arg.descr <> None && List.mem arg !collected_args) then - collected_args := arg :: !collected_args in + collected_args := arg :: !collected_args + in let display_paragraph ppf description = - Format.fprintf ppf "@, @[%a@]" + Format.fprintf + ppf + "@, @[%a@]" (fun ppf words -> List.iter (Format.fprintf ppf "%s@ ") words) (String.split ' ' description) in let display_arg ppf arg = match arg.RPC_arg.descr with - | None -> Format.fprintf ppf "%s" arg.RPC_arg.name + | None -> + Format.fprintf ppf "%s" arg.RPC_arg.name | Some descr -> Format.fprintf ppf "<%s>%a" arg.RPC_arg.name display_paragraph descr in let display_service ppf (_path, tpath, service) = - Format.fprintf ppf "- %s /%s" + Format.fprintf + ppf + "- %s /%s" (RPC_service.string_of_meth service.meth) (String.concat "/" tpath) ; match service.description with - | None | Some "" -> () - | Some description -> display_paragraph ppf description + | None | Some "" -> + () + | Some description -> + display_paragraph ppf description in let display_services ppf (_path, tpath, services) = Format.pp_print_list - (fun ppf (_,s) -> display_service ppf (_path, tpath, s)) + (fun ppf (_, s) -> display_service ppf (_path, tpath, s)) ppf (RPC_service.MethMap.bindings services) in let rec display ppf (path, tpath, tree) = match tree with - | Dynamic description -> begin + | Dynamic description -> ( Format.fprintf ppf "- /%s " (String.concat "/" tpath) ; match description with - | None | Some "" -> () - | Some description -> display_paragraph ppf description - end - | Empty -> () - | Static { services ; subdirs = None } -> + | None | Some "" -> + () + | Some description -> + display_paragraph ppf description ) + | Empty -> + () + | Static {services; subdirs = None} -> display_services ppf (path, tpath, services) - | Static { services ; subdirs = Some (Suffixes subdirs) } -> begin - match RPC_service.MethMap.cardinal services, Resto.StringMap.bindings subdirs with - | 0, [] -> () - | 0, [ n, solo ] -> - display ppf (path @ [ n ], tpath @ [ n ], solo) - | _, items when count tree >= 3 && path <> [] -> - Format.fprintf ppf "@[+ %s/@,%a@]" - (String.concat "/" path) (display_list tpath) items - | _, items when count tree >= 3 && path <> [] -> - Format.fprintf ppf "@[+ %s@,%a@,%a@]" - (String.concat "/" path) - display_services (path, tpath, services) - (display_list tpath) items - | 0, (n, t) :: items -> - Format.fprintf ppf "%a" - display (path @ [ n ], tpath @ [ n ], t) ; - List.iter - (fun (n, t) -> - Format.fprintf ppf "@,%a" - display (path @ [ n ], tpath @ [ n ], t)) - items - | _, items -> - display_services ppf (path, tpath, services) ; - List.iter - (fun (n, t) -> - Format.fprintf ppf "@,%a" - display (path @ [ n ], tpath @ [ n ], t)) - items - end - | Static { services ; subdirs = Some (Arg (arg, solo)) } + | Static {services; subdirs = Some (Suffixes subdirs)} -> ( + match + ( RPC_service.MethMap.cardinal services, + Resto.StringMap.bindings subdirs ) + with + | (0, []) -> + () + | (0, [(n, solo)]) -> + display ppf (path @ [n], tpath @ [n], solo) + | (_, items) when count tree >= 3 && path <> [] -> + Format.fprintf + ppf + "@[+ %s/@,%a@]" + (String.concat "/" path) + (display_list tpath) + items + | (_, items) when count tree >= 3 && path <> [] -> + Format.fprintf + ppf + "@[+ %s@,%a@,%a@]" + (String.concat "/" path) + display_services + (path, tpath, services) + 
(display_list tpath) + items + | (0, (n, t) :: items) -> + Format.fprintf ppf "%a" display (path @ [n], tpath @ [n], t) ; + List.iter + (fun (n, t) -> + Format.fprintf ppf "@,%a" display (path @ [n], tpath @ [n], t)) + items + | (_, items) -> + display_services ppf (path, tpath, services) ; + List.iter + (fun (n, t) -> + Format.fprintf ppf "@,%a" display (path @ [n], tpath @ [n], t)) + items ) + | Static {services; subdirs = Some (Arg (arg, solo))} when RPC_service.MethMap.cardinal services = 0 -> collect arg ; let name = Printf.sprintf "<%s>" arg.RPC_arg.name in - display ppf (path @ [ name ], tpath @ [ name ], solo) - | Static { services; - subdirs = Some (Arg (arg, solo)) } -> + display ppf (path @ [name], tpath @ [name], solo) + | Static {services; subdirs = Some (Arg (arg, solo))} -> collect arg ; display_services ppf (path, tpath, services) ; Format.fprintf ppf "@," ; let name = Printf.sprintf "<%s>" arg.RPC_arg.name in - display ppf (path @ [ name ], tpath @ [ name ], solo) + display ppf (path @ [name], tpath @ [name], solo) and display_list tpath = - Format.pp_print_list - (fun ppf (n,t) -> display ppf ([ n ], tpath @ [ n ], t)) + Format.pp_print_list (fun ppf (n, t) -> display ppf ([n], tpath @ [n], t)) in - cctxt#message "@ @[Available services:@ @ %a@]@." - display (args, args, tree) >>= fun () -> - if !collected_args <> [] then begin - cctxt#message "@,@[Dynamic parameter description:@ @ %a@]@." - (Format.pp_print_list display_arg) !collected_args >>= fun () -> - return_unit - end else return_unit - + cctxt#message + "@ @[Available services:@ @ %a@]@." + display + (args, args, tree) + >>= fun () -> + if !collected_args <> [] then + cctxt#message + "@,@[Dynamic parameter description:@ @ %a@]@." + (Format.pp_print_list display_arg) + !collected_args + >>= fun () -> return_unit + else return_unit let schema meth url (cctxt : #Client_context.full) = let args = String.split '/' url in let open RPC_description in - RPC_description.describe cctxt ~recurse:false args >>=? function - | Static { services ; _ } -> begin - match RPC_service.MethMap.find_opt meth services with - | None -> - cctxt#message - "No service found at this URL (but this is a valid prefix)\n%!" >>= fun () -> - return_unit - | Some ({ input = Some input ; output ; _ }) -> - let json = `O [ "input", Json_schema.to_json (fst input) ; - "output", Json_schema.to_json (fst output) ] in - cctxt#message "%a" Json_repr.(pp (module Ezjsonm)) json >>= fun () -> - return_unit - | Some ({ input = None ; output ; _ }) -> - let json = `O [ "output", Json_schema.to_json (fst output) ] in - cctxt#message "%a" Json_repr.(pp (module Ezjsonm)) json >>= fun () -> - return_unit - end + RPC_description.describe cctxt ~recurse:false args + >>=? function + | Static {services; _} -> ( + match RPC_service.MethMap.find_opt meth services with + | None -> + cctxt#message + "No service found at this URL (but this is a valid prefix)\n%!" + >>= fun () -> return_unit + | Some {input = Some input; output; _} -> + let json = + `O + [ ("input", Json_schema.to_json (fst input)); + ("output", Json_schema.to_json (fst output)) ] + in + cctxt#message "%a" Json_repr.(pp (module Ezjsonm)) json + >>= fun () -> return_unit + | Some {input = None; output; _} -> + let json = `O [("output", Json_schema.to_json (fst output))] in + cctxt#message "%a" Json_repr.(pp (module Ezjsonm)) json + >>= fun () -> return_unit ) | _ -> cctxt#message - "No service found at this URL (but this is a valid prefix)\n%!" 
>>= fun () -> - return_unit + "No service found at this URL (but this is a valid prefix)\n%!" + >>= fun () -> return_unit let format binary meth url (cctxt : #Client_context.io_rpcs) = let args = String.split '/' url in let open RPC_description in let pp = - if binary then - (fun ppf (_, schema) -> Data_encoding.Binary_schema.pp ppf schema) - else - (fun ppf (schema, _) -> Json_schema.pp ppf schema) in - RPC_description.describe cctxt ~recurse:false args >>=? function - | Static { services ; _ } -> begin - match RPC_service.MethMap.find_opt meth services with - | None -> - cctxt#message - "No service found at this URL (but this is a valid prefix)\n%!" >>= fun () -> - return_unit - | Some ({ input = Some input ; output ; _ }) -> - cctxt#message - "@[\ - @[Input format:@,%a@]@,\ - @[Output format:@,%a@]@,\ - @]" - pp input - pp output >>= fun () -> - return_unit - | Some ({ input = None ; output ; _ }) -> - cctxt#message - "@[\ - @[Output format:@,%a@]@,\ - @]" - pp output >>= fun () -> - return_unit - end + if binary then fun ppf (_, schema) -> + Data_encoding.Binary_schema.pp ppf schema + else fun ppf (schema, _) -> Json_schema.pp ppf schema + in + RPC_description.describe cctxt ~recurse:false args + >>=? function + | Static {services; _} -> ( + match RPC_service.MethMap.find_opt meth services with + | None -> + cctxt#message + "No service found at this URL (but this is a valid prefix)\n%!" + >>= fun () -> return_unit + | Some {input = Some input; output; _} -> + cctxt#message + "@[@[Input format:@,%a@]@,@[Output format:@,%a@]@,@]" + pp + input + pp + output + >>= fun () -> return_unit + | Some {input = None; output; _} -> + cctxt#message "@[@[Output format:@,%a@]@,@]" pp output + >>= fun () -> return_unit ) | _ -> cctxt#message - "No service found at this URL (but this is a valid prefix)\n%!" >>= fun () -> - return_unit + "No service found at this URL (but this is a valid prefix)\n%!" + >>= fun () -> return_unit -let fill_in ?(show_optionals=true) schema = +let fill_in ?(show_optionals = true) schema = let open Json_schema in match (root schema).kind with - | Null -> Lwt.return_ok `Null - | Any | Object { properties = [] ; _ } -> Lwt.return_ok (`O []) - | _ -> editor_fill_in ~show_optionals schema + | Null -> + Lwt.return_ok `Null + | Any | Object {properties = []; _} -> + Lwt.return_ok (`O []) + | _ -> + editor_fill_in ~show_optionals schema let display_answer (cctxt : #Client_context.full) = function | `Ok json -> - cctxt#message "%a" - Json_repr.(pp (module Ezjsonm)) json >>= fun () -> - return_unit + cctxt#message "%a" Json_repr.(pp (module Ezjsonm)) json + >>= fun () -> return_unit | `Not_found _ -> - cctxt#message "No service found at this URL\n%!" >>= fun () -> - return_unit + cctxt#message "No service found at this URL\n%!" + >>= fun () -> return_unit | `Error (Some json) -> - cctxt#message "@[Command failed :@[ %a@]@]@." - (Format.pp_print_list Error_monad.pp) + cctxt#message + "@[Command failed :@[ %a@]@]@." + (Format.pp_print_list Error_monad.pp) (Data_encoding.Json.destruct - (Data_encoding.list Error_monad.error_encoding) json) >>= fun () -> - return_unit + (Data_encoding.list Error_monad.error_encoding) + json) + >>= fun () -> return_unit | `Error None | `Unauthorized _ | `Forbidden _ | `Conflict _ -> - cctxt#message "Unexpected server answer\n%!" >>= fun () -> - return_unit + cctxt#message "Unexpected server answer\n%!" 
>>= fun () -> return_unit let call meth raw_url (cctxt : #Client_context.full) = let uri = Uri.of_string raw_url in let args = String.split_path (Uri.path uri) in - RPC_description.describe cctxt ~recurse:false args >>=? function - | Static { services ; _ } -> begin - match RPC_service.MethMap.find_opt meth services with - | None -> - cctxt#message - "No service found at this URL with this method \ - (but this is a valid prefix)\n%!" >>= fun () -> - return_unit - | Some ({ input = None ; _ }) -> - cctxt#generic_json_call meth uri >>=? - display_answer cctxt - | Some ({ input = Some input ; _ }) -> - fill_in ~show_optionals:false (fst input) >>= function - | Error msg -> - cctxt#error "%s" msg >>= fun () -> - return_unit - | Ok json -> - cctxt#generic_json_call meth ~body:json uri >>=? - display_answer cctxt - end + RPC_description.describe cctxt ~recurse:false args + >>=? function + | Static {services; _} -> ( + match RPC_service.MethMap.find_opt meth services with + | None -> + cctxt#message + "No service found at this URL with this method (but this is a valid \ + prefix)\n\ + %!" + >>= fun () -> return_unit + | Some {input = None; _} -> + cctxt#generic_json_call meth uri >>=? display_answer cctxt + | Some {input = Some input; _} -> ( + fill_in ~show_optionals:false (fst input) + >>= function + | Error msg -> + cctxt#error "%s" msg >>= fun () -> return_unit + | Ok json -> + cctxt#generic_json_call meth ~body:json uri + >>=? display_answer cctxt ) ) | _ -> - cctxt#message "No service found at this URL\n%!" >>= fun () -> - return_unit + cctxt#message "No service found at this URL\n%!" + >>= fun () -> return_unit -let call_with_json meth raw_url json (cctxt: #Client_context.full) = +let call_with_json meth raw_url json (cctxt : #Client_context.full) = let uri = Uri.of_string raw_url in match Data_encoding.Json.from_string json with | exception Assert_failure _ -> @@ -428,136 +477,148 @@ let call_with_json meth raw_url json (cctxt: #Client_context.full) = cctxt#error "Failed to parse the provided json: unwrapped JSON value.\n%!" | Error err -> - cctxt#error - "Failed to parse the provided json: %s\n%!" - err + cctxt#error "Failed to parse the provided json: %s\n%!" err | Ok body -> - cctxt#generic_json_call meth ~body uri >>=? - display_answer cctxt - -let call_with_file_or_json meth url maybe_file (cctxt: #Client_context.full) = - begin - match TzString.split ':' ~limit:1 maybe_file with - | [ "file" ; filename] -> - (* Mostly copied from src/client/client_aliases.ml *) - Lwt.catch - (fun () -> - Lwt_io.(with_file ~mode:Input filename read) >>= fun content -> - return content) - (fun exn -> - failwith - "cannot read file (%s)" (Printexc.to_string exn)) - | _ -> return maybe_file - end >>=? fun json -> - call_with_json meth url json cctxt + cctxt#generic_json_call meth ~body uri >>=? display_answer cctxt + +let call_with_file_or_json meth url maybe_file (cctxt : #Client_context.full) = + ( match TzString.split ':' ~limit:1 maybe_file with + | ["file"; filename] -> + (* Mostly copied from src/client/client_aliases.ml *) + Lwt.catch + (fun () -> + Lwt_io.(with_file ~mode:Input filename read) + >>= fun content -> return content) + (fun exn -> failwith "cannot read file (%s)" (Printexc.to_string exn)) + | _ -> + return maybe_file ) + >>=? 
fun json -> call_with_json meth url json cctxt let meth_params ?(name = "HTTP method") ?(desc = "") params = - param ~name ~desc - (parameter ~autocomplete:(fun _ -> - return @@ - List.map String.lowercase_ascii @@ - List.map Resto.string_of_meth @@ - [ `GET ; `POST ; `DELETE ; `PUT ; `PATCH ]) - (fun _ name -> - match Resto.meth_of_string (String.uppercase_ascii name) with - | None -> failwith "Unknown HTTP method: %s" name - | Some meth -> return meth)) + param + ~name + ~desc + (parameter + ~autocomplete:(fun _ -> + return + @@ List.map String.lowercase_ascii + @@ List.map Resto.string_of_meth + @@ [`GET; `POST; `DELETE; `PUT; `PATCH]) + (fun _ name -> + match Resto.meth_of_string (String.uppercase_ascii name) with + | None -> + failwith "Unknown HTTP method: %s" name + | Some meth -> + return meth)) params -let group = - { Clic.name = "rpc" ; - title = "Commands for the low level RPC layer" } - -let commands = [ - - command ~group - ~desc: "List RPCs under a given URL prefix.\n\ - Some parts of the RPC service hierarchy depend on parameters,\n\ - they are marked by a suffix ``.\n\ - You can list these sub-hierarchies by providing a concrete URL prefix \ - whose arguments are set to a valid value." - no_options - (prefixes [ "rpc" ; "list" ] @@ string ~name:"url" ~desc: "the URL prefix" @@ stop) - (fun () -> list) ; - - command ~group - ~desc: "Alias to `rpc list /`." - no_options - (prefixes [ "rpc" ; "list" ] @@ stop) - (fun () -> (list "/")); - - command ~group - ~desc: "Get the input and output JSON schemas of an RPC." - no_options - (prefixes [ "rpc" ; "schema" ] @@ - meth_params @@ - string ~name: "url" ~desc: "the RPC url" @@ - stop) - (fun () -> schema) ; - - command ~group - ~desc: "Get the humanoid readable input and output formats of an RPC." - (args1 - (switch - ~doc:"Binary format" - ~short:'b' - ~long:"binary" ())) - (prefixes [ "rpc" ; "format"] @@ - meth_params @@ - string ~name: "url" ~desc: "the RPC URL" @@ - stop) - format ; - - command ~group - ~desc: "Call an RPC with the GET method." - no_options - (prefixes [ "rpc" ; "get" ] @@ string ~name: "url" ~desc: "the RPC URL" @@ stop) - (fun () -> call `GET) ; - - command ~group - ~desc: "Call an RPC with the POST method.\n\ - It invokes $EDITOR if input data is needed." - no_options - (prefixes [ "rpc" ; "post" ] @@ string ~name: "url" ~desc: "the RPC URL" @@ stop) - (fun () -> call `POST) ; - - command ~group - ~desc: "Call an RPC with the POST method, \ - \ providing input data via the command line." - no_options - (prefixes [ "rpc" ; "post" ] @@ string ~name: "url" ~desc: "the RPC URL" - @@ prefix "with" - @@ string ~name:"input" - ~desc:"the raw JSON input to the RPC\n\ +let group = {Clic.name = "rpc"; title = "Commands for the low level RPC layer"} + +let commands = + [ command + ~group + ~desc: + "List RPCs under a given URL prefix.\n\ + Some parts of the RPC service hierarchy depend on parameters,\n\ + they are marked by a suffix ``.\n\ + You can list these sub-hierarchies by providing a concrete URL \ + prefix whose arguments are set to a valid value." + no_options + ( prefixes ["rpc"; "list"] + @@ string ~name:"url" ~desc:"the URL prefix" + @@ stop ) + (fun () -> list); + command + ~group + ~desc:"Alias to `rpc list /`." + no_options + (prefixes ["rpc"; "list"] @@ stop) + (fun () -> list "/"); + command + ~group + ~desc:"Get the input and output JSON schemas of an RPC." 
+ no_options + ( prefixes ["rpc"; "schema"] + @@ meth_params + @@ string ~name:"url" ~desc:"the RPC url" + @@ stop ) + (fun () -> schema); + command + ~group + ~desc:"Get the humanoid readable input and output formats of an RPC." + (args1 (switch ~doc:"Binary format" ~short:'b' ~long:"binary" ())) + ( prefixes ["rpc"; "format"] + @@ meth_params + @@ string ~name:"url" ~desc:"the RPC URL" + @@ stop ) + format; + command + ~group + ~desc:"Call an RPC with the GET method." + no_options + ( prefixes ["rpc"; "get"] + @@ string ~name:"url" ~desc:"the RPC URL" + @@ stop ) + (fun () -> call `GET); + command + ~group + ~desc: + "Call an RPC with the POST method.\n\ + It invokes $EDITOR if input data is needed." + no_options + ( prefixes ["rpc"; "post"] + @@ string ~name:"url" ~desc:"the RPC URL" + @@ stop ) + (fun () -> call `POST); + command + ~group + ~desc: + "Call an RPC with the POST method, providing input data via the \ + command line." + no_options + ( prefixes ["rpc"; "post"] + @@ string ~name:"url" ~desc:"the RPC URL" + @@ prefix "with" + @@ string + ~name:"input" + ~desc: + "the raw JSON input to the RPC\n\ For instance, use `{}` to send the empty document.\n\ Alternatively, use `file:path` to read the JSON data from a file." - @@ stop) - (fun () -> call_with_file_or_json `POST) ; - - command ~group - ~desc: "Call an RPC with the PUT method.\n\ - It invokes $EDITOR if input data is needed." - no_options - (prefixes [ "rpc" ; "put" ] @@ string ~name: "url" ~desc: "the RPC URL" @@ stop) - (fun () -> call `PUT) ; - - command ~group - ~desc: "Call an RPC with the PUT method, \ - \ providing input data via the command line." - no_options - (prefixes [ "rpc" ; "put" ] @@ string ~name: "url" ~desc: "the RPC URL" - @@ prefix "with" - @@ string ~name:"input" - ~desc:"the raw JSON input to the RPC\n\ + @@ stop ) + (fun () -> call_with_file_or_json `POST); + command + ~group + ~desc: + "Call an RPC with the PUT method.\n\ + It invokes $EDITOR if input data is needed." + no_options + ( prefixes ["rpc"; "put"] + @@ string ~name:"url" ~desc:"the RPC URL" + @@ stop ) + (fun () -> call `PUT); + command + ~group + ~desc: + "Call an RPC with the PUT method, providing input data via the \ + command line." + no_options + ( prefixes ["rpc"; "put"] + @@ string ~name:"url" ~desc:"the RPC URL" + @@ prefix "with" + @@ string + ~name:"input" + ~desc: + "the raw JSON input to the RPC\n\ For instance, use `{}` to send the empty document.\n\ Alternatively, use `file:path` to read the JSON data from a file." - @@ stop) - (fun () -> call_with_file_or_json `PUT) ; - - command ~group - ~desc: "Call an RPC with the DELETE method." - no_options - (prefixes [ "rpc" ; "delete" ] @@ string ~name: "url" ~desc: "the RPC URL" @@ stop) - (fun () -> call `DELETE) ; - -] + @@ stop ) + (fun () -> call_with_file_or_json `PUT); + command + ~group + ~desc:"Call an RPC with the DELETE method." 
+ no_options + ( prefixes ["rpc"; "delete"] + @@ string ~name:"url" ~desc:"the RPC URL" + @@ stop ) + (fun () -> call `DELETE) ] diff --git a/src/bin_client/client_rpc_commands.mli b/src/bin_client/client_rpc_commands.mli index 1aa7d5a9c1fbfa5e42a6b2a93e9b2810e092e2bc..b1e55b8e7918784796be2306e448a9566ee7041f 100644 --- a/src/bin_client/client_rpc_commands.mli +++ b/src/bin_client/client_rpc_commands.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val commands: Client_commands.command list +val commands : Client_commands.command list diff --git a/src/bin_client/main_admin.ml b/src/bin_client/main_admin.ml index caba560664ecebbf25c063cad625152b83129190..6a880a2ea9df872598945e3e09668ae019f4e682 100644 --- a/src/bin_client/main_admin.ml +++ b/src/bin_client/main_admin.ml @@ -26,11 +26,11 @@ let select_commands _ _ = return (List.flatten - [ Client_report_commands.commands () ; - Client_admin_commands.commands () ; - Client_p2p_commands.commands () ; - Client_protocols_commands.commands () ; - Client_rpc_commands.commands ; + [ Client_report_commands.commands (); + Client_admin_commands.commands (); + Client_p2p_commands.commands (); + Client_protocols_commands.commands (); + Client_rpc_commands.commands; Client_event_logging_commands.commands () ]) let () = Client_main_run.run (module Client_config) ~select_commands diff --git a/src/bin_client/main_client.ml b/src/bin_client/main_client.ml index 6ba9f9842fa078dfe04a68b1c1878aebf247a502..f4907d79969533a600e6d33626df675195cec6c1 100644 --- a/src/bin_client/main_client.ml +++ b/src/bin_client/main_client.ml @@ -28,30 +28,36 @@ open Client_config let disable_disclaimer = match Sys.getenv_opt "TEZOS_CLIENT_UNSAFE_DISABLE_DISCLAIMER" with - | Some ("yes" | "y" | "YES" | "Y") -> true - | _ -> false + | Some ("yes" | "y" | "YES" | "Y") -> + true + | _ -> + false let zeronet () = if not disable_disclaimer then Format.eprintf - "@[@{@{Warning@}@}@,@,\ + "@[<v 2>@{<warning>@{<title>Warning@}@}@,\ + @,\ \ This is @{<warning>NOT@} the Tezos Mainnet.@,\ @,\ \ The node you are connecting to claims to be running on the@,\ \ @{<warning>Tezos Zeronet DEVELOPMENT NETWORK@}.@,\ \ Do @{<warning>NOT@} use your fundraiser keys on this network.@,\ - Zeronet is a testing network, with free tokens and frequent resets.@]@\n@." + Zeronet is a testing network, with free tokens and frequent resets.@]@\n\ + @." let alphanet () = if not disable_disclaimer then Format.eprintf - "@[<v 2>@{<warning>@{<title>Warning@}@}@,@,\ + "@[<v 2>@{<warning>@{<title>Warning@}@}@,\ + @,\ \ This is @{<warning>NOT@} the Tezos Mainnet.@,\ @,\ \ The node you are connecting to claims to be running on the@,\ \ @{<warning>Tezos Alphanet DEVELOPMENT NETWORK.@}@,\ \ Do @{<warning>NOT@} use your fundraiser keys on this network.@,\ - \ Alphanet is a testing network, with free tokens.@]@\n@." + \ Alphanet is a testing network, with free tokens.@]@\n\ + @." let mainnet () = if not disable_disclaimer then @@ -62,76 +68,90 @@ let mainnet () = with usage of the Tezos network. Users should do their@,\ own research to determine if Tezos is the appropriate@,\ platform for their needs and should apply judgement and@,\ - care in their network interactions.@]@\n@." + care in their network interactions.@]@\n\ + @." 
let sandbox () = if not disable_disclaimer then Format.eprintf - "@[<v 2>@{<warning>@{<title>Warning@}@}@,@,\ + "@[<v 2>@{<warning>@{<title>Warning@}@}@,\ + @,\ \ The node you are connecting to claims to be running in a@,\ \ @{<warning>Tezos TEST SANDBOX@}.@,\ \ Do @{<warning>NOT@} use your fundraiser keys on this network.@,\ - You should not see this message if you are not a developer.@]@\n@." + You should not see this message if you are not a developer.@]@\n\ + @." let check_network ctxt = - Shell_services.P2p.version ctxt >>= function - | Error _ -> Lwt.return_none + Shell_services.P2p.version ctxt + >>= function + | Error _ -> + Lwt.return_none | Ok version -> let has_prefix prefix = - String.has_prefix ~prefix (version.chain_name :> string) in - if has_prefix "SANDBOXED" then begin + String.has_prefix ~prefix (version.chain_name :> string) + in + if has_prefix "SANDBOXED" then ( sandbox () ; - Lwt.return_some `Sandbox - end else if has_prefix "TEZOS_ZERONET" then begin + Lwt.return_some `Sandbox ) + else if has_prefix "TEZOS_ZERONET" then ( zeronet () ; - Lwt.return_some `Zeronet - end else if has_prefix "TEZOS_ALPHANET" then begin + Lwt.return_some `Zeronet ) + else if has_prefix "TEZOS_ALPHANET" then ( alphanet () ; - Lwt.return_some `Alphanet - end else if has_prefix "TEZOS_BETANET" || has_prefix "TEZOS_MAINNET" then begin + Lwt.return_some `Alphanet ) + else if has_prefix "TEZOS_BETANET" || has_prefix "TEZOS_MAINNET" then ( mainnet () ; - Lwt.return_some `Mainnet - end else - Lwt.return_none + Lwt.return_some `Mainnet ) + else Lwt.return_none let get_commands_for_version ctxt network chain block protocol = - Shell_services.Blocks.protocols ctxt ~chain ~block () >>= function - | Ok { next_protocol = version ; _ } -> begin - match protocol with - | None -> - return (Some version, Client_commands.commands_for_version version network) - | Some given_version -> begin - if not (Protocol_hash.equal version given_version) then - Format.eprintf - "@[<v 2>@{<warning>@{<title>Warning@}@}@,\ - The protocol provided via `--protocol` (%a)@,\ - is not the one retrieved from the node (%a).@]@\n@." - Protocol_hash.pp_short given_version - Protocol_hash.pp_short version ; - return (Some version, Client_commands.commands_for_version given_version network) - end - end - | Error errs -> begin - match protocol with - | None -> begin + Shell_services.Blocks.protocols ctxt ~chain ~block () + >>= function + | Ok {next_protocol = version; _} -> ( + match protocol with + | None -> + return + (Some version, Client_commands.commands_for_version version network) + | Some given_version -> + if not (Protocol_hash.equal version given_version) then Format.eprintf "@[<v 2>@{<warning>@{<title>Warning@}@}@,\ - Failed to acquire the protocol version from the node@,%a@]@\n@." - (Format.pp_print_list pp) errs ; - return (None, []) - end - | Some version -> - return (Some version, Client_commands.commands_for_version version network) - end + The protocol provided via `--protocol` (%a)@,\ + is not the one retrieved from the node (%a).@]@\n\ + @." + Protocol_hash.pp_short + given_version + Protocol_hash.pp_short + version ; + return + ( Some version, + Client_commands.commands_for_version given_version network ) ) + | Error errs -> ( + match protocol with + | None -> + Format.eprintf + "@[<v 2>@{<warning>@{<title>Warning@}@}@,\ + Failed to acquire the protocol version from the node@,\ + %a@]@\n\ + @." 
+ (Format.pp_print_list pp) + errs ; + return (None, []) + | Some version -> + return + (Some version, Client_commands.commands_for_version version network) + ) -let select_commands ctxt { chain ; block ; protocol ; _ } = - check_network ctxt >>= fun network -> - get_commands_for_version - ctxt network chain block protocol >>|? fun (_, commands_for_version) -> - Client_rpc_commands.commands @ - Tezos_signer_backends.Ledger.commands () @ - Client_keys_commands.commands network @ - Client_helpers_commands.commands () @ - commands_for_version +let select_commands ctxt {chain; block; protocol; _} = + check_network ctxt + >>= fun network -> + get_commands_for_version ctxt network chain block protocol + >>|? fun (_, commands_for_version) -> + Client_rpc_commands.commands + @ Tezos_signer_backends.Ledger.commands () + @ Client_keys_commands.commands network + @ Client_helpers_commands.commands () + @ commands_for_version let () = Client_main_run.run (module Client_config) ~select_commands diff --git a/src/bin_client/test/proto_test_injection/main.ml b/src/bin_client/test/proto_test_injection/main.ml index f8a11fd7f366e4cd01f60e74524e7d5cedea78b1..d6e5bb61cae83252ff8fcdac303b83a43637f51c 100644 --- a/src/bin_client/test/proto_test_injection/main.ml +++ b/src/bin_client/test/proto_test_injection/main.ml @@ -24,20 +24,25 @@ (*****************************************************************************) type block_header_data = MBytes.t + type block_header = { - shell : Block_header.shell_header ; - protocol_data : block_header_data ; + shell : Block_header.shell_header; + protocol_data : block_header_data } + let block_header_data_encoding = Data_encoding.(obj1 (req "random_data" Variable.bytes)) type block_header_metadata = unit + let block_header_metadata_encoding = Data_encoding.unit type operation_data = unit + let operation_data_encoding = Data_encoding.unit type operation_receipt = unit + let operation_receipt_encoding = Data_encoding.unit let operation_data_and_receipt_encoding = @@ -47,68 +52,58 @@ let operation_data_and_receipt_encoding = Data_encoding.unit type operation = { - shell: Operation.shell_header ; - protocol_data: operation_data ; + shell : Operation.shell_header; + protocol_data : operation_data } let max_block_length = 42 + let max_operation_data_length = 42 + let validation_passes = [] + let acceptable_passes _op = [] let compare_operations _ _ = 0 -type validation_state = { - context : Context.t ; - fitness : Int64.t ; -} +type validation_state = {context : Context.t; fitness : Int64.t} -let current_context { context } = - return context +let current_context {context} = return context module Fitness = struct - type error += Invalid_fitness + type error += Invalid_fitness2 let int64_to_bytes i = let b = MBytes.create 8 in - MBytes.set_int64 b 0 i; - b + MBytes.set_int64 b 0 i ; b let int64_of_bytes b = - if Compare.Int.(MBytes.length b <> 8) then - fail Invalid_fitness2 - else - return (MBytes.get_int64 b 0) + if Compare.Int.(MBytes.length b <> 8) then fail Invalid_fitness2 + else return (MBytes.get_int64 b 0) - let from_int64 fitness = - [ int64_to_bytes fitness ] + let from_int64 fitness = [int64_to_bytes fitness] let to_int64 = function - | [ fitness ] -> int64_of_bytes fitness - | [] -> return 0L - | _ -> fail Invalid_fitness - - let get { fitness } = fitness - + | [fitness] -> + int64_of_bytes fitness + | [] -> + return 0L + | _ -> + fail Invalid_fitness + + let get {fitness} = fitness end -let begin_application - ~chain_id:_ - ~predecessor_context:context - 
~predecessor_timestamp:_ - ~predecessor_fitness:_ - (raw_block : block_header) = - Fitness.to_int64 raw_block.shell.fitness >>=? fun fitness -> - return { context ; fitness } +let begin_application ~chain_id:_ ~predecessor_context:context + ~predecessor_timestamp:_ ~predecessor_fitness:_ (raw_block : block_header) + = + Fitness.to_int64 raw_block.shell.fitness + >>=? fun fitness -> return {context; fitness} -let begin_partial_application - ~chain_id - ~ancestor_context - ~predecessor_timestamp - ~predecessor_fitness - raw_block = +let begin_partial_application ~chain_id ~ancestor_context + ~predecessor_timestamp ~predecessor_fitness raw_block = begin_application ~chain_id ~predecessor_context:ancestor_context @@ -116,35 +111,37 @@ let begin_partial_application ~predecessor_fitness raw_block -let begin_construction - ~chain_id:_ - ~predecessor_context:context - ~predecessor_timestamp:_ - ~predecessor_level:_ - ~predecessor_fitness:pred_fitness - ~predecessor:_ - ~timestamp:_ +let begin_construction ~chain_id:_ ~predecessor_context:context + ~predecessor_timestamp:_ ~predecessor_level:_ + ~predecessor_fitness:pred_fitness ~predecessor:_ ~timestamp:_ ?protocol_data:_ () = - Fitness.to_int64 pred_fitness >>=? fun pred_fitness -> + Fitness.to_int64 pred_fitness + >>=? fun pred_fitness -> let fitness = Int64.succ pred_fitness in - return { context ; fitness } + return {context; fitness} -let apply_operation ctxt _ = - return (ctxt, ()) +let apply_operation ctxt _ = return (ctxt, ()) let finalize_block ctxt = let fitness = Fitness.get ctxt in let message = Some (Format.asprintf "fitness <- %Ld" fitness) in let fitness = Fitness.from_int64 fitness in - return ({ Updater.message ; context = ctxt.context ; fitness ; - max_operations_ttl = 0 ; last_allowed_fork_level = 0l ; - }, ()) + return + ( { Updater.message; + context = ctxt.context; + fitness; + max_operations_ttl = 0; + last_allowed_fork_level = 0l }, + () ) let rpc_services = RPC_directory.empty let init ctxt block_header = let fitness = block_header.Block_header.fitness in let message = None in - return { Updater.message ; context = ctxt ; fitness ; - max_operations_ttl = 0 ; last_allowed_fork_level = 0l ; - } + return + { Updater.message; + context = ctxt; + fitness; + max_operations_ttl = 0; + last_allowed_fork_level = 0l } diff --git a/src/bin_flextesa/command_accusations.ml b/src/bin_flextesa/command_accusations.ml index decfaec6919be4343d560aee6ab9ac5900d3bfd5..5e14cfb52d3d9a3e18967f3081545561217ff0b3 100644 --- a/src/bin_flextesa/command_accusations.ml +++ b/src/bin_flextesa/command_accusations.ml @@ -21,7 +21,7 @@ let little_mesh_with_bakers ?base_port ?generate_kiln_config state ; bootstrap_accounts= List.map d.bootstrap_accounts ~f:(fun (n, v) -> if List.exists bakers ~f:(fun baker -> n = fst baker) then (n, v) - else (n, 1_000L) ) } + else (n, 1_000L)) } , bakers ) in let net_size = 3 in @@ -72,14 +72,14 @@ let little_mesh_with_bakers ?base_port ?generate_kiln_config state ~sandbox_json:(Tezos_protocol.sandbox_path ~config:state protocol) ~nodes: (List.map all_nodes ~f:(fun {Tezos_node.rpc_port; _} -> - sprintf "http://localhost:%d" rpc_port )) + sprintf "http://localhost:%d" rpc_port)) ~bakers: (List.map protocol.Tezos_protocol.bootstrap_accounts ~f:(fun (account, _) -> - Tezos_protocol.Account.(name account, pubkey_hash account) )) + Tezos_protocol.Account.(name account, pubkey_hash account))) ~network_string:network_id ~node_exec ~client_exec >>= fun () -> - return EF.(wf "Kiln was configured at `%s`" kiln_config.path) ) + 
return EF.(wf "Kiln was configured at `%s`" kiln_config.path)) >>= fun kiln_info_opt -> let bake msg baker = Tezos_client.Keyed.bake state baker msg in List.fold @@ -90,7 +90,7 @@ let little_mesh_with_bakers ?base_port ?generate_kiln_config state >>= fun () -> bake (sprintf "first bakes: [%d/%d]" (n + 1) (starting_level - 1)) - baker_0 ) + baker_0) >>= fun () -> Test_scenario.Queries.wait_for_all_levels_to_be state ~attempts:default_attempts ~seconds:8. all_nodes (`Equal_to starting_level) @@ -114,13 +114,13 @@ let wait_for_operation_in_mempools state ~nodes:all_nodes ~kind ~client_exec >>= fun prev -> let client = Tezos_client.of_node node ~exec:client_exec in Tezos_client.mempool_has_operation state ~client ~kind - >>= fun client_result -> return (combine client_result prev) ) + >>= fun client_result -> return (combine client_result prev)) >>= function | true -> return (`Done ()) | false -> return (`Not_done - (sprintf "Waiting for %S to show up in the mempool" kind)) ) + (sprintf "Waiting for %S to show up in the mempool" kind))) let simple_double_baking ~starting_level ?generate_kiln_config ~state ~base_port node_exec client_exec () = @@ -137,7 +137,7 @@ let simple_double_baking ~starting_level ?generate_kiln_config ~state kill_nth 2 >>= fun () -> Loop.n_times number_of_lonely_bakes (fun _ -> - Tezos_client.Keyed.bake state baker_0 "Bake-on-0" ) + Tezos_client.Keyed.bake state baker_0 "Bake-on-0") >>= fun () -> Tezos_client.get_block_header state ~client:client_0 `Head >>= fun baking_0_header -> @@ -153,7 +153,7 @@ let simple_double_baking ~starting_level ?generate_kiln_config ~state restart_nth 2 >>= fun () -> Loop.n_times number_of_lonely_bakes (fun _ -> - Tezos_client.Keyed.bake state baker_1 "Bake-on-1" ) + Tezos_client.Keyed.bake state baker_1 "Bake-on-1") >>= fun () -> Tezos_client.get_block_header state ~client:client_1 `Head >>= fun baking_1_header -> @@ -221,7 +221,7 @@ let simple_double_baking ~starting_level ?generate_kiln_config ~state return (`Not_done (sprintf "Waiting for accusation to show up in block %d" - last_level)) ) + last_level))) >>= fun () -> say state EF.(af "Test done.") let find_endorsement_in_mempool state ~client = @@ -230,10 +230,10 @@ let find_endorsement_in_mempool state ~client = Jqo.field o ~k:"contents" |> Jqo.list_exists ~f:(fun op -> (* Dbg.e EF.(ef_json "op" op) ; *) - Jqo.field op ~k:"kind" = `String "endorsement" ) ) + Jqo.field op ~k:"kind" = `String "endorsement")) >>= function | None -> return (`Not_done (sprintf "No endorsement so far")) - | Some e -> return (`Done e) ) + | Some e -> return (`Done e)) let simple_double_endorsement ~starting_level ?generate_kiln_config ~state ~base_port node_exec client_exec () = @@ -358,7 +358,7 @@ let simple_double_endorsement ~starting_level ?generate_kiln_config ~state return (`Not_done (sprintf "Waiting for accusation to show up in block %d" - last_level)) ) + last_level))) >>= fun () -> say state EF.(af "Test done.") let with_accusers ~state ~base_port node_exec accuser_exec client_exec () = @@ -373,7 +373,7 @@ let with_accusers ~state ~base_port node_exec accuser_exec client_exec () = time_between_blocks= [block_interval; block_interval * 2] ; bootstrap_accounts= List.map d.bootstrap_accounts ~f:(fun (n, v) -> - if n = fst baker then (n, v) else (n, 1_000L) ) } + if n = fst baker then (n, v) else (n, 1_000L)) } , baker ) in let topology = @@ -428,7 +428,7 @@ let with_accusers ~state ~base_port node_exec accuser_exec client_exec () = pm >>= fun () -> Tezos_client.Keyed.bake state baker_0 - (sprintf 
"first bakes: [%d/%d]" (n + 1) (starting_level - 1)) ) + (sprintf "first bakes: [%d/%d]" (n + 1) (starting_level - 1))) >>= fun () -> Test_scenario.Queries.wait_for_all_levels_to_be state ~attempts:default_attempts ~seconds:8. all_nodes (`Equal_to starting_level) @@ -455,12 +455,12 @@ let with_accusers ~state ~base_port node_exec accuser_exec client_exec () = (ocaml_string_list res#out)) in List_sequential.iter intermediary_nodes ~f:(fun x -> - Helpers.kill_node state x ) + Helpers.kill_node state x) >>= fun () -> let kill_all_but nodes iths = List_sequential.iteri nodes ~f:(fun ith n -> if List.mem iths ith ~equal:Int.equal then return () - else Helpers.kill_node state n ) + else Helpers.kill_node state n) in let kill_nth_node nodes nth = Helpers.kill_node state @@ -485,7 +485,7 @@ let with_accusers ~state ~base_port node_exec accuser_exec client_exec () = transfer "node0 only alive" client_0 >>= fun () -> Loop.n_times number_of_lonely_bakes (fun n -> - Tezos_client.Keyed.bake state baker_0 (sprintf "n0 only alive: %d" n) ) + Tezos_client.Keyed.bake state baker_0 (sprintf "n0 only alive: %d" n)) >>= fun () -> get_block_header ~client:client_0 `Head >>= fun baking_0_header -> @@ -500,7 +500,7 @@ let with_accusers ~state ~base_port node_exec accuser_exec client_exec () = transfer "node1 only one alive" client_1 >>= fun () -> Loop.n_times number_of_lonely_bakes (fun _ -> - Tezos_client.Keyed.bake state baker_1 "after transfer" ) + Tezos_client.Keyed.bake state baker_1 "after transfer") >>= fun () -> get_block_header ~client:client_1 `Head >>= fun baking_1_header -> @@ -512,7 +512,7 @@ let with_accusers ~state ~base_port node_exec accuser_exec client_exec () = ; af "Node 1 transfered"; af "Node 1 baked"; af "Node 1 was killed" ] >>= fun () -> List.fold ~init:(return ()) intermediary_nodes ~f:(fun prev x -> - prev >>= fun () -> Helpers.restart_node state ~client_exec x ) + prev >>= fun () -> Helpers.restart_node state ~client_exec x) >>= fun () -> let node_0 = List.nth_exn mesh_nodes 0 in let except_0 l = List.filter l ~f:Tezos_node.(fun n -> n.id <> node_0.id) in @@ -536,13 +536,13 @@ let with_accusers ~state ~base_port node_exec accuser_exec client_exec () = let client = Tezos_client.of_node node ~exec:client_exec in Tezos_client.mempool_has_operation state ~client ~kind:"double_baking_evidence" - >>= fun client_result -> return (client_result || prev) ) + >>= fun client_result -> return (client_result || prev)) >>= function | true -> return (`Done ()) | false -> return (`Not_done - (sprintf "Waiting for accusation to show up in the mempool")) ) + (sprintf "Waiting for accusation to show up in the mempool"))) >>= fun () -> Tezos_client.Keyed.bake state baker_2 (sprintf "all at lvl %d" (starting_level + number_of_lonely_bakes + 1)) @@ -556,8 +556,7 @@ let with_accusers ~state ~base_port node_exec accuser_exec client_exec () = | false -> return (`Not_done - (sprintf "Waiting for accusation to show up in block %d" level)) - ) + (sprintf "Waiting for accusation to show up in block %d" level))) >>= fun () -> pause EF. 
@@ -596,14 +595,14 @@ let cmd ~pp_error () = Test_command_line.Run_command.make ~pp_error ( pure (fun test - base_port - (`Starting_level starting_level) - bnod - bcli - accex - generate_kiln_config - state - -> + base_port + (`Starting_level starting_level) + bnod + bcli + accex + generate_kiln_config + state + -> let checks () = let acc = if test = `With_accusers then [accex] else [] in Helpers.System_dependencies.precheck state `Or_fail @@ -626,8 +625,7 @@ let cmd ~pp_error () = simple_double_endorsement ~state bnod bcli ~base_port ?generate_kiln_config ~starting_level () in - (state, Interactive_test.Pauser.run_test ~pp_error state actual_test) - ) + (state, Interactive_test.Pauser.run_test ~pp_error state actual_test)) $ Arg.( required (pos 0 @@ -659,6 +657,6 @@ let cmd ~pp_error () = (List.length tests) ; `Blocks (List.map tests ~f:(fun (_, n, tit, m) -> - `Blocks [pf "* $(b,`%s`): $(i,%s)." n tit; `Noblank; m] )) ] + `Blocks [pf "* $(b,`%s`): $(i,%s)." n tit; `Noblank; m])) ] in info ~man ~doc "accusations") diff --git a/src/bin_flextesa/command_daemons_protocol_change.ml b/src/bin_flextesa/command_daemons_protocol_change.ml index eb087a55a0bbe62908746aba180152d8a0e7063c..20273ac3d7b6b44d06a7f2881077c179e433316a 100644 --- a/src/bin_flextesa/command_daemons_protocol_change.ml +++ b/src/bin_flextesa/command_daemons_protocol_change.ml @@ -28,7 +28,7 @@ let wait_for_voting_period ?level_withing_period state ~client ~attempts period return (voting_period_position <= lvl) with e -> failf "Cannot get level.voting_period_position: %s" - (Printexc.to_string e) ) + (Printexc.to_string e)) >>= fun lvl_ok -> Tezos_client.rpc state ~client `Get ~path:"/chains/main/blocks/head/votes/current_period_kind" @@ -44,7 +44,7 @@ let wait_for_voting_period ?level_withing_period state ~client ~attempts period EF.( desc_list (wf "Voting period:") [markdown_verbatim (String.concat ~sep:"\n" res#out)]) - >>= fun () -> return (`Not_done message) ) + >>= fun () -> return (`Not_done message)) let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports ?generate_kiln_config ~node_exec ~client_exec ~first_baker_exec @@ -73,11 +73,11 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports [ Tezos_daemon.accuser_of_node ~exec:first_accuser_exec ~client node ~name_tag:"first" ; Tezos_daemon.accuser_of_node ~exec:second_accuser_exec ~client node - ~name_tag:"second" ] ) + ~name_tag:"second" ]) in List_sequential.iter accusers ~f:(fun acc -> Running_processes.start state (Tezos_daemon.process acc ~state) - >>= fun {process; lwt} -> return () ) + >>= fun {process; lwt} -> return ()) >>= fun () -> let keys_and_daemons = let pick_a_node_and_client idx = @@ -101,7 +101,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports ; Tezos_daemon.endorser_of_node ~exec:first_endorser_exec ~name_tag:"first" ~client node ~key ; Tezos_daemon.endorser_of_node ~exec:second_endorser_exec - ~name_tag:"second" ~client node ~key ] ) ) + ~name_tag:"second" ~client node ~key ] )) in List_sequential.iter keys_and_daemons ~f:(fun (acc, client, daemons) -> Tezos_client.bootstrapped ~state client @@ -126,7 +126,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports >>= fun () -> List_sequential.iter daemons ~f:(fun daemon -> Running_processes.start state (Tezos_daemon.process daemon ~state) - >>= fun {process; lwt} -> return () ) ) + >>= fun {process; lwt} -> return ())) >>= fun () -> let client_0 = Tezos_client.of_node (List.nth_exn nodes 0) 
~exec:client_exec @@ -187,7 +187,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports else failf "Injecting protocol %s failed (≠ %s)" new_protocol_hash hash ) - >>= fun () -> return (Some hash) ) + >>= fun () -> return (Some hash)) >>= fun prot_opt -> ( match prot_opt with | Some s -> return s @@ -199,11 +199,11 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports ~sandbox_json:(Tezos_protocol.sandbox_path ~config:state protocol) ~nodes: (List.map nodes ~f:(fun {Tezos_node.rpc_port; _} -> - sprintf "http://localhost:%d" rpc_port )) + sprintf "http://localhost:%d" rpc_port)) ~bakers: (List.map protocol.Tezos_protocol.bootstrap_accounts ~f:(fun (account, _) -> - Tezos_protocol.Account.(name account, pubkey_hash account) )) + Tezos_protocol.Account.(name account, pubkey_hash account))) ~network_string:network_id ~node_exec ~client_exec ~protocol_execs: [ ( protocol.Tezos_protocol.hash @@ -211,7 +211,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports , first_endorser_exec ) ; (new_protocol_hash, second_baker_exec, second_endorser_exec) ] >>= fun () -> - return EF.(wf "Kiln was configured at `%s`" kiln_config.path) ) + return EF.(wf "Kiln was configured at `%s`" kiln_config.path)) >>= fun kiln_info_opt -> Interactive_test.Pauser.generic state EF. @@ -234,7 +234,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports fun ppf () -> pf ppf "%s voted for %s" (Tezos_protocol.Account.name acc) - new_protocol_hash) ) + new_protocol_hash)) >>= fun () -> wait_for_voting_period state ~client:client_0 ~attempts:50 Testing_vote >>= fun _ -> @@ -249,7 +249,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports fun ppf () -> pf ppf "%s voted Yea to test %s" (Tezos_protocol.Account.name acc) - new_protocol_hash) ) + new_protocol_hash)) >>= fun () -> wait_for_voting_period state ~client:client_0 ~attempts:50 Promotion_vote >>= fun _ -> @@ -264,7 +264,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports fun ppf () -> pf ppf "%s voted Yea to promote %s" (Tezos_protocol.Account.name acc) - new_protocol_hash) ) + new_protocol_hash)) >>= fun () -> wait_for_voting_period state ~client:client_0 ~attempts:50 Proposal >>= fun _ -> @@ -284,7 +284,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports return (`Not_done (sprintf "Protocol not done: %s Vs %s" proto_hash new_protocol_hash)) - else return (`Done ()) ) + else return (`Done ())) >>= fun () -> Interactive_test.Pauser.generic state EF. 
@@ -299,23 +299,23 @@ let cmd ~pp_error () = Test_command_line.Run_command.make ~pp_error ( pure (fun size - base_port - (`External_peers external_peer_ports) - (`No_daemons_for no_daemons_for) - protocol - node_exec - client_exec - admin_exec - first_baker_exec - first_endorser_exec - first_accuser_exec - second_baker_exec - second_endorser_exec - second_accuser_exec - (`Protocol_path new_protocol_path) - generate_kiln_config - state - -> + base_port + (`External_peers external_peer_ports) + (`No_daemons_for no_daemons_for) + protocol + node_exec + client_exec + admin_exec + first_baker_exec + first_endorser_exec + first_accuser_exec + second_baker_exec + second_endorser_exec + second_accuser_exec + (`Protocol_path new_protocol_path) + generate_kiln_config + state + -> let actual_test = run state ~size ~base_port ~protocol ~node_exec ~client_exec ~first_baker_exec ~first_endorser_exec ~first_accuser_exec @@ -323,8 +323,7 @@ let cmd ~pp_error () = ~admin_exec ?generate_kiln_config ~external_peer_ports ~no_daemons_for ~new_protocol_path in - (state, Interactive_test.Pauser.run_test ~pp_error state actual_test) - ) + (state, Interactive_test.Pauser.run_test ~pp_error state actual_test)) $ Arg.( value & opt int 5 & info ["size"; "S"] ~doc:"Set the size of the network.") diff --git a/src/bin_flextesa/command_ledger_baking.ml b/src/bin_flextesa/command_ledger_baking.ml index 1858cf797ba386c943f0517629cfa7c0da1da005..6c21e66f41d76cd760dac3a3539e8175a7325ecd 100644 --- a/src/bin_flextesa/command_ledger_baking.ml +++ b/src/bin_flextesa/command_ledger_baking.ml @@ -77,7 +77,7 @@ let get_chain_id state ~client = Tezos_client.rpc state ~client `Get ~path:"/chains/main/chain_id" >>= (function | `String x -> return x - | _ -> failf "Failed to parse chain_id JSON from node" ) + | _ -> failf "Failed to parse chain_id JSON from node") >>= fun chain_id_string -> return (Tezos_crypto.Chain_id.of_b58check_exn chain_id_string) @@ -182,11 +182,12 @@ let setup_baking_ledger state uri ~client ~protocol = acknowledgment to provide the public key of %s" uri client.Tezos_client.id (Tezos_protocol.Account.pubkey_hash account)) - (fun () -> Tezos_client.Keyed.initialize state baker >>= fun _ -> return ()) + (fun () -> + Tezos_client.Keyed.initialize state baker >>= fun _ -> return ()) >>= assert_failure state "baking before setup should fail" (fun () -> - Tezos_client.Keyed.bake state baker "Baked by ledger" ) + Tezos_client.Keyed.bake state baker "Baked by ledger") >>= assert_failure state "endorsing before setup should fail" (fun () -> - Tezos_client.Keyed.endorse state baker "Endorsed by ledger" ) + Tezos_client.Keyed.endorse state baker "Endorsed by ledger") >>= fun () -> let test_invalid_delegations () = let ledger_pkh = Tezos_protocol.Account.pubkey_hash account in @@ -206,7 +207,7 @@ let setup_baking_ledger state uri ~client ~protocol = (sprintf "signing a delegation from %s (%s to %s) should fail" msg src dest) (sign state ~client:baker ~bytes:forged_delegation_bytes) - () ) + ()) in test_invalid_delegations () >>= fun () -> @@ -223,13 +224,13 @@ let setup_baking_ledger state uri ~client ~protocol = (fun () -> Tezos_client.successful_client_cmd state ~client [ "setup"; "ledger"; "to"; "bake"; "for"; key_name; "--main-hwm"; "0" - ; "--test-hwm"; "0" ] ) + ; "--test-hwm"; "0" ]) >>= assert_failure state "signing a 'Withdraw delegate' operation in Baking App should fail" (fun () -> Tezos_client.successful_client_cmd state ~client [ "--wait"; "none"; "withdraw"; "delegate"; "from" - ; 
Tezos_protocol.Account.pubkey_hash account ] ) + ; Tezos_protocol.Account.pubkey_hash account ]) >>= assert_baking_key (Some uri) >>= test_invalid_delegations >>= fun () -> return (baker, account) @@ -278,7 +279,7 @@ let run state ~node_exec ~client_exec ~admin_exec ~size ~base_port ~uri () = EF.(wf "Setting HWM to %d" level) `Succeeds ~f:(fun () -> - Tezos_client.Ledger.set_hwm state ~client:(client 0) ~uri ~level ) + Tezos_client.Ledger.set_hwm state ~client:(client 0) ~uri ~level) in get_chain_id state ~client:(client 0) >>= fun chain_id -> @@ -316,7 +317,7 @@ let run state ~node_exec ~client_exec ~admin_exec ~size ~base_port ~uri () = "originating an account from the Tezos Baking app should fail" (fun () -> originate_account_from state ~client:(client 0) ~account:ledger_account - >>= fun _ -> return () ) + >>= fun _ -> return ()) () >>= fun () -> let fee = 0.00126 in @@ -333,7 +334,7 @@ let run state ~node_exec ~client_exec ~admin_exec ~size ~base_port ~uri () = with_ledger_test_reject_and_succeed state EF.(wf "Setting HWM to %d" level) (fun () -> - Tezos_client.Ledger.set_hwm state ~client:(client 0) ~uri ~level )) + Tezos_client.Ledger.set_hwm state ~client:(client 0) ~uri ~level)) >>= assert_hwms_ ~main:1 ~test:1 >>= bake >>= assert_hwms_ ~main:3 ~test:1 @@ -386,17 +387,17 @@ let cmd ~pp_error () = Test_command_line.Run_command.make ~pp_error ( pure (fun uri - node_exec - client_exec - admin_exec - size - (`Base_port base_port) - state - -> + node_exec + client_exec + admin_exec + size + (`Base_port base_port) + state + -> ( state , Interactive_test.Pauser.run_test ~pp_error state (run state ~node_exec ~size ~admin_exec ~base_port ~client_exec - ~uri) ) ) + ~uri) )) $ Arg.( required (pos 0 (some string) None diff --git a/src/bin_flextesa/command_ledger_wallet.ml b/src/bin_flextesa/command_ledger_wallet.ml index 8ce228eb5f42c95dfcfa3118b14a9dcb4d8ad4d3..84540f1b8efe7a551845e0b8393ec317ce157b13 100644 --- a/src/bin_flextesa/command_ledger_wallet.ml +++ b/src/bin_flextesa/command_ledger_wallet.ml @@ -37,7 +37,7 @@ let find_and_print_signature_hash state stream = Console.say state EF.(wf "Hash should be: %s" x) >>= fun () -> return true else return true ) - >>= fun showed_message -> return (all_output, showed_message) ) + >>= fun showed_message -> return (all_output, showed_message)) >>= fun (output, _) -> return output let ledger_prompt_notice state ~ef ?(button = `Checkmark) () = @@ -111,7 +111,7 @@ let get_chain_id state ~client = Tezos_client.rpc state ~client `Get ~path:"/chains/main/chain_id" >>= (function | `String x -> return x - | _ -> failf "Failed to parse chain_id JSON from node" ) + | _ -> failf "Failed to parse chain_id JSON from node") >>= fun chain_id_string -> return (Tezos_crypto.Chain_id.of_b58check_exn chain_id_string) @@ -141,7 +141,7 @@ let forge_batch_transactions state ~client ~src ~dest ~n ?(fee = 0.00126) () = ) ; ("counter", `String (string_of_int i)) ; ("gas_limit", `String (string_of_int 127)) - ; ("storage_limit", `String (string_of_int 277)) ] )) ) ] + ; ("storage_limit", `String (string_of_int 277)) ])) ) ] in Tezos_client.rpc state ~client ~path:"/chains/main/blocks/head/helpers/forge/operations" @@ -199,7 +199,7 @@ let run state ~node_exec ~client_exec ~admin_exec ~size ~base_port ~uri () = uri (client 0).Tezos_client.id (Tezos_protocol.Account.pubkey_hash ledger_account)) (fun () -> - Tezos_client.Keyed.initialize state signer >>= fun _ -> return () ) + Tezos_client.Keyed.initialize state signer >>= fun _ -> return ()) >>= fun _ -> let submit_proposals 
() = client_async_cmd state ~client:(client 0) @@ -261,17 +261,17 @@ let cmd ~pp_error () = Test_command_line.Run_command.make ~pp_error ( pure (fun uri - node_exec - client_exec - admin_exec - size - (`Base_port base_port) - state - -> + node_exec + client_exec + admin_exec + size + (`Base_port base_port) + state + -> ( state , Interactive_test.Pauser.run_test ~pp_error state (run state ~node_exec ~size ~admin_exec ~base_port ~client_exec - ~uri) ) ) + ~uri) )) $ Arg.( required (pos 0 (some string) None diff --git a/src/bin_flextesa/command_mini_network.ml b/src/bin_flextesa/command_mini_network.ml index 37200658b2e65181262e6de7b565325379104731..eb116bcc84fbb4b92fbc84271c3ae59f36220a10 100644 --- a/src/bin_flextesa/command_mini_network.ml +++ b/src/bin_flextesa/command_mini_network.ml @@ -25,23 +25,23 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports ~sandbox_json:(Tezos_protocol.sandbox_path ~config:state protocol) ~nodes: (List.map nodes ~f:(fun {Tezos_node.rpc_port; _} -> - sprintf "http://localhost:%d" rpc_port )) + sprintf "http://localhost:%d" rpc_port)) ~bakers: (List.map protocol.Tezos_protocol.bootstrap_accounts ~f:(fun (account, _) -> - Tezos_protocol.Account.(name account, pubkey_hash account) )) + Tezos_protocol.Account.(name account, pubkey_hash account))) ~network_string:network_id ~node_exec ~client_exec ~protocol_execs: - [(protocol.Tezos_protocol.hash, baker_exec, endorser_exec)] ) + [(protocol.Tezos_protocol.hash, baker_exec, endorser_exec)]) >>= fun (_ : unit option) -> let accusers = List.map nodes ~f:(fun node -> let client = Tezos_client.of_node node ~exec:client_exec in - Tezos_daemon.accuser_of_node ~exec:accuser_exec ~client node ) + Tezos_daemon.accuser_of_node ~exec:accuser_exec ~client node) in List_sequential.iter accusers ~f:(fun acc -> Running_processes.start state (Tezos_daemon.process acc ~state) - >>= fun {process; lwt} -> return () ) + >>= fun {process; lwt} -> return ()) >>= fun () -> let keys_and_daemons = let pick_a_node_and_client idx = @@ -60,7 +60,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports , client , [ Tezos_daemon.baker_of_node ~exec:baker_exec ~client node ~key ; Tezos_daemon.endorser_of_node ~exec:endorser_exec ~client - node ~key ] ) ) + node ~key ] )) in List_sequential.iter keys_and_daemons ~f:(fun (acc, client, daemons) -> Tezos_client.bootstrapped ~state client @@ -85,7 +85,7 @@ let run state ~protocol ~size ~base_port ~no_daemons_for ?external_peer_ports >>= fun () -> List_sequential.iter daemons ~f:(fun daemon -> Running_processes.start state (Tezos_daemon.process daemon ~state) - >>= fun {process; lwt} -> return () ) ) + >>= fun {process; lwt} -> return ())) >>= fun () -> Prompt.( command state @@ -104,24 +104,23 @@ let cmd ~pp_error () = Test_command_line.Run_command.make ~pp_error ( pure (fun size - base_port - (`External_peers external_peer_ports) - (`No_daemons_for no_daemons_for) - protocol - bnod - bcli - bak - endo - accu - generate_kiln_config - state - -> + base_port + (`External_peers external_peer_ports) + (`No_daemons_for no_daemons_for) + protocol + bnod + bcli + bak + endo + accu + generate_kiln_config + state + -> let actual_test = run state ~size ~base_port ~protocol bnod bcli bak endo accu ?generate_kiln_config ~external_peer_ports ~no_daemons_for in - (state, Interactive_test.Pauser.run_test ~pp_error state actual_test) - ) + (state, Interactive_test.Pauser.run_test ~pp_error state actual_test)) $ Arg.( value & opt int 5 & info ["size"; "S"] 
~doc:"Set the size of the network.") diff --git a/src/bin_flextesa/command_voting.ml b/src/bin_flextesa/command_voting.ml index 57adbec7dd3ff8604d22608485eda7b6025a6e39..b9cc0c96bf7e200553259cdeed809f1ed8a1cdd0 100644 --- a/src/bin_flextesa/command_voting.ml +++ b/src/bin_flextesa/command_voting.ml @@ -50,7 +50,8 @@ let transfer state ~client ~src ~dst ~amount = let register state ~client ~dst = Tezos_client.successful_client_cmd state ~client - [ "--wait"; "none"; "register"; "key" ; dst ; "as" ; "delegate" ; "--fee"; "0.05" ] + [ "--wait"; "none"; "register"; "key"; dst; "as"; "delegate"; "--fee" + ; "0.05" ] let bake_until_voting_period ?keep_alive_delegate state ~baker ~attempts period = @@ -63,15 +64,14 @@ let bake_until_voting_period ?keep_alive_delegate state ~baker ~attempts period | `String p when p = period_name -> return (`Done (nth - 1)) | other -> Asynchronous_result.map_option keep_alive_delegate ~f:(fun dst -> - register state ~client ~dst - >>= fun res -> return () ) + register state ~client ~dst >>= fun res -> return ()) >>= fun _ -> ksprintf (Tezos_client.Keyed.bake state baker) "Baker %s bakes %d/%d waiting for %S voting period" client.id nth attempts period_name >>= fun () -> - return (`Not_done (sprintf "Waiting for %S period" period_name)) ) + return (`Not_done (sprintf "Waiting for %S period" period_name))) let check_understood_protocols state ~chain ~client ~protocol_hash ~expect_clueless_client = @@ -82,14 +82,14 @@ let check_understood_protocols state ~chain ~client ~protocol_hash | Ok client_protocols_result -> ( match List.find client_protocols_result#out ~f:(fun prefix -> - String.is_prefix protocol_hash ~prefix ) + String.is_prefix protocol_hash ~prefix) with | Some p -> return `Proper_understanding | None when expect_clueless_client -> return `Expected_misunderstanding | None -> return `Failure_to_understand ) | Error (`Client_command_error _) when expect_clueless_client -> return `Expected_misunderstanding - | Error e -> fail e ) + | Error e -> fail e) let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec ~clueless_winner ~admin_exec ~winner_client_exec ~size ~base_port @@ -114,7 +114,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec ; time_between_blocks= [1; 0] ; bootstrap_accounts= List.map d.bootstrap_accounts ~f:(fun (n, v) -> - if fst baker = n then (n, v) else (n, 1_000L) ) } + if fst baker = n then (n, v) else (n, 1_000L)) } , fst baker , snd baker ) in @@ -145,7 +145,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec let level_counter = Counter_log.create () in let first_bakes = 5 in Loop.n_times first_bakes (fun nth -> - ksprintf (Tezos_client.Keyed.bake state baker_0) "initial-bake %d" nth ) + ksprintf (Tezos_client.Keyed.bake state baker_0) "initial-bake %d" nth) >>= fun () -> let initial_level = first_bakes + 1 in Counter_log.add level_counter "initial_level" initial_level ; @@ -198,7 +198,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec Loop.n_times after_transfer_bakes (fun nth -> ksprintf (Tezos_client.Keyed.bake state baker_0) - "after-transfer-bake %d" nth ) + "after-transfer-bake %d" nth) >>= fun () -> Counter_log.add level_counter "after-transfer-bakes" after_transfer_bakes ; Test_scenario.Queries.wait_for_all_levels_to_be state @@ -206,7 +206,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec (`At_least (Counter_log.sum level_counter)) >>= fun () -> Asynchronous_result.map_option with_ledger 
~f:(fun _ -> - ledger_prompt_notice state EF.(wf "Registering as delegate.") ) + ledger_prompt_notice state EF.(wf "Registering as delegate.")) >>= fun (_ : unit option) -> Tezos_client.successful_client_cmd state ~client:(client 0) [ "--wait"; "none"; "register"; "key" @@ -228,7 +228,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec Console.say state EF.( desc (haf "Baking rights") - (markdown_verbatim (String.concat ~sep:"\n" res#out))) ) + (markdown_verbatim (String.concat ~sep:"\n" res#out)))) >>= fun () -> Counter_log.add level_counter "activation-bakes" activation_bakes ; Tezos_client.Keyed.bake state special_baker "Baked by Special Baker™" @@ -295,7 +295,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec match p with | _ when p = winner_hash -> "injected winner" | _ when p = demo_hash -> "injected demo" - | _ -> "injected unknown" ) )) ] + | _ -> "injected unknown" ))) ] >>= fun () -> Asynchronous_result.map_option with_ledger ~f:(fun _ -> Interactive_test.Pauser.generic state @@ -303,7 +303,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec [ af "About to VOTE" ; haf "Please switch to the Wallet app and quit (`q`) this prompt." ] - ~force:true ) + ~force:true) >>= fun (_ : unit option) -> let submit_proposals baker props = Asynchronous_result.map_option with_ledger ~f:(fun _ -> @@ -311,7 +311,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec EF.( wf "Submitting proposal%s: %s" (if List.length props = 1 then "" else "s") - (String.concat ~sep:", " props)) ) + (String.concat ~sep:", " props))) >>= fun _ -> Tezos_client.successful_client_cmd state ~client:baker.Tezos_client.Keyed.client @@ -323,7 +323,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec | false -> submit_proposals special_baker to_submit_first | true -> List_sequential.iter to_submit_first ~f:(fun one -> - submit_proposals special_baker [one] ) ) + submit_proposals special_baker [one]) ) >>= fun () -> Tezos_client.successful_client_cmd state ~client:baker_0.client ["submit"; "proposals"; "for"; baker_0.key_name; winner_hash] @@ -348,14 +348,14 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec (sprintf "Waiting for current_proposal_json to be %s (%s)" winner_hash Ezjsonm.(to_string (wrap current_proposal_json)))) - else return (`Done ()) ) + else return (`Done ())) >>= fun () -> Tezos_client.successful_client_cmd state ~client:baker_0.client ["submit"; "ballot"; "for"; baker_0.key_name; winner_hash; "yay"] >>= fun _ -> Asynchronous_result.map_option with_ledger ~f:(fun _ -> ledger_prompt_notice state - EF.(wf "Submitting “Yes” ballot for %S" winner_hash) ) + EF.(wf "Submitting “Yes” ballot for %S" winner_hash)) >>= fun (_ : unit option) -> Tezos_client.successful_client_cmd state ~client:special_baker.client ["submit"; "ballot"; "for"; special_baker.key_name; winner_hash; "yay"] @@ -385,7 +385,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec ; haf "Please switch back to the Baking app and quit (`q`) \ this prompt." 
] - ~force:true ) + ~force:true) >>= fun (_ : unit option) -> let testing_bakes = 5 in Loop.n_times testing_bakes (fun ith -> @@ -395,7 +395,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec in Tezos_client.Keyed.bake ~chain state baker (sprintf "Baking on the test chain [%d/%d]" (ith + 1) - testing_bakes) ) + testing_bakes)) >>= fun () -> Test_scenario.Queries.wait_for_all_levels_to_be state ~chain ~attempts:default_attempts ~seconds:8. nodes @@ -408,7 +408,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec Console.say state EF.(wf "Winner-Client cannot bake on test chain (expected)") | `Failure_to_understand -> - failf "Winner-Client cannot bake on test chain!" ) + failf "Winner-Client cannot bake on test chain!") >>= fun () -> Helpers.wait_for state ~attempts:default_attempts ~seconds:0.3 (fun nth -> Tezos_client.rpc state ~client:(client 1) `Get @@ -429,7 +429,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec (`Not_done (sprintf "Cannot get test-chain protocol: %s → %s" (Exn.to_string e) - Ezjsonm.(to_string (wrap metadata_json)))) ) + Ezjsonm.(to_string (wrap metadata_json))))) >>= fun () -> bake_until_voting_period state ~baker:baker_0 ~attempts:(1 + protocol.blocks_per_voting_period) @@ -456,7 +456,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec ~force:true >>= fun () -> ledger_prompt_notice state - EF.(wf "Submitting “Yes” ballot for %S" winner_hash) ) + EF.(wf "Submitting “Yes” ballot for %S" winner_hash)) >>= fun (_ : unit option) -> Tezos_client.successful_client_cmd state ~client:special_baker.client ["submit"; "ballot"; "for"; special_baker.key_name; winner_hash; "yay"] @@ -466,8 +466,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec >>= fun () -> let ballot_bakes = 1 in Loop.n_times ballot_bakes (fun _ -> - Tezos_client.Keyed.bake state baker_0 "Baking the promotion vote ballots" - ) + Tezos_client.Keyed.bake state baker_0 "Baking the promotion vote ballots") >>= fun () -> Counter_log.add level_counter "bake-the-ballots" ballot_bakes ; Tezos_client.successful_client_cmd state ~client:(client 0) @@ -506,7 +505,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec return (`Not_done (sprintf "Waiting for next_protocol: %S (≠ %s)" winner_hash - Ezjsonm.(to_string (wrap other)))) ) + Ezjsonm.(to_string (wrap other))))) >>= fun extra_bakes_waiting_for_next_protocol -> Counter_log.add level_counter "wait-for-next-protocol" extra_bakes_waiting_for_next_protocol ; @@ -534,7 +533,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec desc (shout "Warning") (wf "Command `upgrade baking state` failed, but we \ - keep going with the baking.")) ) + keep going with the baking."))) >>= fun () -> Asynchronous_result.map_option with_ledger ~f:(fun _ -> Interactive_test.Pauser.generic state @@ -547,7 +546,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec >>= fun () -> Console.say state EF.(wf "Sleeping for a couple of seconds…") >>= fun () -> System.sleep 4. - (* USB thing is often slower than humans hitting `q` *) ) + (* USB thing is often slower than humans hitting `q` *)) >>= fun (_ : unit option) -> Tezos_client.Keyed.bake state winner_baker_0 "First bake on new protocol !!" 
@@ -565,7 +564,7 @@ let run state ~winner_path ~demo_path ~current_hash ~node_exec ~client_exec | `String p when p = winner_hash -> return () | other -> failf "Protocol is not `%s` but `%s`" winner_hash - Ezjsonm.(to_string (wrap other)) ) ) + Ezjsonm.(to_string (wrap other)) )) >>= fun () -> Interactive_test.Pauser.generic state EF. @@ -581,24 +580,24 @@ let cmd ~pp_error () = Test_command_line.Run_command.make ~pp_error ( pure (fun winner_path - demo_path - node_exec - client_exec - admin_exec - winner_client_exec - size - (`Clueless_winner clueless_winner) - (`Hash current_hash) - (`Base_port base_port) - (`With_ledger with_ledger) - (`Serialize_proposals serialize_proposals) - state - -> + demo_path + node_exec + client_exec + admin_exec + winner_client_exec + size + (`Clueless_winner clueless_winner) + (`Hash current_hash) + (`Base_port base_port) + (`With_ledger with_ledger) + (`Serialize_proposals serialize_proposals) + state + -> ( state , Interactive_test.Pauser.run_test state ~pp_error (run state ~serialize_proposals ~current_hash ~winner_path ~clueless_winner ~demo_path ~node_exec ~size ~admin_exec - ~base_port ~client_exec ~winner_client_exec ?with_ledger) ) ) + ~base_port ~client_exec ~winner_client_exec ?with_ledger) )) $ Arg.( pure Filename.dirname $ required diff --git a/src/bin_flextesa/main.ml b/src/bin_flextesa/main.ml index cdacab276ea2dab96c1cc65c5087a3132e6d11f6..f9dff8c013fdc22bb10a88bc95010197bb7ebdef 100644 --- a/src/bin_flextesa/main.ml +++ b/src/bin_flextesa/main.ml @@ -9,7 +9,7 @@ module Small_utilities = struct let open Tezos_protocol.Account in let account = of_name n in Printf.printf "%s,%s,%s,%s\n%!" (name account) (pubkey account) - (pubkey_hash account) (private_key account) ) + (pubkey_hash account) (private_key account)) $ Arg.( required (pos 0 (some string) None @@ -47,7 +47,7 @@ module Small_utilities = struct (list ~sep:(fun ppf () -> string ppf "," ; sp ppf ()) (fun ppf p -> fmt "%d" ppf p)) - ppf to_display )) ) ) + ppf to_display)) )) $ Test_command_line.cli_state ~disable_interactivity:true ~name:"netstat-ports" () ) (info "netstat-listening-ports" diff --git a/src/bin_node/.ocamlformat b/src/bin_node/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/bin_node/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/bin_node/genesis_chain.ml b/src/bin_node/genesis_chain.ml index ed8e2a3b29a838cf117a39fb52957d05f541b836..913a3102f7e5d2a764a4517ec404ba7e7dc540b5 100644 --- a/src/bin_node/genesis_chain.ml +++ b/src/bin_node/genesis_chain.ml @@ -24,13 +24,11 @@ (* *) (*****************************************************************************) -let genesis : State.Chain.genesis = { - time = - Time.Protocol.of_notation_exn "2018-06-30T16:07:32Z" ; - block = - Block_hash.of_b58check_exn - "BLockGenesisGenesisGenesisGenesisGenesisf79b5d1CoW2" ; - protocol = - Protocol_hash.of_b58check_exn - "ProtoGenesisGenesisGenesisGenesisGenesisGenesk612im" ; -} +let genesis : State.Chain.genesis = + { time = Time.Protocol.of_notation_exn "2018-06-30T16:07:32Z"; + block = + Block_hash.of_b58check_exn + "BLockGenesisGenesisGenesisGenesisGenesisf79b5d1CoW2"; + protocol = + Protocol_hash.of_b58check_exn + 
"ProtoGenesisGenesisGenesisGenesisGenesisGenesk612im" } diff --git a/src/bin_node/main.ml b/src/bin_node/main.ml index 19dd1dc870a41dc04891ef0d77e5472e37c18b9b..a2450865678a362a94ea29364c6f41e667d05d5e 100644 --- a/src/bin_node/main.ml +++ b/src/bin_node/main.ml @@ -24,54 +24,50 @@ (*****************************************************************************) let () = - if Filename.basename Sys.argv.(0) = Updater.compiler_name then begin + if Filename.basename Sys.argv.(0) = Updater.compiler_name then ( try Tezos_protocol_compiler.Compiler.main Tezos_protocol_compiler_native.Native.driver ; Pervasives.exit 0 with exn -> - Format.eprintf "%a\n%!" Opterrors.report_error exn; - Pervasives.exit 1 - end + Format.eprintf "%a\n%!" Opterrors.report_error exn ; + Pervasives.exit 1 ) let term = let open Cmdliner.Term in ret (const (`Help (`Pager, None))) -let description = [ - `S "DESCRIPTION" ; - `P "Entry point for initializing, configuring and running a Tezos node." ; - `P Node_identity_command.Manpage.command_description ; - `P Node_run_command.Manpage.command_description ; - `P Node_config_command.Manpage.command_description ; - `P Node_snapshot_command.Manpage.command_description ; -] +let description = + [ `S "DESCRIPTION"; + `P "Entry point for initializing, configuring and running a Tezos node."; + `P Node_identity_command.Manpage.command_description; + `P Node_run_command.Manpage.command_description; + `P Node_config_command.Manpage.command_description; + `P Node_snapshot_command.Manpage.command_description ] -let man = - description @ - Node_run_command.Manpage.examples +let man = description @ Node_run_command.Manpage.examples let info = let version = - Tezos_base.Current_git_info.abbreviated_commit_hash ^ - " ("^Tezos_base.Current_git_info.committer_date^")" in - Cmdliner.Term.info - ~doc:"The Tezos node" - ~man - ~version - "tezos-node" + Tezos_base.Current_git_info.abbreviated_commit_hash ^ " (" + ^ Tezos_base.Current_git_info.committer_date ^ ")" + in + Cmdliner.Term.info ~doc:"The Tezos node" ~man ~version "tezos-node" -let commands = [ - Node_run_command.cmd ; - Node_config_command.cmd ; - Node_identity_command.cmd ; - Node_snapshot_command.cmd ; -] +let commands = + [ Node_run_command.cmd; + Node_config_command.cmd; + Node_identity_command.cmd; + Node_snapshot_command.cmd ] let () = Random.self_init () ; match Cmdliner.Term.eval_choice (term, info) commands with - | `Error _ -> exit 1 - | `Help -> exit 0 - | `Version -> exit 1 - | `Ok () -> exit 0 + | `Error _ -> + exit 1 + | `Help -> + exit 0 + | `Version -> + exit 1 + | `Ok () -> + exit 0 diff --git a/src/bin_node/node_config_command.ml b/src/bin_node/node_config_command.ml index 28edb52b500e06a36a909753d3c5ebc7ffb03699..b53334184e33ece85844fd0fff55d2fbcb7c5f3f 100644 --- a/src/bin_node/node_config_command.ml +++ b/src/bin_node/node_config_command.ml @@ -30,10 +30,13 @@ let show (args : Node_shared_arg.t) = Format.eprintf "\n\ Warning: no config file at %s,\n\ - \ displaying the default configuration.\n@." + \ displaying the default configuration.\n\ + @." args.config_file ; - Node_shared_arg.read_and_patch_config_file args >>=? fun cfg -> - Node_config_file.check cfg >>= fun () -> + Node_shared_arg.read_and_patch_config_file args + >>=? fun cfg -> + Node_config_file.check cfg + >>= fun () -> print_endline @@ Node_config_file.to_string cfg ; return_unit @@ -42,126 +45,130 @@ let reset (args : Node_shared_arg.t) = Format.eprintf "Ignoring previous configuration file: %s.@." 
args.config_file ; - Node_shared_arg.read_and_patch_config_file args >>=? fun cfg -> - Node_config_file.check cfg >>= fun () -> - Node_config_file.write args.config_file cfg + Node_shared_arg.read_and_patch_config_file args + >>=? fun cfg -> + Node_config_file.check cfg + >>= fun () -> Node_config_file.write args.config_file cfg let init (args : Node_shared_arg.t) = if Sys.file_exists args.config_file then - failwith - "Pre-existing config file at %s, use `reset`." - args.config_file + failwith "Pre-existing config file at %s, use `reset`." args.config_file else - Node_shared_arg.read_and_patch_config_file args >>=? fun cfg -> - Node_config_file.check cfg >>= fun () -> - Node_config_file.write args.config_file cfg + Node_shared_arg.read_and_patch_config_file args + >>=? fun cfg -> + Node_config_file.check cfg + >>= fun () -> Node_config_file.write args.config_file cfg let update (args : Node_shared_arg.t) = if not (Sys.file_exists args.config_file) then failwith - "Missing configuration file at %s. \ - Use `%s config init [options]` to generate a new file" - args.config_file Sys.argv.(0) + "Missing configuration file at %s. Use `%s config init [options]` to \ + generate a new file" + args.config_file + Sys.argv.(0) else - Node_shared_arg.read_and_patch_config_file args >>=? fun cfg -> - Node_config_file.check cfg >>= fun () -> - Node_config_file.write args.config_file cfg + Node_shared_arg.read_and_patch_config_file args + >>=? fun cfg -> + Node_config_file.check cfg + >>= fun () -> Node_config_file.write args.config_file cfg (** Main *) module Term = struct - type subcommand = Show | Reset | Init | Update - let process subcommand args = + let process subcommand args = let res = match subcommand with - | Show -> show args - | Reset -> reset args - | Init -> init args - | Update -> update args in + | Show -> + show args + | Reset -> + reset args + | Init -> + init args + | Update -> + update args + in match Lwt_main.run res with - | Ok () -> `Ok () - | Error err -> `Error (false, Format.asprintf "%a" pp_print_error err) + | Ok () -> + `Ok () + | Error err -> + `Error (false, Format.asprintf "%a" pp_print_error err) let subcommand_arg = let parser = function - | "show" -> `Ok Show - | "reset" -> `Ok Reset - | "init" -> `Ok Init - | "update" -> `Ok Update - | s -> `Error ("invalid argument: " ^ s) + | "show" -> + `Ok Show + | "reset" -> + `Ok Reset + | "init" -> + `Ok Init + | "update" -> + `Ok Update + | s -> + `Error ("invalid argument: " ^ s) and printer ppf = function - | Show -> Format.fprintf ppf "show" - | Reset -> Format.fprintf ppf "reset" - | Init -> Format.fprintf ppf "init" - | Update -> Format.fprintf ppf "update" in + | Show -> + Format.fprintf ppf "show" + | Reset -> + Format.fprintf ppf "reset" + | Init -> + Format.fprintf ppf "init" + | Update -> + Format.fprintf ppf "update" + in let open Cmdliner.Arg in let doc = - "Operation to perform. \ - Possible values: $(b,show), $(b,reset), $(b,init), $(b,update)." in + "Operation to perform. Possible values: $(b,show), $(b,reset), \ + $(b,init), $(b,update)." + in value & pos 0 (parser, printer) Show & info [] ~docv:"OPERATION" ~doc let term = let open Cmdliner.Term in ret (const process $ subcommand_arg $ Node_shared_arg.Term.args) - end module Manpage = struct - let command_description = - "The $(b,config) command is meant to inspect and amend the \ - configuration of the Tezos node. \ - This command is complementary to manually editing the tezos \ - node configuration file. 
Its arguments are a subset of the $(i,run) \ - command ones." - - let description = [ - `S "DESCRIPTION" ; - `P (command_description ^ " Several operations are possible: "); - `P "$(b,show) reads, parses and displays Tezos current config file. \ - Use this command to see exactly what config file will be used by \ - Tezos. If additional command-line arguments are provided, \ - the displayed configuration will be amended accordingly. \ - This is the default operation." ; - `P "$(b,reset) will overwrite the current configuration file with a \ - factory default one. \ - If additional command-line arguments are provided, \ - they will amend the generated file. \ - It assumes that a configuration file already exists \ - and will abort otherwise." ; - `P "$(b,init) is like reset but assumes that \ - no configuration file is present \ - and will abort otherwise." ; - `P "$(b,update) is the main option to edit the configuration file of Tezos. \ - It will parse command line arguments and add or replace corresponding \ - entries in the Tezos configuration file." - ] + "The $(b,config) command is meant to inspect and amend the configuration \ + of the Tezos node. This command is complementary to manually editing the \ + tezos node configuration file. Its arguments are a subset of the \ + $(i,run) command ones." + + let description = + [ `S "DESCRIPTION"; + `P (command_description ^ " Several operations are possible: "); + `P + "$(b,show) reads, parses and displays Tezos current config file. Use \ + this command to see exactly what config file will be used by Tezos. \ + If additional command-line arguments are provided, the displayed \ + configuration will be amended accordingly. This is the default \ + operation."; + `P + "$(b,reset) will overwrite the current configuration file with a \ + factory default one. If additional command-line arguments are \ + provided, they will amend the generated file. It assumes that a \ + configuration file already exists and will abort otherwise."; + `P + "$(b,init) is like reset but assumes that no configuration file is \ + present and will abort otherwise."; + `P + "$(b,update) is the main option to edit the configuration file of \ + Tezos. It will parse command line arguments and add or replace \ + corresponding entries in the Tezos configuration file." 
] let options = - let schema = Data_encoding.Json.schema (Node_config_file.encoding) in + let schema = Data_encoding.Json.schema Node_config_file.encoding in let schema = Format.asprintf "@[%a@]" Json_schema.pp schema in let schema = String.concat "\\$" (String.split '$' schema) in - [ - `S "OPTIONS" ; - `P "All options available in the config file"; - `Pre schema - ] + [`S "OPTIONS"; `P "All options available in the config file"; `Pre schema] let man = - description @ - Node_shared_arg.Manpage.args @ - options @ - Node_shared_arg.Manpage.bugs - - let info = - Cmdliner.Term.info - ~doc:"Manage node configuration" - ~man - "config" + description @ Node_shared_arg.Manpage.args @ options + @ Node_shared_arg.Manpage.bugs + let info = Cmdliner.Term.info ~doc:"Manage node configuration" ~man "config" end -let cmd = - Term.term, Manpage.info +let cmd = (Term.term, Manpage.info) diff --git a/src/bin_node/node_config_command.mli b/src/bin_node/node_config_command.mli index 018f1c8a3d25580b22084981c3382269901441c2..18d1c802fa9a67aa11701734768b6837b7b97c23 100644 --- a/src/bin_node/node_config_command.mli +++ b/src/bin_node/node_config_command.mli @@ -26,5 +26,5 @@ val cmd : unit Cmdliner.Term.t * Cmdliner.Term.info module Manpage : sig - val command_description: string + val command_description : string end diff --git a/src/bin_node/node_config_file.ml b/src/bin_node/node_config_file.ml index 60df693ce111274db152d45ee12a9d5c17f7ea3b..d742440236d68aea4177d53f5094ab2bfa1dc358 100644 --- a/src/bin_node/node_config_file.ml +++ b/src/bin_node/node_config_file.ml @@ -26,214 +26,271 @@ [@@@ocaml.warning "-30"] -let (//) = Filename.concat +let ( // ) = Filename.concat -let home = - try Sys.getenv "HOME" - with Not_found -> "/root" +let home = try Sys.getenv "HOME" with Not_found -> "/root" let default_data_dir = home // ".tezos-node" -let default_rpc_port = 8732 -let default_p2p_port = 9732 + +let default_rpc_port = 8732 + +let default_p2p_port = 9732 + let default_discovery_port = 10732 type t = { - data_dir : string ; - p2p : p2p ; - rpc : rpc ; - log : Lwt_log_sink_unix.cfg ; - internal_events : Internal_event_unix.Configuration.t ; - shell : shell ; + data_dir : string; + p2p : p2p; + rpc : rpc; + log : Lwt_log_sink_unix.cfg; + internal_events : Internal_event_unix.Configuration.t; + shell : shell } and p2p = { - expected_pow : float ; - bootstrap_peers : string list ; - listen_addr : string option ; - discovery_addr : string option ; - private_mode : bool ; - limits : P2p.limits ; - disable_mempool : bool ; - disable_testchain : bool ; - greylisting_config : P2p_point_state.Info.greylisting_config ; + expected_pow : float; + bootstrap_peers : string list; + listen_addr : string option; + discovery_addr : string option; + private_mode : bool; + limits : P2p.limits; + disable_mempool : bool; + disable_testchain : bool; + greylisting_config : P2p_point_state.Info.greylisting_config } and rpc = { - listen_addr : string option ; - cors_origins : string list ; - cors_headers : string list ; - tls : tls option ; + listen_addr : string option; + cors_origins : string list; + cors_headers : string list; + tls : tls option } -and tls = { - cert : string ; - key : string ; -} +and tls = {cert : string; key : string} and shell = { - block_validator_limits : Node.block_validator_limits ; - prevalidator_limits : Node.prevalidator_limits ; - peer_validator_limits : Node.peer_validator_limits ; - chain_validator_limits : Node.chain_validator_limits ; - history_mode : History_mode.t option ; -} - -let 
default_p2p_limits : P2p.limits = { - connection_timeout = Time.System.Span.of_seconds_exn 10. ; - authentication_timeout = Time.System.Span.of_seconds_exn 5. ; - greylist_timeout = Time.System.Span.of_seconds_exn 86400. (* one day *) ; - maintenance_idle_time = Time.System.Span.of_seconds_exn 120. (* two minutes *) ; - min_connections = 10 ; - expected_connections = 50 ; - max_connections = 100 ; - backlog = 20 ; - max_incoming_connections = 20 ; - max_download_speed = None ; - max_upload_speed = None ; - read_buffer_size = 1 lsl 14 ; - read_queue_size = None ; - write_queue_size = None ; - incoming_app_message_queue_size = None ; - incoming_message_queue_size = None ; - outgoing_message_queue_size = None ; - known_points_history_size = 500 ; - known_peer_ids_history_size = 500 ; - max_known_points = Some (400, 300) ; - max_known_peer_ids = Some (400, 300) ; - swap_linger = Time.System.Span.of_seconds_exn 30. ; - binary_chunks_size = None ; + block_validator_limits : Node.block_validator_limits; + prevalidator_limits : Node.prevalidator_limits; + peer_validator_limits : Node.peer_validator_limits; + chain_validator_limits : Node.chain_validator_limits; + history_mode : History_mode.t option } -let default_p2p = { - expected_pow = 26. ; - bootstrap_peers = [] ; - listen_addr = Some ("[::]:" ^ string_of_int default_p2p_port) ; - discovery_addr = None ; - private_mode = false ; - limits = default_p2p_limits ; - disable_mempool = false ; - disable_testchain = false ; - greylisting_config = P2p_point_state.Info.default_greylisting_config -} - -let default_rpc = { - listen_addr = None ; - cors_origins = [] ; - cors_headers = [] ; - tls = None ; -} - -let default_shell = { - block_validator_limits = Node.default_block_validator_limits ; - prevalidator_limits = Node.default_prevalidator_limits ; - peer_validator_limits = Node.default_peer_validator_limits ; - chain_validator_limits = Node.default_chain_validator_limits ; - history_mode = None ; -} - -let default_config = { - data_dir = default_data_dir ; - p2p = default_p2p ; - rpc = default_rpc ; - log = Lwt_log_sink_unix.default_cfg ; - internal_events = Internal_event_unix.Configuration.default ; - shell = default_shell ; -} +let default_p2p_limits : P2p.limits = + { connection_timeout = Time.System.Span.of_seconds_exn 10.; + authentication_timeout = Time.System.Span.of_seconds_exn 5.; + greylist_timeout = Time.System.Span.of_seconds_exn 86400. (* one day *); + maintenance_idle_time = + Time.System.Span.of_seconds_exn 120. 
(* two minutes *); + min_connections = 10; + expected_connections = 50; + max_connections = 100; + backlog = 20; + max_incoming_connections = 20; + max_download_speed = None; + max_upload_speed = None; + read_buffer_size = 1 lsl 14; + read_queue_size = None; + write_queue_size = None; + incoming_app_message_queue_size = None; + incoming_message_queue_size = None; + outgoing_message_queue_size = None; + known_points_history_size = 500; + known_peer_ids_history_size = 500; + max_known_points = Some (400, 300); + max_known_peer_ids = Some (400, 300); + swap_linger = Time.System.Span.of_seconds_exn 30.; + binary_chunks_size = None } + +let default_p2p = + { expected_pow = 26.; + bootstrap_peers = []; + listen_addr = Some ("[::]:" ^ string_of_int default_p2p_port); + discovery_addr = None; + private_mode = false; + limits = default_p2p_limits; + disable_mempool = false; + disable_testchain = false; + greylisting_config = P2p_point_state.Info.default_greylisting_config } + +let default_rpc = + {listen_addr = None; cors_origins = []; cors_headers = []; tls = None} + +let default_shell = + { block_validator_limits = Node.default_block_validator_limits; + prevalidator_limits = Node.default_prevalidator_limits; + peer_validator_limits = Node.default_peer_validator_limits; + chain_validator_limits = Node.default_chain_validator_limits; + history_mode = None } + +let default_config = + { data_dir = default_data_dir; + p2p = default_p2p; + rpc = default_rpc; + log = Lwt_log_sink_unix.default_cfg; + internal_events = Internal_event_unix.Configuration.default; + shell = default_shell } let limit : P2p.limits Data_encoding.t = let open Data_encoding in conv - (fun { P2p.connection_timeout ; authentication_timeout ; greylist_timeout ; - maintenance_idle_time ; - min_connections ; expected_connections ; max_connections ; - backlog ; max_incoming_connections ; - max_download_speed ; max_upload_speed ; - read_buffer_size ; read_queue_size ; write_queue_size ; - incoming_app_message_queue_size ; - incoming_message_queue_size ; outgoing_message_queue_size ; - known_points_history_size ; known_peer_ids_history_size ; - max_known_points ; max_known_peer_ids ; - swap_linger ; binary_chunks_size - } -> - (((( connection_timeout, authentication_timeout, - min_connections, expected_connections, - max_connections, backlog, max_incoming_connections, - max_download_speed, max_upload_speed, swap_linger), - ( binary_chunks_size, read_buffer_size, read_queue_size, write_queue_size, - incoming_app_message_queue_size, - incoming_message_queue_size, outgoing_message_queue_size, - known_points_history_size, known_peer_ids_history_size, - max_known_points)), - ( max_known_peer_ids, greylist_timeout, maintenance_idle_time)))) - (fun (((( connection_timeout, authentication_timeout, - min_connections, expected_connections, - max_connections, backlog, max_incoming_connections, - max_download_speed, max_upload_speed, swap_linger), - ( binary_chunks_size, read_buffer_size, read_queue_size, write_queue_size, - incoming_app_message_queue_size, - incoming_message_queue_size, outgoing_message_queue_size, - known_points_history_size, known_peer_ids_history_size, - max_known_points)), - ( max_known_peer_ids, greylist_timeout, maintenance_idle_time))) -> - { connection_timeout ; authentication_timeout ; greylist_timeout ; - maintenance_idle_time ; - min_connections ; expected_connections ; - max_connections ; backlog ; max_incoming_connections ; - max_download_speed ; max_upload_speed ; - read_buffer_size ; read_queue_size ; 
write_queue_size ; - incoming_app_message_queue_size ; - incoming_message_queue_size ; outgoing_message_queue_size ; - known_points_history_size ; known_peer_ids_history_size ; - max_known_points ; max_known_peer_ids ; swap_linger ; - binary_chunks_size - }) + (fun { P2p.connection_timeout; + authentication_timeout; + greylist_timeout; + maintenance_idle_time; + min_connections; + expected_connections; + max_connections; + backlog; + max_incoming_connections; + max_download_speed; + max_upload_speed; + read_buffer_size; + read_queue_size; + write_queue_size; + incoming_app_message_queue_size; + incoming_message_queue_size; + outgoing_message_queue_size; + known_points_history_size; + known_peer_ids_history_size; + max_known_points; + max_known_peer_ids; + swap_linger; + binary_chunks_size } -> + ( ( ( connection_timeout, + authentication_timeout, + min_connections, + expected_connections, + max_connections, + backlog, + max_incoming_connections, + max_download_speed, + max_upload_speed, + swap_linger ), + ( binary_chunks_size, + read_buffer_size, + read_queue_size, + write_queue_size, + incoming_app_message_queue_size, + incoming_message_queue_size, + outgoing_message_queue_size, + known_points_history_size, + known_peer_ids_history_size, + max_known_points ) ), + (max_known_peer_ids, greylist_timeout, maintenance_idle_time) )) + (fun ( ( ( connection_timeout, + authentication_timeout, + min_connections, + expected_connections, + max_connections, + backlog, + max_incoming_connections, + max_download_speed, + max_upload_speed, + swap_linger ), + ( binary_chunks_size, + read_buffer_size, + read_queue_size, + write_queue_size, + incoming_app_message_queue_size, + incoming_message_queue_size, + outgoing_message_queue_size, + known_points_history_size, + known_peer_ids_history_size, + max_known_points ) ), + (max_known_peer_ids, greylist_timeout, maintenance_idle_time) ) -> + { connection_timeout; + authentication_timeout; + greylist_timeout; + maintenance_idle_time; + min_connections; + expected_connections; + max_connections; + backlog; + max_incoming_connections; + max_download_speed; + max_upload_speed; + read_buffer_size; + read_queue_size; + write_queue_size; + incoming_app_message_queue_size; + incoming_message_queue_size; + outgoing_message_queue_size; + known_points_history_size; + known_peer_ids_history_size; + max_known_points; + max_known_peer_ids; + swap_linger; + binary_chunks_size }) (merge_objs (merge_objs (obj10 - (dft "connection-timeout" - ~description: "Delay acceptable when initiating a \ - connection to a new peer, in seconds." - Time.System.Span.encoding default_p2p_limits.authentication_timeout) - (dft "authentication-timeout" - ~description: "Delay granted to a peer to perform authentication, \ - in seconds." - Time.System.Span.encoding default_p2p_limits.authentication_timeout) - (dft "min-connections" - ~description: "Strict minimum number of connections (triggers an \ - urgent maintenance)." + (dft + "connection-timeout" + ~description: + "Delay acceptable when initiating a connection to a new \ + peer, in seconds." + Time.System.Span.encoding + default_p2p_limits.authentication_timeout) + (dft + "authentication-timeout" + ~description: + "Delay granted to a peer to perform authentication, in \ + seconds." + Time.System.Span.encoding + default_p2p_limits.authentication_timeout) + (dft + "min-connections" + ~description: + "Strict minimum number of connections (triggers an urgent \ + maintenance)." 
uint16 default_p2p_limits.min_connections) - (dft "expected-connections" - ~description: "Targeted number of connections to reach when \ - bootstrapping / maintaining." + (dft + "expected-connections" + ~description: + "Targeted number of connections to reach when bootstrapping \ + / maintaining." uint16 default_p2p_limits.expected_connections) - (dft "max-connections" - ~description: "Maximum number of connections (exceeding peers are \ - disconnected)." + (dft + "max-connections" + ~description: + "Maximum number of connections (exceeding peers are \ + disconnected)." uint16 default_p2p_limits.max_connections) - (dft "backlog" - ~description: "Number above which pending incoming connections are \ - immediately rejected." + (dft + "backlog" + ~description: + "Number above which pending incoming connections are \ + immediately rejected." uint8 default_p2p_limits.backlog) - (dft "max-incoming-connections" - ~description: "Number above which pending incoming connections are \ - immediately rejected." + (dft + "max-incoming-connections" + ~description: + "Number above which pending incoming connections are \ + immediately rejected." uint8 default_p2p_limits.max_incoming_connections) - (opt "max-download-speed" - ~description: "Max download speeds in KiB/s." + (opt + "max-download-speed" + ~description:"Max download speeds in KiB/s." int31) - (opt "max-upload-speed" - ~description: "Max upload speeds in KiB/s." + (opt + "max-upload-speed" + ~description:"Max upload speeds in KiB/s." int31) - (dft "swap-linger" Time.System.Span.encoding default_p2p_limits.swap_linger)) + (dft + "swap-linger" + Time.System.Span.encoding + default_p2p_limits.swap_linger)) (obj10 (opt "binary-chunks-size" uint8) - (dft "read-buffer-size" - ~description: "Size of the buffer passed to read(2)." + (dft + "read-buffer-size" + ~description:"Size of the buffer passed to read(2)." int31 default_p2p_limits.read_buffer_size) (opt "read-queue-size" int31) @@ -241,155 +298,206 @@ let limit : P2p.limits Data_encoding.t = (opt "incoming-app-message-queue-size" int31) (opt "incoming-message-queue-size" int31) (opt "outgoing-message-queue-size" int31) - (dft "known_points_history_size" uint16 + (dft + "known_points_history_size" + uint16 default_p2p_limits.known_points_history_size) - (dft "known_peer_ids_history_size" uint16 + (dft + "known_peer_ids_history_size" + uint16 default_p2p_limits.known_points_history_size) - (opt "max_known_points" (tup2 uint16 uint16)) - )) + (opt "max_known_points" (tup2 uint16 uint16)))) (obj3 (opt "max_known_peer_ids" (tup2 uint16 uint16)) - (dft "greylist-timeout" - ~description: "GC delay for the greylists tables, in seconds." - Time.System.Span.encoding default_p2p_limits.greylist_timeout) - (dft "maintenance-idle-time" - ~description: "How long to wait at most, in seconds, \ - before running a maintenance loop." - Time.System.Span.encoding default_p2p_limits.maintenance_idle_time) - ) - ) + (dft + "greylist-timeout" + ~description:"GC delay for the greylists tables, in seconds." + Time.System.Span.encoding + default_p2p_limits.greylist_timeout) + (dft + "maintenance-idle-time" + ~description: + "How long to wait at most, in seconds, before running a \ + maintenance loop." 
+ Time.System.Span.encoding + default_p2p_limits.maintenance_idle_time))) let p2p = let open Data_encoding in conv - (fun { expected_pow ; bootstrap_peers ; - listen_addr ; discovery_addr ; private_mode ; - limits ; disable_mempool ; disable_testchain ; greylisting_config } -> - (expected_pow, bootstrap_peers, - listen_addr, discovery_addr, private_mode, limits, - disable_mempool, disable_testchain, greylisting_config)) - (fun (expected_pow, bootstrap_peers, - listen_addr, discovery_addr, private_mode, limits, - disable_mempool, disable_testchain, greylisting_config) -> - { expected_pow ; bootstrap_peers ; - listen_addr ; discovery_addr ; private_mode ; limits ; - disable_mempool ; disable_testchain ; greylisting_config }) + (fun { expected_pow; + bootstrap_peers; + listen_addr; + discovery_addr; + private_mode; + limits; + disable_mempool; + disable_testchain; + greylisting_config } -> + ( expected_pow, + bootstrap_peers, + listen_addr, + discovery_addr, + private_mode, + limits, + disable_mempool, + disable_testchain, + greylisting_config )) + (fun ( expected_pow, + bootstrap_peers, + listen_addr, + discovery_addr, + private_mode, + limits, + disable_mempool, + disable_testchain, + greylisting_config ) -> + { expected_pow; + bootstrap_peers; + listen_addr; + discovery_addr; + private_mode; + limits; + disable_mempool; + disable_testchain; + greylisting_config }) (obj9 - (dft "expected-proof-of-work" - ~description: "Floating point number between 0 and 256 that represents a \ - difficulty, 24 signifies for example that at least 24 leading \ - zeroes are expected in the hash." - float default_p2p.expected_pow) - (dft "bootstrap-peers" - ~description: "List of hosts. Tezos can connect to both IPv6 and IPv4 hosts. \ - If the port is not specified, default port 9732 will be assumed." - (list string) default_p2p.bootstrap_peers) - (opt "listen-addr" - ~description: "Host to listen to. If the port is not \ - specified, the default port 8732 will be \ - assumed." + (dft + "expected-proof-of-work" + ~description: + "Floating point number between 0 and 256 that represents a \ + difficulty, 24 signifies for example that at least 24 leading \ + zeroes are expected in the hash." + float + default_p2p.expected_pow) + (dft + "bootstrap-peers" + ~description: + "List of hosts. Tezos can connect to both IPv6 and IPv4 hosts. If \ + the port is not specified, default port 9732 will be assumed." + (list string) + default_p2p.bootstrap_peers) + (opt + "listen-addr" + ~description: + "Host to listen to. If the port is not specified, the default \ + port 8732 will be assumed." string) - (dft "discovery-addr" - ~description: "Host for local peer discovery. If the port is not \ - specified, the default port 10732 will be \ - assumed." - (option string) default_p2p.discovery_addr) - (dft "private-mode" - ~description: "Specify if the node is in private mode or \ - not. A node in private mode rejects incoming \ - connections from untrusted peers and only \ - opens outgoing connections to peers listed in \ - 'bootstrap-peers' or provided with '--peer' \ - option. Moreover, these peers will keep the \ - identity and the address of the private node \ - secret." - bool false) - (dft "limits" - ~description: "Network limits" - limit default_p2p_limits) - (dft "disable_mempool" - ~description: "If set to [true], the node will not participate in \ - the propagation of pending operations (mempool). \ - Default value is [false]. \ - It can be used to decrease the memory and \ - computation footprints of the node." 
- bool false) - (dft "disable_testchain" - ~description: "If set to [true], the node will not spawn a testchain during \ - the protocol's testing voting period. \ - Default value is [false]. It may be used used to decrease the \ - node storage usage and computation by droping the validation \ - of the test network blocks." - bool false) + (dft + "discovery-addr" + ~description: + "Host for local peer discovery. If the port is not specified, the \ + default port 10732 will be assumed." + (option string) + default_p2p.discovery_addr) + (dft + "private-mode" + ~description: + "Specify if the node is in private mode or not. A node in private \ + mode rejects incoming connections from untrusted peers and only \ + opens outgoing connections to peers listed in 'bootstrap-peers' \ + or provided with '--peer' option. Moreover, these peers will \ + keep the identity and the address of the private node secret." + bool + false) + (dft "limits" ~description:"Network limits" limit default_p2p_limits) + (dft + "disable_mempool" + ~description: + "If set to [true], the node will not participate in the \ + propagation of pending operations (mempool). Default value is \ + [false]. It can be used to decrease the memory and computation \ + footprints of the node." + bool + false) + (dft + "disable_testchain" + ~description: + "If set to [true], the node will not spawn a testchain during the \ + protocol's testing voting period. Default value is [false]. It \ + may be used used to decrease the node storage usage and \ + computation by droping the validation of the test network blocks." + bool + false) (let open P2p_point_state.Info in - dft "greylisting_config" - ~description: "The greylisting policy." - greylisting_config_encoding default_greylisting_config) - ) + dft + "greylisting_config" + ~description:"The greylisting policy." + greylisting_config_encoding + default_greylisting_config)) let rpc : rpc Data_encoding.t = let open Data_encoding in conv - (fun { cors_origins ; cors_headers ; listen_addr ; tls } -> - let cert, key = - match tls with - | None -> None, None - | Some { cert ; key } -> Some cert, Some key in - (listen_addr, cors_origins, cors_headers, cert, key )) - (fun (listen_addr, cors_origins, cors_headers, cert, key ) -> - let tls = - match cert, key with - | None, _ | _, None -> None - | Some cert, Some key -> Some { cert ; key } in - { listen_addr ; cors_origins ; cors_headers ; tls }) + (fun {cors_origins; cors_headers; listen_addr; tls} -> + let (cert, key) = + match tls with + | None -> + (None, None) + | Some {cert; key} -> + (Some cert, Some key) + in + (listen_addr, cors_origins, cors_headers, cert, key)) + (fun (listen_addr, cors_origins, cors_headers, cert, key) -> + let tls = + match (cert, key) with + | (None, _) | (_, None) -> + None + | (Some cert, Some key) -> + Some {cert; key} + in + {listen_addr; cors_origins; cors_headers; tls}) (obj5 - (opt "listen-addr" - ~description: "Host to listen to. If the port is not specified, \ - the default port 8732 will be assumed." - string) - (dft "cors-origin" - ~description: "Cross Origin Resource Sharing parameters, see \ - https://en.wikipedia.org/wiki/Cross-origin_resource_sharing." - (list string) default_rpc.cors_origins) - (dft "cors-headers" - ~description: "Cross Origin Resource Sharing parameters, see \ - https://en.wikipedia.org/wiki/Cross-origin_resource_sharing." - (list string) default_rpc.cors_headers) - (opt "crt" - ~description: "Certificate file (necessary when TLS is used)." 
+ (opt + "listen-addr" + ~description: + "Host to listen to. If the port is not specified, the default \ + port 8732 will be assumed." string) - (opt "key" - ~description: "Key file (necessary when TLS is used)." + (dft + "cors-origin" + ~description: + "Cross Origin Resource Sharing parameters, see \ + https://en.wikipedia.org/wiki/Cross-origin_resource_sharing." + (list string) + default_rpc.cors_origins) + (dft + "cors-headers" + ~description: + "Cross Origin Resource Sharing parameters, see \ + https://en.wikipedia.org/wiki/Cross-origin_resource_sharing." + (list string) + default_rpc.cors_headers) + (opt + "crt" + ~description:"Certificate file (necessary when TLS is used)." string) - ) + (opt "key" ~description:"Key file (necessary when TLS is used)." string)) -let worker_limits_encoding - default_size - default_level = +let worker_limits_encoding default_size default_level = let open Data_encoding in conv - (fun { Worker_types.backlog_size ; backlog_level ;} -> - (backlog_size, backlog_level)) - (fun (backlog_size, backlog_level) -> - { backlog_size ; backlog_level }) + (fun {Worker_types.backlog_size; backlog_level} -> + (backlog_size, backlog_level)) + (fun (backlog_size, backlog_level) -> {backlog_size; backlog_level}) (obj2 (dft "worker_backlog_size" uint16 default_size) - (dft "worker_backlog_level" - Internal_event.Level.encoding default_level)) + (dft "worker_backlog_level" Internal_event.Level.encoding default_level)) -let timeout_encoding = - Time.System.Span.encoding +let timeout_encoding = Time.System.Span.encoding let block_validator_limits_encoding = let open Data_encoding in conv - (fun { Node.protocol_timeout ; worker_limits } -> - (protocol_timeout, worker_limits)) + (fun {Node.protocol_timeout; worker_limits} -> + (protocol_timeout, worker_limits)) (fun (protocol_timeout, worker_limits) -> - { protocol_timeout ; worker_limits}) + {protocol_timeout; worker_limits}) (merge_objs (obj1 - (dft "protocol_request_timeout" timeout_encoding + (dft + "protocol_request_timeout" + timeout_encoding default_shell.block_validator_limits.protocol_timeout)) (worker_limits_encoding default_shell.block_validator_limits.worker_limits.backlog_size @@ -398,57 +506,84 @@ let block_validator_limits_encoding = let prevalidator_limits_encoding = let open Data_encoding in conv - (fun { Node.operation_timeout ; max_refused_operations ; worker_limits } -> - ((operation_timeout, max_refused_operations), worker_limits)) + (fun {Node.operation_timeout; max_refused_operations; worker_limits} -> + ((operation_timeout, max_refused_operations), worker_limits)) (fun ((operation_timeout, max_refused_operations), worker_limits) -> - { operation_timeout ; max_refused_operations ; worker_limits}) + {operation_timeout; max_refused_operations; worker_limits}) (merge_objs (obj2 - (dft "operations_request_timeout" timeout_encoding + (dft + "operations_request_timeout" + timeout_encoding default_shell.prevalidator_limits.operation_timeout) - (dft "max_refused_operations" uint16 + (dft + "max_refused_operations" + uint16 default_shell.prevalidator_limits.max_refused_operations)) (worker_limits_encoding default_shell.prevalidator_limits.worker_limits.backlog_size - default_shell.prevalidator_limits.worker_limits.backlog_level - )) + default_shell.prevalidator_limits.worker_limits.backlog_level)) let peer_validator_limits_encoding = let open Data_encoding in let default_limits = default_shell.peer_validator_limits in conv - (fun { Node.block_header_timeout ; block_operations_timeout ; - protocol_timeout ; 
new_head_request_timeout ; worker_limits } -> - ((block_header_timeout, block_operations_timeout, - protocol_timeout, new_head_request_timeout), worker_limits)) - (fun ((block_header_timeout, block_operations_timeout, - protocol_timeout, new_head_request_timeout), worker_limits) -> - { block_header_timeout ; block_operations_timeout ; - protocol_timeout ; new_head_request_timeout ; worker_limits }) + (fun { Node.block_header_timeout; + block_operations_timeout; + protocol_timeout; + new_head_request_timeout; + worker_limits } -> + ( ( block_header_timeout, + block_operations_timeout, + protocol_timeout, + new_head_request_timeout ), + worker_limits )) + (fun ( ( block_header_timeout, + block_operations_timeout, + protocol_timeout, + new_head_request_timeout ), + worker_limits ) -> + { block_header_timeout; + block_operations_timeout; + protocol_timeout; + new_head_request_timeout; + worker_limits }) (merge_objs (obj4 - (dft "block_header_request_timeout" timeout_encoding default_limits.block_header_timeout) - (dft "block_operations_request_timeout" timeout_encoding default_limits.block_operations_timeout) - (dft "protocol_request_timeout" timeout_encoding default_limits.protocol_timeout) - (dft "new_head_request_timeout" timeout_encoding default_limits.new_head_request_timeout)) + (dft + "block_header_request_timeout" + timeout_encoding + default_limits.block_header_timeout) + (dft + "block_operations_request_timeout" + timeout_encoding + default_limits.block_operations_timeout) + (dft + "protocol_request_timeout" + timeout_encoding + default_limits.protocol_timeout) + (dft + "new_head_request_timeout" + timeout_encoding + default_limits.new_head_request_timeout)) (worker_limits_encoding default_limits.worker_limits.backlog_size - default_limits.worker_limits.backlog_level - )) + default_limits.worker_limits.backlog_level)) let chain_validator_limits_encoding = let open Data_encoding in conv - (fun { Node.bootstrap_threshold ; worker_limits } -> - (bootstrap_threshold, worker_limits)) + (fun {Node.bootstrap_threshold; worker_limits} -> + (bootstrap_threshold, worker_limits)) (fun (bootstrap_threshold, worker_limits) -> - { bootstrap_threshold ; worker_limits}) + {bootstrap_threshold; worker_limits}) (merge_objs (obj1 - (dft "bootstrap_threshold" + (dft + "bootstrap_threshold" ~description: - "Set the number of peers with whom a chain synchronization must \ - be completed to bootstrap the node." + "Set the number of peers with whom a chain synchronization \ + must be completed to bootstrap the node." 
uint8 default_shell.chain_validator_limits.bootstrap_threshold)) (worker_limits_encoding @@ -458,188 +593,185 @@ let chain_validator_limits_encoding = let shell = let open Data_encoding in conv - (fun { peer_validator_limits ; block_validator_limits ; - prevalidator_limits ; chain_validator_limits ; history_mode } -> - (peer_validator_limits, block_validator_limits, - prevalidator_limits, chain_validator_limits, history_mode)) - (fun (peer_validator_limits, block_validator_limits, - prevalidator_limits, chain_validator_limits, history_mode) -> - { peer_validator_limits ; block_validator_limits ; - prevalidator_limits ; chain_validator_limits ; history_mode }) + (fun { peer_validator_limits; + block_validator_limits; + prevalidator_limits; + chain_validator_limits; + history_mode } -> + ( peer_validator_limits, + block_validator_limits, + prevalidator_limits, + chain_validator_limits, + history_mode )) + (fun ( peer_validator_limits, + block_validator_limits, + prevalidator_limits, + chain_validator_limits, + history_mode ) -> + { peer_validator_limits; + block_validator_limits; + prevalidator_limits; + chain_validator_limits; + history_mode }) (obj5 - (dft "peer_validator" peer_validator_limits_encoding default_shell.peer_validator_limits) - (dft "block_validator" block_validator_limits_encoding default_shell.block_validator_limits) - (dft "prevalidator" prevalidator_limits_encoding default_shell.prevalidator_limits) - (dft "chain_validator" chain_validator_limits_encoding default_shell.chain_validator_limits) - (opt "history_mode" History_mode.encoding) - ) + (dft + "peer_validator" + peer_validator_limits_encoding + default_shell.peer_validator_limits) + (dft + "block_validator" + block_validator_limits_encoding + default_shell.block_validator_limits) + (dft + "prevalidator" + prevalidator_limits_encoding + default_shell.prevalidator_limits) + (dft + "chain_validator" + chain_validator_limits_encoding + default_shell.chain_validator_limits) + (opt "history_mode" History_mode.encoding)) let encoding = let open Data_encoding in conv - (fun { data_dir ; rpc ; p2p ; log ; internal_events ; shell } -> - (data_dir, rpc, p2p, log, internal_events, shell)) + (fun {data_dir; rpc; p2p; log; internal_events; shell} -> + (data_dir, rpc, p2p, log, internal_events, shell)) (fun (data_dir, rpc, p2p, log, internal_events, shell) -> - { data_dir ; rpc ; p2p ; log ; internal_events ; shell }) + {data_dir; rpc; p2p; log; internal_events; shell}) (obj6 - (dft "data-dir" - ~description: "Location of the data dir on disk." - string default_data_dir) - (dft "rpc" - ~description: "Configuration of rpc parameters" - rpc default_rpc) - (req "p2p" - ~description: "Configuration of network parameters" p2p) - (dft "log" + (dft + "data-dir" + ~description:"Location of the data dir on disk." 
+ string + default_data_dir) + (dft + "rpc" + ~description:"Configuration of rpc parameters" + rpc + default_rpc) + (req "p2p" ~description:"Configuration of network parameters" p2p) + (dft + "log" ~description: "Configuration of the Lwt-log sink (part of the logging framework)" - Lwt_log_sink_unix.cfg_encoding Lwt_log_sink_unix.default_cfg) - (dft "internal-events" - ~description: "Configuration of the structured logging framework" + Lwt_log_sink_unix.cfg_encoding + Lwt_log_sink_unix.default_cfg) + (dft + "internal-events" + ~description:"Configuration of the structured logging framework" Internal_event_unix.Configuration.encoding Internal_event_unix.Configuration.default) - (dft "shell" - ~description: "Configuration of network parameters" - shell default_shell)) + (dft + "shell" + ~description:"Configuration of network parameters" + shell + default_shell)) let read fp = - if Sys.file_exists fp then begin - Lwt_utils_unix.Json.read_file fp >>=? fun json -> + if Sys.file_exists fp then + Lwt_utils_unix.Json.read_file fp + >>=? fun json -> try return (Data_encoding.Json.destruct encoding json) with exn -> fail (Exn exn) - end else - return default_config + else return default_config let write fp cfg = - Node_data_version.ensure_data_dir (Filename.dirname fp) >>=? fun () -> - Lwt_utils_unix.Json.write_file fp - (Data_encoding.Json.construct encoding cfg) + Node_data_version.ensure_data_dir (Filename.dirname fp) + >>=? fun () -> + Lwt_utils_unix.Json.write_file fp (Data_encoding.Json.construct encoding cfg) let to_string cfg = - Data_encoding.Json.to_string - (Data_encoding.Json.construct encoding cfg) - -let update - ?data_dir - ?min_connections - ?expected_connections - ?max_connections - ?max_download_speed - ?max_upload_speed - ?binary_chunks_size - ?peer_table_size - ?expected_pow - ?bootstrap_peers - ?listen_addr - ?discovery_addr - ?rpc_listen_addr - ?(private_mode = false) - ?(disable_mempool = false) - ?(disable_testchain = false) - ?(cors_origins = []) - ?(cors_headers = []) - ?rpc_tls - ?log_output - ?bootstrap_threshold - ?history_mode - cfg = + Data_encoding.Json.to_string (Data_encoding.Json.construct encoding cfg) + +let update ?data_dir ?min_connections ?expected_connections ?max_connections + ?max_download_speed ?max_upload_speed ?binary_chunks_size ?peer_table_size + ?expected_pow ?bootstrap_peers ?listen_addr ?discovery_addr + ?rpc_listen_addr ?(private_mode = false) ?(disable_mempool = false) + ?(disable_testchain = false) ?(cors_origins = []) ?(cors_headers = []) + ?rpc_tls ?log_output ?bootstrap_threshold ?history_mode cfg = let data_dir = Option.unopt ~default:cfg.data_dir data_dir in - Node_data_version.ensure_data_dir data_dir >>=? fun () -> + Node_data_version.ensure_data_dir data_dir + >>=? 
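(* ------------------------------------------------------------------------ *)
(* Editor's note: illustrative sketch, not part of the diff. The [limit],
   [p2p], [rpc] and top-level [encoding] values above all follow the same
   pattern: a record is converted to a tuple with [conv] so it can be
   described with [objN], and [dft] supplies a default when a field is
   absent from the JSON. The tiny record below shows that pattern in
   isolation, compiled against the data-encoding library already used by
   this file; the record and field names are invented for the example. *)
type sample = { name : string; retries : int }

let sample_encoding : sample Data_encoding.t =
  let open Data_encoding in
  conv
    (fun { name; retries } -> (name, retries))
    (fun (name, retries) -> { name; retries })
    (obj2
       (req "name" string)
       (dft "retries" ~description:"Number of retries (defaults to 3)." int31 3))

(* Round-trip, and defaulting when the optional field is missing. *)
let _json = Data_encoding.Json.construct sample_encoding { name = "node"; retries = 5 }

let () =
  let partial : Data_encoding.json = `O [("name", `String "node")] in
  let decoded = Data_encoding.Json.destruct sample_encoding partial in
  assert (decoded = { name = "node"; retries = 3 })
(* ------------------------------------------------------------------------ *)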
fun () -> let peer_table_size = - Option.map peer_table_size ~f:(fun i -> i, i / 4 * 3) in - let unopt_list ~default = function - | [] -> default - | l -> l in - let limits : P2p.limits = { - cfg.p2p.limits with - min_connections = - Option.unopt - ~default:cfg.p2p.limits.min_connections - min_connections ; - expected_connections = - Option.unopt - ~default:cfg.p2p.limits.expected_connections - expected_connections ; - max_connections = - Option.unopt - ~default:cfg.p2p.limits.max_connections - max_connections ; - max_download_speed = - Option.first_some - max_download_speed cfg.p2p.limits.max_download_speed ; - max_upload_speed = - Option.first_some - max_upload_speed cfg.p2p.limits.max_upload_speed ; - max_known_points = - Option.first_some - peer_table_size cfg.p2p.limits.max_known_points ; - max_known_peer_ids = - Option.first_some - peer_table_size cfg.p2p.limits.max_known_peer_ids ; - binary_chunks_size = - Option.map ~f:(fun x -> x lsl 10) binary_chunks_size ; - } in - let p2p : p2p = { - expected_pow = - Option.unopt ~default:cfg.p2p.expected_pow expected_pow ; - bootstrap_peers = - Option.unopt ~default:cfg.p2p.bootstrap_peers bootstrap_peers ; - listen_addr = - Option.first_some listen_addr cfg.p2p.listen_addr ; - discovery_addr = - Option.first_some discovery_addr cfg.p2p.discovery_addr ; - private_mode = cfg.p2p.private_mode || private_mode ; - limits ; - disable_mempool = cfg.p2p.disable_mempool || disable_mempool ; - disable_testchain = cfg.p2p.disable_testchain || disable_testchain ; - greylisting_config = cfg.p2p.greylisting_config ; - } - and rpc : rpc = { - listen_addr = - Option.first_some rpc_listen_addr cfg.rpc.listen_addr ; - cors_origins = - unopt_list ~default:cfg.rpc.cors_origins cors_origins ; - cors_headers = - unopt_list ~default:cfg.rpc.cors_headers cors_headers ; - tls = - Option.first_some rpc_tls cfg.rpc.tls ; - } - and log : Lwt_log_sink_unix.cfg = { - cfg.log with - output = Option.unopt ~default:cfg.log.output log_output ; - } - and shell : shell = { - peer_validator_limits = cfg.shell.peer_validator_limits ; - block_validator_limits = cfg.shell.block_validator_limits ; - prevalidator_limits = cfg.shell.prevalidator_limits ; - chain_validator_limits = - Option.unopt_map - ~default:cfg.shell.chain_validator_limits - ~f:(fun bootstrap_threshold -> - { cfg.shell.chain_validator_limits - with bootstrap_threshold }) - bootstrap_threshold ; - history_mode = Option.first_some history_mode cfg.shell.history_mode; - } + Option.map peer_table_size ~f:(fun i -> (i, i / 4 * 3)) + in + let unopt_list ~default = function [] -> default | l -> l in + let limits : P2p.limits = + { cfg.p2p.limits with + min_connections = + Option.unopt ~default:cfg.p2p.limits.min_connections min_connections; + expected_connections = + Option.unopt + ~default:cfg.p2p.limits.expected_connections + expected_connections; + max_connections = + Option.unopt ~default:cfg.p2p.limits.max_connections max_connections; + max_download_speed = + Option.first_some max_download_speed cfg.p2p.limits.max_download_speed; + max_upload_speed = + Option.first_some max_upload_speed cfg.p2p.limits.max_upload_speed; + max_known_points = + Option.first_some peer_table_size cfg.p2p.limits.max_known_points; + max_known_peer_ids = + Option.first_some peer_table_size cfg.p2p.limits.max_known_peer_ids; + binary_chunks_size = Option.map ~f:(fun x -> x lsl 10) binary_chunks_size + } + in + let p2p : p2p = + { expected_pow = Option.unopt ~default:cfg.p2p.expected_pow expected_pow; + bootstrap_peers = + Option.unopt 
~default:cfg.p2p.bootstrap_peers bootstrap_peers; + listen_addr = Option.first_some listen_addr cfg.p2p.listen_addr; + discovery_addr = Option.first_some discovery_addr cfg.p2p.discovery_addr; + private_mode = cfg.p2p.private_mode || private_mode; + limits; + disable_mempool = cfg.p2p.disable_mempool || disable_mempool; + disable_testchain = cfg.p2p.disable_testchain || disable_testchain; + greylisting_config = cfg.p2p.greylisting_config } + and rpc : rpc = + { listen_addr = Option.first_some rpc_listen_addr cfg.rpc.listen_addr; + cors_origins = unopt_list ~default:cfg.rpc.cors_origins cors_origins; + cors_headers = unopt_list ~default:cfg.rpc.cors_headers cors_headers; + tls = Option.first_some rpc_tls cfg.rpc.tls } + and log : Lwt_log_sink_unix.cfg = + {cfg.log with output = Option.unopt ~default:cfg.log.output log_output} + and shell : shell = + { peer_validator_limits = cfg.shell.peer_validator_limits; + block_validator_limits = cfg.shell.block_validator_limits; + prevalidator_limits = cfg.shell.prevalidator_limits; + chain_validator_limits = + Option.unopt_map + ~default:cfg.shell.chain_validator_limits + ~f:(fun bootstrap_threshold -> + {cfg.shell.chain_validator_limits with bootstrap_threshold}) + bootstrap_threshold; + history_mode = Option.first_some history_mode cfg.shell.history_mode } in let internal_events = cfg.internal_events in - return { data_dir ; p2p ; rpc ; log ; internal_events ; shell } + return {data_dir; p2p; rpc; log; internal_events; shell} let resolve_addr ~default_addr ?default_port ?(passive = false) peer = - let addr, port = P2p_point.Id.parse_addr_port peer in + let (addr, port) = P2p_point.Id.parse_addr_port peer in let node = if addr = "" || addr = "_" then default_addr else addr and service = - match port, default_port with - | "", None -> invalid_arg "" - | "", Some default_port -> string_of_int default_port - | port, _ -> port in + match (port, default_port) with + | ("", None) -> + invalid_arg "" + | ("", Some default_port) -> + string_of_int default_port + | (port, _) -> + port + in Lwt_utils_unix.getaddrinfo ~passive ~node ~service let resolve_addrs ~default_addr ?default_port ?passive peers = - Lwt_list.fold_left_s begin fun a peer -> - resolve_addr ~default_addr ?default_port ?passive peer >>= fun points -> - Lwt.return (List.rev_append points a) - end [] peers + Lwt_list.fold_left_s + (fun a peer -> + resolve_addr ~default_addr ?default_port ?passive peer + >>= fun points -> Lwt.return (List.rev_append points a)) + [] + peers let resolve_discovery_addrs discovery_addr = resolve_addr @@ -649,16 +781,19 @@ let resolve_discovery_addrs discovery_addr = discovery_addr >>= fun addrs -> let rec to_ipv4 acc = function - | [] -> Lwt.return (List.rev acc) - | (ip, port) :: xs -> begin match Ipaddr.v4_of_v6 ip with - | Some v -> to_ipv4 ((v, port) :: acc) xs - | None -> - Format.eprintf - "Warning: failed to convert %S to an ipv4 address@." - (Ipaddr.V6.to_string ip) ; - to_ipv4 acc xs - end - in to_ipv4 [] addrs + | [] -> + Lwt.return (List.rev acc) + | (ip, port) :: xs -> ( + match Ipaddr.v4_of_v6 ip with + | Some v -> + to_ipv4 ((v, port) :: acc) xs + | None -> + Format.eprintf + "Warning: failed to convert %S to an ipv4 address@." 
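(* ------------------------------------------------------------------------ *)
(* Editor's note: illustrative sketch, not part of the diff. [update] above
   layers command-line overrides on top of the values read from the
   configuration file: [Option.unopt ~default] (from the Tezos standard
   library) keeps the file value unless an override was given, and
   [Option.first_some] prefers the override when both sides are optional.
   The snippet restates that merge rule with the plain OCaml standard
   library; the record and helper names are invented for the example. *)
type limits_sketch = { min_connections : int; max_download_speed : int option }

let first_some a b = match a with Some _ -> a | None -> b

let update_limits ?min_connections ?max_download_speed (current : limits_sketch) =
  { min_connections =
      Option.value ~default:current.min_connections min_connections;
    max_download_speed =
      first_some max_download_speed current.max_download_speed }

(* Overrides win; anything left unspecified keeps the on-disk value. *)
let _ =
  update_limits
    ~min_connections:20
    { min_connections = 10; max_download_speed = None }
(* ------------------------------------------------------------------------ *)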
+ (Ipaddr.V6.to_string ip) ; + to_ipv4 acc xs ) + in + to_ipv4 [] addrs let resolve_listening_addrs listen_addr = resolve_addr @@ -675,131 +810,147 @@ let resolve_rpc_listening_addrs listen_addr = listen_addr let resolve_bootstrap_addrs peers = - resolve_addrs - ~default_addr:"::" - ~default_port:default_p2p_port - peers + resolve_addrs ~default_addr:"::" ~default_port:default_p2p_port peers let check_listening_addr config = match config.p2p.listen_addr with - | None -> Lwt.return_unit + | None -> + Lwt.return_unit | Some addr -> - Lwt.catch begin fun () -> - resolve_listening_addrs addr >>= function - | [] -> - Format.eprintf "Warning: failed to resolve %S\n@." addr ; - Lwt.return_unit - | _ :: _ -> - Lwt.return_unit - end begin function - | (Invalid_argument msg) -> - Format.eprintf "Warning: failed to parse %S:\ %s\n@." addr msg ; - Lwt.return_unit - | exn -> Lwt.fail exn - end + Lwt.catch + (fun () -> + resolve_listening_addrs addr + >>= function + | [] -> + Format.eprintf "Warning: failed to resolve %S\n@." addr ; + Lwt.return_unit + | _ :: _ -> + Lwt.return_unit) + (function + | Invalid_argument msg -> + Format.eprintf "Warning: failed to parse %S: %s\n@." addr msg ; + Lwt.return_unit + | exn -> + Lwt.fail exn) let check_discovery_addr config = match config.p2p.discovery_addr with - | None -> Lwt.return_unit + | None -> + Lwt.return_unit | Some addr -> - Lwt.catch begin fun () -> - resolve_discovery_addrs addr >>= function - | [] -> - Format.eprintf "Warning: failed to resolve %S\n@." addr ; - Lwt.return_unit - | _ :: _ -> - Lwt.return_unit - end begin function - | (Invalid_argument msg) -> - Format.eprintf "Warning: failed to parse %S:\ %s\n@." addr msg ; - Lwt.return_unit - | exn -> Lwt.fail exn - end + Lwt.catch + (fun () -> + resolve_discovery_addrs addr + >>= function + | [] -> + Format.eprintf "Warning: failed to resolve %S\n@." addr ; + Lwt.return_unit + | _ :: _ -> + Lwt.return_unit) + (function + | Invalid_argument msg -> + Format.eprintf "Warning: failed to parse %S: %s\n@." addr msg ; + Lwt.return_unit + | exn -> + Lwt.fail exn) let check_rpc_listening_addr config = match config.rpc.listen_addr with - | None -> Lwt.return_unit + | None -> + Lwt.return_unit | Some addr -> - Lwt.catch begin fun () -> - resolve_rpc_listening_addrs addr >>= function - | [] -> - Format.eprintf "Warning: failed to resolve %S\n@." addr ; - Lwt.return_unit - | _ :: _ -> - Lwt.return_unit - end begin function - | (Invalid_argument msg) -> - Format.eprintf "Warning: failed to parse %S:\ %s\n@." addr msg ; - Lwt.return_unit - | exn -> Lwt.fail exn - end + Lwt.catch + (fun () -> + resolve_rpc_listening_addrs addr + >>= function + | [] -> + Format.eprintf "Warning: failed to resolve %S\n@." addr ; + Lwt.return_unit + | _ :: _ -> + Lwt.return_unit) + (function + | Invalid_argument msg -> + Format.eprintf "Warning: failed to parse %S: %s\n@." addr msg ; + Lwt.return_unit + | exn -> + Lwt.fail exn) let check_bootstrap_peer addr = - Lwt.catch begin fun () -> - resolve_bootstrap_addrs [addr] >>= function - | [] -> - Format.eprintf "Warning: cannot resolve %S\n@." addr ; - Lwt.return_unit - | _ :: _ -> - Lwt.return_unit - end begin function - | (Invalid_argument msg) -> - Format.eprintf "Warning: failed to parse %S:\ %s\n@." addr msg ; - Lwt.return_unit - | exn -> Lwt.fail exn - end - + Lwt.catch + (fun () -> + resolve_bootstrap_addrs [addr] + >>= function + | [] -> + Format.eprintf "Warning: cannot resolve %S\n@." 
addr ; + Lwt.return_unit + | _ :: _ -> + Lwt.return_unit) + (function + | Invalid_argument msg -> + Format.eprintf "Warning: failed to parse %S: %s\n@." addr msg ; + Lwt.return_unit + | exn -> + Lwt.fail exn) let check_bootstrap_peers config = Lwt_list.iter_p check_bootstrap_peer config.p2p.bootstrap_peers - -let fail fmt = - Format.kasprintf (fun s -> prerr_endline s ; exit 1) fmt +let fail fmt = Format.kasprintf (fun s -> prerr_endline s ; exit 1) fmt let check_connections config = - if config.p2p.limits.min_connections > config.p2p.limits.expected_connections then - fail "Error: The minumum number of connections is greater than \ - the expected number of connections" + if config.p2p.limits.min_connections > config.p2p.limits.expected_connections + then + fail + "Error: The minumum number of connections is greater than the expected \ + number of connections" config.p2p.limits.min_connections config.p2p.limits.expected_connections ; - if config.p2p.limits.expected_connections > config.p2p.limits.max_connections then - fail "Error: The expected number of connections is greater than \ - the maximum number of connections" + if config.p2p.limits.expected_connections > config.p2p.limits.max_connections + then + fail + "Error: The expected number of connections is greater than the maximum \ + number of connections" config.p2p.limits.expected_connections config.p2p.limits.max_connections ; - begin - match config.p2p.limits.max_known_peer_ids with - | None -> () - | Some (max_known_peer_ids, target_known_peer_ids) -> - if target_known_peer_ids > max_known_peer_ids then - fail "Error: The target number of known peer ids is greater than \ - the maximum number of known peer ids." - target_known_peer_ids max_known_peer_ids ; - if config.p2p.limits.max_connections > target_known_peer_ids then - fail "Error: The target number of known peer ids is lower than \ - the maximum number of connections." - target_known_peer_ids max_known_peer_ids ; - end ; - begin - match config.p2p.limits.max_known_points with - | None -> () - | Some (max_known_points, target_known_points) -> - if target_known_points > max_known_points then - fail "Error: The target number of known points is greater than \ - the maximum number of known points." - target_known_points max_known_points ; - if config.p2p.limits.max_connections > target_known_points then - fail "Error: The target number of known points is lower than \ - the maximum number of connections." - target_known_points max_known_points ; - end - + ( match config.p2p.limits.max_known_peer_ids with + | None -> + () + | Some (max_known_peer_ids, target_known_peer_ids) -> + if target_known_peer_ids > max_known_peer_ids then + fail + "Error: The target number of known peer ids is greater than the \ + maximum number of known peer ids." + target_known_peer_ids + max_known_peer_ids ; + if config.p2p.limits.max_connections > target_known_peer_ids then + fail + "Error: The target number of known peer ids is lower than the \ + maximum number of connections." + target_known_peer_ids + max_known_peer_ids ) ; + match config.p2p.limits.max_known_points with + | None -> + () + | Some (max_known_points, target_known_points) -> + if target_known_points > max_known_points then + fail + "Error: The target number of known points is greater than the \ + maximum number of known points." 
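(* ------------------------------------------------------------------------ *)
(* Editor's note: illustrative sketch, not part of the diff. It restates the
   invariants enforced by [check_connections] as a single predicate: the
   connection bounds must be ordered, and the peer-id table target must be
   able to hold [max_connections] entries while staying below its own
   maximum (the same shape of check is repeated for known points). The
   function name is invented for the example. *)
let connection_limits_ok ~min_connections ~expected_connections
    ~max_connections ~target_known_peer_ids ~max_known_peer_ids =
  min_connections <= expected_connections
  && expected_connections <= max_connections
  && target_known_peer_ids <= max_known_peer_ids
  && max_connections <= target_known_peer_ids

(* The defaults above (10 / 50 / 100 connections, (400, 300) peer ids)
   satisfy the predicate. *)
let _ =
  connection_limits_ok ~min_connections:10 ~expected_connections:50
    ~max_connections:100 ~target_known_peer_ids:300 ~max_known_peer_ids:400
(* ------------------------------------------------------------------------ *)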
+ target_known_points + max_known_points ; + if config.p2p.limits.max_connections > target_known_points then + fail + "Error: The target number of known points is lower than the maximum \ + number of connections." + target_known_points + max_known_points let check config = - check_listening_addr config >>= fun () -> - check_rpc_listening_addr config >>= fun () -> - check_discovery_addr config >>= fun () -> - check_bootstrap_peers config >>= fun () -> - check_connections config ; - Lwt.return_unit + check_listening_addr config + >>= fun () -> + check_rpc_listening_addr config + >>= fun () -> + check_discovery_addr config + >>= fun () -> + check_bootstrap_peers config + >>= fun () -> check_connections config ; Lwt.return_unit diff --git a/src/bin_node/node_config_file.mli b/src/bin_node/node_config_file.mli index 9e181910205fe093b584630b6b2b6c95e897f7d6..8c258e17d781bc989cf723494bafc9c14bd23b84 100644 --- a/src/bin_node/node_config_file.mli +++ b/src/bin_node/node_config_file.mli @@ -27,60 +27,61 @@ [@@@ocaml.warning "-30"] type t = { - data_dir : string ; - p2p : p2p ; - rpc : rpc ; - log : Lwt_log_sink_unix.cfg ; - internal_events : Internal_event_unix.Configuration.t ; - shell : shell ; + data_dir : string; + p2p : p2p; + rpc : rpc; + log : Lwt_log_sink_unix.cfg; + internal_events : Internal_event_unix.Configuration.t; + shell : shell } and p2p = { - expected_pow : float ; - bootstrap_peers : string list ; - listen_addr : string option ; - discovery_addr : string option ; - private_mode : bool ; - limits : P2p.limits ; - disable_mempool : bool ; - disable_testchain : bool ; - greylisting_config : P2p_point_state.Info.greylisting_config ; + expected_pow : float; + bootstrap_peers : string list; + listen_addr : string option; + discovery_addr : string option; + private_mode : bool; + limits : P2p.limits; + disable_mempool : bool; + disable_testchain : bool; + greylisting_config : P2p_point_state.Info.greylisting_config } and rpc = { - listen_addr : string option ; - cors_origins : string list ; - cors_headers : string list ; - tls : tls option ; + listen_addr : string option; + cors_origins : string list; + cors_headers : string list; + tls : tls option } -and tls = { - cert : string ; - key : string ; -} +and tls = {cert : string; key : string} and shell = { - block_validator_limits : Node.block_validator_limits ; - prevalidator_limits : Node.prevalidator_limits ; - peer_validator_limits : Node.peer_validator_limits ; - chain_validator_limits : Node.chain_validator_limits ; - history_mode : History_mode.t option ; + block_validator_limits : Node.block_validator_limits; + prevalidator_limits : Node.prevalidator_limits; + peer_validator_limits : Node.peer_validator_limits; + chain_validator_limits : Node.chain_validator_limits; + history_mode : History_mode.t option } -val default_data_dir: string -val default_p2p_port: int -val default_rpc_port: int -val default_p2p: p2p -val default_config: t +val default_data_dir : string + +val default_p2p_port : int + +val default_rpc_port : int + +val default_p2p : p2p + +val default_config : t -val update: +val update : ?data_dir:string -> ?min_connections:int -> ?expected_connections:int -> ?max_connections:int -> ?max_download_speed:int -> ?max_upload_speed:int -> - ?binary_chunks_size:int-> + ?binary_chunks_size:int -> ?peer_table_size:int -> ?expected_pow:float -> ?bootstrap_peers:string list -> @@ -96,17 +97,23 @@ val update: ?log_output:Lwt_log_sink_unix.Output.t -> ?bootstrap_threshold:int -> ?history_mode:History_mode.t -> - t -> t tzresult 
Lwt.t + t -> + t tzresult Lwt.t + +val to_string : t -> string + +val read : string -> t tzresult Lwt.t + +val write : string -> t -> unit tzresult Lwt.t + +val resolve_listening_addrs : string -> (P2p_addr.t * int) list Lwt.t + +val resolve_discovery_addrs : string -> (Ipaddr.V4.t * int) list Lwt.t -val to_string: t -> string -val read: string -> t tzresult Lwt.t -val write: string -> t -> unit tzresult Lwt.t +val resolve_rpc_listening_addrs : string -> (P2p_addr.t * int) list Lwt.t -val resolve_listening_addrs: string -> (P2p_addr.t * int) list Lwt.t -val resolve_discovery_addrs: string -> (Ipaddr.V4.t * int) list Lwt.t -val resolve_rpc_listening_addrs: string -> (P2p_addr.t * int) list Lwt.t -val resolve_bootstrap_addrs: string list -> (P2p_addr.t * int) list Lwt.t +val resolve_bootstrap_addrs : string list -> (P2p_addr.t * int) list Lwt.t -val encoding: t Data_encoding.t +val encoding : t Data_encoding.t -val check: t -> unit Lwt.t +val check : t -> unit Lwt.t diff --git a/src/bin_node/node_data_version.ml b/src/bin_node/node_data_version.ml index 54390ff7c65d4fde1dd08b279589592ad6321349..74ea3c7cd21acce55a4035e774c0d51fcc12c346 100644 --- a/src/bin_node/node_data_version.ml +++ b/src/bin_node/node_data_version.ml @@ -23,7 +23,7 @@ (* *) (*****************************************************************************) -let (//) = Filename.concat +let ( // ) = Filename.concat type t = string @@ -34,12 +34,14 @@ let data_version = "0.0.3" much, an idea would be to have triples (version, version, converter), and to sequence them dynamically instead of statically. *) -let upgradable_data_version = [ -] +let upgradable_data_version = [] let store_dir data_dir = data_dir // "store" + let context_dir data_dir = data_dir // "context" + let protocol_dir data_dir = data_dir // "protocol" + let lock_file data_dir = data_dir // "lock" let default_identity_file_name = "identity.json" @@ -51,110 +53,117 @@ let version_file_name = "version.json" let pp ppf version = Format.pp_print_string ppf version type error += Invalid_data_dir_version of t * t + type error += Invalid_data_dir of string + type error += No_data_dir_version_file of string + type error += Could_not_read_data_dir_version of string -type error += Data_dir_needs_upgrade of { expected: t ; actual: t } + +type error += Data_dir_needs_upgrade of {expected : t; actual : t} let () = register_error_kind `Permanent - ~id: "invalidDataDir" - ~title: "Invalid data directory" - ~description: "The data directory cannot be accessed or created" + ~id:"invalidDataDir" + ~title:"Invalid data directory" + ~description:"The data directory cannot be accessed or created" ~pp:(fun ppf path -> - Format.fprintf ppf - "Invalid data directory '%s'." - path) + Format.fprintf ppf "Invalid data directory '%s'." path) Data_encoding.(obj1 (req "datadir_path" string)) - (function - | Invalid_data_dir path -> - Some path - | _ -> None) + (function Invalid_data_dir path -> Some path | _ -> None) (fun path -> Invalid_data_dir path) ; register_error_kind `Permanent - ~id: "invalidDataDirVersion" - ~title: "Invalid data directory version" - ~description: "The data directory version was not the one that was expected" + ~id:"invalidDataDirVersion" + ~title:"Invalid data directory version" + ~description:"The data directory version was not the one that was expected" ~pp:(fun ppf (exp, got) -> - Format.fprintf ppf - "Invalid data directory version '%s' (expected '%s')." 
- got exp) - Data_encoding.(obj2 - (req "expected_version" string) - (req "actual_version" string)) + Format.fprintf + ppf + "Invalid data directory version '%s' (expected '%s')." + got + exp) + Data_encoding.( + obj2 (req "expected_version" string) (req "actual_version" string)) (function | Invalid_data_dir_version (expected, actual) -> Some (expected, actual) - | _ -> None) + | _ -> + None) (fun (expected, actual) -> Invalid_data_dir_version (expected, actual)) ; register_error_kind `Permanent - ~id: "couldNotReadDataDirVersion" - ~title: "Could not read data directory version file" - ~description: "Data directory version file was invalid." + ~id:"couldNotReadDataDirVersion" + ~title:"Could not read data directory version file" + ~description:"Data directory version file was invalid." Data_encoding.(obj1 (req "version_path" string)) ~pp:(fun ppf path -> - Format.fprintf ppf - "Tried to read version file at '%s', \ - \ but the file could not be parsed." - path) + Format.fprintf + ppf + "Tried to read version file at '%s', but the file could not be parsed." + path) (function Could_not_read_data_dir_version path -> Some path | _ -> None) - (fun path -> Could_not_read_data_dir_version path); + (fun path -> Could_not_read_data_dir_version path) ; register_error_kind `Permanent - ~id: "noDataDirVersionFile" - ~title: "Data directory version file does not exist" - ~description: "Data directory version file does not exist" + ~id:"noDataDirVersionFile" + ~title:"Data directory version file does not exist" + ~description:"Data directory version file does not exist" Data_encoding.(obj1 (req "version_path" string)) ~pp:(fun ppf path -> - Format.fprintf ppf - "Expected to find data directory version file at '%s', \ - \ but the file does not exist." - path) + Format.fprintf + ppf + "Expected to find data directory version file at '%s', but the file \ + does not exist." + path) (function No_data_dir_version_file path -> Some path | _ -> None) (fun path -> No_data_dir_version_file path) ; register_error_kind `Permanent - ~id: "dataDirNeedsUpgrade" - ~title: "The data directory needs to be upgraded" - ~description: "The data directory needs to be upgraded" + ~id:"dataDirNeedsUpgrade" + ~title:"The data directory needs to be upgraded" + ~description:"The data directory needs to be upgraded" ~pp:(fun ppf (exp, got) -> - Format.fprintf ppf - "The data directory version is too old.@,\ - Found '%s', expected '%s'.@,\ - It needs to be upgraded with `tezos-node upgrade_storage`." - got exp) - Data_encoding.(obj2 - (req "expected_version" string) - (req "actual_version" string)) + Format.fprintf + ppf + "The data directory version is too old.@,\ + Found '%s', expected '%s'.@,\ + It needs to be upgraded with `tezos-node upgrade_storage`." + got + exp) + Data_encoding.( + obj2 (req "expected_version" string) (req "actual_version" string)) (function - | Data_dir_needs_upgrade { expected ; actual } -> + | Data_dir_needs_upgrade {expected; actual} -> Some (expected, actual) - | _ -> None) - (fun (expected, actual) -> Data_dir_needs_upgrade { expected ; actual }) + | _ -> + None) + (fun (expected, actual) -> Data_dir_needs_upgrade {expected; actual}) -let version_file data_dir = - (Filename.concat data_dir version_file_name) +let version_file data_dir = Filename.concat data_dir version_file_name let check_data_dir_version data_dir = let version_file = version_file data_dir in - fail_unless (Sys.file_exists version_file) - (No_data_dir_version_file version_file) >>=? 
fun () -> + fail_unless + (Sys.file_exists version_file) + (No_data_dir_version_file version_file) + >>=? fun () -> Lwt_utils_unix.Json.read_file version_file - |> trace (Could_not_read_data_dir_version version_file) >>=? fun json -> - begin - try return (Data_encoding.Json.destruct version_encoding json) - with _ -> fail (Could_not_read_data_dir_version version_file) - end >>=? fun version -> - if String.equal version data_version then - return_none + |> trace (Could_not_read_data_dir_version version_file) + >>=? fun json -> + ( try return (Data_encoding.Json.destruct version_encoding json) + with _ -> fail (Could_not_read_data_dir_version version_file) ) + >>=? fun version -> + if String.equal version data_version then return_none else match - List.find_opt (fun (v, _) -> String.equal v version) upgradable_data_version + List.find_opt + (fun (v, _) -> String.equal v version) + upgradable_data_version with - | Some f -> return_some f + | Some f -> + return_some f | None -> fail (Invalid_data_dir_version (data_version, version)) @@ -164,45 +173,52 @@ let write_version data_dir = (Data_encoding.Json.construct version_encoding data_version) let ensure_data_dir bare data_dir = - let write_version () = - write_version data_dir >>=? fun () -> return_none in + let write_version () = write_version data_dir >>=? fun () -> return_none in try if Sys.file_exists data_dir then match Sys.readdir data_dir with - | [||] -> write_version () - | [| single |] when single = default_identity_file_name -> + | [||] -> + write_version () + | [|single|] when single = default_identity_file_name -> write_version () - | [| file_a ; file_b |] when bare && - (file_a = version_file_name && - file_b = default_identity_file_name) || - (file_b = version_file_name && - file_a = default_identity_file_name) -> + | [|file_a; file_b|] + when bare && file_a = version_file_name + && file_b = default_identity_file_name + || file_b = version_file_name + && file_a = default_identity_file_name -> write_version () | files when bare -> let files = List.filter (fun e -> e <> default_identity_file_name) - (Array.to_list files) in + (Array.to_list files) + in let to_delete = - Format.asprintf "@[<v>%a@]" - (Format.pp_print_list ~pp_sep:Format.pp_print_cut Format.pp_print_string) files in - fail (Invalid_data_dir - (Format.asprintf - "Please provide a clean directory (only %s is allowed) by deleting :@ %s" - default_identity_file_name - to_delete)) - | _ -> check_data_dir_version data_dir - else begin - Lwt_utils_unix.create_dir ~perm:0o700 data_dir >>= fun () -> - write_version () - end - with - | Sys_error _ | Unix.Unix_error _ -> - fail (Invalid_data_dir data_dir) + Format.asprintf + "@[<v>%a@]" + (Format.pp_print_list + ~pp_sep:Format.pp_print_cut + Format.pp_print_string) + files + in + fail + (Invalid_data_dir + (Format.asprintf + "Please provide a clean directory (only %s is allowed) by \ + deleting :@ %s" + default_identity_file_name + to_delete)) + | _ -> + check_data_dir_version data_dir + else + Lwt_utils_unix.create_dir ~perm:0o700 data_dir + >>= fun () -> write_version () + with Sys_error _ | Unix.Unix_error _ -> fail (Invalid_data_dir data_dir) let ensure_data_dir ?(bare = false) data_dir = - ensure_data_dir bare data_dir >>=? function - | None -> return_unit + ensure_data_dir bare data_dir + >>=? 
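(* ------------------------------------------------------------------------ *)
(* Editor's note: illustrative sketch, not part of the diff. It restates the
   decision taken by [check_data_dir_version] above: a stored version equal
   to [data_version] is accepted, a version listed in
   [upgradable_data_version] requires `tezos-node upgrade_storage`, and any
   other version is rejected. The real list pairs each upgradable version
   with a converter; it is simplified here to plain strings, and all names
   below are invented for the example. *)
type version_status =
  | Up_to_date
  | Needs_upgrade of string
  | Incompatible of string

let classify ~current ~upgradable found =
  if String.equal found current then Up_to_date
  else if List.exists (String.equal found) upgradable then Needs_upgrade found
  else Incompatible found

(* With [data_version = "0.0.3"] and an empty upgrade list, an old data
   directory is simply incompatible: *)
let _ = classify ~current:"0.0.3" ~upgradable:[] "0.0.2"
(* ------------------------------------------------------------------------ *)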
function + | None -> + return_unit | Some (version, _) -> - fail (Data_dir_needs_upgrade { expected = data_version ; - actual = version }) + fail (Data_dir_needs_upgrade {expected = data_version; actual = version}) diff --git a/src/bin_node/node_data_version.mli b/src/bin_node/node_data_version.mli index a6ae82bc6e27317eec5cf9529008648487747cc2..20d4b7a9f3915b01e7c15179dbc013e5474a9df5 100644 --- a/src/bin_node/node_data_version.mli +++ b/src/bin_node/node_data_version.mli @@ -26,9 +26,11 @@ type t type error += Invalid_data_dir_version of t * t + type error += Could_not_read_data_dir_version of string val data_version : t + val default_identity_file_name : string val pp : Format.formatter -> t -> unit @@ -37,7 +39,10 @@ val version_encoding : t Data_encoding.encoding val ensure_data_dir : ?bare:bool -> string -> unit tzresult Lwt.t -val store_dir: string -> string -val context_dir: string -> string -val protocol_dir: string -> string -val lock_file: string -> string +val store_dir : string -> string + +val context_dir : string -> string + +val protocol_dir : string -> string + +val lock_file : string -> string diff --git a/src/bin_node/node_identity_command.ml b/src/bin_node/node_identity_command.ml index 985bde913423b9783e665e8f10e0ef78d27bb917..b04a418c3705cfa4abc2dc3a4d66a5dcb6eb3d1f 100644 --- a/src/bin_node/node_identity_command.ml +++ b/src/bin_node/node_identity_command.ml @@ -23,149 +23,165 @@ (* *) (*****************************************************************************) -let (//) = Filename.concat +let ( // ) = Filename.concat (** Commands *) -let identity_file data_dir = data_dir // Node_data_version.default_identity_file_name +let identity_file data_dir = + data_dir // Node_data_version.default_identity_file_name -let show { Node_config_file.data_dir ; _ } = - Node_identity_file.read (identity_file data_dir) >>=? fun id -> +let show {Node_config_file.data_dir; _} = + Node_identity_file.read (identity_file data_dir) + >>=? fun id -> Format.printf "Peer_id: %a.@." P2p_peer.Id.pp id.peer_id ; return_unit -let generate { Node_config_file.data_dir ; p2p ; _ } = +let generate {Node_config_file.data_dir; p2p; _} = let identity_file = identity_file data_dir in if Sys.file_exists identity_file then fail (Node_identity_file.Existent_identity_file identity_file) else let target = Crypto_box.make_target p2p.expected_pow in - Format.eprintf "Generating a new identity... (level: %.2f) " p2p.expected_pow ; + Format.eprintf + "Generating a new identity... (level: %.2f) " + p2p.expected_pow ; let id = - P2p_identity.generate_with_animation Format.err_formatter target in - Node_identity_file.write identity_file id >>=? fun () -> + P2p_identity.generate_with_animation Format.err_formatter target + in + Node_identity_file.write identity_file id + >>=? fun () -> Format.eprintf "Stored the new identity (%a) into '%s'.@." - P2p_peer.Id.pp id.peer_id identity_file ; + P2p_peer.Id.pp + id.peer_id + identity_file ; return_unit -let check { Node_config_file.data_dir ; p2p = { expected_pow ; _ } ; _ } = - Node_identity_file.read - ~expected_pow (identity_file data_dir) >>=? fun id -> +let check {Node_config_file.data_dir; p2p = {expected_pow; _}; _} = + Node_identity_file.read ~expected_pow (identity_file data_dir) + >>=? fun id -> Format.printf "Peer_id: %a. Proof of work is higher than %.2f.@." 
- P2p_peer.Id.pp id.peer_id expected_pow ; + P2p_peer.Id.pp + id.peer_id + expected_pow ; return_unit (** Main *) module Term = struct - type subcommand = Show | Generate | Check let process subcommand data_dir config_file expected_pow = let res = - begin - match data_dir, config_file with - | None, None -> - let default_config = - Node_config_file.default_data_dir // "config.json" in - if Sys.file_exists default_config then - Node_config_file.read default_config - else - return Node_config_file.default_config - | None, Some config_file -> - Node_config_file.read config_file - | Some data_dir, None -> - Node_config_file.read (data_dir // "config.json") >>=? fun cfg -> - return { cfg with data_dir } - | Some data_dir, Some config_file -> - Node_config_file.read config_file >>=? fun cfg -> - return { cfg with data_dir } - end >>=? fun cfg -> - Node_config_file.update ?expected_pow cfg >>=? fun cfg -> + ( match (data_dir, config_file) with + | (None, None) -> + let default_config = + Node_config_file.default_data_dir // "config.json" + in + if Sys.file_exists default_config then + Node_config_file.read default_config + else return Node_config_file.default_config + | (None, Some config_file) -> + Node_config_file.read config_file + | (Some data_dir, None) -> + Node_config_file.read (data_dir // "config.json") + >>=? fun cfg -> return {cfg with data_dir} + | (Some data_dir, Some config_file) -> + Node_config_file.read config_file + >>=? fun cfg -> return {cfg with data_dir} ) + >>=? fun cfg -> + Node_config_file.update ?expected_pow cfg + >>=? fun cfg -> match subcommand with - | Show -> show cfg - | Generate -> generate cfg - | Check -> check cfg in + | Show -> + show cfg + | Generate -> + generate cfg + | Check -> + check cfg + in match Lwt_main.run res with - | Ok () -> `Ok () - | Error err -> `Error (false, Format.asprintf "%a" pp_print_error err) + | Ok () -> + `Ok () + | Error err -> + `Error (false, Format.asprintf "%a" pp_print_error err) let subcommand_arg = let parser = function - | "show" -> `Ok Show - | "generate" -> `Ok Generate - | "check" -> `Ok Check - | s -> `Error ("invalid argument: " ^ s) + | "show" -> + `Ok Show + | "generate" -> + `Ok Generate + | "check" -> + `Ok Check + | s -> + `Error ("invalid argument: " ^ s) and printer fmt = function - | Show -> Format.fprintf fmt "show" - | Generate -> Format.fprintf fmt "generate" - | Check -> Format.fprintf fmt "check" in + | Show -> + Format.fprintf fmt "show" + | Generate -> + Format.fprintf fmt "generate" + | Check -> + Format.fprintf fmt "check" + in let doc = - "Operation to perform. \ - Possible values: $(b,show), $(b,generate), $(b,check)." in + "Operation to perform. Possible values: $(b,show), $(b,generate), \ + $(b,check)." + in let open Cmdliner.Arg in value & pos 0 (parser, printer) Show & info [] ~docv:"OPERATION" ~doc let expected_pow = let open Cmdliner in let doc = - "Expected amount of proof-of-work for the node identity. \ - The optional parameter should be a float between 0 and 256, where - 0 disables the proof-of-work mechanism." in + "Expected amount of proof-of-work for the node identity. The optional \ + parameter should be a float between 0 and 256, where\n\ + \ 0 disables the proof-of-work mechanism." 
+ in Arg.(value & pos 1 (some float) None & info [] ~docv:"DIFFICULTY" ~doc) let term = - Cmdliner.Term.(ret (const process - $ subcommand_arg - $ Node_shared_arg.Term.data_dir - $ Node_shared_arg.Term.config_file - $ expected_pow)) + Cmdliner.Term.( + ret + ( const process $ subcommand_arg $ Node_shared_arg.Term.data_dir + $ Node_shared_arg.Term.config_file $ expected_pow )) end module Manpage = struct - let command_description = - "The $(b,identity) command is meant to create and manage node \ - identities. An $(i,identity) uniquely identifies a peer on the \ - network and consists of a cryptographic key pair as well as a \ - proof-of-work stamp that certifies \ - that enough CPU time has been dedicated to produce the identity, \ - to avoid sybil attacks. An identity with enough proof-of-work is \ - required to participate in the Tezos network, therefore this command \ + "The $(b,identity) command is meant to create and manage node identities. \ + An $(i,identity) uniquely identifies a peer on the network and consists \ + of a cryptographic key pair as well as a proof-of-work stamp that \ + certifies that enough CPU time has been dedicated to produce the \ + identity, to avoid sybil attacks. An identity with enough proof-of-work \ + is required to participate in the Tezos network, therefore this command \ is necessary to launch Tezos the first time." - let description = [ - `S "DESCRIPTION" ; - `P (command_description ^ " Several options are possible:"); - `P "$(b,show) reads, parses and displays the current identity of the node. \ - Use this command to see what identity will be used by Tezos. \ - This is the default operation." ; - `P "$(b,generate [difficulty]) generates an identity whose \ - proof of work stamp difficulty is at least equal to $(i,difficulty). \ - The value provided must be a floating point number between 0 and 256. \ - It roughly reflects the numbers of expected leading zeroes in the hash \ - of the identity data-structure. \ - Therefore, a value of 0 means no proof-of-work, and the difficulty \ - doubles for each increment of 1 in the difficulty value." ; - `P "$(b,check [difficulty]) checks that an identity is valid and that its \ - proof of work stamp difficulty is at least equal to $(i,difficulty)." ; - ] - - let man = - description @ - (* [ `S misc_docs ] @ *) - Node_shared_arg.Manpage.bugs - - let info = - Cmdliner.Term.info - ~doc: "Manage node identities" - ~man - "identity" - + let description = + [ `S "DESCRIPTION"; + `P (command_description ^ " Several options are possible:"); + `P + "$(b,show) reads, parses and displays the current identity of the \ + node. Use this command to see what identity will be used by Tezos. \ + This is the default operation."; + `P + "$(b,generate [difficulty]) generates an identity whose proof of work \ + stamp difficulty is at least equal to $(i,difficulty). The value \ + provided must be a floating point number between 0 and 256. It \ + roughly reflects the numbers of expected leading zeroes in the hash \ + of the identity data-structure. Therefore, a value of 0 means no \ + proof-of-work, and the difficulty doubles for each increment of 1 in \ + the difficulty value."; + `P + "$(b,check [difficulty]) checks that an identity is valid and that \ + its proof of work stamp difficulty is at least equal to \ + $(i,difficulty)." 
] + + let man = description @ (* [ `S misc_docs ] @ *) + Node_shared_arg.Manpage.bugs + + let info = Cmdliner.Term.info ~doc:"Manage node identities" ~man "identity" end -let cmd = - Term.term, Manpage.info - +let cmd = (Term.term, Manpage.info) diff --git a/src/bin_node/node_identity_command.mli b/src/bin_node/node_identity_command.mli index ebbeb09375b970c82d4b6542afa82e68bc2f175c..18d1c802fa9a67aa11701734768b6837b7b97c23 100644 --- a/src/bin_node/node_identity_command.mli +++ b/src/bin_node/node_identity_command.mli @@ -23,8 +23,8 @@ (* *) (*****************************************************************************) -val cmd: unit Cmdliner.Term.t * Cmdliner.Term.info +val cmd : unit Cmdliner.Term.t * Cmdliner.Term.info module Manpage : sig - val command_description: string + val command_description : string end diff --git a/src/bin_node/node_identity_file.ml b/src/bin_node/node_identity_file.ml index 17c1aab7a8b76e6e09272922394078d35a8984cc..a3570156da70504a5aa61d356ec8723d2c7aae94 100644 --- a/src/bin_node/node_identity_file.ml +++ b/src/bin_node/node_identity_file.ml @@ -24,15 +24,18 @@ (*****************************************************************************) type error += No_identity_file of string -type error += Insufficient_proof_of_work of { expected: float } -type error += Identity_mismatch of { - filename: string ; - peer_id: Crypto_box.Public_key_hash.t ; - } -type error += Identity_keys_mismatch of { - filename: string ; - expected_key:Crypto_box.public_key ; - } + +type error += Insufficient_proof_of_work of {expected : float} + +type error += + | Identity_mismatch of + { filename : string; + peer_id : Crypto_box.Public_key_hash.t } + +type error += + | Identity_keys_mismatch of + { filename : string; + expected_key : Crypto_box.public_key } let () = register_error_kind @@ -41,10 +44,12 @@ let () = ~title:"No identity file" ~description:"The node identity file cannot be found" ~pp:(fun ppf file -> - Format.fprintf ppf - "Cannot read the identity file: `%s`. \ - See `%s identity --help` on how to generate an identity." - file Sys.argv.(0)) + Format.fprintf + ppf + "Cannot read the identity file: `%s`. See `%s identity --help` on how \ + to generate an identity." + file + Sys.argv.(0)) Data_encoding.(obj1 (req "file" string)) (function No_identity_file file -> Some file | _ -> None) (fun file -> No_identity_file file) @@ -54,83 +59,108 @@ let () = `Permanent ~id:"main.identity.insufficient_proof_of_work" ~title:"Insufficient proof of work" - ~description:"The proof of work embeded by the current identity is not sufficient" + ~description: + "The proof of work embeded by the current identity is not sufficient" ~pp:(fun ppf expected -> - Format.fprintf ppf - "The current identity does not embed a sufficient stamp of proof-of-work. \ - (expected level: %.2f). \ - See `%s identity --help` on how to generate a new identity." - expected Sys.argv.(0)) + Format.fprintf + ppf + "The current identity does not embed a sufficient stamp of \ + proof-of-work. (expected level: %.2f). See `%s identity --help` on \ + how to generate a new identity." 
+ expected + Sys.argv.(0)) Data_encoding.(obj1 (req "expected" float)) - (function Insufficient_proof_of_work { expected } -> Some expected | _ -> None) - (fun expected -> Insufficient_proof_of_work { expected }) - + (function + | Insufficient_proof_of_work {expected} -> Some expected | _ -> None) + (fun expected -> Insufficient_proof_of_work {expected}) let () = register_error_kind `Permanent ~id:"main.identity.identity_mismatch" ~title:"Identity mismatch" - ~description:"The identity (public key hash) does not match the keys provided with it" + ~description: + "The identity (public key hash) does not match the keys provided with it" ~pp:(fun ppf (file, public_key_hash) -> - Format.fprintf ppf - "The current identity (public key hash) does not match the keys in %s. - Expected identity %a." - file - Crypto_box.Public_key_hash.pp - public_key_hash) - Data_encoding.(obj2 (req "file" string) (req "public_key_hash" Crypto_box.Public_key_hash.encoding)) - (function Identity_mismatch { filename ; peer_id } -> - Some (filename, peer_id) | _ -> None) - (fun (filename,peer_id) -> Identity_mismatch { filename ; peer_id }) + Format.fprintf + ppf + "The current identity (public key hash) does not match the keys in %s.\n\ + \ Expected identity %a." + file + Crypto_box.Public_key_hash.pp + public_key_hash) + Data_encoding.( + obj2 + (req "file" string) + (req "public_key_hash" Crypto_box.Public_key_hash.encoding)) + (function + | Identity_mismatch {filename; peer_id} -> + Some (filename, peer_id) + | _ -> + None) + (fun (filename, peer_id) -> Identity_mismatch {filename; peer_id}) let () = register_error_kind `Permanent ~id:"main.identity.identity_keys_mismatch" ~title:"Identity keys mismatch" - ~description:"The current identity file has non-matching keys (secret key/ public key pair is not valid)" + ~description: + "The current identity file has non-matching keys (secret key/ public \ + key pair is not valid)" ~pp:(fun ppf (file, public_key) -> - Format.fprintf ppf - "The current identity file %s has non-matching keys (secret key/ public key pair is not valid). - Expected public key %a." - file - Crypto_box.pp_pk - public_key) - Data_encoding.(obj2 (req "file" string) (req "public_key" Crypto_box.public_key_encoding)) + Format.fprintf + ppf + "The current identity file %s has non-matching keys (secret key/ \ + public key pair is not valid).\n\ + \ Expected public key %a." + file + Crypto_box.pp_pk + public_key) + Data_encoding.( + obj2 + (req "file" string) + (req "public_key" Crypto_box.public_key_encoding)) (function - | Identity_keys_mismatch { filename ; expected_key } -> + | Identity_keys_mismatch {filename; expected_key} -> Some (filename, expected_key) - | _ -> None) + | _ -> + None) (fun (filename, expected_key) -> - Identity_keys_mismatch { filename ; expected_key }) + Identity_keys_mismatch {filename; expected_key}) let read ?expected_pow filename = - Lwt_unix.file_exists filename >>= function + Lwt_unix.file_exists filename + >>= function | false -> fail (No_identity_file filename) - | true -> - Lwt_utils_unix.Json.read_file filename >>=? fun json -> + | true -> ( + Lwt_utils_unix.Json.read_file filename + >>=? 
fun json -> let id = Data_encoding.Json.destruct P2p_identity.encoding json in let pkh = Crypto_box.hash id.public_key in (* check public_key hash *) if not (Crypto_box.Public_key_hash.equal pkh id.peer_id) then - fail (Identity_mismatch { filename ; peer_id = pkh }) + fail (Identity_mismatch {filename; peer_id = pkh}) (* check public/private keys correspondance *) - else if not Crypto_box.(equal (neuterize id.secret_key) id.public_key) then - fail (Identity_keys_mismatch { filename ; expected_key = id.public_key }) - else (* check PoW level *) + else if not Crypto_box.(equal (neuterize id.secret_key) id.public_key) + then + fail (Identity_keys_mismatch {filename; expected_key = id.public_key}) + else + (* check PoW level *) match expected_pow with - | None -> return id + | None -> + return id | Some expected -> let target = Crypto_box.make_target expected in if - not (Crypto_box.check_proof_of_work - id.public_key id.proof_of_work_stamp target) - then - fail (Insufficient_proof_of_work { expected }) - else - return id + not + (Crypto_box.check_proof_of_work + id.public_key + id.proof_of_work_stamp + target) + then fail (Insufficient_proof_of_work {expected}) + else return id ) type error += Existent_identity_file of string @@ -141,18 +171,21 @@ let () = ~title:"Cannot overwrite identity file" ~description:"Cannot implicitely overwrite the current identity file" ~pp:(fun ppf file -> - Format.fprintf ppf - "Cannot implicitely overwrite the current identity file: '%s'. \ - See `%s identity --help` on how to generate a new identity." - file Sys.argv.(0)) + Format.fprintf + ppf + "Cannot implicitely overwrite the current identity file: '%s'. See \ + `%s identity --help` on how to generate a new identity." + file + Sys.argv.(0)) Data_encoding.(obj1 (req "file" string)) (function Existent_identity_file file -> Some file | _ -> None) (fun file -> Existent_identity_file file) let write file identity = - if Sys.file_exists file then - fail (Existent_identity_file file) + if Sys.file_exists file then fail (Existent_identity_file file) else - Node_data_version.ensure_data_dir (Filename.dirname file) >>=? fun () -> - Lwt_utils_unix.Json.write_file file + Node_data_version.ensure_data_dir (Filename.dirname file) + >>=? 
fun () -> + Lwt_utils_unix.Json.write_file + file (Data_encoding.Json.construct P2p_identity.encoding identity) diff --git a/src/bin_node/node_identity_file.mli b/src/bin_node/node_identity_file.mli index 51ca2548d4a357a9069b69f97ebf9c4ef3c24d4c..5d1bdbde7d40373e155232a347dc02330463e797 100644 --- a/src/bin_node/node_identity_file.mli +++ b/src/bin_node/node_identity_file.mli @@ -24,12 +24,11 @@ (*****************************************************************************) type error += No_identity_file of string -type error += Insufficient_proof_of_work of { expected: float } -val read: - ?expected_pow:float -> - string -> P2p_identity.t tzresult Lwt.t +type error += Insufficient_proof_of_work of {expected : float} + +val read : ?expected_pow:float -> string -> P2p_identity.t tzresult Lwt.t type error += Existent_identity_file of string -val write: string -> P2p_identity.t -> unit tzresult Lwt.t +val write : string -> P2p_identity.t -> unit tzresult Lwt.t diff --git a/src/bin_node/node_logging.ml b/src/bin_node/node_logging.ml index ec5fadcc58dd7a9080a0b755161ba31bc890ca01..ac2e0057fa0d09c77cef77bef977c16d469f2100 100644 --- a/src/bin_node/node_logging.ml +++ b/src/bin_node/node_logging.ml @@ -23,6 +23,6 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make(struct - let name = "node.main" - end) +include Internal_event.Legacy_logging.Make (struct + let name = "node.main" +end) diff --git a/src/bin_node/node_run_command.ml b/src/bin_node/node_run_command.ml index 353e69bd844ce03238bec42e080f59480f976515..8229bce9286cfe365d2497d5c6faac38d090e2f0 100644 --- a/src/bin_node/node_run_command.ml +++ b/src/bin_node/node_run_command.ml @@ -28,6 +28,7 @@ open Node_logging open Genesis_chain type error += Non_private_sandbox of P2p_addr.t + type error += RPC_Port_already_in_use of P2p_point.Id.t list let () = @@ -36,120 +37,121 @@ let () = ~id:"main.run.non_private_sandbox" ~title:"Forbidden public sandbox" ~description:"A sandboxed node should not listen on a public address." - ~pp:begin fun ppf addr -> - Format.fprintf ppf - "The node is configured to listen on a public address (%a), \ - while only 'private' networks are authorised with `--sandbox`. - See `%s run --help` on how to change the listening address." - Ipaddr.V6.pp addr Sys.argv.(0) - end + ~pp:(fun ppf addr -> + Format.fprintf + ppf + "The node is configured to listen on a public address (%a), while \ + only 'private' networks are authorised with `--sandbox`.\n\ + \ See `%s run --help` on how to change the listening address." + Ipaddr.V6.pp + addr + Sys.argv.(0)) Data_encoding.(obj1 (req "addr" P2p_addr.encoding)) (function Non_private_sandbox addr -> Some addr | _ -> None) - (fun addr -> Non_private_sandbox addr); + (fun addr -> Non_private_sandbox addr) ; register_error_kind `Permanent ~id:"main.run.port_already_in_use" ~title:"Cannot start node: RPC port already in use" ~description:"Another tezos node is probably running on the same RPC port." - ~pp:begin fun ppf addrlist -> - Format.fprintf ppf - "Another tezos node is probably running on one of these addresses (%a). \ - Please choose another RPC port." - (Format.pp_print_list P2p_point.Id.pp) addrlist - end + ~pp:(fun ppf addrlist -> + Format.fprintf + ppf + "Another tezos node is probably running on one of these addresses \ + (%a). Please choose another RPC port." 
+ (Format.pp_print_list P2p_point.Id.pp) + addrlist) Data_encoding.(obj1 (req "addrlist" (list P2p_point.Id.encoding))) - (function | RPC_Port_already_in_use addrlist -> Some addrlist | _ -> None) + (function RPC_Port_already_in_use addrlist -> Some addrlist | _ -> None) (fun addrlist -> RPC_Port_already_in_use addrlist) -let (//) = Filename.concat +let ( // ) = Filename.concat let init_node ?sandbox ?checkpoint (config : Node_config_file.t) = - begin - match sandbox with - | None -> Lwt.return_none - | Some sandbox_param -> - match sandbox_param with - | None -> Lwt.return_none - | Some file -> - Lwt_utils_unix.Json.read_file file >>= function - | Error err -> - lwt_warn - "Cannot parse sandbox parameters: %s" file >>= fun () -> - lwt_debug "%a" pp_print_error err >>= fun () -> - Lwt.return_none - | Ok json -> - Lwt.return_some json - end >>= fun sandbox_param -> - (* TODO "WARN" when pow is below our expectation. *) - begin - match config.p2p.discovery_addr with - | None -> - lwt_log_notice "No local peer discovery." >>= fun () -> - return (None, None) - | Some addr -> - Node_config_file.resolve_discovery_addrs addr >>= function - | [] -> - failwith "Cannot resolve P2P discovery address: %S" addr - | (addr, port) :: _ -> - return (Some addr, Some port) - end >>=? fun (discovery_addr, discovery_port) -> - begin - match config.p2p.listen_addr with + ( match sandbox with + | None -> + Lwt.return_none + | Some sandbox_param -> ( + match sandbox_param with | None -> - lwt_log_notice "Not listening to P2P calls." >>= fun () -> - return (None, None) - | Some addr -> - Node_config_file.resolve_listening_addrs addr >>= function - | [] -> - failwith "Cannot resolve P2P listening address: %S" addr - | (addr, port) :: _ -> return (Some addr, Some port) - end >>=? fun (listening_addr, listening_port) -> - begin - match listening_addr, sandbox with - | Some addr, Some _ - when Ipaddr.V6.(compare addr unspecified) = 0 -> - return_none - | Some addr, Some _ when not (Ipaddr.V6.is_private addr) -> - fail (Non_private_sandbox addr) - | None, Some _ -> return_none - | _ -> - (Node_config_file.resolve_bootstrap_addrs - config.p2p.bootstrap_peers) >>= fun trusted_points -> - Node_identity_file.read - (config.data_dir // - Node_data_version.default_identity_file_name) >>=? fun identity -> - lwt_log_notice - "Peer's global id: %a" - P2p_peer.Id.pp identity.peer_id >>= fun () -> - let p2p_config : P2p.config = - { listening_addr ; - listening_port ; - discovery_addr ; - discovery_port ; - trusted_points ; - peers_file = - (config.data_dir // "peers.json") ; - private_mode = config.p2p.private_mode ; - greylisting_config = config.p2p.greylisting_config ; - identity ; - proof_of_work_target = - Crypto_box.make_target config.p2p.expected_pow ; - disable_mempool = config.p2p.disable_mempool ; - trust_discovered_peers = (sandbox_param <> None) ; - disable_testchain = config.p2p.disable_testchain ; - } - in - return_some (p2p_config, config.p2p.limits) - end >>=? 
fun p2p_config -> - let node_config : Node.config = { - genesis ; - patch_context = Some (Patch_context.patch_context sandbox_param) ; - store_root = Node_data_version.store_dir config.data_dir ; - context_root = Node_data_version.context_dir config.data_dir ; - p2p = p2p_config ; - test_chain_max_tll = Some (48 * 3600) ; (* 2 days *) - checkpoint ; - } in + Lwt.return_none + | Some file -> ( + Lwt_utils_unix.Json.read_file file + >>= function + | Error err -> + lwt_warn "Cannot parse sandbox parameters: %s" file + >>= fun () -> + lwt_debug "%a" pp_print_error err >>= fun () -> Lwt.return_none + | Ok json -> + Lwt.return_some json ) ) ) + >>= fun sandbox_param -> + (* TODO "WARN" when pow is below our expectation. *) + ( match config.p2p.discovery_addr with + | None -> + lwt_log_notice "No local peer discovery." + >>= fun () -> return (None, None) + | Some addr -> ( + Node_config_file.resolve_discovery_addrs addr + >>= function + | [] -> + failwith "Cannot resolve P2P discovery address: %S" addr + | (addr, port) :: _ -> + return (Some addr, Some port) ) ) + >>=? fun (discovery_addr, discovery_port) -> + ( match config.p2p.listen_addr with + | None -> + lwt_log_notice "Not listening to P2P calls." + >>= fun () -> return (None, None) + | Some addr -> ( + Node_config_file.resolve_listening_addrs addr + >>= function + | [] -> + failwith "Cannot resolve P2P listening address: %S" addr + | (addr, port) :: _ -> + return (Some addr, Some port) ) ) + >>=? fun (listening_addr, listening_port) -> + ( match (listening_addr, sandbox) with + | (Some addr, Some _) when Ipaddr.V6.(compare addr unspecified) = 0 -> + return_none + | (Some addr, Some _) when not (Ipaddr.V6.is_private addr) -> + fail (Non_private_sandbox addr) + | (None, Some _) -> + return_none + | _ -> + Node_config_file.resolve_bootstrap_addrs config.p2p.bootstrap_peers + >>= fun trusted_points -> + Node_identity_file.read + (config.data_dir // Node_data_version.default_identity_file_name) + >>=? fun identity -> + lwt_log_notice "Peer's global id: %a" P2p_peer.Id.pp identity.peer_id + >>= fun () -> + let p2p_config : P2p.config = + { listening_addr; + listening_port; + discovery_addr; + discovery_port; + trusted_points; + peers_file = config.data_dir // "peers.json"; + private_mode = config.p2p.private_mode; + greylisting_config = config.p2p.greylisting_config; + identity; + proof_of_work_target = Crypto_box.make_target config.p2p.expected_pow; + disable_mempool = config.p2p.disable_mempool; + trust_discovered_peers = sandbox_param <> None; + disable_testchain = config.p2p.disable_testchain } + in + return_some (p2p_config, config.p2p.limits) ) + >>=? 
fun p2p_config -> + let node_config : Node.config = + { genesis; + patch_context = Some (Patch_context.patch_context sandbox_param); + store_root = Node_data_version.store_dir config.data_dir; + context_root = Node_data_version.context_dir config.data_dir; + p2p = p2p_config; + test_chain_max_tll = Some (48 * 3600); + (* 2 days *) + checkpoint } + in Node.create ~sandboxed:(sandbox <> None) node_config @@ -161,242 +163,278 @@ let init_node ?sandbox ?checkpoint (config : Node_config_file.t) = (* Add default accepted CORS headers *) let sanitize_cors_headers ~default headers = - List.map String.lowercase_ascii headers |> - String.Set.of_list |> - String.Set.(union (of_list default)) |> - String.Set.elements + List.map String.lowercase_ascii headers + |> String.Set.of_list + |> String.Set.(union (of_list default)) + |> String.Set.elements -let launch_rpc_server - (rpc_config : Node_config_file.rpc) node (addr, port) = +let launch_rpc_server (rpc_config : Node_config_file.rpc) node (addr, port) = let host = Ipaddr.V6.to_string addr in let dir = Node.build_rpc_directory node in let mode = match rpc_config.tls with - | None -> `TCP (`Port port) - | Some { cert ; key } -> - `TLS (`Crt_file_path cert, `Key_file_path key, - `No_password, `Port port) in + | None -> + `TCP (`Port port) + | Some {cert; key} -> + `TLS (`Crt_file_path cert, `Key_file_path key, `No_password, `Port port) + in lwt_log_notice "Starting a RPC server listening on %s:%d%s." - host port - (if rpc_config.tls = None then "" else " (TLS enabled)") >>= fun () -> + host + port + (if rpc_config.tls = None then "" else " (TLS enabled)") + >>= fun () -> let cors_headers = - sanitize_cors_headers - ~default:["Content-Type"] rpc_config.cors_headers in - Lwt.catch begin fun () -> - RPC_server.launch ~host mode dir - ~media_types:Media_type.all_media_types - ~cors:{ allowed_origins = rpc_config.cors_origins ; - allowed_headers = cors_headers } >>= return - end begin function - | Unix.Unix_error(Unix.EADDRINUSE, "bind","") -> - fail (RPC_Port_already_in_use [(addr,port)]) - | exn -> Lwt.return (error_exn exn) - end + sanitize_cors_headers ~default:["Content-Type"] rpc_config.cors_headers + in + Lwt.catch + (fun () -> + RPC_server.launch + ~host + mode + dir + ~media_types:Media_type.all_media_types + ~cors: + { allowed_origins = rpc_config.cors_origins; + allowed_headers = cors_headers } + >>= return) + (function + | Unix.Unix_error (Unix.EADDRINUSE, "bind", "") -> + fail (RPC_Port_already_in_use [(addr, port)]) + | exn -> + Lwt.return (error_exn exn)) -let init_rpc (rpc_config: Node_config_file.rpc) node = +let init_rpc (rpc_config : Node_config_file.rpc) node = match rpc_config.listen_addr with | None -> - lwt_log_notice "Not listening to RPC calls." >>= fun () -> - return_nil - | Some addr -> - Node_config_file.resolve_rpc_listening_addrs addr >>= function + lwt_log_notice "Not listening to RPC calls." >>= fun () -> return_nil + | Some addr -> ( + Node_config_file.resolve_rpc_listening_addrs addr + >>= function | [] -> failwith "Cannot resolve listening address: %S" addr | addrs -> - map_s (launch_rpc_server rpc_config node) addrs + map_s (launch_rpc_server rpc_config node) addrs ) let init_signal () = - let handler name id = try + let handler name id = + try fatal_error "Received the %s signal, triggering shutdown." 
name ; Lwt_exit.exit id - with _ -> () in - ignore (Lwt_unix.on_signal Sys.sigint (handler "INT") : Lwt_unix.signal_handler_id) ; - ignore (Lwt_unix.on_signal Sys.sigterm (handler "TERM") : Lwt_unix.signal_handler_id) + with _ -> () + in + ignore + (Lwt_unix.on_signal Sys.sigint (handler "INT") : Lwt_unix.signal_handler_id) ; + ignore + ( Lwt_unix.on_signal Sys.sigterm (handler "TERM") + : Lwt_unix.signal_handler_id ) let run ?verbosity ?sandbox ?checkpoint (config : Node_config_file.t) = - Node_data_version.ensure_data_dir config.data_dir >>=? fun () -> + Node_data_version.ensure_data_dir config.data_dir + >>=? fun () -> Lwt_lock_file.create - ~unlink_on_exit:true (Node_data_version.lock_file config.data_dir) >>=? fun () -> + ~unlink_on_exit:true + (Node_data_version.lock_file config.data_dir) + >>=? fun () -> init_signal () ; let log_cfg = match verbosity with - | None -> config.log - | Some default_level -> { config.log with default_level } in - Internal_event_unix.init ~lwt_log_sink:log_cfg - ~configuration:config.internal_events () >>= fun () -> + | None -> + config.log + | Some default_level -> + {config.log with default_level} + in + Internal_event_unix.init + ~lwt_log_sink:log_cfg + ~configuration:config.internal_events + () + >>= fun () -> Updater.init (Node_data_version.protocol_dir config.data_dir) ; - lwt_log_notice "Starting the Tezos node..." >>= fun () -> - begin init_node ?sandbox ?checkpoint config >>= function - | Ok node -> return node - | Error (State.Incorrect_history_mode_switch { previous_mode ; next_mode } :: _) -> - failwith "@[Cannot switch from history mode '%a' to \ - '%a'. Import a context from a corresponding snapshot \ - or re-synchronize a node with an empty tezos node \ - directory.@]" - History_mode.pp previous_mode - History_mode.pp next_mode - | Error _ as err -> Lwt.return err - end >>=? fun node -> - init_rpc config.rpc node >>=? fun rpc -> - lwt_log_notice "The Tezos node is now running!" >>= fun () -> - Lwt_exit.termination_thread >>= fun x -> - lwt_log_notice "Shutting down the Tezos node..." >>= fun () -> - Node.shutdown node >>= fun () -> - lwt_log_notice "Shutting down the RPC server..." >>= fun () -> - Lwt_list.iter_s RPC_server.shutdown rpc >>= fun () -> - lwt_log_notice "BYE (%d)" x >>= fun () -> - Internal_event_unix.close () >>= fun () -> - return_unit + lwt_log_notice "Starting the Tezos node..." + >>= fun () -> + init_node ?sandbox ?checkpoint config + >>= (function + | Ok node -> + return node + | Error + (State.Incorrect_history_mode_switch {previous_mode; next_mode} + :: _) -> + failwith + "@[Cannot switch from history mode '%a' to '%a'. Import a \ + context from a corresponding snapshot or re-synchronize a node \ + with an empty tezos node directory.@]" + History_mode.pp + previous_mode + History_mode.pp + next_mode + | Error _ as err -> + Lwt.return err) + >>=? fun node -> + init_rpc config.rpc node + >>=? fun rpc -> + lwt_log_notice "The Tezos node is now running!" + >>= fun () -> + Lwt_exit.termination_thread + >>= fun x -> + lwt_log_notice "Shutting down the Tezos node..." + >>= fun () -> + Node.shutdown node + >>= fun () -> + lwt_log_notice "Shutting down the RPC server..." 
+ >>= fun () -> + Lwt_list.iter_s RPC_server.shutdown rpc + >>= fun () -> + lwt_log_notice "BYE (%d)" x + >>= fun () -> Internal_event_unix.close () >>= fun () -> return_unit let process sandbox verbosity checkpoint args = let verbosity = let open Internal_event in - match verbosity with - | [] -> None - | [_] -> Some Info - | _ -> Some Debug in + match verbosity with [] -> None | [_] -> Some Info | _ -> Some Debug + in let run = Node_shared_arg.read_and_patch_config_file - ~ignore_bootstrap_peers:(match sandbox with - | Some _ -> true - | None -> false) - args >>=? fun config -> - begin match sandbox with - | Some _ -> - if config.data_dir = Node_config_file.default_data_dir - then failwith "Cannot use default data directory while in sandbox mode" - else return_unit - | None -> return_unit - end >>=? fun () -> - begin - match checkpoint with - | None -> return_none - | Some s -> - match Block_header.of_b58check s with - | Some b -> return_some b - | None -> - failwith "Failed to parse the provided checkpoint (Base58Check-encoded)." - end >>=? fun checkpoint -> - Lwt_lock_file.is_locked - (Node_data_version.lock_file config.data_dir) >>=? function + ~ignore_bootstrap_peers: + (match sandbox with Some _ -> true | None -> false) + args + >>=? fun config -> + ( match sandbox with + | Some _ -> + if config.data_dir = Node_config_file.default_data_dir then + failwith "Cannot use default data directory while in sandbox mode" + else return_unit + | None -> + return_unit ) + >>=? fun () -> + ( match checkpoint with + | None -> + return_none + | Some s -> ( + match Block_header.of_b58check s with + | Some b -> + return_some b + | None -> + failwith + "Failed to parse the provided checkpoint (Base58Check-encoded)." ) + ) + >>=? fun checkpoint -> + Lwt_lock_file.is_locked (Node_data_version.lock_file config.data_dir) + >>=? function | false -> Lwt.catch (fun () -> run ?sandbox ?verbosity ?checkpoint config) (function - |Unix.Unix_error(Unix.EADDRINUSE, "bind","") -> - begin match config.rpc.listen_addr with - | None -> assert false - | Some addr -> - Node_config_file.resolve_rpc_listening_addrs addr >>= fun addrlist -> - fail (RPC_Port_already_in_use addrlist) - end - | exn -> Lwt.return (error_exn exn) - ) - | true -> failwith "Data directory is locked by another process" in + | Unix.Unix_error (Unix.EADDRINUSE, "bind", "") -> ( + match config.rpc.listen_addr with + | None -> + assert false + | Some addr -> + Node_config_file.resolve_rpc_listening_addrs addr + >>= fun addrlist -> fail (RPC_Port_already_in_use addrlist) ) + | exn -> + Lwt.return (error_exn exn)) + | true -> + failwith "Data directory is locked by another process" + in match Lwt_main.run run with - | Ok () -> `Ok () - | Error err -> `Error (false, Format.asprintf "%a" pp_print_error err) + | Ok () -> + `Ok () + | Error err -> + `Error (false, Format.asprintf "%a" pp_print_error err) module Term = struct - let verbosity = let open Cmdliner in let doc = - "Increase log level. Using $(b,-v) is equivalent to \ - using $(b,TEZOS_LOG='* -> info'), and $(b,-vv) is equivalent to using \ - $(b,TEZOS_LOG='* -> debug')." in - Arg.(value & flag_all & - info ~docs:Node_shared_arg.Manpage.misc_section ~doc ["v"]) + "Increase log level. Using $(b,-v) is equivalent to using \ + $(b,TEZOS_LOG='* -> info'), and $(b,-vv) is equivalent to using \ + $(b,TEZOS_LOG='* -> debug')." + in + Arg.( + value & flag_all + & info ~docs:Node_shared_arg.Manpage.misc_section ~doc ["v"]) let sandbox = let open Cmdliner in let doc = - "Run the daemon in sandbox mode. 
\ - P2P to non-localhost addresses are disabled, and constants of \ - the economic protocol can be altered with an optional JSON file. \ - $(b,IMPORTANT): Using sandbox mode affects the node state and \ - subsequent runs of Tezos node must also use sandbox mode. \ - In order to run the node in normal mode afterwards, a full reset \ + "Run the daemon in sandbox mode. P2P to non-localhost addresses are \ + disabled, and constants of the economic protocol can be altered with \ + an optional JSON file. $(b,IMPORTANT): Using sandbox mode affects the \ + node state and subsequent runs of Tezos node must also use sandbox \ + mode. In order to run the node in normal mode afterwards, a full reset \ must be performed (by removing the node's data directory)." in - Arg.(value & opt ~vopt:(Some None) (some (some string)) None & - info ~docs:Node_shared_arg.Manpage.misc_section - ~doc ~docv:"FILE.json" ["sandbox"]) + Arg.( + value + & opt ~vopt:(Some None) (some (some string)) None + & info + ~docs:Node_shared_arg.Manpage.misc_section + ~doc + ~docv:"FILE.json" + ["sandbox"]) let checkpoint = let open Cmdliner in let doc = - "When asked to take a block hash as a checkpoint, the daemon \ - will only accept the chains that contains that block and those \ - that might reach it." + "When asked to take a block hash as a checkpoint, the daemon will only \ + accept the chains that contains that block and those that might reach \ + it." in - Arg.(value & opt (some string) None & - info ~docs:Node_shared_arg.Manpage.misc_section - ~doc ~docv:"<level>,<block_hash>" ["checkpoint"]) + Arg.( + value + & opt (some string) None + & info + ~docs:Node_shared_arg.Manpage.misc_section + ~doc + ~docv:"<level>,<block_hash>" + ["checkpoint"]) let term = - Cmdliner.Term.(ret (const process $ sandbox $ verbosity $ checkpoint $ - Node_shared_arg.Term.args)) - + Cmdliner.Term.( + ret + ( const process $ sandbox $ verbosity $ checkpoint + $ Node_shared_arg.Term.args )) end module Manpage = struct - let command_description = - "The $(b,run) command is meant to run the Tezos node. \ - Most of its command line arguments corresponds to config file \ - entries, and will have priority over the latter if used." + "The $(b,run) command is meant to run the Tezos node. Most of its command \ + line arguments corresponds to config file entries, and will have \ + priority over the latter if used." - let description = [ - `S "DESCRIPTION" ; - `P command_description ; - ] + let description = [`S "DESCRIPTION"; `P command_description] let debug = let log_sections = - String.concat " " - (List.rev !Internal_event.Legacy_logging.sections) in - [ - `S "DEBUG" ; - `P ("The environment variable $(b,TEZOS_LOG) is used to fine-tune \ - what is going to be logged. The syntax is \ - $(b,TEZOS_LOG='<section> -> <level> [ ; ...]') \ - where section is one of $(i," - ^ log_sections ^ - ") and level is one of $(i,fatal), $(i,error), $(i,warn), \ - $(i,notice), $(i,info) or $(i,debug). \ - A $(b,*) can be used as a wildcard \ - in sections, i.e. $(b, client* -> debug). \ - The rules are matched left to right, \ - therefore the leftmost rule is highest priority ." - ) ; - ] + String.concat " " (List.rev !Internal_event.Legacy_logging.sections) + in + [ `S "DEBUG"; + `P + ( "The environment variable $(b,TEZOS_LOG) is used to fine-tune what \ + is going to be logged. 
The syntax is \ + $(b,TEZOS_LOG='<section> -> <level> [ ; ...]') where section is \ + one of $(i," ^ log_sections + ^ ") and level is one of $(i,fatal), $(i,error), $(i,warn), \ + $(i,notice), $(i,info) or $(i,debug). A $(b,*) can be used as a \ + wildcard in sections, i.e. $(b, client* -> debug). The rules are \ + matched left to right, therefore the leftmost rule is highest \ + priority ." ) ] let examples = - [ - `S "EXAMPLES" ; - `I ("$(b,Run in sandbox mode listening to RPC commands \ - at localhost port 8732)", - "$(mname) run --sandbox --data-dir /custom/data/dir \ - --rpc-addr localhost:8732" ) ; - `I ("$(b,Run a node that accepts network connections)", - "$(mname) run" ) ; + [ `S "EXAMPLES"; + `I + ( "$(b,Run in sandbox mode listening to RPC commands at localhost \ + port 8732)", + "$(mname) run --sandbox --data-dir /custom/data/dir --rpc-addr \ + localhost:8732" ); + `I ("$(b,Run a node that accepts network connections)", "$(mname) run") ] let man = - description @ - Node_shared_arg.Manpage.args @ - debug @ - examples @ - Node_shared_arg.Manpage.bugs - - let info = - Cmdliner.Term.info - ~doc:"Run the Tezos node" - ~man - "run" + description @ Node_shared_arg.Manpage.args @ debug @ examples + @ Node_shared_arg.Manpage.bugs + let info = Cmdliner.Term.info ~doc:"Run the Tezos node" ~man "run" end -let cmd = Term.term, Manpage.info +let cmd = (Term.term, Manpage.info) diff --git a/src/bin_node/node_run_command.mli b/src/bin_node/node_run_command.mli index 515842ac449199dab9bd462a32aab5756f78b836..083b3e938f654cf5d8b8a71613b4e0c2567d1be6 100644 --- a/src/bin_node/node_run_command.mli +++ b/src/bin_node/node_run_command.mli @@ -23,9 +23,10 @@ (* *) (*****************************************************************************) -val cmd: unit Cmdliner.Term.t * Cmdliner.Term.info +val cmd : unit Cmdliner.Term.t * Cmdliner.Term.info module Manpage : sig - val command_description: string - val examples: Cmdliner.Manpage.block list + val command_description : string + + val examples : Cmdliner.Manpage.block list end diff --git a/src/bin_node/node_shared_arg.ml b/src/bin_node/node_shared_arg.ml index 52a0f91abac2a194c4c9285059d11a4305aad2b7..f776679c2b628cc5e146741a45d8be0580db206e 100644 --- a/src/bin_node/node_shared_arg.ml +++ b/src/bin_node/node_shared_arg.ml @@ -27,124 +27,126 @@ open Cmdliner open Node_logging -let (//) = Filename.concat +let ( // ) = Filename.concat type t = { - data_dir: string option ; - config_file: string ; - min_connections: int option ; - expected_connections: int option ; - max_connections: int option ; - max_download_speed: int option ; - max_upload_speed: int option ; - binary_chunks_size: int option ; - peer_table_size: int option ; - expected_pow: float option ; - peers: string list ; - no_bootstrap_peers: bool ; - listen_addr: string option ; - discovery_addr: string option ; - rpc_listen_addr: string option ; - private_mode: bool ; - disable_mempool: bool ; - disable_testchain: bool ; - cors_origins: string list ; - cors_headers: string list ; - rpc_tls: Node_config_file.tls option ; - log_output: Lwt_log_sink_unix.Output.t option ; - bootstrap_threshold: int option ; - history_mode: History_mode.t option ; + data_dir : string option; + config_file : string; + min_connections : int option; + expected_connections : int option; + max_connections : int option; + max_download_speed : int option; + max_upload_speed : int option; + binary_chunks_size : int option; + peer_table_size : int option; + expected_pow : float option; + peers : string list; + 
no_bootstrap_peers : bool; + listen_addr : string option; + discovery_addr : string option; + rpc_listen_addr : string option; + private_mode : bool; + disable_mempool : bool; + disable_testchain : bool; + cors_origins : string list; + cors_headers : string list; + rpc_tls : Node_config_file.tls option; + log_output : Lwt_log_sink_unix.Output.t option; + bootstrap_threshold : int option; + history_mode : History_mode.t option } -let wrap - data_dir config_file - connections max_download_speed max_upload_speed binary_chunks_size - peer_table_size - listen_addr discovery_addr peers no_bootstrap_peers bootstrap_threshold private_mode - disable_mempool disable_testchain - expected_pow rpc_listen_addr rpc_tls - cors_origins cors_headers log_output history_mode = - +let wrap data_dir config_file connections max_download_speed max_upload_speed + binary_chunks_size peer_table_size listen_addr discovery_addr peers + no_bootstrap_peers bootstrap_threshold private_mode disable_mempool + disable_testchain expected_pow rpc_listen_addr rpc_tls cors_origins + cors_headers log_output history_mode = let actual_data_dir = - Option.unopt ~default:Node_config_file.default_data_dir data_dir in - + Option.unopt ~default:Node_config_file.default_data_dir data_dir + in let config_file = - Option.unopt ~default:(actual_data_dir // "config.json") config_file in - + Option.unopt ~default:(actual_data_dir // "config.json") config_file + in let rpc_tls = - Option.map - ~f:(fun (cert, key) -> { Node_config_file.cert ; key }) - rpc_tls in - + Option.map ~f:(fun (cert, key) -> {Node_config_file.cert; key}) rpc_tls + in (* when `--connections` is used, override all the bounds defined in the configuration file. *) - let bootstrap_threshold, - min_connections, expected_connections, max_connections, - peer_table_size = + let ( bootstrap_threshold, + min_connections, + expected_connections, + max_connections, + peer_table_size ) = match connections with - | None -> bootstrap_threshold, None, None, None, peer_table_size - | Some x -> + | None -> + (bootstrap_threshold, None, None, None, peer_table_size) + | Some x -> ( let peer_table_size = match peer_table_size with - | None -> Some (8*x) - | Some _ -> peer_table_size in - begin match bootstrap_threshold with - | None -> Some (min (x/4) 2), Some (x/2), Some x, Some (3*x/2), peer_table_size - | Some bs -> Some bs, Some (x/2), Some x, Some (3*x/2), peer_table_size - end + | None -> + Some (8 * x) + | Some _ -> + peer_table_size + in + match bootstrap_threshold with + | None -> + ( Some (min (x / 4) 2), + Some (x / 2), + Some x, + Some (3 * x / 2), + peer_table_size ) + | Some bs -> + (Some bs, Some (x / 2), Some x, Some (3 * x / 2), peer_table_size) + ) in - { data_dir ; - config_file ; - min_connections ; - expected_connections ; - max_connections ; - max_download_speed ; - max_upload_speed ; - binary_chunks_size ; - expected_pow ; - peers ; - no_bootstrap_peers ; - listen_addr ; - discovery_addr ; - rpc_listen_addr ; - private_mode ; - disable_mempool ; - disable_testchain ; - cors_origins ; - cors_headers ; - rpc_tls ; - log_output ; - peer_table_size ; - bootstrap_threshold ; - history_mode ; - } + { data_dir; + config_file; + min_connections; + expected_connections; + max_connections; + max_download_speed; + max_upload_speed; + binary_chunks_size; + expected_pow; + peers; + no_bootstrap_peers; + listen_addr; + discovery_addr; + rpc_listen_addr; + private_mode; + disable_mempool; + disable_testchain; + cors_origins; + cors_headers; + rpc_tls; + log_output; + 
peer_table_size; + bootstrap_threshold; + history_mode } module Manpage = struct - let misc_section = "MISC OPTIONS" + let p2p_section = "P2P OPTIONS" - let rpc_section = "RPC OPTIONS" - let args = [ - `S p2p_section ; - `S rpc_section ; - `S misc_section ; - ] + let rpc_section = "RPC OPTIONS" - let bugs = [ - `S "BUGS"; - `P "Check bug reports at https://gitlab.com/tezos/tezos/issues."; - ] + let args = [`S p2p_section; `S rpc_section; `S misc_section] + let bugs = + [ `S "BUGS"; + `P "Check bug reports at https://gitlab.com/tezos/tezos/issues." ] end module Term = struct - let log_output_converter = - (fun s -> match Lwt_log_sink_unix.Output.of_string s with - | Some res -> `Ok res - | None -> `Error s), - Lwt_log_sink_unix.Output.pp + ( (fun s -> + match Lwt_log_sink_unix.Output.of_string s with + | Some res -> + `Ok res + | None -> + `Error s), + Lwt_log_sink_unix.Output.pp ) (* misc args *) @@ -152,21 +154,25 @@ module Term = struct let log_output = let doc = - "Log output. Either $(i,stdout), $(i,stderr), \ - $(i,syslog:<facility>) or a file path." in - Arg.(value & opt (some log_output_converter) None & - info ~docs ~docv:"OUTPUT" ~doc ["log-output"]) + "Log output. Either $(i,stdout), $(i,stderr), $(i,syslog:<facility>) or \ + a file path." + in + Arg.( + value + & opt (some log_output_converter) None + & info ~docs ~docv:"OUTPUT" ~doc ["log-output"]) let data_dir = - let doc = - "The directory where the Tezos node will store all its data." in - Arg.(value & opt (some string) None & - info ~docs ~doc ~docv:"DIR" ["data-dir"]) + let doc = "The directory where the Tezos node will store all its data." in + Arg.( + value & opt (some string) None & info ~docs ~doc ~docv:"DIR" ["data-dir"]) let config_file = let doc = "The main configuration file." in - Arg.(value & opt (some string) None & - info ~docs ~doc ~docv:"FILE" ["config-file"]) + Arg.( + value + & opt (some string) None + & info ~docs ~doc ~docv:"FILE" ["config-file"]) (* P2p args *) @@ -174,99 +180,118 @@ module Term = struct let connections = let doc = - "Sets min_connections, expected_connections, max_connections to NUM / 2, \ - NUM, (3 * NUM) / 2, respectively. Sets peer_table_size to 8 * NUM \ + "Sets min_connections, expected_connections, max_connections to NUM / \ + 2, NUM, (3 * NUM) / 2, respectively. Sets peer_table_size to 8 * NUM \ unless it is already defined in the configuration file. Sets \ bootstrap_threshold to min(NUM / 4, 2) unless it is already defined in \ - the configuration file." in - Arg.(value & opt (some int) None & - info ~docs ~doc ~docv:"NUM" ["connections"]) + the configuration file." + in + Arg.( + value & opt (some int) None & info ~docs ~doc ~docv:"NUM" ["connections"]) let max_download_speed = - let doc = - "The maximum number of bytes read per second." in - Arg.(value & opt (some int) None & - info ~docs ~doc ~docv:"NUM" ["max-download-speed"]) + let doc = "The maximum number of bytes read per second." in + Arg.( + value + & opt (some int) None + & info ~docs ~doc ~docv:"NUM" ["max-download-speed"]) let max_upload_speed = - let doc = - "The maximum number of bytes sent per second." in - Arg.(value & opt (some int) None & - info ~docs ~doc ~docv:"NUM" ["max-upload-speed"]) + let doc = "The maximum number of bytes sent per second." in + Arg.( + value + & opt (some int) None + & info ~docs ~doc ~docv:"NUM" ["max-upload-speed"]) let binary_chunks_size = let doc = - "Size limit (in kB) of binary blocks that are sent to other peers." 
in - Arg.(value & opt (some int) None & - info ~docs ~doc ~docv:"NUM" ["binary-chunks-size"]) + "Size limit (in kB) of binary blocks that are sent to other peers." + in + Arg.( + value + & opt (some int) None + & info ~docs ~doc ~docv:"NUM" ["binary-chunks-size"]) let peer_table_size = - let doc = "Maximum size of internal peer tables, \ - used to store metadata/logs about a peer or about a \ - to-be-authenticated host:port couple." in - Arg.(value & opt (some int) None & - info ~docs ~doc ~docv:"NUM" ["peer-table-size"]) + let doc = + "Maximum size of internal peer tables, used to store metadata/logs \ + about a peer or about a to-be-authenticated host:port couple." + in + Arg.( + value + & opt (some int) None + & info ~docs ~doc ~docv:"NUM" ["peer-table-size"]) let listen_addr = let doc = - "The TCP address and port at which this instance can be reached." in - Arg.(value & opt (some string) None & - info ~docs ~doc ~docv:"ADDR:PORT" ["net-addr"]) + "The TCP address and port at which this instance can be reached." + in + Arg.( + value + & opt (some string) None + & info ~docs ~doc ~docv:"ADDR:PORT" ["net-addr"]) let discovery_addr = let doc = "The UDP address and port used for local peer discovery." in - Arg.(value & opt (some string) None & - info ~docs ~doc ~docv:"ADDR:PORT" ["discovery-addr"]) + Arg.( + value + & opt (some string) None + & info ~docs ~doc ~docv:"ADDR:PORT" ["discovery-addr"]) let no_bootstrap_peers = let doc = - "Ignore the peers found in the config file (or the hard-coded \ - bootstrap peers in the absence of config file)." in - Arg.(value & flag & - info ~docs ~doc ["no-bootstrap-peers"]) + "Ignore the peers found in the config file (or the hard-coded bootstrap \ + peers in the absence of config file)." + in + Arg.(value & flag & info ~docs ~doc ["no-bootstrap-peers"]) let bootstrap_threshold = let doc = - "Set the number of peers with whom a chain synchronization must \ - be completed to bootstrap the node" in - Arg.(value & opt (some int) None & - info ~docs ~doc ~docv:"NUM" ["bootstrap-threshold"]) + "Set the number of peers with whom a chain synchronization must be \ + completed to bootstrap the node" + in + Arg.( + value + & opt (some int) None + & info ~docs ~doc ~docv:"NUM" ["bootstrap-threshold"]) let peers = let doc = - "A peer to bootstrap the network from. \ - Can be used several times to add several peers." in - Arg.(value & opt_all string [] & - info ~docs ~doc ~docv:"ADDR:PORT" ["peer"]) + "A peer to bootstrap the network from. Can be used several times to add \ + several peers." + in + Arg.( + value & opt_all string [] & info ~docs ~doc ~docv:"ADDR:PORT" ["peer"]) let expected_pow = - let doc = - "Expected level of proof-of-work for peers identity." in - Arg.(value & opt (some float) None & - info ~docs ~doc ~docv:"FLOAT" ["expected-pow"]) + let doc = "Expected level of proof-of-work for peers identity." in + Arg.( + value + & opt (some float) None + & info ~docs ~doc ~docv:"FLOAT" ["expected-pow"]) let private_mode = let doc = - "Only open outgoing/accept incoming connections to/from peers \ - listed in 'bootstrap-peers' or provided with '--peer' option." in + "Only open outgoing/accept incoming connections to/from peers listed in \ + 'bootstrap-peers' or provided with '--peer' option." + in Arg.(value & flag & info ~docs ~doc ["private-mode"]) let disable_mempool = let doc = - "If set to [true], the node will not participate in the propagation \ - of pending operations (mempool). \ - Default value is [false]. 
\ - It can be used to decrease the memory and computation footprints \ - of the node." in + "If set to [true], the node will not participate in the propagation of \ + pending operations (mempool). Default value is [false]. It can be used \ + to decrease the memory and computation footprints of the node." + in Arg.(value & flag & info ~docs ~doc ["disable-mempool"]) let disable_testchain = let doc = - "If set to [true], the node will not spawn a testchain during \ - the protocol's testing voting period. \ - Default value is [false]. It may be used used to decrease the \ - node storage usage and computation by droping the validation \ - of the test network blocks." in + "If set to [true], the node will not spawn a testchain during the \ + protocol's testing voting period. Default value is [false]. It may be \ + used used to decrease the node storage usage and computation by \ + droping the validation of the test network blocks." + in Arg.(value & flag & info ~docs ~doc ["disable-testchain"]) (* rpc args *) @@ -274,109 +299,147 @@ module Term = struct let rpc_listen_addr = let doc = - "The TCP socket address at which this RPC server \ - instance can be reached." in - Arg.(value & opt (some string) None & - info ~docs ~doc ~docv:"ADDR:PORT" ["rpc-addr"]) + "The TCP socket address at which this RPC server instance can be reached." + in + Arg.( + value + & opt (some string) None + & info ~docs ~doc ~docv:"ADDR:PORT" ["rpc-addr"]) let rpc_tls = let doc = - "Enable TLS for this RPC server \ - with the provided certificate and key." in - Arg.(value & opt (some (pair string string)) None & - info ~docs ~doc ~docv:"crt,key" ["rpc-tls"]) + "Enable TLS for this RPC server with the provided certificate and key." + in + Arg.( + value + & opt (some (pair string string)) None + & info ~docs ~doc ~docv:"crt,key" ["rpc-tls"]) let cors_origins = let doc = - "CORS origin allowed by the RPC server \ - via Access-Control-Allow-Origin; may be used multiple times" in - Arg.(value & opt_all string [] & - info ~docs ~doc ~docv:"ORIGIN" ["cors-origin"]) + "CORS origin allowed by the RPC server via Access-Control-Allow-Origin; \ + may be used multiple times" + in + Arg.( + value & opt_all string [] & info ~docs ~doc ~docv:"ORIGIN" ["cors-origin"]) let cors_headers = let doc = - "Header reported by Access-Control-Allow-Headers \ - reported during CORS preflighting; may be used multiple times" in - Arg.(value & opt_all string [] & - info ~docs ~doc ~docv:"HEADER" ["cors-header"]) + "Header reported by Access-Control-Allow-Headers reported during CORS \ + preflighting; may be used multiple times" + in + Arg.( + value & opt_all string [] & info ~docs ~doc ~docv:"HEADER" ["cors-header"]) (* History mode. *) let history_mode_converter = let open History_mode in - let conv s = match s with - | "archive" -> `Ok Archive - | "full" -> `Ok Full - | "experimental-rolling" -> `Ok Rolling - | s -> `Error s in + let conv s = + match s with + | "archive" -> + `Ok Archive + | "full" -> + `Ok Full + | "experimental-rolling" -> + `Ok Rolling + | s -> + `Error s + in let to_string = Format.asprintf "%a" History_mode.pp in let pp fmt mode = Format.fprintf fmt "%s" (to_string mode) in (conv, pp) let history_mode = - let doc = "History mode. Possible values: \ - $(i,archive), $(i,full) (used by default), $(i,experimental-rolling)" in - Arg.(value & opt (some history_mode_converter) None & - info ~docs ~doc ~docv:"History mode" ["history-mode"]) + let doc = + "History mode. 
Possible values: $(i,archive), $(i,full) (used by \ + default), $(i,experimental-rolling)" + in + Arg.( + value + & opt (some history_mode_converter) None + & info ~docs ~doc ~docv:"History mode" ["history-mode"]) (* Args. *) let args = let open Term in - const wrap $ data_dir $ config_file - $ connections - $ max_download_speed $ max_upload_speed $ binary_chunks_size - $ peer_table_size - $ listen_addr $ discovery_addr $ peers $ no_bootstrap_peers $ bootstrap_threshold - $ private_mode $ disable_mempool $ disable_testchain - $ expected_pow $ rpc_listen_addr $ rpc_tls - $ cors_origins $ cors_headers - $ log_output + const wrap $ data_dir $ config_file $ connections $ max_download_speed + $ max_upload_speed $ binary_chunks_size $ peer_table_size $ listen_addr + $ discovery_addr $ peers $ no_bootstrap_peers $ bootstrap_threshold + $ private_mode $ disable_mempool $ disable_testchain $ expected_pow + $ rpc_listen_addr $ rpc_tls $ cors_origins $ cors_headers $ log_output $ history_mode - end let read_config_file args = if Sys.file_exists args.config_file then Node_config_file.read args.config_file - else - return Node_config_file.default_config + else return Node_config_file.default_config let read_data_dir args = - read_config_file args >>=? fun cfg -> - let { data_dir ; _ } = args in + read_config_file args + >>=? fun cfg -> + let {data_dir; _} = args in let data_dir = Option.unopt ~default:cfg.data_dir data_dir in return data_dir -let read_and_patch_config_file ?(ignore_bootstrap_peers=false) args = - read_config_file args >>=? fun cfg -> - let { data_dir ; - min_connections ; expected_connections ; max_connections ; - max_download_speed ; max_upload_speed ; binary_chunks_size ; - peer_table_size ; - expected_pow ; - peers ; no_bootstrap_peers ; - listen_addr ; private_mode ; - discovery_addr ; - disable_mempool ; disable_testchain ; - rpc_listen_addr ; rpc_tls ; - cors_origins ; cors_headers ; - log_output ; - bootstrap_threshold ; - history_mode ; - config_file = _ ; - } = args in +let read_and_patch_config_file ?(ignore_bootstrap_peers = false) args = + read_config_file args + >>=? 
fun cfg -> + let { data_dir; + min_connections; + expected_connections; + max_connections; + max_download_speed; + max_upload_speed; + binary_chunks_size; + peer_table_size; + expected_pow; + peers; + no_bootstrap_peers; + listen_addr; + private_mode; + discovery_addr; + disable_mempool; + disable_testchain; + rpc_listen_addr; + rpc_tls; + cors_origins; + cors_headers; + log_output; + bootstrap_threshold; + history_mode; + config_file = _ } = + args + in let bootstrap_peers = - if no_bootstrap_peers || ignore_bootstrap_peers - then begin + if no_bootstrap_peers || ignore_bootstrap_peers then ( log_info "Ignoring bootstrap peers" ; - peers - end else - cfg.p2p.bootstrap_peers @ peers in + peers ) + else cfg.p2p.bootstrap_peers @ peers + in Node_config_file.update - ?data_dir ?min_connections ?expected_connections ?max_connections - ?max_download_speed ?max_upload_speed ?binary_chunks_size - ?peer_table_size ?expected_pow - ~bootstrap_peers ?listen_addr ?discovery_addr ?rpc_listen_addr ~private_mode - ~disable_mempool ~disable_testchain ~cors_origins ~cors_headers ?rpc_tls - ?log_output ?bootstrap_threshold ?history_mode cfg + ?data_dir + ?min_connections + ?expected_connections + ?max_connections + ?max_download_speed + ?max_upload_speed + ?binary_chunks_size + ?peer_table_size + ?expected_pow + ~bootstrap_peers + ?listen_addr + ?discovery_addr + ?rpc_listen_addr + ~private_mode + ~disable_mempool + ~disable_testchain + ~cors_origins + ~cors_headers + ?rpc_tls + ?log_output + ?bootstrap_threshold + ?history_mode + cfg diff --git a/src/bin_node/node_shared_arg.mli b/src/bin_node/node_shared_arg.mli index 1face9304ab22dc083b919ced0c7678966367541..8ace36451c51e9787096a8da2dced2a4047b334f 100644 --- a/src/bin_node/node_shared_arg.mli +++ b/src/bin_node/node_shared_arg.mli @@ -25,45 +25,49 @@ (*****************************************************************************) type t = { - data_dir: string option ; - config_file: string ; - min_connections: int option ; - expected_connections: int option ; - max_connections: int option ; - max_download_speed: int option ; - max_upload_speed: int option ; - binary_chunks_size: int option ; - peer_table_size: int option ; - expected_pow: float option ; - peers: string list ; - no_bootstrap_peers: bool ; - listen_addr: string option ; - discovery_addr: string option ; - rpc_listen_addr: string option ; - private_mode: bool ; - disable_mempool: bool ; - disable_testchain: bool ; - cors_origins: string list ; - cors_headers: string list ; - rpc_tls: Node_config_file.tls option ; - log_output: Lwt_log_sink_unix.Output.t option ; - bootstrap_threshold: int option ; - history_mode: History_mode.t option ; + data_dir : string option; + config_file : string; + min_connections : int option; + expected_connections : int option; + max_connections : int option; + max_download_speed : int option; + max_upload_speed : int option; + binary_chunks_size : int option; + peer_table_size : int option; + expected_pow : float option; + peers : string list; + no_bootstrap_peers : bool; + listen_addr : string option; + discovery_addr : string option; + rpc_listen_addr : string option; + private_mode : bool; + disable_mempool : bool; + disable_testchain : bool; + cors_origins : string list; + cors_headers : string list; + rpc_tls : Node_config_file.tls option; + log_output : Lwt_log_sink_unix.Output.t option; + bootstrap_threshold : int option; + history_mode : History_mode.t option } module Term : sig - val args: t Cmdliner.Term.t - val data_dir: string option Cmdliner.Term.t 
- val config_file: string option Cmdliner.Term.t + val args : t Cmdliner.Term.t + + val data_dir : string option Cmdliner.Term.t + + val config_file : string option Cmdliner.Term.t end -val read_data_dir: t -> string tzresult Lwt.t +val read_data_dir : t -> string tzresult Lwt.t -val read_and_patch_config_file: ?ignore_bootstrap_peers:bool -> t -> Node_config_file.t tzresult Lwt.t +val read_and_patch_config_file : + ?ignore_bootstrap_peers:bool -> t -> Node_config_file.t tzresult Lwt.t module Manpage : sig - val misc_section: string - val args: Cmdliner.Manpage.block list - val bugs: Cmdliner.Manpage.block list -end + val misc_section : string + val args : Cmdliner.Manpage.block list + + val bugs : Cmdliner.Manpage.block list +end diff --git a/src/bin_node/node_snapshot_command.ml b/src/bin_node/node_snapshot_command.ml index b84fca312acdf684f4ad32fb634db5188772fb86..e191ca2fcb5c16d27ca533ea6ee0da8e6215ed2d 100644 --- a/src/bin_node/node_snapshot_command.ml +++ b/src/bin_node/node_snapshot_command.ml @@ -26,66 +26,90 @@ open Node_logging -let (//) = Filename.concat +let ( // ) = Filename.concat + let context_dir data_dir = data_dir // "context" + let store_dir data_dir = data_dir // "store" (** Main *) module Term = struct - type subcommand = Export | Import let dir_cleaner data_dir = - lwt_log_notice "Cleaning directory %s because of failure" data_dir >>= fun () -> - Lwt_utils_unix.remove_dir @@ store_dir data_dir >>= fun () -> - Lwt_utils_unix.remove_dir @@ context_dir data_dir + lwt_log_notice "Cleaning directory %s because of failure" data_dir + >>= fun () -> + Lwt_utils_unix.remove_dir @@ store_dir data_dir + >>= fun () -> Lwt_utils_unix.remove_dir @@ context_dir data_dir let process subcommand args snapshot_file block export_rolling = let run = - Internal_event_unix.init () >>= fun () -> - Node_shared_arg.read_data_dir args >>=? fun data_dir -> + Internal_event_unix.init () + >>= fun () -> + Node_shared_arg.read_data_dir args + >>=? fun data_dir -> let genesis = Genesis_chain.genesis in match subcommand with | Export -> - Node_data_version.ensure_data_dir data_dir >>=? fun () -> + Node_data_version.ensure_data_dir data_dir + >>=? fun () -> let context_root = context_dir data_dir in let store_root = store_dir data_dir in - Store.init store_root >>=? fun store -> - Context.init ~readonly:true context_root >>= fun context_index -> + Store.init store_root + >>=? fun store -> + Context.init ~readonly:true context_root + >>= fun context_index -> Snapshots.export ~export_rolling ~context_index ~store - ~genesis:genesis.block snapshot_file block >>=? fun () -> - Store.close store |> return + ~genesis:genesis.block + snapshot_file + block + >>=? fun () -> Store.close store |> return | Import -> - Node_data_version.ensure_data_dir ~bare:true data_dir >>=? fun () -> - Lwt_lock_file.create ~unlink_on_exit:true - (Node_data_version.lock_file data_dir) >>=? fun () -> - Snapshots.import ~data_dir ~dir_cleaner - ~genesis ~patch_context:Patch_context.patch_context - snapshot_file block + Node_data_version.ensure_data_dir ~bare:true data_dir + >>=? fun () -> + Lwt_lock_file.create + ~unlink_on_exit:true + (Node_data_version.lock_file data_dir) + >>=? 
fun () -> + Snapshots.import + ~data_dir + ~dir_cleaner + ~genesis + ~patch_context:Patch_context.patch_context + snapshot_file + block in match Lwt_main.run run with - | Ok () -> `Ok () + | Ok () -> + `Ok () | Error err -> `Error (false, Format.asprintf "%a" pp_print_error err) let subcommand_arg = let parser = function - | "export" -> `Ok Export - | "import" -> `Ok Import - | s -> `Error ("invalid argument: " ^ s) + | "export" -> + `Ok Export + | "import" -> + `Ok Import + | s -> + `Error ("invalid argument: " ^ s) and printer ppf = function - | Export -> Format.fprintf ppf "export" - | Import -> Format.fprintf ppf "import" + | Export -> + Format.fprintf ppf "export" + | Import -> + Format.fprintf ppf "import" in let open Cmdliner.Arg in let doc = - "Operation to perform. \ - Possible values: $(b,export), $(b,import)." in - required & pos 0 (some (parser, printer)) None & info [] ~docv:"OPERATION" ~doc + "Operation to perform. Possible values: $(b,export), $(b,import)." + in + required + & pos 0 (some (parser, printer)) None + & info [] ~docv:"OPERATION" ~doc let file_arg = let open Cmdliner.Arg in @@ -93,65 +117,52 @@ module Term = struct let blocks = let open Cmdliner.Arg in - let doc ="Block hash of the block to export/import." in + let doc = "Block hash of the block to export/import." in value & opt (some string) None & info ~docv:"<block_hash>" ~doc ["block"] let export_rolling = let open Cmdliner in let doc = - "Force export command to dump a minimal snapshot based on the rolling mode." in - Arg.(value & flag & - info ~docs:Node_shared_arg.Manpage.misc_section ~doc ["rolling"]) + "Force export command to dump a minimal snapshot based on the rolling \ + mode." + in + Arg.( + value & flag + & info ~docs:Node_shared_arg.Manpage.misc_section ~doc ["rolling"]) let term = let open Cmdliner.Term in - ret (const process $ subcommand_arg - $ Node_shared_arg.Term.args - $ file_arg - $ blocks - $ export_rolling) - + ret + ( const process $ subcommand_arg $ Node_shared_arg.Term.args $ file_arg + $ blocks $ export_rolling ) end module Manpage = struct - let command_description = "The $(b,snapshot) command is meant to export and import snapshots files." - let description = [ - `S "DESCRIPTION" ; - `P (command_description ^ " Several operations are possible: "); - `P "$(b,export) allows to export a snapshot of the current node state into a file." ; + let description = + [ `S "DESCRIPTION"; + `P (command_description ^ " Several operations are possible: "); + `P + "$(b,export) allows to export a snapshot of the current node state \ + into a file."; + `P "$(b,import) allows to import a snapshot from a given file." ] - `P "$(b,import) allows to import a snapshot from a given file." 
; - ] - - let options = [ - `S "OPTIONS" ; - ] + let options = [`S "OPTIONS"] let examples = - [ - `S "EXAMPLES" ; - `I ("$(b,Export a snapshot using the rolling mode)", - "$(mname) snapshot export latest.rolling --rolling") ; - `I ("$(b,Import a snapshot located in file.full)", - "$(mname) snapshot import file.full") - ] - - let man = - description @ - options @ - examples @ - Node_shared_arg.Manpage.bugs - - let info = - Cmdliner.Term.info - ~doc:"Manage snapshots" - ~man - "snapshot" + [ `S "EXAMPLES"; + `I + ( "$(b,Export a snapshot using the rolling mode)", + "$(mname) snapshot export latest.rolling --rolling" ); + `I + ( "$(b,Import a snapshot located in file.full)", + "$(mname) snapshot import file.full" ) ] + + let man = description @ options @ examples @ Node_shared_arg.Manpage.bugs + let info = Cmdliner.Term.info ~doc:"Manage snapshots" ~man "snapshot" end -let cmd = - Term.term, Manpage.info +let cmd = (Term.term, Manpage.info) diff --git a/src/bin_node/node_snapshot_command.mli b/src/bin_node/node_snapshot_command.mli index b4c12155202f4634be490da2e1e23d264a1bd4f9..8aaeb509e496bb70ccde77eefc1d595b9063e9c6 100644 --- a/src/bin_node/node_snapshot_command.mli +++ b/src/bin_node/node_snapshot_command.mli @@ -24,9 +24,8 @@ (* *) (*****************************************************************************) - val cmd : unit Cmdliner.Term.t * Cmdliner.Term.info module Manpage : sig - val command_description: string + val command_description : string end diff --git a/src/bin_node/patch_context.ml b/src/bin_node/patch_context.ml index 1a9218b4ec4170685b046a2240d9359192feec73..900e39100797df20911c96f2fe8b5486ebd67f98 100644 --- a/src/bin_node/patch_context.ml +++ b/src/bin_node/patch_context.ml @@ -27,25 +27,28 @@ open Genesis_chain let patch_context json ctxt = - begin - match json with - | None -> Lwt.return ctxt - | Some json -> - Tezos_storage.Context.set ctxt - ["sandbox_parameter"] - (Data_encoding.Binary.to_bytes_exn Data_encoding.json json) - end >>= fun ctxt -> + ( match json with + | None -> + Lwt.return ctxt + | Some json -> + Tezos_storage.Context.set + ctxt + ["sandbox_parameter"] + (Data_encoding.Binary.to_bytes_exn Data_encoding.json json) ) + >>= fun ctxt -> let module Proto = (val Registered_protocol.get_exn genesis.protocol) in - Proto.init ctxt { - level = 0l ; - proto_level = 0 ; - predecessor = genesis.block ; - timestamp = genesis.time ; - validation_passes = 0 ; - operations_hash = Operation_list_list_hash.empty ; - fitness = [] ; - context = Context_hash.zero ; - } >>= function - | Error _ -> assert false (* FIXME error *) - | Ok { context = ctxt ; _ } -> + Proto.init + ctxt + { level = 0l; + proto_level = 0; + predecessor = genesis.block; + timestamp = genesis.time; + validation_passes = 0; + operations_hash = Operation_list_list_hash.empty; + fitness = []; + context = Context_hash.zero } + >>= function + | Error _ -> + assert false (* FIXME error *) + | Ok {context = ctxt; _} -> Lwt.return ctxt diff --git a/src/bin_node/patch_context.mli b/src/bin_node/patch_context.mli index fa2873b79a277409e7da29e5c7bc458f17059976..40755734b5b359ba1c2a18f5e31549e27b642661 100644 --- a/src/bin_node/patch_context.mli +++ b/src/bin_node/patch_context.mli @@ -24,5 +24,4 @@ (* *) (*****************************************************************************) - -val patch_context: Data_encoding.json option -> Context.t -> Context.t Lwt.t +val patch_context : Data_encoding.json option -> Context.t -> Context.t Lwt.t diff --git a/src/bin_signer/.ocamlformat 
b/src/bin_signer/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/bin_signer/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/bin_signer/handler.ml b/src/bin_signer/handler.ml index 03ac7a3863603491020d17170cf02cbbefda22d4..cadc6fe229a60066b666d3d050e7be360e624e17 100644 --- a/src/bin_signer/handler.ml +++ b/src/bin_signer/handler.ml @@ -30,200 +30,269 @@ let log = lwt_log_notice module High_watermark = struct let encoding = let open Data_encoding in - let raw_hash = - conv Blake2B.to_bytes Blake2B.of_bytes_exn bytes in + let raw_hash = conv Blake2B.to_bytes Blake2B.of_bytes_exn bytes in conv - (List.map (fun (chain_id, marks) -> Chain_id.to_b58check chain_id, marks)) - (List.map (fun (chain_id, marks) -> Chain_id.of_b58check_exn chain_id, marks)) @@ - assoc @@ - conv - (List.map (fun (pkh, mark) -> Signature.Public_key_hash.to_b58check pkh, mark)) - (List.map (fun (pkh, mark) -> Signature.Public_key_hash.of_b58check_exn pkh, mark)) @@ - assoc @@ - obj3 - (req "level" int32) - (req "hash" raw_hash) - (opt "signature" Signature.encoding) + (List.map (fun (chain_id, marks) -> + (Chain_id.to_b58check chain_id, marks))) + (List.map (fun (chain_id, marks) -> + (Chain_id.of_b58check_exn chain_id, marks))) + @@ assoc + @@ conv + (List.map (fun (pkh, mark) -> + (Signature.Public_key_hash.to_b58check pkh, mark))) + (List.map (fun (pkh, mark) -> + (Signature.Public_key_hash.of_b58check_exn pkh, mark))) + @@ assoc + @@ obj3 + (req "level" int32) + (req "hash" raw_hash) + (opt "signature" Signature.encoding) - let mark_if_block_or_endorsement (cctxt : #Client_context.wallet) pkh bytes sign = + let mark_if_block_or_endorsement (cctxt : #Client_context.wallet) pkh bytes + sign = let mark art name get_level = let file = name ^ "_high_watermark" in - cctxt#with_lock @@ fun () -> - cctxt#load file ~default:[] encoding >>=? fun all -> + cctxt#with_lock + @@ fun () -> + cctxt#load file ~default:[] encoding + >>=? fun all -> if MBytes.length bytes < 9 then failwith "byte sequence too short to be %s %s" art name else - let hash = Blake2B.hash_bytes [ bytes ] in + let hash = Blake2B.hash_bytes [bytes] in let chain_id = Chain_id.of_bytes_exn (MBytes.sub bytes 1 4) in let level = get_level () in - begin match List.assoc_opt chain_id all with - | None -> return_none - | Some marks -> - match List.assoc_opt pkh marks with - | None -> return_none - | Some (previous_level, _, None) -> - if previous_level >= level then - failwith "%s level %ld not above high watermark %ld" name level previous_level - else - return_none - | Some (previous_level, previous_hash, Some signature) -> - if previous_level > level then - failwith "%s level %ld below high watermark %ld" name level previous_level - else if previous_level = level then - if previous_hash <> hash then - failwith "%s level %ld already signed with different data" name level - else - return_some signature - else return_none - end >>=? 
function - | Some signature -> return signature + ( match List.assoc_opt chain_id all with + | None -> + return_none + | Some marks -> ( + match List.assoc_opt pkh marks with + | None -> + return_none + | Some (previous_level, _, None) -> + if previous_level >= level then + failwith + "%s level %ld not above high watermark %ld" + name + level + previous_level + else return_none + | Some (previous_level, previous_hash, Some signature) -> + if previous_level > level then + failwith + "%s level %ld below high watermark %ld" + name + level + previous_level + else if previous_level = level then + if previous_hash <> hash then + failwith + "%s level %ld already signed with different data" + name + level + else return_some signature + else return_none ) ) + >>=? function + | Some signature -> + return signature | None -> - sign bytes >>=? fun signature -> + sign bytes + >>=? fun signature -> let rec update = function - | [] -> [ chain_id, [ pkh, (level, hash, Some signature) ] ] + | [] -> + [(chain_id, [(pkh, (level, hash, Some signature))])] | (e_chain_id, marks) :: rest -> if chain_id = e_chain_id then - let marks = (pkh, (level, hash, Some signature)) :: List.filter (fun (pkh', _) -> pkh <> pkh') marks in + let marks = + (pkh, (level, hash, Some signature)) + :: List.filter (fun (pkh', _) -> pkh <> pkh') marks + in (e_chain_id, marks) :: rest - else - (e_chain_id, marks) :: update rest in - cctxt#write file (update all) encoding >>=? fun () -> - return signature in + else (e_chain_id, marks) :: update rest + in + cctxt#write file (update all) encoding + >>=? fun () -> return signature + in if MBytes.length bytes > 0 && MBytes.get_uint8 bytes 0 = 0x01 then mark "a" "block" (fun () -> MBytes.get_int32 bytes 5) else if MBytes.length bytes > 0 && MBytes.get_uint8 bytes 0 = 0x02 then - mark "an" "endorsement" (fun () -> MBytes.get_int32 bytes (MBytes.length bytes - 4)) + mark "an" "endorsement" (fun () -> + MBytes.get_int32 bytes (MBytes.length bytes - 4)) else sign bytes - end -module Authorized_key = - Client_aliases.Alias (struct - include Signature.Public_key - let name = "authorized_key" - let to_source s = return (to_b58check s) - let of_source t = Lwt.return (of_b58check t) - end) +module Authorized_key = Client_aliases.Alias (struct + include Signature.Public_key + + let name = "authorized_key" + + let to_source s = return (to_b58check s) + + let of_source t = Lwt.return (of_b58check t) +end) let check_magic_byte magic_bytes data = match magic_bytes with - | None -> return_unit + | None -> + return_unit | Some magic_bytes -> let byte = MBytes.get_uint8 data 0 in - if MBytes.length data > 1 - && (List.mem byte magic_bytes) then - return_unit - else - failwith "magic byte 0x%02X not allowed" byte - + if MBytes.length data > 1 && List.mem byte magic_bytes then return_unit + else failwith "magic byte 0x%02X not allowed" byte let check_authorization cctxt pkh data require_auth signature = - match require_auth, signature with - | false, _ -> return_unit - | true, None -> failwith "missing authentication signature field" - | true, Some signature -> + match (require_auth, signature) with + | (false, _) -> + return_unit + | (true, None) -> + failwith "missing authentication signature field" + | (true, Some signature) -> let to_sign = Signer_messages.Sign.Request.to_sign ~pkh ~data in - Authorized_key.load cctxt >>=? fun keys -> - if List.fold_left + Authorized_key.load cctxt + >>=? 
fun keys -> + if + List.fold_left (fun acc (_, key) -> acc || Signature.check key signature to_sign) - false keys - then - return_unit - else - failwith "invalid authentication signature" + false + keys + then return_unit + else failwith "invalid authentication signature" -let sign - (cctxt : #Client_context.wallet) - Signer_messages.Sign.Request.{ pkh ; data ; signature } - ?magic_bytes ~check_high_watermark ~require_auth = - log Tag.DSL.(fun f -> - f "Request for signing %d bytes of data for key %a, magic byte = %02X" - -% t event "request_for_signing" - -% s num_bytes (MBytes.length data) - -% a Signature.Public_key_hash.Logging.tag pkh - -% s magic_byte (MBytes.get_uint8 data 0)) >>= fun () -> - check_magic_byte magic_bytes data >>=? fun () -> - check_authorization cctxt pkh data require_auth signature >>=? fun () -> - Client_keys.get_key cctxt pkh >>=? fun (name, _pkh, sk_uri) -> - log Tag.DSL.(fun f -> - f "Signing data for key %s" - -% t event "signing_data" - -% s Client_keys.Logging.tag name) >>= fun () -> +let sign (cctxt : #Client_context.wallet) + Signer_messages.Sign.Request.{pkh; data; signature} ?magic_bytes + ~check_high_watermark ~require_auth = + log + Tag.DSL.( + fun f -> + f "Request for signing %d bytes of data for key %a, magic byte = %02X" + -% t event "request_for_signing" + -% s num_bytes (MBytes.length data) + -% a Signature.Public_key_hash.Logging.tag pkh + -% s magic_byte (MBytes.get_uint8 data 0)) + >>= fun () -> + check_magic_byte magic_bytes data + >>=? fun () -> + check_authorization cctxt pkh data require_auth signature + >>=? fun () -> + Client_keys.get_key cctxt pkh + >>=? fun (name, _pkh, sk_uri) -> + log + Tag.DSL.( + fun f -> + f "Signing data for key %s" + -% t event "signing_data" + -% s Client_keys.Logging.tag name) + >>= fun () -> let sign = Client_keys.sign cctxt sk_uri in if check_high_watermark then High_watermark.mark_if_block_or_endorsement cctxt pkh data sign - else - sign data + else sign data -let deterministic_nonce - (cctxt : #Client_context.wallet) - Signer_messages.Deterministic_nonce.Request.{ pkh ; data ; signature } +let deterministic_nonce (cctxt : #Client_context.wallet) + Signer_messages.Deterministic_nonce.Request.{pkh; data; signature} ~require_auth = - log Tag.DSL.(fun f -> - f "Request for creating a nonce from %d input bytes for key %a" - -% t event "request_for_deterministic_nonce" - -% s num_bytes (MBytes.length data) - -% a Signature.Public_key_hash.Logging.tag pkh) >>= fun () -> - check_authorization cctxt pkh data require_auth signature >>=? fun () -> - Client_keys.get_key cctxt pkh >>=? fun (name, _pkh, sk_uri) -> - log Tag.DSL.(fun f -> - f "Creating nonce for key %s" - -% t event "creating_nonce" - -% s Client_keys.Logging.tag name) >>= fun () -> - Client_keys.deterministic_nonce sk_uri data + log + Tag.DSL.( + fun f -> + f "Request for creating a nonce from %d input bytes for key %a" + -% t event "request_for_deterministic_nonce" + -% s num_bytes (MBytes.length data) + -% a Signature.Public_key_hash.Logging.tag pkh) + >>= fun () -> + check_authorization cctxt pkh data require_auth signature + >>=? fun () -> + Client_keys.get_key cctxt pkh + >>=? 
fun (name, _pkh, sk_uri) ->
+  log
+    Tag.DSL.(
+      fun f ->
+        f "Creating nonce for key %s"
+        -% t event "creating_nonce"
+        -% s Client_keys.Logging.tag name)
+  >>= fun () -> Client_keys.deterministic_nonce sk_uri data
 
-let deterministic_nonce_hash
-    (cctxt : #Client_context.wallet)
-    Signer_messages.Deterministic_nonce_hash.Request.{ pkh ; data ; signature }
+let deterministic_nonce_hash (cctxt : #Client_context.wallet)
+    Signer_messages.Deterministic_nonce_hash.Request.{pkh; data; signature}
     ~require_auth =
-  log Tag.DSL.(fun f ->
-      f "Request for creating a nonce hash from %d input bytes for key %a"
-      -% t event "request_for_deterministic_nonce_hash"
-      -% s num_bytes (MBytes.length data)
-      -% a Signature.Public_key_hash.Logging.tag pkh) >>= fun () ->
-  check_authorization cctxt pkh data require_auth signature >>=? fun () ->
-  Client_keys.get_key cctxt pkh >>=? fun (name, _pkh, sk_uri) ->
-  log Tag.DSL.(fun f ->
-      f "Creating nonce hash for key %s"
-      -% t event "creating_nonce_hash"
-      -% s Client_keys.Logging.tag name) >>= fun () ->
-  Client_keys.deterministic_nonce_hash sk_uri data
+  log
+    Tag.DSL.(
+      fun f ->
+        f "Request for creating a nonce hash from %d input bytes for key %a"
+        -% t event "request_for_deterministic_nonce_hash"
+        -% s num_bytes (MBytes.length data)
+        -% a Signature.Public_key_hash.Logging.tag pkh)
+  >>= fun () ->
+  check_authorization cctxt pkh data require_auth signature
+  >>=? fun () ->
+  Client_keys.get_key cctxt pkh
+  >>=? fun (name, _pkh, sk_uri) ->
+  log
+    Tag.DSL.(
+      fun f ->
+        f "Creating nonce hash for key %s"
+        -% t event "creating_nonce_hash"
+        -% s Client_keys.Logging.tag name)
+  >>= fun () -> Client_keys.deterministic_nonce_hash sk_uri data
 
 let supports_deterministic_nonces (cctxt : #Client_context.wallet) pkh =
-  log Tag.DSL.(fun f ->
-      f "Request for checking whether the signer supports deterministic nonces for key %a"
-      -% t event "request_for_supports_deterministic_nonces"
-      -% a Signature.Public_key_hash.Logging.tag pkh) >>= fun () ->
-  Client_keys.get_key cctxt pkh >>=? fun (name, _pkh, sk_uri) ->
-  log Tag.DSL.(fun f ->
-      f "Returns true if and only if signer can generate determinstic nonces for key %s"
-      -% t event "supports_deterministic_nonces"
-      -% s Client_keys.Logging.tag name) >>= fun () ->
-  Client_keys.supports_deterministic_nonces sk_uri
+  log
+    Tag.DSL.(
+      fun f ->
+        f
+          "Request for checking whether the signer supports deterministic \
+           nonces for key %a"
+        -% t event "request_for_supports_deterministic_nonces"
+        -% a Signature.Public_key_hash.Logging.tag pkh)
+  >>= fun () ->
+  Client_keys.get_key cctxt pkh
+  >>=? fun (name, _pkh, sk_uri) ->
+  log
+    Tag.DSL.(
+      fun f ->
+        f
+          "Returns true if and only if signer can generate deterministic \
+           nonces for key %s"
+        -% t event "supports_deterministic_nonces"
+        -% s Client_keys.Logging.tag name)
+  >>= fun () -> Client_keys.supports_deterministic_nonces sk_uri
 
 let public_key (cctxt : #Client_context.wallet) pkh =
-  log Tag.DSL.(fun f ->
-      f "Request for public key %a"
-      -% t event "request_for_public_key"
-      -% a Signature.Public_key_hash.Logging.tag pkh) >>= fun () ->
-  Client_keys.list_keys cctxt >>=? fun all_keys ->
-  match List.find_opt (fun (_, h, _, _) -> Signature.Public_key_hash.equal h pkh) all_keys with
+  log
+    Tag.DSL.(
+      fun f ->
+        f "Request for public key %a"
+        -% t event "request_for_public_key"
+        -% a Signature.Public_key_hash.Logging.tag pkh)
+  >>= fun () ->
+  Client_keys.list_keys cctxt
+  >>=? 
fun all_keys -> + match + List.find_opt + (fun (_, h, _, _) -> Signature.Public_key_hash.equal h pkh) + all_keys + with | None -> - log Tag.DSL.(fun f -> - f "No public key found for hash %a" - -% t event "not_found_public_key" - -% a Signature.Public_key_hash.Logging.tag pkh) >>= fun () -> - Lwt.fail Not_found + log + Tag.DSL.( + fun f -> + f "No public key found for hash %a" + -% t event "not_found_public_key" + -% a Signature.Public_key_hash.Logging.tag pkh) + >>= fun () -> Lwt.fail Not_found | Some (_, _, None, _) -> - log Tag.DSL.(fun f -> - f "No public key found for hash %a" - -% t event "not_found_public_key" - -% a Signature.Public_key_hash.Logging.tag pkh) >>= fun () -> - Lwt.fail Not_found + log + Tag.DSL.( + fun f -> + f "No public key found for hash %a" + -% t event "not_found_public_key" + -% a Signature.Public_key_hash.Logging.tag pkh) + >>= fun () -> Lwt.fail Not_found | Some (name, _, Some pk, _) -> - log Tag.DSL.(fun f -> - f "Found public key for hash %a (name: %s)" - -% t event "found_public_key" - -% a Signature.Public_key_hash.Logging.tag pkh - -% s Client_keys.Logging.tag name) >>= fun () -> - return pk + log + Tag.DSL.( + fun f -> + f "Found public key for hash %a (name: %s)" + -% t event "found_public_key" + -% a Signature.Public_key_hash.Logging.tag pkh + -% s Client_keys.Logging.tag name) + >>= fun () -> return pk diff --git a/src/bin_signer/handler.mli b/src/bin_signer/handler.mli index 69ef13d747f54eab522afc5cf3a4fe5a39f54699..f992d2353f6ac5e4bbc7a13291c3cf4c67258e48 100644 --- a/src/bin_signer/handler.mli +++ b/src/bin_signer/handler.mli @@ -23,42 +23,45 @@ (* *) (*****************************************************************************) +(** Storage for keys that have been authorized for baking. *) module Authorized_key : Client_aliases.Alias with type t := Signature.public_key -(** Storage for keys that have been authorized for baking. *) -val public_key : - #Client_context.wallet -> - Signature.public_key_hash -> Signature.public_key tzresult Lwt.t (** [public_key cctxt pkh] returns the public key whose hash is [pkh] iff it is present if [cctxt]. *) +val public_key : + #Client_context.wallet -> + Signature.public_key_hash -> + Signature.public_key tzresult Lwt.t +(** [sign cctxt req ?magic_bytes ~check_high_watermark ~require_auth] + signs [req] and returns a signature. *) val sign : #Client_context.wallet -> Signer_messages.Sign.Request.t -> ?magic_bytes:int list -> - check_high_watermark:bool -> require_auth:bool -> Signature.t tzresult Lwt.t -(** [sign cctxt req ?magic_bytes ~check_high_watermark ~require_auth] - signs [req] and returns a signature. *) + check_high_watermark:bool -> + require_auth:bool -> + Signature.t tzresult Lwt.t +(** [deterministic_nonce cctxt req ~require_auth] generates + deterministically a nonce from [req.data]. *) val deterministic_nonce : #Client_context.wallet -> Signer_messages.Deterministic_nonce.Request.t -> - require_auth:bool -> MBytes.t tzresult Lwt.t -(** [deterministic_nonce cctxt req ~require_auth] generates - deterministically a nonce from [req.data]. *) + require_auth:bool -> + MBytes.t tzresult Lwt.t -val deterministic_nonce_hash : - #Client_context.wallet -> - Signer_messages.Deterministic_nonce_hash.Request.t -> - require_auth:bool -> MBytes.t tzresult Lwt.t (** [deterministic_nonce_hash cctxt req ~require_auth] generates deterministically a nonce from [req.data] and returns the hash of this nonce. 
*) - -val supports_deterministic_nonces : +val deterministic_nonce_hash : #Client_context.wallet -> - Signature.public_key_hash -> - bool tzresult Lwt.t + Signer_messages.Deterministic_nonce_hash.Request.t -> + require_auth:bool -> + MBytes.t tzresult Lwt.t + (** [supports_deterministic_nonces cctxt pkh] determines whether the the signer provides the determinsitic nonce functionality. *) +val supports_deterministic_nonces : + #Client_context.wallet -> Signature.public_key_hash -> bool tzresult Lwt.t diff --git a/src/bin_signer/http_daemon.ml b/src/bin_signer/http_daemon.ml index 9b8747059fdb897f52dc66141f3f5301d0f62e50..55ed889a3857c60c1897043ffa8a27a7ed493dce 100644 --- a/src/bin_signer/http_daemon.ml +++ b/src/bin_signer/http_daemon.ml @@ -24,69 +24,111 @@ (*****************************************************************************) let log = Signer_logging.lwt_log_notice + open Signer_logging -let run (cctxt : #Client_context.wallet) ~hosts ?magic_bytes ~check_high_watermark ~require_auth mode = +let run (cctxt : #Client_context.wallet) ~hosts ?magic_bytes + ~check_high_watermark ~require_auth mode = let dir = RPC_directory.empty in let dir = - RPC_directory.register1 dir Signer_services.sign begin fun pkh signature data -> - Handler.sign cctxt { pkh ; data ; signature } ?magic_bytes ~check_high_watermark ~require_auth - end in + RPC_directory.register1 dir Signer_services.sign (fun pkh signature data -> + Handler.sign + cctxt + {pkh; data; signature} + ?magic_bytes + ~check_high_watermark + ~require_auth) + in let dir = - RPC_directory.register1 dir Signer_services.public_key begin fun pkh () () -> - Handler.public_key cctxt pkh - end in + RPC_directory.register1 dir Signer_services.public_key (fun pkh () () -> + Handler.public_key cctxt pkh) + in let dir = - RPC_directory.register0 dir Signer_services.authorized_keys begin fun () () -> - if require_auth then - Handler.Authorized_key.load cctxt >>=? fun keys -> - return_some (keys |> List.split |> snd |> List.map Signature.Public_key.hash) - else - return_none - end in + RPC_directory.register0 dir Signer_services.authorized_keys (fun () () -> + if require_auth then + Handler.Authorized_key.load cctxt + >>=? fun keys -> + return_some + (keys |> List.split |> snd |> List.map Signature.Public_key.hash) + else return_none) + in Lwt.catch (fun () -> - List.map - (fun host -> - let host = Ipaddr.V6.to_string host in - log Tag.DSL.(fun f -> + List.map + (fun host -> + let host = Ipaddr.V6.to_string host in + log + Tag.DSL.( + fun f -> f "Listening on address %s" - -% t event "signer_listening" - -% s host_name host) >>= fun () -> - RPC_server.launch ~host mode dir - ~media_types:Media_type.all_media_types - >>= fun _server -> - fst (Lwt.wait ())) - hosts |> Lwt.choose) + -% t event "signer_listening" -% s host_name host) + >>= fun () -> + RPC_server.launch + ~host + mode + dir + ~media_types:Media_type.all_media_types + >>= fun _server -> fst (Lwt.wait ())) + hosts + |> Lwt.choose) (function - | Unix.Unix_error(Unix.EADDRINUSE, "bind","") -> + | Unix.Unix_error (Unix.EADDRINUSE, "bind", "") -> failwith "Port already in use." 
- | exn -> Lwt.return (error_exn exn)) + | exn -> + Lwt.return (error_exn exn)) -let run_https (cctxt : #Client_context.wallet) ~host ~port ~cert ~key ?magic_bytes ~check_high_watermark ~require_auth = - Lwt_utils_unix.getaddrinfo ~passive:true ~node:host ~service:(string_of_int port) >>= function - | []-> +let run_https (cctxt : #Client_context.wallet) ~host ~port ~cert ~key + ?magic_bytes ~check_high_watermark ~require_auth = + Lwt_utils_unix.getaddrinfo + ~passive:true + ~node:host + ~service:(string_of_int port) + >>= function + | [] -> failwith "Cannot resolve listening address: %S" host | points -> let hosts = fst (List.split points) in - log Tag.DSL.(fun f -> - f "Accepting HTTPS requests on port %d" - -% t event "accepting_https_requests" - -% s port_number port) >>= fun () -> + log + Tag.DSL.( + fun f -> + f "Accepting HTTPS requests on port %d" + -% t event "accepting_https_requests" + -% s port_number port) + >>= fun () -> let mode : Conduit_lwt_unix.server = - `TLS (`Crt_file_path cert, `Key_file_path key, `No_password, `Port port) in - run (cctxt : #Client_context.wallet) ~hosts ?magic_bytes ~check_high_watermark ~require_auth mode + `TLS (`Crt_file_path cert, `Key_file_path key, `No_password, `Port port) + in + run + (cctxt : #Client_context.wallet) + ~hosts + ?magic_bytes + ~check_high_watermark + ~require_auth + mode -let run_http (cctxt : #Client_context.wallet) ~host ~port ?magic_bytes ~check_high_watermark ~require_auth = - Lwt_utils_unix.getaddrinfo ~passive:true ~node:host ~service:(string_of_int port) >>= function +let run_http (cctxt : #Client_context.wallet) ~host ~port ?magic_bytes + ~check_high_watermark ~require_auth = + Lwt_utils_unix.getaddrinfo + ~passive:true + ~node:host + ~service:(string_of_int port) + >>= function | [] -> failwith "Cannot resolve listening address: %S" host | points -> let hosts = fst (List.split points) in - log Tag.DSL.(fun f -> - f "Accepting HTTP requests on port %d" - -% t event "accepting_http_requests" - -% s port_number port) >>= fun () -> - let mode : Conduit_lwt_unix.server = - `TCP (`Port port) in - run (cctxt : #Client_context.wallet) ~hosts ?magic_bytes ~check_high_watermark ~require_auth mode + log + Tag.DSL.( + fun f -> + f "Accepting HTTP requests on port %d" + -% t event "accepting_http_requests" + -% s port_number port) + >>= fun () -> + let mode : Conduit_lwt_unix.server = `TCP (`Port port) in + run + (cctxt : #Client_context.wallet) + ~hosts + ?magic_bytes + ~check_high_watermark + ~require_auth + mode diff --git a/src/bin_signer/http_daemon.mli b/src/bin_signer/http_daemon.mli index e070436524e8b61abe0402a342f9f657757e9fde..5483d0987f25bf961abe43cd23b758ab3680a0eb 100644 --- a/src/bin_signer/http_daemon.mli +++ b/src/bin_signer/http_daemon.mli @@ -23,18 +23,22 @@ (* *) (*****************************************************************************) -val run_https: +val run_https : #Client_context.io_wallet -> - host:string -> port:int -> cert:string -> key:string -> - ?magic_bytes: int list -> - check_high_watermark: bool -> - require_auth: bool -> + host:string -> + port:int -> + cert:string -> + key:string -> + ?magic_bytes:int list -> + check_high_watermark:bool -> + require_auth:bool -> 'a tzresult Lwt.t -val run_http: +val run_http : #Client_context.io_wallet -> - host:string -> port:int -> - ?magic_bytes: int list -> - check_high_watermark: bool -> - require_auth: bool -> + host:string -> + port:int -> + ?magic_bytes:int list -> + check_high_watermark:bool -> + require_auth:bool -> 'a tzresult Lwt.t diff --git 
a/src/bin_signer/main_signer.ml b/src/bin_signer/main_signer.ml index d56678a55057fd758df6302b2c37cee071658f6c..135a9080d1212261c5ae5021ddcf8edc7e7821e8 100644 --- a/src/bin_signer/main_signer.ml +++ b/src/bin_signer/main_signer.ml @@ -26,245 +26,300 @@ let default_tcp_host = match Sys.getenv_opt "TEZOS_SIGNER_TCP_HOST" with - | None -> "localhost" - | Some host -> host + | None -> + "localhost" + | Some host -> + host let default_tcp_port = match Sys.getenv_opt "TEZOS_SIGNER_TCP_PORT" with - | None -> "7732" - | Some port -> port + | None -> + "7732" + | Some port -> + port let default_https_host = match Sys.getenv_opt "TEZOS_SIGNER_HTTPS_HOST" with - | None -> "localhost" - | Some host -> host + | None -> + "localhost" + | Some host -> + host let default_https_port = match Sys.getenv_opt "TEZOS_SIGNER_HTTPS_PORT" with - | None -> "443" - | Some port -> port + | None -> + "443" + | Some port -> + port let default_http_host = match Sys.getenv_opt "TEZOS_SIGNER_HTTP_HOST" with - | None -> "localhost" - | Some host -> host + | None -> + "localhost" + | Some host -> + host let default_http_port = match Sys.getenv_opt "TEZOS_SIGNER_HTTP_PORT" with - | None -> "6732" - | Some port -> port + | None -> + "6732" + | Some port -> + port open Clic let group = - { Clic.name = "signer" ; - title = "Commands specific to the signing daemon" } + {Clic.name = "signer"; title = "Commands specific to the signing daemon"} let magic_bytes_arg = Clic.arg - ~doc: "values allowed for the magic bytes, defaults to any" - ~short: 'M' - ~long: "magic-bytes" - ~placeholder: "0xHH,0xHH,..." + ~doc:"values allowed for the magic bytes, defaults to any" + ~short:'M' + ~long:"magic-bytes" + ~placeholder:"0xHH,0xHH,..." (Clic.parameter (fun _ s -> try return (List.map (fun s -> - let b = int_of_string s in - if b < 0 || b > 255 then raise Exit else b) + let b = int_of_string s in + if b < 0 || b > 255 then raise Exit else b) (String.split ',' s)) with _ -> - failwith "Bad format for magic bytes, a series of numbers \ - is expected, separated by commas.")) + failwith + "Bad format for magic bytes, a series of numbers is expected, \ + separated by commas.")) let high_watermark_switch = Clic.switch - ~doc: "high watermark restriction\n\ - Stores the highest level signed for blocks and endorsements \ - for each address, and forbids to sign a level that is \ - inferior or equal afterwards, except for the exact same \ - input data." - ~short: 'W' - ~long: "check-high-watermark" + ~doc: + "high watermark restriction\n\ + Stores the highest level signed for blocks and endorsements for each \ + address, and forbids to sign a level that is inferior or equal \ + afterwards, except for the exact same input data." + ~short:'W' + ~long:"check-high-watermark" () let pidfile_arg = arg - ~doc: "write process id in file" - ~short: 'P' - ~long: "pidfile" - ~placeholder: "filename" + ~doc:"write process id in file" + ~short:'P' + ~long:"pidfile" + ~placeholder:"filename" (parameter (fun _ s -> return s)) let init_signal () = - let handler name id = try + let handler name id = + try Format.eprintf "Received the %s signal, triggering shutdown.@." 
name ; exit id - with _ -> () in - ignore (Lwt_unix.on_signal Sys.sigint (handler "INT") : Lwt_unix.signal_handler_id) ; - ignore (Lwt_unix.on_signal Sys.sigterm (handler "TERM") : Lwt_unix.signal_handler_id) + with _ -> () + in + ignore + (Lwt_unix.on_signal Sys.sigint (handler "INT") : Lwt_unix.signal_handler_id) ; + ignore + ( Lwt_unix.on_signal Sys.sigterm (handler "TERM") + : Lwt_unix.signal_handler_id ) let may_setup_pidfile = function - | None -> return_unit + | None -> + return_unit | Some pidfile -> - trace (failure "Failed to create the pidfile: %s" pidfile) @@ - Lwt_lock_file.create ~unlink_on_exit:true pidfile + trace (failure "Failed to create the pidfile: %s" pidfile) + @@ Lwt_lock_file.create ~unlink_on_exit:true pidfile let commands base_dir require_auth : Client_context.full command list = - Tezos_signer_backends.Ledger.commands () @ - (Client_keys_commands.commands None @ - [ command ~group - ~desc: "Launch a signer daemon over a TCP socket." - (args5 - pidfile_arg - magic_bytes_arg - high_watermark_switch - (default_arg - ~doc: "listening address or host name" - ~short: 'a' - ~long: "address" - ~placeholder: "host|address" - ~default: default_tcp_host - (parameter (fun _ s -> return s))) - (default_arg - ~doc: "listening TCP port or service name" - ~short: 'p' - ~long: "port" - ~placeholder: "port number" - ~default: default_tcp_port - (parameter (fun _ s -> return s)))) - (prefixes [ "launch" ; "socket" ; "signer" ] @@ stop) - (fun (pidfile, magic_bytes, check_high_watermark, host, port) cctxt -> + Tezos_signer_backends.Ledger.commands () + @ Client_keys_commands.commands None + @ [ command + ~group + ~desc:"Launch a signer daemon over a TCP socket." + (args5 + pidfile_arg + magic_bytes_arg + high_watermark_switch + (default_arg + ~doc:"listening address or host name" + ~short:'a' + ~long:"address" + ~placeholder:"host|address" + ~default:default_tcp_host + (parameter (fun _ s -> return s))) + (default_arg + ~doc:"listening TCP port or service name" + ~short:'p' + ~long:"port" + ~placeholder:"port number" + ~default:default_tcp_port + (parameter (fun _ s -> return s)))) + (prefixes ["launch"; "socket"; "signer"] @@ stop) + (fun (pidfile, magic_bytes, check_high_watermark, host, port) cctxt -> init_signal () ; - may_setup_pidfile pidfile >>=? fun () -> - Tezos_signer_backends.Encrypted.decrypt_all cctxt >>=? fun () -> + may_setup_pidfile pidfile + >>=? fun () -> + Tezos_signer_backends.Encrypted.decrypt_all cctxt + >>=? fun () -> Socket_daemon.run - cctxt (Tcp (host, port, [AI_SOCKTYPE SOCK_STREAM])) - ?magic_bytes ~check_high_watermark ~require_auth >>=? fun _ -> - return_unit) ; - command ~group - ~desc: "Launch a signer daemon over a local Unix socket." - (args4 - pidfile_arg - magic_bytes_arg - high_watermark_switch - (default_arg - ~doc: "path to the local socket file" - ~short: 's' - ~long: "socket" - ~placeholder: "path" - ~default: (Filename.concat base_dir "socket") - (parameter (fun _ s -> return s)))) - (prefixes [ "launch" ; "local" ; "signer" ] @@ stop) - (fun (pidfile, magic_bytes, check_high_watermark, path) cctxt -> + cctxt + (Tcp (host, port, [AI_SOCKTYPE SOCK_STREAM])) + ?magic_bytes + ~check_high_watermark + ~require_auth + >>=? fun _ -> return_unit); + command + ~group + ~desc:"Launch a signer daemon over a local Unix socket." 
+ (args4 + pidfile_arg + magic_bytes_arg + high_watermark_switch + (default_arg + ~doc:"path to the local socket file" + ~short:'s' + ~long:"socket" + ~placeholder:"path" + ~default:(Filename.concat base_dir "socket") + (parameter (fun _ s -> return s)))) + (prefixes ["launch"; "local"; "signer"] @@ stop) + (fun (pidfile, magic_bytes, check_high_watermark, path) cctxt -> init_signal () ; - may_setup_pidfile pidfile >>=? fun () -> - Tezos_signer_backends.Encrypted.decrypt_all cctxt >>=? fun () -> + may_setup_pidfile pidfile + >>=? fun () -> + Tezos_signer_backends.Encrypted.decrypt_all cctxt + >>=? fun () -> Socket_daemon.run - cctxt (Unix path) ?magic_bytes ~check_high_watermark ~require_auth >>=? fun _ -> - return_unit) ; - command ~group - ~desc: "Launch a signer daemon over HTTP." - (args5 - pidfile_arg - magic_bytes_arg - high_watermark_switch - (default_arg - ~doc: "listening address or host name" - ~short: 'a' - ~long: "address" - ~placeholder: "host|address" - ~default: default_http_host - (parameter (fun _ s -> return s))) - (default_arg - ~doc: "listening HTTP port" - ~short: 'p' - ~long: "port" - ~placeholder: "port number" - ~default: default_http_port - (parameter - (fun _ x -> + cctxt + (Unix path) + ?magic_bytes + ~check_high_watermark + ~require_auth + >>=? fun _ -> return_unit); + command + ~group + ~desc:"Launch a signer daemon over HTTP." + (args5 + pidfile_arg + magic_bytes_arg + high_watermark_switch + (default_arg + ~doc:"listening address or host name" + ~short:'a' + ~long:"address" + ~placeholder:"host|address" + ~default:default_http_host + (parameter (fun _ s -> return s))) + (default_arg + ~doc:"listening HTTP port" + ~short:'p' + ~long:"port" + ~placeholder:"port number" + ~default:default_http_port + (parameter (fun _ x -> try return (int_of_string x) with Failure _ -> failwith "Invalid port %s" x)))) - (prefixes [ "launch" ; "http" ; "signer" ] @@ stop) - (fun (pidfile, magic_bytes, check_high_watermark, host, port) cctxt -> + (prefixes ["launch"; "http"; "signer"] @@ stop) + (fun (pidfile, magic_bytes, check_high_watermark, host, port) cctxt -> init_signal () ; - may_setup_pidfile pidfile >>=? fun () -> - Tezos_signer_backends.Encrypted.decrypt_all cctxt >>=? fun () -> - Http_daemon.run_http cctxt ~host ~port ?magic_bytes ~check_high_watermark ~require_auth) ; - command ~group - ~desc: "Launch a signer daemon over HTTPS." - (args5 - pidfile_arg - magic_bytes_arg - high_watermark_switch - (default_arg - ~doc: "listening address or host name" - ~short: 'a' - ~long: "address" - ~placeholder: "host|address" - ~default: default_https_host - (parameter (fun _ s -> return s))) - (default_arg - ~doc: "listening HTTPS port" - ~short: 'p' - ~long: "port" - ~placeholder: "port number" - ~default: default_https_port - (parameter - (fun _ x -> + may_setup_pidfile pidfile + >>=? fun () -> + Tezos_signer_backends.Encrypted.decrypt_all cctxt + >>=? fun () -> + Http_daemon.run_http + cctxt + ~host + ~port + ?magic_bytes + ~check_high_watermark + ~require_auth); + command + ~group + ~desc:"Launch a signer daemon over HTTPS." 
+ (args5 + pidfile_arg + magic_bytes_arg + high_watermark_switch + (default_arg + ~doc:"listening address or host name" + ~short:'a' + ~long:"address" + ~placeholder:"host|address" + ~default:default_https_host + (parameter (fun _ s -> return s))) + (default_arg + ~doc:"listening HTTPS port" + ~short:'p' + ~long:"port" + ~placeholder:"port number" + ~default:default_https_port + (parameter (fun _ x -> try return (int_of_string x) with Failure _ -> failwith "Invalid port %s" x)))) - (prefixes [ "launch" ; "https" ; "signer" ] @@ - param - ~name:"cert" - ~desc: "path to the TLS certificate" - (parameter (fun _ s -> - if not (Sys.file_exists s) then - failwith "No such TLS certificate file %s" s - else - return s)) @@ - param - ~name:"key" - ~desc: "path to the TLS key" - (parameter (fun _ s -> - if not (Sys.file_exists s) then - failwith "No such TLS key file %s" s - else - return s)) @@ stop) - (fun (pidfile, magic_bytes, check_high_watermark, host, port) cert key cctxt -> + ( prefixes ["launch"; "https"; "signer"] + @@ param + ~name:"cert" + ~desc:"path to the TLS certificate" + (parameter (fun _ s -> + if not (Sys.file_exists s) then + failwith "No such TLS certificate file %s" s + else return s)) + @@ param + ~name:"key" + ~desc:"path to the TLS key" + (parameter (fun _ s -> + if not (Sys.file_exists s) then + failwith "No such TLS key file %s" s + else return s)) + @@ stop ) + (fun (pidfile, magic_bytes, check_high_watermark, host, port) + cert + key + cctxt -> init_signal () ; - may_setup_pidfile pidfile >>=? fun () -> - Tezos_signer_backends.Encrypted.decrypt_all cctxt >>=? fun () -> - Http_daemon.run_https cctxt ~host ~port ~cert ~key ?magic_bytes ~check_high_watermark ~require_auth) ; - command ~group - ~desc: "Authorize a given public key to perform signing requests." - (args1 - (arg - ~doc: "an optional name for the key (defaults to the hash)" - ~short: 'N' - ~long: "name" - ~placeholder: "name" - (parameter (fun _ s -> return s)))) - (prefixes [ "add" ; "authorized" ; "key" ] @@ - param - ~name:"pk" - ~desc: "full public key (Base58 encoded)" - (parameter (fun _ s -> Lwt.return (Signature.Public_key.of_b58check s))) @@ - stop) - (fun name key cctxt -> + may_setup_pidfile pidfile + >>=? fun () -> + Tezos_signer_backends.Encrypted.decrypt_all cctxt + >>=? fun () -> + Http_daemon.run_https + cctxt + ~host + ~port + ~cert + ~key + ?magic_bytes + ~check_high_watermark + ~require_auth); + command + ~group + ~desc:"Authorize a given public key to perform signing requests." 
+ (args1 + (arg + ~doc:"an optional name for the key (defaults to the hash)" + ~short:'N' + ~long:"name" + ~placeholder:"name" + (parameter (fun _ s -> return s)))) + ( prefixes ["add"; "authorized"; "key"] + @@ param + ~name:"pk" + ~desc:"full public key (Base58 encoded)" + (parameter (fun _ s -> + Lwt.return (Signature.Public_key.of_b58check s))) + @@ stop ) + (fun name key cctxt -> let pkh = Signature.Public_key.hash key in - let name = match name with - | Some name -> name - | None -> Signature.Public_key_hash.to_b58check pkh in - Handler.Authorized_key.add ~force:false cctxt name key) - ]) + let name = + match name with + | Some name -> + name + | None -> + Signature.Public_key_hash.to_b58check pkh + in + Handler.Authorized_key.add ~force:false cctxt name key) ] let home = try Sys.getenv "HOME" with Not_found -> "/root" -let default_base_dir = - Filename.concat home ".tezos-signer" +let default_base_dir = Filename.concat home ".tezos-signer" let string_parameter () : (string, _) parameter = parameter (fun _ x -> return x) @@ -274,9 +329,10 @@ let base_dir_arg () = ~long:"base-dir" ~short:'d' ~placeholder:"path" - ~doc:("signer data directory\n\ - The directory where the Tezos client will store all its data.\n\ - By default: '" ^ default_base_dir ^"'.") + ~doc: + ( "signer data directory\n\ + The directory where the Tezos client will store all its data.\n\ + By default: '" ^ default_base_dir ^ "'." ) (string_parameter ()) let require_auth_arg () = @@ -295,29 +351,29 @@ let password_filename_arg () = (string_parameter ()) let global_options () = - args3 - (base_dir_arg ()) - (require_auth_arg ()) - (password_filename_arg ()) + args3 (base_dir_arg ()) (require_auth_arg ()) (password_filename_arg ()) -module C = -struct +module C = struct type t = string option * bool * string option + let global_options = global_options + let parse_config_args ctx argv = Clic.parse_global_options (global_options ()) ctx argv >>=? 
fun ((base_dir, require_auth, password_filename), remaining) -> return - ({Client_config.default_parsed_config_args with - base_dir ; - require_auth ; - password_filename - }, - remaining) + ( { Client_config.default_parsed_config_args with + base_dir; + require_auth; + password_filename }, + remaining ) let default_chain = Client_config.default_chain + let default_block = Client_config.default_block + let default_base_dir = default_base_dir + let other_registrations = None let clic_commands ~base_dir ~config_commands:_ ~builtin_commands:_ @@ -328,6 +384,4 @@ struct end let () = - Client_main_run.run - (module C) - ~select_commands:(fun _ _ -> return_nil) + Client_main_run.run (module C) ~select_commands:(fun _ _ -> return_nil) diff --git a/src/bin_signer/signer_logging.ml b/src/bin_signer/signer_logging.ml index ffecf903ce73fd39cc98fcddb9b4eeedd488953b..0cc564c357c4596e7355cf8f537d044c01cfd3e4 100644 --- a/src/bin_signer/signer_logging.ml +++ b/src/bin_signer/signer_logging.ml @@ -23,12 +23,19 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make_semantic - (struct let name = "client.signer" end) +include Internal_event.Legacy_logging.Make_semantic (struct + let name = "client.signer" +end) let host_name = Tag.def ~doc:"Host name" "host" Format.pp_print_text + let service_name = Tag.def ~doc:"Service name" "service" Format.pp_print_text + let port_number = Tag.def ~doc:"Port number" "port" Format.pp_print_int + let magic_byte = Tag.def ~doc:"Magic byte" "magic_byte" Format.pp_print_int + let num_bytes = Tag.def ~doc:"Number of bytes" "num_bytes" Format.pp_print_int -let unix_socket_path = Tag.def ~doc:"UNIX socket file path" "unix_socket" Format.pp_print_text + +let unix_socket_path = + Tag.def ~doc:"UNIX socket file path" "unix_socket" Format.pp_print_text diff --git a/src/bin_signer/signer_logging.mli b/src/bin_signer/signer_logging.mli index 0f90dc6b68f1a9c41022a7d054703ca9ed71b0f8..338524ef7f42d74fff7da62ccb6797bedbe268f3 100644 --- a/src/bin_signer/signer_logging.mli +++ b/src/bin_signer/signer_logging.mli @@ -25,9 +25,14 @@ include Internal_event.Legacy_logging.SEMLOG -val host_name: string Tag.def -val service_name: string Tag.def -val port_number: int Tag.def -val magic_byte: int Tag.def -val num_bytes: int Tag.def -val unix_socket_path: string Tag.def +val host_name : string Tag.def + +val service_name : string Tag.def + +val port_number : int Tag.def + +val magic_byte : int Tag.def + +val num_bytes : int Tag.def + +val unix_socket_path : string Tag.def diff --git a/src/bin_signer/socket_daemon.ml b/src/bin_signer/socket_daemon.ml index afdb90d580e1cdb9d2655c8d22650c460c0d0716..7af57f8ba51fb8becbbae2cd4b401db2daf66d84 100644 --- a/src/bin_signer/socket_daemon.ml +++ b/src/bin_signer/socket_daemon.ml @@ -29,84 +29,100 @@ open Signer_messages let log = lwt_log_notice let handle_client ?magic_bytes ~check_high_watermark ~require_auth cctxt fd = - Lwt_utils_unix.Socket.recv fd Request.encoding >>=? function + Lwt_utils_unix.Socket.recv fd Request.encoding + >>=? 
function | Sign req -> let encoding = result_encoding Sign.Response.encoding in - Handler.sign cctxt req ?magic_bytes ~check_high_watermark ~require_auth >>= fun res -> - Lwt_utils_unix.Socket.send fd encoding res >>= fun _ -> - Lwt_unix.close fd >>= fun () -> - return_unit + Handler.sign cctxt req ?magic_bytes ~check_high_watermark ~require_auth + >>= fun res -> + Lwt_utils_unix.Socket.send fd encoding res + >>= fun _ -> Lwt_unix.close fd >>= fun () -> return_unit | Deterministic_nonce req -> let encoding = result_encoding Deterministic_nonce.Response.encoding in - Handler.deterministic_nonce cctxt req ~require_auth >>= fun res -> - Lwt_utils_unix.Socket.send fd encoding res >>= fun _ -> - Lwt_unix.close fd >>= fun () -> - return_unit + Handler.deterministic_nonce cctxt req ~require_auth + >>= fun res -> + Lwt_utils_unix.Socket.send fd encoding res + >>= fun _ -> Lwt_unix.close fd >>= fun () -> return_unit | Deterministic_nonce_hash req -> - let encoding = result_encoding Deterministic_nonce_hash.Response.encoding in - Handler.deterministic_nonce_hash cctxt req ~require_auth >>= fun res -> - Lwt_utils_unix.Socket.send fd encoding res >>= fun _ -> - Lwt_unix.close fd >>= fun () -> - return_unit + let encoding = + result_encoding Deterministic_nonce_hash.Response.encoding + in + Handler.deterministic_nonce_hash cctxt req ~require_auth + >>= fun res -> + Lwt_utils_unix.Socket.send fd encoding res + >>= fun _ -> Lwt_unix.close fd >>= fun () -> return_unit | Supports_deterministic_nonces req -> - let encoding = result_encoding Supports_deterministic_nonces.Response.encoding in - Handler.supports_deterministic_nonces cctxt req >>= fun res -> - Lwt_utils_unix.Socket.send fd encoding res >>= fun _ -> - Lwt_unix.close fd >>= fun () -> - return_unit + let encoding = + result_encoding Supports_deterministic_nonces.Response.encoding + in + Handler.supports_deterministic_nonces cctxt req + >>= fun res -> + Lwt_utils_unix.Socket.send fd encoding res + >>= fun _ -> Lwt_unix.close fd >>= fun () -> return_unit | Public_key pkh -> let encoding = result_encoding Public_key.Response.encoding in - Handler.public_key cctxt pkh >>= fun res -> - Lwt_utils_unix.Socket.send fd encoding res >>= fun _ -> - Lwt_unix.close fd >>= fun () -> - return_unit + Handler.public_key cctxt pkh + >>= fun res -> + Lwt_utils_unix.Socket.send fd encoding res + >>= fun _ -> Lwt_unix.close fd >>= fun () -> return_unit | Authorized_keys -> let encoding = result_encoding Authorized_keys.Response.encoding in - begin if require_auth then - Handler.Authorized_key.load cctxt >>=? fun keys -> - return (Authorized_keys.Response.Authorized_keys - (keys |> List.split |> snd |> List.map Signature.Public_key.hash)) - else return Authorized_keys.Response.No_authentication - end >>= fun res -> - Lwt_utils_unix.Socket.send fd encoding res >>= fun _ -> - Lwt_unix.close fd >>= fun () -> - return_unit + ( if require_auth then + Handler.Authorized_key.load cctxt + >>=? 
fun keys -> + return + (Authorized_keys.Response.Authorized_keys + (keys |> List.split |> snd |> List.map Signature.Public_key.hash)) + else return Authorized_keys.Response.No_authentication ) + >>= fun res -> + Lwt_utils_unix.Socket.send fd encoding res + >>= fun _ -> Lwt_unix.close fd >>= fun () -> return_unit -let run (cctxt : #Client_context.wallet) path ?magic_bytes ~check_high_watermark ~require_auth = +let run (cctxt : #Client_context.wallet) path ?magic_bytes + ~check_high_watermark ~require_auth = let open Lwt_utils_unix.Socket in - begin - match path with - | Tcp (host, service, _opts) -> - log Tag.DSL.(fun f -> + ( match path with + | Tcp (host, service, _opts) -> + log + Tag.DSL.( + fun f -> f "Accepting TCP requests on %s:%s" -% t event "accepting_tcp_requests" - -% s host_name host - -% s service_name service) - | Unix path -> - ListLabels.iter Sys.[sigint ; sigterm] ~f:begin fun signal -> - Sys.set_signal signal (Signal_handle begin fun _ -> - Format.printf "Removing the local socket file and quitting.@." ; - Unix.unlink path ; - exit 0 - end) - end ; - log Tag.DSL.(fun f -> + -% s host_name host -% s service_name service) + | Unix path -> + ListLabels.iter + Sys.[sigint; sigterm] + ~f:(fun signal -> + Sys.set_signal + signal + (Signal_handle + (fun _ -> + Format.printf "Removing the local socket file and quitting.@." ; + Unix.unlink path ; + exit 0))) ; + log + Tag.DSL.( + fun f -> f "Accepting UNIX requests on %s" -% t event "accepting_unix_requests" - -% s unix_socket_path path) - end >>= fun () -> - bind path >>=? fun fds -> + -% s unix_socket_path path) ) + >>= fun () -> + bind path + >>=? fun fds -> let rec loop fd = - Lwt_unix.accept fd >>= fun (cfd, _) -> - Lwt.async begin fun () -> - protect - ~on_error:(function - | [Exn End_of_file] -> return_unit - | errs -> Lwt.return_error errs) - (fun () -> - handle_client ?magic_bytes ~check_high_watermark ~require_auth cctxt cfd) - end ; + Lwt_unix.accept fd + >>= fun (cfd, _) -> + Lwt.async (fun () -> + protect + ~on_error:(function + | [Exn End_of_file] -> return_unit | errs -> Lwt.return_error errs) + (fun () -> + handle_client + ?magic_bytes + ~check_high_watermark + ~require_auth + cctxt + cfd)) ; loop fd in - Lwt_list.map_p loop fds >>= - return + Lwt_list.map_p loop fds >>= return diff --git a/src/bin_signer/socket_daemon.mli b/src/bin_signer/socket_daemon.mli index c54c2374cb9846cf3c6b0652efe6f404bf07f3bd..8d4e8acd4cfa57bc07ef7d6d79797b12005bb9b4 100644 --- a/src/bin_signer/socket_daemon.mli +++ b/src/bin_signer/socket_daemon.mli @@ -23,10 +23,10 @@ (* *) (*****************************************************************************) -val run: +val run : #Client_context.io_wallet -> Lwt_utils_unix.Socket.addr -> - ?magic_bytes: int list -> - check_high_watermark: bool -> - require_auth: bool -> + ?magic_bytes:int list -> + check_high_watermark:bool -> + require_auth:bool -> 'a list tzresult Lwt.t diff --git a/src/lib_base/.ocamlformat b/src/lib_base/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_base/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_base/base_logging.ml b/src/lib_base/base_logging.ml index 
51098430177e89e21b85e1edab3c16f82116d06c..e681eb4a152453559ea8e20e859c742fb4bf98d3 100644 --- a/src/lib_base/base_logging.ml +++ b/src/lib_base/base_logging.ml @@ -23,13 +23,23 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make_semantic(struct let name = "base" end) +include Internal_event.Legacy_logging.Make_semantic (struct + let name = "base" +end) let pp_exn_trace ppf backtrace = if String.length backtrace <> 0 then - Format.fprintf ppf + Format.fprintf + ppf "@,Backtrace:@, @[<h>%a@]" - Format.pp_print_text backtrace + Format.pp_print_text + backtrace -let pid = Tag.def ~doc:"unix process ID where problem occurred" "pid" Format.pp_print_int -let exn_trace = Tag.def ~doc:"backtrace from native Ocaml exception" "exn_trace" pp_exn_trace +let pid = + Tag.def + ~doc:"unix process ID where problem occurred" + "pid" + Format.pp_print_int + +let exn_trace = + Tag.def ~doc:"backtrace from native Ocaml exception" "exn_trace" pp_exn_trace diff --git a/src/lib_base/base_logging.mli b/src/lib_base/base_logging.mli index 2b8b5bd4127a0f7493e2b449cbe204be2fe765f6..9543d47b1656e55e6ff23b481916e05389137c11 100644 --- a/src/lib_base/base_logging.mli +++ b/src/lib_base/base_logging.mli @@ -26,4 +26,5 @@ include Internal_event.Legacy_logging.SEMLOG val pid : int Tag.def + val exn_trace : string Tag.def diff --git a/src/lib_base/block_header.ml b/src/lib_base/block_header.ml index a46804838e5a094ae36001039c3bec62cf537d34..2a240572d726aff29c65a6009eae8dc4669fcafe 100644 --- a/src/lib_base/block_header.ml +++ b/src/lib_base/block_header.ml @@ -24,109 +24,143 @@ (*****************************************************************************) type shell_header = { - level: Int32.t ; - proto_level: int ; (* uint8 *) - predecessor: Block_hash.t ; - timestamp: Time.Protocol.t ; - validation_passes: int ; (* uint8 *) - operations_hash: Operation_list_list_hash.t ; - fitness: Fitness.t ; - context: Context_hash.t ; + level : Int32.t; + proto_level : int; + (* uint8 *) + predecessor : Block_hash.t; + timestamp : Time.Protocol.t; + validation_passes : int; + (* uint8 *) + operations_hash : Operation_list_list_hash.t; + fitness : Fitness.t; + context : Context_hash.t } let shell_header_encoding = let open Data_encoding in - def "block_header.shell" @@ - conv - (fun { level ; proto_level ; predecessor ; - timestamp ; validation_passes ; operations_hash ; fitness ; - context } -> - (level, proto_level, predecessor, - timestamp, validation_passes, operations_hash, fitness, - context)) - (fun (level, proto_level, predecessor, - timestamp, validation_passes, operations_hash, fitness, - context) -> - { level ; proto_level ; predecessor ; - timestamp ; validation_passes ; operations_hash ; fitness ; - context }) - (obj8 - (req "level" int32) - (req "proto" uint8) - (req "predecessor" Block_hash.encoding) - (req "timestamp" Time.Protocol.encoding) - (req "validation_pass" uint8) - (req "operations_hash" Operation_list_list_hash.encoding) - (req "fitness" Fitness.encoding) - (req "context" Context_hash.encoding)) - -type t = { - shell: shell_header ; - protocol_data: MBytes.t ; -} + def "block_header.shell" + @@ conv + (fun { level; + proto_level; + predecessor; + timestamp; + validation_passes; + operations_hash; + fitness; + context } -> + ( level, + proto_level, + predecessor, + timestamp, + validation_passes, + operations_hash, + fitness, + context )) + (fun ( level, + proto_level, + predecessor, + timestamp, + validation_passes, + 
operations_hash, + fitness, + context ) -> + { level; + proto_level; + predecessor; + timestamp; + validation_passes; + operations_hash; + fitness; + context }) + (obj8 + (req "level" int32) + (req "proto" uint8) + (req "predecessor" Block_hash.encoding) + (req "timestamp" Time.Protocol.encoding) + (req "validation_pass" uint8) + (req "operations_hash" Operation_list_list_hash.encoding) + (req "fitness" Fitness.encoding) + (req "context" Context_hash.encoding)) + +type t = {shell : shell_header; protocol_data : MBytes.t} include Compare.Make (struct - type nonrec t = t - let compare b1 b2 = - let (>>) x y = if x = 0 then y () else x in - let rec list compare xs ys = - match xs, ys with - | [], [] -> 0 - | _ :: _, [] -> -1 - | [], _ :: _ -> 1 - | x :: xs, y :: ys -> - compare x y >> fun () -> list compare xs ys in - Block_hash.compare b1.shell.predecessor b2.shell.predecessor >> fun () -> - compare b1.protocol_data b2.protocol_data >> fun () -> - Operation_list_list_hash.compare - b1.shell.operations_hash b2.shell.operations_hash >> fun () -> - Time.Protocol.compare b1.shell.timestamp b2.shell.timestamp >> fun () -> - list compare b1.shell.fitness b2.shell.fitness - end) + type nonrec t = t + + let compare b1 b2 = + let ( >> ) x y = if x = 0 then y () else x in + let rec list compare xs ys = + match (xs, ys) with + | ([], []) -> + 0 + | (_ :: _, []) -> + -1 + | ([], _ :: _) -> + 1 + | (x :: xs, y :: ys) -> + compare x y >> fun () -> list compare xs ys + in + Block_hash.compare b1.shell.predecessor b2.shell.predecessor + >> fun () -> + compare b1.protocol_data b2.protocol_data + >> fun () -> + Operation_list_list_hash.compare + b1.shell.operations_hash + b2.shell.operations_hash + >> fun () -> + Time.Protocol.compare b1.shell.timestamp b2.shell.timestamp + >> fun () -> list compare b1.shell.fitness b2.shell.fitness +end) let encoding = let open Data_encoding in conv - (fun { shell ; protocol_data } -> (shell, protocol_data)) - (fun (shell, protocol_data) -> { shell ; protocol_data }) + (fun {shell; protocol_data} -> (shell, protocol_data)) + (fun (shell, protocol_data) -> {shell; protocol_data}) (merge_objs shell_header_encoding (obj1 (req "protocol_data" Variable.bytes))) let bounded_encoding ?max_size () = match max_size with - | None -> encoding - | Some max_size -> Data_encoding.check_size max_size encoding + | None -> + encoding + | Some max_size -> + Data_encoding.check_size max_size encoding let pp ppf op = - Data_encoding.Json.pp ppf - (Data_encoding.Json.construct encoding op) + Data_encoding.Json.pp ppf (Data_encoding.Json.construct encoding op) let to_bytes v = Data_encoding.Binary.to_bytes_exn encoding v + let of_bytes b = Data_encoding.Binary.of_bytes encoding b + let of_bytes_exn b = Data_encoding.Binary.of_bytes_exn encoding b -let to_b58check v = - Base58.safe_encode (MBytes.to_string (to_bytes v)) +let to_b58check v = Base58.safe_encode (MBytes.to_string (to_bytes v)) let of_b58check b = - Option.apply (Base58.safe_decode b) ~f: begin fun s -> - Data_encoding.Binary.of_bytes encoding (MBytes.of_string s) - end + Option.apply (Base58.safe_decode b) ~f:(fun s -> + Data_encoding.Binary.of_bytes encoding (MBytes.of_string s)) let hash block = Block_hash.hash_bytes [to_bytes block] + let hash_raw bytes = Block_hash.hash_bytes [bytes] -let forced_protocol_upgrades : (Int32.t * Protocol_hash.t) list = [ - (* nothing *) -] +let forced_protocol_upgrades : (Int32.t * Protocol_hash.t) list = + [ (* nothing *) ] + +module LevelMap = Map.Make (struct + type t = Int32.t + + let compare = 
Int32.compare +end) -module LevelMap = - Map.Make(struct type t = Int32.t let compare = Int32.compare end) let get_forced_protocol_upgrade = let table = List.fold_left (fun map (level, hash) -> LevelMap.add level hash map) LevelMap.empty - forced_protocol_upgrades in + forced_protocol_upgrades + in fun ~level -> LevelMap.find_opt level table diff --git a/src/lib_base/block_header.mli b/src/lib_base/block_header.mli index 4ea95b295b1094e523b9cfd004f2bfa301e1e365..56a52f3e751a4a1357bc9508b90d823d316b9209 100644 --- a/src/lib_base/block_header.mli +++ b/src/lib_base/block_header.mli @@ -24,42 +24,40 @@ (*****************************************************************************) type shell_header = { - level: Int32.t ; - (** Height of the block, from the genesis block. *) - proto_level: int ; (* uint8 *) - (** Number of protocol changes since genesis modulo 256. *) - predecessor: Block_hash.t ; - (** Hash of the preceding block. *) - timestamp: Time.Protocol.t ; - (** Timestamp at which the block is claimed to have been created. *) - validation_passes: int ; (* uint8 *) - (** Number of validation passes (also number of lists of operations). *) - operations_hash: Operation_list_list_hash.t ; - (** Hash of the list of lists (actually root hashes of merkle trees) + level : Int32.t; (** Height of the block, from the genesis block. *) + proto_level : int; + (* uint8 *) + (** Number of protocol changes since genesis modulo 256. *) + predecessor : Block_hash.t; (** Hash of the preceding block. *) + timestamp : Time.Protocol.t; + (** Timestamp at which the block is claimed to have been created. *) + validation_passes : int; + (* uint8 *) + (** Number of validation passes (also number of lists of operations). *) + operations_hash : Operation_list_list_hash.t; + (** Hash of the list of lists (actually root hashes of merkle trees) of operations included in the block. There is one list of operations per validation pass. *) - fitness: Fitness.t ; - (** A sequence of sequences of unsigned bytes, ordered by length and + fitness : Fitness.t; + (** A sequence of sequences of unsigned bytes, ordered by length and then lexicographically. It represents the claimed fitness of the chain ending in this block. *) - context: Context_hash.t ; - (** Hash of the state of the context after application of this block. *) + context : Context_hash.t + (** Hash of the state of the context after application of this block. 
*) } -val shell_header_encoding: shell_header Data_encoding.t +val shell_header_encoding : shell_header Data_encoding.t -type t = { - shell: shell_header ; - protocol_data: MBytes.t ; -} +type t = {shell : shell_header; protocol_data : MBytes.t} + +include S.HASHABLE with type t := t and type hash := Block_hash.t + +val of_bytes_exn : MBytes.t -> t -include S.HASHABLE with type t := t - and type hash := Block_hash.t -val of_bytes_exn: MBytes.t -> t +val to_b58check : t -> string -val to_b58check: t -> string -val of_b58check: string -> t option +val of_b58check : string -> t option -val bounded_encoding: ?max_size:int -> unit -> t Data_encoding.t +val bounded_encoding : ?max_size:int -> unit -> t Data_encoding.t -val get_forced_protocol_upgrade: level:Int32.t -> Protocol_hash.t option +val get_forced_protocol_upgrade : level:Int32.t -> Protocol_hash.t option diff --git a/src/lib_base/block_locator.ml b/src/lib_base/block_locator.ml index 0fa8326a87ca689e39493cf17cad53ce0c6c4bdb..19711278d6ed1ce5c00b47aa53d22bd06888592d 100644 --- a/src/lib_base/block_locator.ml +++ b/src/lib_base/block_locator.ml @@ -26,6 +26,7 @@ open Lwt.Infix type t = raw + and raw = Block_header.t * Block_hash.t list let raw x = x @@ -34,45 +35,56 @@ let pp ppf (hd, h_lst) = let repeats = 10 in let coef = 2 in (* list of hashes *) - let rec pp_hash_list ppf (h_lst , acc , d , r) = + let rec pp_hash_list ppf (h_lst, acc, d, r) = match h_lst with | [] -> Format.fprintf ppf "" | hd :: tl -> let new_d = if r > 1 then d else d * coef in let new_r = if r > 1 then r - 1 else repeats in - Format.fprintf ppf "%a (%i)\n%a" - Block_hash.pp hd acc pp_hash_list (tl , acc - d , new_d , new_r) in - Format.fprintf ppf "%a (head)\n%a" - Block_hash.pp (Block_header.hash hd) - pp_hash_list (h_lst , -1, 1, repeats - 1) + Format.fprintf + ppf + "%a (%i)\n%a" + Block_hash.pp + hd + acc + pp_hash_list + (tl, acc - d, new_d, new_r) + in + Format.fprintf + ppf + "%a (head)\n%a" + Block_hash.pp + (Block_header.hash hd) + pp_hash_list + (h_lst, -1, 1, repeats - 1) let pp_short ppf (hd, h_lst) = - Format.fprintf ppf "head: %a, %d predecessors" - Block_hash.pp (Block_header.hash hd) + Format.fprintf + ppf + "head: %a, %d predecessors" + Block_hash.pp + (Block_header.hash hd) (List.length h_lst) let encoding = let open Data_encoding in (* TODO add a [description] *) - (obj2 - (req "current_head" (dynamic_size Block_header.encoding)) - (req "history" (Variable.list Block_hash.encoding))) + obj2 + (req "current_head" (dynamic_size Block_header.encoding)) + (req "history" (Variable.list Block_hash.encoding)) let bounded_encoding ?max_header_size ?max_length () = let open Data_encoding in (* TODO add a [description] *) - (obj2 - (req "current_head" - (dynamic_size - (Block_header.bounded_encoding ?max_size:max_header_size ()))) - (req "history" (Variable.list ?max_length Block_hash.encoding))) + obj2 + (req + "current_head" + (dynamic_size + (Block_header.bounded_encoding ?max_size:max_header_size ()))) + (req "history" (Variable.list ?max_length Block_hash.encoding)) - -type seed = { - sender_id: P2p_peer.Id.t ; - receiver_id: P2p_peer.Id.t ; -} +type seed = {sender_id : P2p_peer.Id.t; receiver_id : P2p_peer.Id.t} (* Random generator for locator steps. @@ -84,133 +96,125 @@ type seed = { The sequence is deterministic for a given triple of sender, receiver and block hash. 
*) module Step : sig - type state - val init: seed -> Block_hash.t -> state - val next: state -> int * state -end = struct + val init : seed -> Block_hash.t -> state + val next : state -> int * state +end = struct type state = Int32.t * int * MBytes.t let init seed head = let open Hacl.Hash in let st = SHA256.init () in - List.iter (SHA256.update st) [ - P2p_peer.Id.to_bytes seed.sender_id ; - P2p_peer.Id.to_bytes seed.receiver_id ; - Block_hash.to_bytes head ] ; + List.iter + (SHA256.update st) + [ P2p_peer.Id.to_bytes seed.sender_id; + P2p_peer.Id.to_bytes seed.receiver_id; + Block_hash.to_bytes head ] ; (1l, 9, SHA256.finish st) let draw seed n = - Int32.rem (MBytes.get_int32 seed 0) n, - Hacl.Hash.SHA256.digest seed + (Int32.rem (MBytes.get_int32 seed 0) n, Hacl.Hash.SHA256.digest seed) let next (step, counter, seed) = - let random_gap, seed = - if step <= 1l then - 0l, seed - else - draw seed (Int32.succ (Int32.div step 2l)) in + let (random_gap, seed) = + if step <= 1l then (0l, seed) + else draw seed (Int32.succ (Int32.div step 2l)) + in let new_state = - if counter = 0 then - (Int32.mul step 2l, 9, seed) - else - (step, counter - 1, seed) in - Int32.to_int (Int32.sub step random_gap), new_state - + if counter = 0 then (Int32.mul step 2l, 9, seed) + else (step, counter - 1, seed) + in + (Int32.to_int (Int32.sub step random_gap), new_state) end let estimated_length seed (head, hist) = let rec loop acc state = function - | [] -> acc + | [] -> + acc | _ :: hist -> - let step, state = Step.next state in - loop (acc + step) state hist in + let (step, state) = Step.next state in + loop (acc + step) state hist + in let state = Step.init seed (Block_header.hash head) in - let step, state = Step.next state in + let (step, state) = Step.next state in loop step state hist let fold ~f ~init (head, hist) seed = let rec loop state acc = function - | [] | [_] -> acc + | [] | [_] -> + acc | block :: (pred :: rem as hist) -> - let step, state = Step.next state in + let (step, state) = Step.next state in let acc = f acc ~block ~pred ~step ~strict_step:(rem <> []) in - loop state acc hist in + loop state acc hist + in let head = Block_header.hash head in let state = Step.init seed head in loop state init (head :: hist) type step = { - block: Block_hash.t ; - predecessor: Block_hash.t ; - step: int ; - strict_step: bool ; + block : Block_hash.t; + predecessor : Block_hash.t; + step : int; + strict_step : bool } -let pp_step ppf step = Format.fprintf ppf "%d%s" step.step (if step.strict_step then "" else " max") +let pp_step ppf step = + Format.fprintf ppf "%d%s" step.step (if step.strict_step then "" else " max") let to_steps seed locator = - fold locator seed - ~init:[] - ~f: begin fun acc ~block ~pred ~step ~strict_step -> - { block ; predecessor = pred ; step ; strict_step } :: acc - end + fold locator seed ~init:[] ~f:(fun acc ~block ~pred ~step ~strict_step -> + {block; predecessor = pred; step; strict_step} :: acc) let fold_truncate ~f ~init ~save_point ~limit (head, hist) seed = let rec loop state step_sum acc = function - | [] | [_] -> acc + | [] | [_] -> + acc | block :: (pred :: rem as hist) -> - let step, state = Step.next state in + let (step, state) = Step.next state in let new_step_sum = step + step_sum in if new_step_sum >= limit then f acc ~block ~pred:save_point ~step ~strict_step:false else let acc = f acc ~block ~pred ~step ~strict_step:(rem <> []) in - loop state new_step_sum acc hist in + loop state new_step_sum acc hist + in let hash = Block_header.hash head in let initial_state = 
Step.init seed hash in loop initial_state 0 init (hash :: hist) let to_steps_truncate ~limit ~save_point seed locator = - fold_truncate locator seed - ~init:[] ~save_point ~limit - ~f: begin fun acc ~block ~pred ~step ~strict_step -> - { block ; predecessor = pred ; step ; strict_step } :: acc - end - -let compute - ~get_predecessor - ~caboose - ~size - block_hash - header - seed = + fold_truncate + locator + seed + ~init:[] + ~save_point + ~limit + ~f:(fun acc ~block ~pred ~step ~strict_step -> + {block; predecessor = pred; step; strict_step} :: acc) + +let compute ~get_predecessor ~caboose ~size block_hash header seed = let rec loop acc size state current_block_hash = - if size = 0 then - Lwt.return acc + if size = 0 then Lwt.return acc else let (step, state) = Step.next state in - get_predecessor current_block_hash step >>= function + get_predecessor current_block_hash step + >>= function | None -> - if Block_hash.equal caboose current_block_hash then - Lwt.return acc - else - Lwt.return (caboose :: acc) + if Block_hash.equal caboose current_block_hash then Lwt.return acc + else Lwt.return (caboose :: acc) | Some predecessor -> - loop (predecessor :: acc) (pred size) state predecessor in - if size <= 0 then - Lwt.return (header, []) + loop (predecessor :: acc) (pred size) state predecessor + in + if size <= 0 then Lwt.return (header, []) else let initial_state = Step.init seed block_hash in - loop [] size initial_state block_hash >>= fun hist -> - Lwt.return (header, List.rev hist) + loop [] size initial_state block_hash + >>= fun hist -> Lwt.return (header, List.rev hist) -type validity = - | Unknown - | Known_valid - | Known_invalid +type validity = Unknown | Known_valid | Known_invalid let unknown_prefix ~is_known locator = let (head, history) = locator in @@ -218,16 +222,18 @@ let unknown_prefix ~is_known locator = match hist with | [] -> Lwt.return (Unknown, locator) - | h :: t -> - is_known h >>= function + | h :: t -> ( + is_known h + >>= function | Known_valid -> Lwt.return (Known_valid, (head, List.rev (h :: acc))) | Known_invalid -> Lwt.return (Known_invalid, (head, List.rev (h :: acc))) | Unknown -> - loop t (h :: acc) + loop t (h :: acc) ) in - is_known (Block_header.hash head) >>= function + is_known (Block_header.hash head) + >>= function | Known_valid -> Lwt.return (Known_valid, (head, [])) | Known_invalid -> diff --git a/src/lib_base/block_locator.mli b/src/lib_base/block_locator.mli index a3015e8cf663c967224e9118140d57a33dcebe68..6c7fddfb383f111e75647b76272d71463518086d 100644 --- a/src/lib_base/block_locator.mli +++ b/src/lib_base/block_locator.mli @@ -29,24 +29,23 @@ type t = private raw (** Non private version of Block_store_locator.t for coercions. *) and raw = Block_header.t * Block_hash.t list -val raw: t -> raw -val pp: Format.formatter -> t -> unit -val pp_short: Format.formatter -> t -> unit -val encoding: t Data_encoding.t -val bounded_encoding: - ?max_header_size:int -> - ?max_length:int -> - unit -> t Data_encoding.t +val raw : t -> raw + +val pp : Format.formatter -> t -> unit + +val pp_short : Format.formatter -> t -> unit + +val encoding : t Data_encoding.t + +val bounded_encoding : + ?max_header_size:int -> ?max_length:int -> unit -> t Data_encoding.t (** Argument to the seed used to randomize the locator. 
*) -type seed = { - sender_id: P2p_peer.Id.t ; - receiver_id: P2p_peer.Id.t -} +type seed = {sender_id : P2p_peer.Id.t; receiver_id : P2p_peer.Id.t} (** [estimated_length seed locator] estimate the length of the chain represented by [locator] using [seed]. *) -val estimated_length: seed -> t -> int +val estimated_length : seed -> t -> int (** [compute ~get_predecessor ~caboose ~size block_hash header seed] returns a sparse block locator whose header is the given [header] and whose @@ -54,42 +53,43 @@ val estimated_length: seed -> t -> int the [block_hash], adding the [caboose] at the end of the sparse block. The sparse block locator contains at most [size + 1] elements, including the caboose. *) -val compute: - get_predecessor: (Block_hash.t -> int -> Block_hash.t option Lwt.t) -> - caboose:Block_hash.t -> size:int -> Block_hash.t -> Block_header.t -> - seed -> t Lwt.t +val compute : + get_predecessor:(Block_hash.t -> int -> Block_hash.t option Lwt.t) -> + caboose:Block_hash.t -> + size:int -> + Block_hash.t -> + Block_header.t -> + seed -> + t Lwt.t (** A 'step' in a locator is a couple of consecutive hashes in the locator, and the expected difference of level between the two blocks (or an upper bounds when [strict_step = false]). *) type step = { - block: Block_hash.t ; - predecessor: Block_hash.t ; - step: int ; - strict_step: bool ; + block : Block_hash.t; + predecessor : Block_hash.t; + step : int; + strict_step : bool } -val pp_step: Format.formatter -> step -> unit +val pp_step : Format.formatter -> step -> unit (** [to_steps seed t] builds all the 'steps' composing the locator using the given [seed], starting with the oldest one (typically the predecessor of the first step will be the `caboose`). All steps contains [strict_step = true], except the oldest one. *) -val to_steps: seed -> t -> step list +val to_steps : seed -> t -> step list (** [to_steps_truncate ~limit ~save_point seed t] behaves as [to_steps] except that when the sum of all the steps already done, and the steps to do in order to reach the next block is superior to [limit], we return a truncated list of steps, setting the [predecessor] of the last step as [save_point] and its field [strict] to [false]. *) -val to_steps_truncate: limit:int -> save_point:Block_hash.t -> - seed -> t -> step list +val to_steps_truncate : + limit:int -> save_point:Block_hash.t -> seed -> t -> step list (** A block can either be known valid, invalid or unknown. *) -type validity = - | Unknown - | Known_valid - | Known_invalid +type validity = Unknown | Known_valid | Known_invalid (** [unknown_prefix ~is_known t] either returns : @@ -103,6 +103,5 @@ type validity = - [(Unknown, (h, hist))] when no block is known valid nor invalid (w.r.t [is_known]), where [(h, hist)] is the given [locator]. 
*) -val unknown_prefix: - is_known:(Block_hash.t -> validity Lwt.t) -> - t -> (validity * t) Lwt.t +val unknown_prefix : + is_known:(Block_hash.t -> validity Lwt.t) -> t -> (validity * t) Lwt.t diff --git a/src/lib_base/current_git_info.ml b/src/lib_base/current_git_info.ml index f1e1eba70a3134ec83e2bd4d6f184249d9cf9462..63aedcf66bd90ec1630d1bb23a10c67e239ca195 100644 --- a/src/lib_base/current_git_info.ml +++ b/src/lib_base/current_git_info.ml @@ -26,20 +26,23 @@ let raw_commit_hash = "$Format:%H$" let commit_hash = - if String.equal raw_commit_hash ("$Format:"^"%H$"(*trick to avoid git-subst*)) + if + String.equal + raw_commit_hash + ("$Format:" ^ "%H$" (*trick to avoid git-subst*)) then Generated_git_info.commit_hash else raw_commit_hash let raw_abbreviated_commit_hash = "$Format:%h$" let abbreviated_commit_hash = - if String.equal raw_abbreviated_commit_hash ("$Format:"^"%h$") - then Generated_git_info.abbreviated_commit_hash + if String.equal raw_abbreviated_commit_hash ("$Format:" ^ "%h$") then + Generated_git_info.abbreviated_commit_hash else raw_abbreviated_commit_hash let raw_committer_date = "$Format:%ci$" let committer_date = - if String.equal raw_committer_date ("$Format:"^"%ci$") - then Generated_git_info.committer_date + if String.equal raw_committer_date ("$Format:" ^ "%ci$") then + Generated_git_info.committer_date else raw_committer_date diff --git a/src/lib_base/current_git_info.mli b/src/lib_base/current_git_info.mli index fc4f9eff6500d4343c27506be35a68e0f6bed7ac..9711d43be878de318689e791013be5af930adefe 100644 --- a/src/lib_base/current_git_info.mli +++ b/src/lib_base/current_git_info.mli @@ -24,5 +24,7 @@ (*****************************************************************************) val commit_hash : string + val abbreviated_commit_hash : string + val committer_date : string diff --git a/src/lib_base/distributed_db_version.ml b/src/lib_base/distributed_db_version.ml index 58b08574be7470b5d0284b0b2dbda3aff136daf2..d821dff767292a33ae87da6ee291fd53669d9c87 100644 --- a/src/lib_base/distributed_db_version.ml +++ b/src/lib_base/distributed_db_version.ml @@ -29,15 +29,17 @@ type name = string let pp_name = Format.pp_print_string + let name_encoding = Data_encoding.string let chain_name = "TEZOS" + let sandboxed_chain_name = "SANDBOXED_TEZOS" type t = int let pp = Format.pp_print_int + let encoding = Data_encoding.uint16 let zero = 0 - diff --git a/src/lib_base/distributed_db_version.mli b/src/lib_base/distributed_db_version.mli index 854943de27b690d2e01f5e2002db2099f9b528bb..a015fc7545e143b1e2f96c9a4d653f91ef1a3d45 100644 --- a/src/lib_base/distributed_db_version.mli +++ b/src/lib_base/distributed_db_version.mli @@ -28,16 +28,19 @@ type name = private string -val pp_name: Format.formatter -> name -> unit -val name_encoding: name Data_encoding.t +val pp_name : Format.formatter -> name -> unit -val chain_name: name -val sandboxed_chain_name: name +val name_encoding : name Data_encoding.t + +val chain_name : name + +val sandboxed_chain_name : name (** An abstract version number for the high-level distributed_db messages. 
*) type t = private int -val pp: Format.formatter -> t -> unit -val encoding: t Data_encoding.t +val pp : Format.formatter -> t -> unit + +val encoding : t Data_encoding.t -val zero: t +val zero : t diff --git a/src/lib_base/fitness.ml b/src/lib_base/fitness.ml index 223e1df990eae6c54db05095f30006c7568822b9..3f6196471702cbabd14e6ad1a6e98e90025829f6 100644 --- a/src/lib_base/fitness.ml +++ b/src/lib_base/fitness.ml @@ -25,61 +25,61 @@ type t = MBytes.t list -include Compare.Make(struct +include Compare.Make (struct + type nonrec t = t - type nonrec t = t - - (* Fitness comparison: + (* Fitness comparison: - shortest lists are smaller ; - lexicographical order for lists of the same length. *) - let compare_bytes b1 b2 = - let len1 = MBytes.length b1 in - let len2 = MBytes.length b2 in - let c = compare len1 len2 in - if c <> 0 - then c - else - let rec compare_byte b1 b2 pos len = - if pos = len - then 0 - else - let c = compare (MBytes.get_char b1 pos) (MBytes.get_char b2 pos) in - if c <> 0 - then c - else compare_byte b1 b2 (pos+1) len - in - compare_byte b1 b2 0 len1 + let compare_bytes b1 b2 = + let len1 = MBytes.length b1 in + let len2 = MBytes.length b2 in + let c = compare len1 len2 in + if c <> 0 then c + else + let rec compare_byte b1 b2 pos len = + if pos = len then 0 + else + let c = compare (MBytes.get_char b1 pos) (MBytes.get_char b2 pos) in + if c <> 0 then c else compare_byte b1 b2 (pos + 1) len + in + compare_byte b1 b2 0 len1 - let compare f1 f2 = - let rec compare_rec f1 f2 = match f1, f2 with - | [], [] -> 0 - | i1 :: f1, i2 :: f2 -> - let i = compare_bytes i1 i2 in - if i = 0 then compare_rec f1 f2 else i - | _, _ -> assert false in - let len = compare (List.length f1) (List.length f2) in - if len = 0 then compare_rec f1 f2 else len - end) + let compare f1 f2 = + let rec compare_rec f1 f2 = + match (f1, f2) with + | ([], []) -> + 0 + | (i1 :: f1, i2 :: f2) -> + let i = compare_bytes i1 i2 in + if i = 0 then compare_rec f1 f2 else i + | (_, _) -> + assert false + in + let len = compare (List.length f1) (List.length f2) in + if len = 0 then compare_rec f1 f2 else len +end) let rec pp fmt = function - | [] -> () - | [f] -> Format.fprintf fmt "%a" Hex.pp (MBytes.to_hex f) - | f1 :: f -> Format.fprintf fmt "%a::%a" Hex.pp (MBytes.to_hex f1) pp f + | [] -> + () + | [f] -> + Format.fprintf fmt "%a" Hex.pp (MBytes.to_hex f) + | f1 :: f -> + Format.fprintf fmt "%a::%a" Hex.pp (MBytes.to_hex f1) pp f let encoding = let open Data_encoding in - def "fitness" - ~title: "Block fitness" + def + "fitness" + ~title:"Block fitness" ~description: - "The fitness, or score, of a block, that allow the Tezos to \ - decide which chain is the best. A fitness value is a list of \ - byte sequences. They are compared as follows: shortest lists \ - are smaller; lists of the same length are compared according to \ - the lexicographical order." @@ - splitted - ~json: (list bytes) - ~binary: - (list (def "fitness.elem" bytes)) + "The fitness, or score, of a block, that allow the Tezos to decide \ + which chain is the best. A fitness value is a list of byte sequences. \ + They are compared as follows: shortest lists are smaller; lists of the \ + same length are compared according to the lexicographical order." 
+ @@ splitted ~json:(list bytes) ~binary:(list (def "fitness.elem" bytes)) let to_bytes v = Data_encoding.Binary.to_bytes_exn encoding v + let of_bytes b = Data_encoding.Binary.of_bytes encoding b diff --git a/src/lib_base/lwt_exit.ml b/src/lib_base/lwt_exit.ml index aaf20b3b44dceff426bfa08c8b21ebcac009f2f4..63d331c333e2ceb74116b9fb6cab1810836b3b71 100644 --- a/src/lib_base/lwt_exit.ml +++ b/src/lib_base/lwt_exit.ml @@ -25,25 +25,30 @@ exception Exit -let termination_thread, exit_wakener = Lwt.wait () -let exit x = Lwt.wakeup exit_wakener x; raise Exit +let (termination_thread, exit_wakener) = Lwt.wait () + +let exit x = Lwt.wakeup exit_wakener x ; raise Exit let () = Lwt.async_exception_hook := - (function - | Exit -> () - | e -> - let backtrace = Printexc.get_backtrace () in - let pp_exn_trace ppf backtrace = - if String.length backtrace <> 0 then - Format.fprintf ppf - "@,Backtrace:@, @[<h>%a@]" - Format.pp_print_text backtrace - in - (* TODO Improve this *) - Format.eprintf - "@[<v 2>@[Uncaught (asynchronous) exception (%d):@ %s@]%a@]@.%!" - (Unix.getpid ()) - (Printexc.to_string e) - pp_exn_trace backtrace ; - Lwt.wakeup exit_wakener 1) + function + | Exit -> + () + | e -> + let backtrace = Printexc.get_backtrace () in + let pp_exn_trace ppf backtrace = + if String.length backtrace <> 0 then + Format.fprintf + ppf + "@,Backtrace:@, @[<h>%a@]" + Format.pp_print_text + backtrace + in + (* TODO Improve this *) + Format.eprintf + "@[<v 2>@[Uncaught (asynchronous) exception (%d):@ %s@]%a@]@.%!" + (Unix.getpid ()) + (Printexc.to_string e) + pp_exn_trace + backtrace ; + Lwt.wakeup exit_wakener 1 diff --git a/src/lib_base/lwt_exit.mli b/src/lib_base/lwt_exit.mli index 9c6573017edae13d4e6c89fac78103bb66ce11b8..6a1ea3f31a3a48c1136e354362c3870910b191fe 100644 --- a/src/lib_base/lwt_exit.mli +++ b/src/lib_base/lwt_exit.mli @@ -26,9 +26,9 @@ (** A global thread that resumes the first time {!exit} is called anywhere in the program. Called by the main to wait for any other thread in the system to call {!exit}. *) -val termination_thread: int Lwt.t +val termination_thread : int Lwt.t (** Awakens the {!termination_thread} with the given return value, and raises an exception that cannot be caught, except by a catch-all. Should only be called once. 
*) -val exit: int -> 'a +val exit : int -> 'a diff --git a/src/lib_base/mempool.ml b/src/lib_base/mempool.ml index 9972accc64cf86f2e63adf44dc986ea236be5d6c..714a5676974eb7ae1732df2e53345069dcfd3547 100644 --- a/src/lib_base/mempool.ml +++ b/src/lib_base/mempool.ml @@ -23,30 +23,26 @@ (* *) (*****************************************************************************) -type t = { - known_valid: Operation_hash.t list ; - pending: Operation_hash.Set.t ; -} +type t = {known_valid : Operation_hash.t list; pending : Operation_hash.Set.t} + type mempool = t let encoding = let open Data_encoding in conv - (fun { known_valid ; pending } -> (known_valid, pending)) - (fun (known_valid, pending) -> { known_valid ; pending }) + (fun {known_valid; pending} -> (known_valid, pending)) + (fun (known_valid, pending) -> {known_valid; pending}) (obj2 (req "known_valid" (list Operation_hash.encoding)) (req "pending" (dynamic_size Operation_hash.Set.encoding))) let bounded_encoding ?max_operations () = match max_operations with - | None -> encoding + | None -> + encoding | Some max_operations -> Data_encoding.check_size - (8 + max_operations * Operation_hash.size) + (8 + (max_operations * Operation_hash.size)) encoding -let empty = { - known_valid = [] ; - pending = Operation_hash.Set.empty ; -} +let empty = {known_valid = []; pending = Operation_hash.Set.empty} diff --git a/src/lib_base/mempool.mli b/src/lib_base/mempool.mli index bd1156c9e11e86ec9c8be12ca35a8633c9c1d764..e87758fb06eba93a0120d59aeb9786fb45cf08da 100644 --- a/src/lib_base/mempool.mli +++ b/src/lib_base/mempool.mli @@ -27,15 +27,16 @@ broadcasted. *) type t = { - known_valid: Operation_hash.t list ; - (** A valid sequence of operations on top of the current head. *) - pending: Operation_hash.Set.t ; - (** Set of known not-invalid operation. *) + known_valid : Operation_hash.t list; + (** A valid sequence of operations on top of the current head. *) + pending : Operation_hash.Set.t (** Set of known not-invalid operation. *) } + type mempool = t -val encoding: mempool Data_encoding.t -val bounded_encoding: ?max_operations:int -> unit -> mempool Data_encoding.t +val encoding : mempool Data_encoding.t + +val bounded_encoding : ?max_operations:int -> unit -> mempool Data_encoding.t -val empty: mempool (** Empty mempool. 
*) +val empty : mempool diff --git a/src/lib_base/network_version.ml b/src/lib_base/network_version.ml index b1946374a3f052c83349c69f1b3ff3274de1915b..55503d7135eb26459aee8bd122ad972ad9339928 100644 --- a/src/lib_base/network_version.ml +++ b/src/lib_base/network_version.ml @@ -25,71 +25,61 @@ (*****************************************************************************) type t = { - chain_name : Distributed_db_version.name ; - distributed_db_version : Distributed_db_version.t ; - p2p_version : P2p_version.t ; + chain_name : Distributed_db_version.name; + distributed_db_version : Distributed_db_version.t; + p2p_version : P2p_version.t } -let pp ppf { chain_name ; distributed_db_version ; p2p_version } = - Format.fprintf ppf +let pp ppf {chain_name; distributed_db_version; p2p_version} = + Format.fprintf + ppf "%a.%a (p2p: %a)" - Distributed_db_version.pp_name chain_name - Distributed_db_version.pp distributed_db_version - P2p_version.pp p2p_version + Distributed_db_version.pp_name + chain_name + Distributed_db_version.pp + distributed_db_version + P2p_version.pp + p2p_version let encoding = let open Data_encoding in conv - (fun { chain_name ; distributed_db_version ; p2p_version } -> - (chain_name, distributed_db_version, p2p_version)) + (fun {chain_name; distributed_db_version; p2p_version} -> + (chain_name, distributed_db_version, p2p_version)) (fun (chain_name, distributed_db_version, p2p_version) -> - { chain_name ; distributed_db_version ; p2p_version }) + {chain_name; distributed_db_version; p2p_version}) (obj3 (req "chain_name" Distributed_db_version.name_encoding) (req "distributed_db_version" Distributed_db_version.encoding) (req "p2p_version" P2p_version.encoding)) let greatest = function - | [] -> raise (Invalid_argument "Network_version.greatest") - | h :: t -> List.fold_left max h t + | [] -> + raise (Invalid_argument "Network_version.greatest") + | h :: t -> + List.fold_left max h t -let announced - ~chain_name - ~distributed_db_versions - ~p2p_versions = +let announced ~chain_name ~distributed_db_versions ~p2p_versions = assert (distributed_db_versions <> []) ; assert (p2p_versions <> []) ; - { chain_name ; - distributed_db_version = greatest distributed_db_versions ; - p2p_version = greatest p2p_versions ; - } + { chain_name; + distributed_db_version = greatest distributed_db_versions; + p2p_version = greatest p2p_versions } let may_select_version accepted_versions remote_version = let best_local_version = greatest accepted_versions in - if best_local_version <= remote_version then - Some best_local_version - else if List.mem remote_version accepted_versions then - Some remote_version - else - None + if best_local_version <= remote_version then Some best_local_version + else if List.mem remote_version accepted_versions then Some remote_version + else None -let select - ~chain_name - ~distributed_db_versions - ~p2p_versions - remote = +let select ~chain_name ~distributed_db_versions ~p2p_versions remote = assert (distributed_db_versions <> []) ; assert (p2p_versions <> []) ; - if chain_name <> remote.chain_name then - None + if chain_name <> remote.chain_name then None else let open Option in - may_select_version - distributed_db_versions - remote.distributed_db_version >>= fun distributed_db_version -> - may_select_version - p2p_versions - remote.p2p_version >>= fun p2p_version -> - some { chain_name ; - distributed_db_version ; - p2p_version } + may_select_version distributed_db_versions remote.distributed_db_version + >>= fun distributed_db_version -> + 
may_select_version p2p_versions remote.p2p_version + >>= fun p2p_version -> + some {chain_name; distributed_db_version; p2p_version} diff --git a/src/lib_base/network_version.mli b/src/lib_base/network_version.mli index 1182f834acfecdbb4d45250173823e1b3c607044..e0deb5f5883c982ffa761c85c24596efef07cfe4 100644 --- a/src/lib_base/network_version.mli +++ b/src/lib_base/network_version.mli @@ -25,29 +25,31 @@ (*****************************************************************************) type t = { - chain_name : Distributed_db_version.name ; - distributed_db_version : Distributed_db_version.t ; - p2p_version : P2p_version.t ; + chain_name : Distributed_db_version.name; + distributed_db_version : Distributed_db_version.t; + p2p_version : P2p_version.t } -val pp: Format.formatter -> t -> unit -val encoding: t Data_encoding.t +val pp : Format.formatter -> t -> unit + +val encoding : t Data_encoding.t (** [announced supported] computes the network protocol version announced on peer connection, given the [supported] versions for the higher-level messages. *) -val announced: - chain_name: Distributed_db_version.name -> - distributed_db_versions: Distributed_db_version.t list -> - p2p_versions: P2p_version.t list -> +val announced : + chain_name:Distributed_db_version.name -> + distributed_db_versions:Distributed_db_version.t list -> + p2p_versions:P2p_version.t list -> t (** [select acceptables remote] computes network protocol version to be used on a given connection where [remote] is version annouced by the remote peer, and [acceptables] the locally accepted versions for the higher-level messages. *) -val select: - chain_name: Distributed_db_version.name -> - distributed_db_versions: Distributed_db_version.t list -> - p2p_versions: P2p_version.t list -> - t -> t option +val select : + chain_name:Distributed_db_version.name -> + distributed_db_versions:Distributed_db_version.t list -> + p2p_versions:P2p_version.t list -> + t -> + t option diff --git a/src/lib_base/operation.ml b/src/lib_base/operation.ml index a16d43756c9d0ff75d512c439acc378d5cb1ea8b..e0eb2abd7a44f0921e6bd69ee9e3adfd5db5ba10 100644 --- a/src/lib_base/operation.ml +++ b/src/lib_base/operation.ml @@ -23,46 +23,42 @@ (* *) (*****************************************************************************) -type shell_header = { - branch: Block_hash.t ; -} +type shell_header = {branch : Block_hash.t} let shell_header_encoding = let open Data_encoding in conv - (fun { branch } -> branch) - (fun branch -> { branch }) + (fun {branch} -> branch) + (fun branch -> {branch}) (obj1 (req "branch" Block_hash.encoding)) -type t = { - shell: shell_header ; - proto: MBytes.t ; -} +type t = {shell : shell_header; proto : MBytes.t} -include Compare.Make(struct - type nonrec t = t - let compare o1 o2 = - let (>>) x y = if x = 0 then y () else x in - Block_hash.compare o1.shell.branch o1.shell.branch >> fun () -> - MBytes.compare o1.proto o2.proto - end) +include Compare.Make (struct + type nonrec t = t + + let compare o1 o2 = + let ( >> ) x y = if x = 0 then y () else x in + Block_hash.compare o1.shell.branch o1.shell.branch + >> fun () -> MBytes.compare o1.proto o2.proto +end) let encoding = let open Data_encoding in conv - (fun { shell ; proto } -> (shell, proto)) - (fun (shell, proto) -> { shell ; proto }) - (merge_objs - shell_header_encoding - (obj1 (req "data" Variable.bytes))) + (fun {shell; proto} -> (shell, proto)) + (fun (shell, proto) -> {shell; proto}) + (merge_objs shell_header_encoding (obj1 (req "data" Variable.bytes))) let bounded_encoding 
?max_size () = match max_size with - | None -> encoding - | Some max_size -> Data_encoding.check_size max_size encoding + | None -> + encoding + | Some max_size -> + Data_encoding.check_size max_size encoding -let bounded_list_encoding - ?max_length ?max_size ?max_operation_size ?max_pass () = +let bounded_list_encoding ?max_length ?max_size ?max_operation_size ?max_pass + () = let open Data_encoding in let op_encoding = bounded_encoding ?max_size:max_operation_size () in let op_list_encoding = @@ -70,28 +66,33 @@ let bounded_list_encoding | None -> Variable.list ?max_length (dynamic_size op_encoding) | Some max_size -> - check_size max_size - (Variable.list ?max_length (dynamic_size op_encoding)) in + check_size + max_size + (Variable.list ?max_length (dynamic_size op_encoding)) + in obj2 - (req "operation_hashes_path" + (req + "operation_hashes_path" (Operation_list_list_hash.bounded_path_encoding ?max_length:max_pass ())) (req "operations" op_list_encoding) let bounded_hash_list_encoding ?max_length ?max_pass () = let open Data_encoding in obj2 - (req "operation_hashes_path" + (req + "operation_hashes_path" (Operation_list_list_hash.bounded_path_encoding ?max_length:max_pass ())) (req "operation_hashes" (Variable.list ?max_length Operation_hash.encoding)) let pp fmt op = - Data_encoding.Json.pp fmt - (Data_encoding.Json.construct encoding op) + Data_encoding.Json.pp fmt (Data_encoding.Json.construct encoding op) let to_bytes v = Data_encoding.Binary.to_bytes_exn encoding v + let of_bytes b = Data_encoding.Binary.of_bytes encoding b + let of_bytes_exn b = Data_encoding.Binary.of_bytes_exn encoding b let hash op = Operation_hash.hash_bytes [to_bytes op] -let hash_raw bytes = Operation_hash.hash_bytes [bytes] +let hash_raw bytes = Operation_hash.hash_bytes [bytes] diff --git a/src/lib_base/operation.mli b/src/lib_base/operation.mli index 54c22cd20718e7501e9fc4e25f7979bfb763fb02..2380a032425d1ce8158d19f8b8630aa4efad3e5f 100644 --- a/src/lib_base/operation.mli +++ b/src/lib_base/operation.mli @@ -23,29 +23,28 @@ (* *) (*****************************************************************************) -type shell_header = { - branch: Block_hash.t ; -} -val shell_header_encoding: shell_header Data_encoding.t - -type t = { - shell: shell_header ; - proto: MBytes.t ; -} - -include S.HASHABLE with type t := t - and type hash := Operation_hash.t -val of_bytes_exn: MBytes.t -> t - -val bounded_encoding: ?max_size:int -> unit -> t Data_encoding.t -val bounded_list_encoding: +type shell_header = {branch : Block_hash.t} + +val shell_header_encoding : shell_header Data_encoding.t + +type t = {shell : shell_header; proto : MBytes.t} + +include S.HASHABLE with type t := t and type hash := Operation_hash.t + +val of_bytes_exn : MBytes.t -> t + +val bounded_encoding : ?max_size:int -> unit -> t Data_encoding.t + +val bounded_list_encoding : ?max_length:int -> ?max_size:int -> ?max_operation_size:int -> ?max_pass:int -> - unit -> (Operation_list_list_hash.path * t list) Data_encoding.t -val bounded_hash_list_encoding: + unit -> + (Operation_list_list_hash.path * t list) Data_encoding.t + +val bounded_hash_list_encoding : ?max_length:int -> ?max_pass:int -> - unit -> (Operation_list_list_hash.path * Operation_hash.t list) Data_encoding.t - + unit -> + (Operation_list_list_hash.path * Operation_hash.t list) Data_encoding.t diff --git a/src/lib_base/p2p_addr.ml b/src/lib_base/p2p_addr.ml index d9139c4b50d4f9e6ddbe7cc3665dbe84e6dfbb48..5ed57c3ab9b1d79c7d00e48776412678adf609d5 100644 --- a/src/lib_base/p2p_addr.ml 
+++ b/src/lib_base/p2p_addr.ml @@ -28,18 +28,8 @@ type t = Ipaddr.V6.t let encoding = let open Data_encoding in splitted - ~json:begin - conv - Ipaddr.V6.to_string - Ipaddr.V6.of_string_exn - string - end - ~binary:begin - conv - Ipaddr.V6.to_bytes - Ipaddr.V6.of_bytes_exn - string - end + ~json:(conv Ipaddr.V6.to_string Ipaddr.V6.of_string_exn string) + ~binary:(conv Ipaddr.V6.to_bytes Ipaddr.V6.of_bytes_exn string) type port = int @@ -52,13 +42,18 @@ let pp ppf addr = let of_string_opt str = match Ipaddr.of_string str with - | Ok (Ipaddr.V4 addr) -> Some (Ipaddr.v6_of_v4 addr) - | Ok (V6 addr) -> Some addr - | Error (`Msg _) -> None + | Ok (Ipaddr.V4 addr) -> + Some (Ipaddr.v6_of_v4 addr) + | Ok (V6 addr) -> + Some addr + | Error (`Msg _) -> + None let of_string_exn str = match of_string_opt str with - | None -> Pervasives.failwith "P2p_addr.of_string" - | Some t -> t + | None -> + Pervasives.failwith "P2p_addr.of_string" + | Some t -> + t let to_string saddr = Format.asprintf "%a" pp saddr diff --git a/src/lib_base/p2p_addr.mli b/src/lib_base/p2p_addr.mli index 7c7b0424654d5719d7e8fc33b33dd7b64e0a19fa..8aff0261733739e2e88ce7f005511cfac5e5bc04 100644 --- a/src/lib_base/p2p_addr.mli +++ b/src/lib_base/p2p_addr.mli @@ -24,6 +24,7 @@ (*****************************************************************************) type t = Ipaddr.V6.t + type port = int val encoding : t Data_encoding.t @@ -31,6 +32,7 @@ val encoding : t Data_encoding.t val pp : Format.formatter -> t -> unit val of_string_opt : string -> t option + val of_string_exn : string -> t val to_string : t -> string diff --git a/src/lib_base/p2p_connection.ml b/src/lib_base/p2p_connection.ml index 9ed6b59760f18a40c98d8462bc146d123b21b219..d26269a59477484ac2d58612f3b7f7ddfb731fd2 100644 --- a/src/lib_base/p2p_connection.ml +++ b/src/lib_base/p2p_connection.ml @@ -25,43 +25,52 @@ (*****************************************************************************) module Id = struct - (* A net point (address x port). *) type t = P2p_addr.t * P2p_addr.port option + let compare (a1, p1) (a2, p2) = - match Ipaddr.V6.compare a1 a2 with - | 0 -> Pervasives.compare p1 p2 - | x -> x + match Ipaddr.V6.compare a1 a2 with 0 -> Pervasives.compare p1 p2 | x -> x + let equal p1 p2 = compare p1 p2 = 0 + let hash = Hashtbl.hash + let pp ppf (addr, port) = match port with | None -> Format.fprintf ppf "[%a]:??" 
Ipaddr.V6.pp addr | Some port -> Format.fprintf ppf "[%a]:%d" Ipaddr.V6.pp addr port + let pp_opt ppf = function - | None -> Format.pp_print_string ppf "none" - | Some point -> pp ppf point + | None -> + Format.pp_print_string ppf "none" + | Some point -> + pp ppf point + let to_string t = Format.asprintf "%a" pp t let is_local (addr, _) = Ipaddr.V6.is_private addr + let is_global (addr, _) = not @@ Ipaddr.V6.is_private addr - let of_point (addr, port) = addr, Some port + let of_point (addr, port) = (addr, Some port) + let to_point = function - | _, None -> None - | addr, Some port -> Some (addr, port) + | (_, None) -> + None + | (addr, Some port) -> + Some (addr, port) + let to_point_exn = function - | _, None -> invalid_arg "to_point_exn" - | addr, Some port -> addr, port + | (_, None) -> + invalid_arg "to_point_exn" + | (addr, Some port) -> + (addr, port) let encoding = let open Data_encoding in - (obj2 - (req "addr" P2p_addr.encoding) - (opt "port" uint16)) - + obj2 (req "addr" P2p_addr.encoding) (opt "port" uint16) end module Map = Map.Make (Id) @@ -69,33 +78,52 @@ module Set = Set.Make (Id) module Table = Hashtbl.Make (Id) module Info = struct - type 'meta t = { - incoming : bool ; - peer_id : P2p_peer_id.t ; - id_point : Id.t ; - remote_socket_port : P2p_addr.port ; - announced_version : Network_version.t ; - private_node : bool ; - local_metadata : 'meta ; - remote_metadata : 'meta ; + incoming : bool; + peer_id : P2p_peer_id.t; + id_point : Id.t; + remote_socket_port : P2p_addr.port; + announced_version : Network_version.t; + private_node : bool; + local_metadata : 'meta; + remote_metadata : 'meta } let encoding metadata_encoding = let open Data_encoding in conv - (fun { incoming ; peer_id ; id_point ; remote_socket_port ; - announced_version ; private_node ; - local_metadata ; remote_metadata } -> - (incoming, peer_id, id_point, remote_socket_port, - announced_version, private_node, - local_metadata, remote_metadata)) - (fun (incoming, peer_id, id_point, remote_socket_port, - announced_version, private_node, - local_metadata, remote_metadata) -> - { incoming ; peer_id ; id_point ; remote_socket_port ; - announced_version ; private_node ; - local_metadata ; remote_metadata }) + (fun { incoming; + peer_id; + id_point; + remote_socket_port; + announced_version; + private_node; + local_metadata; + remote_metadata } -> + ( incoming, + peer_id, + id_point, + remote_socket_port, + announced_version, + private_node, + local_metadata, + remote_metadata )) + (fun ( incoming, + peer_id, + id_point, + remote_socket_port, + announced_version, + private_node, + local_metadata, + remote_metadata ) -> + { incoming; + peer_id; + id_point; + remote_socket_port; + announced_version; + private_node; + local_metadata; + remote_metadata }) (obj8 (req "incoming" bool) (req "peer_id" P2p_peer_id.encoding) @@ -107,38 +135,46 @@ module Info = struct (req "remote_metadata" metadata_encoding)) let pp pp_meta ppf - { incoming ; id_point = (remote_addr, remote_port) ; - remote_socket_port ; peer_id ; announced_version ; - private_node ; - local_metadata = _ ; remote_metadata } = - let point = match remote_port with - | None -> remote_addr, remote_socket_port - | Some port -> remote_addr, port in - Format.fprintf ppf "%s %a %a (%a) %s%a" + { incoming; + id_point = (remote_addr, remote_port); + remote_socket_port; + peer_id; + announced_version; + private_node; + local_metadata = _; + remote_metadata } = + let point = + match remote_port with + | None -> + (remote_addr, remote_socket_port) + | Some port -> + 
(remote_addr, port) + in + Format.fprintf + ppf + "%s %a %a (%a) %s%a" (if incoming then "↘" else "↗") - P2p_peer_id.pp peer_id - P2p_point.Id.pp point - Network_version.pp announced_version + P2p_peer_id.pp + peer_id + P2p_point.Id.pp + point + Network_version.pp + announced_version (if private_node then " private" else "") - pp_meta remote_metadata - + pp_meta + remote_metadata end module Pool_event = struct - (** Pool-level events *) type t = - | Too_few_connections | Too_many_connections - | New_point of P2p_point.Id.t | New_peer of P2p_peer_id.t - | Gc_points | Gc_peer_ids - | Incoming_connection of P2p_point.Id.t | Outgoing_connection of P2p_point.Id.t | Authentication_failed of P2p_point.Id.t @@ -146,26 +182,32 @@ module Pool_event = struct | Rejecting_request of P2p_point.Id.t * Id.t * P2p_peer_id.t | Request_rejected of P2p_point.Id.t * (Id.t * P2p_peer_id.t) option | Connection_established of Id.t * P2p_peer_id.t - - | Swap_request_received of { source : P2p_peer_id.t } - | Swap_ack_received of { source : P2p_peer_id.t } - | Swap_request_sent of { source : P2p_peer_id.t } - | Swap_ack_sent of { source : P2p_peer_id.t } - | Swap_request_ignored of { source : P2p_peer_id.t } - | Swap_success of { source : P2p_peer_id.t } - | Swap_failure of { source : P2p_peer_id.t } - + | Swap_request_received of {source : P2p_peer_id.t} + | Swap_ack_received of {source : P2p_peer_id.t} + | Swap_request_sent of {source : P2p_peer_id.t} + | Swap_ack_sent of {source : P2p_peer_id.t} + | Swap_request_ignored of {source : P2p_peer_id.t} + | Swap_success of {source : P2p_peer_id.t} + | Swap_failure of {source : P2p_peer_id.t} | Disconnection of P2p_peer_id.t | External_disconnection of P2p_peer_id.t - let pp ppf (event:t) = + let pp ppf (event : t) = match event with - | Too_few_connections -> Format.pp_print_string ppf "Too_few_connections" - | Too_many_connections -> Format.pp_print_string ppf "Too_many_connections" - | New_point p -> Format.pp_print_string ppf "New_point " ; P2p_point.Id.pp ppf p - | New_peer p -> Format.pp_print_string ppf "New_peer " ; P2p_peer_id.pp ppf p - | Gc_points -> Format.pp_print_string ppf "Gc_points" - | Gc_peer_ids -> Format.pp_print_string ppf "Gc_peer_ids" + | Too_few_connections -> + Format.pp_print_string ppf "Too_few_connections" + | Too_many_connections -> + Format.pp_print_string ppf "Too_many_connections" + | New_point p -> + Format.pp_print_string ppf "New_point " ; + P2p_point.Id.pp ppf p + | New_peer p -> + Format.pp_print_string ppf "New_peer " ; + P2p_peer_id.pp ppf p + | Gc_points -> + Format.pp_print_string ppf "Gc_points" + | Gc_peer_ids -> + Format.pp_print_string ppf "Gc_peer_ids" | Incoming_connection p -> Format.pp_print_string ppf "Incoming_connection " ; P2p_point.Id.pp ppf p @@ -187,25 +229,25 @@ module Pool_event = struct | Connection_established (_, pi) -> Format.pp_print_string ppf "Connection_established " ; P2p_peer_id.pp ppf pi - | Swap_request_received { source } -> + | Swap_request_received {source} -> Format.pp_print_string ppf "Swap_request_received " ; P2p_peer_id.pp ppf source - | Swap_ack_received { source } -> + | Swap_ack_received {source} -> Format.pp_print_string ppf "Swap_ack_received " ; P2p_peer_id.pp ppf source - | Swap_request_sent { source } -> + | Swap_request_sent {source} -> Format.pp_print_string ppf "Swap_request_sent " ; P2p_peer_id.pp ppf source - | Swap_ack_sent { source } -> + | Swap_ack_sent {source} -> Format.pp_print_string ppf "Swap_ack_sent " ; P2p_peer_id.pp ppf source - | Swap_request_ignored { source } -> + 
| Swap_request_ignored {source} -> Format.pp_print_string ppf "Swap_request_ignored " ; P2p_peer_id.pp ppf source - | Swap_success { source } -> + | Swap_success {source} -> Format.pp_print_string ppf "Swap_success " ; P2p_peer_id.pp ppf source - | Swap_failure { source } -> + | Swap_failure {source} -> Format.pp_print_string ppf "Swap_failure " ; P2p_peer_id.pp ppf source | Disconnection source -> @@ -218,166 +260,193 @@ module Pool_event = struct let encoding = let open Data_encoding in let branch_encoding name obj = - conv (fun x -> (), x) (fun ((), x) -> x) - (merge_objs - (obj1 (req "event" (constant name))) obj) in - union ~tag_size:`Uint8 [ - case (Tag 0) - ~title:"Too_few_connections" - (branch_encoding "too_few_connections" empty) - (function Too_few_connections -> Some () | _ -> None) - (fun () -> Too_few_connections) ; - case (Tag 1) - ~title:"Too_many_connections" - (branch_encoding "too_many_connections" empty) - (function Too_many_connections -> Some () | _ -> None) - (fun () -> Too_many_connections) ; - case (Tag 2) - ~title:"New_point" - (branch_encoding "new_point" - (obj1 (req "point" P2p_point.Id.encoding))) - (function New_point p -> Some p | _ -> None) - (fun p -> New_point p) ; - case (Tag 3) - ~title:"New_peer" - (branch_encoding "new_peer" - (obj1 (req "peer_id" P2p_peer_id.encoding))) - (function New_peer p -> Some p | _ -> None) - (fun p -> New_peer p) ; - case (Tag 4) - ~title:"Incoming_connection" - (branch_encoding "incoming_connection" - (obj1 (req "point" P2p_point.Id.encoding))) - (function Incoming_connection p -> Some p | _ -> None) - (fun p -> Incoming_connection p) ; - case (Tag 5) - ~title:"Outgoing_connection" - (branch_encoding "outgoing_connection" - (obj1 (req "point" P2p_point.Id.encoding))) - (function Outgoing_connection p -> Some p | _ -> None) - (fun p -> Outgoing_connection p) ; - case (Tag 6) - ~title:"Authentication_failed" - (branch_encoding "authentication_failed" - (obj1 (req "point" P2p_point.Id.encoding))) - (function Authentication_failed p -> Some p | _ -> None) - (fun p -> Authentication_failed p) ; - case (Tag 7) - ~title:"Accepting_request" - (branch_encoding "accepting_request" - (obj3 - (req "point" P2p_point.Id.encoding) - (req "id_point" Id.encoding) - (req "peer_id" P2p_peer_id.encoding))) - (function Accepting_request (p, id_p, g) -> - Some (p, id_p, g) | _ -> None) - (fun (p, id_p, g) -> Accepting_request (p, id_p, g)) ; - case (Tag 8) - ~title:"Rejecting_request" - (branch_encoding "rejecting_request" - (obj3 - (req "point" P2p_point.Id.encoding) - (req "id_point" Id.encoding) - (req "peer_id" P2p_peer_id.encoding))) - (function Rejecting_request (p, id_p, g) -> - Some (p, id_p, g) | _ -> None) - (fun (p, id_p, g) -> Rejecting_request (p, id_p, g)) ; - case (Tag 9) - ~title:"Request_rejected" - (branch_encoding "request_rejected" - (obj2 - (req "point" P2p_point.Id.encoding) - (opt "identity" - (tup2 Id.encoding P2p_peer_id.encoding)))) - (function Request_rejected (p, id) -> Some (p, id) | _ -> None) - (fun (p, id) -> Request_rejected (p, id)) ; - case (Tag 10) - ~title:"Connection_established" - (branch_encoding "connection_established" - (obj2 - (req "id_point" Id.encoding) - (req "peer_id" P2p_peer_id.encoding))) - (function Connection_established (id_p, g) -> - Some (id_p, g) | _ -> None) - (fun (id_p, g) -> Connection_established (id_p, g)) ; - case (Tag 11) - ~title:"Disconnection" - (branch_encoding "disconnection" - (obj1 (req "peer_id" P2p_peer_id.encoding))) - (function Disconnection g -> Some g | _ -> None) - 
(fun g -> Disconnection g) ; - case (Tag 12) - ~title:"External_disconnection" - (branch_encoding "external_disconnection" - (obj1 (req "peer_id" P2p_peer_id.encoding))) - (function External_disconnection g -> Some g | _ -> None) - (fun g -> External_disconnection g) ; - case (Tag 13) - ~title:"Gc_points" - (branch_encoding "gc_points" empty) - (function Gc_points -> Some () | _ -> None) - (fun () -> Gc_points) ; - case (Tag 14) - ~title:"Gc_peer_ids" - (branch_encoding "gc_peer_ids" empty) - (function Gc_peer_ids -> Some () | _ -> None) - (fun () -> Gc_peer_ids) ; - case (Tag 15) - ~title:"Swap_request_received" - (branch_encoding "swap_request_received" - (obj1 (req "source" P2p_peer_id.encoding))) - (function - | Swap_request_received { source } -> Some source - | _ -> None) - (fun source -> Swap_request_received { source }) ; - case (Tag 16) - ~title:"Swap_ack_received" - (branch_encoding "swap_ack_received" - (obj1 (req "source" P2p_peer_id.encoding))) - (function - | Swap_ack_received { source } -> Some source - | _ -> None) - (fun source -> Swap_ack_received { source }) ; - case (Tag 17) - ~title:"Swap_request_sent" - (branch_encoding "swap_request_sent" - (obj1 (req "source" P2p_peer_id.encoding))) - (function - | Swap_request_sent { source } -> Some source - | _ -> None) - (fun source -> Swap_request_sent { source }) ; - case (Tag 18) - ~title:"Swap_ack_sent" - (branch_encoding "swap_ack_sent" - (obj1 (req "source" P2p_peer_id.encoding))) - (function - | Swap_ack_sent { source } -> Some source - | _ -> None) - (fun source -> Swap_ack_sent { source }) ; - case (Tag 19) - ~title:"Swap_request_ignored" - (branch_encoding "swap_request_ignored" - (obj1 (req "source" P2p_peer_id.encoding))) - (function - | Swap_request_ignored { source } -> Some source - | _ -> None) - (fun source -> Swap_request_ignored { source }) ; - case (Tag 20) - ~title:"Swap_success" - (branch_encoding "swap_success" - (obj1 (req "source" P2p_peer_id.encoding))) - (function - | Swap_success { source } -> Some source - | _ -> None) - (fun source -> Swap_success { source }) ; - case (Tag 21) - ~title:"Swap_failure" - (branch_encoding "swap_failure" - (obj1 (req "source" P2p_peer_id.encoding))) - (function - | Swap_failure { source } -> Some source - | _ -> None) - (fun source -> Swap_failure { source }) ; - ] - + conv + (fun x -> ((), x)) + (fun ((), x) -> x) + (merge_objs (obj1 (req "event" (constant name))) obj) + in + union + ~tag_size:`Uint8 + [ case + (Tag 0) + ~title:"Too_few_connections" + (branch_encoding "too_few_connections" empty) + (function Too_few_connections -> Some () | _ -> None) + (fun () -> Too_few_connections); + case + (Tag 1) + ~title:"Too_many_connections" + (branch_encoding "too_many_connections" empty) + (function Too_many_connections -> Some () | _ -> None) + (fun () -> Too_many_connections); + case + (Tag 2) + ~title:"New_point" + (branch_encoding + "new_point" + (obj1 (req "point" P2p_point.Id.encoding))) + (function New_point p -> Some p | _ -> None) + (fun p -> New_point p); + case + (Tag 3) + ~title:"New_peer" + (branch_encoding + "new_peer" + (obj1 (req "peer_id" P2p_peer_id.encoding))) + (function New_peer p -> Some p | _ -> None) + (fun p -> New_peer p); + case + (Tag 4) + ~title:"Incoming_connection" + (branch_encoding + "incoming_connection" + (obj1 (req "point" P2p_point.Id.encoding))) + (function Incoming_connection p -> Some p | _ -> None) + (fun p -> Incoming_connection p); + case + (Tag 5) + ~title:"Outgoing_connection" + (branch_encoding + "outgoing_connection" + (obj1 
(req "point" P2p_point.Id.encoding))) + (function Outgoing_connection p -> Some p | _ -> None) + (fun p -> Outgoing_connection p); + case + (Tag 6) + ~title:"Authentication_failed" + (branch_encoding + "authentication_failed" + (obj1 (req "point" P2p_point.Id.encoding))) + (function Authentication_failed p -> Some p | _ -> None) + (fun p -> Authentication_failed p); + case + (Tag 7) + ~title:"Accepting_request" + (branch_encoding + "accepting_request" + (obj3 + (req "point" P2p_point.Id.encoding) + (req "id_point" Id.encoding) + (req "peer_id" P2p_peer_id.encoding))) + (function + | Accepting_request (p, id_p, g) -> Some (p, id_p, g) | _ -> None) + (fun (p, id_p, g) -> Accepting_request (p, id_p, g)); + case + (Tag 8) + ~title:"Rejecting_request" + (branch_encoding + "rejecting_request" + (obj3 + (req "point" P2p_point.Id.encoding) + (req "id_point" Id.encoding) + (req "peer_id" P2p_peer_id.encoding))) + (function + | Rejecting_request (p, id_p, g) -> Some (p, id_p, g) | _ -> None) + (fun (p, id_p, g) -> Rejecting_request (p, id_p, g)); + case + (Tag 9) + ~title:"Request_rejected" + (branch_encoding + "request_rejected" + (obj2 + (req "point" P2p_point.Id.encoding) + (opt "identity" (tup2 Id.encoding P2p_peer_id.encoding)))) + (function Request_rejected (p, id) -> Some (p, id) | _ -> None) + (fun (p, id) -> Request_rejected (p, id)); + case + (Tag 10) + ~title:"Connection_established" + (branch_encoding + "connection_established" + (obj2 + (req "id_point" Id.encoding) + (req "peer_id" P2p_peer_id.encoding))) + (function + | Connection_established (id_p, g) -> Some (id_p, g) | _ -> None) + (fun (id_p, g) -> Connection_established (id_p, g)); + case + (Tag 11) + ~title:"Disconnection" + (branch_encoding + "disconnection" + (obj1 (req "peer_id" P2p_peer_id.encoding))) + (function Disconnection g -> Some g | _ -> None) + (fun g -> Disconnection g); + case + (Tag 12) + ~title:"External_disconnection" + (branch_encoding + "external_disconnection" + (obj1 (req "peer_id" P2p_peer_id.encoding))) + (function External_disconnection g -> Some g | _ -> None) + (fun g -> External_disconnection g); + case + (Tag 13) + ~title:"Gc_points" + (branch_encoding "gc_points" empty) + (function Gc_points -> Some () | _ -> None) + (fun () -> Gc_points); + case + (Tag 14) + ~title:"Gc_peer_ids" + (branch_encoding "gc_peer_ids" empty) + (function Gc_peer_ids -> Some () | _ -> None) + (fun () -> Gc_peer_ids); + case + (Tag 15) + ~title:"Swap_request_received" + (branch_encoding + "swap_request_received" + (obj1 (req "source" P2p_peer_id.encoding))) + (function + | Swap_request_received {source} -> Some source | _ -> None) + (fun source -> Swap_request_received {source}); + case + (Tag 16) + ~title:"Swap_ack_received" + (branch_encoding + "swap_ack_received" + (obj1 (req "source" P2p_peer_id.encoding))) + (function Swap_ack_received {source} -> Some source | _ -> None) + (fun source -> Swap_ack_received {source}); + case + (Tag 17) + ~title:"Swap_request_sent" + (branch_encoding + "swap_request_sent" + (obj1 (req "source" P2p_peer_id.encoding))) + (function Swap_request_sent {source} -> Some source | _ -> None) + (fun source -> Swap_request_sent {source}); + case + (Tag 18) + ~title:"Swap_ack_sent" + (branch_encoding + "swap_ack_sent" + (obj1 (req "source" P2p_peer_id.encoding))) + (function Swap_ack_sent {source} -> Some source | _ -> None) + (fun source -> Swap_ack_sent {source}); + case + (Tag 19) + ~title:"Swap_request_ignored" + (branch_encoding + "swap_request_ignored" + (obj1 (req "source" 
P2p_peer_id.encoding))) + (function Swap_request_ignored {source} -> Some source | _ -> None) + (fun source -> Swap_request_ignored {source}); + case + (Tag 20) + ~title:"Swap_success" + (branch_encoding + "swap_success" + (obj1 (req "source" P2p_peer_id.encoding))) + (function Swap_success {source} -> Some source | _ -> None) + (fun source -> Swap_success {source}); + case + (Tag 21) + ~title:"Swap_failure" + (branch_encoding + "swap_failure" + (obj1 (req "source" P2p_peer_id.encoding))) + (function Swap_failure {source} -> Some source | _ -> None) + (fun source -> Swap_failure {source}) ] end diff --git a/src/lib_base/p2p_connection.mli b/src/lib_base/p2p_connection.mli index 3f016fac897fb038ea9713a56a0a7fae75060912..6555d39f98ea8f36589004a840f7a4360a5be4a9 100644 --- a/src/lib_base/p2p_connection.mli +++ b/src/lib_base/p2p_connection.mli @@ -25,104 +25,101 @@ (*****************************************************************************) module Id : sig - type t = P2p_addr.t * P2p_addr.port option + val compare : t -> t -> int + val equal : t -> t -> bool + val pp : Format.formatter -> t -> unit + val pp_opt : Format.formatter -> t option -> unit + val to_string : t -> string + val encoding : t Data_encoding.t + val is_local : t -> bool + val is_global : t -> bool + val of_point : P2p_point.Id.t -> t + val to_point : t -> P2p_point.Id.t option - val to_point_exn : t -> P2p_point.Id.t + val to_point_exn : t -> P2p_point.Id.t end module Map : Map.S with type key = Id.t + module Set : Set.S with type elt = Id.t + module Table : Hashtbl.S with type key = Id.t (** Information about a connection *) module Info : sig - type 'meta t = { incoming : bool; peer_id : P2p_peer_id.t; id_point : Id.t; remote_socket_port : P2p_addr.port; - announced_version : Network_version.t ; - private_node : bool ; - local_metadata : 'meta ; - remote_metadata : 'meta ; + announced_version : Network_version.t; + private_node : bool; + local_metadata : 'meta; + remote_metadata : 'meta } val pp : - (Format.formatter -> 'meta -> unit) -> - Format.formatter -> 'meta t -> unit - val encoding : 'meta Data_encoding.t -> 'meta t Data_encoding.t + (Format.formatter -> 'meta -> unit) -> Format.formatter -> 'meta t -> unit + val encoding : 'meta Data_encoding.t -> 'meta t Data_encoding.t end module Pool_event : sig - type t = - | Too_few_connections | Too_many_connections - | New_point of P2p_point.Id.t | New_peer of P2p_peer_id.t - | Gc_points - (** Garbage collection of known point table has been triggered. *) - + (** Garbage collection of known point table has been triggered. *) | Gc_peer_ids - (** Garbage collection of known peer_ids table has been triggered. *) - + (** Garbage collection of known peer_ids table has been triggered. *) (* Connection-level events *) - | Incoming_connection of P2p_point.Id.t - (** We accept(2)-ed an incoming connection *) + (** We accept(2)-ed an incoming connection *) | Outgoing_connection of P2p_point.Id.t - (** We connect(2)-ed to a remote endpoint *) + (** We connect(2)-ed to a remote endpoint *) | Authentication_failed of P2p_point.Id.t - (** Remote point failed authentication *) - + (** Remote point failed authentication *) | Accepting_request of P2p_point.Id.t * Id.t * P2p_peer_id.t - (** We accepted a connection after authentifying the remote peer. *) + (** We accepted a connection after authentifying the remote peer. *) | Rejecting_request of P2p_point.Id.t * Id.t * P2p_peer_id.t - (** We rejected a connection after authentifying the remote peer. 
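A minimal usage sketch of the pool-event encoding defined above (not part of the patch): it assumes only the P2p_connection module from this diff plus Data_encoding's standard Json backend, and round-trips one Swap_success event through JSON; the helper name and the [source] argument are illustrative.

let _pool_event_round_trip (source : P2p_peer_id.t) =
  let event = P2p_connection.Pool_event.Swap_success {source} in
  (* construct/destruct are Data_encoding's generic JSON (de)serialisers *)
  let json =
    Data_encoding.Json.construct P2p_connection.Pool_event.encoding event
  in
  Data_encoding.Json.destruct P2p_connection.Pool_event.encoding json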
*) + (** We rejected a connection after authentifying the remote peer. *) | Request_rejected of P2p_point.Id.t * (Id.t * P2p_peer_id.t) option - (** The remote peer rejected our connection. *) - + (** The remote peer rejected our connection. *) | Connection_established of Id.t * P2p_peer_id.t - (** We successfully established a authentified connection. *) - - | Swap_request_received of { source : P2p_peer_id.t } - (** A swap request has been received. *) - | Swap_ack_received of { source : P2p_peer_id.t } - (** A swap ack has been received *) - | Swap_request_sent of { source : P2p_peer_id.t } - (** A swap request has been sent *) - | Swap_ack_sent of { source : P2p_peer_id.t } - (** A swap ack has been sent *) - | Swap_request_ignored of { source : P2p_peer_id.t } - (** A swap request has been ignored *) - | Swap_success of { source : P2p_peer_id.t } - (** A swap operation has succeeded *) - | Swap_failure of { source : P2p_peer_id.t } - (** A swap operation has failed *) - + (** We successfully established a authentified connection. *) + | Swap_request_received of {source : P2p_peer_id.t} + (** A swap request has been received. *) + | Swap_ack_received of {source : P2p_peer_id.t} + (** A swap ack has been received *) + | Swap_request_sent of {source : P2p_peer_id.t} + (** A swap request has been sent *) + | Swap_ack_sent of {source : P2p_peer_id.t} + (** A swap ack has been sent *) + | Swap_request_ignored of {source : P2p_peer_id.t} + (** A swap request has been ignored *) + | Swap_success of {source : P2p_peer_id.t} + (** A swap operation has succeeded *) + | Swap_failure of {source : P2p_peer_id.t} + (** A swap operation has failed *) | Disconnection of P2p_peer_id.t - (** We decided to close the connection. *) + (** We decided to close the connection. *) | External_disconnection of P2p_peer_id.t - (** The connection was closed for external reason. *) + (** The connection was closed for external reason. *) val pp : Format.formatter -> t -> unit val encoding : t Data_encoding.t - end diff --git a/src/lib_base/p2p_id_point.ml b/src/lib_base/p2p_id_point.ml index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..8b137891791fe96927ad78e64b0aad7bded08bdc 100644 --- a/src/lib_base/p2p_id_point.ml +++ b/src/lib_base/p2p_id_point.ml @@ -0,0 +1 @@ + diff --git a/src/lib_base/p2p_id_point.mli b/src/lib_base/p2p_id_point.mli index 87e22816f0c9d8703bfbe0bc50a5d8dfb8a2b50f..a7b5dedd24a2a34ddc8e39bc1b8879325cae07e6 100644 --- a/src/lib_base/p2p_id_point.mli +++ b/src/lib_base/p2p_id_point.mli @@ -22,4 +22,3 @@ (* DEALINGS IN THE SOFTWARE. 
*) (* *) (*****************************************************************************) - diff --git a/src/lib_base/p2p_identity.ml b/src/lib_base/p2p_identity.ml index 6194cf0b81362bb79bf006ee4c8207a38d0c5258..db4a29f72ceea69f3ae4d37d133a3094dcf4a27f 100644 --- a/src/lib_base/p2p_identity.ml +++ b/src/lib_base/p2p_identity.ml @@ -24,23 +24,26 @@ (*****************************************************************************) type t = { - peer_id : P2p_peer.Id.t ; - public_key : Crypto_box.public_key ; - secret_key : Crypto_box.secret_key ; - proof_of_work_stamp : Crypto_box.nonce ; + peer_id : P2p_peer.Id.t; + public_key : Crypto_box.public_key; + secret_key : Crypto_box.secret_key; + proof_of_work_stamp : Crypto_box.nonce } let encoding = let open Data_encoding in conv - (fun { peer_id ; public_key ; secret_key ; proof_of_work_stamp } -> - (Some peer_id, public_key, secret_key, proof_of_work_stamp)) + (fun {peer_id; public_key; secret_key; proof_of_work_stamp} -> + (Some peer_id, public_key, secret_key, proof_of_work_stamp)) (fun (peer_id_opt, public_key, secret_key, proof_of_work_stamp) -> - let peer_id = - match peer_id_opt with - | Some peer_id -> peer_id - | None -> Tezos_crypto.Crypto_box.hash public_key in - { peer_id ; public_key ; secret_key ; proof_of_work_stamp }) + let peer_id = + match peer_id_opt with + | Some peer_id -> + peer_id + | None -> + Tezos_crypto.Crypto_box.hash public_key + in + {peer_id; public_key; secret_key; proof_of_work_stamp}) (obj4 (opt "peer_id" P2p_peer_id.encoding) (req "public_key" Crypto_box.public_key_encoding) @@ -48,30 +51,34 @@ let encoding = (req "proof_of_work_stamp" Crypto_box.nonce_encoding)) let generate ?max target = - let secret_key, public_key, peer_id = Crypto_box.random_keypair () in + let (secret_key, public_key, peer_id) = Crypto_box.random_keypair () in let proof_of_work_stamp = - Crypto_box.generate_proof_of_work ?max public_key target in - { peer_id ; public_key ; secret_key ; proof_of_work_stamp } + Crypto_box.generate_proof_of_work ?max public_key target + in + {peer_id; public_key; secret_key; proof_of_work_stamp} -let animation = [| - "|.....|" ; - "|o....|" ; - "|oo...|" ; - "|ooo..|" ; - "|.ooo.|" ; - "|..ooo|" ; - "|...oo|" ; - "|....o|" ; - "|.....|" ; - "|.....|" ; - "|.....|" ; - "|.....|" ; -|] +let animation = + [| "|.....|"; + "|o....|"; + "|oo...|"; + "|ooo..|"; + "|.ooo.|"; + "|..ooo|"; + "|...oo|"; + "|....o|"; + "|.....|"; + "|.....|"; + "|.....|"; + "|.....|" |] let init = String.make (String.length animation.(0)) '\ ' + let clean = String.make (String.length animation.(0)) '\b' + let animation = Array.map (fun x -> clean ^ x) animation + let animation_size = Array.length animation + let duration = 1200 / animation_size let generate_with_animation ppf target = @@ -79,16 +86,14 @@ let generate_with_animation ppf target = let count = ref 10000 in let rec loop n = let start = Mtime_clock.counter () in - Format.fprintf ppf "%s%!" animation.(n mod animation_size); + Format.fprintf ppf "%s%!" animation.(n mod animation_size) ; try generate ~max:!count target with Not_found -> let time = Mtime.Span.to_ms (Mtime_clock.count start) in count := - if time <= 0. then - !count * 10 - else - !count * duration / int_of_float time ; - loop (n+1) + if time <= 0. then !count * 10 + else !count * duration / int_of_float time ; + loop (n + 1) in let id = loop 0 in Format.fprintf ppf "%s%s\n%!" 
clean init ; diff --git a/src/lib_base/p2p_identity.mli b/src/lib_base/p2p_identity.mli index 1a7a0d2e7629789c6891d20914d71c5b78ed1c56..0513cf748bb49535dbf72c36160b11bb7ee05d0e 100644 --- a/src/lib_base/p2p_identity.mli +++ b/src/lib_base/p2p_identity.mli @@ -23,24 +23,22 @@ (* *) (*****************************************************************************) -type t = { - peer_id : P2p_peer.Id.t ; - public_key : Crypto_box.public_key ; - secret_key : Crypto_box.secret_key ; - proof_of_work_stamp : Crypto_box.nonce ; -} (** Type of an identity, comprising a peer_id, a crypto keypair, and a proof of work stamp with enough difficulty so that the network accept this identity as genuine. *) +type t = { + peer_id : P2p_peer.Id.t; + public_key : Crypto_box.public_key; + secret_key : Crypto_box.secret_key; + proof_of_work_stamp : Crypto_box.nonce +} val encoding : t Data_encoding.t -val generate : Crypto_box.target -> t (** [generate target] is a freshly minted identity whose proof of work stamp difficulty is at least equal to [target]. *) +val generate : Crypto_box.target -> t -val generate_with_animation : - Format.formatter -> Crypto_box.target -> t (** [generate_with_animation ppf target] is a freshly minted identity whose proof of work stamp difficulty is at least equal to [target]. *) - +val generate_with_animation : Format.formatter -> Crypto_box.target -> t diff --git a/src/lib_base/p2p_peer.ml b/src/lib_base/p2p_peer.ml index df63ade893946a0067c7c94792004335665f201c..a72bed01fb08471ed9b7f39d9e73538826642ccc 100644 --- a/src/lib_base/p2p_peer.ml +++ b/src/lib_base/p2p_peer.ml @@ -24,112 +24,137 @@ (*****************************************************************************) module Id = P2p_peer_id - module Table = Id.Table -module InitializationTable = Error_table.Make(Table) +module InitializationTable = Error_table.Make (Table) module Map = Id.Map module Set = Id.Set module Filter = struct - - type t = - | Accepted - | Running - | Disconnected + type t = Accepted | Running | Disconnected let rpc_arg = RPC_arg.make ~name:"p2p.point.state_filter" ~destruct:(function - | "accepted" -> Ok Accepted - | "running" -> Ok Running - | "disconnected" -> Ok Disconnected - | s -> Error (Format.asprintf "Invalid state: %s" s)) + | "accepted" -> + Ok Accepted + | "running" -> + Ok Running + | "disconnected" -> + Ok Disconnected + | s -> + Error (Format.asprintf "Invalid state: %s" s)) ~construct:(function - | Accepted -> "accepted" - | Running -> "running" - | Disconnected -> "disconnected") + | Accepted -> + "accepted" + | Running -> + "running" + | Disconnected -> + "disconnected") () - end module State = struct - - type t = - | Accepted - | Running - | Disconnected + type t = Accepted | Running | Disconnected let pp_digram ppf = function - | Accepted -> Format.fprintf ppf "⚎" - | Running -> Format.fprintf ppf "⚌" - | Disconnected -> Format.fprintf ppf "⚏" + | Accepted -> + Format.fprintf ppf "⚎" + | Running -> + Format.fprintf ppf "⚌" + | Disconnected -> + Format.fprintf ppf "⚏" let encoding = let open Data_encoding in - string_enum [ - "accepted", Accepted ; - "running", Running ; - "disconnected", Disconnected ; - ] + string_enum + [ ("accepted", Accepted); + ("running", Running); + ("disconnected", Disconnected) ] let raw_filter (f : Filter.t) (s : t) = - match f, s with - | Accepted, Accepted -> true - | Accepted, (Running | Disconnected) - | (Running | Disconnected), Accepted -> false - | Running, Running -> true - | Disconnected, Disconnected -> true - | Running, Disconnected - | 
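The identity generator above keeps retrying [generate] with a growing [?max] proof-of-work bound, rescaling the bound so that each attempt costs roughly [duration] milliseconds while the spinner is redrawn. A hedged sketch of a caller (the formatter choice and the printed message are illustrative, not part of the patch):

let _fresh_identity () =
  let id =
    P2p_identity.generate_with_animation
      Format.err_formatter
      Crypto_box.default_target
  in
  (* print the freshly minted peer id once the proof of work is found *)
  Format.printf "minted peer id: %a@." P2p_peer.Id.pp id.P2p_identity.peer_id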
Disconnected, Running -> false - - let filter filters state = - List.exists (fun f -> raw_filter f state) filters - + match (f, s) with + | (Accepted, Accepted) -> + true + | (Accepted, (Running | Disconnected)) + | ((Running | Disconnected), Accepted) -> + false + | (Running, Running) -> + true + | (Disconnected, Disconnected) -> + true + | (Running, Disconnected) | (Disconnected, Running) -> + false + + let filter filters state = List.exists (fun f -> raw_filter f state) filters end module Info = struct - type ('peer_meta, 'conn_meta) t = { - score : float ; - trusted : bool ; - conn_metadata : 'conn_meta option ; - peer_metadata : 'peer_meta ; - state : State.t ; - id_point : P2p_connection.Id.t option ; - stat : P2p_stat.t ; - last_failed_connection : (P2p_connection.Id.t * Time.System.t) option ; - last_rejected_connection : (P2p_connection.Id.t * Time.System.t) option ; - last_established_connection : (P2p_connection.Id.t * Time.System.t) option ; - last_disconnection : (P2p_connection.Id.t * Time.System.t) option ; - last_seen : (P2p_connection.Id.t * Time.System.t) option ; - last_miss : (P2p_connection.Id.t * Time.System.t) option ; + score : float; + trusted : bool; + conn_metadata : 'conn_meta option; + peer_metadata : 'peer_meta; + state : State.t; + id_point : P2p_connection.Id.t option; + stat : P2p_stat.t; + last_failed_connection : (P2p_connection.Id.t * Time.System.t) option; + last_rejected_connection : (P2p_connection.Id.t * Time.System.t) option; + last_established_connection : (P2p_connection.Id.t * Time.System.t) option; + last_disconnection : (P2p_connection.Id.t * Time.System.t) option; + last_seen : (P2p_connection.Id.t * Time.System.t) option; + last_miss : (P2p_connection.Id.t * Time.System.t) option } let encoding peer_metadata_encoding conn_metadata_encoding = let open Data_encoding in conv - (fun ( - { score ; trusted ; conn_metadata ; peer_metadata ; - state ; id_point ; stat ; - last_failed_connection ; last_rejected_connection ; - last_established_connection ; last_disconnection ; - last_seen ; last_miss }) -> - ((score, trusted, conn_metadata, peer_metadata, - state, id_point, stat), - (last_failed_connection, last_rejected_connection, - last_established_connection, last_disconnection, - last_seen, last_miss))) - (fun ((score, trusted, conn_metadata, peer_metadata, - state, id_point, stat), - (last_failed_connection, last_rejected_connection, - last_established_connection, last_disconnection, - last_seen, last_miss)) -> - { score ; trusted ; conn_metadata ; peer_metadata ; - state ; id_point ; stat ; - last_failed_connection ; last_rejected_connection ; - last_established_connection ; last_disconnection ; - last_seen ; last_miss }) + (fun { score; + trusted; + conn_metadata; + peer_metadata; + state; + id_point; + stat; + last_failed_connection; + last_rejected_connection; + last_established_connection; + last_disconnection; + last_seen; + last_miss } -> + ( (score, trusted, conn_metadata, peer_metadata, state, id_point, stat), + ( last_failed_connection, + last_rejected_connection, + last_established_connection, + last_disconnection, + last_seen, + last_miss ) )) + (fun ( ( score, + trusted, + conn_metadata, + peer_metadata, + state, + id_point, + stat ), + ( last_failed_connection, + last_rejected_connection, + last_established_connection, + last_disconnection, + last_seen, + last_miss ) ) -> + { score; + trusted; + conn_metadata; + peer_metadata; + state; + id_point; + stat; + last_failed_connection; + last_rejected_connection; + 
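Since [filter] above is just [List.exists] over [raw_filter], an empty filter list matches no state and a non-empty list behaves as a set of accepted states. A small sanity check, grounded only in the P2p_peer module from this diff (the function name is illustrative):

let _peer_state_filter_check () =
  let open P2p_peer in
  (* Running is in the accepted set, so the first filter matches *)
  assert (State.filter [Filter.Accepted; Filter.Running] State.Running) ;
  (* an empty filter list accepts no state at all *)
  assert (not (State.filter [] State.Disconnected))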
last_established_connection; + last_disconnection; + last_seen; + last_miss }) (merge_objs (obj7 (req "score" float) @@ -140,17 +165,27 @@ module Info = struct (opt "reachable_at" P2p_connection.Id.encoding) (req "stat" P2p_stat.encoding)) (obj6 - (opt "last_failed_connection" (tup2 P2p_connection.Id.encoding Time.System.encoding)) - (opt "last_rejected_connection" (tup2 P2p_connection.Id.encoding Time.System.encoding)) - (opt "last_established_connection" (tup2 P2p_connection.Id.encoding Time.System.encoding)) - (opt "last_disconnection" (tup2 P2p_connection.Id.encoding Time.System.encoding)) - (opt "last_seen" (tup2 P2p_connection.Id.encoding Time.System.encoding)) - (opt "last_miss" (tup2 P2p_connection.Id.encoding Time.System.encoding)))) - + (opt + "last_failed_connection" + (tup2 P2p_connection.Id.encoding Time.System.encoding)) + (opt + "last_rejected_connection" + (tup2 P2p_connection.Id.encoding Time.System.encoding)) + (opt + "last_established_connection" + (tup2 P2p_connection.Id.encoding Time.System.encoding)) + (opt + "last_disconnection" + (tup2 P2p_connection.Id.encoding Time.System.encoding)) + (opt + "last_seen" + (tup2 P2p_connection.Id.encoding Time.System.encoding)) + (opt + "last_miss" + (tup2 P2p_connection.Id.encoding Time.System.encoding)))) end module Pool_event = struct - type kind = | Accepting_request | Rejecting_request @@ -160,32 +195,30 @@ module Pool_event = struct | External_disconnection let kind_encoding = - Data_encoding.string_enum [ - "incoming_request", Accepting_request ; - "rejecting_request", Rejecting_request ; - "request_rejected", Request_rejected ; - "connection_established", Connection_established ; - "disconnection", Disconnection ; - "external_disconnection", External_disconnection ; - ] + Data_encoding.string_enum + [ ("incoming_request", Accepting_request); + ("rejecting_request", Rejecting_request); + ("request_rejected", Request_rejected); + ("connection_established", Connection_established); + ("disconnection", Disconnection); + ("external_disconnection", External_disconnection) ] type t = { - kind : kind ; - timestamp : Time.System.t ; - point : P2p_connection.Id.t ; + kind : kind; + timestamp : Time.System.t; + point : P2p_connection.Id.t } let encoding = let open Data_encoding in conv - (fun { kind ; timestamp ; point = (addr, port) } -> - (kind, timestamp, addr, port)) + (fun {kind; timestamp; point = (addr, port)} -> + (kind, timestamp, addr, port)) (fun (kind, timestamp, addr, port) -> - { kind ; timestamp ; point = (addr, port) }) + {kind; timestamp; point = (addr, port)}) (obj4 (req "kind" kind_encoding) (req "timestamp" Time.System.encoding) (req "addr" P2p_addr.encoding) (opt "port" uint16)) - end diff --git a/src/lib_base/p2p_peer.mli b/src/lib_base/p2p_peer.mli index 830e26e58bc5b676aa0c467d67304a75ab5d7f8a..9e729218b86d0abf2c2ac647a9a0114c2b9ef7f3 100644 --- a/src/lib_base/p2p_peer.mli +++ b/src/lib_base/p2p_peer.mli @@ -24,83 +24,69 @@ (*****************************************************************************) module Id = P2p_peer_id - module Map = Id.Map module Set = Id.Set module Table = Id.Table -module InitializationTable : Error_table.S - with type key = Table.key -module Filter : sig +module InitializationTable : Error_table.S with type key = Table.key - type t = - | Accepted - | Running - | Disconnected +module Filter : sig + type t = Accepted | Running | Disconnected val rpc_arg : t RPC_arg.t - end module State : sig - - type t = - | Accepted - | Running - | Disconnected + type t = Accepted | Running | 
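A hedged sketch of building and serialising one peer-pool event with the record and [encoding] just above (not part of the patch): the address and timestamp are taken as parameters to keep the sketch self-contained, and 9732 is only an illustrative port.

let _peer_event_json (addr : P2p_addr.t) (now : Time.System.t) =
  let event =
    P2p_peer.Pool_event.
      {kind = Disconnection; timestamp = now; point = (addr, Some 9732)}
  in
  (* the obj4 encoding flattens the point into "addr" and an optional "port" *)
  Data_encoding.Json.construct P2p_peer.Pool_event.encoding event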
Disconnected val pp_digram : Format.formatter -> t -> unit + val encoding : t Data_encoding.t val filter : Filter.t list -> t -> bool - end module Info : sig - type ('peer_meta, 'conn_meta) t = { - score : float ; - trusted : bool ; - conn_metadata : 'conn_meta option ; - peer_metadata : 'peer_meta ; - state : State.t ; - id_point : P2p_connection.Id.t option ; - stat : P2p_stat.t ; - last_failed_connection : (P2p_connection.Id.t * Time.System.t) option ; - last_rejected_connection : (P2p_connection.Id.t * Time.System.t) option ; - last_established_connection : (P2p_connection.Id.t * Time.System.t) option ; - last_disconnection : (P2p_connection.Id.t * Time.System.t) option ; - last_seen : (P2p_connection.Id.t * Time.System.t) option ; - last_miss : (P2p_connection.Id.t * Time.System.t) option ; + score : float; + trusted : bool; + conn_metadata : 'conn_meta option; + peer_metadata : 'peer_meta; + state : State.t; + id_point : P2p_connection.Id.t option; + stat : P2p_stat.t; + last_failed_connection : (P2p_connection.Id.t * Time.System.t) option; + last_rejected_connection : (P2p_connection.Id.t * Time.System.t) option; + last_established_connection : (P2p_connection.Id.t * Time.System.t) option; + last_disconnection : (P2p_connection.Id.t * Time.System.t) option; + last_seen : (P2p_connection.Id.t * Time.System.t) option; + last_miss : (P2p_connection.Id.t * Time.System.t) option } - val encoding : 'peer_meta Data_encoding.t -> - 'conn_meta Data_encoding.t -> ('peer_meta, 'conn_meta) t Data_encoding.t - + val encoding : + 'peer_meta Data_encoding.t -> + 'conn_meta Data_encoding.t -> + ('peer_meta, 'conn_meta) t Data_encoding.t end module Pool_event : sig - type kind = | Accepting_request - (** We accepted a connection after authentifying the remote peer. *) + (** We accepted a connection after authentifying the remote peer. *) | Rejecting_request - (** We rejected a connection after authentifying the remote peer. *) - | Request_rejected - (** The remote peer rejected our connection. *) + (** We rejected a connection after authentifying the remote peer. *) + | Request_rejected (** The remote peer rejected our connection. *) | Connection_established - (** We successfully established a authentified connection. *) - | Disconnection - (** We decided to close the connection. *) + (** We successfully established a authentified connection. *) + | Disconnection (** We decided to close the connection. *) | External_disconnection - (** The connection was closed for external reason. *) + (** The connection was closed for external reason. 
*) type t = { - kind : kind ; - timestamp : Time.System.t ; - point : P2p_connection.Id.t ; + kind : kind; + timestamp : Time.System.t; + point : P2p_connection.Id.t } val encoding : t Data_encoding.t - end diff --git a/src/lib_base/p2p_peer_id.ml b/src/lib_base/p2p_peer_id.ml index 3d9c4bda7429c3e07fe6fba1439707f8572b8c42..407be70cb75886672964dcf309ee0c73c090e726 100644 --- a/src/lib_base/p2p_peer_id.ml +++ b/src/lib_base/p2p_peer_id.ml @@ -32,16 +32,26 @@ let rpc_arg = "peer_id" let pp_source ppf = function - | None -> () - | Some peer -> Format.fprintf ppf " from peer %a" pp peer + | None -> + () + | Some peer -> + Format.fprintf ppf " from peer %a" pp peer module Logging = struct - include Internal_event.Legacy_logging.Make_semantic - (struct let name = "node.distributed_db.p2p_peer_id" end) + include Internal_event.Legacy_logging.Make_semantic (struct + let name = "node.distributed_db.p2p_peer_id" + end) + let mk_tag pp = Tag.def ~doc:"P2P peer ID" "p2p_peer_id" pp + let tag = mk_tag pp_short - let tag_opt = mk_tag (fun ppf -> function - | None -> () - | Some peer -> pp_short ppf peer) - let tag_source = Tag.def ~doc:"Peer which provided information" "p2p_peer_id_source" pp_source + + let tag_opt = + mk_tag (fun ppf -> function None -> () | Some peer -> pp_short ppf peer) + + let tag_source = + Tag.def + ~doc:"Peer which provided information" + "p2p_peer_id_source" + pp_source end diff --git a/src/lib_base/p2p_peer_id.mli b/src/lib_base/p2p_peer_id.mli index 0162577226d456ba25313f3d89db62a0aa2c3ea0..a5f61bea14b821dee6d833d95770cad03dea5350 100644 --- a/src/lib_base/p2p_peer_id.mli +++ b/src/lib_base/p2p_peer_id.mli @@ -25,9 +25,12 @@ include Tezos_crypto.S.HASH with type t = Crypto_box.Public_key_hash.t -module Logging: sig +module Logging : sig include Internal_event.Legacy_logging.SEMLOG - val tag: t Tag.def - val tag_opt: t option Tag.def - val tag_source: t option Tag.def + + val tag : t Tag.def + + val tag_opt : t option Tag.def + + val tag_source : t option Tag.def end diff --git a/src/lib_base/p2p_point.ml b/src/lib_base/p2p_point.ml index 856acaa9fff161015cb87f06b4a33525aa020796..057c831fc47fa0b6accdc6a11ba0b6c314741753 100644 --- a/src/lib_base/p2p_point.ml +++ b/src/lib_base/p2p_point.ml @@ -25,87 +25,96 @@ (*****************************************************************************) module Id = struct - (* A net point (address x port). 
*) type t = P2p_addr.t * P2p_addr.port + let compare (a1, p1) (a2, p2) = - match Ipaddr.V6.compare a1 a2 with - | 0 -> p1 - p2 - | x -> x + match Ipaddr.V6.compare a1 a2 with 0 -> p1 - p2 | x -> x + let equal p1 p2 = compare p1 p2 = 0 + let hash = Hashtbl.hash + let pp ppf (addr, port) = match Ipaddr.v4_of_v6 addr with | Some addr -> Format.fprintf ppf "%a:%d" Ipaddr.V4.pp addr port | None -> Format.fprintf ppf "[%a]:%d" Ipaddr.V6.pp addr port + let pp_opt ppf = function - | None -> Format.pp_print_string ppf "none" - | Some point -> pp ppf point + | None -> + Format.pp_print_string ppf "none" + | Some point -> + pp ppf point + let pp_list ppf point_list = - Format.pp_print_list ~pp_sep:Format.pp_print_space pp ppf point_list + Format.pp_print_list ~pp_sep:Format.pp_print_space pp ppf point_list let is_local (addr, _) = Ipaddr.V6.is_private addr + let is_global (addr, _) = not @@ Ipaddr.V6.is_private addr let check_port port = - if TzString.mem_char port '[' || - TzString.mem_char port ']' || - TzString.mem_char port ':' then - invalid_arg "Utils.parse_addr_port (invalid character in port)" + if + TzString.mem_char port '[' || TzString.mem_char port ']' + || TzString.mem_char port ':' + then invalid_arg "Utils.parse_addr_port (invalid character in port)" let parse_addr_port s = let len = String.length s in - if len = 0 then - ("", "") - else if s.[0] = '[' then begin (* inline IPv6 *) + if len = 0 then ("", "") + else if s.[0] = '[' then ( + (* inline IPv6 *) match String.rindex_opt s ']' with | None -> invalid_arg "Utils.parse_addr_port (missing ']')" | Some pos -> let addr = String.sub s 1 (pos - 1) in let port = - if pos = len - 1 then - "" - else if s.[pos+1] <> ':' then + if pos = len - 1 then "" + else if s.[pos + 1] <> ':' then invalid_arg "Utils.parse_addr_port (unexpected char after ']')" - else - String.sub s (pos + 2) (len - pos - 2) in - check_port port ; - addr, port - end else begin + else String.sub s (pos + 2) (len - pos - 2) + in + check_port port ; (addr, port) ) + else match String.rindex_opt s ']' with | Some _pos -> invalid_arg "Utils.parse_addr_port (unexpected char ']')" - | None -> - match String.index s ':' with - | exception _ -> s, "" - | pos -> - match String.index_from s (pos+1) ':' with - | exception _ -> - let addr = String.sub s 0 pos in - let port = String.sub s (pos + 1) (len - pos - 1) in - check_port port ; - addr, port - | _pos -> - invalid_arg "Utils.parse_addr_port: IPv6 addresses must be bracketed" - end + | None -> ( + match String.index s ':' with + | exception _ -> + (s, "") + | pos -> ( + match String.index_from s (pos + 1) ':' with + | exception _ -> + let addr = String.sub s 0 pos in + let port = String.sub s (pos + 1) (len - pos - 1) in + check_port port ; (addr, port) + | _pos -> + invalid_arg + "Utils.parse_addr_port: IPv6 addresses must be bracketed" ) ) let of_string_exn str = - let addr, port = parse_addr_port str in + let (addr, port) = parse_addr_port str in let port = int_of_string port in - if port < 0 && port > 1 lsl 16 - 1 then + if port < 0 && port > (1 lsl 16) - 1 then invalid_arg "port must be between 0 and 65535" ; match Ipaddr.of_string_exn addr with - | V4 addr -> Ipaddr.v6_of_v4 addr, port - | V6 addr -> addr, port + | V4 addr -> + (Ipaddr.v6_of_v4 addr, port) + | V6 addr -> + (addr, port) let of_string str = try Ok (of_string_exn str) with - | Invalid_argument s -> Error s - | Failure s -> Error s - | _ -> Error "P2p_point.of_string" + | Invalid_argument s -> + Error s + | Failure s -> + Error s + | _ -> + Error 
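The parser above accepts both "host:port" and bracketed IPv6, rejects stray brackets or colons inside the port, and stores IPv4 addresses in their IPv6-mapped form before [pp] prints them back in dotted notation. A small usage sketch (the concrete addresses and ports are only examples):

let _point_parsing_example () =
  let v4 = P2p_point.Id.of_string_exn "127.0.0.1:9732" in
  let v6 = P2p_point.Id.of_string_exn "[::1]:9732" in
  (* prints "127.0.0.1:9732 and [::1]:9732" *)
  Format.printf "%a and %a@." P2p_point.Id.pp v4 P2p_point.Id.pp v6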
"P2p_point.of_string" let to_string saddr = Format.asprintf "%a" pp saddr @@ -119,7 +128,6 @@ module Id = struct ~destruct:of_string ~construct:to_string () - end module Map = Map.Make (Id) @@ -127,33 +135,35 @@ module Set = Set.Make (Id) module Table = Hashtbl.Make (Id) module Filter = struct - - type t = - | Requested - | Accepted - | Running - | Disconnected + type t = Requested | Accepted | Running | Disconnected let rpc_arg = RPC_arg.make ~name:"p2p.point.state_filter" ~destruct:(function - | "requested" -> Ok Requested - | "accepted" -> Ok Accepted - | "running" -> Ok Running - | "disconnected" -> Ok Disconnected - | s -> Error (Format.asprintf "Invalid state: %s" s)) + | "requested" -> + Ok Requested + | "accepted" -> + Ok Accepted + | "running" -> + Ok Running + | "disconnected" -> + Ok Disconnected + | s -> + Error (Format.asprintf "Invalid state: %s" s)) ~construct:(function - | Requested -> "requested" - | Accepted -> "accepted" - | Running -> "running" - | Disconnected -> "disconnected") + | Requested -> + "requested" + | Accepted -> + "accepted" + | Running -> + "running" + | Disconnected -> + "disconnected") () - end module State = struct - type t = | Requested | Accepted of P2p_peer_id.t @@ -161,125 +171,175 @@ module State = struct | Disconnected let of_p2p_peer_id = function - | Requested -> None - | Accepted pi -> Some pi - | Running pi -> Some pi - | Disconnected -> None + | Requested -> + None + | Accepted pi -> + Some pi + | Running pi -> + Some pi + | Disconnected -> + None let of_peerid_state state pi = - match state, pi with - | Requested, _ -> Requested - | Accepted _, Some pi -> Accepted pi - | Running _, Some pi -> Running pi - | Disconnected, _ -> Disconnected - | _ -> invalid_arg "state_of_state_peerid" + match (state, pi) with + | (Requested, _) -> + Requested + | (Accepted _, Some pi) -> + Accepted pi + | (Running _, Some pi) -> + Running pi + | (Disconnected, _) -> + Disconnected + | _ -> + invalid_arg "state_of_state_peerid" let pp_digram ppf = function - | Requested -> Format.fprintf ppf "⚎" - | Accepted _ -> Format.fprintf ppf "⚍" - | Running _ -> Format.fprintf ppf "⚌" - | Disconnected -> Format.fprintf ppf "⚏" + | Requested -> + Format.fprintf ppf "⚎" + | Accepted _ -> + Format.fprintf ppf "⚍" + | Running _ -> + Format.fprintf ppf "⚌" + | Disconnected -> + Format.fprintf ppf "⚏" let encoding = let open Data_encoding in let branch_encoding name obj = - conv (fun x -> (), x) (fun ((), x) -> x) - (merge_objs - (obj1 (req "event_kind" (constant name))) obj) in - union ~tag_size:`Uint8 [ - case (Tag 0) - ~title:"Requested" - (branch_encoding "requested" empty) - (function Requested -> Some () | _ -> None) - (fun () -> Requested) ; - case (Tag 1) - ~title:"Accepted" - (branch_encoding "accepted" - (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) - (function Accepted p2p_peer_id -> Some p2p_peer_id | _ -> None) - (fun p2p_peer_id -> Accepted p2p_peer_id) ; - case (Tag 2) - ~title:"Running" - (branch_encoding "running" - (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) - (function Running p2p_peer_id -> Some p2p_peer_id | _ -> None) - (fun p2p_peer_id -> Running p2p_peer_id) ; - case (Tag 3) - ~title:"Disconnected" - (branch_encoding "disconnected" empty) - (function Disconnected -> Some () | _ -> None) - (fun () -> Disconnected) ; - ] + conv + (fun x -> ((), x)) + (fun ((), x) -> x) + (merge_objs (obj1 (req "event_kind" (constant name))) obj) + in + union + ~tag_size:`Uint8 + [ case + (Tag 0) + ~title:"Requested" + (branch_encoding "requested" empty) + 
(function Requested -> Some () | _ -> None) + (fun () -> Requested); + case + (Tag 1) + ~title:"Accepted" + (branch_encoding + "accepted" + (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) + (function Accepted p2p_peer_id -> Some p2p_peer_id | _ -> None) + (fun p2p_peer_id -> Accepted p2p_peer_id); + case + (Tag 2) + ~title:"Running" + (branch_encoding + "running" + (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) + (function Running p2p_peer_id -> Some p2p_peer_id | _ -> None) + (fun p2p_peer_id -> Running p2p_peer_id); + case + (Tag 3) + ~title:"Disconnected" + (branch_encoding "disconnected" empty) + (function Disconnected -> Some () | _ -> None) + (fun () -> Disconnected) ] let raw_filter (f : Filter.t) (s : t) = - match f, s with - | Requested, Requested -> true - | Requested, (Accepted _ | Running _ | Disconnected) - | (Accepted | Running | Disconnected), Requested -> false - | Accepted, Accepted _-> true - | Accepted, (Running _ | Disconnected) - | (Running | Disconnected), Accepted _ -> false - | Running, Running _ -> true - | Disconnected, Disconnected -> true - | Running, Disconnected - | Disconnected, Running _ -> false - - let filter filters state = - List.exists (fun f -> raw_filter f state) filters - + match (f, s) with + | (Requested, Requested) -> + true + | (Requested, (Accepted _ | Running _ | Disconnected)) + | ((Accepted | Running | Disconnected), Requested) -> + false + | (Accepted, Accepted _) -> + true + | (Accepted, (Running _ | Disconnected)) + | ((Running | Disconnected), Accepted _) -> + false + | (Running, Running _) -> + true + | (Disconnected, Disconnected) -> + true + | (Running, Disconnected) | (Disconnected, Running _) -> + false + + let filter filters state = List.exists (fun f -> raw_filter f state) filters end module Info = struct - type t = { - trusted : bool ; - greylisted_until : Time.System.t ; - state : State.t ; - last_failed_connection : Time.System.t option ; - last_rejected_connection : (P2p_peer_id.t * Time.System.t) option ; - last_established_connection : (P2p_peer_id.t * Time.System.t) option ; - last_disconnection : (P2p_peer_id.t * Time.System.t) option ; - last_seen : (P2p_peer_id.t * Time.System.t) option ; - last_miss : Time.System.t option ; + trusted : bool; + greylisted_until : Time.System.t; + state : State.t; + last_failed_connection : Time.System.t option; + last_rejected_connection : (P2p_peer_id.t * Time.System.t) option; + last_established_connection : (P2p_peer_id.t * Time.System.t) option; + last_disconnection : (P2p_peer_id.t * Time.System.t) option; + last_seen : (P2p_peer_id.t * Time.System.t) option; + last_miss : Time.System.t option } let encoding = let open Data_encoding in conv - (fun { trusted ; greylisted_until ; state ; - last_failed_connection ; last_rejected_connection ; - last_established_connection ; last_disconnection ; - last_seen ; last_miss } -> + (fun { trusted; + greylisted_until; + state; + last_failed_connection; + last_rejected_connection; + last_established_connection; + last_disconnection; + last_seen; + last_miss } -> let p2p_peer_id = State.of_p2p_peer_id state in - (trusted, greylisted_until, state, p2p_peer_id, - last_failed_connection, last_rejected_connection, - last_established_connection, last_disconnection, - last_seen, last_miss)) - (fun (trusted, greylisted_until, state, p2p_peer_id, - last_failed_connection, last_rejected_connection, - last_established_connection, last_disconnection, - last_seen, last_miss) -> + ( trusted, + greylisted_until, + state, + p2p_peer_id, + 
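[of_p2p_peer_id] projects the peer id out of a point state and [of_peerid_state] re-attaches it, so composing the two is the identity on [Accepted]/[Running] states. A minimal check grounded only in the definitions above (the binding name is illustrative):

let _point_state_round_trip (pi : P2p_peer_id.t) =
  let s = P2p_point.State.Running pi in
  (* evaluates back to [Running pi] *)
  P2p_point.State.of_peerid_state s (P2p_point.State.of_p2p_peer_id s)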
last_failed_connection, + last_rejected_connection, + last_established_connection, + last_disconnection, + last_seen, + last_miss )) + (fun ( trusted, + greylisted_until, + state, + p2p_peer_id, + last_failed_connection, + last_rejected_connection, + last_established_connection, + last_disconnection, + last_seen, + last_miss ) -> let state = State.of_peerid_state state p2p_peer_id in - { trusted ; greylisted_until ; state ; - last_failed_connection ; last_rejected_connection ; - last_established_connection ; last_disconnection ; - last_seen ; last_miss }) + { trusted; + greylisted_until; + state; + last_failed_connection; + last_rejected_connection; + last_established_connection; + last_disconnection; + last_seen; + last_miss }) (obj10 (req "trusted" bool) (dft "greylisted_until" Time.System.encoding Time.System.epoch) (req "state" State.encoding) (opt "p2p_peer_id" P2p_peer_id.encoding) (opt "last_failed_connection" Time.System.encoding) - (opt "last_rejected_connection" (tup2 P2p_peer_id.encoding Time.System.encoding)) - (opt "last_established_connection" (tup2 P2p_peer_id.encoding Time.System.encoding)) - (opt "last_disconnection" (tup2 P2p_peer_id.encoding Time.System.encoding)) + (opt + "last_rejected_connection" + (tup2 P2p_peer_id.encoding Time.System.encoding)) + (opt + "last_established_connection" + (tup2 P2p_peer_id.encoding Time.System.encoding)) + (opt + "last_disconnection" + (tup2 P2p_peer_id.encoding Time.System.encoding)) (opt "last_seen" (tup2 P2p_peer_id.encoding Time.System.encoding)) (opt "last_miss" Time.System.encoding)) - end module Pool_event = struct - type kind = | Outgoing_request | Accepting_request of P2p_peer_id.t @@ -292,54 +352,81 @@ module Pool_event = struct let kind_encoding = let open Data_encoding in let branch_encoding name obj = - conv (fun x -> (), x) (fun ((), x) -> x) - (merge_objs - (obj1 (req "event_kind" (constant name))) obj) in - union ~tag_size:`Uint8 [ - case (Tag 0) - ~title:"Outgoing_request" - (branch_encoding "outgoing_request" empty) - (function Outgoing_request -> Some () | _ -> None) - (fun () -> Outgoing_request) ; - case (Tag 1) - ~title:"Accepting_request" - (branch_encoding "accepting_request" - (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) - (function Accepting_request p2p_peer_id -> Some p2p_peer_id | _ -> None) - (fun p2p_peer_id -> Accepting_request p2p_peer_id) ; - case (Tag 2) - ~title:"Rejecting_request" - (branch_encoding "rejecting_request" - (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) - (function Rejecting_request p2p_peer_id -> Some p2p_peer_id | _ -> None) - (fun p2p_peer_id -> Rejecting_request p2p_peer_id) ; - case (Tag 3) - ~title:"Rejecting_rejected" - (branch_encoding "request_rejected" - (obj1 (opt "p2p_peer_id" P2p_peer_id.encoding))) - (function Request_rejected p2p_peer_id -> Some p2p_peer_id | _ -> None) - (fun p2p_peer_id -> Request_rejected p2p_peer_id) ; - case (Tag 4) - ~title:"Connection_established" - (branch_encoding "rejecting_request" - (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) - (function Connection_established p2p_peer_id -> Some p2p_peer_id | _ -> None) - (fun p2p_peer_id -> Connection_established p2p_peer_id) ; - case (Tag 5) - ~title:"Disconnection" - (branch_encoding "rejecting_request" - (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) - (function Disconnection p2p_peer_id -> Some p2p_peer_id | _ -> None) - (fun p2p_peer_id -> Disconnection p2p_peer_id) ; - case (Tag 6) - ~title:"External_disconnection" - (branch_encoding "rejecting_request" - (obj1 (req "p2p_peer_id" 
P2p_peer_id.encoding))) - (function External_disconnection p2p_peer_id -> Some p2p_peer_id | _ -> None) - (fun p2p_peer_id -> External_disconnection p2p_peer_id) ; - ] + conv + (fun x -> ((), x)) + (fun ((), x) -> x) + (merge_objs (obj1 (req "event_kind" (constant name))) obj) + in + union + ~tag_size:`Uint8 + [ case + (Tag 0) + ~title:"Outgoing_request" + (branch_encoding "outgoing_request" empty) + (function Outgoing_request -> Some () | _ -> None) + (fun () -> Outgoing_request); + case + (Tag 1) + ~title:"Accepting_request" + (branch_encoding + "accepting_request" + (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) + (function + | Accepting_request p2p_peer_id -> Some p2p_peer_id | _ -> None) + (fun p2p_peer_id -> Accepting_request p2p_peer_id); + case + (Tag 2) + ~title:"Rejecting_request" + (branch_encoding + "rejecting_request" + (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) + (function + | Rejecting_request p2p_peer_id -> Some p2p_peer_id | _ -> None) + (fun p2p_peer_id -> Rejecting_request p2p_peer_id); + case + (Tag 3) + ~title:"Rejecting_rejected" + (branch_encoding + "request_rejected" + (obj1 (opt "p2p_peer_id" P2p_peer_id.encoding))) + (function + | Request_rejected p2p_peer_id -> Some p2p_peer_id | _ -> None) + (fun p2p_peer_id -> Request_rejected p2p_peer_id); + case + (Tag 4) + ~title:"Connection_established" + (branch_encoding + "rejecting_request" + (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) + (function + | Connection_established p2p_peer_id -> + Some p2p_peer_id + | _ -> + None) + (fun p2p_peer_id -> Connection_established p2p_peer_id); + case + (Tag 5) + ~title:"Disconnection" + (branch_encoding + "rejecting_request" + (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) + (function + | Disconnection p2p_peer_id -> Some p2p_peer_id | _ -> None) + (fun p2p_peer_id -> Disconnection p2p_peer_id); + case + (Tag 6) + ~title:"External_disconnection" + (branch_encoding + "rejecting_request" + (obj1 (req "p2p_peer_id" P2p_peer_id.encoding))) + (function + | External_disconnection p2p_peer_id -> + Some p2p_peer_id + | _ -> + None) + (fun p2p_peer_id -> External_disconnection p2p_peer_id) ] type t = kind Time.System.stamped - let encoding = Time.System.stamped_encoding kind_encoding + let encoding = Time.System.stamped_encoding kind_encoding end diff --git a/src/lib_base/p2p_point.mli b/src/lib_base/p2p_point.mli index a0408a0c112e745e050171240fa65f4ffcc68fc7..c328a93f2fd1bdd7ac54e89e997dccec75efb55e 100644 --- a/src/lib_base/p2p_point.mli +++ b/src/lib_base/p2p_point.mli @@ -25,44 +25,48 @@ (*****************************************************************************) module Id : sig - type t = P2p_addr.t * P2p_addr.port + val compare : t -> t -> int - val equal : t -> t -> bool + + val equal : t -> t -> bool val pp : Format.formatter -> t -> unit + val pp_opt : Format.formatter -> t option -> unit + val pp_list : Format.formatter -> t list -> unit val of_string_exn : string -> t + val of_string : string -> (t, string) result + val to_string : t -> string + val encoding : t Data_encoding.t + val is_local : t -> bool + val is_global : t -> bool + val parse_addr_port : string -> string * string val rpc_arg : t RPC_arg.t end module Map : Map.S with type key = Id.t + module Set : Set.S with type elt = Id.t + module Table : Hashtbl.S with type key = Id.t module Filter : sig - - type t = - | Requested - | Accepted - | Running - | Disconnected + type t = Requested | Accepted | Running | Disconnected val rpc_arg : t RPC_arg.t - end module State : sig - type t = | Requested | 
Accepted of P2p_peer_id.t @@ -70,55 +74,49 @@ module State : sig | Disconnected val pp_digram : Format.formatter -> t -> unit + val encoding : t Data_encoding.t val of_p2p_peer_id : t -> P2p_peer_id.t option + val of_peerid_state : t -> P2p_peer_id.t option -> t val filter : Filter.t list -> t -> bool - end module Info : sig - type t = { - trusted : bool ; - greylisted_until : Time.System.t ; - state : State.t ; - last_failed_connection : Time.System.t option ; - last_rejected_connection : (P2p_peer_id.t * Time.System.t) option ; - last_established_connection : (P2p_peer_id.t * Time.System.t) option ; - last_disconnection : (P2p_peer_id.t * Time.System.t) option ; - last_seen : (P2p_peer_id.t * Time.System.t) option ; - last_miss : Time.System.t option ; + trusted : bool; + greylisted_until : Time.System.t; + state : State.t; + last_failed_connection : Time.System.t option; + last_rejected_connection : (P2p_peer_id.t * Time.System.t) option; + last_established_connection : (P2p_peer_id.t * Time.System.t) option; + last_disconnection : (P2p_peer_id.t * Time.System.t) option; + last_seen : (P2p_peer_id.t * Time.System.t) option; + last_miss : Time.System.t option } - val encoding: t Data_encoding.t - + val encoding : t Data_encoding.t end module Pool_event : sig - type kind = - | Outgoing_request - (** We initiated a connection. *) + | Outgoing_request (** We initiated a connection. *) | Accepting_request of P2p_peer_id.t - (** We accepted a connection after authentifying the remote peer. *) + (** We accepted a connection after authentifying the remote peer. *) | Rejecting_request of P2p_peer_id.t - (** We rejected a connection after authentifying the remote peer. *) + (** We rejected a connection after authentifying the remote peer. *) | Request_rejected of P2p_peer_id.t option - (** The remote peer rejected our connection. *) + (** The remote peer rejected our connection. *) | Connection_established of P2p_peer_id.t - (** We successfully established a authentified connection. *) + (** We successfully established a authentified connection. *) | Disconnection of P2p_peer_id.t - (** We decided to close the connection. *) + (** We decided to close the connection. *) | External_disconnection of P2p_peer_id.t - (** The connection was closed for external reason. *) + (** The connection was closed for external reason. *) type t = kind Time.System.stamped val encoding : t Data_encoding.t - end - - diff --git a/src/lib_base/p2p_stat.ml b/src/lib_base/p2p_stat.ml index cd0c327ce4b7b9ce33371fd53edec091adfd8bb3..fea3fe60ea2a4f151bf5cd1bd5c0ace4ee5a923b 100644 --- a/src/lib_base/p2p_stat.ml +++ b/src/lib_base/p2p_stat.ml @@ -24,55 +24,50 @@ (*****************************************************************************) type t = { - total_sent : int64 ; - total_recv : int64 ; - current_inflow : int ; - current_outflow : int ; + total_sent : int64; + total_recv : int64; + current_inflow : int; + current_outflow : int } -let empty = { - total_sent = 0L ; - total_recv = 0L ; - current_inflow = 0 ; - current_outflow = 0 ; -} +let empty = + {total_sent = 0L; total_recv = 0L; current_inflow = 0; current_outflow = 0} let print_size ppf sz = - let ratio n = (float_of_int sz /. float_of_int (1 lsl n)) in - if sz < 1 lsl 10 then - Format.fprintf ppf "%d B" sz - else if sz < 1 lsl 20 then - Format.fprintf ppf "%.2f kiB" (ratio 10) - else - Format.fprintf ppf "%.2f MiB" (ratio 20) + let ratio n = float_of_int sz /. 
float_of_int (1 lsl n) in + if sz < 1 lsl 10 then Format.fprintf ppf "%d B" sz + else if sz < 1 lsl 20 then Format.fprintf ppf "%.2f kiB" (ratio 10) + else Format.fprintf ppf "%.2f MiB" (ratio 20) let print_size64 ppf sz = let open Int64 in - let ratio n = (to_float sz /. float_of_int (1 lsl n)) in - if sz < shift_left 1L 10 then - Format.fprintf ppf "%Ld B" sz - else if sz < shift_left 1L 20 then - Format.fprintf ppf "%.2f kiB" (ratio 10) - else if sz < shift_left 1L 30 then - Format.fprintf ppf "%.2f MiB" (ratio 20) - else if sz < shift_left 1L 40 then - Format.fprintf ppf "%.2f GiB" (ratio 30) - else - Format.fprintf ppf "%.2f TiB" (ratio 40) + let ratio n = to_float sz /. float_of_int (1 lsl n) in + if sz < shift_left 1L 10 then Format.fprintf ppf "%Ld B" sz + else if sz < shift_left 1L 20 then Format.fprintf ppf "%.2f kiB" (ratio 10) + else if sz < shift_left 1L 30 then Format.fprintf ppf "%.2f MiB" (ratio 20) + else if sz < shift_left 1L 40 then Format.fprintf ppf "%.2f GiB" (ratio 30) + else Format.fprintf ppf "%.2f TiB" (ratio 40) let pp ppf stat = - Format.fprintf ppf + Format.fprintf + ppf "↗ %a (%a/s) ↘ %a (%a/s)" - print_size64 stat.total_sent print_size stat.current_outflow - print_size64 stat.total_recv print_size stat.current_inflow + print_size64 + stat.total_sent + print_size + stat.current_outflow + print_size64 + stat.total_recv + print_size + stat.current_inflow let encoding = let open Data_encoding in conv - (fun { total_sent ; total_recv ; current_inflow ; current_outflow } -> - (total_sent, total_recv, current_inflow, current_outflow)) + (fun {total_sent; total_recv; current_inflow; current_outflow} -> + (total_sent, total_recv, current_inflow, current_outflow)) (fun (total_sent, total_recv, current_inflow, current_outflow) -> - { total_sent ; total_recv ; current_inflow ; current_outflow }) + {total_sent; total_recv; current_inflow; current_outflow}) (obj4 (req "total_sent" int64) (req "total_recv" int64) diff --git a/src/lib_base/p2p_stat.mli b/src/lib_base/p2p_stat.mli index 616464f3977c9ee1c45aa82e74480bb40609a0d2..28301c306f7ffa8e71803ea616400b238e6b5eca 100644 --- a/src/lib_base/p2p_stat.mli +++ b/src/lib_base/p2p_stat.mli @@ -26,12 +26,14 @@ (** Bandwidth usage statistics *) type t = { - total_sent : int64 ; - total_recv : int64 ; - current_inflow : int ; - current_outflow : int ; + total_sent : int64; + total_recv : int64; + current_inflow : int; + current_outflow : int } val empty : t + val pp : Format.formatter -> t -> unit + val encoding : t Data_encoding.t diff --git a/src/lib_base/p2p_version.ml b/src/lib_base/p2p_version.ml index 469b25236baf8e5ebcd50eb569e4abd4e9120162..efea7adca083b749bab6a3fb7a989172d404edbe 100644 --- a/src/lib_base/p2p_version.ml +++ b/src/lib_base/p2p_version.ml @@ -27,8 +27,9 @@ type t = int let pp = Format.pp_print_int + let encoding = Data_encoding.uint16 let zero = 0 -let supported = [ zero ] +let supported = [zero] diff --git a/src/lib_base/p2p_version.mli b/src/lib_base/p2p_version.mli index 614a6dbd68601713c063a74b626ad6658b53b5de..629d6b48e9f4f0be3104170c03497ddd05a6a469 100644 --- a/src/lib_base/p2p_version.mli +++ b/src/lib_base/p2p_version.mli @@ -29,9 +29,10 @@ (** An abstract version number for the low-level p2p layer. 
*) type t = private int -val pp: Format.formatter -> t -> unit -val encoding: t Data_encoding.t +val pp : Format.formatter -> t -> unit -val supported: t list +val encoding : t Data_encoding.t -val zero: t +val supported : t list + +val zero : t diff --git a/src/lib_base/preapply_result.ml b/src/lib_base/preapply_result.ml index 839f891e7ed1e97fef088f13a54dd6e83157b411..807accee311df3e6b02b244df0694ae221b8fa7b 100644 --- a/src/lib_base/preapply_result.ml +++ b/src/lib_base/preapply_result.ml @@ -24,52 +24,56 @@ (*****************************************************************************) type 'error t = { - applied: (Operation_hash.t * Operation.t) list; - refused: (Operation.t * 'error list) Operation_hash.Map.t; - branch_refused: (Operation.t * 'error list) Operation_hash.Map.t; - branch_delayed: (Operation.t * 'error list) Operation_hash.Map.t; + applied : (Operation_hash.t * Operation.t) list; + refused : (Operation.t * 'error list) Operation_hash.Map.t; + branch_refused : (Operation.t * 'error list) Operation_hash.Map.t; + branch_delayed : (Operation.t * 'error list) Operation_hash.Map.t } -let empty = { - applied = [] ; - refused = Operation_hash.Map.empty ; - branch_refused = Operation_hash.Map.empty ; - branch_delayed = Operation_hash.Map.empty ; -} +let empty = + { applied = []; + refused = Operation_hash.Map.empty; + branch_refused = Operation_hash.Map.empty; + branch_delayed = Operation_hash.Map.empty } -let map f r = { - applied = r.applied; - refused = Operation_hash.Map.map f r.refused ; - branch_refused = Operation_hash.Map.map f r.branch_refused ; - branch_delayed = Operation_hash.Map.map f r.branch_delayed ; -} +let map f r = + { applied = r.applied; + refused = Operation_hash.Map.map f r.refused; + branch_refused = Operation_hash.Map.map f r.branch_refused; + branch_delayed = Operation_hash.Map.map f r.branch_delayed } let encoding error_encoding = let open Data_encoding in let operation_encoding = merge_objs (obj1 (req "hash" Operation_hash.encoding)) - (dynamic_size Operation.encoding) in + (dynamic_size Operation.encoding) + in let refused_encoding = merge_objs (obj1 (req "hash" Operation_hash.encoding)) (merge_objs (dynamic_size Operation.encoding) - (obj1 (req "error" error_encoding))) in + (obj1 (req "error" error_encoding))) + in let build_list map = Operation_hash.Map.bindings map in let build_map list = List.fold_right (fun (k, e) m -> Operation_hash.Map.add k e m) - list Operation_hash.Map.empty in + list + Operation_hash.Map.empty + in conv - (fun { applied ; refused ; branch_refused ; branch_delayed } -> - (applied, build_list refused, - build_list branch_refused, build_list branch_delayed)) + (fun {applied; refused; branch_refused; branch_delayed} -> + ( applied, + build_list refused, + build_list branch_refused, + build_list branch_delayed )) (fun (applied, refused, branch_refused, branch_delayed) -> - let refused = build_map refused in - let branch_refused = build_map branch_refused in - let branch_delayed = build_map branch_delayed in - { applied ; refused ; branch_refused ; branch_delayed }) + let refused = build_map refused in + let branch_refused = build_map branch_refused in + let branch_delayed = build_map branch_delayed in + {applied; refused; branch_refused; branch_delayed}) (obj4 (req "applied" (list operation_encoding)) (req "refused" (list refused_encoding)) @@ -80,13 +84,19 @@ let operations t = let ops = List.fold_left (fun acc (h, op) -> Operation_hash.Map.add h op acc) - Operation_hash.Map.empty t.applied in + Operation_hash.Map.empty + 
t.applied + in let ops = Operation_hash.Map.fold (fun h (op, _err) acc -> Operation_hash.Map.add h op acc) - t.branch_delayed ops in + t.branch_delayed + ops + in let ops = Operation_hash.Map.fold (fun h (op, _err) acc -> Operation_hash.Map.add h op acc) - t.branch_refused ops in + t.branch_refused + ops + in ops diff --git a/src/lib_base/preapply_result.mli b/src/lib_base/preapply_result.mli index 9d1629ce9e98ec2e791307fac6d12a0b99b2ece7..9c50a2cf3af8c7c682cb86fc96c026870d2d5fec 100644 --- a/src/lib_base/preapply_result.mli +++ b/src/lib_base/preapply_result.mli @@ -24,23 +24,19 @@ (*****************************************************************************) type 'error t = { - applied: (Operation_hash.t * Operation.t) list; - refused: (Operation.t * 'error list) Operation_hash.Map.t; + applied : (Operation_hash.t * Operation.t) list; + refused : (Operation.t * 'error list) Operation_hash.Map.t; (* e.g. invalid signature *) - branch_refused: (Operation.t * 'error list) Operation_hash.Map.t; + branch_refused : (Operation.t * 'error list) Operation_hash.Map.t; (* e.g. insufficent balance *) - branch_delayed: (Operation.t * 'error list) Operation_hash.Map.t; - (* e.g. timestamp in the future *) + branch_delayed : (Operation.t * 'error list) Operation_hash.Map.t + (* e.g. timestamp in the future *) } val empty : 'error t -val map : - (Operation.t * 'a list -> Operation.t * 'b list) -> 'a t -> 'b t +val map : (Operation.t * 'a list -> Operation.t * 'b list) -> 'a t -> 'b t -val operations : - 'error t -> Operation.t Operation_hash.Map.t +val operations : 'error t -> Operation.t Operation_hash.Map.t -val encoding : - 'error list Data_encoding.t -> - 'error t Data_encoding.t +val encoding : 'error list Data_encoding.t -> 'error t Data_encoding.t diff --git a/src/lib_base/protocol.ml b/src/lib_base/protocol.ml index 5eaebb5063421b13c4f11b8982397ec41e31ef58..eb2ae670893221dd499d1235ae6f061d625f2439 100644 --- a/src/lib_base/protocol.ml +++ b/src/lib_base/protocol.ml @@ -23,31 +23,29 @@ (* *) (*****************************************************************************) -type t = { - expected_env: env_version ; - components: component list ; -} +type t = {expected_env : env_version; components : component list} and component = { - name: string ; - interface: string option ; - implementation: string ; + name : string; + interface : string option; + implementation : string } and env_version = V1 -include Compare.Make(struct - type nonrec t = t - let compare = Pervasives.compare - end) +include Compare.Make (struct + type nonrec t = t + + let compare = Pervasives.compare +end) let component_encoding = let open Data_encoding in conv - (fun { name ; interface; implementation } -> - (name, interface, implementation)) + (fun {name; interface; implementation} -> + (name, interface, implementation)) (fun (name, interface, implementation) -> - { name ; interface ; implementation }) + {name; interface; implementation}) (obj3 (req "name" string) (opt "interface" (conv MBytes.of_string MBytes.to_string bytes)) @@ -63,36 +61,37 @@ let env_version_encoding = let encoding = let open Data_encoding in conv - (fun { expected_env ; components } -> (expected_env, components)) - (fun (expected_env, components) -> { expected_env ; components }) + (fun {expected_env; components} -> (expected_env, components)) + (fun (expected_env, components) -> {expected_env; components}) (obj2 (req "expected_env_version" env_version_encoding) (req "components" (list component_encoding))) let bounded_encoding ?max_size () = match 
max_size with - | None -> encoding - | Some max_size -> Data_encoding.check_size max_size encoding + | None -> + encoding + | Some max_size -> + Data_encoding.check_size max_size encoding let pp ppf op = - Data_encoding.Json.pp ppf - (Data_encoding.Json.construct encoding op) + Data_encoding.Json.pp ppf (Data_encoding.Json.construct encoding op) -let env_version_to_string = function - | V1 -> "V1" +let env_version_to_string = function V1 -> "V1" -let pp_ocaml_component ppf { name ; interface ; implementation } = - Format.fprintf ppf +let pp_ocaml_component ppf {name; interface; implementation} = + Format.fprintf + ppf "@[{@[<v 1> name = %S ;@ interface = %a ;@ implementation = %S ;@]@ }@]" name - (fun ppf -> function - | None -> Format.fprintf ppf "None" - | Some s -> Format.fprintf ppf "Some %S" s) + (fun ppf -> function None -> Format.fprintf ppf "None" | Some s -> + Format.fprintf ppf "Some %S" s) interface implementation -let pp_ocaml ppf { expected_env ; components } = - Format.fprintf ppf +let pp_ocaml ppf {expected_env; components} = + Format.fprintf + ppf "@[{@[<v 1> expected_env = %s ;@ components = [@[<v>%a@]] ;@]@ }@]" (env_version_to_string expected_env) (Format.pp_print_list @@ -101,34 +100,37 @@ let pp_ocaml ppf { expected_env ; components } = components let to_bytes v = Data_encoding.Binary.to_bytes_exn encoding v + let of_bytes b = Data_encoding.Binary.of_bytes encoding b + let of_bytes_exn b = Data_encoding.Binary.of_bytes_exn encoding b + let hash proto = Protocol_hash.hash_bytes [to_bytes proto] + let hash_raw proto = Protocol_hash.hash_bytes [proto] module Meta = struct - type t = { - hash: Protocol_hash.t option ; - expected_env_version: env_version option ; - modules: string list ; + hash : Protocol_hash.t option; + expected_env_version : env_version option; + modules : string list } let encoding = let open Data_encoding in conv - (fun { hash ; expected_env_version ; modules } -> - (hash, expected_env_version, modules)) + (fun {hash; expected_env_version; modules} -> + (hash, expected_env_version, modules)) (fun (hash, expected_env_version, modules) -> - { hash ; expected_env_version ; modules }) @@ - obj3 - (opt "hash" - ~description:"Used to force the hash of the protocol" - Protocol_hash.encoding) - (opt "expected_env_version" - env_version_encoding) - (req "modules" - ~description:"Modules comprising the protocol" - (list string)) - + {hash; expected_env_version; modules}) + @@ obj3 + (opt + "hash" + ~description:"Used to force the hash of the protocol" + Protocol_hash.encoding) + (opt "expected_env_version" env_version_encoding) + (req + "modules" + ~description:"Modules comprising the protocol" + (list string)) end diff --git a/src/lib_base/protocol.mli b/src/lib_base/protocol.mli index 4e2b5a6e5eaeb0b7cd8d3090acd7da37323a8091..59e72b58622901734a0846eed71edb681a78f44c 100644 --- a/src/lib_base/protocol.mli +++ b/src/lib_base/protocol.mli @@ -23,38 +23,34 @@ (* *) (*****************************************************************************) -type t = { - expected_env: env_version ; - components: component list ; -} +type t = {expected_env : env_version; components : component list} and component = { - name: string ; - interface: string option ; - implementation: string ; + name : string; + interface : string option; + implementation : string } and env_version = V1 -val component_encoding: component Data_encoding.t -val env_version_encoding: env_version Data_encoding.t +val component_encoding : component Data_encoding.t + +val env_version_encoding : env_version 
Data_encoding.t -val pp_ocaml: Format.formatter -> t -> unit +val pp_ocaml : Format.formatter -> t -> unit -include S.HASHABLE with type t := t - and type hash := Protocol_hash.t -val of_bytes_exn: MBytes.t -> t +include S.HASHABLE with type t := t and type hash := Protocol_hash.t -val bounded_encoding: ?max_size:int -> unit -> t Data_encoding.t +val of_bytes_exn : MBytes.t -> t -module Meta: sig +val bounded_encoding : ?max_size:int -> unit -> t Data_encoding.t +module Meta : sig type t = { - hash: Protocol_hash.t option ; - expected_env_version: env_version option ; - modules: string list ; + hash : Protocol_hash.t option; + expected_env_version : env_version option; + modules : string list } - val encoding: t Data_encoding.t - + val encoding : t Data_encoding.t end diff --git a/src/lib_base/s.ml b/src/lib_base/s.ml index 01b358067a87eb44588175bb885e57925319e1e6..1b122a42167ef1740a0430f1f7afe384a790784b 100644 --- a/src/lib_base/s.ml +++ b/src/lib_base/s.ml @@ -24,92 +24,152 @@ (*****************************************************************************) module type T = sig - type t + include Compare.S with type t := t - val pp: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit - val encoding: t Data_encoding.t - val to_bytes: t -> MBytes.t - val of_bytes: MBytes.t -> t option + val encoding : t Data_encoding.t + val to_bytes : t -> MBytes.t + + val of_bytes : MBytes.t -> t option end module type HASHABLE = sig - include T type hash - val hash: t -> hash - val hash_raw: MBytes.t -> hash + val hash : t -> hash + + val hash_raw : MBytes.t -> hash end module type SET = sig type elt + type t - val empty: t - val is_empty: t -> bool - val mem: elt -> t -> bool - val add: elt -> t -> t - val singleton: elt -> t - val remove: elt -> t -> t - val union: t -> t -> t - val inter: t -> t -> t - val diff: t -> t -> t - val compare: t -> t -> int - val equal: t -> t -> bool - val subset: t -> t -> bool - val iter: (elt -> unit) -> t -> unit - val map: (elt -> elt) -> t -> t - val fold: (elt -> 'a -> 'a) -> t -> 'a -> 'a - val for_all: (elt -> bool) -> t -> bool - val exists: (elt -> bool) -> t -> bool - val filter: (elt -> bool) -> t -> t - val partition: (elt -> bool) -> t -> t * t - val cardinal: t -> int - val elements: t -> elt list - val min_elt_opt: t -> elt option - val max_elt_opt: t -> elt option - val choose_opt: t -> elt option - val split: elt -> t -> t * bool * t - val find_opt: elt -> t -> elt option - val find_first_opt: (elt -> bool) -> t -> elt option - val find_last_opt: (elt -> bool) -> t -> elt option - val of_list: elt list -> t + + val empty : t + + val is_empty : t -> bool + + val mem : elt -> t -> bool + + val add : elt -> t -> t + + val singleton : elt -> t + + val remove : elt -> t -> t + + val union : t -> t -> t + + val inter : t -> t -> t + + val diff : t -> t -> t + + val compare : t -> t -> int + + val equal : t -> t -> bool + + val subset : t -> t -> bool + + val iter : (elt -> unit) -> t -> unit + + val map : (elt -> elt) -> t -> t + + val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a + + val for_all : (elt -> bool) -> t -> bool + + val exists : (elt -> bool) -> t -> bool + + val filter : (elt -> bool) -> t -> t + + val partition : (elt -> bool) -> t -> t * t + + val cardinal : t -> int + + val elements : t -> elt list + + val min_elt_opt : t -> elt option + + val max_elt_opt : t -> elt option + + val choose_opt : t -> elt option + + val split : elt -> t -> t * bool * t + + val find_opt : elt -> t -> elt option + + val find_first_opt : (elt -> bool) 
-> t -> elt option + + val find_last_opt : (elt -> bool) -> t -> elt option + + val of_list : elt list -> t end module type MAP = sig type key - type (+'a) t - val empty: 'a t - val is_empty: 'a t -> bool - val mem: key -> 'a t -> bool - val add: key -> 'a -> 'a t -> 'a t - val update: key -> ('a option -> 'a option) -> 'a t -> 'a t - val singleton: key -> 'a -> 'a t - val remove: key -> 'a t -> 'a t - val merge: + + type +'a t + + val empty : 'a t + + val is_empty : 'a t -> bool + + val mem : key -> 'a t -> bool + + val add : key -> 'a -> 'a t -> 'a t + + val update : key -> ('a option -> 'a option) -> 'a t -> 'a t + + val singleton : key -> 'a -> 'a t + + val remove : key -> 'a t -> 'a t + + val merge : (key -> 'a option -> 'b option -> 'c option) -> 'a t -> 'b t -> 'c t - val union: (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t - val compare: ('a -> 'a -> int) -> 'a t -> 'a t -> int - val equal: ('a -> 'a -> bool) -> 'a t -> 'a t -> bool - val iter: (key -> 'a -> unit) -> 'a t -> unit - val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b - val for_all: (key -> 'a -> bool) -> 'a t -> bool - val exists: (key -> 'a -> bool) -> 'a t -> bool - val filter: (key -> 'a -> bool) -> 'a t -> 'a t - val partition: (key -> 'a -> bool) -> 'a t -> 'a t * 'a t - val cardinal: 'a t -> int - val bindings: 'a t -> (key * 'a) list - val min_binding_opt: 'a t -> (key * 'a) option - val max_binding_opt: 'a t -> (key * 'a) option - val choose_opt: 'a t -> (key * 'a) option - val split: key -> 'a t -> 'a t * 'a option * 'a t - val find_opt: key -> 'a t -> 'a option - val find_first_opt: (key -> bool) -> 'a t -> (key * 'a) option - val find_last_opt: (key -> bool) -> 'a t -> (key * 'a) option - val map: ('a -> 'b) -> 'a t -> 'b t - val mapi: (key -> 'a -> 'b) -> 'a t -> 'b t + + val union : (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t + + val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int + + val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool + + val iter : (key -> 'a -> unit) -> 'a t -> unit + + val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + + val for_all : (key -> 'a -> bool) -> 'a t -> bool + + val exists : (key -> 'a -> bool) -> 'a t -> bool + + val filter : (key -> 'a -> bool) -> 'a t -> 'a t + + val partition : (key -> 'a -> bool) -> 'a t -> 'a t * 'a t + + val cardinal : 'a t -> int + + val bindings : 'a t -> (key * 'a) list + + val min_binding_opt : 'a t -> (key * 'a) option + + val max_binding_opt : 'a t -> (key * 'a) option + + val choose_opt : 'a t -> (key * 'a) option + + val split : key -> 'a t -> 'a t * 'a option * 'a t + + val find_opt : key -> 'a t -> 'a option + + val find_first_opt : (key -> bool) -> 'a t -> (key * 'a) option + + val find_last_opt : (key -> bool) -> 'a t -> (key * 'a) option + + val map : ('a -> 'b) -> 'a t -> 'b t + + val mapi : (key -> 'a -> 'b) -> 'a t -> 'b t end diff --git a/src/lib_base/test_chain_status.ml b/src/lib_base/test_chain_status.ml index a404abd06a3445318746ed13998ada7833d4e3b0..78a183b107fe8a43e08ad047313efd1916a4f73d 100644 --- a/src/lib_base/test_chain_status.ml +++ b/src/lib_base/test_chain_status.ml @@ -25,66 +25,73 @@ type t = | Not_running - | Forking of { - protocol: Protocol_hash.t ; - expiration: Time.Protocol.t ; - } - | Running of { - chain_id: Chain_id.t ; - genesis: Block_hash.t ; - protocol: Protocol_hash.t ; - expiration: Time.Protocol.t ; - } + | Forking of {protocol : Protocol_hash.t; expiration : Time.Protocol.t} + | Running of + { chain_id : Chain_id.t; + genesis : Block_hash.t; + protocol : 
Protocol_hash.t; + expiration : Time.Protocol.t } let encoding = let open Data_encoding in - def "test_chain_status" @@ - union [ - case (Tag 0) ~title:"Not_running" - (obj1 (req "status" (constant "not_running"))) - (function Not_running -> Some () | _ -> None) - (fun () -> Not_running) ; - case (Tag 1) ~title:"Forking" - (obj3 - (req "status" (constant "forking")) - (req "protocol" Protocol_hash.encoding) - (req "expiration" Time.Protocol.encoding)) - (function - | Forking { protocol ; expiration } -> - Some ((), protocol, expiration) - | _ -> None) - (fun ((), protocol, expiration) -> - Forking { protocol ; expiration }) ; - case (Tag 2) ~title:"Running" - (obj5 - (req "status" (constant "running")) - (req "chain_id" Chain_id.encoding) - (req "genesis" Block_hash.encoding) - (req "protocol" Protocol_hash.encoding) - (req "expiration" Time.Protocol.encoding)) - (function - | Running { chain_id ; genesis ; protocol ; expiration } -> - Some ((), chain_id, genesis, protocol, expiration) - | _ -> None) - (fun ((), chain_id, genesis, protocol, expiration) -> - Running { chain_id ; genesis ; protocol ; expiration }) ; - ] + def "test_chain_status" + @@ union + [ case + (Tag 0) + ~title:"Not_running" + (obj1 (req "status" (constant "not_running"))) + (function Not_running -> Some () | _ -> None) + (fun () -> Not_running); + case + (Tag 1) + ~title:"Forking" + (obj3 + (req "status" (constant "forking")) + (req "protocol" Protocol_hash.encoding) + (req "expiration" Time.Protocol.encoding)) + (function + | Forking {protocol; expiration} -> + Some ((), protocol, expiration) + | _ -> + None) + (fun ((), protocol, expiration) -> Forking {protocol; expiration}); + case + (Tag 2) + ~title:"Running" + (obj5 + (req "status" (constant "running")) + (req "chain_id" Chain_id.encoding) + (req "genesis" Block_hash.encoding) + (req "protocol" Protocol_hash.encoding) + (req "expiration" Time.Protocol.encoding)) + (function + | Running {chain_id; genesis; protocol; expiration} -> + Some ((), chain_id, genesis, protocol, expiration) + | _ -> + None) + (fun ((), chain_id, genesis, protocol, expiration) -> + Running {chain_id; genesis; protocol; expiration}) ] let pp ppf = function - | Not_running -> Format.fprintf ppf "@[<v 2>Not running@]" - | Forking { protocol ; expiration } -> - Format.fprintf ppf + | Not_running -> + Format.fprintf ppf "@[<v 2>Not running@]" + | Forking {protocol; expiration} -> + Format.fprintf + ppf "@[<v 2>Forking %a (expires %a)@]" Protocol_hash.pp protocol - Time.System.pp_hum (Time.System.of_protocol_exn expiration) - | Running { chain_id ; genesis ; protocol ; expiration } -> - Format.fprintf ppf - "@[<v 2>Running %a\ - @ Genesis: %a\ - @ Net id: %a\ - @ Expiration: %a@]" - Protocol_hash.pp protocol - Block_hash.pp genesis - Chain_id.pp chain_id - Time.System.pp_hum (Time.System.of_protocol_exn expiration) + Time.System.pp_hum + (Time.System.of_protocol_exn expiration) + | Running {chain_id; genesis; protocol; expiration} -> + Format.fprintf + ppf + "@[<v 2>Running %a@ Genesis: %a@ Net id: %a@ Expiration: %a@]" + Protocol_hash.pp + protocol + Block_hash.pp + genesis + Chain_id.pp + chain_id + Time.System.pp_hum + (Time.System.of_protocol_exn expiration) diff --git a/src/lib_base/test_chain_status.mli b/src/lib_base/test_chain_status.mli index bea0f4e84a959fa1997bb15769035a1d4a72b855..98ed8ee50b23884bee0c8fa97703aa74d9896065 100644 --- a/src/lib_base/test_chain_status.mli +++ b/src/lib_base/test_chain_status.mli @@ -25,17 +25,13 @@ type t = | Not_running - | Forking of { - protocol: 
Protocol_hash.t ; - expiration: Time.Protocol.t ; - } - | Running of { - chain_id: Chain_id.t ; - genesis: Block_hash.t ; - protocol: Protocol_hash.t ; - expiration: Time.Protocol.t ; - } + | Forking of {protocol : Protocol_hash.t; expiration : Time.Protocol.t} + | Running of + { chain_id : Chain_id.t; + genesis : Block_hash.t; + protocol : Protocol_hash.t; + expiration : Time.Protocol.t } -val encoding: t Data_encoding.t +val encoding : t Data_encoding.t val pp : Format.formatter -> t -> unit diff --git a/src/lib_base/time.ml b/src/lib_base/time.ml index bfcc04cca00e649929cd21540c8ae9038810120a..084a6940894c1f9ead3cec479f6c17c9f495f197 100644 --- a/src/lib_base/time.ml +++ b/src/lib_base/time.ml @@ -24,235 +24,274 @@ (*****************************************************************************) module Protocol = struct - type t = int64 + let epoch = 0L let diff = Int64.sub + let add = Int64.add let of_ptime t = - let (days,ps) = (Ptime.Span.to_d_ps (Ptime.to_span t)) in + let (days, ps) = Ptime.Span.to_d_ps (Ptime.to_span t) in let s_days = Int64.mul (Int64.of_int days) 86_400L in Int64.add s_days (Int64.div ps 1_000_000_000_000L) + let to_ptime t = let days = Int64.to_int (Int64.div t 86_400L) in let ps = Int64.mul (Int64.rem t 86_400L) 1_000_000_000_000L in - match Option.apply ~f:Ptime.of_span (Ptime.Span.of_d_ps (days,ps)) with - | None -> invalid_arg ("Time.Protocol.to_ptime") - | Some ptime -> ptime + match Option.apply ~f:Ptime.of_span (Ptime.Span.of_d_ps (days, ps)) with + | None -> + invalid_arg "Time.Protocol.to_ptime" + | Some ptime -> + ptime let of_notation s = match Ptime.of_rfc3339 s with - | Ok (t, _, _) -> Some (of_ptime t) - | Error _ -> None + | Ok (t, _, _) -> + Some (of_ptime t) + | Error _ -> + None + let of_notation_exn s = match Ptime.(rfc3339_error_to_msg (of_rfc3339 s)) with - | Error (`Msg msg) -> invalid_arg ("Time.Protocol.of_notation: " ^ msg) - | Ok (t, _, _) -> of_ptime t - let to_notation t = - Ptime.to_rfc3339 ~frac_s:0 ~tz_offset_s:0 (to_ptime t) + | Error (`Msg msg) -> + invalid_arg ("Time.Protocol.of_notation: " ^ msg) + | Ok (t, _, _) -> + of_ptime t + + let to_notation t = Ptime.to_rfc3339 ~frac_s:0 ~tz_offset_s:0 (to_ptime t) let of_seconds x = x + let to_seconds x = x let rfc_encoding = let open Data_encoding in def "timestamp.rfc" - ~title: "RFC 3339 formatted timestamp" - ~description: "A date in RFC 3339 notation." @@ - conv - to_notation - (fun s -> match of_notation s with - | Some s -> s - | None -> Data_encoding.Json.cannot_destruct "Time.Protocol.of_notation") - string + ~title:"RFC 3339 formatted timestamp" + ~description:"A date in RFC 3339 notation." 
+ @@ conv + to_notation + (fun s -> + match of_notation s with + | Some s -> + s + | None -> + Data_encoding.Json.cannot_destruct "Time.Protocol.of_notation") + string let encoding = let open Data_encoding in - def "timestamp" @@ - splitted - ~binary: int64 - ~json: - (union [ - case Json_only - ~title:"RFC encoding" - rfc_encoding - (fun i -> Some i) - (fun i -> i) ; - case Json_only - ~title:"Second since epoch" - int64 - (fun _ -> None) - (fun i -> i) ; - ]) - + def "timestamp" + @@ splitted + ~binary:int64 + ~json: + (union + [ case + Json_only + ~title:"RFC encoding" + rfc_encoding + (fun i -> Some i) + (fun i -> i); + case + Json_only + ~title:"Second since epoch" + int64 + (fun _ -> None) + (fun i -> i) ]) let rpc_arg = RPC_arg.make ~name:(Format.asprintf "date") ~descr:(Format.asprintf "A date in seconds from epoch") - ~destruct: - (fun s -> - if s = "none" || s = "epoch" then - Ok epoch - else - match Int64.of_string s with - | t -> Ok t - | exception _ -> - Error (Format.asprintf "failed to parse time (epoch): %S" s)) + ~destruct:(fun s -> + if s = "none" || s = "epoch" then Ok epoch + else + match Int64.of_string s with + | t -> + Ok t + | exception _ -> + Error (Format.asprintf "failed to parse time (epoch): %S" s)) ~construct:Int64.to_string () let pp_hum ppf t = Ptime.pp_rfc3339 () ppf (to_ptime t) - include Compare.Make(Int64) - + include Compare.Make (Int64) end module System = struct - type t = Ptime.t + let epoch = Ptime.epoch module Span = struct type t = Ptime.Span.t + let multiply_exn f s = let open Ptime.Span in Option.unopt_exn (Failure "Time.System.Span.multiply_exn") (of_float_s (f *. Ptime.Span.to_float_s s)) + let of_seconds_exn f = match Ptime.Span.of_float_s f with - | None -> invalid_arg "Time.System.Span.of_seconds_exn" - | Some s -> s + | None -> + invalid_arg "Time.System.Span.of_seconds_exn" + | Some s -> + s + let encoding = let open Data_encoding in conv Ptime.Span.to_float_s - (fun f -> match Ptime.Span.of_float_s f with - | None -> invalid_arg "Time.System.Span.encoding" - | Some s -> s) + (fun f -> + match Ptime.Span.of_float_s f with + | None -> + invalid_arg "Time.System.Span.encoding" + | Some s -> + s) float + let rpc_arg = RPC_arg.make ~name:(Format.asprintf "timespan") ~descr:(Format.asprintf "A span of time in seconds") - ~destruct: - (fun s -> - match Ptime.Span.of_float_s (float_of_string s) with - | Some t -> Ok t - | None -> Error (Format.asprintf "failed to parse timespan: %S" s) - | exception _ -> Error (Format.asprintf "failed to parse timespan: %S" s)) + ~destruct:(fun s -> + match Ptime.Span.of_float_s (float_of_string s) with + | Some t -> + Ok t + | None -> + Error (Format.asprintf "failed to parse timespan: %S" s) + | exception _ -> + Error (Format.asprintf "failed to parse timespan: %S" s)) ~construct:(fun s -> string_of_float (Ptime.Span.to_float_s s)) () - end let of_seconds_opt x = let days = Int64.to_int (Int64.div x 86_400L) in let ps = Int64.mul (Int64.rem x 86_400L) 1_000_000_000_000L in - Option.apply ~f:Ptime.of_span (Ptime.Span.of_d_ps (days,ps)) - let of_seconds_exn x = match of_seconds_opt x with - | Some t -> t - | None -> invalid_arg "Time.of_seconds" + Option.apply ~f:Ptime.of_span (Ptime.Span.of_d_ps (days, ps)) + + let of_seconds_exn x = + match of_seconds_opt x with + | Some t -> + t + | None -> + invalid_arg "Time.of_seconds" + let to_seconds x = - let (days,ps) = Ptime.(Span.to_d_ps (to_span x)) in + let (days, ps) = Ptime.(Span.to_d_ps (to_span x)) in let s_days = Int64.mul (Int64.of_int days) 86_400L in 
Int64.add s_days (Int64.div ps 1_000_000_000_000L) let of_protocol_exn = of_seconds_exn + let of_protocol_opt = of_seconds_opt + let to_protocol = to_seconds let of_notation_opt s = - match Ptime.of_rfc3339 s with - | Ok (t, _, _) -> Some t - | Error _ -> None + match Ptime.of_rfc3339 s with Ok (t, _, _) -> Some t | Error _ -> None + let of_notation_exn s = match Ptime.(rfc3339_error_to_msg (of_rfc3339 s)) with - | Ok (t, _, _) -> t - | Error (`Msg msg) -> invalid_arg ("Time.of_notation: " ^ msg) + | Ok (t, _, _) -> + t + | Error (`Msg msg) -> + invalid_arg ("Time.of_notation: " ^ msg) + let to_notation t = Ptime.to_rfc3339 t let rfc_encoding = let open Data_encoding in def "timestamp.rfc" - ~title: "RFC 3339 formatted timestamp" - ~description: "A date in RFC 3339 notation." @@ - conv - to_notation - (fun s -> match of_notation_opt s with - | Some s -> s - | None -> Data_encoding.Json.cannot_destruct "Time.of_notation") - string + ~title:"RFC 3339 formatted timestamp" + ~description:"A date in RFC 3339 notation." + @@ conv + to_notation + (fun s -> + match of_notation_opt s with + | Some s -> + s + | None -> + Data_encoding.Json.cannot_destruct "Time.of_notation") + string let encoding = let open Data_encoding in let binary = conv to_seconds of_seconds_exn int64 in let json = - union [ - case Json_only - ~title:"RFC encoding" - rfc_encoding - (fun i -> Some i) - (fun i -> i) ; - case Json_only - ~title:"Second since epoch" - int64 - (fun _ -> None) - (fun i -> of_seconds_exn i) ; - ] in - def "timestamp" @@ - splitted ~binary ~json + union + [ case + Json_only + ~title:"RFC encoding" + rfc_encoding + (fun i -> Some i) + (fun i -> i); + case + Json_only + ~title:"Second since epoch" + int64 + (fun _ -> None) + (fun i -> of_seconds_exn i) ] + in + def "timestamp" @@ splitted ~binary ~json let rpc_arg = RPC_arg.make ~name:(Format.asprintf "date") ~descr:(Format.asprintf "A date in seconds from epoch") - ~destruct: - (fun s -> - if s = "none" || s = "epoch" then - Ok Ptime.epoch - else - match of_notation_opt s with - | Some t -> Ok t - | None -> - match of_seconds_exn (Int64.of_string s) with - | t -> Ok t - | exception _ -> - Error (Format.asprintf "failed to parse time (epoch): %S" s)) + ~destruct:(fun s -> + if s = "none" || s = "epoch" then Ok Ptime.epoch + else + match of_notation_opt s with + | Some t -> + Ok t + | None -> ( + match of_seconds_exn (Int64.of_string s) with + | t -> + Ok t + | exception _ -> + Error (Format.asprintf "failed to parse time (epoch): %S" s) )) ~construct:to_notation () let pp_hum ppf t = Ptime.pp_rfc3339 () ppf t - type 'a stamped = { - data: 'a ; - stamp: Ptime.t ; - } + type 'a stamped = {data : 'a; stamp : Ptime.t} + let stamped_encoding arg_encoding = let open Data_encoding in conv (fun {stamp; data} -> (stamp, data)) (fun (stamp, data) -> {stamp; data}) (tup2 encoding arg_encoding) - let stamp ~time data = - { data ; stamp = time } + + let stamp ~time data = {data; stamp = time} + let recent a1 a2 = - match a1, a2 with - | (None, None) -> None - | (None, (Some _ as a)) - | (Some _ as a, None) -> a + match (a1, a2) with + | (None, None) -> + None + | (None, (Some _ as a)) | ((Some _ as a), None) -> + a | (Some (_, t1), Some (_, t2)) -> if Ptime.compare t1 t2 < 0 then a2 else a1 let hash t = Int64.to_int (to_seconds t) + include Compare.Make (Ptime) module Set = Set.Make (Ptime) module Map = Map.Make (Ptime) - module Table = Hashtbl.Make (struct include Ptime let hash = hash end) + module Table = Hashtbl.Make (struct + include Ptime + + let hash = hash + 
end) end diff --git a/src/lib_base/time.mli b/src/lib_base/time.mli index 0235c558b03a19970289bc7206c49727c9e31238..9484fff1d03f21122eb3aa114d76a70aa82e8bd8 100644 --- a/src/lib_base/time.mli +++ b/src/lib_base/time.mli @@ -47,7 +47,6 @@ *) module Protocol : sig - (** {1:Protocol time} *) (** The out-of-protocol view of in-protocol timestamps. The precision of @@ -56,47 +55,49 @@ module Protocol : sig Note that the out-of-protocol view does not necessarily match the in-protocol representation. *) - - type t (** The type of protocol times *) + type t - val epoch : t (** Unix epoch is 1970-01-01 00:00:00 +0000 (UTC) *) + val epoch : t include Compare.S with type t := t - val add: t -> int64 -> t (** [add t s] is [s] seconds later than [t] *) + val add : t -> int64 -> t - val diff: t -> t -> int64 (** [diff a b] is the number of seconds between [a] and [b]. It is negative if [b] is later than [a]. *) + val diff : t -> t -> int64 (** Conversions to and from string representations. *) val of_notation : string -> t option + val of_notation_exn : string -> t + val to_notation : t -> string (** Conversion to and from "number of seconds since epoch" representation. *) val of_seconds : int64 -> t + val to_seconds : t -> int64 (** Serialization functions *) val encoding : t Data_encoding.t + val rfc_encoding : t Data_encoding.t + val rpc_arg : t RPC_arg.t (** Pretty-printing functions *) val pp_hum : Format.formatter -> t -> unit - end module System : sig - (** {1:System time} *) (** A representation of timestamps. @@ -109,43 +110,51 @@ module System : sig [Mtime]. *) type t = Ptime.t + val epoch : t module Span : sig - type t = Ptime.Span.t (** A representation of spans of time between two timestamps. *) + type t = Ptime.Span.t - val multiply_exn : float -> t -> t (** [multiply_exn factor t] is a time spans that lasts [factor] time as long as [t]. It fails if the time span cannot be represented. *) + val multiply_exn : float -> t -> t - val of_seconds_exn : float -> t (** [of_seconds_exn f] is a time span of [f] seconds. It fails if the time span cannot be represented. *) + val of_seconds_exn : float -> t (** Serialization functions *) val rpc_arg : t RPC_arg.t + val encoding : t Data_encoding.t end (** Conversions to and from Protocol time. Note that converting system time to protocol time truncates any subsecond precision. *) - val of_protocol_opt: Protocol.t -> t option - val of_protocol_exn: Protocol.t -> t - val to_protocol: t -> Protocol.t + val of_protocol_opt : Protocol.t -> t option + + val of_protocol_exn : Protocol.t -> t + + val to_protocol : t -> Protocol.t (** Conversions to and from string. It uses rfc3339. *) val of_notation_opt : string -> t option + val of_notation_exn : string -> t + val to_notation : t -> string (** Serialization. *) val encoding : t Data_encoding.t + val rfc_encoding : t Data_encoding.t + val rpc_arg : t RPC_arg.t (** Pretty-printing *) @@ -154,28 +163,27 @@ module System : sig (** Timestamping data. *) - type 'a stamped = { - data: 'a ; - stamp: t ; - } (** Data with an associated time stamp. *) + type 'a stamped = {data : 'a; stamp : t} val stamped_encoding : 'a Data_encoding.t -> 'a stamped Data_encoding.t - val stamp : time:t -> 'a -> 'a stamped (** [stamp d] is a timestamped version of [d]. *) + val stamp : time:t -> 'a -> 'a stamped - val recent : ('a * t) option -> ('a * t) option -> ('a * t) option (** [recent a b] is either [a] or [b] (which ever carries the most recent timestamp), or [None] if both [a] and [b] are [None]. 
*) + val recent : ('a * t) option -> ('a * t) option -> ('a * t) option (** Helper modules *) - val hash: t -> int + val hash : t -> int + include Compare.S with type t := t + module Set : Set.S with type elt = t + module Map : Map.S with type key = t - module Table : Hashtbl.S with type key = t + module Table : Hashtbl.S with type key = t end - diff --git a/src/lib_base/tzPervasives.ml b/src/lib_base/tzPervasives.ml index 6e9c546ba362916c64382d058e44b46f402c1789..0460c6ccd0111831ffa5e75f39b05a3898cd2339 100644 --- a/src/lib_base/tzPervasives.ml +++ b/src/lib_base/tzPervasives.ml @@ -29,13 +29,13 @@ include Tezos_rpc include Tezos_clic include Tezos_crypto include Tezos_micheline - module Data_encoding = Tezos_data_encoding.Data_encoding module List = struct include List include Tezos_stdlib.TzList end + module String = struct include String include Tezos_stdlib.TzString @@ -46,13 +46,10 @@ module Fitness = Fitness module Block_header = Block_header module Operation = Operation module Protocol = Protocol - module Test_chain_status = Test_chain_status module Preapply_result = Preapply_result - module Block_locator = Block_locator module Mempool = Mempool - module P2p_addr = P2p_addr module P2p_identity = P2p_identity module P2p_peer = P2p_peer @@ -60,13 +57,9 @@ module P2p_point = P2p_point module P2p_connection = P2p_connection module P2p_stat = P2p_stat module P2p_version = P2p_version - module Distributed_db_version = Distributed_db_version module Network_version = Network_version - module Lwt_exit = Lwt_exit - include Utils.Infix include Error_monad - module Internal_event = Internal_event diff --git a/src/lib_base/tzPervasives.mli b/src/lib_base/tzPervasives.mli index fc9e36a8a62c87bf36cf17d2af5d503784848289..73fcbe14bd845599285850174958d8f6a898200f 100644 --- a/src/lib_base/tzPervasives.mli +++ b/src/lib_base/tzPervasives.mli @@ -23,21 +23,46 @@ (* *) (*****************************************************************************) -include (module type of (struct include Tezos_stdlib end)) -include (module type of (struct include Tezos_error_monad end)) -include (module type of (struct include Tezos_rpc end)) -include (module type of (struct include Tezos_clic end)) -include (module type of (struct include Tezos_crypto end)) +include module type of struct + include Tezos_stdlib +end + +include module type of struct + include Tezos_error_monad +end + +include module type of struct + include Tezos_rpc +end + +include module type of struct + include Tezos_clic +end + +include module type of struct + include Tezos_crypto +end module Data_encoding = Data_encoding module List : sig - include (module type of (struct include List end)) - include (module type of (struct include Tezos_stdlib.TzList end)) + include module type of struct + include List + end + + include module type of struct + include Tezos_stdlib.TzList + end end + module String : sig - include (module type of (struct include String end)) - include (module type of (struct include Tezos_stdlib.TzString end)) + include module type of struct + include String + end + + include module type of struct + include Tezos_stdlib.TzString + end end module Time = Time @@ -49,7 +74,6 @@ module Test_chain_status = Test_chain_status module Preapply_result = Preapply_result module Block_locator = Block_locator module Mempool = Mempool - module P2p_addr = P2p_addr module P2p_identity = P2p_identity module P2p_peer = P2p_peer @@ -57,13 +81,16 @@ module P2p_point = P2p_point module P2p_connection = P2p_connection module P2p_stat = P2p_stat module 
P2p_version = P2p_version - module Distributed_db_version = Distributed_db_version module Network_version = Network_version - module Lwt_exit = Lwt_exit -include (module type of (struct include Utils.Infix end)) -include (module type of (struct include Error_monad end)) +include module type of struct + include Utils.Infix +end + +include module type of struct + include Error_monad +end module Internal_event = Internal_event diff --git a/src/lib_clic/.ocamlformat b/src/lib_clic/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_clic/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_clic/clic.ml b/src/lib_clic/clic.ml index 839bc186ececba5eb2bf6e69531f67ed26168d63..c74e37dd3f5153f49359dfafb3a8eb06e2bd1797 100644 --- a/src/lib_clic/clic.ml +++ b/src/lib_clic/clic.ml @@ -25,519 +25,729 @@ open Error_monad -type ('p, 'ctx) parameter = - { converter: ('ctx -> string -> 'p tzresult Lwt.t) ; - autocomplete: ('ctx -> string list tzresult Lwt.t) option } - -let parameter ?autocomplete converter = - { converter ; autocomplete } - -let compose_parameters { converter = c1; autocomplete = a1' } { converter = c2; autocomplete = a2' } = - { converter = (fun ctx s -> - c1 ctx s >>= function - | Ok r -> return r - | Error _ -> c2 ctx s); - autocomplete = match a1' with - | None -> a2' - | Some a1 -> match a2' with - | None -> a1' - | Some a2 -> Some (fun ctx -> - a1 ctx >>=? fun r1 -> - a2 ctx >>=? fun r2 -> - return (List.concat [r1; r2])) - } - -let map_parameter ~f { converter; autocomplete } = - { converter = (fun ctx s -> converter ctx s >>|? f); - autocomplete - } - -type label = - { long : string ; - short : char option } +type ('p, 'ctx) parameter = { + converter : 'ctx -> string -> 'p tzresult Lwt.t; + autocomplete : ('ctx -> string list tzresult Lwt.t) option +} + +let parameter ?autocomplete converter = {converter; autocomplete} + +let compose_parameters {converter = c1; autocomplete = a1'} + {converter = c2; autocomplete = a2'} = + { converter = + (fun ctx s -> + c1 ctx s >>= function Ok r -> return r | Error _ -> c2 ctx s); + autocomplete = + ( match a1' with + | None -> + a2' + | Some a1 -> ( + match a2' with + | None -> + a1' + | Some a2 -> + Some + (fun ctx -> + a1 ctx + >>=? fun r1 -> + a2 ctx >>=? fun r2 -> return (List.concat [r1; r2])) ) ) } + +let map_parameter ~f {converter; autocomplete} = + {converter = (fun ctx s -> converter ctx s >>|? 
f); autocomplete} + +type label = {long : string; short : char option} type ('a, 'ctx) arg = - | Arg : { doc : string ; - label : label ; - placeholder : string ; - kind : ('p, 'ctx) parameter } -> - ('p option, 'ctx) arg - | DefArg : { doc : string ; - label : label ; - placeholder : string ; - kind : ('p, 'ctx) parameter ; - default : string } -> ('p, 'ctx) arg - | Switch : { label : label ; - doc : string } -> - (bool, 'ctx) arg - | Constant : 'a -> ('a, 'ctx) arg + | Arg : + { doc : string; + label : label; + placeholder : string; + kind : ('p, 'ctx) parameter } + -> ('p option, 'ctx) arg + | DefArg : + { doc : string; + label : label; + placeholder : string; + kind : ('p, 'ctx) parameter; + default : string } + -> ('p, 'ctx) arg + | Switch : {label : label; doc : string} -> (bool, 'ctx) arg + | Constant : 'a -> ('a, 'ctx) arg type ('a, 'arg) args = | NoArgs : (unit, 'args) args - | AddArg : ('a, 'args) arg * ('b, 'args) args -> - ('a * 'b, 'args) args + | AddArg : ('a, 'args) arg * ('b, 'args) args -> ('a * 'b, 'args) args (* A simple structure for command interpreters. This is more generic than the exported one, see end of file. *) type ('a, 'ctx) params = - | Prefix : string * ('a, 'ctx) params -> - ('a, 'ctx) params - | Param : string * string * - ('p, 'ctx) parameter * - ('a, 'ctx) params -> - ('p -> 'a, 'ctx) params - | Stop : - ('ctx -> unit tzresult Lwt.t, 'ctx) params - | Seq : string * string * - ('p, 'ctx) parameter -> - ('p list -> 'ctx -> unit tzresult Lwt.t, 'ctx) params + | Prefix : string * ('a, 'ctx) params -> ('a, 'ctx) params + | Param : + string * string * ('p, 'ctx) parameter * ('a, 'ctx) params + -> ('p -> 'a, 'ctx) params + | Stop : ('ctx -> unit tzresult Lwt.t, 'ctx) params + | Seq : + string * string * ('p, 'ctx) parameter + -> ('p list -> 'ctx -> unit tzresult Lwt.t, 'ctx) params type (_, _) options = - Argument : { spec : ('a, 'arg) args ; - converter : 'a -> 'b } -> ('b, 'arg) options + | Argument : + { spec : ('a, 'arg) args; + converter : 'a -> 'b } + -> ('b, 'arg) options (* A command group *) -type group = - { name : string ; - title : string } +type group = {name : string; title : string} (* A command wraps a callback with its type and info *) type 'arg command = - | Command - : { params : ('a, 'iarg) params ; - options : ('b, 'iarg) options ; - handler : 'b -> 'a ; - desc : string ; - group : group option ; + | Command : + { params : ('a, 'iarg) params; + options : ('b, 'iarg) options; + handler : 'b -> 'a; + desc : string; + group : group option; conv : 'arg -> 'iarg } - -> 'arg command + -> 'arg command type error += Bad_argument of int * string + type error += Unterminated_command : string list * 'ctx command list -> error + type error += Command_not_found : string list * 'ctx command list -> error + type error += Unknown_option : string * 'ctx command option -> error -type error += Option_expected_argument : string * 'ctx command option -> error + +type error += + | Option_expected_argument : string * 'ctx command option -> error + type error += Bad_option_argument : string * 'ctx command option -> error + type error += Multiple_occurences : string * 'ctx command option -> error + type error += Extra_arguments : string list * 'ctx command -> error -let trim s = (* config-file wokaround *) - TzString.split '\n' s |> - List.map String.trim |> - String.concat "\n" +let trim s = + (* config-file wokaround *) + TzString.split '\n' s |> List.map String.trim |> String.concat "\n" let print_desc ppf doc = - let short, long = try + let (short, long) = + try 
let len = String.index doc '\n' in - String.sub doc 0 len, - Some (String.sub doc (len + 1) (String.length doc - len - 1)) - with _ -> doc, None in + ( String.sub doc 0 len, + Some (String.sub doc (len + 1) (String.length doc - len - 1)) ) + with _ -> (doc, None) + in match long with | None -> Format.fprintf ppf "%s" short | Some doc -> - Format.fprintf ppf "%s@{<full>@\n @[<hov 0>%a@]@}" short Format.pp_print_text doc + Format.fprintf + ppf + "%s@{<full>@\n @[<hov 0>%a@]@}" + short + Format.pp_print_text + doc let print_label ppf = function - | { long ; short = None } -> Format.fprintf ppf "--%s" long - | { long ; short = Some short } -> Format.fprintf ppf "-%c --%s" short long + | {long; short = None} -> + Format.fprintf ppf "--%s" long + | {long; short = Some short} -> + Format.fprintf ppf "-%c --%s" short long let print_options_detailed (type ctx) = - let help_option : type a.Format.formatter -> (a, ctx) arg -> unit = - fun ppf -> function - | Arg { label ; placeholder ; doc ; _ } -> - Format.fprintf ppf "@{<opt>%a <%s>@}: %a" - print_label label placeholder - print_desc doc ; - | DefArg { label ; placeholder ; doc ; default ; _ } -> - Format.fprintf ppf "@{<opt>%a <%s>@}: %a" - print_label label placeholder - print_desc (doc ^ "\nDefaults to `" ^ default ^ "`.") - | Switch { label ; doc } -> - Format.fprintf ppf "@{<opt>%a@}: %a" - print_label label - print_desc doc - | Constant _ -> () in + let help_option : type a. Format.formatter -> (a, ctx) arg -> unit = + fun ppf -> function + | Arg {label; placeholder; doc; _} -> + Format.fprintf + ppf + "@{<opt>%a <%s>@}: %a" + print_label + label + placeholder + print_desc + doc + | DefArg {label; placeholder; doc; default; _} -> + Format.fprintf + ppf + "@{<opt>%a <%s>@}: %a" + print_label + label + placeholder + print_desc + (doc ^ "\nDefaults to `" ^ default ^ "`.") + | Switch {label; doc} -> + Format.fprintf ppf "@{<opt>%a@}: %a" print_label label print_desc doc + | Constant _ -> + () + in let rec help : type b. Format.formatter -> (b, ctx) args -> unit = - fun ppf -> function - | NoArgs -> () - | AddArg (arg, NoArgs) -> - Format.fprintf ppf "%a" - help_option arg - | AddArg (arg, rest) -> - Format.fprintf ppf "%a@,%a" - help_option arg help rest - in help + fun ppf -> function + | NoArgs -> + () + | AddArg (arg, NoArgs) -> + Format.fprintf ppf "%a" help_option arg + | AddArg (arg, rest) -> + Format.fprintf ppf "%a@,%a" help_option arg help rest + in + help let has_args : type a ctx. (a, ctx) args -> bool = function - | NoArgs -> false - | AddArg (_,_) -> true + | NoArgs -> + false + | AddArg (_, _) -> + true let print_options_brief (type ctx) = - let help_option : - type a. Format.formatter -> (a, ctx) arg -> unit = - fun ppf -> function - | DefArg { label ; placeholder ; _ } -> - Format.fprintf ppf "[@{<opt>%a <%s>@}]" - print_label label placeholder - | Arg { label ; placeholder ; _ } -> - Format.fprintf ppf "[@{<opt>%a <%s>@}]" - print_label label placeholder - | Switch { label ; _ } -> - Format.fprintf ppf "[@{<opt>%a@}]" - print_label label - | Constant _ -> () - in let rec help : type b. Format.formatter -> (b, ctx) args -> unit = - fun ppf -> function - | NoArgs -> () - | AddArg (arg, NoArgs) -> - Format.fprintf ppf "%a" help_option arg - | AddArg (arg, rest) -> - Format.fprintf ppf "%a@ %a" - help_option arg help rest - in help + let help_option : type a. 
Format.formatter -> (a, ctx) arg -> unit = + fun ppf -> function + | DefArg {label; placeholder; _} -> + Format.fprintf ppf "[@{<opt>%a <%s>@}]" print_label label placeholder + | Arg {label; placeholder; _} -> + Format.fprintf ppf "[@{<opt>%a <%s>@}]" print_label label placeholder + | Switch {label; _} -> + Format.fprintf ppf "[@{<opt>%a@}]" print_label label + | Constant _ -> + () + in + let rec help : type b. Format.formatter -> (b, ctx) args -> unit = + fun ppf -> function + | NoArgs -> + () + | AddArg (arg, NoArgs) -> + Format.fprintf ppf "%a" help_option arg + | AddArg (arg, rest) -> + Format.fprintf ppf "%a@ %a" help_option arg help rest + in + help let print_highlight highlight_strings formatter str = let rec print_string = function - | [] -> Format.fprintf formatter "%s" str - | regex :: tl -> - begin match Re.Str.full_split regex str with - | [] - | [ Re.Str.Text _ ] -> print_string tl - | list -> - List.iter - (function - | Re.Str.Text text -> Format.fprintf formatter "%s" text - | Re.Str.Delim delimiter -> - Format.fprintf formatter "@{<hilight>%s@}" delimiter) - list - end - in print_string (List.map Re.Str.regexp_string highlight_strings) + | [] -> + Format.fprintf formatter "%s" str + | regex :: tl -> ( + match Re.Str.full_split regex str with + | [] | [Re.Str.Text _] -> + print_string tl + | list -> + List.iter + (function + | Re.Str.Text text -> + Format.fprintf formatter "%s" text + | Re.Str.Delim delimiter -> + Format.fprintf formatter "@{<hilight>%s@}" delimiter) + list ) + in + print_string (List.map Re.Str.regexp_string highlight_strings) let print_commandline ppf (highlights, options, args) = - let rec print - : type a ctx. Format.formatter -> (a, ctx) params -> unit = - fun ppf -> function - | Stop -> Format.fprintf ppf "%a" print_options_brief options - | Seq (n, _, _) when not (has_args options) -> - Format.fprintf ppf "[@{<arg>%s@}...]" n - | Seq (n, _, _) -> - Format.fprintf ppf "[@{<arg>%s@}...] %a" n print_options_brief options - | Prefix (n, Stop) when not (has_args options) -> - Format.fprintf ppf "@{<kwd>%a@}" (print_highlight highlights) n - | Prefix (n, next) -> - Format.fprintf ppf "@{<kwd>%a@} %a" - (print_highlight highlights) n print next - | Param (n, _, _, Stop) when not (has_args options) -> - Format.fprintf ppf "@{<arg>%s@}" n - | Param (n, _, _, next) -> - Format.fprintf ppf "@{<arg>%s@} %a" n print next in + let rec print : type a ctx. Format.formatter -> (a, ctx) params -> unit = + fun ppf -> function + | Stop -> + Format.fprintf ppf "%a" print_options_brief options + | Seq (n, _, _) when not (has_args options) -> + Format.fprintf ppf "[@{<arg>%s@}...]" n + | Seq (n, _, _) -> + Format.fprintf ppf "[@{<arg>%s@}...] %a" n print_options_brief options + | Prefix (n, Stop) when not (has_args options) -> + Format.fprintf ppf "@{<kwd>%a@}" (print_highlight highlights) n + | Prefix (n, next) -> + Format.fprintf + ppf + "@{<kwd>%a@} %a" + (print_highlight highlights) + n + print + next + | Param (n, _, _, Stop) when not (has_args options) -> + Format.fprintf ppf "@{<arg>%s@}" n + | Param (n, _, _, next) -> + Format.fprintf ppf "@{<arg>%s@} %a" n print next + in Format.fprintf ppf "@{<commandline>%a@}" print args -let rec print_params_detailed - : type a b ctx. 
(b, ctx) args -> Format.formatter -> (a, ctx) params -> unit - = fun spec ppf -> function - | Stop -> print_options_detailed ppf spec - | Seq (n, desc, _) -> - Format.fprintf ppf "@{<arg>%s@}: %a" - n print_desc (trim desc) ; - begin match spec with - | NoArgs -> () - | _ -> Format.fprintf ppf "@,%a" print_options_detailed spec - end - | Prefix (_, next) -> - print_params_detailed spec ppf next - | Param (n, desc, _, Stop) -> - Format.fprintf ppf "@{<arg>%s@}: %a" - n print_desc (trim desc); - begin match spec with - | NoArgs -> () - | _ -> Format.fprintf ppf "@,%a" print_options_detailed spec - end - | Param (n, desc, _, next) -> - Format.fprintf ppf "@{<arg>%s@}: %a@,%a" - n print_desc (trim desc) (print_params_detailed spec) next +let rec print_params_detailed : + type a b ctx. (b, ctx) args -> Format.formatter -> (a, ctx) params -> unit + = + fun spec ppf -> function + | Stop -> + print_options_detailed ppf spec + | Seq (n, desc, _) -> ( + Format.fprintf ppf "@{<arg>%s@}: %a" n print_desc (trim desc) ; + match spec with + | NoArgs -> + () + | _ -> + Format.fprintf ppf "@,%a" print_options_detailed spec ) + | Prefix (_, next) -> + print_params_detailed spec ppf next + | Param (n, desc, _, Stop) -> ( + Format.fprintf ppf "@{<arg>%s@}: %a" n print_desc (trim desc) ; + match spec with + | NoArgs -> + () + | _ -> + Format.fprintf ppf "@,%a" print_options_detailed spec ) + | Param (n, desc, _, next) -> + Format.fprintf + ppf + "@{<arg>%s@}: %a@,%a" + n + print_desc + (trim desc) + (print_params_detailed spec) + next let contains_params_args : - type arg ctx. (arg, ctx) params -> (_, ctx) args -> bool - = fun params args -> - let rec help : (arg, ctx) params -> bool = function - | Stop -> has_args args - | Seq (_, _, _) -> true - | Prefix (_, next) -> help next - | Param (_, _, _, _) -> true - in help params + type arg ctx. (arg, ctx) params -> (_, ctx) args -> bool = + fun params args -> + let rec help : (arg, ctx) params -> bool = function + | Stop -> + has_args args + | Seq (_, _, _) -> + true + | Prefix (_, next) -> + help next + | Param (_, _, _, _) -> + true + in + help params let print_command : - type ctx. - ?prefix:(Format.formatter -> unit -> unit) -> - ?highlights:string list -> Format.formatter -> ctx command -> unit - = fun - ?(prefix = (fun _ () -> ())) - ?(highlights=[]) ppf - (Command { params ; desc ; options = Argument { spec ; _ } ; _ }) -> - if contains_params_args params spec - then - Format.fprintf ppf "@{<command>%a%a@{<short>@,@{<commanddoc>%a@,%a@}@}@}" - prefix () - print_commandline (highlights, spec, params) - print_desc desc - (print_params_detailed spec) params - else - Format.fprintf ppf "@{<command>%a%a@{<short>@,@{<commanddoc>%a@}@}@}" - prefix () - print_commandline (highlights, spec, params) - print_desc desc + type ctx. 
+ ?prefix:(Format.formatter -> unit -> unit) -> + ?highlights:string list -> + Format.formatter -> + ctx command -> + unit = + fun ?(prefix = fun _ () -> ()) + ?(highlights = []) + ppf + (Command {params; desc; options = Argument {spec; _}; _}) -> + if contains_params_args params spec then + Format.fprintf + ppf + "@{<command>%a%a@{<short>@,@{<commanddoc>%a@,%a@}@}@}" + prefix + () + print_commandline + (highlights, spec, params) + print_desc + desc + (print_params_detailed spec) + params + else + Format.fprintf + ppf + "@{<command>%a%a@{<short>@,@{<commanddoc>%a@}@}@}" + prefix + () + print_commandline + (highlights, spec, params) + print_desc + desc type ex_command = Ex : _ command -> ex_command let group_commands commands = let (grouped, ungrouped) = List.fold_left - (fun (grouped, ungrouped) (Ex (Command { group ; _ }) as command) -> - match group with - | None -> (grouped, command :: ungrouped) - | Some group -> - match - List.find_opt (fun ({ name ; _ }, _) -> group.name = name) grouped with - | None -> ((group, ref [ command ]) :: grouped, ungrouped) - | Some ({ title ; _ }, r) -> - if title <> group.title then - invalid_arg "Clic.usage: duplicate group name" ; - r := command :: !r ; - (grouped, ungrouped)) + (fun (grouped, ungrouped) (Ex (Command {group; _}) as command) -> + match group with + | None -> + (grouped, command :: ungrouped) + | Some group -> ( + match + List.find_opt (fun ({name; _}, _) -> group.name = name) grouped + with + | None -> + ((group, ref [command]) :: grouped, ungrouped) + | Some ({title; _}, r) -> + if title <> group.title then + invalid_arg "Clic.usage: duplicate group name" ; + r := command :: !r ; + (grouped, ungrouped) )) ([], []) - commands in - List.map (fun (g, c) -> (g, List.rev !c)) - (match ungrouped with - | [] -> grouped - | l -> (grouped @ - [ { name = "misc" ; - title = "Miscellaneous commands" }, - ref l ])) - -let print_group print_command ppf ({ title ; _ }, commands) = - Format.fprintf ppf "@{<title>%s@}@,@{<list>%a@}" + commands + in + List.map + (fun (g, c) -> (g, List.rev !c)) + ( match ungrouped with + | [] -> + grouped + | l -> + grouped @ [({name = "misc"; title = "Miscellaneous commands"}, ref l)] + ) + +let print_group print_command ppf ({title; _}, commands) = + Format.fprintf + ppf + "@{<title>%s@}@,@{<list>%a@}" title - (Format.pp_print_list print_command) commands + (Format.pp_print_list print_command) + commands type formatter_state = Format.formatter_out_functions * Format.formatter_tag_functions * bool type format = Plain | Ansi | Html + type verbosity = Terse | Short | Details | Full let setup_formatter ppf format verbosity = let skip = ref false in - let orig_out_functions, _, _ as orig_state = - Format.pp_get_formatter_out_functions ppf (), - Format.pp_get_formatter_tag_functions ppf (), - Format.pp_get_print_tags ppf () in - begin - Format.pp_print_flush ppf () ; - Format.pp_set_formatter_out_functions ppf + let ((orig_out_functions, _, _) as orig_state) = + ( Format.pp_get_formatter_out_functions ppf (), + Format.pp_get_formatter_tag_functions ppf (), + Format.pp_get_print_tags ppf () ) + in + ( Format.pp_print_flush ppf () ; + Format.pp_set_formatter_out_functions + ppf { out_string = (fun s b a -> - if s = "\000\000\000" then skip := true - else if s = "\255\255\255" then skip := false - else if not !skip then orig_out_functions.out_string s b a) ; - out_spaces = (fun n -> if not !skip then orig_out_functions.out_spaces n) ; - out_newline = (fun () -> if not !skip then orig_out_functions.out_newline ()) ; - out_flush 
= (fun () -> if not !skip then orig_out_functions.out_flush ()) ; + if s = "\000\000\000" then skip := true + else if s = "\255\255\255" then skip := false + else if not !skip then orig_out_functions.out_string s b a); + out_spaces = + (fun n -> if not !skip then orig_out_functions.out_spaces n); + out_newline = + (fun () -> if not !skip then orig_out_functions.out_newline ()); + out_flush = + (fun () -> if not !skip then orig_out_functions.out_flush ()); out_indent = orig_out_functions.out_indent } ; let levels = ref [] in let setup_level (level, op) = - if op level verbosity then - Format.fprintf ppf "@<0>%s" "\255\255\255" - else Format.fprintf ppf "@<0>%s" "\000\000\000" in + if op level verbosity then Format.fprintf ppf "@<0>%s" "\255\255\255" + else Format.fprintf ppf "@<0>%s" "\000\000\000" + in let push_level level = levels := level :: !levels ; - setup_level level in + setup_level level + in let pop_level () = match !levels with - | _ :: level :: rest -> levels := level :: rest ; setup_level level - | [ _ ] | [] -> Pervasives.failwith "Clic: unclosed verbosity tag" in - push_level (Terse, (<=)) ; + | _ :: level :: rest -> + levels := level :: rest ; + setup_level level + | [_] | [] -> + Pervasives.failwith "Clic: unclosed verbosity tag" + in + push_level (Terse, ( <= )) ; let push_level_tag tag = let push op = function - | "full" -> push_level (Full, op) - | "details" -> push_level (Details, op) - | "short" -> push_level (Short, op) - | "terse" -> push_level (Terse, op) - | tag -> Pervasives.failwith ("Clic: invalid semantic tag <" ^ tag ^ ">") in - if String.length tag > 0 && String.get tag 0 = '=' then - push (=) (String.sub tag 1 (String.length tag - 1)) - else if String.length tag > 0 && String.get tag 0 = '-' then - push (>) (String.sub tag 1 (String.length tag - 1)) - else push (<=) tag in + | "full" -> + push_level (Full, op) + | "details" -> + push_level (Details, op) + | "short" -> + push_level (Short, op) + | "terse" -> + push_level (Terse, op) + | tag -> + Pervasives.failwith ("Clic: invalid semantic tag <" ^ tag ^ ">") + in + if String.length tag > 0 && tag.[0] = '=' then + push ( = ) (String.sub tag 1 (String.length tag - 1)) + else if String.length tag > 0 && tag.[0] = '-' then + push ( > ) (String.sub tag 1 (String.length tag - 1)) + else push ( <= ) tag + in let pop_level_tag = function - | "full" | "details" | "short" | "terse" - | "-full" | "-details" | "-short" | "-terse" - | "=full" | "=details" | "=short" | "=terse" -> pop_level () - | tag -> Pervasives.failwith ("Clic: invalid semantic tag <" ^ tag ^ ">") in + | "full" + | "details" + | "short" + | "terse" + | "-full" + | "-details" + | "-short" + | "-terse" + | "=full" + | "=details" + | "=short" + | "=terse" -> + pop_level () + | tag -> + Pervasives.failwith ("Clic: invalid semantic tag <" ^ tag ^ ">") + in match format with | Ansi -> let color_num = function - | `Auto -> None - | `Black -> Some 0 - | `Red -> Some 1 - | `Green -> Some 2 - | `Yellow -> Some 3 - | `Blue -> Some 4 - | `Magenta -> Some 5 - | `Cyan -> Some 6 - | `White -> Some 7 in + | `Auto -> + None + | `Black -> + Some 0 + | `Red -> + Some 1 + | `Green -> + Some 2 + | `Yellow -> + Some 3 + | `Blue -> + Some 4 + | `Magenta -> + Some 5 + | `Cyan -> + Some 6 + | `White -> + Some 7 + in let ansi_format ppf (fg, bg, b, u) = Format.fprintf ppf "@<0>%s" "\027[0m" ; match - (match color_num fg with Some n -> [ string_of_int (30 + n) ] | None -> []) @ - (match color_num bg with Some n -> [ string_of_int (40 + n) ] | None -> []) @ - (if b then [ "1" ] 
else []) @ - (if u then [ "4" ] else []) + ( match color_num fg with + | Some n -> + [string_of_int (30 + n)] + | None -> + [] ) + @ ( match color_num bg with + | Some n -> + [string_of_int (40 + n)] + | None -> + [] ) + @ (if b then ["1"] else []) + @ if u then ["4"] else [] with - | [] -> () - | l -> Format.fprintf ppf "@<0>%s" ("\027[" ^ String.concat ";" l ^ "m") in - let ansi_stack = ref [ (`Auto, `Auto, false, false) ] in + | [] -> + () + | l -> + Format.fprintf ppf "@<0>%s" ("\027[" ^ String.concat ";" l ^ "m") + in + let ansi_stack = ref [(`Auto, `Auto, false, false)] in let push_ansi_format (fg, bg, b, u) = - let format = match !ansi_stack with + let format = + match !ansi_stack with | (pfg, pbg, pb, pu) :: _ -> - (Option.unopt ~default: pfg fg, - Option.unopt ~default: pbg bg, - pb || b, - pu || u) - | [] -> assert false in + ( Option.unopt ~default:pfg fg, + Option.unopt ~default:pbg bg, + pb || b, + pu || u ) + | [] -> + assert false + in ansi_stack := format :: !ansi_stack ; - Format.fprintf ppf "@<0>%a" ansi_format format in + Format.fprintf ppf "@<0>%a" ansi_format format + in let pop_ansi_format () = Format.fprintf ppf "@<0>%s" "\027[0m" ; match !ansi_stack with | _ :: format :: rest -> ansi_stack := format :: rest ; Format.fprintf ppf "@<0>%a" ansi_format format - | [ _ ] | [] -> Pervasives.failwith "Clic: unclosed ansi format" in - Format.pp_set_formatter_tag_functions ppf - { mark_open_tag = (fun _ -> "") ; - mark_close_tag = (fun _ -> "") ; - print_open_tag = begin function - | "title" -> push_ansi_format (None, None, true, true) - | "commandline" -> Format.fprintf ppf "@[<hov 4>" - | "commanddoc" -> Format.fprintf ppf " @[<v 0>" - | "opt" -> push_ansi_format (Some `Green, None, false, false) - | "arg" -> push_ansi_format (Some `Yellow, None, false, false) ; Format.fprintf ppf "<" - | "kwd" -> push_ansi_format (None, None, false, true) - | "error" -> push_ansi_format (Some `Red, None, true, true) - | "warning" -> push_ansi_format (Some `Yellow, None, true, true) - | "hilight" -> push_ansi_format (Some `White, Some `Yellow, true, true) - | "list" -> Format.fprintf ppf " @[<v 0>" - | "command" -> Format.fprintf ppf "@[<v 0>" - | "document" -> Format.fprintf ppf "@[<v 0>" - | other -> push_level_tag other - end ; - print_close_tag = begin function - | "title" -> Format.fprintf ppf ":" ; pop_ansi_format () - | "commandline" -> Format.fprintf ppf "@]" - | "commanddoc" -> Format.fprintf ppf "@]" - | "opt" -> pop_ansi_format () - | "arg" -> Format.fprintf ppf ">" ; pop_ansi_format () - | "kwd" -> pop_ansi_format () - | "error" -> pop_ansi_format () - | "warning" -> pop_ansi_format () - | "hilight" -> pop_ansi_format () - | "command" | "list" -> Format.fprintf ppf "@]" - | "document" -> Format.fprintf ppf "@]" - | other -> pop_level_tag other - end } ; + | [_] | [] -> + Pervasives.failwith "Clic: unclosed ansi format" + in + Format.pp_set_formatter_tag_functions + ppf + { mark_open_tag = (fun _ -> ""); + mark_close_tag = (fun _ -> ""); + print_open_tag = + (function + | "title" -> + push_ansi_format (None, None, true, true) + | "commandline" -> + Format.fprintf ppf "@[<hov 4>" + | "commanddoc" -> + Format.fprintf ppf " @[<v 0>" + | "opt" -> + push_ansi_format (Some `Green, None, false, false) + | "arg" -> + push_ansi_format (Some `Yellow, None, false, false) ; + Format.fprintf ppf "<" + | "kwd" -> + push_ansi_format (None, None, false, true) + | "error" -> + push_ansi_format (Some `Red, None, true, true) + | "warning" -> + push_ansi_format (Some `Yellow, None, true, true) + | 
"hilight" -> + push_ansi_format (Some `White, Some `Yellow, true, true) + | "list" -> + Format.fprintf ppf " @[<v 0>" + | "command" -> + Format.fprintf ppf "@[<v 0>" + | "document" -> + Format.fprintf ppf "@[<v 0>" + | other -> + push_level_tag other); + print_close_tag = + (function + | "title" -> + Format.fprintf ppf ":" ; pop_ansi_format () + | "commandline" -> + Format.fprintf ppf "@]" + | "commanddoc" -> + Format.fprintf ppf "@]" + | "opt" -> + pop_ansi_format () + | "arg" -> + Format.fprintf ppf ">" ; pop_ansi_format () + | "kwd" -> + pop_ansi_format () + | "error" -> + pop_ansi_format () + | "warning" -> + pop_ansi_format () + | "hilight" -> + pop_ansi_format () + | "command" | "list" -> + Format.fprintf ppf "@]" + | "document" -> + Format.fprintf ppf "@]" + | other -> + pop_level_tag other) } ; Format.pp_set_print_tags ppf true | Plain -> - Format.pp_set_formatter_tag_functions ppf - { mark_open_tag = (fun _ -> "") ; - mark_close_tag = (fun _ -> "") ; - print_open_tag = begin function - | "title" -> () - | "commandline" -> Format.fprintf ppf "@[<hov 4>" - | "commanddoc" -> Format.fprintf ppf " @[<v 0>" - | "opt" -> () - | "arg" -> Format.fprintf ppf "<" - | "kwd" -> () - | "hilight" -> () - | "error" -> () - | "warning" -> () - | "list" -> Format.fprintf ppf " @[<v 0>" - | "command" -> Format.fprintf ppf "@[<v 0>" - | "document" -> Format.fprintf ppf "@[<v 0>" - | other -> push_level_tag other - end ; - print_close_tag = begin function - | "title" -> Format.fprintf ppf ":" - | "commandline" -> Format.fprintf ppf "@]" - | "commanddoc" -> Format.fprintf ppf "@]" - | "opt" -> () - | "arg" -> Format.fprintf ppf ">" - | "kwd" -> () - | "error" -> () - | "warning" -> () - | "hilight" -> () - | "command" | "list" -> Format.fprintf ppf "@]" - | "document" -> Format.fprintf ppf "@]" - | other -> pop_level_tag other - end } ; + Format.pp_set_formatter_tag_functions + ppf + { mark_open_tag = (fun _ -> ""); + mark_close_tag = (fun _ -> ""); + print_open_tag = + (function + | "title" -> + () + | "commandline" -> + Format.fprintf ppf "@[<hov 4>" + | "commanddoc" -> + Format.fprintf ppf " @[<v 0>" + | "opt" -> + () + | "arg" -> + Format.fprintf ppf "<" + | "kwd" -> + () + | "hilight" -> + () + | "error" -> + () + | "warning" -> + () + | "list" -> + Format.fprintf ppf " @[<v 0>" + | "command" -> + Format.fprintf ppf "@[<v 0>" + | "document" -> + Format.fprintf ppf "@[<v 0>" + | other -> + push_level_tag other); + print_close_tag = + (function + | "title" -> + Format.fprintf ppf ":" + | "commandline" -> + Format.fprintf ppf "@]" + | "commanddoc" -> + Format.fprintf ppf "@]" + | "opt" -> + () + | "arg" -> + Format.fprintf ppf ">" + | "kwd" -> + () + | "error" -> + () + | "warning" -> + () + | "hilight" -> + () + | "command" | "list" -> + Format.fprintf ppf "@]" + | "document" -> + Format.fprintf ppf "@]" + | other -> + pop_level_tag other) } ; Format.pp_set_print_tags ppf true | Html -> - Format.pp_set_formatter_tag_functions ppf - { mark_open_tag = (fun _ -> "") ; - mark_close_tag = (fun _ -> "") ; - print_open_tag = begin function - | "title" -> Format.fprintf ppf "\003h3\004" - | "commandline" -> Format.fprintf ppf "\003div class='cmdline'\004@[<h>" - | "commanddoc" -> Format.fprintf ppf "\003div class='cmddoc'\004" - | "opt" -> Format.fprintf ppf "\003span class='opt'\004" - | "arg" -> Format.fprintf ppf "\003span class='arg'\004" - | "kwd" -> Format.fprintf ppf "\003span class='kwd'\004" - | "hilight" -> () - | "error" -> () - | "warning" -> () - | "list" -> Format.fprintf ppf "\003ul\004@\n" - 
| "command" -> Format.fprintf ppf "\003li\004@\n" + Format.pp_set_formatter_tag_functions + ppf + { mark_open_tag = (fun _ -> ""); + mark_close_tag = (fun _ -> ""); + print_open_tag = + (function + | "title" -> + Format.fprintf ppf "\003h3\004" + | "commandline" -> + Format.fprintf ppf "\003div class='cmdline'\004@[<h>" + | "commanddoc" -> + Format.fprintf ppf "\003div class='cmddoc'\004" + | "opt" -> + Format.fprintf ppf "\003span class='opt'\004" + | "arg" -> + Format.fprintf ppf "\003span class='arg'\004" + | "kwd" -> + Format.fprintf ppf "\003span class='kwd'\004" + | "hilight" -> + () + | "error" -> + () + | "warning" -> + () + | "list" -> + Format.fprintf ppf "\003ul\004@\n" + | "command" -> + Format.fprintf ppf "\003li\004@\n" | "document" -> - Format.fprintf ppf - "@[<v 0>\003style\004\ - .cmdline { font-family: monospace }\ - .cmddoc { white-space: pre-wrap ; font-family: monospace; line-height: 170%%; margin: 0 0 20px 0 }\ - .cmdline { background: #343131; padding: 2px 8px; border-radius:10px; color: white; margin: 5px; }\ - .cmdline+.cmddoc { margin: -5px 5px 0 20px; padding: 5px }\ - .opt,.arg { background: #343131; font-weight: bold; padding: 2px 4px; border-radius:5px; }\ - .kwd { font-weight: bold; } .opt { color:#CF0; background: #460; } .arg { color: #CEF; background: #369; }\ - \003/style\004@\n" ; - | other -> push_level_tag other - end ; - print_close_tag = begin function - | "title" -> Format.fprintf ppf "\003/h3\004@\n" - | "commandline" -> Format.fprintf ppf "@]\003/div\004@\n" - | "commanddoc" -> Format.fprintf ppf "\003/div\004@\n" - | "opt" | "arg" | "kwd" -> Format.fprintf ppf "\003/span\004" - | "error" | "warning" | "hilight" -> () - | "list" -> Format.fprintf ppf "\003/ul\004@\n" - | "command" -> Format.fprintf ppf "\003/li\004@\n" - | "document" -> Format.fprintf ppf "@]" - | other -> pop_level_tag other - end } ; + Format.fprintf + ppf + "@[<v 0>\003style\004.cmdline { font-family: monospace \ + }.cmddoc { white-space: pre-wrap ; font-family: \ + monospace; line-height: 170%%; margin: 0 0 20px 0 \ + }.cmdline { background: #343131; padding: 2px \ + 8px;\tborder-radius:10px; color: white; margin: 5px; \ + }.cmdline+.cmddoc { margin: -5px 5px 0 20px; padding: \ + 5px }.opt,.arg { background: #343131; font-weight: \ + bold; padding: 2px 4px; border-radius:5px; }.kwd { \ + font-weight: bold; } .opt { color:#CF0; background: \ + #460; } .arg { color: #CEF; background: #369; \ + }\003/style\004@\n" + | other -> + push_level_tag other); + print_close_tag = + (function + | "title" -> + Format.fprintf ppf "\003/h3\004@\n" + | "commandline" -> + Format.fprintf ppf "@]\003/div\004@\n" + | "commanddoc" -> + Format.fprintf ppf "\003/div\004@\n" + | "opt" | "arg" | "kwd" -> + Format.fprintf ppf "\003/span\004" + | "error" | "warning" | "hilight" -> + () + | "list" -> + Format.fprintf ppf "\003/ul\004@\n" + | "command" -> + Format.fprintf ppf "\003/li\004@\n" + | "document" -> + Format.fprintf ppf "@]" + | other -> + pop_level_tag other) } ; let orig_out_functions = - Format.pp_get_formatter_out_functions ppf () in - Format.pp_set_formatter_out_functions ppf + Format.pp_get_formatter_out_functions ppf () + in + Format.pp_set_formatter_out_functions + ppf { orig_out_functions with - out_string = (fun s i j -> + out_string = + (fun s i j -> let buf = Buffer.create (j - i) in - for n = i to j - 1 do match String.get s n with - | '\003' -> Buffer.add_char buf '<' - | '\004' -> Buffer.add_char buf '>' - | '>' -> Buffer.add_string buf ">" - | '<' -> Buffer.add_string buf "<" - | 
c -> Buffer.add_char buf c + for n = i to j - 1 do + match s.[n] with + | '\003' -> + Buffer.add_char buf '<' + | '\004' -> + Buffer.add_char buf '>' + | '>' -> + Buffer.add_string buf ">" + | '<' -> + Buffer.add_string buf "<" + | c -> + Buffer.add_char buf c done ; let s' = Buffer.contents buf in - orig_out_functions.out_string s' 0 (String.length s'))} ; - Format.pp_set_print_tags ppf true - end ; + orig_out_functions.out_string s' 0 (String.length s')) } ; + Format.pp_set_print_tags ppf true ) ; orig_state let restore_formatter ppf (out_functions, tag_functions, tags) = @@ -546,770 +756,1280 @@ let restore_formatter ppf (out_functions, tag_functions, tags) = Format.pp_set_formatter_tag_functions ppf tag_functions ; Format.pp_set_print_tags ppf tags -let usage_internal ppf ~executable_name ~global_options ?(highlights=[]) commands = +let usage_internal ppf ~executable_name ~global_options ?(highlights = []) + commands = let by_group = group_commands commands in - let (Argument { spec ; _ }) = global_options in + let (Argument {spec; _}) = global_options in let print_groups = Format.pp_print_list - ~pp_sep: (fun ppf () -> Format.fprintf ppf "@,@,") - (print_group (fun ppf (Ex command) -> print_command ?prefix:None ~highlights ppf command)) in - Format.fprintf ppf + ~pp_sep:(fun ppf () -> Format.fprintf ppf "@,@,") + (print_group (fun ppf (Ex command) -> + print_command ?prefix:None ~highlights ppf command)) + in + Format.fprintf + ppf "@{<document>@{<title>Usage@}@,\ - @{<list>\ - @{<command>@{<commandline>\ - %s [@{<opt>global options@}] @{<kwd>command@} [@{<opt>command options@}]@}@}@,\ - @{<command>@{<commandline>\ - %s @{<opt>--help@} (for global options)@}@}@,\ - @{<command>@{<commandline>\ - %s [@{<opt>global options@}] @{<kwd>command@} @{<opt>--help@} (for command options)@}@}\ - @}@,@,\ + @{<list>@{<command>@{<commandline>%s [@{<opt>global options@}] \ + @{<kwd>command@} [@{<opt>command options@}]@}@}@,\ + @{<command>@{<commandline>%s @{<opt>--help@} (for global options)@}@}@,\ + @{<command>@{<commandline>%s [@{<opt>global options@}] @{<kwd>command@} \ + @{<opt>--help@} (for command options)@}@}@}@,\ + @,\ @{<title>To browse the documentation@}@,\ - @{<list>\ - @{<command>@{<commandline>\ - %s [@{<opt>global options@}] @{<kwd>man@} (for a list of commands)@}@}@,\ - @{<command>@{<commandline>\ - %s [@{<opt>global options@}] @{<kwd>man@} @{<opt>-v 3@} (for the full manual)@}@}\ - @}@,@,\ + @{<list>@{<command>@{<commandline>%s [@{<opt>global options@}] \ + @{<kwd>man@} (for a list of commands)@}@}@,\ + @{<command>@{<commandline>%s [@{<opt>global options@}] @{<kwd>man@} \ + @{<opt>-v 3@} (for the full manual)@}@}@}@,\ + @,\ @{<title>Global options (must come before the command)@}@,\ - @{<commanddoc>%a@}%a\ - %a@}@." - executable_name executable_name executable_name executable_name executable_name - print_options_detailed spec - (fun ppf () -> if by_group <> [] then Format.fprintf ppf "@,@,") () - print_groups by_group + @{<commanddoc>%a@}%a%a@}@." 
+ executable_name + executable_name + executable_name + executable_name + executable_name + print_options_detailed + spec + (fun ppf () -> if by_group <> [] then Format.fprintf ppf "@,@,") + () + print_groups + by_group let constant c = Constant c let arg ~doc ?short ~long ~placeholder kind = - Arg { doc ; - label = { long ; short } ; - placeholder ; - kind } + Arg {doc; label = {long; short}; placeholder; kind} let default_arg ~doc ?short ~long ~placeholder ~default kind = - DefArg { doc ; - placeholder ; - label = { long ; short } ; - kind ; - default } + DefArg {doc; placeholder; label = {long; short}; kind; default} -let switch ~doc ?short ~long () = - Switch { doc ; label = { long ; short } } +let switch ~doc ?short ~long () = Switch {doc; label = {long; short}} let parse_arg : - type a ctx. ?command:_ command -> (a, ctx) arg -> string list TzString.Map.t -> ctx -> a tzresult Lwt.t = - fun ?command spec args_dict ctx -> - match spec with - | Arg { label = { long ; short = _ } ; kind = { converter ; _ } ; _ } -> - begin match TzString.Map.find_opt long args_dict with - | None - | Some [] -> return_none - | Some [ s ] -> - (trace - (Bad_option_argument ("--" ^ long, command)) - (converter ctx s)) >>|? fun x -> - Some x - | Some (_ :: _) -> - fail (Multiple_occurences ("--" ^ long, command)) - end - | DefArg { label = { long ; short = _ } ; kind = { converter ; _ } ; default ; _ } -> - converter ctx default >>= fun default -> - begin match default with - | Ok x -> return x - | Error _ -> - invalid_arg - (Format.sprintf - "Value provided as default for '%s' could not be parsed by converter function." - long) end >>=? fun default -> - begin match TzString.Map.find_opt long args_dict with - | None - | Some [] -> return default - | Some [ s ] -> - (trace - (Bad_option_argument (long, command)) - (converter ctx s)) - | Some (_ :: _) -> - fail (Multiple_occurences (long, command)) - end - | Switch { label = { long ; short = _ } ; _ } -> - begin match TzString.Map.find_opt long args_dict with - | None - | Some [] -> return_false - | Some [ _ ] -> return_true - | Some (_ :: _) -> fail (Multiple_occurences (long, command)) - end - | Constant c -> return c + type a ctx. + ?command:_ command -> + (a, ctx) arg -> + string list TzString.Map.t -> + ctx -> + a tzresult Lwt.t = + fun ?command spec args_dict ctx -> + match spec with + | Arg {label = {long; short = _}; kind = {converter; _}; _} -> ( + match TzString.Map.find_opt long args_dict with + | None | Some [] -> + return_none + | Some [s] -> + trace (Bad_option_argument ("--" ^ long, command)) (converter ctx s) + >>|? fun x -> Some x + | Some (_ :: _) -> + fail (Multiple_occurences ("--" ^ long, command)) ) + | DefArg {label = {long; short = _}; kind = {converter; _}; default; _} -> ( + converter ctx default + >>= fun default -> + ( match default with + | Ok x -> + return x + | Error _ -> + invalid_arg + (Format.sprintf + "Value provided as default for '%s' could not be parsed by \ + converter function." + long) ) + >>=? 
fun default -> + match TzString.Map.find_opt long args_dict with + | None | Some [] -> + return default + | Some [s] -> + trace (Bad_option_argument (long, command)) (converter ctx s) + | Some (_ :: _) -> + fail (Multiple_occurences (long, command)) ) + | Switch {label = {long; short = _}; _} -> ( + match TzString.Map.find_opt long args_dict with + | None | Some [] -> + return_false + | Some [_] -> + return_true + | Some (_ :: _) -> + fail (Multiple_occurences (long, command)) ) + | Constant c -> + return c (* Argument parsing *) let rec parse_args : - type a ctx. ?command:_ command -> (a, ctx) args -> string list TzString.Map.t -> ctx -> a tzresult Lwt.t = - fun ?command spec args_dict ctx -> - match spec with - | NoArgs -> return_unit - | AddArg (arg, rest) -> - parse_arg ?command arg args_dict ctx >>=? fun arg -> - parse_args ?command rest args_dict ctx >>|? fun rest -> - (arg, rest) + type a ctx. + ?command:_ command -> + (a, ctx) args -> + string list TzString.Map.t -> + ctx -> + a tzresult Lwt.t = + fun ?command spec args_dict ctx -> + match spec with + | NoArgs -> + return_unit + | AddArg (arg, rest) -> + parse_arg ?command arg args_dict ctx + >>=? fun arg -> + parse_args ?command rest args_dict ctx >>|? fun rest -> (arg, rest) let empty_args_dict = TzString.Map.empty let rec make_arities_dict : - type a b. (a, b) args -> (int * string) TzString.Map.t -> (int * string) TzString.Map.t = - fun args acc -> match args with - | NoArgs -> acc - | AddArg (arg, rest) -> - let recur { long ; short } num = - (match short with - | None -> acc - | Some c -> TzString.Map.add ("-" ^ String.make 1 c) (num, long) acc) |> - TzString.Map.add ("-" ^ long) (num, long) |> - TzString.Map.add ("--" ^ long) (num, long) |> - make_arities_dict rest in - match arg with - | Arg { label ; _ } -> recur label 1 - | DefArg { label ; _ } -> recur label 1 - | Switch { label ; _ } -> recur label 0 - | Constant _c -> make_arities_dict rest acc + type a b. + (a, b) args -> + (int * string) TzString.Map.t -> + (int * string) TzString.Map.t = + fun args acc -> + match args with + | NoArgs -> + acc + | AddArg (arg, rest) -> ( + let recur {long; short} num = + ( match short with + | None -> + acc + | Some c -> + TzString.Map.add ("-" ^ String.make 1 c) (num, long) acc ) + |> TzString.Map.add ("-" ^ long) (num, long) + |> TzString.Map.add ("--" ^ long) (num, long) + |> make_arities_dict rest + in + match arg with + | Arg {label; _} -> + recur label 1 + | DefArg {label; _} -> + recur label 1 + | Switch {label; _} -> + recur label 0 + | Constant _c -> + make_arities_dict rest acc ) type error += Help : 'a command option -> error let check_help_flag ?command = function - | ("-h" | "--help") :: _ -> fail (Help command) - | _ -> return_unit + | ("-h" | "--help") :: _ -> + fail (Help command) + | _ -> + return_unit let add_occurrence long value acc = match TzString.Map.find_opt long acc with - | Some v -> TzString.Map.add long v acc - | None -> TzString.Map.add long [ value ] acc + | Some v -> + TzString.Map.add long v acc + | None -> + TzString.Map.add long [value] acc let make_args_dict_consume ?command spec args = let rec make_args_dict completing arities acc args = - check_help_flag ?command args >>=? fun () -> + check_help_flag ?command args + >>=? 
fun () -> match args with - | [] -> return (acc, []) + | [] -> + return (acc, []) | arg :: tl -> - if String.length arg > 0 && String.get arg 0 = '-' then + if String.length arg > 0 && arg.[0] = '-' then if TzString.Map.mem arg arities then - let arity, long = TzString.Map.find arg arities in - check_help_flag ?command tl >>=? fun () -> - match arity, tl with - | 0, tl' -> - make_args_dict completing arities (add_occurrence long "" acc) tl' - | 1, value :: tl' -> - make_args_dict completing arities (add_occurrence long value acc) tl' - | 1, [] when completing -> + let (arity, long) = TzString.Map.find arg arities in + check_help_flag ?command tl + >>=? fun () -> + match (arity, tl) with + | (0, tl') -> + make_args_dict + completing + arities + (add_occurrence long "" acc) + tl' + | (1, value :: tl') -> + make_args_dict + completing + arities + (add_occurrence long value acc) + tl' + | (1, []) when completing -> return (acc, []) - | 1, [] -> + | (1, []) -> fail (Option_expected_argument (arg, None)) - | _, _ -> - raise (Failure "cli_entries: Arguments with arity not equal to 1 or 0 not supported") - else - fail (Unknown_option (arg, None)) + | (_, _) -> + raise + (Failure + "cli_entries: Arguments with arity not equal to 1 or 0 \ + not supported") + else fail (Unknown_option (arg, None)) else return (acc, args) - in make_args_dict false (make_arities_dict spec TzString.Map.empty) TzString.Map.empty args + in + make_args_dict + false + (make_arities_dict spec TzString.Map.empty) + TzString.Map.empty + args let make_args_dict_filter ?command spec args = let rec make_args_dict arities (dict, other_args) args = - check_help_flag ?command args >>=? fun () -> + check_help_flag ?command args + >>=? fun () -> match args with - | [] -> return (dict, other_args) + | [] -> + return (dict, other_args) | arg :: tl -> - if TzString.Map.mem arg arities - then let arity, long = TzString.Map.find arg arities in - check_help_flag ?command tl >>=? fun () -> - match arity, tl with - | 0, tl -> make_args_dict arities (add_occurrence long "" dict, other_args) tl - | 1, value :: tl' -> make_args_dict arities (add_occurrence long value dict, other_args) tl' - | 1, [] -> fail (Option_expected_argument (arg, command)) - | _, _ -> - raise (Failure "cli_entries: Arguments with arity not equal to 1 or 0 not supported") + if TzString.Map.mem arg arities then + let (arity, long) = TzString.Map.find arg arities in + check_help_flag ?command tl + >>=? fun () -> + match (arity, tl) with + | (0, tl) -> + make_args_dict + arities + (add_occurrence long "" dict, other_args) + tl + | (1, value :: tl') -> + make_args_dict + arities + (add_occurrence long value dict, other_args) + tl' + | (1, []) -> + fail (Option_expected_argument (arg, command)) + | (_, _) -> + raise + (Failure + "cli_entries: Arguments with arity not equal to 1 or 0 not \ + supported") else make_args_dict arities (dict, arg :: other_args) tl - in make_args_dict + in + make_args_dict (make_arities_dict spec TzString.Map.empty) (TzString.Map.empty, []) - args >>|? fun (dict, remaining) -> - (dict, List.rev remaining) + args + >>|? 
fun (dict, remaining) -> (dict, List.rev remaining) + +let ( >> ) arg1 arg2 = AddArg (arg1, arg2) -let (>>) arg1 arg2 = AddArg (arg1, arg2) let args1 spec = - Argument { spec = spec >> NoArgs; - converter = fun (arg, ()) -> arg } + Argument {spec = spec >> NoArgs; converter = (fun (arg, ()) -> arg)} + let args2 spec1 spec2 = - Argument { spec = spec1 >> (spec2 >> NoArgs) ; - converter = fun (arg1, (arg2, ())) -> arg1, arg2 } + Argument + { spec = spec1 >> (spec2 >> NoArgs); + converter = (fun (arg1, (arg2, ())) -> (arg1, arg2)) } + let args3 spec1 spec2 spec3 = - Argument { spec = spec1 >> (spec2 >> (spec3 >> NoArgs)) ; - converter = fun (arg1, (arg2, (arg3, ()))) -> arg1, arg2, arg3 } + Argument + { spec = spec1 >> (spec2 >> (spec3 >> NoArgs)); + converter = (fun (arg1, (arg2, (arg3, ()))) -> (arg1, arg2, arg3)) } + let args4 spec1 spec2 spec3 spec4 = - Argument { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> NoArgs))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, ())))) -> arg1, arg2, arg3, arg4 } + Argument + { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> NoArgs))); + converter = + (fun (arg1, (arg2, (arg3, (arg4, ())))) -> (arg1, arg2, arg3, arg4)) } + let args5 spec1 spec2 spec3 spec4 spec5 = - Argument { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> NoArgs)))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, ()))))) -> arg1, arg2, arg3, arg4, arg5 } + Argument + { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> NoArgs)))); + converter = + (fun (arg1, (arg2, (arg3, (arg4, (arg5, ()))))) -> + (arg1, arg2, arg3, arg4, arg5)) } + let args6 spec1 spec2 spec3 spec4 spec5 spec6 = - Argument { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> NoArgs))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, ())))))) -> - arg1, arg2, arg3, arg4, arg5, spec6 } + Argument + { spec = + spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> NoArgs))))); + converter = + (fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, ())))))) -> + (arg1, arg2, arg3, arg4, arg5, spec6)) } + let args7 spec1 spec2 spec3 spec4 spec5 spec6 spec7 = - Argument { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> NoArgs)))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, ()))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7 } + Argument + { spec = + spec1 + >> ( spec2 + >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> NoArgs))))) ); + converter = + (fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, ()))))))) -> + (arg1, arg2, arg3, arg4, arg5, spec6, spec7)) } + let args8 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 = - Argument { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> NoArgs))))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, ())))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8 } + Argument + { spec = + spec1 + >> ( spec2 + >> ( spec3 + >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> NoArgs))))) + ) ); + converter = + (fun ( arg1, + (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, ()))))))) ) -> + (arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8)) } + let args9 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 = Argument - { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> (spec9 >> NoArgs)))))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, (spec9, ()))))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9 } + 
{ spec = + spec1 + >> ( spec2 + >> ( spec3 + >> ( spec4 + >> ( spec5 + >> (spec6 >> (spec7 >> (spec8 >> (spec9 >> NoArgs)))) ) ) + ) ); + converter = + (fun ( arg1, + ( arg2, + (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, (spec9, ()))))))) + ) ) -> + (arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9)) } + let args10 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 = Argument - { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> (spec9 >> (spec10 >> NoArgs))))))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, (spec9, (spec10, ())))))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9, spec10 } -let args11 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 = + { spec = + spec1 + >> ( spec2 + >> ( spec3 + >> ( spec4 + >> ( spec5 + >> ( spec6 + >> (spec7 >> (spec8 >> (spec9 >> (spec10 >> NoArgs)))) + ) ) ) ) ); + converter = + (fun ( arg1, + ( arg2, + ( arg3, + ( arg4, + (arg5, (spec6, (spec7, (spec8, (spec9, (spec10, ())))))) + ) ) ) ) -> + (arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9, spec10)) + } + +let args11 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 + = Argument - { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> (spec9 >> (spec10 >> (spec11 >> NoArgs)))))))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, (spec9, (spec10, (spec11, ()))))))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9, spec10, spec11 } - -let args12 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 spec12 = + { spec = + spec1 + >> ( spec2 + >> ( spec3 + >> ( spec4 + >> ( spec5 + >> ( spec6 + >> ( spec7 + >> ( spec8 + >> (spec9 >> (spec10 >> (spec11 >> NoArgs))) ) ) + ) ) ) ) ); + converter = + (fun ( arg1, + ( arg2, + ( arg3, + ( arg4, + ( arg5, + ( spec6, + (spec7, (spec8, (spec9, (spec10, (spec11, ()))))) ) ) + ) ) ) ) -> + ( arg1, + arg2, + arg3, + arg4, + arg5, + spec6, + spec7, + spec8, + spec9, + spec10, + spec11 )) } + +let args12 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 + spec12 = Argument - { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> (spec9 >> (spec10 >> (spec11 >> (spec12 >> NoArgs))))))))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, (spec9, (spec10, (spec11, (spec12, ())))))))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9, spec10, spec11, spec12 } - -let args13 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 spec12 spec13 = + { spec = + spec1 + >> ( spec2 + >> ( spec3 + >> ( spec4 + >> ( spec5 + >> ( spec6 + >> ( spec7 + >> ( spec8 + >> ( spec9 + >> (spec10 >> (spec11 >> (spec12 >> NoArgs))) + ) ) ) ) ) ) ) ); + converter = + (fun ( arg1, + ( arg2, + ( arg3, + ( arg4, + ( arg5, + ( spec6, + ( spec7, + (spec8, (spec9, (spec10, (spec11, (spec12, ()))))) + ) ) ) ) ) ) ) -> + ( arg1, + arg2, + arg3, + arg4, + arg5, + spec6, + spec7, + spec8, + spec9, + spec10, + spec11, + spec12 )) } + +let args13 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 + spec12 spec13 = Argument - { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> (spec9 >> (spec10 >> (spec11 >> (spec12 >> (spec13 >> NoArgs)))))))))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, (spec9, (spec10, (spec11, (spec12, (spec13, ()))))))))))))) -> - arg1, 
arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9, spec10, spec11, spec12, spec13 } - -let args14 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 spec12 spec13 spec14 = + { spec = + spec1 + >> ( spec2 + >> ( spec3 + >> ( spec4 + >> ( spec5 + >> ( spec6 + >> ( spec7 + >> ( spec8 + >> ( spec9 + >> ( spec10 + >> (spec11 >> (spec12 >> (spec13 >> NoArgs))) + ) ) ) ) ) ) ) ) ); + converter = + (fun ( arg1, + ( arg2, + ( arg3, + ( arg4, + ( arg5, + ( spec6, + ( spec7, + ( spec8, + (spec9, (spec10, (spec11, (spec12, (spec13, ()))))) + ) ) ) ) ) ) ) ) -> + ( arg1, + arg2, + arg3, + arg4, + arg5, + spec6, + spec7, + spec8, + spec9, + spec10, + spec11, + spec12, + spec13 )) } + +let args14 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 + spec12 spec13 spec14 = Argument - { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> (spec9 >> (spec10 >> (spec11 >> (spec12 >> (spec13 >> (spec14 >> NoArgs))))))))))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, (spec9, (spec10, (spec11, (spec12, (spec13, (spec14, ())))))))))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9, spec10, spec11, spec12, spec13, spec14 } - -let args15 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 spec12 spec13 spec14 spec15 = + { spec = + spec1 + >> ( spec2 + >> ( spec3 + >> ( spec4 + >> ( spec5 + >> ( spec6 + >> ( spec7 + >> ( spec8 + >> ( spec9 + >> ( spec10 + >> ( spec11 + >> ( spec12 + >> (spec13 >> (spec14 >> NoArgs)) ) ) + ) ) ) ) ) ) ) ) ); + converter = + (fun ( arg1, + ( arg2, + ( arg3, + ( arg4, + ( arg5, + ( spec6, + ( spec7, + ( spec8, + ( spec9, + ( spec10, + (spec11, (spec12, (spec13, (spec14, ())))) ) + ) ) ) ) ) ) ) ) ) -> + ( arg1, + arg2, + arg3, + arg4, + arg5, + spec6, + spec7, + spec8, + spec9, + spec10, + spec11, + spec12, + spec13, + spec14 )) } + +let args15 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 + spec12 spec13 spec14 spec15 = Argument - { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> (spec9 >> (spec10 >> (spec11 >> (spec12 >> (spec13 >> (spec14 >> (spec15 >> NoArgs)))))))))))))) ; - converter = fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, (spec9, (spec10, (spec11, (spec12, (spec13, (spec14, (spec15, ()))))))))))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9, spec10, spec11, spec12, spec13, spec14, spec15 } - -let args16 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 spec12 spec13 spec14 spec15 spec16 = + { spec = + spec1 + >> ( spec2 + >> ( spec3 + >> ( spec4 + >> ( spec5 + >> ( spec6 + >> ( spec7 + >> ( spec8 + >> ( spec9 + >> ( spec10 + >> ( spec11 + >> ( spec12 + >> ( spec13 + >> (spec14 >> (spec15 >> NoArgs)) + ) ) ) ) ) ) ) ) ) ) ) ); + converter = + (fun ( arg1, + ( arg2, + ( arg3, + ( arg4, + ( arg5, + ( spec6, + ( spec7, + ( spec8, + ( spec9, + ( spec10, + ( spec11, + (spec12, (spec13, (spec14, (spec15, ())))) + ) ) ) ) ) ) ) ) ) ) ) -> + ( arg1, + arg2, + arg3, + arg4, + arg5, + spec6, + spec7, + spec8, + spec9, + spec10, + spec11, + spec12, + spec13, + spec14, + spec15 )) } + +let args16 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 + spec12 spec13 spec14 spec15 spec16 = Argument - { spec = spec1 >> (spec2 >> (spec3 >> (spec4 >> (spec5 >> (spec6 >> (spec7 >> (spec8 >> (spec9 >> (spec10 >> (spec11 >> (spec12 >> (spec13 >> (spec14 >> (spec15 >> (spec16 >> NoArgs))))))))))))))) ; - converter = fun (arg1, 
(arg2, (arg3, (arg4, (arg5, (spec6, (spec7, (spec8, (spec9, (spec10, (spec11, (spec12, (spec13, (spec14, (spec15, (spec16, ())))))))))))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9, spec10, spec11, spec12, spec13, spec14, spec15, spec16 } - -let args17 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 - spec11 spec12 spec13 spec14 spec15 spec16 spec17 = + { spec = + spec1 + >> ( spec2 + >> ( spec3 + >> ( spec4 + >> ( spec5 + >> ( spec6 + >> ( spec7 + >> ( spec8 + >> ( spec9 + >> ( spec10 + >> ( spec11 + >> ( spec12 + >> ( spec13 + >> ( spec14 + >> (spec15 >> (spec16 >> NoArgs)) + ) ) ) ) ) ) ) ) ) ) ) ) ); + converter = + (fun ( arg1, + ( arg2, + ( arg3, + ( arg4, + ( arg5, + ( spec6, + ( spec7, + ( spec8, + ( spec9, + ( spec10, + ( spec11, + ( spec12, + (spec13, (spec14, (spec15, (spec16, ())))) + ) ) ) ) ) ) ) ) ) ) ) ) -> + ( arg1, + arg2, + arg3, + arg4, + arg5, + spec6, + spec7, + spec8, + spec9, + spec10, + spec11, + spec12, + spec13, + spec14, + spec15, + spec16 )) } + +let args17 spec1 spec2 spec3 spec4 spec5 spec6 spec7 spec8 spec9 spec10 spec11 + spec12 spec13 spec14 spec15 spec16 spec17 = Argument - { spec = spec1 >> - (spec2 >> (spec3 >> (spec4 >> (spec5 >> ( - spec6 >> (spec7 >> (spec8 >> (spec9 >> (spec10 >> (spec11 >> ( - spec12 >> (spec13 >> (spec14 >> (spec15 >> ( - spec16 >> (spec17 >> NoArgs)))))))))))))))) ; + { spec = + spec1 + >> ( spec2 + >> ( spec3 + >> ( spec4 + >> ( spec5 + >> ( spec6 + >> ( spec7 + >> ( spec8 + >> ( spec9 + >> ( spec10 + >> ( spec11 + >> ( spec12 + >> ( spec13 + >> ( spec14 + >> ( spec15 + >> ( spec16 + >> (spec17 >> NoArgs) ) ) + ) ) ) ) ) ) ) ) ) ) ) ) ); converter = - fun (arg1, (arg2, (arg3, (arg4, (arg5, (spec6, (spec7, ( - spec8, (spec9, (spec10, (spec11, (spec12, (spec13, ( - spec14, (spec15, (spec16, (spec17, ()))))))))))))))))) -> - arg1, arg2, arg3, arg4, arg5, spec6, spec7, spec8, spec9, spec10, - spec11, spec12, spec13, spec14, spec15, spec16, spec17 } + (fun ( arg1, + ( arg2, + ( arg3, + ( arg4, + ( arg5, + ( spec6, + ( spec7, + ( spec8, + ( spec9, + ( spec10, + ( spec11, + ( spec12, + ( spec13, + ( spec14, + (spec15, (spec16, (spec17, ()))) ) ) + ) ) ) ) ) ) ) ) ) ) ) ) -> + ( arg1, + arg2, + arg3, + arg4, + arg5, + spec6, + spec7, + spec8, + spec9, + spec10, + spec11, + spec12, + spec13, + spec14, + spec15, + spec16, + spec17 )) } (* Some combinators for writing commands concisely. 
*) let param ~name ~desc kind next = Param (name, desc, kind, next) + let seq_of_param param = match param Stop with - | Param (n, desc, parameter, Stop) -> Seq (n, desc, parameter) - | _ -> invalid_arg "Clic.seq_of_param" + | Param (n, desc, parameter, Stop) -> + Seq (n, desc, parameter) + | _ -> + invalid_arg "Clic.seq_of_param" let prefix keyword next = Prefix (keyword, next) -let rec fixed = - function [] -> Stop | n :: r -> Prefix (n, fixed r) + +let rec fixed = function [] -> Stop | n :: r -> Prefix (n, fixed r) + let rec prefixes p next = match p with [] -> next | n :: r -> Prefix (n, prefixes r next) + let stop = Stop -let no_options = Argument { spec=NoArgs ; converter=fun () -> () } + +let no_options = Argument {spec = NoArgs; converter = (fun () -> ())} + let command ?group ~desc options params handler = - Command { params ; options ; handler ; desc ; group ; conv = (fun x -> x) } + Command {params; options; handler; desc; group; conv = (fun x -> x)} (* Param combinators *) let string ~name ~desc next = - param ~name ~desc { converter=(fun _ s -> return s) ; autocomplete=None } next + param + ~name + ~desc + {converter = (fun _ s -> return s); autocomplete = None} + next let string_contains ~needle ~haystack = - try - Some (Re.Str.search_forward (Re.Str.regexp_string needle) haystack 0) - with Not_found -> - None + try Some (Re.Str.search_forward (Re.Str.regexp_string needle) haystack 0) + with Not_found -> None let rec search_params_prefix : type a arg. string -> (a, arg) params -> bool = - fun prefix -> function - | Prefix (keyword, next) -> - begin - match string_contains ~needle:prefix ~haystack:keyword with - | None -> search_params_prefix prefix next - | Some _ -> true - end - | Param (_, _, _, next) -> search_params_prefix prefix next - | Stop -> false - | Seq _ -> false - -let search_command keyword (Command { params ; _ }) = + fun prefix -> function + | Prefix (keyword, next) -> ( + match string_contains ~needle:prefix ~haystack:keyword with + | None -> + search_params_prefix prefix next + | Some _ -> + true ) + | Param (_, _, _, next) -> + search_params_prefix prefix next + | Stop -> + false + | Seq _ -> + false + +let search_command keyword (Command {params; _}) = search_params_prefix keyword params - (* Command execution *) -let exec - (type ctx) - (Command { options = (Argument { converter ; spec = options_spec }) ; - params = spec ; handler ; conv ; _ } as command) - (ctx : ctx) params args_dict = - let rec exec - : type ctx a. int -> ctx -> (a, ctx) params -> a -> string list -> unit tzresult Lwt.t - = fun i ctx spec cb params -> - match spec, params with - | Stop, _ -> cb ctx - | Seq (_, _, { converter ; _ }), seq -> - let rec do_seq i acc = function - | [] -> return (List.rev acc) - | p :: rest -> - Lwt.catch - (fun () -> converter ctx p) - (function - | Failure msg -> Error_monad.failwith "%s" msg - | exn -> fail (Exn exn)) - |> trace (Bad_argument (i, p)) >>=? fun v -> - do_seq (succ i) (v :: acc) rest in - do_seq i [] seq >>=? fun parsed -> - cb parsed ctx - | Prefix (n, next), p :: rest when n = p -> - exec (succ i) ctx next cb rest - | Param (_, _, { converter ; _ }, next), p :: rest -> - Lwt.catch - (fun () -> converter ctx p) - (function - | Failure msg -> Error_monad.failwith "%s" msg - | exn -> fail (Exn exn)) - |> trace (Bad_argument (i, p)) >>=? 
fun v -> - exec (succ i) ctx next (cb v) rest - | _ -> raise (Failure ("cli_entries internal error: exec no case matched")) +let exec (type ctx) + ( Command + { options = Argument {converter; spec = options_spec}; + params = spec; + handler; + conv; + _ } as command ) (ctx : ctx) params args_dict = + let rec exec : + type ctx a. + int -> ctx -> (a, ctx) params -> a -> string list -> unit tzresult Lwt.t + = + fun i ctx spec cb params -> + match (spec, params) with + | (Stop, _) -> + cb ctx + | (Seq (_, _, {converter; _}), seq) -> + let rec do_seq i acc = function + | [] -> + return (List.rev acc) + | p :: rest -> + Lwt.catch + (fun () -> converter ctx p) + (function + | Failure msg -> + Error_monad.failwith "%s" msg + | exn -> + fail (Exn exn)) + |> trace (Bad_argument (i, p)) + >>=? fun v -> do_seq (succ i) (v :: acc) rest + in + do_seq i [] seq >>=? fun parsed -> cb parsed ctx + | (Prefix (n, next), p :: rest) when n = p -> + exec (succ i) ctx next cb rest + | (Param (_, _, {converter; _}, next), p :: rest) -> + Lwt.catch + (fun () -> converter ctx p) + (function + | Failure msg -> + Error_monad.failwith "%s" msg + | exn -> + fail (Exn exn)) + |> trace (Bad_argument (i, p)) + >>=? fun v -> exec (succ i) ctx next (cb v) rest + | _ -> + raise (Failure "cli_entries internal error: exec no case matched") in let ctx = conv ctx in - parse_args ~command options_spec args_dict ctx >>=? fun parsed_options -> + parse_args ~command options_spec args_dict ctx + >>=? fun parsed_options -> exec 1 ctx spec (handler (converter parsed_options)) params [@@@ocaml.warning "-30"] (* Command dispatch tree *) -type 'arg level = - { stop : ('arg) command option ; - prefix : (string * 'arg tree) list } -and 'arg param_level = - { stop : 'arg command option ; - autocomplete : ('arg -> string list tzresult Lwt.t) option ; - tree : 'arg tree } +type 'arg level = { + stop : 'arg command option; + prefix : (string * 'arg tree) list +} + +and 'arg param_level = { + stop : 'arg command option; + autocomplete : ('arg -> string list tzresult Lwt.t) option; + tree : 'arg tree +} + and 'ctx tree = | TPrefix : 'ctx level -> 'ctx tree | TParam : 'ctx param_level -> 'ctx tree | TStop : 'ctx command -> 'ctx tree - | TSeq : 'ctx command * ('ctx -> string list tzresult Lwt.t) option -> 'ctx tree + | TSeq : + 'ctx command * ('ctx -> string list tzresult Lwt.t) option + -> 'ctx tree | TEmpty : 'ctx tree let has_options : type ctx. ctx command -> bool = - fun (Command { options = Argument { spec ; _ } ; _ }) -> - let args_help : type a ctx. (a, ctx) args -> bool = function - | NoArgs -> false - | AddArg (_, _) -> true - in args_help spec - -let insert_in_dispatch_tree : - type ctx. ctx tree -> ctx command -> ctx tree = - fun root (Command { params ; conv ; _ } as command) -> - let access_autocomplete : - type p ctx. (p, ctx) parameter -> (ctx -> string list tzresult Lwt.t) option = - fun { autocomplete ; _ } -> autocomplete in - let rec insert_tree - : type a ictx. 
- (ctx -> ictx) -> - ctx tree -> (a, ictx) params -> ctx tree - = fun conv t c -> - let insert_tree t c = insert_tree conv t c in - match t, c with - | TEmpty, Stop -> TStop command - | TEmpty, Seq (_, _, { autocomplete ; _ }) -> - TSeq (command, - Option.map autocomplete ~f:(fun a c -> a (conv c))) - | TEmpty, Param (_, _, param, next) -> - let autocomplete = access_autocomplete param in - let autocomplete = Option.map autocomplete ~f:(fun a c -> a (conv c)) in - TParam { tree = insert_tree TEmpty next ; stop = None ; autocomplete} - | TEmpty, Prefix (n, next) -> - TPrefix { stop = None ; prefix = [ (n, insert_tree TEmpty next) ] } - | TStop cmd, Param (_, _, param, next) -> - let autocomplete = access_autocomplete param in - let autocomplete = Option.map autocomplete ~f:(fun a c -> a (conv c)) in - if not (has_options cmd) - then TParam { tree = insert_tree TEmpty next ; - stop = Some cmd ; - autocomplete } - else raise (Failure "Command cannot have both prefix and options") - | TStop cmd, Prefix (n, next) -> - TPrefix { stop = Some cmd ; - prefix = [ (n, insert_tree TEmpty next) ] } - | TParam t, Param (_, _, _, next) -> - TParam { t with tree = insert_tree t.tree next } - | TPrefix ({ prefix ; _ } as l), Prefix (n, next) -> - let rec insert_prefix = function - | [] -> [ (n, insert_tree TEmpty next) ] - | (n', t) :: rest when n = n' -> (n, insert_tree t next) :: rest - | item :: rest -> item :: insert_prefix rest in - TPrefix { l with prefix = insert_prefix prefix } - | TPrefix ({ stop = None ; _ } as l), Stop -> - TPrefix { l with stop = Some command } - | TParam ({ stop = None ; _ } as l), Stop -> - TParam { l with stop = Some command } - | _, _ -> - Pervasives.failwith - "Clic.Command_tree.insert: conflicting commands" in - insert_tree conv root params - + fun (Command {options = Argument {spec; _}; _}) -> + let args_help : type a ctx. (a, ctx) args -> bool = function + | NoArgs -> + false + | AddArg (_, _) -> + true + in + args_help spec + +let insert_in_dispatch_tree : type ctx. ctx tree -> ctx command -> ctx tree = + fun root (Command {params; conv; _} as command) -> + let access_autocomplete : + type p ctx. + (p, ctx) parameter -> (ctx -> string list tzresult Lwt.t) option = + fun {autocomplete; _} -> autocomplete + in + let rec insert_tree : + type a ictx. 
(ctx -> ictx) -> ctx tree -> (a, ictx) params -> ctx tree = + fun conv t c -> + let insert_tree t c = insert_tree conv t c in + match (t, c) with + | (TEmpty, Stop) -> + TStop command + | (TEmpty, Seq (_, _, {autocomplete; _})) -> + TSeq (command, Option.map autocomplete ~f:(fun a c -> a (conv c))) + | (TEmpty, Param (_, _, param, next)) -> + let autocomplete = access_autocomplete param in + let autocomplete = + Option.map autocomplete ~f:(fun a c -> a (conv c)) + in + TParam {tree = insert_tree TEmpty next; stop = None; autocomplete} + | (TEmpty, Prefix (n, next)) -> + TPrefix {stop = None; prefix = [(n, insert_tree TEmpty next)]} + | (TStop cmd, Param (_, _, param, next)) -> + let autocomplete = access_autocomplete param in + let autocomplete = + Option.map autocomplete ~f:(fun a c -> a (conv c)) + in + if not (has_options cmd) then + TParam {tree = insert_tree TEmpty next; stop = Some cmd; autocomplete} + else raise (Failure "Command cannot have both prefix and options") + | (TStop cmd, Prefix (n, next)) -> + TPrefix {stop = Some cmd; prefix = [(n, insert_tree TEmpty next)]} + | (TParam t, Param (_, _, _, next)) -> + TParam {t with tree = insert_tree t.tree next} + | (TPrefix ({prefix; _} as l), Prefix (n, next)) -> + let rec insert_prefix = function + | [] -> + [(n, insert_tree TEmpty next)] + | (n', t) :: rest when n = n' -> + (n, insert_tree t next) :: rest + | item :: rest -> + item :: insert_prefix rest + in + TPrefix {l with prefix = insert_prefix prefix} + | (TPrefix ({stop = None; _} as l), Stop) -> + TPrefix {l with stop = Some command} + | (TParam ({stop = None; _} as l), Stop) -> + TParam {l with stop = Some command} + | (_, _) -> + Pervasives.failwith "Clic.Command_tree.insert: conflicting commands" + in + insert_tree conv root params let make_dispatch_tree commands = List.fold_left insert_in_dispatch_tree TEmpty commands -let rec gather_commands ?(acc=[]) tree = +let rec gather_commands ?(acc = []) tree = match tree with - | TEmpty -> acc - | TSeq (c, _) - | TStop c -> c :: acc - | TPrefix { stop ; prefix } -> - gather_assoc ~acc:(match stop with - | None -> acc - | Some c -> c :: acc) + | TEmpty -> + acc + | TSeq (c, _) | TStop c -> + c :: acc + | TPrefix {stop; prefix} -> + gather_assoc + ~acc:(match stop with None -> acc | Some c -> c :: acc) prefix - | TParam { tree ; stop ; _ } -> - gather_commands tree - ~acc:(match stop with - | None -> acc - | Some c -> c :: acc) + | TParam {tree; stop; _} -> + gather_commands + tree + ~acc:(match stop with None -> acc | Some c -> c :: acc) -and gather_assoc ?(acc=[]) trees = +and gather_assoc ?(acc = []) trees = List.fold_left (fun acc (_, tree) -> gather_commands tree ~acc) acc trees let find_command tree initial_arguments = let rec traverse tree arguments acc = - match tree, arguments with - | (TStop _ | TSeq _ - | TPrefix { stop = Some _ ; _ } - | TParam { stop = Some _ ; _}), ("-h" | "--help") :: _ -> - begin match gather_commands tree with - | [] -> assert false - | [ command ] -> fail (Help (Some command)) - | more -> fail (Unterminated_command (initial_arguments, more)) - end - | TStop c, [] -> return (c, empty_args_dict, initial_arguments) - | TStop (Command { options = Argument { spec ; _ } ; _ } as command), remaining -> - make_args_dict_filter ~command spec remaining >>=? 
fun (args_dict, unparsed) -> - begin match unparsed with - | [] -> return (command, args_dict, initial_arguments) - | hd :: _ -> - if String.length hd > 0 && String.get hd 0 = '-' then - fail (Unknown_option (hd, Some command)) - else - fail (Extra_arguments (unparsed, command)) - end - | TSeq (Command { options = Argument { spec ; _ } ; _ } as command, _), remaining -> - if List.exists (function "-h" | "--help" -> true | _ -> false) remaining then + match (tree, arguments) with + | ( ( TStop _ + | TSeq _ + | TPrefix {stop = Some _; _} + | TParam {stop = Some _; _} ), + ("-h" | "--help") :: _ ) -> ( + match gather_commands tree with + | [] -> + assert false + | [command] -> fail (Help (Some command)) + | more -> + fail (Unterminated_command (initial_arguments, more)) ) + | (TStop c, []) -> + return (c, empty_args_dict, initial_arguments) + | (TStop (Command {options = Argument {spec; _}; _} as command), remaining) + -> ( + make_args_dict_filter ~command spec remaining + >>=? fun (args_dict, unparsed) -> + match unparsed with + | [] -> + return (command, args_dict, initial_arguments) + | hd :: _ -> + if String.length hd > 0 && hd.[0] = '-' then + fail (Unknown_option (hd, Some command)) + else fail (Extra_arguments (unparsed, command)) ) + | ( TSeq ((Command {options = Argument {spec; _}; _} as command), _), + remaining ) -> + if + List.exists + (function "-h" | "--help" -> true | _ -> false) + remaining + then fail (Help (Some command)) else - make_args_dict_filter ~command spec remaining >>|? fun (dict, remaining) -> + make_args_dict_filter ~command spec remaining + >>|? fun (dict, remaining) -> (command, dict, List.rev_append acc remaining) - | TPrefix { stop = Some cmd ; _ }, [] -> + | (TPrefix {stop = Some cmd; _}, []) -> return (cmd, empty_args_dict, initial_arguments) - | TPrefix { stop = None ; prefix }, ([] | ("-h" | "--help") :: _) -> + | (TPrefix {stop = None; prefix}, ([] | ("-h" | "--help") :: _)) -> fail (Unterminated_command (initial_arguments, gather_assoc prefix)) - | TPrefix { prefix ; _ }, hd_arg :: tl -> - begin - try - return (List.assoc hd_arg prefix) - with Not_found -> fail (Command_not_found (List.rev acc, gather_assoc prefix)) - end >>=? fun tree' -> - traverse tree' tl (hd_arg :: acc) - | TParam { stop = None ; _ }, ([] | ("-h" | "--help") :: _) -> + | (TPrefix {prefix; _}, hd_arg :: tl) -> + ( try return (List.assoc hd_arg prefix) + with Not_found -> + fail (Command_not_found (List.rev acc, gather_assoc prefix)) ) + >>=? fun tree' -> traverse tree' tl (hd_arg :: acc) + | (TParam {stop = None; _}, ([] | ("-h" | "--help") :: _)) -> fail (Unterminated_command (initial_arguments, gather_commands tree)) - | TParam { stop = Some c ; _ }, [] -> + | (TParam {stop = Some c; _}, []) -> return (c, empty_args_dict, initial_arguments) - | TParam { tree ; _ }, parameter :: arguments' -> + | (TParam {tree; _}, parameter :: arguments') -> traverse tree arguments' (parameter :: acc) - | TEmpty, _ -> + | (TEmpty, _) -> fail (Command_not_found (List.rev acc, [])) - in traverse tree initial_arguments [] - + in + traverse tree initial_arguments [] let get_arg_label (type a) (arg : (a, _) arg) = match arg with - | Arg { label ; _ } -> label - | DefArg { label ; _ } -> label - | Switch { label ; _ } -> label - | Constant _ -> assert false - -let get_arg - : type a ctx. 
(a, ctx) arg -> string list - = fun arg -> - let { long ; short } = get_arg_label arg in - ("--" ^ long) :: match short with None -> [] | Some c -> [ "-" ^ String.make 1 c ] + | Arg {label; _} -> + label + | DefArg {label; _} -> + label + | Switch {label; _} -> + label + | Constant _ -> + assert false + +let get_arg : type a ctx. (a, ctx) arg -> string list = + fun arg -> + let {long; short} = get_arg_label arg in + ("--" ^ long) + :: (match short with None -> [] | Some c -> ["-" ^ String.make 1 c]) let rec list_args : type arg ctx. (arg, ctx) args -> string list = function - | NoArgs -> [] - | AddArg (Constant _, args) -> list_args args - | AddArg (arg, args) -> get_arg arg @ list_args args + | NoArgs -> + [] + | AddArg (Constant _, args) -> + list_args args + | AddArg (arg, args) -> + get_arg arg @ list_args args let complete_func autocomplete cctxt = match autocomplete with - | None -> return_nil - | Some autocomplete -> autocomplete cctxt + | None -> + return_nil + | Some autocomplete -> + autocomplete cctxt -let list_command_args (Command { options = Argument { spec ; _ } ; _ }) = +let list_command_args (Command {options = Argument {spec; _}; _}) = list_args spec -let complete_arg : type a ctx. ctx -> (a, ctx) arg -> string list tzresult Lwt.t = - fun ctx -> function - | Arg { kind = { autocomplete ; _ } ; _ } -> complete_func autocomplete ctx - | DefArg { kind = { autocomplete ; _ } ; _ } -> complete_func autocomplete ctx - | Switch _ -> return_nil - | Constant _ -> return_nil +let complete_arg : + type a ctx. ctx -> (a, ctx) arg -> string list tzresult Lwt.t = + fun ctx -> function + | Arg {kind = {autocomplete; _}; _} -> + complete_func autocomplete ctx + | DefArg {kind = {autocomplete; _}; _} -> + complete_func autocomplete ctx + | Switch _ -> + return_nil + | Constant _ -> + return_nil let rec remaining_spec : - type a ctx. TzString.Set.t -> (a, ctx) args -> string list = - fun seen -> function - | NoArgs -> [] - | AddArg (Constant _, rest) -> - remaining_spec seen rest - | AddArg (arg, rest) -> - let { long ; _ } = get_arg_label arg in - if TzString.Set.mem long seen - then remaining_spec seen rest - else get_arg arg @ remaining_spec seen rest + type a ctx. TzString.Set.t -> (a, ctx) args -> string list = + fun seen -> function + | NoArgs -> + [] + | AddArg (Constant _, rest) -> + remaining_spec seen rest + | AddArg (arg, rest) -> + let {long; _} = get_arg_label arg in + if TzString.Set.mem long seen then remaining_spec seen rest + else get_arg arg @ remaining_spec seen rest let complete_options (type ctx) continuation args args_spec ind (ctx : ctx) = let arities = make_arities_dict args_spec TzString.Map.empty in - let rec complete_spec : type a. string -> (a, ctx) args -> string list tzresult Lwt.t = - fun name -> function - | NoArgs -> return_nil - | AddArg (Constant _, rest) -> - complete_spec name rest - | AddArg (arg, rest) -> - if (get_arg_label arg).long = name - then complete_arg ctx arg - else complete_spec name rest in + let rec complete_spec : + type a. string -> (a, ctx) args -> string list tzresult Lwt.t = + fun name -> function + | NoArgs -> + return_nil + | AddArg (Constant _, rest) -> + complete_spec name rest + | AddArg (arg, rest) -> + if (get_arg_label arg).long = name then complete_arg ctx arg + else complete_spec name rest + in let rec help args ind seen = match args with | _ when ind = 0 -> - continuation args 0 >>|? fun cont_args -> - cont_args @ remaining_spec seen args_spec + continuation args 0 + >>|? 
fun cont_args -> cont_args @ remaining_spec seen args_spec | [] -> - Pervasives.failwith - "cli_entries internal autocomplete error" + Pervasives.failwith "cli_entries internal autocomplete error" | arg :: tl -> - if TzString.Map.mem arg arities - then - let arity, long = TzString.Map.find arg arities in + if TzString.Map.mem arg arities then + let (arity, long) = TzString.Map.find arg arities in let seen = TzString.Set.add long seen in - match arity, tl with - | 0, args when ind = 0 -> - continuation args 0 >>|? fun cont_args -> - remaining_spec seen args_spec @ cont_args - | 0, args -> help args (ind - 1) seen - | 1, _ when ind = 1 -> complete_spec arg args_spec - | 1, _ :: tl -> help tl (ind - 2) seen - | _ -> Pervasives.failwith "cli_entries internal error, invalid arity" + match (arity, tl) with + | (0, args) when ind = 0 -> + continuation args 0 + >>|? fun cont_args -> remaining_spec seen args_spec @ cont_args + | (0, args) -> + help args (ind - 1) seen + | (1, _) when ind = 1 -> + complete_spec arg args_spec + | (1, _ :: tl) -> + help tl (ind - 2) seen + | _ -> + Pervasives.failwith "cli_entries internal error, invalid arity" else continuation args ind - in help args ind TzString.Set.empty + in + help args ind TzString.Set.empty let complete_next_tree cctxt = function - | TPrefix { stop; prefix } -> + | TPrefix {stop; prefix} -> return - ((match stop with - | None -> [] - | Some command -> list_command_args command) - @ (List.map fst prefix)) + ( ( match stop with + | None -> + [] + | Some command -> + list_command_args command ) + @ List.map fst prefix ) | TSeq (command, autocomplete) -> - complete_func autocomplete cctxt >>|? fun completions -> - completions @ (list_command_args command) - | TParam { autocomplete ; _ } -> complete_func autocomplete cctxt - | TStop command -> return (list_command_args command) - | TEmpty -> return_nil + >>|? 
fun completions -> completions @ list_command_args command + | TParam {autocomplete; _} -> + complete_func autocomplete cctxt + | TStop command -> + return (list_command_args command) + | TEmpty -> + return_nil let complete_tree cctxt tree index args = let rec help tree args ind = - if ind = 0 - then complete_next_tree cctxt tree + if ind = 0 then complete_next_tree cctxt tree else - match tree, args with - | TSeq _, _ -> complete_next_tree cctxt tree - | TPrefix { prefix ; _ }, hd :: tl -> - begin - try help (List.assoc hd prefix) tl (ind - 1) - with Not_found -> return_nil - end - | TParam { tree ; _ }, _ :: tl -> + match (tree, args) with + | (TSeq _, _) -> + complete_next_tree cctxt tree + | (TPrefix {prefix; _}, hd :: tl) -> ( + try help (List.assoc hd prefix) tl (ind - 1) + with Not_found -> return_nil ) + | (TParam {tree; _}, _ :: tl) -> help tree tl (ind - 1) - | TStop Command { options = Argument { spec ; _ } ; conv ;_ }, args -> + | (TStop (Command {options = Argument {spec; _}; conv; _}), args) -> complete_options (fun _ _ -> return_nil) args spec ind (conv cctxt) - | (TParam _ | TPrefix _), [] - | TEmpty, _ -> return_nil - in help tree args index + | ((TParam _ | TPrefix _), []) | (TEmpty, _) -> + return_nil + in + help tree args index -let autocompletion ~script ~cur_arg ~prev_arg ~args ~global_options commands cctxt = +let autocompletion ~script ~cur_arg ~prev_arg ~args ~global_options commands + cctxt = let tree = make_dispatch_tree commands in let rec ind n = function - | [] -> None + | [] -> + None | hd :: tl -> - if hd = prev_arg - then Some (Option.unopt ~default:(n + 1) (ind (n + 1) tl)) - else (ind (n + 1) tl) in - begin - if prev_arg = script - then complete_next_tree cctxt tree >>|? fun command_completions -> - begin - let (Argument { spec ; _ }) = global_options in - list_args spec @ command_completions - end - else - match ind 0 args with - | None -> - return_nil - | Some index -> - begin - let Argument { spec ; _ } = global_options in - complete_options - (fun args ind -> complete_tree cctxt tree ind args) - args spec index cctxt - end - end >>|? fun completions -> + if hd = prev_arg then + Some (Option.unopt ~default:(n + 1) (ind (n + 1) tl)) + else ind (n + 1) tl + in + ( if prev_arg = script then + complete_next_tree cctxt tree + >>|? fun command_completions -> + let (Argument {spec; _}) = global_options in + list_args spec @ command_completions + else + match ind 0 args with + | None -> + return_nil + | Some index -> + let (Argument {spec; _}) = global_options in + complete_options + (fun args ind -> complete_tree cctxt tree ind args) + args + spec + index + cctxt ) + >>|? fun completions -> List.filter - (fun completion -> Re.Str.(string_match (regexp_string cur_arg) completion 0)) + (fun completion -> + Re.Str.(string_match (regexp_string cur_arg) completion 0)) completions let parse_global_options global_options ctx args = - let Argument { spec ; converter } = global_options in - make_args_dict_consume spec args >>=? fun (dict, remaining) -> - parse_args spec dict ctx >>=? fun nested -> - return (converter nested, remaining) + let (Argument {spec; converter}) = global_options in + make_args_dict_consume spec args + >>=? fun (dict, remaining) -> + parse_args spec dict ctx + >>=? fun nested -> return (converter nested, remaining) let dispatch commands ctx args = let tree = make_dispatch_tree commands in match args with - | [] | [ "-h" | "--help" ] -> + | [] | [("-h" | "--help")] -> fail (Help None) | _ -> - find_command tree args >>=? 
fun (command, args_dict, filtered_args) -> + find_command tree args + >>=? fun (command, args_dict, filtered_args) -> exec command ctx filtered_args args_dict type error += No_manual_entry of string list -let manual_group = - { name = "man" ; - title = "Access the documentation" } +let manual_group = {name = "man"; title = "Access the documentation"} let add_manual ~executable_name ~global_options format ppf commands = - let rec with_manual = lazy - (commands @ - [ command - ~group:manual_group - ~desc:"Print documentation of commands.\n\ - Add search keywords to narrow list.\n\ - Will display only the commands by default, \ - unless [-verbosity <2|3>] is passed or the list \ - of matching commands if less than 3." - (args2 - (arg - ~doc:"level of details\n\ + let rec with_manual = + lazy + ( commands + @ [ command + ~group:manual_group + ~desc: + "Print documentation of commands.\n\ + Add search keywords to narrow list.\n\ + Will display only the commands by default, unless [-verbosity \ + <2|3>] is passed or the list of matching commands if less than \ + 3." + (args2 + (arg + ~doc: + "level of details\n\ 0. Only shows command mnemonics, without documentation.\n\ 1. Shows command mnemonics with short descriptions.\n\ 2. Show commands and arguments with short descriptions\n\ 3. Show everything" - ~long:"verbosity" - ~short:'v' - ~placeholder:"0|1|2|3" - (parameter - ~autocomplete: (fun _ -> return [ "0" ; "1" ; "2" ; "3" ]) - (fun _ arg -> match arg with - | "0" -> return Terse - | "1" -> return Short - | "2" -> return Details - | "3" -> return Full - | _ -> failwith "Level of details out of range"))) - (default_arg - ~doc:"the manual's output format" - ~placeholder: "plain|colors|html" - ~long: "format" - ~default: - (match format with - | Ansi -> "colors" - | Plain -> "plain" - | Html -> "html") - (parameter - ~autocomplete: (fun _ -> return [ "colors" ; "plain" ; "html" ]) - (fun _ arg -> match arg with - | "colors" -> return Ansi - | "plain" -> return Plain - | "html" -> return Html - | _ -> failwith "Unknown manual format")))) - (prefix "man" - (seq_of_param (string ~name:"keyword" - ~desc:"keyword to search for\n\ - If several are given they must all appear in the command."))) - (fun (verbosity, format) keywords _ -> - let commands = - List.fold_left - (fun commands keyword -> List.filter (search_command keyword) commands) - (Lazy.force with_manual) - keywords in - let verbosity = match verbosity with - | Some verbosity -> verbosity - | None when List.length commands <= 3 -> Full - | None -> Short in - match commands with - | [] -> fail (No_manual_entry keywords) - | _ -> - let state = setup_formatter ppf format verbosity in - let commands = List.map (fun c -> Ex c) commands in - usage_internal ppf ~executable_name ~global_options ~highlights:keywords commands ; - restore_formatter ppf state ; - return_unit) ]) in + ~long:"verbosity" + ~short:'v' + ~placeholder:"0|1|2|3" + (parameter + ~autocomplete:(fun _ -> return ["0"; "1"; "2"; "3"]) + (fun _ arg -> + match arg with + | "0" -> + return Terse + | "1" -> + return Short + | "2" -> + return Details + | "3" -> + return Full + | _ -> + failwith "Level of details out of range"))) + (default_arg + ~doc:"the manual's output format" + ~placeholder:"plain|colors|html" + ~long:"format" + ~default: + ( match format with + | Ansi -> + "colors" + | Plain -> + "plain" + | Html -> + "html" ) + (parameter + ~autocomplete:(fun _ -> + return ["colors"; "plain"; "html"]) + (fun _ arg -> + match arg with + | "colors" -> + return Ansi + | "plain" -> + 
return Plain + | "html" -> + return Html + | _ -> + failwith "Unknown manual format")))) + (prefix + "man" + (seq_of_param + (string + ~name:"keyword" + ~desc: + "keyword to search for\n\ + If several are given they must all appear in the \ + command."))) + (fun (verbosity, format) keywords _ -> + let commands = + List.fold_left + (fun commands keyword -> + List.filter (search_command keyword) commands) + (Lazy.force with_manual) + keywords + in + let verbosity = + match verbosity with + | Some verbosity -> + verbosity + | None when List.length commands <= 3 -> + Full + | None -> + Short + in + match commands with + | [] -> + fail (No_manual_entry keywords) + | _ -> + let state = setup_formatter ppf format verbosity in + let commands = List.map (fun c -> Ex c) commands in + usage_internal + ppf + ~executable_name + ~global_options + ~highlights:keywords + commands ; + restore_formatter ppf state ; + return_unit) ] ) + in Lazy.force with_manual let pp_cli_errors ppf ~executable_name ~global_options ~default errs = let pp_one = function | Bad_argument (i, v) -> - Format.fprintf ppf - "Erroneous command line argument %d (%s)." i v ; + Format.fprintf ppf "Erroneous command line argument %d (%s)." i v ; Some [] | Option_expected_argument (arg, command) -> - Format.fprintf ppf - "Command line option @{<opt>%s@} expects an argument." arg ; - Some (Option.unopt_map ~f:(fun command -> [ Ex command ]) ~default:[] command) + Format.fprintf + ppf + "Command line option @{<opt>%s@} expects an argument." + arg ; + Some + (Option.unopt_map + ~f:(fun command -> [Ex command]) + ~default:[] + command) | Bad_option_argument (arg, command) -> - Format.fprintf ppf - "Wrong value for command line option @{<opt>%s@}." arg ; - Some (Option.unopt_map ~f:(fun command -> [ Ex command ]) ~default:[] command) + Format.fprintf + ppf + "Wrong value for command line option @{<opt>%s@}." + arg ; + Some + (Option.unopt_map + ~f:(fun command -> [Ex command]) + ~default:[] + command) | Multiple_occurences (arg, command) -> - Format.fprintf ppf - "Command line option @{<opt>%s@} appears multiple times." arg ; - Some (Option.unopt_map ~f:(fun command -> [ Ex command ]) ~default:[] command) - | No_manual_entry [ keyword ] -> - Format.fprintf ppf + Format.fprintf + ppf + "Command line option @{<opt>%s@} appears multiple times." + arg ; + Some + (Option.unopt_map + ~f:(fun command -> [Ex command]) + ~default:[] + command) + | No_manual_entry [keyword] -> + Format.fprintf + ppf "No manual entry that match @{<hilight>%s@}." keyword ; Some [] | No_manual_entry (keyword :: keywords) -> - Format.fprintf ppf + Format.fprintf + ppf "No manual entry that match %a and @{<hilight>%s@}." (Format.pp_print_list ~pp_sep:(fun ppf () -> Format.fprintf ppf ", ") @@ -1318,62 +2038,84 @@ let pp_cli_errors ppf ~executable_name ~global_options ~default errs = keyword ; Some [] | Unknown_option (option, command) -> - Format.fprintf ppf - "Unexpected command line option @{<opt>%s@}." - option ; - Some (Option.unopt_map ~f:(fun command -> [ Ex command ]) ~default:[] command) + Format.fprintf ppf "Unexpected command line option @{<opt>%s@}." option ; + Some + (Option.unopt_map + ~f:(fun command -> [Ex command]) + ~default:[] + command) | Extra_arguments (extra, command) -> - Format.fprintf ppf + Format.fprintf + ppf "Extra command line arguments:@, @[<h>%a@]." 
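(* Usage sketch (illustrative; not part of the patched sources): once
   [add_manual] has been registered, the resulting executable accepts
   invocations such as the following, where [my-client] is an invented
   executable name and [transfer]/[baker] are arbitrary search keywords:

     my-client man
     my-client man -v 2 transfer
     my-client man --format plain baker

   Keywords narrow the listing, and every keyword must appear in a command
   for that command to be shown. *)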
- (Format.pp_print_list (fun ppf -> Format.fprintf ppf "%s")) extra ; - Some [ Ex command ] + (Format.pp_print_list (fun ppf -> Format.fprintf ppf "%s")) + extra ; + Some [Ex command] | Unterminated_command (_, commands) -> - Format.fprintf ppf + Format.fprintf + ppf "@[<v 2>Unterminated command, here are possible completions.@,%a@]" (Format.pp_print_list - (fun ppf (Command { params ; options = Argument { spec ; _ } ; _ }) -> - print_commandline ppf ([], spec, params))) commands ; + (fun ppf (Command {params; options = Argument {spec; _}; _}) -> + print_commandline ppf ([], spec, params))) + commands ; Some (List.map (fun c -> Ex c) commands) | Command_not_found ([], _all_commands) -> - Format.fprintf ppf + Format.fprintf + ppf "@[<v 0>Unrecognized command.@,\ Try using the @{<kwd>man@} command to get more information.@]" ; Some [] | Command_not_found (_, commands) -> - Format.fprintf ppf + Format.fprintf + ppf "@[<v 0>Unrecognized command.@,\ - Did you mean one of the following?@, @[<v 0>%a@]@]" + Did you mean one of the following?@,\ + \ @[<v 0>%a@]@]" (Format.pp_print_list - (fun ppf (Command { params ; options = Argument { spec ; _ } ; _ }) -> - print_commandline ppf ([], spec, params))) commands ; + (fun ppf (Command {params; options = Argument {spec; _}; _}) -> + print_commandline ppf ([], spec, params))) + commands ; Some (List.map (fun c -> Ex c) commands) - | err -> default ppf err ; None in + | err -> + default ppf err ; None + in let rec pp acc errs = let return command = - match command, acc with - | None, _ -> acc - | Some command, Some commands -> Some (command @ commands) - | Some command, None -> Some command in + match (command, acc) with + | (None, _) -> + acc + | (Some command, Some commands) -> + Some (command @ commands) + | (Some command, None) -> + Some command + in match errs with - | [] -> None - | [ last ] -> return (pp_one last) + | [] -> + None + | [last] -> + return (pp_one last) | err :: errs -> let acc = return (pp_one err) in - Format.fprintf ppf "@," ; - pp acc errs in + Format.fprintf ppf "@," ; pp acc errs + in Format.fprintf ppf "@[<v 2>@{<error>@{<title>Error@}@}@," ; match pp None errs with | None -> Format.fprintf ppf "@]@\n" | Some commands -> - Format.fprintf ppf "@]@\n@\n@[<v 0>%a@]" - (fun ppf commands -> usage_internal ppf ~executable_name ~global_options commands) + Format.fprintf + ppf + "@]@\n@\n@[<v 0>%a@]" + (fun ppf commands -> + usage_internal ppf ~executable_name ~global_options commands) commands let usage ppf ~executable_name ~global_options commands = - usage_internal ppf - ~executable_name ~global_options + usage_internal + ppf + ~executable_name + ~global_options (List.map (fun c -> Ex c) commands) -let map_command f (Command c) = - (Command { c with conv = (fun x -> c.conv (f x)) }) +let map_command f (Command c) = Command {c with conv = (fun x -> c.conv (f x))} diff --git a/src/lib_clic/clic.mli b/src/lib_clic/clic.mli index f75f80d5ebef455a38b4dc492ae0a139ea798b77..284b2717f62a741614fda9395d786c33123c9455 100644 --- a/src/lib_clic/clic.mli +++ b/src/lib_clic/clic.mli @@ -62,10 +62,12 @@ val parameter : resulting parser will try the first parser and if it fails will try the second. The auto-complete contents of the two will be concatenated. *) -val compose_parameters : ('a, 'ctx) parameter -> ('a, 'ctx) parameter -> ('a, 'ctx) parameter +val compose_parameters : + ('a, 'ctx) parameter -> ('a, 'ctx) parameter -> ('a, 'ctx) parameter (** Map a pure function over the result of a parameter parser. 
*) -val map_parameter : f:('a -> 'b) -> ('a, 'ctx) parameter -> ('b, 'ctx) parameter +val map_parameter : + f:('a -> 'b) -> ('a, 'ctx) parameter -> ('b, 'ctx) parameter (** {2 Flags and Options } *) @@ -78,7 +80,7 @@ val map_parameter : f:('a -> 'b) -> ('a, 'ctx) parameter -> ('b, 'ctx) parameter ["lowercase short description\nOptional longer description."]. *) type ('a, 'ctx) arg -val constant: 'a -> ('a, 'ctx) arg +val constant : 'a -> ('a, 'ctx) arg (** [arg ~doc ~long ?short converter] creates an argument to a command. The [~long] argument is the long format, without the double dashes. @@ -105,11 +107,7 @@ val default_arg : (** Create a boolean switch. The value will be set to [true] if the switch is provided and [false] if it is not. *) val switch : - doc:string -> - ?short:char -> - long:string -> - unit -> - (bool, 'ctx) arg + doc:string -> ?short:char -> long:string -> unit -> (bool, 'ctx) arg (** {2 Groups of Optional Arguments} *) @@ -123,15 +121,10 @@ type ('a, 'ctx) options val no_options : (unit, 'ctx) options (** Include 1 optional parameter *) -val args1 : - ('a, 'ctx) arg -> - ('a, 'ctx) options +val args1 : ('a, 'ctx) arg -> ('a, 'ctx) options (** Include 2 optional parameters *) -val args2 : - ('a, 'ctx) arg -> - ('b, 'ctx) arg -> - ('a * 'b, 'ctx) options +val args2 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('a * 'b, 'ctx) options (** Include 3 optional parameters *) val args3 : @@ -173,72 +166,216 @@ val args7 : ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> ('a * 'b * 'c * 'd * 'e * 'f * 'g, 'ctx) options (** Include 8 optional parameters *) -val args8 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> +val args8 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> ('a * 'b * 'c * 'd * 'e * 'f * 'g * 'h, 'ctx) options (** Include 9 optional parameters *) -val args9 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> +val args9 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> ('i, 'ctx) arg -> ('a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i, 'ctx) options (** Include 10 optional parameters *) -val args10 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> - ('i, 'ctx) arg -> ('j, 'ctx) arg -> +val args10 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> + ('i, 'ctx) arg -> + ('j, 'ctx) arg -> ('a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j, 'ctx) options (** Include 11 optional parameters *) -val args11 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> - ('i, 'ctx) arg -> ('j, 'ctx) arg -> ('k, 'ctx) arg -> +val args11 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> + ('i, 'ctx) arg -> + ('j, 'ctx) arg -> + ('k, 'ctx) arg -> ('a * 'b * 
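(* Usage sketch (illustrative; not part of the patched sources): building a
   switch and an argument, then grouping them so a command receives both at
   once.  The names [verbose_switch], [fee_arg] and [my_options] are invented
   for the example; [return] is the usual Error_monad return.

   let verbose_switch =
     Clic.switch ~doc:"print extra information" ~long:"verbose" ~short:'v' ()

   let fee_arg =
     Clic.arg
       ~doc:"fee to pay (kept as a raw string in this sketch)"
       ~long:"fee"
       ~placeholder:"amount"
       (Clic.parameter (fun _cctxt s -> return s))

   let my_options = Clic.args2 verbose_switch fee_arg

   A command built with [my_options] receives a [bool * string option] pair
   as its first handler argument: switches yield a [bool], while a plain
   [arg] yields an optional value. *)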
'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k, 'ctx) options (** Include 12 optional parameters *) -val args12 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> - ('i, 'ctx) arg -> ('j, 'ctx) arg -> ('k, 'ctx) arg -> ('l, 'ctx) arg -> +val args12 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> + ('i, 'ctx) arg -> + ('j, 'ctx) arg -> + ('k, 'ctx) arg -> + ('l, 'ctx) arg -> ('a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k * 'l, 'ctx) options (** Include 13 optional parameters *) -val args13 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> - ('i, 'ctx) arg -> ('j, 'ctx) arg -> ('k, 'ctx) arg -> ('l, 'ctx) arg -> ('m, 'ctx) arg -> - ('a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k * 'l * 'm, 'ctx) options +val args13 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> + ('i, 'ctx) arg -> + ('j, 'ctx) arg -> + ('k, 'ctx) arg -> + ('l, 'ctx) arg -> + ('m, 'ctx) arg -> + ( 'a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k * 'l * 'm, + 'ctx ) + options (** Include 14 optional parameters *) -val args14 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> - ('i, 'ctx) arg -> ('j, 'ctx) arg -> ('k, 'ctx) arg -> ('l, 'ctx) arg -> - ('m, 'ctx) arg -> ('n, 'ctx) arg -> - ('a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k * 'l * 'm * 'n, 'ctx) options +val args14 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> + ('i, 'ctx) arg -> + ('j, 'ctx) arg -> + ('k, 'ctx) arg -> + ('l, 'ctx) arg -> + ('m, 'ctx) arg -> + ('n, 'ctx) arg -> + ( 'a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k * 'l * 'm * 'n, + 'ctx ) + options (** Include 15 optional parameters *) -val args15 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> - ('i, 'ctx) arg -> ('j, 'ctx) arg -> ('k, 'ctx) arg -> ('l, 'ctx) arg -> - ('m, 'ctx) arg -> ('n, 'ctx) arg -> ('o, 'ctx) arg -> - ('a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k * 'l * 'm * 'n * 'o, 'ctx) options +val args15 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> + ('i, 'ctx) arg -> + ('j, 'ctx) arg -> + ('k, 'ctx) arg -> + ('l, 'ctx) arg -> + ('m, 'ctx) arg -> + ('n, 'ctx) arg -> + ('o, 'ctx) arg -> + ( 'a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k * 'l * 'm * 'n * 'o, + 'ctx ) + options (** Include 16 optional parameters *) -val args16 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> - ('i, 'ctx) arg -> ('j, 'ctx) arg -> ('k, 'ctx) arg -> ('l, 'ctx) arg -> - ('m, 'ctx) arg -> ('n, 'ctx) arg -> ('o, 'ctx) arg -> ('p, 'ctx) arg -> - ('a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k * 'l * 'm * 'n * 'o * 'p, 'ctx) options +val args16 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg 
-> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> + ('i, 'ctx) arg -> + ('j, 'ctx) arg -> + ('k, 'ctx) arg -> + ('l, 'ctx) arg -> + ('m, 'ctx) arg -> + ('n, 'ctx) arg -> + ('o, 'ctx) arg -> + ('p, 'ctx) arg -> + ( 'a + * 'b + * 'c + * 'd + * 'e + * 'f + * 'g + * 'h + * 'i + * 'j + * 'k + * 'l + * 'm + * 'n + * 'o + * 'p, + 'ctx ) + options (** Include 17 optional parameters *) -val args17 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) arg -> - ('e, 'ctx) arg -> ('f, 'ctx) arg -> ('g, 'ctx) arg -> ('h, 'ctx) arg -> - ('i, 'ctx) arg -> ('j, 'ctx) arg -> ('k, 'ctx) arg -> ('l, 'ctx) arg -> - ('m, 'ctx) arg -> ('n, 'ctx) arg -> ('o, 'ctx) arg -> ('p, 'ctx) arg -> +val args17 : + ('a, 'ctx) arg -> + ('b, 'ctx) arg -> + ('c, 'ctx) arg -> + ('d, 'ctx) arg -> + ('e, 'ctx) arg -> + ('f, 'ctx) arg -> + ('g, 'ctx) arg -> + ('h, 'ctx) arg -> + ('i, 'ctx) arg -> + ('j, 'ctx) arg -> + ('k, 'ctx) arg -> + ('l, 'ctx) arg -> + ('m, 'ctx) arg -> + ('n, 'ctx) arg -> + ('o, 'ctx) arg -> + ('p, 'ctx) arg -> ('q, 'ctx) arg -> - ('a * 'b * 'c * 'd * 'e * 'f * 'g * 'h * 'i * 'j * 'k * 'l * 'm * 'n * 'o * 'p * 'q, 'ctx) options + ( 'a + * 'b + * 'c + * 'd + * 'e + * 'f + * 'g + * 'h + * 'i + * 'j + * 'k + * 'l + * 'm + * 'n + * 'o + * 'p + * 'q, + 'ctx ) + options (** {2 Parameter based command lines} *) @@ -246,46 +383,37 @@ val args17 : ('a, 'ctx) arg -> ('b, 'ctx) arg -> ('c, 'ctx) arg -> ('d, 'ctx) ar type ('a, 'ctx) params (** A piece of data inside a command line *) -val param: - name: string -> - desc: string -> +val param : + name:string -> + desc:string -> ('a, 'ctx) parameter -> ('b, 'ctx) params -> ('a -> 'b, 'ctx) params (** A word in a command line. Should be descriptive. *) -val prefix: - string -> - ('a, 'ctx) params -> - ('a, 'ctx) params +val prefix : string -> ('a, 'ctx) params -> ('a, 'ctx) params (** Multiple words given in sequence for a command line *) -val prefixes: - string list -> - ('a, 'ctx) params -> - ('a, 'ctx) params +val prefixes : string list -> ('a, 'ctx) params -> ('a, 'ctx) params (** A fixed series of words that trigger a command. *) -val fixed: - string list -> - ('ctx -> unit tzresult Lwt.t, 'ctx) params +val fixed : string list -> ('ctx -> unit tzresult Lwt.t, 'ctx) params (** End the description of the command line *) -val stop: - ('ctx -> unit tzresult Lwt.t, 'ctx) params +val stop : ('ctx -> unit tzresult Lwt.t, 'ctx) params (** Take a sequence of parameters instead of only a single one. Must be the last thing in the command line. *) -val seq_of_param: +val seq_of_param : (('ctx -> unit tzresult Lwt.t, 'ctx) params -> - ('a -> 'ctx -> unit tzresult Lwt.t, 'ctx) params) -> + ('a -> 'ctx -> unit tzresult Lwt.t, 'ctx) params) -> ('a list -> 'ctx -> unit tzresult Lwt.t, 'ctx) params (** Parameter that expects a string *) -val string: - name: string -> - desc: string -> +val string : + name:string -> + desc:string -> ('a, 'ctx) params -> (string -> 'a, 'ctx) params @@ -297,22 +425,20 @@ type 'ctx command (** Type of a group of commands. Groups have their documentation printed together and should include a descriptive title. *) -type group = - { name : string ; - title : string } +type group = {name : string; title : string} (** A complete command, with documentation, a specification of its options, parameters, and handler function. 
*) -val command: - ?group: group -> - desc: string -> +val command : + ?group:group -> + desc:string -> ('b, 'ctx) options -> ('a, 'ctx) params -> ('b -> 'a) -> 'ctx command (** Combinator to use a command in an adaptated context. *) -val map_command: ('a -> 'b) -> 'b command -> 'a command +val map_command : ('a -> 'b) -> 'b command -> 'a command (** {2 Output formatting} *) @@ -359,10 +485,7 @@ type verbosity = Terse | Short | Details | Full above that level. Use prefix [=] for an exact match, or [-] for the inverse interpretation. *) val setup_formatter : - Format.formatter -> - format -> - verbosity -> - formatter_state + Format.formatter -> format -> verbosity -> formatter_state (** Restore the formatter state after [setup_formatter]. *) val restore_formatter : Format.formatter -> formatter_state -> unit @@ -374,20 +497,24 @@ type error += Help : _ command option -> error (** Find and call the applicable command on the series of arguments. @raise [Failure] if the command list would be ambiguous. *) -val dispatch: 'ctx command list -> 'ctx -> string list -> unit tzresult Lwt.t +val dispatch : 'ctx command list -> 'ctx -> string list -> unit tzresult Lwt.t (** Parse the global options, and return their value, with the rest of the command to be parsed. *) -val parse_global_options : ('a, 'ctx) options -> 'ctx -> string list -> ('a * string list) tzresult Lwt.t +val parse_global_options : + ('a, 'ctx) options -> + 'ctx -> + string list -> + ('a * string list) tzresult Lwt.t (** Pretty printfs the error messages to the given formatter. [executable_name] and [global_options] are for help screens. [default] is used to print non-cli errors. *) val pp_cli_errors : Format.formatter -> - executable_name: string -> - global_options: (_, _) options -> - default: (Format.formatter -> error -> unit) -> + executable_name:string -> + global_options:(_, _) options -> + default:(Format.formatter -> error -> unit) -> error list -> unit @@ -395,8 +522,13 @@ val pp_cli_errors : [prev_arg] is a valid prefix command, returning the list of valid next words, filtered with [cur_arg]. *) val autocompletion : - script:string -> cur_arg:string -> prev_arg:string -> args:string list -> - global_options:('a, 'ctx) options -> 'ctx command list -> 'ctx -> + script:string -> + cur_arg:string -> + prev_arg:string -> + args:string list -> + global_options:('a, 'ctx) options -> + 'ctx command list -> + 'ctx -> string list Error_monad.tzresult Lwt.t (** Displays a help page for the given commands. *) @@ -413,8 +545,8 @@ val usage : For this to work, the command list must be complete. Commands added later will not appear in the manual. 
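(* Usage sketch (illustrative; not part of the patched sources): assembling a
   complete command from an options group, a parameter line and a handler,
   then running it through [dispatch].  All names here ([dry_run_switch],
   [greet_command], the "examples" group) are invented; only the combinators
   come from the interface above.

   let dry_run_switch =
     Clic.switch ~doc:"do not actually greet anyone" ~long:"dry-run" ()

   let greet_command =
     Clic.command
       ~group:{Clic.name = "examples"; title = "Example commands"}
       ~desc:"Greet someone by name."
       (Clic.args1 dry_run_switch)
       (Clic.prefixes
          ["say"; "hello"; "to"]
          (Clic.string ~name:"who" ~desc:"person to greet" Clic.stop))
       (fun dry_run who _cctxt ->
         if dry_run then return_unit
         else (Format.printf "Hello, %s!@." who ; return_unit))

   Dispatching the argument list ["say"; "hello"; "to"; "world"] over the
   one-element command list selects this command and calls the handler:

   let run cctxt argv = Clic.dispatch [greet_command] cctxt argv *)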
*) val add_manual : - executable_name: string -> - global_options: ('a, 'ctx) options -> + executable_name:string -> + global_options:('a, 'ctx) options -> format -> Format.formatter -> 'ctx command list -> diff --git a/src/lib_clic/scriptable.ml b/src/lib_clic/scriptable.ml index ac08e08f4aec5bb172751d7cdbe4a7eed1099345..618ce6d5a2d3f0c4377472be4955ad641db62fe1 100644 --- a/src/lib_clic/scriptable.ml +++ b/src/lib_clic/scriptable.ml @@ -1,9 +1,8 @@ open Error_monad -type output_format = - | Rows of { separator : string ; escape : [ `No | `OCaml ] } +type output_format = Rows of {separator : string; escape : [`No | `OCaml]} -let rows separator escape = Rows { separator ; escape } +let rows separator escape = Rows {separator; escape} let tsv = rows "\t" `No @@ -11,38 +10,47 @@ let csv = rows "," `OCaml let clic_arg () = let open Clic in - arg ~doc:"Make the output script-friendly" ~long:"for-script" + arg + ~doc:"Make the output script-friendly" + ~long:"for-script" ~placeholder:"FORMAT" (parameter (fun _ spec -> match String.lowercase_ascii spec with - | "tsv" -> return tsv - | "csv" -> return csv + | "tsv" -> + return tsv + | "csv" -> + return csv | other -> failwith - "Cannot recognize format %S, please try 'TSV' or 'CSV'" other)) - + "Cannot recognize format %S, please try 'TSV' or 'CSV'" + other)) let fprintf_lwt chan fmt = Format.kasprintf (fun s -> - protect (fun () -> Lwt_io.write chan s >>= fun () -> return_unit)) + protect (fun () -> Lwt_io.write chan s >>= fun () -> return_unit)) fmt let output ?(channel = Lwt_io.stdout) how_option ~for_human ~for_script = match how_option with - | None -> for_human () - | Some (Rows { separator ; escape }) -> + | None -> + for_human () + | Some (Rows {separator; escape}) -> let open Format in iter_s (fun row -> - fprintf_lwt channel "%a@." - (pp_print_list - ~pp_sep:(fun fmt () -> pp_print_string fmt separator) - (fun fmt cell -> - match escape with - | `OCaml -> fprintf fmt "%S" cell - | `No -> pp_print_string fmt cell)) - row) + fprintf_lwt + channel + "%a@." + (pp_print_list + ~pp_sep:(fun fmt () -> pp_print_string fmt separator) + (fun fmt cell -> + match escape with + | `OCaml -> + fprintf fmt "%S" cell + | `No -> + pp_print_string fmt cell)) + row) (for_script ()) >>=? fun () -> protect (fun () -> Lwt_io.flush channel >>= fun () -> return_unit) @@ -51,6 +59,4 @@ let output_for_human how_option for_human = output how_option ~for_human ~for_script:(fun () -> []) let output_row ?channel how_option ~for_human ~for_script = - output ?channel how_option ~for_human - ~for_script:(fun () -> [for_script ()]) - + output ?channel how_option ~for_human ~for_script:(fun () -> [for_script ()]) diff --git a/src/lib_clic/scriptable.mli b/src/lib_clic/scriptable.mli index a7baf7276a67507b3b206a2d0f5580bd57308007..d6036e5c6a9748c68f984f41ba2dbf449af73aa0 100644 --- a/src/lib_clic/scriptable.mli +++ b/src/lib_clic/scriptable.mli @@ -28,21 +28,15 @@ open Error_monad (** Manage a common ["--for-script <FORMAT>"] option to make the output of certain commands script-friendly. *) -type output_format (** A representation of the output format. *) +type output_format -val clic_arg : unit -> (output_format option, _) Clic.arg (** Command line argument for {!Clic.command} (and the [Clic.args*] functions). Not that this is the only way to obtain a value of type [output_format]. On the command line, it appears as [--for-script] with values [TSV] or [CSV]. 
*) +val clic_arg : unit -> (output_format option, _) Clic.arg -val output : - ?channel: Lwt_io.output_channel -> - output_format option -> - for_human:(unit -> unit tzresult Lwt.t) -> - for_script:(unit -> string list list) -> - unit tzresult Lwt.t (** [output fmt_opt ~for_human ~for_script] behaves in one of two ways. If [fmt_opt] is [Some _], then it formats the value returned by [for_script ()]. The function's return value is formatted as lines of @@ -53,20 +47,26 @@ val output : The optional argument [channel] is used when automatically formatting the value returned by [for_script ()]. It has no effect on [for_human ()]. *) +val output : + ?channel:Lwt_io.output_channel -> + output_format option -> + for_human:(unit -> unit tzresult Lwt.t) -> + for_script:(unit -> string list list) -> + unit tzresult Lwt.t +(** Same as {!output} but for a single row of data. *) val output_row : - ?channel: Lwt_io.output_channel -> + ?channel:Lwt_io.output_channel -> output_format option -> for_human:(unit -> unit tzresult Lwt.t) -> for_script:(unit -> string list) -> unit tzresult Lwt.t -(** Same as {!output} but for a single row of data. *) -val output_for_human : - output_format option -> (unit -> unit tzresult Lwt.t) -> unit tzresult Lwt.t (** [output_for_human fmt_opt for_human] behaves in either of two ways. If [fmt_opt] is [None], then it calls [for_human ()]. Otherwise, it does nothing. Use this function to provide output that is of no interest to automatic tools. *) +val output_for_human : + output_format option -> (unit -> unit tzresult Lwt.t) -> unit tzresult Lwt.t diff --git a/src/lib_client_base/.ocamlformat b/src/lib_client_base/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_client_base/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_client_base/client_aliases.ml b/src/lib_client_base/client_aliases.ml index 6ccc7d49c334602813d9c5f7ad617f8d29a02a6e..8e5df7c9e83458b7fb695df8b2c98b97b63750af 100644 --- a/src/lib_client_base/client_aliases.ml +++ b/src/lib_client_base/client_aliases.ml @@ -30,90 +30,88 @@ open Clic module type Entity = sig type t + val encoding : t Data_encoding.t + val of_source : string -> t tzresult Lwt.t + val to_source : t -> string tzresult Lwt.t + val name : string end module type Alias = sig type t + type fresh_param - val load : - #Client_context.wallet -> - (string * t) list tzresult Lwt.t - val set : - #Client_context.wallet -> - (string * t) list -> - unit tzresult Lwt.t - val find : - #Client_context.wallet -> - string -> t tzresult Lwt.t - val find_opt : - #Client_context.wallet -> - string -> t option tzresult Lwt.t - val rev_find : - #Client_context.wallet -> - t -> string option tzresult Lwt.t - val name : - #Client_context.wallet -> - t -> string tzresult Lwt.t - val mem : - #Client_context.wallet -> - string -> bool tzresult Lwt.t + + val load : #Client_context.wallet -> (string * t) list tzresult Lwt.t + + val set : #Client_context.wallet -> (string * t) list -> unit tzresult Lwt.t + + val find : #Client_context.wallet -> string -> t tzresult Lwt.t + + val find_opt : #Client_context.wallet -> string -> t option tzresult Lwt.t + + val rev_find : #Client_context.wallet -> t -> 
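(* Usage sketch (illustrative; not part of the patched sources): a command
   that takes [Scriptable.clic_arg ()] among its options receives an
   [output_format option] and threads it into [output].  The balance list and
   [print_balances] are invented for the example; [cctxt#message] is the
   printer from [Client_context].

   let print_balances format_opt (cctxt : #Client_context.full) balances =
     Scriptable.output
       format_opt
       ~for_human:(fun () ->
         iter_s
           (fun (name, amount) ->
             cctxt#message "%s: %s" name amount >>= fun () -> return_unit)
           balances)
       ~for_script:(fun () ->
         List.map (fun (name, amount) -> [name; amount]) balances)

   With [--for-script TSV] each balance is printed as one tab-separated row;
   without the option only the human-readable branch runs. *)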
string option tzresult Lwt.t + + val name : #Client_context.wallet -> t -> string tzresult Lwt.t + + val mem : #Client_context.wallet -> string -> bool tzresult Lwt.t + val add : - force:bool -> - #Client_context.wallet -> - string -> t -> unit tzresult Lwt.t - val del : - #Client_context.wallet -> - string -> unit tzresult Lwt.t - val update : - #Client_context.wallet -> - string -> t -> unit tzresult Lwt.t + force:bool -> #Client_context.wallet -> string -> t -> unit tzresult Lwt.t + + val del : #Client_context.wallet -> string -> unit tzresult Lwt.t + + val update : #Client_context.wallet -> string -> t -> unit tzresult Lwt.t + val of_source : string -> t tzresult Lwt.t + val to_source : t -> string tzresult Lwt.t - val alias_parameter : unit -> (string * t, #Client_context.wallet) Clic.parameter + + val alias_parameter : + unit -> (string * t, #Client_context.wallet) Clic.parameter + val alias_param : ?name:string -> ?desc:string -> ('a, (#Client_context.wallet as 'b)) Clic.params -> (string * t -> 'a, 'b) Clic.params + val fresh_alias_param : ?name:string -> ?desc:string -> ('a, (< .. > as 'obj)) Clic.params -> (fresh_param -> 'a, 'obj) Clic.params - val force_switch : - unit -> (bool, _) arg + + val force_switch : unit -> (bool, _) arg + val of_fresh : - #Client_context.wallet -> - bool -> - fresh_param -> - string tzresult Lwt.t + #Client_context.wallet -> bool -> fresh_param -> string tzresult Lwt.t + val source_param : ?name:string -> ?desc:string -> ('a, (#Client_context.wallet as 'obj)) Clic.params -> (t -> 'a, 'obj) Clic.params + val source_arg : ?long:string -> ?placeholder:string -> ?doc:string -> - unit -> (t option, (#Client_context.wallet as 'obj)) Clic.arg - val autocomplete: - #Client_context.wallet -> string list tzresult Lwt.t -end + unit -> + (t option, (#Client_context.wallet as 'obj)) Clic.arg -module Alias = functor (Entity : Entity) -> struct + val autocomplete : #Client_context.wallet -> string list tzresult Lwt.t +end +module Alias (Entity : Entity) = struct open Client_context let wallet_encoding : (string * Entity.t) list Data_encoding.encoding = let open Data_encoding in - list (obj2 - (req "name" string) - (req "value" Entity.encoding)) + list (obj2 (req "name" string) (req "value" Entity.encoding)) let load (wallet : #wallet) = wallet#load Entity.name ~default:[] wallet_encoding @@ -122,186 +120,192 @@ module Alias = functor (Entity : Entity) -> struct wallet#write Entity.name entries wallet_encoding let autocomplete wallet = - load wallet >>= function - | Error _ -> return_nil - | Ok list -> return (List.map fst list) + load wallet + >>= function + | Error _ -> return_nil | Ok list -> return (List.map fst list) let find_opt (wallet : #wallet) name = - load wallet >>=? fun list -> - try return_some (List.assoc name list) - with Not_found -> return_none + load wallet + >>=? fun list -> + try return_some (List.assoc name list) with Not_found -> return_none let find (wallet : #wallet) name = - load wallet >>=? fun list -> + load wallet + >>=? fun list -> try return (List.assoc name list) - with Not_found -> - failwith "no %s alias named %s" Entity.name name + with Not_found -> failwith "no %s alias named %s" Entity.name name let rev_find (wallet : #wallet) v = - load wallet >>=? fun list -> + load wallet + >>=? fun list -> try return_some (List.find (fun (_, v') -> v = v') list |> fst) with Not_found -> return_none let mem (wallet : #wallet) name = - load wallet >>=? fun list -> + load wallet + >>=? 
fun list -> try ignore (List.assoc name list) ; return_true - with - | Not_found -> return_false + with Not_found -> return_false let add ~force (wallet : #wallet) name value = let keep = ref false in - load wallet >>=? fun list -> - begin - if force then - return_unit - else - iter_s (fun (n, v) -> - if n = name && v = value then begin - keep := true ; - return_unit - end else if n = name && v <> value then begin - failwith - "another %s is already aliased as %s, \ - use --force to update" - Entity.name n - end else if n <> name && v = value then begin - failwith - "this %s is already aliased as %s, \ - use --force to insert duplicate" - Entity.name n - end else begin - return_unit - end) - list - end >>=? fun () -> + load wallet + >>=? fun list -> + ( if force then return_unit + else + iter_s + (fun (n, v) -> + if n = name && v = value then ( + keep := true ; + return_unit ) + else if n = name && v <> value then + failwith + "another %s is already aliased as %s, use --force to update" + Entity.name + n + else if n <> name && v = value then + failwith + "this %s is already aliased as %s, use --force to insert \ + duplicate" + Entity.name + n + else return_unit) + list ) + >>=? fun () -> let list = List.filter (fun (n, _) -> n <> name) list in let list = (name, value) :: list in - if !keep then - return_unit - else - wallet#write Entity.name list wallet_encoding + if !keep then return_unit + else wallet#write Entity.name list wallet_encoding let del (wallet : #wallet) name = - load wallet >>=? fun list -> + load wallet + >>=? fun list -> let list = List.filter (fun (n, _) -> n <> name) list in wallet#write Entity.name list wallet_encoding let update (wallet : #wallet) name value = - load wallet >>=? fun list -> + load wallet + >>=? fun list -> let list = - List.map - (fun (n, v) -> (n, if n = name then value else v)) - list in + List.map (fun (n, v) -> (n, if n = name then value else v)) list + in wallet#write Entity.name list wallet_encoding include Entity - let alias_parameter () = parameter - ~autocomplete - (fun cctxt s -> - find cctxt s >>=? fun v -> - return (s, v)) + let alias_parameter () = + parameter ~autocomplete (fun cctxt s -> + find cctxt s >>=? fun v -> return (s, v)) - let alias_param - ?(name = "name") ?(desc = "existing " ^ Entity.name ^ " alias") next = + let alias_param ?(name = "name") + ?(desc = "existing " ^ Entity.name ^ " alias") next = param ~name ~desc (alias_parameter ()) next type fresh_param = Fresh of string let of_fresh (wallet : #wallet) force (Fresh s) = - load wallet >>=? fun list -> - begin if force then - return_unit - else - iter_s - (fun (n, v) -> - if n = s then - Entity.to_source v >>=? fun value -> - failwith - "@[<v 2>The %s alias %s already exists.@,\ - The current value is %s.@,\ - Use --force to update@]" - Entity.name n - value - else - return_unit) - list - end >>=? fun () -> - return s - - let fresh_alias_param - ?(name = "new") ?(desc = "new " ^ Entity.name ^ " alias") next = - param ~name ~desc + load wallet + >>=? fun list -> + ( if force then return_unit + else + iter_s + (fun (n, v) -> + if n = s then + Entity.to_source v + >>=? fun value -> + failwith + "@[<v 2>The %s alias %s already exists.@,\ + The current value is %s.@,\ + Use --force to update@]" + Entity.name + n + value + else return_unit) + list ) + >>=? fun () -> return s + + let fresh_alias_param ?(name = "new") + ?(desc = "new " ^ Entity.name ^ " alias") next = + param + ~name + ~desc (parameter (fun (_ : < .. 
>) s -> return @@ Fresh s)) next let parse_source_string cctxt s = match String.split ~limit:1 ':' s with - | [ "alias" ; alias ]-> + | ["alias"; alias] -> find cctxt alias - | [ "text" ; text ] -> + | ["text"; text] -> of_source text - | [ "file" ; path ] -> + | ["file"; path] -> cctxt#read_file path >>=? of_source - | _ -> - find cctxt s >>= function - | Ok v -> return v - | Error a_errs -> - cctxt#read_file s >>=? of_source >>= function - | Ok v -> return v - | Error r_errs -> - of_source s >>= function - | Ok v -> return v + | _ -> ( + find cctxt s + >>= function + | Ok v -> + return v + | Error a_errs -> ( + cctxt#read_file s >>=? of_source + >>= function + | Ok v -> + return v + | Error r_errs -> ( + of_source s + >>= function + | Ok v -> + return v | Error s_errs -> - let all_errs = - List.flatten [ a_errs ; r_errs ; s_errs ] in - Lwt.return_error all_errs + let all_errs = List.flatten [a_errs; r_errs; s_errs] in + Lwt.return_error all_errs ) ) ) let source_param ?(name = "src") ?(desc = "source " ^ Entity.name) next = let desc = Format.asprintf "%s\n\ - Can be a %s name, a file or a raw %s literal. If the \ - parameter is not the name of an existing %s, the client will \ - look for a file containing a %s, and if it does not exist, \ - the argument will be read as a raw %s.\n\ - Use 'alias:name', 'file:path' or 'text:literal' to disable \ - autodetect." - desc Entity.name Entity.name Entity.name Entity.name Entity.name in - param ~name ~desc - (parameter parse_source_string) - next + Can be a %s name, a file or a raw %s literal. If the parameter is \ + not the name of an existing %s, the client will look for a file \ + containing a %s, and if it does not exist, the argument will be read \ + as a raw %s.\n\ + Use 'alias:name', 'file:path' or 'text:literal' to disable autodetect." + desc + Entity.name + Entity.name + Entity.name + Entity.name + Entity.name + in + param ~name ~desc (parameter parse_source_string) next - let source_arg - ?(long = "source " ^ Entity.name) - ?(placeholder = "src") + let source_arg ?(long = "source " ^ Entity.name) ?(placeholder = "src") ?(doc = "") () = let doc = Format.asprintf "%s\n\ - Can be a %s name, a file or a raw %s literal. If the \ - parameter is not the name of an existing %s, the client will \ - look for a file containing a %s, and if it does not exist, \ - the argument will be read as a raw %s.\n\ - Use 'alias:name', 'file:path' or 'text:literal' to disable \ - autodetect." - doc Entity.name Entity.name Entity.name Entity.name Entity.name in - arg - ~long - ~placeholder - ~doc - (parameter parse_source_string) + Can be a %s name, a file or a raw %s literal. If the parameter is \ + not the name of an existing %s, the client will look for a file \ + containing a %s, and if it does not exist, the argument will be read \ + as a raw %s.\n\ + Use 'alias:name', 'file:path' or 'text:literal' to disable autodetect." + doc + Entity.name + Entity.name + Entity.name + Entity.name + Entity.name + in + arg ~long ~placeholder ~doc (parameter parse_source_string) let force_switch () = Clic.switch - ~long:"force" ~short:'f' - ~doc:("overwrite existing " ^ Entity.name) () + ~long:"force" + ~short:'f' + ~doc:("overwrite existing " ^ Entity.name) + () let name (wallet : #wallet) d = - rev_find wallet d >>=? function - | None -> Entity.to_source d - | Some name -> return name - + rev_find wallet d + >>=? 
function None -> Entity.to_source d | Some name -> return name end diff --git a/src/lib_client_base/client_aliases.mli b/src/lib_client_base/client_aliases.mli index dee6d3771354c1222da59ed84164f8d85c93574d..cf15ea3467fa3be7d4163234f8b5e127f60fe35c 100644 --- a/src/lib_client_base/client_aliases.mli +++ b/src/lib_client_base/client_aliases.mli @@ -23,81 +23,82 @@ (* *) (*****************************************************************************) - module type Entity = sig type t + val encoding : t Data_encoding.t + val of_source : string -> t tzresult Lwt.t + val to_source : t -> string tzresult Lwt.t + val name : string end module type Alias = sig type t + type fresh_param - val load : - #Client_context.wallet -> - (string * t) list tzresult Lwt.t - val set : - #Client_context.wallet -> - (string * t) list -> - unit tzresult Lwt.t - val find : - #Client_context.wallet -> - string -> t tzresult Lwt.t - val find_opt : - #Client_context.wallet -> - string -> t option tzresult Lwt.t - val rev_find : - #Client_context.wallet -> - t -> string option tzresult Lwt.t - val name : - #Client_context.wallet -> - t -> string tzresult Lwt.t - val mem : - #Client_context.wallet -> - string -> bool tzresult Lwt.t + + val load : #Client_context.wallet -> (string * t) list tzresult Lwt.t + + val set : #Client_context.wallet -> (string * t) list -> unit tzresult Lwt.t + + val find : #Client_context.wallet -> string -> t tzresult Lwt.t + + val find_opt : #Client_context.wallet -> string -> t option tzresult Lwt.t + + val rev_find : #Client_context.wallet -> t -> string option tzresult Lwt.t + + val name : #Client_context.wallet -> t -> string tzresult Lwt.t + + val mem : #Client_context.wallet -> string -> bool tzresult Lwt.t + val add : - force:bool -> - #Client_context.wallet -> - string -> t -> unit tzresult Lwt.t - val del : - #Client_context.wallet -> - string -> unit tzresult Lwt.t - val update : - #Client_context.wallet -> - string -> t -> unit tzresult Lwt.t + force:bool -> #Client_context.wallet -> string -> t -> unit tzresult Lwt.t + + val del : #Client_context.wallet -> string -> unit tzresult Lwt.t + + val update : #Client_context.wallet -> string -> t -> unit tzresult Lwt.t + val of_source : string -> t tzresult Lwt.t + val to_source : t -> string tzresult Lwt.t - val alias_parameter : unit -> (string * t, #Client_context.wallet) Clic.parameter + + val alias_parameter : + unit -> (string * t, #Client_context.wallet) Clic.parameter + val alias_param : ?name:string -> ?desc:string -> ('a, (#Client_context.wallet as 'b)) Clic.params -> (string * t -> 'a, 'b) Clic.params + val fresh_alias_param : ?name:string -> ?desc:string -> ('a, (< .. 
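(* Usage sketch (illustrative; not part of the patched sources): the [Alias]
   functor turns any [Entity] into a named store kept in the client wallet.
   [Nickname_entity] is an invented entity that stores plain strings.

   module Nickname_entity = struct
     type t = string

     let encoding = Data_encoding.string

     let of_source s = return s

     let to_source s = return s

     let name = "nickname"
   end

   module Nickname_alias = Client_aliases.Alias (Nickname_entity)

   [Nickname_alias.add ~force:false wallet "bob" value] then records an
   alias, [Nickname_alias.find wallet "bob"] retrieves it, and
   [Nickname_alias.source_param] accepts the [alias:name], [file:path] and
   [text:literal] spellings described above. *)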
> as 'obj)) Clic.params -> (fresh_param -> 'a, 'obj) Clic.params - val force_switch : - unit -> (bool, _) Clic.arg + + val force_switch : unit -> (bool, _) Clic.arg + val of_fresh : - #Client_context.wallet -> - bool -> - fresh_param -> - string tzresult Lwt.t + #Client_context.wallet -> bool -> fresh_param -> string tzresult Lwt.t + val source_param : ?name:string -> ?desc:string -> ('a, (#Client_context.wallet as 'obj)) Clic.params -> (t -> 'a, 'obj) Clic.params + val source_arg : ?long:string -> ?placeholder:string -> ?doc:string -> - unit -> (t option, (#Client_context.wallet as 'obj)) Clic.arg - val autocomplete: - #Client_context.wallet -> string list tzresult Lwt.t + unit -> + (t option, (#Client_context.wallet as 'obj)) Clic.arg + + val autocomplete : #Client_context.wallet -> string list tzresult Lwt.t end + module Alias (Entity : Entity) : Alias with type t = Entity.t diff --git a/src/lib_client_base/client_confirmations.ml b/src/lib_client_base/client_confirmations.ml index 5caf3820cdd538b638acebafbd47c06d46986560..92360549c8fb9fd4e117de082eb93e2f3108a2d5 100644 --- a/src/lib_client_base/client_confirmations.ml +++ b/src/lib_client_base/client_confirmations.ml @@ -28,62 +28,61 @@ let in_block operation_hash operations = try List.iteri (fun i ops -> - List.iteri (fun j op -> - if Operation_hash.equal operation_hash op then - raise (Found (i,j))) ops) + List.iteri + (fun j op -> + if Operation_hash.equal operation_hash op then raise (Found (i, j))) + ops) operations ; None - with Found (i,j) -> Some (i, j) + with Found (i, j) -> Some (i, j) type operation_status = | Confirmed of (Block_hash.t * int * int) | Pending | Still_not_found -let wait_for_operation_inclusion - (ctxt : #Client_context.full) - ~chain - ?(predecessors = 10) - ?(confirmations = 1) - ?branch - operation_hash = - +let wait_for_operation_inclusion (ctxt : #Client_context.full) ~chain + ?(predecessors = 10) ?(confirmations = 1) ?branch operation_hash = let exception WrapError of error list in let exception Outdated of Operation_hash.t in - (* Table of known blocks: - None: if neither the block or its predecessors contains the operation - (Some ((hash, i, j), n)): if the `hash` contains the operation in list `i` at position `j` and if `hash` denotes the `n-th` predecessors of the block. *) - let blocks : ((Block_hash.t * int * int) * int) option Block_hash.Table.t = - Block_hash.Table.create confirmations in - + Block_hash.Table.create confirmations + in (* Fetch _all_ the 'unknown' predecessors af a block. *) - let fetch_predecessors (hash, header) = let rec loop acc (_hash, header) = let predecessor = header.Block_header.predecessor in - if Block_hash.Table.mem blocks predecessor then - return acc + if Block_hash.Table.mem blocks predecessor then return acc else Chain_services.Blocks.Header.shell_header - ctxt ~chain ~block:(`Hash (predecessor, 0)) () >>=? fun shell -> + ctxt + ~chain + ~block:(`Hash (predecessor, 0)) + () + >>=? 
fun shell -> let block = (predecessor, shell) in - loop (block :: acc) block in - loop [hash, header.Block_header.shell] (hash, header.shell) >>= function - | Ok blocks -> Lwt.return blocks + loop (block :: acc) block + in + loop [(hash, header.Block_header.shell)] (hash, header.shell) + >>= function + | Ok blocks -> + Lwt.return blocks | Error err -> ctxt#warning "Error while fetching block (ignored): %a" - pp_print_error err >>= fun () -> + pp_print_error + err + >>= fun () -> (* Will be retried when a new head arrives *) - Lwt.return_nil in - + Lwt.return_nil + in (* Check whether a block as enough confirmations. This function assumes that the block predecessor has been processed already. *) - let process hash header = let block = `Hash (hash, 0) in let predecessor = header.Tezos_base.Block_header.predecessor in @@ -91,139 +90,162 @@ let wait_for_operation_inclusion | Some (block_with_op, n) -> ctxt#answer "Operation received %d confirmations as of block: %a" - (n+1) Block_hash.pp hash >>= fun () -> - Block_hash.Table.add blocks hash (Some (block_with_op, n+1)) ; - if n+1 < confirmations then begin - return Pending - end else - return (Confirmed block_with_op) - | None -> + (n + 1) + Block_hash.pp + hash + >>= fun () -> + Block_hash.Table.add blocks hash (Some (block_with_op, n + 1)) ; + if n + 1 < confirmations then return Pending + else return (Confirmed block_with_op) + | None -> ( Shell_services.Blocks.Operation_hashes.operation_hashes - ctxt ~chain ~block () >>=? fun operations -> + ctxt + ~chain + ~block + () + >>=? fun operations -> match in_block operation_hash operations with | None -> Block_hash.Table.add blocks hash None ; return Still_not_found - | Some (i, j) -> begin + | Some (i, j) -> ctxt#answer "Operation found in block: %a (pass: %d, offset: %d)" - Block_hash.pp hash i j >>= fun () -> + Block_hash.pp + hash + i + j + >>= fun () -> Block_hash.Table.add blocks hash (Some ((hash, i, j), 0)) ; - if confirmations <= 0 then - return (Confirmed (hash, i, j)) - else - return Pending - end in - + if confirmations <= 0 then return (Confirmed (hash, i, j)) + else return Pending ) + in (* Checks if the given branch is considered alive.*) - let check_branch_alive () = match branch with - | Some branch_hash -> - Shell_services.Blocks.live_blocks - ctxt ~chain ~block:(`Head 0) () >>= begin function - | Ok live_blocks -> - if Block_hash.Set.mem branch_hash live_blocks then - Lwt.return_unit - else - ctxt#error - "The operation %a is outdated and may \ - never be included in the chain.@,\ - We recommand to use an external block explorer." - Operation_hash.pp operation_hash >>= fun () -> - Lwt.fail (Outdated operation_hash) - | Error err -> Lwt.fail (WrapError err) - end - | None -> Lwt.return_unit + | Some branch_hash -> ( + Shell_services.Blocks.live_blocks ctxt ~chain ~block:(`Head 0) () + >>= function + | Ok live_blocks -> + if Block_hash.Set.mem branch_hash live_blocks then Lwt.return_unit + else + ctxt#error + "The operation %a is outdated and may never be included in \ + the chain.@,\ + We recommand to use an external block explorer." + Operation_hash.pp + operation_hash + >>= fun () -> Lwt.fail (Outdated operation_hash) + | Error err -> + Lwt.fail (WrapError err) ) + | None -> + Lwt.return_unit in - - Shell_services.Monitor.heads ctxt chain >>=? fun (stream, stop) -> - Lwt_stream.get stream >>= function - | None -> assert false + Shell_services.Monitor.heads ctxt chain + >>=? 
fun (stream, stop) -> + Lwt_stream.get stream + >>= function + | None -> + assert false | Some (head, _) -> let rec loop n = if n >= 0 then (*Search for the operation in the n head predecessors*) let block = `Hash (head, n) in - Shell_services.Blocks.hash ctxt ~chain ~block () >>=? fun hash -> - Shell_services.Blocks.Header.shell_header ctxt - ~chain ~block () >>=? fun shell -> - process hash shell >>=? function + Shell_services.Blocks.hash ctxt ~chain ~block () + >>=? fun hash -> + Shell_services.Blocks.Header.shell_header ctxt ~chain ~block () + >>=? fun shell -> + process hash shell + >>=? function | Confirmed block -> - stop () ; - return block + stop () ; return block | Pending | Still_not_found -> - loop (n-1) + loop (n - 1) else (*Search for the operation in new heads*) Lwt.catch (fun () -> - (*Fetching potential unknown blocks from potential new heads*) - let stream = Lwt_stream.map_list_s fetch_predecessors stream in - Lwt_stream.find_s - (fun (hash, header) -> - process hash header >>= function - | Ok Pending -> - Lwt.return_false - | Ok Still_not_found -> - check_branch_alive () >>= fun () -> - Lwt.return_false - | Ok (Confirmed _) -> - Lwt.return_true - | Error err -> - Lwt.fail (WrapError err)) stream >>= return) + (*Fetching potential unknown blocks from potential new heads*) + let stream = Lwt_stream.map_list_s fetch_predecessors stream in + Lwt_stream.find_s + (fun (hash, header) -> + process hash header + >>= function + | Ok Pending -> + Lwt.return_false + | Ok Still_not_found -> + check_branch_alive () >>= fun () -> Lwt.return_false + | Ok (Confirmed _) -> + Lwt.return_true + | Error err -> + Lwt.fail (WrapError err)) + stream + >>= return) (function - | WrapError e -> Lwt.return_error e - | exn -> Lwt.fail exn) >>=? function + | WrapError e -> Lwt.return_error e | exn -> Lwt.fail exn) + >>=? function | None -> failwith "..." - | Some (hash, _) -> + | Some (hash, _) -> ( stop () ; match Block_hash.Table.find_opt blocks hash with - | None | Some None -> assert false + | None | Some None -> + assert false | Some (Some (hash, _)) -> - return hash in - begin - match branch with - | Some branch_hash -> - Shell_services.Blocks.Header.shell_header - ctxt ~chain ~block:(`Hash(branch_hash,0)) () >>=? fun branch_header -> - let branch_level = branch_header.Block_header.level in - Shell_services.Blocks.Header.shell_header - ctxt ~chain ~block:(`Hash (head,0)) () >>=? fun head_shell -> - let head_level = head_shell.Block_header.level in - return (Int32.(to_int (sub head_level branch_level))) - | None -> return predecessors - end + return hash ) + in + ( match branch with + | Some branch_hash -> + Shell_services.Blocks.Header.shell_header + ctxt + ~chain + ~block:(`Hash (branch_hash, 0)) + () + >>=? fun branch_header -> + let branch_level = branch_header.Block_header.level in + Shell_services.Blocks.Header.shell_header + ctxt + ~chain + ~block:(`Hash (head, 0)) + () + >>=? fun head_shell -> + let head_level = head_shell.Block_header.level in + return Int32.(to_int (sub head_level branch_level)) + | None -> + return predecessors ) >>=? fun block_hook -> Block_services.Empty.hash - ctxt ~chain ~block:(`Hash (head, block_hook+1)) () >>=? fun oldest -> + ctxt + ~chain + ~block:(`Hash (head, block_hook + 1)) + () + >>=? fun oldest -> Block_hash.Table.add blocks oldest None ; loop block_hook let lookup_operation_in_previous_block ctxt chain operation_hash i = Block_services.Empty.hash ctxt ~block:(`Head i) () >>=? 
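(* Editor's note (illustrative sketch, not from the patch): a typical call
   site for [wait_for_operation_inclusion], assuming the signature declared
   in client_confirmations.mli, a [cctxt : #Client_context.full] and the
   usual error-monad operators ([>>=?], [return_unit]) in scope. The result
   is assumed to be the (block hash, validation pass, offset) triple of the
   including block, as returned by the code above. *)

let wait_for_injected_operation (cctxt : #Client_context.full) op_hash =
  Client_confirmations.wait_for_operation_inclusion
    cctxt
    ~chain:`Main
    ~confirmations:2
    op_hash
  >>=? fun (block, pass, offset) ->
  cctxt#answer
    "Operation %a included in %a (pass %d, offset %d)"
    Operation_hash.pp
    op_hash
    Block_hash.pp
    block
    pass
    offset
  >>= fun () -> return_unit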
fun block -> - Shell_services.Blocks.Operation_hashes.operation_hashes ctxt ~chain - ~block:(`Hash (block, 0)) () + Shell_services.Blocks.Operation_hashes.operation_hashes + ctxt + ~chain + ~block:(`Hash (block, 0)) + () >>=? fun operations -> match in_block operation_hash operations with - | None -> return_none - | Some (a, b) -> return_some (block, a, b) + | None -> + return_none + | Some (a, b) -> + return_some (block, a, b) -let lookup_operation_in_previous_blocks - (ctxt : #Client_context.full) - ~chain - ~predecessors - operation_hash = +let lookup_operation_in_previous_blocks (ctxt : #Client_context.full) ~chain + ~predecessors operation_hash = let rec loop i = - if i = predecessors + 1 then - return_none - else begin - lookup_operation_in_previous_block ctxt chain operation_hash i >>=? - function - | None -> loop (i + 1) - | Some (block, a, b) -> return_some (block, a, b) - end + if i = predecessors + 1 then return_none + else + lookup_operation_in_previous_block ctxt chain operation_hash i + >>=? function + | None -> loop (i + 1) | Some (block, a, b) -> return_some (block, a, b) in loop 0 diff --git a/src/lib_client_base/client_confirmations.mli b/src/lib_client_base/client_confirmations.mli index 3d8cadc8db53c160ab23adfb3b4ab993714c771f..1e8a18567ad79816723de89bfcd346d9f6ac1b48 100644 --- a/src/lib_client_base/client_confirmations.mli +++ b/src/lib_client_base/client_confirmations.mli @@ -30,7 +30,7 @@ This functions also looks for the operations in the `predecessors` of the intial chain head. *) -val wait_for_operation_inclusion: +val wait_for_operation_inclusion : #Client_context.full -> chain:Chain_services.chain -> ?predecessors:int -> @@ -41,7 +41,7 @@ val wait_for_operation_inclusion: (** lookup an operation in [predecessors] previous blocks, starting from head *) -val lookup_operation_in_previous_blocks: +val lookup_operation_in_previous_blocks : #Client_context.full -> chain:Block_services.chain -> predecessors:int -> diff --git a/src/lib_client_base/client_context.ml b/src/lib_client_base/client_context.ml index fe48ddc1f7740d4ff6be17a6bb2e630a6acfdf38..a845bb21e2b51785d512b7e4e4f656cdc53a8b7d 100644 --- a/src/lib_client_base/client_context.ml +++ b/src/lib_client_base/client_context.ml @@ -24,117 +24,172 @@ (* *) (*****************************************************************************) -type ('a, 'b) lwt_format = - ('a, Format.formatter, unit, 'b Lwt.t) format4 - -class type printer = object - method error : ('a, 'b) lwt_format -> 'a - method warning : ('a, unit) lwt_format -> 'a - method message : ('a, unit) lwt_format -> 'a - method answer : ('a, unit) lwt_format -> 'a - method log : string -> ('a, unit) lwt_format -> 'a -end - -class type prompter = object - method prompt : ('a, string tzresult) lwt_format -> 'a - method prompt_password : ('a, MBytes.t tzresult) lwt_format -> 'a -end - -class type io = object - inherit printer - inherit prompter -end +type ('a, 'b) lwt_format = ('a, Format.formatter, unit, 'b Lwt.t) format4 + +class type printer = + object + method error : ('a, 'b) lwt_format -> 'a + + method warning : ('a, unit) lwt_format -> 'a + + method message : ('a, unit) lwt_format -> 'a + + method answer : ('a, unit) lwt_format -> 'a + + method log : string -> ('a, unit) lwt_format -> 'a + end + +class type prompter = + object + method prompt : ('a, string tzresult) lwt_format -> 'a + + method prompt_password : ('a, MBytes.t tzresult) lwt_format -> 'a + end + +class type io = + object + inherit printer + + inherit prompter + end class simple_printer log 
= - let message = - (fun x -> - Format.kasprintf (fun msg -> log "stdout" msg) x) in + let message x = Format.kasprintf (fun msg -> log "stdout" msg) x in object method error : type a b. (a, b) lwt_format -> a = - Format.kasprintf - (fun msg -> - Lwt.fail (Failure msg)) + Format.kasprintf (fun msg -> Lwt.fail (Failure msg)) + method warning : type a. (a, unit) lwt_format -> a = - Format.kasprintf - (fun msg -> log "stderr" msg) + Format.kasprintf (fun msg -> log "stderr" msg) + method message : type a. (a, unit) lwt_format -> a = message + method answer : type a. (a, unit) lwt_format -> a = message + method log : type a. string -> (a, unit) lwt_format -> a = - fun name -> - Format.kasprintf - (fun msg -> log name msg) + fun name -> Format.kasprintf (fun msg -> log name msg) + end + +class type wallet = + object + method load_passwords : string Lwt_stream.t option + + method read_file : string -> string tzresult Lwt.t + + method with_lock : (unit -> 'a Lwt.t) -> 'a Lwt.t + + method load : + string -> default:'a -> 'a Data_encoding.encoding -> 'a tzresult Lwt.t + + method write : + string -> 'a -> 'a Data_encoding.encoding -> unit tzresult Lwt.t + end + +class type chain = + object + method chain : Shell_services.chain + end + +class type block = + object + method block : Shell_services.block + + method confirmations : int option + end + +class type io_wallet = + object + inherit printer + + inherit prompter + + inherit wallet end -class type wallet = object - method load_passwords : string Lwt_stream.t option - method read_file : string -> string tzresult Lwt.t - method with_lock : (unit -> 'a Lwt.t) -> 'a Lwt.t - method load : string -> default:'a -> 'a Data_encoding.encoding -> 'a tzresult Lwt.t - method write : string -> 'a -> 'a Data_encoding.encoding -> unit tzresult Lwt.t -end - -class type chain = object - method chain : Shell_services.chain -end - -class type block = object - method block : Shell_services.block - method confirmations : int option -end - -class type io_wallet = object - inherit printer - inherit prompter - inherit wallet -end - -class type io_rpcs = object - inherit printer - inherit prompter - inherit RPC_context.json -end - -class type ui = object - method sleep : float -> unit Lwt.t -end - -class type full = object - inherit printer - inherit prompter - inherit wallet - inherit RPC_context.json - inherit chain - inherit block - inherit ui -end - -class proxy_context (obj : full) = object - method load_passwords = obj#load_passwords - method read_file = obj#read_file - method base = obj#base - method chain = obj#chain - method block = obj#block - method confirmations = obj#confirmations - method answer : type a. (a, unit) lwt_format -> a = obj#answer - method call_service : - 'm 'p 'q 'i 'o. - ([< Resto.meth ] as 'm, 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> - 'p -> 'q -> 'i -> 'o tzresult Lwt.t = obj#call_service - method call_streamed_service : - 'm 'p 'q 'i 'o. - ([< Resto.meth ] as 'm, 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> - on_chunk: ('o -> unit) -> - on_close: (unit -> unit) -> - 'p -> 'q -> 'i -> (unit -> unit) tzresult Lwt.t = obj#call_streamed_service - method error : type a b. (a, b) lwt_format -> a = obj#error - method generic_json_call = obj#generic_json_call - method with_lock : type a. (unit -> a Lwt.t) -> a Lwt.t = obj#with_lock - method load : type a. string -> default:a -> a Data_encoding.encoding -> a tzresult Lwt.t = obj#load - method log : type a. string -> (a, unit) lwt_format -> a = obj#log - method message : type a. 
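(* Editor's note (illustrative sketch, not from the patch): [simple_printer]
   only needs a [string -> string -> unit Lwt.t] logging callback taking a
   channel name and a message. A minimal instantiation, assuming [Lwt_io]
   from lwt.unix is available; the names are placeholders. *)

let console : Client_context.printer =
  new Client_context.simple_printer (fun channel msg ->
      Lwt_io.printlf "[%s] %s" channel msg)

let example () = console#message "%d confirmations still pending" 2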
(a, unit) lwt_format -> a = obj#message - method warning : type a. (a, unit) lwt_format -> a = obj#warning - method write : type a. string -> a -> a Data_encoding.encoding -> unit tzresult Lwt.t = obj#write - method prompt : type a. (a, string tzresult) lwt_format -> a = obj#prompt - method prompt_password : type a. (a, MBytes.t tzresult) lwt_format -> a = obj#prompt_password - method sleep : float -> unit Lwt.t = obj#sleep -end +class type io_rpcs = + object + inherit printer + + inherit prompter + + inherit RPC_context.json + end + +class type ui = + object + method sleep : float -> unit Lwt.t + end + +class type full = + object + inherit printer + + inherit prompter + + inherit wallet + + inherit RPC_context.json + + inherit chain + + inherit block + + inherit ui + end + +class proxy_context (obj : full) = + object + method load_passwords = obj#load_passwords + + method read_file = obj#read_file + + method base = obj#base + + method chain = obj#chain + + method block = obj#block + + method confirmations = obj#confirmations + + method answer : type a. (a, unit) lwt_format -> a = obj#answer + + method call_service + : 'm 'p 'q 'i 'o. + (([< Resto.meth] as 'm), 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> 'p -> + 'q -> 'i -> 'o tzresult Lwt.t = + obj#call_service + + method call_streamed_service + : 'm 'p 'q 'i 'o. + (([< Resto.meth] as 'm), 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> + on_chunk:('o -> unit) -> on_close:(unit -> unit) -> 'p -> 'q -> 'i -> + (unit -> unit) tzresult Lwt.t = + obj#call_streamed_service + + method error : type a b. (a, b) lwt_format -> a = obj#error + + method generic_json_call = obj#generic_json_call + + method with_lock : type a. (unit -> a Lwt.t) -> a Lwt.t = obj#with_lock + + method load : type a. + string -> default:a -> a Data_encoding.encoding -> a tzresult Lwt.t = + obj#load + + method log : type a. string -> (a, unit) lwt_format -> a = obj#log + + method message : type a. (a, unit) lwt_format -> a = obj#message + + method warning : type a. (a, unit) lwt_format -> a = obj#warning + + method write : type a. + string -> a -> a Data_encoding.encoding -> unit tzresult Lwt.t = + obj#write + + method prompt : type a. (a, string tzresult) lwt_format -> a = obj#prompt + + method prompt_password : type a. 
(a, MBytes.t tzresult) lwt_format -> a = + obj#prompt_password + + method sleep : float -> unit Lwt.t = obj#sleep + end diff --git a/src/lib_client_base/client_context.mli b/src/lib_client_base/client_context.mli index aaf75966a577e5a805929a4f5c1242b07eb14167..a94d97183d6191fc5cb9158e3a1b3ed4efb8f5eb 100644 --- a/src/lib_client_base/client_context.mli +++ b/src/lib_client_base/client_context.mli @@ -24,69 +24,102 @@ (* *) (*****************************************************************************) -type ('a, 'b) lwt_format = - ('a, Format.formatter, unit, 'b Lwt.t) format4 - -class type printer = object - method error : ('a, 'b) lwt_format -> 'a - method warning : ('a, unit) lwt_format -> 'a - method message : ('a, unit) lwt_format -> 'a - method answer : ('a, unit) lwt_format -> 'a - method log : string -> ('a, unit) lwt_format -> 'a -end - -class type prompter = object - method prompt : ('a, string tzresult) lwt_format -> 'a - method prompt_password : ('a, MBytes.t tzresult) lwt_format -> 'a -end - -class type io = object - inherit printer - inherit prompter -end - -class type wallet = object - method load_passwords : string Lwt_stream.t option - method read_file : string -> string tzresult Lwt.t - method with_lock : (unit -> 'a Lwt.t) -> 'a Lwt.t - method load : string -> default:'a -> 'a Data_encoding.encoding -> 'a tzresult Lwt.t - method write : string -> 'a -> 'a Data_encoding.encoding -> unit tzresult Lwt.t -end - -class type chain = object - method chain : Shell_services.chain -end - -class type block = object - method block : Shell_services.block - method confirmations : int option -end - -class type io_wallet = object - inherit printer - inherit prompter - inherit wallet -end - -class type io_rpcs = object - inherit printer - inherit prompter - inherit RPC_context.json -end - -class type ui = object - method sleep : float -> unit Lwt.t -end - -class type full = object - inherit printer - inherit prompter - inherit wallet - inherit RPC_context.json - inherit chain - inherit block - inherit ui -end +type ('a, 'b) lwt_format = ('a, Format.formatter, unit, 'b Lwt.t) format4 + +class type printer = + object + method error : ('a, 'b) lwt_format -> 'a + + method warning : ('a, unit) lwt_format -> 'a + + method message : ('a, unit) lwt_format -> 'a + + method answer : ('a, unit) lwt_format -> 'a + + method log : string -> ('a, unit) lwt_format -> 'a + end + +class type prompter = + object + method prompt : ('a, string tzresult) lwt_format -> 'a + + method prompt_password : ('a, MBytes.t tzresult) lwt_format -> 'a + end + +class type io = + object + inherit printer + + inherit prompter + end + +class type wallet = + object + method load_passwords : string Lwt_stream.t option + + method read_file : string -> string tzresult Lwt.t + + method with_lock : (unit -> 'a Lwt.t) -> 'a Lwt.t + + method load : + string -> default:'a -> 'a Data_encoding.encoding -> 'a tzresult Lwt.t + + method write : + string -> 'a -> 'a Data_encoding.encoding -> unit tzresult Lwt.t + end + +class type chain = + object + method chain : Shell_services.chain + end + +class type block = + object + method block : Shell_services.block + + method confirmations : int option + end + +class type io_wallet = + object + inherit printer + + inherit prompter + + inherit wallet + end + +class type io_rpcs = + object + inherit printer + + inherit prompter + + inherit RPC_context.json + end + +class type ui = + object + method sleep : float -> unit Lwt.t + end + +class type full = + object + inherit printer + + inherit prompter 
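(* Editor's note (illustrative sketch, not from the patch): [proxy_context]
   forwards every method of a [full] context, so a specialised context can
   be built by inheriting it and overriding a few methods. A hypothetical
   helper that pins the block contextual commands operate on: *)

let with_fixed_block (base : Client_context.full)
    (block : Shell_services.block) =
  object
    inherit Client_context.proxy_context base

    method! block = block

    method! confirmations = None
  end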
+ + inherit wallet + + inherit RPC_context.json + + inherit chain + + inherit block + + inherit ui + end class simple_printer : (string -> string -> unit Lwt.t) -> printer + class proxy_context : full -> full diff --git a/src/lib_client_base/client_keys.ml b/src/lib_client_base/client_keys.ml index 5a29700cb54af96be6fe07fbfe6cf2def6714393..35b0693e69c01fb741c385a9f20bb99920548582 100644 --- a/src/lib_client_base/client_keys.ml +++ b/src/lib_client_base/client_keys.ml @@ -24,128 +24,164 @@ (*****************************************************************************) type error += Unregistered_key_scheme of string + type error += Invalid_uri of Uri.t let () = - register_error_kind `Permanent - ~id: "cli.unregistered_key_scheme" - ~title: "Unregistered key scheme" - ~description: "A key has been provided with an \ - unregistered scheme (no corresponding plugin)" - ~pp: - (fun ppf s -> - Format.fprintf ppf "No matching plugin for key scheme %s" s) + register_error_kind + `Permanent + ~id:"cli.unregistered_key_scheme" + ~title:"Unregistered key scheme" + ~description: + "A key has been provided with an unregistered scheme (no corresponding \ + plugin)" + ~pp:(fun ppf s -> + Format.fprintf ppf "No matching plugin for key scheme %s" s) Data_encoding.(obj1 (req "value" string)) (function Unregistered_key_scheme s -> Some s | _ -> None) (fun s -> Unregistered_key_scheme s) ; - register_error_kind `Permanent - ~id: "cli.key.invalid_uri" - ~title: "Invalid key uri" - ~description: "A key has been provided with an invalid uri." - ~pp: - (fun ppf s -> - Format.fprintf ppf "Cannot parse the key uri: %s" s) + register_error_kind + `Permanent + ~id:"cli.key.invalid_uri" + ~title:"Invalid key uri" + ~description:"A key has been provided with an invalid uri." + ~pp:(fun ppf s -> Format.fprintf ppf "Cannot parse the key uri: %s" s) Data_encoding.(obj1 (req "value" string)) (function Invalid_uri s -> Some (Uri.to_string s) | _ -> None) (fun s -> Invalid_uri (Uri.of_string s)) module Public_key_hash = struct include Client_aliases.Alias (struct - type t = Signature.Public_key_hash.t - let encoding = Signature.Public_key_hash.encoding - let of_source s = Lwt.return (Signature.Public_key_hash.of_b58check s) - let to_source p = return (Signature.Public_key_hash.to_b58check p) - let name = "public key hash" - end) + type t = Signature.Public_key_hash.t + + let encoding = Signature.Public_key_hash.encoding + + let of_source s = Lwt.return (Signature.Public_key_hash.of_b58check s) + + let to_source p = return (Signature.Public_key_hash.to_b58check p) + + let name = "public key hash" + end) end module Logging = struct let tag = Tag.def ~doc:"Identity" "pk_alias" Format.pp_print_text end -let uri_encoding = - Data_encoding.(conv Uri.to_string Uri.of_string string) +let uri_encoding = Data_encoding.(conv Uri.to_string Uri.of_string string) type pk_uri = Uri.t + let make_pk_uri x = x type sk_uri = Uri.t + let make_sk_uri x = x -let pk_uri_parameter () = Clic.parameter (fun _ s -> - try return (make_pk_uri @@ Uri.of_string s) - with Failure s -> failwith "Error while parsing URI: %s" s) +let pk_uri_parameter () = + Clic.parameter (fun _ s -> + try return (make_pk_uri @@ Uri.of_string s) + with Failure s -> failwith "Error while parsing URI: %s" s) let pk_uri_param ?name ?desc params = let name = Option.unopt ~default:"uri" name in - let desc = Option.unopt - ~default:"public key\n\ - Varies from one scheme to the other.\n\ - Use command `list signing schemes` for more \ - information." 
desc in + let desc = + Option.unopt + ~default: + "public key\n\ + Varies from one scheme to the other.\n\ + Use command `list signing schemes` for more information." + desc + in Clic.param ~name ~desc (pk_uri_parameter ()) params -let sk_uri_parameter () = Clic.parameter (fun _ s -> - try return (make_sk_uri @@ Uri.of_string s) - with Failure s -> failwith "Error while parsing URI: %s" s) +let sk_uri_parameter () = + Clic.parameter (fun _ s -> + try return (make_sk_uri @@ Uri.of_string s) + with Failure s -> failwith "Error while parsing URI: %s" s) let sk_uri_param ?name ?desc params = let name = Option.unopt ~default:"uri" name in - let desc = Option.unopt - ~default:"secret key\n\ - Varies from one scheme to the other.\n\ - Use command `list signing schemes` for more \ - information." desc in + let desc = + Option.unopt + ~default: + "secret key\n\ + Varies from one scheme to the other.\n\ + Use command `list signing schemes` for more information." + desc + in Clic.param ~name ~desc (sk_uri_parameter ()) params -module Secret_key = - Client_aliases.Alias (struct - let name = "secret_key" - type t = Uri.t - let of_source s = return (Uri.of_string s) - let to_source t = return (Uri.to_string t) - let encoding = uri_encoding - end) +module Secret_key = Client_aliases.Alias (struct + let name = "secret_key" -module Public_key = - Client_aliases.Alias (struct - let name = "public_key" - type t = Uri.t * Signature.Public_key.t option - let of_source s = return (Uri.of_string s, None) - let to_source (t, _) = return (Uri.to_string t) - let encoding = - let open Data_encoding in - union - [ case Json_only - ~title: "Locator_only" - uri_encoding - (function (uri, None) -> Some uri | (_, Some _) -> None) - (fun uri -> (uri, None)) ; - case Json_only - ~title: "Locator_and_full_key" - (obj2 - (req "locator" uri_encoding) - (req "key" Signature.Public_key.encoding)) - (function (uri, Some key) -> Some (uri, key) | (_, None) -> None) - (fun (uri, key) -> (uri, Some key)) ] - end) + type t = Uri.t + + let of_source s = return (Uri.of_string s) + + let to_source t = return (Uri.to_string t) + + let encoding = uri_encoding +end) + +module Public_key = Client_aliases.Alias (struct + let name = "public_key" + + type t = Uri.t * Signature.Public_key.t option + + let of_source s = return (Uri.of_string s, None) + + let to_source (t, _) = return (Uri.to_string t) + + let encoding = + let open Data_encoding in + union + [ case + Json_only + ~title:"Locator_only" + uri_encoding + (function (uri, None) -> Some uri | (_, Some _) -> None) + (fun uri -> (uri, None)); + case + Json_only + ~title:"Locator_and_full_key" + (obj2 + (req "locator" uri_encoding) + (req "key" Signature.Public_key.encoding)) + (function (uri, Some key) -> Some (uri, key) | (_, None) -> None) + (fun (uri, key) -> (uri, Some key)) ] +end) module type SIGNER = sig val scheme : string + val title : string + val description : string + val neuterize : sk_uri -> pk_uri tzresult Lwt.t + val public_key : - ?interactive: Client_context.io_wallet -> - pk_uri -> Signature.Public_key.t tzresult Lwt.t + ?interactive:Client_context.io_wallet -> + pk_uri -> + Signature.Public_key.t tzresult Lwt.t + val public_key_hash : - ?interactive: Client_context.io_wallet -> - pk_uri -> (Signature.Public_key_hash.t * Signature.Public_key.t option) tzresult Lwt.t + ?interactive:Client_context.io_wallet -> + pk_uri -> + (Signature.Public_key_hash.t * Signature.Public_key.t option) tzresult + Lwt.t + val sign : - ?watermark: Signature.watermark -> - sk_uri -> MBytes.t 
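(* Editor's note (illustrative sketch, not from the patch): wiring
   [sk_uri_param] into a Clic command, assuming the Clic combinators
   ([prefixes], [stop], [no_options]) behave as in the commands elsewhere
   in this patch; the command itself is hypothetical. *)

let show_public_key_command =
  Clic.command
    ~desc:"Print the public key behind a secret key URI."
    Clic.no_options
    (Clic.prefixes ["show"; "public"; "key"; "for"]
    @@ Client_keys.sk_uri_param @@ Clic.stop)
    (fun () sk_uri (cctxt : #Client_context.full) ->
      Client_keys.neuterize sk_uri
      >>=? fun pk_uri ->
      Client_keys.public_key pk_uri
      >>=? fun pk ->
      cctxt#message "%a" Signature.Public_key.pp pk >>= fun () -> return_unit)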
-> Signature.t tzresult Lwt.t + ?watermark:Signature.watermark -> + sk_uri -> + MBytes.t -> + Signature.t tzresult Lwt.t + val deterministic_nonce : sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t + val deterministic_nonce_hash : sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t + val supports_deterministic_nonces : sk_uri -> bool tzresult Lwt.t end @@ -159,7 +195,8 @@ let find_signer_for_key ~scheme = match Hashtbl.find_opt signers_table scheme with | None -> fail (Unregistered_key_scheme scheme) - | Some signer -> return signer + | Some signer -> + return signer let registered_signers () : (string * (module SIGNER)) list = Hashtbl.fold (fun k v acc -> (k, v) :: acc) signers_table [] @@ -167,168 +204,205 @@ let registered_signers () : (string * (module SIGNER)) list = type error += Signature_mismatch of sk_uri let () = - register_error_kind `Permanent - ~id: "cli.signature_mismatch" - ~title: "Signature mismatch" - ~description: "The signer produced an invalid signature" - ~pp: - (fun ppf sk -> - Format.fprintf ppf - "The signer for %a produced an invalid signature" - Uri.pp_hum sk) + register_error_kind + `Permanent + ~id:"cli.signature_mismatch" + ~title:"Signature mismatch" + ~description:"The signer produced an invalid signature" + ~pp:(fun ppf sk -> + Format.fprintf + ppf + "The signer for %a produced an invalid signature" + Uri.pp_hum + sk) Data_encoding.(obj1 (req "locator" uri_encoding)) (function Signature_mismatch sk -> Some sk | _ -> None) (fun sk -> Signature_mismatch sk) let neuterize sk_uri = let scheme = Option.unopt ~default:"" (Uri.scheme sk_uri) in - find_signer_for_key ~scheme >>=? fun signer -> + find_signer_for_key ~scheme + >>=? fun signer -> let module Signer = (val signer : SIGNER) in Signer.neuterize sk_uri let public_key ?interactive pk_uri = let scheme = Option.unopt ~default:"" (Uri.scheme pk_uri) in - find_signer_for_key ~scheme >>=? fun signer -> + find_signer_for_key ~scheme + >>=? fun signer -> let module Signer = (val signer : SIGNER) in Signer.public_key ?interactive pk_uri let public_key_hash ?interactive pk_uri = - public_key ?interactive pk_uri >>=? fun pk -> - return (Signature.Public_key.hash pk, Some pk) + public_key ?interactive pk_uri + >>=? fun pk -> return (Signature.Public_key.hash pk, Some pk) let sign cctxt ?watermark sk_uri buf = let scheme = Option.unopt ~default:"" (Uri.scheme sk_uri) in - find_signer_for_key ~scheme >>=? fun signer -> + find_signer_for_key ~scheme + >>=? fun signer -> let module Signer = (val signer : SIGNER) in - Signer.sign ?watermark sk_uri buf >>=? fun signature -> - Signer.neuterize sk_uri >>=? fun pk_uri -> - Secret_key.rev_find cctxt sk_uri >>=? begin function - | None -> - public_key pk_uri - | Some name -> - Public_key.find cctxt name >>=? function - | (_, None) -> - public_key pk_uri >>=? fun pk -> - Public_key.update cctxt name (pk_uri, Some pk) >>=? fun () -> - return pk - | (_, Some pubkey) -> return pubkey - end >>=? fun pubkey -> + Signer.sign ?watermark sk_uri buf + >>=? fun signature -> + Signer.neuterize sk_uri + >>=? fun pk_uri -> + Secret_key.rev_find cctxt sk_uri + >>=? (function + | None -> + public_key pk_uri + | Some name -> ( + Public_key.find cctxt name + >>=? function + | (_, None) -> + public_key pk_uri + >>=? fun pk -> + Public_key.update cctxt name (pk_uri, Some pk) + >>=? fun () -> return pk + | (_, Some pubkey) -> + return pubkey )) + >>=? fun pubkey -> fail_unless (Signature.check ?watermark pubkey signature buf) - (Signature_mismatch sk_uri) >>=? 
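(* Editor's note (illustrative sketch, not from the patch): key URIs are
   dispatched on their scheme to a registered backend, as in
   [find_signer_for_key] above. Enumerating the registered backends with
   [registered_signers]; the helper name is a placeholder. *)

let describe_signing_schemes () =
  List.iter
    (fun (scheme, signer) ->
      let module S = (val signer : Client_keys.SIGNER) in
      Format.printf "%s: %s@." scheme S.title)
    (Client_keys.registered_signers ())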
fun () -> - return signature + (Signature_mismatch sk_uri) + >>=? fun () -> return signature let append cctxt ?watermark loc buf = - sign cctxt ?watermark loc buf >>|? fun signature -> - Signature.concat buf signature + sign cctxt ?watermark loc buf + >>|? fun signature -> Signature.concat buf signature let check ?watermark pk_uri signature buf = - public_key pk_uri >>=? fun pk -> - return (Signature.check ?watermark pk signature buf) + public_key pk_uri + >>=? fun pk -> return (Signature.check ?watermark pk signature buf) let deterministic_nonce sk_uri data = let scheme = Option.unopt ~default:"" (Uri.scheme sk_uri) in - find_signer_for_key ~scheme >>=? fun signer -> + find_signer_for_key ~scheme + >>=? fun signer -> let module Signer = (val signer : SIGNER) in Signer.deterministic_nonce sk_uri data let deterministic_nonce_hash sk_uri data = let scheme = Option.unopt ~default:"" (Uri.scheme sk_uri) in - find_signer_for_key ~scheme >>=? fun signer -> + find_signer_for_key ~scheme + >>=? fun signer -> let module Signer = (val signer : SIGNER) in Signer.deterministic_nonce_hash sk_uri data let supports_deterministic_nonces sk_uri = let scheme = Option.unopt ~default:"" (Uri.scheme sk_uri) in - find_signer_for_key ~scheme >>=? fun signer -> + find_signer_for_key ~scheme + >>=? fun signer -> let module Signer = (val signer : SIGNER) in Signer.supports_deterministic_nonces sk_uri -let register_key cctxt ?(force=false) (public_key_hash, pk_uri, sk_uri) ?public_key name = - Public_key.add ~force cctxt name (pk_uri, public_key) >>=? fun () -> - Secret_key.add ~force cctxt name sk_uri >>=? fun () -> - Public_key_hash.add ~force cctxt name public_key_hash >>=? fun () -> - return_unit +let register_key cctxt ?(force = false) (public_key_hash, pk_uri, sk_uri) + ?public_key name = + Public_key.add ~force cctxt name (pk_uri, public_key) + >>=? fun () -> + Secret_key.add ~force cctxt name sk_uri + >>=? fun () -> + Public_key_hash.add ~force cctxt name public_key_hash + >>=? fun () -> return_unit let raw_get_key (cctxt : #Client_context.wallet) pkh = - begin - Public_key_hash.rev_find cctxt pkh >>=? function - | None -> failwith "no keys for the source contract manager" - | Some n -> - Secret_key.find_opt cctxt n >>=? fun sk_uri -> - Public_key.find_opt cctxt n >>=? begin function - | None -> return_none - | Some (_, Some pk) -> return_some pk - | Some (pk_uri, None) -> - public_key pk_uri >>=? fun pk -> - Public_key.update cctxt n (pk_uri, Some pk) >>=? fun () -> - return_some pk - end >>=? fun pk -> - return (n, pk, sk_uri) - end >>= function - | (Ok (_, None, None) | Error _) as initial_result -> begin - begin - (* try to lookup for a remote key *) - find_signer_for_key ~scheme:"remote" >>=? fun signer -> - let module Signer = (val signer : SIGNER) in - let path = Signature.Public_key_hash.to_b58check pkh in - let uri = Uri.make ~scheme:Signer.scheme ~path () in - Signer.public_key uri >>=? fun pk -> - return (path, Some pk, Some uri) - end >>= function - | Error _ -> Lwt.return initial_result - | Ok _ as success -> Lwt.return success - end - | Ok _ as success -> Lwt.return success + Public_key_hash.rev_find cctxt pkh + >>=? (function + | None -> + failwith "no keys for the source contract manager" + | Some n -> + Secret_key.find_opt cctxt n + >>=? fun sk_uri -> + Public_key.find_opt cctxt n + >>=? (function + | None -> + return_none + | Some (_, Some pk) -> + return_some pk + | Some (pk_uri, None) -> + public_key pk_uri + >>=? fun pk -> + Public_key.update cctxt n (pk_uri, Some pk) + >>=? 
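(* Editor's note (illustrative sketch, not from the patch): a sign/verify
   round trip through the URI-based interface, assuming the signatures of
   [sign] and [check] declared in client_keys.mli and the error-monad
   [failwith] used throughout this file. *)

let sign_and_check (cctxt : #Client_context.wallet) sk_uri pk_uri msg =
  Client_keys.sign cctxt sk_uri msg
  >>=? fun signature ->
  Client_keys.check pk_uri signature msg
  >>=? fun ok ->
  if ok then return signature else failwith "signature does not verify"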
fun () -> return_some pk) + >>=? fun pk -> return (n, pk, sk_uri)) + >>= function + | (Ok (_, None, None) | Error _) as initial_result -> ( + (* try to lookup for a remote key *) + find_signer_for_key ~scheme:"remote" + >>=? (fun signer -> + let module Signer = (val signer : SIGNER) in + let path = Signature.Public_key_hash.to_b58check pkh in + let uri = Uri.make ~scheme:Signer.scheme ~path () in + Signer.public_key uri + >>=? fun pk -> return (path, Some pk, Some uri)) + >>= function + | Error _ -> + Lwt.return initial_result + | Ok _ as success -> + Lwt.return success ) + | Ok _ as success -> + Lwt.return success let get_key cctxt pkh = - raw_get_key cctxt pkh >>=? function - | (pkh, Some pk, Some sk) -> return (pkh, pk, sk) - | (_pkh, _pk, None) -> failwith "Unknown secret key for %a" Signature.Public_key_hash.pp pkh - | (_pkh, None, _sk) -> failwith "Unknown public key for %a" Signature.Public_key_hash.pp pkh + raw_get_key cctxt pkh + >>=? function + | (pkh, Some pk, Some sk) -> + return (pkh, pk, sk) + | (_pkh, _pk, None) -> + failwith "Unknown secret key for %a" Signature.Public_key_hash.pp pkh + | (_pkh, None, _sk) -> + failwith "Unknown public key for %a" Signature.Public_key_hash.pp pkh let get_public_key cctxt pkh = - raw_get_key cctxt pkh >>=? function - | (pkh, Some pk, _sk) -> return (pkh, pk) - | (_pkh, None, _sk) -> failwith "Unknown public key for %a" Signature.Public_key_hash.pp pkh + raw_get_key cctxt pkh + >>=? function + | (pkh, Some pk, _sk) -> + return (pkh, pk) + | (_pkh, None, _sk) -> + failwith "Unknown public key for %a" Signature.Public_key_hash.pp pkh let get_keys (cctxt : #Client_context.wallet) = - Secret_key.load cctxt >>=? fun sks -> - Lwt_list.filter_map_s begin fun (name, sk_uri) -> - begin - Public_key_hash.find cctxt name >>=? fun pkh -> - Public_key.find cctxt name >>=? begin function - | _, Some pk -> return pk - | pk_uri, None -> - public_key pk_uri >>=? fun pk -> - Public_key.update cctxt name (pk_uri, Some pk) >>=? fun () -> - return pk - end >>=? fun pk -> - return (name, pkh, pk, sk_uri) - end >>= function - | Ok r -> Lwt.return_some r - | Error _ -> Lwt.return_none - end sks >>= fun keys -> - return keys + Secret_key.load cctxt + >>=? fun sks -> + Lwt_list.filter_map_s + (fun (name, sk_uri) -> + Public_key_hash.find cctxt name + >>=? (fun pkh -> + Public_key.find cctxt name + >>=? (function + | (_, Some pk) -> + return pk + | (pk_uri, None) -> + public_key pk_uri + >>=? fun pk -> + Public_key.update cctxt name (pk_uri, Some pk) + >>=? fun () -> return pk) + >>=? fun pk -> return (name, pkh, pk, sk_uri)) + >>= function Ok r -> Lwt.return_some r | Error _ -> Lwt.return_none) + sks + >>= fun keys -> return keys let list_keys cctxt = - Public_key_hash.load cctxt >>=? fun l -> + Public_key_hash.load cctxt + >>=? fun l -> map_s (fun (name, pkh) -> - raw_get_key cctxt pkh >>= function - | Ok (_name, pk, sk_uri) -> - return (name, pkh, pk, sk_uri) - | Error _ -> - return (name, pkh, None, None)) + raw_get_key cctxt pkh + >>= function + | Ok (_name, pk, sk_uri) -> + return (name, pkh, pk, sk_uri) + | Error _ -> + return (name, pkh, None, None)) l let alias_keys cctxt name = - Public_key_hash.find cctxt name >>=? fun pkh -> - raw_get_key cctxt pkh >>= function - | Ok (_name, pk, sk_uri) -> return_some (pkh, pk, sk_uri) - | Error _ -> return_none + Public_key_hash.find cctxt name + >>=? 
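(* Editor's note (illustrative sketch, not from the patch): registering a
   (public key hash, public key URI, secret key URI) triple under an alias
   and resolving it again, assuming the signatures of [register_key] and
   [get_key] from client_keys.mli; "my_account" is a placeholder alias. *)

let save_and_reload (cctxt : #Client_context.full) pkh pk_uri sk_uri =
  Client_keys.register_key
    cctxt
    ~force:false
    (pkh, pk_uri, sk_uri)
    "my_account"
  >>=? fun () ->
  Client_keys.get_key cctxt pkh
  >>=? fun (alias, _pk, _sk_uri) ->
  cctxt#message "%a is registered as %s" Signature.Public_key_hash.pp pkh alias
  >>= fun () -> return_unit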
fun pkh -> + raw_get_key cctxt pkh + >>= function + | Ok (_name, pk, sk_uri) -> + return_some (pkh, pk, sk_uri) + | Error _ -> + return_none let force_switch () = - Clic.switch - ~long:"force" ~short:'f' - ~doc:"overwrite existing keys" () + Clic.switch ~long:"force" ~short:'f' ~doc:"overwrite existing keys" () diff --git a/src/lib_client_base/client_keys.mli b/src/lib_client_base/client_keys.mli index 48698b9271aa761c17200c41ce15e2c8b8d682be..39aeb5ceb1b946c4508255d8ae35e66ce091a7cf 100644 --- a/src/lib_client_base/client_keys.mli +++ b/src/lib_client_base/client_keys.mli @@ -26,26 +26,36 @@ (** {2 Cryptographic keys tables } *) type pk_uri = private Uri.t + type sk_uri = private Uri.t val pk_uri_parameter : unit -> (pk_uri, 'a) Clic.parameter + val pk_uri_param : - ?name:string -> ?desc:string -> - ('a, 'b) Clic.params -> (pk_uri -> 'a, 'b) Clic.params + ?name:string -> + ?desc:string -> + ('a, 'b) Clic.params -> + (pk_uri -> 'a, 'b) Clic.params + val sk_uri_parameter : unit -> (sk_uri, 'a) Clic.parameter + val sk_uri_param : - ?name:string -> ?desc:string -> - ('a, 'b) Clic.params -> (sk_uri -> 'a, 'b) Clic.params + ?name:string -> + ?desc:string -> + ('a, 'b) Clic.params -> + (sk_uri -> 'a, 'b) Clic.params type error += Unregistered_key_scheme of string + type error += Invalid_uri of Uri.t module Public_key_hash : Client_aliases.Alias with type t = Signature.Public_key_hash.t + module Public_key : Client_aliases.Alias with type t = pk_uri * Signature.Public_key.t option -module Secret_key : - Client_aliases.Alias with type t = sk_uri + +module Secret_key : Client_aliases.Alias with type t = sk_uri module Logging : sig val tag : string Tag.def @@ -54,116 +64,129 @@ end (** {2 Interface for external signing modules.} *) module type SIGNER = sig - - val scheme : string (** [scheme] is the name of the scheme implemented by this signer module. *) + val scheme : string - val title : string (** [title] is a one-line human readable description of the signer. *) + val title : string - val description : string (** [description] is a multi-line human readable description of the signer, that should include the format of key specifications. *) + val description : string - val neuterize : sk_uri -> pk_uri tzresult Lwt.t (** [neuterize sk] is the corresponding [pk]. *) + val neuterize : sk_uri -> pk_uri tzresult Lwt.t - val public_key : - ?interactive: Client_context.io_wallet -> - pk_uri -> Signature.Public_key.t tzresult Lwt.t (** [public_key pk] is the Ed25519 version of [pk]. Some signer implementations improve long-term security by requiring human/manual validation while importing keys, the [?interactive] argument can be used to prompt the user in such case. *) - - val public_key_hash : - ?interactive: Client_context.io_wallet -> + val public_key : + ?interactive:Client_context.io_wallet -> pk_uri -> - (Signature.Public_key_hash.t * Signature.Public_key.t option) tzresult Lwt.t + Signature.Public_key.t tzresult Lwt.t + (** [public_key_hash pk] is the hash of [pk]. As some signers will query the full public key to obtain the hash, it can be optionally returned to reduce the amount of queries. See {!public_key} for the [?interactive] argument. *) + val public_key_hash : + ?interactive:Client_context.io_wallet -> + pk_uri -> + (Signature.Public_key_hash.t * Signature.Public_key.t option) tzresult + Lwt.t - val sign : - ?watermark: Signature.watermark -> - sk_uri -> MBytes.t -> Signature.t tzresult Lwt.t (** [sign ?watermark sk data] is signature obtained by signing [data] with [sk]. 
*) + val sign : + ?watermark:Signature.watermark -> + sk_uri -> + MBytes.t -> + Signature.t tzresult Lwt.t - val deterministic_nonce : - sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t (** [deterministic_nonce sk data] is a nonce obtained deterministically from [data] and [sk]. *) + val deterministic_nonce : sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t - val deterministic_nonce_hash : - sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t (** [deterministic_nonce_hash sk data] is a nonce hash obtained deterministically from [data] and [sk]. *) + val deterministic_nonce_hash : sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t - val supports_deterministic_nonces : sk_uri -> bool tzresult Lwt.t (** [supports_deterministic_nonces] indicates whether the [deterministic_nonce] functionality is supported. *) - + val supports_deterministic_nonces : sk_uri -> bool tzresult Lwt.t end -val register_signer : (module SIGNER) -> unit (** [register_signer signer] registers first-class module [signer] as signer for keys with scheme [(val signer : SIGNER).scheme]. *) +val register_signer : (module SIGNER) -> unit val registered_signers : unit -> (string * (module SIGNER)) list val public_key : - ?interactive: Client_context.io_wallet -> - pk_uri -> Signature.Public_key.t tzresult Lwt.t + ?interactive:Client_context.io_wallet -> + pk_uri -> + Signature.Public_key.t tzresult Lwt.t val public_key_hash : - ?interactive: Client_context.io_wallet -> - pk_uri -> (Signature.Public_key_hash.t * Signature.Public_key.t option) tzresult Lwt.t + ?interactive:Client_context.io_wallet -> + pk_uri -> + (Signature.Public_key_hash.t * Signature.Public_key.t option) tzresult Lwt.t val neuterize : sk_uri -> pk_uri tzresult Lwt.t val sign : #Client_context.wallet -> ?watermark:Signature.watermark -> - sk_uri -> MBytes.t -> Signature.t tzresult Lwt.t + sk_uri -> + MBytes.t -> + Signature.t tzresult Lwt.t val append : #Client_context.wallet -> ?watermark:Signature.watermark -> - sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t + sk_uri -> + MBytes.t -> + MBytes.t tzresult Lwt.t val check : ?watermark:Signature.watermark -> - pk_uri -> Signature.t -> MBytes.t -> bool tzresult Lwt.t + pk_uri -> + Signature.t -> + MBytes.t -> + bool tzresult Lwt.t -val deterministic_nonce : - sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t +val deterministic_nonce : sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t -val deterministic_nonce_hash : - sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t +val deterministic_nonce_hash : sk_uri -> MBytes.t -> MBytes.t tzresult Lwt.t -val supports_deterministic_nonces : - sk_uri -> bool tzresult Lwt.t +val supports_deterministic_nonces : sk_uri -> bool tzresult Lwt.t val register_key : #Client_context.wallet -> ?force:bool -> - (Signature.Public_key_hash.t * pk_uri * sk_uri) -> - ?public_key: Signature.Public_key.t -> - string -> unit tzresult Lwt.t + Signature.Public_key_hash.t * pk_uri * sk_uri -> + ?public_key:Signature.Public_key.t -> + string -> + unit tzresult Lwt.t val list_keys : #Client_context.wallet -> - (string * Public_key_hash.t * Signature.public_key option * sk_uri option) list tzresult Lwt.t + (string * Public_key_hash.t * Signature.public_key option * sk_uri option) + list + tzresult + Lwt.t val alias_keys : - #Client_context.wallet -> string -> - (Public_key_hash.t * Signature.public_key option * sk_uri option) option tzresult Lwt.t + #Client_context.wallet -> + string -> + (Public_key_hash.t * Signature.public_key option * sk_uri option) option + tzresult + Lwt.t val get_key : #Client_context.wallet -> @@ -175,13 
+198,15 @@ val get_public_key : Public_key_hash.t -> (string * Signature.Public_key.t) tzresult Lwt.t -val get_keys: +val get_keys : #Client_context.wallet -> - (string * Public_key_hash.t * Signature.Public_key.t * sk_uri) list tzresult Lwt.t + (string * Public_key_hash.t * Signature.Public_key.t * sk_uri) list tzresult + Lwt.t val force_switch : unit -> (bool, 'ctx) Clic.arg (**/**) val make_pk_uri : Uri.t -> pk_uri + val make_sk_uri : Uri.t -> sk_uri diff --git a/src/lib_client_base_unix/.ocamlformat b/src/lib_client_base_unix/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_client_base_unix/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_client_base_unix/client_config.ml b/src/lib_client_base_unix/client_config.ml index fb4760df51acb217f1914c6d10b4fdb31bfe5097..08ba3fc2ba51987104dc589219cabfa63e6a060c 100644 --- a/src/lib_client_base_unix/client_config.ml +++ b/src/lib_client_base_unix/client_config.ml @@ -27,129 +27,155 @@ (* Tezos Command line interface - Configuration and Arguments Parsing *) type error += Invalid_chain_argument of string + type error += Invalid_block_argument of string + type error += Invalid_protocol_argument of string + type error += Invalid_port_arg of string + type error += Invalid_remote_signer_argument of string + type error += Invalid_wait_arg of string + let () = register_error_kind `Branch - ~id: "badChainArgument" - ~title: "Bad Chain Argument" - ~description: "Chain argument could not be parsed" - ~pp: - (fun ppf s -> - Format.fprintf ppf "Value %s is not a value chain reference." s) + ~id:"badChainArgument" + ~title:"Bad Chain Argument" + ~description:"Chain argument could not be parsed" + ~pp:(fun ppf s -> + Format.fprintf ppf "Value %s is not a value chain reference." s) Data_encoding.(obj1 (req "value" string)) (function Invalid_chain_argument s -> Some s | _ -> None) (fun s -> Invalid_chain_argument s) ; register_error_kind `Branch - ~id: "badBlockArgument" - ~title: "Bad Block Argument" - ~description: "Block argument could not be parsed" - ~pp: - (fun ppf s -> - Format.fprintf ppf "Value %s is not a value block reference." s) + ~id:"badBlockArgument" + ~title:"Bad Block Argument" + ~description:"Block argument could not be parsed" + ~pp:(fun ppf s -> + Format.fprintf ppf "Value %s is not a value block reference." s) Data_encoding.(obj1 (req "value" string)) (function Invalid_block_argument s -> Some s | _ -> None) (fun s -> Invalid_block_argument s) ; register_error_kind `Branch - ~id: "badProtocolArgument" - ~title: "Bad Protocol Argument" - ~description: "Protocol argument could not be parsed" - ~pp: - (fun ppf s -> - Format.fprintf ppf "Value %s does not correspond to any known protocol." s) + ~id:"badProtocolArgument" + ~title:"Bad Protocol Argument" + ~description:"Protocol argument could not be parsed" + ~pp:(fun ppf s -> + Format.fprintf + ppf + "Value %s does not correspond to any known protocol." 
+ s) Data_encoding.(obj1 (req "value" string)) (function Invalid_protocol_argument s -> Some s | _ -> None) (fun s -> Invalid_protocol_argument s) ; register_error_kind `Branch - ~id: "invalidPortArgument" - ~title: "Bad Port Argument" - ~description: "Port argument could not be parsed" - ~pp: - (fun ppf s -> - Format.fprintf ppf "Value %s is not a valid TCP port." s) + ~id:"invalidPortArgument" + ~title:"Bad Port Argument" + ~description:"Port argument could not be parsed" + ~pp:(fun ppf s -> Format.fprintf ppf "Value %s is not a valid TCP port." s) Data_encoding.(obj1 (req "value" string)) (function Invalid_port_arg s -> Some s | _ -> None) (fun s -> Invalid_port_arg s) ; register_error_kind `Branch - ~id: "invalid_remote_signer_argument" - ~title: "Unexpected URI of remote signer" - ~description: "The remote signer argument could not be parsed" - ~pp: - (fun ppf s -> - Format.fprintf ppf "Value '%s' is not a valid URI." s) + ~id:"invalid_remote_signer_argument" + ~title:"Unexpected URI of remote signer" + ~description:"The remote signer argument could not be parsed" + ~pp:(fun ppf s -> Format.fprintf ppf "Value '%s' is not a valid URI." s) Data_encoding.(obj1 (req "value" string)) (function Invalid_remote_signer_argument s -> Some s | _ -> None) (fun s -> Invalid_remote_signer_argument s) ; register_error_kind `Branch - ~id: "invalidWaitArgument" - ~title: "Bad Wait Argument" - ~description: "Wait argument could not be parsed" - ~pp: - (fun ppf s -> - Format.fprintf ppf "Value %s is not a valid number of confirmation, nor 'none'." s) + ~id:"invalidWaitArgument" + ~title:"Bad Wait Argument" + ~description:"Wait argument could not be parsed" + ~pp:(fun ppf s -> + Format.fprintf + ppf + "Value %s is not a valid number of confirmation, nor 'none'." + s) Data_encoding.(obj1 (req "value" string)) (function Invalid_wait_arg s -> Some s | _ -> None) (fun s -> Invalid_wait_arg s) let home = try Sys.getenv "HOME" with Not_found -> "/root" -let default_base_dir = - Filename.concat home ".tezos-client" +let default_base_dir = Filename.concat home ".tezos-client" let default_chain = `Main + let default_block = `Head 0 -let (//) = Filename.concat +let ( // ) = Filename.concat module Cfg_file = struct - - type t = { - base_dir: string ; - node_addr: string ; - node_port: int ; - tls: bool ; - web_port: int ; - remote_signer: Uri.t option ; - confirmations: int option ; - password_filename: string option ; + type t = { + base_dir : string; + node_addr : string; + node_port : int; + tls : bool; + web_port : int; + remote_signer : Uri.t option; + confirmations : int option; + password_filename : string option } - let default = { - base_dir = default_base_dir ; - node_addr = "localhost" ; - node_port = 8732 ; - tls = false ; - web_port = 8080 ; - remote_signer = None ; - confirmations = Some 0 ; - password_filename = None ; - } + let default = + { base_dir = default_base_dir; + node_addr = "localhost"; + node_port = 8732; + tls = false; + web_port = 8080; + remote_signer = None; + confirmations = Some 0; + password_filename = None } open Data_encoding let encoding = conv - (fun { base_dir ; node_addr ; node_port ; tls ; web_port ; - remote_signer ; confirmations ; password_filename } -> - (base_dir, Some node_addr, Some node_port, - Some tls, Some web_port, remote_signer, confirmations, password_filename )) - (fun (base_dir, node_addr, node_port, tls, web_port, - remote_signer, confirmations, password_filename) -> + (fun { base_dir; + node_addr; + node_port; + tls; + web_port; + remote_signer; + 
confirmations; + password_filename } -> + ( base_dir, + Some node_addr, + Some node_port, + Some tls, + Some web_port, + remote_signer, + confirmations, + password_filename )) + (fun ( base_dir, + node_addr, + node_port, + tls, + web_port, + remote_signer, + confirmations, + password_filename ) -> let node_addr = Option.unopt ~default:default.node_addr node_addr in let node_port = Option.unopt ~default:default.node_port node_port in let tls = Option.unopt ~default:default.tls tls in let web_port = Option.unopt ~default:default.web_port web_port in - { base_dir ; node_addr ; node_port ; tls ; web_port ; - remote_signer ; confirmations ; password_filename }) + { base_dir; + node_addr; + node_port; + tls; + web_port; + remote_signer; + confirmations; + password_filename }) (obj8 (req "base_dir" string) (opt "node_addr" string) @@ -158,42 +184,37 @@ module Cfg_file = struct (opt "web_port" int16) (opt "remote_signer" RPC_client.uri_encoding) (opt "confirmations" int8) - (opt "password_filename" string) - ) + (opt "password_filename" string)) - let from_json json = - Data_encoding.Json.destruct encoding json + let from_json json = Data_encoding.Json.destruct encoding json let read fp = - Lwt_utils_unix.Json.read_file fp >>=? fun json -> - return (from_json json) + Lwt_utils_unix.Json.read_file fp >>=? fun json -> return (from_json json) let write out cfg = - Lwt_utils_unix.Json.write_file out + Lwt_utils_unix.Json.write_file + out (Data_encoding.Json.construct encoding cfg) - end type cli_args = { - chain: Chain_services.chain ; - block: Shell_services.block ; - confirmations: int option ; - password_filename: string option ; - protocol: Protocol_hash.t option ; - print_timings: bool ; - log_requests: bool ; -} - -let default_cli_args = { - chain = default_chain ; - block = default_block ; - confirmations = Some 0 ; - password_filename = None ; - protocol = None ; - print_timings = false ; - log_requests = false ; + chain : Chain_services.chain; + block : Shell_services.block; + confirmations : int option; + password_filename : string option; + protocol : Protocol_hash.t option; + print_timings : bool; + log_requests : bool } +let default_cli_args = + { chain = default_chain; + block = default_block; + confirmations = Some 0; + password_filename = None; + protocol = None; + print_timings = false; + log_requests = false } open Clic @@ -201,46 +222,43 @@ let string_parameter () : (string, #Client_context.full) parameter = parameter (fun _ x -> return x) let chain_parameter () = - parameter - (fun _ chain -> - match Chain_services.parse_chain chain with - | Error _ -> fail (Invalid_chain_argument chain) - | Ok chain -> return chain) + parameter (fun _ chain -> + match Chain_services.parse_chain chain with + | Error _ -> + fail (Invalid_chain_argument chain) + | Ok chain -> + return chain) let block_parameter () = - parameter - (fun _ block -> - match Block_services.parse_block block with - | Error _ -> fail (Invalid_block_argument block) - | Ok block -> return block) + parameter (fun _ block -> + match Block_services.parse_block block with + | Error _ -> + fail (Invalid_block_argument block) + | Ok block -> + return block) let wait_parameter () = - parameter - (fun _ wait -> - match wait with - | "no" | "none" -> return_none - | _ -> - try - let w = int_of_string wait in - if 0 <= w then - return_some w - else - fail (Invalid_wait_arg wait) - with _ -> fail (Invalid_wait_arg wait)) + parameter (fun _ wait -> + match wait with + | "no" | "none" -> + return_none + | _ -> ( + try + let w = 
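(* Editor's note (illustrative sketch, not from the patch): reading the
   configuration file with [Cfg_file.read] and falling back to the built-in
   defaults when it does not exist yet, assuming [Cfg_file] is exposed by
   client_config.mli; the helper name is a placeholder. *)

let load_or_initialise config_file =
  if Sys.file_exists config_file then Client_config.Cfg_file.read config_file
  else
    Client_config.Cfg_file.(write config_file default)
    >>=? fun () -> return Client_config.Cfg_file.default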
int_of_string wait in + if 0 <= w then return_some w else fail (Invalid_wait_arg wait) + with _ -> fail (Invalid_wait_arg wait) )) let protocol_parameter () = - parameter - (fun _ arg -> - try - let (hash,_commands) = - List.find (fun (hash,_commands) -> - String.has_prefix ~prefix:arg - (Protocol_hash.to_b58check hash)) - (Client_commands.get_versions ()) - in - return_some hash - with Not_found -> fail (Invalid_protocol_argument arg) - ) + parameter (fun _ arg -> + try + let (hash, _commands) = + List.find + (fun (hash, _commands) -> + String.has_prefix ~prefix:arg (Protocol_hash.to_b58check hash)) + (Client_commands.get_versions ()) + in + return_some hash + with Not_found -> fail (Invalid_protocol_argument arg)) (* Command-line only args (not in config file) *) let base_dir_arg () = @@ -248,10 +266,12 @@ let base_dir_arg () = ~long:"base-dir" ~short:'d' ~placeholder:"path" - ~doc:("client data directory\n\ - The directory where the Tezos client will store all its data.\n\ - By default: '" ^ default_base_dir ^"'.") + ~doc: + ( "client data directory\n\ + The directory where the Tezos client will store all its data.\n\ + By default: '" ^ default_base_dir ^ "'." ) (string_parameter ()) + let config_file_arg () = arg ~long:"config-file" @@ -259,36 +279,40 @@ let config_file_arg () = ~placeholder:"path" ~doc:"configuration file" (string_parameter ()) + let timings_switch () = - switch - ~long:"timings" - ~short:'t' - ~doc:"show RPC request times" - () + switch ~long:"timings" ~short:'t' ~doc:"show RPC request times" () + let chain_arg () = default_arg ~long:"chain" ~placeholder:"hash|tag" - ~doc:"chain on which to apply contextual commands (possible tags \ - are 'main' and 'test')" + ~doc: + "chain on which to apply contextual commands (possible tags are 'main' \ + and 'test')" ~default:(Chain_services.to_string default_cli_args.chain) (chain_parameter ()) + let block_arg () = default_arg ~long:"block" ~short:'b' ~placeholder:"hash|tag" - ~doc:"block on which to apply contextual commands (possible tags \ - are 'head' and 'genesis')" + ~doc: + "block on which to apply contextual commands (possible tags are 'head' \ + and 'genesis')" ~default:(Block_services.to_string default_cli_args.block) (block_parameter ()) + let wait_arg () = arg ~long:"wait" ~short:'w' ~placeholder:"none|<int>" - ~doc:"how many confirmation blocks before to consider an operation as included" + ~doc: + "how many confirmation blocks before to consider an operation as included" (wait_parameter ()) + let protocol_arg () = arg ~long:"protocol" @@ -296,12 +320,9 @@ let protocol_arg () = ~placeholder:"hash" ~doc:"use commands of a specific protocol" (protocol_parameter ()) + let log_requests_switch () = - switch - ~long:"log-requests" - ~short:'l' - ~doc:"log all requests to the node" - () + switch ~long:"log-requests" ~short:'l' ~doc:"log all requests to the node" () (* Command-line args which can be set in config file as well *) let addr_arg () = @@ -311,31 +332,28 @@ let addr_arg () = ~placeholder:"IP addr|host" ~doc:"IP address of the node" (string_parameter ()) + let port_arg () = arg ~long:"port" ~short:'P' ~placeholder:"number" ~doc:"RPC port of the node" - (parameter - (fun _ x -> try - return (int_of_string x) - with Failure _ -> - fail (Invalid_port_arg x))) + (parameter (fun _ x -> + try return (int_of_string x) + with Failure _ -> fail (Invalid_port_arg x))) + let tls_switch () = - switch - ~long:"tls" - ~short:'S' - ~doc:"use TLS to connect to node." 
- () + switch ~long:"tls" ~short:'S' ~doc:"use TLS to connect to node." () + let remote_signer_arg () = arg ~long:"remote-signer" ~short:'R' ~placeholder:"uri" ~doc:"URI of the remote signer" - (parameter - (fun _ x -> Tezos_signer_backends.Remote.parse_base_uri x)) + (parameter (fun _ x -> Tezos_signer_backends.Remote.parse_base_uri x)) + let password_filename_arg () = arg ~long:"password-filename" @@ -345,62 +363,74 @@ let password_filename_arg () = (string_parameter ()) let read_config_file config_file = - Lwt_utils_unix.Json.read_file config_file >>=? fun cfg_json -> + Lwt_utils_unix.Json.read_file config_file + >>=? fun cfg_json -> try return @@ Cfg_file.from_json cfg_json with exn -> failwith "Can't parse the configuration file: %s@,%a" - config_file (fun ppf exn -> Json_encoding.print_error ppf exn) exn + config_file + (fun ppf exn -> Json_encoding.print_error ppf exn) + exn let default_config_file_name = "config" let commands config_file cfg = let open Clic in - let group = { Clic.name = "config" ; - title = "Commands for editing and viewing the client's config file" } in - [ command ~group ~desc:"Show the config file." + let group = + { Clic.name = "config"; + title = "Commands for editing and viewing the client's config file" } + in + [ command + ~group + ~desc:"Show the config file." no_options - (fixed [ "config" ; "show" ]) + (fixed ["config"; "show"]) (fun () (cctxt : #Client_context.full) -> - let pp_cfg ppf cfg = Format.fprintf ppf "%a" Data_encoding.Json.pp (Data_encoding.Json.construct Cfg_file.encoding cfg) in - if not @@ Sys.file_exists config_file then - cctxt#warning - "@[<v 2>Warning: no config file at %s,@,\ - displaying the default configuration.@]" - config_file >>= fun () -> - cctxt#warning "%a@," pp_cfg Cfg_file.default >>= return - else - read_config_file config_file >>=? fun cfg -> - cctxt#message "%a@," pp_cfg cfg >>= return) ; - - command ~group ~desc:"Reset the config file to the factory defaults." + let pp_cfg ppf cfg = + Format.fprintf + ppf + "%a" + Data_encoding.Json.pp + (Data_encoding.Json.construct Cfg_file.encoding cfg) + in + if not @@ Sys.file_exists config_file then + cctxt#warning + "@[<v 2>Warning: no config file at %s,@,\ + displaying the default configuration.@]" + config_file + >>= fun () -> cctxt#warning "%a@," pp_cfg Cfg_file.default >>= return + else + read_config_file config_file + >>=? fun cfg -> cctxt#message "%a@," pp_cfg cfg >>= return); + command + ~group + ~desc:"Reset the config file to the factory defaults." no_options - (fixed [ "config" ; "reset" ]) - (fun () _cctxt -> - Cfg_file.(write config_file default)) ; - - command ~group - ~desc:"Update the config based on the current cli values.\n\ - Loads the current configuration (default or as specified \ - with `-config-file`), applies alterations from other \ - command line arguments (such as the node's address, \ - etc.), and overwrites the updated configuration file." + (fixed ["config"; "reset"]) + (fun () _cctxt -> Cfg_file.(write config_file default)); + command + ~group + ~desc: + "Update the config based on the current cli values.\n\ + Loads the current configuration (default or as specified with \ + `-config-file`), applies alterations from other command line \ + arguments (such as the node's address, etc.), and overwrites the \ + updated configuration file." 
no_options - (fixed [ "config" ; "update" ]) - (fun () _cctxt -> - Cfg_file.(write config_file cfg)) ; - - command ~group - ~desc:"Create a config file based on the current CLI values.\n\ - If the `-file` option is not passed, this will initialize \ - the default config file, based on default parameters, \ - altered by other command line options (such as the node's \ - address, etc.).\n\ - Otherwise, it will create a new config file, based on the \ - default parameters (or the the ones specified with \ - `-config-file`), altered by other command line \ - options.\n\ - The command will always fail if the file already exists." + (fixed ["config"; "update"]) + (fun () _cctxt -> Cfg_file.(write config_file cfg)); + command + ~group + ~desc: + "Create a config file based on the current CLI values.\n\ + If the `-file` option is not passed, this will initialize the \ + default config file, based on default parameters, altered by other \ + command line options (such as the node's address, etc.).\n\ + Otherwise, it will create a new config file, based on the default \ + parameters (or the the ones specified with `-config-file`), altered \ + by other command line options.\n\ + The command will always fail if the file already exists." (args1 (default_arg ~long:"output" @@ -409,13 +439,12 @@ let commands config_file cfg = ~doc:"path at which to create the file" ~default:(cfg.base_dir // default_config_file_name) (parameter (fun _ctx str -> return str)))) - (fixed [ "config" ; "init" ]) + (fixed ["config"; "init"]) (fun config_file _cctxt -> - if not (Sys.file_exists config_file) - then Cfg_file.(write config_file cfg) (* Should be default or command would have failed *) - else failwith "Config file already exists at location") ; - ] - + if not (Sys.file_exists config_file) then + Cfg_file.(write config_file cfg) + (* Should be default or command would have failed *) + else failwith "Config file already exists at location") ] let global_options () = args13 @@ -433,139 +462,144 @@ let global_options () = (remote_signer_arg ()) (password_filename_arg ()) - type parsed_config_args = { - parsed_config_file : Cfg_file.t option ; - parsed_args : cli_args option ; - config_commands : Client_context.full command list ; - base_dir : string option ; - require_auth : bool ; - password_filename : string option ; -} -let default_parsed_config_args = { - parsed_config_file = None ; - parsed_args = None ; - config_commands = [] ; - base_dir = None ; - require_auth = false ; - password_filename = None ; + parsed_config_file : Cfg_file.t option; + parsed_args : cli_args option; + config_commands : Client_context.full command list; + base_dir : string option; + require_auth : bool; + password_filename : string option } +let default_parsed_config_args = + { parsed_config_file = None; + parsed_args = None; + config_commands = []; + base_dir = None; + require_auth = false; + password_filename = None } + let parse_config_args (ctx : #Client_context.full) argv = - parse_global_options - (global_options ()) - ctx - argv >>=? - fun ((base_dir, - config_file, - timings, - chain, - block, - confirmations, - protocol, - log_requests, - node_addr, - node_port, - tls, - remote_signer, - password_filename), remaining) -> - begin match base_dir with - | None -> - let base_dir = default_base_dir in - unless (Sys.file_exists base_dir) begin fun () -> - Lwt_utils_unix.create_dir base_dir >>= return - end >>=? fun () -> - return base_dir - | Some dir -> - if not (Sys.file_exists dir) - then failwith "Specified -base-dir does not exist. 
Please create the directory and try again." - else if Sys.is_directory dir - then return dir - else failwith "Specified -base-dir must be a directory" - end >>=? fun base_dir -> - begin match config_file with - | None -> return @@ base_dir // default_config_file_name - | Some config_file -> - if Sys.file_exists config_file - then return config_file - else failwith "Config file specified in option does not exist. Use `client config init` to create one." - end >>=? fun config_file -> + parse_global_options (global_options ()) ctx argv + >>=? fun ( ( base_dir, + config_file, + timings, + chain, + block, + confirmations, + protocol, + log_requests, + node_addr, + node_port, + tls, + remote_signer, + password_filename ), + remaining ) -> + ( match base_dir with + | None -> + let base_dir = default_base_dir in + unless (Sys.file_exists base_dir) (fun () -> + Lwt_utils_unix.create_dir base_dir >>= return) + >>=? fun () -> return base_dir + | Some dir -> + if not (Sys.file_exists dir) then + failwith + "Specified -base-dir does not exist. Please create the directory \ + and try again." + else if Sys.is_directory dir then return dir + else failwith "Specified -base-dir must be a directory" ) + >>=? fun base_dir -> + ( match config_file with + | None -> + return @@ (base_dir // default_config_file_name) + | Some config_file -> + if Sys.file_exists config_file then return config_file + else + failwith + "Config file specified in option does not exist. Use `client config \ + init` to create one." ) + >>=? fun config_file -> let config_dir = Filename.dirname config_file in - let protocol = - match protocol with - | None -> None - | Some p -> p - in - begin - if not (Sys.file_exists config_file) then - return { Cfg_file.default with base_dir = base_dir } - else - read_config_file config_file - end >>=? fun cfg -> + let protocol = match protocol with None -> None | Some p -> p in + ( if not (Sys.file_exists config_file) then + return {Cfg_file.default with base_dir} + else read_config_file config_file ) + >>=? fun cfg -> let tls = cfg.tls || tls in let node_addr = Option.unopt ~default:cfg.node_addr node_addr in let node_port = Option.unopt ~default:cfg.node_port node_port in - Tezos_signer_backends.Remote.read_base_uri_from_env () >>=? fun remote_signer_env -> + Tezos_signer_backends.Remote.read_base_uri_from_env () + >>=? fun remote_signer_env -> let remote_signer = - Option.first_some remote_signer - (Option.first_some remote_signer_env cfg.remote_signer) in + Option.first_some + remote_signer + (Option.first_some remote_signer_env cfg.remote_signer) + in let confirmations = Option.unopt ~default:cfg.confirmations confirmations in - let cfg = { cfg with tls ; node_port ; node_addr ; - remote_signer ; confirmations ; password_filename } in - if Sys.file_exists base_dir && not (Sys.is_directory base_dir) then begin + let cfg = + { cfg with + tls; + node_port; + node_addr; + remote_signer; + confirmations; + password_filename } + in + if Sys.file_exists base_dir && not (Sys.is_directory base_dir) then ( Format.eprintf "%s is not a directory.@." base_dir ; - exit 1 ; - end ; - if Sys.file_exists config_dir && not (Sys.is_directory config_dir) then begin + exit 1 ) ; + if Sys.file_exists config_dir && not (Sys.is_directory config_dir) then ( Format.eprintf "%s is not a directory.@." 
config_dir ; - exit 1 ; - end ; - Lwt_utils_unix.create_dir config_dir >>= fun () -> + exit 1 ) ; + Lwt_utils_unix.create_dir config_dir + >>= fun () -> return - ({ default_parsed_config_args with - parsed_config_file = Some cfg ; - parsed_args = - Some { chain ; block ; - confirmations ; - print_timings = timings ; - log_requests ; - password_filename ; - protocol } ; - config_commands = commands config_file cfg - }, - remaining) + ( { default_parsed_config_args with + parsed_config_file = Some cfg; + parsed_args = + Some + { chain; + block; + confirmations; + print_timings = timings; + log_requests; + password_filename; + protocol }; + config_commands = commands config_file cfg }, + remaining ) type t = - string option * - string option * - bool * - Shell_services.chain * - Shell_services.block * - int option option * - Protocol_hash.t option option * - bool * - string option * - int option * - bool * - Uri.t option * string option + * string option + * bool + * Shell_services.chain + * Shell_services.block + * int option option + * Protocol_hash.t option option + * bool + * string option + * int option + * bool + * Uri.t option + * string option + +module type Remote_params = sig + val authenticate : + Signature.public_key_hash list -> MBytes.t -> Signature.t tzresult Lwt.t -module type Remote_params = -sig - val authenticate: Signature.public_key_hash list -> - MBytes.t -> Signature.t tzresult Lwt.t val logger : RPC_client.logger end -let other_registrations : (_ -> (module Remote_params) -> _) option = - Some (fun parsed_config_file (module Remote_params) -> +let other_registrations : (_ -> (module Remote_params) -> _) option = + Some + (fun parsed_config_file (module Remote_params) -> Option.iter parsed_config_file.Cfg_file.remote_signer ~f:(fun signer -> Client_keys.register_signer - (module Tezos_signer_backends.Remote.Make(struct - let default = signer - include Remote_params - end)) - )) + ( module Tezos_signer_backends.Remote.Make (struct + let default = signer + + include Remote_params + end) ))) let clic_commands ~base_dir:_ ~config_commands ~builtin_commands ~other_commands ~require_auth:_ = diff --git a/src/lib_client_base_unix/client_confirmations_unix.ml b/src/lib_client_base_unix/client_confirmations_unix.ml index 943a45f4bcf71c5d596912df9b86fa61c20b124e..b867fe7034480f31899ef0cbb013b78da38e63a2 100644 --- a/src/lib_client_base_unix/client_confirmations_unix.ml +++ b/src/lib_client_base_unix/client_confirmations_unix.ml @@ -25,24 +25,32 @@ let wait_for_bootstrapped (ctxt : #Client_context.full) = let display = ref false in - Lwt.async begin fun () -> - Lwt_unix.sleep 0.3 >>= fun () -> - if not !display then - ctxt#answer "Waiting for the node to be bootstrapped before injection..." >>= fun () -> - display := true ; - Lwt.return_unit - else - Lwt.return_unit - end ; - Monitor_services.bootstrapped ctxt >>=? fun (stream, _stop) -> + Lwt.async (fun () -> + Lwt_unix.sleep 0.3 + >>= fun () -> + if not !display then ( + ctxt#answer + "Waiting for the node to be bootstrapped before injection..." + >>= fun () -> + display := true ; + Lwt.return_unit ) + else Lwt.return_unit) ; + Monitor_services.bootstrapped ctxt + >>=? 
fun (stream, _stop) -> Lwt_stream.iter_s (fun (hash, time) -> - if !display then - ctxt#message "Current head: %a (timestamp: %a, validation: %a)" - Block_hash.pp_short hash - Time.System.pp_hum (Time.System.of_protocol_exn time) - Time.System.pp_hum (Tezos_stdlib_unix.Systime_os.now ()) - else Lwt.return_unit) stream >>= fun () -> + if !display then + ctxt#message + "Current head: %a (timestamp: %a, validation: %a)" + Block_hash.pp_short + hash + Time.System.pp_hum + (Time.System.of_protocol_exn time) + Time.System.pp_hum + (Tezos_stdlib_unix.Systime_os.now ()) + else Lwt.return_unit) + stream + >>= fun () -> display := true ; - ctxt#answer "Node is bootstrapped, ready for injecting operations." >>= fun () -> - return_unit + ctxt#answer "Node is bootstrapped, ready for injecting operations." + >>= fun () -> return_unit diff --git a/src/lib_client_base_unix/client_confirmations_unix.mli b/src/lib_client_base_unix/client_confirmations_unix.mli index 0aad3255d6e13d306816c36b469942b92715975e..584b6ea2a4c0c255647447f7bc3f445a396ce6bc 100644 --- a/src/lib_client_base_unix/client_confirmations_unix.mli +++ b/src/lib_client_base_unix/client_confirmations_unix.mli @@ -23,6 +23,4 @@ (* *) (*****************************************************************************) -val wait_for_bootstrapped: - #Client_context.full -> unit tzresult Lwt.t - +val wait_for_bootstrapped : #Client_context.full -> unit tzresult Lwt.t diff --git a/src/lib_client_base_unix/client_context_unix.ml b/src/lib_client_base_unix/client_context_unix.ml index c4f01d02df918a308583ac93569c32eb1b5de682..72fbd4fc3871c41f30a9c7cc2224f6bb055955b4 100644 --- a/src/lib_client_base_unix/client_context_unix.ml +++ b/src/lib_client_base_unix/client_context_unix.ml @@ -24,132 +24,155 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make_semantic - (struct let name = "client.context.unix" end) +include Internal_event.Legacy_logging.Make_semantic (struct + let name = "client.context.unix" +end) -class unix_wallet ~base_dir ~password_filename : Client_context.wallet = object (self) - - method load_passwords = match password_filename with - | None -> None - | Some filename -> - if Sys.file_exists filename then - Some (Lwt_io.lines_of_file filename) - else +class unix_wallet ~base_dir ~password_filename : Client_context.wallet = + object (self) + method load_passwords = + match password_filename with + | None -> None + | Some filename -> + if Sys.file_exists filename then Some (Lwt_io.lines_of_file filename) + else None - method read_file path = - Lwt.catch - (fun () -> - Lwt_io.(with_file ~mode:Input path read) >>= fun content -> - return content) - (fun exn -> - failwith - "cannot read file (%s)" (Printexc.to_string exn)) - - method private filename alias_name = - Filename.concat - base_dir - (Str.(global_replace (regexp_string " ") "_" alias_name) ^ "s") - - method with_lock : type a. (unit -> a Lwt.t) -> a Lwt.t = - (fun f -> - let unlock fd = - let fd = Lwt_unix.unix_file_descr fd in - Unix.lockf fd Unix.F_ULOCK 0 ; - Unix.close fd in - let lock () = - Lwt_unix.openfile (Filename.concat base_dir "wallet_lock") - Lwt_unix.[ O_CREAT ; O_WRONLY ] 0o644 >>= fun fd -> - Lwt_unix.lockf fd Unix.F_LOCK 0 >>= fun () -> - Lwt.return (fd, (Lwt_unix.on_signal Sys.sigint - (fun _s -> - unlock fd ; - exit 0 (* exit code? 
*) ))) in - lock () >>= fun (fd, sh) -> - (* catch might be useless if f always uses the error monad *) - Lwt.catch f (function e -> Lwt.return (unlock fd ; raise e)) >>= fun res -> - Lwt.return (unlock fd) >>= fun () -> - Lwt_unix.disable_signal_handler sh ; - Lwt.return res) - - method load : type a. string -> default:a -> a Data_encoding.encoding -> a tzresult Lwt.t = - fun alias_name ~default encoding -> - let filename = self#filename alias_name in - if not (Sys.file_exists filename) then - return default - else - Lwt_utils_unix.Json.read_file filename - |> generic_trace - "could not read the %s alias file" alias_name >>=? fun json -> - match Data_encoding.Json.destruct encoding json with - | exception e -> - failwith "did not understand the %s alias file %s : %s" - alias_name filename (Printexc.to_string e) - | data -> - return data - - method write : - type a. string -> a -> a Data_encoding.encoding -> unit tzresult Lwt.t = - fun alias_name list encoding -> + method read_file path = Lwt.catch (fun () -> - Lwt_utils_unix.create_dir base_dir >>= fun () -> - let filename = self#filename alias_name in - let json = Data_encoding.Json.construct encoding list in - Lwt_utils_unix.Json.write_file filename json) - (fun exn -> Lwt.return (error_exn exn)) - |> generic_trace "could not write the %s alias file." alias_name -end - -class unix_prompter : Client_context.prompter = object - method prompt : type a. (a, string tzresult) Client_context.lwt_format -> a = - Format.kasprintf begin fun msg -> - print_string msg ; - let line = read_line () in - return line - end - - method prompt_password : type a. (a, MBytes.t tzresult) Client_context.lwt_format -> a = - Format.kasprintf begin fun msg -> - print_string msg ; - let line = Lwt_utils_unix.getpass () in - return (MBytes.of_string line) - end -end + Lwt_io.(with_file ~mode:Input path read) + >>= fun content -> return content) + (fun exn -> failwith "cannot read file (%s)" (Printexc.to_string exn)) + + method private filename alias_name = + Filename.concat + base_dir + (Str.(global_replace (regexp_string " ") "_" alias_name) ^ "s") + + method with_lock : type a. (unit -> a Lwt.t) -> a Lwt.t = + fun f -> + let unlock fd = + let fd = Lwt_unix.unix_file_descr fd in + Unix.lockf fd Unix.F_ULOCK 0 ; + Unix.close fd + in + let lock () = + Lwt_unix.openfile + (Filename.concat base_dir "wallet_lock") + Lwt_unix.[O_CREAT; O_WRONLY] + 0o644 + >>= fun fd -> + Lwt_unix.lockf fd Unix.F_LOCK 0 + >>= fun () -> + Lwt.return + ( fd, + Lwt_unix.on_signal Sys.sigint (fun _s -> + unlock fd ; exit 0 (* exit code? *)) ) + in + lock () + >>= fun (fd, sh) -> + (* catch might be useless if f always uses the error monad *) + Lwt.catch f (function e -> Lwt.return (unlock fd ; raise e)) + >>= fun res -> + Lwt.return (unlock fd) + >>= fun () -> + Lwt_unix.disable_signal_handler sh ; + Lwt.return res + + method load : type a. + string -> default:a -> a Data_encoding.encoding -> a tzresult Lwt.t = + fun alias_name ~default encoding -> + let filename = self#filename alias_name in + if not (Sys.file_exists filename) then return default + else + Lwt_utils_unix.Json.read_file filename + |> generic_trace "could not read the %s alias file" alias_name + >>=? fun json -> + match Data_encoding.Json.destruct encoding json with + | exception e -> + failwith + "did not understand the %s alias file %s : %s" + alias_name + filename + (Printexc.to_string e) + | data -> + return data + + method write : type a. 
+ string -> a -> a Data_encoding.encoding -> unit tzresult Lwt.t = + fun alias_name list encoding -> + Lwt.catch + (fun () -> + Lwt_utils_unix.create_dir base_dir + >>= fun () -> + let filename = self#filename alias_name in + let json = Data_encoding.Json.construct encoding list in + Lwt_utils_unix.Json.write_file filename json) + (fun exn -> Lwt.return (error_exn exn)) + |> generic_trace "could not write the %s alias file." alias_name + end + +class unix_prompter : Client_context.prompter = + object + method prompt : type a. (a, string tzresult) Client_context.lwt_format -> a + = + Format.kasprintf (fun msg -> + print_string msg ; + let line = read_line () in + return line) + + method prompt_password : type a. + (a, MBytes.t tzresult) Client_context.lwt_format -> a = + Format.kasprintf (fun msg -> + print_string msg ; + let line = Lwt_utils_unix.getpass () in + return (MBytes.of_string line)) + end class unix_logger ~base_dir : Client_context.printer = let startup = Format.asprintf "%a" Time.System.pp_hum (Systime_os.now ()) in - let log channel msg = match channel with + let log channel msg = + match channel with | "stdout" -> - print_endline msg ; - Lwt.return_unit + print_endline msg ; Lwt.return_unit | "stderr" -> - prerr_endline msg ; - Lwt.return_unit + prerr_endline msg ; Lwt.return_unit | log -> - let (//) = Filename.concat in - Lwt_utils_unix.create_dir (base_dir // "logs" // log) >>= fun () -> + let ( // ) = Filename.concat in + Lwt_utils_unix.create_dir (base_dir // "logs" // log) + >>= fun () -> Lwt_io.with_file - ~flags: Unix.[ O_APPEND ; O_CREAT ; O_WRONLY ] - ~mode: Lwt_io.Output + ~flags:Unix.[O_APPEND; O_CREAT; O_WRONLY] + ~mode:Lwt_io.Output (base_dir // "logs" // log // startup) - (fun chan -> Lwt_io.write chan msg) in + (fun chan -> Lwt_io.write chan msg) + in object inherit Client_context.simple_printer log end -class unix_ui : Client_context.ui = object - method sleep f = Lwt_unix.sleep f -end +class unix_ui : Client_context.ui = + object + method sleep f = Lwt_unix.sleep f + end -class unix_full ~base_dir ~chain ~block ~confirmations ~password_filename ~rpc_config : Client_context.full = +class unix_full ~base_dir ~chain ~block ~confirmations ~password_filename + ~rpc_config : Client_context.full = object inherit unix_logger ~base_dir + inherit unix_prompter + inherit unix_wallet ~base_dir ~password_filename + inherit RPC_client.http_ctxt rpc_config Media_type.all_media_types + inherit unix_ui + method chain = chain + method block = block + method confirmations = confirmations end diff --git a/src/lib_client_base_unix/client_context_unix.mli b/src/lib_client_base_unix/client_context_unix.mli index bcbc0fe99286ee3a5e74e68eff1ea02dfbc0451b..73c5688bc8dbcf040fdcd726e38c00c7cb12beb2 100644 --- a/src/lib_client_base_unix/client_context_unix.mli +++ b/src/lib_client_base_unix/client_context_unix.mli @@ -25,21 +25,19 @@ (*****************************************************************************) class unix_wallet : - base_dir:string -> - password_filename: string option -> - Client_context.wallet -class unix_prompter : - Client_context.prompter -class unix_logger : - base_dir:string -> - Client_context.printer -class unix_ui : - Client_context.ui + base_dir:string -> password_filename:string option -> Client_context.wallet + +class unix_prompter : Client_context.prompter + +class unix_logger : base_dir:string -> Client_context.printer + +class unix_ui : Client_context.ui + class unix_full : - base_dir:string -> - chain:Shell_services.chain -> - 
block:Shell_services.block -> - confirmations:int option -> - password_filename: string option -> - rpc_config:RPC_client.config -> - Client_context.full + base_dir:string + -> chain:Shell_services.chain + -> block:Shell_services.block + -> confirmations:int option + -> password_filename:string option + -> rpc_config:RPC_client.config + -> Client_context.full diff --git a/src/lib_client_base_unix/client_main_run.ml b/src/lib_client_base_unix/client_main_run.ml index 921f6634d0aebefe10866bcecebad088c5876531..f01d994da43699744d1c5e66e77704947a6bde32 100644 --- a/src/lib_client_base_unix/client_main_run.ml +++ b/src/lib_client_base_unix/client_main_run.ml @@ -30,34 +30,36 @@ open Client_context_unix let builtin_commands = let open Clic in - [ - command - ~desc: "List the protocol versions that this client understands." + [ command + ~desc:"List the protocol versions that this client understands." no_options - (fixed [ "list" ; "understood" ; "protocols" ]) + (fixed ["list"; "understood"; "protocols"]) (fun () (cctxt : #Client_context.full) -> - Lwt_list.iter_s - (fun (ver, _) -> cctxt#message "%a" Protocol_hash.pp_short ver) - (Client_commands.get_versions ()) >>= fun () -> - return_unit) ; - ] + Lwt_list.iter_s + (fun (ver, _) -> cctxt#message "%a" Protocol_hash.pp_short ver) + (Client_commands.get_versions ()) + >>= fun () -> return_unit) ] - -module type M = -sig +module type M = sig type t - val global_options : - (unit -> (t, Client_context_unix.unix_full) Clic.options) + + val global_options : unit -> (t, Client_context_unix.unix_full) Clic.options + val parse_config_args : #Tezos_client_base.Client_context.full -> string list -> (Client_config.parsed_config_args * string list) tzresult Lwt.t + val default_chain : Chain_services.chain - val default_block : [> `Head of int ] + + val default_block : [> `Head of int] + val default_base_dir : string + val other_registrations : (Client_config.Cfg_file.t -> (module Client_config.Remote_params) -> unit) - option + option + val clic_commands : base_dir:string -> config_commands:Tezos_client_base.Client_context.full Clic.command list -> @@ -65,217 +67,265 @@ sig other_commands:Tezos_client_base.Client_context.full Clic.command list -> require_auth:bool -> Tezos_client_base.Client_context.full Clic.command list + val logger : RPC_client.logger option end - (* Main (lwt) entry *) -let main - (module C: M) - ~select_commands - = +let main (module C : M) ~select_commands = let global_options = C.global_options () in let executable_name = Filename.basename Sys.executable_name in - let original_args, autocomplete = + let (original_args, autocomplete) = (* for shell aliases *) let rec move_autocomplete_token_upfront acc = function | "bash_autocomplete" :: prev_arg :: cur_arg :: script :: args -> let args = List.rev acc @ args in - args, Some (prev_arg, cur_arg, script) - | x :: rest -> move_autocomplete_token_upfront (x :: acc) rest - | [] -> List.rev acc, None in + (args, Some (prev_arg, cur_arg, script)) + | x :: rest -> + move_autocomplete_token_upfront (x :: acc) rest + | [] -> + (List.rev acc, None) + in match Array.to_list Sys.argv with - | _ :: args -> move_autocomplete_token_upfront [] args - | [] -> [], None in + | _ :: args -> + move_autocomplete_token_upfront [] args + | [] -> + ([], None) + in Random.self_init () ; - ignore Clic.(setup_formatter Format.std_formatter - (if Unix.isatty Unix.stdout then Ansi else Plain) Short) ; - ignore Clic.(setup_formatter Format.err_formatter - (if Unix.isatty Unix.stderr then Ansi else Plain) Short) ; - 
Internal_event_unix.init () >>= fun () -> - Lwt.catch begin fun () -> begin + ignore + Clic.( + setup_formatter + Format.std_formatter + (if Unix.isatty Unix.stdout then Ansi else Plain) + Short) ; + ignore + Clic.( + setup_formatter + Format.err_formatter + (if Unix.isatty Unix.stderr then Ansi else Plain) + Short) ; + Internal_event_unix.init () + >>= fun () -> + Lwt.catch + (fun () -> C.parse_config_args (new unix_full - ~chain:C.default_chain - ~block:C.default_block - ~confirmations:None - ~password_filename:None - ~base_dir:C.default_base_dir - ~rpc_config:RPC_client.default_config) + ~chain:C.default_chain + ~block:C.default_block + ~confirmations:None + ~password_filename:None + ~base_dir:C.default_base_dir + ~rpc_config:RPC_client.default_config) original_args - >>=? fun (parsed, remaining) -> - let parsed_config_file = parsed.Client_config.parsed_config_file - and parsed_args = parsed.Client_config.parsed_args - and config_commands = parsed.Client_config.config_commands in - let base_dir : string = match parsed.Client_config.base_dir with - | Some p -> p - | None -> match parsed_config_file with - | None -> C.default_base_dir - | Some p -> p.Client_config.Cfg_file.base_dir - and require_auth = parsed.Client_config.require_auth in - let rpc_config = - let rpc_config : RPC_client.config = match parsed_config_file with - | None -> RPC_client.default_config - | Some parsed_config_file -> - { - RPC_client.default_config with - host = parsed_config_file.Client_config.Cfg_file.node_addr ; - port = parsed_config_file.Client_config.Cfg_file.node_port ; - tls = parsed_config_file.Client_config.Cfg_file.tls ; - } in - match parsed_args with - | Some parsed_args -> - if parsed_args.Client_config.print_timings then - { rpc_config with - logger = RPC_client.timings_logger Format.err_formatter } - else if parsed_args.Client_config.log_requests then - { rpc_config with - logger = RPC_client.full_logger Format.err_formatter } - else rpc_config - | None -> - rpc_config - in - let client_config = - new unix_full - ~chain:(match parsed_args with - | Some p -> p.Client_config.chain - | None -> Client_config.default_chain) - ~block:(match parsed_args with - | Some p -> p.Client_config.block - | None -> Client_config.default_block) - ~confirmations:(match parsed_args with - | Some p -> p.Client_config.confirmations - | None -> None) - ~password_filename:(match parsed_args with - | Some p -> p.Client_config.password_filename - | None -> None) - ~base_dir - ~rpc_config:rpc_config in - let module Remote_params = struct - let authenticate pkhs payload = - Client_keys.list_keys client_config >>=? fun keys -> - match List.filter_map - (function - | (_, known_pkh, _, Some known_sk_uri) - when List.exists (fun pkh -> Signature.Public_key_hash.equal pkh known_pkh) pkhs -> - Some known_sk_uri - | _ -> None) - keys with - | sk_uri :: _ -> - Client_keys.sign client_config sk_uri payload - | [] -> failwith - "remote signer expects authentication signature, \ - but no authorized key was found in the wallet" - let logger = - (* overriding the logger we might already have with the one from + >>=? 
(fun (parsed, remaining) -> + let parsed_config_file = parsed.Client_config.parsed_config_file + and parsed_args = parsed.Client_config.parsed_args + and config_commands = parsed.Client_config.config_commands in + let base_dir : string = + match parsed.Client_config.base_dir with + | Some p -> + p + | None -> ( + match parsed_config_file with + | None -> + C.default_base_dir + | Some p -> + p.Client_config.Cfg_file.base_dir ) + and require_auth = parsed.Client_config.require_auth in + let rpc_config = + let rpc_config : RPC_client.config = + match parsed_config_file with + | None -> + RPC_client.default_config + | Some parsed_config_file -> + { RPC_client.default_config with + host = + parsed_config_file.Client_config.Cfg_file.node_addr; + port = + parsed_config_file.Client_config.Cfg_file.node_port; + tls = parsed_config_file.Client_config.Cfg_file.tls } + in + match parsed_args with + | Some parsed_args -> + if parsed_args.Client_config.print_timings then + { rpc_config with + logger = RPC_client.timings_logger Format.err_formatter + } + else if parsed_args.Client_config.log_requests then + { rpc_config with + logger = RPC_client.full_logger Format.err_formatter } + else rpc_config + | None -> + rpc_config + in + let client_config = + new unix_full + ~chain: + ( match parsed_args with + | Some p -> + p.Client_config.chain + | None -> + Client_config.default_chain ) + ~block: + ( match parsed_args with + | Some p -> + p.Client_config.block + | None -> + Client_config.default_block ) + ~confirmations: + ( match parsed_args with + | Some p -> + p.Client_config.confirmations + | None -> + None ) + ~password_filename: + ( match parsed_args with + | Some p -> + p.Client_config.password_filename + | None -> + None ) + ~base_dir + ~rpc_config + in + let module Remote_params = struct + let authenticate pkhs payload = + Client_keys.list_keys client_config + >>=? 
fun keys -> + match + List.filter_map + (function + | (_, known_pkh, _, Some known_sk_uri) + when List.exists + (fun pkh -> + Signature.Public_key_hash.equal pkh known_pkh) + pkhs -> + Some known_sk_uri + | _ -> + None) + keys + with + | sk_uri :: _ -> + Client_keys.sign client_config sk_uri payload + | [] -> + failwith + "remote signer expects authentication signature, but \ + no authorized key was found in the wallet" + + let logger = + (* overriding the logger we might already have with the one from module C *) - match C.logger with Some logger -> logger | None -> rpc_config.logger - end in - let module Http = Tezos_signer_backends.Http.Make(Remote_params) in - let module Https = Tezos_signer_backends.Https.Make(Remote_params) in - let module Socket = Tezos_signer_backends.Socket.Make(Remote_params) in - Client_keys.register_signer - (module Tezos_signer_backends.Encrypted.Make(struct - let cctxt = (client_config :> Client_context.prompter) - end)) ; - Client_keys.register_signer (module Tezos_signer_backends.Unencrypted) ; - Client_keys.register_signer - (module Tezos_signer_backends.Ledger.Signer_implementation) ; - Client_keys.register_signer (module Socket.Unix) ; - Client_keys.register_signer (module Socket.Tcp) ; - Client_keys.register_signer (module Http) ; - Client_keys.register_signer (module Https) ; - begin - match parsed_config_file with - | None -> () - | Some parsed_config_file -> - match C.other_registrations with - | Some r -> - r parsed_config_file (module Remote_params) - | None -> () - end ; - begin - (match parsed_args with - | Some parsed_args -> - select_commands (client_config :> RPC_client.http_ctxt) parsed_args - | None -> return_nil) - >>=? fun other_commands -> - let commands = - Clic.add_manual + match C.logger with + | Some logger -> + logger + | None -> + rpc_config.logger + end in + let module Http = Tezos_signer_backends.Http.Make (Remote_params) in + let module Https = Tezos_signer_backends.Https.Make (Remote_params) in + let module Socket = + Tezos_signer_backends.Socket.Make (Remote_params) in + Client_keys.register_signer + ( module Tezos_signer_backends.Encrypted.Make (struct + let cctxt = (client_config :> Client_context.prompter) + end) ) ; + Client_keys.register_signer + (module Tezos_signer_backends.Unencrypted) ; + Client_keys.register_signer + (module Tezos_signer_backends.Ledger.Signer_implementation) ; + Client_keys.register_signer (module Socket.Unix) ; + Client_keys.register_signer (module Socket.Tcp) ; + Client_keys.register_signer (module Http) ; + Client_keys.register_signer (module Https) ; + ( match parsed_config_file with + | None -> + () + | Some parsed_config_file -> ( + match C.other_registrations with + | Some r -> + r parsed_config_file (module Remote_params) + | None -> + () ) ) ; + ( match parsed_args with + | Some parsed_args -> + select_commands + (client_config :> RPC_client.http_ctxt) + parsed_args + | None -> + return_nil ) + >>=? fun other_commands -> + let commands = + Clic.add_manual + ~executable_name + ~global_options + (if Unix.isatty Unix.stdout then Clic.Ansi else Clic.Plain) + Format.std_formatter + (C.clic_commands + ~base_dir + ~config_commands + ~builtin_commands + ~other_commands + ~require_auth) + in + match autocomplete with + | Some (prev_arg, cur_arg, script) -> + Clic.autocompletion + ~script + ~cur_arg + ~prev_arg + ~args:original_args + ~global_options + commands + client_config + >>=? 
fun completions -> + List.iter print_endline completions ; + return_unit + | None -> + Clic.dispatch commands client_config remaining) + >>= function + | Ok () -> + Lwt.return 0 + | Error [Clic.Help command] -> + Clic.usage + Format.std_formatter ~executable_name ~global_options - (if Unix.isatty Unix.stdout then Clic.Ansi else Clic.Plain) - Format.std_formatter - (C.clic_commands - ~base_dir - ~config_commands - ~builtin_commands - ~other_commands - ~require_auth - ) - in - begin match autocomplete with - | Some (prev_arg, cur_arg, script) -> - Clic.autocompletion - ~script ~cur_arg ~prev_arg ~args:original_args ~global_options - commands client_config >>=? fun completions -> - List.iter print_endline completions ; - return_unit - | None -> - Clic.dispatch commands client_config remaining - end - end - end >>= function - | Ok () -> - Lwt.return 0 - | Error [ Clic.Help command ] -> - Clic.usage - Format.std_formatter - ~executable_name - ~global_options - (match command with None -> [] | Some c -> [ c ]) ; - Lwt.return 0 - | Error errs -> - Clic.pp_cli_errors - Format.err_formatter - ~executable_name - ~global_options - ~default:Error_monad.pp - errs ; - Lwt.return 1 - end begin function - | Client_commands.Version_not_found -> - Format.eprintf "@{<error>@{<title>Fatal error@}@} unknown protocol version.@." ; - Lwt.return 1 - | Failure message -> - Format.eprintf "@{<error>@{<title>Fatal error@}@}@.\ - \ @[<h 0>%a@]@." - Format.pp_print_text message ; - Lwt.return 1 - | exn -> - Format.printf "@{<error>@{<title>Fatal error@}@}@.\ - \ @[<h 0>%a@]@." - Format.pp_print_text (Printexc.to_string exn) ; - Lwt.return 1 - end >>= fun retcode -> + (match command with None -> [] | Some c -> [c]) ; + Lwt.return 0 + | Error errs -> + Clic.pp_cli_errors + Format.err_formatter + ~executable_name + ~global_options + ~default:Error_monad.pp + errs ; + Lwt.return 1) + (function + | Client_commands.Version_not_found -> + Format.eprintf + "@{<error>@{<title>Fatal error@}@} unknown protocol version.@." ; + Lwt.return 1 + | Failure message -> + Format.eprintf + "@{<error>@{<title>Fatal error@}@}@. @[<h 0>%a@]@." + Format.pp_print_text + message ; + Lwt.return 1 + | exn -> + Format.printf + "@{<error>@{<title>Fatal error@}@}@. @[<h 0>%a@]@." 
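+              (* The exception is rendered with [Printexc.to_string] below and
+                 re-flowed to the margin by [Format.pp_print_text]. *)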
+ Format.pp_print_text + (Printexc.to_string exn) ; + Lwt.return 1) + >>= fun retcode -> Format.pp_print_flush Format.err_formatter () ; Format.pp_print_flush Format.std_formatter () ; - Internal_event_unix.close () >>= fun () -> - Lwt.return retcode - + Internal_event_unix.close () >>= fun () -> Lwt.return retcode (* Where all the user friendliness starts *) -let run - (module M:M) +let run (module M : M) ~(select_commands : - (RPC_client.http_ctxt -> - Client_config.cli_args -> - Client_context.full Clic.command list tzresult Lwt.t)) - = - Pervasives.exit - (Lwt_main.run - (main - (module M) - ~select_commands - ) - ) + RPC_client.http_ctxt -> + Client_config.cli_args -> + Client_context.full Clic.command list tzresult Lwt.t) = + Pervasives.exit (Lwt_main.run (main (module M) ~select_commands)) diff --git a/src/lib_client_base_unix/client_main_run.mli b/src/lib_client_base_unix/client_main_run.mli index f8f0aeca3e18e3edbd6859a467f02f318603f8c6..39a2bd1c685277f8de47c16466ee079a0a0a9ed1 100644 --- a/src/lib_client_base_unix/client_main_run.mli +++ b/src/lib_client_base_unix/client_main_run.mli @@ -23,56 +23,62 @@ (* *) (*****************************************************************************) -module type M = -(* This module type lists the parameters you can give to the function [run] +module type M = (* This module type lists the parameters you can give to the function [run] defined below; most calls use and will use the default value for this module type, which is module [Client_config] (client_config.ml). Another instance of this module type is in main_signer.ml *) sig type t + val global_options : (* Global options for the CLI. The presence of (unit ->) is because of weak type variables. *) - unit -> (t, Client_context_unix.unix_full) Clic.options + unit -> + (t, Client_context_unix.unix_full) Clic.options + val parse_config_args : (* How to parse CLI arguments *) #Tezos_client_base.Client_context.full -> string list -> (Client_config.parsed_config_args * string list) tzresult Lwt.t + val default_chain : Chain_services.chain - val default_block : [> `Head of int ] + + val default_block : [> `Head of int] + val default_base_dir : (* You may use the default base directory in [Client_config] or define your own one. *) string + val other_registrations : (* You may give an **optional** function that will work on the configuration file and the remote parameters. *) (Client_config.Cfg_file.t -> (module Client_config.Remote_params) -> unit) - option + option + val clic_commands : - (* This function defines how you put together different types of + base_dir:(* This function defines how you put together different types of commands. Default (in [Client_config]) is to simply append the lists together. Arguments [base_dir] and [require_auth] are to be used if you need them, default (in [Client_config]) is to ignore them. *) - base_dir:string -> + string -> config_commands:Tezos_client_base.Client_context.full Clic.command list -> builtin_commands:Tezos_client_base.Client_context.full Clic.command list -> other_commands:Tezos_client_base.Client_context.full Clic.command list -> require_auth:bool -> Tezos_client_base.Client_context.full Clic.command list + val logger : (* Provide your own [logger] here if you need to override the logger that might come from elsewhere. Default (in [Client_config]) is [None], but [Main_signer] uses this overriding feature. 
*) RPC_client.logger option - end val run : (module M) -> - select_commands : - (RPC_client.http_ctxt -> - Client_config.cli_args -> - Client_context.full Clic.command list tzresult Lwt.t) -> + select_commands:(RPC_client.http_ctxt -> + Client_config.cli_args -> + Client_context.full Clic.command list tzresult Lwt.t) -> unit diff --git a/src/lib_client_commands/.ocamlformat b/src/lib_client_commands/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_client_commands/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_client_commands/client_admin_commands.ml b/src/lib_client_commands/client_admin_commands.ml index eb975a0c3ef7b970054efe7eb14133952a338718..d8688a9d5920faf4ad02dc4139097dcddb3a3516 100644 --- a/src/lib_client_commands/client_admin_commands.ml +++ b/src/lib_client_commands/client_admin_commands.ml @@ -25,56 +25,68 @@ let commands () = let open Clic in - let group = { name = "admin" ; - title = "Commands to perform privileged operations on the node" } in - [ - command ~group - ~desc: "Make the node forget its decision of rejecting blocks." + let group = + { name = "admin"; + title = "Commands to perform privileged operations on the node" } + in + [ command + ~group + ~desc:"Make the node forget its decision of rejecting blocks." no_options - (prefixes [ "unmark" ; "invalid" ] - @@ seq_of_param (Block_hash.param ~name:"block" ~desc:"blocks to remove from invalid list")) + ( prefixes ["unmark"; "invalid"] + @@ seq_of_param + (Block_hash.param + ~name:"block" + ~desc:"blocks to remove from invalid list") ) (fun () blocks (cctxt : #Client_context.full) -> - iter_s - (fun block -> - Shell_services.Invalid_blocks.delete cctxt block >>=? fun () -> - cctxt#message - "Block %a no longer marked invalid." - Block_hash.pp block >>= fun () -> - return_unit) - blocks) ; - - command ~group - ~desc: "Make the node forget every decision of rejecting blocks." + iter_s + (fun block -> + Shell_services.Invalid_blocks.delete cctxt block + >>=? fun () -> + cctxt#message + "Block %a no longer marked invalid." + Block_hash.pp + block + >>= fun () -> return_unit) + blocks); + command + ~group + ~desc:"Make the node forget every decision of rejecting blocks." no_options - (prefixes [ "unmark" ; "all" ; "invalid" ; "blocks" ] - @@ stop) + (prefixes ["unmark"; "all"; "invalid"; "blocks"] @@ stop) (fun () (cctxt : #Client_context.full) -> - Shell_services.Invalid_blocks.list cctxt () >>=? fun invalid_blocks -> - iter_s (fun { Chain_services.hash ; _ } -> - Shell_services.Invalid_blocks.delete cctxt hash >>=? fun () -> - cctxt#message - "Block %a no longer marked invalid." - Block_hash.pp_short hash >>= fun () -> - return_unit) - invalid_blocks) ; - - command ~group - ~desc: "Retrieve the current checkpoint and display it in a \ - format compatible with node argument `--checkpoint`." + Shell_services.Invalid_blocks.list cctxt () + >>=? fun invalid_blocks -> + iter_s + (fun {Chain_services.hash; _} -> + Shell_services.Invalid_blocks.delete cctxt hash + >>=? fun () -> + cctxt#message + "Block %a no longer marked invalid." 
+ Block_hash.pp_short + hash + >>= fun () -> return_unit) + invalid_blocks); + command + ~group + ~desc: + "Retrieve the current checkpoint and display it in a format \ + compatible with node argument `--checkpoint`." no_options - (fixed [ "show" ; "current" ; "checkpoint" ]) + (fixed ["show"; "current"; "checkpoint"]) (fun () (cctxt : #Client_context.full) -> - Shell_services.Chain.checkpoint cctxt ~chain:cctxt#chain () - >>=? fun (block_header, save_point, caboose, history_mode) -> - cctxt#message - "@[<v 0>Checkpoint: %s@,\ - Checkpoint level: %ld@,\ - History mode: %a@,\ - Save point level: %ld@,\ - Caboose level: %ld@]" - (Block_header.to_b58check block_header) - block_header.shell.level - History_mode.pp history_mode - save_point caboose >>= fun () -> - return ()) - ] + Shell_services.Chain.checkpoint cctxt ~chain:cctxt#chain () + >>=? fun (block_header, save_point, caboose, history_mode) -> + cctxt#message + "@[<v 0>Checkpoint: %s@,\ + Checkpoint level: %ld@,\ + History mode: %a@,\ + Save point level: %ld@,\ + Caboose level: %ld@]" + (Block_header.to_b58check block_header) + block_header.shell.level + History_mode.pp + history_mode + save_point + caboose + >>= fun () -> return ()) ] diff --git a/src/lib_client_commands/client_commands.ml b/src/lib_client_commands/client_commands.ml index 27aedc13343d4a099437a245bddb4d7557c2ebc4..b9fecff2d1c9d531b46eac2216f7129ae002bb43 100644 --- a/src/lib_client_commands/client_commands.ml +++ b/src/lib_client_commands/client_commands.ml @@ -26,24 +26,23 @@ open Client_context type command = full Clic.command -type network = [ `Mainnet | `Alphanet | `Zeronet | `Sandbox ] + +type network = [`Mainnet | `Alphanet | `Zeronet | `Sandbox] exception Version_not_found let versions = Protocol_hash.Table.create 7 let get_versions () = - Protocol_hash.Table.fold - (fun k c acc -> (k, c) :: acc) - versions - [] + Protocol_hash.Table.fold (fun k c acc -> (k, c) :: acc) versions [] let register name commands = let previous = try Protocol_hash.Table.find versions name - with Not_found -> (fun (_network : network option) -> ([] : command list)) in - Protocol_hash.Table.replace versions name - (fun (network : network option) -> (commands network @ previous network)) + with Not_found -> fun (_network : network option) -> ([] : command list) + in + Protocol_hash.Table.replace versions name (fun (network : network option) -> + commands network @ previous network) let commands_for_version version = try Protocol_hash.Table.find versions version diff --git a/src/lib_client_commands/client_commands.mli b/src/lib_client_commands/client_commands.mli index 4bf16b4f1618bc3a0fe89e1a6f13a703b44e9436..361acd889b3349a13cc90f322616dc520c413ef2 100644 --- a/src/lib_client_commands/client_commands.mli +++ b/src/lib_client_commands/client_commands.mli @@ -26,10 +26,14 @@ open Client_context type command = full Clic.command -type network = [ `Mainnet | `Alphanet | `Zeronet | `Sandbox ] + +type network = [`Mainnet | `Alphanet | `Zeronet | `Sandbox] exception Version_not_found -val register: Protocol_hash.t -> (network option -> command list) -> unit -val commands_for_version: Protocol_hash.t -> network option -> command list -val get_versions: unit -> (Protocol_hash.t * (network option -> command list)) list +val register : Protocol_hash.t -> (network option -> command list) -> unit + +val commands_for_version : Protocol_hash.t -> network option -> command list + +val get_versions : + unit -> (Protocol_hash.t * (network option -> command list)) list diff --git 
a/src/lib_client_commands/client_event_logging_commands.ml b/src/lib_client_commands/client_event_logging_commands.ml index b4ac658da12a8019a482ad7c4c193b1994fc187b..0d9bfa5b6d874062afbda328635af38228c9a9dd 100644 --- a/src/lib_client_commands/client_event_logging_commands.ml +++ b/src/lib_client_commands/client_event_logging_commands.ml @@ -24,8 +24,9 @@ (*****************************************************************************) let group = - Clic.{ name = "event-logging-framework" ; - title = "Commands to inspect the event-logging framework" } + Clic. + { name = "event-logging-framework"; + title = "Commands to inspect the event-logging framework" } let date_parameter option_name build = let open Clic in @@ -34,45 +35,49 @@ let date_parameter option_name build = try if String.length s <> 8 then problem "date should be `YYYYMMDD`" ; String.iteri - (fun idx -> function - | '0' .. '9' -> () - | other -> - problem "character %d is not a digit: '%c'." idx other) + (fun idx -> function '0' .. '9' -> () | other -> + problem "character %d is not a digit: '%c'." idx other) s ; let month = int_of_string (String.sub s 4 2) - 1 in if month < 0 then problem "The month cannot be '00'" ; if month > 11 then problem "The month cannot be more than '12'" ; let day = int_of_string (String.sub s 6 2) in - if day > 31 then problem "The month cannot be more than '31'" ; + if day > 31 then problem "The month cannot be more than '31'" ; let t = let tm = - Unix.{ - tm_sec = 0 ; - tm_min = 0 ; - tm_hour = 0 ; - tm_mday = day ; - tm_mon = month; - tm_year = int_of_string (String.sub s 0 4) - 1900; - tm_wday = 0; - tm_yday = 0; - tm_isdst = false } in - Unix.mktime tm |> fst in + Unix. + { tm_sec = 0; + tm_min = 0; + tm_hour = 0; + tm_mday = day; + tm_mon = month; + tm_year = int_of_string (String.sub s 0 4) - 1900; + tm_wday = 0; + tm_yday = 0; + tm_isdst = false } + in + Unix.mktime tm |> fst + in return (build t) with - | Invalid_argument e -> failwith "In `%s %S`, %s" option_name s e - | e -> failwith "Exn: %a" pp_exn e) + | Invalid_argument e -> + failwith "In `%s %S`, %s" option_name s e + | e -> + failwith "Exn: %a" pp_exn e) let flat_pp pp o = Format.( - asprintf "%a" (fun fmt () -> + asprintf + "%a" + (fun fmt () -> pp_set_margin fmt 2_000_000 ; - pp fmt o) ()) + pp fmt o) + ()) let commands () = let open Clic in let command ~desc = command ~group ~desc in - [ - command + [ command ~desc:"Query the events from an event sink." 
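+      (* [args7] bundles the seven optional arguments that follow into a
+         single tuple; the handler below destructures it as
+         (only_names, only_sections, since, until, as_json, dump_unknown,
+         scriptable).  The --since and --until values use the YYYYMMDD
+         format checked by [date_parameter]. *)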
(args7 (arg @@ -87,187 +92,208 @@ let commands () = ~long:"sections" ~placeholder:"LIST" (parameter (fun _ s -> - try return ( - String.split_on_char ',' s - |> List.map (function "_" -> None | other -> Some other)) + try + return + ( String.split_on_char ',' s + |> List.map (function "_" -> None | other -> Some other) + ) with _ -> failwith "List of sections cannot be parsed"))) (arg ~doc:"Filter out events before DATE" ~long:"since" ~placeholder:"DATE" - (date_parameter "--since" (fun s -> (`Date (`Ge, s))))) + (date_parameter "--since" (fun s -> `Date (`Ge, s)))) (arg ~doc:"Filter out events after DATE" ~long:"until" ~placeholder:"DATE" - (date_parameter "--until" (fun s -> (`Date (`Le, s))))) + (date_parameter "--until" (fun s -> `Date (`Le, s)))) (switch ~doc:"Display events as JSON instead of pretty-printing them" ~long:"as-json" ()) - (switch - ~doc:"Try to display unknown events" - ~long:"dump-unknown" - ()) - (Scriptable.clic_arg ()) - ) - (prefixes [ "query" ; "events" ; "from" ] - @@ (param - ~name:"Sink-Name" - ~desc:"The URI of the SINK to query" - (parameter (fun _ s -> - try return (Uri.of_string s) - with _ -> failwith "Uri cannot be parsed"))) - - @@ stop) - (fun - (only_names, only_sections, - since, until, as_json, dump_unknown, scriptable) - uri - (cctxt : #Client_context.full) -> + (switch ~doc:"Try to display unknown events" ~long:"dump-unknown" ()) + (Scriptable.clic_arg ())) + ( prefixes ["query"; "events"; "from"] + @@ param + ~name:"Sink-Name" + ~desc:"The URI of the SINK to query" + (parameter (fun _ s -> + try return (Uri.of_string s) + with _ -> failwith "Uri cannot be parsed")) + @@ stop ) + (fun ( only_names, + only_sections, + since, + until, + as_json, + dump_unknown, + scriptable ) + uri + (cctxt : #Client_context.full) -> let open Tezos_stdlib_unix in - begin match Uri.scheme uri with - | None | Some "unix-files" -> - let script_row kind date evname data () = - [kind; date; evname; data] in - Scriptable.output_for_human scriptable (fun () -> - cctxt#message "### Events" >>= fun () -> - return_unit) - >>=? fun () -> - let on_unknown = - if not dump_unknown then None else Some (fun path -> - Scriptable.output_row scriptable + match Uri.scheme uri with + | None | Some "unix-files" -> ( + let script_row kind date evname data () = + [kind; date; evname; data] + in + Scriptable.output_for_human scriptable (fun () -> + cctxt#message "### Events" >>= fun () -> return_unit) + >>=? 
fun () -> + let on_unknown = + if not dump_unknown then None + else + Some + (fun path -> + Scriptable.output_row + scriptable ~for_human:(fun () -> - cctxt#message "Unknown: %s" path - >>= fun () -> - Lwt_stream.iter_s - (fun line -> cctxt#message " |%s" line) - (Lwt_io.lines_of_file path) - >>= fun () -> - return_unit) + cctxt#message "Unknown: %s" path + >>= fun () -> + Lwt_stream.iter_s + (fun line -> cctxt#message " |%s" line) + (Lwt_io.lines_of_file path) + >>= fun () -> return_unit) ~for_script:(script_row "unknown-event" "-" "-" path)) - in - let time_query = - match since, until with - | None, None -> None - | Some a, None | None, Some a -> Some a - | Some a, Some b -> Some (`And (a, b)) in - File_event_sink.Query.fold ?only_names ?on_unknown ?only_sections - ?time_query uri ~init:() - ~f:(fun () ~time_stamp ev -> - let o = - Internal_event.Generic.explode_event ev in - let time_string time_value = - let open Unix in - let tm = gmtime time_value in - Printf.sprintf "%04d%02d%02d-%02d%02d%02d-%04d" - (1900 + tm.tm_year) - (tm.tm_mon + 1) tm.tm_mday - tm.tm_hour tm.tm_min tm.tm_sec - ((time_value -. floor time_value) - *. 10_000. |> int_of_float) + in + let time_query = + match (since, until) with + | (None, None) -> + None + | (Some a, None) | (None, Some a) -> + Some a + | (Some a, Some b) -> + Some (`And (a, b)) + in + File_event_sink.Query.fold + ?only_names + ?on_unknown + ?only_sections + ?time_query + uri + ~init:() + ~f:(fun () ~time_stamp ev -> + let o = Internal_event.Generic.explode_event ev in + let time_string time_value = + let open Unix in + let tm = gmtime time_value in + Printf.sprintf + "%04d%02d%02d-%02d%02d%02d-%04d" + (1900 + tm.tm_year) + (tm.tm_mon + 1) + tm.tm_mday + tm.tm_hour + tm.tm_min + tm.tm_sec + ( (time_value -. floor time_value) *. 10_000. + |> int_of_float ) + in + let pp fmt o = + if as_json then Data_encoding.Json.pp fmt o#json + else o#pp fmt () + in + Scriptable.output_row + scriptable + ~for_human:(fun () -> + cctxt#message + "@[<2>* [%s %s]@ %a@]" + (time_string time_stamp) + o#name + pp + o + >>= fun () -> return_unit) + ~for_script:(fun () -> + let text = flat_pp pp o in + script_row "event" (time_string time_stamp) o#name text ())) + >>=? function + | ([], ()) -> + return_unit + | (errors_and_warnings, ()) -> + let open Format in + Scriptable.output + scriptable + ~for_human:(fun () -> + cctxt#message + "### Some things were not perfect:@.@[<2>%a@]" + (pp_print_list + ~pp_sep:(fun fmt () -> fprintf fmt "@.") + (fun fmt item -> + fprintf + fmt + "* %a" + File_event_sink.Query.Report.pp + item)) + errors_and_warnings + >>= fun () -> return_unit) + ~for_script:(fun () -> + let make_row e = + let text = flat_pp File_event_sink.Query.Report.pp e in + let tag = + match e with + | `Error _ -> + "error" + | `Warning _ -> + "warning" + in + script_row tag "-" "-" text () in - let pp fmt o = - if as_json - then Data_encoding.Json.pp fmt o#json - else o#pp fmt () in - Scriptable.output_row scriptable - ~for_human:(fun () -> - cctxt#message "@[<2>* [%s %s]@ %a@]" - (time_string time_stamp) o#name pp o - >>= fun () -> - return_unit) - ~for_script:(fun () -> - let text = flat_pp pp o in - script_row "event" (time_string time_stamp) o#name text ())) - >>=? 
begin function - | ([], ()) -> return_unit - | (errors_and_warnings, ()) -> - let open Format in - Scriptable.output scriptable - ~for_human:(fun () -> - cctxt#message - "### Some things were not perfect:@.@[<2>%a@]" - (pp_print_list - ~pp_sep:(fun fmt () -> fprintf fmt "@.") - (fun fmt item -> - fprintf fmt "* %a" - File_event_sink.Query.Report.pp item)) - errors_and_warnings - >>= fun () -> - return_unit) - ~for_script:(fun () -> - let make_row e = - let text = flat_pp File_event_sink.Query.Report.pp e in - let tag = - match e with - | `Error _ -> "error" - | `Warning _ -> "warning" in - script_row tag "-" "-" text () - in - List.map make_row errors_and_warnings) - end - | Some other -> - cctxt#message "URI scheme %S not handled as of now." other - >>= fun () -> - return_unit - end - ) ; + List.map make_row errors_and_warnings) ) + | Some other -> + cctxt#message "URI scheme %S not handled as of now." other + >>= fun () -> return_unit); command - ~desc:"Display configuration/state information about the \ - internal-event logging framework." + ~desc: + "Display configuration/state information about the internal-event \ + logging framework." no_options - (prefixes [ "show" ; "event-logging" ] @@ stop) + (prefixes ["show"; "event-logging"] @@ stop) (fun () (cctxt : #Client_context.full) -> - let pp_event_definitions fmt schs = - let open Format in - pp_open_box fmt 0 ; - pp_print_list - ~pp_sep:(fun fmt () -> fprintf fmt "@;") - (fun fmt obj_schema -> - pp_open_box fmt 2 ; - fprintf fmt "* `%s`:@ " obj_schema#name ; - pp_print_text fmt obj_schema#doc ; - pp_close_box fmt ()) - fmt - schs; - pp_close_box fmt () - in - cctxt#message "Event logging framework:@.Sinks state:@ %a@.Events registered:@ %a" - Internal_event.All_sinks.pp_state () - pp_event_definitions Internal_event.( - All_definitions.get () |> List.map Generic.json_schema) - >>= fun () -> - return_unit - ) ; + let pp_event_definitions fmt schs = + let open Format in + pp_open_box fmt 0 ; + pp_print_list + ~pp_sep:(fun fmt () -> fprintf fmt "@;") + (fun fmt obj_schema -> + pp_open_box fmt 2 ; + fprintf fmt "* `%s`:@ " obj_schema#name ; + pp_print_text fmt obj_schema#doc ; + pp_close_box fmt ()) + fmt + schs ; + pp_close_box fmt () + in + cctxt#message + "Event logging framework:@.Sinks state:@ %a@.Events registered:@ %a" + Internal_event.All_sinks.pp_state + () + pp_event_definitions + Internal_event.( + All_definitions.get () |> List.map Generic.json_schema) + >>= fun () -> return_unit); command ~desc:"Output the JSON schema of an internal-event." 
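+      (* The handler looks the event up by name with [All_definitions.find],
+         pretty-prints (Generic.json_schema ev)#schema with [Json_schema.pp]
+         and writes the result to the given file path. *)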
no_options - (prefixes [ "output" ; "schema" ; "of" ] - @@ (param - ~name:"Event-Name" - ~desc:"Name of the event" - (parameter (fun _ s -> return s))) - @@ (prefix "to") - @@ (param - ~name:"File-path" - ~desc:"Path to a JSON file" - (parameter (fun _ s -> return s))) - @@ stop) + ( prefixes ["output"; "schema"; "of"] + @@ param + ~name:"Event-Name" + ~desc:"Name of the event" + (parameter (fun _ s -> return s)) + @@ prefix "to" + @@ param + ~name:"File-path" + ~desc:"Path to a JSON file" + (parameter (fun _ s -> return s)) + @@ stop ) (fun () event path (cctxt : #Client_context.full) -> - let open Internal_event in - match All_definitions.find ((=) event) with - | None -> - failwith "Event %S not found" event - | Some ev -> - let o = Generic.json_schema ev in - Lwt_io.with_file ~mode:Lwt_io.output path - (fun chan -> - let v = Format.asprintf "%a" Json_schema.pp o#schema in - Lwt_io.write chan v) - >>= fun () -> - cctxt#message "Wrote schema of %s to %s" event path - >>= fun () -> - return_unit - ) ; - ] \ No newline at end of file + let open Internal_event in + match All_definitions.find (( = ) event) with + | None -> + failwith "Event %S not found" event + | Some ev -> + let o = Generic.json_schema ev in + Lwt_io.with_file ~mode:Lwt_io.output path (fun chan -> + let v = Format.asprintf "%a" Json_schema.pp o#schema in + Lwt_io.write chan v) + >>= fun () -> + cctxt#message "Wrote schema of %s to %s" event path + >>= fun () -> return_unit) ] diff --git a/src/lib_client_commands/client_event_logging_commands.mli b/src/lib_client_commands/client_event_logging_commands.mli index d50cc7538fbf04ef0ead4fba3aa1e197ad1f423f..bf2cb2fa6a10aaebad81920f9818a24c4c835b74 100644 --- a/src/lib_client_commands/client_event_logging_commands.mli +++ b/src/lib_client_commands/client_event_logging_commands.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val commands: unit -> Client_commands.command list +val commands : unit -> Client_commands.command list diff --git a/src/lib_client_commands/client_helpers_commands.ml b/src/lib_client_commands/client_helpers_commands.ml index df34a75d622cd60147f8ca2204217d975d45454a..66d5eab6e4c9c545a9a3503db2116caaba0ab332 100644 --- a/src/lib_client_commands/client_helpers_commands.ml +++ b/src/lib_client_commands/client_helpers_commands.ml @@ -30,40 +30,49 @@ let unique_switch = ~doc:"Fail when there is more than one possible completion." () -let commands () = Clic.[ - command - ~desc: "Autocomplete a prefix of Base58Check-encoded hash.\n\ - This actually works only for blocks, operations, public \ - key and contract identifiers." - (args1 unique_switch) - (prefixes [ "complete" ] @@ - string - ~name: "prefix" - ~desc: "the prefix of the hash to complete" @@ - stop) - (fun unique prefix (cctxt : #Client_context.full) -> - Shell_services.Blocks.Helpers.complete - cctxt ~chain:cctxt#chain ~block:cctxt#block prefix >>=? fun completions -> - match completions with - | [] -> Pervasives.exit 3 - | _ :: _ :: _ when unique -> Pervasives.exit 3 - | completions -> - List.iter print_endline completions ; - return_unit) ; - command - ~desc: "Wait for the node to be bootstrapped." - no_options - (prefixes [ "bootstrapped" ] @@ - stop) - (fun () (cctxt : #Client_context.full) -> - Monitor_services.bootstrapped cctxt >>=? 
fun (stream, _) -> - Lwt_stream.iter_s - (fun (hash, time) -> - cctxt#message "Current head: %a (timestamp: %a, validation: %a)" - Block_hash.pp_short hash - Time.System.pp_hum (Time.System.of_protocol_exn time) - Time.System.pp_hum (Tezos_stdlib_unix.Systime_os.now ())) stream >>= fun () -> - cctxt#answer "Bootstrapped." >>= fun () -> - return_unit - ) - ] +let commands () = + Clic. + [ command + ~desc: + "Autocomplete a prefix of Base58Check-encoded hash.\n\ + This actually works only for blocks, operations, public key and \ + contract identifiers." + (args1 unique_switch) + ( prefixes ["complete"] + @@ string ~name:"prefix" ~desc:"the prefix of the hash to complete" + @@ stop ) + (fun unique prefix (cctxt : #Client_context.full) -> + Shell_services.Blocks.Helpers.complete + cctxt + ~chain:cctxt#chain + ~block:cctxt#block + prefix + >>=? fun completions -> + match completions with + | [] -> + Pervasives.exit 3 + | _ :: _ :: _ when unique -> + Pervasives.exit 3 + | completions -> + List.iter print_endline completions ; + return_unit); + command + ~desc:"Wait for the node to be bootstrapped." + no_options + (prefixes ["bootstrapped"] @@ stop) + (fun () (cctxt : #Client_context.full) -> + Monitor_services.bootstrapped cctxt + >>=? fun (stream, _) -> + Lwt_stream.iter_s + (fun (hash, time) -> + cctxt#message + "Current head: %a (timestamp: %a, validation: %a)" + Block_hash.pp_short + hash + Time.System.pp_hum + (Time.System.of_protocol_exn time) + Time.System.pp_hum + (Tezos_stdlib_unix.Systime_os.now ())) + stream + >>= fun () -> cctxt#answer "Bootstrapped." >>= fun () -> return_unit) + ] diff --git a/src/lib_client_commands/client_helpers_commands.mli b/src/lib_client_commands/client_helpers_commands.mli index d50cc7538fbf04ef0ead4fba3aa1e197ad1f423f..bf2cb2fa6a10aaebad81920f9818a24c4c835b74 100644 --- a/src/lib_client_commands/client_helpers_commands.mli +++ b/src/lib_client_commands/client_helpers_commands.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val commands: unit -> Client_commands.command list +val commands : unit -> Client_commands.command list diff --git a/src/lib_client_commands/client_keys_commands.ml b/src/lib_client_commands/client_keys_commands.ml index c9128cca58ad1b4e98949d13f8d5ed2737df9893..c3b62280b5bf6c1ac60e68f5dfc2740b79792b08 100644 --- a/src/lib_client_commands/client_keys_commands.ml +++ b/src/lib_client_commands/client_keys_commands.ml @@ -26,7 +26,7 @@ open Client_keys let group = - { Clic.name = "keys" ; + { Clic.name = "keys"; title = "Commands for managing the wallet of cryptographic keys" } let sig_algo_arg = @@ -35,14 +35,17 @@ let sig_algo_arg = ~long:"sig" ~short:'s' ~placeholder:"ed25519|secp256k1|p256" - ~default: "ed25519" + ~default:"ed25519" (Signature.algo_param ()) -let gen_keys_containing - ?(encrypted = false) ?(prefix=false) ?(force=false) +let gen_keys_containing ?(encrypted = false) ?(prefix = false) ?(force = false) ~containing ~name (cctxt : #Client_context.io_wallet) = let unrepresentable = - List.filter (fun s -> not @@ Base58.Alphabet.all_in_alphabet Base58.Alphabet.bitcoin s) containing in + List.filter + (fun s -> + not @@ Base58.Alphabet.all_in_alphabet Base58.Alphabet.bitcoin s) + containing + in let good_initial_char = "KLMNPQRSTUVWXYZabcdefghi" in let bad_initial_char = "123456789ABCDEFGHJjkmnopqrstuvwxyz" in match unrepresentable with @@ -55,428 +58,549 @@ let gen_keys_containing ~pp_sep:(fun ppf () -> Format.fprintf ppf ", ") (fun ppf s -> Format.fprintf ppf "'%s'" 
s)) unrepresentable - Base58.Alphabet.pp Base58.Alphabet.bitcoin + Base58.Alphabet.pp + Base58.Alphabet.bitcoin good_initial_char - | [] -> + | [] -> ( let unrepresentable = - List.filter (fun s -> prefix && - String.contains bad_initial_char s.[0]) containing in + List.filter + (fun s -> prefix && String.contains bad_initial_char s.[0]) + containing + in match unrepresentable with | _ :: _ -> cctxt#error - "@[<v 0>The following words don't respect the first character restriction: %a.@,\ + "@[<v 0>The following words don't respect the first character \ + restriction: %a.@,\ Valid characters: %a@,\ Extra restriction for the first character: %s@]" (Format.pp_print_list ~pp_sep:(fun ppf () -> Format.fprintf ppf ", ") (fun ppf s -> Format.fprintf ppf "'%s'" s)) unrepresentable - Base58.Alphabet.pp Base58.Alphabet.bitcoin + Base58.Alphabet.pp + Base58.Alphabet.bitcoin good_initial_char | [] -> - Public_key_hash.mem cctxt name >>=? fun name_exists -> - if name_exists && not force - then + Public_key_hash.mem cctxt name + >>=? fun name_exists -> + if name_exists && not force then cctxt#warning - "Key for name '%s' already exists. Use --force to update." name >>= return + "Key for name '%s' already exists. Use --force to update." + name + >>= return else - begin - cctxt#warning "This process uses a brute force search and \ - may take a long time to find a key." >>= fun () -> - let matches = - if prefix then - let containing_tz1 = List.map ((^) "tz1") containing in - (fun key -> List.exists - (fun containing -> - String.sub key 0 (String.length containing) = containing) - containing_tz1) + cctxt#warning + "This process uses a brute force search and may take a long \ + time to find a key." + >>= fun () -> + let matches = + if prefix then + let containing_tz1 = List.map (( ^ ) "tz1") containing in + fun key -> + List.exists + (fun containing -> + String.sub key 0 (String.length containing) = containing) + containing_tz1 + else + let re = Re.Str.regexp (String.concat "\\|" containing) in + fun key -> + try + ignore (Re.Str.search_forward re key 0) ; + true + with Not_found -> false + in + let rec loop attempts = + let (public_key_hash, public_key, secret_key) = + Signature.generate_key () + in + let hash = + Signature.Public_key_hash.to_b58check + @@ Signature.Public_key.hash public_key + in + if matches hash then + let pk_uri = + Tezos_signer_backends.Unencrypted.make_pk public_key + in + ( if encrypted then + Tezos_signer_backends.Encrypted.encrypt cctxt secret_key else - let re = Re.Str.regexp (String.concat "\\|" containing) in - (fun key -> try ignore (Re.Str.search_forward re key 0); true - with Not_found -> false) in - let rec loop attempts = - let public_key_hash, public_key, secret_key = - Signature.generate_key () in - let hash = Signature.Public_key_hash.to_b58check @@ - Signature.Public_key.hash public_key in - if matches hash - then - let pk_uri = Tezos_signer_backends.Unencrypted.make_pk public_key in - begin - if encrypted then - Tezos_signer_backends.Encrypted.encrypt cctxt secret_key - else - return (Tezos_signer_backends.Unencrypted.make_sk secret_key) - end >>=? fun sk_uri -> - register_key cctxt ~force - (public_key_hash, pk_uri, sk_uri) name >>=? fun () -> - return hash - else begin if attempts mod 25_000 = 0 - then - cctxt#message "Tried %d keys without finding a match" attempts - else Lwt.return_unit end >>= fun () -> - loop (attempts + 1) in - loop 1 >>=? fun key_hash -> - cctxt#message - "Generated '%s' under the name '%s'." 
key_hash name >>= fun () -> - return_unit - end + return (Tezos_signer_backends.Unencrypted.make_sk secret_key) + ) + >>=? fun sk_uri -> + register_key + cctxt + ~force + (public_key_hash, pk_uri, sk_uri) + name + >>=? fun () -> return hash + else + ( if attempts mod 25_000 = 0 then + cctxt#message + "Tried %d keys without finding a match" + attempts + else Lwt.return_unit ) + >>= fun () -> loop (attempts + 1) + in + loop 1 + >>=? fun key_hash -> + cctxt#message "Generated '%s' under the name '%s'." key_hash name + >>= fun () -> return_unit ) let rec input_fundraiser_params (cctxt : #Client_context.io_wallet) = - let rec get_boolean_answer (cctxt : #Client_context.io_wallet) ~default ~msg = + let rec get_boolean_answer (cctxt : #Client_context.io_wallet) ~default ~msg + = let prompt = if default then "(Y/n/q)" else "(y/N/q)" in - cctxt#prompt "%s %s: " msg prompt >>=? fun gen -> - match default, String.lowercase_ascii gen with - | default, "" -> return default - | _, "y" -> return_true - | _, "n" -> return_false - | _, "q" -> failwith "Exit by user request." - | _ -> get_boolean_answer cctxt ~msg ~default in - cctxt#prompt "Enter the e-mail used for the paper wallet: " >>=? fun email -> + cctxt#prompt "%s %s: " msg prompt + >>=? fun gen -> + match (default, String.lowercase_ascii gen) with + | (default, "") -> + return default + | (_, "y") -> + return_true + | (_, "n") -> + return_false + | (_, "q") -> + failwith "Exit by user request." + | _ -> + get_boolean_answer cctxt ~msg ~default + in + cctxt#prompt "Enter the e-mail used for the paper wallet: " + >>=? fun email -> let rec loop_words acc i = - if i > 14 then return (List.rev acc) else - cctxt#prompt_password "Enter word %d: " i >>=? fun word -> + if i > 14 then return (List.rev acc) + else + cctxt#prompt_password "Enter word %d: " i + >>=? fun word -> match Bip39.index_of_word (MBytes.to_string word) with - | None -> loop_words acc i - | Some wordidx -> loop_words (wordidx :: acc) (succ i) in - loop_words [] 0 >>=? fun words -> + | None -> + loop_words acc i + | Some wordidx -> + loop_words (wordidx :: acc) (succ i) + in + loop_words [] 0 + >>=? fun words -> match Bip39.of_indices words with - | None -> assert false - | Some t -> - cctxt#prompt_password - "Enter the password used for the paper wallet: " >>=? fun password -> + | None -> + assert false + | Some t -> ( + cctxt#prompt_password "Enter the password used for the paper wallet: " + >>=? fun password -> (* TODO: unicode normalization (NFKD)... *) - let passphrase = MBytes.(concat "" [of_string email ; password]) in + let passphrase = MBytes.(concat "" [of_string email; password]) in let sk = Bip39.to_seed ~passphrase t in let sk = MBytes.sub sk 0 32 in let sk : Signature.Secret_key.t = Ed25519 - (Data_encoding.Binary.of_bytes_exn Ed25519.Secret_key.encoding sk) in + (Data_encoding.Binary.of_bytes_exn Ed25519.Secret_key.encoding sk) + in let pk = Signature.Secret_key.to_public_key sk in let pkh = Signature.Public_key.hash pk in - let msg = Format.asprintf + let msg = + Format.asprintf "Your public Tezos address is %a is that correct?" - Signature.Public_key_hash.pp pkh in - get_boolean_answer cctxt ~msg ~default:true >>=? function - | true -> return sk - | false -> input_fundraiser_params cctxt + Signature.Public_key_hash.pp + pkh + in + get_boolean_answer cctxt ~msg ~default:true + >>=? 
function + | true -> return sk | false -> input_fundraiser_params cctxt ) let commands version : Client_context.full Clic.command list = let open Clic in let encrypted_switch () = - if List.exists - (fun (scheme, _) -> - scheme = Tezos_signer_backends.Unencrypted.scheme) - (Client_keys.registered_signers ()) then - Clic.switch - ~long:"encrypted" - ~doc:("Encrypt the key on-disk") () - else - Clic.constant true in + if + List.exists + (fun (scheme, _) -> scheme = Tezos_signer_backends.Unencrypted.scheme) + (Client_keys.registered_signers ()) + then Clic.switch ~long:"encrypted" ~doc:"Encrypt the key on-disk" () + else Clic.constant true + in let show_private_switch = - switch - ~long:"show-secret" - ~short:'S' - ~doc:"show the private key" () in - [ - command ~group - ~desc: "List supported signing schemes.\n\ - Signing schemes are identifiers for signer modules: the \ - built-in signing routines, a hardware wallet, an \ - external agent, etc.\n\ - Each signer has its own format for describing secret \ - keys, such a raw secret key for the default \ - `unencrypted` scheme, the path on a hardware security \ - module, an alias for an external agent, etc.\n\ - This command gives the list of signer modules that this \ - version of the tezos client supports." + switch ~long:"show-secret" ~short:'S' ~doc:"show the private key" () + in + [ command + ~group + ~desc: + "List supported signing schemes.\n\ + Signing schemes are identifiers for signer modules: the built-in \ + signing routines, a hardware wallet, an external agent, etc.\n\ + Each signer has its own format for describing secret keys, such a \ + raw secret key for the default `unencrypted` scheme, the path on a \ + hardware security module, an alias for an external agent, etc.\n\ + This command gives the list of signer modules that this version of \ + the tezos client supports." no_options - (fixed [ "list" ; "signing" ; "schemes" ]) + (fixed ["list"; "signing"; "schemes"]) (fun () (cctxt : Client_context.full) -> - let signers = - List.sort - (fun (ka, _) (kb, _) -> String.compare ka kb) - (registered_signers ()) in - Lwt_list.iter_s - (fun (n, (module S : SIGNER)) -> - cctxt#message "@[<v 2>Scheme `%s`: %s@,@[<hov 0>%a@]@]" - n S.title Format.pp_print_text S.description) - signers >>= return) ; - - begin match version with - | Some `Mainnet -> - command ~group ~desc: "Generate a pair of keys." - (args2 (Secret_key.force_switch ()) sig_algo_arg) - (prefixes [ "gen" ; "keys" ] - @@ Secret_key.fresh_alias_param - @@ stop) - (fun (force, algo) name (cctxt : Client_context.full) -> - Secret_key.of_fresh cctxt force name >>=? fun name -> - let (pkh, pk, sk) = Signature.generate_key ~algo () in - let pk_uri = Tezos_signer_backends.Unencrypted.make_pk pk in - Tezos_signer_backends.Encrypted.encrypt cctxt sk >>=? fun sk_uri -> - register_key cctxt ~force (pkh, pk_uri, sk_uri) name) - | _ -> - command ~group ~desc: "Generate a pair of keys." - (args3 (Secret_key.force_switch ()) sig_algo_arg (encrypted_switch ())) - (prefixes [ "gen" ; "keys" ] - @@ Secret_key.fresh_alias_param - @@ stop) - (fun (force, algo, encrypted) name (cctxt : Client_context.full) -> - Secret_key.of_fresh cctxt force name >>=? fun name -> - let (pkh, pk, sk) = Signature.generate_key ~algo () in - let pk_uri = Tezos_signer_backends.Unencrypted.make_pk pk in - begin - if encrypted then - Tezos_signer_backends.Encrypted.encrypt cctxt sk - else - return (Tezos_signer_backends.Unencrypted.make_sk sk) - end >>=? 
fun sk_uri -> - register_key cctxt ~force (pkh, pk_uri, sk_uri) name) - end ; - - begin match version with - | Some `Mainnet -> - command ~group ~desc: "Generate keys including the given string." - (args2 - (switch - ~long:"prefix" - ~short:'P' - ~doc:"the key must begin with tz1[word]" - ()) - (force_switch ())) - (prefixes [ "gen" ; "vanity" ; "keys" ] - @@ Public_key_hash.fresh_alias_param - @@ prefix "matching" - @@ (seq_of_param @@ string ~name:"words" ~desc:"string key must contain one of these words")) - (fun (prefix, force) name containing (cctxt : Client_context.full) -> - Public_key_hash.of_fresh cctxt force name >>=? fun name -> - gen_keys_containing ~encrypted:true ~force ~prefix ~containing ~name cctxt) - | _ -> - command ~group ~desc: "Generate keys including the given string." - (args3 - (switch - ~long:"prefix" - ~short:'P' - ~doc:"the key must begin with tz1[word]" - ()) - (force_switch ()) - (encrypted_switch ())) - (prefixes [ "gen" ; "vanity" ; "keys" ] - @@ Public_key_hash.fresh_alias_param - @@ prefix "matching" - @@ (seq_of_param @@ string ~name:"words" ~desc:"string key must contain one of these words")) - (fun (prefix, force, encrypted) name containing (cctxt : Client_context.full) -> - Public_key_hash.of_fresh cctxt force name >>=? fun name -> - gen_keys_containing ~encrypted ~force ~prefix ~containing ~name cctxt) - end ; - - command ~group ~desc: "Encrypt an unencrypted secret key." + let signers = + List.sort + (fun (ka, _) (kb, _) -> String.compare ka kb) + (registered_signers ()) + in + Lwt_list.iter_s + (fun (n, (module S : SIGNER)) -> + cctxt#message + "@[<v 2>Scheme `%s`: %s@,@[<hov 0>%a@]@]" + n + S.title + Format.pp_print_text + S.description) + signers + >>= return); + ( match version with + | Some `Mainnet -> + command + ~group + ~desc:"Generate a pair of keys." + (args2 (Secret_key.force_switch ()) sig_algo_arg) + (prefixes ["gen"; "keys"] @@ Secret_key.fresh_alias_param @@ stop) + (fun (force, algo) name (cctxt : Client_context.full) -> + Secret_key.of_fresh cctxt force name + >>=? fun name -> + let (pkh, pk, sk) = Signature.generate_key ~algo () in + let pk_uri = Tezos_signer_backends.Unencrypted.make_pk pk in + Tezos_signer_backends.Encrypted.encrypt cctxt sk + >>=? fun sk_uri -> + register_key cctxt ~force (pkh, pk_uri, sk_uri) name) + | _ -> + command + ~group + ~desc:"Generate a pair of keys." + (args3 + (Secret_key.force_switch ()) + sig_algo_arg + (encrypted_switch ())) + (prefixes ["gen"; "keys"] @@ Secret_key.fresh_alias_param @@ stop) + (fun (force, algo, encrypted) name (cctxt : Client_context.full) -> + Secret_key.of_fresh cctxt force name + >>=? fun name -> + let (pkh, pk, sk) = Signature.generate_key ~algo () in + let pk_uri = Tezos_signer_backends.Unencrypted.make_pk pk in + ( if encrypted then Tezos_signer_backends.Encrypted.encrypt cctxt sk + else return (Tezos_signer_backends.Unencrypted.make_sk sk) ) + >>=? fun sk_uri -> + register_key cctxt ~force (pkh, pk_uri, sk_uri) name) ); + ( match version with + | Some `Mainnet -> + command + ~group + ~desc:"Generate keys including the given string." + (args2 + (switch + ~long:"prefix" + ~short:'P' + ~doc:"the key must begin with tz1[word]" + ()) + (force_switch ())) + ( prefixes ["gen"; "vanity"; "keys"] + @@ Public_key_hash.fresh_alias_param @@ prefix "matching" + @@ seq_of_param + @@ string + ~name:"words" + ~desc:"string key must contain one of these words" ) + (fun (prefix, force) name containing (cctxt : Client_context.full) -> + Public_key_hash.of_fresh cctxt force name + >>=? 
fun name -> + gen_keys_containing + ~encrypted:true + ~force + ~prefix + ~containing + ~name + cctxt) + | _ -> + command + ~group + ~desc:"Generate keys including the given string." + (args3 + (switch + ~long:"prefix" + ~short:'P' + ~doc:"the key must begin with tz1[word]" + ()) + (force_switch ()) + (encrypted_switch ())) + ( prefixes ["gen"; "vanity"; "keys"] + @@ Public_key_hash.fresh_alias_param @@ prefix "matching" + @@ seq_of_param + @@ string + ~name:"words" + ~desc:"string key must contain one of these words" ) + (fun (prefix, force, encrypted) + name + containing + (cctxt : Client_context.full) -> + Public_key_hash.of_fresh cctxt force name + >>=? fun name -> + gen_keys_containing + ~encrypted + ~force + ~prefix + ~containing + ~name + cctxt) ); + command + ~group + ~desc:"Encrypt an unencrypted secret key." no_options - (prefixes [ "encrypt" ; "secret" ; "key" ] - @@ stop) + (prefixes ["encrypt"; "secret"; "key"] @@ stop) (fun () (cctxt : Client_context.full) -> - cctxt#prompt_password "Enter unencrypted secret key: " >>=? fun sk_uri -> - let sk_uri = Uri.of_string (MBytes.to_string sk_uri) in - begin match Uri.scheme sk_uri with - | None | Some "unencrypted" -> return_unit - | _ -> failwith "This command can only be used with the \"unencrypted\" scheme" - end >>=? fun () -> - Lwt.return (Signature.Secret_key.of_b58check (Uri.path sk_uri)) >>=? fun sk -> - Tezos_signer_backends.Encrypted.encrypt cctxt sk >>=? fun sk_uri -> - cctxt#message "Encrypted secret key %a" Uri.pp_hum (sk_uri :> Uri.t) >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Add a secret key to the wallet." + cctxt#prompt_password "Enter unencrypted secret key: " + >>=? fun sk_uri -> + let sk_uri = Uri.of_string (MBytes.to_string sk_uri) in + ( match Uri.scheme sk_uri with + | None | Some "unencrypted" -> + return_unit + | _ -> + failwith + "This command can only be used with the \"unencrypted\" scheme" + ) + >>=? fun () -> + Lwt.return (Signature.Secret_key.of_b58check (Uri.path sk_uri)) + >>=? fun sk -> + Tezos_signer_backends.Encrypted.encrypt cctxt sk + >>=? fun sk_uri -> + cctxt#message "Encrypted secret key %a" Uri.pp_hum (sk_uri :> Uri.t) + >>= fun () -> return_unit); + command + ~group + ~desc:"Add a secret key to the wallet." (args1 (Secret_key.force_switch ())) - (prefix "import" - @@ prefixes [ "secret" ; "key" ] - @@ Secret_key.fresh_alias_param - @@ Client_keys.sk_uri_param - @@ stop) + ( prefix "import" + @@ prefixes ["secret"; "key"] + @@ Secret_key.fresh_alias_param @@ Client_keys.sk_uri_param @@ stop ) (fun force name sk_uri (cctxt : Client_context.full) -> - Secret_key.of_fresh cctxt force name >>=? fun name -> - Client_keys.neuterize sk_uri >>=? fun pk_uri -> - begin - Public_key.find_opt cctxt name >>=? function - | None -> return_unit - | Some (pk_uri_found, _) -> - fail_unless (pk_uri = pk_uri_found || force) - (failure - "public and secret keys '%s' don't correspond, \ - please don't use --force" name) - end >>=? fun () -> - Client_keys.public_key_hash ~interactive:(cctxt:>Client_context.io_wallet) pk_uri - >>=? fun (pkh, public_key) -> - cctxt#message - "Tezos address added: %a" - Signature.Public_key_hash.pp pkh >>= fun () -> - register_key cctxt ~force (pkh, pk_uri, sk_uri) ?public_key name) ; - ] @ - (if version <> (Some `Mainnet) then [] else [ - command ~group ~desc: "Add a fundraiser secret key to the wallet." 
- (args1 (Secret_key.force_switch ())) - (prefix "import" - @@ prefixes [ "fundraiser" ; "secret" ; "key" ] - @@ Secret_key.fresh_alias_param - @@ stop) - (fun force name (cctxt : Client_context.full) -> - Secret_key.of_fresh cctxt force name >>=? fun name -> - input_fundraiser_params cctxt >>=? fun sk -> - Tezos_signer_backends.Encrypted.encrypt cctxt sk >>=? fun sk_uri -> - Client_keys.neuterize sk_uri >>=? fun pk_uri -> - begin - Public_key.find_opt cctxt name >>=? function - | None -> return_unit - | Some (pk_uri_found, _) -> - fail_unless (pk_uri = pk_uri_found || force) - (failure - "public and secret keys '%s' don't correspond, \ - please don't use --force" name) - end >>=? fun () -> - Client_keys.public_key_hash pk_uri >>=? fun (pkh, _public_key) -> - register_key cctxt ~force (pkh, pk_uri, sk_uri) name) ; - ]) @ - [ - command ~group ~desc: "Add a public key to the wallet." - (args1 (Public_key.force_switch ())) - (prefix "import" - @@ prefixes [ "public" ; "key" ] - @@ Public_key.fresh_alias_param - @@ Client_keys.pk_uri_param - @@ stop) - (fun force name pk_uri (cctxt : Client_context.full) -> - Public_key.of_fresh cctxt force name >>=? fun name -> - Client_keys.public_key_hash pk_uri >>=? fun (pkh, public_key) -> - Public_key_hash.add ~force cctxt name pkh >>=? fun () -> - cctxt#message - "Tezos address added: %a" - Signature.Public_key_hash.pp pkh >>= fun () -> - Public_key.add ~force cctxt name (pk_uri, public_key)) ; - - command ~group ~desc: "Add an address to the wallet." - (args1 (Public_key.force_switch ())) - (prefixes [ "add" ; "address" ] - @@ Public_key_hash.fresh_alias_param - @@ Public_key_hash.source_param - @@ stop) - (fun force name hash cctxt -> - Public_key_hash.of_fresh cctxt force name >>=? fun name -> - Public_key_hash.add ~force cctxt name hash) ; - - command ~group ~desc: "List all addresses and associated keys." - no_options - (fixed [ "list" ; "known" ; "addresses" ]) - (fun () (cctxt : #Client_context.full) -> - list_keys cctxt >>=? fun l -> - iter_s begin fun (name, pkh, pk, sk) -> - Public_key_hash.to_source pkh >>=? fun v -> - begin match pk, sk with - | None, None -> - cctxt#message "%s: %s" name v - | _, Some uri -> - let scheme = - Option.unopt ~default:"unencrypted" @@ - Uri.scheme (uri : sk_uri :> Uri.t) in - cctxt#message "%s: %s (%s sk known)" name v scheme - | Some _, _ -> - cctxt#message "%s: %s (pk known)" name v - end >>= fun () -> return_unit - end l) ; - - command ~group ~desc: "Show the keys associated with an implicit account." - (args1 show_private_switch) - (prefixes [ "show" ; "address"] - @@ Public_key_hash.alias_param - @@ stop) - (fun show_private (name, _) (cctxt : #Client_context.full) -> - alias_keys cctxt name >>=? fun key_info -> - match key_info with - | None -> - cctxt#message "No keys found for address" >>= fun () -> - return_unit - | Some (pkh, pk, skloc) -> - cctxt#message "Hash: %a" - Signature.Public_key_hash.pp pkh >>= fun () -> - match pk with - | None -> return_unit - | Some pk -> - cctxt#message "Public Key: %a" - Signature.Public_key.pp pk >>= fun () -> - if show_private then - match skloc with - | None -> return_unit - | Some skloc -> - Secret_key.to_source skloc >>=? fun skloc -> - cctxt#message "Secret Key: %s" skloc >>= fun () -> + Secret_key.of_fresh cctxt force name + >>=? fun name -> + Client_keys.neuterize sk_uri + >>=? fun pk_uri -> + Public_key.find_opt cctxt name + >>=? 
(function + | None -> + return_unit + | Some (pk_uri_found, _) -> + fail_unless + (pk_uri = pk_uri_found || force) + (failure + "public and secret keys '%s' don't correspond, please \ + don't use --force" + name)) + >>=? fun () -> + Client_keys.public_key_hash + ~interactive:(cctxt :> Client_context.io_wallet) + pk_uri + >>=? fun (pkh, public_key) -> + cctxt#message + "Tezos address added: %a" + Signature.Public_key_hash.pp + pkh + >>= fun () -> + register_key cctxt ~force (pkh, pk_uri, sk_uri) ?public_key name) ] + @ ( if version <> Some `Mainnet then [] + else + [ command + ~group + ~desc:"Add a fundraiser secret key to the wallet." + (args1 (Secret_key.force_switch ())) + ( prefix "import" + @@ prefixes ["fundraiser"; "secret"; "key"] + @@ Secret_key.fresh_alias_param @@ stop ) + (fun force name (cctxt : Client_context.full) -> + Secret_key.of_fresh cctxt force name + >>=? fun name -> + input_fundraiser_params cctxt + >>=? fun sk -> + Tezos_signer_backends.Encrypted.encrypt cctxt sk + >>=? fun sk_uri -> + Client_keys.neuterize sk_uri + >>=? fun pk_uri -> + Public_key.find_opt cctxt name + >>=? (function + | None -> return_unit - else - return_unit) ; - - command ~group ~desc: "Forget one address." - (args1 (Clic.switch - ~long:"force" ~short:'f' - ~doc:"delete associated keys when present" ())) - (prefixes [ "forget" ; "address"] - @@ Public_key_hash.alias_param - @@ stop) - (fun force (name, _pkh) (cctxt : Client_context.full) -> - Secret_key.mem cctxt name >>=? fun has_secret_key -> - Public_key.mem cctxt name >>=? fun has_public_key -> - fail_when (not force && (has_secret_key || has_public_key)) - (failure "secret or public key present for %s, \ - use --force to delete" name) >>=? fun () -> - Secret_key.del cctxt name >>=? fun () -> - Public_key.del cctxt name >>=? fun () -> - Public_key_hash.del cctxt name) ; - - command ~group ~desc: "Forget the entire wallet of keys." - (args1 (Clic.switch - ~long:"force" ~short:'f' - ~doc:"you got to use the force for that" ())) - (fixed [ "forget" ; "all" ; "keys" ]) - (fun force (cctxt : Client_context.full) -> - fail_unless force - (failure "this can only be used with option --force") >>=? fun () -> - Public_key.set cctxt [] >>=? fun () -> - Secret_key.set cctxt [] >>=? fun () -> - Public_key_hash.set cctxt []) ; - - command ~group ~desc: "Compute deterministic nonce." - no_options - (prefixes [ "generate" ; "nonce"; "for" ] - @@ Public_key_hash.alias_param - @@ prefixes [ "from" ] - @@ string - ~name: "data" - ~desc: "string from which to deterministically generate the nonce" - @@ stop) - (fun () (name, _pkh) data (cctxt : Client_context.full) -> - let data = MBytes.of_string data in - Secret_key.mem cctxt name >>=? fun sk_present -> - fail_unless sk_present - (failure "secret key not present for %s" name) >>=? fun () -> - Secret_key.find cctxt name >>=? fun sk_uri -> - Client_keys.deterministic_nonce sk_uri data >>=? fun nonce -> - cctxt#message "%a" MBytes.pp_hex nonce >>= fun () -> return_unit) ; - - command ~group ~desc: "Compute deterministic nonce hash." - no_options - (prefixes [ "generate" ; "nonce"; "hash"; "for" ] - @@ Public_key_hash.alias_param - @@ prefixes [ "from" ] - @@ string - ~name: "data" - ~desc: "string from which to deterministically generate the nonce hash" - @@ stop) - (fun () (name, _pkh) data (cctxt : Client_context.full) -> - let data = MBytes.of_string data in - Secret_key.mem cctxt name >>=? fun sk_present -> - fail_unless sk_present - (failure "secret key not present for %s" name) >>=? 
fun () -> - Secret_key.find cctxt name >>=? fun sk_uri -> - Client_keys.deterministic_nonce_hash sk_uri data >>=? fun nonce_hash -> - cctxt#message "%a" MBytes.pp_hex nonce_hash >>= fun () -> return_unit) ; - - ] + | Some (pk_uri_found, _) -> + fail_unless + (pk_uri = pk_uri_found || force) + (failure + "public and secret keys '%s' don't correspond, \ + please don't use --force" + name)) + >>=? fun () -> + Client_keys.public_key_hash pk_uri + >>=? fun (pkh, _public_key) -> + register_key cctxt ~force (pkh, pk_uri, sk_uri) name) ] ) + @ [ command + ~group + ~desc:"Add a public key to the wallet." + (args1 (Public_key.force_switch ())) + ( prefix "import" + @@ prefixes ["public"; "key"] + @@ Public_key.fresh_alias_param @@ Client_keys.pk_uri_param @@ stop ) + (fun force name pk_uri (cctxt : Client_context.full) -> + Public_key.of_fresh cctxt force name + >>=? fun name -> + Client_keys.public_key_hash pk_uri + >>=? fun (pkh, public_key) -> + Public_key_hash.add ~force cctxt name pkh + >>=? fun () -> + cctxt#message + "Tezos address added: %a" + Signature.Public_key_hash.pp + pkh + >>= fun () -> Public_key.add ~force cctxt name (pk_uri, public_key)); + command + ~group + ~desc:"Add an address to the wallet." + (args1 (Public_key.force_switch ())) + ( prefixes ["add"; "address"] + @@ Public_key_hash.fresh_alias_param @@ Public_key_hash.source_param + @@ stop ) + (fun force name hash cctxt -> + Public_key_hash.of_fresh cctxt force name + >>=? fun name -> Public_key_hash.add ~force cctxt name hash); + command + ~group + ~desc:"List all addresses and associated keys." + no_options + (fixed ["list"; "known"; "addresses"]) + (fun () (cctxt : #Client_context.full) -> + list_keys cctxt + >>=? fun l -> + iter_s + (fun (name, pkh, pk, sk) -> + Public_key_hash.to_source pkh + >>=? fun v -> + ( match (pk, sk) with + | (None, None) -> + cctxt#message "%s: %s" name v + | (_, Some uri) -> + let scheme = + Option.unopt ~default:"unencrypted" + @@ Uri.scheme (uri : sk_uri :> Uri.t) + in + cctxt#message "%s: %s (%s sk known)" name v scheme + | (Some _, _) -> + cctxt#message "%s: %s (pk known)" name v ) + >>= fun () -> return_unit) + l); + command + ~group + ~desc:"Show the keys associated with an implicit account." + (args1 show_private_switch) + (prefixes ["show"; "address"] @@ Public_key_hash.alias_param @@ stop) + (fun show_private (name, _) (cctxt : #Client_context.full) -> + alias_keys cctxt name + >>=? fun key_info -> + match key_info with + | None -> + cctxt#message "No keys found for address" + >>= fun () -> return_unit + | Some (pkh, pk, skloc) -> ( + cctxt#message "Hash: %a" Signature.Public_key_hash.pp pkh + >>= fun () -> + match pk with + | None -> + return_unit + | Some pk -> + cctxt#message "Public Key: %a" Signature.Public_key.pp pk + >>= fun () -> + if show_private then + match skloc with + | None -> + return_unit + | Some skloc -> + Secret_key.to_source skloc + >>=? fun skloc -> + cctxt#message "Secret Key: %s" skloc + >>= fun () -> return_unit + else return_unit )); + command + ~group + ~desc:"Forget one address." + (args1 + (Clic.switch + ~long:"force" + ~short:'f' + ~doc:"delete associated keys when present" + ())) + (prefixes ["forget"; "address"] @@ Public_key_hash.alias_param @@ stop) + (fun force (name, _pkh) (cctxt : Client_context.full) -> + Secret_key.mem cctxt name + >>=? fun has_secret_key -> + Public_key.mem cctxt name + >>=? 
fun has_public_key -> + fail_when + ((not force) && (has_secret_key || has_public_key)) + (failure + "secret or public key present for %s, use --force to delete" + name) + >>=? fun () -> + Secret_key.del cctxt name + >>=? fun () -> + Public_key.del cctxt name + >>=? fun () -> Public_key_hash.del cctxt name); + command + ~group + ~desc:"Forget the entire wallet of keys." + (args1 + (Clic.switch + ~long:"force" + ~short:'f' + ~doc:"you got to use the force for that" + ())) + (fixed ["forget"; "all"; "keys"]) + (fun force (cctxt : Client_context.full) -> + fail_unless + force + (failure "this can only be used with option --force") + >>=? fun () -> + Public_key.set cctxt [] + >>=? fun () -> + Secret_key.set cctxt [] >>=? fun () -> Public_key_hash.set cctxt []); + command + ~group + ~desc:"Compute deterministic nonce." + no_options + ( prefixes ["generate"; "nonce"; "for"] + @@ Public_key_hash.alias_param + @@ prefixes ["from"] + @@ string + ~name:"data" + ~desc:"string from which to deterministically generate the nonce" + @@ stop ) + (fun () (name, _pkh) data (cctxt : Client_context.full) -> + let data = MBytes.of_string data in + Secret_key.mem cctxt name + >>=? fun sk_present -> + fail_unless sk_present (failure "secret key not present for %s" name) + >>=? fun () -> + Secret_key.find cctxt name + >>=? fun sk_uri -> + Client_keys.deterministic_nonce sk_uri data + >>=? fun nonce -> + cctxt#message "%a" MBytes.pp_hex nonce >>= fun () -> return_unit); + command + ~group + ~desc:"Compute deterministic nonce hash." + no_options + ( prefixes ["generate"; "nonce"; "hash"; "for"] + @@ Public_key_hash.alias_param + @@ prefixes ["from"] + @@ string + ~name:"data" + ~desc: + "string from which to deterministically generate the nonce hash" + @@ stop ) + (fun () (name, _pkh) data (cctxt : Client_context.full) -> + let data = MBytes.of_string data in + Secret_key.mem cctxt name + >>=? fun sk_present -> + fail_unless sk_present (failure "secret key not present for %s" name) + >>=? fun () -> + Secret_key.find cctxt name + >>=? fun sk_uri -> + Client_keys.deterministic_nonce_hash sk_uri data + >>=? 
fun nonce_hash -> + cctxt#message "%a" MBytes.pp_hex nonce_hash >>= fun () -> return_unit) + ] diff --git a/src/lib_client_commands/client_keys_commands.mli b/src/lib_client_commands/client_keys_commands.mli index afc1120481c46c9bf0c87502ef8a93f1be9e6764..65212a47abb661eaa2b69d7b583303182b41c440 100644 --- a/src/lib_client_commands/client_keys_commands.mli +++ b/src/lib_client_commands/client_keys_commands.mli @@ -23,6 +23,6 @@ (* *) (*****************************************************************************) -val commands: +val commands : [`Zeronet | `Alphanet | `Mainnet | `Sandbox] option -> Client_context.full Clic.command list diff --git a/src/lib_client_commands/client_p2p_commands.ml b/src/lib_client_commands/client_p2p_commands.ml index 08099a4df6ede2398018c285c11bfc6b59a795ec..d258dd1c7bddfe968074a9aed8bec78208c235ef 100644 --- a/src/lib_client_commands/client_p2p_commands.ml +++ b/src/lib_client_commands/client_p2p_commands.ml @@ -24,233 +24,267 @@ (*****************************************************************************) let group = - { Clic.name = "p2p" ; + { Clic.name = "p2p"; title = "Commands for monitoring and controlling p2p-layer state" } -let pp_connection_info ppf conn = P2p_connection.Info.pp (fun _ _ -> ()) ppf conn +let pp_connection_info ppf conn = + P2p_connection.Info.pp (fun _ _ -> ()) ppf conn let addr_parameter = let open Clic in - param ~name:"address" + param + ~name:"address" ~desc:"<IPv4>:PORT or <IPV6>:PORT address (PORT defaults to 9732)." (parameter (fun _ x -> return (P2p_point.Id.of_string_exn x))) let commands () = let open Clic in - [ - command ~group ~desc: "show global network status" + [ command + ~group + ~desc:"show global network status" no_options - (prefixes ["p2p" ; "stat"] stop) begin fun () (cctxt : #Client_context.full) -> - Shell_services.P2p.stat cctxt >>=? fun stat -> - Shell_services.P2p.Connections.list cctxt >>=? fun conns -> - Shell_services.P2p.Peers.list cctxt >>=? fun peers -> - Shell_services.P2p.Points.list cctxt >>=? fun points -> - cctxt#message "GLOBAL STATS" >>= fun () -> - cctxt#message " %a" P2p_stat.pp stat >>= fun () -> - cctxt#message "CONNECTIONS" >>= fun () -> - let incoming, outgoing = - List.partition (fun c -> c.P2p_connection.Info.incoming) conns in - Lwt_list.iter_s begin fun conn -> - cctxt#message " %a" pp_connection_info conn - end incoming >>= fun () -> - Lwt_list.iter_s begin fun conn -> - cctxt#message " %a" pp_connection_info conn - end outgoing >>= fun () -> - cctxt#message "KNOWN PEERS" >>= fun () -> - Lwt_list.iter_s begin fun (p, pi) -> - cctxt#message " %a %.0f %a %a %s" - P2p_peer.State.pp_digram pi.P2p_peer.Info.state - pi.score - P2p_peer.Id.pp p - P2p_stat.pp pi.stat - (if pi.trusted then "★" else " ") - end peers >>= fun () -> - cctxt#message "KNOWN POINTS" >>= fun () -> - Lwt_list.iter_s begin fun (p, pi) -> - match pi.P2p_point.Info.state with - | Running peer_id -> - cctxt#message " %a %a %a %s" - P2p_point.State.pp_digram pi.state - P2p_point.Id.pp p - P2p_peer.Id.pp peer_id - (if pi.trusted then "★" else " ") - | _ -> - match pi.last_seen with - | Some (peer_id, ts) -> - cctxt#message " %a %a (last seen: %a %a) %s" - P2p_point.State.pp_digram pi.state - P2p_point.Id.pp p - P2p_peer.Id.pp peer_id - Time.System.pp_hum ts - (if pi.trusted then "★" else " ") - | None -> - cctxt#message " %a %a %s" - P2p_point.State.pp_digram pi.state - P2p_point.Id.pp p + (prefixes ["p2p"; "stat"] stop) + (fun () (cctxt : #Client_context.full) -> + Shell_services.P2p.stat cctxt + >>=? 
fun stat -> + Shell_services.P2p.Connections.list cctxt + >>=? fun conns -> + Shell_services.P2p.Peers.list cctxt + >>=? fun peers -> + Shell_services.P2p.Points.list cctxt + >>=? fun points -> + cctxt#message "GLOBAL STATS" + >>= fun () -> + cctxt#message " %a" P2p_stat.pp stat + >>= fun () -> + cctxt#message "CONNECTIONS" + >>= fun () -> + let (incoming, outgoing) = + List.partition (fun c -> c.P2p_connection.Info.incoming) conns + in + Lwt_list.iter_s + (fun conn -> cctxt#message " %a" pp_connection_info conn) + incoming + >>= fun () -> + Lwt_list.iter_s + (fun conn -> cctxt#message " %a" pp_connection_info conn) + outgoing + >>= fun () -> + cctxt#message "KNOWN PEERS" + >>= fun () -> + Lwt_list.iter_s + (fun (p, pi) -> + cctxt#message + " %a %.0f %a %a %s" + P2p_peer.State.pp_digram + pi.P2p_peer.Info.state + pi.score + P2p_peer.Id.pp + p + P2p_stat.pp + pi.stat + (if pi.trusted then "★" else " ")) + peers + >>= fun () -> + cctxt#message "KNOWN POINTS" + >>= fun () -> + Lwt_list.iter_s + (fun (p, pi) -> + match pi.P2p_point.Info.state with + | Running peer_id -> + cctxt#message + " %a %a %a %s" + P2p_point.State.pp_digram + pi.state + P2p_point.Id.pp + p + P2p_peer.Id.pp + peer_id (if pi.trusted then "★" else " ") - end points >>= fun () -> - return_unit - end ; - - command ~group ~desc: "Connect to a new point." + | _ -> ( + match pi.last_seen with + | Some (peer_id, ts) -> + cctxt#message + " %a %a (last seen: %a %a) %s" + P2p_point.State.pp_digram + pi.state + P2p_point.Id.pp + p + P2p_peer.Id.pp + peer_id + Time.System.pp_hum + ts + (if pi.trusted then "★" else " ") + | None -> + cctxt#message + " %a %a %s" + P2p_point.State.pp_digram + pi.state + P2p_point.Id.pp + p + (if pi.trusted then "★" else " ") )) + points + >>= fun () -> return_unit); + command + ~group + ~desc:"Connect to a new point." no_options - (prefixes [ "connect" ; "address" ] - @@ addr_parameter - @@ stop) + (prefixes ["connect"; "address"] @@ addr_parameter @@ stop) (fun () (address, port) (cctxt : #Client_context.full) -> - let timeout = Time.System.Span.of_seconds_exn 10. in - P2p_services.connect cctxt ~timeout (address, port) >>=? fun () -> - cctxt#message "Connection to %a:%d established." P2p_addr.pp address port >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Kick a peer." + let timeout = Time.System.Span.of_seconds_exn 10. in + P2p_services.connect cctxt ~timeout (address, port) + >>=? fun () -> + cctxt#message + "Connection to %a:%d established." + P2p_addr.pp + address + port + >>= fun () -> return_unit); + command + ~group + ~desc:"Kick a peer." no_options - (prefixes [ "kick" ; "peer" ] - @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" - @@ stop) + ( prefixes ["kick"; "peer"] + @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" + @@ stop ) (fun () peer (cctxt : #Client_context.full) -> - P2p_services.Connections.kick cctxt peer >>=? fun () -> - cctxt#message "Connection to %a interrupted." P2p_peer.Id.pp peer >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Add an IP address and all its ports to the \ - blacklist and kicks it. Remove the address \ - from the whitelist if it was previously in \ - it." + P2p_services.Connections.kick cctxt peer + >>=? fun () -> + cctxt#message "Connection to %a interrupted." P2p_peer.Id.pp peer + >>= fun () -> return_unit); + command + ~group + ~desc: + "Add an IP address and all its ports to the blacklist and kicks it. \ + Remove the address from the whitelist if it was previously in it." 
no_options - (prefixes [ "ban" ; "address" ] - @@ addr_parameter - @@ stop) + (prefixes ["ban"; "address"] @@ addr_parameter @@ stop) (fun () (address, _port) (cctxt : #Client_context.full) -> - P2p_services.Points.ban cctxt (address, 0) >>=? fun () -> - cctxt#message "Address %a:* is now banned." P2p_addr.pp address >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Remove an IP address and all its ports \ - from the blacklist." + P2p_services.Points.ban cctxt (address, 0) + >>=? fun () -> + cctxt#message "Address %a:* is now banned." P2p_addr.pp address + >>= fun () -> return_unit); + command + ~group + ~desc:"Remove an IP address and all its ports from the blacklist." no_options - (prefixes [ "unban" ; "address" ] - @@ addr_parameter - @@ stop) + (prefixes ["unban"; "address"] @@ addr_parameter @@ stop) (fun () (address, _port) (cctxt : #Client_context.full) -> - P2p_services.Points.unban cctxt (address, 0) >>=? fun () -> - cctxt#message "Address %a:* is now unbanned." P2p_addr.pp address >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Add an IP address to the whitelist. Remove \ - the address from the blacklist if it was \ - previously in it." + P2p_services.Points.unban cctxt (address, 0) + >>=? fun () -> + cctxt#message "Address %a:* is now unbanned." P2p_addr.pp address + >>= fun () -> return_unit); + command + ~group + ~desc: + "Add an IP address to the whitelist. Remove the address from the \ + blacklist if it was previously in it." no_options - (prefixes [ "trust" ; "address" ] - @@ addr_parameter - @@ stop) + (prefixes ["trust"; "address"] @@ addr_parameter @@ stop) (fun () (address, port) (cctxt : #Client_context.full) -> - P2p_services.Points.trust cctxt (address, port) >>=? fun () -> - cctxt#message "Address %a:%d is now trusted." - P2p_addr.pp address port >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Removes an IP address from the whitelist." + P2p_services.Points.trust cctxt (address, port) + >>=? fun () -> + cctxt#message "Address %a:%d is now trusted." P2p_addr.pp address port + >>= fun () -> return_unit); + command + ~group + ~desc:"Removes an IP address from the whitelist." no_options - (prefixes [ "untrust" ; "address" ] - @@ addr_parameter - @@ stop) + (prefixes ["untrust"; "address"] @@ addr_parameter @@ stop) (fun () (address, port) (cctxt : #Client_context.full) -> - P2p_services.Points.untrust cctxt (address, port) >>=? fun () -> - cctxt#message "Address %a:%d is now untrusted." - P2p_addr.pp address port >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Check if an IP address is banned." + P2p_services.Points.untrust cctxt (address, port) + >>=? fun () -> + cctxt#message + "Address %a:%d is now untrusted." + P2p_addr.pp + address + port + >>= fun () -> return_unit); + command + ~group + ~desc:"Check if an IP address is banned." no_options - (prefixes [ "is" ; "address" ; "banned" ] - @@ addr_parameter - @@ stop) + (prefixes ["is"; "address"; "banned"] @@ addr_parameter @@ stop) (fun () (address, port) (cctxt : #Client_context.full) -> - P2p_services.Points.banned cctxt (address, port) >>=? fun banned -> - cctxt#message - "The given ip address is %s" - (if banned then "banned" else "not banned") >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Check if a peer ID is banned." + P2p_services.Points.banned cctxt (address, port) + >>=? 
fun banned -> + cctxt#message + "The given ip address is %s" + (if banned then "banned" else "not banned") + >>= fun () -> return_unit); + command + ~group + ~desc:"Check if a peer ID is banned." no_options - (prefixes [ "is" ; "peer" ; "banned" ] - @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" - @@ stop) + ( prefixes ["is"; "peer"; "banned"] + @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" + @@ stop ) (fun () peer (cctxt : #Client_context.full) -> - P2p_services.Peers.banned cctxt peer >>=? fun banned -> - cctxt#message - "The given peer ID is %s" - (if banned then "banned" else "not banned") >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Add a peer ID to the blacklist and kicks \ - it. Remove the peer ID from the blacklist \ - if was previously in it." + P2p_services.Peers.banned cctxt peer + >>=? fun banned -> + cctxt#message + "The given peer ID is %s" + (if banned then "banned" else "not banned") + >>= fun () -> return_unit); + command + ~group + ~desc: + "Add a peer ID to the blacklist and kicks it. Remove the peer ID from \ + the blacklist if was previously in it." no_options - (prefixes [ "ban" ; "peer" ] - @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" - @@ stop) + ( prefixes ["ban"; "peer"] + @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" + @@ stop ) (fun () peer (cctxt : #Client_context.full) -> - P2p_services.Peers.ban cctxt peer >>=? fun () -> - cctxt#message "The peer %a is now banned." - P2p_peer.Id.pp_short peer >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Removes a peer ID from the blacklist." + P2p_services.Peers.ban cctxt peer + >>=? fun () -> + cctxt#message "The peer %a is now banned." P2p_peer.Id.pp_short peer + >>= fun () -> return_unit); + command + ~group + ~desc:"Removes a peer ID from the blacklist." no_options - (prefixes [ "unban" ; "peer" ] - @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" - @@ stop) + ( prefixes ["unban"; "peer"] + @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" + @@ stop ) (fun () peer (cctxt : #Client_context.full) -> - P2p_services.Peers.unban cctxt peer >>=? fun () -> - cctxt#message "The peer %a is now unbanned." - P2p_peer.Id.pp_short peer >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Add a peer ID to the whitelist. Remove the \ - peer ID from the blacklist if it was \ - previously in it." + P2p_services.Peers.unban cctxt peer + >>=? fun () -> + cctxt#message "The peer %a is now unbanned." P2p_peer.Id.pp_short peer + >>= fun () -> return_unit); + command + ~group + ~desc: + "Add a peer ID to the whitelist. Remove the peer ID from the \ + blacklist if it was previously in it." no_options - (prefixes [ "trust" ; "peer" ] - @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" - @@ stop) + ( prefixes ["trust"; "peer"] + @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" + @@ stop ) (fun () peer (cctxt : #Client_context.full) -> - P2p_services.Peers.trust cctxt peer >>=? fun () -> - cctxt#message "The peer %a is now trusted." - P2p_peer.Id.pp_short peer >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Remove a peer ID from the whitelist." + P2p_services.Peers.trust cctxt peer + >>=? fun () -> + cctxt#message "The peer %a is now trusted." P2p_peer.Id.pp_short peer + >>= fun () -> return_unit); + command + ~group + ~desc:"Remove a peer ID from the whitelist." 
no_options - (prefixes [ "untrust" ; "peer" ] - @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" - @@ stop) + ( prefixes ["untrust"; "peer"] + @@ P2p_peer.Id.param ~name:"peer" ~desc:"peer network identity" + @@ stop ) (fun () peer (cctxt : #Client_context.full) -> - P2p_services.Peers.untrust cctxt peer >>=? fun () -> - cctxt#message "The peer %a is now untrusted." - P2p_peer.Id.pp_short peer >>= fun () -> - return_unit - ) ; - - command ~group ~desc: "Clear all access control rules." + P2p_services.Peers.untrust cctxt peer + >>=? fun () -> + cctxt#message "The peer %a is now untrusted." P2p_peer.Id.pp_short peer + >>= fun () -> return_unit); + command + ~group + ~desc:"Clear all access control rules." no_options - (prefixes [ "clear" ; "acls" ] @@ stop) + (prefixes ["clear"; "acls"] @@ stop) (fun () (cctxt : #Client_context.full) -> - P2p_services.ACL.clear cctxt () >>=? fun () -> - cctxt#message "The access control rules are now cleared." >>= fun () -> - return_unit - ) ; - ] + P2p_services.ACL.clear cctxt () + >>=? fun () -> + cctxt#message "The access control rules are now cleared." + >>= fun () -> return_unit) ] diff --git a/src/lib_client_commands/client_p2p_commands.mli b/src/lib_client_commands/client_p2p_commands.mli index d50cc7538fbf04ef0ead4fba3aa1e197ad1f423f..bf2cb2fa6a10aaebad81920f9818a24c4c835b74 100644 --- a/src/lib_client_commands/client_p2p_commands.mli +++ b/src/lib_client_commands/client_p2p_commands.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val commands: unit -> Client_commands.command list +val commands : unit -> Client_commands.command list diff --git a/src/lib_client_commands/client_report_commands.ml b/src/lib_client_commands/client_report_commands.ml index 99537258962cf947d5f87c34a66410bd5321b046..aa921aaac7479a99645d7cbedc11b62406efbad4 100644 --- a/src/lib_client_commands/client_report_commands.ml +++ b/src/lib_client_commands/client_report_commands.ml @@ -25,54 +25,66 @@ (* Commands used to introspect the node's state *) -let print_invalid_blocks ppf (b: Shell_services.Chain.invalid_block) = - Format.fprintf ppf - "@[<v 2>Hash: %a\ - @ Level: %ld\ - @ %a@]" - Block_hash.pp b.hash +let print_invalid_blocks ppf (b : Shell_services.Chain.invalid_block) = + Format.fprintf + ppf + "@[<v 2>Hash: %a@ Level: %ld@ %a@]" + Block_hash.pp + b.hash b.level - pp_print_error b.errors + pp_print_error + b.errors let commands () = let open Clic in - let group = { name = "report" ; - title = "Commands to report the node's status" } in + let group = + {name = "report"; title = "Commands to report the node's status"} + in let output_arg = default_arg ~doc:"write to a file" ~long:"output" ~short:'o' ~placeholder:"path" - ~default: "-" - (parameter (fun _ -> function - | "-" -> return Format.std_formatter - | file -> - let ppf = Format.formatter_of_out_channel (open_out file) in - ignore Clic.(setup_formatter ppf Plain Full) ; - return ppf)) in - [ - command ~group - ~desc: "The last heads that have been considered by the node." + ~default:"-" + (parameter (fun _ -> + function + | "-" -> + return Format.std_formatter + | file -> + let ppf = Format.formatter_of_out_channel (open_out file) in + ignore Clic.(setup_formatter ppf Plain Full) ; + return ppf)) + in + [ command + ~group + ~desc:"The last heads that have been considered by the node." (args1 output_arg) - (fixed [ "list" ; "heads" ]) + (fixed ["list"; "heads"]) (fun ppf cctxt -> - Shell_services.Blocks.list cctxt () >>=? 
fun heads -> - Format.fprintf ppf "@[<v>%a@]@." - (Format.pp_print_list Block_hash.pp) - (List.concat heads) ; - return_unit) ; - command ~group ~desc: "The blocks that have been marked invalid by the node." + Shell_services.Blocks.list cctxt () + >>=? fun heads -> + Format.fprintf + ppf + "@[<v>%a@]@." + (Format.pp_print_list Block_hash.pp) + (List.concat heads) ; + return_unit); + command + ~group + ~desc:"The blocks that have been marked invalid by the node." (args1 output_arg) - (fixed [ "list" ; "rejected" ; "blocks" ]) + (fixed ["list"; "rejected"; "blocks"]) (fun ppf cctxt -> - Shell_services.Invalid_blocks.list cctxt () >>=? function - | [] -> - Format.fprintf ppf "No invalid blocks.@." ; - return_unit - | _ :: _ as invalid -> - Format.fprintf ppf "@[<v>%a@]@." - (Format.pp_print_list print_invalid_blocks) - invalid ; - return_unit) ; - ] + Shell_services.Invalid_blocks.list cctxt () + >>=? function + | [] -> + Format.fprintf ppf "No invalid blocks.@." ; + return_unit + | _ :: _ as invalid -> + Format.fprintf + ppf + "@[<v>%a@]@." + (Format.pp_print_list print_invalid_blocks) + invalid ; + return_unit) ] diff --git a/src/lib_client_commands/client_report_commands.mli b/src/lib_client_commands/client_report_commands.mli index 7ad69a9f2f7cc020f0211c3cfe322c3a3f91685b..4393cb83bd914b23ef54a0d9d8a8b14f80d54325 100644 --- a/src/lib_client_commands/client_report_commands.mli +++ b/src/lib_client_commands/client_report_commands.mli @@ -23,5 +23,4 @@ (* *) (*****************************************************************************) - val commands : unit -> #Client_context.full Clic.command list diff --git a/src/lib_crypto/.ocamlformat b/src/lib_crypto/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_crypto/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_crypto/base58.ml b/src/lib_crypto/base58.ml index 4aea52cf0a810e9d71eeee81b0f8c3fb97cb829e..5cc2423634a69ff0d93d46f58611b4242efce3db 100644 --- a/src/lib_crypto/base58.ml +++ b/src/lib_crypto/base58.ml @@ -26,11 +26,11 @@ open Lwt.Infix let base = 58 + let zbase = Z.of_int base module Alphabet = struct - - type t = { encode: string ; decode: string } + type t = {encode : string; decode : string} let make alphabet = if String.length alphabet <> base then @@ -39,17 +39,22 @@ module Alphabet = struct for i = 0 to String.length alphabet - 1 do let char = int_of_char alphabet.[i] in if Bytes.get str char <> '\255' then - Format.kasprintf invalid_arg + Format.kasprintf + invalid_arg "Base58: invalid alphabet (dup '%c' %d %d)" - (char_of_int char) (int_of_char @@ Bytes.get str char) i ; - Bytes.set str char (char_of_int i) ; + (char_of_int char) + (int_of_char @@ Bytes.get str char) + i ; + Bytes.set str char (char_of_int i) done ; - { encode = alphabet ; decode = Bytes.to_string str } + {encode = alphabet; decode = Bytes.to_string str} let bitcoin = make "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + let ripple = make "rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz" + let flickr = make "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ" @@ -59,131 +64,134 @@ module Alphabet = struct let ok = Array.make 256 false in String.iter 
(fun x -> ok.(Char.code x) <- true) alphabet.encode ; let res = ref true in - for i = 0 to (String.length string) - 1 do + for i = 0 to String.length string - 1 do res := !res && ok.(Char.code string.[i]) - done; + done ; !res - let pp ppf { encode ; _ } = Format.fprintf ppf "%s" encode - + let pp ppf {encode; _} = Format.fprintf ppf "%s" encode end let count_trailing_char s c = let len = String.length s in let rec loop i = - if i < 0 then len - else if String.get s i <> c then (len-i-1) - else loop (i-1) in - loop (len-1) + if i < 0 then len else if s.[i] <> c then len - i - 1 else loop (i - 1) + in + loop (len - 1) let count_leading_char s c = let len = String.length s in let rec loop i = - if i = len then len - else if String.get s i <> c then i - else loop (i+1) in + if i = len then len else if s.[i] <> c then i else loop (i + 1) + in loop 0 -let of_char ?(alphabet=Alphabet.default) x = - let pos = String.get alphabet.decode (int_of_char x) in - match pos with - | '\255' -> None - | _ -> Some (int_of_char pos) +let of_char ?(alphabet = Alphabet.default) x = + let pos = alphabet.decode.[int_of_char x] in + match pos with '\255' -> None | _ -> Some (int_of_char pos) -let to_char ?(alphabet=Alphabet.default) x = - alphabet.encode.[x] +let to_char ?(alphabet = Alphabet.default) x = alphabet.encode.[x] -let raw_encode ?(alphabet=Alphabet.default) s = +let raw_encode ?(alphabet = Alphabet.default) s = let len = String.length s in - let s = String.init len (fun i -> String.get s (len - i - 1)) in + let s = String.init len (fun i -> s.[len - i - 1]) in let zero = alphabet.encode.[0] in let zeros = count_trailing_char s '\000' in - let res_len = (len * 8 + 4) / 5 in + let res_len = ((len * 8) + 4) / 5 in let res = Bytes.make res_len '\000' in let s = Z.of_bits s in let rec loop s i = - if s = Z.zero then i else - let s, r = Z.div_rem s zbase in + if s = Z.zero then i + else + let (s, r) = Z.div_rem s zbase in Bytes.set res i (to_char ~alphabet (Z.to_int r)) ; - loop s (i - 1) in + loop s (i - 1) + in let i = loop s (res_len - 1) in let ress = Bytes.sub_string res (i + 1) (res_len - i - 1) in String.make zeros zero ^ ress -let raw_decode ?(alphabet=Alphabet.default) s = - TzString.fold_left begin fun a c -> - match a, of_char ~alphabet c with - | Some a, Some i -> Some Z.(add (of_int i) (mul a zbase)) - | _ -> None - end (Some Z.zero) s |> - Option.map ~f:begin fun res -> - let res = Z.to_bits res in - let res_tzeros = count_trailing_char res '\000' in - let len = String.length res - res_tzeros in - let zeros = count_leading_char s alphabet.encode.[0] in - String.make zeros '\000' ^ - String.init len (fun i -> String.get res (len - i - 1)) - end +let raw_decode ?(alphabet = Alphabet.default) s = + TzString.fold_left + (fun a c -> + match (a, of_char ~alphabet c) with + | (Some a, Some i) -> + Some Z.(add (of_int i) (mul a zbase)) + | _ -> + None) + (Some Z.zero) + s + |> Option.map ~f:(fun res -> + let res = Z.to_bits res in + let res_tzeros = count_trailing_char res '\000' in + let len = String.length res - res_tzeros in + let zeros = count_leading_char s alphabet.encode.[0] in + String.make zeros '\000' + ^ String.init len (fun i -> res.[len - i - 1])) let checksum s = - let hash = - Hacl.Hash.SHA256.(digest (digest (Bigstring.of_string s))) in + let hash = Hacl.Hash.SHA256.(digest (digest (Bigstring.of_string s))) in let res = Bytes.make 4 '\000' in Bigstring.blit_to_bytes hash 0 res 0 4 ; Bytes.to_string res (* Append a 4-bytes cryptographic checksum before encoding string s *) -let 
safe_encode ?alphabet s = - raw_encode ?alphabet (s ^ checksum s) +let safe_encode ?alphabet s = raw_encode ?alphabet (s ^ checksum s) let safe_decode ?alphabet s = - raw_decode ?alphabet s |> Option.apply ~f:begin fun s -> - let len = String.length s in - if len < 4 then None else - (* only if the string is long enough to extract a checksum do we check it *) - let msg = String.sub s 0 (len-4) in - let msg_hash = String.sub s (len-4) 4 in - if msg_hash <> checksum msg then None - else Some msg - end + raw_decode ?alphabet s + |> Option.apply ~f:(fun s -> + let len = String.length s in + if len < 4 then None + else + (* only if the string is long enough to extract a checksum do we check it *) + let msg = String.sub s 0 (len - 4) in + let msg_hash = String.sub s (len - 4) 4 in + if msg_hash <> checksum msg then None else Some msg) type data = .. type 'a encoding = { - prefix: string ; - length: int ; - encoded_prefix: string ; - encoded_length: int ; - to_raw: 'a -> string ; - of_raw: string -> 'a option ; - wrap: 'a -> data ; + prefix : string; + length : int; + encoded_prefix : string; + encoded_length : int; + to_raw : 'a -> string; + of_raw : string -> 'a option; + wrap : 'a -> data } -let prefix { prefix ; _ } = prefix +let prefix {prefix; _} = prefix -let simple_decode ?alphabet { prefix ; of_raw ; _ } s = - safe_decode ?alphabet s |> - Option.apply ~f:(TzString.remove_prefix ~prefix) |> - Option.apply ~f:of_raw +let simple_decode ?alphabet {prefix; of_raw; _} s = + safe_decode ?alphabet s + |> Option.apply ~f:(TzString.remove_prefix ~prefix) + |> Option.apply ~f:of_raw -let simple_encode ?alphabet { prefix ; to_raw ; _ } d = +let simple_encode ?alphabet {prefix; to_raw; _} d = safe_encode ?alphabet (prefix ^ to_raw d) type registered_encoding = Encoding : 'a encoding -> registered_encoding -module MakeEncodings(E: sig - val encodings: registered_encoding list - end) = struct - +module MakeEncodings (E : sig + val encodings : registered_encoding list +end) = +struct let encodings = ref E.encodings let check_ambiguous_prefix prefix length encodings = List.iter - (fun (Encoding { encoded_prefix = s ; length = l ; _ }) -> - if length = l && (TzString.remove_prefix ~prefix:s prefix <> None || - TzString.remove_prefix ~prefix s <> None) then - Format.ksprintf invalid_arg - "Base58.register_encoding: duplicate prefix: %S, %S." s prefix) + (fun (Encoding {encoded_prefix = s; length = l; _}) -> + if + length = l + && ( TzString.remove_prefix ~prefix:s prefix <> None + || TzString.remove_prefix ~prefix s <> None ) + then + Format.ksprintf + invalid_arg + "Base58.register_encoding: duplicate prefix: %S, %S." + s + prefix) encodings let make_encoded_prefix prefix len = @@ -191,139 +199,169 @@ module MakeEncodings(E: sig and ones = safe_encode (prefix ^ String.make len '\255') in let len = String.length zeros in if String.length ones <> len then - Format.ksprintf invalid_arg + Format.ksprintf + invalid_arg "Base58.registered_encoding: variable length encoding." ; let rec loop i = - if i = len then len - else if zeros.[i] = ones.[i] then loop (i+1) - else i in + if i = len then len else if zeros.[i] = ones.[i] then loop (i + 1) else i + in let len = loop 0 in if len = 0 then - invalid_arg - "Base58.register_encoding: not a unique prefix." ; - String.sub zeros 0 len, String.length zeros + invalid_arg "Base58.register_encoding: not a unique prefix." 
; + (String.sub zeros 0 len, String.length zeros) let register_encoding ~prefix ~length ~to_raw ~of_raw ~wrap = let to_raw x = - let s = to_raw x in assert (String.length s = length) ; s in - let of_raw s = assert (String.length s = length) ; of_raw s in - let encoded_prefix, encoded_length = make_encoded_prefix prefix length in + let s = to_raw x in + assert (String.length s = length) ; + s + in + let of_raw s = + assert (String.length s = length) ; + of_raw s + in + let (encoded_prefix, encoded_length) = make_encoded_prefix prefix length in check_ambiguous_prefix encoded_prefix encoded_length !encodings ; let encoding = - { prefix ; length ; encoded_prefix ; encoded_length ; - to_raw ; of_raw ; wrap } in + {prefix; length; encoded_prefix; encoded_length; to_raw; of_raw; wrap} + in encodings := Encoding encoding :: !encodings ; encoding let check_encoded_prefix enc p l = if enc.encoded_prefix <> p then - Format.kasprintf Pervasives.failwith + Format.kasprintf + Pervasives.failwith "Unexpected prefix %s (expected %s)" - p enc.encoded_prefix ; + p + enc.encoded_prefix ; if enc.encoded_length <> l then - Format.kasprintf Pervasives.failwith + Format.kasprintf + Pervasives.failwith "Unexpected encoded length %d for %s (expected %d)" - l p enc.encoded_length + l + p + enc.encoded_length let decode ?alphabet s = let rec find s = function - | [] -> None - | Encoding { prefix ; of_raw ; wrap ; _ } :: encodings -> - match TzString.remove_prefix ~prefix s with - | None -> find s encodings - | Some msg -> of_raw msg |> Option.map ~f:wrap in - safe_decode ?alphabet s |> - Option.apply ~f:(fun s -> find s !encodings) - + | [] -> + None + | Encoding {prefix; of_raw; wrap; _} :: encodings -> ( + match TzString.remove_prefix ~prefix s with + | None -> + find s encodings + | Some msg -> + of_raw msg |> Option.map ~f:wrap ) + in + safe_decode ?alphabet s |> Option.apply ~f:(fun s -> find s !encodings) end type 'a resolver = - Resolver : { - encoding: 'h encoding ; - resolver: 'a -> string -> 'h list Lwt.t ; - } -> 'a resolver - -module MakeResolvers(R: sig - type context - end) = struct - + | Resolver : + { encoding : 'h encoding; + resolver : 'a -> string -> 'h list Lwt.t } + -> 'a resolver + +module MakeResolvers (R : sig + type context +end) = +struct let resolvers = ref [] - let register_resolver - (type a) - (encoding : a encoding) + let register_resolver (type a) (encoding : a encoding) (resolver : R.context -> string -> a list Lwt.t) = - resolvers := Resolver { encoding ; resolver } :: !resolvers + resolvers := Resolver {encoding; resolver} :: !resolvers - let partial_decode ?(alphabet=Alphabet.default) request len = + let partial_decode ?(alphabet = Alphabet.default) request len = let zero = alphabet.encode.[0] in - let last = alphabet.encode.[base-1] in + let last = alphabet.encode.[base - 1] in let n = String.length request in let min = raw_decode ~alphabet (request ^ String.make (len - n) zero) in let max = raw_decode ~alphabet (request ^ String.make (len - n) last) in - match min, max with - | Some min, Some max -> + match (min, max) with + | (Some min, Some max) -> let prefix_len = TzString.common_prefix min max in Some (String.sub min 0 prefix_len) - | _ -> None + | _ -> + None let complete ?alphabet context request = let rec find s = function - | [] -> Lwt.return_nil - | Resolver { encoding ; resolver } :: resolvers -> + | [] -> + Lwt.return_nil + | Resolver {encoding; resolver} :: resolvers -> ( if not (TzString.has_prefix ~prefix:encoding.encoded_prefix s) then find s resolvers else match 
partial_decode ?alphabet request encoding.encoded_length with - | None -> find s resolvers + | None -> + find s resolvers | Some prefix -> let len = String.length prefix in let ignored = String.length encoding.prefix in let msg = if len <= ignored then "" - else begin - assert (String.sub prefix 0 ignored = encoding.prefix) ; - String.sub prefix ignored (len - ignored) - end in - resolver context msg >|= fun msgs -> + else ( + assert (String.sub prefix 0 ignored = encoding.prefix) ; + String.sub prefix ignored (len - ignored) ) + in + resolver context msg + >|= fun msgs -> TzList.filter_map (fun msg -> - let res = simple_encode encoding ?alphabet msg in - TzString.remove_prefix ~prefix:request res |> - Option.map ~f:(fun _ -> res)) - msgs in + let res = simple_encode encoding ?alphabet msg in + TzString.remove_prefix ~prefix:request res + |> Option.map ~f:(fun _ -> res)) + msgs ) + in find request !resolvers - end -include MakeEncodings(struct let encodings = [] end) -include MakeResolvers(struct - type context = unit - end) +include MakeEncodings (struct + let encodings = [] +end) + +include MakeResolvers (struct + type context = unit +end) let register_resolver enc f = register_resolver enc (fun () s -> f s) + let complete ?alphabet s = complete ?alphabet () s -module Make(C: sig type context end) = struct - include MakeEncodings(struct let encodings = !encodings end) - include MakeResolvers(struct - type context = C.context - end) +module Make (C : sig + type context +end) = +struct + include MakeEncodings (struct + let encodings = !encodings + end) + + include MakeResolvers (struct + type context = C.context + end) end module Prefix = struct - (* 32 *) let block_hash = "\001\052" (* B(51) *) + let operation_hash = "\005\116" (* o(51) *) + let operation_list_hash = "\133\233" (* Lo(52) *) + let operation_list_list_hash = "\029\159\109" (* LLo(53) *) + let protocol_hash = "\002\170" (* P(51) *) + let context_hash = "\079\199" (* Co(52) *) (* 20 *) let ed25519_public_key_hash = "\006\161\159" (* tz1(36) *) + let secp256k1_public_key_hash = "\006\161\161" (* tz2(36) *) + let p256_public_key_hash = "\006\161\164" (* tz3(36) *) (* 16 *) @@ -331,29 +369,42 @@ module Prefix = struct (* 32 *) let ed25519_seed = "\013\015\058\007" (* edsk(54) *) + let ed25519_public_key = "\013\015\037\217" (* edpk(54) *) + let secp256k1_secret_key = "\017\162\224\201" (* spsk(54) *) + let p256_secret_key = "\016\081\238\189" (* p2sk(54) *) (* 56 *) let ed25519_encrypted_seed = "\007\090\060\179\041" (* edesk(88) *) + let secp256k1_encrypted_secret_key = "\009\237\241\174\150" (* spesk(88) *) + let p256_encrypted_secret_key = "\009\048\057\115\171" (* p2esk(88) *) (* 33 *) let secp256k1_public_key = "\003\254\226\086" (* sppk(55) *) + let p256_public_key = "\003\178\139\127" (* p2pk(55) *) + let secp256k1_scalar = "\038\248\136" (* SSp(53) *) + let secp256k1_element = "\005\092\000" (* GSp(54) *) (* 64 *) let ed25519_secret_key = "\043\246\078\007" (* edsk(98) *) + let ed25519_signature = "\009\245\205\134\018" (* edsig(99) *) - let secp256k1_signature = "\013\115\101\019\063" (* spsig1(99) *) - let p256_signature = "\054\240\044\052" (* p2sig(98) *) + + let secp256k1_signature = "\013\115\101\019\063" (* spsig1(99) *) + + let p256_signature = "\054\240\044\052" (* p2sig(98) *) + let generic_signature = "\004\130\043" (* sig(96) *) (* 4 *) - let chain_id = "\087\082\000" (* Net(15) *) + let chain_id = "\087\082\000" + (* Net(15) *) end diff --git a/src/lib_crypto/base58.mli b/src/lib_crypto/base58.mli index 
37b6db9c3a8f03703c8e8fdde587e41727a19ee7..a1f7c877efed9adcaa2b17e2cdd34ce9f8ccbaf3 100644 --- a/src/lib_crypto/base58.mli +++ b/src/lib_crypto/base58.mli @@ -26,36 +26,59 @@ (** {1 Prefixed Base58Check encodings} *) module Prefix : sig + val block_hash : string - val block_hash: string - val operation_hash: string - val operation_list_hash: string - val operation_list_list_hash: string - val protocol_hash: string - val context_hash: string - val ed25519_public_key_hash: string - val secp256k1_public_key_hash: string - val p256_public_key_hash: string - val cryptobox_public_key_hash: string - val ed25519_seed: string - val ed25519_public_key: string - val ed25519_secret_key: string - val ed25519_signature: string - val secp256k1_public_key: string - val secp256k1_secret_key: string - val secp256k1_signature: string - val p256_public_key: string - val p256_secret_key: string - val p256_signature: string - val ed25519_encrypted_seed: string - val secp256k1_encrypted_secret_key: string - val p256_encrypted_secret_key: string - - val generic_signature: string - val chain_id: string - val secp256k1_element: string - val secp256k1_scalar: string + val operation_hash : string + val operation_list_hash : string + + val operation_list_list_hash : string + + val protocol_hash : string + + val context_hash : string + + val ed25519_public_key_hash : string + + val secp256k1_public_key_hash : string + + val p256_public_key_hash : string + + val cryptobox_public_key_hash : string + + val ed25519_seed : string + + val ed25519_public_key : string + + val ed25519_secret_key : string + + val ed25519_signature : string + + val secp256k1_public_key : string + + val secp256k1_secret_key : string + + val secp256k1_signature : string + + val p256_public_key : string + + val p256_secret_key : string + + val p256_signature : string + + val ed25519_encrypted_seed : string + + val secp256k1_encrypted_secret_key : string + + val p256_encrypted_secret_key : string + + val generic_signature : string + + val chain_id : string + + val secp256k1_element : string + + val secp256k1_scalar : string end (** An extensible sum-type for decoded data: one case per known @@ -66,13 +89,13 @@ type data = .. (** Abstract representation of registered encodings. The type paramater is the type of the encoded data, for instance [Hash.Block_hash.t]. *) type 'a encoding = private { - prefix: string ; - length: int ; - encoded_prefix: string ; - encoded_length: int ; - to_raw: 'a -> string ; - of_raw: string -> 'a option ; - wrap: 'a -> data ; + prefix : string; + length : int; + encoded_prefix : string; + encoded_length : int; + to_raw : 'a -> string; + of_raw : string -> 'a option; + wrap : 'a -> data } (** Register a new encoding. The function might raise `Invalid_arg` if @@ -81,86 +104,96 @@ type 'a encoding = private { serialisation/deserialisation for the data. The [wrap] should wrap the deserialised value into the extensible sum-type [data] (see the generic function [decode]). *) -val register_encoding: - prefix: string -> - length: int -> - to_raw: ('a -> string) -> - of_raw: (string -> 'a option) -> - wrap: ('a -> data) -> +val register_encoding : + prefix:string -> + length:int -> + to_raw:('a -> string) -> + of_raw:(string -> 'a option) -> + wrap:('a -> data) -> 'a encoding (** Checks that an encoding has a certain prefix and length. 
*) -val check_encoded_prefix: 'a encoding -> string -> int -> unit +val check_encoded_prefix : 'a encoding -> string -> int -> unit module Alphabet : sig type t - val bitcoin: t - val ripple: t - val flickr: t - val make: string -> t - val all_in_alphabet : t -> string -> bool + + val bitcoin : t + + val ripple : t + + val flickr : t + + val make : string -> t + + val all_in_alphabet : t -> string -> bool + val pp : Format.formatter -> t -> unit end (** Encoder for a given kind of data. *) -val simple_encode: ?alphabet:Alphabet.t -> 'a encoding -> 'a -> string +val simple_encode : ?alphabet:Alphabet.t -> 'a encoding -> 'a -> string (** Decoder for a given kind of data. It returns [None] when the decoded data does not start with the expected prefix. *) -val simple_decode: ?alphabet:Alphabet.t -> 'a encoding -> string -> 'a option +val simple_decode : ?alphabet:Alphabet.t -> 'a encoding -> string -> 'a option (** Generic decoder. It returns [None] when the decoded data does not start with a registered prefix. *) -val decode: ?alphabet:Alphabet.t -> string -> data option +val decode : ?alphabet:Alphabet.t -> string -> data option (** {2 Completion of partial Base58Check value} *) (** Register a (global) resolver for a previsously registered kind af data. *) -val register_resolver: 'a encoding -> (string -> 'a list Lwt.t) -> unit +val register_resolver : 'a encoding -> (string -> 'a list Lwt.t) -> unit (** Try to complete a prefix of a Base58Check encoded data, by using the previously registered resolver associated to this kind of data. Note that a prefix of [n] characters of a Base58-encoded value provides at least [n/2] bytes of a prefix of the original value. *) -val complete: ?alphabet:Alphabet.t -> string -> string list Lwt.t +val complete : ?alphabet:Alphabet.t -> string -> string list Lwt.t (** {1 Low-level: distinct registering function for economic protocol} *) (** See [src/environment/v1/base58.mli] for an inlined documentation. *) -module Make(C: sig type context end) : sig - - val register_encoding: - prefix: string -> - length: int -> - to_raw: ('a -> string) -> - of_raw: (string -> 'a option) -> - wrap: ('a -> data) -> +module Make (C : sig + type context +end) : sig + val register_encoding : + prefix:string -> + length:int -> + to_raw:('a -> string) -> + of_raw:(string -> 'a option) -> + wrap:('a -> data) -> 'a encoding - val decode: ?alphabet:Alphabet.t -> string -> data option + val decode : ?alphabet:Alphabet.t -> string -> data option - val register_resolver: + val register_resolver : 'a encoding -> (C.context -> string -> 'a list Lwt.t) -> unit - val complete: + val complete : ?alphabet:Alphabet.t -> C.context -> string -> string list Lwt.t - end (** {2 Low-level Base58Check encodings} *) (** Base58Check-encoding/decoding functions (with error detections). *) -val safe_encode: ?alphabet:Alphabet.t -> string -> string -val safe_decode: ?alphabet:Alphabet.t -> string -> string option +val safe_encode : ?alphabet:Alphabet.t -> string -> string + +val safe_decode : ?alphabet:Alphabet.t -> string -> string option (** Base58-encoding/decoding functions (without error detections). 
*) -val raw_encode: ?alphabet:Alphabet.t -> string -> string -val raw_decode: ?alphabet:Alphabet.t -> string -> string option +val raw_encode : ?alphabet:Alphabet.t -> string -> string + +val raw_decode : ?alphabet:Alphabet.t -> string -> string option (**/**) -val partial_decode: ?alphabet:Alphabet.t -> string -> int -> string option -val make_encoded_prefix: string -> int -> string * int -val prefix: 'a encoding -> string +val partial_decode : ?alphabet:Alphabet.t -> string -> int -> string option + +val make_encoded_prefix : string -> int -> string * int + +val prefix : 'a encoding -> string diff --git a/src/lib_crypto/blake2B.ml b/src/lib_crypto/blake2B.ml index 4cf9697cca31fff0f112dbc033cbc03a7c53e49b..3f7399dd9e38f57e5ef9598cb3135b8302a8d529 100644 --- a/src/lib_crypto/blake2B.ml +++ b/src/lib_crypto/blake2B.ml @@ -29,77 +29,93 @@ open Error_monad module type Name = sig val name : string + val title : string + val size : int option end module type PrefixedName = sig include Name + val b58check_prefix : string end module Make_minimal (K : Name) = struct - open Blake2 + type t = Blake2b.hash include K - let size = - match K.size with - | None -> 32 - | Some x -> x + let size = match K.size with None -> 32 | Some x -> x let of_string_opt s = - if String.length s <> size then - None - else - Some (Blake2b.Hash (MBytes.of_string s)) + if String.length s <> size then None + else Some (Blake2b.Hash (MBytes.of_string s)) + let of_string s = match of_string_opt s with | None -> - generic_error "%s.of_string: wrong string size (%d)" - K.name (String.length s) - | Some h -> Ok h + generic_error + "%s.of_string: wrong string size (%d)" + K.name + (String.length s) + | Some h -> + Ok h + let of_string_exn s = match of_string_opt s with | None -> - Format.kasprintf invalid_arg + Format.kasprintf + invalid_arg "%s.of_string: wrong string size (%d)" - K.name (String.length s) - | Some h -> h + K.name + (String.length s) + | Some h -> + h + let to_string (Blake2b.Hash h) = MBytes.to_string h let of_hex s = of_string (Hex.to_string s) + let of_hex_opt s = of_string_opt (Hex.to_string s) + let of_hex_exn s = of_string_exn (Hex.to_string s) + let to_hex s = Hex.of_string (to_string s) let pp ppf h = - let `Hex h = to_hex h in + let (`Hex h) = to_hex h in Format.pp_print_string ppf h + let pp_short ppf h = - let `Hex h = to_hex h in + let (`Hex h) = to_hex h in Format.pp_print_string ppf (String.sub h 0 8) let of_bytes_opt b = - if MBytes.length b <> size then - None - else - Some (Blake2b.Hash b) + if MBytes.length b <> size then None else Some (Blake2b.Hash b) + let of_bytes_exn b = match of_bytes_opt b with | None -> let msg = - Printf.sprintf "%s.of_bytes: wrong string size (%d)" - K.name (MBytes.length b) in + Printf.sprintf + "%s.of_bytes: wrong string size (%d)" + K.name + (MBytes.length b) + in raise (Invalid_argument msg) - | Some h -> h + | Some h -> + h + let of_bytes s = match of_bytes_opt s with - | Some x -> Ok x + | Some x -> + Ok x | None -> generic_error "Failed to deserialize a hash (%s)" K.name + let to_bytes (Blake2b.Hash h) = h (* let read src off = of_bytes_exn @@ MBytes.sub src off size *) @@ -122,48 +138,55 @@ module Make_minimal (K : Name) = struct length [path_length] where each element is one byte, or two characters, except the last one which contains the rest. 
*) let to_path key l = - let `Hex key = to_hex key in - String.sub key 0 2 :: String.sub key 2 2 :: - String.sub key 4 2 :: String.sub key 6 2 :: - String.sub key 8 2 :: String.sub key 10 (size * 2 - 10) :: l + let (`Hex key) = to_hex key in + String.sub key 0 2 :: String.sub key 2 2 :: String.sub key 4 2 + :: String.sub key 6 2 :: String.sub key 8 2 + :: String.sub key 10 ((size * 2) - 10) + :: l + let of_path path = let path = String.concat "" path in of_hex_opt (`Hex path) + let of_path_exn path = let path = String.concat "" path in of_hex_exn (`Hex path) let prefix_path p = - let `Hex p = Hex.of_string p in + let (`Hex p) = Hex.of_string p in let len = String.length p in let p1 = if len >= 2 then String.sub p 0 2 else "" and p2 = if len >= 4 then String.sub p 2 2 else "" and p3 = if len >= 6 then String.sub p 4 2 else "" and p4 = if len >= 8 then String.sub p 6 2 else "" and p5 = if len >= 10 then String.sub p 8 2 else "" - and p6 = if len > 10 then String.sub p 10 (min (len - 10) (size * 2 - 10)) else "" in - [ p1 ; p2 ; p3 ; p4 ; p5 ; p6 ] + and p6 = + if len > 10 then String.sub p 10 (min (len - 10) ((size * 2) - 10)) + else "" + in + [p1; p2; p3; p4; p5; p6] let zero = of_hex_exn (`Hex (String.make (size * 2) '0')) - include Compare.Make(struct - type nonrec t = t - let compare (Blake2b.Hash h1) (Blake2b.Hash h2) = MBytes.compare h1 h2 - end) + include Compare.Make (struct + type nonrec t = t + let compare (Blake2b.Hash h1) (Blake2b.Hash h2) = MBytes.compare h1 h2 + end) end module Make (R : sig - val register_encoding: - prefix: string -> - length:int -> - to_raw: ('a -> string) -> - of_raw: (string -> 'a option) -> - wrap: ('a -> Base58.data) -> - 'a Base58.encoding - end) (K : PrefixedName) = struct - - include Make_minimal(K) + val register_encoding : + prefix:string -> + length:int -> + to_raw:('a -> string) -> + of_raw:(string -> 'a option) -> + wrap:('a -> Base58.data) -> + 'a Base58.encoding +end) +(K : PrefixedName) = +struct + include Make_minimal (K) (* Serializers *) @@ -172,206 +195,214 @@ module Make (R : sig conv to_bytes of_bytes_exn (Fixed.bytes size) let hash = - if Compare.Int.(size >= 8) then - fun h -> Int64.to_int (MBytes.get_int64 (to_bytes h) 0) - else if Compare.Int.(size >= 4) then - fun h -> Int32.to_int (MBytes.get_int32 (to_bytes h) 0) - else - fun h -> - let r = ref 0 in - let h = to_bytes h in - for i = 0 to size - 1 do - r := MBytes.get_uint8 h i + 8 * !r - done ; - !r + if Compare.Int.(size >= 8) then fun h -> + Int64.to_int (MBytes.get_int64 (to_bytes h) 0) + else if Compare.Int.(size >= 4) then fun h -> + Int32.to_int (MBytes.get_int32 (to_bytes h) 0) + else fun h -> + let r = ref 0 in + let h = to_bytes h in + for i = 0 to size - 1 do + r := MBytes.get_uint8 h i + (8 * !r) + done ; + !r type Base58.data += Data of t let b58check_encoding = R.register_encoding - ~prefix: K.b58check_prefix - ~length: size - ~wrap: (fun s -> Data s) - ~of_raw: of_string_opt - ~to_raw: to_string - - include Helpers.Make(struct - type nonrec t = t - let title = title - let name = name - let b58check_encoding = b58check_encoding - let raw_encoding = raw_encoding - let compare = compare - let equal = equal - let hash = hash - end) + ~prefix:K.b58check_prefix + ~length:size + ~wrap:(fun s -> Data s) + ~of_raw:of_string_opt + ~to_raw:to_string + + include Helpers.Make (struct + type nonrec t = t + + let title = title + + let name = name + let b58check_encoding = b58check_encoding + + let raw_encoding = raw_encoding + + let compare = compare + + let equal = equal + + let 
hash = hash + end) end module Generic_Merkle_tree (H : sig - type t - type elt - val empty : t - val leaf : elt -> t - val node : t -> t -> t - end) = struct + type t + + type elt + val empty : t + + val leaf : elt -> t + + val node : t -> t -> t +end) = +struct let rec step a n = - let m = (n+1) / 2 in + let m = (n + 1) / 2 in for i = 0 to m - 1 do - a.(i) <- H.node a.(2*i) a.(2*i+1) + a.(i) <- H.node a.(2 * i) a.((2 * i) + 1) done ; a.(m) <- H.node a.(n) a.(n) ; - if m = 1 then - a.(0) - else if m mod 2 = 0 then - step a m - else begin - a.(m+1) <- a.(m) ; - step a (m+1) - end + if m = 1 then a.(0) + else if m mod 2 = 0 then step a m + else ( + a.(m + 1) <- a.(m) ; + step a (m + 1) ) let empty = H.empty let compute xs = match xs with - | [] -> H.empty - | [x] -> H.leaf x + | [] -> + H.empty + | [x] -> + H.leaf x | _ :: _ :: _ -> let last = TzList.last_exn xs in let n = List.length xs in - let a = Array.make (n+1) (H.leaf last) in + let a = Array.make (n + 1) (H.leaf last) in List.iteri (fun i x -> a.(i) <- H.leaf x) xs ; step a n - type path = - | Left of path * H.t - | Right of H.t * path - | Op + type path = Left of path * H.t | Right of H.t * path | Op let rec step_path a n p j = - let m = (n+1) / 2 in - let p = if j mod 2 = 0 then Left (p, a.(j+1)) else Right (a.(j-1), p) in + let m = (n + 1) / 2 in + let p = + if j mod 2 = 0 then Left (p, a.(j + 1)) else Right (a.(j - 1), p) + in for i = 0 to m - 1 do - a.(i) <- H.node a.(2*i) a.(2*i+1) + a.(i) <- H.node a.(2 * i) a.((2 * i) + 1) done ; a.(m) <- H.node a.(n) a.(n) ; - if m = 1 then - p - else if m mod 2 = 0 then - step_path a m p (j/2) - else begin - a.(m+1) <- a.(m) ; - step_path a (m+1) p (j/2) - end + if m = 1 then p + else if m mod 2 = 0 then step_path a m p (j / 2) + else ( + a.(m + 1) <- a.(m) ; + step_path a (m + 1) p (j / 2) ) let compute_path xs i = match xs with - | [] -> invalid_arg "compute_path" - | [_] -> Op + | [] -> + invalid_arg "compute_path" + | [_] -> + Op | _ :: _ :: _ -> let last = TzList.last_exn xs in let n = List.length xs in if i < 0 || n <= i then invalid_arg "compute_path" ; - let a = Array.make (n+1) (H.leaf last) in + let a = Array.make (n + 1) (H.leaf last) in List.iteri (fun i x -> a.(i) <- H.leaf x) xs ; step_path a n Op i let rec check_path p h = match p with | Op -> - H.leaf h, 1, 0 + (H.leaf h, 1, 0) | Left (p, r) -> - let l, s, pos = check_path p h in - H.node l r, s * 2, pos + let (l, s, pos) = check_path p h in + (H.node l r, s * 2, pos) | Right (l, p) -> - let r, s, pos = check_path p h in - H.node l r, s * 2, pos + s + let (r, s, pos) = check_path p h in + (H.node l r, s * 2, pos + s) let check_path p h = - let h, _, pos = check_path p h in - h, pos - + let (h, _, pos) = check_path p h in + (h, pos) end -let rec log2 x = if x <= 1 then 0 else 1 + log2 ((x+1) / 2) - -module Make_merkle_tree - (R : sig - val register_encoding: - prefix: string -> - length:int -> - to_raw: ('a -> string) -> - of_raw: (string -> 'a option) -> - wrap: ('a -> Base58.data) -> - 'a Base58.encoding - end) - (K : PrefixedName) - (Contents: sig - type t - val to_bytes: t -> MBytes.t - end) = struct - +let rec log2 x = if x <= 1 then 0 else 1 + log2 ((x + 1) / 2) + +module Make_merkle_tree (R : sig + val register_encoding : + prefix:string -> + length:int -> + to_raw:('a -> string) -> + of_raw:(string -> 'a option) -> + wrap:('a -> Base58.data) -> + 'a Base58.encoding +end) +(K : PrefixedName) (Contents : sig + type t + + val to_bytes : t -> MBytes.t +end) = +struct include Make (R) (K) type elt = Contents.t + let 
elt_bytes = Contents.to_bytes let empty = hash_bytes [] - include Generic_Merkle_tree(struct - type nonrec t = t - type nonrec elt = elt - let empty = empty - let leaf x = hash_bytes [Contents.to_bytes x] - let node x y = hash_bytes [to_bytes x; to_bytes y] - end) + include Generic_Merkle_tree (struct + type nonrec t = t + + type nonrec elt = elt + + let empty = empty + + let leaf x = hash_bytes [Contents.to_bytes x] + + let node x y = hash_bytes [to_bytes x; to_bytes y] + end) let path_encoding = let open Data_encoding in - mu "path" - (fun path_encoding -> - union [ - case (Tag 240) - ~title:"Left" - (obj2 - (req "path" path_encoding) - (req "right" encoding)) - (function Left (p, r) -> Some (p, r) | _ -> None) - (fun (p, r) -> Left (p, r)) ; - case (Tag 15) - ~title:"Right" - (obj2 - (req "left" encoding) - (req "path" path_encoding)) - (function Right (r, p) -> Some (r, p) | _ -> None) - (fun (r, p) -> Right (r, p)) ; - case (Tag 0) - ~title:"Op" - unit - (function Op -> Some () | _ -> None) - (fun () -> Op) - ]) + mu "path" (fun path_encoding -> + union + [ case + (Tag 240) + ~title:"Left" + (obj2 (req "path" path_encoding) (req "right" encoding)) + (function Left (p, r) -> Some (p, r) | _ -> None) + (fun (p, r) -> Left (p, r)); + case + (Tag 15) + ~title:"Right" + (obj2 (req "left" encoding) (req "path" path_encoding)) + (function Right (r, p) -> Some (r, p) | _ -> None) + (fun (r, p) -> Right (r, p)); + case + (Tag 0) + ~title:"Op" + unit + (function Op -> Some () | _ -> None) + (fun () -> Op) ]) let bounded_path_encoding ?max_length () = match max_length with - | None -> path_encoding + | None -> + path_encoding | Some max_length -> let max_depth = log2 max_length in - Data_encoding.check_size (max_depth * (size + 1) + 1) path_encoding - + Data_encoding.check_size ((max_depth * (size + 1)) + 1) path_encoding end -include - Make_minimal (struct - let name = "Generic_hash" - let title = "" - let size = None - end) +include Make_minimal (struct + let name = "Generic_hash" + + let title = "" + + let size = None +end) let pp ppf h = - let `Hex h = to_hex h in + let (`Hex h) = to_hex h in Format.pp_print_string ppf h + let pp_short ppf h = - let `Hex h = to_hex h in + let (`Hex h) = to_hex h in Format.pp_print_string ppf (String.sub h 0 8) diff --git a/src/lib_crypto/blake2B.mli b/src/lib_crypto/blake2B.mli index 0b7333b909a64bf5a90899f6d78b8c430e9f83c5..0b3dbb58f01ace9dca27a28e18f9465d32f8fa0d 100644 --- a/src/lib_crypto/blake2B.mli +++ b/src/lib_crypto/blake2B.mli @@ -28,6 +28,7 @@ (** {2 Predefined Hashes } *) include S.MINIMAL_HASH + include S.RAW_DATA with type t := t (** {2 Building Hashes} *) @@ -38,59 +39,65 @@ include S.RAW_DATA with type t := t module type Name = sig val name : string + val title : string + val size : int option end module type PrefixedName = sig include Name + val b58check_prefix : string end (** Builds a new Hash type using Blake2B. 
*) module Make_minimal (Name : Name) : S.MINIMAL_HASH -module Make - (Register : sig - val register_encoding: - prefix: string -> - length: int -> - to_raw: ('a -> string) -> - of_raw: (string -> 'a option) -> - wrap: ('a -> Base58.data) -> - 'a Base58.encoding - end) - (Name : PrefixedName) : S.HASH + +module Make (Register : sig + val register_encoding : + prefix:string -> + length:int -> + to_raw:('a -> string) -> + of_raw:(string -> 'a option) -> + wrap:('a -> Base58.data) -> + 'a Base58.encoding +end) +(Name : PrefixedName) : S.HASH (**/**) -module Make_merkle_tree - (R : sig - val register_encoding: - prefix: string -> - length:int -> - to_raw: ('a -> string) -> - of_raw: (string -> 'a option) -> - wrap: ('a -> Base58.data) -> - 'a Base58.encoding - end) - (K : PrefixedName) - (Contents: sig - type t - val to_bytes: t -> MBytes.t - end) : S.MERKLE_TREE with type elt = Contents.t +module Make_merkle_tree (R : sig + val register_encoding : + prefix:string -> + length:int -> + to_raw:('a -> string) -> + of_raw:(string -> 'a option) -> + wrap:('a -> Base58.data) -> + 'a Base58.encoding +end) +(K : PrefixedName) (Contents : sig + type t + + val to_bytes : t -> MBytes.t +end) : S.MERKLE_TREE with type elt = Contents.t module Generic_Merkle_tree (H : sig - type t - type elt - val empty : t - val leaf : elt -> t - val node : t -> t -> t - end) : sig + type t + + type elt + + val empty : t + + val leaf : elt -> t + + val node : t -> t -> t +end) : sig val compute : H.elt list -> H.t - type path = - | Left of path * H.t - | Right of H.t * path - | Op - val compute_path: H.elt list -> int -> path - val check_path: path -> H.elt -> H.t * int + + type path = Left of path * H.t | Right of H.t * path | Op + + val compute_path : H.elt list -> int -> path + + val check_path : path -> H.elt -> H.t * int end diff --git a/src/lib_crypto/block_hash.ml b/src/lib_crypto/block_hash.ml index 9a8eb034d9c20a9224bd417d379d2324bf170792..7faee65331fd1a7614164c72407d8b224ba139c8 100644 --- a/src/lib_crypto/block_hash.ml +++ b/src/lib_crypto/block_hash.ml @@ -23,18 +23,23 @@ (* *) (*****************************************************************************) -include Blake2B.Make (Base58) (struct - let name = "block_hash" - let title = "A block identifier" - let b58check_prefix = Base58.Prefix.block_hash - let size = None - end) +include Blake2B.Make + (Base58) + (struct + let name = "block_hash" + let title = "A block identifier" + + let b58check_prefix = Base58.Prefix.block_hash + + let size = None + end) module Logging = struct let tag = Tag.def ~doc:"Block Hash" "block_hash" pp_short - let predecessor_tag = Tag.def ~doc:"Block Predecessor Hash" "predecessor_hash" pp_short + + let predecessor_tag = + Tag.def ~doc:"Block Predecessor Hash" "predecessor_hash" pp_short end -let () = - Base58.check_encoded_prefix b58check_encoding "B" 51 +let () = Base58.check_encoded_prefix b58check_encoding "B" 51 diff --git a/src/lib_crypto/block_hash.mli b/src/lib_crypto/block_hash.mli index 0b4fa4f8645f26d7d2c491a6305cce4225be193b..1ae58d90c6f7cc7449f2ad070e776aea88d48e6d 100644 --- a/src/lib_crypto/block_hash.mli +++ b/src/lib_crypto/block_hash.mli @@ -27,5 +27,6 @@ include S.HASH module Logging : sig val tag : t Tag.def + val predecessor_tag : t Tag.def end diff --git a/src/lib_crypto/chain_id.ml b/src/lib_crypto/chain_id.ml index f5e1a5a0e6308150675a50f991a8672551f16dde..5068d3f83596ea993ffd174523c495427f71f5d1 100644 --- a/src/lib_crypto/chain_id.ml +++ b/src/lib_crypto/chain_id.ml @@ -28,76 +28,95 @@ open Error_monad type 
t = string let name = "Chain_id" + let title = "Network identifier" -let extract bh = - MBytes.sub_string (Block_hash.to_bytes bh) 0 4 +let extract bh = MBytes.sub_string (Block_hash.to_bytes bh) 0 4 + let hash_bytes ?key l = extract (Block_hash.hash_bytes ?key l) + let hash_string ?key l = extract (Block_hash.hash_string ?key l) let size = 4 -let of_string_opt s = - if String.length s <> size then None else Some s +let of_string_opt s = if String.length s <> size then None else Some s + let of_string s = match of_string_opt s with | None -> generic_error "%s.of_string: wrong string size (%d)" - name (String.length s) - | Some h -> Ok h + name + (String.length s) + | Some h -> + Ok h + let of_string_exn s = match of_string_opt s with | None -> - Format.kasprintf invalid_arg + Format.kasprintf + invalid_arg "%s.of_string_exn: wrong string size (%d)" - name (String.length s) - | Some h -> h + name + (String.length s) + | Some h -> + h let to_string s = s + let of_hex s = of_string (Hex.to_string s) + let of_hex_opt s = of_string_opt (Hex.to_string s) + let of_hex_exn s = of_string_exn (Hex.to_string s) -let to_hex s = Hex.of_string (to_string s) +let to_hex s = Hex.of_string (to_string s) let of_bytes_opt b = - if MBytes.length b <> size then - None - else - Some (MBytes.to_string b) + if MBytes.length b <> size then None else Some (MBytes.to_string b) + let of_bytes_exn b = match of_bytes_opt b with | None -> let msg = - Printf.sprintf "%s.of_bytes: wrong string size (%d)" - name (MBytes.length b) in + Printf.sprintf + "%s.of_bytes: wrong string size (%d)" + name + (MBytes.length b) + in raise (Invalid_argument msg) - | Some h -> h + | Some h -> + h + let of_bytes s = match of_bytes_opt s with - | Some x -> Ok x + | Some x -> + Ok x | None -> generic_error "Failed to deserialize a hash (%s)" name + let to_bytes = MBytes.of_string (* let read src off = of_bytes_exn @@ MBytes.sub src off size *) (* let write dst off h = MBytes.blit (to_bytes h) 0 dst off size *) let path_length = 1 + let to_path key l = - let `Hex h = to_hex key in + let (`Hex h) = to_hex key in h :: l + let of_path path = let path = String.concat "" path in of_hex_opt (`Hex path) + let of_path_exn path = let path = String.concat "" path in of_hex_exn (`Hex path) let prefix_path p = - let `Hex p = Hex.of_string p in - [ p ] + let (`Hex p) = Hex.of_string p in + [p] let zero = of_hex_exn (`Hex (String.make (size * 2) '0')) @@ -105,33 +124,40 @@ type Base58.data += Data of t let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.chain_id - ~length: size - ~wrap: (fun s -> Data s) - ~of_raw: of_string_opt - ~to_raw: to_string + ~prefix:Base58.Prefix.chain_id + ~length:size + ~wrap:(fun s -> Data s) + ~of_raw:of_string_opt + ~to_raw:to_string let raw_encoding = let open Data_encoding in conv to_bytes of_bytes_exn (Fixed.bytes size) -let hash h = - Int32.to_int (MBytes.get_int32 (to_bytes h) 0) +let hash h = Int32.to_int (MBytes.get_int32 (to_bytes h) 0) let of_block_hash bh = hash_bytes [Block_hash.to_bytes bh] -include Compare.Make(struct - type nonrec t = t - let compare = String.compare - end) - -include Helpers.Make(struct - type nonrec t = t - let title = title - let name = name - let b58check_encoding = b58check_encoding - let raw_encoding = raw_encoding - let compare = compare - let equal = equal - let hash = hash - end) +include Compare.Make (struct + type nonrec t = t + + let compare = String.compare +end) + +include Helpers.Make (struct + type nonrec t = t + + let title = title + + let name = name + + 
let b58check_encoding = b58check_encoding + + let raw_encoding = raw_encoding + + let compare = compare + + let equal = equal + + let hash = hash +end) diff --git a/src/lib_crypto/chain_id.mli b/src/lib_crypto/chain_id.mli index ad033691f2559818a2c55c9f6d8b3c4d54c4a1ff..f69f23f23b1e0a9cf642ef81515575dc9142b100 100644 --- a/src/lib_crypto/chain_id.mli +++ b/src/lib_crypto/chain_id.mli @@ -25,4 +25,4 @@ include S.HASH -val of_block_hash: Block_hash.t -> t +val of_block_hash : Block_hash.t -> t diff --git a/src/lib_crypto/context_hash.ml b/src/lib_crypto/context_hash.ml index 6518988c9dd122615f084c7a21e852a32969ab77..10a8d53fad5d5fdba3c375edf7cfcd3a8d16a454 100644 --- a/src/lib_crypto/context_hash.ml +++ b/src/lib_crypto/context_hash.ml @@ -23,12 +23,16 @@ (* *) (*****************************************************************************) -include Blake2B.Make (Base58) (struct - let name = "Context_hash" - let title = "A hash of context" - let b58check_prefix = Base58.Prefix.context_hash - let size = None - end) +include Blake2B.Make + (Base58) + (struct + let name = "Context_hash" -let () = - Base58.check_encoded_prefix b58check_encoding "Co" 52 + let title = "A hash of context" + + let b58check_prefix = Base58.Prefix.context_hash + + let size = None + end) + +let () = Base58.check_encoded_prefix b58check_encoding "Co" 52 diff --git a/src/lib_crypto/crypto_box.ml b/src/lib_crypto/crypto_box.ml index 79caac4218fadf7989f453f4c2feb3189a0b0a81..a05a84ca2dc6abaf08843b955edf7942c94c220a 100644 --- a/src/lib_crypto/crypto_box.ml +++ b/src/lib_crypto/crypto_box.ml @@ -28,18 +28,21 @@ open Hacl type secret_key = secret Box.key + type public_key = public Box.key + type channel_key = Box.combined Box.key + type nonce = Bigstring.t + type target = Z.t module Secretbox = struct include Secretbox - let box_noalloc key nonce msg = - box ~key ~nonce ~msg ~cmsg:msg - let box_open_noalloc key nonce cmsg = - box_open ~key ~nonce ~cmsg ~msg:cmsg + let box_noalloc key nonce msg = box ~key ~nonce ~msg ~cmsg:msg + + let box_open_noalloc key nonce cmsg = box_open ~key ~nonce ~cmsg ~msg:cmsg let box key msg nonce = let msglen = MBytes.length msg in @@ -55,58 +58,69 @@ module Secretbox = struct MBytes.fill msg '\x00' ; MBytes.blit cmsg 0 msg boxzerobytes cmsglen ; match box_open ~key ~nonce ~cmsg:msg ~msg with - | false -> None - | true -> Some (MBytes.sub msg zerobytes (cmsglen - boxzerobytes)) + | false -> + None + | true -> + Some (MBytes.sub msg zerobytes (cmsglen - boxzerobytes)) end -module Public_key_hash = Blake2B.Make (Base58) (struct - let name = "Crypto_box.Public_key_hash" - let title = "A Cryptobox public key ID" - let b58check_prefix = Base58.Prefix.cryptobox_public_key_hash - let size = Some 16 - end) +module Public_key_hash = + Blake2B.Make + (Base58) + (struct + let name = "Crypto_box.Public_key_hash" + + let title = "A Cryptobox public key ID" -let () = - Base58.check_encoded_prefix Public_key_hash.b58check_encoding "id" 30 + let b58check_prefix = Base58.Prefix.cryptobox_public_key_hash -let hash pk = - Public_key_hash.hash_bytes [Box.unsafe_to_bytes pk] + let size = Some 16 + end) + +let () = Base58.check_encoded_prefix Public_key_hash.b58check_encoding "id" 30 + +let hash pk = Public_key_hash.hash_bytes [Box.unsafe_to_bytes pk] let zerobytes = Box.zerobytes + let boxzerobytes = Box.boxzerobytes let random_keypair () = - let pk, sk = Box.keypair () in - sk, pk, hash pk + let (pk, sk) = Box.keypair () in + (sk, pk, hash pk) let zero_nonce = MBytes.make Nonce.bytes '\x00' + let random_nonce = 
Nonce.gen + let increment_nonce = Nonce.increment + let generate_nonce mbytes = let hash = Blake2B.hash_bytes mbytes in - Nonce.of_bytes_exn @@ (Bigstring.sub (Blake2B.to_bytes hash) 0 Nonce.bytes) + Nonce.of_bytes_exn @@ Bigstring.sub (Blake2B.to_bytes hash) 0 Nonce.bytes let init_to_resp_seed = MBytes.of_string "Init -> Resp" -let resp_to_init_seed = MBytes.of_string "Resp -> Init" + +let resp_to_init_seed = MBytes.of_string "Resp -> Init" + let generate_nonces ~incoming ~sent_msg ~recv_msg = - let (init_msg, resp_msg, false) - | (resp_msg, init_msg, true) = (sent_msg, recv_msg, incoming) in + let ((init_msg, resp_msg, false) | (resp_msg, init_msg, true)) = + (sent_msg, recv_msg, incoming) + in let nonce_init_to_resp = - generate_nonce [ init_msg ; resp_msg ; init_to_resp_seed ] in + generate_nonce [init_msg; resp_msg; init_to_resp_seed] + in let nonce_resp_to_init = - generate_nonce [ init_msg ; resp_msg ; resp_to_init_seed ] in - if incoming then - (nonce_init_to_resp, nonce_resp_to_init) - else - (nonce_resp_to_init, nonce_init_to_resp) + generate_nonce [init_msg; resp_msg; resp_to_init_seed] + in + if incoming then (nonce_init_to_resp, nonce_resp_to_init) + else (nonce_resp_to_init, nonce_init_to_resp) let precompute sk pk = Box.dh pk sk -let fast_box_noalloc k nonce msg = - Box.box ~k ~nonce ~msg ~cmsg:msg +let fast_box_noalloc k nonce msg = Box.box ~k ~nonce ~msg ~cmsg:msg -let fast_box_open_noalloc k nonce cmsg = - Box.box_open ~k ~nonce ~cmsg ~msg:cmsg +let fast_box_open_noalloc k nonce cmsg = Box.box_open ~k ~nonce ~cmsg ~msg:cmsg let fast_box k msg nonce = let msglen = MBytes.length msg in @@ -120,8 +134,10 @@ let fast_box_open k cmsg nonce = let cmsglen = MBytes.length cmsg in let msg = MBytes.create cmsglen in match Box.box_open ~k ~nonce ~cmsg ~msg with - | false -> None - | true -> Some (MBytes.sub msg zerobytes (cmsglen - zerobytes)) + | false -> + None + | true -> + Some (MBytes.sub msg zerobytes (cmsglen - zerobytes)) let compare_target hash target = let hash = Z.of_bits (Blake2B.to_string hash) in @@ -129,49 +145,44 @@ let compare_target hash target = let make_target f = if f < 0. || 256. < f then invalid_arg "Cryptobox.target_of_float" ; - let frac, shift = modf f in + let (frac, shift) = modf f in let shift = int_of_float shift in let m = - Z.of_int64 @@ - if frac = 0. then - Int64.(pred (shift_left 1L 54)) - else - Int64.of_float (2. ** (54. -. frac)) + Z.of_int64 + @@ + if frac = 0. then Int64.(pred (shift_left 1L 54)) + else Int64.of_float (2. ** (54. -. frac)) in if shift < 202 then Z.logor (Z.shift_left m (202 - shift)) (Z.pred @@ Z.shift_left Z.one (202 - shift)) - else - Z.shift_right m (shift - 202) + else Z.shift_right m (shift - 202) let default_target = make_target 24. 
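(* Editor's note: an illustrative sketch, not part of this patch, showing how
   the proof-of-work interface reformatted in this hunk is meant to be used.
   It is written against the [Crypto_box] signature from crypto_box.mli in
   this diff; [demo_pow] is a hypothetical name introduced only for the
   example. *)
let demo_pow () =
  (* Fresh key pair; only the public key is involved in the proof of work. *)
  let (_sk, pk, _pkh) = Crypto_box.random_keypair () in
  (* [default_target] is [make_target 24.], i.e. roughly 24 bits of work.
     Without [?max] the nonce search loops until a valid nonce is found. *)
  let nonce =
    Crypto_box.generate_proof_of_work pk Crypto_box.default_target
  in
  assert (Crypto_box.check_proof_of_work pk nonce Crypto_box.default_target) ;
  nonce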
let check_proof_of_work pk nonce target = - let hash = - Blake2B.hash_bytes [ - Box.unsafe_to_bytes pk ; - nonce ; - ] in + let hash = Blake2B.hash_bytes [Box.unsafe_to_bytes pk; nonce] in compare_target hash target let generate_proof_of_work ?max pk target = let may_interupt = match max with - | None -> (fun _ -> ()) - | Some max -> (fun cpt -> if max < cpt then raise Not_found) in + | None -> + fun _ -> () + | Some max -> + fun cpt -> if max < cpt then raise Not_found + in let rec loop nonce cpt = may_interupt cpt ; - if check_proof_of_work pk nonce target then - nonce - else - loop (Nonce.increment nonce) (cpt + 1) in + if check_proof_of_work pk nonce target then nonce + else loop (Nonce.increment nonce) (cpt + 1) + in loop (random_nonce ()) 0 let public_key_to_bigarray pk = let buf = MBytes.create Box.pkbytes in - Box.blit_to_bytes pk buf ; - buf + Box.blit_to_bytes pk buf ; buf let public_key_of_bigarray buf = let pk = MBytes.copy buf in @@ -181,8 +192,7 @@ let public_key_size = Box.pkbytes let secret_key_to_bigarray sk = let buf = MBytes.create Box.skbytes in - Box.blit_to_bytes sk buf ; - buf + Box.blit_to_bytes sk buf ; buf let secret_key_of_bigarray buf = let sk = MBytes.copy buf in @@ -206,10 +216,10 @@ let secret_key_encoding = secret_key_of_bigarray (Fixed.bytes secret_key_size) -let nonce_encoding = - Data_encoding.Fixed.bytes nonce_size +let nonce_encoding = Data_encoding.Fixed.bytes nonce_size + let neuterize : secret_key -> public_key = Box.neuterize + let equal : public_key -> public_key -> bool = Box.equal -let pp_pk ppf pk = - MBytes.pp_hex ppf (public_key_to_bigarray pk) +let pp_pk ppf pk = MBytes.pp_hex ppf (public_key_to_bigarray pk) diff --git a/src/lib_crypto/crypto_box.mli b/src/lib_crypto/crypto_box.mli index a6cf448a8c40a4ebb382488b346fb25a88da46b2..a50c85b6feafb9f37b576607dac04ceb82443618 100644 --- a/src/lib_crypto/crypto_box.mli +++ b/src/lib_crypto/crypto_box.mli @@ -26,10 +26,13 @@ (** Tezos - X25519/XSalsa20-Poly1305 cryptography *) type nonce = Bigstring.t + val nonce_size : int val zero_nonce : nonce + val random_nonce : unit -> nonce + val increment_nonce : ?step:int -> nonce -> nonce (** [generate_nonces ~incoming ~sent_msg ~recv_msg] generates two @@ -48,52 +51,70 @@ module Secretbox : sig val unsafe_of_bytes : MBytes.t -> key val box_noalloc : key -> nonce -> MBytes.t -> unit + val box_open_noalloc : key -> nonce -> MBytes.t -> bool val box : key -> MBytes.t -> nonce -> MBytes.t + val box_open : key -> MBytes.t -> nonce -> MBytes.t option end type target + val default_target : target + val make_target : float -> target type secret_key + type public_key + module Public_key_hash : S.HASH + type channel_key val hash : public_key -> Public_key_hash.t val zerobytes : int + val boxzerobytes : int val random_keypair : unit -> secret_key * public_key * Public_key_hash.t val precompute : secret_key -> public_key -> channel_key -val fast_box : channel_key -> MBytes.t -> nonce -> MBytes.t -val fast_box_open : channel_key -> MBytes.t -> nonce -> MBytes.t option +val fast_box : channel_key -> MBytes.t -> nonce -> MBytes.t + +val fast_box_open : channel_key -> MBytes.t -> nonce -> MBytes.t option val fast_box_noalloc : channel_key -> nonce -> MBytes.t -> unit + val fast_box_open_noalloc : channel_key -> nonce -> MBytes.t -> bool val check_proof_of_work : public_key -> nonce -> target -> bool + val generate_proof_of_work : ?max:int -> public_key -> target -> nonce val public_key_to_bigarray : public_key -> Cstruct.buffer + val public_key_of_bigarray : Cstruct.buffer 
-> public_key + val public_key_size : int val secret_key_to_bigarray : secret_key -> Cstruct.buffer + val secret_key_of_bigarray : Cstruct.buffer -> secret_key + val secret_key_size : int val public_key_encoding : public_key Data_encoding.t + val secret_key_encoding : secret_key Data_encoding.t + val nonce_encoding : nonce Data_encoding.t val neuterize : secret_key -> public_key + val equal : public_key -> public_key -> bool -val pp_pk :Format.formatter -> public_key -> unit +val pp_pk : Format.formatter -> public_key -> unit diff --git a/src/lib_crypto/ed25519.ml b/src/lib_crypto/ed25519.ml index 28639a552ded3310c8334fa1f160f25f6fb4fb73..b57a512833891b00d1bb4e950ff4b9c53882def1 100644 --- a/src/lib_crypto/ed25519.ml +++ b/src/lib_crypto/ed25519.ml @@ -26,29 +26,36 @@ open Error_monad module Public_key_hash = struct - include Blake2B.Make(Base58)(struct - let name = "Ed25519.Public_key_hash" - let title = "An Ed25519 public key hash" - let b58check_prefix = Base58.Prefix.ed25519_public_key_hash - let size = Some 20 - end) + include Blake2B.Make + (Base58) + (struct + let name = "Ed25519.Public_key_hash" + + let title = "An Ed25519 public key hash" + + let b58check_prefix = Base58.Prefix.ed25519_public_key_hash + + let size = Some 20 + end) + module Logging = struct let tag = Tag.def ~doc:title name pp end end -let () = - Base58.check_encoded_prefix Public_key_hash.b58check_encoding "tz1" 36 + +let () = Base58.check_encoded_prefix Public_key_hash.b58check_encoding "tz1" 36 open Hacl module Public_key = struct - type t = public Sign.key let name = "Ed25519.Public_key" + let title = "Ed25519 public key" let to_string s = MBytes.to_string (Sign.unsafe_to_bytes s) + let of_string_opt s = if String.length s < Sign.pkbytes then None else @@ -58,8 +65,7 @@ module Public_key = struct let to_bytes pk = let buf = MBytes.create Sign.pkbytes in - Sign.blit_to_bytes pk buf ; - buf + Sign.blit_to_bytes pk buf ; buf let of_bytes_opt buf = let buflen = MBytes.length buf in @@ -71,74 +77,84 @@ module Public_key = struct let size = Sign.pkbytes - type Base58.data += - | Data of t + type Base58.data += Data of t let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.ed25519_public_key - ~length: size - ~to_raw: to_string - ~of_raw: of_string_opt - ~wrap: (fun x -> Data x) + ~prefix:Base58.Prefix.ed25519_public_key + ~length:size + ~to_raw:to_string + ~of_raw:of_string_opt + ~wrap:(fun x -> Data x) - let () = - Base58.check_encoded_prefix b58check_encoding "edpk" 54 - - let hash v = - Public_key_hash.hash_bytes [ Sign.unsafe_to_bytes v ] - - include Compare.Make(struct - type nonrec t = t - let compare a b = - MBytes.compare (Sign.unsafe_to_bytes a) (Sign.unsafe_to_bytes b) - end) - - include Helpers.MakeRaw(struct - type nonrec t = t - let name = name - let of_bytes_opt = of_bytes_opt - let of_string_opt = of_string_opt - let to_string = to_string - end) - - include Helpers.MakeB58(struct - type nonrec t = t - let name = name - let b58check_encoding = b58check_encoding - end) - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - let open Data_encoding in - conv to_bytes of_bytes_exn (Fixed.bytes size) - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) + let () = Base58.check_encoded_prefix b58check_encoding "edpk" 54 - let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) + let hash 
v = Public_key_hash.hash_bytes [Sign.unsafe_to_bytes v] + + include Compare.Make (struct + type nonrec t = t + + let compare a b = + MBytes.compare (Sign.unsafe_to_bytes a) (Sign.unsafe_to_bytes b) + end) + + include Helpers.MakeRaw (struct + type nonrec t = t + + let name = name + + let of_bytes_opt = of_bytes_opt + + let of_string_opt = of_string_opt + + let to_string = to_string + end) + + include Helpers.MakeB58 (struct + type nonrec t = t + + let name = name + + let b58check_encoding = b58check_encoding + end) + + include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = + let open Data_encoding in + conv to_bytes of_bytes_exn (Fixed.bytes size) + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + end) + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) end module Secret_key = struct - type t = secret Sign.key let name = "Ed25519.Secret_key" + let title = "An Ed25519 secret key" let size = Sign.skbytes let to_bytes sk = let buf = MBytes.create Sign.skbytes in - Sign.blit_to_bytes sk buf ; - buf + Sign.blit_to_bytes sk buf ; buf let of_bytes_opt s = if MBytes.length s > 64 then None @@ -148,98 +164,117 @@ module Secret_key = struct Some (Sign.unsafe_sk_of_bytes sk) let to_string s = MBytes.to_string (to_bytes s) + let of_string_opt s = of_bytes_opt (MBytes.of_string s) let to_public_key = Sign.neuterize - type Base58.data += - | Data of t + type Base58.data += Data of t let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.ed25519_seed - ~length: size - ~to_raw: (fun sk -> MBytes.to_string (Sign.unsafe_to_bytes sk)) - ~of_raw: (fun buf -> - if String.length buf <> Sign.skbytes then None - else Some (Sign.unsafe_sk_of_bytes (MBytes.of_string buf))) - ~wrap: (fun sk -> Data sk) + ~prefix:Base58.Prefix.ed25519_seed + ~length:size + ~to_raw:(fun sk -> MBytes.to_string (Sign.unsafe_to_bytes sk)) + ~of_raw:(fun buf -> + if String.length buf <> Sign.skbytes then None + else Some (Sign.unsafe_sk_of_bytes (MBytes.of_string buf))) + ~wrap:(fun sk -> Data sk) (* Legacy NaCl secret key encoding. Used to store both sk and pk. 
*) let secret_key_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.ed25519_secret_key - ~length: Sign.(skbytes + pkbytes) - ~to_raw: (fun sk -> - let pk = Sign.neuterize sk in - let buf = MBytes.create Sign.(skbytes + pkbytes) in - Sign.blit_to_bytes sk buf ; - Sign.blit_to_bytes pk ~pos:Sign.skbytes buf ; - MBytes.to_string buf) - ~of_raw: (fun buf -> - if String.length buf <> Sign.(skbytes + pkbytes) then None - else - let sk = MBytes.create Sign.skbytes in - MBytes.blit_of_string buf 0 sk 0 Sign.skbytes ; - Some (Sign.unsafe_sk_of_bytes sk)) - ~wrap: (fun x -> Data x) + ~prefix:Base58.Prefix.ed25519_secret_key + ~length:Sign.(skbytes + pkbytes) + ~to_raw:(fun sk -> + let pk = Sign.neuterize sk in + let buf = MBytes.create Sign.(skbytes + pkbytes) in + Sign.blit_to_bytes sk buf ; + Sign.blit_to_bytes pk ~pos:Sign.skbytes buf ; + MBytes.to_string buf) + ~of_raw:(fun buf -> + if String.length buf <> Sign.(skbytes + pkbytes) then None + else + let sk = MBytes.create Sign.skbytes in + MBytes.blit_of_string buf 0 sk 0 Sign.skbytes ; + Some (Sign.unsafe_sk_of_bytes sk)) + ~wrap:(fun x -> Data x) let of_b58check_opt s = match Base58.simple_decode b58check_encoding s with - | Some x -> Some x - | None -> Base58.simple_decode secret_key_encoding s + | Some x -> + Some x + | None -> + Base58.simple_decode secret_key_encoding s + let of_b58check_exn s = match of_b58check_opt s with - | Some x -> x - | None -> Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + | Some x -> + x + | None -> + Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + let of_b58check s = match of_b58check_opt s with - | Some x -> Ok x + | Some x -> + Ok x | None -> - generic_error - "Failed to read a b58check_encoding data (%s): %S" - name s + generic_error "Failed to read a b58check_encoding data (%s): %S" name s let to_b58check s = Base58.simple_encode b58check_encoding s + let to_short_b58check s = String.sub - (to_b58check s) 0 + (to_b58check s) + 0 (10 + String.length (Base58.prefix b58check_encoding)) let () = Base58.check_encoded_prefix b58check_encoding "edsk" 54 ; Base58.check_encoded_prefix secret_key_encoding "edsk" 98 - include Compare.Make(struct - type nonrec t = t - let compare a b = - MBytes.compare (Sign.unsafe_to_bytes a) (Sign.unsafe_to_bytes b) - end) - - include Helpers.MakeRaw(struct - type nonrec t = t - let name = name - let of_bytes_opt = of_bytes_opt - let of_string_opt = of_string_opt - let to_string = to_string - end) - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - let open Data_encoding in - conv to_bytes of_bytes_exn (Fixed.bytes size) - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) + include Compare.Make (struct + type nonrec t = t - let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) + let compare a b = + MBytes.compare (Sign.unsafe_to_bytes a) (Sign.unsafe_to_bytes b) + end) + + include Helpers.MakeRaw (struct + type nonrec t = t + + let name = name + + let of_bytes_opt = of_bytes_opt + + let of_string_opt = of_string_opt + + let to_string = to_string + end) + + include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = + let open Data_encoding in + conv to_bytes of_bytes_exn (Fixed.bytes size) + + let of_b58check = of_b58check + let of_b58check_opt = of_b58check_opt + + 
let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + end) + + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) end type t = MBytes.t @@ -247,58 +282,72 @@ type t = MBytes.t type watermark = MBytes.t let name = "Ed25519" + let title = "An Ed25519 signature" let size = Sign.bytes -let of_bytes_opt s = - if MBytes.length s = size then Some s else None +let of_bytes_opt s = if MBytes.length s = size then Some s else None + let to_bytes x = x let to_string s = MBytes.to_string (to_bytes s) + let of_string_opt s = of_bytes_opt (MBytes.of_string s) -type Base58.data += - | Data of t +type Base58.data += Data of t let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.ed25519_signature - ~length: size - ~to_raw: MBytes.to_string - ~of_raw: (fun s -> Some (MBytes.of_string s)) - ~wrap: (fun x -> Data x) + ~prefix:Base58.Prefix.ed25519_signature + ~length:size + ~to_raw:MBytes.to_string + ~of_raw:(fun s -> Some (MBytes.of_string s)) + ~wrap:(fun x -> Data x) -let () = - Base58.check_encoded_prefix b58check_encoding "edsig" 99 +let () = Base58.check_encoded_prefix b58check_encoding "edsig" 99 -include Helpers.MakeRaw(struct - type nonrec t = t - let name = name - let of_bytes_opt = of_bytes_opt - let of_string_opt = of_string_opt - let to_string = to_string - end) +include Helpers.MakeRaw (struct + type nonrec t = t -include Helpers.MakeB58(struct - type nonrec t = t - let name = name - let b58check_encoding = b58check_encoding - end) + let name = name -include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - let open Data_encoding in - conv to_bytes of_bytes_exn (Fixed.bytes size) - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) + let of_bytes_opt = of_bytes_opt + + let of_string_opt = of_string_opt + + let to_string = to_string +end) + +include Helpers.MakeB58 (struct + type nonrec t = t + + let name = name + + let b58check_encoding = b58check_encoding +end) + +include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = + let open Data_encoding in + conv to_bytes of_bytes_exn (Fixed.bytes size) + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check +end) let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) @@ -306,50 +355,48 @@ let zero = MBytes.make size '\000' let sign ?watermark sk msg = let msg = - Blake2B.to_bytes @@ - Blake2B.hash_bytes @@ - match watermark with - | None -> [msg] - | Some prefix -> [ prefix ; msg ] in + Blake2B.to_bytes @@ Blake2B.hash_bytes + @@ match watermark with None -> [msg] | Some prefix -> [prefix; msg] + in let signature = MBytes.create Sign.bytes in Sign.sign ~sk ~msg ~signature ; signature let check ?watermark pk signature msg = let msg = - Blake2B.to_bytes @@ - Blake2B.hash_bytes @@ - match watermark with - | None -> [msg] - | Some prefix -> [ prefix ; msg ] in + Blake2B.to_bytes @@ Blake2B.hash_bytes + @@ match watermark with None -> [msg] | Some prefix -> [prefix; msg] + in Sign.verify ~pk ~signature ~msg let generate_key ?seed () = match seed with | None -> - let pk, sk = Sign.keypair () in - Public_key.hash pk, pk, sk + let (pk, sk) = Sign.keypair () in + 
(Public_key.hash pk, pk, sk) | Some seed -> let seedlen = MBytes.length seed in if seedlen < Sign.skbytes then - invalid_arg (Printf.sprintf "Ed25519.generate_key: seed must \ - be at least %d bytes long (got %d)" - Sign.skbytes seedlen) ; + invalid_arg + (Printf.sprintf + "Ed25519.generate_key: seed must be at least %d bytes long (got \ + %d)" + Sign.skbytes + seedlen) ; let sk = MBytes.create Sign.skbytes in MBytes.blit seed 0 sk 0 Sign.skbytes ; let sk = Sign.unsafe_sk_of_bytes sk in let pk = Sign.neuterize sk in - Public_key.hash pk, pk, sk - + (Public_key.hash pk, pk, sk) let deterministic_nonce sk msg = - Hash.SHA256.HMAC.digest ~key: (Secret_key.to_bytes sk) ~msg + Hash.SHA256.HMAC.digest ~key:(Secret_key.to_bytes sk) ~msg let deterministic_nonce_hash sk msg = Blake2B.to_bytes (Blake2B.hash_bytes [deterministic_nonce sk msg]) +include Compare.Make (struct + type nonrec t = t -include Compare.Make(struct - type nonrec t = t - let compare = MBytes.compare - end) + let compare = MBytes.compare +end) diff --git a/src/lib_crypto/ed25519.mli b/src/lib_crypto/ed25519.mli index c49d56e744942c2e41be31ba203b9c5d5ceb560e..d9cf2891c11430450742592a5920d329a76f7401 100644 --- a/src/lib_crypto/ed25519.mli +++ b/src/lib_crypto/ed25519.mli @@ -26,4 +26,5 @@ (** Tezos - Ed25519 cryptography *) include S.SIGNATURE with type watermark = MBytes.t + include S.RAW_DATA with type t := t diff --git a/src/lib_crypto/helpers.ml b/src/lib_crypto/helpers.ml index bb61da751f08c8d53618c8e59195afce37662462..a313eaf830a8ee0a1ca2de97ffa8b67fe24db3cc 100644 --- a/src/lib_crypto/helpers.ml +++ b/src/lib_crypto/helpers.ml @@ -25,145 +25,196 @@ open Error_monad -module MakeRaw(H : sig - type t - val name: string - val of_bytes_opt: MBytes.t -> t option - val to_string: t -> string - val of_string_opt: string -> t option - end) = struct +module MakeRaw (H : sig + type t + val name : string + + val of_bytes_opt : MBytes.t -> t option + + val to_string : t -> string + + val of_string_opt : string -> t option +end) = +struct let of_bytes_exn s = match H.of_bytes_opt s with | None -> Format.kasprintf invalid_arg "of_bytes_exn (%s)" H.name - | Some pk -> pk + | Some pk -> + pk + let of_bytes s = match H.of_bytes_opt s with - | None -> generic_error "of_bytes (%s)" H.name - | Some pk -> Ok pk + | None -> + generic_error "of_bytes (%s)" H.name + | Some pk -> + Ok pk let of_string_exn s = match H.of_string_opt s with | None -> Format.kasprintf invalid_arg "of_string_exn (%s)" H.name - | Some pk -> pk + | Some pk -> + pk + let of_string s = match H.of_string_opt s with - | None -> generic_error "of_string (%s)" H.name - | Some pk -> Ok pk + | None -> + generic_error "of_string (%s)" H.name + | Some pk -> + Ok pk let to_hex s = Hex.of_string (H.to_string s) + let of_hex_opt s = H.of_string_opt (Hex.to_string s) + let of_hex_exn s = match H.of_string_opt (Hex.to_string s) with - | Some x -> x + | Some x -> + x | None -> Format.kasprintf invalid_arg "of_hex_exn (%s)" H.name + let of_hex s = match of_hex_opt s with - | None -> generic_error "of_hex (%s)" H.name - | Some pk -> ok pk + | None -> + generic_error "of_hex (%s)" H.name + | Some pk -> + ok pk end -module MakeB58(H : sig - type t - val name: string - val b58check_encoding: t Base58.encoding - end) = struct +module MakeB58 (H : sig + type t + + val name : string + + val b58check_encoding : t Base58.encoding +end) = +struct + let of_b58check_opt s = Base58.simple_decode H.b58check_encoding s - let of_b58check_opt s = - Base58.simple_decode H.b58check_encoding s let 
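The MakeRaw functor above derives an *_exn variant and a result-returning variant from a single optional decoder. A minimal sketch of that pattern, using Stdlib.result in place of the tzresult monad and int_of_string_opt as a stand-in decoder:

let of_int_opt s = int_of_string_opt s

let of_int_exn s =
  match of_int_opt s with
  | None -> invalid_arg (Printf.sprintf "of_int_exn (%S)" s)
  | Some i -> i

let of_int s =
  match of_int_opt s with
  | None -> Error (Printf.sprintf "of_int (%S)" s)
  | Some i -> Ok i

let () = assert (of_int "42" = Ok 42 && of_int_exn "42" = 42)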
of_b58check_exn s = match of_b58check_opt s with - | Some x -> x - | None -> Format.kasprintf Pervasives.failwith "Unexpected data (%s)" H.name + | Some x -> + x + | None -> + Format.kasprintf Pervasives.failwith "Unexpected data (%s)" H.name + let of_b58check s = match of_b58check_opt s with - | Some x -> Ok x + | Some x -> + Ok x | None -> generic_error "Failed to read a b58check_encoding data (%s): %S" - H.name s + H.name + s let to_b58check s = Base58.simple_encode H.b58check_encoding s + let to_short_b58check s = String.sub - (to_b58check s) 0 + (to_b58check s) + 0 (10 + String.length (Base58.prefix H.b58check_encoding)) - end -module MakeEncoder(H : sig - type t - val title: string - val name: string - val to_b58check: t -> string - val to_short_b58check: t -> string - val of_b58check: string -> t tzresult - val of_b58check_exn: string -> t - val of_b58check_opt: string -> t option - val raw_encoding: t Data_encoding.t - end) = struct +module MakeEncoder (H : sig + type t + + val title : string + + val name : string + + val to_b58check : t -> string + + val to_short_b58check : t -> string + + val of_b58check : string -> t tzresult + + val of_b58check_exn : string -> t - let pp ppf t = - Format.pp_print_string ppf (H.to_b58check t) + val of_b58check_opt : string -> t option - let pp_short ppf t = - Format.pp_print_string ppf (H.to_short_b58check t) + val raw_encoding : t Data_encoding.t +end) = +struct + let pp ppf t = Format.pp_print_string ppf (H.to_b58check t) + + let pp_short ppf t = Format.pp_print_string ppf (H.to_short_b58check t) let encoding = let open Data_encoding in splitted - ~binary: - (obj1 (req H.name H.raw_encoding)) + ~binary:(obj1 (req H.name H.raw_encoding)) ~json: - (def H.name - ~title: (H.title ^ " (Base58Check-encoded)") @@ - conv - H.to_b58check - (Data_encoding.Json.wrap_error H.of_b58check_exn) - string) + ( def H.name ~title:(H.title ^ " (Base58Check-encoded)") + @@ conv + H.to_b58check + (Data_encoding.Json.wrap_error H.of_b58check_exn) + string ) let rpc_arg = RPC_arg.make - ~name: H.name - ~descr: (Format.asprintf "%s (Base58Check-encoded)" H.name) - ~destruct: - (fun s -> - match H.of_b58check_opt s with - | None -> - Error (Format.asprintf - "failed to decode Base58Check-encoded data (%s): %S" - H.name s) - | Some v -> Ok v) - ~construct: H.to_b58check + ~name:H.name + ~descr:(Format.asprintf "%s (Base58Check-encoded)" H.name) + ~destruct:(fun s -> + match H.of_b58check_opt s with + | None -> + Error + (Format.asprintf + "failed to decode Base58Check-encoded data (%s): %S" + H.name + s) + | Some v -> + Ok v) + ~construct:H.to_b58check () - let param ?(name=H.name) ?(desc=H.title) t = - Clic.param ~name ~desc - (Clic.parameter (fun _ str -> Lwt.return (H.of_b58check str))) t - + let param ?(name = H.name) ?(desc = H.title) t = + Clic.param + ~name + ~desc + (Clic.parameter (fun _ str -> Lwt.return (H.of_b58check str))) + t end -module MakeIterator(H : sig - type t - val encoding: t Data_encoding.t - val compare: t -> t -> int - val equal: t -> t -> bool - val hash: t -> int - end) = struct +module MakeIterator (H : sig + type t + + val encoding : t Data_encoding.t + + val compare : t -> t -> int + val equal : t -> t -> bool + + val hash : t -> int +end) = +struct module Set = struct - include Set.Make(struct type t = H.t let compare = H.compare end) + include Set.Make (struct + type t = H.t + + let compare = H.compare + end) + exception Found of elt + let random_elt s = let n = Random.int (cardinal s) in try ignore - (fold (fun x i -> if i = n then 
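Both the hand-written Ed25519 to_short_b58check and the MakeB58 version above keep the Base58 prefix plus the next 10 characters. A toy version over plain strings; the "edsk" prefix and 54-character length are just the figures quoted in the check_encoded_prefix calls:

let to_short ~prefix b58 = String.sub b58 0 (10 + String.length prefix)

let () =
  let fake_edsk = "edsk" ^ String.make 50 'X' in  (* 54 chars, like edsk keys *)
  assert (String.length (to_short ~prefix:"edsk" fake_edsk) = 14)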
raise (Found x) ; i+1) s 0 : int) ; + ( fold + (fun x i -> + if i = n then raise (Found x) ; + i + 1) + s + 0 + : int ) ; assert false with Found x -> x + let encoding = Data_encoding.conv elements @@ -172,72 +223,95 @@ module MakeIterator(H : sig end module Table = struct - include Hashtbl.Make(struct - type t = H.t - let hash = H.hash - let equal = H.equal - end) + include Hashtbl.Make (struct + type t = H.t + + let hash = H.hash + + let equal = H.equal + end) + let encoding arg_encoding = Data_encoding.conv (fun h -> fold (fun k v l -> (k, v) :: l) h []) (fun l -> - let h = create (List.length l) in - List.iter (fun (k,v) -> add h k v) l ; - h) + let h = create (List.length l) in + List.iter (fun (k, v) -> add h k v) l ; + h) Data_encoding.(list (tup2 H.encoding arg_encoding)) end module Map = struct - include Map.Make(struct type t = H.t let compare = H.compare end) + include Map.Make (struct + type t = H.t + + let compare = H.compare + end) + let encoding arg_encoding = Data_encoding.conv bindings - (fun l -> List.fold_left (fun m (k,v) -> add k v m) empty l) + (fun l -> List.fold_left (fun m (k, v) -> add k v m) empty l) Data_encoding.(list (tup2 H.encoding arg_encoding)) end module WeakRingTable = struct - include WeakRingTable.Make(struct - type t = H.t - let hash = H.hash - let equal = H.equal - end) + include WeakRingTable.Make (struct + type t = H.t + + let hash = H.hash + + let equal = H.equal + end) + let encoding arg_encoding = Data_encoding.conv (fun h -> fold (fun k v l -> (k, v) :: l) h []) (fun l -> - let h = create (List.length l) in - List.iter (fun (k,v) -> add h k v) l ; - h) + let h = create (List.length l) in + List.iter (fun (k, v) -> add h k v) l ; + h) Data_encoding.(list (tup2 H.encoding arg_encoding)) end +end +module Make (H : sig + type t -end + val title : string -module Make(H : sig - type t - val title: string - val name: string - val b58check_encoding: t Base58.encoding - val raw_encoding: t Data_encoding.t - val compare: t -> t -> int - val equal: t -> t -> bool - val hash: t -> int - end) = struct - - include MakeB58(H) - include MakeEncoder(struct - include H - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - end) - include MakeIterator(struct - include H - let encoding = encoding - end) + val name : string + + val b58check_encoding : t Base58.encoding + + val raw_encoding : t Data_encoding.t + + val compare : t -> t -> int + + val equal : t -> t -> bool + + val hash : t -> int +end) = +struct + include MakeB58 (H) + + include MakeEncoder (struct + include H + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + end) + + include MakeIterator (struct + include H + let encoding = encoding + end) end diff --git a/src/lib_crypto/operation_hash.ml b/src/lib_crypto/operation_hash.ml index b78975389010afde48717e75be6f85c3328d2bc8..5876c557b6fc17b6faaf337542e173f01b27943e 100644 --- a/src/lib_crypto/operation_hash.ml +++ b/src/lib_crypto/operation_hash.ml @@ -23,15 +23,19 @@ (* *) (*****************************************************************************) -include Blake2B.Make (Base58) (struct - let name = "Operation_hash" - let title = "A Tezos operation ID" - let b58check_prefix = Base58.Prefix.operation_hash - let size = None - end) +include Blake2B.Make + (Base58) + (struct 
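MakeIterator's Set.random_elt escapes the fold with a local exception once the randomly chosen index is reached. The same trick, specialised to a stdlib set of ints so it runs on its own (like the original, it assumes the set is non-empty):

module IntSet = Set.Make (Int)

exception Found of int

let random_elt s =
  let n = Random.int (IntSet.cardinal s) in
  try
    ignore
      (IntSet.fold (fun x i -> if i = n then raise (Found x) ; i + 1) s 0
        : int) ;
    assert false
  with Found x -> x

let () =
  let s = IntSet.of_list [3; 14; 15; 92] in
  assert (IntSet.mem (random_elt s) s)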
+ let name = "Operation_hash" -let () = - Base58.check_encoded_prefix b58check_encoding "o" 51 + let title = "A Tezos operation ID" + + let b58check_prefix = Base58.Prefix.operation_hash + + let size = None + end) + +let () = Base58.check_encoded_prefix b58check_encoding "o" 51 module Logging = struct let tag = Tag.def ~doc:title name pp diff --git a/src/lib_crypto/operation_list_hash.ml b/src/lib_crypto/operation_list_hash.ml index f71b7c62fa12f9531c30664c024d2bc37395bcf7..ce644a58363f7862f52c54561e70844175fdc7af 100644 --- a/src/lib_crypto/operation_list_hash.ml +++ b/src/lib_crypto/operation_list_hash.ml @@ -23,12 +23,17 @@ (* *) (*****************************************************************************) -include Blake2B.Make_merkle_tree (Base58) (struct - let name = "Operation_list_hash" - let title = "A list of operations" - let b58check_prefix = Base58.Prefix.operation_list_hash - let size = None - end) (Operation_hash) +include Blake2B.Make_merkle_tree + (Base58) + (struct + let name = "Operation_list_hash" -let () = - Base58.check_encoded_prefix b58check_encoding "Lo" 52 + let title = "A list of operations" + + let b58check_prefix = Base58.Prefix.operation_list_hash + + let size = None + end) + (Operation_hash) + +let () = Base58.check_encoded_prefix b58check_encoding "Lo" 52 diff --git a/src/lib_crypto/operation_list_hash.mli b/src/lib_crypto/operation_list_hash.mli index 46685205f532301f51cccd6c2b28e2cb3225a98d..c2adfe618bcd75069ac3df2f0cf34952d72ea521 100644 --- a/src/lib_crypto/operation_list_hash.mli +++ b/src/lib_crypto/operation_list_hash.mli @@ -24,4 +24,3 @@ (*****************************************************************************) include S.MERKLE_TREE with type elt = Operation_hash.t - diff --git a/src/lib_crypto/operation_list_list_hash.ml b/src/lib_crypto/operation_list_list_hash.ml index 6c2fbbbd514bb1486b55198b196a3e5e7a5ee281..3d64d451dd271785517cf554dc87a764eea363d2 100644 --- a/src/lib_crypto/operation_list_list_hash.ml +++ b/src/lib_crypto/operation_list_list_hash.ml @@ -23,12 +23,17 @@ (* *) (*****************************************************************************) -include Blake2B.Make_merkle_tree (Base58) (struct - let name = "Operation_list_list_hash" - let title = "A list of list of operations" - let b58check_prefix = Base58.Prefix.operation_list_list_hash - let size = None - end) (Operation_list_hash) +include Blake2B.Make_merkle_tree + (Base58) + (struct + let name = "Operation_list_list_hash" -let () = - Base58.check_encoded_prefix b58check_encoding "LLo" 53 ; + let title = "A list of list of operations" + + let b58check_prefix = Base58.Prefix.operation_list_list_hash + + let size = None + end) + (Operation_list_hash) + +let () = Base58.check_encoded_prefix b58check_encoding "LLo" 53 diff --git a/src/lib_crypto/p256.ml b/src/lib_crypto/p256.ml index dd94cfa19eae02cb9d4b3be73fabb9e32997ee3f..5bbefcab5140dc4083db856c59651caf890dc426 100644 --- a/src/lib_crypto/p256.ml +++ b/src/lib_crypto/p256.ml @@ -24,269 +24,309 @@ (*****************************************************************************) module Public_key_hash = struct - include Blake2B.Make(Base58)(struct - let name = "P256.Public_key_hash" - let title = "A P256 public key hash" - let b58check_prefix = Base58.Prefix.p256_public_key_hash - let size = Some 20 - end) + include Blake2B.Make + (Base58) + (struct + let name = "P256.Public_key_hash" + + let title = "A P256 public key hash" + + let b58check_prefix = Base58.Prefix.p256_public_key_hash + + let size = Some 20 + end) module 
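Each hash module above pins down its textual form with Base58.check_encoded_prefix ("o"/51, "Lo"/52, "LLo"/53, ...). A self-contained toy analogue of that sanity check; the encoder used here is purely hypothetical:

let check_encoded_prefix encode sample prefix len =
  let s = encode sample in
  assert (String.length s = len) ;
  assert (String.sub s 0 (String.length prefix) = prefix)

let () =
  (* hypothetical encoder, used only for the illustration *)
  let encode x = "LLo" ^ Printf.sprintf "%050d" x in
  check_encoded_prefix encode 42 "LLo" 53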
Logging = struct let tag = Tag.def ~doc:title name pp end end -let () = - Base58.check_encoded_prefix Public_key_hash.b58check_encoding "tz3" 36 +let () = Base58.check_encoded_prefix Public_key_hash.b58check_encoding "tz3" 36 open Uecc module Public_key = struct - type t = (secp256r1, public) key - let name = "P256.Public_key" + let name = "P256.Public_key" + let title = "A P256 public key" let to_bytes = to_bytes ~compress:true + let of_bytes_opt = pk_of_bytes secp256r1 let to_string s = MBytes.to_string (to_bytes s) + let of_string_opt s = of_bytes_opt (MBytes.of_string s) let size = compressed_size secp256r1 - type Base58.data += - | Data of t + type Base58.data += Data of t let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.p256_public_key - ~length: size - ~to_raw: to_string - ~of_raw: of_string_opt - ~wrap: (fun x -> Data x) - - let () = - Base58.check_encoded_prefix b58check_encoding "p2pk" 55 - - let hash v = - Public_key_hash.hash_bytes [to_bytes v] - - include Compare.Make(struct - type nonrec t = t - let compare a b = - MBytes.compare (to_bytes a) (to_bytes b) - end) - - include Helpers.MakeRaw(struct - type nonrec t = t - let name = name - let of_bytes_opt = of_bytes_opt - let of_string_opt = of_string_opt - let to_string = to_string - end) - - include Helpers.MakeB58(struct - type nonrec t = t - let name = name - let b58check_encoding = b58check_encoding - end) - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - let open Data_encoding in - conv to_bytes of_bytes_exn (Fixed.bytes size) - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) + ~prefix:Base58.Prefix.p256_public_key + ~length:size + ~to_raw:to_string + ~of_raw:of_string_opt + ~wrap:(fun x -> Data x) - let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) + let () = Base58.check_encoded_prefix b58check_encoding "p2pk" 55 + + let hash v = Public_key_hash.hash_bytes [to_bytes v] + + include Compare.Make (struct + type nonrec t = t + + let compare a b = MBytes.compare (to_bytes a) (to_bytes b) + end) + + include Helpers.MakeRaw (struct + type nonrec t = t + + let name = name + + let of_bytes_opt = of_bytes_opt + + let of_string_opt = of_string_opt + + let to_string = to_string + end) + + include Helpers.MakeB58 (struct + type nonrec t = t + + let name = name + + let b58check_encoding = b58check_encoding + end) + + include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + let title = title + + let raw_encoding = + let open Data_encoding in + conv to_bytes of_bytes_exn (Fixed.bytes size) + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + end) + + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) end module Secret_key = struct - type t = (secp256r1, secret) key let name = "P256.Secret_key" + let title = "A P256 secret key" let size = sk_size secp256r1 - let of_bytes_opt buf = - Option.map ~f:fst (sk_of_bytes secp256r1 buf) + let of_bytes_opt buf = Option.map ~f:fst (sk_of_bytes secp256r1 buf) let to_bytes = to_bytes ~compress:true let to_string s = MBytes.to_string (to_bytes s) + let of_string_opt s = of_bytes_opt (MBytes.of_string s) let to_public_key = neuterize - type Base58.data += - | Data of t + type Base58.data += 
Data of t let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.p256_secret_key - ~length: size - ~to_raw: to_string - ~of_raw: of_string_opt - ~wrap: (fun x -> Data x) - - let () = - Base58.check_encoded_prefix b58check_encoding "p2sk" 54 - - include Compare.Make(struct - type nonrec t = t - let compare a b = - MBytes.compare (to_bytes a) (to_bytes b) - end) - - include Helpers.MakeRaw(struct - type nonrec t = t - let name = name - let of_bytes_opt = of_bytes_opt - let of_string_opt = of_string_opt - let to_string = to_string - end) - - include Helpers.MakeB58(struct - type nonrec t = t - let name = name - let b58check_encoding = b58check_encoding - end) - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - let open Data_encoding in - conv to_bytes of_bytes_exn (Fixed.bytes size) - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) - - let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) - -end - -type t = MBytes.t - -type watermark = MBytes.t - -let name = "P256" -let title = "A P256 signature" - -let size = pk_size secp256r1 + ~prefix:Base58.Prefix.p256_secret_key + ~length:size + ~to_raw:to_string + ~of_raw:of_string_opt + ~wrap:(fun x -> Data x) -let of_bytes_opt s = - if MBytes.length s = size then Some s else None + let () = Base58.check_encoded_prefix b58check_encoding "p2sk" 54 -let to_bytes s = s - -let to_string s = MBytes.to_string (to_bytes s) -let of_string_opt s = of_bytes_opt (MBytes.of_string s) - -type Base58.data += - | Data of t - -let b58check_encoding = - Base58.register_encoding - ~prefix: Base58.Prefix.p256_signature - ~length: size - ~to_raw: to_string - ~of_raw: of_string_opt - ~wrap: (fun x -> Data x) + include Compare.Make (struct + type nonrec t = t -let () = - Base58.check_encoded_prefix b58check_encoding "p2sig" 98 + let compare a b = MBytes.compare (to_bytes a) (to_bytes b) + end) -include Helpers.MakeRaw(struct + include Helpers.MakeRaw (struct type nonrec t = t + let name = name + let of_bytes_opt = of_bytes_opt + let of_string_opt = of_string_opt + let to_string = to_string end) -include Helpers.MakeB58(struct + include Helpers.MakeB58 (struct type nonrec t = t + let name = name + let b58check_encoding = b58check_encoding end) -include Helpers.MakeEncoder(struct + include Helpers.MakeEncoder (struct type nonrec t = t + let name = name + let title = title + let raw_encoding = let open Data_encoding in conv to_bytes of_bytes_exn (Fixed.bytes size) + let of_b58check = of_b58check + let of_b58check_opt = of_b58check_opt + let of_b58check_exn = of_b58check_exn + let to_b58check = to_b58check + let to_short_b58check = to_short_b58check end) + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) +end + +type t = MBytes.t + +type watermark = MBytes.t + +let name = "P256" + +let title = "A P256 signature" + +let size = pk_size secp256r1 + +let of_bytes_opt s = if MBytes.length s = size then Some s else None + +let to_bytes s = s + +let to_string s = MBytes.to_string (to_bytes s) + +let of_string_opt s = of_bytes_opt (MBytes.of_string s) + +type Base58.data += Data of t + +let b58check_encoding = + Base58.register_encoding + ~prefix:Base58.Prefix.p256_signature + ~length:size + ~to_raw:to_string + ~of_raw:of_string_opt + ~wrap:(fun x -> Data x) + +let () = Base58.check_encoded_prefix b58check_encoding "p2sig" 98 + +include 
Helpers.MakeRaw (struct + type nonrec t = t + + let name = name + + let of_bytes_opt = of_bytes_opt + + let of_string_opt = of_string_opt + + let to_string = to_string +end) + +include Helpers.MakeB58 (struct + type nonrec t = t + + let name = name + + let b58check_encoding = b58check_encoding +end) + +include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = + let open Data_encoding in + conv to_bytes of_bytes_exn (Fixed.bytes size) + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check +end) + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) let zero = of_bytes_exn (MBytes.make size '\000') let sign ?watermark sk msg = let msg = - Blake2B.to_bytes @@ - Blake2B.hash_bytes @@ - match watermark with - | None -> [msg] - | Some prefix -> [ prefix ; msg ] in + Blake2B.to_bytes @@ Blake2B.hash_bytes + @@ match watermark with None -> [msg] | Some prefix -> [prefix; msg] + in match sign sk msg with | None -> (* Will never happen in practice. This can only happen in case of RNG error. *) invalid_arg "P256.sign: internal error" - | Some signature -> signature + | Some signature -> + signature let check ?watermark public_key signature msg = let msg = - Blake2B.to_bytes @@ - Blake2B.hash_bytes @@ - match watermark with - | None -> [msg] - | Some prefix -> [ prefix ; msg ] in + Blake2B.to_bytes @@ Blake2B.hash_bytes + @@ match watermark with None -> [msg] | Some prefix -> [prefix; msg] + in verify public_key ~msg ~signature -let generate_key ?(seed=Rand.generate 32) () = +let generate_key ?(seed = Rand.generate 32) () = let seedlen = MBytes.length seed in if seedlen < 32 then - invalid_arg (Printf.sprintf "P256.generate_key: seed must be at \ - least 32 bytes long (was %d)" seedlen) ; + invalid_arg + (Printf.sprintf + "P256.generate_key: seed must be at least 32 bytes long (was %d)" + seedlen) ; match sk_of_bytes secp256r1 seed with - | None -> invalid_arg "P256.generate_key: invalid seed (very rare!)" + | None -> + invalid_arg "P256.generate_key: invalid seed (very rare!)" | Some (sk, pk) -> let pkh = Public_key.hash pk in - pkh, pk, sk + (pkh, pk, sk) let deterministic_nonce sk msg = - Hacl.Hash.SHA256.HMAC.digest ~key: (Secret_key.to_bytes sk) ~msg + Hacl.Hash.SHA256.HMAC.digest ~key:(Secret_key.to_bytes sk) ~msg let deterministic_nonce_hash sk msg = Blake2B.to_bytes (Blake2B.hash_bytes [deterministic_nonce sk msg]) +include Compare.Make (struct + type nonrec t = t -include Compare.Make(struct - type nonrec t = t - let compare = MBytes.compare - end) + let compare = MBytes.compare +end) diff --git a/src/lib_crypto/p256.mli b/src/lib_crypto/p256.mli index b616d1543e7b70cfe2c01d536d32ceb13b703573..2d30817f7a56286b9490d50d318d9d92d4074c85 100644 --- a/src/lib_crypto/p256.mli +++ b/src/lib_crypto/p256.mli @@ -26,4 +26,5 @@ (** Tezos - P256 cryptography *) include S.SIGNATURE with type watermark = MBytes.t + include S.RAW_DATA with type t := t diff --git a/src/lib_crypto/protocol_hash.ml b/src/lib_crypto/protocol_hash.ml index c1091e7327cb4af9925bf9121b3688074747c487..d63a6773df25e293211649d1dab5d559f514c25c 100644 --- a/src/lib_crypto/protocol_hash.ml +++ b/src/lib_crypto/protocol_hash.ml @@ -23,15 +23,19 @@ (* *) (*****************************************************************************) -include Blake2B.Make (Base58) (struct - let name = "Protocol_hash" - let title = "A Tezos protocol ID" 
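The sign and check functions above never hash the raw message: they hash the watermark followed by the message with Blake2B, so the same bytes signed under different watermarks cannot be replayed across contexts. A stdlib-only illustration of that convention, with Digest (MD5) standing in for Blake2B:

let digest_for_signing ?watermark msg =
  (* the signed payload is hash (watermark || msg) *)
  let payload =
    match watermark with None -> msg | Some prefix -> prefix ^ msg
  in
  Digest.string payload

let () =
  let m = "transfer 1 tez" in
  assert (
    digest_for_signing ~watermark:"block" m
    <> digest_for_signing ~watermark:"endorsement" m)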
- let b58check_prefix = Base58.Prefix.protocol_hash - let size = None - end) +include Blake2B.Make + (Base58) + (struct + let name = "Protocol_hash" -let () = - Base58.check_encoded_prefix b58check_encoding "P" 51 + let title = "A Tezos protocol ID" + + let b58check_prefix = Base58.Prefix.protocol_hash + + let size = None + end) + +let () = Base58.check_encoded_prefix b58check_encoding "P" 51 module Logging = struct let tag = Tag.def ~doc:title name pp diff --git a/src/lib_crypto/pvss.ml b/src/lib_crypto/pvss.ml index 44e5d4567a15218a5a3ffeca6e34f4e9d8255307..05506de7634b494b4cbc284111071a5a7a8e9764 100644 --- a/src/lib_crypto/pvss.ml +++ b/src/lib_crypto/pvss.ml @@ -26,45 +26,60 @@ module H = Blake2B (** Polynomial ring (ℤ/qℤ)[X] *) -module PZ_q (Z_q : Znz.ZN) : sig +module PZ_q (Z_q : Znz.ZN) : sig type t - module Z_q : Znz.ZN + + module Z_q : Znz.ZN (** Evaluates the polynomial p at point x *) - val eval : p:t -> x:Z_q.t -> Z_q.t + val eval : p:t -> x:Z_q.t -> Z_q.t (** Builds the polynomial from a list of coefficient, ordered by power. That is, of_list [a₀; a₁; a₂; …] = a₀ + a₁ x + a₂ x² + … *) val of_list : Z_q.t list -> t - -end with type Z_q.t = Z_q.t = struct +end +with type Z_q.t = Z_q.t = struct module Z_q = Z_q - type t = Z_q.t list - let eval ~p ~x = List.fold_right (fun c y -> Z_q.(y * x + c)) p Z_q.zero + + type t = Z_q.t list + + let eval ~p ~x = List.fold_right (fun c y -> Z_q.((y * x) + c)) p Z_q.zero + let of_list l = l end (** Functor type for an Cyclic group *) module type CYCLIC_GROUP = sig type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t - val name : string - module Z_m : Znz.ZN - val e : t - val g1 : t - val g2 : t - val ( * ) : t -> t -> t - val (=) : t -> t -> bool - val pow : t -> Z_m.t -> t - val to_bits : t -> String.t - val of_bits : String.t -> t option + + val name : string + + module Z_m : Znz.ZN + + val e : t + + val g1 : t + + val g2 : t + + val ( * ) : t -> t -> t + + val ( = ) : t -> t -> bool + + val pow : t -> Z_m.t -> t + + val to_bits : t -> String.t + + val of_bits : String.t -> t option end (** Type of a module that handles proofs for the discrete logarithm equality equation. *) module type DLEQ = sig - (** A DLEQ equation. *) type equation @@ -74,14 +89,12 @@ module type DLEQ = sig val proof_encoding : proof Data_encoding.t - (** Group element. *) type element (** Exponent, i.e. an integer modulo the group's order. *) type exponent - (** Sets up a equation of the form ∀ i, ∃ x(i), b₁ˣ⁽ⁱ⁾ = h₁ᵢ and b₂ᵢˣ⁽ⁱ⁾ = h₂ᵢ. 
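PZ_q.eval above is Horner's rule written as a right fold. The same fold specialised to plain integers, so the coefficient order of of_list [a₀; a₁; a₂] is easy to check by hand:

let eval ~p ~x = List.fold_right (fun c y -> (y * x) + c) p 0

let () =
  (* p = 3 + 2x + x², so p(5) = 3 + 10 + 25 = 38 *)
  assert (eval ~p:[3; 2; 1] ~x:5 = 38)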
The arguments are given as b₁, h₁ᵢ, b₂ᵢ, h₂ᵢ *) @@ -89,7 +102,7 @@ module type DLEQ = sig element -> element list -> element list -> element list -> equation (** Creates a zero-knowledge proof of knowledge of the exponent list *) - val make_proof : equation -> exponent list -> proof + val make_proof : equation -> exponent list -> proof (** Checkes the proof created by make_proof for a given equation *) val check_proof : equation -> proof -> bool @@ -98,57 +111,74 @@ end (** Functor for creating a module handling proofs for the discrete logarithm equality in cyclic group G *) module MakeDleq (G : CYCLIC_GROUP) : - DLEQ with type element = G.t and type exponent = G.Z_m.t = -struct - + DLEQ with type element = G.t and type exponent = G.Z_m.t = struct type element = G.t + type exponent = G.Z_m.t - type equation = element * (element list) * (element list) * (element list) - type proof = exponent * (exponent list) - let proof_encoding = Data_encoding.( - tup2 G.Z_m.encoding (list G.Z_m.encoding)) + type equation = element * element list * element list * element list + + type proof = exponent * exponent list + + let proof_encoding = + Data_encoding.(tup2 G.Z_m.encoding (list G.Z_m.encoding)) (* Fiat-Shamir heuristic to derive a random element of ℤ/mℤ from the hash of a list of group elements *) - let fiat_shamir ?(exponents=[]) elements = - String.concat "||" ( - "tezosftw" :: (List.map G.to_bits elements) @ (List.map G.Z_m.to_bits exponents) - ) |> (fun x -> H.hash_string [x]) |> H.to_string |> G.Z_m.of_bits_exn + let fiat_shamir ?(exponents = []) elements = + String.concat + "||" + ( ("tezosftw" :: List.map G.to_bits elements) + @ List.map G.Z_m.to_bits exponents ) + |> (fun x -> H.hash_string [x]) + |> H.to_string |> G.Z_m.of_bits_exn let setup_equation b1 h1_n b2_n h2_n = (b1, h1_n, b2_n, h2_n) + let make_proof (b1, h1_n, b2_n, h2_n) x_n = (* First, draw blinding factors. Normally these should be picked randomly. To maximize reproducibility and avoid weak random number generation, we generate the blinding factor deterministically from the problem parameters and the secret x_n. TODO: review with cryptographer *) - let - pseudo_seed = fiat_shamir (b1::(List.concat [h1_n; b2_n; h2_n])) ~exponents:x_n in - let - w_n = List.mapi (fun i __ -> fiat_shamir [] ~exponents:[pseudo_seed; G.Z_m.of_int i]) h1_n in let - a1_n = List.map (G.pow b1) w_n and - a2_n = List.map2 G.pow b2_n w_n in let - (* Pick the challenge, c, following the Fiat-Shamir heuristic. *) - c = fiat_shamir (List.concat [h1_n; h2_n; a1_n; a2_n]) in let - (* rᵢ = wᵢ - c * xᵢ *) - r_n = List.map2 (fun w x -> G.Z_m.(w - c * x)) w_n x_n in + let pseudo_seed = + fiat_shamir (b1 :: List.concat [h1_n; b2_n; h2_n]) ~exponents:x_n + in + let w_n = + List.mapi + (fun i __ -> fiat_shamir [] ~exponents:[pseudo_seed; G.Z_m.of_int i]) + h1_n + in + let a1_n = List.map (G.pow b1) w_n and a2_n = List.map2 G.pow b2_n w_n in + let (* Pick the challenge, c, following the Fiat-Shamir heuristic. *) + c = + fiat_shamir (List.concat [h1_n; h2_n; a1_n; a2_n]) + in + let (* rᵢ = wᵢ - c * xᵢ *) + r_n = + List.map2 (fun w x -> G.Z_m.(w - (c * x))) w_n x_n + in (c, r_n) let check_proof (b1, h1_n, b2_n, h2_n) (c, r_n) = (* First check that the lists have the same sizes. 
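The proof produced by make_proof verifies because b^r * h^c = b^(w - c*x) * b^(x*c) = b^w. A numeric sketch of that identity in the toy group (ℤ/107ℤ)*, whose order is 106; every constant below is an arbitrary illustrative value, and the challenge is fixed instead of being derived by Fiat-Shamir:

let p = 107
let q = p - 1 (* order of the toy group (Z/107Z)* *)
let posmod a m = ((a mod m) + m) mod m
let rec pow_mod b e = if e = 0 then 1 else b * pow_mod b (e - 1) mod p

let () =
  let b = 2 in
  let x = 23 in                  (* secret exponent *)
  let h = pow_mod b x in         (* public value h = b^x *)
  let w = 71 in                  (* blinding factor *)
  let a = pow_mod b w in         (* prover's commitment a = b^w *)
  let c = 39 in                  (* challenge *)
  let r = posmod (w - (c * x)) q in
  (* the verifier recomputes the commitment from (c, r) alone *)
  assert (pow_mod b r * pow_mod h c mod p = a)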
*) - let same_sizes = List.( - Compare.Int.((length h1_n) = (length b2_n) && (length b2_n) = (length h2_n) && - (length h2_n) = (length r_n))) in - + let same_sizes = + List.( + Compare.Int.( + length h1_n = length b2_n + && length b2_n = length h2_n + && length h2_n = length r_n)) + in if not same_sizes then false else - let - a1_n = List.map2 G.( * ) + let a1_n = + List.map2 + G.( * ) (List.map (G.pow b1) r_n) (List.map (fun h1 -> G.pow h1 c) h1_n) - and - a2_n = List.map2 G.( * ) + and a2_n = + List.map2 + G.( * ) (List.map2 G.pow b2_n r_n) (List.map (fun h2 -> G.pow h2 c) h2_n) in @@ -156,20 +186,25 @@ struct end module type PVSS = sig - module type ENCODED = sig type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t end module Commitment : ENCODED + module Encrypted_share : ENCODED + module Clear_share : ENCODED module Public_key : ENCODED + module Secret_key : sig include ENCODED + val to_public_key : t -> Public_key.t end @@ -177,33 +212,47 @@ module type PVSS = sig val proof_encoding : proof Data_encoding.t - val dealer_shares_and_proof: - secret:Secret_key.t -> t:int -> public_keys:Public_key.t list -> - (Encrypted_share.t list * Commitment.t list * proof) - - val check_dealer_proof: - Encrypted_share.t list -> Commitment.t list -> proof:proof -> - public_keys:Public_key.t list -> bool - - val reveal_share : Encrypted_share.t -> secret_key:Secret_key.t - -> public_key:Public_key.t -> Clear_share.t * proof - - val check_revealed_share: - Encrypted_share.t -> Clear_share.t -> public_key:Public_key.t -> proof - -> bool - val reconstruct: Clear_share.t list -> int list -> Public_key.t + val dealer_shares_and_proof : + secret:Secret_key.t -> + t:int -> + public_keys:Public_key.t list -> + Encrypted_share.t list * Commitment.t list * proof + + val check_dealer_proof : + Encrypted_share.t list -> + Commitment.t list -> + proof:proof -> + public_keys:Public_key.t list -> + bool + + val reveal_share : + Encrypted_share.t -> + secret_key:Secret_key.t -> + public_key:Public_key.t -> + Clear_share.t * proof + + val check_revealed_share : + Encrypted_share.t -> + Clear_share.t -> + public_key:Public_key.t -> + proof -> + bool + + val reconstruct : Clear_share.t list -> int list -> Public_key.t end module MakePvss (G : CYCLIC_GROUP) : PVSS = struct - module type ENCODED = sig type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t end (* Module to make discrete logarithm equality proofs *) module Dleq = MakeDleq (G) + type proof = Dleq.proof (* Polynomials over ℤ/mℤ *) @@ -214,6 +263,7 @@ module MakePvss (G : CYCLIC_GROUP) : PVSS = struct module Secret_key = struct include G.Z_m + let to_public_key x = G.(pow g2 x) end @@ -227,69 +277,78 @@ module MakePvss (G : CYCLIC_GROUP) : PVSS = struct let random_polynomial secret t = (* the t-1 coefficients are computed deterministically from the secret and mapped to G.Z_m *) - - let nonce = [String.concat "||" [G.Z_m.to_bits secret]] - |> H.hash_string |> H.to_string in - + let nonce = + [String.concat "||" [G.Z_m.to_bits secret]] + |> H.hash_string |> H.to_string + in (* TODO: guard against buffer overlow *) let rec make_coefs = function - | 0 -> [] - | k -> let h = - ( H.hash_string [string_of_int k; "||"; nonce]) - |> H.to_string |> G.Z_m.of_bits_exn in - h :: make_coefs (k-1) in - let coefs = secret :: (make_coefs (t-1)) in - + | 0 -> + [] + | k -> + let h = + H.hash_string [string_of_int k; "||"; nonce] + |> H.to_string |> G.Z_m.of_bits_exn + in + h :: make_coefs (k - 1) + in + let coefs = secret 
:: make_coefs (t - 1) in (* let coefs = secret :: List_Utils.list_init ~f:G.Z_m.random ~n:(t-1) in *) - let poly = PZ_m.of_list coefs - in (coefs, poly) + let poly = PZ_m.of_list coefs in + (coefs, poly) (* Hides secret s in a random polynomial of degree t, publishes t commitments to the polynomial coefficients and n encrypted shares for the holders of the public keys *) let dealer_shares_and_proof ~secret ~t ~public_keys = - let coefs, poly = random_polynomial secret t in + let (coefs, poly) = random_polynomial secret t in let - (* Cⱼ represents the commitment to the coefficients of the polynomial + (* Cⱼ represents the commitment to the coefficients of the polynomial Cⱼ = g₁^(aⱼ) for j in 0 to t-1 *) - - cC_j = List.map G.(pow g1) coefs and - - (* pᵢ = p(i) for i in 1…n, with i ∈ ℤ/mℤ: points of the polynomial. *) - p_i = List.mapi (fun i _ -> - PZ_m.eval ~p:poly ~x:(i+1 |> G.Z_m.of_int)) public_keys in let - - (* yᵢ = pkᵢᵖ⁽ⁱ⁾ for i ∈ 1…n: the value of p(i) encrypted with pkᵢ, + cC_j = + List.map G.(pow g1) coefs + and + (* pᵢ = p(i) for i in 1…n, with i ∈ ℤ/mℤ: points of the polynomial. *) + p_i = + List.mapi + (fun i _ -> PZ_m.eval ~p:poly ~x:(i + 1 |> G.Z_m.of_int)) + public_keys + in + let + (* yᵢ = pkᵢᵖ⁽ⁱ⁾ for i ∈ 1…n: the value of p(i) encrypted with pkᵢ, the public key of the party receiving the iᵗʰ party. The public keys use the g₂ generator of G. Thus pkᵢ = g₂ˢᵏⁱ *) - y_i = List.map2 G.pow public_keys p_i and - - (* xᵢ = g₁ᵖ⁽ⁱ⁾ for in in 1…n: commitment to polynomial points *) - x_i = List.map G.(pow g1) p_i in let - - equation = Dleq.setup_equation G.g1 x_i public_keys y_i in let - proof = Dleq.make_proof equation p_i - in (y_i, cC_j, proof) + y_i = + List.map2 G.pow public_keys p_i + and + (* xᵢ = g₁ᵖ⁽ⁱ⁾ for in in 1…n: commitment to polynomial points *) + x_i = + List.map G.(pow g1) p_i + in + let equation = Dleq.setup_equation G.g1 x_i public_keys y_i in + let proof = Dleq.make_proof equation p_i in + (y_i, cC_j, proof) let check_dealer_proof y_i cC_j ~proof ~public_keys = - (* Reconstruct Xᵢ from Cⱼ *) let x_i = (* prod_C_j_to_the__i_to_the_j = i ↦ Πⱼ₌₀ᵗ⁻¹ Cⱼ^(iʲ) *) let prod_C_j_to_the__i_to_the_j i = - List.mapi (fun j cC ->G.pow cC (G.Z_m.pow i (Z.of_int j))) - cC_j |> (List.fold_left G.( * ) G.e) + List.mapi (fun j cC -> G.pow cC (G.Z_m.pow i (Z.of_int j))) cC_j + |> List.fold_left G.( * ) G.e in - List.mapi (fun i _ -> - prod_C_j_to_the__i_to_the_j (i+1 |> G.Z_m.of_int)) y_i - in let - equation = Dleq.setup_equation G.g1 x_i public_keys y_i in + List.mapi + (fun i _ -> prod_C_j_to_the__i_to_the_j (i + 1 |> G.Z_m.of_int)) + y_i + in + let equation = Dleq.setup_equation G.g1 x_i public_keys y_i in Dleq.check_proof equation proof (* reveal a share *) - let reveal_share y ~secret_key ~public_key = + let reveal_share y ~secret_key ~public_key = match G.Z_m.inv secret_key with - | None -> failwith "Invalid secret key" + | None -> + failwith "Invalid secret key" | Some inverse_key -> let reveal = G.(pow y inverse_key) in (* y = g₂^(private_key) and public_key = reveal^(private_key) *) @@ -305,17 +364,22 @@ module MakePvss (G : CYCLIC_GROUP) : PVSS = struct (* reconstruct the secret *) let reconstruct reveals int_indices = (* check that there enough reveals *) - let indices = List.map (fun x -> G.Z_m.of_int (1+x)) int_indices in + let indices = List.map (fun x -> G.Z_m.of_int (1 + x)) int_indices in let lagrange i = - List.fold_left G.Z_m.( * ) G.Z_m.one ( - List.map ( - fun j -> - if G.Z_m.(j = i) then G.Z_m.one else - match G.Z_m.(inv (j - i)) with - | None -> failwith 
"Unexpected error inverting scalar." - | Some inverse -> G.Z_m.(j * inverse) - ) indices) - in let lagrange = List.map lagrange indices in + List.fold_left + G.Z_m.( * ) + G.Z_m.one + (List.map + (fun j -> + if G.Z_m.(j = i) then G.Z_m.one + else + match G.Z_m.(inv (j - i)) with + | None -> + failwith "Unexpected error inverting scalar." + | Some inverse -> + G.Z_m.(j * inverse)) + indices) + in + let lagrange = List.map lagrange indices in List.fold_left G.( * ) G.e (List.map2 G.pow reveals lagrange) - end diff --git a/src/lib_crypto/pvss.mli b/src/lib_crypto/pvss.mli index 808f641fe88fcecf5b41e291fbae0b7cea820f79..4ebc7f2bb3a62c10324e39807313181c965a8c1c 100644 --- a/src/lib_crypto/pvss.mli +++ b/src/lib_crypto/pvss.mli @@ -39,74 +39,98 @@ *) module type CYCLIC_GROUP = sig - type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t - val name : string - module Z_m : Znz.ZN - val e : t - val g1 : t - val g2 : t - val ( * ) : t -> t -> t - val (=) : t -> t -> bool - val pow : t -> Z_m.t -> t + val name : string + + module Z_m : Znz.ZN + + val e : t + + val g1 : t + + val g2 : t + + val ( * ) : t -> t -> t + + val ( = ) : t -> t -> bool + + val pow : t -> Z_m.t -> t (** Binary representation *) - val to_bits : t -> String.t - val of_bits : String.t -> t option + val to_bits : t -> String.t + val of_bits : String.t -> t option end (** PVSS construction, based on a cyclic group G of prime order *) module type PVSS = sig - module type ENCODED = sig type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t end module Commitment : ENCODED + module Encrypted_share : ENCODED + module Clear_share : ENCODED module Public_key : ENCODED + module Secret_key : sig include ENCODED + val to_public_key : t -> Public_key.t end type proof + val proof_encoding : proof Data_encoding.t - val dealer_shares_and_proof: - secret:Secret_key.t -> t:int -> public_keys:Public_key.t list -> - (Encrypted_share.t list * Commitment.t list * proof) (** Lets a dealer share a secret with a set of participant by breaking it into pieces, encrypting it with the participant's public keys, and publishing these encrypted shares. Any t participants can reconstruct the secret. A zero-knowledge proof is produced showing that the dealer correctly followed the protocol, making the protocol publicly verifiable. *) + val dealer_shares_and_proof : + secret:Secret_key.t -> + t:int -> + public_keys:Public_key.t list -> + Encrypted_share.t list * Commitment.t list * proof - val check_dealer_proof: - Encrypted_share.t list -> Commitment.t list -> proof:proof -> - public_keys:Public_key.t list -> bool (** Checks the proof produced by the dealer, given the encrypted shares, the commitment list, the proof, and the participant's public keys. *) + val check_dealer_proof : + Encrypted_share.t list -> + Commitment.t list -> + proof:proof -> + public_keys:Public_key.t list -> + bool - val reveal_share : Encrypted_share.t -> secret_key:Secret_key.t - -> public_key:Public_key.t -> Clear_share.t * proof (** Lets a participant provably decrypt an encrypted share. *) + val reveal_share : + Encrypted_share.t -> + secret_key:Secret_key.t -> + public_key:Public_key.t -> + Clear_share.t * proof - val check_revealed_share: - Encrypted_share.t -> Clear_share.t -> public_key:Public_key.t -> proof - -> bool (** Checks that the participant honestly decrypted its share. 
*) - - val reconstruct: Clear_share.t list -> int list -> Public_key.t - + val check_revealed_share : + Encrypted_share.t -> + Clear_share.t -> + public_key:Public_key.t -> + proof -> + bool + + val reconstruct : Clear_share.t list -> int list -> Public_key.t end -module MakePvss : functor (G: CYCLIC_GROUP) -> PVSS +module MakePvss (G : CYCLIC_GROUP) : PVSS diff --git a/src/lib_crypto/pvss_secp256k1.ml b/src/lib_crypto/pvss_secp256k1.ml index 5aa4d4c843c858a2e62864316dba9e45951c1f40..e79f435d1a3ccfc410c7b7143522664c774b6042 100644 --- a/src/lib_crypto/pvss_secp256k1.ml +++ b/src/lib_crypto/pvss_secp256k1.ml @@ -26,18 +26,24 @@ open Secp256k1_group module G : Pvss.CYCLIC_GROUP = struct - module Z_m = struct include Group.Scalar + let n = Group.order - let ( + ) = Group.Scalar.add - let ( * ) = Group.Scalar.mul - let ( - ) = Group.Scalar.sub - let ( = ) = Group.Scalar.equal - let inv = Group.Scalar.inverse + + let ( + ) = Group.Scalar.add + + let ( * ) = Group.Scalar.mul + + let ( - ) = Group.Scalar.sub + + let ( = ) = Group.Scalar.equal + + let inv = Group.Scalar.inverse end include Group + let name = "secp256k1" (* This pvss algorithm assumes the public keys of the participants receiving @@ -45,18 +51,16 @@ module G : Pvss.CYCLIC_GROUP = struct public keys. *) let g1 = Group.h + let g2 = Group.g (* We use a multiplicative notation in the pvss module, but secp256k1 usually uses an additive notation. *) let ( * ) = Group.(( + )) - let pow x n = Group.mul n x - let of_bits b = - try - Some (Group.of_bits_exn b) - with _ -> None + let pow x n = Group.mul n x + let of_bits b = try Some (Group.of_bits_exn b) with _ -> None end include Pvss.MakePvss (G) diff --git a/src/lib_crypto/rand.ml b/src/lib_crypto/rand.ml index ba6c1f5dd6f550b47b52917dde73087897022937..b93cfe892d4b1a874b233a9910bdf4f0ce17a645 100644 --- a/src/lib_crypto/rand.ml +++ b/src/lib_crypto/rand.ml @@ -25,13 +25,14 @@ let generate = Hacl.Rand.gen -let generate_into ?(pos=0) ?len buf = +let generate_into ?(pos = 0) ?len buf = let buflen = MBytes.length buf in - let len = match len with - | Some len -> len - | None -> buflen - pos in + let len = match len with Some len -> len | None -> buflen - pos in if pos < 0 || len < 0 || pos + len > buflen then - invalid_arg (Printf.sprintf "Rand.generate_into: \ - invalid slice (pos=%d len=%d)" pos len) ; + invalid_arg + (Printf.sprintf + "Rand.generate_into: invalid slice (pos=%d len=%d)" + pos + len) ; let buf = MBytes.sub buf pos len in Hacl.Rand.write buf diff --git a/src/lib_crypto/rand.mli b/src/lib_crypto/rand.mli index f78f0dac6c0873e0b043d4266727a912cb767efb..530a782f87eb5e6d271b475a21c1c11894ea3715 100644 --- a/src/lib_crypto/rand.mli +++ b/src/lib_crypto/rand.mli @@ -23,10 +23,10 @@ (* *) (*****************************************************************************) -val generate : int -> Cstruct.buffer (** [generate len] is [len] random bytes. *) +val generate : int -> Cstruct.buffer -val generate_into : ?pos:int -> ?len:int -> Cstruct.buffer -> unit (** [generate_into ?pos ?len buf] writes [len] (default: [MBytes.length buf]) bytes in [buf] starting at [pos] (default: [0]). *) +val generate_into : ?pos:int -> ?len:int -> Cstruct.buffer -> unit diff --git a/src/lib_crypto/s.ml b/src/lib_crypto/s.ml index 9e556138cdef3b66c16592872afd315a9999c404..6a17ab203f1e266999a13fc0ebe53658cbf35d0c 100644 --- a/src/lib_crypto/s.ml +++ b/src/lib_crypto/s.ml @@ -34,177 +34,206 @@ open Error_monad or in memory sets and maps. 
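pvss_secp256k1.ml bridges notations by letting the PVSS functor's multiplicative ( * ) and pow be the curve's point addition and scalar multiplication. A tiny stdlib-only analogue where the "group" is just ℤ/13ℤ under addition:

module Additive = struct
  let m = 13
  let ( + ) a b = (a + b) mod m        (* stands in for point addition *)
  let mul n x = n * x mod m            (* stands in for scalar multiplication *)
end

(* multiplicative facade, as in module G above *)
let ( * ) = Additive.( + )
let pow x n = Additive.mul n x

let () =
  let g = 5 in
  assert (g * g = pow g 2)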
*) module type MINIMAL_HASH = sig - type t - val name: string - val title: string + val name : string + + val title : string + + val pp : Format.formatter -> t -> unit - val pp: Format.formatter -> t -> unit - val pp_short: Format.formatter -> t -> unit + val pp_short : Format.formatter -> t -> unit include Compare.S with type t := t - val hash_bytes: ?key:MBytes.t -> MBytes.t list -> t - val hash_string: ?key:string -> string list -> t + val hash_bytes : ?key:MBytes.t -> MBytes.t list -> t - val zero: t + val hash_string : ?key:string -> string list -> t + val zero : t end module type RAW_DATA = sig - type t - val size: int (* in bytes *) - val to_hex: t -> Hex.t - val of_hex: Hex.t -> t tzresult - val of_hex_opt: Hex.t -> t option - val of_hex_exn: Hex.t -> t + val size : int (* in bytes *) + + val to_hex : t -> Hex.t + + val of_hex : Hex.t -> t tzresult + + val of_hex_opt : Hex.t -> t option + + val of_hex_exn : Hex.t -> t + + val to_string : t -> string - val to_string: t -> string - val of_string: string -> t tzresult - val of_string_opt: string -> t option - val of_string_exn: string -> t + val of_string : string -> t tzresult - val to_bytes: t -> MBytes.t + val of_string_opt : string -> t option - val of_bytes: MBytes.t -> t tzresult - val of_bytes_opt: MBytes.t -> t option - val of_bytes_exn: MBytes.t -> t + val of_string_exn : string -> t + val to_bytes : t -> MBytes.t + + val of_bytes : MBytes.t -> t tzresult + + val of_bytes_opt : MBytes.t -> t option + + val of_bytes_exn : MBytes.t -> t end module type B58_DATA = sig - type t - val to_b58check: t -> string - val to_short_b58check: t -> string + val to_b58check : t -> string + + val to_short_b58check : t -> string - val of_b58check: string -> t tzresult - val of_b58check_exn: string -> t - val of_b58check_opt: string -> t option + val of_b58check : string -> t tzresult + + val of_b58check_exn : string -> t + + val of_b58check_opt : string -> t option type Base58.data += Data of t - val b58check_encoding: t Base58.encoding + val b58check_encoding : t Base58.encoding end module type ENCODER = sig - type t - val encoding: t Data_encoding.t + val encoding : t Data_encoding.t - val rpc_arg: t RPC_arg.t + val rpc_arg : t RPC_arg.t - val param: + val param : ?name:string -> ?desc:string -> ('a, 'arg) Clic.params -> (t -> 'a, 'arg) Clic.params - end module type PVSS = sig - type proof - module Clear_share : sig type t end - module Commitment : sig type t end - module Encrypted_share : sig type t end + module Clear_share : sig + type t + end + + module Commitment : sig + type t + end + + module Encrypted_share : sig + type t + end module Public_key : sig type t + include B58_DATA with type t := t + include ENCODER with type t := t end - end module type INDEXES = sig - type t val hash : t -> int - val to_path: t -> string list -> string list - val of_path: string list -> t option - val of_path_exn: string list -> t + val to_path : t -> string list -> string list - val prefix_path: string -> string list - val path_length: int + val of_path : string list -> t option + + val of_path_exn : string list -> t + + val prefix_path : string -> string list + + val path_length : int module Set : sig include Set.S with type elt = t - val random_elt: t -> elt - val encoding: t Data_encoding.t + + val random_elt : t -> elt + + val encoding : t Data_encoding.t end module Map : sig include Map.S with type key = t - val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t + + val encoding : 'a Data_encoding.t -> 'a t Data_encoding.t end module Table : sig 
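Rand.generate_into above fills a slice of an existing buffer and rejects inconsistent pos/len arguments up front. A runnable sketch of the same slice validation over stdlib Bytes, with Random standing in for the HACL* generator:

let generate_into ?(pos = 0) ?len buf =
  let buflen = Bytes.length buf in
  let len = match len with Some len -> len | None -> buflen - pos in
  if pos < 0 || len < 0 || pos + len > buflen then
    invalid_arg
      (Printf.sprintf "generate_into: invalid slice (pos=%d len=%d)" pos len) ;
  for i = pos to pos + len - 1 do
    Bytes.set buf i (Char.chr (Random.int 256))
  done

let () =
  let buf = Bytes.make 8 '\000' in
  generate_into ~pos:2 ~len:4 buf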
include Hashtbl.S with type key = t - val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t + + val encoding : 'a Data_encoding.t -> 'a t Data_encoding.t end module WeakRingTable : sig include WeakRingTable.S with type key = t - val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t - end + val encoding : 'a Data_encoding.t -> 'a t Data_encoding.t + end end module type HASH = sig include MINIMAL_HASH + include RAW_DATA with type t := t + include B58_DATA with type t := t + include ENCODER with type t := t + include INDEXES with type t := t end module type MERKLE_TREE = sig - type elt - val elt_bytes: elt -> MBytes.t + + val elt_bytes : elt -> MBytes.t include HASH - val compute: elt list -> t - val empty: t + val compute : elt list -> t + + val empty : t - type path = - | Left of path * t - | Right of t * path - | Op + type path = Left of path * t | Right of t * path | Op - val path_encoding: path Data_encoding.t - val bounded_path_encoding: ?max_length:int -> unit -> path Data_encoding.t + val path_encoding : path Data_encoding.t - val compute_path: elt list -> int -> path - val check_path: path -> elt -> t * int + val bounded_path_encoding : ?max_length:int -> unit -> path Data_encoding.t + val compute_path : elt list -> int -> path + + val check_path : path -> elt -> t * int end module type SIGNATURE = sig - module Public_key_hash : sig - type t - val pp: Format.formatter -> t -> unit - val pp_short: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + + val pp_short : Format.formatter -> t -> unit + include Compare.S with type t := t + include RAW_DATA with type t := t + include B58_DATA with type t := t + include ENCODER with type t := t + include INDEXES with type t := t - val zero: t + val zero : t module Logging : sig val tag : t Tag.def @@ -212,48 +241,55 @@ module type SIGNATURE = sig end module Public_key : sig - type t - val pp: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + include Compare.S with type t := t + include B58_DATA with type t := t - include ENCODER with type t := t - val hash: t -> Public_key_hash.t + include ENCODER with type t := t + val hash : t -> Public_key_hash.t end module Secret_key : sig - type t - val pp: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + include Compare.S with type t := t + include B58_DATA with type t := t - include ENCODER with type t := t - val to_public_key: t -> Public_key.t + include ENCODER with type t := t + val to_public_key : t -> Public_key.t end type t - val pp: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + include Compare.S with type t := t + include B58_DATA with type t := t + include ENCODER with type t := t - val zero: t + val zero : t type watermark - val sign: ?watermark:watermark -> Secret_key.t -> MBytes.t -> t - val check: ?watermark:watermark -> Public_key.t -> t -> MBytes.t -> bool - val generate_key: ?seed:MBytes.t -> unit -> (Public_key_hash.t * Public_key.t * Secret_key.t) + val sign : ?watermark:watermark -> Secret_key.t -> MBytes.t -> t + + val check : ?watermark:watermark -> Public_key.t -> t -> MBytes.t -> bool - val deterministic_nonce: Secret_key.t -> MBytes.t -> MBytes.t + val generate_key : + ?seed:MBytes.t -> unit -> Public_key_hash.t * Public_key.t * Secret_key.t - val deterministic_nonce_hash: Secret_key.t -> MBytes.t -> MBytes.t + val deterministic_nonce : Secret_key.t -> MBytes.t -> MBytes.t + val deterministic_nonce_hash : Secret_key.t -> MBytes.t -> MBytes.t end diff --git 
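A toy model of the MERKLE_TREE path type declared above, with Digest (MD5) standing in for Blake2B: check_path folds the path back up to a root hash and recovers the leaf's index. The orientation and index convention of the real Blake2B.Make_merkle_tree may differ, so treat this purely as an illustration of the shape of the API:

type path = Left of path * string | Right of string * path | Op

let node l r = Digest.string (l ^ r)

let check_path path leaf =
  let rec go = function
    | Op -> (Digest.string leaf, 1, 0)
    | Left (p, sibling) ->
        (* we are the left child; the sibling subtree sits on the right *)
        let (h, width, pos) = go p in
        (node h sibling, 2 * width, pos)
    | Right (sibling, p) ->
        (* we are the right child, so the index is shifted by the width *)
        let (h, width, pos) = go p in
        (node sibling h, 2 * width, pos + width)
  in
  let (root, _, pos) = go path in
  (root, pos)

let () =
  let ha = Digest.string "a" and hb = Digest.string "b" in
  let (root_a, i_a) = check_path (Left (Op, hb)) "a"
  and (root_b, i_b) = check_path (Right (ha, Op)) "b" in
  assert (root_a = root_b && i_a = 0 && i_b = 1)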
a/src/lib_crypto/secp256k1.ml b/src/lib_crypto/secp256k1.ml index 733642b9daddc1bf2a0049f9a5595d8a9c32d470..45a485df11172e8fce7a5c0870516f627208fd5c 100644 --- a/src/lib_crypto/secp256k1.ml +++ b/src/lib_crypto/secp256k1.ml @@ -24,267 +24,305 @@ (*****************************************************************************) module Public_key_hash = struct - include Blake2B.Make(Base58)(struct - let name = "Secp256k1.Public_key_hash" - let title = "A Secp256k1 public key hash" - let b58check_prefix = Base58.Prefix.secp256k1_public_key_hash - let size = Some 20 - end) + include Blake2B.Make + (Base58) + (struct + let name = "Secp256k1.Public_key_hash" + + let title = "A Secp256k1 public key hash" + + let b58check_prefix = Base58.Prefix.secp256k1_public_key_hash + + let size = Some 20 + end) + module Logging = struct let tag = Tag.def ~doc:title name pp end end -let () = - Base58.check_encoded_prefix Public_key_hash.b58check_encoding "tz2" 36 +let () = Base58.check_encoded_prefix Public_key_hash.b58check_encoding "tz2" 36 open Libsecp256k1.External let context = let ctx = Context.create () in match Context.randomize ctx (Rand.generate 32) with - | false -> failwith "Secp256k1 context randomization failed. Aborting." - | true -> ctx + | false -> + failwith "Secp256k1 context randomization failed. Aborting." + | true -> + ctx module Public_key = struct - type t = Key.public Key.t - let name = "Secp256k1.Public_key" + let name = "Secp256k1.Public_key" + let title = "A Secp256k1 public key" let to_bytes pk = Key.to_bytes context pk - let of_bytes_opt s = - try Some (Key.read_pk_exn context s) - with _ -> None + + let of_bytes_opt s = try Some (Key.read_pk_exn context s) with _ -> None let to_string s = MBytes.to_string (to_bytes s) + let of_string_opt s = of_bytes_opt (MBytes.of_string s) let size = Key.compressed_pk_bytes - type Base58.data += - | Data of t + type Base58.data += Data of t let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.secp256k1_public_key - ~length: size - ~to_raw: to_string - ~of_raw: of_string_opt - ~wrap: (fun x -> Data x) - - let () = - Base58.check_encoded_prefix b58check_encoding "sppk" 55 - - let hash v = - Public_key_hash.hash_bytes [to_bytes v] - - include Compare.Make(struct - type nonrec t = t - let compare a b = - MBytes.compare (to_bytes a) (to_bytes b) - end) - - include Helpers.MakeRaw(struct - type nonrec t = t - let name = name - let of_bytes_opt = of_bytes_opt - let of_string_opt = of_string_opt - let to_string = to_string - end) - - include Helpers.MakeB58(struct - type nonrec t = t - let name = name - let b58check_encoding = b58check_encoding - end) - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - let open Data_encoding in - conv to_bytes of_bytes_exn (Fixed.bytes size) - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) + ~prefix:Base58.Prefix.secp256k1_public_key + ~length:size + ~to_raw:to_string + ~of_raw:of_string_opt + ~wrap:(fun x -> Data x) - let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) + let () = Base58.check_encoded_prefix b58check_encoding "sppk" 55 -end + let hash v = Public_key_hash.hash_bytes [to_bytes v] -module Secret_key = struct + include Compare.Make (struct + type nonrec t = t - type t = Key.secret Key.t + let compare a b = MBytes.compare (to_bytes a) (to_bytes b) + end) - let name = 
"Secp256k1.Secret_key" - let title = "A Secp256k1 secret key" + include Helpers.MakeRaw (struct + type nonrec t = t - let size = Key.secret_bytes + let name = name - let of_bytes_opt s = - match Key.read_sk context s with - | Ok x -> Some x - | _ -> None - let to_bytes x = Key.to_bytes context x + let of_bytes_opt = of_bytes_opt - let to_string s = MBytes.to_string (to_bytes s) - let of_string_opt s = of_bytes_opt (MBytes.of_string s) + let of_string_opt = of_string_opt - let to_public_key key = Key.neuterize_exn context key + let to_string = to_string + end) - type Base58.data += - | Data of t + include Helpers.MakeB58 (struct + type nonrec t = t - let b58check_encoding = - Base58.register_encoding - ~prefix: Base58.Prefix.secp256k1_secret_key - ~length: size - ~to_raw: to_string - ~of_raw: of_string_opt - ~wrap: (fun x -> Data x) - - let () = - Base58.check_encoded_prefix b58check_encoding "spsk" 54 - - include Compare.Make(struct - type nonrec t = t - let compare a b = - MBytes.compare (Key.buffer a) (Key.buffer b) - end) - - include Helpers.MakeRaw(struct - type nonrec t = t - let name = name - let of_bytes_opt = of_bytes_opt - let of_string_opt = of_string_opt - let to_string = to_string - end) - - include Helpers.MakeB58(struct - type nonrec t = t - let name = name - let b58check_encoding = b58check_encoding - end) - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - let open Data_encoding in - conv to_bytes of_bytes_exn (Fixed.bytes size) - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) + let name = name - let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) + let b58check_encoding = b58check_encoding + end) + include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = + let open Data_encoding in + conv to_bytes of_bytes_exn (Fixed.bytes size) + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + end) + + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) end -type t = Sign.plain Sign.t +module Secret_key = struct + type t = Key.secret Key.t -type watermark = MBytes.t + let name = "Secp256k1.Secret_key" -let name = "Secp256k1" -let title = "A Secp256k1 signature" + let title = "A Secp256k1 secret key" -let size = Sign.plain_bytes + let size = Key.secret_bytes -let of_bytes_opt s = - match Sign.read context s with Ok s -> Some s | Error _ -> None + let of_bytes_opt s = + match Key.read_sk context s with Ok x -> Some x | _ -> None -let to_bytes = Sign.to_bytes ~der:false context + let to_bytes x = Key.to_bytes context x -let to_string s = MBytes.to_string (to_bytes s) -let of_string_opt s = of_bytes_opt (MBytes.of_string s) + let to_string s = MBytes.to_string (to_bytes s) -type Base58.data += - | Data of t + let of_string_opt s = of_bytes_opt (MBytes.of_string s) -let b58check_encoding = - Base58.register_encoding - ~prefix: Base58.Prefix.secp256k1_signature - ~length: size - ~to_raw: to_string - ~of_raw: of_string_opt - ~wrap: (fun x -> Data x) + let to_public_key key = Key.neuterize_exn context key + + type Base58.data += Data of t + + let b58check_encoding = + Base58.register_encoding + ~prefix:Base58.Prefix.secp256k1_secret_key + ~length:size + ~to_raw:to_string 
+ ~of_raw:of_string_opt + ~wrap:(fun x -> Data x) -let () = - Base58.check_encoded_prefix b58check_encoding "spsig1" 99 + let () = Base58.check_encoded_prefix b58check_encoding "spsk" 54 -include Compare.Make(struct + include Compare.Make (struct type nonrec t = t - let compare a b = - MBytes.compare (Sign.buffer a) (Sign.buffer b) + + let compare a b = MBytes.compare (Key.buffer a) (Key.buffer b) end) -include Helpers.MakeRaw(struct + include Helpers.MakeRaw (struct type nonrec t = t + let name = name + let of_bytes_opt = of_bytes_opt + let of_string_opt = of_string_opt + let to_string = to_string end) -include Helpers.MakeB58(struct + include Helpers.MakeB58 (struct type nonrec t = t + let name = name + let b58check_encoding = b58check_encoding end) -include Helpers.MakeEncoder(struct + include Helpers.MakeEncoder (struct type nonrec t = t + let name = name + let title = title + let raw_encoding = let open Data_encoding in conv to_bytes of_bytes_exn (Fixed.bytes size) + let of_b58check = of_b58check + let of_b58check_opt = of_b58check_opt + let of_b58check_exn = of_b58check_exn + let to_b58check = to_b58check + let to_short_b58check = to_short_b58check end) + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) +end + +type t = Sign.plain Sign.t + +type watermark = MBytes.t + +let name = "Secp256k1" + +let title = "A Secp256k1 signature" + +let size = Sign.plain_bytes + +let of_bytes_opt s = + match Sign.read context s with Ok s -> Some s | Error _ -> None + +let to_bytes = Sign.to_bytes ~der:false context + +let to_string s = MBytes.to_string (to_bytes s) + +let of_string_opt s = of_bytes_opt (MBytes.of_string s) + +type Base58.data += Data of t + +let b58check_encoding = + Base58.register_encoding + ~prefix:Base58.Prefix.secp256k1_signature + ~length:size + ~to_raw:to_string + ~of_raw:of_string_opt + ~wrap:(fun x -> Data x) + +let () = Base58.check_encoded_prefix b58check_encoding "spsig1" 99 + +include Compare.Make (struct + type nonrec t = t + + let compare a b = MBytes.compare (Sign.buffer a) (Sign.buffer b) +end) + +include Helpers.MakeRaw (struct + type nonrec t = t + + let name = name + + let of_bytes_opt = of_bytes_opt + + let of_string_opt = of_string_opt + + let to_string = to_string +end) + +include Helpers.MakeB58 (struct + type nonrec t = t + + let name = name + + let b58check_encoding = b58check_encoding +end) + +include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = + let open Data_encoding in + conv to_bytes of_bytes_exn (Fixed.bytes size) + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check +end) + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) let zero = of_bytes_exn (MBytes.make size '\000') let sign ?watermark sk msg = let msg = - Blake2B.to_bytes @@ - Blake2B.hash_bytes @@ - match watermark with - | None -> [msg] - | Some prefix -> [ prefix ; msg ] in + Blake2B.to_bytes @@ Blake2B.hash_bytes + @@ match watermark with None -> [msg] | Some prefix -> [prefix; msg] + in Sign.sign_exn context ~sk msg let check ?watermark public_key signature msg = let msg = - Blake2B.to_bytes @@ - Blake2B.hash_bytes @@ - match watermark with - | None -> [msg] - | Some prefix -> [ prefix ; msg ] in + Blake2B.to_bytes @@ Blake2B.hash_bytes + @@ match watermark with None -> [msg] | Some prefix -> [prefix; msg] + in Sign.verify_exn context ~pk:public_key ~msg ~signature -let 
generate_key ?(seed=Rand.generate 32) () = +let generate_key ?(seed = Rand.generate 32) () = let sk = Key.read_sk_exn context seed in let pk = Key.neuterize_exn context sk in let pkh = Public_key.hash pk in - pkh, pk, sk + (pkh, pk, sk) let deterministic_nonce sk msg = - Hacl.Hash.SHA256.HMAC.digest ~key: (Secret_key.to_bytes sk) ~msg + Hacl.Hash.SHA256.HMAC.digest ~key:(Secret_key.to_bytes sk) ~msg let deterministic_nonce_hash sk msg = Blake2B.to_bytes (Blake2B.hash_bytes [deterministic_nonce sk msg]) diff --git a/src/lib_crypto/secp256k1.mli b/src/lib_crypto/secp256k1.mli index 27759aa7610e93f1e1c0f69e212c5bd6e93643d8..ce5dc7a292dc759cf2b12dd40dc2505692a19a0c 100644 --- a/src/lib_crypto/secp256k1.mli +++ b/src/lib_crypto/secp256k1.mli @@ -26,4 +26,5 @@ (** Tezos - Secp256k1 cryptography *) include S.SIGNATURE with type watermark = MBytes.t + include S.RAW_DATA with type t := t diff --git a/src/lib_crypto/secp256k1_group.ml b/src/lib_crypto/secp256k1_group.ml index 9f9a38742de17688c4578e8d7a4a92baa7c04b98..4996eec2fde553e612498c562b864bc977446877 100644 --- a/src/lib_crypto/secp256k1_group.ml +++ b/src/lib_crypto/secp256k1_group.ml @@ -27,43 +27,75 @@ module Sp = Libsecp256k1.Internal module type SCALAR_SIG = sig type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t + val zero : t + val one : t + val of_Z : Z.t -> t + val to_Z : t -> Z.t + val of_int : int -> t - val add: t -> t -> t - val mul: t -> t -> t - val negate: t -> t + + val add : t -> t -> t + + val mul : t -> t -> t + + val negate : t -> t + val sub : t -> t -> t - val of_bits_exn: string -> t - val to_bits: t -> string - val inverse: t -> t option - val pow: t -> Z.t -> t + + val of_bits_exn : string -> t + + val to_bits : t -> string + + val inverse : t -> t option + + val pow : t -> Z.t -> t + val equal : t -> t -> bool end module Group : sig - val order: Z.t + val order : Z.t + module Scalar : SCALAR_SIG + type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t - val e: t - val g: t - val h: t - val of_coordinates: x:Z.t -> y:Z.t -> t - val of_bits_exn: string -> t - val to_bits: t -> string - val mul: Scalar.t -> t -> t - val (+): t -> t -> t - val (-): t -> t -> t - val (=): t -> t -> bool -end = struct - let order = Z.of_string_base 16 "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141" + val e : t + + val g : t + + val h : t + + val of_coordinates : x:Z.t -> y:Z.t -> t + + val of_bits_exn : string -> t + + val to_bits : t -> string + + val mul : Scalar.t -> t -> t + + val ( + ) : t -> t -> t + + val ( - ) : t -> t -> t + + val ( = ) : t -> t -> bool +end = struct + let order = + Z.of_string_base + 16 + "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141" let string_rev s = let len = String.length s in @@ -72,207 +104,235 @@ end = struct let b32_of_Z z = let cs = Cstruct.create 32 in let bits = Z.to_bits z in - let length = (min 32 (String.length bits)) in - let bits = String.sub bits 0 length in + let length = min 32 (String.length bits) in + let bits = String.sub bits 0 length in let bits = string_rev bits in - Cstruct.blit_from_string bits 0 cs (32 - length) length; + Cstruct.blit_from_string bits 0 cs (32 - length) length ; cs - let z_of_b32 b = - b |> Cstruct.to_string |> string_rev |> Z.of_bits + let z_of_b32 b = b |> Cstruct.to_string |> string_rev |> Z.of_bits module Scalar : SCALAR_SIG with type t = Sp.Scalar.t = struct type t = Sp.Scalar.t let zero = Sp.Scalar.zero () - let one = Sp.Scalar.one () - let equal x y = 
Sp.Scalar.equal x y + let one = Sp.Scalar.one () + + let equal x y = Sp.Scalar.equal x y let of_Z z = let z = Z.erem z order in let r = Sp.Scalar.const () in let cs = b32_of_Z z in - let _ = Sp.Scalar.set_b32 r cs in r + let _ = Sp.Scalar.set_b32 r cs in + r let to_Z s = let cs = Cstruct.create 32 in - Sp.Scalar.get_b32 cs s; cs |> z_of_b32 + Sp.Scalar.get_b32 cs s ; cs |> z_of_b32 let of_int i = i |> Z.of_int |> of_Z - let pow t n = - Z.powm (to_Z t) n order |> of_Z + let pow t n = Z.powm (to_Z t) n order |> of_Z let add x y = let r = Sp.Scalar.const () in - let _ = Sp.Scalar.add r x y in r + let _ = Sp.Scalar.add r x y in + r let mul x y = let r = Sp.Scalar.const () in - Sp.Scalar.mul r x y; r + Sp.Scalar.mul r x y ; r let negate x = let r = Sp.Scalar.const () in - Sp.Scalar.negate r x; r + Sp.Scalar.negate r x ; r - let sub x y = - add x (negate y) + let sub x y = add x (negate y) let of_bits_exn bits = let r = Sp.Scalar.const () in (* trim to 32 bytes *) let cs = Cstruct.create 32 in - Cstruct.blit_from_string bits 0 cs 0 (min (String.length bits) 32); + Cstruct.blit_from_string bits 0 cs 0 (min (String.length bits) 32) ; (* ignore overflow condition, it's always 0 based on the c-code *) - let _ = Sp.Scalar.set_b32 r cs in r + let _ = Sp.Scalar.set_b32 r cs in + r (* TODO, check that we are less than the order *) let to_bits x = let c = Cstruct.create 32 in - Sp.Scalar.get_b32 c x; Cstruct.to_string c + Sp.Scalar.get_b32 c x ; Cstruct.to_string c let inverse x = - if x = zero then - None else + if x = zero then None + else let r = Sp.Scalar.const () in - Sp.Scalar.inverse r x; Some r + Sp.Scalar.inverse r x ; Some r - type Base58.data += - | Data of t + type Base58.data += Data of t let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.secp256k1_scalar - ~length: 32 - ~to_raw: to_bits - ~of_raw: (fun s -> try Some (of_bits_exn s) with _ -> None) - ~wrap: (fun x -> Data x) + ~prefix:Base58.Prefix.secp256k1_scalar + ~length:32 + ~to_raw:to_bits + ~of_raw:(fun s -> try Some (of_bits_exn s) with _ -> None) + ~wrap:(fun x -> Data x) let title = "Secp256k1_group.Scalar" - let name = "Anscalar for the secp256k1 group" - - include Helpers.MakeB58(struct - type nonrec t = t - let name = name - let b58check_encoding = b58check_encoding - end) - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = Data_encoding.(conv to_bits of_bits_exn string) - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - end) + + let name = "Anscalar for the secp256k1 group" + + include Helpers.MakeB58 (struct + type nonrec t = t + + let name = name + + let b58check_encoding = b58check_encoding + end) + + include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = Data_encoding.(conv to_bits of_bits_exn string) + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + end) end type t = Sp.Group.Jacobian.t + (* type ge = Sp.Group.ge *) let field_of_Z z = let fe = Sp.Field.const () in let cs = b32_of_Z z in - let _ = Sp.Field.set_b32 fe cs in fe + let _ = Sp.Field.set_b32 fe cs in + fe let group_of_jacobian j = let r = Sp.Group.of_fields () in - Sp.Group.Jacobian.get_ge r j; r + Sp.Group.Jacobian.get_ge r j 
; + r let jacobian_of_group g = let j = Sp.Group.Jacobian.of_fields () in - Sp.Group.Jacobian.set_ge j g; j - + Sp.Group.Jacobian.set_ge j g ; + j let of_coordinates ~x ~y = - Sp.Group.of_fields - ~x:(field_of_Z x) ~y:(field_of_Z y) () |> jacobian_of_group + Sp.Group.of_fields ~x:(field_of_Z x) ~y:(field_of_Z y) () + |> jacobian_of_group - let e = - Sp.Group.Jacobian.of_fields ~infinity:true () + let e = Sp.Group.Jacobian.of_fields ~infinity:true () let g = - let gx = Z.of_string "55066263022277343669578718895168534326250603453777594175500187360389116729240" - and gy = Z.of_string "32670510020758816978083085130507043184471273380659243275938904335757337482424" in + let gx = + Z.of_string + "55066263022277343669578718895168534326250603453777594175500187360389116729240" + and gy = + Z.of_string + "32670510020758816978083085130507043184471273380659243275938904335757337482424" + in of_coordinates ~x:gx ~y:gy (* To obtain the second generator, take the sha256 hash of the decimal representation of g1_y python -c "import hashlib;print int(hashlib.sha256('32670510020758816978083085130507043184471273380659243275938904335757337482424').hexdigest(),16)" *) let h = - let hx = Z.of_string "54850469061264194188802857211425616972714231399857248865148107587305936171824" - and hy = Z.of_string "6558914719042992724977242403721980463337660510165027616783569279181206179101" in + let hx = + Z.of_string + "54850469061264194188802857211425616972714231399857248865148107587305936171824" + and hy = + Z.of_string + "6558914719042992724977242403721980463337660510165027616783569279181206179101" + in of_coordinates ~x:hx ~y:hy - let (+) x y = + let ( + ) x y = let r = Sp.Group.Jacobian.of_fields () in - Sp.Group.Jacobian.add_var r x y; r + Sp.Group.Jacobian.add_var r x y ; + r - let (-) x y = + let ( - ) x y = let neg_y = Sp.Group.Jacobian.of_fields () in - Sp.Group.Jacobian.neg neg_y y; x + neg_y + Sp.Group.Jacobian.neg neg_y y ; + x + neg_y - let (=) x y = Sp.Group.Jacobian.is_infinity (x - y) + let ( = ) x y = Sp.Group.Jacobian.is_infinity (x - y) let mul s g = let r = Sp.Group.Jacobian.of_fields () in - Sp.Group.Jacobian.mul r (group_of_jacobian g) s; r + Sp.Group.Jacobian.mul r (group_of_jacobian g) s ; + r let to_bits j = - let x = group_of_jacobian j - and buf = Cstruct.create 33 in - let cs = (Sp.Group.to_pubkey ~compress:true buf x) in + let x = group_of_jacobian j and buf = Cstruct.create 33 in + let cs = Sp.Group.to_pubkey ~compress:true buf x in Cstruct.to_string cs let of_bits_exn bits = - let buf = Cstruct.of_string bits - and x = Sp.Group.of_fields () in - Sp.Group.from_pubkey x buf; - x |> jacobian_of_group - + let buf = Cstruct.of_string bits and x = Sp.Group.of_fields () in + Sp.Group.from_pubkey x buf ; x |> jacobian_of_group module Encoding = struct - type Base58.data += - | Data of t + type Base58.data += Data of t let title = "Secp256k1_group.Group" - let name = "An element of secp256k1" + + let name = "An element of secp256k1" let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.secp256k1_element - ~length: 33 - ~to_raw: to_bits - ~of_raw: (fun s -> try Some (of_bits_exn s) with _ -> None) - ~wrap: (fun x -> Data x) - - include Helpers.MakeB58( - struct - type nonrec t = t - let name = name - let b58check_encoding = b58check_encoding - end) - - include Helpers.MakeEncoder( - struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = Data_encoding.(conv to_bits of_bits_exn string) - let to_b58check = to_b58check - let to_short_b58check = 
to_short_b58check - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - end - ) + ~prefix:Base58.Prefix.secp256k1_element + ~length:33 + ~to_raw:to_bits + ~of_raw:(fun s -> try Some (of_bits_exn s) with _ -> None) + ~wrap:(fun x -> Data x) + + include Helpers.MakeB58 (struct + type nonrec t = t + + let name = name + + let b58check_encoding = b58check_encoding + end) + + include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = Data_encoding.(conv to_bits of_bits_exn string) + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + end) end include Encoding - end diff --git a/src/lib_crypto/secp256k1_group.mli b/src/lib_crypto/secp256k1_group.mli index aa77d7da22fb0f5d13bf16ea702e12634b0f2c86..ca15e61108cbc0d30730b3ea827b1788c664754d 100644 --- a/src/lib_crypto/secp256k1_group.mli +++ b/src/lib_crypto/secp256k1_group.mli @@ -25,48 +25,71 @@ (** Type for the group of integers modulo the order of the curve ℤ/pℤ *) module type SCALAR_SIG = sig - (** Element of the scalar group *) type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t val zero : t + val one : t + val of_Z : Z.t -> t + val to_Z : t -> Z.t + val of_int : int -> t - val add: t -> t -> t - val mul: t -> t -> t - val negate: t -> t - val sub: t -> t -> t - val of_bits_exn: string -> t - val to_bits: t -> string - val inverse: t -> t option + + val add : t -> t -> t + + val mul : t -> t -> t + + val negate : t -> t + + val sub : t -> t -> t + + val of_bits_exn : string -> t + + val to_bits : t -> string + + val inverse : t -> t option (** Modular exponentiation*) - val pow: t -> Z.t -> t - val equal: t -> t -> bool + val pow : t -> Z.t -> t + + val equal : t -> t -> bool end module Group : sig - type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t - val order: Z.t + val order : Z.t + module Scalar : SCALAR_SIG - val e: t + + val e : t + val g : t + val h : t - val of_coordinates: x:Z.t -> y:Z.t -> t - val of_bits_exn: string -> t - val to_bits: t -> string - val mul: Scalar.t -> t -> t - val (+): t -> t -> t - val (-): t -> t -> t - val (=): t -> t -> bool + val of_coordinates : x:Z.t -> y:Z.t -> t + + val of_bits_exn : string -> t + + val to_bits : t -> string + + val mul : Scalar.t -> t -> t + + val ( + ) : t -> t -> t + + val ( - ) : t -> t -> t + val ( = ) : t -> t -> bool end diff --git a/src/lib_crypto/signature.ml b/src/lib_crypto/signature.ml index 8aec1057ec70da8d155aa2958d90005b6a2d5c61..b95439d7e72add6d63ebf378c575e04777e60052 100644 --- a/src/lib_crypto/signature.ml +++ b/src/lib_crypto/signature.ml @@ -47,163 +47,217 @@ type watermark = | Custom of MBytes.t module Public_key_hash = struct - type t = public_key_hash = | Ed25519 of Ed25519.Public_key_hash.t | Secp256k1 of Secp256k1.Public_key_hash.t | P256 of P256.Public_key_hash.t let name = "Signature.Public_key_hash" + let title = "A Ed25519, Secp256k1, or P256 public key hash" type Base58.data += Data of t (* unused *) - let b58check_encoding = (* unused *) + + let b58check_encoding = + (* unused *) Base58.register_encoding - ~prefix: "\255\255" - ~length: 2 - ~to_raw: (fun _ -> assert false) - ~of_raw: (fun _ -> assert false) - ~wrap: (fun x -> Data x) + ~prefix:"\255\255" + ~length:2 + ~to_raw:(fun _ -> assert false) + ~of_raw:(fun _ -> assert false) + 
~wrap:(fun x -> Data x) let raw_encoding = let open Data_encoding in - def "public_key_hash" ~description:title @@ - union [ - case (Tag 0) Ed25519.Public_key_hash.encoding - ~title:"Ed25519" - (function Ed25519 x -> Some x | _ -> None) - (function x -> Ed25519 x); - case (Tag 1) Secp256k1.Public_key_hash.encoding - ~title:"Secp256k1" - (function Secp256k1 x -> Some x | _ -> None) - (function x -> Secp256k1 x) ; - case (Tag 2) - ~title:"P256" P256.Public_key_hash.encoding - (function P256 x -> Some x | _ -> None) - (function x -> P256 x) - ] - - let to_bytes s = - Data_encoding.Binary.to_bytes_exn raw_encoding s - let of_bytes_opt s = - Data_encoding.Binary.of_bytes raw_encoding s + def "public_key_hash" ~description:title + @@ union + [ case + (Tag 0) + Ed25519.Public_key_hash.encoding + ~title:"Ed25519" + (function Ed25519 x -> Some x | _ -> None) + (function x -> Ed25519 x); + case + (Tag 1) + Secp256k1.Public_key_hash.encoding + ~title:"Secp256k1" + (function Secp256k1 x -> Some x | _ -> None) + (function x -> Secp256k1 x); + case + (Tag 2) + ~title:"P256" + P256.Public_key_hash.encoding + (function P256 x -> Some x | _ -> None) + (function x -> P256 x) ] + + let to_bytes s = Data_encoding.Binary.to_bytes_exn raw_encoding s + + let of_bytes_opt s = Data_encoding.Binary.of_bytes raw_encoding s + let to_string s = MBytes.to_string (to_bytes s) + let of_string_opt s = of_bytes_opt (MBytes.of_string s) let size = 1 + Ed25519.size let zero = Ed25519 Ed25519.Public_key_hash.zero - include Helpers.MakeRaw(struct - type nonrec t = t - let name = name - let of_bytes_opt = of_bytes_opt - let of_string_opt = of_string_opt - let to_string = to_string - end) + include Helpers.MakeRaw (struct + type nonrec t = t + + let name = name + + let of_bytes_opt = of_bytes_opt + + let of_string_opt = of_string_opt + + let to_string = to_string + end) let of_b58check_opt s = match Base58.decode s with - | Some Ed25519.Public_key_hash.Data pkh -> Some (Ed25519 pkh) - | Some Secp256k1.Public_key_hash.Data pkh -> Some (Secp256k1 pkh) - | Some P256.Public_key_hash.Data pkh -> Some (P256 pkh) - | _ -> None + | Some (Ed25519.Public_key_hash.Data pkh) -> + Some (Ed25519 pkh) + | Some (Secp256k1.Public_key_hash.Data pkh) -> + Some (Secp256k1 pkh) + | Some (P256.Public_key_hash.Data pkh) -> + Some (P256 pkh) + | _ -> + None let of_b58check_exn s = match of_b58check_opt s with - | Some x -> x - | None -> Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + | Some x -> + x + | None -> + Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + let of_b58check s = match of_b58check_opt s with - | Some x -> Ok x + | Some x -> + Ok x | None -> - generic_error - "Failed to read a b58check_encoding data (%s): %S" - name s + generic_error "Failed to read a b58check_encoding data (%s): %S" name s let to_b58check = function - | Ed25519 pkh -> Ed25519.Public_key_hash.to_b58check pkh - | Secp256k1 pkh -> Secp256k1.Public_key_hash.to_b58check pkh - | P256 pkh -> P256.Public_key_hash.to_b58check pkh + | Ed25519 pkh -> + Ed25519.Public_key_hash.to_b58check pkh + | Secp256k1 pkh -> + Secp256k1.Public_key_hash.to_b58check pkh + | P256 pkh -> + P256.Public_key_hash.to_b58check pkh let to_short_b58check = function - | Ed25519 pkh -> Ed25519.Public_key_hash.to_short_b58check pkh - | Secp256k1 pkh -> Secp256k1.Public_key_hash.to_short_b58check pkh - | P256 pkh -> P256.Public_key_hash.to_short_b58check pkh - - let to_path key l = match key with - | Ed25519 h -> "ed25519" :: Ed25519.Public_key_hash.to_path h l - | 
Secp256k1 h -> "secp256k1" :: Secp256k1.Public_key_hash.to_path h l - | P256 h -> "p256" :: P256.Public_key_hash.to_path h l + | Ed25519 pkh -> + Ed25519.Public_key_hash.to_short_b58check pkh + | Secp256k1 pkh -> + Secp256k1.Public_key_hash.to_short_b58check pkh + | P256 pkh -> + P256.Public_key_hash.to_short_b58check pkh + + let to_path key l = + match key with + | Ed25519 h -> + "ed25519" :: Ed25519.Public_key_hash.to_path h l + | Secp256k1 h -> + "secp256k1" :: Secp256k1.Public_key_hash.to_path h l + | P256 h -> + "p256" :: P256.Public_key_hash.to_path h l let of_path = function - | "ed25519" :: q -> begin - match Ed25519.Public_key_hash.of_path q with - | Some pkh -> Some (Ed25519 pkh) - | None -> None - end - | "secp256k1" :: q -> begin - match Secp256k1.Public_key_hash.of_path q with - | Some pkh -> Some (Secp256k1 pkh) - | None -> None - end - | "p256" :: q -> begin - match P256.Public_key_hash.of_path q with - | Some pkh -> Some (P256 pkh) - | None -> None - end - | _ -> assert false (* FIXME classification des erreurs *) + | "ed25519" :: q -> ( + match Ed25519.Public_key_hash.of_path q with + | Some pkh -> + Some (Ed25519 pkh) + | None -> + None ) + | "secp256k1" :: q -> ( + match Secp256k1.Public_key_hash.of_path q with + | Some pkh -> + Some (Secp256k1 pkh) + | None -> + None ) + | "p256" :: q -> ( + match P256.Public_key_hash.of_path q with + | Some pkh -> + Some (P256 pkh) + | None -> + None ) + | _ -> + assert false + + (* FIXME classification des erreurs *) let of_path_exn = function - | "ed25519" :: q -> Ed25519 (Ed25519.Public_key_hash.of_path_exn q) - | "secp256k1" :: q -> Secp256k1 (Secp256k1.Public_key_hash.of_path_exn q) - | "p256" :: q -> P256 (P256.Public_key_hash.of_path_exn q) - | _ -> assert false (* FIXME classification des erreurs *) + | "ed25519" :: q -> + Ed25519 (Ed25519.Public_key_hash.of_path_exn q) + | "secp256k1" :: q -> + Secp256k1 (Secp256k1.Public_key_hash.of_path_exn q) + | "p256" :: q -> + P256 (P256.Public_key_hash.of_path_exn q) + | _ -> + assert false + + (* FIXME classification des erreurs *) let path_length = let l1 = Ed25519.Public_key_hash.path_length and l2 = Secp256k1.Public_key_hash.path_length and l3 = P256.Public_key_hash.path_length in - assert Compare.Int.(l1 = l2) ; - assert Compare.Int.(l1 = l3) ; + assert (Compare.Int.(l1 = l2)) ; + assert (Compare.Int.(l1 = l3)) ; 1 + l1 let prefix_path _ = assert false (* unused *) let hash = Hashtbl.hash - include Compare.Make(struct - type nonrec t = t - let compare a b = - match (a, b) with - | Ed25519 x, Ed25519 y -> - Ed25519.Public_key_hash.compare x y - | Secp256k1 x, Secp256k1 y -> - Secp256k1.Public_key_hash.compare x y - | P256 x, P256 y -> - P256.Public_key_hash.compare x y - | _ -> Pervasives.compare a b - end) - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = raw_encoding - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) - - include Helpers.MakeIterator(struct - type nonrec t = t - let hash = hash - let compare = compare - let equal = equal - let encoding = encoding - end) + include Compare.Make (struct + type nonrec t = t + + let compare a b = + match (a, b) with + | (Ed25519 x, Ed25519 y) -> + Ed25519.Public_key_hash.compare x y + | (Secp256k1 x, Secp256k1 y) -> + Secp256k1.Public_key_hash.compare x y + | (P256 x, P256 y) -> + P256.Public_key_hash.compare x y + | _ -> + 
Pervasives.compare a b + end) + + include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = raw_encoding + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + end) + + include Helpers.MakeIterator (struct + type nonrec t = t + + let hash = hash + + let compare = compare + + let equal = equal + + let encoding = encoding + end) let rpc_arg = RPC_arg.like @@ -217,192 +271,262 @@ module Public_key_hash = struct end module Public_key = struct - type t = public_key = | Ed25519 of Ed25519.Public_key.t | Secp256k1 of Secp256k1.Public_key.t | P256 of P256.Public_key.t let name = "Signature.Public_key" + let title = "A Ed25519, Secp256k1, or P256 public key" let hash pk = match pk with - | Ed25519 pk -> Public_key_hash.Ed25519 (Ed25519.Public_key.hash pk) - | Secp256k1 pk -> Public_key_hash.Secp256k1 (Secp256k1.Public_key.hash pk) - | P256 pk -> Public_key_hash.P256 (P256.Public_key.hash pk) - - include Compare.Make(struct - type nonrec t = t - let compare a b = match (a, b) with - | Ed25519 x, Ed25519 y -> Ed25519.Public_key.compare x y - | Secp256k1 x, Secp256k1 y -> Secp256k1.Public_key.compare x y - | P256 x, P256 y -> P256.Public_key.compare x y - | _ -> Pervasives.compare a b - end) + | Ed25519 pk -> + Public_key_hash.Ed25519 (Ed25519.Public_key.hash pk) + | Secp256k1 pk -> + Public_key_hash.Secp256k1 (Secp256k1.Public_key.hash pk) + | P256 pk -> + Public_key_hash.P256 (P256.Public_key.hash pk) + + include Compare.Make (struct + type nonrec t = t + + let compare a b = + match (a, b) with + | (Ed25519 x, Ed25519 y) -> + Ed25519.Public_key.compare x y + | (Secp256k1 x, Secp256k1 y) -> + Secp256k1.Public_key.compare x y + | (P256 x, P256 y) -> + P256.Public_key.compare x y + | _ -> + Pervasives.compare a b + end) type Base58.data += Data of t (* unused *) - let b58check_encoding = (* unused *) + + let b58check_encoding = + (* unused *) Base58.register_encoding - ~prefix: "\255\255" - ~length: 2 - ~to_raw: (fun _ -> assert false) - ~of_raw: (fun _ -> assert false) - ~wrap: (fun x -> Data x) + ~prefix:"\255\255" + ~length:2 + ~to_raw:(fun _ -> assert false) + ~of_raw:(fun _ -> assert false) + ~wrap:(fun x -> Data x) let of_b58check_opt s = match Base58.decode s with - | Some (Ed25519.Public_key.Data public_key) -> Some (Ed25519 public_key) - | Some (Secp256k1.Public_key.Data public_key) -> Some (Secp256k1 public_key) - | Some (P256.Public_key.Data public_key) -> Some (P256 public_key) - | _ -> None + | Some (Ed25519.Public_key.Data public_key) -> + Some (Ed25519 public_key) + | Some (Secp256k1.Public_key.Data public_key) -> + Some (Secp256k1 public_key) + | Some (P256.Public_key.Data public_key) -> + Some (P256 public_key) + | _ -> + None let of_b58check_exn s = match of_b58check_opt s with - | Some x -> x - | None -> Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + | Some x -> + x + | None -> + Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + let of_b58check s = match of_b58check_opt s with - | Some x -> Ok x + | Some x -> + Ok x | None -> - generic_error - "Failed to read a b58check_encoding data (%s): %S" - name s + generic_error "Failed to read a b58check_encoding data (%s): %S" name s let to_b58check = function - | Ed25519 pk -> Ed25519.Public_key.to_b58check pk - | Secp256k1 pk -> Secp256k1.Public_key.to_b58check pk - | P256 pk -> 
P256.Public_key.to_b58check pk + | Ed25519 pk -> + Ed25519.Public_key.to_b58check pk + | Secp256k1 pk -> + Secp256k1.Public_key.to_b58check pk + | P256 pk -> + P256.Public_key.to_b58check pk let to_short_b58check = function - | Ed25519 pk -> Ed25519.Public_key.to_short_b58check pk - | Secp256k1 pk -> Secp256k1.Public_key.to_short_b58check pk - | P256 pk -> P256.Public_key.to_short_b58check pk - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - let open Data_encoding in - def "public_key" ~description:title @@ - union [ - case (Tag 0) Ed25519.Public_key.encoding - ~title:"Ed25519" - (function Ed25519 x -> Some x | _ -> None) - (function x -> Ed25519 x); - case (Tag 1) Secp256k1.Public_key.encoding - ~title:"Secp256k1" - (function Secp256k1 x -> Some x | _ -> None) - (function x -> Secp256k1 x) ; - case - ~title:"P256" (Tag 2) P256.Public_key.encoding - (function P256 x -> Some x | _ -> None) - (function x -> P256 x) - ] - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) + | Ed25519 pk -> + Ed25519.Public_key.to_short_b58check pk + | Secp256k1 pk -> + Secp256k1.Public_key.to_short_b58check pk + | P256 pk -> + P256.Public_key.to_short_b58check pk + + include Helpers.MakeEncoder (struct + type nonrec t = t - let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) + let name = name + + let title = title + + let raw_encoding = + let open Data_encoding in + def "public_key" ~description:title + @@ union + [ case + (Tag 0) + Ed25519.Public_key.encoding + ~title:"Ed25519" + (function Ed25519 x -> Some x | _ -> None) + (function x -> Ed25519 x); + case + (Tag 1) + Secp256k1.Public_key.encoding + ~title:"Secp256k1" + (function Secp256k1 x -> Some x | _ -> None) + (function x -> Secp256k1 x); + case + ~title:"P256" + (Tag 2) + P256.Public_key.encoding + (function P256 x -> Some x | _ -> None) + (function x -> P256 x) ] + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + end) + + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) end module Secret_key = struct - type t = secret_key = | Ed25519 of Ed25519.Secret_key.t | Secp256k1 of Secp256k1.Secret_key.t | P256 of P256.Secret_key.t let name = "Signature.Secret_key" + let title = "A Ed25519, Secp256k1 or P256 secret key" let to_public_key = function - | Ed25519 sk -> Public_key.Ed25519 (Ed25519.Secret_key.to_public_key sk) - | Secp256k1 sk -> Public_key.Secp256k1 (Secp256k1.Secret_key.to_public_key sk) - | P256 sk -> Public_key.P256 (P256.Secret_key.to_public_key sk) - - include Compare.Make(struct - type nonrec t = t - let compare a b = match (a, b) with - | Ed25519 x, Ed25519 y -> Ed25519.Secret_key.compare x y - | Secp256k1 x, Secp256k1 y -> Secp256k1.Secret_key.compare x y - | P256 x, P256 y -> P256.Secret_key.compare x y - | _ -> Pervasives.compare a b - end) + | Ed25519 sk -> + Public_key.Ed25519 (Ed25519.Secret_key.to_public_key sk) + | Secp256k1 sk -> + Public_key.Secp256k1 (Secp256k1.Secret_key.to_public_key sk) + | P256 sk -> + Public_key.P256 (P256.Secret_key.to_public_key sk) + + include Compare.Make (struct + type nonrec t = t + + let compare a b = + match (a, b) with + | (Ed25519 x, Ed25519 y) -> + Ed25519.Secret_key.compare x y + | (Secp256k1 x, Secp256k1 y) -> + 
Secp256k1.Secret_key.compare x y + | (P256 x, P256 y) -> + P256.Secret_key.compare x y + | _ -> + Pervasives.compare a b + end) type Base58.data += Data of t (* unused *) - let b58check_encoding = (* unused *) + + let b58check_encoding = + (* unused *) Base58.register_encoding - ~prefix: "\255\255" - ~length: 2 - ~to_raw: (fun _ -> assert false) - ~of_raw: (fun _ -> assert false) - ~wrap: (fun x -> Data x) + ~prefix:"\255\255" + ~length:2 + ~to_raw:(fun _ -> assert false) + ~of_raw:(fun _ -> assert false) + ~wrap:(fun x -> Data x) let of_b58check_opt b = match Base58.decode b with - | Some (Ed25519.Secret_key.Data sk) -> Some (Ed25519 sk) - | Some (Secp256k1.Secret_key.Data sk) -> Some (Secp256k1 sk) - | Some (P256.Secret_key.Data sk) -> Some (P256 sk) - | _ -> None + | Some (Ed25519.Secret_key.Data sk) -> + Some (Ed25519 sk) + | Some (Secp256k1.Secret_key.Data sk) -> + Some (Secp256k1 sk) + | Some (P256.Secret_key.Data sk) -> + Some (P256 sk) + | _ -> + None let of_b58check_exn s = match of_b58check_opt s with - | Some x -> x - | None -> Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + | Some x -> + x + | None -> + Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + let of_b58check s = match of_b58check_opt s with - | Some x -> Ok x + | Some x -> + Ok x | None -> - generic_error - "Failed to read a b58check_encoding data (%s): %S" - name s + generic_error "Failed to read a b58check_encoding data (%s): %S" name s let to_b58check = function - | Ed25519 sk -> Ed25519.Secret_key.to_b58check sk - | Secp256k1 sk -> Secp256k1.Secret_key.to_b58check sk - | P256 sk -> P256.Secret_key.to_b58check sk + | Ed25519 sk -> + Ed25519.Secret_key.to_b58check sk + | Secp256k1 sk -> + Secp256k1.Secret_key.to_b58check sk + | P256 sk -> + P256.Secret_key.to_b58check sk let to_short_b58check = function - | Ed25519 sk -> Ed25519.Secret_key.to_short_b58check sk - | Secp256k1 sk -> Secp256k1.Secret_key.to_short_b58check sk - | P256 sk -> P256.Secret_key.to_short_b58check sk - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - let open Data_encoding in - def "secret_key" ~description:title @@ - union [ - case (Tag 0) Ed25519.Secret_key.encoding - ~title:"Ed25519" - (function Ed25519 x -> Some x | _ -> None) - (function x -> Ed25519 x); - case (Tag 1) Secp256k1.Secret_key.encoding - ~title:"Secp256k1" - (function Secp256k1 x -> Some x | _ -> None) - (function x -> Secp256k1 x) ; - case (Tag 2) - ~title:"P256" P256.Secret_key.encoding - (function P256 x -> Some x | _ -> None) - (function x -> P256 x) - ] - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) + | Ed25519 sk -> + Ed25519.Secret_key.to_short_b58check sk + | Secp256k1 sk -> + Secp256k1.Secret_key.to_short_b58check sk + | P256 sk -> + P256.Secret_key.to_short_b58check sk + + include Helpers.MakeEncoder (struct + type nonrec t = t - let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) + let name = name + let title = title + + let raw_encoding = + let open Data_encoding in + def "secret_key" ~description:title + @@ union + [ case + (Tag 0) + Ed25519.Secret_key.encoding + ~title:"Ed25519" + (function Ed25519 x -> Some x | _ -> None) + (function x -> Ed25519 x); + case + (Tag 1) + Secp256k1.Secret_key.encoding + ~title:"Secp256k1" + (function Secp256k1 x -> Some x | _ -> None) + (function x -> Secp256k1 x); + case + (Tag 
2) + ~title:"P256" + P256.Secret_key.encoding + (function P256 x -> Some x | _ -> None) + (function x -> P256 x) ] + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + end) + + let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) end type t = @@ -412,6 +536,7 @@ type t = | Unknown of MBytes.t let name = "Signature" + let title = "A Ed25519, Secp256k1 or P256 signature" let size = @@ -419,208 +544,248 @@ let size = Ed25519.size let to_bytes = function - | Ed25519 b -> Ed25519.to_bytes b - | Secp256k1 b -> Secp256k1.to_bytes b - | P256 b -> P256.to_bytes b - | Unknown b -> b + | Ed25519 b -> + Ed25519.to_bytes b + | Secp256k1 b -> + Secp256k1.to_bytes b + | P256 b -> + P256.to_bytes b + | Unknown b -> + b -let of_bytes_opt s = - if MBytes.length s = size then Some (Unknown s) else None +let of_bytes_opt s = if MBytes.length s = size then Some (Unknown s) else None let to_string s = MBytes.to_string (to_bytes s) + let of_string_opt s = of_bytes_opt (MBytes.of_string s) type Base58.data += Data of t + let b58check_encoding = Base58.register_encoding - ~prefix: Base58.Prefix.generic_signature - ~length: Ed25519.size - ~to_raw: to_string - ~of_raw: of_string_opt - ~wrap: (fun x -> Data x) + ~prefix:Base58.Prefix.generic_signature + ~length:Ed25519.size + ~to_raw:to_string + ~of_raw:of_string_opt + ~wrap:(fun x -> Data x) -let () = - Base58.check_encoded_prefix b58check_encoding "sig" 96 +let () = Base58.check_encoded_prefix b58check_encoding "sig" 96 -include Helpers.MakeRaw(struct - type nonrec t = t - let name = name - let of_bytes_opt = of_bytes_opt - let of_string_opt = of_string_opt - let to_string = to_string - end) +include Helpers.MakeRaw (struct + type nonrec t = t -include Compare.Make(struct - type nonrec t = t - let compare a b = - let a = to_bytes a - and b = to_bytes b in - MBytes.compare a b - end) + let name = name + + let of_bytes_opt = of_bytes_opt + + let of_string_opt = of_string_opt + + let to_string = to_string +end) + +include Compare.Make (struct + type nonrec t = t + + let compare a b = + let a = to_bytes a and b = to_bytes b in + MBytes.compare a b +end) let of_b58check_opt s = - if TzString.has_prefix ~prefix:Ed25519.b58check_encoding.encoded_prefix s then - Option.map - (Ed25519.of_b58check_opt s) - ~f: (fun x -> Ed25519 x) - else if TzString.has_prefix ~prefix:Secp256k1.b58check_encoding.encoded_prefix s then - Option.map - (Secp256k1.of_b58check_opt s) - ~f: (fun x -> Secp256k1 x) - else if TzString.has_prefix ~prefix:P256.b58check_encoding.encoded_prefix s then - Option.map - (P256.of_b58check_opt s) - ~f: (fun x -> P256 x) - else - Base58.simple_decode b58check_encoding s + if TzString.has_prefix ~prefix:Ed25519.b58check_encoding.encoded_prefix s + then Option.map (Ed25519.of_b58check_opt s) ~f:(fun x -> Ed25519 x) + else if + TzString.has_prefix ~prefix:Secp256k1.b58check_encoding.encoded_prefix s + then Option.map (Secp256k1.of_b58check_opt s) ~f:(fun x -> Secp256k1 x) + else if TzString.has_prefix ~prefix:P256.b58check_encoding.encoded_prefix s + then Option.map (P256.of_b58check_opt s) ~f:(fun x -> P256 x) + else Base58.simple_decode b58check_encoding s let of_b58check_exn s = match of_b58check_opt s with - | Some x -> x - | None -> Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + | Some x -> + x + | None -> + Format.kasprintf Pervasives.failwith "Unexpected data (%s)" name + let of_b58check s = 
match of_b58check_opt s with - | Some x -> Ok x + | Some x -> + Ok x | None -> - generic_error - "Failed to read a b58check_encoding data (%s): %S" - name s + generic_error "Failed to read a b58check_encoding data (%s): %S" name s let to_b58check = function - | Ed25519 b -> Ed25519.to_b58check b - | Secp256k1 b -> Secp256k1.to_b58check b - | P256 b -> P256.to_b58check b - | Unknown b -> Base58.simple_encode b58check_encoding (Unknown b) + | Ed25519 b -> + Ed25519.to_b58check b + | Secp256k1 b -> + Secp256k1.to_b58check b + | P256 b -> + P256.to_b58check b + | Unknown b -> + Base58.simple_encode b58check_encoding (Unknown b) let to_short_b58check = function - | Ed25519 b -> Ed25519.to_short_b58check b - | Secp256k1 b -> Secp256k1.to_short_b58check b - | P256 b -> P256.to_short_b58check b - | Unknown b -> Base58.simple_encode b58check_encoding (Unknown b) + | Ed25519 b -> + Ed25519.to_short_b58check b + | Secp256k1 b -> + Secp256k1.to_short_b58check b + | P256 b -> + P256.to_short_b58check b + | Unknown b -> + Base58.simple_encode b58check_encoding (Unknown b) -include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = - Data_encoding.conv - to_bytes - of_bytes_exn - (Data_encoding.Fixed.bytes size) - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - end) +include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = + Data_encoding.conv to_bytes of_bytes_exn (Data_encoding.Fixed.bytes size) + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + + let of_b58check_exn = of_b58check_exn + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check +end) let pp ppf t = Format.fprintf ppf "%s" (to_b58check t) let of_ed25519 s = Ed25519 s + let of_secp256k1 s = Secp256k1 s + let of_p256 s = P256 s let zero = of_ed25519 Ed25519.zero let bytes_of_watermark = function - | Block_header chain_id -> MBytes.concat "" [ MBytes.of_string "\x01" ; Chain_id.to_bytes chain_id ] - | Endorsement chain_id -> MBytes.concat "" [ MBytes.of_string "\x02" ; Chain_id.to_bytes chain_id ] - | Generic_operation -> MBytes.of_string "\x03" - | Custom bytes -> bytes + | Block_header chain_id -> + MBytes.concat "" [MBytes.of_string "\x01"; Chain_id.to_bytes chain_id] + | Endorsement chain_id -> + MBytes.concat "" [MBytes.of_string "\x02"; Chain_id.to_bytes chain_id] + | Generic_operation -> + MBytes.of_string "\x03" + | Custom bytes -> + bytes let pp_watermark ppf = let open Format in function - | Block_header chain_id -> fprintf ppf "Block-header: %a" Chain_id.pp chain_id - | Endorsement chain_id -> fprintf ppf "Endorsement: %a" Chain_id.pp chain_id - | Generic_operation -> pp_print_string ppf "Generic-operation" - | Custom bytes -> + | Block_header chain_id -> + fprintf ppf "Block-header: %a" Chain_id.pp chain_id + | Endorsement chain_id -> + fprintf ppf "Endorsement: %a" Chain_id.pp chain_id + | Generic_operation -> + pp_print_string ppf "Generic-operation" + | Custom bytes -> let hexed = MBytes.to_hex bytes |> Hex.show in - fprintf ppf "Custom: 0x%s" - (try String.sub hexed 0 10 ^ "..." with _ -> hexed) + fprintf + ppf + "Custom: 0x%s" + (try String.sub hexed 0 10 ^ "..." 
with _ -> hexed) let sign ?watermark secret_key message = let watermark = Option.map ~f:bytes_of_watermark watermark in match secret_key with - | Secret_key.Ed25519 sk -> of_ed25519 (Ed25519.sign ?watermark sk message) - | Secp256k1 sk -> of_secp256k1 (Secp256k1.sign ?watermark sk message) - | P256 sk -> of_p256 (P256.sign ?watermark sk message) + | Secret_key.Ed25519 sk -> + of_ed25519 (Ed25519.sign ?watermark sk message) + | Secp256k1 sk -> + of_secp256k1 (Secp256k1.sign ?watermark sk message) + | P256 sk -> + of_p256 (P256.sign ?watermark sk message) let check ?watermark public_key signature message = let watermark = Option.map ~f:bytes_of_watermark watermark in - match public_key, signature with - | Public_key.Ed25519 pk, Unknown signature -> begin - match Ed25519.of_bytes_opt signature with - | Some s -> Ed25519.check ?watermark pk s message - | None -> false - end - | Public_key.Secp256k1 pk, Unknown signature -> begin - match Secp256k1.of_bytes_opt signature with - | Some s -> Secp256k1.check ?watermark pk s message - | None -> false - end - | Public_key.P256 pk, Unknown signature -> begin - match P256.of_bytes_opt signature with - | Some s -> P256.check ?watermark pk s message - | None -> false - end - | Public_key.Ed25519 pk, Ed25519 signature -> + match (public_key, signature) with + | (Public_key.Ed25519 pk, Unknown signature) -> ( + match Ed25519.of_bytes_opt signature with + | Some s -> + Ed25519.check ?watermark pk s message + | None -> + false ) + | (Public_key.Secp256k1 pk, Unknown signature) -> ( + match Secp256k1.of_bytes_opt signature with + | Some s -> + Secp256k1.check ?watermark pk s message + | None -> + false ) + | (Public_key.P256 pk, Unknown signature) -> ( + match P256.of_bytes_opt signature with + | Some s -> + P256.check ?watermark pk s message + | None -> + false ) + | (Public_key.Ed25519 pk, Ed25519 signature) -> Ed25519.check ?watermark pk signature message - | Public_key.Secp256k1 pk, Secp256k1 signature -> + | (Public_key.Secp256k1 pk, Secp256k1 signature) -> Secp256k1.check ?watermark pk signature message - | Public_key.P256 pk, P256 signature -> + | (Public_key.P256 pk, P256 signature) -> P256.check ?watermark pk signature message - | _ -> false + | _ -> + false let append ?watermark sk msg = - MBytes.concat "" [msg; (to_bytes (sign ?watermark sk msg))] + MBytes.concat "" [msg; to_bytes (sign ?watermark sk msg)] -let concat msg signature = - MBytes.concat "" [msg; (to_bytes signature)] +let concat msg signature = MBytes.concat "" [msg; to_bytes signature] -type algo = - | Ed25519 - | Secp256k1 - | P256 +type algo = Ed25519 | Secp256k1 | P256 let algo_param () = Clic.parameter - ~autocomplete:(fun _ -> return [ "ed25519" ; "secp256k1" ; "p256"]) - begin fun _ name -> + ~autocomplete:(fun _ -> return ["ed25519"; "secp256k1"; "p256"]) + (fun _ name -> match name with - | "ed25519" -> return Ed25519 - | "secp256k1" -> return Secp256k1 - | "p256" -> return P256 + | "ed25519" -> + return Ed25519 + | "secp256k1" -> + return Secp256k1 + | "p256" -> + return P256 | name -> failwith - "Unknown signature algorithm (%s). \ - Available: 'ed25519', 'secp256k1' or 'p256'" - name - end + "Unknown signature algorithm (%s). 
Available: 'ed25519', \ + 'secp256k1' or 'p256'" + name) let generate_key ?(algo = Ed25519) ?seed () = match algo with | Ed25519 -> - let pkh, pk, sk = Ed25519.generate_key ?seed () in - (Public_key_hash.Ed25519 pkh, - Public_key.Ed25519 pk, Secret_key.Ed25519 sk) + let (pkh, pk, sk) = Ed25519.generate_key ?seed () in + ( Public_key_hash.Ed25519 pkh, + Public_key.Ed25519 pk, + Secret_key.Ed25519 sk ) | Secp256k1 -> - let pkh, pk, sk = Secp256k1.generate_key ?seed () in - (Public_key_hash.Secp256k1 pkh, - Public_key.Secp256k1 pk, Secret_key.Secp256k1 sk) + let (pkh, pk, sk) = Secp256k1.generate_key ?seed () in + ( Public_key_hash.Secp256k1 pkh, + Public_key.Secp256k1 pk, + Secret_key.Secp256k1 sk ) | P256 -> - let pkh, pk, sk = P256.generate_key ?seed () in - (Public_key_hash.P256 pkh, - Public_key.P256 pk, Secret_key.P256 sk) + let (pkh, pk, sk) = P256.generate_key ?seed () in + (Public_key_hash.P256 pkh, Public_key.P256 pk, Secret_key.P256 sk) let deterministic_nonce sk msg = match sk with - | Secret_key.Ed25519 sk -> Ed25519.deterministic_nonce sk msg - | Secret_key.Secp256k1 sk -> Secp256k1.deterministic_nonce sk msg - | Secret_key.P256 sk -> P256.deterministic_nonce sk msg + | Secret_key.Ed25519 sk -> + Ed25519.deterministic_nonce sk msg + | Secret_key.Secp256k1 sk -> + Secp256k1.deterministic_nonce sk msg + | Secret_key.P256 sk -> + P256.deterministic_nonce sk msg let deterministic_nonce_hash sk msg = match sk with - | Secret_key.Ed25519 sk -> Ed25519.deterministic_nonce_hash sk msg - | Secret_key.Secp256k1 sk -> Secp256k1.deterministic_nonce_hash sk msg - | Secret_key.P256 sk -> P256.deterministic_nonce_hash sk msg + | Secret_key.Ed25519 sk -> + Ed25519.deterministic_nonce_hash sk msg + | Secret_key.Secp256k1 sk -> + Secp256k1.deterministic_nonce_hash sk msg + | Secret_key.P256 sk -> + P256.deterministic_nonce_hash sk msg diff --git a/src/lib_crypto/signature.mli b/src/lib_crypto/signature.mli index 675c6ed89ca25c599de3bcc4faeada68ed693f34..2875601d4d617aa386656693136955d8c9e8c230 100644 --- a/src/lib_crypto/signature.mli +++ b/src/lib_crypto/signature.mli @@ -44,37 +44,39 @@ type watermark = | Generic_operation | Custom of MBytes.t -val bytes_of_watermark: watermark -> MBytes.t +val bytes_of_watermark : watermark -> MBytes.t val pp_watermark : Format.formatter -> watermark -> unit -include S.SIGNATURE with type Public_key_hash.t = public_key_hash - and type Public_key.t = public_key - and type Secret_key.t = secret_key - and type watermark := watermark +include + S.SIGNATURE + with type Public_key_hash.t = public_key_hash + and type Public_key.t = public_key + and type Secret_key.t = secret_key + and type watermark := watermark -val append : ?watermark:watermark -> secret_key -> MBytes.t -> MBytes.t (** [append sk buf] is the concatenation of [buf] and the serialization of the signature of [buf] signed by [sk]. *) +val append : ?watermark:watermark -> secret_key -> MBytes.t -> MBytes.t -val concat : MBytes.t -> t -> MBytes.t (** [concat buf t] is the concatenation of [buf] and the serialization of [t]. 
*) +val concat : MBytes.t -> t -> MBytes.t include S.RAW_DATA with type t := t val of_secp256k1 : Secp256k1.t -> t + val of_ed25519 : Ed25519.t -> t + val of_p256 : P256.t -> t -type algo = - | Ed25519 - | Secp256k1 - | P256 +type algo = Ed25519 | Secp256k1 | P256 -val algo_param: unit -> (algo, 'a) Clic.parameter +val algo_param : unit -> (algo, 'a) Clic.parameter -val generate_key: +val generate_key : ?algo:algo -> ?seed:MBytes.t -> - unit -> public_key_hash * public_key * secret_key + unit -> + public_key_hash * public_key * secret_key diff --git a/src/lib_crypto/test/roundtrips.ml b/src/lib_crypto/test/roundtrips.ml index 7d0cbcf0d694e6df18d7073ccdd62c4eb9768eab..f3ccfe84e22d78d0f2a39b1a935e3729f0eed375 100644 --- a/src/lib_crypto/test/roundtrips.ml +++ b/src/lib_crypto/test/roundtrips.ml @@ -23,28 +23,36 @@ (* *) (*****************************************************************************) - let test_rt_opt name testable enc dec input = try let roundtripped = dec (enc input) in Alcotest.check (Alcotest.option testable) name (Some input) roundtripped - with - exc -> - Alcotest.failf "%s failed for %a: exception whilst decoding: %s" - name (Alcotest.pp testable) input (Printexc.to_string exc) + with exc -> + Alcotest.failf + "%s failed for %a: exception whilst decoding: %s" + name + (Alcotest.pp testable) + input + (Printexc.to_string exc) let test_decode_opt_safe name testable dec encoded = match dec encoded with - | Some _ | None -> () + | Some _ | None -> + () | exception exc -> - Alcotest.failf "%s failed for %a: exception whilst decoding: %s" - name (Alcotest.pp testable) encoded (Printexc.to_string exc) + Alcotest.failf + "%s failed for %a: exception whilst decoding: %s" + name + (Alcotest.pp testable) + encoded + (Printexc.to_string exc) let test_decode_opt_fail name testable dec encoded = try let decoded = dec encoded in Alcotest.check (Alcotest.option testable) name None decoded - with - exc -> - Alcotest.failf "%s failed: exception whilst decoding: %s" - name (Printexc.to_string exc) + with exc -> + Alcotest.failf + "%s failed: exception whilst decoding: %s" + name + (Printexc.to_string exc) diff --git a/src/lib_crypto/test/test_base58.ml b/src/lib_crypto/test/test_base58.ml index 50262e6517fa5890ad36d8a42662317d7bb46f4a..b39fede4e4ab42b869d46430203656ad6ba92429 100644 --- a/src/lib_crypto/test/test_base58.ml +++ b/src/lib_crypto/test/test_base58.ml @@ -27,49 +27,49 @@ let test_roundtrip_safe input = Roundtrips.test_rt_opt "safe base58" Alcotest.string - Base58.safe_encode Base58.safe_decode + Base58.safe_encode + Base58.safe_decode input let test_roundtrip_raw input = Roundtrips.test_rt_opt "raw base58" Alcotest.string - Base58.raw_encode Base58.raw_decode + Base58.raw_encode + Base58.raw_decode input -let inputs = [ - "abc"; - (string_of_int max_int); - "0"; - "00"; - "000"; - "0000"; - "0000000000000000"; - (String.make 64 '0'); - "1"; - "11"; - "111"; - "1111"; - (String.make 2048 '0'); - "2"; - "22"; - "5"; - "Z"; - (String.make 2048 'Z'); - "z"; - "zz"; - "zzzzzzzz"; - (String.make 2048 'z'); - (*loads of ascii characters: codes between 32 and 126 *) - (String.init 1000 (fun i -> (Char.chr (32 + (i mod (126 - 32)))))); - ""; -] +let inputs = + [ "abc"; + string_of_int max_int; + "0"; + "00"; + "000"; + "0000"; + "0000000000000000"; + String.make 64 '0'; + "1"; + "11"; + "111"; + "1111"; + String.make 2048 '0'; + "2"; + "22"; + "5"; + "Z"; + String.make 2048 'Z'; + "z"; + "zz"; + "zzzzzzzz"; + String.make 2048 'z'; + (*loads of ascii characters: codes between 32 and 126 
*) + String.init 1000 (fun i -> Char.chr (32 + (i mod (126 - 32)))); + "" ] let test_roundtrip_safes () = List.iter test_roundtrip_safe inputs let test_roundtrip_raws () = List.iter test_roundtrip_raw inputs - let test_safety input = Roundtrips.test_decode_opt_safe "safe base58" @@ -79,13 +79,9 @@ let test_safety input = let test_safetys () = List.iter test_safety inputs -let tests = [ - "safe decoding", `Quick, test_safetys; - "safe encoding/decoding", `Quick, test_roundtrip_safes; - "raw encoding/decoding", `Quick, test_roundtrip_raws; -] +let tests = + [ ("safe decoding", `Quick, test_safetys); + ("safe encoding/decoding", `Quick, test_roundtrip_safes); + ("raw encoding/decoding", `Quick, test_roundtrip_raws) ] -let () = - Alcotest.run "tezos-crypto" [ - "base58", tests - ] +let () = Alcotest.run "tezos-crypto" [("base58", tests)] diff --git a/src/lib_crypto/test/test_blake2b.ml b/src/lib_crypto/test/test_blake2b.ml index 78cea85c0eeb8adab42e2afc5377814b18dd63cc..015da37a37856751eb8379ba05ed40b30c516b5b 100644 --- a/src/lib_crypto/test/test_blake2b.ml +++ b/src/lib_crypto/test/test_blake2b.ml @@ -29,13 +29,14 @@ let test_hashed_roundtrip name enc dec input = name (Alcotest.testable (fun fmt (input, _) -> Format.fprintf fmt "%s" input) - (fun (_, hashed) (_, decoded) -> hashed = decoded) - ) + (fun (_, hashed) (_, decoded) -> hashed = decoded)) (fun (_, hashed) -> enc hashed) - (fun encoded -> match dec encoded with - | None -> None - | Some decoded -> Some (input, decoded) - ) + (fun encoded -> + match dec encoded with + | None -> + None + | Some decoded -> + Some (input, decoded)) (input, Blake2B.hash_string [input]) let test_roundtrip_hex input = @@ -44,27 +45,22 @@ let test_roundtrip_hex input = let test_roundtrip_string input = test_hashed_roundtrip "String" Blake2B.to_string Blake2B.of_string_opt input -let inputs = [ - "abc"; - (string_of_int max_int); - "0"; - "00"; - (String.make 64 '0'); - (*loads of ascii characters: codes between 32 and 126 *) - (String.init 1000 (fun i -> (Char.chr (32 + (i mod (126 - 32)))))); - ""; -] +let inputs = + [ "abc"; + string_of_int max_int; + "0"; + "00"; + String.make 64 '0'; + (*loads of ascii characters: codes between 32 and 126 *) + String.init 1000 (fun i -> Char.chr (32 + (i mod (126 - 32)))); + "" ] let test_roundtrip_hexs () = List.iter test_roundtrip_hex inputs let test_roundtrip_strings () = List.iter test_roundtrip_string inputs -let tests = [ - "hash hex/dehex", `Quick, test_roundtrip_hexs; - "hash print/parse", `Quick, test_roundtrip_strings; -] +let tests = + [ ("hash hex/dehex", `Quick, test_roundtrip_hexs); + ("hash print/parse", `Quick, test_roundtrip_strings) ] -let () = - Alcotest.run "tezos-crypto" [ - "blake2b", tests - ] +let () = Alcotest.run "tezos-crypto" [("blake2b", tests)] diff --git a/src/lib_crypto/test/test_deterministic_nonce.ml b/src/lib_crypto/test/test_deterministic_nonce.ml index c430eb0b54e855e89700dd653220890ae0d1a751..5535bc7faa9cc7f05a4199e345aa36d50a65d38d 100644 --- a/src/lib_crypto/test/test_deterministic_nonce.ml +++ b/src/lib_crypto/test/test_deterministic_nonce.ml @@ -24,26 +24,24 @@ (*****************************************************************************) let test_hash_matches (module X : S.SIGNATURE) () = - let _, _, sk = X.generate_key () in + let (_, _, sk) = X.generate_key () in let data = MBytes.of_string "ce input sa pun eu aici oare?" 
in let nonce = X.deterministic_nonce sk data in let nonce_hash = X.deterministic_nonce_hash sk data in let hashed_nonce = Blake2B.hash_bytes [nonce] in if nonce_hash <> Blake2B.to_bytes hashed_nonce then - Alcotest.failf "the hash of deterministic_nonce is NOT deterministic_nonce_hash" - + Alcotest.failf + "the hash of deterministic_nonce is NOT deterministic_nonce_hash" let ed25519 = (module Ed25519 : S.SIGNATURE) + let p256 = (module P256 : S.SIGNATURE) + let secp256k1 = (module Secp256k1 : S.SIGNATURE) -let tests = [ - "hash_matches_ed25519", `Quick, (test_hash_matches ed25519); - "hash_matches_p256", `Quick, (test_hash_matches p256); - "hash_matches_secp256k1", `Quick, (test_hash_matches secp256k1); -] +let tests = + [ ("hash_matches_ed25519", `Quick, test_hash_matches ed25519); + ("hash_matches_p256", `Quick, test_hash_matches p256); + ("hash_matches_secp256k1", `Quick, test_hash_matches secp256k1) ] -let () = - Alcotest.run "tezos-crypto" [ - "deterministic_nonce", tests - ] +let () = Alcotest.run "tezos-crypto" [("deterministic_nonce", tests)] diff --git a/src/lib_crypto/test/test_ed25519.ml b/src/lib_crypto/test/test_ed25519.ml index b26ef0a6b7b8afdf0e3cca63bcbe2fdde4b5523c..b7fc337a8b8ba81c5047a26e886dc35ebae1166e 100644 --- a/src/lib_crypto/test/test_ed25519.ml +++ b/src/lib_crypto/test/test_ed25519.ml @@ -25,51 +25,49 @@ module type B58CHECK = sig type t - val pp: Format.formatter -> t -> unit + + val pp : Format.formatter -> t -> unit + include S.B58_DATA with type t := t end -let test_b58check_roundtrip - : type t. (module B58CHECK with type t = t) -> t -> unit - = fun m input -> - let module M = (val m) in - let testable = Alcotest.testable M.pp (=) in - Roundtrips.test_rt_opt - "b58check" - testable - M.to_b58check M.of_b58check_opt - input +let test_b58check_roundtrip : + type t. 
(module B58CHECK with type t = t) -> t -> unit = + fun m input -> + let module M = (val m) in + let testable = Alcotest.testable M.pp ( = ) in + Roundtrips.test_rt_opt + "b58check" + testable + M.to_b58check + M.of_b58check_opt + input let test_b58check_roundtrips () = - let pubkey_hash, pubkey, seckey = Ed25519.generate_key () in - test_b58check_roundtrip (module Ed25519.Public_key_hash) pubkey_hash; - test_b58check_roundtrip (module Ed25519.Public_key) pubkey; + let (pubkey_hash, pubkey, seckey) = Ed25519.generate_key () in + test_b58check_roundtrip (module Ed25519.Public_key_hash) pubkey_hash ; + test_b58check_roundtrip (module Ed25519.Public_key) pubkey ; test_b58check_roundtrip (module Ed25519.Secret_key) seckey - let test_b58check_invalid input = Roundtrips.test_decode_opt_fail "b58check" - (Alcotest.testable Ed25519.Public_key_hash.pp Ed25519.Public_key_hash.(=)) + (Alcotest.testable Ed25519.Public_key_hash.pp Ed25519.Public_key_hash.( = )) Ed25519.Public_key_hash.of_b58check_opt input let test_b58check_invalids () = - List.iter test_b58check_invalid [ - "ThisIsGarbageNotACheck"; - "\x00"; - (String.make 1000 '\x00'); - (String.make 2048 'a'); - (String.init 2048 (fun _ -> Char.chr (Random.int 256))); - ""; - ] + List.iter + test_b58check_invalid + [ "ThisIsGarbageNotACheck"; + "\x00"; + String.make 1000 '\x00'; + String.make 2048 'a'; + String.init 2048 (fun _ -> Char.chr (Random.int 256)); + "" ] -let tests = [ - "b58check.roundtrip", `Quick, test_b58check_roundtrips; - "b58check.invalid", `Slow, test_b58check_invalids; -] +let tests = + [ ("b58check.roundtrip", `Quick, test_b58check_roundtrips); + ("b58check.invalid", `Slow, test_b58check_invalids) ] -let () = - Alcotest.run "tezos-crypto" [ - "ed25519", tests - ] +let () = Alcotest.run "tezos-crypto" [("ed25519", tests)] diff --git a/src/lib_crypto/test/test_merkle.ml b/src/lib_crypto/test/test_merkle.ml index abaac4959852d1a50c7babedffa9f4fecdf990d2..351a0cb4411546d2cf60b6ff282586d8753b381e 100644 --- a/src/lib_crypto/test/test_merkle.ml +++ b/src/lib_crypto/test/test_merkle.ml @@ -25,42 +25,48 @@ open Utils.Infix -type tree = - | Empty - | Leaf of int - | Node of tree * tree +type tree = Empty | Leaf of int | Node of tree * tree let rec list_of_tree = function - | Empty -> [], 0 - | Leaf x -> [x], 1 + | Empty -> + ([], 0) + | Leaf x -> + ([x], 1) | Node (x, y) -> - let x, sx = list_of_tree x - and y, sy = list_of_tree y in + let (x, sx) = list_of_tree x and (y, sy) = list_of_tree y in assert (sx = sy) ; - x @ y, sx + sy + (x @ y, sx + sy) -module Merkle = Blake2B.Generic_Merkle_tree(struct - type t = tree - type elt = int - let empty = Empty - let leaf i = Leaf i - let node x y = Node (x, y) - end) +module Merkle = Blake2B.Generic_Merkle_tree (struct + type t = tree + + type elt = int + + let empty = Empty + + let leaf i = Leaf i + + let node x y = Node (x, y) +end) let rec compare_list xs ys = - match xs, ys with - | [], [] -> true - | [x], y :: ys when x = y -> ys = [] || compare_list xs ys - | x :: xs, y :: ys when x = y -> compare_list xs ys - | _, _ -> false + match (xs, ys) with + | ([], []) -> + true + | ([x], y :: ys) when x = y -> + ys = [] || compare_list xs ys + | (x :: xs, y :: ys) when x = y -> + compare_list xs ys + | (_, _) -> + false let check_size i = let l = 0 -- i in - let l2, _ = list_of_tree (Merkle.compute l) in - if compare_list l l2 then - () + let (l2, _) = list_of_tree (Merkle.compute l) in + if compare_list l l2 then () else - Format.kasprintf failwith + Format.kasprintf + failwith "Failed for %d: 
%a" i (Format.pp_print_list @@ -68,30 +74,21 @@ let check_size i = Format.pp_print_int) l2 -let test_compute _ = - List.iter check_size (0--99) +let test_compute _ = List.iter check_size (0 -- 99) let check_path i = let l = 0 -- i in let orig = Merkle.compute l in - List.iter (fun j -> + List.iter + (fun j -> let path = Merkle.compute_path l j in - let found, pos = Merkle.check_path path j in - if found = orig && j = pos then - () - else - Format.kasprintf failwith "Failed for %d in %d." j i) + let (found, pos) = Merkle.check_path path j in + if found = orig && j = pos then () + else Format.kasprintf failwith "Failed for %d in %d." j i) l -let test_path _ = - List.iter check_path (0--128) +let test_path _ = List.iter check_path (0 -- 128) -let tests = [ - "compute", `Quick, test_compute ; - "path", `Quick, test_path ; -] +let tests = [("compute", `Quick, test_compute); ("path", `Quick, test_path)] -let () = - Alcotest.run "tezos-crypto" [ - "merkel", tests - ] +let () = Alcotest.run "tezos-crypto" [("merkel", tests)] diff --git a/src/lib_crypto/test/test_pvss.ml b/src/lib_crypto/test/test_pvss.ml index 7844e79a6d39a021feea55b331033e207025f780..a396825353dafedd34544bde9d292bcc1a0f7ca5 100644 --- a/src/lib_crypto/test/test_pvss.ml +++ b/src/lib_crypto/test/test_pvss.ml @@ -29,40 +29,59 @@ module Pvss = Pvss_secp256k1 module Sp = Secp256k1_group module Setup : sig - val shares : Pvss.Encrypted_share.t list - val commitments: Pvss.Commitment.t list - val proof: Pvss.proof + + val commitments : Pvss.Commitment.t list + + val proof : Pvss.proof val secret_scalar : Sp.Group.Scalar.t - val secret: Pvss.Secret_key.t - val public_secret: Pvss.Public_key.t + + val secret : Pvss.Secret_key.t + + val public_secret : Pvss.Public_key.t val other_shares : Pvss.Encrypted_share.t list - val other_commitments: Pvss.Commitment.t list - val other_proof: Pvss.proof - val other_secret: Pvss.Secret_key.t - type keypair = {secret_key: Pvss.Secret_key.t; public_key: Pvss.Public_key.t} + val other_commitments : Pvss.Commitment.t list + + val other_proof : Pvss.proof + + val other_secret : Pvss.Secret_key.t + + type keypair = { + secret_key : Pvss.Secret_key.t; + public_key : Pvss.Public_key.t + } + val public_keys : Pvss.Public_key.t list + val keypairs : keypair list - val reveals : (Pvss.Encrypted_share.t * (Pvss.Clear_share.t * Pvss.proof)) list + + val reveals : + (Pvss.Encrypted_share.t * (Pvss.Clear_share.t * Pvss.proof)) list val convert_encoding : 'a Data_encoding.t -> 'b Data_encoding.t -> 'a -> 'b - val group_encoding : Sp.Group.t Data_encoding.t -end = struct + val group_encoding : Sp.Group.t Data_encoding.t +end = struct + type keypair = { + secret_key : Pvss.Secret_key.t; + public_key : Pvss.Public_key.t + } - type keypair = {secret_key: Pvss.Secret_key.t; public_key: Pvss.Public_key.t} + let group_encoding = + Data_encoding.(conv Sp.Group.to_bits Sp.Group.of_bits_exn string) - let group_encoding = Data_encoding.(conv Sp.Group.to_bits Sp.Group.of_bits_exn string) - let scalar_encoding = Data_encoding.(conv Sp.Group.Scalar.to_bits Sp.Group.Scalar.of_bits_exn string) + let scalar_encoding = + Data_encoding.( + conv Sp.Group.Scalar.to_bits Sp.Group.Scalar.of_bits_exn string) let convert_encoding de1 de2 x = - Data_encoding.Binary.of_bytes_exn de2 + Data_encoding.Binary.of_bytes_exn + de2 (Data_encoding.Binary.to_bytes_exn de1 x) - (** Random value of Z in the range [0,2^256] *) let rand_Z () = [Random.int64 Int64.max_int |> Z.of_int64 |> Z.to_bits] @@ -70,94 +89,110 @@ end = struct (** Generates n random 
keypairs *) let random_keypairs n = - List.init n - (fun _ -> let s = Sp.Group.Scalar.of_Z (rand_Z ()) in - let secret_key = convert_encoding scalar_encoding Pvss.Secret_key.encoding s in - { secret_key ; public_key = Pvss.Secret_key.to_public_key secret_key }) + List.init n (fun _ -> + let s = Sp.Group.Scalar.of_Z (rand_Z ()) in + let secret_key = + convert_encoding scalar_encoding Pvss.Secret_key.encoding s + in + {secret_key; public_key = Pvss.Secret_key.to_public_key secret_key}) let t = 5 + let n = 8 - let random_scalar () = - Sp.Group.Scalar.of_Z (rand_Z ()) + let random_scalar () = Sp.Group.Scalar.of_Z (rand_Z ()) let secret_of_scalar s = convert_encoding scalar_encoding Pvss.Secret_key.encoding s let secret_scalar = random_scalar () + let secret = secret_of_scalar secret_scalar + let public_secret = Pvss.Secret_key.to_public_key secret - let other_secret= secret_of_scalar (random_scalar ()) + let other_secret = secret_of_scalar (random_scalar ()) let keypairs = random_keypairs n - let public_keys = List.map (fun { public_key ; _ } -> public_key) keypairs - - let ((shares, commitments, proof), - (other_shares, other_commitments, other_proof)) = - ( - Pvss.dealer_shares_and_proof ~secret ~t ~public_keys, - Pvss.dealer_shares_and_proof ~secret:other_secret ~t ~public_keys - ) - - let reveals = List.map2 ( - fun share keypair -> - (share, Pvss.reveal_share share - ~secret_key:keypair.secret_key ~public_key:keypair.public_key)) - shares keypairs + + let public_keys = List.map (fun {public_key; _} -> public_key) keypairs + + let ( (shares, commitments, proof), + (other_shares, other_commitments, other_proof) ) = + ( Pvss.dealer_shares_and_proof ~secret ~t ~public_keys, + Pvss.dealer_shares_and_proof ~secret:other_secret ~t ~public_keys ) + + let reveals = + List.map2 + (fun share keypair -> + ( share, + Pvss.reveal_share + share + ~secret_key:keypair.secret_key + ~public_key:keypair.public_key )) + shares + keypairs end let test_dealer_proof () = let shr = (Setup.shares, Setup.other_shares) and cmt = (Setup.commitments, Setup.other_commitments) and prf = (Setup.proof, Setup.other_proof) in - - begin - for i = 0 to 1 do - for j = 0 to 1 do - for k = 0 to 1 do - let pick = function 0 -> fst | _ -> snd in - assert ((Pvss.check_dealer_proof - (pick i shr) - (pick j cmt) - ~proof:(pick k prf) ~public_keys:Setup.public_keys) = (i = j && j = k)) - done + for i = 0 to 1 do + for j = 0 to 1 do + for k = 0 to 1 do + let pick = function 0 -> fst | _ -> snd in + assert ( + Pvss.check_dealer_proof + (pick i shr) + (pick j cmt) + ~proof:(pick k prf) + ~public_keys:Setup.public_keys + = (i = j && j = k) ) done done - end + done let test_share_reveal () = - (* check reveal shares *) - let shares_valid = List.map2 (fun (share, (reveal, proof)) public_key -> - Pvss.check_revealed_share share reveal ~public_key:public_key proof) - Setup.reveals Setup.public_keys in - - List.iteri (fun i b -> print_endline (string_of_int i); assert b) + let shares_valid = + List.map2 + (fun (share, (reveal, proof)) public_key -> + Pvss.check_revealed_share share reveal ~public_key proof) + Setup.reveals + Setup.public_keys + in + List.iteri + (fun i b -> + print_endline (string_of_int i) ; + assert b) shares_valid let test_reconstruct () = - let indices = [0;1;2;3;4] in - let reconstructed = Pvss.reconstruct + let indices = [0; 1; 2; 3; 4] in + let reconstructed = + Pvss.reconstruct (List.map - (fun n -> let (_, (r, _)) = List.nth Setup.reveals n in r) indices - ) + (fun n -> + let (_, (r, _)) = List.nth Setup.reveals n in 
+ r) + indices) indices in - assert (Sp.Group.((=)) - (Setup.convert_encoding - Pvss.Public_key.encoding Setup.group_encoding reconstructed) - (Setup.convert_encoding - Pvss.Public_key.encoding Setup.group_encoding Setup.public_secret)) - - -let tests = [ - "dealer_proof", `Quick, test_dealer_proof ; - "reveal", `Quick, test_share_reveal ; - "recontruct", `Quick, test_reconstruct -] - -let () = - Alcotest.run "test-pvss" [ - "pvss", tests - ] + assert ( + Sp.Group.(( = )) + (Setup.convert_encoding + Pvss.Public_key.encoding + Setup.group_encoding + reconstructed) + (Setup.convert_encoding + Pvss.Public_key.encoding + Setup.group_encoding + Setup.public_secret) ) + +let tests = + [ ("dealer_proof", `Quick, test_dealer_proof); + ("reveal", `Quick, test_share_reveal); + ("recontruct", `Quick, test_reconstruct) ] + +let () = Alcotest.run "test-pvss" [("pvss", tests)] diff --git a/src/lib_crypto/znz.ml b/src/lib_crypto/znz.ml index fa3520688b9128d7010ceac70d12aef229e2da79..842a8d4fd9290ee067a298c95cd44456a7556f1e 100644 --- a/src/lib_crypto/znz.ml +++ b/src/lib_crypto/znz.ml @@ -25,91 +25,128 @@ module type ZN = sig type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t + val zero : t - val one : t + + val one : t + val n : Z.t - val (+) : t -> t -> t - val ( * ) : t -> t -> t - val (-) : t -> t -> t - val (=) : t -> t -> bool - val of_int : int -> t - val of_Z : Z.t -> t - val to_Z : t -> Z.t + + val ( + ) : t -> t -> t + + val ( * ) : t -> t -> t + + val ( - ) : t -> t -> t + + val ( = ) : t -> t -> bool + + val of_int : int -> t + + val of_Z : Z.t -> t + + val to_Z : t -> Z.t + val of_bits_exn : String.t -> t - val to_bits : t -> String.t - val pow : t -> Z.t -> t - val inv : t -> t option + + val to_bits : t -> String.t + + val pow : t -> Z.t -> t + + val inv : t -> t option end module type INT = sig val n : Z.t end -module MakeZn (N : INT) (B : sig val b58_prefix : string end) : ZN = struct - +module MakeZn + (N : INT) (B : sig + val b58_prefix : string + end) : ZN = struct type t = Z.t + let n = N.n - let max_char_length = 2 * (Z.numbits n) + + let max_char_length = 2 * Z.numbits n + let zero = Z.zero + let one = Z.one + let of_Z r = Z.(erem r n) + let to_Z a = a + let of_int u = u |> Z.of_int |> of_Z - let to_bits h = h |> Zplus.serialize |> (fun s -> String.sub s 0 (String.length s - 1)) + let to_bits h = + h |> Zplus.serialize |> fun s -> String.sub s 0 (String.length s - 1) + let of_bits_exn bits = (* Do not process oversized inputs. *) - if Compare.Int.((String.length bits) > max_char_length) then + if Compare.Int.(String.length bits > max_char_length) then failwith "input too long" else (* Make sure the input is in the range [0, N.n-1]. Do not reduce modulo N.n for free! 
*) - let x = Zplus.deserialize (bits) in - if Zplus.(x < zero || x >= N.n) then - failwith "out of range" - else - of_Z x + let x = Zplus.deserialize bits in + if Zplus.(x < zero || x >= N.n) then failwith "out of range" else of_Z x let pow a x = Z.powm a Z.(erem x (sub n one)) n - let (+) x y = Z.(erem (add x y) n) - let ( * ) x y = Z.(erem (mul x y) n) - let (-) x y = Z.(erem (sub x y) n) - let (=) x y = Z.equal x y + + let ( + ) x y = Z.(erem (add x y) n) + + let ( * ) x y = Z.(erem (mul x y) n) + + let ( - ) x y = Z.(erem (sub x y) n) + + let ( = ) x y = Z.equal x y let inv a = Zplus.invert a n let title = Format.sprintf "Znz.Make(%s)" (Z.to_string N.n) - let name = Format.sprintf "An element of Z/nZ for n = %s" (Z.to_string N.n) - type Base58.data += - | Data of t + let name = Format.sprintf "An element of Z/nZ for n = %s" (Z.to_string N.n) + + type Base58.data += Data of t let b58check_encoding = Base58.register_encoding - ~prefix: B.b58_prefix - ~length: 32 - ~to_raw: to_bits - ~of_raw: (fun s -> try Some (of_bits_exn s) with _ -> None) - ~wrap: (fun x -> Data x) - - include Helpers.MakeB58(struct - type nonrec t = t - let name = name - let b58check_encoding = b58check_encoding - end) - - include Helpers.MakeEncoder(struct - type nonrec t = t - let name = name - let title = title - let raw_encoding = Data_encoding.(conv to_bits of_bits_exn string) - let to_b58check = to_b58check - let to_short_b58check = to_short_b58check - let of_b58check = of_b58check - let of_b58check_opt = of_b58check_opt - let of_b58check_exn = of_b58check_exn - end) + ~prefix:B.b58_prefix + ~length:32 + ~to_raw:to_bits + ~of_raw:(fun s -> try Some (of_bits_exn s) with _ -> None) + ~wrap:(fun x -> Data x) + + include Helpers.MakeB58 (struct + type nonrec t = t + + let name = name + + let b58check_encoding = b58check_encoding + end) + + include Helpers.MakeEncoder (struct + type nonrec t = t + + let name = name + + let title = title + + let raw_encoding = Data_encoding.(conv to_bits of_bits_exn string) + + let to_b58check = to_b58check + + let to_short_b58check = to_short_b58check + + let of_b58check = of_b58check + + let of_b58check_opt = of_b58check_opt + let of_b58check_exn = of_b58check_exn + end) end diff --git a/src/lib_crypto/znz.mli b/src/lib_crypto/znz.mli index a9757920b45e1a94f40b47e2a159ce02bf52fe7c..0daac1b76de2f7afac0e5666760ea6a614223a0d 100644 --- a/src/lib_crypto/znz.mli +++ b/src/lib_crypto/znz.mli @@ -25,49 +25,57 @@ (** Type for a module representing the ℤ/nℤ ring*) module type ZN = sig - type t + include S.B58_DATA with type t := t + include S.ENCODER with type t := t val zero : t - val one : t + + val one : t + val n : Z.t - val (+) : t -> t -> t - val ( * ) : t -> t -> t - val (-) : t -> t -> t - val (=) : t -> t -> bool + + val ( + ) : t -> t -> t + + val ( * ) : t -> t -> t + + val ( - ) : t -> t -> t + + val ( = ) : t -> t -> bool (** Converts an integer to a ring element *) - val of_int : int -> t + val of_int : int -> t (** Converts a Zarith integer to a ring element *) - val of_Z : Z.t -> t + val of_Z : Z.t -> t (** Provides an integer representation between 0 and n-1 of an element *) - val to_Z : t -> Z.t + val to_Z : t -> Z.t (** Converts a string of bytes to an integer modulo n, requires the string of byte to represent an integer between 0 and n-1 and checks the length of the string for sanity*) - val of_bits_exn : String.t -> t + val of_bits_exn : String.t -> t (** Converts a ring element to a byte representation *) val to_bits : t -> String.t (** Modular exponentiation *) - val 
pow : t -> Z.t -> t + val pow : t -> Z.t -> t (** Returns the inverse of a in ℤ/nℤ, maybe *) - val inv : t -> t option - + val inv : t -> t option end - (** Type of a module wrapping an integer. *) module type INT = sig val n : Z.t end (** Functor to build the ℤ/nℤ ring given n*) -module MakeZn : functor (N : INT) (B : sig val b58_prefix : string end) -> ZN +module MakeZn + (N : INT) (B : sig + val b58_prefix : string + end) : ZN diff --git a/src/lib_crypto/zplus.ml b/src/lib_crypto/zplus.ml index 7da8e371652d2eab26f3546f51946a2cf41d6405..745b509b4355c0055596362e6b7da04f5fba5e9e 100644 --- a/src/lib_crypto/zplus.ml +++ b/src/lib_crypto/zplus.ml @@ -31,18 +31,14 @@ let remove_trailing_null s = let n = String.length s in - let i = ref (n-1) in - while (!i >= 0) && (String.get s !i = '\000') do + let i = ref (n - 1) in + while !i >= 0 && s.[!i] = '\000' do i := !i - 1 - done; String.sub s 0 (!i+1) + done ; + String.sub s 0 (!i + 1) let serialize z = - let n = - if Z.(lt z zero) then - Z.(neg (add (add z z) one)) - else - Z.(add z z) - in + let n = if Z.(lt z zero) then Z.(neg (add (add z z) one)) else Z.(add z z) in n |> Z.to_bits |> remove_trailing_null let deserialize z = @@ -50,24 +46,24 @@ let deserialize z = let z = Z.shift_right_trunc n 1 in if Z.(n land one = zero) then z else Z.neg z -let leq a b = (Z.compare a b) <= 0 +let leq a b = Z.compare a b <= 0 -let geq a b = (Z.compare a b) >= 0 +let geq a b = Z.compare a b >= 0 -let lt a b = (Z.compare a b) < 0 +let lt a b = Z.compare a b < 0 -let gt a b = (Z.compare a b) > 0 +let gt a b = Z.compare a b > 0 -let (<) = lt -let (>) = gt -let (<=) = leq -let (>=) = geq +let ( < ) = lt + +let ( > ) = gt + +let ( <= ) = leq + +let ( >= ) = geq let zero = Z.zero + let one = Z.one -let invert a n = - try - Some (Z.invert a n) - with - Division_by_zero -> None +let invert a n = try Some (Z.invert a n) with Division_by_zero -> None diff --git a/src/lib_crypto/zplus.mli b/src/lib_crypto/zplus.mli index b16dcadb7c71115e228b5c3b4f3e576d8619e00f..3808f6bb785fdf0a4012609e530bf5f2cab28548 100644 --- a/src/lib_crypto/zplus.mli +++ b/src/lib_crypto/zplus.mli @@ -23,37 +23,38 @@ (* *) (*****************************************************************************) -val serialize: Z.t -> string -val deserialize: string -> Z.t +val serialize : Z.t -> string + +val deserialize : string -> Z.t -val leq: Z.t -> Z.t -> bool (** Less than or equal. *) +val leq : Z.t -> Z.t -> bool -val geq: Z.t -> Z.t -> bool (** Greater than or equal. *) +val geq : Z.t -> Z.t -> bool -val lt: Z.t -> Z.t -> bool (** Less than (and not equal). *) +val lt : Z.t -> Z.t -> bool -val gt: Z.t -> Z.t -> bool (** Greater than (and not equal). *) +val gt : Z.t -> Z.t -> bool -val (<=): Z.t -> Z.t -> bool (** Less than or equal. *) +val ( <= ) : Z.t -> Z.t -> bool -val (>=): Z.t -> Z.t -> bool (** Greater than or equal. *) +val ( >= ) : Z.t -> Z.t -> bool -val (<): Z.t -> Z.t -> bool (** Less than (and not equal). *) +val ( < ) : Z.t -> Z.t -> bool -val (>): Z.t -> Z.t -> bool (** Greater than (and not equal). *) +val ( > ) : Z.t -> Z.t -> bool -val zero: Z.t +val zero : Z.t -val one: Z.t +val one : Z.t -val invert: Z.t -> Z.t -> Z.t option (** Invert the first argument modulo the second. 
Returns none if there is no inverse *) +val invert : Z.t -> Z.t -> Z.t option diff --git a/src/lib_data_encoding/.ocamlformat b/src/lib_data_encoding/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_data_encoding/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_data_encoding/binary_description.ml b/src/lib_data_encoding/binary_description.ml index ca5b75107d44b435f034aa09a274e88d9fd7db91..66e3f7795312c04eaa8253b688298d44e02aab3c 100644 --- a/src/lib_data_encoding/binary_description.ml +++ b/src/lib_data_encoding/binary_description.ml @@ -24,45 +24,57 @@ (*****************************************************************************) type recursives = string list -type references = { descriptions : (string * Binary_schema.toplevel_encoding) list } [@@unwrapped] + +type references = { + descriptions : (string * Binary_schema.toplevel_encoding) list +} +[@@unwrapped] (* Simple Union find implementation, there are several optimizations that give UF it's usual time complexity that could be added. If this is a bottleneck, they're easy to add. *) module UF : sig type t + val add : t -> Binary_schema.description -> unit + val find : t -> string -> Binary_schema.description - val union : t -> new_cannonical:Binary_schema.description -> existing:string -> unit + + val union : + t -> new_cannonical:Binary_schema.description -> existing:string -> unit + val empty : unit -> t end = struct open Binary_schema + type ele = Ref of string | Root of description + type t = (string, ele) Hashtbl.t + let add t x = Hashtbl.replace t x.title (Root x) + let rec find tbl key = - match Hashtbl.find tbl key with - | Ref s -> find tbl s - | Root desc -> desc + match Hashtbl.find tbl key with Ref s -> find tbl s | Root desc -> desc let union tbl ~new_cannonical ~existing = add tbl new_cannonical ; let root = find tbl existing in - if root.title = new_cannonical.title - then () + if root.title = new_cannonical.title then () else Hashtbl.replace tbl root.title (Ref new_cannonical.title) let empty () = Hashtbl.create 128 - end let fixup_references uf = let open Binary_schema in let rec fixup_layout = function - | Ref s -> Ref (UF.find uf s).title - | Enum (i, name) -> Enum (i, (UF.find uf name).title) - | Seq (layout, len) -> Seq (fixup_layout layout, len) - | (Zero_width + | Ref s -> + Ref (UF.find uf s).title + | Enum (i, name) -> + Enum (i, (UF.find uf name).title) + | Seq (layout, len) -> + Seq (fixup_layout layout, len) + | ( Zero_width | Int _ | Bool | RangedInt (_, _) @@ -70,447 +82,560 @@ let fixup_references uf = | Float | Bytes | String - | Padding) as enc -> enc in + | Padding ) as enc -> + enc + in let field = function | Named_field (name, kind, layout) -> Named_field (name, kind, fixup_layout layout) | Anonymous_field (kind, layout) -> Anonymous_field (kind, fixup_layout layout) - | (Dynamic_size_field _ | Optional_field _) as field -> field in + | (Dynamic_size_field _ | Optional_field _) as field -> + field + in function - | Obj { fields } -> Obj { fields = List.map field fields } - | Cases ({ cases ; _ } as x) -> - Cases { x with - cases = List.map - (fun (i, name, fields) -> - (i, name, List.map field fields)) cases } - | 
(Int_enum _ as ie) -> ie + | Obj {fields} -> + Obj {fields = List.map field fields} + | Cases ({cases; _} as x) -> + Cases + { x with + cases = + List.map + (fun (i, name, fields) -> (i, name, List.map field fields)) + cases } + | Int_enum _ as ie -> + ie let z_reference_name = "Z.t" let z_reference_description = - "A variable length sequence of bytes, encoding a Zarith number. \ - Each byte has a running unary size bit: the most significant bit of \ - each byte tells is this is the last byte in the sequence (0) or if \ - there is more to read (1). The second most significant bit of the \ - first byte is reserved for the sign (positive if zero). Size and \ - sign bits ignored, data is then the binary representation of the \ - absolute value of the number in little endian order." + "A variable length sequence of bytes, encoding a Zarith number. Each byte \ + has a running unary size bit: the most significant bit of each byte tells \ + is this is the last byte in the sequence (0) or if there is more to read \ + (1). The second most significant bit of the first byte is reserved for the \ + sign (positive if zero). Size and sign bits ignored, data is then the \ + binary representation of the absolute value of the number in little endian \ + order." let z_encoding = - Binary_schema.Obj { fields = [ Named_field ("Z.t", `Dynamic, Bytes) ] } + Binary_schema.Obj {fields = [Named_field ("Z.t", `Dynamic, Bytes)]} -let add_z_reference uf { descriptions } = - UF.add uf { title = z_reference_name ; - description = Some z_reference_description } ; - { descriptions = (z_reference_name, z_encoding) :: descriptions } +let add_z_reference uf {descriptions} = + UF.add + uf + {title = z_reference_name; description = Some z_reference_description} ; + {descriptions = (z_reference_name, z_encoding) :: descriptions} let n_reference_name = "N.t" let n_reference_description = - "A variable length sequence of bytes, encoding a Zarith number. \ - Each byte has a running unary size bit: the most significant bit of \ - each byte tells is this is the last byte in the sequence (0) or if \ - there is more to read (1). Size bits ignored, data is then the binary \ - representation of the absolute value of the number in little endian order." + "A variable length sequence of bytes, encoding a Zarith number. Each byte \ + has a running unary size bit: the most significant bit of each byte tells \ + is this is the last byte in the sequence (0) or if there is more to read \ + (1). Size bits ignored, data is then the binary representation of the \ + absolute value of the number in little endian order." 
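(* Editor's aside, not part of the patch above: a minimal sketch of the
   variable-length natural-number layout that n_reference_description (and,
   with an extra sign bit in the first byte, z_reference_description)
   documents. It assumes plain OCaml int / string instead of the Zarith and
   MBytes types used in this library, and the names encode_nat / decode_nat
   are made up for illustration: each byte holds 7 data bits in little-endian
   order, and its most significant bit is set exactly when more bytes follow. *)
let encode_nat n =
  let buf = Buffer.create 4 in
  let rec go n =
    let low = n land 0x7f and rest = n lsr 7 in
    if rest = 0 then Buffer.add_char buf (Char.chr low)
    else (
      Buffer.add_char buf (Char.chr (low lor 0x80)) ;
      go rest )
  in
  go n ;
  Buffer.contents buf

let decode_nat s =
  let rec go i shift acc =
    let b = Char.code s.[i] in
    let acc = acc lor ((b land 0x7f) lsl shift) in
    if b land 0x80 = 0 then acc else go (i + 1) (shift + 7) acc
  in
  go 0 0 0

(* For example, encode_nat 300 yields "\172\002" (0xAC 0x02) and
   decode_nat "\172\002" yields 300; a zero value still occupies one byte,
   which matches the `if bits = 0 then 1` case of Binary_length.n_length
   further down in this patch. *)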
let n_encoding = - Binary_schema.Obj { fields = [ Named_field ("N.t", `Dynamic, Bytes) ] } + Binary_schema.Obj {fields = [Named_field ("N.t", `Dynamic, Bytes)]} -let add_n_reference uf { descriptions } = - UF.add uf { title = n_reference_name ; - description = Some n_reference_description } ; - { descriptions = (n_reference_name, n_encoding) :: descriptions } +let add_n_reference uf {descriptions} = + UF.add + uf + {title = n_reference_name; description = Some n_reference_description} ; + {descriptions = (n_reference_name, n_encoding) :: descriptions} let dedup_canonicalize uf = - let tbl : (Binary_schema.toplevel_encoding, Binary_schema.description) Hashtbl.t = Hashtbl.create 100 in + let tbl : + (Binary_schema.toplevel_encoding, Binary_schema.description) Hashtbl.t = + Hashtbl.create 100 + in let rec help prev_len acc = function | [] -> let fixedup = List.map (fun (desc, layout) -> (desc, fixup_references uf layout)) - acc in - if List.length fixedup = prev_len - then - List.map - (fun (name, layout) -> - (UF.find uf name, layout)) - fixedup - else - begin - Hashtbl.clear tbl ; - help (List.length fixedup) [] fixedup - end - | (name, layout) :: tl -> - match Hashtbl.find_opt tbl layout with - | None -> - let desc = UF.find uf name in - begin - Hashtbl.add tbl layout desc ; - help prev_len ((desc.title, layout) :: acc) tl - end - | Some original_desc -> - begin - UF.union uf - ~new_cannonical:original_desc - ~existing:name ; - help prev_len acc tl - end + acc + in + if List.length fixedup = prev_len then + List.map (fun (name, layout) -> (UF.find uf name, layout)) fixedup + else ( + Hashtbl.clear tbl ; + help (List.length fixedup) [] fixedup ) + | (name, layout) :: tl -> ( + match Hashtbl.find_opt tbl layout with + | None -> + let desc = UF.find uf name in + Hashtbl.add tbl layout desc ; + help prev_len ((desc.title, layout) :: acc) tl + | Some original_desc -> + UF.union uf ~new_cannonical:original_desc ~existing:name ; + help prev_len acc tl ) in help 0 [] - type pdesc = P : 'x Encoding.desc -> pdesc + let describe (type x) (encoding : x Encoding.t) = let open Encoding in let uf = UF.empty () in - let uf_add_name title = - UF.add uf { title ; description = None } in - let add_reference name description { descriptions } = - { descriptions = (name, description) :: descriptions } in + let uf_add_name title = UF.add uf {title; description = None} in + let add_reference name description {descriptions} = + {descriptions = (name, description) :: descriptions} + in let new_reference = let x = ref ~-1 in fun () -> x := !x + 1 ; let name = "X_" ^ string_of_int !x in - uf_add_name name ; - name in + uf_add_name name ; name + in let may_new_reference = function - | None -> new_reference () + | None -> + new_reference () | Some name -> - uf_add_name name ; - name in + uf_add_name name ; name + in let rec extract_dynamic : - type x. string option -> x Encoding.desc -> Binary_size.unsigned_integer option * string option * pdesc = - fun ref_name -> function - | Conv { encoding ; _ } -> extract_dynamic ref_name encoding.encoding - | Describe { id = ref_name ; encoding ; _ } -> extract_dynamic (Some ref_name) encoding.encoding - | Splitted { encoding ; _ } -> extract_dynamic ref_name encoding.encoding - | Delayed f -> extract_dynamic ref_name (f ()).encoding - | Dynamic_size { kind ; encoding } -> (Some kind, ref_name, P encoding.encoding) - | enc -> (None, ref_name, P enc) in + type x. 
+ string option -> + x Encoding.desc -> + Binary_size.unsigned_integer option * string option * pdesc = + fun ref_name -> function + | Conv {encoding; _} -> + extract_dynamic ref_name encoding.encoding + | Describe {id = ref_name; encoding; _} -> + extract_dynamic (Some ref_name) encoding.encoding + | Splitted {encoding; _} -> + extract_dynamic ref_name encoding.encoding + | Delayed f -> + extract_dynamic ref_name (f ()).encoding + | Dynamic_size {kind; encoding} -> + (Some kind, ref_name, P encoding.encoding) + | enc -> + (None, ref_name, P enc) + in let rec field_descr : - type a. recursives -> references -> - a Encoding.field -> Binary_schema.field_descr list * references = - fun recursives references -> function - | Req { name ; encoding = { encoding ; _ } ; _ } - | Dft { name ; encoding = { encoding ; _ } ; _ } -> begin - let (dynamics, ref_name, P field) = extract_dynamic None encoding in - let (layout, references) = layout ref_name recursives references field in - if layout = Zero_width then - ([], references) - else - let field_descr = - Binary_schema.Named_field (name, classify_desc field, layout) in - match dynamics with - | Some kind -> - ([ Dynamic_size_field (ref_name, 1, kind) ; field_descr ], references) - | None -> - ([ field_descr], references) - end - | Opt { kind = `Variable ; name ; encoding = { encoding ; _ } ; _ } -> - let (layout, references) = - layout None recursives references encoding in - ([ Named_field (name, `Variable, layout) ], references) - | Opt { kind = `Dynamic ; name ; encoding = { encoding ; _ } ; _ } -> - let (layout, references) = - layout None recursives references encoding in - ([Binary_schema.Optional_field name ; Named_field (name, classify_desc encoding, layout) ], references) - and obj fields = - Binary_schema.Obj { fields } + type a. + recursives -> + references -> + a Encoding.field -> + Binary_schema.field_descr list * references = + fun recursives references -> function + | Req {name; encoding = {encoding; _}; _} + | Dft {name; encoding = {encoding; _}; _} -> ( + let (dynamics, ref_name, P field) = extract_dynamic None encoding in + let (layout, references) = + layout ref_name recursives references field + in + if layout = Zero_width then ([], references) + else + let field_descr = + Binary_schema.Named_field (name, classify_desc field, layout) + in + match dynamics with + | Some kind -> + ( [Dynamic_size_field (ref_name, 1, kind); field_descr], + references ) + | None -> + ([field_descr], references) ) + | Opt {kind = `Variable; name; encoding = {encoding; _}; _} -> + let (layout, references) = + layout None recursives references encoding + in + ([Named_field (name, `Variable, layout)], references) + | Opt {kind = `Dynamic; name; encoding = {encoding; _}; _} -> + let (layout, references) = + layout None recursives references encoding + in + ( [ Binary_schema.Optional_field name; + Named_field (name, classify_desc encoding, layout) ], + references ) + and obj fields = Binary_schema.Obj {fields} and union : - type a. 
string option -> recursives -> references -> Kind.t -> Binary_size.tag_size -> a case list -> string * references= - fun ref_name recursives references kind size cases -> - let cases = - List.sort (fun (t1, _) (t2, _) -> Compare.Int.compare t1 t2) @@ - TzList.filter_map - (function - | Case { tag = Json_only ; _ } -> None - | (Case { tag = Tag tag ; _ } as case) -> Some (tag, case)) - cases in - let tag_field = - Binary_schema.Named_field ("Tag", `Fixed (Binary_size.tag_size size), Int (size :> Binary_schema.integer_extended)) in - let (cases, references) = - List.fold_right - (fun (tag, Case case) (cases, references) -> - let fields, references = fields None recursives references case.encoding.encoding in - ((tag, Some case.title, tag_field :: fields) :: cases, references)) - cases - ([], references) in - let name = may_new_reference ref_name in - let references = - add_reference - name - (Cases { kind ; - tag_size = size ; - cases }) references in - (name, references) - and describe : type b. ?description:string -> title:string -> - string -> recursives -> references -> b desc -> string * references = - fun ?description ~title name recursives references encoding -> - let new_cannonical = { Binary_schema.title ; description } in - UF.add uf new_cannonical ; - let layout, references = layout None recursives references encoding in - begin - match layout with - | Ref ref_name -> - UF.union uf ~existing:ref_name ~new_cannonical ; - (ref_name, references) - | layout -> - UF.add uf new_cannonical ; - (name, - add_reference name - (obj [ Anonymous_field (classify_desc encoding, layout) ]) - references) - end - and enum : type a. (a, _) Hashtbl.t -> a array -> _ = fun tbl encoding_array -> - (Binary_size.range_to_size ~minimum:0 ~maximum:(Array.length encoding_array), - List.map - (fun i -> (i, fst @@ Hashtbl.find tbl encoding_array.(i))) - Utils.Infix.(0 -- ((Array.length encoding_array) - 1))) + type a. + string option -> + recursives -> + references -> + Kind.t -> + Binary_size.tag_size -> + a case list -> + string * references = + fun ref_name recursives references kind size cases -> + let cases = + List.sort (fun (t1, _) (t2, _) -> Compare.Int.compare t1 t2) + @@ TzList.filter_map + (function + | Case {tag = Json_only; _} -> + None + | Case {tag = Tag tag; _} as case -> + Some (tag, case)) + cases + in + let tag_field = + Binary_schema.Named_field + ( "Tag", + `Fixed (Binary_size.tag_size size), + Int (size :> Binary_schema.integer_extended) ) + in + let (cases, references) = + List.fold_right + (fun (tag, Case case) (cases, references) -> + let (fields, references) = + fields None recursives references case.encoding.encoding + in + ((tag, Some case.title, tag_field :: fields) :: cases, references)) + cases + ([], references) + in + let name = may_new_reference ref_name in + let references = + add_reference name (Cases {kind; tag_size = size; cases}) references + in + (name, references) + and describe : + type b. 
+ ?description:string -> + title:string -> + string -> + recursives -> + references -> + b desc -> + string * references = + fun ?description ~title name recursives references encoding -> + let new_cannonical = {Binary_schema.title; description} in + UF.add uf new_cannonical ; + let (layout, references) = layout None recursives references encoding in + match layout with + | Ref ref_name -> + UF.union uf ~existing:ref_name ~new_cannonical ; + (ref_name, references) + | layout -> + UF.add uf new_cannonical ; + ( name, + add_reference + name + (obj [Anonymous_field (classify_desc encoding, layout)]) + references ) + and enum : type a. (a, _) Hashtbl.t -> a array -> _ = + fun tbl encoding_array -> + ( Binary_size.range_to_size + ~minimum:0 + ~maximum:(Array.length encoding_array), + List.map + (fun i -> (i, fst @@ Hashtbl.find tbl encoding_array.(i))) + Utils.Infix.(0 -- (Array.length encoding_array - 1)) ) and fields : - type b. string option -> recursives -> references -> - b Encoding.desc -> Binary_schema.fields * references = - fun ref_name recursives references -> function - | Obj field -> - field_descr recursives references field - | Objs { left ; right ; _ } -> - let (left_fields, references) = - fields None recursives references left.encoding in - let (right_fields, references) = - fields None recursives references right.encoding in - (left_fields @ right_fields, references) - | Null -> ([ Anonymous_field (`Fixed 0, Zero_width) ], references) - | Empty -> ([ Anonymous_field (`Fixed 0, Zero_width) ], references) - | Ignore -> ([ Anonymous_field (`Fixed 0, Zero_width) ], references) - | Constant _ -> ([ Anonymous_field (`Fixed 0, Zero_width) ], references) - | Dynamic_size { kind ; encoding } -> - let (fields, refs) = - fields None recursives references encoding.encoding in - (Dynamic_size_field (None, List.length fields, kind) :: fields, refs) - | Check_size { encoding ; _ } -> - fields ref_name recursives references encoding.encoding - | Conv { encoding ; _ } -> - fields ref_name recursives references encoding.encoding - | Describe { id = name ; encoding ; _ } -> - fields (Some name) recursives references encoding.encoding - | Splitted { encoding ; _ } -> - fields ref_name recursives references encoding.encoding - | Delayed func -> - fields ref_name recursives references (func ()).encoding - | List (len, { encoding ; _ }) -> - let (layout, references) = - layout None recursives references encoding in - ([ Anonymous_field (`Variable, Seq (layout, len)) ], - references) - | Array (len, { encoding ; _ }) -> - let (layout, references) = - layout None recursives references encoding in - ([ Anonymous_field (`Variable, Seq (layout, len)) ], - references) - | Bytes kind -> - ([ Anonymous_field ((kind :> Kind.t), Bytes) ], references) - | String kind -> - ([ Anonymous_field ((kind :> Kind.t), String) ], references) - | Padded ({ encoding = e ; _ }, n) -> - let fields, references = fields ref_name recursives references e in - (fields @ [ Named_field ("padding", `Fixed n, Padding) ], references) - | (String_enum (tbl, encoding_array) as encoding) -> - let size, cases = enum tbl encoding_array in - let name = may_new_reference ref_name in - ([ Anonymous_field (classify_desc encoding, Ref name) ], - add_reference name (Int_enum { size ; cases }) references) - | Tup { encoding ; _ } -> - let (layout, references) = - layout ref_name recursives references encoding in - if layout = Zero_width then - ([], references) - else - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Tups { 
left ; right ; _ } -> - let (fields1, references) = - fields None recursives references left.encoding in - let (fields2, references) = - fields None recursives references right.encoding in - (fields1 @ fields2, references) - | Union { kind ; tag_size ; cases } -> - let name, references = union None recursives references kind tag_size cases in - ([ Anonymous_field (kind, Ref name) ], references) - | (Mu { kind ; name ; title ; description ; fix } as encoding) -> - let kind = (kind :> Kind.t) in - let title = Option.unopt ~default:name title in - if List.mem name recursives - then ([ Anonymous_field (kind, Ref name) ], references) - else - let { encoding ; _ } = fix { encoding ; json_encoding = None } in - let (name, references) = describe ~title ?description name (name :: recursives) references encoding in - ([ Anonymous_field (kind, Ref name) ], references) - | Bool as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Int8 as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Uint8 as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Int16 as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Uint16 as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Int31 as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Int32 as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Int64 as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | N as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Z as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | RangedInt _ as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | RangedFloat _ as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - | Float as encoding -> - let layout, references = - layout None recursives references encoding in - ([ Anonymous_field (classify_desc encoding, layout) ], references) - and layout : - type c. 
string option -> recursives -> references -> - c Encoding.desc -> Binary_schema.layout * references = - fun ref_name recursives references -> function - | Null -> (Zero_width, references) - | Empty -> (Zero_width, references) - | Ignore -> (Zero_width, references) - | Constant _ -> (Zero_width, references) - | Bool -> (Bool, references) - | Int8 -> (Int `Int8, references) - | Uint8 -> (Int `Uint8, references) - | Int16 -> (Int `Int16, references) - | Uint16 -> (Int `Uint16, references) - | Int31 -> (RangedInt (~-1073741824, 1073741823), references) - | Int32 -> (Int `Int32, references) - | Int64 -> (Int `Int64, references) - | N -> - (Ref n_reference_name, - add_n_reference uf references) - | Z -> - (Ref z_reference_name, - add_z_reference uf references) - | RangedInt { minimum ; maximum } -> - (RangedInt (minimum, maximum), references) - | RangedFloat { minimum ; maximum } -> - (RangedFloat (minimum, maximum), references) - | Float -> - (Float, references) - | Bytes _kind -> - (Bytes, references) - | String _kind -> - (String, references) - | Padded _ as enc -> - let name = may_new_reference ref_name in - let fields, references = fields None recursives references enc in - let references = add_reference name (obj fields) references in - (Ref name, references) - | String_enum (tbl, encoding_array) -> - let name = may_new_reference ref_name in - let size, cases = enum tbl encoding_array in - let references = add_reference name (Int_enum { size ; cases }) references in - (Enum (size, name), references) - | Array (len, data) -> - let (descr, references) = - layout None recursives references data.encoding in - (Seq (descr, len), references) - | List (len, data) -> - let layout, references = - layout None recursives references data.encoding in - (Seq (layout, len), references) - | Obj (Req { encoding = { encoding ; _ } ; _ }) - | Obj (Dft { encoding = { encoding ; _ } ; _ }) -> + type b. 
+ string option -> + recursives -> + references -> + b Encoding.desc -> + Binary_schema.fields * references = + fun ref_name recursives references -> function + | Obj field -> + field_descr recursives references field + | Objs {left; right; _} -> + let (left_fields, references) = + fields None recursives references left.encoding + in + let (right_fields, references) = + fields None recursives references right.encoding + in + (left_fields @ right_fields, references) + | Null -> + ([Anonymous_field (`Fixed 0, Zero_width)], references) + | Empty -> + ([Anonymous_field (`Fixed 0, Zero_width)], references) + | Ignore -> + ([Anonymous_field (`Fixed 0, Zero_width)], references) + | Constant _ -> + ([Anonymous_field (`Fixed 0, Zero_width)], references) + | Dynamic_size {kind; encoding} -> + let (fields, refs) = + fields None recursives references encoding.encoding + in + (Dynamic_size_field (None, List.length fields, kind) :: fields, refs) + | Check_size {encoding; _} -> + fields ref_name recursives references encoding.encoding + | Conv {encoding; _} -> + fields ref_name recursives references encoding.encoding + | Describe {id = name; encoding; _} -> + fields (Some name) recursives references encoding.encoding + | Splitted {encoding; _} -> + fields ref_name recursives references encoding.encoding + | Delayed func -> + fields ref_name recursives references (func ()).encoding + | List (len, {encoding; _}) -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (`Variable, Seq (layout, len))], references) + | Array (len, {encoding; _}) -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (`Variable, Seq (layout, len))], references) + | Bytes kind -> + ([Anonymous_field ((kind :> Kind.t), Bytes)], references) + | String kind -> + ([Anonymous_field ((kind :> Kind.t), String)], references) + | Padded ({encoding = e; _}, n) -> + let (fields, references) = fields ref_name recursives references e in + (fields @ [Named_field ("padding", `Fixed n, Padding)], references) + | String_enum (tbl, encoding_array) as encoding -> + let (size, cases) = enum tbl encoding_array in + let name = may_new_reference ref_name in + ( [Anonymous_field (classify_desc encoding, Ref name)], + add_reference name (Int_enum {size; cases}) references ) + | Tup {encoding; _} -> + let (layout, references) = layout ref_name recursives references encoding - | Obj (Opt _) as enc -> - let name = may_new_reference ref_name in - let fields, references = fields None recursives references enc in - let references = add_reference name (obj fields) references in - (Ref name, references) - | Objs { left ; right ; _ } -> - let name = may_new_reference ref_name in - let fields1, references = - fields None recursives references left.encoding in - let fields2, references = - fields None recursives references right.encoding in - let references = add_reference name (obj (fields1 @ fields2)) references in - (Ref name, references) - | Tup { encoding ; _ } -> - layout ref_name recursives references encoding - | (Tups _ as descr) -> - let name = may_new_reference ref_name in - let fields, references = fields None recursives references descr in - let references = add_reference name (obj fields) references in - (Ref name, references) - | Union { kind ; tag_size ; cases } -> - let name, references = union ref_name recursives references kind tag_size cases in + in + if layout = Zero_width then ([], references) + else ([Anonymous_field (classify_desc encoding, 
layout)], references) + | Tups {left; right; _} -> + let (fields1, references) = + fields None recursives references left.encoding + in + let (fields2, references) = + fields None recursives references right.encoding + in + (fields1 @ fields2, references) + | Union {kind; tag_size; cases} -> + let (name, references) = + union None recursives references kind tag_size cases + in + ([Anonymous_field (kind, Ref name)], references) + | Mu {kind; name; title; description; fix} as encoding -> + let kind = (kind :> Kind.t) in + let title = Option.unopt ~default:name title in + if List.mem name recursives then + ([Anonymous_field (kind, Ref name)], references) + else + let {encoding; _} = fix {encoding; json_encoding = None} in + let (name, references) = + describe + ~title + ?description + name + (name :: recursives) + references + encoding + in + ([Anonymous_field (kind, Ref name)], references) + | Bool as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | Int8 as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | Uint8 as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | Int16 as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | Uint16 as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | Int31 as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | Int32 as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | Int64 as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | N as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | Z as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | RangedInt _ as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | RangedFloat _ as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + | Float as encoding -> + let (layout, references) = + layout None recursives references encoding + in + ([Anonymous_field (classify_desc encoding, layout)], references) + and layout : + type c. 
+ string option -> + recursives -> + references -> + c Encoding.desc -> + Binary_schema.layout * references = + fun ref_name recursives references -> function + | Null -> + (Zero_width, references) + | Empty -> + (Zero_width, references) + | Ignore -> + (Zero_width, references) + | Constant _ -> + (Zero_width, references) + | Bool -> + (Bool, references) + | Int8 -> + (Int `Int8, references) + | Uint8 -> + (Int `Uint8, references) + | Int16 -> + (Int `Int16, references) + | Uint16 -> + (Int `Uint16, references) + | Int31 -> + (RangedInt (~-1073741824, 1073741823), references) + | Int32 -> + (Int `Int32, references) + | Int64 -> + (Int `Int64, references) + | N -> + (Ref n_reference_name, add_n_reference uf references) + | Z -> + (Ref z_reference_name, add_z_reference uf references) + | RangedInt {minimum; maximum} -> + (RangedInt (minimum, maximum), references) + | RangedFloat {minimum; maximum} -> + (RangedFloat (minimum, maximum), references) + | Float -> + (Float, references) + | Bytes _kind -> + (Bytes, references) + | String _kind -> + (String, references) + | Padded _ as enc -> + let name = may_new_reference ref_name in + let (fields, references) = fields None recursives references enc in + let references = add_reference name (obj fields) references in + (Ref name, references) + | String_enum (tbl, encoding_array) -> + let name = may_new_reference ref_name in + let (size, cases) = enum tbl encoding_array in + let references = + add_reference name (Int_enum {size; cases}) references + in + (Enum (size, name), references) + | Array (len, data) -> + let (descr, references) = + layout None recursives references data.encoding + in + (Seq (descr, len), references) + | List (len, data) -> + let (layout, references) = + layout None recursives references data.encoding + in + (Seq (layout, len), references) + | Obj (Req {encoding = {encoding; _}; _}) + | Obj (Dft {encoding = {encoding; _}; _}) -> + layout ref_name recursives references encoding + | Obj (Opt _) as enc -> + let name = may_new_reference ref_name in + let (fields, references) = fields None recursives references enc in + let references = add_reference name (obj fields) references in + (Ref name, references) + | Objs {left; right; _} -> + let name = may_new_reference ref_name in + let (fields1, references) = + fields None recursives references left.encoding + in + let (fields2, references) = + fields None recursives references right.encoding + in + let references = + add_reference name (obj (fields1 @ fields2)) references + in + (Ref name, references) + | Tup {encoding; _} -> + layout ref_name recursives references encoding + | Tups _ as descr -> + let name = may_new_reference ref_name in + let (fields, references) = fields None recursives references descr in + let references = add_reference name (obj fields) references in + (Ref name, references) + | Union {kind; tag_size; cases} -> + let (name, references) = + union ref_name recursives references kind tag_size cases + in + (Ref name, references) + | Mu {name; title; description; fix; _} as encoding -> + let title = Option.unopt ~default:name title in + if List.mem name recursives then (Ref name, references) + else + let {encoding; _} = fix {encoding; json_encoding = None} in + let (name, references) = + describe + name + ~title + ?description + (name :: recursives) + references + encoding + in (Ref name, references) - | Mu { name ; title ; description ; fix ; _ } as encoding -> - let title = Option.unopt ~default:name title in - if List.mem name recursives - then (Ref name, 
references) - else - let { encoding ; _ } = fix { encoding ; json_encoding = None } in - let (name, references) = describe name ~title ?description (name :: recursives) references encoding in - (Ref name, references) - | Conv { encoding ; _ } -> - layout ref_name recursives references encoding.encoding - | Describe { id = name ; encoding ; _ } -> - layout (Some name) recursives references encoding.encoding - | Splitted { encoding ; _ } -> - layout ref_name recursives references encoding.encoding - | (Dynamic_size _) as encoding -> - let name = may_new_reference ref_name in - let fields, references = fields None recursives references encoding in - UF.add uf { title = name ; description = None } ; - (Ref name, add_reference name (obj fields) references) - | Check_size { encoding ; _ } -> - layout ref_name recursives references encoding.encoding - | Delayed func -> - layout ref_name recursives references (func ()).encoding in - let fields, references = - fields None [] { descriptions = [] } encoding.encoding in + | Conv {encoding; _} -> + layout ref_name recursives references encoding.encoding + | Describe {id = name; encoding; _} -> + layout (Some name) recursives references encoding.encoding + | Splitted {encoding; _} -> + layout ref_name recursives references encoding.encoding + | Dynamic_size _ as encoding -> + let name = may_new_reference ref_name in + let (fields, references) = + fields None recursives references encoding + in + UF.add uf {title = name; description = None} ; + (Ref name, add_reference name (obj fields) references) + | Check_size {encoding; _} -> + layout ref_name recursives references encoding.encoding + | Delayed func -> + layout ref_name recursives references (func ()).encoding + in + let (fields, references) = + fields None [] {descriptions = []} encoding.encoding + in uf_add_name "" ; - let _, toplevel = List.hd (dedup_canonicalize uf ["", obj fields]) in + let (_, toplevel) = List.hd (dedup_canonicalize uf [("", obj fields)]) in let filtered = List.filter (fun (name, encoding) -> - match encoding with - | Binary_schema.Obj { fields = [ Anonymous_field (_, Ref reference) ] } -> - UF.union uf ~new_cannonical:(UF.find uf name) ~existing:reference ; - false - | _ -> true) - references.descriptions in + match encoding with + | Binary_schema.Obj {fields = [Anonymous_field (_, Ref reference)]} -> + UF.union uf ~new_cannonical:(UF.find uf name) ~existing:reference ; + false + | _ -> + true) + references.descriptions + in let fields = List.rev (dedup_canonicalize uf filtered) in - { Binary_schema.toplevel ; fields } - - - + {Binary_schema.toplevel; fields} diff --git a/src/lib_data_encoding/binary_description.mli b/src/lib_data_encoding/binary_description.mli index 87abf7ffc6db6015328661e8ae63a44a639b65fe..38c220616cf9eef6bb9fe9f18c23295a79d0987d 100644 --- a/src/lib_data_encoding/binary_description.mli +++ b/src/lib_data_encoding/binary_description.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val describe: 'a Encoding.t -> Binary_schema.t +val describe : 'a Encoding.t -> Binary_schema.t diff --git a/src/lib_data_encoding/binary_error.ml b/src/lib_data_encoding/binary_error.ml index dd94c34fe6fdf4de9f0c5656bb6bab66cc305f83..4ba48842c72a69baa5244479fbcc6a0cad7c9146 100644 --- a/src/lib_data_encoding/binary_error.ml +++ b/src/lib_data_encoding/binary_error.ml @@ -29,8 +29,8 @@ type read_error = | No_case_matched | Unexpected_tag of int | Invalid_size of int - | Invalid_int of { min : int ; v : int ; max : int } - 
| Invalid_float of { min : float ; v : float ; max : float } + | Invalid_int of {min : int; v : int; max : int} + | Invalid_float of {min : float; v : float; max : float} | Trailing_zero | Size_limit_exceeded | List_too_long @@ -47,9 +47,9 @@ let pp_read_error ppf = function Format.fprintf ppf "Unexpected tag %d" tag | Invalid_size sz -> Format.fprintf ppf "Invalid size %d" sz - | Invalid_int { min ; v ; max} -> + | Invalid_int {min; v; max} -> Format.fprintf ppf "Invalid int (%d <= %d <= %d) " min v max - | Invalid_float { min ; v ; max} -> + | Invalid_float {min; v; max} -> Format.fprintf ppf "Invalid float (%f <= %f <= %f) " min v max | Trailing_zero -> Format.fprintf ppf "Trailing zero in Z" @@ -65,10 +65,10 @@ exception Read_error of read_error type write_error = | Size_limit_exceeded | No_case_matched - | Invalid_int of { min : int ; v : int ; max : int } - | Invalid_float of { min : float ; v : float ; max : float } - | Invalid_bytes_length of { expected : int ; found : int } - | Invalid_string_length of { expected : int ; found : int } + | Invalid_int of {min : int; v : int; max : int} + | Invalid_float of {min : float; v : float; max : float} + | Invalid_bytes_length of {expected : int; found : int} + | Invalid_string_length of {expected : int; found : int} | Invalid_natural | List_too_long | Array_too_long @@ -78,18 +78,22 @@ let pp_write_error ppf = function Format.fprintf ppf "Size limit exceeded" | No_case_matched -> Format.fprintf ppf "No case matched" - | Invalid_int { min ; v ; max} -> + | Invalid_int {min; v; max} -> Format.fprintf ppf "Invalid int (%d <= %d <= %d) " min v max - | Invalid_float { min ; v ; max} -> + | Invalid_float {min; v; max} -> Format.fprintf ppf "Invalid float (%f <= %f <= %f) " min v max - | Invalid_bytes_length { expected ; found } -> - Format.fprintf ppf + | Invalid_bytes_length {expected; found} -> + Format.fprintf + ppf "Invalid bytes length (expected: %d ; found %d)" - expected found - | Invalid_string_length { expected ; found } -> - Format.fprintf ppf + expected + found + | Invalid_string_length {expected; found} -> + Format.fprintf + ppf "Invalid string length (expected: %d ; found %d)" - expected found + expected + found | Invalid_natural -> Format.fprintf ppf "Negative natural" | List_too_long -> diff --git a/src/lib_data_encoding/binary_error.mli b/src/lib_data_encoding/binary_error.mli index e5fb3e3f8e8dca25875ec88ebb6a3615467996d4..6986fee0b319080b84eece7d7fc03946daf433ae 100644 --- a/src/lib_data_encoding/binary_error.mli +++ b/src/lib_data_encoding/binary_error.mli @@ -32,22 +32,24 @@ type read_error = | No_case_matched | Unexpected_tag of int | Invalid_size of int - | Invalid_int of { min : int ; v : int ; max : int } - | Invalid_float of { min : float ; v : float ; max : float } + | Invalid_int of {min : int; v : int; max : int} + | Invalid_float of {min : float; v : float; max : float} | Trailing_zero | Size_limit_exceeded | List_too_long | Array_too_long + exception Read_error of read_error -val pp_read_error: Format.formatter -> read_error -> unit + +val pp_read_error : Format.formatter -> read_error -> unit type write_error = | Size_limit_exceeded | No_case_matched - | Invalid_int of { min : int ; v : int ; max : int } - | Invalid_float of { min : float ; v : float ; max : float } - | Invalid_bytes_length of { expected : int ; found : int } - | Invalid_string_length of { expected : int ; found : int } + | Invalid_int of {min : int; v : int; max : int} + | Invalid_float of {min : float; v : float; max : float} + | 
Invalid_bytes_length of {expected : int; found : int} + | Invalid_string_length of {expected : int; found : int} | Invalid_natural | List_too_long | Array_too_long diff --git a/src/lib_data_encoding/binary_length.ml b/src/lib_data_encoding/binary_length.ml index efe5eaad258c09ec0dc5596ba5cfd84ae470c082..27925f2218e5cc23ab815f2efef6dc0c1d745ca7 100644 --- a/src/lib_data_encoding/binary_length.ml +++ b/src/lib_data_encoding/binary_length.ml @@ -28,121 +28,158 @@ open Binary_error let n_length value = let bits = Z.numbits value in if bits = 0 then 1 else (bits + 6) / 7 + let z_length value = (Z.numbits value + 1 + 6) / 7 let rec length : type x. x Encoding.t -> x -> int = - fun e value -> - let open Encoding in - match e.encoding with - (* Fixed *) - | Null -> 0 - | Empty -> 0 - | Constant _ -> 0 - | Bool -> Binary_size.bool - | Int8 -> Binary_size.int8 - | Uint8 -> Binary_size.uint8 - | Int16 -> Binary_size.int16 - | Uint16 -> Binary_size.uint16 - | Int31 -> Binary_size.int31 - | Int32 -> Binary_size.int32 - | Int64 -> Binary_size.int64 - | N -> n_length value - | Z -> z_length value - | RangedInt { minimum ; maximum } -> - Binary_size.integer_to_size @@ - Binary_size.range_to_size ~minimum ~maximum - | Float -> Binary_size.float - | RangedFloat _ -> Binary_size.float - | Bytes `Fixed n -> n - | String `Fixed n -> n - | Padded (e, n) -> length e value + n - | String_enum (_, arr) -> - Binary_size.integer_to_size @@ Binary_size.enum_size arr - | Objs { kind = `Fixed n ; _ } -> n - | Tups { kind = `Fixed n ; _ } -> n - | Union { kind = `Fixed n ; _ } -> n - (* Dynamic *) - | Objs { kind = `Dynamic ; left ; right } -> - let (v1, v2) = value in - length left v1 + length right v2 - | Tups { kind = `Dynamic ; left ; right } -> - let (v1, v2) = value in - length left v1 + length right v2 - | Union { kind = `Dynamic ; tag_size ; cases } -> - let rec length_case = function - | [] -> raise (Write_error No_case_matched) - | Case { tag = Json_only ; _ } :: tl -> length_case tl - | Case { encoding = e ; proj ; _ } :: tl -> - match proj value with - | None -> length_case tl - | Some value -> Binary_size.tag_size tag_size + length e value in - length_case cases - | Mu { kind = `Dynamic ; fix ; _ } -> length (fix e) value - | Obj (Opt { kind = `Dynamic ; encoding = e ; _ }) -> begin - match value with - | None -> 1 - | Some value -> 1 + length e value - end - (* Variable *) - | Ignore -> 0 - | Bytes `Variable -> MBytes.length value - | String `Variable -> String.length value - | Array (Some max_length, _e) when Array.length value > max_length -> - raise (Write_error Array_too_long) - | Array (_, e) -> - Array.fold_left - (fun acc v -> length e v + acc) - 0 value - | List (Some max_length, _e) when List.length value > max_length -> - raise (Write_error List_too_long) - | List (_, e) -> - List.fold_left - (fun acc v -> length e v + acc) - 0 value - | Objs { kind = `Variable ; left ; right } -> - let (v1, v2) = value in - length left v1 + length right v2 - | Tups { kind = `Variable ; left ; right } -> - let (v1, v2) = value in - length left v1 + length right v2 - | Obj (Opt { kind = `Variable ; encoding = e ; _ }) -> begin - match value with - | None -> 0 - | Some value -> length e value - end - | Union { kind = `Variable ; tag_size ; cases } -> - let rec length_case = function - | [] -> raise (Write_error No_case_matched) - | Case { tag = Json_only ; _ } :: tl -> length_case tl - | Case { encoding = e ; proj ; _ } :: tl -> - match proj value with - | None -> length_case tl - | Some value -> 
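(* Editor's illustration, not part of the patch: a minimal sketch of how a
   caller might construct and print one of the error values whose
   definitions are reformatted above. It assumes the surrounding
   [Binary_error] module is in scope; the concrete numbers are invented for
   the example. *)
let _report_write_error () =
  let err = Binary_error.Invalid_bytes_length {expected = 32; found = 20} in
  (* [pp_write_error] renders this roughly as
     "Invalid bytes length (expected: 32 ; found 20)". *)
  Format.eprintf "serialization failed: %a@." Binary_error.pp_write_error err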
Binary_size.tag_size tag_size + length e value in - length_case cases - | Mu { kind = `Variable ; fix ; _ } -> length (fix e) value - (* Recursive*) - | Obj (Req { encoding = e ; _ }) -> length e value - | Obj (Dft { encoding = e ; _ }) -> length e value - | Tup e -> length e value - | Conv { encoding = e ; proj ; _ } -> - length e (proj value) - | Describe { encoding = e ; _ } -> length e value - | Splitted { encoding = e ; _ } -> length e value - | Dynamic_size { kind ; encoding = e } -> - let length = length e value in - Binary_size.integer_to_size kind + length - | Check_size { limit ; encoding = e } -> - let length = length e value in - if length > limit then raise (Write_error Size_limit_exceeded) ; - length - | Delayed f -> length (f ()) value + fun e value -> + let open Encoding in + match e.encoding with + (* Fixed *) + | Null -> + 0 + | Empty -> + 0 + | Constant _ -> + 0 + | Bool -> + Binary_size.bool + | Int8 -> + Binary_size.int8 + | Uint8 -> + Binary_size.uint8 + | Int16 -> + Binary_size.int16 + | Uint16 -> + Binary_size.uint16 + | Int31 -> + Binary_size.int31 + | Int32 -> + Binary_size.int32 + | Int64 -> + Binary_size.int64 + | N -> + n_length value + | Z -> + z_length value + | RangedInt {minimum; maximum} -> + Binary_size.integer_to_size + @@ Binary_size.range_to_size ~minimum ~maximum + | Float -> + Binary_size.float + | RangedFloat _ -> + Binary_size.float + | Bytes (`Fixed n) -> + n + | String (`Fixed n) -> + n + | Padded (e, n) -> + length e value + n + | String_enum (_, arr) -> + Binary_size.integer_to_size @@ Binary_size.enum_size arr + | Objs {kind = `Fixed n; _} -> + n + | Tups {kind = `Fixed n; _} -> + n + | Union {kind = `Fixed n; _} -> + n + (* Dynamic *) + | Objs {kind = `Dynamic; left; right} -> + let (v1, v2) = value in + length left v1 + length right v2 + | Tups {kind = `Dynamic; left; right} -> + let (v1, v2) = value in + length left v1 + length right v2 + | Union {kind = `Dynamic; tag_size; cases} -> + let rec length_case = function + | [] -> + raise (Write_error No_case_matched) + | Case {tag = Json_only; _} :: tl -> + length_case tl + | Case {encoding = e; proj; _} :: tl -> ( + match proj value with + | None -> + length_case tl + | Some value -> + Binary_size.tag_size tag_size + length e value ) + in + length_case cases + | Mu {kind = `Dynamic; fix; _} -> + length (fix e) value + | Obj (Opt {kind = `Dynamic; encoding = e; _}) -> ( + match value with None -> 1 | Some value -> 1 + length e value ) + (* Variable *) + | Ignore -> + 0 + | Bytes `Variable -> + MBytes.length value + | String `Variable -> + String.length value + | Array (Some max_length, _e) when Array.length value > max_length -> + raise (Write_error Array_too_long) + | Array (_, e) -> + Array.fold_left (fun acc v -> length e v + acc) 0 value + | List (Some max_length, _e) when List.length value > max_length -> + raise (Write_error List_too_long) + | List (_, e) -> + List.fold_left (fun acc v -> length e v + acc) 0 value + | Objs {kind = `Variable; left; right} -> + let (v1, v2) = value in + length left v1 + length right v2 + | Tups {kind = `Variable; left; right} -> + let (v1, v2) = value in + length left v1 + length right v2 + | Obj (Opt {kind = `Variable; encoding = e; _}) -> ( + match value with None -> 0 | Some value -> length e value ) + | Union {kind = `Variable; tag_size; cases} -> + let rec length_case = function + | [] -> + raise (Write_error No_case_matched) + | Case {tag = Json_only; _} :: tl -> + length_case tl + | Case {encoding = e; proj; _} :: tl -> ( + match proj value with + | 
None -> + length_case tl + | Some value -> + Binary_size.tag_size tag_size + length e value ) + in + length_case cases + | Mu {kind = `Variable; fix; _} -> + length (fix e) value + (* Recursive*) + | Obj (Req {encoding = e; _}) -> + length e value + | Obj (Dft {encoding = e; _}) -> + length e value + | Tup e -> + length e value + | Conv {encoding = e; proj; _} -> + length e (proj value) + | Describe {encoding = e; _} -> + length e value + | Splitted {encoding = e; _} -> + length e value + | Dynamic_size {kind; encoding = e} -> + let length = length e value in + Binary_size.integer_to_size kind + length + | Check_size {limit; encoding = e} -> + let length = length e value in + if length > limit then raise (Write_error Size_limit_exceeded) ; + length + | Delayed f -> + length (f ()) value let fixed_length e = match Encoding.classify e with - | `Fixed n -> Some n - | `Dynamic | `Variable -> None + | `Fixed n -> + Some n + | `Dynamic | `Variable -> + None + let fixed_length_exn e = match fixed_length e with - | Some n -> n - | None -> invalid_arg "Data_encoding.Binary.fixed_length_exn" - + | Some n -> + n + | None -> + invalid_arg "Data_encoding.Binary.fixed_length_exn" diff --git a/src/lib_data_encoding/binary_length.mli b/src/lib_data_encoding/binary_length.mli index e6afa8b91f80c6bb9b074743b8fe9485cc101748..58a5612b1315f79cd9071689e014fba6f58c211d 100644 --- a/src/lib_data_encoding/binary_length.mli +++ b/src/lib_data_encoding/binary_length.mli @@ -27,8 +27,11 @@ use the corresponding module intended for use: {!Data_encoding.Binary}. *) val length : 'a Encoding.t -> 'a -> int + val fixed_length : 'a Encoding.t -> int option + val fixed_length_exn : 'a Encoding.t -> int val z_length : Z.t -> int + val n_length : Z.t -> int diff --git a/src/lib_data_encoding/binary_reader.ml b/src/lib_data_encoding/binary_reader.ml index 6f82f20a8c46d206f5c5ccf14f07bb203e688a7f..1492e2b142a1db8ea0999937f955e6e83b800ae5 100644 --- a/src/lib_data_encoding/binary_reader.ml +++ b/src/lib_data_encoding/binary_reader.ml @@ -28,21 +28,23 @@ open Binary_error let raise e = raise (Read_error e) type state = { - buffer : MBytes.t ; - mutable offset : int ; - mutable remaining_bytes : int ; - mutable allowed_bytes : int option ; + buffer : MBytes.t; + mutable offset : int; + mutable remaining_bytes : int; + mutable allowed_bytes : int option } let check_allowed_bytes state size = match state.allowed_bytes with - | Some len when len < size -> raise Size_limit_exceeded - | Some len -> Some (len - size) - | None -> None + | Some len when len < size -> + raise Size_limit_exceeded + | Some len -> + Some (len - size) + | None -> + None let check_remaining_bytes state size = - if state.remaining_bytes < size then - raise Not_enough_data ; + if state.remaining_bytes < size then raise Not_enough_data ; state.remaining_bytes - size let read_atom size conv state = @@ -54,13 +56,16 @@ let read_atom size conv state = (** Reader for all the atomic types. 
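(* Editor's illustration, not part of the patch: a rough sketch of how the
   sizing functions above are normally reached through the public
   [Data_encoding.Binary] interface, which re-exports them. The encoding and
   value below are arbitrary examples invented for this note. *)
let _length_example () =
  let open Data_encoding in
  let e = obj2 (req "id" int31) (req "name" string) in
  (* [Binary.length] computes the serialized size of one value, while
     [Binary.fixed_length] is [None] here because the trailing string makes
     the size depend on the data. *)
  let size = Binary.length e (42, "tezos") in
  let fixed = Binary.fixed_length e in
  (size, fixed)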
*) module Atom = struct - let uint8 = read_atom Binary_size.uint8 MBytes.get_uint8 + let uint16 = read_atom Binary_size.int16 MBytes.get_uint16 let int8 = read_atom Binary_size.int8 MBytes.get_int8 + let int16 = read_atom Binary_size.int16 MBytes.get_int16 + let int32 = read_atom Binary_size.int32 MBytes.get_int32 + let int64 = read_atom Binary_size.int64 MBytes.get_int64 let float = read_atom Binary_size.float MBytes.get_double @@ -68,76 +73,85 @@ module Atom = struct let bool state = int8 state <> 0 let uint30 = - read_atom Binary_size.uint30 @@ fun buffer ofs -> + read_atom Binary_size.uint30 + @@ fun buffer ofs -> let v = Int32.to_int (MBytes.get_int32 buffer ofs) in - if v < 0 then - raise (Invalid_int { min = 0 ; v ; max = (1 lsl 30) - 1 }) ; + if v < 0 then raise (Invalid_int {min = 0; v; max = (1 lsl 30) - 1}) ; v let int31 = - read_atom Binary_size.int31 @@ fun buffer ofs -> - Int32.to_int (MBytes.get_int32 buffer ofs) + read_atom Binary_size.int31 + @@ fun buffer ofs -> Int32.to_int (MBytes.get_int32 buffer ofs) let int = function - | `Int31 -> int31 - | `Int16 -> int16 - | `Int8 -> int8 - | `Uint30 -> uint30 - | `Uint16 -> uint16 - | `Uint8 -> uint8 + | `Int31 -> + int31 + | `Int16 -> + int16 + | `Int8 -> + int8 + | `Uint30 -> + uint30 + | `Uint16 -> + uint16 + | `Uint8 -> + uint8 let ranged_int ~minimum ~maximum state = let read_int = match Binary_size.range_to_size ~minimum ~maximum with - | `Int8 -> int8 - | `Int16 -> int16 - | `Int31 -> int31 - | `Uint8 -> uint8 - | `Uint16 -> uint16 - | `Uint30 -> uint30 in + | `Int8 -> + int8 + | `Int16 -> + int16 + | `Int31 -> + int31 + | `Uint8 -> + uint8 + | `Uint16 -> + uint16 + | `Uint30 -> + uint30 + in let ranged = read_int state in let ranged = if minimum > 0 then ranged + minimum else ranged in if not (minimum <= ranged && ranged <= maximum) then - raise (Invalid_int { min = minimum ; v =ranged ; max = maximum }) ; + raise (Invalid_int {min = minimum; v = ranged; max = maximum}) ; ranged let ranged_float ~minimum ~maximum state = let ranged = float state in if not (minimum <= ranged && ranged <= maximum) then - raise (Invalid_float { min = minimum ; v = ranged ; max = maximum }) ; + raise (Invalid_float {min = minimum; v = ranged; max = maximum}) ; ranged let rec read_z res value bit_in_value state = let byte = uint8 state in let value = value lor ((byte land 0x7F) lsl bit_in_value) in let bit_in_value = bit_in_value + 7 in - let bit_in_value, value = - if bit_in_value < 8 then - (bit_in_value, value) - else begin + let (bit_in_value, value) = + if bit_in_value < 8 then (bit_in_value, value) + else ( Buffer.add_char res (Char.unsafe_chr (value land 0xFF)) ; - bit_in_value - 8, value lsr 8 - end in - if byte land 0x80 = 0x80 then - read_z res value bit_in_value state - else begin + (bit_in_value - 8, value lsr 8) ) + in + if byte land 0x80 = 0x80 then read_z res value bit_in_value state + else ( if bit_in_value > 0 then Buffer.add_char res (Char.unsafe_chr value) ; if byte = 0x00 then raise Trailing_zero ; - Z.of_bits (Buffer.contents res) - end + Z.of_bits (Buffer.contents res) ) let n state = let first = uint8 state in let first_value = first land 0x7F in if first land 0x80 = 0x80 then read_z (Buffer.create 100) first_value 7 state - else - Z.of_int first_value + else Z.of_int first_value let z state = let first = uint8 state in let first_value = first land 0x3F in - let sign = (first land 0x40) <> 0 in + let sign = first land 0x40 <> 0 in if first land 0x80 = 0x80 then let n = read_z (Buffer.create 100) first_value 6 state in if 
sign then Z.neg n else n @@ -148,211 +162,238 @@ module Atom = struct let string_enum arr state = let read_index = match Binary_size.enum_size arr with - | `Uint8 -> uint8 - | `Uint16 -> uint16 - | `Uint30 -> uint30 in + | `Uint8 -> + uint8 + | `Uint16 -> + uint16 + | `Uint30 -> + uint30 + in let index = read_index state in - if index >= Array.length arr then - raise No_case_matched ; + if index >= Array.length arr then raise No_case_matched ; arr.(index) let fixed_length_bytes length = - read_atom length @@ fun buf ofs -> - MBytes.sub buf ofs length + read_atom length @@ fun buf ofs -> MBytes.sub buf ofs length let fixed_length_string length = - read_atom length @@ fun buf ofs -> - MBytes.sub_string buf ofs length - - let tag = function - | `Uint8 -> uint8 - | `Uint16 -> uint16 + read_atom length @@ fun buf ofs -> MBytes.sub_string buf ofs length + let tag = function `Uint8 -> uint8 | `Uint16 -> uint16 end (** Main recursive reading function, in continuation passing style. *) -let rec read_rec : type ret. ret Encoding.t -> state -> ret - = fun e state -> - let open Encoding in - match e.encoding with - | Null -> () - | Empty -> () - | Constant _ -> () - | Ignore -> () - | Bool -> Atom.bool state - | Int8 -> Atom.int8 state - | Uint8 -> Atom.uint8 state - | Int16 -> Atom.int16 state - | Uint16 -> Atom.uint16 state - | Int31 -> Atom.int31 state - | Int32 -> Atom.int32 state - | Int64 -> Atom.int64 state - | N -> Atom.n state - | Z -> Atom.z state - | Float -> Atom.float state - | Bytes (`Fixed n) -> Atom.fixed_length_bytes n state - | Bytes `Variable -> - Atom.fixed_length_bytes state.remaining_bytes state - | String (`Fixed n) -> Atom.fixed_length_string n state - | String `Variable -> - Atom.fixed_length_string state.remaining_bytes state - | Padded (e, n) -> - let v = read_rec e state in - ignore (Atom.fixed_length_string n state : string) ; - v - | RangedInt { minimum ; maximum } -> - Atom.ranged_int ~minimum ~maximum state - | RangedFloat { minimum ; maximum } -> - Atom.ranged_float ~minimum ~maximum state - | String_enum (_, arr) -> - Atom.string_enum arr state - | Array (max_length, e) -> - let max_length = Option.unopt ~default:max_int max_length in - let l = read_list List_too_long max_length e state in - Array.of_list l - | List (max_length, e) -> - let max_length = Option.unopt ~default:max_int max_length in - read_list Array_too_long max_length e state - | (Obj (Req { encoding = e ; _ })) -> read_rec e state - | (Obj (Dft { encoding = e ; _ })) -> read_rec e state - | (Obj (Opt { kind = `Dynamic ; encoding = e ; _ })) -> - let present = Atom.bool state in - if not present then - None - else - Some (read_rec e state) - | (Obj (Opt { kind = `Variable ; encoding = e ; _ })) -> - if state.remaining_bytes = 0 then - None - else - Some (read_rec e state) - | Objs { kind = `Fixed sz ; left ; right } -> - ignore (check_remaining_bytes state sz : int) ; - ignore (check_allowed_bytes state sz : int option) ; - let left = read_rec left state in - let right = read_rec right state in - (left, right) - | Objs { kind = `Dynamic ; left ; right } -> - let left = read_rec left state in - let right = read_rec right state in - (left, right) - | Objs { kind = `Variable ; left ; right } -> - read_variable_pair left right state - | Tup e -> read_rec e state - | Tups { kind = `Fixed sz ; left ; right } -> - ignore (check_remaining_bytes state sz : int) ; - ignore (check_allowed_bytes state sz : int option) ; - let left = read_rec left state in - let right = read_rec right state in - (left, right) - | 
Tups { kind = `Dynamic ; left ; right } -> - let left = read_rec left state in - let right = read_rec right state in - (left, right) - | Tups { kind = `Variable ; left ; right } -> - read_variable_pair left right state - | Conv { inj ; encoding ; _ } -> - inj (read_rec encoding state) - | Union { tag_size ; cases ; _ } -> - let ctag = Atom.tag tag_size state in - let Case { encoding ; inj ; _ } = - try - List.find - (function - | Case { tag = Tag tag ; _ } -> tag = ctag - | Case { tag = Json_only ; _ } -> false) - cases - with Not_found -> raise (Unexpected_tag ctag) in - inj (read_rec encoding state) - | Dynamic_size { kind ; encoding = e } -> - let sz = Atom.int kind state in - let remaining = check_remaining_bytes state sz in - state.remaining_bytes <- sz ; - ignore (check_allowed_bytes state sz : int option) ; - let v = read_rec e state in - if state.remaining_bytes <> 0 then raise Extra_bytes ; - state.remaining_bytes <- remaining ; - v - | Check_size { limit ; encoding = e } -> - let old_allowed_bytes = state.allowed_bytes in - let limit = - match state.allowed_bytes with - | None -> limit - | Some current_limit -> min current_limit limit in - state.allowed_bytes <- Some limit ; - let v = read_rec e state in - let allowed_bytes = - match old_allowed_bytes with - | None -> None - | Some old_limit -> - let remaining = - match state.allowed_bytes with - | None -> assert false - | Some remaining -> remaining in - let read = limit - remaining in - Some (old_limit - read) in - state.allowed_bytes <- allowed_bytes ; - v - | Describe { encoding = e ; _ } -> read_rec e state - | Splitted { encoding = e ; _ } -> read_rec e state - | Mu { fix ; _ } -> read_rec (fix e) state - | Delayed f -> read_rec (f ()) state - - -and read_variable_pair - : type left right. - left Encoding.t -> right Encoding.t -> state -> (left * right) - = fun e1 e2 state -> - match Encoding.classify e1, Encoding.classify e2 with - | (`Dynamic | `Fixed _), `Variable -> - let left = read_rec e1 state in - let right = read_rec e2 state in - (left, right) - | `Variable, `Fixed n -> - if n > state.remaining_bytes then raise Not_enough_data ; - state.remaining_bytes <- state.remaining_bytes - n ; - let left = read_rec e1 state in - assert (state.remaining_bytes = 0) ; - state.remaining_bytes <- n ; - let right = read_rec e2 state in - assert (state.remaining_bytes = 0) ; - (left, right) - | _ -> assert false (* Should be rejected by [Encoding.Kind.combine] *) - -and read_list : type a. read_error -> int -> a Encoding.t -> state -> a list - = fun error max_length e state -> - let rec loop max_length acc = - if state.remaining_bytes = 0 then - List.rev acc - else if max_length = 0 then - raise error - else - let v = read_rec e state in - loop (max_length - 1) (v :: acc) in - loop max_length [] - - +let rec read_rec : type ret. 
ret Encoding.t -> state -> ret = + fun e state -> + let open Encoding in + match e.encoding with + | Null -> + () + | Empty -> + () + | Constant _ -> + () + | Ignore -> + () + | Bool -> + Atom.bool state + | Int8 -> + Atom.int8 state + | Uint8 -> + Atom.uint8 state + | Int16 -> + Atom.int16 state + | Uint16 -> + Atom.uint16 state + | Int31 -> + Atom.int31 state + | Int32 -> + Atom.int32 state + | Int64 -> + Atom.int64 state + | N -> + Atom.n state + | Z -> + Atom.z state + | Float -> + Atom.float state + | Bytes (`Fixed n) -> + Atom.fixed_length_bytes n state + | Bytes `Variable -> + Atom.fixed_length_bytes state.remaining_bytes state + | String (`Fixed n) -> + Atom.fixed_length_string n state + | String `Variable -> + Atom.fixed_length_string state.remaining_bytes state + | Padded (e, n) -> + let v = read_rec e state in + ignore (Atom.fixed_length_string n state : string) ; + v + | RangedInt {minimum; maximum} -> + Atom.ranged_int ~minimum ~maximum state + | RangedFloat {minimum; maximum} -> + Atom.ranged_float ~minimum ~maximum state + | String_enum (_, arr) -> + Atom.string_enum arr state + | Array (max_length, e) -> + let max_length = Option.unopt ~default:max_int max_length in + let l = read_list List_too_long max_length e state in + Array.of_list l + | List (max_length, e) -> + let max_length = Option.unopt ~default:max_int max_length in + read_list Array_too_long max_length e state + | Obj (Req {encoding = e; _}) -> + read_rec e state + | Obj (Dft {encoding = e; _}) -> + read_rec e state + | Obj (Opt {kind = `Dynamic; encoding = e; _}) -> + let present = Atom.bool state in + if not present then None else Some (read_rec e state) + | Obj (Opt {kind = `Variable; encoding = e; _}) -> + if state.remaining_bytes = 0 then None else Some (read_rec e state) + | Objs {kind = `Fixed sz; left; right} -> + ignore (check_remaining_bytes state sz : int) ; + ignore (check_allowed_bytes state sz : int option) ; + let left = read_rec left state in + let right = read_rec right state in + (left, right) + | Objs {kind = `Dynamic; left; right} -> + let left = read_rec left state in + let right = read_rec right state in + (left, right) + | Objs {kind = `Variable; left; right} -> + read_variable_pair left right state + | Tup e -> + read_rec e state + | Tups {kind = `Fixed sz; left; right} -> + ignore (check_remaining_bytes state sz : int) ; + ignore (check_allowed_bytes state sz : int option) ; + let left = read_rec left state in + let right = read_rec right state in + (left, right) + | Tups {kind = `Dynamic; left; right} -> + let left = read_rec left state in + let right = read_rec right state in + (left, right) + | Tups {kind = `Variable; left; right} -> + read_variable_pair left right state + | Conv {inj; encoding; _} -> + inj (read_rec encoding state) + | Union {tag_size; cases; _} -> + let ctag = Atom.tag tag_size state in + let (Case {encoding; inj; _}) = + try + List.find + (function + | Case {tag = Tag tag; _} -> + tag = ctag + | Case {tag = Json_only; _} -> + false) + cases + with Not_found -> raise (Unexpected_tag ctag) + in + inj (read_rec encoding state) + | Dynamic_size {kind; encoding = e} -> + let sz = Atom.int kind state in + let remaining = check_remaining_bytes state sz in + state.remaining_bytes <- sz ; + ignore (check_allowed_bytes state sz : int option) ; + let v = read_rec e state in + if state.remaining_bytes <> 0 then raise Extra_bytes ; + state.remaining_bytes <- remaining ; + v + | Check_size {limit; encoding = e} -> + let old_allowed_bytes = state.allowed_bytes in + let limit = + 
match state.allowed_bytes with + | None -> + limit + | Some current_limit -> + min current_limit limit + in + state.allowed_bytes <- Some limit ; + let v = read_rec e state in + let allowed_bytes = + match old_allowed_bytes with + | None -> + None + | Some old_limit -> + let remaining = + match state.allowed_bytes with + | None -> + assert false + | Some remaining -> + remaining + in + let read = limit - remaining in + Some (old_limit - read) + in + state.allowed_bytes <- allowed_bytes ; + v + | Describe {encoding = e; _} -> + read_rec e state + | Splitted {encoding = e; _} -> + read_rec e state + | Mu {fix; _} -> + read_rec (fix e) state + | Delayed f -> + read_rec (f ()) state + +and read_variable_pair : + type left right. + left Encoding.t -> right Encoding.t -> state -> left * right = + fun e1 e2 state -> + match (Encoding.classify e1, Encoding.classify e2) with + | ((`Dynamic | `Fixed _), `Variable) -> + let left = read_rec e1 state in + let right = read_rec e2 state in + (left, right) + | (`Variable, `Fixed n) -> + if n > state.remaining_bytes then raise Not_enough_data ; + state.remaining_bytes <- state.remaining_bytes - n ; + let left = read_rec e1 state in + assert (state.remaining_bytes = 0) ; + state.remaining_bytes <- n ; + let right = read_rec e2 state in + assert (state.remaining_bytes = 0) ; + (left, right) + | _ -> + assert false + +(* Should be rejected by [Encoding.Kind.combine] *) +and read_list : type a. read_error -> int -> a Encoding.t -> state -> a list = + fun error max_length e state -> + let rec loop max_length acc = + if state.remaining_bytes = 0 then List.rev acc + else if max_length = 0 then raise error + else + let v = read_rec e state in + loop (max_length - 1) (v :: acc) + in + loop max_length [] (** ******************** *) + (** Various entry points *) let read encoding buffer ofs len = let state = - { buffer ; offset = ofs ; - remaining_bytes = len ; allowed_bytes = None } in + {buffer; offset = ofs; remaining_bytes = len; allowed_bytes = None} + in match read_rec encoding state with - | exception Read_error _ -> None - | v -> Some (state.offset, v) + | exception Read_error _ -> + None + | v -> + Some (state.offset, v) let of_bytes_exn encoding buffer = let len = MBytes.length buffer in let state = - { buffer ; offset = 0 ; - remaining_bytes = len ; allowed_bytes = None } in + {buffer; offset = 0; remaining_bytes = len; allowed_bytes = None} + in let v = read_rec encoding state in if state.offset <> len then raise Extra_bytes ; v let of_bytes encoding buffer = - try Some (of_bytes_exn encoding buffer) - with Read_error _ -> None + try Some (of_bytes_exn encoding buffer) with Read_error _ -> None diff --git a/src/lib_data_encoding/binary_reader.mli b/src/lib_data_encoding/binary_reader.mli index 1a846707bb7b45d7234bb00adf33bcc0de4c9b51..8a9aa030f2bed3bcbd09b79fe006d86c7d9c05ef 100644 --- a/src/lib_data_encoding/binary_reader.mli +++ b/src/lib_data_encoding/binary_reader.mli @@ -26,6 +26,8 @@ (** This is for use *within* the data encoding library only. Instead, you should use the corresponding module intended for use: {!Data_encoding.Binary}. 
*) -val read: 'a Encoding.t -> MBytes.t -> int -> int -> (int * 'a) option -val of_bytes: 'a Encoding.t -> MBytes.t -> 'a option -val of_bytes_exn: 'a Encoding.t -> MBytes.t -> 'a +val read : 'a Encoding.t -> MBytes.t -> int -> int -> (int * 'a) option + +val of_bytes : 'a Encoding.t -> MBytes.t -> 'a option + +val of_bytes_exn : 'a Encoding.t -> MBytes.t -> 'a diff --git a/src/lib_data_encoding/binary_schema.ml b/src/lib_data_encoding/binary_schema.ml index d6ad7f20882b5db5d692a9340ca6fe35a13845db..a701ba1be505deb58f9a64a2d7b2488c55e103c0 100644 --- a/src/lib_data_encoding/binary_schema.ml +++ b/src/lib_data_encoding/binary_schema.ml @@ -25,7 +25,7 @@ open Encoding -type integer_extended = [ Binary_size.integer | `Int32 | `Int64 ] +type integer_extended = [Binary_size.integer | `Int32 | `Int64] type field_descr = | Named_field of string * Kind.t * layout @@ -50,27 +50,22 @@ and layout = and fields = field_descr list and toplevel_encoding = - | Obj of { fields : fields } - | Cases of { kind : Kind.t ; - tag_size : Binary_size.tag_size ; - cases : (int * string option * fields) list } - | Int_enum of { size : Binary_size.integer ; - cases : (int * string) list } + | Obj of {fields : fields} + | Cases of + { kind : Kind.t; + tag_size : Binary_size.tag_size; + cases : (int * string option * fields) list } + | Int_enum of {size : Binary_size.integer; cases : (int * string) list} -and description = - { title : string ; - description : string option } +and description = {title : string; description : string option} type t = { - toplevel: toplevel_encoding ; - fields: (description * toplevel_encoding) list ; + toplevel : toplevel_encoding; + fields : (description * toplevel_encoding) list } module Printer_ast = struct - - type table = - { headers : string list ; - body : string list list } + type table = {headers : string list; body : string list list} type t = | Table of table @@ -78,25 +73,33 @@ module Printer_ast = struct let pp_size ppf = function | `Fixed size -> - Format.fprintf ppf "%d byte%s" size (if size = 1 then "" else "s") + Format.fprintf ppf "%d byte%s" size (if size = 1 then "" else "s") | `Variable -> Format.fprintf ppf "Variable" | `Dynamic -> Format.fprintf ppf "Determined from data" let pp_int ppf (int : integer_extended) = - Format.fprintf ppf "%s" - begin - match int with - | `Int16 -> "signed 16-bit integer" - | `Int31 -> "signed 31-bit integer" - | `Uint30 -> "unsigned 30-bit integer" - | `Int32 -> "signed 32-bit integer" - | `Int64 -> "signed 64-bit integer" - | `Int8 -> "signed 8-bit integer" - | `Uint16 -> "unsigned 16-bit integer" - | `Uint8 -> "unsigned 8-bit integer" - end + Format.fprintf + ppf + "%s" + ( match int with + | `Int16 -> + "signed 16-bit integer" + | `Int31 -> + "signed 31-bit integer" + | `Uint30 -> + "unsigned 30-bit integer" + | `Int32 -> + "signed 32-bit integer" + | `Int64 -> + "signed 64-bit integer" + | `Int8 -> + "signed 8-bit integer" + | `Uint16 -> + "unsigned 16-bit integer" + | `Uint8 -> + "unsigned 8-bit integer" ) let rec pp_layout ppf = function | Zero_width -> @@ -106,17 +109,28 @@ module Printer_ast = struct | Bool -> Format.fprintf ppf "boolean (0 for false, 255 for true)" | RangedInt (minimum, maximum) when minimum <= 0 -> - Format.fprintf ppf "%a in the range %d to %d" - pp_int ((Binary_size.range_to_size ~minimum ~maximum) :> integer_extended) - minimum maximum + Format.fprintf + ppf + "%a in the range %d to %d" + pp_int + (Binary_size.range_to_size ~minimum ~maximum :> integer_extended) + minimum + maximum | RangedInt (minimum, 
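(* Editor's illustration, not part of the patch: the entry points declared
   just above ([read], [of_bytes], [of_bytes_exn]) decode an [MBytes.t]
   buffer. The sketch assumes the matching writer-side [to_bytes_exn] from
   this library to obtain a buffer to decode; the encoding and value are
   arbitrary examples. *)
let _round_trip_example () =
  let open Data_encoding in
  let e = tup2 uint16 string in
  let buf = Binary.to_bytes_exn e (7, "abc") in
  match Binary.of_bytes e buf with
  | Some v ->
      v
  | None ->
      (* [of_bytes] returns [None] on a decoding error or leftover bytes. *)
      failwith "decoding failed"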
maximum) (* when minimum > 0 *) -> - Format.fprintf ppf "%a in the range %d to %d (shifted by %d)" - pp_int ((Binary_size.range_to_size ~minimum ~maximum) :> integer_extended) - minimum maximum minimum + Format.fprintf + ppf + "%a in the range %d to %d (shifted by %d)" + pp_int + (Binary_size.range_to_size ~minimum ~maximum :> integer_extended) + minimum + maximum + minimum | RangedFloat (minimum, maximum) -> - Format.fprintf ppf + Format.fprintf + ppf "double-precision floating-point number, in the range %f to %f" - minimum maximum + minimum + maximum | Float -> Format.fprintf ppf "double-precision floating-point number" | Bytes -> @@ -128,451 +142,472 @@ module Printer_ast = struct | Padding -> Format.fprintf ppf "padding" | Enum (size, reference) -> - Format.fprintf ppf "%a encoding an enumeration (see %s)" - pp_int (size :> integer_extended) + Format.fprintf + ppf + "%a encoding an enumeration (see %s)" + pp_int + (size :> integer_extended) reference - | Seq (data, len) -> + | Seq (data, len) -> ( Format.fprintf ppf "sequence of " ; - begin match len with - | None -> () - | Some len -> Format.fprintf ppf "at most %d " len - end ; - begin match data with - | Ref reference -> Format.fprintf ppf "$%s" reference - | _ -> pp_layout ppf data - end - + ( match len with + | None -> + () + | Some len -> + Format.fprintf ppf "at most %d " len ) ; + match data with + | Ref reference -> + Format.fprintf ppf "$%s" reference + | _ -> + pp_layout ppf data ) let pp_tag_size ppf tag = - Format.fprintf ppf "%s" @@ - match tag with - | `Uint8 -> "8-bit" - | `Uint16 -> "16-bit" + Format.fprintf ppf "%s" + @@ match tag with `Uint8 -> "8-bit" | `Uint16 -> "16-bit" let field_descr () = let reference = ref 0 in - let string_of_layout = Format.asprintf "%a" pp_layout in + let string_of_layout = Format.asprintf "%a" pp_layout in let anon_num () = let value = !reference in - reference := value + 1; - string_of_int value in + reference := value + 1 ; + string_of_int value + in function | Named_field (name, kind, desc) -> - [ name ; Format.asprintf "%a" pp_size kind ; string_of_layout desc ] + [name; Format.asprintf "%a" pp_size kind; string_of_layout desc] | Dynamic_size_field (Some name, 1, size) -> - [ Format.asprintf "# bytes in field \"%s\"" name ; - Format.asprintf "%a" - pp_size (`Fixed (Binary_size.integer_to_size size)) ; + [ Format.asprintf "# bytes in field \"%s\"" name; + Format.asprintf + "%a" + pp_size + (`Fixed (Binary_size.integer_to_size size)); string_of_layout (Int (size :> integer_extended)) ] | Dynamic_size_field (None, 1, size) -> - [ Format.asprintf "# bytes in next field" ; - Format.asprintf "%a" - pp_size (`Fixed (Binary_size.integer_to_size size)) ; + [ Format.asprintf "# bytes in next field"; + Format.asprintf + "%a" + pp_size + (`Fixed (Binary_size.integer_to_size size)); string_of_layout (Int (size :> integer_extended)) ] | Dynamic_size_field (_, i, size) -> - [ Format.asprintf "# bytes in next %d fields" i ; - Format.asprintf "%a" - pp_size (`Fixed (Binary_size.integer_to_size size)) ; + [ Format.asprintf "# bytes in next %d fields" i; + Format.asprintf + "%a" + pp_size + (`Fixed (Binary_size.integer_to_size size)); string_of_layout (Int (size :> integer_extended)) ] | Anonymous_field (kind, desc) -> - [ "Unnamed field " ^ anon_num () ; - Format.asprintf "%a" pp_size kind ; + [ "Unnamed field " ^ anon_num (); + Format.asprintf "%a" pp_size kind; string_of_layout desc ] | Optional_field name -> - [ Format.asprintf "? 
presence of field \"%s\"" name ; - Format.asprintf "%a" pp_size (`Fixed 1) ; + [ Format.asprintf "? presence of field \"%s\"" name; + Format.asprintf "%a" pp_size (`Fixed 1); string_of_layout Bool ] - let binary_table_headers = [ "Name" ; "Size" ; "Contents" ] - let enum_headers = [ "Case number" ; "Encoded string" ] + let binary_table_headers = ["Name"; "Size"; "Contents"] + + let enum_headers = ["Case number"; "Encoded string"] let toplevel (descr, encoding) = match encoding with - | Obj { fields } -> - descr, - Table { headers = binary_table_headers ; - body = List.map (field_descr ()) fields } - | Cases { kind ; tag_size ; cases } -> - { title = - Format.asprintf "%s (%a, %a tag)" - descr.title pp_size kind pp_tag_size tag_size ; - description = descr.description}, - Union (tag_size, - List.map - (fun (tag, name, fields) -> - { title = - begin - match name with - | Some name -> Format.asprintf "%s (tag %d)" name tag - | None -> Format.asprintf "Tag %d" tag - end; + | Obj {fields} -> + ( descr, + Table + { headers = binary_table_headers; + body = List.map (field_descr ()) fields } ) + | Cases {kind; tag_size; cases} -> + ( { title = + Format.asprintf + "%s (%a, %a tag)" + descr.title + pp_size + kind + pp_tag_size + tag_size; + description = descr.description }, + Union + ( tag_size, + List.map + (fun (tag, name, fields) -> + ( { title = + ( match name with + | Some name -> + Format.asprintf "%s (tag %d)" name tag + | None -> + Format.asprintf "Tag %d" tag ); description = None }, - { headers = binary_table_headers ; - body = List.map (field_descr ()) fields }) - cases) - | Int_enum { size ; cases } -> - { title = - Format.asprintf "%s (Enumeration: %a):" - descr.title pp_int (size :> integer_extended) ; - description = descr.description }, - Table - { headers = enum_headers ; - body = List.map (fun (num, str) -> [ string_of_int num ; str ]) cases } - + { headers = binary_table_headers; + body = List.map (field_descr ()) fields } )) + cases ) ) + | Int_enum {size; cases} -> + ( { title = + Format.asprintf + "%s (Enumeration: %a):" + descr.title + pp_int + (size :> integer_extended); + description = descr.description }, + Table + { headers = enum_headers; + body = + List.map (fun (num, str) -> [string_of_int num; str]) cases } + ) end module Printer = struct - let rec pad char ppf = function - | 0 -> () + | 0 -> + () | n -> Format.pp_print_char ppf char ; pad char ppf (n - 1) let pp_title level ppf title = - let char = - if level = 1 then '*' else - if level = 2 then '=' else - '`' in + let char = if level = 1 then '*' else if level = 2 then '=' else '`' in let sub = String.map (fun _ -> char) title in Format.fprintf ppf "%s@ %s@\n@\n" title sub - let pp_table ppf { Printer_ast.headers ; body } = + let pp_table ppf {Printer_ast.headers; body} = let max_widths = - List.fold_left (List.map2 (fun len str -> max (String.length str) len)) + List.fold_left + (List.map2 (fun len str -> max (String.length str) len)) (List.map String.length headers) - body in + body + in let pp_row pad_char ppf = - Format.fprintf ppf "|%a" - (fun ppf -> - List.iter2 - (fun width str -> Format.fprintf ppf " %s%a |" str (pad pad_char) (width - (String.length str))) - max_widths) in + Format.fprintf ppf "|%a" (fun ppf -> + List.iter2 + (fun width str -> + Format.fprintf + ppf + " %s%a |" + str + (pad pad_char) + (width - String.length str)) + max_widths) + in let pp_line c ppf = - Format.fprintf ppf "+%a" - (fun ppf -> - List.iter2 - (fun width _str -> Format.fprintf ppf "%a+" (pad c) (width + 2)) - max_widths) in - 
Format.fprintf ppf "%a@\n%a@\n%a@\n%a@\n@\n" - (pp_line '-') headers - (pp_row ' ') headers - (pp_line '=') headers + Format.fprintf ppf "+%a" (fun ppf -> + List.iter2 + (fun width _str -> Format.fprintf ppf "%a+" (pad c) (width + 2)) + max_widths) + in + Format.fprintf + ppf + "%a@\n%a@\n%a@\n%a@\n@\n" + (pp_line '-') + headers + (pp_row ' ') + headers + (pp_line '=') + headers (Format.pp_print_list ~pp_sep:(fun ppf () -> Format.fprintf ppf "@\n") (fun ppf s -> - Format.fprintf ppf "%a@\n%a" - (pp_row ' ') s - (pp_line '-') s)) + Format.fprintf ppf "%a@\n%a" (pp_row ' ') s (pp_line '-') s)) body - let pp_option_nl ppf = - Option.iter ~f:(Format.fprintf ppf "%s@\n@\n") + let pp_option_nl ppf = Option.iter ~f:(Format.fprintf ppf "%s@\n@\n") let pp_toplevel ppf = function - | Printer_ast.Table table -> pp_table ppf table + | Printer_ast.Table table -> + pp_table ppf table | Union (_tag_size, tables) -> - Format.fprintf ppf + Format.fprintf + ppf "%a" (fun ppf -> - Format.pp_print_list - ~pp_sep:(fun ppf () -> Format.fprintf ppf "@\n") - (fun ppf (descr, table) -> - Format.fprintf ppf - "%a%a%a" - (pp_title 2) descr.title - pp_option_nl descr.description - pp_table table) - ppf) + Format.pp_print_list + ~pp_sep:(fun ppf () -> Format.fprintf ppf "@\n") + (fun ppf (descr, table) -> + Format.fprintf + ppf + "%a%a%a" + (pp_title 2) + descr.title + pp_option_nl + descr.description + pp_table + table) + ppf) tables - let pp ppf { toplevel; fields } = - let _, toplevel = - Printer_ast.toplevel ({ title = "" ; description = None}, toplevel) in - Format.fprintf ppf "%a@\n%a" - pp_toplevel toplevel + let pp ppf {toplevel; fields} = + let (_, toplevel) = + Printer_ast.toplevel ({title = ""; description = None}, toplevel) + in + Format.fprintf + ppf + "%a@\n%a" + pp_toplevel + toplevel (Format.pp_print_list ~pp_sep:(fun ppf () -> Format.fprintf ppf "@\n") (fun ppf (descr, toplevel) -> - Format.fprintf ppf - "%a%a%a" - (pp_title 1) descr.title - pp_option_nl descr.description - pp_toplevel toplevel)) + Format.fprintf + ppf + "%a%a%a" + (pp_title 1) + descr.title + pp_option_nl + descr.description + pp_toplevel + toplevel)) (List.map Printer_ast.toplevel fields) - end module Encoding = struct - let description_encoding = conv - (fun { title ; description } -> (title, description)) - (fun (title, description) -> { title ; description }) - (obj2 - (req "title" string) - (opt "description" string)) - + (fun {title; description} -> (title, description)) + (fun (title, description) -> {title; description}) + (obj2 (req "title" string) (opt "description" string)) let integer_cases = - [ ("Int16", `Int16) ; - ("Int8", `Int8) ; - ("Uint16", `Uint16) ; - ("Uint8", `Uint8) ] + [("Int16", `Int16); ("Int8", `Int8); ("Uint16", `Uint16); ("Uint8", `Uint8)] let integer_encoding : Binary_size.integer encoding = string_enum integer_cases let integer_extended_encoding = - string_enum - (("Int64", `Int64) :: - ("Int32", `Int32) :: - integer_cases) + string_enum (("Int64", `Int64) :: ("Int32", `Int32) :: integer_cases) let layout_encoding = - mu "layout" - (fun layout -> - union [ - case - ~title:"Zero_width" + mu "layout" (fun layout -> + union + [ case + ~title:"Zero_width" + (Tag 0) + (obj1 (req "kind" (constant "Zero_width"))) + (function Zero_width -> Some () | _ -> None) + (fun () -> Zero_width); + case + ~title:"Int" + (Tag 1) + (obj2 + (req "size" integer_extended_encoding) + (req "kind" (constant "Int"))) + (function Int integer -> Some (integer, ()) | _ -> None) + (fun (integer, _) -> Int integer); + case + 
~title:"Bool" + (Tag 2) + (obj1 (req "kind" (constant "Bool"))) + (function Bool -> Some () | _ -> None) + (fun () -> Bool); + case + ~title:"RangedInt" + (Tag 3) + (obj3 + (req "min" int31) + (req "max" int31) + (req "kind" (constant "RangedInt"))) + (function + | RangedInt (min, max) -> Some (min, max, ()) | _ -> None) + (fun (min, max, _) -> RangedInt (min, max)); + case + ~title:"RangedFloat" + (Tag 4) + (obj3 + (req "min" float) + (req "max" float) + (req "kind" (constant "RangedFloat"))) + (function + | RangedFloat (min, max) -> Some (min, max, ()) | _ -> None) + (fun (min, max, ()) -> RangedFloat (min, max)); + case + ~title:"Float" + (Tag 5) + (obj1 (req "kind" (constant "Float"))) + (function Float -> Some () | _ -> None) + (fun () -> Float); + case + ~title:"Bytes" + (Tag 6) + (obj1 (req "kind" (constant "Bytes"))) + (function Bytes -> Some () | _ -> None) + (fun () -> Bytes); + case + ~title:"String" + (Tag 7) + (obj1 (req "kind" (constant "String"))) + (function String -> Some () | _ -> None) + (fun () -> String); + case + ~title:"Enum" + (Tag 8) + (obj3 + (req "size" integer_encoding) + (req "reference" string) + (req "kind" (constant "Enum"))) + (function + | Enum (size, cases) -> Some (size, cases, ()) | _ -> None) + (fun (size, cases, _) -> Enum (size, cases)); + case + ~title:"Seq" + (Tag 9) + (obj3 + (req "layout" layout) + (req "kind" (constant "Seq")) + (opt "max_length" int31)) + (function + | Seq (layout, len) -> Some (layout, (), len) | _ -> None) + (fun (layout, (), len) -> Seq (layout, len)); + case + ~title:"Ref" + (Tag 10) + (obj2 (req "name" string) (req "kind" (constant "Ref"))) + (function Ref layout -> Some (layout, ()) | _ -> None) + (fun (name, ()) -> Ref name); + case + ~title:"Padding" + (Tag 11) + (obj1 (req "kind" (constant "Padding"))) + (function Padding -> Some () | _ -> None) + (fun () -> Padding) ]) + + let kind_enum_cases () = + [ case + ~title:"Dynamic" + (Tag 0) + (obj1 (req "kind" (constant "Dynamic"))) + (function `Dynamic -> Some () | _ -> None) + (fun () -> `Dynamic); + case + ~title:"Variable" + (Tag 1) + (obj1 (req "kind" (constant "Variable"))) + (function `Variable -> Some () | _ -> None) + (fun () -> `Variable) ] + + let kind_t_encoding = + def "schema.kind" + @@ union + ( case + ~title:"Fixed" + (Tag 2) + (obj2 (req "size" int31) (req "kind" (constant "Float"))) + (function `Fixed n -> Some (n, ()) | _ -> None) + (fun (n, _) -> `Fixed n) + :: kind_enum_cases () ) + + let unsigned_integer_encoding = + string_enum [("Uint30", `Uint30); ("Uint16", `Uint16); ("Uint8", `Uint8)] + + let field_descr_encoding = + let dynamic_layout_encoding = dynamic_size layout_encoding in + def "schema.field" + @@ union + [ case + ~title:"Named_field" (Tag 0) - (obj1 - (req "kind" (constant "Zero_width"))) + (obj4 + (req "name" string) + (req "layout" dynamic_layout_encoding) + (req "data_kind" kind_t_encoding) + (req "kind" (constant "named"))) (function - | Zero_width -> Some () - | _ -> None) - (fun () -> Zero_width) ; - case ~title:"Int" + | Named_field (name, kind, layout) -> + Some (name, layout, kind, ()) + | _ -> + None) + (fun (name, kind, layout, _) -> Named_field (name, layout, kind)); + case + ~title:"Anonymous_field" (Tag 1) - (obj2 - (req "size" integer_extended_encoding) - (req "kind" (constant "Int"))) + (obj3 + (req "layout" dynamic_layout_encoding) + (req "kind" (constant "anon")) + (req "data_kind" kind_t_encoding)) (function - | Int integer -> Some (integer, ()) - | _ -> None) - (fun (integer, _)-> Int integer) ; - case ~title:"Bool" + | 
Anonymous_field (kind, layout) -> + Some (layout, (), kind) + | _ -> + None) + (fun (kind, _, layout) -> Anonymous_field (layout, kind)); + case + ~title:"Dynamic_field" (Tag 2) - (obj1 (req "kind" (constant "Bool"))) + (obj4 + (req "kind" (constant "dyn")) + (opt "name" string) + (req "num_fields" int31) + (req "size" unsigned_integer_encoding)) (function - | Bool -> Some () - | _ -> None) - (fun () -> Bool) ; - case ~title:"RangedInt" + | Dynamic_size_field (name, i, size) -> + Some ((), name, i, size) + | _ -> + None) + (fun ((), name, i, size) -> Dynamic_size_field (name, i, size)); + case + ~title:"Optional_field" (Tag 3) - (obj3 - (req "min" int31) - (req "max" int31) - (req "kind" (constant "RangedInt"))) - (function - | RangedInt (min, max) -> Some (min, max, ()) - | _ -> None) - (fun (min, max, _) -> RangedInt (min, max)) ; - case ~title:"RangedFloat" - (Tag 4) - (obj3 - (req "min" float) - (req "max" float) - (req "kind" (constant "RangedFloat"))) - (function - | RangedFloat (min, max) -> Some (min, max, ()) - | _ -> None) - (fun (min, max, ()) -> RangedFloat (min, max)) ; - case ~title:"Float" - (Tag 5) - (obj1 (req "kind" (constant "Float"))) - (function - | Float -> Some () - | _ -> None) - (fun () -> Float) ; - case ~title:"Bytes" - (Tag 6) - (obj1 (req "kind" (constant "Bytes"))) - (function - | Bytes -> Some () - | _ -> None) - (fun () -> Bytes) ; - case ~title:"String" - (Tag 7) - (obj1 (req "kind" (constant "String"))) - (function - | String -> Some () - | _ -> None) - (fun () -> String) ; - case ~title:"Enum" - (Tag 8) - (obj3 - (req "size" integer_encoding) - (req "reference" string) - (req "kind" (constant "Enum"))) - (function - | Enum (size, cases) -> Some (size, cases, ()) - | _ -> None) - (fun (size, cases, _) -> Enum (size, cases)) ; - case ~title:"Seq" - (Tag 9) - (obj3 - (req "layout" layout) - (req "kind" (constant "Seq")) - (opt "max_length" int31)) - (function - | Seq (layout, len) -> Some (layout, (), len) - | _ -> None) - (fun (layout, (), len) -> Seq (layout, len)) ; - case ~title:"Ref" - (Tag 10) (obj2 - (req "name" string) - (req "kind" (constant "Ref"))) - (function - | Ref layout -> Some (layout, ()) - | _ -> None) - (fun (name, ()) -> Ref name) ; - case ~title:"Padding" - (Tag 11) - (obj1 - (req "kind" (constant "Padding"))) - (function - | Padding -> Some () - | _ -> None) - (fun () -> Padding) ; - ]) - - let kind_enum_cases = - (fun () -> - [ case ~title:"Dynamic" - (Tag 0) - (obj1 (req "kind" (constant "Dynamic"))) - (function `Dynamic -> Some () - | _ -> None) - (fun () -> `Dynamic) ; - case ~title:"Variable" - (Tag 1) - (obj1 (req "kind" (constant "Variable"))) - (function `Variable -> Some () - | _ -> None) - (fun () -> `Variable) ]) + (req "kind" (constant "option_indicator")) + (req "name" string)) + (function Optional_field s -> Some ((), s) | _ -> None) + (fun ((), s) -> Optional_field s) ] - let kind_t_encoding = - def "schema.kind" @@ + let tag_size_encoding = string_enum [("Uint16", `Uint16); ("Uint8", `Uint8)] + + let binary_description_encoding = union - ((case ~title:"Fixed" + [ case + ~title:"Obj" + (Tag 0) + (obj1 (req "fields" (list (dynamic_size field_descr_encoding)))) + (function Obj {fields} -> Some fields | _ -> None) + (fun fields -> Obj {fields}); + case + ~title:"Cases" + (Tag 1) + (obj3 + (req "tag_size" tag_size_encoding) + (req "kind" (dynamic_size kind_t_encoding)) + (req + "cases" + (list + ( def "union case" + @@ conv + (fun (tag, name, fields) -> (tag, fields, name)) + (fun (tag, fields, name) -> (tag, name, fields)) 
+ @@ obj3 + (req "tag" int31) + (req + "fields" + (list (dynamic_size field_descr_encoding))) + (opt "name" string) )))) + (function + | Cases {kind; tag_size; cases} -> + Some (tag_size, kind, cases) + | _ -> + None) + (fun (tag_size, kind, cases) -> Cases {kind; tag_size; cases}); + case + ~title:"Int_enum" (Tag 2) (obj2 - (req "size" int31) - (req "kind" (constant "Float"))) - (function `Fixed n -> Some (n, ()) - | _ -> None) - (fun (n, _) -> `Fixed n)) :: (kind_enum_cases ())) - - let unsigned_integer_encoding = - string_enum - [("Uint30", `Uint30) ; - ("Uint16", `Uint16) ; - ("Uint8", `Uint8) ] - - let field_descr_encoding = - let dynamic_layout_encoding = dynamic_size layout_encoding in - def "schema.field" @@ - union [ - case ~title:"Named_field" - (Tag 0) - (obj4 - (req "name" string) - (req "layout" dynamic_layout_encoding) - (req "data_kind" kind_t_encoding) - (req "kind" (constant "named"))) - (function Named_field (name, kind, layout) -> Some (name, layout, kind, ()) - | _ -> None) - (fun (name, kind, layout, _) -> Named_field (name, layout, kind)) ; - case ~title:"Anonymous_field" - (Tag 1) - (obj3 - (req "layout" dynamic_layout_encoding) - (req "kind" (constant "anon")) - (req "data_kind" kind_t_encoding)) - (function Anonymous_field (kind, layout) -> Some (layout, (), kind) - | _ -> None) - (fun (kind, _, layout) -> Anonymous_field (layout, kind)) ; - case ~title:"Dynamic_field" - (Tag 2) - (obj4 - (req "kind" (constant "dyn")) - (opt "name" string) - (req "num_fields" int31) - (req "size" unsigned_integer_encoding)) - (function Dynamic_size_field (name, i, size) -> Some ((), name, i, size) - | _ -> None) - (fun ((), name, i, size) -> Dynamic_size_field (name, i, size)) ; - case ~title:"Optional_field" - (Tag 3) - (obj2 - (req "kind" (constant "option_indicator")) - (req "name" string)) - (function Optional_field s -> Some ((), s) - | _ -> None) - (fun ((), s) -> Optional_field s) - ] - - let tag_size_encoding = - string_enum - [("Uint16", `Uint16) ; - ("Uint8", `Uint8) ] - - let binary_description_encoding = - union [ - case ~title:"Obj" - (Tag 0) - (obj1 - (req "fields" (list (dynamic_size field_descr_encoding)))) - (function - | Obj { fields } -> Some (fields) - | _ -> None) - (fun (fields) -> Obj { fields }) ; - case ~title:"Cases" - (Tag 1) - (obj3 - (req "tag_size" tag_size_encoding) - (req "kind" (dynamic_size kind_t_encoding)) - (req "cases" - (list - (def "union case" @@ - conv - (fun (tag, name, fields) -> (tag, fields, name)) - (fun (tag, fields, name) -> (tag, name, fields)) @@ - obj3 - (req "tag" int31) - (req "fields" (list (dynamic_size field_descr_encoding))) - (opt "name" string))))) - (function - | Cases { kind ; tag_size ; cases } -> - Some (tag_size, kind, cases) - | _ -> None) - (fun (tag_size, kind, cases) -> - Cases { kind ; tag_size ; cases }) ; - case ~title:"Int_enum" - (Tag 2) - (obj2 - (req "size" integer_encoding) - (req "cases" (list (tup2 int31 string)))) - (function Int_enum { size ; cases } -> Some (size, cases) - | _ -> None) - (fun (size, cases) -> Int_enum { size ; cases }) - ] + (req "size" integer_encoding) + (req "cases" (list (tup2 int31 string)))) + (function Int_enum {size; cases} -> Some (size, cases) | _ -> None) + (fun (size, cases) -> Int_enum {size; cases}) ] let encoding = conv - (fun { toplevel ; fields } -> (toplevel, fields)) - (fun (toplevel, fields) -> { toplevel ; fields }) @@ - obj2 - (req "toplevel" binary_description_encoding) - (req "fields" - (list - (obj2 - (req "description" description_encoding) - (req "encoding" 
binary_description_encoding)))) - + (fun {toplevel; fields} -> (toplevel, fields)) + (fun (toplevel, fields) -> {toplevel; fields}) + @@ obj2 + (req "toplevel" binary_description_encoding) + (req + "fields" + (list + (obj2 + (req "description" description_encoding) + (req "encoding" binary_description_encoding)))) end let encoding = Encoding.encoding + let pp = Printer.pp diff --git a/src/lib_data_encoding/binary_schema.mli b/src/lib_data_encoding/binary_schema.mli index 768402a96773e4142c96453ac228934de058992c..e97bc20954dab5f1936356c9b451546795859b77 100644 --- a/src/lib_data_encoding/binary_schema.mli +++ b/src/lib_data_encoding/binary_schema.mli @@ -25,7 +25,7 @@ (** This is for use *within* the data encoding library only. *) -type integer_extended = [ Binary_size.integer | `Int32 | `Int64 ] +type integer_extended = [Binary_size.integer | `Int32 | `Int64] type field_descr = | Named_field of string * Encoding.Kind.t * layout @@ -50,21 +50,20 @@ and layout = and fields = field_descr list and toplevel_encoding = - | Obj of { fields : fields } - | Cases of { kind : Encoding.Kind.t ; - tag_size : Binary_size.tag_size ; - cases : (int * string option * fields) list } - | Int_enum of { size : Binary_size.integer ; - cases : (int * string) list } + | Obj of {fields : fields} + | Cases of + { kind : Encoding.Kind.t; + tag_size : Binary_size.tag_size; + cases : (int * string option * fields) list } + | Int_enum of {size : Binary_size.integer; cases : (int * string) list} -and description = - { title : string ; - description : string option } +and description = {title : string; description : string option} type t = { - toplevel: toplevel_encoding ; - fields: (description * toplevel_encoding) list ; + toplevel : toplevel_encoding; + fields : (description * toplevel_encoding) list } -val pp: Format.formatter -> t -> unit -val encoding: t Encoding.t +val pp : Format.formatter -> t -> unit + +val encoding : t Encoding.t diff --git a/src/lib_data_encoding/binary_size.ml b/src/lib_data_encoding/binary_size.ml index b11c8b476e72b54be58362d63fe1e4f30a29618e..ff0b11f1bca9efabea48fd037c603056e4ee5221 100644 --- a/src/lib_data_encoding/binary_size.ml +++ b/src/lib_data_encoding/binary_size.ml @@ -24,70 +24,89 @@ (*****************************************************************************) let bool = 1 + let int8 = 1 + let uint8 = 1 + let char = 1 + let int16 = 2 + let uint16 = 2 + let uint30 = 4 + let uint32 = 4 + let uint64 = 8 + let int31 = 4 + let int32 = 4 + let int64 = 8 + let float = 8 -type tag_size = [ `Uint8 | `Uint16 ] +type tag_size = [`Uint8 | `Uint16] + +let tag_size = function `Uint8 -> uint8 | `Uint16 -> uint16 + +type signed_integer = [`Int31 | `Int16 | `Int8] -let tag_size = function - | `Uint8 -> uint8 - | `Uint16 -> uint16 +type unsigned_integer = [`Uint30 | `Uint16 | `Uint8] -type signed_integer = [ `Int31 | `Int16 | `Int8 ] -type unsigned_integer = [ `Uint30 | `Uint16 | `Uint8 ] -type integer = [ signed_integer | unsigned_integer ] +type integer = [signed_integer | unsigned_integer] -let signed_range_to_size min max : [> signed_integer ] = - if min >= ~-128 && max <= 127 - then `Int8 - else if min >= ~-32_768 && max <= 32_767 - then `Int16 +let signed_range_to_size min max : [> signed_integer] = + if min >= ~-128 && max <= 127 then `Int8 + else if min >= ~-32_768 && max <= 32_767 then `Int16 else `Int31 (* max should be centered at zero *) -let unsigned_range_to_size max : [> unsigned_integer ] = +let unsigned_range_to_size max : [> unsigned_integer] = assert (max >= 0) ; - if max <= 
255 - then `Uint8 - else if max <= 65535 - then `Uint16 - else `Uint30 + if max <= 255 then `Uint8 else if max <= 65535 then `Uint16 else `Uint30 let integer_to_size = function - | `Int31 -> int31 - | `Int16 -> int16 - | `Int8 -> int8 - | `Uint30 -> uint30 - | `Uint16 -> uint16 - | `Uint8 -> uint8 + | `Int31 -> + int31 + | `Int16 -> + int16 + | `Int8 -> + int8 + | `Uint30 -> + uint30 + | `Uint16 -> + uint16 + | `Uint8 -> + uint8 let max_int = function - | `Uint30 | `Int31 -> (1 lsl 30) - 1 - | `Int16 -> 1 lsl 15 - 1 - | `Int8 -> 1 lsl 7 - 1 - | `Uint16 -> 1 lsl 16 - 1 - | `Uint8 -> 1 lsl 8 - 1 + | `Uint30 | `Int31 -> + (1 lsl 30) - 1 + | `Int16 -> + (1 lsl 15) - 1 + | `Int8 -> + (1 lsl 7) - 1 + | `Uint16 -> + (1 lsl 16) - 1 + | `Uint8 -> + (1 lsl 8) - 1 let min_int = function - | `Uint8 | `Uint16 | `Uint30 -> 0 - | `Int31 -> - (1 lsl 30) - | `Int16 -> - (1 lsl 15) - | `Int8 -> - (1 lsl 7) + | `Uint8 | `Uint16 | `Uint30 -> + 0 + | `Int31 -> + -(1 lsl 30) + | `Int16 -> + -(1 lsl 15) + | `Int8 -> + -(1 lsl 7) let range_to_size ~minimum ~maximum : integer = - if minimum < 0 - then signed_range_to_size minimum maximum + if minimum < 0 then signed_range_to_size minimum maximum else unsigned_range_to_size (maximum - minimum) -let enum_size arr = - unsigned_range_to_size (Array.length arr) +let enum_size arr = unsigned_range_to_size (Array.length arr) diff --git a/src/lib_data_encoding/binary_size.mli b/src/lib_data_encoding/binary_size.mli index ae8d0d002a05dfbaac0f0e5c6575f2b503d18070..9912b9581c0b704e77ef2418ca9aba06e5cd0fef 100644 --- a/src/lib_data_encoding/binary_size.mli +++ b/src/lib_data_encoding/binary_size.mli @@ -25,34 +25,50 @@ (** This is for use *within* the data encoding library only. *) -val bool: int -val int8: int -val uint8: int -val char: int -val int16: int -val uint16: int -val uint30: int -val uint32: int -val uint64: int -val int31: int -val int32: int -val int64: int -val float: int +val bool : int -type tag_size = [ `Uint8 | `Uint16 ] +val int8 : int -val tag_size: tag_size -> int +val uint8 : int -type signed_integer = [ `Int31 | `Int16 | `Int8 ] -type unsigned_integer = [ `Uint30 | `Uint16 | `Uint8 ] -type integer = [ signed_integer | unsigned_integer ] +val char : int -val integer_to_size: [< integer ] -> int +val int16 : int -val min_int: [< integer ] -> int -val max_int: [< integer ] -> int +val uint16 : int -val range_to_size: minimum:int -> maximum:int -> integer -val unsigned_range_to_size: int -> unsigned_integer +val uint30 : int -val enum_size: 'a array -> [> unsigned_integer ] +val uint32 : int + +val uint64 : int + +val int31 : int + +val int32 : int + +val int64 : int + +val float : int + +type tag_size = [`Uint8 | `Uint16] + +val tag_size : tag_size -> int + +type signed_integer = [`Int31 | `Int16 | `Int8] + +type unsigned_integer = [`Uint30 | `Uint16 | `Uint8] + +type integer = [signed_integer | unsigned_integer] + +val integer_to_size : [< integer] -> int + +val min_int : [< integer] -> int + +val max_int : [< integer] -> int + +val range_to_size : minimum:int -> maximum:int -> integer + +val unsigned_range_to_size : int -> unsigned_integer + +val enum_size : 'a array -> [> unsigned_integer] diff --git a/src/lib_data_encoding/binary_stream_reader.ml b/src/lib_data_encoding/binary_stream_reader.ml index dda9e0344ad9b49ad5166afe0b5f1b53d026e7ba..aa39932b5195addaff6f19abbd18200b10aacdb1 100644 --- a/src/lib_data_encoding/binary_stream_reader.ml +++ b/src/lib_data_encoding/binary_stream_reader.ml @@ -29,43 +29,43 @@ let raise e = raise (Read_error e) (** 
Persistent state of the binary reader. *) type state = { - - stream : Binary_stream.t ; - (** All the remaining data to be read. *) - - remaining_bytes : int option ; - (** Total number of bytes that should be from 'stream' (None = + stream : Binary_stream.t; (** All the remaining data to be read. *) + remaining_bytes : int option; + (** Total number of bytes that should be from 'stream' (None = illimited). Reading less bytes should raise [Extra_bytes] and trying to read more bytes should raise [Not_enough_data]. *) - - allowed_bytes : int option ; - (** Maximum number of bytes that are allowed to be read from 'stream' + allowed_bytes : int option; + (** Maximum number of bytes that are allowed to be read from 'stream' before to fail (None = illimited). *) - - total_read : int ; - (** Total number of bytes that has been read from [stream] since the + total_read : int + (** Total number of bytes that has been read from [stream] since the beginning. *) - } (** Return type for the function [read_rec]. See [Data_encoding] for its description. *) type 'ret status = - | Success of { result : 'ret ; size : int ; stream : Binary_stream.t } + | Success of {result : 'ret; size : int; stream : Binary_stream.t} | Await of (MBytes.t -> 'ret status) | Error of read_error let check_remaining_bytes state size = match state.remaining_bytes with - | Some len when len < size -> raise Not_enough_data - | Some len -> Some (len - size) - | None -> None + | Some len when len < size -> + raise Not_enough_data + | Some len -> + Some (len - size) + | None -> + None let check_allowed_bytes state size = match state.allowed_bytes with - | Some len when len < size -> raise Size_limit_exceeded - | Some len -> Some (len - size) - | None -> None + | Some len when len < size -> + raise Size_limit_exceeded + | Some len -> + Some (len - size) + | None -> + None (** [read_atom resume size conv state k] reads [size] bytes from [state], pass it to [conv] to be decoded, and finally call the continuation [k] @@ -88,111 +88,133 @@ let read_atom resume size conv state k = match let remaining_bytes = check_remaining_bytes state size in let allowed_bytes = check_allowed_bytes state size in - let res, stream = Binary_stream.read state.stream size in - conv res.buffer res.ofs, - { remaining_bytes ; allowed_bytes ; stream ; - total_read = state.total_read + size } + let (res, stream) = Binary_stream.read state.stream size in + ( conv res.buffer res.ofs, + { remaining_bytes; + allowed_bytes; + stream; + total_read = state.total_read + size } ) with - | exception (Read_error error) -> Error error - | exception Binary_stream.Need_more_data -> Await resume - | v -> k v (* tail call *) + | exception Read_error error -> + Error error + | exception Binary_stream.Need_more_data -> + Await resume + | v -> + k v + +(* tail call *) (** Reader for all the atomic types. 
*) module Atom = struct - let uint8 r = read_atom r Binary_size.uint8 MBytes.get_uint8 + let uint16 r = read_atom r Binary_size.int16 MBytes.get_uint16 let int8 r = read_atom r Binary_size.int8 MBytes.get_int8 + let int16 r = read_atom r Binary_size.int16 MBytes.get_int16 + let int32 r = read_atom r Binary_size.int32 MBytes.get_int32 + let int64 r = read_atom r Binary_size.int64 MBytes.get_int64 let float r = read_atom r Binary_size.float MBytes.get_double let bool resume state k = - int8 resume state @@ fun (v, state) -> - k (v <> 0, state) + int8 resume state @@ fun (v, state) -> k (v <> 0, state) let uint30 r = - read_atom r Binary_size.uint30 @@ fun buffer ofs -> + read_atom r Binary_size.uint30 + @@ fun buffer ofs -> let v = Int32.to_int (MBytes.get_int32 buffer ofs) in - if v < 0 then - raise (Invalid_int { min = 0 ; v ; max = (1 lsl 30) - 1 }) ; + if v < 0 then raise (Invalid_int {min = 0; v; max = (1 lsl 30) - 1}) ; v let int31 r = - read_atom r Binary_size.int31 @@ fun buffer ofs -> - Int32.to_int (MBytes.get_int32 buffer ofs) + read_atom r Binary_size.int31 + @@ fun buffer ofs -> Int32.to_int (MBytes.get_int32 buffer ofs) let int = function - | `Int31 -> int31 - | `Int16 -> int16 - | `Int8 -> int8 - | `Uint30 -> uint30 - | `Uint16 -> uint16 - | `Uint8 -> uint8 + | `Int31 -> + int31 + | `Int16 -> + int16 + | `Int8 -> + int8 + | `Uint30 -> + uint30 + | `Uint16 -> + uint16 + | `Uint8 -> + uint8 let ranged_int ~minimum ~maximum resume state k = let read_int = match Binary_size.range_to_size ~minimum ~maximum with - | `Int8 -> int8 - | `Int16 -> int16 - | `Int31 -> int31 - | `Uint8 -> uint8 - | `Uint16 -> uint16 - | `Uint30 -> uint30 in - read_int resume state @@ fun (ranged, state) -> + | `Int8 -> + int8 + | `Int16 -> + int16 + | `Int31 -> + int31 + | `Uint8 -> + uint8 + | `Uint16 -> + uint16 + | `Uint30 -> + uint30 + in + read_int resume state + @@ fun (ranged, state) -> let ranged = if minimum > 0 then ranged + minimum else ranged in if not (minimum <= ranged && ranged <= maximum) then - Error (Invalid_int { min = minimum ; v =ranged ; max = maximum }) - else - k (ranged, state) + Error (Invalid_int {min = minimum; v = ranged; max = maximum}) + else k (ranged, state) let ranged_float ~minimum ~maximum resume state k = - float resume state @@ fun (ranged, state) -> + float resume state + @@ fun (ranged, state) -> if not (minimum <= ranged && ranged <= maximum) then - Error (Invalid_float { min = minimum ; v = ranged ; max = maximum }) - else - k (ranged, state) + Error (Invalid_float {min = minimum; v = ranged; max = maximum}) + else k (ranged, state) let rec read_z res value bit_in_value state k = let resume buffer = let stream = Binary_stream.push buffer state.stream in - read_z res value bit_in_value { state with stream } k in - uint8 resume state @@ fun (byte, state) -> + read_z res value bit_in_value {state with stream} k + in + uint8 resume state + @@ fun (byte, state) -> let value = value lor ((byte land 0x7F) lsl bit_in_value) in let bit_in_value = bit_in_value + 7 in - let bit_in_value, value = - if bit_in_value < 8 then - (bit_in_value, value) - else begin + let (bit_in_value, value) = + if bit_in_value < 8 then (bit_in_value, value) + else ( Buffer.add_char res (Char.unsafe_chr (value land 0xFF)) ; - bit_in_value - 8, value lsr 8 - end in - if byte land 0x80 = 0x80 then - read_z res value bit_in_value state k - else begin + (bit_in_value - 8, value lsr 8) ) + in + if byte land 0x80 = 0x80 then read_z res value bit_in_value state k + else ( if bit_in_value > 0 then 
Buffer.add_char res (Char.unsafe_chr value) ; if byte = 0x00 then raise Trailing_zero ; - k (Z.of_bits (Buffer.contents res), state) - end + k (Z.of_bits (Buffer.contents res), state) ) let n resume state k = - uint8 resume state @@ fun (first, state) -> + uint8 resume state + @@ fun (first, state) -> let first_value = first land 0x7F in if first land 0x80 = 0x80 then read_z (Buffer.create 100) first_value 7 state k - else - k (Z.of_int first_value, state) + else k (Z.of_int first_value, state) let z resume state k = - uint8 resume state @@ fun (first, state) -> + uint8 resume state + @@ fun (first, state) -> let first_value = first land 0x3F in - let sign = (first land 0x40) <> 0 in + let sign = first land 0x40 <> 0 in if first land 0x80 = 0x80 then - read_z (Buffer.create 100) first_value 6 state @@ fun (n, state) -> - k ((if sign then Z.neg n else n), state) + read_z (Buffer.create 100) first_value 6 state + @@ fun (n, state) -> k ((if sign then Z.neg n else n), state) else let n = Z.of_int first_value in k ((if sign then Z.neg n else n), state) @@ -200,244 +222,288 @@ module Atom = struct let string_enum arr resume state k = let read_index = match Binary_size.enum_size arr with - | `Uint8 -> uint8 - | `Uint16 -> uint16 - | `Uint30 -> uint30 in - read_index resume state @@ fun (index, state) -> - if index >= Array.length arr then - Error No_case_matched - else - k (arr.(index), state) + | `Uint8 -> + uint8 + | `Uint16 -> + uint16 + | `Uint30 -> + uint30 + in + read_index resume state + @@ fun (index, state) -> + if index >= Array.length arr then Error No_case_matched + else k (arr.(index), state) let fixed_length_bytes length r = - read_atom r length @@ fun buf ofs -> - MBytes.sub buf ofs length + read_atom r length @@ fun buf ofs -> MBytes.sub buf ofs length let fixed_length_string length r = - read_atom r length @@ fun buf ofs -> - MBytes.sub_string buf ofs length - - let tag = function - | `Uint8 -> uint8 - | `Uint16 -> uint16 + read_atom r length @@ fun buf ofs -> MBytes.sub_string buf ofs length + let tag = function `Uint8 -> uint8 | `Uint16 -> uint16 end let rec skip n state k = let resume buffer = let stream = Binary_stream.push buffer state.stream in - try skip n { state with stream } k - with Read_error err -> Error err in - Atom.fixed_length_string n resume state @@ fun (_, state : string * _) -> - k state + try skip n {state with stream} k with Read_error err -> Error err + in + Atom.fixed_length_string n resume state + @@ fun ((_, state) : string * _) -> k state (** Main recursive reading function, in continuation passing style. *) -let rec read_rec - : type next ret. 
- bool -> next Encoding.t -> state -> ((next * state) -> ret status) -> ret status - = fun whole e state k -> - let resume buffer = - let stream = Binary_stream.push buffer state.stream in - try read_rec whole e { state with stream }k - with Read_error err -> Error err in - let open Encoding in - assert (Encoding.classify e <> `Variable || state.remaining_bytes <> None) ; - match e.encoding with - | Null -> k ((), state) - | Empty -> k ((), state) - | Constant _ -> k ((), state) - | Ignore -> k ((), state) - | Bool -> Atom.bool resume state k - | Int8 -> Atom.int8 resume state k - | Uint8 -> Atom.uint8 resume state k - | Int16 -> Atom.int16 resume state k - | Uint16 -> Atom.uint16 resume state k - | Int31 -> Atom.int31 resume state k - | Int32 -> Atom.int32 resume state k - | Int64 -> Atom.int64 resume state k - | N -> Atom.n resume state k - | Z -> Atom.z resume state k - | Float -> Atom.float resume state k - | Bytes (`Fixed n) -> Atom.fixed_length_bytes n resume state k - | Bytes `Variable -> - let size = remaining_bytes state in - Atom.fixed_length_bytes size resume state k - | String (`Fixed n) -> Atom.fixed_length_string n resume state k - | String `Variable -> - let size = remaining_bytes state in - Atom.fixed_length_string size resume state k - | Padded (e, n) -> - read_rec false e state @@ fun (v, state) -> - skip n state @@ (fun state -> k (v, state)) - | RangedInt { minimum ; maximum } -> - Atom.ranged_int ~minimum ~maximum resume state k - | RangedFloat { minimum ; maximum } -> - Atom.ranged_float ~minimum ~maximum resume state k - | String_enum (_, arr) -> - Atom.string_enum arr resume state k - | Array (max_length, e) -> - let max_length = Option.unopt ~default:max_int max_length in - read_list Array_too_long max_length e state @@ fun (l, state) -> - k (Array.of_list l, state) - | List (max_length, e) -> - let max_length = Option.unopt ~default:max_int max_length in - read_list List_too_long max_length e state k - | (Obj (Req { encoding = e ; _ })) -> read_rec whole e state k - | (Obj (Dft { encoding = e ; _ })) -> read_rec whole e state k - | (Obj (Opt { kind = `Dynamic ; encoding = e ; _ })) -> - Atom.bool resume state @@ fun (present, state) -> - if not present then - k (None, state) - else - read_rec whole e state @@ fun (v, state) -> - k (Some v, state) - | (Obj (Opt { kind = `Variable ; encoding = e ; _ })) -> - let size = remaining_bytes state in - if size = 0 then - k (None, state) - else - read_rec whole e state @@ fun (v, state) -> - k (Some v, state) - | Objs { kind = `Fixed sz ; left ; right } -> - ignore (check_remaining_bytes state sz : int option) ; - ignore (check_allowed_bytes state sz : int option) ; - read_rec false left state @@ fun (left, state) -> - read_rec whole right state @@ fun (right, state) -> - k ((left, right), state) - | Objs { kind = `Dynamic ; left ; right } -> - read_rec false left state @@ fun (left, state) -> - read_rec whole right state @@ fun (right, state) -> - k ((left, right), state) - | Objs { kind = `Variable ; left ; right } -> - read_variable_pair left right state k - | Tup e -> read_rec whole e state k - | Tups { kind = `Fixed sz ; left ; right } -> - ignore (check_remaining_bytes state sz : int option) ; - ignore (check_allowed_bytes state sz : int option) ; - read_rec false left state @@ fun (left, state) -> - read_rec whole right state @@ fun (right, state) -> - k ((left, right), state) - | Tups { kind = `Dynamic ; left ; right } -> - read_rec false left state @@ fun (left, state) -> - read_rec whole right state @@ fun (right, 
state) -> - k ((left, right), state) - | Tups { kind = `Variable ; left ; right } -> - read_variable_pair left right state k - | Conv { inj ; encoding ; _ } -> - read_rec whole encoding state @@ fun (v, state) -> - k (inj v, state) - | Union { tag_size ; cases ; _ } -> begin - Atom.tag tag_size resume state @@ fun (ctag, state) -> - match - List.find_opt - (function - | Case { tag = Tag tag ; _ } -> tag = ctag - | Case { tag = Json_only ; _ } -> false) - cases - with - | None -> Error (Unexpected_tag ctag) - | Some (Case { encoding ; inj ; _ }) -> - read_rec whole encoding state @@ fun (v, state) -> - k (inj v, state) - end - | Dynamic_size { kind ; encoding = e } -> - Atom.int kind resume state @@ fun (sz, state) -> - let remaining = check_remaining_bytes state sz in - let state = { state with remaining_bytes = Some sz } in - ignore (check_allowed_bytes state sz : int option) ; - read_rec true e state @@ fun (v, state) -> - if state.remaining_bytes <> Some 0 then - Error Extra_bytes - else - k (v, { state with remaining_bytes = remaining }) - | Check_size { limit ; encoding = e } -> - let old_allowed_bytes = state.allowed_bytes in - let limit = - match state.allowed_bytes with - | None -> limit - | Some current_limit -> min current_limit limit in - begin - match state.remaining_bytes with - | Some remaining when whole && limit < remaining -> - raise Size_limit_exceeded - | _ -> () - end ; - let state = { state with allowed_bytes = Some limit } in - read_rec whole e state @@ fun (v, state) -> - let allowed_bytes = - match old_allowed_bytes with - | None -> None - | Some old_limit -> - let remaining = - match state.allowed_bytes with - | None -> assert false - | Some remaining -> remaining in - let read = limit - remaining in - Some (old_limit - read) in - k (v, { state with allowed_bytes }) - | Describe { encoding = e ; _ } -> read_rec whole e state k - | Splitted { encoding = e ; _ } -> read_rec whole e state k - | Mu { fix ; _ } -> read_rec whole (fix e) state k - | Delayed f -> read_rec whole (f ()) state k - -and remaining_bytes { remaining_bytes ; _ } = +let rec read_rec : + type next ret. 
+ bool -> + next Encoding.t -> + state -> + (next * state -> ret status) -> + ret status = + fun whole e state k -> + let resume buffer = + let stream = Binary_stream.push buffer state.stream in + try read_rec whole e {state with stream} k + with Read_error err -> Error err + in + let open Encoding in + assert (Encoding.classify e <> `Variable || state.remaining_bytes <> None) ; + match e.encoding with + | Null -> + k ((), state) + | Empty -> + k ((), state) + | Constant _ -> + k ((), state) + | Ignore -> + k ((), state) + | Bool -> + Atom.bool resume state k + | Int8 -> + Atom.int8 resume state k + | Uint8 -> + Atom.uint8 resume state k + | Int16 -> + Atom.int16 resume state k + | Uint16 -> + Atom.uint16 resume state k + | Int31 -> + Atom.int31 resume state k + | Int32 -> + Atom.int32 resume state k + | Int64 -> + Atom.int64 resume state k + | N -> + Atom.n resume state k + | Z -> + Atom.z resume state k + | Float -> + Atom.float resume state k + | Bytes (`Fixed n) -> + Atom.fixed_length_bytes n resume state k + | Bytes `Variable -> + let size = remaining_bytes state in + Atom.fixed_length_bytes size resume state k + | String (`Fixed n) -> + Atom.fixed_length_string n resume state k + | String `Variable -> + let size = remaining_bytes state in + Atom.fixed_length_string size resume state k + | Padded (e, n) -> + read_rec false e state + @@ fun (v, state) -> skip n state @@ fun state -> k (v, state) + | RangedInt {minimum; maximum} -> + Atom.ranged_int ~minimum ~maximum resume state k + | RangedFloat {minimum; maximum} -> + Atom.ranged_float ~minimum ~maximum resume state k + | String_enum (_, arr) -> + Atom.string_enum arr resume state k + | Array (max_length, e) -> + let max_length = Option.unopt ~default:max_int max_length in + read_list Array_too_long max_length e state + @@ fun (l, state) -> k (Array.of_list l, state) + | List (max_length, e) -> + let max_length = Option.unopt ~default:max_int max_length in + read_list List_too_long max_length e state k + | Obj (Req {encoding = e; _}) -> + read_rec whole e state k + | Obj (Dft {encoding = e; _}) -> + read_rec whole e state k + | Obj (Opt {kind = `Dynamic; encoding = e; _}) -> + Atom.bool resume state + @@ fun (present, state) -> + if not present then k (None, state) + else read_rec whole e state @@ fun (v, state) -> k (Some v, state) + | Obj (Opt {kind = `Variable; encoding = e; _}) -> + let size = remaining_bytes state in + if size = 0 then k (None, state) + else read_rec whole e state @@ fun (v, state) -> k (Some v, state) + | Objs {kind = `Fixed sz; left; right} -> + ignore (check_remaining_bytes state sz : int option) ; + ignore (check_allowed_bytes state sz : int option) ; + read_rec false left state + @@ fun (left, state) -> + read_rec whole right state + @@ fun (right, state) -> k ((left, right), state) + | Objs {kind = `Dynamic; left; right} -> + read_rec false left state + @@ fun (left, state) -> + read_rec whole right state + @@ fun (right, state) -> k ((left, right), state) + | Objs {kind = `Variable; left; right} -> + read_variable_pair left right state k + | Tup e -> + read_rec whole e state k + | Tups {kind = `Fixed sz; left; right} -> + ignore (check_remaining_bytes state sz : int option) ; + ignore (check_allowed_bytes state sz : int option) ; + read_rec false left state + @@ fun (left, state) -> + read_rec whole right state + @@ fun (right, state) -> k ((left, right), state) + | Tups {kind = `Dynamic; left; right} -> + read_rec false left state + @@ fun (left, state) -> + read_rec whole right state + @@ fun (right, 
state) -> k ((left, right), state) + | Tups {kind = `Variable; left; right} -> + read_variable_pair left right state k + | Conv {inj; encoding; _} -> + read_rec whole encoding state @@ fun (v, state) -> k (inj v, state) + | Union {tag_size; cases; _} -> ( + Atom.tag tag_size resume state + @@ fun (ctag, state) -> + match + List.find_opt + (function + | Case {tag = Tag tag; _} -> + tag = ctag + | Case {tag = Json_only; _} -> + false) + cases + with + | None -> + Error (Unexpected_tag ctag) + | Some (Case {encoding; inj; _}) -> + read_rec whole encoding state @@ fun (v, state) -> k (inj v, state) ) + | Dynamic_size {kind; encoding = e} -> + Atom.int kind resume state + @@ fun (sz, state) -> + let remaining = check_remaining_bytes state sz in + let state = {state with remaining_bytes = Some sz} in + ignore (check_allowed_bytes state sz : int option) ; + read_rec true e state + @@ fun (v, state) -> + if state.remaining_bytes <> Some 0 then Error Extra_bytes + else k (v, {state with remaining_bytes = remaining}) + | Check_size {limit; encoding = e} -> + let old_allowed_bytes = state.allowed_bytes in + let limit = + match state.allowed_bytes with + | None -> + limit + | Some current_limit -> + min current_limit limit + in + ( match state.remaining_bytes with + | Some remaining when whole && limit < remaining -> + raise Size_limit_exceeded + | _ -> + () ) ; + let state = {state with allowed_bytes = Some limit} in + read_rec whole e state + @@ fun (v, state) -> + let allowed_bytes = + match old_allowed_bytes with + | None -> + None + | Some old_limit -> + let remaining = + match state.allowed_bytes with + | None -> + assert false + | Some remaining -> + remaining + in + let read = limit - remaining in + Some (old_limit - read) + in + k (v, {state with allowed_bytes}) + | Describe {encoding = e; _} -> + read_rec whole e state k + | Splitted {encoding = e; _} -> + read_rec whole e state k + | Mu {fix; _} -> + read_rec whole (fix e) state k + | Delayed f -> + read_rec whole (f ()) state k + +and remaining_bytes {remaining_bytes; _} = match remaining_bytes with | None -> (* This function should only be called with a variable encoding, for which the `remaining_bytes` should never be `None`. *) assert false - | Some len -> len + | Some len -> + len + +and read_variable_pair : + type left right ret. + left Encoding.t -> + right Encoding.t -> + state -> + ((left * right) * state -> ret status) -> + ret status = + fun e1 e2 state k -> + let size = remaining_bytes state in + match (Encoding.classify e1, Encoding.classify e2) with + | ((`Dynamic | `Fixed _), `Variable) -> + read_rec false e1 state + @@ fun (left, state) -> + read_rec true e2 state @@ fun (right, state) -> k ((left, right), state) + | (`Variable, `Fixed n) -> + if n > size then Error Not_enough_data + else + let state = {state with remaining_bytes = Some (size - n)} in + read_rec true e1 state + @@ fun (left, state) -> + assert (state.remaining_bytes = Some 0) ; + let state = {state with remaining_bytes = Some n} in + read_rec true e2 state + @@ fun (right, state) -> + assert (state.remaining_bytes = Some 0) ; + k ((left, right), state) + | _ -> + assert false -and read_variable_pair - : type left right ret. - left Encoding.t -> right Encoding.t -> state -> - (((left * right) * state) -> ret status) -> ret status - = fun e1 e2 state k -> +(* Should be rejected by [Encoding.Kind.combine] *) +and read_list : + type a ret. 
+ read_error -> + int -> + a Encoding.t -> + state -> + (a list * state -> ret status) -> + ret status = + fun error max_length e state k -> + let rec loop state acc max_length = let size = remaining_bytes state in - match Encoding.classify e1, Encoding.classify e2 with - | (`Dynamic | `Fixed _), `Variable -> - read_rec false e1 state @@ fun (left, state) -> - read_rec true e2 state @@ fun (right, state) -> - k ((left, right), state) - | `Variable, `Fixed n -> - if n > size then - Error Not_enough_data - else - let state = { state with remaining_bytes = Some (size - n) } in - read_rec true e1 state @@ fun (left, state) -> - assert (state.remaining_bytes = Some 0) ; - let state = { state with remaining_bytes = Some n } in - read_rec true e2 state @@ fun (right, state) -> - assert (state.remaining_bytes = Some 0) ; - k ((left, right), state) - | _ -> assert false (* Should be rejected by [Encoding.Kind.combine] *) - -and read_list - : type a ret. - read_error -> int -> a Encoding.t -> state -> ((a list * state) -> ret status) -> ret status - = fun error max_length e state k -> - let rec loop state acc max_length = - let size = remaining_bytes state in - if size = 0 then - k (List.rev acc, state) - else if max_length = 0 then - raise error - else - read_rec false e state @@ fun (v, state) -> - loop state (v :: acc) (max_length - 1) in - loop state [] max_length + if size = 0 then k (List.rev acc, state) + else if max_length = 0 then raise error + else + read_rec false e state + @@ fun (v, state) -> loop state (v :: acc) (max_length - 1) + in + loop state [] max_length let read_rec e state k = - try read_rec false e state k - with Read_error err -> Error err - - + try read_rec false e state k with Read_error err -> Error err (** ******************** *) + (** Various entry points *) let success (v, state) = - Success { result = v ; size = state.total_read ; stream = state.stream } + Success {result = v; size = state.total_read; stream = state.stream} let read_stream ?(init = Binary_stream.empty) encoding = match Encoding.classify encoding with @@ -445,6 +511,10 @@ let read_stream ?(init = Binary_stream.empty) encoding = invalid_arg "Data_encoding.Binary.read_stream: variable encoding" | `Dynamic | `Fixed _ -> (* No hardcoded read limit in a stream. *) - let state = { remaining_bytes = None ; allowed_bytes = None ; - stream = init ; total_read = 0 } in + let state = + { remaining_bytes = None; + allowed_bytes = None; + stream = init; + total_read = 0 } + in read_rec encoding state success diff --git a/src/lib_data_encoding/binary_stream_reader.mli b/src/lib_data_encoding/binary_stream_reader.mli index 8077c1798669348d914ab322337b4549355f66d9..df378ab95afcd39861b1d4947e29ea1ece50192c 100644 --- a/src/lib_data_encoding/binary_stream_reader.mli +++ b/src/lib_data_encoding/binary_stream_reader.mli @@ -27,8 +27,8 @@ use the corresponding module intended for use: {!Data_encoding.Binary}. 
*) type 'ret status = - | Success of { result : 'ret ; size : int ; stream : Binary_stream.t } + | Success of {result : 'ret; size : int; stream : Binary_stream.t} | Await of (MBytes.t -> 'ret status) | Error of Binary_error.read_error -val read_stream: ?init:Binary_stream.t -> 'a Encoding.t -> 'a status +val read_stream : ?init:Binary_stream.t -> 'a Encoding.t -> 'a status diff --git a/src/lib_data_encoding/binary_writer.ml b/src/lib_data_encoding/binary_writer.ml index c5c5da6a87422871bd3e97d6fda3b875637a8f45..5f039e2b643e56559c2fdd50c2a4414b90a7ccf7 100644 --- a/src/lib_data_encoding/binary_writer.ml +++ b/src/lib_data_encoding/binary_writer.ml @@ -29,24 +29,22 @@ let raise error = raise (Write_error error) (** Imperative state of the binary writer. *) type state = { - - mutable buffer : MBytes.t ; - (** The buffer where to write. *) - - mutable offset : int ; - (** The offset of the next byte to be written in [buffer]. *) - - mutable allowed_bytes : int option ; - (** Maximum number of bytes that are allowed to be write in [buffer] + mutable buffer : MBytes.t; (** The buffer where to write. *) + mutable offset : int; + (** The offset of the next byte to be written in [buffer]. *) + mutable allowed_bytes : int option + (** Maximum number of bytes that are allowed to be write in [buffer] (after [offset]) before to fail (None = illimited). *) - } let check_allowed_bytes state size = match state.allowed_bytes with - | Some len when len < size -> raise Size_limit_exceeded - | Some len -> state.allowed_bytes <- Some (len - size) - | None -> () + | Some len when len < size -> + raise Size_limit_exceeded + | Some len -> + state.allowed_bytes <- Some (len - size) + | None -> + () (** [may_resize state size] will first ensure there is enough space in [state.buffer] for writing [size] bytes (starting at @@ -61,30 +59,30 @@ let check_allowed_bytes state size = let may_resize state size = check_allowed_bytes state size ; let buffer_len = MBytes.length state.buffer in - if buffer_len - state.offset < size then begin + if buffer_len - state.offset < size then ( let new_buffer = - MBytes.create (max (2 * buffer_len) (buffer_len + size)) in + MBytes.create (max (2 * buffer_len) (buffer_len + size)) + in MBytes.blit state.buffer 0 new_buffer 0 state.offset ; - state.buffer <- new_buffer - end ; + state.buffer <- new_buffer ) ; state.offset <- state.offset + size (** Writer for all the atomic types. 
*) module Atom = struct - let check_int_range min v max = - if (v < min || max < v) then - raise (Invalid_int { min ; v ; max }) + if v < min || max < v then raise (Invalid_int {min; v; max}) let check_float_range min v max = - if (v < min || max < v) then - raise (Invalid_float { min ; v ; max }) + if v < min || max < v then raise (Invalid_float {min; v; max}) let set_int kind buffer ofs v = match kind with - | `Int31 | `Uint30 -> MBytes.set_int32 buffer ofs (Int32.of_int v) - | `Int16 | `Uint16 -> MBytes.set_int16 buffer ofs v - | `Int8 | `Uint8 -> MBytes.set_int8 buffer ofs v + | `Int31 | `Uint30 -> + MBytes.set_int32 buffer ofs (Int32.of_int v) + | `Int16 | `Uint16 -> + MBytes.set_int16 buffer ofs v + | `Int8 | `Uint8 -> + MBytes.set_int8 buffer ofs v let int kind state v = check_int_range (Binary_size.min_int kind) v (Binary_size.max_int kind) ; @@ -93,10 +91,15 @@ module Atom = struct set_int kind state.buffer ofs v let int8 = int `Int8 + let uint8 = int `Uint8 + let int16 = int `Int16 + let uint16 = int `Uint16 + let uint30 = int `Uint30 + let int31 = int `Int31 let bool state v = uint8 state (if v then 255 else 0) @@ -115,17 +118,22 @@ module Atom = struct check_int_range minimum v maximum ; let v = if minimum >= 0 then v - minimum else v in match Binary_size.range_to_size ~minimum ~maximum with - | `Uint8 -> uint8 state v - | `Uint16 -> uint16 state v - | `Uint30 -> uint30 state v - | `Int8 -> int8 state v - | `Int16 -> int16 state v - | `Int31 -> int31 state v + | `Uint8 -> + uint8 state v + | `Uint16 -> + uint16 state v + | `Uint30 -> + uint30 state v + | `Int8 -> + int8 state v + | `Int16 -> + int16 state v + | `Int31 -> + int31 state v let n state v = - if (Z.sign v < 0) then raise Invalid_natural ; - if Z.equal v Z.zero then - uint8 state 0x00 + if Z.sign v < 0 then raise Invalid_natural ; + if Z.equal v Z.zero then uint8 state 0x00 else let bits = Z.numbits v in let get_chunk pos len = Z.to_int (Z.extract v pos len) in @@ -135,32 +143,35 @@ module Atom = struct for i = 0 to length - 1 do let pos = i * 7 in let chunk_len = if i = length - 1 then bits - pos else 7 in - MBytes.set_int8 state.buffer (offset + i) - ((if i = length - 1 then 0x00 else 0x80) - lor (get_chunk pos chunk_len)) + MBytes.set_int8 + state.buffer + (offset + i) + ((if i = length - 1 then 0x00 else 0x80) lor get_chunk pos chunk_len) done let z state v = let sign = Z.sign v < 0 in let bits = Z.numbits v in - if Z.equal v Z.zero then - uint8 state 0x00 + if Z.equal v Z.zero then uint8 state 0x00 else let v = Z.abs v in let get_chunk pos len = Z.to_int (Z.extract v pos len) in let length = Binary_length.z_length v in let offset = state.offset in may_resize state length ; - MBytes.set_int8 state.buffer offset - ((if sign then 0x40 else 0x00) - lor (if bits > 6 then 0x80 else 0x00) - lor (get_chunk 0 6)) ; + MBytes.set_int8 + state.buffer + offset + ( (if sign then 0x40 else 0x00) + lor (if bits > 6 then 0x80 else 0x00) + lor get_chunk 0 6 ) ; for i = 1 to length - 1 do - let pos = 6 + (i - 1) * 7 in + let pos = 6 + ((i - 1) * 7) in let chunk_len = if i = length - 1 then bits - pos else 7 in - MBytes.set_int8 state.buffer (offset + i) - ((if i = length - 1 then 0x00 else 0x80) - lor (get_chunk pos chunk_len)) + MBytes.set_int8 + state.buffer + (offset + i) + ((if i = length - 1 then 0x00 else 0x80) lor get_chunk pos chunk_len) done let float state v = @@ -174,183 +185,205 @@ module Atom = struct let string_enum tbl arr state v = let value = - try snd (Hashtbl.find tbl v) - with Not_found -> raise No_case_matched 
in + try snd (Hashtbl.find tbl v) with Not_found -> raise No_case_matched + in match Binary_size.enum_size arr with - | `Uint30 -> uint30 state value - | `Uint16 -> uint16 state value - | `Uint8 -> uint8 state value + | `Uint30 -> + uint30 state value + | `Uint16 -> + uint16 state value + | `Uint8 -> + uint8 state value let fixed_kind_bytes length state s = if MBytes.length s <> length then - raise (Invalid_bytes_length { expected = length ; - found = MBytes.length s }) ; + raise (Invalid_bytes_length {expected = length; found = MBytes.length s}) ; let ofs = state.offset in may_resize state length ; MBytes.blit s 0 state.buffer ofs length let fixed_kind_string length state s = if String.length s <> length then - raise (Invalid_string_length { expected = length ; - found = String.length s }) ; + raise + (Invalid_string_length {expected = length; found = String.length s}) ; let ofs = state.offset in may_resize state length ; MBytes.blit_of_string s 0 state.buffer ofs length - let tag = function - | `Uint8 -> uint8 - | `Uint16 -> uint16 - + let tag = function `Uint8 -> uint8 | `Uint16 -> uint16 end (** Main recursive writing function. *) let rec write_rec : type a. a Encoding.t -> state -> a -> unit = - fun e state value -> - let open Encoding in - match e.encoding with - | Null -> () - | Empty -> () - | Constant _ -> () - | Ignore -> () - | Bool -> Atom.bool state value - | Int8 -> Atom.int8 state value - | Uint8 -> Atom.uint8 state value - | Int16 -> Atom.int16 state value - | Uint16 -> Atom.uint16 state value - | Int31 -> Atom.int31 state value - | Int32 -> Atom.int32 state value - | Int64 -> Atom.int64 state value - | N -> Atom.n state value - | Z -> Atom.z state value - | Float -> Atom.float state value - | Bytes (`Fixed n) -> Atom.fixed_kind_bytes n state value - | Bytes `Variable -> - let length = MBytes.length value in - Atom.fixed_kind_bytes length state value - | String (`Fixed n) -> Atom.fixed_kind_string n state value - | String `Variable -> - let length = String.length value in - Atom.fixed_kind_string length state value - | Padded (e, n) -> - write_rec e state value ; - Atom.fixed_kind_string n state (String.make n '\000') - | RangedInt { minimum ; maximum } -> - Atom.ranged_int ~minimum ~maximum state value - | RangedFloat { minimum ; maximum } -> - Atom.ranged_float ~minimum ~maximum state value - | String_enum (tbl, arr) -> - Atom.string_enum tbl arr state value - | Array (Some max_length, _e) when Array.length value > max_length -> - raise Array_too_long - | Array (_, e) -> - Array.iter (write_rec e state) value - | List (Some max_length, _e) when List.length value > max_length -> - raise List_too_long - | List (_, e) -> - List.iter (write_rec e state) value - | Obj (Req { encoding = e ; _ }) -> write_rec e state value - | Obj (Opt { kind = `Dynamic ; encoding = e ; _ }) -> begin - match value with - | None -> Atom.bool state false - | Some value -> Atom.bool state true ; write_rec e state value - end - | Obj (Opt { kind = `Variable ; encoding = e ; _ }) -> begin - match value with - | None -> () - | Some value -> write_rec e state value - end - | Obj (Dft { encoding = e ; _ }) -> write_rec e state value - | Objs { left ; right ; _ } -> - let (v1, v2) = value in - write_rec left state v1 ; - write_rec right state v2 - | Tup e -> write_rec e state value - | Tups { left ; right ; _ } -> - let (v1, v2) = value in - write_rec left state v1 ; - write_rec right state v2 - | Conv { encoding = e ; proj ; _ } -> - write_rec e state (proj value) - | Union { tag_size ; cases ; _ } -> - 
let rec write_case = function - | [] -> raise No_case_matched - | Case { tag = Json_only ; _ } :: tl -> write_case tl - | Case { encoding = e ; proj ; tag = Tag tag ; _ } :: tl -> - match proj value with - | None -> write_case tl - | Some value -> - Atom.tag tag_size state tag ; - write_rec e state value in - write_case cases - | Dynamic_size { kind ; encoding = e } -> - let initial_offset = state.offset in - Atom.int kind state 0 ; (* place holder for [size] *) - write_with_limit (Binary_size.max_int kind) e state value ; - (* patch the written [size] *) - Atom.set_int kind - state.buffer - initial_offset - (state.offset - initial_offset - Binary_size.integer_to_size kind) - | Check_size { limit ; encoding = e } -> - write_with_limit limit e state value - | Describe { encoding = e ; _ } -> write_rec e state value - | Splitted { encoding = e ; _ } -> write_rec e state value - | Mu { fix ; _ } -> write_rec (fix e) state value - | Delayed f -> write_rec (f ()) state value + fun e state value -> + let open Encoding in + match e.encoding with + | Null -> + () + | Empty -> + () + | Constant _ -> + () + | Ignore -> + () + | Bool -> + Atom.bool state value + | Int8 -> + Atom.int8 state value + | Uint8 -> + Atom.uint8 state value + | Int16 -> + Atom.int16 state value + | Uint16 -> + Atom.uint16 state value + | Int31 -> + Atom.int31 state value + | Int32 -> + Atom.int32 state value + | Int64 -> + Atom.int64 state value + | N -> + Atom.n state value + | Z -> + Atom.z state value + | Float -> + Atom.float state value + | Bytes (`Fixed n) -> + Atom.fixed_kind_bytes n state value + | Bytes `Variable -> + let length = MBytes.length value in + Atom.fixed_kind_bytes length state value + | String (`Fixed n) -> + Atom.fixed_kind_string n state value + | String `Variable -> + let length = String.length value in + Atom.fixed_kind_string length state value + | Padded (e, n) -> + write_rec e state value ; + Atom.fixed_kind_string n state (String.make n '\000') + | RangedInt {minimum; maximum} -> + Atom.ranged_int ~minimum ~maximum state value + | RangedFloat {minimum; maximum} -> + Atom.ranged_float ~minimum ~maximum state value + | String_enum (tbl, arr) -> + Atom.string_enum tbl arr state value + | Array (Some max_length, _e) when Array.length value > max_length -> + raise Array_too_long + | Array (_, e) -> + Array.iter (write_rec e state) value + | List (Some max_length, _e) when List.length value > max_length -> + raise List_too_long + | List (_, e) -> + List.iter (write_rec e state) value + | Obj (Req {encoding = e; _}) -> + write_rec e state value + | Obj (Opt {kind = `Dynamic; encoding = e; _}) -> ( + match value with + | None -> + Atom.bool state false + | Some value -> + Atom.bool state true ; write_rec e state value ) + | Obj (Opt {kind = `Variable; encoding = e; _}) -> ( + match value with None -> () | Some value -> write_rec e state value ) + | Obj (Dft {encoding = e; _}) -> + write_rec e state value + | Objs {left; right; _} -> + let (v1, v2) = value in + write_rec left state v1 ; write_rec right state v2 + | Tup e -> + write_rec e state value + | Tups {left; right; _} -> + let (v1, v2) = value in + write_rec left state v1 ; write_rec right state v2 + | Conv {encoding = e; proj; _} -> + write_rec e state (proj value) + | Union {tag_size; cases; _} -> + let rec write_case = function + | [] -> + raise No_case_matched + | Case {tag = Json_only; _} :: tl -> + write_case tl + | Case {encoding = e; proj; tag = Tag tag; _} :: tl -> ( + match proj value with + | None -> + write_case tl + | Some value -> + 
Atom.tag tag_size state tag ; + write_rec e state value ) + in + write_case cases + | Dynamic_size {kind; encoding = e} -> + let initial_offset = state.offset in + Atom.int kind state 0 ; + (* place holder for [size] *) + write_with_limit (Binary_size.max_int kind) e state value ; + (* patch the written [size] *) + Atom.set_int + kind + state.buffer + initial_offset + (state.offset - initial_offset - Binary_size.integer_to_size kind) + | Check_size {limit; encoding = e} -> + write_with_limit limit e state value + | Describe {encoding = e; _} -> + write_rec e state value + | Splitted {encoding = e; _} -> + write_rec e state value + | Mu {fix; _} -> + write_rec (fix e) state value + | Delayed f -> + write_rec (f ()) state value and write_with_limit : type a. int -> a Encoding.t -> state -> a -> unit = - fun limit e state value -> - (* backup the current limit *) - let old_limit = state.allowed_bytes in - (* install the new limit (only if smaller than the current limit) *) - let limit = - match state.allowed_bytes with - | None -> limit - | Some old_limit -> min old_limit limit in - state.allowed_bytes <- Some limit ; - write_rec e state value ; - (* restore the previous limit (minus the read bytes) *) - match old_limit with + fun limit e state value -> + (* backup the current limit *) + let old_limit = state.allowed_bytes in + (* install the new limit (only if smaller than the current limit) *) + let limit = + match state.allowed_bytes with | None -> - state.allowed_bytes <- None + limit | Some old_limit -> - let remaining = - match state.allowed_bytes with - | None -> assert false - | Some len -> len in - let read = limit - remaining in - state.allowed_bytes <- Some (old_limit - read) - + min old_limit limit + in + state.allowed_bytes <- Some limit ; + write_rec e state value ; + (* restore the previous limit (minus the read bytes) *) + match old_limit with + | None -> + state.allowed_bytes <- None + | Some old_limit -> + let remaining = + match state.allowed_bytes with None -> assert false | Some len -> len + in + let read = limit - remaining in + state.allowed_bytes <- Some (old_limit - read) (** ******************** *) + (** Various entry points *) let write e v buffer offset len = (* By harcoding [allowed_bytes] with the buffer length, we ensure that [write] will never reallocate the buffer. *) - let state = { buffer ; offset ; allowed_bytes = Some len } in - try - write_rec e state v ; - Some state.offset - with Write_error _ -> None + let state = {buffer; offset; allowed_bytes = Some len} in + try write_rec e state v ; Some state.offset with Write_error _ -> None let to_bytes_exn e v = match Encoding.classify e with - | `Fixed n -> begin + | `Fixed n -> (* Preallocate the complete buffer *) - let state = { buffer = MBytes.create n ; - offset = 0 ; allowed_bytes = Some n } in - write_rec e state v ; - state.buffer - end + let state = + {buffer = MBytes.create n; offset = 0; allowed_bytes = Some n} + in + write_rec e state v ; state.buffer | `Dynamic | `Variable -> (* Preallocate a minimal buffer and let's not hardcode a limit to its extension. 
*) - let state = { buffer = MBytes.create 4096 ; - offset = 0 ; allowed_bytes = None } in + let state = + {buffer = MBytes.create 4096; offset = 0; allowed_bytes = None} + in write_rec e state v ; MBytes.sub state.buffer 0 state.offset -let to_bytes e v = - try Some (to_bytes_exn e v) - with Write_error _ -> None +let to_bytes e v = try Some (to_bytes_exn e v) with Write_error _ -> None diff --git a/src/lib_data_encoding/binary_writer.mli b/src/lib_data_encoding/binary_writer.mli index 772b7b418b515ec57c2ef7323250583d19a90c38..dc5c53bd33e97dd8db6371463add8a9498eb55e8 100644 --- a/src/lib_data_encoding/binary_writer.mli +++ b/src/lib_data_encoding/binary_writer.mli @@ -27,5 +27,7 @@ use the corresponding module intended for use: {!Data_encoding.Binary}. *) val write : 'a Encoding.t -> 'a -> MBytes.t -> int -> int -> int option + val to_bytes_exn : 'a Encoding.t -> 'a -> MBytes.t + val to_bytes : 'a Encoding.t -> 'a -> MBytes.t option diff --git a/src/lib_data_encoding/bson.ml b/src/lib_data_encoding/bson.ml index 1fc017dc120ccb9d23657978dd1dd66ac92b79f7..17d4169816f01733109484e137809c1e602183e1 100644 --- a/src/lib_data_encoding/bson.ml +++ b/src/lib_data_encoding/bson.ml @@ -24,7 +24,9 @@ (*****************************************************************************) type bson = Json_repr_bson.bson + type t = bson let construct e v = Json_repr_bson.Json_encoding.construct (Json.convert e) v + let destruct e v = Json_repr_bson.Json_encoding.destruct (Json.convert e) v diff --git a/src/lib_data_encoding/bson.mli b/src/lib_data_encoding/bson.mli index c8ffcf41cdecb57a8a6d22192d71e1cf418ce8a8..f669ede2f89c2f13fbbdddb9c32349e848fdcdb4 100644 --- a/src/lib_data_encoding/bson.mli +++ b/src/lib_data_encoding/bson.mli @@ -27,6 +27,9 @@ use the corresponding module intended for use: {!Data_encoding.Bson}. 
*) type bson = Json_repr_bson.bson + type t = bson + val construct : 't Encoding.t -> 't -> bson + val destruct : 't Encoding.t -> bson -> 't diff --git a/src/lib_data_encoding/data_encoding.ml b/src/lib_data_encoding/data_encoding.ml index 6c1741734229c0768943ff2b8fcc5078ffd863ac..e735119aa1f4f97dd0effe42b5c2e3ed08067a9f 100644 --- a/src/lib_data_encoding/data_encoding.ml +++ b/src/lib_data_encoding/data_encoding.ml @@ -23,181 +23,192 @@ (* *) (*****************************************************************************) -module Encoding = -struct +module Encoding = struct include Encoding + let splitted ~json ~binary = raw_splitted ~json:(Json.convert json) ~binary + let assoc enc = let json = Json_encoding.assoc (Json.convert enc) in let binary = list (tup2 string enc) in raw_splitted ~json ~binary module Bounded = struct - let string length = raw_splitted - ~binary: begin - let kind = Binary_size.unsigned_range_to_size length in - check_size (length + Binary_size.integer_to_size kind) @@ - dynamic_size ~kind Variable.string - end - ~json: begin - let open Json_encoding in + ~binary: + (let kind = Binary_size.unsigned_range_to_size length in + check_size (length + Binary_size.integer_to_size kind) + @@ dynamic_size ~kind Variable.string) + ~json: + (let open Json_encoding in conv (fun s -> - if String.length s > length then invalid_arg "oversized string" ; - s) + if String.length s > length then invalid_arg "oversized string" ; + s) (fun s -> - if String.length s > length then - raise (Cannot_destruct ([], Invalid_argument "oversized string")) ; - s) - string - end + if String.length s > length then + raise + (Cannot_destruct ([], Invalid_argument "oversized string")) ; + s) + string) let bytes length = raw_splitted - ~binary: begin - let kind = Binary_size.unsigned_range_to_size length in - check_size (length + Binary_size.integer_to_size kind) @@ - dynamic_size ~kind Variable.bytes - end - ~json: begin - let open Json_encoding in + ~binary: + (let kind = Binary_size.unsigned_range_to_size length in + check_size (length + Binary_size.integer_to_size kind) + @@ dynamic_size ~kind Variable.bytes) + ~json: + (let open Json_encoding in conv (fun s -> - if MBytes.length s > length then invalid_arg "oversized string" ; - s) + if MBytes.length s > length then invalid_arg "oversized string" ; + s) (fun s -> - if MBytes.length s > length then - raise (Cannot_destruct ([], Invalid_argument "oversized string")) ; - s) - Json.bytes_jsont - end - + if MBytes.length s > length then + raise + (Cannot_destruct ([], Invalid_argument "oversized string")) ; + s) + Json.bytes_jsont) end type 'a lazy_state = | Value of 'a | Bytes of MBytes.t | Both of MBytes.t * 'a - type 'a lazy_t = - { mutable state : 'a lazy_state ; - encoding : 'a t } + + type 'a lazy_t = {mutable state : 'a lazy_state; encoding : 'a t} + let force_decode le = match le.state with - | Value value -> Some value - | Both (_, value) -> Some value - | Bytes bytes -> - match Binary_reader.of_bytes le.encoding bytes with - | Some expr -> le.state <- Both (bytes, expr) ; Some expr - | None -> None + | Value value -> + Some value + | Both (_, value) -> + Some value + | Bytes bytes -> ( + match Binary_reader.of_bytes le.encoding bytes with + | Some expr -> + le.state <- Both (bytes, expr) ; + Some expr + | None -> + None ) + let force_bytes le = match le.state with - | Bytes bytes -> bytes - | Both (bytes, _) -> bytes + | Bytes bytes -> + bytes + | Both (bytes, _) -> + bytes | Value value -> let bytes = Binary_writer.to_bytes_exn le.encoding 
value in le.state <- Both (bytes, value) ; bytes + let lazy_encoding encoding = let binary = Encoding.conv force_bytes - (fun bytes -> { state = Bytes bytes ; encoding }) - Encoding.bytes in + (fun bytes -> {state = Bytes bytes; encoding}) + Encoding.bytes + in let json = Encoding.conv (fun le -> - match force_decode le with - | Some r -> r - | None -> raise Exit) - (fun value -> { state = Value value ; encoding }) - encoding in + match force_decode le with Some r -> r | None -> raise Exit) + (fun value -> {state = Value value; encoding}) + encoding + in splitted ~json ~binary - let make_lazy encoding value = - { encoding ; state = Value value } + + let make_lazy encoding value = {encoding; state = Value value} + let apply_lazy ~fun_value ~fun_bytes ~fun_combine le = match le.state with - | Value value -> fun_value value - | Bytes bytes -> fun_bytes bytes - | Both (bytes, value) -> fun_combine (fun_value value) (fun_bytes bytes) + | Value value -> + fun_value value + | Bytes bytes -> + fun_bytes bytes + | Both (bytes, value) -> + fun_combine (fun_value value) (fun_bytes bytes) module With_version = struct - let version_case enc choose wrap name nth = case ~title:(Printf.sprintf "%s version %d" name nth) Json_only (obj1 (req (Printf.sprintf "%s.v%d" name nth) enc)) - choose wrap + choose + wrap let make_encoding ~name l = - union ~tag_size: `Uint8 (List.mapi (fun nth f -> f name nth) l) + union ~tag_size:`Uint8 (List.mapi (fun nth f -> f name nth) l) type _ t = | Version_0 : 'v0 encoding -> 'v0 t - | Version_S : { - previous: 'vn t ; - encoding: 'vnp1 encoding ; - upgrade: 'vn -> 'vnp1 - } -> 'vnp1 t + | Version_S : + { previous : 'vn t; + encoding : 'vnp1 encoding; + upgrade : 'vn -> 'vnp1 } + -> 'vnp1 t let first_version e = Version_0 e let next_version encoding upgrade previous = - Version_S { encoding ; upgrade ; previous } - - let encoding : type a. name : string -> a t -> a encoding = - fun ~name version -> - match version with - | Version_0 e -> - make_encoding ~name - [ version_case e (fun x -> Some x) (fun x -> x) ] - | Version_S { previous ; encoding ; upgrade } -> - let rec mk_nones : - (* This function generates encoding cases for all the + Version_S {encoding; upgrade; previous} + + let encoding : type a. name:string -> a t -> a encoding = + fun ~name version -> + match version with + | Version_0 e -> + make_encoding ~name [version_case e (fun x -> Some x) (fun x -> x)] + | Version_S {previous; encoding; upgrade} -> + let rec mk_nones : + type (* This function generates encoding cases for all the outdated versions. These versions are never encoded to (hence [fun _ -> None]) but are safely decoded with the use of the upgrade functions. *) - type b. (b -> a) -> b t -> (string -> int -> a case) list = - fun upgr -> function - | Version_0 e -> - [ version_case e (fun _ -> None) (fun x -> upgr x) ] - | Version_S { previous ; encoding ; upgrade } -> - let others = - mk_nones (fun x -> upgr (upgrade x)) previous in - version_case encoding (fun _ -> None) (fun x -> upgr x) - :: others - in - let nones = mk_nones upgrade previous in - let cases = - version_case encoding (fun x -> Some x) (fun x -> x) :: nones - |> List.rev - in - make_encoding ~name cases + b. 
+ (b -> a) -> b t -> (string -> int -> a case) list = + fun upgr -> function + | Version_0 e -> + [version_case e (fun _ -> None) (fun x -> upgr x)] + | Version_S {previous; encoding; upgrade} -> + let others = mk_nones (fun x -> upgr (upgrade x)) previous in + version_case encoding (fun _ -> None) (fun x -> upgr x) + :: others + in + let nones = mk_nones upgrade previous in + let cases = + version_case encoding (fun x -> Some x) (fun x -> x) :: nones + |> List.rev + in + make_encoding ~name cases end - end include Encoding - - - module Json = Json module Bson = Bson module Binary_schema = Binary_schema + module Binary = struct include Binary_error include Binary_length include Binary_writer include Binary_reader include Binary_stream_reader + let describe = Binary_description.describe end type json = Json.t + let json = Json.encoding + type json_schema = Json.schema + let json_schema = Json.schema_encoding + type bson = Bson.t diff --git a/src/lib_data_encoding/data_encoding.mli b/src/lib_data_encoding/data_encoding.mli index eaae8cff15df669f31806249bf13f0a9ed370919..f454132fbbd1fa89e7f6d1c1c7f89bfdffc223a9 100644 --- a/src/lib_data_encoding/data_encoding.mli +++ b/src/lib_data_encoding/data_encoding.mli @@ -75,10 +75,10 @@ *) -module Encoding: sig - +module Encoding : sig (** The type descriptors for values of type ['a]. *) type 'a t + type 'a encoding = 'a t (** {3 Ground descriptors} *) @@ -209,9 +209,11 @@ module Encoding: sig A schema may optionally be provided as documentation of the new encoding. *) val conv : - ('a -> 'b) -> ('b -> 'a) -> + ('a -> 'b) -> + ('b -> 'a) -> ?schema:Json_schema.schema -> - 'b encoding -> 'a encoding + 'b encoding -> + 'a encoding (** Association list. An object in JSON, a list of pairs in binary. *) @@ -228,29 +230,38 @@ module Encoding: sig (** Required field. *) val req : - ?title:string -> ?description:string -> - string -> 't encoding -> 't field + ?title:string -> ?description:string -> string -> 't encoding -> 't field (** Optional field. Omitted entirely in JSON encoding if None. Omitted in binary if the only optional field in a [`Variable] encoding, otherwise a 1-byte prefix (`0` or `255`) tells if the field is present or not. *) val opt : - ?title:string -> ?description:string -> - string -> 't encoding -> 't option field + ?title:string -> + ?description:string -> + string -> + 't encoding -> + 't option field (** Optional field of variable length. Only one can be present in a given object. *) val varopt : - ?title:string -> ?description:string -> - string -> 't encoding -> 't option field + ?title:string -> + ?description:string -> + string -> + 't encoding -> + 't option field (** Required field with a default value. If the default value is passed, the field is omitted in JSON. The value is always serialized in binary. *) val dft : - ?title:string -> ?description:string -> - string -> 't encoding -> 't -> 't field + ?title:string -> + ?description:string -> + string -> + 't encoding -> + 't -> + 't field (** {4 Constructors for objects with N fields} *) @@ -264,37 +275,80 @@ module Encoding: sig @raise Invalid_argument if more than one field is a variable one. 
*) - val obj1 : - 'f1 field -> 'f1 encoding - val obj2 : - 'f1 field -> 'f2 field -> ('f1 * 'f2) encoding - val obj3 : - 'f1 field -> 'f2 field -> 'f3 field -> ('f1 * 'f2 * 'f3) encoding + val obj1 : 'f1 field -> 'f1 encoding + + val obj2 : 'f1 field -> 'f2 field -> ('f1 * 'f2) encoding + + val obj3 : 'f1 field -> 'f2 field -> 'f3 field -> ('f1 * 'f2 * 'f3) encoding + val obj4 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> ('f1 * 'f2 * 'f3 * 'f4) encoding + val obj5 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5) encoding + val obj6 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> 'f6 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6) encoding + val obj7 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7) encoding + val obj8 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> 'f8 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> + 'f8 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8) encoding + val obj9 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> 'f8 field -> 'f9 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> + 'f8 field -> + 'f9 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9) encoding + val obj10 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> 'f8 field -> 'f9 field -> 'f10 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> + 'f8 field -> + 'f9 field -> + 'f10 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9 * 'f10) encoding (** Create a larger object from the encodings of two smaller ones. @@ -314,46 +368,83 @@ module Encoding: sig @raise Invalid_argument if more than one field is a variable one. 
*) - val tup1 : - 'f1 encoding -> - 'f1 encoding - val tup2 : - 'f1 encoding -> 'f2 encoding -> - ('f1 * 'f2) encoding + val tup1 : 'f1 encoding -> 'f1 encoding + + val tup2 : 'f1 encoding -> 'f2 encoding -> ('f1 * 'f2) encoding + val tup3 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> - ('f1 * 'f2 * 'f3) encoding + 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> ('f1 * 'f2 * 'f3) encoding + val tup4 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> ('f1 * 'f2 * 'f3 * 'f4) encoding + val tup5 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> 'f5 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5) encoding + val tup6 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6) encoding + val tup7 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7) encoding + val tup8 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> 'f8 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> + 'f8 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8) encoding + val tup9 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> 'f8 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> + 'f8 encoding -> 'f9 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9) encoding + val tup10 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> 'f8 encoding -> - 'f9 encoding -> 'f10 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> + 'f8 encoding -> + 'f9 encoding -> + 'f10 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9 * 'f10) encoding - (** Create a large tuple encoding from two smaller ones. @raise Invalid_argument if both values are not tuples or if both tuples contains a variable field. *) @@ -366,6 +457,7 @@ module Encoding: sig case, providing its encoder, and converter functions to and from the union type. *) type 't case + type case_tag = Tag of int | Json_only (** Encodes a variant constructor. Takes the encoding for the specific @@ -383,7 +475,10 @@ module Encoding: sig title:string -> ?description:string -> case_tag -> - 'a encoding -> ('t -> 'a option) -> ('a -> 't) -> 't case + 'a encoding -> + ('t -> 'a option) -> + ('a -> 't) -> + 't case (** Create a single encoding from a series of cases. @@ -396,12 +491,10 @@ module Encoding: sig @raise Invalid_argument if it is given the empty list or if there are more cases than can fit in the tag size. 
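A sketch of the [case]/[union] combinators documented above, for a plain variant type; each constructor gets its own binary tag:

open Data_encoding

type shape = Circle of float | Rectangle of float * float

let shape_encoding : shape encoding =
  union
    ~tag_size:`Uint8
    [ case
        (Tag 0)
        ~title:"Circle"
        (obj1 (req "radius" float))
        (function Circle r -> Some r | _ -> None)
        (fun r -> Circle r);
      case
        (Tag 1)
        ~title:"Rectangle"
        (obj2 (req "width" float) (req "height" float))
        (function Rectangle (w, h) -> Some (w, h) | _ -> None)
        (fun (w, h) -> Rectangle (w, h)) ]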
*) - val union : - ?tag_size:[ `Uint8 | `Uint16 ] -> 't case list -> 't encoding + val union : ?tag_size:[`Uint8 | `Uint16] -> 't case list -> 't encoding (** {3 Predicates over descriptors} *) - (** Is the given encoding serialized as a JSON object? *) val is_obj : 'a encoding -> bool @@ -410,7 +503,7 @@ module Encoding: sig (** Classify the binary serialization of an encoding as explained in the preamble. *) - val classify : 'a encoding -> [ `Fixed of int | `Dynamic | `Variable ] + val classify : 'a encoding -> [`Fixed of int | `Dynamic | `Variable] (** {3 Specialized descriptors} *) @@ -423,7 +516,6 @@ module Encoding: sig (** Create encodings that produce data of a fixed length when binary encoded. See the preamble for an explanation. *) module Fixed : sig - (** @raise Invalid_argument if the argument is less or equal to zero. *) val string : int -> string encoding @@ -441,8 +533,8 @@ module Encoding: sig (** Create encodings that produce data of a variable length when binary encoded. See the preamble for an explanation. *) module Variable : sig - val string : string encoding + val bytes : MBytes.t encoding (** @raise Invalid_argument if the encoding argument is variable length @@ -452,7 +544,6 @@ module Encoding: sig (** @raise Invalid_argument if the encoding argument is variable length or may lead to zero-width representation in binary. *) val list : ?max_length:int -> 'a encoding -> 'a list encoding - end module Bounded : sig @@ -474,8 +565,7 @@ module Encoding: sig Typically used to combine two variable encodings in a same objects or tuple, or to use a variable encoding in an array or a list. *) val dynamic_size : - ?kind: [ `Uint30 | `Uint16 | `Uint8 ] -> - 'a encoding -> 'a encoding + ?kind:[`Uint30 | `Uint16 | `Uint8] -> 'a encoding -> 'a encoding (** [check_size size encoding] ensures that the binary encoding of a value will not be allowed to exceed [size] bytes. The reader @@ -494,9 +584,10 @@ module Encoding: sig (** Combinator for recursive encodings. *) val mu : string -> - ?title: string -> - ?description: string -> - ('a encoding -> 'a encoding) -> 'a encoding + ?title:string -> + ?description:string -> + ('a encoding -> 'a encoding) -> + 'a encoding (** {3 Documenting descriptors} *) @@ -504,8 +595,10 @@ module Encoding: sig add documentation to an encoding. *) val def : string -> - ?title:string -> ?description:string -> - 't encoding ->'t encoding + ?title:string -> + ?description:string -> + 't encoding -> + 't encoding (** See {!lazy_encoding} below.*) type 'a lazy_t @@ -528,8 +621,11 @@ module Encoding: sig (** Apply on structure of lazy value, and combine results *) val apply_lazy : - fun_value:('a -> 'b) -> fun_bytes:(MBytes.t -> 'b) -> fun_combine:('b -> 'b -> 'b) -> - 'a lazy_t -> 'b + fun_value:('a -> 'b) -> + fun_bytes:(MBytes.t -> 'b) -> + fun_combine:('b -> 'b -> 'b) -> + 'a lazy_t -> + 'b (** Create a {!Data_encoding.t} value which records knowledge of older versions of a given encoding as long as one can "upgrade" @@ -539,8 +635,7 @@ module Encoding: sig See the module [Documented_example] in ["./test/versioned.ml"] for a tutorial. *) - module With_version: sig - + module With_version : sig (** An encapsulation of consecutive encoding versions. *) type _ t @@ -555,14 +650,13 @@ module Encoding: sig (** Make an encoding from an encapsulation of versions; the argument [~name] is used to prefix the version "tag" in the encoding, it should not change from one version to the next. 
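The [mu] combinator declared above handles recursive encodings; a sketch for a binary tree, where the fixpoint argument stands for the encoding being defined:

open Data_encoding

type tree = Leaf | Node of tree * int * tree

let tree_encoding : tree encoding =
  mu "tree" (fun tree_encoding ->
      union
        [ case
            (Tag 0)
            ~title:"Leaf"
            empty
            (function Leaf -> Some () | _ -> None)
            (fun () -> Leaf);
          case
            (Tag 1)
            ~title:"Node"
            (obj3
               (req "left" tree_encoding)
               (req "value" int31)
               (req "right" tree_encoding))
            (function Node (l, v, r) -> Some (l, v, r) | _ -> None)
            (fun (l, v, r) -> Node (l, v, r)) ])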
*) - val encoding : name: string -> 'a t -> 'a encoding + val encoding : name:string -> 'a t -> 'a encoding end end include module type of Encoding with type 'a t = 'a Encoding.t -module Json: sig - +module Json : sig (** In memory JSON data, compatible with [Ezjsonm]. *) type json = [ `O of (string * json) list @@ -571,9 +665,10 @@ module Json: sig | `A of json list | `Null | `String of string ] + type t = json - type schema = Json_schema.schema + type schema = Json_schema.schema (** Encodes raw JSON data (BSON is used for binary). *) val encoding : json Encoding.t @@ -600,15 +695,10 @@ module Json: sig (** A set of accessors that point to a location in a JSON object. *) and path_item = - [ `Field of string - (** A field in an object. *) - | `Index of int - (** An index in an array. *) - | `Star - (** Any / every field or index. *) - | `Next - (** The next element after an array. *) - ] + [ `Field of string (** A field in an object. *) + | `Index of int (** An index in an array. *) + | `Star (** Any / every field or index. *) + | `Next (** The next element after an array. *) ] (** Exception raised by destructors, with the location in the original JSON structure and the specific error. *) @@ -630,11 +720,14 @@ module Json: sig exception Unexpected_field of string val print_error : - ?print_unknown: (Format.formatter -> exn -> unit) -> - Format.formatter -> exn -> unit + ?print_unknown:(Format.formatter -> exn -> unit) -> + Format.formatter -> + exn -> + unit (** Helpers for writing encoders. *) val cannot_destruct : ('a, Format.formatter, unit, 'b) format4 -> 'a + val wrap_error : ('a -> 'b) -> 'a -> 'b (** Read a JSON document from a string. *) @@ -650,12 +743,11 @@ module Json: sig val to_string : ?newline:bool -> ?minify:bool -> json -> string val pp : Format.formatter -> json -> unit - end -module Bson: sig - +module Bson : sig type bson = Json_repr_bson.bson + type t = bson (** Construct a BSON object from an encoding. *) @@ -664,17 +756,17 @@ module Bson: sig (** Destruct a BSON object into a value. Fail with an exception if the JSON object and encoding do not match.. 
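A sketch of a JSON round trip through this module. [Json.construct] and [Json.destruct] are not shown in this excerpt; they are assumed to mirror the [Bson] module below, i.e. [construct : 't Encoding.t -> 't -> json] and [destruct : 't Encoding.t -> json -> 't]:

open Data_encoding

let () =
  let e = obj1 (req "age" int31) in
  let j = Json.construct e 42 in
  (* j is typically `O [("age", `Float 42.)] *)
  assert (Json.destruct e j = 42) ;
  print_endline (Json.to_string j)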
*) val destruct : 't Encoding.t -> bson -> 't - end module Binary_schema : sig type t - val pp: Format.formatter -> t -> unit - val encoding: t Encoding.t -end -module Binary: sig + val pp : Format.formatter -> t -> unit + val encoding : t Encoding.t +end + +module Binary : sig (** All the errors that might be returned while reading a binary value *) type read_error = | Not_enough_data @@ -682,27 +774,31 @@ module Binary: sig | No_case_matched | Unexpected_tag of int | Invalid_size of int - | Invalid_int of { min : int ; v : int ; max : int } - | Invalid_float of { min : float ; v : float ; max : float } + | Invalid_int of {min : int; v : int; max : int} + | Invalid_float of {min : float; v : float; max : float} | Trailing_zero | Size_limit_exceeded | List_too_long | Array_too_long + exception Read_error of read_error - val pp_read_error: Format.formatter -> read_error -> unit + + val pp_read_error : Format.formatter -> read_error -> unit (** All the errors that might be returned while writing a binary value *) type write_error = | Size_limit_exceeded | No_case_matched - | Invalid_int of { min : int ; v : int ; max : int } - | Invalid_float of { min : float ; v : float ; max : float } - | Invalid_bytes_length of { expected : int ; found : int } - | Invalid_string_length of { expected : int ; found : int } + | Invalid_int of {min : int; v : int; max : int} + | Invalid_float of {min : float; v : float; max : float} + | Invalid_bytes_length of {expected : int; found : int} + | Invalid_string_length of {expected : int; found : int} | Invalid_natural | List_too_long | Array_too_long + val pp_write_error : Format.formatter -> write_error -> unit + exception Write_error of write_error (** Compute the expected length of the binary representation of a value *) @@ -712,23 +808,23 @@ module Binary: sig encoding might produce, only when the size of the representation does not depends of the value itself. *) val fixed_length : 'a Encoding.t -> int option + val fixed_length_exn : 'a Encoding.t -> int (** [read enc buf ofs len] tries to reconstruct a value from the bytes in [buf] starting at offset [ofs] and reading at most [len] bytes. This function also returns the offset of the first unread bytes in the [buf]. *) - val read : 'a Encoding.t -> MBytes.t -> int -> int -> (int * 'a) option + val read : 'a Encoding.t -> MBytes.t -> int -> int -> (int * 'a) option (** Return type for the function [read_stream]. *) type 'ret status = - | Success of { result : 'ret ; size : int ; stream : Binary_stream.t } - (** Fully decoded value, together with the total amount of bytes reads, + | Success of {result : 'ret; size : int; stream : Binary_stream.t} + (** Fully decoded value, together with the total amount of bytes reads, and the remaining unread stream. *) - | Await of (MBytes.t -> 'ret status) - (** Partially decoded value.*) + | Await of (MBytes.t -> 'ret status) (** Partially decoded value.*) | Error of read_error - (** Failure. The stream is garbled and it should be dropped. *) + (** Failure. The stream is garbled and it should be dropped. *) (** Streamed equivalent of [read]. This variant cannot be called on variable-size encodings. 
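The binary layout of an encoding can be rendered with the declarations in this excerpt; a short sketch combining [Binary.describe] with [Binary_schema.pp]:

open Data_encoding

let () =
  let e = obj2 (req "id" int32) (req "name" string) in
  Format.printf "%a@." Binary_schema.pp (Binary.describe e)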
*) @@ -759,11 +855,14 @@ module Binary: sig val to_bytes_exn : 'a Encoding.t -> 'a -> MBytes.t val describe : 'a Encoding.t -> Binary_schema.t - end type json = Json.t -val json: json Encoding.t + +val json : json Encoding.t + type json_schema = Json.schema -val json_schema: json_schema Encoding.t + +val json_schema : json_schema Encoding.t + type bson = Bson.t diff --git a/src/lib_data_encoding/encoding.ml b/src/lib_data_encoding/encoding.ml index 1dec990b44eb1073cd68fb64de6b60542a090937..2d855fb6657f03a1d4b9d252363abec799eb0520 100644 --- a/src/lib_data_encoding/encoding.ml +++ b/src/lib_data_encoding/encoding.ml @@ -24,57 +24,58 @@ (*****************************************************************************) module Kind = struct - - type t = - [ `Fixed of int - | `Dynamic - | `Variable ] - - type length = - [ `Fixed of int - | `Variable ] - - type enum = - [ `Dynamic - | `Variable ] - - let combine name : t -> t -> t = fun k1 k2 -> - match k1, k2 with - | `Fixed n1, `Fixed n2 -> `Fixed (n1 + n2) - | `Dynamic, `Dynamic | `Fixed _, `Dynamic - | `Dynamic, `Fixed _ -> `Dynamic - | `Variable, `Fixed _ - | (`Dynamic | `Fixed _), `Variable -> `Variable - | `Variable, `Dynamic -> - Printf.ksprintf invalid_arg + type t = [`Fixed of int | `Dynamic | `Variable] + + type length = [`Fixed of int | `Variable] + + type enum = [`Dynamic | `Variable] + + let combine name : t -> t -> t = + fun k1 k2 -> + match (k1, k2) with + | (`Fixed n1, `Fixed n2) -> + `Fixed (n1 + n2) + | (`Dynamic, `Dynamic) | (`Fixed _, `Dynamic) | (`Dynamic, `Fixed _) -> + `Dynamic + | (`Variable, `Fixed _) | ((`Dynamic | `Fixed _), `Variable) -> + `Variable + | (`Variable, `Dynamic) -> + Printf.ksprintf + invalid_arg "Cannot merge two %s when the left element is of variable length \ - and the right one of dynamic length. \ - You should use the reverse order, or wrap the second one \ - with Data_encoding.dynamic_size." + and the right one of dynamic length. You should use the reverse \ + order, or wrap the second one with Data_encoding.dynamic_size." name - | `Variable, `Variable -> - Printf.ksprintf invalid_arg - "Cannot merge two %s with variable length. \ - You should wrap one of them with Data_encoding.dynamic_size." + | (`Variable, `Variable) -> + Printf.ksprintf + invalid_arg + "Cannot merge two %s with variable length. You should wrap one of \ + them with Data_encoding.dynamic_size." 
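A sketch of a binary round trip with [Binary.to_bytes_exn] and [Binary.read], and of driving the streaming reader through its [status] type, all declared above. [MBytes.length] is assumed to exist with the usual signature, and the exact signature of [read_stream] is elided in this excerpt:

open Data_encoding

let msg = obj2 (req "id" int32) (req "name" string)

let () =
  let buf = Binary.to_bytes_exn msg (7l, "seven") in
  (* [read] returns the offset of the first unread byte and the decoded
     value, or [None] on failure. *)
  match Binary.read msg buf 0 (MBytes.length buf) with
  | Some (_ofs, (id, name)) -> Format.printf "%ld %s@." id name
  | None -> prerr_endline "decoding failed"

(* Feed chunks into the continuation until a terminal status is reached;
   the initial status typically comes from [read_stream]. *)
let rec drain status chunks =
  match (status, chunks) with
  | (Binary.Success {result; _}, _) -> Ok result
  | (Binary.Error err, _) -> Error err
  | (Binary.Await feed, chunk :: rest) -> drain (feed chunk) rest
  | (Binary.Await _, []) -> Error Binary.Not_enough_data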
name - let merge : t -> t -> t = fun k1 k2 -> - match k1, k2 with - | `Fixed n1, `Fixed n2 when n1 = n2 -> `Fixed n1 - | `Fixed _, `Fixed _ -> `Dynamic - | `Dynamic, `Dynamic | `Fixed _, `Dynamic - | `Dynamic, `Fixed _ -> `Dynamic - | `Variable, (`Dynamic | `Fixed _) - | (`Dynamic | `Fixed _), `Variable - | `Variable, `Variable -> `Variable + let merge : t -> t -> t = + fun k1 k2 -> + match (k1, k2) with + | (`Fixed n1, `Fixed n2) when n1 = n2 -> + `Fixed n1 + | (`Fixed _, `Fixed _) -> + `Dynamic + | (`Dynamic, `Dynamic) | (`Fixed _, `Dynamic) | (`Dynamic, `Fixed _) -> + `Dynamic + | (`Variable, (`Dynamic | `Fixed _)) + | ((`Dynamic | `Fixed _), `Variable) + | (`Variable, `Variable) -> + `Variable let merge_list sz : t list -> t = function - | [] -> assert false (* should be rejected by Data_encoding.union *) - | k :: ks -> - match List.fold_left merge k ks with - | `Fixed n -> `Fixed (n + Binary_size.tag_size sz) - | k -> k - + | [] -> + assert false (* should be rejected by Data_encoding.union *) + | k :: ks -> ( + match List.fold_left merge k ks with + | `Fixed n -> + `Fixed (n + Binary_size.tag_size sz) + | k -> + k ) end type case_tag = Tag of int | Json_only @@ -94,8 +95,8 @@ type 'a desc = | Int64 : Int64.t desc | N : Z.t desc | Z : Z.t desc - | RangedInt : { minimum : int ; maximum : int } -> int desc - | RangedFloat : { minimum : float ; maximum : float } -> float desc + | RangedInt : {minimum : int; maximum : int} -> int desc + | RangedFloat : {minimum : float; maximum : float} -> float desc | Float : float desc | Bytes : Kind.length -> MBytes.t desc | String : Kind.length -> string desc @@ -104,541 +105,737 @@ type 'a desc = | Array : int option * 'a t -> 'a array desc | List : int option * 'a t -> 'a list desc | Obj : 'a field -> 'a desc - | Objs : { kind: Kind.t ; left: 'a t ; right: 'b t } -> ('a * 'b) desc + | Objs : {kind : Kind.t; left : 'a t; right : 'b t} -> ('a * 'b) desc | Tup : 'a t -> 'a desc - | Tups : { kind: Kind.t ; left: 'a t ; right: 'b t } -> ('a * 'b) desc + | Tups : {kind : Kind.t; left : 'a t; right : 'b t} -> ('a * 'b) desc | Union : - { kind: Kind.t ; - tag_size: Binary_size.tag_size ; - cases: 'a case list ; - } -> 'a desc + { kind : Kind.t; + tag_size : Binary_size.tag_size; + cases : 'a case list } + -> 'a desc | Mu : - { kind: Kind.enum ; - name: string ; - title: string option ; - description: string option ; - fix: 'a t -> 'a t ; - } -> 'a desc + { kind : Kind.enum; + name : string; + title : string option; + description : string option; + fix : 'a t -> 'a t } + -> 'a desc | Conv : - { proj : ('a -> 'b) ; - inj : ('b -> 'a) ; - encoding : 'b t ; - schema : Json_schema.schema option ; - } -> 'a desc + { proj : 'a -> 'b; + inj : 'b -> 'a; + encoding : 'b t; + schema : Json_schema.schema option } + -> 'a desc | Describe : - { id : string ; - title : string option ; - description : string option ; - encoding : 'a t ; - } -> 'a desc + { id : string; + title : string option; + description : string option; + encoding : 'a t } + -> 'a desc | Splitted : - { encoding : 'a t ; - json_encoding : 'a Json_encoding.encoding ; - is_obj : bool ; - is_tup : bool ; - } -> 'a desc + { encoding : 'a t; + json_encoding : 'a Json_encoding.encoding; + is_obj : bool; + is_tup : bool } + -> 'a desc | Dynamic_size : - { kind : Binary_size.unsigned_integer ; - encoding : 'a t ; - } -> 'a desc - | Check_size : { limit : int ; encoding : 'a t } -> 'a desc + { kind : Binary_size.unsigned_integer; + encoding : 'a t } + -> 'a desc + | Check_size : {limit : int; encoding : 'a t} -> 
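A sketch of the combination rule implemented by [Kind.combine] above: a variable-length element to the left of a dynamic-length one is rejected, and wrapping it with [dynamic_size] resolves the ambiguity:

open Data_encoding

(* Raises [Invalid_argument]: [Variable.string] is `Variable while [string]
   (a length-prefixed string) is `Dynamic. *)
let _rejected () = tup2 Variable.string string

(* Accepted: the variable part now carries its own length prefix. *)
let fixed_up = tup2 (dynamic_size Variable.string) string

(* Also accepted: putting the variable element last yields a `Variable
   encoding. *)
let variable_last = tup2 string Variable.string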
'a desc | Delayed : (unit -> 'a t) -> 'a desc and _ field = - | Req : { name: string ; - encoding: 'a t ; - title: string option ; - description: string option ; - } -> 'a field - | Opt : { name: string ; - kind: Kind.enum ; - encoding: 'a t ; - title: string option ; - description: string option ; - } -> 'a option field - | Dft : { name: string ; - encoding: 'a t ; - default: 'a ; - title: string option ; - description: string option ; - } -> 'a field + | Req : + { name : string; + encoding : 'a t; + title : string option; + description : string option } + -> 'a field + | Opt : + { name : string; + kind : Kind.enum; + encoding : 'a t; + title : string option; + description : string option } + -> 'a option field + | Dft : + { name : string; + encoding : 'a t; + default : 'a; + title : string option; + description : string option } + -> 'a field and 'a case = - | Case : { title : string ; - description : string option ; - encoding : 'a t ; - proj : ('t -> 'a option) ; - inj : ('a -> 't) ; - tag : case_tag ; - } -> 't case + | Case : + { title : string; + description : string option; + encoding : 'a t; + proj : 't -> 'a option; + inj : 'a -> 't; + tag : case_tag } + -> 't case and 'a t = { - encoding: 'a desc ; - mutable json_encoding: 'a Json_encoding.encoding option ; + encoding : 'a desc; + mutable json_encoding : 'a Json_encoding.encoding option } type 'a encoding = 'a t -let rec classify : type a. a t -> Kind.t = fun e -> - classify_desc e.encoding -and classify_desc : type a. a desc -> Kind.t = fun e -> +let rec classify : type a. a t -> Kind.t = fun e -> classify_desc e.encoding + +and classify_desc : type a. a desc -> Kind.t = + fun e -> match e with (* Fixed *) - | Null -> `Fixed 0 - | Empty -> `Fixed 0 - | Constant _ -> `Fixed 0 - | Bool -> `Fixed Binary_size.bool - | Int8 -> `Fixed Binary_size.int8 - | Uint8 -> `Fixed Binary_size.uint8 - | Int16 -> `Fixed Binary_size.int16 - | Uint16 -> `Fixed Binary_size.uint16 - | Int31 -> `Fixed Binary_size.int31 - | Int32 -> `Fixed Binary_size.int32 - | Int64 -> `Fixed Binary_size.int64 - | N -> `Dynamic - | Z -> `Dynamic - | RangedInt { minimum ; maximum } -> + | Null -> + `Fixed 0 + | Empty -> + `Fixed 0 + | Constant _ -> + `Fixed 0 + | Bool -> + `Fixed Binary_size.bool + | Int8 -> + `Fixed Binary_size.int8 + | Uint8 -> + `Fixed Binary_size.uint8 + | Int16 -> + `Fixed Binary_size.int16 + | Uint16 -> + `Fixed Binary_size.uint16 + | Int31 -> + `Fixed Binary_size.int31 + | Int32 -> + `Fixed Binary_size.int32 + | Int64 -> + `Fixed Binary_size.int64 + | N -> + `Dynamic + | Z -> + `Dynamic + | RangedInt {minimum; maximum} -> `Fixed Binary_size.(integer_to_size @@ range_to_size ~minimum ~maximum) - | Float -> `Fixed Binary_size.float - | RangedFloat _ -> `Fixed Binary_size.float + | Float -> + `Fixed Binary_size.float + | RangedFloat _ -> + `Fixed Binary_size.float (* Tagged *) - | Bytes kind -> (kind :> Kind.t) - | String kind -> (kind :> Kind.t) - | Padded ({ encoding ; _ }, n) -> begin - match classify_desc encoding with - | `Fixed m -> `Fixed (n+m) - | _ -> assert false (* by construction (see [Fixed.padded]) *) - end + | Bytes kind -> + (kind :> Kind.t) + | String kind -> + (kind :> Kind.t) + | Padded ({encoding; _}, n) -> ( + match classify_desc encoding with + | `Fixed m -> + `Fixed (n + m) + | _ -> + assert false (* by construction (see [Fixed.padded]) *) ) | String_enum (_, cases) -> `Fixed Binary_size.(integer_to_size @@ enum_size cases) - | Obj (Opt { kind ; _ }) -> (kind :> Kind.t) - | Objs { kind ; _ } -> kind - | Tups { kind ; _ } -> 
kind - | Union { kind ; _ } -> (kind :> Kind.t) - | Mu { kind ; _ } -> (kind :> Kind.t) + | Obj (Opt {kind; _}) -> + (kind :> Kind.t) + | Objs {kind; _} -> + kind + | Tups {kind; _} -> + kind + | Union {kind; _} -> + (kind :> Kind.t) + | Mu {kind; _} -> + (kind :> Kind.t) (* Variable *) - | Ignore -> `Fixed 0 - | Array _ -> `Variable - | List _ -> `Variable + | Ignore -> + `Fixed 0 + | Array _ -> + `Variable + | List _ -> + `Variable (* Recursive *) - | Obj (Req { encoding ; _ }) -> classify encoding - | Obj (Dft { encoding ; _ }) -> classify encoding - | Tup encoding -> classify encoding - | Conv { encoding ; _ } -> classify encoding - | Describe { encoding ; _ } -> classify encoding - | Splitted { encoding ; _ } -> classify encoding - | Dynamic_size _ -> `Dynamic - | Check_size { encoding ; _ } -> classify encoding - | Delayed f -> classify (f ()) - -let make ?json_encoding encoding = { encoding ; json_encoding } + | Obj (Req {encoding; _}) -> + classify encoding + | Obj (Dft {encoding; _}) -> + classify encoding + | Tup encoding -> + classify encoding + | Conv {encoding; _} -> + classify encoding + | Describe {encoding; _} -> + classify encoding + | Splitted {encoding; _} -> + classify encoding + | Dynamic_size _ -> + `Dynamic + | Check_size {encoding; _} -> + classify encoding + | Delayed f -> + classify (f ()) + +let make ?json_encoding encoding = {encoding; json_encoding} module Fixed = struct let string n = if n <= 0 then - invalid_arg "Cannot create a string encoding of negative or null fixed length." ; + invalid_arg + "Cannot create a string encoding of negative or null fixed length." ; make @@ String (`Fixed n) + let bytes n = if n <= 0 then - invalid_arg "Cannot create a byte encoding of negative or null fixed length." ; + invalid_arg + "Cannot create a byte encoding of negative or null fixed length." ; make @@ Bytes (`Fixed n) + let add_padding e n = if n <= 0 then invalid_arg "Cannot create a padding of negative or null fixed length." ; match classify e with | `Fixed _ -> make @@ Padded (e, n) - | _ -> invalid_arg "Cannot pad non-fixed size encoding" + | _ -> + invalid_arg "Cannot pad non-fixed size encoding" end -let rec is_zeroable: type t. t encoding -> bool = fun e -> +let rec is_zeroable : type t. t encoding -> bool = + fun e -> (* Whether an encoding can ever produce zero-byte of encoding. It is dnagerous to place zero-size elements in a collection (list/array) because they are indistinguishable from the abscence of elements. 
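A sketch of the [Fixed] constructors defined above and of what [classify] reports for them:

open Data_encoding

let tag8 = Fixed.string 8 (* classify tag8 = `Fixed 8 *)

let padded = Fixed.add_padding int32 4 (* 4 bytes of payload + 4 of padding *)

let () =
  match classify padded with
  | `Fixed n -> Format.printf "fixed, %d bytes@." n (* prints 8 *)
  | `Dynamic | `Variable -> assert false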
*) match e.encoding with (* trivially true *) - | Null -> true (* always true *) - | Empty -> true (* always true *) - | Ignore -> true (* always true *) - | Constant _ -> true (* always true *) + | Null -> + true (* always true *) + | Empty -> + true (* always true *) + | Ignore -> + true (* always true *) + | Constant _ -> + true (* always true *) (* trivially false *) - | Bool -> false - | Int8 -> false - | Uint8 -> false - | Int16 -> false - | Uint16 -> false - | Int31 -> false - | Int32 -> false - | Int64 -> false - | N -> false - | Z -> false - | RangedInt _ -> false - | RangedFloat _ -> false - | Float -> false - | Bytes _ -> false - | String _ -> false - | Padded _ -> false - | String_enum _ -> false + | Bool -> + false + | Int8 -> + false + | Uint8 -> + false + | Int16 -> + false + | Uint16 -> + false + | Int31 -> + false + | Int32 -> + false + | Int64 -> + false + | N -> + false + | Z -> + false + | RangedInt _ -> + false + | RangedFloat _ -> + false + | Float -> + false + | Bytes _ -> + false + | String _ -> + false + | Padded _ -> + false + | String_enum _ -> + false (* true in some cases, but in practice always protected by Dynamic *) - | Array _ -> true (* 0-element array *) - | List _ -> true (* 0-element list *) + | Array _ -> + true (* 0-element array *) + | List _ -> + true (* 0-element list *) (* represented as whatever is inside: truth mostly propagates *) - | Obj (Req { encoding = e ; _ }) -> is_zeroable e (* represented as-is *) - | Obj (Opt { kind = `Variable ; _ }) -> true (* optional field ommited *) - | Obj (Dft { encoding = e ; _ }) -> is_zeroable e (* represented as-is *) - | Obj _ -> false - | Objs { left ; right ; _ } -> is_zeroable left && is_zeroable right - | Tup e -> is_zeroable e - | Tups { left ; right ; _ } -> is_zeroable left && is_zeroable right - | Union _ -> false (* includes a tag *) + | Obj (Req {encoding = e; _}) -> + is_zeroable e (* represented as-is *) + | Obj (Opt {kind = `Variable; _}) -> + true (* optional field ommited *) + | Obj (Dft {encoding = e; _}) -> + is_zeroable e (* represented as-is *) + | Obj _ -> + false + | Objs {left; right; _} -> + is_zeroable left && is_zeroable right + | Tup e -> + is_zeroable e + | Tups {left; right; _} -> + is_zeroable left && is_zeroable right + | Union _ -> + false (* includes a tag *) (* other recursive cases: truth propagates *) - | Mu { kind = `Dynamic ; _ } -> false (* size prefix *) - | Mu { kind = `Variable ; fix ; _ } -> is_zeroable (fix e) - | Conv { encoding ; _ } -> is_zeroable encoding - | Describe { encoding ; _ } -> is_zeroable encoding - | Splitted { encoding ; _ } -> is_zeroable encoding - | Check_size { encoding ; _ } -> is_zeroable encoding + | Mu {kind = `Dynamic; _} -> + false (* size prefix *) + | Mu {kind = `Variable; fix; _} -> + is_zeroable (fix e) + | Conv {encoding; _} -> + is_zeroable encoding + | Describe {encoding; _} -> + is_zeroable encoding + | Splitted {encoding; _} -> + is_zeroable encoding + | Check_size {encoding; _} -> + is_zeroable encoding (* Unscrutable: true by default *) - | Delayed f -> is_zeroable (f ()) + | Delayed f -> + is_zeroable (f ()) (* Protected against zeroable *) - | Dynamic_size _ -> false (* always some data for size *) + | Dynamic_size _ -> + false + +(* always some data for size *) module Variable = struct let string = make @@ String `Variable + let bytes = make @@ Bytes `Variable + let check_not_variable name e = match classify e with | `Variable -> - Printf.ksprintf invalid_arg - "Cannot insert variable length element in %s. 
\ - You should wrap the contents using Data_encoding.dynamic_size." name - | `Dynamic | `Fixed _ -> () + Printf.ksprintf + invalid_arg + "Cannot insert variable length element in %s. You should wrap the \ + contents using Data_encoding.dynamic_size." + name + | `Dynamic | `Fixed _ -> + () + let check_not_zeroable name e = if is_zeroable e then - Printf.ksprintf invalid_arg - "Cannot insert potentially zero-sized element in %s." name - else - () + Printf.ksprintf + invalid_arg + "Cannot insert potentially zero-sized element in %s." + name + else () + let array ?max_length e = check_not_variable "an array" e ; check_not_zeroable "an array" e ; let encoding = make @@ Array (max_length, e) in - match classify e, max_length with - | `Fixed n, Some max_length -> + match (classify e, max_length) with + | (`Fixed n, Some max_length) -> let limit = n * max_length in - make @@ Check_size { limit ; encoding } - | _, _ -> encoding + make @@ Check_size {limit; encoding} + | (_, _) -> + encoding + let list ?max_length e = check_not_variable "a list" e ; check_not_zeroable "a list" e ; let encoding = make @@ List (max_length, e) in - match classify e, max_length with - | `Fixed n, Some max_length -> + match (classify e, max_length) with + | (`Fixed n, Some max_length) -> let limit = n * max_length in - make @@ Check_size { limit ; encoding } - | _, _ -> encoding + make @@ Check_size {limit; encoding} + | (_, _) -> + encoding end -let dynamic_size ?(kind = `Uint30) e = - make @@ Dynamic_size { kind ; encoding = e } +let dynamic_size ?(kind = `Uint30) e = make @@ Dynamic_size {kind; encoding = e} -let check_size limit encoding = - make @@ Check_size { limit ; encoding } +let check_size limit encoding = make @@ Check_size {limit; encoding} -let delayed f = - make @@ Delayed f +let delayed f = make @@ Delayed f let null = make @@ Null + let empty = make @@ Empty + let unit = make @@ Ignore + let constant s = make @@ Constant s + let bool = make @@ Bool + let int8 = make @@ Int8 + let uint8 = make @@ Uint8 + let int16 = make @@ Int16 + let uint16 = make @@ Uint16 + let int31 = make @@ Int31 + let int32 = make @@ Int32 + let ranged_int minimum maximum = - let minimum = min minimum maximum - and maximum = max minimum maximum in + let minimum = min minimum maximum and maximum = max minimum maximum in if minimum < -(1 lsl 30) || (1 lsl 30) - 1 < maximum then invalid_arg "Data_encoding.ranged_int" ; - make @@ RangedInt { minimum ; maximum } + make @@ RangedInt {minimum; maximum} + let ranged_float minimum maximum = - let minimum = min minimum maximum - and maximum = max minimum maximum in - make @@ RangedFloat { minimum ; maximum } + let minimum = min minimum maximum and maximum = max minimum maximum in + make @@ RangedFloat {minimum; maximum} + let int64 = make @@ Int64 + let n = make @@ N + let z = make @@ Z + let float = make @@ Float let string = dynamic_size Variable.string + let bytes = dynamic_size Variable.bytes + let array ?max_length e = dynamic_size (Variable.array ?max_length e) + let list ?max_length e = dynamic_size (Variable.list ?max_length e) let string_enum = function - | [] -> invalid_arg "data_encoding.string_enum: cannot have zero cases" - | [ _case ] -> invalid_arg "data_encoding.string_enum: cannot have a single case, use constant instead" + | [] -> + invalid_arg "data_encoding.string_enum: cannot have zero cases" + | [_case] -> + invalid_arg + "data_encoding.string_enum: cannot have a single case, use constant \ + instead" | _ :: _ as cases -> let arr = Array.of_list (List.map snd cases) in 
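A sketch of the two checks enforced above on variable-length collections: elements may be neither variable-length themselves (wrap them with [dynamic_size]) nor zero-width:

open Data_encoding

(* Rejected: [Variable.string] is itself of variable length. *)
let _bad () = Variable.list Variable.string

(* Rejected: [unit] occupies zero bytes, so elements would be invisible. *)
let _also_bad () = Variable.list unit

(* Accepted: each element carries its own length prefix. *)
let good = Variable.list (dynamic_size Variable.string)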
let tbl = Hashtbl.create (Array.length arr) in List.iteri (fun ind (str, a) -> Hashtbl.add tbl a (str, ind)) cases ; make @@ String_enum (tbl, arr) -let conv proj inj ?schema encoding = - make @@ Conv { proj ; inj ; encoding ; schema } +let conv proj inj ?schema encoding = make @@ Conv {proj; inj; encoding; schema} let def id ?title ?description encoding = - make @@ Describe { id ; title ; description ; encoding } + make @@ Describe {id; title; description; encoding} let req ?title ?description n t = - Req { name = n ; encoding = t ; title ; description } + Req {name = n; encoding = t; title; description} + let opt ?title ?description n encoding = let kind = match classify encoding with - | `Variable -> `Variable - | `Fixed _ | `Dynamic -> `Dynamic in - Opt { name = n ; kind ; encoding ; title ; description } + | `Variable -> + `Variable + | `Fixed _ | `Dynamic -> + `Dynamic + in + Opt {name = n; kind; encoding; title; description} + let varopt ?title ?description n encoding = - Opt { name = n ; kind = `Variable ; encoding ; title ; description } + Opt {name = n; kind = `Variable; encoding; title; description} + let dft ?title ?description n t d = - Dft { name = n ; encoding = t ; default = d ; title ; description } + Dft {name = n; encoding = t; default = d; title; description} let raw_splitted ~json ~binary = - make @@ Splitted { encoding = binary ; - json_encoding = json ; - is_obj = false ; - is_tup = false } + make + @@ Splitted + {encoding = binary; json_encoding = json; is_obj = false; is_tup = false} -let rec is_obj : type a. a t -> bool = fun e -> +let rec is_obj : type a. a t -> bool = + fun e -> match e.encoding with - | Obj _ -> true - | Objs _ (* by construction *) -> true - | Conv { encoding = e ; _ } -> is_obj e - | Dynamic_size { encoding = e ; _ } -> is_obj e - | Union { cases ; _ } -> - List.for_all (fun (Case { encoding = e ; _ }) -> is_obj e) cases - | Empty -> true - | Ignore -> true - | Mu { fix ; _ } -> is_obj (fix e) - | Splitted { is_obj ; _ } -> is_obj - | Delayed f -> is_obj (f ()) - | Describe { encoding ; _ } -> is_obj encoding - | _ -> false - -let rec is_tup : type a. a t -> bool = fun e -> + | Obj _ -> + true + | Objs _ (* by construction *) -> + true + | Conv {encoding = e; _} -> + is_obj e + | Dynamic_size {encoding = e; _} -> + is_obj e + | Union {cases; _} -> + List.for_all (fun (Case {encoding = e; _}) -> is_obj e) cases + | Empty -> + true + | Ignore -> + true + | Mu {fix; _} -> + is_obj (fix e) + | Splitted {is_obj; _} -> + is_obj + | Delayed f -> + is_obj (f ()) + | Describe {encoding; _} -> + is_obj encoding + | _ -> + false + +let rec is_tup : type a. 
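Sketches of [def] and [string_enum] as defined above; [def] attaches a name and documentation that end up in the generated schemas, and [string_enum] needs at least two cases ([constant] covers the single-case situation):

open Data_encoding

let amount =
  def
    "amount"
    ~title:"Amount"
    ~description:"An amount, expressed in the smallest unit"
    int64

let color = string_enum [("red", `Red); ("green", `Green); ("blue", `Blue)]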
a t -> bool = + fun e -> match e.encoding with - | Tup _ -> true - | Tups _ (* by construction *) -> true - | Conv { encoding = e ; _ } -> is_tup e - | Dynamic_size { encoding = e ; _ } -> is_tup e - | Union { cases ; _ } -> - List.for_all (function Case { encoding = e; _ } -> is_tup e) cases - | Mu { fix ; _ } -> is_tup (fix e) - | Splitted { is_tup ; _ } -> is_tup - | Delayed f -> is_tup (f ()) - | Describe { encoding ; _ } -> is_tup encoding - | _ -> false + | Tup _ -> + true + | Tups _ (* by construction *) -> + true + | Conv {encoding = e; _} -> + is_tup e + | Dynamic_size {encoding = e; _} -> + is_tup e + | Union {cases; _} -> + List.for_all (function Case {encoding = e; _} -> is_tup e) cases + | Mu {fix; _} -> + is_tup (fix e) + | Splitted {is_tup; _} -> + is_tup + | Delayed f -> + is_tup (f ()) + | Describe {encoding; _} -> + is_tup encoding + | _ -> + false let raw_merge_objs left right = let kind = Kind.combine "objects" (classify left) (classify right) in - make @@ Objs { kind ; left ; right } + make @@ Objs {kind; left; right} let obj1 f1 = make @@ Obj f1 -let obj2 f2 f1 = - raw_merge_objs (obj1 f2) (obj1 f1) -let obj3 f3 f2 f1 = - raw_merge_objs (obj1 f3) (obj2 f2 f1) -let obj4 f4 f3 f2 f1 = - raw_merge_objs (obj2 f4 f3) (obj2 f2 f1) -let obj5 f5 f4 f3 f2 f1 = - raw_merge_objs (obj1 f5) (obj4 f4 f3 f2 f1) -let obj6 f6 f5 f4 f3 f2 f1 = - raw_merge_objs (obj2 f6 f5) (obj4 f4 f3 f2 f1) + +let obj2 f2 f1 = raw_merge_objs (obj1 f2) (obj1 f1) + +let obj3 f3 f2 f1 = raw_merge_objs (obj1 f3) (obj2 f2 f1) + +let obj4 f4 f3 f2 f1 = raw_merge_objs (obj2 f4 f3) (obj2 f2 f1) + +let obj5 f5 f4 f3 f2 f1 = raw_merge_objs (obj1 f5) (obj4 f4 f3 f2 f1) + +let obj6 f6 f5 f4 f3 f2 f1 = raw_merge_objs (obj2 f6 f5) (obj4 f4 f3 f2 f1) + let obj7 f7 f6 f5 f4 f3 f2 f1 = raw_merge_objs (obj3 f7 f6 f5) (obj4 f4 f3 f2 f1) + let obj8 f8 f7 f6 f5 f4 f3 f2 f1 = raw_merge_objs (obj4 f8 f7 f6 f5) (obj4 f4 f3 f2 f1) + let obj9 f9 f8 f7 f6 f5 f4 f3 f2 f1 = raw_merge_objs (obj1 f9) (obj8 f8 f7 f6 f5 f4 f3 f2 f1) + let obj10 f10 f9 f8 f7 f6 f5 f4 f3 f2 f1 = raw_merge_objs (obj2 f10 f9) (obj8 f8 f7 f6 f5 f4 f3 f2 f1) let merge_objs o1 o2 = - if is_obj o1 && is_obj o2 then - raw_merge_objs o1 o2 - else - invalid_arg "Json_encoding.merge_objs" + if is_obj o1 && is_obj o2 then raw_merge_objs o1 o2 + else invalid_arg "Json_encoding.merge_objs" let raw_merge_tups left right = let kind = Kind.combine "tuples" (classify left) (classify right) in - make @@ Tups { kind ; left ; right } + make @@ Tups {kind; left; right} let tup1 e1 = make @@ Tup e1 -let tup2 e2 e1 = - raw_merge_tups (tup1 e2) (tup1 e1) -let tup3 e3 e2 e1 = - raw_merge_tups (tup1 e3) (tup2 e2 e1) -let tup4 e4 e3 e2 e1 = - raw_merge_tups (tup2 e4 e3) (tup2 e2 e1) -let tup5 e5 e4 e3 e2 e1 = - raw_merge_tups (tup1 e5) (tup4 e4 e3 e2 e1) -let tup6 e6 e5 e4 e3 e2 e1 = - raw_merge_tups (tup2 e6 e5) (tup4 e4 e3 e2 e1) + +let tup2 e2 e1 = raw_merge_tups (tup1 e2) (tup1 e1) + +let tup3 e3 e2 e1 = raw_merge_tups (tup1 e3) (tup2 e2 e1) + +let tup4 e4 e3 e2 e1 = raw_merge_tups (tup2 e4 e3) (tup2 e2 e1) + +let tup5 e5 e4 e3 e2 e1 = raw_merge_tups (tup1 e5) (tup4 e4 e3 e2 e1) + +let tup6 e6 e5 e4 e3 e2 e1 = raw_merge_tups (tup2 e6 e5) (tup4 e4 e3 e2 e1) + let tup7 e7 e6 e5 e4 e3 e2 e1 = raw_merge_tups (tup3 e7 e6 e5) (tup4 e4 e3 e2 e1) + let tup8 e8 e7 e6 e5 e4 e3 e2 e1 = raw_merge_tups (tup4 e8 e7 e6 e5) (tup4 e4 e3 e2 e1) + let tup9 e9 e8 e7 e6 e5 e4 e3 e2 e1 = raw_merge_tups (tup1 e9) (tup8 e8 e7 e6 e5 e4 e3 e2 e1) + let tup10 e10 e9 e8 e7 e6 e5 e4 e3 e2 e1 = 
raw_merge_tups (tup2 e10 e9) (tup8 e8 e7 e6 e5 e4 e3 e2 e1) let merge_tups t1 t2 = - if is_tup t1 && is_tup t2 then - raw_merge_tups t1 t2 - else - invalid_arg "Tezos_serial.Encoding.merge_tups" + if is_tup t1 && is_tup t2 then raw_merge_tups t1 t2 + else invalid_arg "Tezos_serial.Encoding.merge_tups" let conv3 ty = - conv - (fun (c, b, a) -> (c, (b, a))) - (fun (c, (b, a)) -> (c, b, a)) - ty + conv (fun (c, b, a) -> (c, (b, a))) (fun (c, (b, a)) -> (c, b, a)) ty + let obj3 f3 f2 f1 = conv3 (obj3 f3 f2 f1) + let tup3 f3 f2 f1 = conv3 (tup3 f3 f2 f1) + let conv4 ty = conv (fun (d, c, b, a) -> ((d, c), (b, a))) (fun ((d, c), (b, a)) -> (d, c, b, a)) ty + let obj4 f4 f3 f2 f1 = conv4 (obj4 f4 f3 f2 f1) + let tup4 f4 f3 f2 f1 = conv4 (tup4 f4 f3 f2 f1) + let conv5 ty = conv (fun (e, d, c, b, a) -> (e, ((d, c), (b, a)))) (fun (e, ((d, c), (b, a))) -> (e, d, c, b, a)) ty + let obj5 f5 f4 f3 f2 f1 = conv5 (obj5 f5 f4 f3 f2 f1) + let tup5 f5 f4 f3 f2 f1 = conv5 (tup5 f5 f4 f3 f2 f1) + let conv6 ty = conv (fun (f, e, d, c, b, a) -> ((f, e), ((d, c), (b, a)))) (fun ((f, e), ((d, c), (b, a))) -> (f, e, d, c, b, a)) ty + let obj6 f6 f5 f4 f3 f2 f1 = conv6 (obj6 f6 f5 f4 f3 f2 f1) + let tup6 f6 f5 f4 f3 f2 f1 = conv6 (tup6 f6 f5 f4 f3 f2 f1) + let conv7 ty = conv (fun (g, f, e, d, c, b, a) -> ((g, (f, e)), ((d, c), (b, a)))) (fun ((g, (f, e)), ((d, c), (b, a))) -> (g, f, e, d, c, b, a)) ty + let obj7 f7 f6 f5 f4 f3 f2 f1 = conv7 (obj7 f7 f6 f5 f4 f3 f2 f1) + let tup7 f7 f6 f5 f4 f3 f2 f1 = conv7 (tup7 f7 f6 f5 f4 f3 f2 f1) + let conv8 ty = - conv (fun (h, g, f, e, d, c, b, a) -> - (((h, g), (f, e)), ((d, c), (b, a)))) - (fun (((h, g), (f, e)), ((d, c), (b, a))) -> - (h, g, f, e, d, c, b, a)) + conv + (fun (h, g, f, e, d, c, b, a) -> (((h, g), (f, e)), ((d, c), (b, a)))) + (fun (((h, g), (f, e)), ((d, c), (b, a))) -> (h, g, f, e, d, c, b, a)) ty + let obj8 f8 f7 f6 f5 f4 f3 f2 f1 = conv8 (obj8 f8 f7 f6 f5 f4 f3 f2 f1) + let tup8 f8 f7 f6 f5 f4 f3 f2 f1 = conv8 (tup8 f8 f7 f6 f5 f4 f3 f2 f1) + let conv9 ty = conv (fun (i, h, g, f, e, d, c, b, a) -> - (i, (((h, g), (f, e)), ((d, c), (b, a))))) + (i, (((h, g), (f, e)), ((d, c), (b, a))))) (fun (i, (((h, g), (f, e)), ((d, c), (b, a)))) -> - (i, h, g, f, e, d, c, b, a)) + (i, h, g, f, e, d, c, b, a)) ty -let obj9 f9 f8 f7 f6 f5 f4 f3 f2 f1 = - conv9 (obj9 f9 f8 f7 f6 f5 f4 f3 f2 f1) -let tup9 f9 f8 f7 f6 f5 f4 f3 f2 f1 = - conv9 (tup9 f9 f8 f7 f6 f5 f4 f3 f2 f1) + +let obj9 f9 f8 f7 f6 f5 f4 f3 f2 f1 = conv9 (obj9 f9 f8 f7 f6 f5 f4 f3 f2 f1) + +let tup9 f9 f8 f7 f6 f5 f4 f3 f2 f1 = conv9 (tup9 f9 f8 f7 f6 f5 f4 f3 f2 f1) + let conv10 ty = conv (fun (j, i, h, g, f, e, d, c, b, a) -> - ((j, i), (((h, g), (f, e)), ((d, c), (b, a))))) + ((j, i), (((h, g), (f, e)), ((d, c), (b, a))))) (fun ((j, i), (((h, g), (f, e)), ((d, c), (b, a)))) -> - (j, i, h, g, f, e, d, c, b, a)) + (j, i, h, g, f, e, d, c, b, a)) ty + let obj10 f10 f9 f8 f7 f6 f5 f4 f3 f2 f1 = conv10 (obj10 f10 f9 f8 f7 f6 f5 f4 f3 f2 f1) + let tup10 f10 f9 f8 f7 f6 f5 f4 f3 f2 f1 = conv10 (tup10 f10 f9 f8 f7 f6 f5 f4 f3 f2 f1) let check_cases tag_size cases = - if cases = [] then - invalid_arg "Data_encoding.union: empty list of cases." ; - let max_tag = - match tag_size with - | `Uint8 -> 256 - | `Uint16 -> 256 * 256 in - ignore @@ - List.fold_left - (fun others (Case { tag ; _ }) -> - match tag with - | Json_only -> others - | Tag tag -> - if List.mem tag others then - Format.kasprintf invalid_arg - "The tag %d appears twice in an union." 
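The [convN] helpers above show the general pattern: larger products are [merge_objs]/[merge_tups] of smaller ones, re-flattened with [conv]. The same pattern extends past ten fields in user code, e.g. a hypothetical [obj11]:

open Data_encoding

let obj11 f11 f10 f9 f8 f7 f6 f5 f4 f3 f2 f1 =
  conv
    (fun (x11, x10, x9, x8, x7, x6, x5, x4, x3, x2, x1) ->
      (x11, (x10, x9, x8, x7, x6, x5, x4, x3, x2, x1)))
    (fun (x11, (x10, x9, x8, x7, x6, x5, x4, x3, x2, x1)) ->
      (x11, x10, x9, x8, x7, x6, x5, x4, x3, x2, x1))
    (merge_objs (obj1 f11) (obj10 f10 f9 f8 f7 f6 f5 f4 f3 f2 f1))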
- tag ; - if tag < 0 || max_tag <= tag then - Format.kasprintf invalid_arg "The tag %d is invalid." tag ; - tag :: others - ) - [] cases + if cases = [] then invalid_arg "Data_encoding.union: empty list of cases." ; + let max_tag = match tag_size with `Uint8 -> 256 | `Uint16 -> 256 * 256 in + ignore + @@ List.fold_left + (fun others (Case {tag; _}) -> + match tag with + | Json_only -> + others + | Tag tag -> + if List.mem tag others then + Format.kasprintf + invalid_arg + "The tag %d appears twice in an union." + tag ; + if tag < 0 || max_tag <= tag then + Format.kasprintf invalid_arg "The tag %d is invalid." tag ; + tag :: others) + [] + cases let union ?(tag_size = `Uint8) cases = check_cases tag_size cases ; - let kinds = - List.map (fun (Case { encoding ; _ }) -> classify encoding) cases in + let kinds = List.map (fun (Case {encoding; _}) -> classify encoding) cases in let kind = Kind.merge_list tag_size kinds in - make @@ Union { kind ; tag_size ; cases } + make @@ Union {kind; tag_size; cases} + let case ~title ?description tag encoding proj inj = - Case { title ; description ; encoding ; proj ; inj ; tag } + Case {title; description; encoding; proj; inj; tag} -let rec is_nullable: type t. t encoding -> bool = fun e -> +let rec is_nullable : type t. t encoding -> bool = + fun e -> match e.encoding with - | Null -> true - | Empty -> false - | Ignore -> true - | Constant _ -> false - | Bool -> false - | Int8 -> false - | Uint8 -> false - | Int16 -> false - | Uint16 -> false - | Int31 -> false - | Int32 -> false - | Int64 -> false - | N -> false - | Z -> false - | RangedInt _ -> false - | RangedFloat _ -> false - | Float -> false - | Bytes _ -> false - | String _ -> false - | Padded (e, _) -> is_nullable e - | String_enum _ -> false - | Array _ -> false - | List _ -> false - | Obj _ -> false - | Objs _ -> false - | Tup _ -> false - | Tups _ -> false - | Union { cases ; _ } -> - List.exists (fun (Case { encoding = e ; _ }) -> is_nullable e) cases - | Mu { fix ; _ } -> is_nullable (fix e) - | Conv { encoding = e ; _ } -> is_nullable e - | Describe { encoding = e ; _ } -> is_nullable e - | Splitted { json_encoding ; _ } -> Json_encoding.is_nullable json_encoding - | Dynamic_size { encoding = e ; _ } -> is_nullable e - | Check_size { encoding = e ; _ } -> is_nullable e - | Delayed _ -> true + | Null -> + true + | Empty -> + false + | Ignore -> + true + | Constant _ -> + false + | Bool -> + false + | Int8 -> + false + | Uint8 -> + false + | Int16 -> + false + | Uint16 -> + false + | Int31 -> + false + | Int32 -> + false + | Int64 -> + false + | N -> + false + | Z -> + false + | RangedInt _ -> + false + | RangedFloat _ -> + false + | Float -> + false + | Bytes _ -> + false + | String _ -> + false + | Padded (e, _) -> + is_nullable e + | String_enum _ -> + false + | Array _ -> + false + | List _ -> + false + | Obj _ -> + false + | Objs _ -> + false + | Tup _ -> + false + | Tups _ -> + false + | Union {cases; _} -> + List.exists (fun (Case {encoding = e; _}) -> is_nullable e) cases + | Mu {fix; _} -> + is_nullable (fix e) + | Conv {encoding = e; _} -> + is_nullable e + | Describe {encoding = e; _} -> + is_nullable e + | Splitted {json_encoding; _} -> + Json_encoding.is_nullable json_encoding + | Dynamic_size {encoding = e; _} -> + is_nullable e + | Check_size {encoding = e; _} -> + is_nullable e + | Delayed _ -> + true let option ty = if is_nullable ty then @@ -646,42 +843,46 @@ let option ty = (* TODO add a special construct `Option` in the GADT *) union ~tag_size:`Uint8 - [ case - (Tag 1) ty 
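A sketch of the tag validation performed by [check_cases] above; duplicate tags (or tags outside the range allowed by the tag size) are rejected when the union is built, not when it is used:

open Data_encoding

(* Raises [Invalid_argument]: tag 0 appears twice. *)
let _duplicate_tags () =
  union
    [ case (Tag 0) ~title:"A" empty (fun () -> Some ()) (fun () -> ());
      case (Tag 0) ~title:"B" empty (fun () -> Some ()) (fun () -> ()) ]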
- ~title:"Some" - (fun x -> x) - (fun x -> Some x) ; + [ case (Tag 1) ty ~title:"Some" (fun x -> x) (fun x -> Some x); case - (Tag 0) null + (Tag 0) + null ~title:"None" (function None -> Some () | Some _ -> None) - (fun () -> None) ; - ] + (fun () -> None) ] + let mu name ?title ?description fix = let kind = try let precursor = - make @@ Mu { kind = `Dynamic ; name ; title ; description ; fix } in + make @@ Mu {kind = `Dynamic; name; title; description; fix} + in match classify @@ fix precursor with - | `Fixed _ | `Dynamic -> `Dynamic - | `Variable -> raise Exit + | `Fixed _ | `Dynamic -> + `Dynamic + | `Variable -> + raise Exit with Exit | _ (* TODO variability error *) -> let precursor = - make @@ Mu { kind = `Variable ; name ; title ; description ; fix } in + make @@ Mu {kind = `Variable; name; title; description; fix} + in ignore (classify @@ fix precursor) ; - `Variable in - make @@ Mu { kind ; name ; title ; description ; fix } + `Variable + in + make @@ Mu {kind; name; title; description; fix} let result ok_enc error_enc = union ~tag_size:`Uint8 - [ case (Tag 1) ok_enc + [ case + (Tag 1) + ok_enc ~title:"Ok" (function Ok x -> Some x | Error _ -> None) - (fun x -> Ok x) ; - case (Tag 0) error_enc + (fun x -> Ok x); + case + (Tag 0) + error_enc ~title:"Result" (function Ok _ -> None | Error x -> Some x) - (fun x -> Error x) ; - ] - + (fun x -> Error x) ] diff --git a/src/lib_data_encoding/encoding.mli b/src/lib_data_encoding/encoding.mli index 1051aea36e597fbf687dc1df300720c02bdfd76e..c97d91cf12003fe82b4f02b932bf78ab3fca2da5 100644 --- a/src/lib_data_encoding/encoding.mli +++ b/src/lib_data_encoding/encoding.mli @@ -26,13 +26,18 @@ (** This is for use *within* the data encoding library only. Instead, you should use the corresponding module intended for use: {!Data_encoding.Encoding}. 
*) -module Kind: sig - type t = [ `Fixed of int | `Dynamic | `Variable ] - type length = [ `Fixed of int | `Variable ] - type enum = [ `Dynamic | `Variable ] - val combine: string -> t -> t -> t +module Kind : sig + type t = [`Fixed of int | `Dynamic | `Variable] + + type length = [`Fixed of int | `Variable] + + type enum = [`Dynamic | `Variable] + + val combine : string -> t -> t -> t + val merge : t -> t -> t - val merge_list: Binary_size.tag_size -> t list -> t + + val merge_list : Binary_size.tag_size -> t list -> t end type case_tag = Tag of int | Json_only @@ -52,8 +57,8 @@ type 'a desc = | Int64 : Int64.t desc | N : Z.t desc | Z : Z.t desc - | RangedInt : { minimum : int ; maximum : int } -> int desc - | RangedFloat : { minimum : float ; maximum : float } -> float desc + | RangedInt : {minimum : int; maximum : int} -> int desc + | RangedFloat : {minimum : float; maximum : float} -> float desc | Float : float desc | Bytes : Kind.length -> MBytes.t desc | String : Kind.length -> string desc @@ -62,234 +67,377 @@ type 'a desc = | Array : int option * 'a t -> 'a array desc | List : int option * 'a t -> 'a list desc | Obj : 'a field -> 'a desc - | Objs : { kind: Kind.t ; left: 'a t ; right: 'b t } -> ('a * 'b) desc + | Objs : {kind : Kind.t; left : 'a t; right : 'b t} -> ('a * 'b) desc | Tup : 'a t -> 'a desc - | Tups : { kind: Kind.t ; left: 'a t ; right: 'b t } -> ('a * 'b) desc + | Tups : {kind : Kind.t; left : 'a t; right : 'b t} -> ('a * 'b) desc | Union : - { kind: Kind.t ; - tag_size: Binary_size.tag_size ; - cases: 'a case list ; - } -> 'a desc + { kind : Kind.t; + tag_size : Binary_size.tag_size; + cases : 'a case list } + -> 'a desc | Mu : - { kind: Kind.enum ; - name: string ; - title: string option ; - description: string option ; - fix: 'a t -> 'a t ; - } -> 'a desc + { kind : Kind.enum; + name : string; + title : string option; + description : string option; + fix : 'a t -> 'a t } + -> 'a desc | Conv : - { proj : ('a -> 'b) ; - inj : ('b -> 'a) ; - encoding : 'b t ; - schema : Json_schema.schema option ; - } -> 'a desc + { proj : 'a -> 'b; + inj : 'b -> 'a; + encoding : 'b t; + schema : Json_schema.schema option } + -> 'a desc | Describe : - { id : string ; - title : string option ; - description : string option ; - encoding : 'a t ; - } -> 'a desc + { id : string; + title : string option; + description : string option; + encoding : 'a t } + -> 'a desc | Splitted : - { encoding : 'a t ; - json_encoding : 'a Json_encoding.encoding ; - is_obj : bool ; - is_tup : bool ; - } -> 'a desc + { encoding : 'a t; + json_encoding : 'a Json_encoding.encoding; + is_obj : bool; + is_tup : bool } + -> 'a desc | Dynamic_size : - { kind : Binary_size.unsigned_integer ; - encoding : 'a t ; - } -> 'a desc - | Check_size : { limit : int ; encoding : 'a t } -> 'a desc + { kind : Binary_size.unsigned_integer; + encoding : 'a t } + -> 'a desc + | Check_size : {limit : int; encoding : 'a t} -> 'a desc | Delayed : (unit -> 'a t) -> 'a desc and _ field = - | Req : { name: string ; - encoding: 'a t ; - title: string option ; - description: string option ; - } -> 'a field - | Opt : { name: string ; - kind: Kind.enum ; - encoding: 'a t ; - title: string option ; - description: string option ; - } -> 'a option field - | Dft : { name: string ; - encoding: 'a t ; - default: 'a ; - title: string option ; - description: string option ; - } -> 'a field + | Req : + { name : string; + encoding : 'a t; + title : string option; + description : string option } + -> 'a field + | Opt : + { name : string; + kind : 
Kind.enum; + encoding : 'a t; + title : string option; + description : string option } + -> 'a option field + | Dft : + { name : string; + encoding : 'a t; + default : 'a; + title : string option; + description : string option } + -> 'a field and 'a case = - | Case : { title : string ; - description : string option ; - encoding : 'a t ; - proj : ('t -> 'a option) ; - inj : ('a -> 't) ; - tag : case_tag ; - } -> 't case + | Case : + { title : string; + description : string option; + encoding : 'a t; + proj : 't -> 'a option; + inj : 'a -> 't; + tag : case_tag } + -> 't case and 'a t = { - encoding: 'a desc ; - mutable json_encoding: 'a Json_encoding.encoding option ; + encoding : 'a desc; + mutable json_encoding : 'a Json_encoding.encoding option } + type 'a encoding = 'a t -val make: ?json_encoding: 'a Json_encoding.encoding -> 'a desc -> 'a t +val make : ?json_encoding:'a Json_encoding.encoding -> 'a desc -> 'a t val null : unit encoding + val empty : unit encoding + val unit : unit encoding + val constant : string -> unit encoding + val int8 : int encoding + val uint8 : int encoding + val int16 : int encoding + val uint16 : int encoding + val int31 : int encoding + val int32 : int32 encoding + val int64 : int64 encoding + val n : Z.t encoding + val z : Z.t encoding + val ranged_int : int -> int -> int encoding + val ranged_float : float -> float -> float encoding + val bool : bool encoding + val string : string encoding + val bytes : MBytes.t encoding + val float : float encoding + val option : 'a encoding -> 'a option encoding + val result : 'a encoding -> 'b encoding -> ('a, 'b) result encoding + val string_enum : (string * 'a) list -> 'a encoding + val is_obj : 'a encoding -> bool + val is_tup : 'a encoding -> bool + module Fixed : sig val string : int -> string encoding + val bytes : int -> MBytes.t encoding + val add_padding : 'a encoding -> int -> 'a encoding end + module Variable : sig val string : string encoding + val bytes : MBytes.t encoding + val array : ?max_length:int -> 'a encoding -> 'a array encoding + val list : ?max_length:int -> 'a encoding -> 'a list encoding end + val dynamic_size : ?kind:Binary_size.unsigned_integer -> 'a encoding -> 'a encoding + val check_size : int -> 'a encoding -> 'a encoding + val delayed : (unit -> 'a encoding) -> 'a encoding + val req : - ?title:string -> ?description:string -> - string -> 't encoding -> 't field + ?title:string -> ?description:string -> string -> 't encoding -> 't field + val opt : - ?title:string -> ?description:string -> - string -> 't encoding -> 't option field + ?title:string -> + ?description:string -> + string -> + 't encoding -> + 't option field + val varopt : - ?title:string -> ?description:string -> - string -> 't encoding -> 't option field + ?title:string -> + ?description:string -> + string -> + 't encoding -> + 't option field + val dft : - ?title:string -> ?description:string -> - string -> 't encoding -> 't -> 't field - -val obj1 : - 'f1 field -> 'f1 encoding -val obj2 : - 'f1 field -> 'f2 field -> ('f1 * 'f2) encoding -val obj3 : - 'f1 field -> 'f2 field -> 'f3 field -> ('f1 * 'f2 * 'f3) encoding + ?title:string -> + ?description:string -> + string -> + 't encoding -> + 't -> + 't field + +val obj1 : 'f1 field -> 'f1 encoding + +val obj2 : 'f1 field -> 'f2 field -> ('f1 * 'f2) encoding + +val obj3 : 'f1 field -> 'f2 field -> 'f3 field -> ('f1 * 'f2 * 'f3) encoding + val obj4 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> ('f1 * 'f2 * 
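A sketch of [delayed], declared above: the thunk is only evaluated when the encoding is actually used, which allows referring to encodings that are chosen or registered at run time. The registry below is purely hypothetical:

open Data_encoding

(* Hypothetical run-time registry, filled elsewhere. *)
let registry : (string * string encoding) list ref = ref []

let lookup name =
  delayed (fun () ->
      match List.assoc_opt name !registry with
      | Some e -> e
      | None -> string (* hypothetical fallback *))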
'f3 * 'f4) encoding + val obj5 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5) encoding + val obj6 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> 'f6 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6) encoding + val obj7 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7) encoding + val obj8 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> 'f8 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> + 'f8 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8) encoding + val obj9 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> 'f8 field -> 'f9 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> + 'f8 field -> + 'f9 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9) encoding + val obj10 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> 'f8 field -> 'f9 field -> 'f10 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> + 'f8 field -> + 'f9 field -> + 'f10 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9 * 'f10) encoding -val tup1 : - 'f1 encoding -> - 'f1 encoding -val tup2 : - 'f1 encoding -> 'f2 encoding -> - ('f1 * 'f2) encoding +val tup1 : 'f1 encoding -> 'f1 encoding + +val tup2 : 'f1 encoding -> 'f2 encoding -> ('f1 * 'f2) encoding + val tup3 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> - ('f1 * 'f2 * 'f3) encoding + 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> ('f1 * 'f2 * 'f3) encoding + val tup4 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> ('f1 * 'f2 * 'f3 * 'f4) encoding + val tup5 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> 'f5 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5) encoding + val tup6 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6) encoding + val tup7 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7) encoding + val tup8 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> 'f8 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> + 'f8 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8) encoding + val tup9 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> 'f8 encoding -> + 'f1 encoding -> + 
'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> + 'f8 encoding -> 'f9 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9) encoding + val tup10 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> 'f8 encoding -> - 'f9 encoding -> 'f10 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> + 'f8 encoding -> + 'f9 encoding -> + 'f10 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9 * 'f10) encoding val merge_objs : 'o1 encoding -> 'o2 encoding -> ('o1 * 'o2) encoding + val merge_tups : 'a1 encoding -> 'a2 encoding -> ('a1 * 'a2) encoding + val array : ?max_length:int -> 'a encoding -> 'a array encoding + val list : ?max_length:int -> 'a encoding -> 'a list encoding val case : title:string -> - ?description: string -> + ?description:string -> case_tag -> - 'a encoding -> ('t -> 'a option) -> ('a -> 't) -> 't case -val union : - ?tag_size:[ `Uint8 | `Uint16 ] -> 't case list -> 't encoding + 'a encoding -> + ('t -> 'a option) -> + ('a -> 't) -> + 't case + +val union : ?tag_size:[`Uint8 | `Uint16] -> 't case list -> 't encoding val def : - string -> - ?title:string -> ?description:string -> - 'a encoding -> 'a encoding + string -> ?title:string -> ?description:string -> 'a encoding -> 'a encoding val conv : - ('a -> 'b) -> ('b -> 'a) -> + ('a -> 'b) -> + ('b -> 'a) -> ?schema:Json_schema.schema -> - 'b encoding -> 'a encoding + 'b encoding -> + 'a encoding + val mu : string -> ?title:string -> - ?description: string -> - ('a encoding -> 'a encoding) -> 'a encoding + ?description:string -> + ('a encoding -> 'a encoding) -> + 'a encoding + +val classify : 'a encoding -> [`Fixed of int | `Dynamic | `Variable] + +val classify_desc : 'a desc -> [`Fixed of int | `Dynamic | `Variable] -val classify : 'a encoding -> [ `Fixed of int | `Dynamic | `Variable ] -val classify_desc : 'a desc -> [ `Fixed of int | `Dynamic | `Variable ] -val raw_splitted : json:'a Json_encoding.encoding -> binary:'a encoding -> 'a encoding +val raw_splitted : + json:'a Json_encoding.encoding -> binary:'a encoding -> 'a encoding diff --git a/src/lib_data_encoding/json.ml b/src/lib_data_encoding/json.ml index 09db0eda85876e5b118b5acd8c6dd035eb286333..a063b3e95af8d3650a74789c7b18618a0fa7d2c0 100644 --- a/src/lib_data_encoding/json.ml +++ b/src/lib_data_encoding/json.ml @@ -34,78 +34,76 @@ type json = type schema = Json_schema.schema type pair_builder = { - build: 'a 'b. Encoding.Kind.t -> 'a Encoding.t -> 'b Encoding.t -> ('a * 'b) Encoding.t + build : + 'a 'b. 
Encoding.Kind.t -> 'a Encoding.t -> 'b Encoding.t -> + ('a * 'b) Encoding.t } exception Parse_error of string -let wrap_error f = - fun str -> - try f str - with exn -> raise (Json_encoding.Cannot_destruct ([], exn)) +let wrap_error f str = + try f str with exn -> raise (Json_encoding.Cannot_destruct ([], exn)) let int64_encoding = let open Json_encoding in - def "int64" - ~title: "64 bit integers" - ~description: "Decimal representation of 64 bit integers" @@ - conv - Int64.to_string - (wrap_error Int64.of_string) - string + def + "int64" + ~title:"64 bit integers" + ~description:"Decimal representation of 64 bit integers" + @@ conv Int64.to_string (wrap_error Int64.of_string) string let n_encoding = let open Json_encoding in - def "positive_bignum" - ~title: "Positive big number" - ~description: "Decimal representation of a positive big number" @@ - conv - (fun z -> - if Z.sign z < 0 then invalid_arg "negative natural" ; - Z.to_string z) - (fun s -> - let n = Z.of_string s in - if Z.sign n < 0 then - raise (Json_encoding.Cannot_destruct ([], Failure "negative natural")) ; - n) - string + def + "positive_bignum" + ~title:"Positive big number" + ~description:"Decimal representation of a positive big number" + @@ conv + (fun z -> + if Z.sign z < 0 then invalid_arg "negative natural" ; + Z.to_string z) + (fun s -> + let n = Z.of_string s in + if Z.sign n < 0 then + raise + (Json_encoding.Cannot_destruct ([], Failure "negative natural")) ; + n) + string let z_encoding = let open Json_encoding in - def "bignum" - ~title: "Big number" - ~description: "Decimal representation of a big number" @@ - conv Z.to_string Z.of_string string + def + "bignum" + ~title:"Big number" + ~description:"Decimal representation of a big number" + @@ conv Z.to_string Z.of_string string let bytes_jsont = let open Json_encoding in let schema = let open Json_schema in create - { title = None ; - description = None ; - default = None ; - enum = None ; - kind = String { - pattern = Some "^[a-zA-Z0-9]+$" ; - min_length = 0 ; - max_length = None ; - } ; - format = None ; - id = None } in - conv ~schema + { title = None; + description = None; + default = None; + enum = None; + kind = + String + {pattern = Some "^[a-zA-Z0-9]+$"; min_length = 0; max_length = None}; + format = None; + id = None } + in + conv + ~schema MBytes.to_hex (wrap_error MBytes.of_hex) - (conv - (fun (`Hex h) -> h) - (fun h -> `Hex h) - string) + (conv (fun (`Hex h) -> h) (fun h -> `Hex h) string) let check_utf8 s = - Uutf.String.fold_utf_8 (fun valid _pos -> function - | `Uchar _ -> valid - | `Malformed _ -> false) - true s + Uutf.String.fold_utf_8 + (fun valid _pos -> function `Uchar _ -> valid | `Malformed _ -> false) + true + s let raw_string_encoding = let open Json_encoding in @@ -114,249 +112,326 @@ let raw_string_encoding = in let obj_case = case - (obj1 (req "invalid_utf8_string" (array (ranged_int ~minimum:0 ~maximum:255 "byte")))) + (obj1 + (req + "invalid_utf8_string" + (array (ranged_int ~minimum:0 ~maximum:255 "byte")))) (fun s -> Some (Array.init (String.length s) (fun i -> Char.code s.[i]))) (fun a -> String.init (Array.length a) (fun i -> Char.chr a.(i))) in def "unistring" ~title:"Universal string representation" - ~description:"Either a plain UTF8 string, or a sequence of bytes for strings \ - that contain invalid byte sequences." - (union [ utf8_case ; obj_case ]) + ~description: + "Either a plain UTF8 string, or a sequence of bytes for strings that \ + contain invalid byte sequences." 
+ (union [utf8_case; obj_case]) -let rec lift_union : type a. a Encoding.t -> a Encoding.t = fun e -> +let rec lift_union : type a. a Encoding.t -> a Encoding.t = + fun e -> let open Encoding in match e.encoding with - | Conv { proj ; inj ; encoding = e ; schema } -> begin - match lift_union e with - | { encoding = Union { kind ; tag_size ; cases } ; _ } -> - make @@ - Union { kind ; tag_size ; - cases = List.map - (fun (Case { title ; description ; encoding ; proj = proj' ; inj = inj' ; tag }) -> - Case { encoding ; - title ; - description ; - proj = (fun x -> proj' (proj x)); - inj = (fun x -> inj (inj' x)) ; - tag }) - cases } - | e -> make @@ Conv { proj ; inj ; encoding = e ; schema } - end - | Objs { kind ; left ; right } -> + | Conv {proj; inj; encoding = e; schema} -> ( + match lift_union e with + | {encoding = Union {kind; tag_size; cases}; _} -> + make + @@ Union + { kind; + tag_size; + cases = + List.map + (fun (Case + { title; + description; + encoding; + proj = proj'; + inj = inj'; + tag }) -> + Case + { encoding; + title; + description; + proj = (fun x -> proj' (proj x)); + inj = (fun x -> inj (inj' x)); + tag }) + cases } + | e -> + make @@ Conv {proj; inj; encoding = e; schema} ) + | Objs {kind; left; right} -> lift_union_in_pair - { build = fun kind left right -> make @@ Objs { kind ; left ; right } } - kind left right - | Tups { kind ; left ; right } -> + {build = (fun kind left right -> make @@ Objs {kind; left; right})} + kind + left + right + | Tups {kind; left; right} -> lift_union_in_pair - { build = fun kind left right -> make @@ Tups { kind ; left ; right } } - kind left right - | _ -> e - -and lift_union_in_pair - : type a b. pair_builder -> Encoding.Kind.t -> a Encoding.t -> b Encoding.t -> (a * b) Encoding.t - = fun b p e1 e2 -> - let open Encoding in - match lift_union e1, lift_union e2 with - | e1, { encoding = Union { tag_size ; cases ; _ } ; _ } -> - make @@ - Union { kind = `Dynamic (* ignored *) ; tag_size ; - cases = - List.map - (fun (Case { title ; description ; encoding = e2 ; proj ; inj ; tag }) -> - Case { encoding = lift_union_in_pair b p e1 e2 ; - title ; - description ; - proj = (fun (x, y) -> - match proj y with - | None -> None - | Some y -> Some (x, y)) ; - inj = (fun (x, y) -> (x, inj y)) ; - tag }) - cases } - | { encoding = Union { tag_size ; cases ; _ } ; _ }, e2 -> - make @@ - Union { kind = `Dynamic (* ignored *) ; tag_size ; - cases = - List.map - (fun (Case { title ; description ; encoding = e1 ; proj ; inj ; tag }) -> - Case { encoding = lift_union_in_pair b p e1 e2 ; - title ; - description ; - proj = (fun (x, y) -> - match proj x with - | None -> None - | Some x -> Some (x, y)) ; - inj = (fun (x, y) -> (inj x, y)) ; - tag }) - cases } - | e1, e2 -> b.build p e1 e2 + {build = (fun kind left right -> make @@ Tups {kind; left; right})} + kind + left + right + | _ -> + e + +and lift_union_in_pair : + type a b. 
+ pair_builder -> + Encoding.Kind.t -> + a Encoding.t -> + b Encoding.t -> + (a * b) Encoding.t = + fun b p e1 e2 -> + let open Encoding in + match (lift_union e1, lift_union e2) with + | (e1, {encoding = Union {tag_size; cases; _}; _}) -> + make + @@ Union + { kind = `Dynamic (* ignored *); + tag_size; + cases = + List.map + (fun (Case + {title; description; encoding = e2; proj; inj; tag}) -> + Case + { encoding = lift_union_in_pair b p e1 e2; + title; + description; + proj = + (fun (x, y) -> + match proj y with + | None -> + None + | Some y -> + Some (x, y)); + inj = (fun (x, y) -> (x, inj y)); + tag }) + cases } + | ({encoding = Union {tag_size; cases; _}; _}, e2) -> + make + @@ Union + { kind = `Dynamic (* ignored *); + tag_size; + cases = + List.map + (fun (Case + {title; description; encoding = e1; proj; inj; tag}) -> + Case + { encoding = lift_union_in_pair b p e1 e2; + title; + description; + proj = + (fun (x, y) -> + match proj x with + | None -> + None + | Some x -> + Some (x, y)); + inj = (fun (x, y) -> (inj x, y)); + tag }) + cases } + | (e1, e2) -> + b.build p e1 e2 let rec json : type a. a Encoding.desc -> a Json_encoding.encoding = let open Encoding in let open Json_encoding in function - | Null -> null - | Empty -> empty - | Constant s -> constant s - | Ignore -> unit - | Int8 -> ranged_int ~minimum:~-(1 lsl 7) ~maximum:((1 lsl 7) - 1) "int8" - | Uint8 -> ranged_int ~minimum:0 ~maximum:((1 lsl 8) - 1) "uint8" - | Int16 -> ranged_int ~minimum:~-(1 lsl 15) ~maximum:((1 lsl 15) - 1) "int16" - | Uint16 -> ranged_int ~minimum:0 ~maximum:((1 lsl 16) - 1) "uint16" - | RangedInt { minimum ; maximum } -> ranged_int ~minimum ~maximum "rangedInt" - | Int31 -> int - | Int32 -> int32 - | Int64 -> int64_encoding - | N -> n_encoding - | Z -> z_encoding - | Bool -> bool - | Float -> float - | RangedFloat { minimum ; maximum } -> ranged_float ~minimum ~maximum "rangedFloat" + | Null -> + null + | Empty -> + empty + | Constant s -> + constant s + | Ignore -> + unit + | Int8 -> + ranged_int ~minimum:~-(1 lsl 7) ~maximum:((1 lsl 7) - 1) "int8" + | Uint8 -> + ranged_int ~minimum:0 ~maximum:((1 lsl 8) - 1) "uint8" + | Int16 -> + ranged_int ~minimum:~-(1 lsl 15) ~maximum:((1 lsl 15) - 1) "int16" + | Uint16 -> + ranged_int ~minimum:0 ~maximum:((1 lsl 16) - 1) "uint16" + | RangedInt {minimum; maximum} -> + ranged_int ~minimum ~maximum "rangedInt" + | Int31 -> + int + | Int32 -> + int32 + | Int64 -> + int64_encoding + | N -> + n_encoding + | Z -> + z_encoding + | Bool -> + bool + | Float -> + float + | RangedFloat {minimum; maximum} -> + ranged_float ~minimum ~maximum "rangedFloat" | String (`Fixed expected) -> let check s = let found = String.length s in if found <> expected then - raise (Cannot_destruct - ([] , - Unexpected (Format.asprintf "string (len %d)" found, - Format.asprintf "string (len %d)" expected))) ; - s in + raise + (Cannot_destruct + ( [], + Unexpected + ( Format.asprintf "string (len %d)" found, + Format.asprintf "string (len %d)" expected ) )) ; + s + in conv check check raw_string_encoding - | String _ -> raw_string_encoding - | Padded (e, _) -> get_json e + | String _ -> + raw_string_encoding + | Padded (e, _) -> + get_json e | Bytes (`Fixed expected) -> let check s = let found = MBytes.length s in if found <> expected then - raise (Cannot_destruct - ([] , - Unexpected (Format.asprintf "string (len %d)" found, - Format.asprintf "string (len %d)" expected))) ; - s in + raise + (Cannot_destruct + ( [], + Unexpected + ( Format.asprintf "string (len %d)" found, + Format.asprintf 
"string (len %d)" expected ) )) ; + s + in conv check check bytes_jsont - | Bytes _ -> bytes_jsont - | String_enum (tbl, _) -> string_enum (Hashtbl.fold (fun a (str, _) acc -> (str, a) :: acc) tbl []) - | Array (_, e) -> array (get_json e) (* FIXME TODO enforce max_length *) - | List (_, e) -> list (get_json e) - | Obj f -> obj1 (field_json f) - | Objs { left ; right ; _ } -> + | Bytes _ -> + bytes_jsont + | String_enum (tbl, _) -> + string_enum (Hashtbl.fold (fun a (str, _) acc -> (str, a) :: acc) tbl []) + | Array (_, e) -> + array (get_json e) (* FIXME TODO enforce max_length *) + | List (_, e) -> + list (get_json e) + | Obj f -> + obj1 (field_json f) + | Objs {left; right; _} -> merge_objs (get_json left) (get_json right) - | Tup e -> tup1 (get_json e) - | Tups { left ; right ; _ } -> + | Tup e -> + tup1 (get_json e) + | Tups {left; right; _} -> merge_tups (get_json left) (get_json right) - | Conv { proj ; inj ; encoding = e ; schema } -> conv ?schema proj inj (get_json e) - | Describe { id ; title ; description ; encoding = e } -> + | Conv {proj; inj; encoding = e; schema} -> + conv ?schema proj inj (get_json e) + | Describe {id; title; description; encoding = e} -> def id ?title ?description (get_json e) - | Mu { name ; fix ; _ } as ty -> + | Mu {name; fix; _} as ty -> mu name (fun json_encoding -> get_json @@ fix (make ~json_encoding ty)) - | Union { cases ; _ } -> union (List.map case_json cases) - | Splitted { json_encoding ; _ } -> json_encoding - | Dynamic_size { encoding = e ; _ } -> get_json e - | Check_size { encoding ; _ } -> get_json encoding - | Delayed f -> get_json (f ()) - -and field_json - : type a. a Encoding.field -> a Json_encoding.field = + | Union {cases; _} -> + union (List.map case_json cases) + | Splitted {json_encoding; _} -> + json_encoding + | Dynamic_size {encoding = e; _} -> + get_json e + | Check_size {encoding; _} -> + get_json encoding + | Delayed f -> + get_json (f ()) + +and field_json : type a. a Encoding.field -> a Json_encoding.field = let open Json_encoding in function - | Encoding.Req { name ; encoding = e ; _ } -> req name (get_json e) - | Encoding.Opt { name ; encoding = e ; _ } -> opt name (get_json e) - | Encoding.Dft { name ; encoding = e ; default = d; _ } -> dft name (get_json e) d + | Encoding.Req {name; encoding = e; _} -> + req name (get_json e) + | Encoding.Opt {name; encoding = e; _} -> + opt name (get_json e) + | Encoding.Dft {name; encoding = e; default = d; _} -> + dft name (get_json e) d and case_json : type a. a Encoding.case -> a Json_encoding.case = let open Json_encoding in function - | Encoding.Case { encoding = e ; proj ; inj ; _ } -> case (get_json e) proj inj + | Encoding.Case {encoding = e; proj; inj; _} -> case (get_json e) proj inj -and get_json : type a. a Encoding.t -> a Json_encoding.encoding = fun e -> +and get_json : type a. a Encoding.t -> a Json_encoding.encoding = + fun e -> match e.json_encoding with | None -> let json_encoding = json (lift_union e).encoding in e.json_encoding <- Some json_encoding ; json_encoding - | Some json_encoding -> json_encoding + | Some json_encoding -> + json_encoding let convert = get_json type path = path_item list + and path_item = - [ `Field of string - (** A field in an object. *) - | `Index of int - (** An index in an array. *) - | `Star - (** Any / every field or index. *) - | `Next - (** The next element after an array. *) ] + [ `Field of string (** A field in an object. *) + | `Index of int (** An index in an array. *) + | `Star (** Any / every field or index. 
*) + | `Next (** The next element after an array. *) ] include Json_encoding let construct e v = construct (get_json e) v + let destruct e v = destruct (get_json e) v + let schema ?definitions_path e = schema ?definitions_path (get_json e) let cannot_destruct fmt = - Format.kasprintf - (fun msg -> raise (Cannot_destruct ([], Failure msg))) - fmt + Format.kasprintf (fun msg -> raise (Cannot_destruct ([], Failure msg))) fmt type t = json let to_string ?(newline = false) ?minify j = - Format.asprintf "%a%s" - Json_repr.(pp ?compact:minify (module Ezjsonm)) j + Format.asprintf + "%a%s" + Json_repr.(pp ?compact:minify (module Ezjsonm)) + j (if newline then "\n" else "") let pp = Json_repr.(pp (module Ezjsonm)) let from_string s = match Ezjsonm.from_string ("[" ^ s ^ "]") with - | exception Ezjsonm.Parse_error (_, msg) -> Error msg - | `A [ json ] -> Ok json - | _ -> Error "Malformed value" + | exception Ezjsonm.Parse_error (_, msg) -> + Error msg + | `A [json] -> + Ok json + | _ -> + Error "Malformed value" -let from_stream (stream: string Lwt_stream.t) = +let from_stream (stream : string Lwt_stream.t) = let buffer = ref "" in Lwt_stream.filter_map (fun str -> - buffer := !buffer ^ str ; - try - let json = Ezjsonm.from_string !buffer in - buffer := "" ; - Some (Ok json) - with Ezjsonm.Parse_error _ -> - None) + buffer := !buffer ^ str ; + try + let json = Ezjsonm.from_string !buffer in + buffer := "" ; + Some (Ok json) + with Ezjsonm.Parse_error _ -> None) stream let encoding = let binary : Json_repr.ezjsonm Encoding.t = Encoding.conv (fun json -> - Json_repr.convert - (module Json_repr.Ezjsonm) - (module Json_repr_bson.Repr) - json |> - Json_repr_bson.bson_to_bytes |> - Bytes.to_string) - (fun s -> try - Bytes.of_string s |> - Json_repr_bson.bytes_to_bson ~copy:false |> - Json_repr.convert - (module Json_repr_bson.Repr) - (module Json_repr.Ezjsonm) - with - | Json_repr_bson.Bson_decoding_error (msg, _, _) -> - raise (Parse_error msg)) - Encoding.string in - let json = - Json_encoding.any_ezjson_value in + Json_repr.convert + (module Json_repr.Ezjsonm) + (module Json_repr_bson.Repr) + json + |> Json_repr_bson.bson_to_bytes |> Bytes.to_string) + (fun s -> + try + Bytes.of_string s + |> Json_repr_bson.bytes_to_bson ~copy:false + |> Json_repr.convert + (module Json_repr_bson.Repr) + (module Json_repr.Ezjsonm) + with Json_repr_bson.Bson_decoding_error (msg, _, _) -> + raise (Parse_error msg)) + Encoding.string + in + let json = Json_encoding.any_ezjson_value in Encoding.raw_splitted ~binary ~json let schema_encoding = - Encoding.conv - Json_schema.to_json - Json_schema.of_json - encoding - + Encoding.conv Json_schema.to_json Json_schema.of_json encoding diff --git a/src/lib_data_encoding/json.mli b/src/lib_data_encoding/json.mli index 3716c69b57eb4051ec94363d2fd37479eb1a27a9..fe48e1af3caa58c045e1ea133dda29bb0df1036a 100644 --- a/src/lib_data_encoding/json.mli +++ b/src/lib_data_encoding/json.mli @@ -33,40 +33,55 @@ type json = | `A of json list | `Null | `String of string ] + type t = json + type schema = Json_schema.schema val convert : 'a Encoding.t -> 'a Json_encoding.encoding + val schema : ?definitions_path:string -> 'a Encoding.t -> schema -val encoding: json Encoding.t -val schema_encoding: schema Encoding.t + +val encoding : json Encoding.t + +val schema_encoding : schema Encoding.t + val construct : 't Encoding.t -> 't -> json + val destruct : 't Encoding.t -> json -> 't type path = path_item list -and path_item = - [ `Field of string - | `Index of int - | `Star - | `Next - ] + +and 
path_item = [`Field of string | `Index of int | `Star | `Next] + exception Cannot_destruct of (path * exn) + exception Unexpected of string * string + exception No_case_matched of exn list + exception Bad_array_size of int * int + exception Missing_field of string + exception Unexpected_field of string val print_error : - ?print_unknown: (Format.formatter -> exn -> unit) -> - Format.formatter -> exn -> unit + ?print_unknown:(Format.formatter -> exn -> unit) -> + Format.formatter -> + exn -> + unit val cannot_destruct : ('a, Format.formatter, unit, 'b) format4 -> 'a + val wrap_error : ('a -> 'b) -> 'a -> 'b val from_string : string -> (json, string) result + val from_stream : string Lwt_stream.t -> (json, string) result Lwt_stream.t + val to_string : ?newline:bool -> ?minify:bool -> json -> string + val pp : Format.formatter -> json -> unit -val bytes_jsont: MBytes.t Json_encoding.encoding +val bytes_jsont : MBytes.t Json_encoding.encoding diff --git a/src/lib_data_encoding/test/bench_data_encoding.ml b/src/lib_data_encoding/test/bench_data_encoding.ml index d543749513d30b13cf510a63e9c9eab73121ee8c..e0266080746452bdfee566714341a79457b37b01 100644 --- a/src/lib_data_encoding/test/bench_data_encoding.ml +++ b/src/lib_data_encoding/test/bench_data_encoding.ml @@ -23,11 +23,11 @@ (* *) (*****************************************************************************) -let bench ?(num_iterations=1000) name thunk = +let bench ?(num_iterations = 1000) name thunk = Gc.full_major () ; Gc.compact () ; let start_time = Sys.time () in - for _i = 0 to (num_iterations - 1) do + for _i = 0 to num_iterations - 1 do thunk () done ; let end_time = Sys.time () in @@ -39,98 +39,110 @@ let bench ?(num_iterations=1000) name thunk = let read_stream encoding bytes = let rec loop bytes status = - match bytes, status with - | [], Data_encoding.Binary.Success _ -> () - | bytes :: bytess, Await f -> loop bytess (f bytes) - | _, _ -> assert false in + match (bytes, status) with + | ([], Data_encoding.Binary.Success _) -> + () + | (bytes :: bytess, Await f) -> + loop bytess (f bytes) + | (_, _) -> + assert false + in loop bytes (Data_encoding.Binary.read_stream encoding) -let bench_all ?(num_iterations=1000) name encoding value = - bench ~num_iterations ("writing " ^ name ^ " json") - (fun () -> ignore @@ Data_encoding.Json.to_string @@ Data_encoding.Json.construct encoding value) ; - bench ~num_iterations ("writing " ^ name ^ " binary") +let bench_all ?(num_iterations = 1000) name encoding value = + bench + ~num_iterations + ("writing " ^ name ^ " json") + (fun () -> + ignore @@ Data_encoding.Json.to_string + @@ Data_encoding.Json.construct encoding value) ; + bench + ~num_iterations + ("writing " ^ name ^ " binary") (fun () -> ignore @@ Data_encoding.Binary.to_bytes_exn encoding value) ; - let encoded_json = Data_encoding.Json.to_string @@ Data_encoding.Json.construct encoding value in - bench ~num_iterations ("reading " ^ name ^ " json") - (fun () -> ignore (Data_encoding.Json.destruct encoding (Ezjsonm.from_string encoded_json))) ; - let encoded_binary = Data_encoding.Binary.to_bytes_exn encoding value in - bench ~num_iterations ("reading " ^ name ^ " binary") - (fun () -> ignore @@ Data_encoding.Binary.of_bytes encoding encoded_binary ) ; - bench ~num_iterations ("reading " ^ name ^ " streamed binary (one chunk)") + let encoded_json = + Data_encoding.Json.to_string @@ Data_encoding.Json.construct encoding value + in + bench + ~num_iterations + ("reading " ^ name ^ " json") + (fun () -> + ignore + 
(Data_encoding.Json.destruct + encoding + (Ezjsonm.from_string encoded_json))) ; + let encoded_binary = Data_encoding.Binary.to_bytes_exn encoding value in + bench + ~num_iterations + ("reading " ^ name ^ " binary") + (fun () -> ignore @@ Data_encoding.Binary.of_bytes encoding encoded_binary) ; + bench + ~num_iterations + ("reading " ^ name ^ " streamed binary (one chunk)") (fun () -> read_stream encoding [encoded_binary]) ; - bench ~num_iterations + bench + ~num_iterations ("reading " ^ name ^ " streamed binary (small chunks)") (fun () -> read_stream encoding (MBytes.cut 1 encoded_binary)) ; () -type t = - | A of string - | B of bool - | I of int - | F of float - | R of t * t +type t = A of string | B of bool | I of int | F of float | R of t * t let cases_encoding : t Data_encoding.t = let open Data_encoding in - mu "recursive" - (fun recursive -> union [ - case (Tag 0) - ~title:"A" - string - (function A s -> Some s - | _ -> None) - (fun s -> A s) ; - case (Tag 1) - ~title:"B" - bool - (function B bool -> Some bool - | _ -> None) - (fun bool -> B bool) ; - case (Tag 2) - ~title:"I" - int31 - (function I int -> Some int - | _ -> None) - (fun int -> I int) ; - case (Tag 3) - ~title:"F" - float - (function F float -> Some float - | _ -> None) - (fun float -> F float) ; - case (Tag 4) - ~title:"R" - (obj2 - (req "field1" recursive) - (req "field2" recursive)) - (function R (a, b) -> Some (a, b) - | _ -> None) - (fun (a, b) -> R (a, b)) - ]) + mu "recursive" (fun recursive -> + union + [ case + (Tag 0) + ~title:"A" + string + (function A s -> Some s | _ -> None) + (fun s -> A s); + case + (Tag 1) + ~title:"B" + bool + (function B bool -> Some bool | _ -> None) + (fun bool -> B bool); + case + (Tag 2) + ~title:"I" + int31 + (function I int -> Some int | _ -> None) + (fun int -> I int); + case + (Tag 3) + ~title:"F" + float + (function F float -> Some float | _ -> None) + (fun float -> F float); + case + (Tag 4) + ~title:"R" + (obj2 (req "field1" recursive) (req "field2" recursive)) + (function R (a, b) -> Some (a, b) | _ -> None) + (fun (a, b) -> R (a, b)) ]) let () = - bench_all "10000_element_int_list" Data_encoding.(list int31) ~num_iterations:1000 (Array.to_list (Array.make 10000 0)) ; - bench_all "option_element_int_list" Data_encoding.(list (option int31)) (Array.to_list (Array.make 10000 (Some 0))) ; - let encoding = Data_encoding.(list (result (option int31) string)) in - let value = (Array.to_list (Array.make 10000 (Error "hello"))) in - bench_all "option_result_element_list" encoding value; - + let value = Array.to_list (Array.make 10000 (Error "hello")) in + bench_all "option_result_element_list" encoding value ; let encoding = Data_encoding.(list cases_encoding) in - let value = Array.to_list (Array.make 1000 (R (R (A "asdf", B true), F 1.0))) in - bench ~num_iterations:1000 "binary_encoding" - (fun () -> ignore @@ Data_encoding.Binary.to_bytes encoding value) ; - - bench_all "binary_encoding_large_list" + let value = + Array.to_list (Array.make 1000 (R (R (A "asdf", B true), F 1.0))) + in + bench ~num_iterations:1000 "binary_encoding" (fun () -> + ignore @@ Data_encoding.Binary.to_bytes encoding value) ; + bench_all + "binary_encoding_large_list" Data_encoding.(list cases_encoding) (Array.to_list (Array.make 2000 (R (R (A "asdf", B true), F 1.0)))) - diff --git a/src/lib_data_encoding/test/helpers.ml b/src/lib_data_encoding/test/helpers.ml index b3306abb466c69239565f4d116c1c7861ebcd751..6bdafc8d78bff4533208a6aae9e3ee1431b01b9a 100644 --- a/src/lib_data_encoding/test/helpers.ml +++ 
b/src/lib_data_encoding/test/helpers.ml @@ -26,54 +26,65 @@ open Data_encoding let no_exception f = - try f () - with - | Json_encoding.Cannot_destruct _ - | Json_encoding.Unexpected _ - | Json_encoding.No_case_matched _ - | Json_encoding.Bad_array_size _ - | Json_encoding.Missing_field _ - | Json_encoding.Unexpected_field _ - | Json_encoding.Bad_schema _ as exn -> + try f () with + | ( Json_encoding.Cannot_destruct _ + | Json_encoding.Unexpected _ + | Json_encoding.No_case_matched _ + | Json_encoding.Bad_array_size _ + | Json_encoding.Missing_field _ + | Json_encoding.Unexpected_field _ + | Json_encoding.Bad_schema _ ) as exn -> Alcotest.failf "@[v 2>json failed:@ %a@]" - (fun ppf -> Json_encoding.print_error ppf) exn + (fun ppf -> Json_encoding.print_error ppf) + exn | Binary.Read_error error -> Alcotest.failf "@[v 2>bytes reading failed:@ %a@]" - Binary.pp_read_error error + Binary.pp_read_error + error | Binary.Write_error error -> Alcotest.failf "@[v 2>bytes writing failed:@ %a@]" - Binary.pp_write_error error + Binary.pp_write_error + error let check_raises expected f = match f () with - | exception exn when expected exn -> () + | exception exn when expected exn -> + () | exception exn -> Alcotest.failf "Unexpected exception: %s." (Printexc.to_string exn) - | _ -> Alcotest.failf "Expecting exception, got success." + | _ -> + Alcotest.failf "Expecting exception, got success." let chunked_read sz encoding bytes = let status = List.fold_left (fun status chunk -> - match status with - | Binary.Await f -> f chunk - | Success _ when MBytes.length chunk <> 0 -> Error Extra_bytes - | Success _ | Error _ -> status) + match status with + | Binary.Await f -> + f chunk + | Success _ when MBytes.length chunk <> 0 -> + Error Extra_bytes + | Success _ | Error _ -> + status) (Binary.read_stream encoding) - (MBytes.cut sz bytes) in + (MBytes.cut sz bytes) + in match status with - | Success { stream ; _ } when not (Binary_stream.is_empty stream) -> + | Success {stream; _} when not (Binary_stream.is_empty stream) -> Binary.Error Extra_bytes - | _ -> status + | _ -> + status let streamed_read encoding bytes = List.fold_left - (fun (status, count as acc) chunk -> - match status with - | Binary.Await f -> (f chunk, succ count) - | Success _ | Error _ -> acc) + (fun ((status, count) as acc) chunk -> + match status with + | Binary.Await f -> + (f chunk, succ count) + | Success _ | Error _ -> + acc) (Binary.read_stream encoding, 0) (MBytes.cut 1 bytes) diff --git a/src/lib_data_encoding/test/invalid_encoding.ml b/src/lib_data_encoding/test/invalid_encoding.ml index cc5e7f7d78664464557f63c1b4f57370c28b5c1e..913e13784b3285e3a4ff8c3cb1ef48d494de91ad 100644 --- a/src/lib_data_encoding/test/invalid_encoding.ml +++ b/src/lib_data_encoding/test/invalid_encoding.ml @@ -27,20 +27,20 @@ open Data_encoding open Helpers let test ?(expected = fun _ -> true) name f = - name, `Quick, fun () -> check_raises expected f + (name, `Quick, fun () -> check_raises expected f) -let tests = [ - test "multi_variable_tup" (fun () -> tup2 Variable.string Variable.string) ; - test "variable_in_list" (fun () -> list Variable.string) ; - test "nested_option" (fun () -> option (option int8)) ; - test "merge_non_objs" (fun () -> merge_objs int8 string) ; - test "empty_union" (fun () -> union []) ; - test "duplicated_tag" (fun () -> - union [ case (Tag 0) ~title:"" empty (fun () -> None) (fun () -> ()) ; - case (Tag 0) ~title:"" empty (fun () -> None) (fun () -> ()) ]) ; - test "fixed_negative_size" (fun () -> Fixed.string (~- 1)) ; - test 
"fixed_null_size" (fun () -> Fixed.bytes 0) ; - test "array_null_size" (fun () -> Variable.list empty) ; - test "list_null_size" (fun () -> Variable.list null) ; - test "zeroable_in_list" (fun () -> list (obj1 (varopt "x" int8))) ; -] +let tests = + [ test "multi_variable_tup" (fun () -> tup2 Variable.string Variable.string); + test "variable_in_list" (fun () -> list Variable.string); + test "nested_option" (fun () -> option (option int8)); + test "merge_non_objs" (fun () -> merge_objs int8 string); + test "empty_union" (fun () -> union []); + test "duplicated_tag" (fun () -> + union + [ case (Tag 0) ~title:"" empty (fun () -> None) (fun () -> ()); + case (Tag 0) ~title:"" empty (fun () -> None) (fun () -> ()) ]); + test "fixed_negative_size" (fun () -> Fixed.string ~-1); + test "fixed_null_size" (fun () -> Fixed.bytes 0); + test "array_null_size" (fun () -> Variable.list empty); + test "list_null_size" (fun () -> Variable.list null); + test "zeroable_in_list" (fun () -> list (obj1 (varopt "x" int8))) ] diff --git a/src/lib_data_encoding/test/randomized.ml b/src/lib_data_encoding/test/randomized.ml index b622a700db29c1b2aa964a3e9ae49d3bb96e7dcf..57a66cf440f647a940f7a3783377f736db050e03 100644 --- a/src/lib_data_encoding/test/randomized.ml +++ b/src/lib_data_encoding/test/randomized.ml @@ -28,26 +28,21 @@ open Data_encoding (** Generate encodings of the encoding and the randomized generator *) -let test_generator ?(iterations=50) ty encoding generator = +let test_generator ?(iterations = 50) ty encoding generator = for _ = 0 to iterations - 1 do let value = generator () in Success.json ty encoding value () ; Success.bson ty encoding value () ; Success.binary ty encoding value () ; - Success.stream ty encoding value () ; + Success.stream ty encoding value () done let rec make_int_list acc len () = - if len = 0 then - acc - else - make_int_list (Random.int64 Int64.max_int :: acc) (len - 1) () + if len = 0 then acc + else make_int_list (Random.int64 Int64.max_int :: acc) (len - 1) () let test_randomized_int_list () = - test_generator - Alcotest.(list int64) - (list int64) - (make_int_list [] 100) + test_generator Alcotest.(list int64) (list int64) (make_int_list [] 100) let test_randomized_string_list () = test_generator @@ -60,16 +55,15 @@ let test_randomized_variant_list () = Alcotest.(list (result (option string) string)) (list (result (option string) (obj1 (req "failure" string)))) (fun () -> - List.map - (fun x -> - let str = Int64.to_string x in - if Random.bool () - then if Random.bool () then Ok (Some str) else Ok None - else Error str) - (make_int_list [] 20 ())) + List.map + (fun x -> + let str = Int64.to_string x in + if Random.bool () then + if Random.bool () then Ok (Some str) else Ok None + else Error str) + (make_int_list [] 20 ())) -let tests = [ - "int_list", `Quick, test_randomized_int_list ; - "string_list", `Quick, test_randomized_string_list ; - "variant_list", `Quick, test_randomized_variant_list ; -] +let tests = + [ ("int_list", `Quick, test_randomized_int_list); + ("string_list", `Quick, test_randomized_string_list); + ("variant_list", `Quick, test_randomized_variant_list) ] diff --git a/src/lib_data_encoding/test/read_failure.ml b/src/lib_data_encoding/test/read_failure.ml index 4d3bfb9890ffbbac94cd78efcd7768f3b97a814d..1fcdc23d42f58ba73c3139f7326be623b960eb36 100644 --- a/src/lib_data_encoding/test/read_failure.ml +++ b/src/lib_data_encoding/test/read_failure.ml @@ -30,54 +30,64 @@ open Helpers open Types let not_enough_data = function - | Binary.Read_error 
Not_enough_data -> true - | _ -> false + | Binary.Read_error Not_enough_data -> + true + | _ -> + false -let extra_bytes = function - | Binary.Read_error Extra_bytes -> true - | _ -> false +let extra_bytes = function Binary.Read_error Extra_bytes -> true | _ -> false let trailing_zero = function - | Binary.Read_error Trailing_zero -> true - | _ -> false + | Binary.Read_error Trailing_zero -> + true + | _ -> + false let invalid_int = function - | Binary.Read_error (Invalid_int _) -> true - | Json_encoding.Cannot_destruct ([] , Failure _) -> true - | _ -> false + | Binary.Read_error (Invalid_int _) -> + true + | Json_encoding.Cannot_destruct ([], Failure _) -> + true + | _ -> + false let invalid_string_length = function | Json_encoding.Cannot_destruct - ([], Json_encoding.Unexpected ("string (len 9)", "string (len 4)")) -> true + ([], Json_encoding.Unexpected ("string (len 9)", "string (len 4)")) -> + true | Json_encoding.Cannot_destruct - ([], Json_encoding.Unexpected ("bytes (len 9)", "bytes (len 4)")) -> true - | Binary.Read_error Extra_bytes -> true - | _ -> false + ([], Json_encoding.Unexpected ("bytes (len 9)", "bytes (len 4)")) -> + true + | Binary.Read_error Extra_bytes -> + true + | _ -> + false let missing_case = function - | Json_encoding.Cannot_destruct ([], Json_encoding.No_case_matched _ ) -> true - | Binary.Read_error (Unexpected_tag _) -> true - | _ -> false + | Json_encoding.Cannot_destruct ([], Json_encoding.No_case_matched _) -> + true + | Binary.Read_error (Unexpected_tag _) -> + true + | _ -> + false let missing_enum = function - | Json_encoding.Cannot_destruct ([], Json_encoding.Unexpected _ ) -> true - | Binary.Read_error No_case_matched -> true - | _ -> false + | Json_encoding.Cannot_destruct ([], Json_encoding.Unexpected _) -> + true + | Binary.Read_error No_case_matched -> + true + | _ -> + false let json ?(expected = fun _ -> true) read_encoding json () = - check_raises expected begin fun () -> - ignore (Json.destruct read_encoding json) - end + check_raises expected (fun () -> ignore (Json.destruct read_encoding json)) let bson ?(expected = fun _ -> true) read_encoding bson () = - check_raises expected begin fun () -> - ignore (Bson.destruct read_encoding bson) - end + check_raises expected (fun () -> ignore (Bson.destruct read_encoding bson)) let binary ?(expected = fun _ -> true) read_encoding bytes () = - check_raises expected begin fun () -> - ignore (Binary.of_bytes_exn read_encoding bytes) ; - end + check_raises expected (fun () -> + ignore (Binary.of_bytes_exn read_encoding bytes)) let stream ?(expected = fun _ -> true) read_encoding bytes () = let len_data = MBytes.length bytes in @@ -94,136 +104,203 @@ let stream ?(expected = fun _ -> true) read_encoding bytes () = Alcotest.failf "@[<v 2>%s failed: read error@ %a@]" name - Binary.pp_read_error error + Binary.pp_read_error + error done -let minimal_stream ?(expected = fun _ -> true) expected_read read_encoding bytes () = +let minimal_stream ?(expected = fun _ -> true) expected_read read_encoding + bytes () = let name = "minimal_stream" in match streamed_read read_encoding bytes with - | Binary.Success _, _ -> + | (Binary.Success _, _) -> Alcotest.failf "%s failed: expecting exception, got success." 
name - | Binary.Await _, _ -> + | (Binary.Await _, _) -> Alcotest.failf "%s failed: not enough data" name - | Binary.Error error, count when expected (Binary.Read_error error) && count = expected_read -> + | (Binary.Error error, count) + when expected (Binary.Read_error error) && count = expected_read -> () - | Binary.Error error, count -> + | (Binary.Error error, count) -> Alcotest.failf "@[<v 2>%s failed: read error after reading %d. @ %a@]" - name count - Binary.pp_read_error error - + name + count + Binary.pp_read_error + error let all ?expected name write_encoding read_encoding value = let json_value = Json.construct write_encoding value in let bson_value = Bson.construct write_encoding value in let bytes_value = Binary.to_bytes_exn write_encoding value in - [ name ^ ".json", `Quick, json ?expected read_encoding json_value ; - name ^ ".bson", `Quick, bson ?expected read_encoding bson_value ; - name ^ ".bytes", `Quick, binary ?expected read_encoding bytes_value ; - name ^ ".stream", `Quick, stream ?expected read_encoding bytes_value ; - ] + [ (name ^ ".json", `Quick, json ?expected read_encoding json_value); + (name ^ ".bson", `Quick, bson ?expected read_encoding bson_value); + (name ^ ".bytes", `Quick, binary ?expected read_encoding bytes_value); + (name ^ ".stream", `Quick, stream ?expected read_encoding bytes_value) ] let all_ranged_int minimum maximum = let encoding = ranged_int minimum maximum in let signed = match Binary_size.range_to_size ~minimum ~maximum with - | `Int31 | `Int8 | `Int16 -> true - | `Uint8 | `Uint16 | `Uint30 -> false in + | `Int31 | `Int8 | `Int16 -> + true + | `Uint8 | `Uint16 | `Uint30 -> + false + in let write_encoding = splitted ~json:(ranged_int (minimum - 1) (maximum + 1)) ~binary: - (if signed then - (ranged_int (minimum - 1) (maximum + 1)) - else - ranged_int minimum (maximum + 1)) in + ( if signed then ranged_int (minimum - 1) (maximum + 1) + else ranged_int minimum (maximum + 1) ) + in let name = Format.asprintf "ranged_int.%d" minimum in - all ~expected:invalid_int (name ^ ".max") write_encoding encoding (maximum + 1) @ + all + ~expected:invalid_int + (name ^ ".max") + write_encoding + encoding + (maximum + 1) + @ if signed then - all ~expected:invalid_int (name ^ ".min") write_encoding encoding (minimum - 1) + all + ~expected:invalid_int + (name ^ ".min") + write_encoding + encoding + (minimum - 1) else let json_value = Json.construct write_encoding (minimum - 1) in let bson_value = Bson.construct write_encoding (minimum - 1) in - [ name ^ "min.json", `Quick, json ~expected:invalid_int encoding json_value ; - name ^ "min..bson", `Quick, bson ~expected:invalid_int encoding bson_value ] + [ ( name ^ "min.json", + `Quick, + json ~expected:invalid_int encoding json_value ); + ( name ^ "min..bson", + `Quick, + bson ~expected:invalid_int encoding bson_value ) ] let all_ranged_float minimum maximum = let encoding = ranged_float minimum maximum in let name = Format.asprintf "ranged_float.%f" minimum in - all (name ^ ".min") float encoding (minimum -. 1.) @ - all (name ^ ".max") float encoding (maximum +. 1.) + all (name ^ ".min") float encoding (minimum -. 1.) + @ all (name ^ ".max") float encoding (maximum +. 1.) 
let test_bounded_string_list = let expected = function - | Binary_error.Read_error Size_limit_exceeded -> true - | _ -> false in + | Binary_error.Read_error Size_limit_exceeded -> + true + | _ -> + false + in let test name ~total ~elements v expected_read expected_read' = let bytes = Binary.to_bytes_exn (Variable.list string) v in let vbytes = Binary.to_bytes_exn (list string) v in - [ "bounded_string_list." ^ name, `Quick, - binary ~expected (bounded_list ~total ~elements string) bytes ; - "bounded_string_list_stream." ^ name, `Quick, - stream ~expected - (dynamic_size (bounded_list ~total:total ~elements string)) vbytes ; - "bounded_string_list_minimal_stream." ^ name, `Quick, - minimal_stream ~expected expected_read - (dynamic_size (bounded_list ~total:total ~elements string)) vbytes ; - "bounded_string_list_minimal_stream." ^ name, `Quick, - minimal_stream ~expected expected_read' - (check_size (total + 4) - (dynamic_size (Variable.list (check_size elements string)))) vbytes ; - - ] in - test "a" ~total:0 ~elements:0 [""] 4 4 @ - test "b1" ~total:3 ~elements:4 [""] 4 4 @ - test "b2" ~total:4 ~elements:3 [""] 4 4 @ - test "c1" ~total:19 ~elements:4 ["";"";"";"";""] 4 4 @ - test "c2" ~total:20 ~elements:3 ["";"";"";"";""] 4 4 @ - test "d1" ~total:20 ~elements:5 ["";"";"";"";"a"] 4 4 @ - test "d2" ~total:21 ~elements:4 ["";"";"";"";"a"] 24 24 @ - test "e" ~total:30 ~elements:10 ["ab";"c";"def";"gh";"ijk"] 4 4 + [ ( "bounded_string_list." ^ name, + `Quick, + binary ~expected (bounded_list ~total ~elements string) bytes ); + ( "bounded_string_list_stream." ^ name, + `Quick, + stream + ~expected + (dynamic_size (bounded_list ~total ~elements string)) + vbytes ); + ( "bounded_string_list_minimal_stream." ^ name, + `Quick, + minimal_stream + ~expected + expected_read + (dynamic_size (bounded_list ~total ~elements string)) + vbytes ); + ( "bounded_string_list_minimal_stream." ^ name, + `Quick, + minimal_stream + ~expected + expected_read' + (check_size + (total + 4) + (dynamic_size (Variable.list (check_size elements string)))) + vbytes ) ] + in + test "a" ~total:0 ~elements:0 [""] 4 4 + @ test "b1" ~total:3 ~elements:4 [""] 4 4 + @ test "b2" ~total:4 ~elements:3 [""] 4 4 + @ test "c1" ~total:19 ~elements:4 [""; ""; ""; ""; ""] 4 4 + @ test "c2" ~total:20 ~elements:3 [""; ""; ""; ""; ""] 4 4 + @ test "d1" ~total:20 ~elements:5 [""; ""; ""; ""; "a"] 4 4 + @ test "d2" ~total:21 ~elements:4 [""; ""; ""; ""; "a"] 24 24 + @ test "e" ~total:30 ~elements:10 ["ab"; "c"; "def"; "gh"; "ijk"] 4 4 let tests = - all_ranged_int 100 400 @ - all_ranged_int 19000 19253 @ - all_ranged_int ~-100 300 @ - all_ranged_int ~-300_000_000 300_000_000 @ - all_ranged_float ~-. 100. 300. 
@ - all "string.fixed" ~expected:invalid_string_length - string (Fixed.string 4) "turlututu" @ - all "string.bounded" string (Bounded.string 4) "turlututu" @ - all "bytes.fixed" ~expected:invalid_string_length - bytes (Fixed.bytes 4) (MBytes.of_string "turlututu") @ - all "bytes.bounded" bytes (Bounded.bytes 4) (MBytes.of_string "turlututu") @ - all "unknown_case.B" ~expected:missing_case union_enc mini_union_enc (B "2") @ - all "unknown_case.E" ~expected:missing_case union_enc mini_union_enc E @ - all "enum.missing" ~expected:missing_enum enum_enc mini_enum_enc 4 @ - test_bounded_string_list @ - [ "n.truncated", `Quick, - binary ~expected:not_enough_data n (MBytes.of_string "\x83") ; - "n.trailing_zero", `Quick, - binary ~expected:trailing_zero n (MBytes.of_string "\x83\x00") ; - "n.trailing_zero2", `Quick, - binary ~expected:trailing_zero n (MBytes.of_string "\x83\x00") ; - "z.truncated", `Quick, - binary ~expected:not_enough_data z (MBytes.of_string "\x83") ; - "z.trailing_zero", `Quick, - binary ~expected:trailing_zero z (MBytes.of_string "\x83\x00") ; - "z.trailing_zero2", `Quick, - binary ~expected:trailing_zero z (MBytes.of_string "\x83\x80\x00") ; - "dynamic_size.empty", `Quick, - binary ~expected:not_enough_data (dynamic_size Variable.string) - (MBytes.of_string "") ; - "dynamic_size.partial_size", `Quick, - binary ~expected:not_enough_data (dynamic_size Variable.string) - (MBytes.of_string "\x00\x00") ; - "dynamic_size.incomplete_data", `Quick, - binary ~expected:not_enough_data (dynamic_size Variable.string) - (MBytes.of_string "\x00\x00\x00\x04\x00\x00") ; - "dynamic_size.outer-garbage", `Quick, - binary ~expected:extra_bytes (dynamic_size Variable.string) - (MBytes.of_string "\x00\x00\x00\x01\x00\x00") ; - "dynamic_size.inner-garbage", `Quick, - binary ~expected:extra_bytes (dynamic_size uint8) - (MBytes.of_string "\x00\x00\x00\x02\x00\x00") ; - ] + all_ranged_int 100 400 @ all_ranged_int 19000 19253 + @ all_ranged_int ~-100 300 + @ all_ranged_int ~-300_000_000 300_000_000 + @ all_ranged_float ~-.100. 300. 
+ @ all + "string.fixed" + ~expected:invalid_string_length + string + (Fixed.string 4) + "turlututu" + @ all "string.bounded" string (Bounded.string 4) "turlututu" + @ all + "bytes.fixed" + ~expected:invalid_string_length + bytes + (Fixed.bytes 4) + (MBytes.of_string "turlututu") + @ all "bytes.bounded" bytes (Bounded.bytes 4) (MBytes.of_string "turlututu") + @ all + "unknown_case.B" + ~expected:missing_case + union_enc + mini_union_enc + (B "2") + @ all "unknown_case.E" ~expected:missing_case union_enc mini_union_enc E + @ all "enum.missing" ~expected:missing_enum enum_enc mini_enum_enc 4 + @ test_bounded_string_list + @ [ ( "n.truncated", + `Quick, + binary ~expected:not_enough_data n (MBytes.of_string "\x83") ); + ( "n.trailing_zero", + `Quick, + binary ~expected:trailing_zero n (MBytes.of_string "\x83\x00") ); + ( "n.trailing_zero2", + `Quick, + binary ~expected:trailing_zero n (MBytes.of_string "\x83\x00") ); + ( "z.truncated", + `Quick, + binary ~expected:not_enough_data z (MBytes.of_string "\x83") ); + ( "z.trailing_zero", + `Quick, + binary ~expected:trailing_zero z (MBytes.of_string "\x83\x00") ); + ( "z.trailing_zero2", + `Quick, + binary ~expected:trailing_zero z (MBytes.of_string "\x83\x80\x00") ); + ( "dynamic_size.empty", + `Quick, + binary + ~expected:not_enough_data + (dynamic_size Variable.string) + (MBytes.of_string "") ); + ( "dynamic_size.partial_size", + `Quick, + binary + ~expected:not_enough_data + (dynamic_size Variable.string) + (MBytes.of_string "\x00\x00") ); + ( "dynamic_size.incomplete_data", + `Quick, + binary + ~expected:not_enough_data + (dynamic_size Variable.string) + (MBytes.of_string "\x00\x00\x00\x04\x00\x00") ); + ( "dynamic_size.outer-garbage", + `Quick, + binary + ~expected:extra_bytes + (dynamic_size Variable.string) + (MBytes.of_string "\x00\x00\x00\x01\x00\x00") ); + ( "dynamic_size.inner-garbage", + `Quick, + binary + ~expected:extra_bytes + (dynamic_size uint8) + (MBytes.of_string "\x00\x00\x00\x02\x00\x00") ) ] diff --git a/src/lib_data_encoding/test/success.ml b/src/lib_data_encoding/test/success.ml index aeeae27565136be24743c495b1b9900dd82a896a..cae9cfb7a9a3de327c46de789eecb162e747760c 100644 --- a/src/lib_data_encoding/test/success.ml +++ b/src/lib_data_encoding/test/success.ml @@ -37,246 +37,292 @@ open Types open Utils.Infix let json ty encoding value () = - no_exception begin fun () -> - let json = Json.construct encoding value in - let result = Json.destruct encoding json in - Alcotest.check ty "json" value result - end + no_exception (fun () -> + let json = Json.construct encoding value in + let result = Json.destruct encoding json in + Alcotest.check ty "json" value result) let bson ty encoding value () = - no_exception begin fun () -> - let json = Bson.construct encoding value in - let result = Bson.destruct encoding json in - Alcotest.check ty "bson" value result - end + no_exception (fun () -> + let json = Bson.construct encoding value in + let result = Bson.destruct encoding json in + Alcotest.check ty "bson" value result) let binary ty encoding value () = - no_exception begin fun () -> - let bytes = Binary.to_bytes_exn encoding value in - let result = Binary.of_bytes_exn encoding bytes in - Alcotest.check ty "binary" value result - end + no_exception (fun () -> + let bytes = Binary.to_bytes_exn encoding value in + let result = Binary.of_bytes_exn encoding bytes in + Alcotest.check ty "binary" value result) let stream ty encoding value () = - no_exception begin fun () -> - let bytes = Binary.to_bytes_exn encoding value in - let 
len_data = MBytes.length bytes in - for sz = 1 to max 1 len_data do - let name = Format.asprintf "stream (%d)" sz in - match chunked_read sz encoding bytes with - | Binary.Success { result ; size ; stream } -> - if size <> MBytes.length bytes || - not (Binary_stream.is_empty stream) then - Alcotest.failf "%s failed: remaining data" name ; - Alcotest.check ty name value result - | Binary.Await _ -> - Alcotest.failf "%s failed: not enough data" name - | Binary.Error error -> - Alcotest.failf - "@[<v 2>%s failed: read error@ %a@]" - name - Binary.pp_read_error error - done ; - end + no_exception (fun () -> + let bytes = Binary.to_bytes_exn encoding value in + let len_data = MBytes.length bytes in + for sz = 1 to max 1 len_data do + let name = Format.asprintf "stream (%d)" sz in + match chunked_read sz encoding bytes with + | Binary.Success {result; size; stream} -> + if + size <> MBytes.length bytes + || not (Binary_stream.is_empty stream) + then Alcotest.failf "%s failed: remaining data" name ; + Alcotest.check ty name value result + | Binary.Await _ -> + Alcotest.failf "%s failed: not enough data" name + | Binary.Error error -> + Alcotest.failf + "@[<v 2>%s failed: read error@ %a@]" + name + Binary.pp_read_error + error + done) let all name ty encoding value = let stream_encoding = match Data_encoding.classify encoding with - | `Variable -> dynamic_size encoding - | `Dynamic | `Fixed _ -> encoding in - [ name ^ ".json", `Quick, json ty encoding value ; - name ^ ".bson", `Quick, bson ty encoding value ; - name ^ ".binary", `Quick, binary ty encoding value ; - name ^ ".binary_stream", `Quick, stream ty stream_encoding value ] + | `Variable -> + dynamic_size encoding + | `Dynamic | `Fixed _ -> + encoding + in + [ (name ^ ".json", `Quick, json ty encoding value); + (name ^ ".bson", `Quick, bson ty encoding value); + (name ^ ".binary", `Quick, binary ty encoding value); + (name ^ ".binary_stream", `Quick, stream ty stream_encoding value) ] let all_int encoding size = let name = Format.asprintf "int%d" size in - all (name ^ ".min") Alcotest.int encoding ~- (1 lsl (size - 1)) @ - all (name ^ ".mean") Alcotest.int encoding 0 @ - all (name ^ ".max") Alcotest.int encoding ((1 lsl (size - 1)) - 1) + all (name ^ ".min") Alcotest.int encoding ~-(1 lsl (size - 1)) + @ all (name ^ ".mean") Alcotest.int encoding 0 + @ all (name ^ ".max") Alcotest.int encoding ((1 lsl (size - 1)) - 1) let all_uint encoding size = let name = Format.asprintf "uint%d" size in - all (name ^ ".min") Alcotest.int encoding 0 @ - all (name ^ ".mean") Alcotest.int encoding (1 lsl (size - 1)) @ - all (name ^ ".max") Alcotest.int encoding ((1 lsl size) - 1) + all (name ^ ".min") Alcotest.int encoding 0 + @ all (name ^ ".mean") Alcotest.int encoding (1 lsl (size - 1)) + @ all (name ^ ".max") Alcotest.int encoding ((1 lsl size) - 1) let all_ranged_int minimum maximum = let encoding = ranged_int minimum maximum in let name = Format.asprintf "ranged_int.%d" minimum in - all (name ^ ".min") Alcotest.int encoding minimum @ - all (name ^ ".mean") Alcotest.int encoding ((minimum + maximum) / 2) @ - all (name ^ ".max") Alcotest.int encoding maximum + all (name ^ ".min") Alcotest.int encoding minimum + @ all (name ^ ".mean") Alcotest.int encoding ((minimum + maximum) / 2) + @ all (name ^ ".max") Alcotest.int encoding maximum let all_ranged_float minimum maximum = let encoding = ranged_float minimum maximum in let name = Format.asprintf "ranged_float.%f" minimum in - all (name ^ ".min") Alcotest.float encoding minimum @ - all (name ^ ".mean") 
Alcotest.float encoding ((minimum +. maximum) /. 2.) @ - all (name ^ ".max") Alcotest.float encoding maximum + all (name ^ ".min") Alcotest.float encoding minimum + @ all (name ^ ".mean") Alcotest.float encoding ((minimum +. maximum) /. 2.) + @ all (name ^ ".max") Alcotest.float encoding maximum let test_n_sequence () = - let test i = - binary Alcotest.z z i () ; - stream Alcotest.z z i () in - for i = 0 to 10_000 do test (Z.of_int i) done ; - for i = 100_000_000 to 100_010_000 do test (Z.of_int i) done + let test i = binary Alcotest.z z i () ; stream Alcotest.z z i () in + for i = 0 to 10_000 do + test (Z.of_int i) + done ; + for i = 100_000_000 to 100_010_000 do + test (Z.of_int i) + done let test_z_sequence () = - let test i = - binary Alcotest.z z i () ; - stream Alcotest.z z i () in - for i = -10_000 to 10_000 do test (Z.of_int i) done ; - for i = 100_000_000 to 100_010_000 do test (Z.of_int i) done ; - for i = -100_000_000 downto -100_010_000 do test (Z.of_int i) done + let test i = binary Alcotest.z z i () ; stream Alcotest.z z i () in + for i = -10_000 to 10_000 do + test (Z.of_int i) + done ; + for i = 100_000_000 to 100_010_000 do + test (Z.of_int i) + done ; + for i = -100_000_000 downto -100_010_000 do + test (Z.of_int i) + done let test_string_enum_boundary () = - let entries = List.rev_map (fun x -> string_of_int x, x) (0 -- 254) in + let entries = List.rev_map (fun x -> (string_of_int x, x)) (0 -- 254) in let run_test cases = - List.iter (fun (_, num) -> + List.iter + (fun (_, num) -> let enc = string_enum cases in json Alcotest.int enc num () ; bson Alcotest.int enc num () ; binary Alcotest.int enc num () ; stream Alcotest.int enc num ()) - cases in + cases + in run_test entries ; - let entries2 = (("255", 255) :: entries) in + let entries2 = ("255", 255) :: entries in run_test entries2 ; run_test (("256", 256) :: entries2) let test_bounded_string_list = let test name ~total ~elements v = - "bounded_string_list." ^ name, `Quick, - binary Alcotest.(list string) - (bounded_list ~total ~elements string) v in - [ test "a" ~total:0 ~elements:0 [] ; - test "b" ~total:4 ~elements:4 [""] ; - test "c" ~total:20 ~elements:4 ["";"";"";"";""] ; - test "d" ~total:21 ~elements:5 ["";"";"";"";"a"] ; - test "e" ~total:31 ~elements:10 ["ab";"c";"def";"gh";"ijk"] ; - ] + ( "bounded_string_list." 
^ name, + `Quick, + binary Alcotest.(list string) (bounded_list ~total ~elements string) v ) + in + [ test "a" ~total:0 ~elements:0 []; + test "b" ~total:4 ~elements:4 [""]; + test "c" ~total:20 ~elements:4 [""; ""; ""; ""; ""]; + test "d" ~total:21 ~elements:5 [""; ""; ""; ""; "a"]; + test "e" ~total:31 ~elements:10 ["ab"; "c"; "def"; "gh"; "ijk"] ] let tests = - all "null" Alcotest.pass null () @ - all "empty" Alcotest.pass empty () @ - all "constant" Alcotest.pass (constant "toto") () @ - all_int int8 8 @ - all_uint uint8 8 @ - all_int int16 16 @ - all_uint uint16 16 @ - all_int int31 31 @ - all "int32.min" Alcotest.int32 int32 Int32.min_int @ - all "int32.max" Alcotest.int32 int32 Int32.max_int @ - all "int64.min" Alcotest.int64 int64 Int64.min_int @ - all "int64.max" Alcotest.int64 int64 Int64.max_int @ - all_ranged_int 100 400 @ - all_ranged_int 19000 19254 @ - all_ranged_int ~-100 300 @ - all_ranged_int ~-300_000_000 300_000_000 @ - all "bool.true" Alcotest.bool bool true @ - all "bool.false" Alcotest.bool bool false @ - all "string" Alcotest.string string "tutu" @ - all "string.fixed" Alcotest.string (Fixed.string 4) "tutu" @ - all "string.variable" Alcotest.string Variable.string "tutu" @ - all "string.bounded1" Alcotest.string (Bounded.string 4) "tu" @ - all "string.bounded2" Alcotest.string (Bounded.string 4) "tutu" @ - all "bytes" Alcotest.bytes bytes (MBytes.of_string "titi") @ - all "bytes.fixed" Alcotest.bytes (Fixed.bytes 4) - (MBytes.of_string "titi") @ - all "bytes.variable" Alcotest.bytes Variable.bytes - (MBytes.of_string "titi") @ - all "bytes.bounded1" Alcotest.bytes (Bounded.bytes 4) (MBytes.of_string "tu") @ - all "bytes.bounded2" Alcotest.bytes (Bounded.bytes 4) (MBytes.of_string "tutu") @ - all "float" Alcotest.float float 42. @ - all "float.max" Alcotest.float float max_float @ - all "float.min" Alcotest.float float min_float @ - all "float.neg_zero" Alcotest.float float (-. 0.) @ - all "float.zero" Alcotest.float float (+. 0.) @ - all "float.infinity" Alcotest.float float infinity @ - all "float.neg_infity" Alcotest.float float neg_infinity @ - all "float.epsilon" Alcotest.float float epsilon_float @ - all "float.nan" Alcotest.float float nan @ - all_ranged_float ~-. 100. 300. 
@ - all "n.zero" Alcotest.n n (Z.zero) @ - all "n.one" Alcotest.n n (Z.one) @ - [ "n.sequence", `Quick, test_n_sequence ] @ + all "null" Alcotest.pass null () + @ all "empty" Alcotest.pass empty () + @ all "constant" Alcotest.pass (constant "toto") () + @ all_int int8 8 @ all_uint uint8 8 @ all_int int16 16 @ all_uint uint16 16 + @ all_int int31 31 + @ all "int32.min" Alcotest.int32 int32 Int32.min_int + @ all "int32.max" Alcotest.int32 int32 Int32.max_int + @ all "int64.min" Alcotest.int64 int64 Int64.min_int + @ all "int64.max" Alcotest.int64 int64 Int64.max_int + @ all_ranged_int 100 400 @ all_ranged_int 19000 19254 + @ all_ranged_int ~-100 300 + @ all_ranged_int ~-300_000_000 300_000_000 + @ all "bool.true" Alcotest.bool bool true + @ all "bool.false" Alcotest.bool bool false + @ all "string" Alcotest.string string "tutu" + @ all "string.fixed" Alcotest.string (Fixed.string 4) "tutu" + @ all "string.variable" Alcotest.string Variable.string "tutu" + @ all "string.bounded1" Alcotest.string (Bounded.string 4) "tu" + @ all "string.bounded2" Alcotest.string (Bounded.string 4) "tutu" + @ all "bytes" Alcotest.bytes bytes (MBytes.of_string "titi") + @ all "bytes.fixed" Alcotest.bytes (Fixed.bytes 4) (MBytes.of_string "titi") + @ all + "bytes.variable" + Alcotest.bytes + Variable.bytes + (MBytes.of_string "titi") + @ all + "bytes.bounded1" + Alcotest.bytes + (Bounded.bytes 4) + (MBytes.of_string "tu") + @ all + "bytes.bounded2" + Alcotest.bytes + (Bounded.bytes 4) + (MBytes.of_string "tutu") + @ all "float" Alcotest.float float 42. + @ all "float.max" Alcotest.float float max_float + @ all "float.min" Alcotest.float float min_float + @ all "float.neg_zero" Alcotest.float float (-0.) + @ all "float.zero" Alcotest.float float 0. + @ all "float.infinity" Alcotest.float float infinity + @ all "float.neg_infity" Alcotest.float float neg_infinity + @ all "float.epsilon" Alcotest.float float epsilon_float + @ all "float.nan" Alcotest.float float nan + @ all_ranged_float ~-.100. 300. 
+ @ all "n.zero" Alcotest.n n Z.zero + @ all "n.one" Alcotest.n n Z.one + @ [("n.sequence", `Quick, test_n_sequence)] + @ let rec fact i l = - if i < 1 then - [] + if i < 1 then [] else let l = Z.mul l (Z.of_int i) in - fact (i - 1) l @ - all (Format.asprintf "n.fact.%d" i) Alcotest.n n l in - fact 35 Z.one @ - all "n.a" Alcotest.n n - (Z.of_string "123574503164821730218493275982143254986574985328") @ - all "n.b" Alcotest.n n - (Z.of_string "8493275982143254986574985328") @ - all "n.c" Alcotest.n n - (Z.of_string "123574503164821730218474985328") @ - all "n.d" Alcotest.n n - (Z.of_string "10000000000100000000001000003050000000060600000000000777000008") @ - all "z.zero" Alcotest.z z (Z.zero) @ - all "z.one" Alcotest.z z (Z.one) @ - [ "z.sequence", `Quick, test_z_sequence ] @ + fact (i - 1) l @ all (Format.asprintf "n.fact.%d" i) Alcotest.n n l + in + fact 35 Z.one + @ all + "n.a" + Alcotest.n + n + (Z.of_string "123574503164821730218493275982143254986574985328") + @ all "n.b" Alcotest.n n (Z.of_string "8493275982143254986574985328") + @ all "n.c" Alcotest.n n (Z.of_string "123574503164821730218474985328") + @ all + "n.d" + Alcotest.n + n + (Z.of_string + "10000000000100000000001000003050000000060600000000000777000008") + @ all "z.zero" Alcotest.z z Z.zero + @ all "z.one" Alcotest.z z Z.one + @ [("z.sequence", `Quick, test_z_sequence)] + @ let rec fact n l = - if n < 1 then - [] + if n < 1 then [] else let l = Z.mul l (Z.of_int n) in - fact (n - 1) l @ - all (Format.asprintf "z.fact.%d" n) Alcotest.z z l in - fact 35 Z.one @ - all "z.a" Alcotest.z z - (Z.of_string "123574503164821730218493275982143254986574985328") @ - all "z.b" Alcotest.z z - (Z.of_string "8493275982143254986574985328") @ - all "z.c" Alcotest.z z - (Z.of_string "123574503164821730218474985328") @ - all "z.d" Alcotest.z z - (Z.of_string "10000000000100000000001000003050000000060600000000000777000008") @ - all "z.e" Alcotest.z z - (Z.of_string "-123574503164821730218493275982143254986574985328") @ - all "z.f" Alcotest.z z - (Z.of_string "-8493275982143254986574985328") @ - all "z.g" Alcotest.z z - (Z.of_string "-123574503164821730218474985328") @ - all "z.h" Alcotest.z z - (Z.of_string "-10000000000100000000001000003050000000060600000000000777000008") @ - all "none" Alcotest.(option string) (option string) None @ - all "some.string" Alcotest.(option string) (option string) - (Some "thing") @ - all "enum" Alcotest.int enum_enc 4 @ - all "obj" Alcotest.record record_obj_enc default_record @ - all "obj.dft" Alcotest.record record_obj_enc - { default_record with b = false } @ - all "obj.req" Alcotest.record record_obj_enc - { default_record with c = None } @ - all "tup" Alcotest.record record_tup_enc default_record @ - all "obj.variable" Alcotest.variable_record variable_record_obj_enc - default_variable_record @ - all "tup.variable" Alcotest.variable_record variable_record_tup_enc - default_variable_record @ - all "obj.variable_left" Alcotest.variable_left_record variable_left_record_obj_enc - default_variable_left_record @ - all "tup.variable_left" Alcotest.variable_left_record variable_left_record_tup_enc - default_variable_left_record @ - all "union.A" Alcotest.union union_enc (A 1) @ - all "union.B" Alcotest.union union_enc (B "2") @ - all "union.C" Alcotest.union union_enc (C 3) @ - all "union.D" Alcotest.union union_enc (D "4") @ - all "union.E" Alcotest.union union_enc E @ - all "variable_list.empty" Alcotest.(list int) (Variable.list int31) [] @ - all "variable_list" Alcotest.(list int) (Variable.list int31) [1;2;3;4;5] 
@ - all "variable_array.empty" Alcotest.(array int) (Variable.array int31) [||] @ - all "variable_array" Alcotest.(array int) (Variable.array int31) [|1;2;3;4;5|] @ - all "list.empty" Alcotest.(list int) (list int31) [] @ - all "list" Alcotest.(list int) (list int31) [1;2;3;4;5] @ - all "array.empty" Alcotest.(array int) (array int31) [||] @ - all "array" Alcotest.(array int) (array int31) [|1;2;3;4;5|] @ - all "mu_list.empty" Alcotest.(list int) (mu_list_enc int31) [] @ - all "mu_list" Alcotest.(list int) (mu_list_enc int31) [1;2;3;4;5] @ - test_bounded_string_list @ - [ "string_enum_boundary", `Quick, test_string_enum_boundary ; - ] + fact (n - 1) l @ all (Format.asprintf "z.fact.%d" n) Alcotest.z z l + in + fact 35 Z.one + @ all + "z.a" + Alcotest.z + z + (Z.of_string "123574503164821730218493275982143254986574985328") + @ all "z.b" Alcotest.z z (Z.of_string "8493275982143254986574985328") + @ all "z.c" Alcotest.z z (Z.of_string "123574503164821730218474985328") + @ all + "z.d" + Alcotest.z + z + (Z.of_string + "10000000000100000000001000003050000000060600000000000777000008") + @ all + "z.e" + Alcotest.z + z + (Z.of_string "-123574503164821730218493275982143254986574985328") + @ all "z.f" Alcotest.z z (Z.of_string "-8493275982143254986574985328") + @ all "z.g" Alcotest.z z (Z.of_string "-123574503164821730218474985328") + @ all + "z.h" + Alcotest.z + z + (Z.of_string + "-10000000000100000000001000003050000000060600000000000777000008") + @ all "none" Alcotest.(option string) (option string) None + @ all "some.string" Alcotest.(option string) (option string) (Some "thing") + @ all "enum" Alcotest.int enum_enc 4 + @ all "obj" Alcotest.record record_obj_enc default_record + @ all "obj.dft" Alcotest.record record_obj_enc {default_record with b = false} + @ all "obj.req" Alcotest.record record_obj_enc {default_record with c = None} + @ all "tup" Alcotest.record record_tup_enc default_record + @ all + "obj.variable" + Alcotest.variable_record + variable_record_obj_enc + default_variable_record + @ all + "tup.variable" + Alcotest.variable_record + variable_record_tup_enc + default_variable_record + @ all + "obj.variable_left" + Alcotest.variable_left_record + variable_left_record_obj_enc + default_variable_left_record + @ all + "tup.variable_left" + Alcotest.variable_left_record + variable_left_record_tup_enc + default_variable_left_record + @ all "union.A" Alcotest.union union_enc (A 1) + @ all "union.B" Alcotest.union union_enc (B "2") + @ all "union.C" Alcotest.union union_enc (C 3) + @ all "union.D" Alcotest.union union_enc (D "4") + @ all "union.E" Alcotest.union union_enc E + @ all "variable_list.empty" Alcotest.(list int) (Variable.list int31) [] + @ all + "variable_list" + Alcotest.(list int) + (Variable.list int31) + [1; 2; 3; 4; 5] + @ all "variable_array.empty" Alcotest.(array int) (Variable.array int31) [||] + @ all + "variable_array" + Alcotest.(array int) + (Variable.array int31) + [|1; 2; 3; 4; 5|] + @ all "list.empty" Alcotest.(list int) (list int31) [] + @ all "list" Alcotest.(list int) (list int31) [1; 2; 3; 4; 5] + @ all "array.empty" Alcotest.(array int) (array int31) [||] + @ all "array" Alcotest.(array int) (array int31) [|1; 2; 3; 4; 5|] + @ all "mu_list.empty" Alcotest.(list int) (mu_list_enc int31) [] + @ all "mu_list" Alcotest.(list int) (mu_list_enc int31) [1; 2; 3; 4; 5] + @ test_bounded_string_list + @ [("string_enum_boundary", `Quick, test_string_enum_boundary)] diff --git a/src/lib_data_encoding/test/test.ml b/src/lib_data_encoding/test/test.ml index 
5c1bf75dd2b1fcd36bc23d55719f7f550677dce7..279d25381c444056ee20aa135b83da51d29d4663 100644 --- a/src/lib_data_encoding/test/test.ml +++ b/src/lib_data_encoding/test/test.ml @@ -25,11 +25,11 @@ let () = Random.init 100 ; - Alcotest.run "tezos-data-encoding" [ - "success", Success.tests ; - "invalid_encoding", Invalid_encoding.tests ; - "read_failure", Read_failure.tests ; - "write_failure", Write_failure.tests ; - "randomized", Randomized.tests ; - "versioned", Versioned.tests ; - ] + Alcotest.run + "tezos-data-encoding" + [ ("success", Success.tests); + ("invalid_encoding", Invalid_encoding.tests); + ("read_failure", Read_failure.tests); + ("write_failure", Write_failure.tests); + ("randomized", Randomized.tests); + ("versioned", Versioned.tests) ] diff --git a/src/lib_data_encoding/test/test_generated.ml b/src/lib_data_encoding/test/test_generated.ml index 3ba4886bfd187369a0702b0d1759a6468f9c06ac..b72372c57f7a55884c90f72077aca3a3532a0161 100644 --- a/src/lib_data_encoding/test/test_generated.ml +++ b/src/lib_data_encoding/test/test_generated.ml @@ -31,38 +31,37 @@ let char = Crowbar.map [Crowbar.uint8] Char.chr let string = Crowbar.bytes + (* The v0.1 of Crowbar doesn't have fixed-size string generation. When we * update Crowbar, we can improve this generator. *) let short_string = let open Crowbar in - choose [ - const ""; - map [char] (fun c -> String.make 1 c); - map [char; char; char; char] (fun c1 c2 c3 c4 -> - let s = Bytes.make 4 c1 in - Bytes.set s 1 c2; - Bytes.set s 2 c3; - Bytes.set s 3 c4; - Bytes.to_string s - ); - ] + choose + [ const ""; + map [char] (fun c -> String.make 1 c); + map [char; char; char; char] (fun c1 c2 c3 c4 -> + let s = Bytes.make 4 c1 in + Bytes.set s 1 c2 ; + Bytes.set s 2 c3 ; + Bytes.set s 3 c4 ; + Bytes.to_string s) ] + let short_string1 = let open Crowbar in - choose [ - map [char] (fun c -> String.make 1 c); - map [char; char; char; char] (fun c1 c2 c3 c4 -> - let s = Bytes.make 4 c1 in - Bytes.set s 1 c2; - Bytes.set s 2 c3; - Bytes.set s 3 c4; - Bytes.to_string s - ); - ] + choose + [ map [char] (fun c -> String.make 1 c); + map [char; char; char; char] (fun c1 c2 c3 c4 -> + let s = Bytes.make 4 c1 in + Bytes.set s 1 c2 ; + Bytes.set s 2 c3 ; + Bytes.set s 3 c4 ; + Bytes.to_string s) ] + let mbytes = Crowbar.map [Crowbar.bytes] MBytes.of_string -let short_mbytes = Crowbar.map [short_string] MBytes.of_string -let short_mbytes1 = Crowbar.map [short_string1] MBytes.of_string +let short_mbytes = Crowbar.map [short_string] MBytes.of_string +let short_mbytes1 = Crowbar.map [short_string1] MBytes.of_string (* We need to hide the type parameter of `Encoding.t` to avoid the generator * combinator `choose` from complaining about different types. 
We use first @@ -72,97 +71,146 @@ let short_mbytes1 = Crowbar.map [short_string1] MBytes.of_string module type TESTABLE = sig type t - val v: t - val ding: t Data_encoding.t - val pp: t Crowbar.printer + + val v : t + + val ding : t Data_encoding.t + + val pp : t Crowbar.printer end + type testable = (module TESTABLE) let null : testable = - (module struct + ( module struct type t = unit + let v = () + let ding = Data_encoding.null + let pp ppf () = Crowbar.pp ppf "(null)" - end) + end ) + let empty : testable = - (module struct + ( module struct type t = unit + let v = () + let ding = Data_encoding.empty + let pp ppf () = Crowbar.pp ppf "(empty)" - end) + end ) + let unit : testable = - (module struct + ( module struct type t = unit + let v = () + let ding = Data_encoding.unit + let pp ppf () = Crowbar.pp ppf "(unit)" - end) -let map_constant (s: string) : testable = - (module struct + end ) + +let map_constant (s : string) : testable = + ( module struct type t = unit + let v = () + let ding = Data_encoding.constant s + let pp ppf () = Crowbar.pp ppf "\"%s\"" s - end) -let map_int8 (i: int) : testable = - (module struct + end ) + +let map_int8 (i : int) : testable = + ( module struct type t = int + let v = i + let ding = Data_encoding.int8 + let pp = Crowbar.pp_int - end) -let map_uint8 (i: int) : testable = - (module struct + end ) + +let map_uint8 (i : int) : testable = + ( module struct type t = int + let v = i + let ding = Data_encoding.uint8 + let pp = Crowbar.pp_int - end) -let map_int16 (i: int) : testable = - (module struct + end ) + +let map_int16 (i : int) : testable = + ( module struct type t = int + let v = i + let ding = Data_encoding.int16 + let pp = Crowbar.pp_int - end) -let map_uint16 (i: int) : testable = - (module struct + end ) + +let map_uint16 (i : int) : testable = + ( module struct type t = int + let v = i + let ding = Data_encoding.uint16 + let pp = Crowbar.pp_int - end) -let map_int32 (i: int32) : testable = - (module struct + end ) + +let map_int32 (i : int32) : testable = + ( module struct type t = int32 + let v = i + let ding = Data_encoding.int32 + let pp = Crowbar.pp_int32 - end) -let map_int64 (i: int64) : testable = - (module struct + end ) + +let map_int64 (i : int64) : testable = + ( module struct type t = int64 + let v = i + let ding = Data_encoding.int64 + let pp = Crowbar.pp_int64 - end) + end ) + let map_range_int a b c : testable = let (small, middle, big) = match List.sort compare [a; b; c] with | [small; middle; big] -> - assert (small <= middle); - assert (middle <= big); + assert (small <= middle) ; + assert (middle <= big) ; (small, middle, big) - | _ -> assert false + | _ -> + assert false in - (module struct + ( module struct type t = int + let v = middle + let ding = Data_encoding.ranged_int small big + let pp ppf i = Crowbar.pp ppf "(%d :[%d;%d])" i small big - end) + end ) + let map_range_float a b c : testable = if compare a nan = 0 || compare b nan = 0 || compare c nan = 0 then (* copout *) @@ -171,292 +219,433 @@ let map_range_float a b c : testable = let (small, middle, big) = match List.sort compare [a; b; c] with | [small; middle; big] -> - assert (small <= middle); - assert (middle <= big); + assert (small <= middle) ; + assert (middle <= big) ; (small, middle, big) - | _ -> assert false + | _ -> + assert false in - (module struct + ( module struct type t = float + let v = middle + let ding = Data_encoding.ranged_float small big + let pp ppf i = Crowbar.pp ppf "(%f :[%f;%f])" i small big - end) + end ) + let map_bool b : testable = 
- (module struct + ( module struct type t = bool + let v = b + let ding = Data_encoding.bool + let pp = Crowbar.pp_bool - end) + end ) + let map_string s : testable = - (module struct + ( module struct type t = string + let v = s + let ding = Data_encoding.string + let pp = Crowbar.pp_string - end) + end ) + let map_bytes s : testable = - (module struct + ( module struct type t = MBytes.t + let v = s + let ding = Data_encoding.bytes + let pp ppf m = if MBytes.length m > 40 then - Crowbar.pp ppf "@[<hv 1>%a … (%d more bytes)@]" - MBytes.pp_hex (MBytes.sub m 1 30) + Crowbar.pp + ppf + "@[<hv 1>%a … (%d more bytes)@]" + MBytes.pp_hex + (MBytes.sub m 1 30) (MBytes.length m) - else - MBytes.pp_hex ppf m - end) + else MBytes.pp_hex ppf m + end ) + let map_float f : testable = - (module struct + ( module struct type t = float + let v = f + let ding = Data_encoding.float + let pp = Crowbar.pp_float - end) + end ) + let map_fixed_string s : testable = - (module struct + ( module struct type t = string + let v = s + let ding = Data_encoding.Fixed.string (String.length s) + let pp ppf s = Crowbar.pp ppf "\"%s\"" s - end) + end ) + let map_fixed_bytes s : testable = - (module struct + ( module struct type t = MBytes.t + let v = s + let ding = Data_encoding.Fixed.bytes (MBytes.length s) + let pp = MBytes.pp_hex - end) + end ) + let map_variable_string s : testable = - (module struct + ( module struct type t = string + let v = s + let ding = Data_encoding.Variable.string + let pp ppf s = Crowbar.pp ppf "\"%s\"" s - end) + end ) + let map_variable_bytes s : testable = - (module struct + ( module struct type t = MBytes.t + let v = s + let ding = Data_encoding.Variable.bytes + let pp = MBytes.pp_hex - end) + end ) (* And now combinators *) let dyn_if_not ding = match Data_encoding.classify ding with - | `Fixed _ | `Dynamic -> ding - | `Variable -> Data_encoding.dynamic_size ding + | `Fixed _ | `Dynamic -> + ding + | `Variable -> + Data_encoding.dynamic_size ding -let map_some (t: testable) : testable = +let map_some (t : testable) : testable = let module T = (val t) in - (module struct + ( module struct type t = T.t option + let v = Some T.v + let ding = - try - Data_encoding.option T.ding - with - | Invalid_argument _ -> - Crowbar.bad_test () + try Data_encoding.option T.ding + with Invalid_argument _ -> Crowbar.bad_test () + let pp ppf o = - Crowbar.pp ppf "@[<hv 1>%a@]" - (fun fmt v -> match v with - | None -> Format.fprintf fmt "None" - | Some v -> Format.fprintf fmt "Some(%a)" T.pp v - ) o - end) -let map_none (t: testable) : testable = + Crowbar.pp + ppf + "@[<hv 1>%a@]" + (fun fmt v -> + match v with + | None -> + Format.fprintf fmt "None" + | Some v -> + Format.fprintf fmt "Some(%a)" T.pp v) + o + end ) + +let map_none (t : testable) : testable = let module T = (val t) in - (module struct + ( module struct type t = T.t option + let v = None + let ding = - try - Data_encoding.option T.ding - with - | Invalid_argument _ -> - Crowbar.bad_test () + try Data_encoding.option T.ding + with Invalid_argument _ -> Crowbar.bad_test () + let pp ppf o = - Crowbar.pp ppf "@[<hv 1>%a@]" - (fun fmt v -> match v with - | None -> Format.fprintf fmt "None" - | Some v -> Format.fprintf fmt "Some(%a)" T.pp v - ) o - end) -let map_ok (t_o: testable) (t_e: testable) : testable = + Crowbar.pp + ppf + "@[<hv 1>%a@]" + (fun fmt v -> + match v with + | None -> + Format.fprintf fmt "None" + | Some v -> + Format.fprintf fmt "Some(%a)" T.pp v) + o + end ) + +let map_ok (t_o : testable) (t_e : testable) : testable = let module 
T_O = (val t_o) in let module T_E = (val t_e) in - (module struct + ( module struct type t = (T_O.t, T_E.t) result + let v = Ok T_O.v + let ding = Data_encoding.result T_O.ding T_E.ding + let pp ppf r = - Crowbar.pp ppf "@[<hv 1>%a@]" - (fun fmt r -> match r with - | Ok o -> Format.fprintf fmt "Ok(%a)" T_O.pp o - | Error e -> Format.fprintf fmt "Error(%a)" T_E.pp e - ) r - end) -let map_error (t_o: testable) (t_e: testable) : testable = + Crowbar.pp + ppf + "@[<hv 1>%a@]" + (fun fmt r -> + match r with + | Ok o -> + Format.fprintf fmt "Ok(%a)" T_O.pp o + | Error e -> + Format.fprintf fmt "Error(%a)" T_E.pp e) + r + end ) + +let map_error (t_o : testable) (t_e : testable) : testable = let module T_O = (val t_o) in let module T_E = (val t_e) in - (module struct + ( module struct type t = (T_O.t, T_E.t) result + let v = Error T_E.v + let ding = Data_encoding.result T_O.ding T_E.ding + let pp ppf r = - Crowbar.pp ppf "@[<hv 1>%a@]" - (fun fmt r -> match r with - | Ok o -> Format.fprintf fmt "Ok(%a)" T_O.pp o - | Error e -> Format.fprintf fmt "Error(%a)" T_E.pp e - ) r - end) -let map_variable_list (t: testable) (ts: testable list) : testable = + Crowbar.pp + ppf + "@[<hv 1>%a@]" + (fun fmt r -> + match r with + | Ok o -> + Format.fprintf fmt "Ok(%a)" T_O.pp o + | Error e -> + Format.fprintf fmt "Error(%a)" T_E.pp e) + r + end ) + +let map_variable_list (t : testable) (ts : testable list) : testable = let module T = (val t) in - (module struct + ( module struct type t = T.t list + let ding = Data_encoding.Variable.list (dyn_if_not T.ding) + let v = - List.fold_left (fun acc (t: testable) -> + List.fold_left + (fun acc (t : testable) -> let module T = (val t) in (* We can get rid of this Obj when we update Crowbar *) - (Obj.magic T.v) :: acc - ) + Obj.magic T.v :: acc) [] ts + let pp = Crowbar.pp_list T.pp - end) -let map_variable_array (t: testable) (ts: testable array) : testable = + end ) + +let map_variable_array (t : testable) (ts : testable array) : testable = let module T = (val t) in - (module struct + ( module struct type t = T.t array + let ding = Data_encoding.Variable.array (dyn_if_not T.ding) + let v = - Array.of_list ( - Array.fold_left (fun acc (t: testable) -> - let module T = (val t) in - (Obj.magic T.v) :: acc - ) - [] - ts - ) + Array.of_list + (Array.fold_left + (fun acc (t : testable) -> + let module T = (val t) in + Obj.magic T.v :: acc) + [] + ts) + let pp ppf a = if Array.length a > 40 then - Crowbar.pp ppf "@[<hv 1>[|%a … (%d more elements)|]@]" + Crowbar.pp + ppf + "@[<hv 1>[|%a … (%d more elements)|]@]" (Format.pp_print_list ~pp_sep:(fun ppf () -> Format.fprintf ppf ";@ ") T.pp) (Array.to_list (Array.sub a 0 30)) (Array.length a) else - Crowbar.pp ppf "@[<hv 1>[|%a|]@]" + Crowbar.pp + ppf + "@[<hv 1>[|%a|]@]" (Format.pp_print_list ~pp_sep:(fun ppf () -> Format.fprintf ppf ";@ ") T.pp) (Array.to_list a) - end) -let map_dynamic_size (t: testable) : testable = + end ) + +let map_dynamic_size (t : testable) : testable = let module T = (val t) in - (module struct + ( module struct include T + let ding = Data_encoding.dynamic_size T.ding - end) + end ) -let map_tup1 (t1: testable) : testable = +let map_tup1 (t1 : testable) : testable = let module T1 = (val t1) in - (module struct + ( module struct include T1 + let ding = Data_encoding.tup1 T1.ding - let pp ppf (v1) = - Crowbar.pp ppf "@[<hv 1>(%a)@]" - T1.pp v1 - end) -let map_tup2 (t1: testable) (t2: testable) : testable = + + let pp ppf v1 = Crowbar.pp ppf "@[<hv 1>(%a)@]" T1.pp v1 + end ) + +let map_tup2 (t1 : testable) 
(t2 : testable) : testable = let module T1 = (val t1) in let module T2 = (val t2) in - (module struct + ( module struct type t = T1.t * T2.t + let ding = Data_encoding.tup2 (dyn_if_not T1.ding) T2.ding + let v = (T1.v, T2.v) - let pp ppf (v1, v2) = - Crowbar.pp ppf "@[<hv 1>(%a, %a)@]" - T1.pp v1 - T2.pp v2 - end) -let map_tup3 (t1: testable) (t2: testable) (t3: testable) : testable = + + let pp ppf (v1, v2) = Crowbar.pp ppf "@[<hv 1>(%a, %a)@]" T1.pp v1 T2.pp v2 + end ) + +let map_tup3 (t1 : testable) (t2 : testable) (t3 : testable) : testable = let module T1 = (val t1) in let module T2 = (val t2) in let module T3 = (val t3) in - (module struct + ( module struct type t = T1.t * T2.t * T3.t - let ding = Data_encoding.tup3 (dyn_if_not T1.ding) (dyn_if_not T2.ding) T3.ding + + let ding = + Data_encoding.tup3 (dyn_if_not T1.ding) (dyn_if_not T2.ding) T3.ding + let v = (T1.v, T2.v, T3.v) + let pp ppf (v1, v2, v3) = - Crowbar.pp ppf "@[<hv 1>(%a, %a, %a)@]" - T1.pp v1 - T2.pp v2 - T3.pp v3 - end) -let map_tup4 (t1: testable) (t2: testable) (t3: testable) (t4: testable) : testable = + Crowbar.pp ppf "@[<hv 1>(%a, %a, %a)@]" T1.pp v1 T2.pp v2 T3.pp v3 + end ) + +let map_tup4 (t1 : testable) (t2 : testable) (t3 : testable) (t4 : testable) : + testable = let module T1 = (val t1) in let module T2 = (val t2) in let module T3 = (val t3) in let module T4 = (val t4) in - (module struct + ( module struct type t = T1.t * T2.t * T3.t * T4.t - let ding = Data_encoding.tup4 (dyn_if_not T1.ding) (dyn_if_not T2.ding) (dyn_if_not T3.ding) T4.ding + + let ding = + Data_encoding.tup4 + (dyn_if_not T1.ding) + (dyn_if_not T2.ding) + (dyn_if_not T3.ding) + T4.ding + let v = (T1.v, T2.v, T3.v, T4.v) + let pp ppf (v1, v2, v3, v4) = - Crowbar.pp ppf "@[<hv 1>(%a, %a, %a, %a)@]" - T1.pp v1 - T2.pp v2 - T3.pp v3 - T4.pp v4 - end) -let map_tup5 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: testable) : testable = + Crowbar.pp + ppf + "@[<hv 1>(%a, %a, %a, %a)@]" + T1.pp + v1 + T2.pp + v2 + T3.pp + v3 + T4.pp + v4 + end ) + +let map_tup5 (t1 : testable) (t2 : testable) (t3 : testable) (t4 : testable) + (t5 : testable) : testable = let module T1 = (val t1) in let module T2 = (val t2) in let module T3 = (val t3) in let module T4 = (val t4) in let module T5 = (val t5) in - (module struct + ( module struct type t = T1.t * T2.t * T3.t * T4.t * T5.t - let ding = Data_encoding.tup5 (dyn_if_not T1.ding) (dyn_if_not T2.ding) (dyn_if_not T3.ding) (dyn_if_not T4.ding) T5.ding + + let ding = + Data_encoding.tup5 + (dyn_if_not T1.ding) + (dyn_if_not T2.ding) + (dyn_if_not T3.ding) + (dyn_if_not T4.ding) + T5.ding + let v = (T1.v, T2.v, T3.v, T4.v, T5.v) + let pp ppf (v1, v2, v3, v4, v5) = - Crowbar.pp ppf "@[<hv 1>(%a, %a, %a, %a, %a)@]" - T1.pp v1 - T2.pp v2 - T3.pp v3 - T4.pp v4 - T5.pp v5 - end) -let map_tup6 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: testable) (t6: testable) : testable = + Crowbar.pp + ppf + "@[<hv 1>(%a, %a, %a, %a, %a)@]" + T1.pp + v1 + T2.pp + v2 + T3.pp + v3 + T4.pp + v4 + T5.pp + v5 + end ) + +let map_tup6 (t1 : testable) (t2 : testable) (t3 : testable) (t4 : testable) + (t5 : testable) (t6 : testable) : testable = let module T1 = (val t1) in let module T2 = (val t2) in let module T3 = (val t3) in let module T4 = (val t4) in let module T5 = (val t5) in let module T6 = (val t6) in - (module struct + ( module struct type t = T1.t * T2.t * T3.t * T4.t * T5.t * T6.t - let ding = Data_encoding.tup6 (dyn_if_not T1.ding) (dyn_if_not T2.ding) (dyn_if_not T3.ding) (dyn_if_not 
T4.ding) (dyn_if_not T5.ding) T6.ding + + let ding = + Data_encoding.tup6 + (dyn_if_not T1.ding) + (dyn_if_not T2.ding) + (dyn_if_not T3.ding) + (dyn_if_not T4.ding) + (dyn_if_not T5.ding) + T6.ding + let v = (T1.v, T2.v, T3.v, T4.v, T5.v, T6.v) + let pp ppf (v1, v2, v3, v4, v5, v6) = - Crowbar.pp ppf "@[<hv 1>(%a, %a, %a, %a, %a, %a)@]" - T1.pp v1 - T2.pp v2 - T3.pp v3 - T4.pp v4 - T5.pp v5 - T6.pp v6 - end) -let map_tup7 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: testable) (t6: testable) (t7: testable) : testable = + Crowbar.pp + ppf + "@[<hv 1>(%a, %a, %a, %a, %a, %a)@]" + T1.pp + v1 + T2.pp + v2 + T3.pp + v3 + T4.pp + v4 + T5.pp + v5 + T6.pp + v6 + end ) + +let map_tup7 (t1 : testable) (t2 : testable) (t3 : testable) (t4 : testable) + (t5 : testable) (t6 : testable) (t7 : testable) : testable = let module T1 = (val t1) in let module T2 = (val t2) in let module T3 = (val t3) in @@ -464,21 +653,44 @@ let map_tup7 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: te let module T5 = (val t5) in let module T6 = (val t6) in let module T7 = (val t7) in - (module struct + ( module struct type t = T1.t * T2.t * T3.t * T4.t * T5.t * T6.t * T7.t - let ding = Data_encoding.tup7 (dyn_if_not T1.ding) (dyn_if_not T2.ding) (dyn_if_not T3.ding) (dyn_if_not T4.ding) (dyn_if_not T5.ding) (dyn_if_not T6.ding) T7.ding + + let ding = + Data_encoding.tup7 + (dyn_if_not T1.ding) + (dyn_if_not T2.ding) + (dyn_if_not T3.ding) + (dyn_if_not T4.ding) + (dyn_if_not T5.ding) + (dyn_if_not T6.ding) + T7.ding + let v = (T1.v, T2.v, T3.v, T4.v, T5.v, T6.v, T7.v) + let pp ppf (v1, v2, v3, v4, v5, v6, v7) = - Crowbar.pp ppf "@[<hv 1>(%a, %a, %a, %a, %a, %a, %a)@]" - T1.pp v1 - T2.pp v2 - T3.pp v3 - T4.pp v4 - T5.pp v5 - T6.pp v6 - T7.pp v7 - end) -let map_tup8 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: testable) (t6: testable) (t7: testable) (t8: testable) : testable = + Crowbar.pp + ppf + "@[<hv 1>(%a, %a, %a, %a, %a, %a, %a)@]" + T1.pp + v1 + T2.pp + v2 + T3.pp + v3 + T4.pp + v4 + T5.pp + v5 + T6.pp + v6 + T7.pp + v7 + end ) + +let map_tup8 (t1 : testable) (t2 : testable) (t3 : testable) (t4 : testable) + (t5 : testable) (t6 : testable) (t7 : testable) (t8 : testable) : testable + = let module T1 = (val t1) in let module T2 = (val t2) in let module T3 = (val t3) in @@ -487,22 +699,47 @@ let map_tup8 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: te let module T6 = (val t6) in let module T7 = (val t7) in let module T8 = (val t8) in - (module struct + ( module struct type t = T1.t * T2.t * T3.t * T4.t * T5.t * T6.t * T7.t * T8.t - let ding = Data_encoding.tup8 (dyn_if_not T1.ding) (dyn_if_not T2.ding) (dyn_if_not T3.ding) (dyn_if_not T4.ding) (dyn_if_not T5.ding) (dyn_if_not T6.ding) (dyn_if_not T7.ding) T8.ding + + let ding = + Data_encoding.tup8 + (dyn_if_not T1.ding) + (dyn_if_not T2.ding) + (dyn_if_not T3.ding) + (dyn_if_not T4.ding) + (dyn_if_not T5.ding) + (dyn_if_not T6.ding) + (dyn_if_not T7.ding) + T8.ding + let v = (T1.v, T2.v, T3.v, T4.v, T5.v, T6.v, T7.v, T8.v) + let pp ppf (v1, v2, v3, v4, v5, v6, v7, v8) = - Crowbar.pp ppf "@[<hv 1>(%a, %a, %a, %a, %a, %a, %a, %a)@]" - T1.pp v1 - T2.pp v2 - T3.pp v3 - T4.pp v4 - T5.pp v5 - T6.pp v6 - T7.pp v7 - T8.pp v8 - end) -let map_tup9 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: testable) (t6: testable) (t7: testable) (t8: testable) (t9: testable) : testable = + Crowbar.pp + ppf + "@[<hv 1>(%a, %a, %a, %a, %a, %a, %a, %a)@]" + T1.pp + v1 + T2.pp + v2 + T3.pp + v3 + T4.pp 
+ v4 + T5.pp + v5 + T6.pp + v6 + T7.pp + v7 + T8.pp + v8 + end ) + +let map_tup9 (t1 : testable) (t2 : testable) (t3 : testable) (t4 : testable) + (t5 : testable) (t6 : testable) (t7 : testable) (t8 : testable) + (t9 : testable) : testable = let module T1 = (val t1) in let module T2 = (val t2) in let module T3 = (val t3) in @@ -512,23 +749,50 @@ let map_tup9 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: te let module T7 = (val t7) in let module T8 = (val t8) in let module T9 = (val t9) in - (module struct + ( module struct type t = T1.t * T2.t * T3.t * T4.t * T5.t * T6.t * T7.t * T8.t * T9.t - let ding = Data_encoding.tup9 (dyn_if_not T1.ding) (dyn_if_not T2.ding) (dyn_if_not T3.ding) (dyn_if_not T4.ding) (dyn_if_not T5.ding) (dyn_if_not T6.ding) (dyn_if_not T7.ding) (dyn_if_not T8.ding) T9.ding + + let ding = + Data_encoding.tup9 + (dyn_if_not T1.ding) + (dyn_if_not T2.ding) + (dyn_if_not T3.ding) + (dyn_if_not T4.ding) + (dyn_if_not T5.ding) + (dyn_if_not T6.ding) + (dyn_if_not T7.ding) + (dyn_if_not T8.ding) + T9.ding + let v = (T1.v, T2.v, T3.v, T4.v, T5.v, T6.v, T7.v, T8.v, T9.v) + let pp ppf (v1, v2, v3, v4, v5, v6, v7, v8, v9) = - Crowbar.pp ppf "@[<hv 1>(%a, %a, %a, %a, %a, %a, %a, %a, %a)@]" - T1.pp v1 - T2.pp v2 - T3.pp v3 - T4.pp v4 - T5.pp v5 - T6.pp v6 - T7.pp v7 - T8.pp v8 - T9.pp v9 - end) -let map_tup10 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: testable) (t6: testable) (t7: testable) (t8: testable) (t9: testable) (t10: testable) : testable = + Crowbar.pp + ppf + "@[<hv 1>(%a, %a, %a, %a, %a, %a, %a, %a, %a)@]" + T1.pp + v1 + T2.pp + v2 + T3.pp + v3 + T4.pp + v4 + T5.pp + v5 + T6.pp + v6 + T7.pp + v7 + T8.pp + v8 + T9.pp + v9 + end ) + +let map_tup10 (t1 : testable) (t2 : testable) (t3 : testable) (t4 : testable) + (t5 : testable) (t6 : testable) (t7 : testable) (t8 : testable) + (t9 : testable) (t10 : testable) : testable = let module T1 = (val t1) in let module T2 = (val t2) in let module T3 = (val t3) in @@ -539,104 +803,130 @@ let map_tup10 (t1: testable) (t2: testable) (t3: testable) (t4: testable) (t5: t let module T8 = (val t8) in let module T9 = (val t9) in let module T10 = (val t10) in - (module struct - type t = T1.t * T2.t * T3.t * T4.t * T5.t * T6.t * T7.t * T8.t * T9.t * T10.t - let ding = Data_encoding.tup10 (dyn_if_not T1.ding) (dyn_if_not T2.ding) (dyn_if_not T3.ding) (dyn_if_not T4.ding) (dyn_if_not T5.ding) (dyn_if_not T6.ding) (dyn_if_not T7.ding) (dyn_if_not T8.ding) (dyn_if_not T9.ding) T10.ding + ( module struct + type t = + T1.t * T2.t * T3.t * T4.t * T5.t * T6.t * T7.t * T8.t * T9.t * T10.t + + let ding = + Data_encoding.tup10 + (dyn_if_not T1.ding) + (dyn_if_not T2.ding) + (dyn_if_not T3.ding) + (dyn_if_not T4.ding) + (dyn_if_not T5.ding) + (dyn_if_not T6.ding) + (dyn_if_not T7.ding) + (dyn_if_not T8.ding) + (dyn_if_not T9.ding) + T10.ding + let v = (T1.v, T2.v, T3.v, T4.v, T5.v, T6.v, T7.v, T8.v, T9.v, T10.v) + let pp ppf (v1, v2, v3, v4, v5, v6, v7, v8, v9, v10) = - Crowbar.pp ppf "@[<hv 1>(%a, %a, %a, %a, %a, %a, %a, %a, %a, %a)@]" - T1.pp v1 - T2.pp v2 - T3.pp v3 - T4.pp v4 - T5.pp v5 - T6.pp v6 - T7.pp v7 - T8.pp v8 - T9.pp v9 - T10.pp v10 - end) - -let map_merge_tups (t1: testable) (t2: testable): testable = + Crowbar.pp + ppf + "@[<hv 1>(%a, %a, %a, %a, %a, %a, %a, %a, %a, %a)@]" + T1.pp + v1 + T2.pp + v2 + T3.pp + v3 + T4.pp + v4 + T5.pp + v5 + T6.pp + v6 + T7.pp + v7 + T8.pp + v8 + T9.pp + v9 + T10.pp + v10 + end ) + +let map_merge_tups (t1 : testable) (t2 : testable) : testable = let module T1 = 
(val t1) in let module T2 = (val t2) in - (module struct + ( module struct type t = T1.t * T2.t - let ding = Data_encoding.merge_tups (dyn_if_not T1.ding) (dyn_if_not T2.ding) + + let ding = + Data_encoding.merge_tups (dyn_if_not T1.ding) (dyn_if_not T2.ding) + let v = (T1.v, T2.v) - let pp ppf (v1, v2) = - Crowbar.pp ppf "@[<hv 1>(%a, %a)@]" - T1.pp v1 - T2.pp v2 - end) + let pp ppf (v1, v2) = Crowbar.pp ppf "@[<hv 1>(%a, %a)@]" T1.pp v1 T2.pp v2 + end ) -let testable_printer: testable Crowbar.printer = fun ppf (t: testable) -> +let testable_printer : testable Crowbar.printer = + fun ppf (t : testable) -> let module T = (val t) in T.pp ppf T.v - (* helpers to construct values tester values *) - (* Generator for testable values *) -let tup_gen (tgen: testable Crowbar.gen): testable Crowbar.gen = +let tup_gen (tgen : testable Crowbar.gen) : testable Crowbar.gen = let open Crowbar in (* Stack overflow if there are more levels *) - with_printer testable_printer @@ - choose [ - map [tgen] map_tup1; - map [tgen; tgen] map_tup2; - map [tgen; tgen; tgen] map_tup3; - map [tgen; tgen; tgen; tgen] map_tup4; - map [tgen; tgen; tgen; tgen; tgen] map_tup5; - map [tgen; tgen; tgen; tgen; tgen; tgen] map_tup6; - ] + with_printer testable_printer + @@ choose + [ map [tgen] map_tup1; + map [tgen; tgen] map_tup2; + map [tgen; tgen; tgen] map_tup3; + map [tgen; tgen; tgen; tgen] map_tup4; + map [tgen; tgen; tgen; tgen; tgen] map_tup5; + map [tgen; tgen; tgen; tgen; tgen; tgen] map_tup6 ] + let gen = let open Crowbar in - let g: testable Crowbar.gen = fix (fun g -> - choose [ - const null; - const empty; - const unit; - map [short_string] map_constant; - map [int8] map_int8; - map [uint8] map_uint8; - (* TODO: use newer version of crowbar to get these generators + let g : testable Crowbar.gen = + fix (fun g -> + choose + [ const null; + const empty; + const unit; + map [short_string] map_constant; + map [int8] map_int8; + map [uint8] map_uint8; + (* TODO: use newer version of crowbar to get these generators map [int16] map_int16; map [uint16] map_uint16; *) - map [int32] map_int32; - map [int64] map_int64; - (* NOTE: the int encoding require ranges to be 30-bit compatible *) - map [int8; int8; int8] map_range_int; - map [float; float; float] map_range_float; - map [bool] map_bool; - map [short_string] map_string; - map [short_mbytes] map_bytes; - map [float] map_float; - map [short_string1] map_fixed_string; - map [short_mbytes1] map_fixed_bytes; - map [short_string] map_variable_string; - map [short_mbytes] map_variable_bytes; - - map [g] map_some; - map [g] map_none; - - map [g] map_dynamic_size; - - map [g] map_tup1; - map [g; g] map_tup2; - map [g; g; g] map_tup3; - map [g; g; g; g] map_tup4; - map [g; g; g; g; g] map_tup5; - map [g; g; g; g; g; g] map_tup6; - map [g; g] (fun t1 t2 -> map_merge_tups (map_tup1 t1) (map_tup1 t2)); - map [g; g; g] (fun t1 t2 t3 -> map_merge_tups (map_tup2 t1 t2) (map_tup1 t3)); - map [g; g; g] (fun t1 t2 t3 -> map_merge_tups (map_tup1 t1) (map_tup2 t2 t3)); - - (* NOTE: we cannot use lists/arrays for now. 
They require the + map [int32] map_int32; + map [int64] map_int64; + (* NOTE: the int encoding require ranges to be 30-bit compatible *) + map [int8; int8; int8] map_range_int; + map [float; float; float] map_range_float; + map [bool] map_bool; + map [short_string] map_string; + map [short_mbytes] map_bytes; + map [float] map_float; + map [short_string1] map_fixed_string; + map [short_mbytes1] map_fixed_bytes; + map [short_string] map_variable_string; + map [short_mbytes] map_variable_bytes; + map [g] map_some; + map [g] map_none; + map [g] map_dynamic_size; + map [g] map_tup1; + map [g; g] map_tup2; + map [g; g; g] map_tup3; + map [g; g; g; g] map_tup4; + map [g; g; g; g; g] map_tup5; + map [g; g; g; g; g; g] map_tup6; + map [g; g] (fun t1 t2 -> + map_merge_tups (map_tup1 t1) (map_tup1 t2)); + map [g; g; g] (fun t1 t2 t3 -> + map_merge_tups (map_tup2 t1 t2) (map_tup1 t3)); + map [g; g; g] (fun t1 t2 t3 -> + map_merge_tups (map_tup1 t1) (map_tup2 t2 t3)) + (* NOTE: we cannot use lists/arrays for now. They require the data-inside to be homogeneous (e.g., same rangedness of ranged numbers) which we cannot guarantee right now. This can be fixed once we update Crowbar and get access to the new `dynamic_bind` generator @@ -645,7 +935,7 @@ let gen = map [g; list g] map_variable_list; map [g; list g] (fun t ts -> map_variable_array t (Array.of_list ts)); *) - ]) + ]) in with_printer testable_printer g @@ -688,59 +978,55 @@ let gen = *) - (* Basic functions for executing tests on a given input *) let roundtrip_json pp ding v = let json = - try - Data_encoding.Json.construct ding v - with - Invalid_argument m -> - Crowbar.fail (Format.asprintf "Cannot construct: %a (%s)" pp v m) + try Data_encoding.Json.construct ding v + with Invalid_argument m -> + Crowbar.fail (Format.asprintf "Cannot construct: %a (%s)" pp v m) in let vv = - try - Data_encoding.Json.destruct ding json - with - Data_encoding.Json.Cannot_destruct (_, _) -> - Crowbar.fail "Cannot destruct" + try Data_encoding.Json.destruct ding json + with Data_encoding.Json.Cannot_destruct (_, _) -> + Crowbar.fail "Cannot destruct" in Crowbar.check_eq ~pp v vv let roundtrip_binary pp ding v = let bin = - try - Data_encoding.Binary.to_bytes_exn ding v - with - | Data_encoding.Binary.Write_error we -> - Format.kasprintf Crowbar.fail - "Cannot construct: %a (%a)" - pp v - Data_encoding.Binary.pp_write_error we + try Data_encoding.Binary.to_bytes_exn ding v + with Data_encoding.Binary.Write_error we -> + Format.kasprintf + Crowbar.fail + "Cannot construct: %a (%a)" + pp + v + Data_encoding.Binary.pp_write_error + we in let vv = - try - Data_encoding.Binary.of_bytes_exn ding bin - with - | Data_encoding.Binary.Read_error re -> - Format.kasprintf Crowbar.fail - "Cannot destruct: %a (%a)" - pp v - Data_encoding.Binary.pp_read_error re + try Data_encoding.Binary.of_bytes_exn ding bin + with Data_encoding.Binary.Read_error re -> + Format.kasprintf + Crowbar.fail + "Cannot destruct: %a (%a)" + pp + v + Data_encoding.Binary.pp_read_error + re in Crowbar.check_eq ~pp v vv - - (* Setting up the actual tests *) -let test_testable_json (testable: testable) = +let test_testable_json (testable : testable) = let module T = (val testable) in roundtrip_json T.pp T.ding T.v -let test_testable_binary (testable: testable) = + +let test_testable_binary (testable : testable) = let module T = (val testable) in roundtrip_binary T.pp T.ding T.v + let () = - Crowbar.add_test ~name:("binary roundtrips") [gen] test_testable_binary; - Crowbar.add_test ~name:("json roundtrips") 
[gen] test_testable_json; + Crowbar.add_test ~name:"binary roundtrips" [gen] test_testable_binary ; + Crowbar.add_test ~name:"json roundtrips" [gen] test_testable_json ; () - diff --git a/src/lib_data_encoding/test/types.ml b/src/lib_data_encoding/test/types.ml index 03af661b658cbd6b3a1706a2fc9cf0cf263e5b7e..5c4cfd85d59ee8337d8c3a33352e6b36df8a8034 100644 --- a/src/lib_data_encoding/test/types.ml +++ b/src/lib_data_encoding/test/types.ml @@ -25,202 +25,178 @@ open Data_encoding -type record = { - a : int ; - b : bool ; - c : Z.t option ; - d : float ; -} +type record = {a : int; b : bool; c : Z.t option; d : float} -let default_record = { a = 32 ; b = true ; c = Some Z.one ; d = 12.34 } +let default_record = {a = 32; b = true; c = Some Z.one; d = 12.34} let record_obj_enc = conv - (fun { a ; b ; c ; d } -> ((a, b), (c, d))) - (fun ((a, b), (c, d)) -> { a ; b ; c ; d }) + (fun {a; b; c; d} -> ((a, b), (c, d))) + (fun ((a, b), (c, d)) -> {a; b; c; d}) (merge_objs - (obj2 - (req "a" int31) - (dft "b" bool false)) - (obj2 - (opt "c" z) - (req "d" float))) + (obj2 (req "a" int31) (dft "b" bool false)) + (obj2 (opt "c" z) (req "d" float))) let record_tup_enc = conv - (fun { a ; b ; c ; d } -> ((a, b, c), d)) - (fun ((a, b, c), d) -> { a ; b ; c ; d }) - (merge_tups - (tup3 int31 bool (option z)) - (tup1 float)) - -let record_to_string { a ; b ; c ; d } = - let c = - match c with - | None -> "none" - | Some c -> Z.to_string c in + (fun {a; b; c; d} -> ((a, b, c), d)) + (fun ((a, b, c), d) -> {a; b; c; d}) + (merge_tups (tup3 int31 bool (option z)) (tup1 float)) + +let record_to_string {a; b; c; d} = + let c = match c with None -> "none" | Some c -> Z.to_string c in Format.asprintf "(%d, %B, %s, %f)" a b c d -type variable_record = { - p : int ; - q : MBytes.t ; -} +type variable_record = {p : int; q : MBytes.t} -let default_variable_record = { p = 23 ; q = MBytes.of_string "wwwxxyyzzz" } +let default_variable_record = {p = 23; q = MBytes.of_string "wwwxxyyzzz"} let variable_record_obj_enc = conv - (fun { p ; q } -> (p, q)) - (fun (p, q) -> { p ; q }) - (obj2 - (req "p" int31) - (req "q" Variable.bytes)) + (fun {p; q} -> (p, q)) + (fun (p, q) -> {p; q}) + (obj2 (req "p" int31) (req "q" Variable.bytes)) let variable_record_tup_enc = conv - (fun { p ; q } -> (p, q)) - (fun (p, q) -> { p ; q }) + (fun {p; q} -> (p, q)) + (fun (p, q) -> {p; q}) (tup2 int31 Variable.bytes) -let variable_record_to_string { p ; q } = +let variable_record_to_string {p; q} = Format.asprintf "(%d, %a)" p MBytes.pp_hex q -type variable_left_record = { - x : int ; - y : MBytes.t ; - z : int ; -} +type variable_left_record = {x : int; y : MBytes.t; z : int} let default_variable_left_record = - { x = 98 ; y = MBytes.of_string "765" ; z = 4321 } + {x = 98; y = MBytes.of_string "765"; z = 4321} let variable_left_record_obj_enc = conv - (fun { x ; y ; z } -> (x, y, z)) - (fun (x, y, z) -> { x ; y ; z }) - (obj3 - (req "x" int31) - (req "y" Variable.bytes) - (req "z" int31)) + (fun {x; y; z} -> (x, y, z)) + (fun (x, y, z) -> {x; y; z}) + (obj3 (req "x" int31) (req "y" Variable.bytes) (req "z" int31)) let variable_left_record_tup_enc = conv - (fun { x ; y ; z } -> (x, y, z)) - (fun (x, y, z) -> { x ; y ; z }) + (fun {x; y; z} -> (x, y, z)) + (fun (x, y, z) -> {x; y; z}) (tup3 int31 Variable.bytes int31) -let variable_left_record_to_string { x ; y ; z } = +let variable_left_record_to_string {x; y; z} = Format.asprintf "(%d, %a, %d)" x MBytes.pp_hex y z type union = A of int | B of string | C of int | D of string | E let union_enc = - 
union [ - case (Tag 1) - ~title:"A" - int8 - (function A i -> Some i | _ -> None) - (fun i -> A i) ; - case (Tag 2) - ~title:"B" - string - (function B s -> Some s | _ -> None) - (fun s -> B s) ; - case (Tag 3) - ~title:"C" - (obj1 (req "C" int8)) - (function C i -> Some i | _ -> None) - (fun i -> C i) ; - case (Tag 4) - ~title:"D" - (obj2 - (req "kind" (constant "D")) - (req "data" (string))) - (function D s -> Some ((), s) | _ -> None) - (fun ((), s) -> D s) ; - case (Tag 5) - ~title:"E" - empty - (function E -> Some () | _ -> None) - (fun () -> E) ; - ] + union + [ case + (Tag 1) + ~title:"A" + int8 + (function A i -> Some i | _ -> None) + (fun i -> A i); + case + (Tag 2) + ~title:"B" + string + (function B s -> Some s | _ -> None) + (fun s -> B s); + case + (Tag 3) + ~title:"C" + (obj1 (req "C" int8)) + (function C i -> Some i | _ -> None) + (fun i -> C i); + case + (Tag 4) + ~title:"D" + (obj2 (req "kind" (constant "D")) (req "data" string)) + (function D s -> Some ((), s) | _ -> None) + (fun ((), s) -> D s); + case + (Tag 5) + ~title:"E" + empty + (function E -> Some () | _ -> None) + (fun () -> E) ] let mini_union_enc = - union [ - case (Tag 1) - ~title:"A" - int8 - (function A i -> Some i | _ -> None) - (fun i -> A i) ; - ] + union + [ case + (Tag 1) + ~title:"A" + int8 + (function A i -> Some i | _ -> None) + (fun i -> A i) ] let union_to_string = function - | A i -> Printf.sprintf "A %d" i - | B s -> Printf.sprintf "B %s" s - | C i -> Printf.sprintf "C %d" i - | D s -> Printf.sprintf "D %s" s - | E -> "E" + | A i -> + Printf.sprintf "A %d" i + | B s -> + Printf.sprintf "B %s" s + | C i -> + Printf.sprintf "C %d" i + | D s -> + Printf.sprintf "D %s" s + | E -> + "E" let enum_enc = string_enum - [ "one", 1 ; "two", 2 ; "three", 3 ; "four", 4 ; "five", 5 ; "six", 6 ] + [("one", 1); ("two", 2); ("three", 3); ("four", 4); ("five", 5); ("six", 6)] -let mini_enum_enc = - string_enum - [ "one", 1 ; "two", 2 ] +let mini_enum_enc = string_enum [("one", 1); ("two", 2)] let mu_list_enc enc = - mu "list" @@ fun mu_list_enc -> - union [ - case (Tag 0) - ~title:"Nil" - empty - (function [] -> Some () | _ :: _ -> None) - (fun () -> []) ; - case (Tag 1) - ~title:"Cons" - (obj2 - (req "value" enc) - (req "next" mu_list_enc)) - (function x :: xs -> Some (x, xs) | [] -> None) - (fun (x, xs) -> x :: xs) ; - ] + mu "list" + @@ fun mu_list_enc -> + union + [ case + (Tag 0) + ~title:"Nil" + empty + (function [] -> Some () | _ :: _ -> None) + (fun () -> []); + case + (Tag 1) + ~title:"Cons" + (obj2 (req "value" enc) (req "next" mu_list_enc)) + (function x :: xs -> Some (x, xs) | [] -> None) + (fun (x, xs) -> x :: xs) ] let bounded_list ~total ~elements enc = check_size total (Variable.list (check_size elements enc)) module Alcotest = struct include Alcotest + let float = - testable - Fmt.float - (fun f1 f2 -> - match classify_float f1, classify_float f2 with - | FP_nan, FP_nan -> true - | _ -> f1 = f2) + testable Fmt.float (fun f1 f2 -> + match (classify_float f1, classify_float f2) with + | (FP_nan, FP_nan) -> + true + | _ -> + f1 = f2) + let bytes = testable - (Fmt.of_to_string (fun s -> let `Hex s = MBytes.to_hex s in s)) + (Fmt.of_to_string (fun s -> + let (`Hex s) = MBytes.to_hex s in + s)) MBytes.equal - let z = - testable - (Fmt.of_to_string Z.to_string) - Z.equal + + let z = testable (Fmt.of_to_string Z.to_string) Z.equal + let n = z - let record = - testable - (Fmt.of_to_string record_to_string) - (=) + + let record = testable (Fmt.of_to_string record_to_string) ( = ) + let variable_record = - 
testable - (Fmt.of_to_string variable_record_to_string) - (=) + testable (Fmt.of_to_string variable_record_to_string) ( = ) + let variable_left_record = - testable - (Fmt.of_to_string variable_left_record_to_string) - (=) - let union = - testable - (Fmt.of_to_string union_to_string) - (=) + testable (Fmt.of_to_string variable_left_record_to_string) ( = ) + + let union = testable (Fmt.of_to_string union_to_string) ( = ) end diff --git a/src/lib_data_encoding/test/versioned.ml b/src/lib_data_encoding/test/versioned.ml index 7317335f0607af01b5a61587b36c4c0001e5a18d..9428a45d0c1dba0077c64592434a85cdef6f350b 100644 --- a/src/lib_data_encoding/test/versioned.ml +++ b/src/lib_data_encoding/test/versioned.ml @@ -38,7 +38,9 @@ module Documented_example = struct simplified {!Internal_event.EVENT_DEFINITION}): *) module type INTENDED_SIGNATURE = sig type t + val encoding : t Data_encoding.t + val pp : Format.formatter -> t -> unit end @@ -49,14 +51,14 @@ module Documented_example = struct (** The first version has a [(string * string) list] field. *) module V0 = struct - type t = { message : string ; attachment : (string * string) list } + type t = {message : string; attachment : (string * string) list} (** This is the "naked" (i.e. non-versioned) encoding of version-0: *) let encoding = let open Data_encoding in conv - (fun { message ; attachment } -> (message, attachment)) - (fun (message, attachment) -> { message ; attachment }) + (fun {message; attachment} -> (message, attachment)) + (fun (message, attachment) -> {message; attachment}) (obj2 (req "message" string) (req "attach" (list (tup2 string string)))) end @@ -68,13 +70,15 @@ module Documented_example = struct let encoding = Data_encoding.With_version.(encoding ~name (first_version V0.encoding)) - let pp ppf { message ; attachment } = + let pp ppf {message; attachment} = let open Format in fprintf ppf "%s:@ %s@ [" name message ; pp_open_box ppf 2 ; - pp_print_list ~pp_sep:(fun fmt () -> fprintf fmt ";@ ") + pp_print_list + ~pp_sep:(fun fmt () -> fprintf fmt ";@ ") (fun fmt (k, v) -> fprintf fmt "%s: %S" k v) - ppf attachment ; + ppf + attachment ; pp_close_box ppf () ; fprintf ppf "]" ; () @@ -83,23 +87,22 @@ module Documented_example = struct (** In a later version we want the attachment to be any piece of Json and not just a key-value list: *) module V1 = struct - (** Version 1 is very similar to {!Internal_event.Debug_event}: *) - type t = { message : string ; attachment : Data_encoding.Json.t } + type t = {message : string; attachment : Data_encoding.Json.t} - let make ?(attach = `Null) message () = { message ; attachment = attach } + let make ?(attach = `Null) message () = {message; attachment = attach} (** Note the "upgrade" function which can make a {!V1.t} from a {!V0.t}: *) - let of_v0 { V0.message ; attachment } = - { message ; + let of_v0 {V0.message; attachment} = + { message; attachment = `O (List.map (fun (k, v) -> (k, `String v)) attachment) } (** Again we build first a version-free encoding: *) let encoding = let open Data_encoding in conv - (fun { message ; attachment } -> (message, attachment)) - (fun (message, attachment) -> { message ; attachment }) + (fun {message; attachment} -> (message, attachment)) + (fun (message, attachment) -> {message; attachment}) (obj2 (req "message" string) (req "attachment" json)) end @@ -113,66 +116,84 @@ module Documented_example = struct provides {!V1.of_v0} as an upgrade function. 
*) let encoding = Data_encoding.With_version.( - encoding ~name ( - first_version V0.encoding - |> next_version V1.encoding V1.of_v0)) + encoding + ~name + (first_version V0.encoding |> next_version V1.encoding V1.of_v0)) - let pp ppf { message ; attachment } = + let pp ppf {message; attachment} = let open Format in fprintf ppf "%s:@ %s@ %a" name message Data_encoding.Json.pp attachment end - (** This test "serializes" successively using {!First_version.encoding} and {!Second_version.encoding}, and then shows that the former's output can be parsed with the later. *) let actual_test () = let v0_thing : First_version.t = - { V0. message = "The v0 message" ; - attachment = [ "k1", "v1" ; "k2", "v2" ] } in + {V0.message = "The v0 message"; attachment = [("k1", "v1"); ("k2", "v2")]} + in let json_v0 = - Data_encoding.Json.construct First_version.encoding v0_thing in + Data_encoding.Json.construct First_version.encoding v0_thing + in let expected_json_v0 = - `O [name ^ ".v0", (* -> here we see how the [~name] is used. *) - `O [ - "message", `String v0_thing.V0.message ; - "attach", `A (List.map - (fun (k, v) -> `A [ `String k ; `String v ]) - v0_thing.V0.attachment) ] ] in - begin if json_v0 <> expected_json_v0 then - Alcotest.failf "Json-v0: %a@ Vs@ %a" - Data_encoding.Json.pp json_v0 Data_encoding.Json.pp expected_json_v0 - end; + `O + [ ( name ^ ".v0", + (* -> here we see how the [~name] is used. *) + `O + [ ("message", `String v0_thing.V0.message); + ( "attach", + `A + (List.map + (fun (k, v) -> `A [`String k; `String v]) + v0_thing.V0.attachment) ) ] ) ] + in + if json_v0 <> expected_json_v0 then + Alcotest.failf + "Json-v0: %a@ Vs@ %a" + Data_encoding.Json.pp + json_v0 + Data_encoding.Json.pp + expected_json_v0 ; (* Up to here we only used the {!First_version} module. Now the same process with {!Second_version}: *) let v1_thing : Second_version.t = - {V1. message = "The v1 message" ; - attachment = `O [ "k1" , `String "v1" ; "kn" , `Float 42. 
] } in + { V1.message = "The v1 message"; + attachment = `O [("k1", `String "v1"); ("kn", `Float 42.)] } + in let json_v1 = - Data_encoding.Json.construct Second_version.encoding v1_thing in + Data_encoding.Json.construct Second_version.encoding v1_thing + in let expected_json_v1 = - `O [name ^ ".v1", - `O [ - "message", `String v1_thing.V1.message ; - "attachment", v1_thing.V1.attachment ] ] in - begin if json_v1 <> expected_json_v1 then - Alcotest.failf "Json-v1: %a@ Vs@ %a" - Data_encoding.Json.pp json_v1 Data_encoding.Json.pp expected_json_v1 - end; + `O + [ ( name ^ ".v1", + `O + [ ("message", `String v1_thing.V1.message); + ("attachment", v1_thing.V1.attachment) ] ) ] + in + if json_v1 <> expected_json_v1 then + Alcotest.failf + "Json-v1: %a@ Vs@ %a" + Data_encoding.Json.pp + json_v1 + Data_encoding.Json.pp + expected_json_v1 ; (* Now the {b interesting part}, we decode ("destruct") the JSON from {!First_version} with {!Second_version}: *) let v0_decoded_later : Second_version.t = - Data_encoding.Json.destruct Second_version.encoding json_v0 in + Data_encoding.Json.destruct Second_version.encoding json_v0 + in (* And we check that going through JSON is equivalent to just calling the upgrade function directly on the {!First_version.t} value: *) let expected_v1 = V1.of_v0 v0_thing in - begin if v0_decoded_later <> expected_v1 then - Alcotest.failf "Parsing v0 with v1: %a@ Vs@ %a" - Second_version.pp v0_decoded_later Second_version.pp expected_v1 - end; + if v0_decoded_later <> expected_v1 then + Alcotest.failf + "Parsing v0 with v1: %a@ Vs@ %a" + Second_version.pp + v0_decoded_later + Second_version.pp + expected_v1 ; () - end (** This test builds a few successive versions of encodings and tries @@ -184,65 +205,65 @@ end let test_n_encapsulated_versions () = let open Data_encoding in let name = "test0" in - let version_0 = - (obj2 (req "left" string) (req "right" (string))) - in - let versioned_0 = - With_version.(encoding ~name @@ first_version version_0) in - let value_0 = "v0", "k0" in + let version_0 = obj2 (req "left" string) (req "right" string) in + let versioned_0 = With_version.(encoding ~name @@ first_version version_0) in + let value_0 = ("v0", "k0") in let json_0 = Json.construct versioned_0 value_0 in - Helpers.no_exception begin fun () -> - let result = Json.destruct versioned_0 json_0 in - if result <> value_0 then - Alcotest.failf "value-0" - end; + Helpers.no_exception (fun () -> + let result = Json.destruct versioned_0 json_0 in + if result <> value_0 then Alcotest.failf "value-0") ; let module Ex = struct type v0 = string * string + type t = - | Hide: 'a Data_encoding.t * 'a With_version.t * 'a * (v0 -> 'a) -> t + | Hide : 'a Data_encoding.t * 'a With_version.t * 'a * (v0 -> 'a) -> t end in let make_next (Ex.Hide (enc, versioned, example, from_v0)) index = let new_tag = Printf.sprintf "left-%d" index in let version_n = obj2 (req new_tag string) (req "right" enc) in - let upgrade vn = "some-random-extra-string", vn in - let versioned_n = With_version.(next_version version_n upgrade versioned) in + let upgrade vn = ("some-random-extra-string", vn) in + let versioned_n = + With_version.(next_version version_n upgrade versioned) + in let encoding = With_version.(encoding ~name versioned_n) in - let example_n = "val4" ^ new_tag, example in + let example_n = ("val4" ^ new_tag, example) in let json_example_n = Json.construct encoding example_n in - Helpers.no_exception begin fun () -> - let result = Json.destruct encoding json_example_n in - if result <> example_n then - 
Alcotest.failf "value-%d" index - end; + Helpers.no_exception (fun () -> + let result = Json.destruct encoding json_example_n in + if result <> example_n then Alcotest.failf "value-%d" index) ; let json_example_p = - Json.construct With_version.(encoding ~name versioned) example in - Helpers.no_exception begin fun () -> - let result = Json.destruct encoding json_example_p in - if result <> upgrade example then - Alcotest.failf "value-%d-previous-encoding" index - end; - let next_upgrade = fun x -> upgrade (from_v0 x) in - Helpers.no_exception begin fun () -> - let result = Json.destruct encoding json_0 in - if result <> next_upgrade value_0 then - Alcotest.failf "value-%d-from-v0-encoding" index - end; - Format.eprintf "json_example_%d:@ %a\n%!" index Json.pp json_example_n; - Format.eprintf "json_example_%d-from-v0:@ %a\n%!" index Json.pp - (Json.construct encoding (next_upgrade value_0)); + Json.construct With_version.(encoding ~name versioned) example + in + Helpers.no_exception (fun () -> + let result = Json.destruct encoding json_example_p in + if result <> upgrade example then + Alcotest.failf "value-%d-previous-encoding" index) ; + let next_upgrade x = upgrade (from_v0 x) in + Helpers.no_exception (fun () -> + let result = Json.destruct encoding json_0 in + if result <> next_upgrade value_0 then + Alcotest.failf "value-%d-from-v0-encoding" index) ; + Format.eprintf "json_example_%d:@ %a\n%!" index Json.pp json_example_n ; + Format.eprintf + "json_example_%d-from-v0:@ %a\n%!" + index + Json.pp + (Json.construct encoding (next_upgrade value_0)) ; Ex.Hide (version_n, versioned_n, example_n, next_upgrade) in - let Ex.Hide _ = + let (Ex.Hide _) = ListLabels.fold_left - (List.init 10 ((+) 1)) - ~init:(Ex.Hide (version_0, With_version.(first_version version_0), - value_0, fun x -> x)) + (List.init 10 (( + ) 1)) + ~init: + (Ex.Hide + ( version_0, + With_version.(first_version version_0), + value_0, + fun x -> x )) ~f:make_next in () - -let tests = [ - "example-test", `Quick, Documented_example.actual_test; - "test-encapsulated-versions", `Quick, test_n_encapsulated_versions; -] +let tests = + [ ("example-test", `Quick, Documented_example.actual_test); + ("test-encapsulated-versions", `Quick, test_n_encapsulated_versions) ] diff --git a/src/lib_data_encoding/test/write_failure.ml b/src/lib_data_encoding/test/write_failure.ml index 69178b85ca5c6076193077cff5c8ad85ffa47628..c17ef227fe78fb9320ee375e39a3f1bdce3bfcfb 100644 --- a/src/lib_data_encoding/test/write_failure.ml +++ b/src/lib_data_encoding/test/write_failure.ml @@ -30,72 +30,74 @@ open Types let check_raises expected f = match f () with - | exception exn when expected exn -> () + | exception exn when expected exn -> + () | exception exn -> Alcotest.failf "Unexpected exception: %s." (Printexc.to_string exn) - | _ -> Alcotest.failf "Expecting exception, got success." + | _ -> + Alcotest.failf "Expecting exception, got success." 
let json ?(expected = fun _ -> true) encoding value () = - check_raises expected begin fun () -> - ignore (Json.construct encoding value : Json.t) ; - end + check_raises expected (fun () -> + ignore (Json.construct encoding value : Json.t)) let bson ?(expected = fun _ -> true) encoding value () = - check_raises expected begin fun () -> - ignore (Bson.construct encoding value : Bson.t) ; - end + check_raises expected (fun () -> + ignore (Bson.construct encoding value : Bson.t)) let binary ?(expected = fun _ -> true) encoding value () = - check_raises expected begin fun () -> - ignore (Binary.to_bytes_exn encoding value : MBytes.t) ; - end + check_raises expected (fun () -> + ignore (Binary.to_bytes_exn encoding value : MBytes.t)) let all name encoding value = - [ name ^ ".json", `Quick, json encoding value ; - name ^ ".bson", `Quick, bson encoding value ; - name ^ ".bytes", `Quick, binary encoding value ] + [ (name ^ ".json", `Quick, json encoding value); + (name ^ ".bson", `Quick, bson encoding value); + (name ^ ".bytes", `Quick, binary encoding value) ] let all_ranged_int minimum maximum = let encoding = ranged_int minimum maximum in let name = Format.asprintf "ranged_int.%d" minimum in - all (name ^ ".min") encoding (minimum - 1) @ - all (name ^ ".max") encoding (maximum + 1) + all (name ^ ".min") encoding (minimum - 1) + @ all (name ^ ".max") encoding (maximum + 1) let all_ranged_float minimum maximum = let encoding = ranged_float minimum maximum in let name = Format.asprintf "ranged_float.%f" minimum in - all (name ^ ".min") encoding (minimum -. 1.) @ - all (name ^ ".max") encoding (maximum +. 1.) + all (name ^ ".min") encoding (minimum -. 1.) + @ all (name ^ ".max") encoding (maximum +. 1.) let test_bounded_string_list = let expected = function - | Binary_error.Write_error Size_limit_exceeded -> true - | _ -> false in + | Binary_error.Write_error Size_limit_exceeded -> + true + | _ -> + false + in let test name ~total ~elements v = - "bounded_string_list." ^ name, `Quick, - binary ~expected (bounded_list ~total ~elements string) v in - [ test "a" ~total:0 ~elements:0 [""] ; - test "b1" ~total:3 ~elements:4 [""] ; - test "b2" ~total:4 ~elements:3 [""] ; - test "c1" ~total:19 ~elements:4 ["";"";"";"";""] ; - test "c2" ~total:20 ~elements:3 ["";"";"";"";""] ; - test "d1" ~total:20 ~elements:5 ["";"";"";"";"a"] ; - test "d2" ~total:21 ~elements:4 ["";"";"";"";"a"] ; - test "e" ~total:30 ~elements:10 ["ab";"c";"def";"gh";"ijk"] ; - ] + ( "bounded_string_list." ^ name, + `Quick, + binary ~expected (bounded_list ~total ~elements string) v ) + in + [ test "a" ~total:0 ~elements:0 [""]; + test "b1" ~total:3 ~elements:4 [""]; + test "b2" ~total:4 ~elements:3 [""]; + test "c1" ~total:19 ~elements:4 [""; ""; ""; ""; ""]; + test "c2" ~total:20 ~elements:3 [""; ""; ""; ""; ""]; + test "d1" ~total:20 ~elements:5 [""; ""; ""; ""; "a"]; + test "d2" ~total:21 ~elements:4 [""; ""; ""; ""; "a"]; + test "e" ~total:30 ~elements:10 ["ab"; "c"; "def"; "gh"; "ijk"] ] let tests = - all_ranged_int 100 400 @ - all_ranged_int 19000 19254 @ - all_ranged_int ~-100 300 @ - all_ranged_int ~-300_000_000 300_000_000 @ - all_ranged_float ~-. 100. 300. 
@ - all "string.fixed" (Fixed.string 4) "turlututu" @ - all "string.bounded" (Bounded.string 4) "turlututu" @ - all "bytes.fixed" (Fixed.bytes 4) (MBytes.of_string "turlututu") @ - all "bytes.bounded" (Bounded.bytes 4) (MBytes.of_string "turlututu") @ - all "unknown_case.B" mini_union_enc (B "2") @ - all "unknown_case.E" mini_union_enc E @ - test_bounded_string_list @ - all "n" n (Z.of_string "-12") @ - [] + all_ranged_int 100 400 @ all_ranged_int 19000 19254 + @ all_ranged_int ~-100 300 + @ all_ranged_int ~-300_000_000 300_000_000 + @ all_ranged_float ~-.100. 300. + @ all "string.fixed" (Fixed.string 4) "turlututu" + @ all "string.bounded" (Bounded.string 4) "turlututu" + @ all "bytes.fixed" (Fixed.bytes 4) (MBytes.of_string "turlututu") + @ all "bytes.bounded" (Bounded.bytes 4) (MBytes.of_string "turlututu") + @ all "unknown_case.B" mini_union_enc (B "2") + @ all "unknown_case.E" mini_union_enc E + @ test_bounded_string_list + @ all "n" n (Z.of_string "-12") + @ [] diff --git a/src/lib_error_monad/.ocamlformat b/src/lib_error_monad/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_error_monad/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_error_monad/error_monad.ml b/src/lib_error_monad/error_monad.ml index 89bdebc62330403cc5fa4af937fa74f047adab0c..eb7b3666c8e1f91855596812072707dd9e3802c2 100644 --- a/src/lib_error_monad/error_monad.ml +++ b/src/lib_error_monad/error_monad.ml @@ -28,28 +28,34 @@ (*-- Error classification ----------------------------------------------------*) -type error_category = [ `Branch | `Temporary | `Permanent ] +type error_category = [`Branch | `Temporary | `Permanent] (* hack: forward reference from [Data_encoding_ezjsonm] *) let json_to_string = ref (fun _ -> "") let json_pp id encoding ppf x = - Format.pp_print_string ppf @@ - !json_to_string @@ + Format.pp_print_string ppf @@ !json_to_string + @@ let encoding = - Data_encoding.(merge_objs (obj1 (req "id" string)) encoding) in + Data_encoding.(merge_objs (obj1 (req "id" string)) encoding) + in Data_encoding.Json.construct encoding (id, x) let set_error_encoding_cache_dirty = ref (fun () -> ()) -module Make(Prefix : sig val id : string end) = struct - +module Make (Prefix : sig + val id : string +end) = +struct type error = .. module type Wrapped_error_monad = sig type unwrapped = .. 
+ include Error_monad_sig.S with type error := unwrapped + val unwrap : error -> unwrapped option + val wrap : unwrapped -> error end @@ -59,62 +65,82 @@ module Make(Prefix : sig val id : string end) = struct (* the toplevel store for error kinds *) type error_kind = - Error_kind : - { id: string ; - title: string ; - description: string ; - from_error: error -> 'err option ; - category: full_error_category ; - encoding_case: error Data_encoding.case ; - pp: Format.formatter -> 'err -> unit ; } -> - error_kind - - type error_info = - { category : error_category ; - id: string ; - title : string ; - description : string ; - schema : Data_encoding.json_schema } - - let error_kinds - : error_kind list ref - = ref [] + | Error_kind : + { id : string; + title : string; + description : string; + from_error : error -> 'err option; + category : full_error_category; + encoding_case : error Data_encoding.case; + pp : Format.formatter -> 'err -> unit } + -> error_kind + + type error_info = { + category : error_category; + id : string; + title : string; + description : string; + schema : Data_encoding.json_schema + } + + let error_kinds : error_kind list ref = ref [] let get_registered_errors () : error_info list = List.flatten (List.map (function - | Error_kind { id = "" ; _ } -> [] - | Error_kind { id ; title ; description ; category = Main category ; encoding_case ; _ } -> - [ { id ; title ; description ; category ; - schema = Data_encoding.Json.schema (Data_encoding.union [ encoding_case ]) } ] - | Error_kind { category = Wrapped (module WEM) ; _ } -> + | Error_kind {id = ""; _} -> + [] + | Error_kind + { id; + title; + description; + category = Main category; + encoding_case; + _ } -> + [ { id; + title; + description; + category; + schema = + Data_encoding.Json.schema + (Data_encoding.union [encoding_case]) } ] + | Error_kind {category = Wrapped (module WEM); _} -> List.map - (fun { WEM.id ; title ; description ; category ; schema } -> - { id ; title ; description ; category ; schema }) + (fun {WEM.id; title; description; category; schema} -> + {id; title; description; category; schema}) (WEM.get_registered_errors ())) !error_kinds) let error_encoding_cache = ref None + let () = let cont = !set_error_encoding_cache_dirty in - set_error_encoding_cache_dirty := fun () -> - cont () ; - error_encoding_cache := None + set_error_encoding_cache_dirty := + fun () -> + cont () ; + error_encoding_cache := None let string_of_category = function - | `Permanent -> "permanent" - | `Temporary -> "temporary" - | `Branch -> "branch" - - let pp_info - ppf - { category; id; title; description; schema } = + | `Permanent -> + "permanent" + | `Temporary -> + "temporary" + | `Branch -> + "branch" + + let pp_info ppf {category; id; title; description; schema} = Format.fprintf ppf - "@[<v 2>category : %s\nid : %s\ntitle : %s\ndescription : %s\nschema : %a@]" + "@[<v 2>category : %s\n\ + id : %s\n\ + title : %s\n\ + description : %s\n\ + schema : %a@]" (string_of_category category) - id title description + id + title + description (Json_repr.pp (module Json_repr.Ezjsonm)) (Json_schema.to_json schema) @@ -126,26 +152,30 @@ module Make(Prefix : sig val id : string end) = struct let category = Main `Temporary in let to_error msg = Unclassified msg in let from_error = function - | Unclassified msg -> Some msg + | Unclassified msg -> + Some msg | error -> let msg = Obj.(extension_name @@ extension_constructor error) in - Some ("Unclassified error: " ^ msg ^ ". 
Was the error registered?") in + Some ("Unclassified error: " ^ msg ^ ". Was the error registered?") + in let title = "Generic error" in - let description = "An unclassified error" in + let description = "An unclassified error" in let encoding_case = let open Data_encoding in - case Json_only + case + Json_only ~title:"Generic error" - (def "generic_error" ~title ~description @@ - conv (fun x -> ((), x)) (fun ((), x) -> x) @@ - (obj2 - (req "kind" (constant "generic")) - (req "error" string))) - from_error to_error in + ( def "generic_error" ~title ~description + @@ conv (fun x -> ((), x)) (fun ((), x) -> x) + @@ obj2 (req "kind" (constant "generic")) (req "error" string) ) + from_error + to_error + in let pp ppf s = Format.fprintf ppf "@[<h 0>%a@]" Format.pp_print_text s in error_kinds := - Error_kind { id ; title ; description ; - from_error ; category ; encoding_case ; pp } :: !error_kinds + Error_kind + {id; title; description; from_error; category; encoding_case; pp} + :: !error_kinds (* Catch all error when 'deserializing' an error. *) type error += Unregistred_error of Data_encoding.json @@ -155,179 +185,220 @@ module Make(Prefix : sig val id : string end) = struct let category = Main `Temporary in let to_error msg = Unregistred_error msg in let from_error = function - | Unregistred_error json -> Some json - | _ -> None in + | Unregistred_error json -> + Some json + | _ -> + None + in let encoding_case = let open Data_encoding in - case Json_only - ~title:"Unregistred error" - json from_error to_error in + case Json_only ~title:"Unregistred error" json from_error to_error + in let pp ppf json = - Format.fprintf ppf "@[<v 2>Unregistred error:@ %a@]" - Data_encoding.Json.pp json in + Format.fprintf + ppf + "@[<v 2>Unregistred error:@ %a@]" + Data_encoding.Json.pp + json + in error_kinds := - Error_kind { id ; title = "" ; description = "" ; - from_error ; category ; encoding_case ; pp } :: !error_kinds + Error_kind + { id; + title = ""; + description = ""; + from_error; + category; + encoding_case; + pp } + :: !error_kinds - let raw_register_error_kind - category ~id:name ~title ~description ?pp + let raw_register_error_kind category ~id:name ~title ~description ?pp encoding from_error to_error = let name = Prefix.id ^ name in - if List.exists - (fun (Error_kind { id ; _ }) -> name = id) - !error_kinds then + if List.exists (fun (Error_kind {id; _}) -> name = id) !error_kinds then invalid_arg - (Printf.sprintf - "register_error_kind: duplicate error name: %s" name) ; + (Printf.sprintf "register_error_kind: duplicate error name: %s" name) ; let encoding_case = let open Data_encoding in match category with | Wrapped (module WEM) -> let unwrap err = match WEM.unwrap err with - | Some (WEM.Unclassified _) -> None + | Some (WEM.Unclassified _) -> + None | Some (WEM.Unregistred_error _) -> Format.eprintf "What %s@." 
name ; None - | res -> res in + | res -> + res + in let wrap err = match err with | WEM.Unclassified _ -> failwith "ignore wrapped error when serializing" | WEM.Unregistred_error _ -> failwith "ignore wrapped error when deserializing" - | res -> WEM.wrap res in - case Json_only - ~title:name - WEM.error_encoding unwrap wrap + | res -> + WEM.wrap res + in + case Json_only ~title:name WEM.error_encoding unwrap wrap | Main category -> let with_id_and_kind_encoding = merge_objs (obj2 (req "kind" (constant (string_of_category category))) (req "id" (constant name))) - encoding in - case Json_only + encoding + in + case + Json_only ~title ~description (conv (fun x -> (((), ()), x)) - (fun (((),()), x) -> x) + (fun (((), ()), x) -> x) with_id_and_kind_encoding) - from_error to_error in + from_error + to_error + in !set_error_encoding_cache_dirty () ; error_kinds := Error_kind - { id = name ; - category ; - title ; - description ; - from_error ; - encoding_case ; + { id = name; + category; + title; + description; + from_error; + encoding_case; pp = Option.unopt ~default:(json_pp name encoding) pp } :: !error_kinds - let register_wrapped_error_kind - (module WEM : Wrapped_error_monad) ~id ~title ~description = + let register_wrapped_error_kind (module WEM : Wrapped_error_monad) ~id ~title + ~description = raw_register_error_kind (Wrapped (module WEM)) - ~id ~title ~description - ~pp:WEM.pp WEM.error_encoding WEM.unwrap WEM.wrap - - let register_error_kind - category ~id ~title ~description ?pp - encoding from_error to_error = - if not (Data_encoding.is_obj encoding) - then invalid_arg + ~id + ~title + ~description + ~pp:WEM.pp + WEM.error_encoding + WEM.unwrap + WEM.wrap + + let register_error_kind category ~id ~title ~description ?pp encoding + from_error to_error = + if not (Data_encoding.is_obj encoding) then + invalid_arg (Printf.sprintf - "Specified encoding for \"%s%s\" is not an object, but error encodings must be objects." - Prefix.id id) ; + "Specified encoding for \"%s%s\" is not an object, but error \ + encodings must be objects." 
+ Prefix.id + id) ; raw_register_error_kind (Main category) - ~id ~title ~description ?pp - encoding from_error to_error + ~id + ~title + ~description + ?pp + encoding + from_error + to_error let error_encoding () = match !error_encoding_cache with | None -> let cases = List.map - (fun (Error_kind { encoding_case ; _ }) -> encoding_case) - !error_kinds in + (fun (Error_kind {encoding_case; _}) -> encoding_case) + !error_kinds + in let json_encoding = Data_encoding.union cases in let encoding = - Data_encoding.dynamic_size @@ - Data_encoding.splitted - ~json:json_encoding - ~binary: - (Data_encoding.conv - (Data_encoding.Json.construct json_encoding) - (Data_encoding.Json.destruct json_encoding) - Data_encoding.json) in + Data_encoding.dynamic_size + @@ Data_encoding.splitted + ~json:json_encoding + ~binary: + (Data_encoding.conv + (Data_encoding.Json.construct json_encoding) + (Data_encoding.Json.destruct json_encoding) + Data_encoding.json) + in error_encoding_cache := Some encoding ; encoding - | Some encoding -> encoding + | Some encoding -> + encoding let error_encoding = Data_encoding.delayed error_encoding - let json_of_error error = - Data_encoding.Json.construct error_encoding error - let error_of_json json = - Data_encoding.Json.destruct error_encoding json + let json_of_error error = Data_encoding.Json.construct error_encoding error + + let error_of_json json = Data_encoding.Json.destruct error_encoding json let classify_error error = let rec find e = function - | [] -> `Temporary + | [] -> + `Temporary (* assert false (\* See "Generic error" *\) *) - | Error_kind { from_error ; category ; _ } :: rest -> - match from_error e with - | Some _ -> begin - match category with - | Main error_category -> error_category - | Wrapped (module WEM) -> - match WEM.unwrap e with - | Some e -> WEM.classify_errors [ e ] - | None -> find e rest - end - | None -> find e rest in + | Error_kind {from_error; category; _} :: rest -> ( + match from_error e with + | Some _ -> ( + match category with + | Main error_category -> + error_category + | Wrapped (module WEM) -> ( + match WEM.unwrap e with + | Some e -> + WEM.classify_errors [e] + | None -> + find e rest ) ) + | None -> + find e rest ) + in find error !error_kinds let classify_errors errors = List.fold_left - (fun r e -> match r, classify_error e with - | `Permanent, _ | _, `Permanent -> `Permanent - | `Branch, _ | _, `Branch -> `Branch - | `Temporary, `Temporary -> `Temporary) - `Temporary errors + (fun r e -> + match (r, classify_error e) with + | (`Permanent, _) | (_, `Permanent) -> + `Permanent + | (`Branch, _) | (_, `Branch) -> + `Branch + | (`Temporary, `Temporary) -> + `Temporary) + `Temporary + errors let pp ppf error = let rec find = function - | [] -> assert false (* See "Generic error" *) - | Error_kind { from_error ; pp ; _ } :: errors -> - match from_error error with - | None -> find errors - | Some x -> pp ppf x in + | [] -> + assert false (* See "Generic error" *) + | Error_kind {from_error; pp; _} :: errors -> ( + match from_error error with None -> find errors | Some x -> pp ppf x ) + in find !error_kinds (*-- Monad definition --------------------------------------------------------*) - let (>>=) = Lwt.(>>=) + let ( >>= ) = Lwt.( >>= ) type 'a tzresult = ('a, error list) result let result_encoding t_encoding = let open Data_encoding in - let errors_encoding = - obj1 (req "error" (list error_encoding)) in - let t_encoding = - obj1 (req "result" t_encoding) in + let errors_encoding = obj1 (req "error" (list error_encoding)) in + 
let t_encoding = obj1 (req "result" t_encoding) in union ~tag_size:`Uint8 - [ case (Tag 0) t_encoding + [ case + (Tag 0) + t_encoding ~title:"Ok" (function Ok x -> Some x | _ -> None) - (function res -> Ok res) ; - case (Tag 1) errors_encoding + (function res -> Ok res); + case + (Tag 1) + errors_encoding ~title:"Error" (function Error x -> Some x | _ -> None) (fun errs -> Error errs) ] @@ -346,43 +417,38 @@ module Make(Prefix : sig val id : string end) = struct let return_false = Lwt.return_ok false - let error s = Error [ s ] + let error s = Error [s] let ok v = Ok v - let fail s = Lwt.return_error [ s ] + let fail s = Lwt.return_error [s] + + let ( >>? ) v f = match v with Error _ as err -> err | Ok v -> f v - let (>>?) v f = - match v with - | Error _ as err -> err - | Ok v -> f v + let ( >>=? ) v f = + v >>= function Error _ as err -> Lwt.return err | Ok v -> f v - let (>>=?) v f = - v >>= function - | Error _ as err -> Lwt.return err - | Ok v -> f v + let ( >>|? ) v f = v >>=? fun v -> Lwt.return_ok (f v) - let (>>|?) v f = v >>=? fun v -> Lwt.return_ok (f v) - let (>|=) = Lwt.(>|=) + let ( >|= ) = Lwt.( >|= ) - let (>|?) v f = v >>? fun v -> Ok (f v) + let ( >|? ) v f = v >>? fun v -> Ok (f v) let rec map_s f l = match l with - | [] -> return_nil + | [] -> + return_nil | h :: t -> - f h >>=? fun rh -> - map_s f t >>=? fun rt -> - return (rh :: rt) + f h >>=? fun rh -> map_s f t >>=? fun rt -> return (rh :: rt) let mapi_s f l = let rec mapi_s f i l = match l with - | [] -> return_nil + | [] -> + return_nil | h :: t -> - f i h >>=? fun rh -> - mapi_s f (i+1) t >>=? fun rt -> - return (rh :: rt) + f i h + >>=? fun rh -> mapi_s f (i + 1) t >>=? fun rt -> return (rh :: rt) in mapi_s f 0 l @@ -390,231 +456,255 @@ module Make(Prefix : sig val id : string end) = struct match l with | [] -> return_nil - | x :: l -> + | x :: l -> ( let tx = f x and tl = map_p f l in - tx >>= fun x -> - tl >>= fun l -> - match x, l with - | Ok x, Ok l -> Lwt.return_ok (x :: l) - | Error exn1, Error exn2 -> Lwt.return_error (exn1 @ exn2) - | Ok _, Error exn - | Error exn, Ok _ -> Lwt.return_error exn + tx + >>= fun x -> + tl + >>= fun l -> + match (x, l) with + | (Ok x, Ok l) -> + Lwt.return_ok (x :: l) + | (Error exn1, Error exn2) -> + Lwt.return_error (exn1 @ exn2) + | (Ok _, Error exn) | (Error exn, Ok _) -> + Lwt.return_error exn ) let mapi_p f l = let rec mapi_p f i l = match l with | [] -> return_nil - | x :: l -> - let tx = f i x and tl = mapi_p f (i+1) l in - tx >>= fun x -> - tl >>= fun l -> - match x, l with - | Ok x, Ok l -> Lwt.return_ok (x :: l) - | Error exn1, Error exn2 -> Lwt.return_error (exn1 @ exn2) - | Ok _, Error exn - | Error exn, Ok _ -> Lwt.return_error exn in + | x :: l -> ( + let tx = f i x and tl = mapi_p f (i + 1) l in + tx + >>= fun x -> + tl + >>= fun l -> + match (x, l) with + | (Ok x, Ok l) -> + Lwt.return_ok (x :: l) + | (Error exn1, Error exn2) -> + Lwt.return_error (exn1 @ exn2) + | (Ok _, Error exn) | (Error exn, Ok _) -> + Lwt.return_error exn ) + in mapi_p f 0 l let rec map2_s f l1 l2 = - match l1, l2 with - | [], [] -> return_nil - | _ :: _, [] | [], _ :: _ -> invalid_arg "Error_monad.map2_s" - | h1 :: t1, h2 :: t2 -> - f h1 h2 >>=? fun rh -> - map2_s f t1 t2 >>=? fun rt -> - return (rh :: rt) + match (l1, l2) with + | ([], []) -> + return_nil + | (_ :: _, []) | ([], _ :: _) -> + invalid_arg "Error_monad.map2_s" + | (h1 :: t1, h2 :: t2) -> + f h1 h2 >>=? fun rh -> map2_s f t1 t2 >>=? 
fun rt -> return (rh :: rt) let mapi2_s f l1 l2 = let rec mapi2_s i f l1 l2 = - match l1, l2 with - | [], [] -> return_nil - | _ :: _, [] | [], _ :: _ -> invalid_arg "Error_monad.mapi2_s" - | h1 :: t1, h2 :: t2 -> - f i h1 h2 >>=? fun rh -> - mapi2_s (i+1) f t1 t2 >>=? fun rt -> - return (rh :: rt) in + match (l1, l2) with + | ([], []) -> + return_nil + | (_ :: _, []) | ([], _ :: _) -> + invalid_arg "Error_monad.mapi2_s" + | (h1 :: t1, h2 :: t2) -> + f i h1 h2 + >>=? fun rh -> + mapi2_s (i + 1) f t1 t2 >>=? fun rt -> return (rh :: rt) + in mapi2_s 0 f l1 l2 let rec map2 f l1 l2 = - match l1, l2 with - | [], [] -> Ok [] - | _ :: _, [] | [], _ :: _ -> invalid_arg "Error_monad.map2" - | h1 :: t1, h2 :: t2 -> - f h1 h2 >>? fun rh -> - map2 f t1 t2 >>? fun rt -> - Ok (rh :: rt) + match (l1, l2) with + | ([], []) -> + Ok [] + | (_ :: _, []) | ([], _ :: _) -> + invalid_arg "Error_monad.map2" + | (h1 :: t1, h2 :: t2) -> + f h1 h2 >>? fun rh -> map2 f t1 t2 >>? fun rt -> Ok (rh :: rt) let rec filter_map_s f l = match l with - | [] -> return_nil - | h :: t -> - f h >>=? function - | None -> filter_map_s f t + | [] -> + return_nil + | h :: t -> ( + f h + >>=? function + | None -> + filter_map_s f t | Some rh -> - filter_map_s f t >>=? fun rt -> - return (rh :: rt) + filter_map_s f t >>=? fun rt -> return (rh :: rt) ) let rec filter_map_p f l = match l with - | [] -> return_nil - | h :: t -> - let th = f h - and tt = filter_map_p f t in - th >>=? function - | None -> tt - | Some rh -> - tt >>=? fun rt -> - return (rh :: rt) + | [] -> + return_nil + | h :: t -> ( + let th = f h and tt = filter_map_p f t in + th + >>=? function + | None -> tt | Some rh -> tt >>=? fun rt -> return (rh :: rt) ) let rec filter_s f l = match l with - | [] -> return_nil - | h :: t -> - f h >>=? function - | false -> filter_s f t + | [] -> + return_nil + | h :: t -> ( + f h + >>=? function + | false -> + filter_s f t | true -> - filter_s f t >>=? fun t -> - return (h :: t) + filter_s f t >>=? fun t -> return (h :: t) ) let rec filter_p f l = match l with - | [] -> return_nil - | h :: t -> - let jh = f h - and t = filter_p f t in - jh >>=? function - | false -> t - | true -> - t >>=? fun t -> - return (h :: t) + | [] -> + return_nil + | h :: t -> ( + let jh = f h and t = filter_p f t in + jh >>=? function false -> t | true -> t >>=? fun t -> return (h :: t) ) let rec iter_s f l = - match l with - | [] -> return_unit - | h :: t -> - f h >>=? fun () -> - iter_s f t + match l with [] -> return_unit | h :: t -> f h >>=? 
fun () -> iter_s f t let rec iter_p f l = match l with - | [] -> return_unit - | x :: l -> + | [] -> + return_unit + | x :: l -> ( let tx = f x and tl = iter_p f l in - tx >>= fun tx_res -> - tl >>= fun tl_res -> - match tx_res, tl_res with - | Ok (), Ok () -> Lwt.return_ok () - | Error exn1, Error exn2 -> Lwt.return_error (exn1 @ exn2) - | Ok (), Error exn - | Error exn, Ok () -> Lwt.return_error exn + tx + >>= fun tx_res -> + tl + >>= fun tl_res -> + match (tx_res, tl_res) with + | (Ok (), Ok ()) -> + Lwt.return_ok () + | (Error exn1, Error exn2) -> + Lwt.return_error (exn1 @ exn2) + | (Ok (), Error exn) | (Error exn, Ok ()) -> + Lwt.return_error exn ) let iteri_p f l = let rec iteri_p i f l = match l with - | [] -> return_unit - | x :: l -> - let tx = f i x and tl = iteri_p (i+1) f l in - tx >>= fun tx_res -> - tl >>= fun tl_res -> - match tx_res, tl_res with - | Ok (), Ok () -> Lwt.return (Ok ()) - | Error exn1, Error exn2 -> Lwt.return (Error (exn1 @ exn2)) - | Ok (), Error exn - | Error exn, Ok () -> Lwt.return (Error exn) + | [] -> + return_unit + | x :: l -> ( + let tx = f i x and tl = iteri_p (i + 1) f l in + tx + >>= fun tx_res -> + tl + >>= fun tl_res -> + match (tx_res, tl_res) with + | (Ok (), Ok ()) -> + Lwt.return (Ok ()) + | (Error exn1, Error exn2) -> + Lwt.return (Error (exn1 @ exn2)) + | (Ok (), Error exn) | (Error exn, Ok ()) -> + Lwt.return (Error exn) ) in iteri_p 0 f l let rec iter2_p f l1 l2 = - match l1, l2 with - | [], [] -> return_unit - | [], _ | _, [] -> invalid_arg "Error_monad.iter2_p" - | x1 :: l1 , x2 :: l2 -> + match (l1, l2) with + | ([], []) -> + return_unit + | ([], _) | (_, []) -> + invalid_arg "Error_monad.iter2_p" + | (x1 :: l1, x2 :: l2) -> ( let tx = f x1 x2 and tl = iter2_p f l1 l2 in - tx >>= fun tx_res -> - tl >>= fun tl_res -> - match tx_res, tl_res with - | Ok (), Ok () -> Lwt.return_ok () - | Error exn1, Error exn2 -> Lwt.return_error (exn1 @ exn2) - | Ok (), Error exn - | Error exn, Ok () -> Lwt.return_error exn + tx + >>= fun tx_res -> + tl + >>= fun tl_res -> + match (tx_res, tl_res) with + | (Ok (), Ok ()) -> + Lwt.return_ok () + | (Error exn1, Error exn2) -> + Lwt.return_error (exn1 @ exn2) + | (Ok (), Error exn) | (Error exn, Ok ()) -> + Lwt.return_error exn ) let iteri2_p f l1 l2 = let rec iteri2_p i f l1 l2 = - match l1, l2 with - | [], [] -> return_unit - | [], _ | _, [] -> invalid_arg "Error_monad.iteri2_p" - | x1 :: l1 , x2 :: l2 -> - let tx = f i x1 x2 and tl = iteri2_p (i+1) f l1 l2 in - tx >>= fun tx_res -> - tl >>= fun tl_res -> - match tx_res, tl_res with - | Ok (), Ok () -> Lwt.return_ok () - | Error exn1, Error exn2 -> Lwt.return_error (exn1 @ exn2) - | Ok (), Error exn - | Error exn, Ok () -> Lwt.return_error exn + match (l1, l2) with + | ([], []) -> + return_unit + | ([], _) | (_, []) -> + invalid_arg "Error_monad.iteri2_p" + | (x1 :: l1, x2 :: l2) -> ( + let tx = f i x1 x2 and tl = iteri2_p (i + 1) f l1 l2 in + tx + >>= fun tx_res -> + tl + >>= fun tl_res -> + match (tx_res, tl_res) with + | (Ok (), Ok ()) -> + Lwt.return_ok () + | (Error exn1, Error exn2) -> + Lwt.return_error (exn1 @ exn2) + | (Ok (), Error exn) | (Error exn, Ok ()) -> + Lwt.return_error exn ) in iteri2_p 0 f l1 l2 let rec fold_left_s f init l = match l with - | [] -> return init + | [] -> + return init | h :: t -> - f init h >>=? fun acc -> - fold_left_s f acc t + f init h >>=? fun acc -> fold_left_s f acc t let rec fold_right_s f l init = match l with - | [] -> return init + | [] -> + return init | h :: t -> - fold_right_s f t init >>=? 
fun acc -> - f h acc + fold_right_s f t init >>=? fun acc -> f h acc let rec join = function - | [] -> return_unit - | t :: ts -> - t >>= function + | [] -> + return_unit + | t :: ts -> ( + t + >>= function | Error _ as err -> - join ts >>=? fun () -> - Lwt.return err + join ts >>=? fun () -> Lwt.return err | Ok () -> - join ts + join ts ) let record_trace err result = - match result with - | Ok _ as res -> res - | Error errs -> Error (err :: errs) + match result with Ok _ as res -> res | Error errs -> Error (err :: errs) let trace err f = - f >>= function - | Error errs -> Lwt.return_error (err :: errs) - | ok -> Lwt.return ok + f + >>= function + | Error errs -> Lwt.return_error (err :: errs) | ok -> Lwt.return ok let record_trace_eval mk_err result = match result with - | Ok _ as res -> res + | Ok _ as res -> + res | Error errs -> - mk_err () >>? fun err -> - Error (err :: errs) + mk_err () >>? fun err -> Error (err :: errs) let trace_eval mk_err f = - f >>= function + f + >>= function | Error errs -> - mk_err () >>=? fun err -> - Lwt.return_error (err :: errs) - | ok -> Lwt.return ok + mk_err () >>=? fun err -> Lwt.return_error (err :: errs) + | ok -> + Lwt.return ok - let fail_unless cond exn = - if cond then return_unit else fail exn + let fail_unless cond exn = if cond then return_unit else fail exn - let fail_when cond exn = - if cond then fail exn else return_unit + let fail_when cond exn = if cond then fail exn else return_unit - let unless cond f = - if cond then return_unit else f () + let unless cond f = if cond then return_unit else f () - let _when cond f = - if cond then f () else return_unit + let _when cond f = if cond then f () else return_unit let pp_print_error ppf errors = match errors with @@ -623,7 +713,9 @@ module Make(Prefix : sig val id : string end) = struct | [error] -> Format.fprintf ppf "@[<v 2>Error:@ %a@]@." pp error | errors -> - Format.fprintf ppf "@[<v 2>Error, dumping error stack:@,%a@]@." + Format.fprintf + ppf + "@[<v 2>Error, dumping error stack:@,%a@]@." (Format.pp_print_list pp) (List.rev errors) @@ -634,69 +726,86 @@ module Make(Prefix : sig val id : string end) = struct let category = Main `Permanent in let to_error (loc, msg) = Assert_error (loc, msg) in let from_error = function - | Assert_error (loc, msg) -> Some (loc, msg) - | _ -> None in + | Assert_error (loc, msg) -> + Some (loc, msg) + | _ -> + None + in let title = "Assertion error" in - let description = "An fatal assertion" in + let description = "An fatal assertion" in let encoding_case = let open Data_encoding in - case Json_only ~title ~description - (conv (fun (x, y) -> ((), x, y)) (fun ((), x, y) -> (x, y)) - ((obj3 - (req "kind" (constant "assertion")) - (req "location" string) - (req "error" string)))) - from_error to_error in + case + Json_only + ~title + ~description + (conv + (fun (x, y) -> ((), x, y)) + (fun ((), x, y) -> (x, y)) + (obj3 + (req "kind" (constant "assertion")) + (req "location" string) + (req "error" string))) + from_error + to_error + in let pp ppf (loc, msg) = - Format.fprintf ppf + Format.fprintf + ppf "Assert failure (%s)%s" loc - (if msg = "" then "." else ": " ^ msg) in + (if msg = "" then "." 
else ": " ^ msg) + in error_kinds := - Error_kind { id ; title ; description ; - from_error ; category ; encoding_case ; pp } :: !error_kinds + Error_kind + {id; title; description; from_error; category; encoding_case; pp} + :: !error_kinds let _assert b loc fmt = - if b then - Format.ikfprintf (fun _ -> return_unit) Format.str_formatter fmt - else - Format.kasprintf (fun msg -> fail (Assert_error (loc, msg))) fmt - + if b then Format.ikfprintf (fun _ -> return_unit) Format.str_formatter fmt + else Format.kasprintf (fun msg -> fail (Assert_error (loc, msg))) fmt type 'a tzlazy_state = | Remembered of 'a | Not_yet_known of (unit -> 'a tzresult Lwt.t) - type 'a tzlazy = { mutable tzcontents: 'a tzlazy_state } - let tzlazy c = { tzcontents = Not_yet_known c } - let tzforce v = match v.tzcontents with - | Remembered v -> return v - | Not_yet_known c -> - c () >>=? fun w -> - v.tzcontents <- Remembered w; - return w + type 'a tzlazy = {mutable tzcontents : 'a tzlazy_state} + let tzlazy c = {tzcontents = Not_yet_known c} + + let tzforce v = + match v.tzcontents with + | Remembered v -> + return v + | Not_yet_known c -> + c () + >>=? fun w -> + v.tzcontents <- Remembered w ; + return w end -include Make(struct let id = "" end) +include Make (struct + let id = "" +end) type error += Exn of exn -let generic_error fmt = - Format.kasprintf (fun s -> error (Exn (Failure s))) fmt +let generic_error fmt = Format.kasprintf (fun s -> error (Exn (Failure s))) fmt + +let failwith fmt = Format.kasprintf (fun s -> fail (Exn (Failure s))) fmt + +let error s = Error [s] -let failwith fmt = - Format.kasprintf (fun s -> fail (Exn (Failure s))) fmt +let error_exn s = Error [Exn s] -let error s = Error [ s ] -let error_exn s = Error [ Exn s ] let trace_exn exn f = trace (Exn exn) f + let generic_trace fmt = Format.kasprintf (fun str -> trace_exn (Failure str)) fmt + let record_trace_exn exn f = record_trace (Exn exn) f -let failure fmt = - Format.kasprintf (fun str -> Exn (Failure str)) fmt +let failure fmt = Format.kasprintf (fun str -> Exn (Failure str)) fmt let pp_exn ppf exn = pp ppf (Exn exn) @@ -709,9 +818,12 @@ let () = ~pp:(fun ppf s -> Format.fprintf ppf "@[<h 0>%a@]" Format.pp_print_text s) Data_encoding.(obj1 (req "msg" string)) (function - | Exn (Failure msg) -> Some msg - | Exn exn -> Some (Printexc.to_string exn) - | _ -> None) + | Exn (Failure msg) -> + Some msg + | Exn exn -> + Some (Printexc.to_string exn) + | _ -> + None) (fun msg -> Exn (Failure msg)) type error += Canceled @@ -729,23 +841,26 @@ let () = let protect ?on_error ?canceler t = let cancelation = match canceler with - | None -> Lwt_utils.never_ending () + | None -> + Lwt_utils.never_ending () | Some canceler -> - (Lwt_canceler.cancelation canceler >>= fun () -> - fail Canceled ) in - let res = - Lwt.pick [ cancelation ; - Lwt.catch t (fun exn -> fail (Exn exn)) ] in - res >>= function - | Ok _ -> res - | Error err -> + Lwt_canceler.cancelation canceler >>= fun () -> fail Canceled + in + let res = Lwt.pick [cancelation; Lwt.catch t (fun exn -> fail (Exn exn))] in + res + >>= function + | Ok _ -> + res + | Error err -> ( let canceled = - Option.unopt_map canceler ~default:false ~f:Lwt_canceler.canceled in + Option.unopt_map canceler ~default:false ~f:Lwt_canceler.canceled + in let err = if canceled then [Canceled] else err in match on_error with - | None -> Lwt.return_error err + | None -> + Lwt.return_error err | Some on_error -> - Lwt.catch (fun () -> on_error err) (fun exn -> fail (Exn exn)) + Lwt.catch (fun () -> on_error err) (fun 
exn -> fail (Exn exn)) ) type error += Timeout @@ -761,13 +876,9 @@ let () = let with_timeout ?(canceler = Lwt_canceler.create ()) timeout f = let target = f canceler in - Lwt.choose [ timeout ; (target >|= fun _ -> ()) ] >>= fun () -> - if Lwt.state target <> Lwt.Sleep then begin - Lwt.cancel timeout ; - target - end else begin - Lwt_canceler.cancel canceler >>= fun () -> - fail Timeout - end + Lwt.choose [timeout; (target >|= fun _ -> ())] + >>= fun () -> + if Lwt.state target <> Lwt.Sleep then (Lwt.cancel timeout ; target) + else Lwt_canceler.cancel canceler >>= fun () -> fail Timeout let errs_tag = Tag.def ~doc:"Errors" "errs" pp_print_error diff --git a/src/lib_error_monad/error_monad.mli b/src/lib_error_monad/error_monad.mli index 1a73f5f469000418f813bdda8be08d6b6813e5a4..cfa992d4be30e2d9996520afeef2546b9bcad4fb 100644 --- a/src/lib_error_monad/error_monad.mli +++ b/src/lib_error_monad/error_monad.mli @@ -27,41 +27,49 @@ (** Categories of error *) type error_category = - [ `Branch (** Errors that may not happen in another context *) - | `Temporary (** Errors that may not happen in a later context *) - | `Permanent (** Errors that will happen no matter the context *) - ] + [ `Branch (** Errors that may not happen in another context *) + | `Temporary (** Errors that may not happen in a later context *) + | `Permanent (** Errors that will happen no matter the context *) ] include Error_monad_sig.S module type Wrapped_error_monad = sig type unwrapped = .. + include Error_monad_sig.S with type error := unwrapped + val unwrap : error -> unwrapped option + val wrap : unwrapped -> error end val register_wrapped_error_kind : (module Wrapped_error_monad) -> - id:string -> title:string -> description:string -> + id:string -> + title:string -> + description:string -> unit (** Erroneous result (shortcut for generic errors) *) -val generic_error : - ('a, Format.formatter, unit, 'b tzresult) format4 -> - 'a +val generic_error : ('a, Format.formatter, unit, 'b tzresult) format4 -> 'a (** Erroneous return (shortcut for generic errors) *) -val failwith : - ('a, Format.formatter, unit, 'b tzresult Lwt.t) format4 -> - 'a +val failwith : ('a, Format.formatter, unit, 'b tzresult Lwt.t) format4 -> 'a val error_exn : exn -> 'a tzresult + val record_trace_exn : exn -> 'a tzresult -> 'a tzresult + val trace_exn : exn -> 'b tzresult Lwt.t -> 'b tzresult Lwt.t + val generic_trace : - ('a, Format.formatter, unit, - ('b, error list) result Lwt.t -> ('b, error list) result Lwt.t) format4 -> 'a + ( 'a, + Format.formatter, + unit, + ('b, error list) result Lwt.t -> ('b, error list) result Lwt.t ) + format4 -> + 'a + val pp_exn : Format.formatter -> exn -> unit val failure : ('a, Format.formatter, unit, error) format4 -> 'a @@ -86,16 +94,23 @@ type error += Canceled val protect : ?on_error:(error list -> 'a tzresult Lwt.t) -> ?canceler:Lwt_canceler.t -> - (unit -> 'a tzresult Lwt.t) -> 'a tzresult Lwt.t + (unit -> 'a tzresult Lwt.t) -> + 'a tzresult Lwt.t type error += Timeout -val with_timeout: + +val with_timeout : ?canceler:Lwt_canceler.t -> - unit Lwt.t -> (Lwt_canceler.t -> 'a tzresult Lwt.t) -> 'a tzresult Lwt.t + unit Lwt.t -> + (Lwt_canceler.t -> 'a tzresult Lwt.t) -> + 'a tzresult Lwt.t -module Make(Prefix : sig val id : string end) : Error_monad_sig.S +module Make (Prefix : sig + val id : string +end) : Error_monad_sig.S (**/**) + val json_to_string : (Data_encoding.json -> string) ref val errs_tag : error list Tag.def diff --git a/src/lib_error_monad/error_monad_sig.ml 
b/src/lib_error_monad/error_monad_sig.ml index 8b62c3a8bf53d8422662967d26613ba0365975b4..3b865325bf4840cd6d6a3bf7fa46ed637fb08da2 100644 --- a/src/lib_error_monad/error_monad_sig.ml +++ b/src/lib_error_monad/error_monad_sig.ml @@ -26,39 +26,44 @@ (** Categories of error *) type error_category = - [ `Branch (** Errors that may not happen in another context *) - | `Temporary (** Errors that may not happen in a later context *) - | `Permanent (** Errors that will happen no matter the context *) - ] + [ `Branch (** Errors that may not happen in another context *) + | `Temporary (** Errors that may not happen in a later context *) + | `Permanent (** Errors that will happen no matter the context *) ] module type S = sig - type error = .. (** Catch all error when 'serializing' an error. *) - type error += private Unclassified of string - (** Catch all error when 'deserializing' an error. *) + type error += + private + | Unclassified of string + (** Catch all error when 'deserializing' an error. *) + type error += private Unregistred_error of Data_encoding.json - val pp: Format.formatter -> error -> unit - val pp_print_error: Format.formatter -> error list -> unit + val pp : Format.formatter -> error -> unit + + val pp_print_error : Format.formatter -> error list -> unit (** An error serializer *) val error_encoding : error Data_encoding.t + val json_of_error : error -> Data_encoding.json + val error_of_json : Data_encoding.json -> error (** {2 Error documentation} *) (** Error information *) - type error_info = - { category : error_category ; - id : string ; - title : string ; - description : string ; - schema : Data_encoding.json_schema } + type error_info = { + category : error_category; + id : string; + title : string; + description : string; + schema : Data_encoding.json_schema + } - val pp_info: Format.formatter -> error_info -> unit + val pp_info : Format.formatter -> error_info -> unit (** Retrieves information of registered errors *) val get_registered_errors : unit -> error_info list @@ -97,9 +102,7 @@ module type S = sig type 'a tzresult = ('a, error list) result (** A serializer for result of a given type *) - val result_encoding : - 'a Data_encoding.t -> - 'a tzresult Data_encoding.t + val result_encoding : 'a Data_encoding.t -> 'a tzresult Data_encoding.t (** Sucessful result *) val ok : 'a -> 'a tzresult @@ -132,21 +135,22 @@ module type S = sig val fail : error -> 'a tzresult Lwt.t (** Non-Lwt bind operator *) - val (>>?) : 'a tzresult -> ('a -> 'b tzresult) -> 'b tzresult + val ( >>? ) : 'a tzresult -> ('a -> 'b tzresult) -> 'b tzresult (** Bind operator *) - val (>>=?) : + val ( >>=? ) : 'a tzresult Lwt.t -> ('a -> 'b tzresult Lwt.t) -> 'b tzresult Lwt.t (** Lwt's bind reexported *) - val (>>=) : 'a Lwt.t -> ('a -> 'b Lwt.t) -> 'b Lwt.t - val (>|=) : 'a Lwt.t -> ('a -> 'b) -> 'b Lwt.t + val ( >>= ) : 'a Lwt.t -> ('a -> 'b Lwt.t) -> 'b Lwt.t + + val ( >|= ) : 'a Lwt.t -> ('a -> 'b) -> 'b Lwt.t (** To operator *) - val (>>|?) : 'a tzresult Lwt.t -> ('a -> 'b) -> 'b tzresult Lwt.t + val ( >>|? ) : 'a tzresult Lwt.t -> ('a -> 'b) -> 'b tzresult Lwt.t (** Non-Lwt to operator *) - val (>|?) : 'a tzresult -> ('a -> 'b) -> 'b tzresult + val ( >|? 
) : 'a tzresult -> ('a -> 'b) -> 'b tzresult (** Enrich an error report (or do nothing on a successful result) manually *) val record_trace : error -> 'a tzresult -> 'a tzresult @@ -155,57 +159,89 @@ module type S = sig val trace : error -> 'b tzresult Lwt.t -> 'b tzresult Lwt.t (** Same as record_trace, for unevaluated error *) - val record_trace_eval : (unit -> error tzresult) -> 'a tzresult -> 'a tzresult + val record_trace_eval : + (unit -> error tzresult) -> 'a tzresult -> 'a tzresult (** Same as trace, for unevaluated Lwt error *) - val trace_eval : (unit -> error tzresult Lwt.t) -> 'b tzresult Lwt.t -> 'b tzresult Lwt.t + val trace_eval : + (unit -> error tzresult Lwt.t) -> 'b tzresult Lwt.t -> 'b tzresult Lwt.t (** Erroneous return on failed assertion *) val fail_unless : bool -> error -> unit tzresult Lwt.t + val fail_when : bool -> error -> unit tzresult Lwt.t val unless : bool -> (unit -> unit tzresult Lwt.t) -> unit tzresult Lwt.t + val _when : bool -> (unit -> unit tzresult Lwt.t) -> unit tzresult Lwt.t (* Usage: [_assert cond __LOC__ "<fmt>" ...] *) val _assert : - bool -> string -> - ('a, Format.formatter, unit, unit tzresult Lwt.t) format4 -> 'a + bool -> + string -> + ('a, Format.formatter, unit, unit tzresult Lwt.t) format4 -> + 'a (** {2 In-monad list iterators} *) (** A {!List.iter} in the monad *) val iter_s : ('a -> unit tzresult Lwt.t) -> 'a list -> unit tzresult Lwt.t + val iter_p : ('a -> unit tzresult Lwt.t) -> 'a list -> unit tzresult Lwt.t - val iteri_p : (int -> 'a -> unit tzresult Lwt.t) -> 'a list -> unit tzresult Lwt.t - val iter2_p : ('a -> 'b -> unit tzresult Lwt.t) -> 'a list -> 'b list -> unit tzresult Lwt.t - val iteri2_p : (int -> 'a -> 'b -> unit tzresult Lwt.t) -> 'a list -> 'b list -> unit tzresult Lwt.t + + val iteri_p : + (int -> 'a -> unit tzresult Lwt.t) -> 'a list -> unit tzresult Lwt.t + + val iter2_p : + ('a -> 'b -> unit tzresult Lwt.t) -> + 'a list -> + 'b list -> + unit tzresult Lwt.t + + val iteri2_p : + (int -> 'a -> 'b -> unit tzresult Lwt.t) -> + 'a list -> + 'b list -> + unit tzresult Lwt.t (** A {!List.map} in the monad *) val map_s : ('a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t + val map_p : ('a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t - val mapi_s : (int -> 'a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t - val mapi_p : (int -> 'a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t + + val mapi_s : + (int -> 'a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t + + val mapi_p : + (int -> 'a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t (** A {!List.map2} in the monad *) val map2 : ('a -> 'b -> 'c tzresult) -> 'a list -> 'b list -> 'c list tzresult + val map2_s : - ('a -> 'b -> 'c tzresult Lwt.t) -> 'a list -> 'b list -> + ('a -> 'b -> 'c tzresult Lwt.t) -> + 'a list -> + 'b list -> 'c list tzresult Lwt.t + val mapi2_s : - (int -> 'a -> 'b -> 'c tzresult Lwt.t) -> 'a list -> 'b list -> + (int -> 'a -> 'b -> 'c tzresult Lwt.t) -> + 'a list -> + 'b list -> 'c list tzresult Lwt.t (** A {!List.filter_map} in the monad *) val filter_map_s : ('a -> 'b option tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t + val filter_map_p : ('a -> 'b option tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t (** A {!List.filter} in the monad *) val filter_s : ('a -> bool tzresult Lwt.t) -> 'a list -> 'a list tzresult Lwt.t + val filter_p : ('a -> bool tzresult Lwt.t) -> 'a list -> 'a list tzresult Lwt.t @@ -224,7 +260,7 @@ module type S = sig type 'a tzlazy (** Create a 
{!tzlazy} value. *) - val tzlazy: (unit -> 'a tzresult Lwt.t) -> 'a tzlazy + val tzlazy : (unit -> 'a tzresult Lwt.t) -> 'a tzlazy (** [tzforce tzl] is either (a) the remembered value carried by [tzl] if available @@ -232,6 +268,5 @@ module type S = sig in which case the value is remembered, or (c) an error if the callback/closure used to create [tzl] is unsuccessful. *) - val tzforce: 'a tzlazy -> 'a tzresult Lwt.t - + val tzforce : 'a tzlazy -> 'a tzresult Lwt.t end diff --git a/src/lib_error_monad/error_table.ml b/src/lib_error_monad/error_table.ml index 1234385dc9cd755b72dbe5ee991a14a288d949ae..6422e0fff378120a3c3e6268922889d51d64e18b 100644 --- a/src/lib_error_monad/error_table.ml +++ b/src/lib_error_monad/error_table.ml @@ -27,39 +27,53 @@ open Error_monad module type S = sig type key + type 'a t - val create: int -> 'a t + + val create : int -> 'a t + val clear : 'a t -> unit + val reset : 'a t -> unit - val find_or_make : 'a t -> key -> (unit -> 'a tzresult Lwt.t) -> 'a tzresult Lwt.t - val remove: 'a t -> key -> unit - val find_opt: 'a t -> key -> 'a tzresult Lwt.t option + + val find_or_make : + 'a t -> key -> (unit -> 'a tzresult Lwt.t) -> 'a tzresult Lwt.t + + val remove : 'a t -> key -> unit + + val find_opt : 'a t -> key -> 'a tzresult Lwt.t option + val mem : 'a t -> key -> bool - val iter_s: (key -> 'a -> unit Lwt.t) -> 'a t -> unit Lwt.t - val iter_p: (key -> 'a -> unit Lwt.t) -> 'a t -> unit Lwt.t - val fold: (key -> 'a -> 'b -> 'b Lwt.t) -> 'a t -> 'b -> 'b Lwt.t - val fold_promises: (key -> 'a tzresult Lwt.t -> 'b -> 'b) -> 'a t -> 'b -> 'b - val fold_resolved: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b - val fold_keys: (key -> 'b -> 'b) -> 'a t -> 'b -> 'b - val length: 'a t -> int -end -module Make(T: Hashtbl.S) - : S with type key = T.key -= struct + val iter_s : (key -> 'a -> unit Lwt.t) -> 'a t -> unit Lwt.t + + val iter_p : (key -> 'a -> unit Lwt.t) -> 'a t -> unit Lwt.t + + val fold : (key -> 'a -> 'b -> 'b Lwt.t) -> 'a t -> 'b -> 'b Lwt.t + + val fold_promises : + (key -> 'a tzresult Lwt.t -> 'b -> 'b) -> 'a t -> 'b -> 'b + + val fold_resolved : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + val fold_keys : (key -> 'b -> 'b) -> 'a t -> 'b -> 'b + + val length : 'a t -> int +end + +module Make (T : Hashtbl.S) : S with type key = T.key = struct type key = T.key - type 'a t = { - table : 'a tzresult Lwt.t T.t ; - cleaners : unit Lwt.t T.t ; - } - let create n = { table = T.create n ; cleaners = T.create n } + type 'a t = {table : 'a tzresult Lwt.t T.t; cleaners : unit Lwt.t T.t} + + let create n = {table = T.create n; cleaners = T.create n} + let clear t = T.iter (fun _ cleaner -> Lwt.cancel cleaner) t.cleaners ; T.iter (fun _ a -> Lwt.cancel a) t.table ; T.clear t.cleaners ; T.clear t.table + let reset t = T.iter (fun _ cleaner -> Lwt.cancel cleaner) t.cleaners ; T.iter (fun _ a -> Lwt.cancel a) t.table ; @@ -68,32 +82,26 @@ module Make(T: Hashtbl.S) let find_or_make t k i = match T.find_opt t.table k with - | Some a -> a + | Some a -> + a | None -> let p = i () in T.add t.table k p ; - T.add t.cleaners k ( - p >>= function + T.add + t.cleaners + k + ( p + >>= function | Ok _ -> - T.remove t.cleaners k ; - Lwt.return_unit + T.remove t.cleaners k ; Lwt.return_unit | Error _ -> - T.remove t.table k ; - T.remove t.cleaners k ; - Lwt.return_unit - ) ; + T.remove t.table k ; T.remove t.cleaners k ; Lwt.return_unit ) ; p let remove t k = - begin match T.find_opt t.cleaners k with - | None -> () - | Some a -> Lwt.cancel a - end ; + (match T.find_opt t.cleaners k with 
None -> () | Some a -> Lwt.cancel a) ; T.remove t.cleaners k ; - begin match T.find_opt t.table k with - | None -> () - | Some a -> Lwt.cancel a - end ; + (match T.find_opt t.table k with None -> () | Some a -> Lwt.cancel a) ; T.remove t.table k let find_opt t k = T.find_opt t.table k @@ -102,39 +110,35 @@ module Make(T: Hashtbl.S) let iter_s f t = T.fold (fun k a acc -> (k, a) :: acc) t.table [] - |> Lwt_list.iter_s - (fun (k, a) -> - a >>= function - | Error _ -> Lwt.return_unit - | Ok a -> f k a) + |> Lwt_list.iter_s (fun (k, a) -> + a >>= function Error _ -> Lwt.return_unit | Ok a -> f k a) + let iter_p f t = T.fold (fun k a acc -> (k, a) :: acc) t.table [] - |> Lwt_list.iter_p - (fun (k, a) -> - a >>= function - | Error _ -> Lwt.return_unit - | Ok a -> f k a) + |> Lwt_list.iter_p (fun (k, a) -> + a >>= function Error _ -> Lwt.return_unit | Ok a -> f k a) let fold f t acc = T.fold (fun k a acc -> (k, a) :: acc) t.table [] |> Lwt_list.fold_left_s - (fun acc (k, a) -> - a >>= function - | Error _ -> Lwt.return acc - | Ok a -> f k a acc) - acc + (fun acc (k, a) -> + a >>= function Error _ -> Lwt.return acc | Ok a -> f k a acc) + acc + let fold_promises f t acc = T.fold f t.table acc + let fold_resolved f t acc = T.fold (fun k a acc -> - match Lwt.state a with - | Lwt.Sleep | Lwt.Fail _ | Lwt.Return (Error _) -> acc - | Lwt.Return (Ok a) -> f k a acc) + match Lwt.state a with + | Lwt.Sleep | Lwt.Fail _ | Lwt.Return (Error _) -> + acc + | Lwt.Return (Ok a) -> + f k a acc) t.table acc - let fold_keys f t acc = - T.fold (fun k _ acc -> f k acc) t.table acc - let length t = T.length t.table + let fold_keys f t acc = T.fold (fun k _ acc -> f k acc) t.table acc + let length t = T.length t.table end diff --git a/src/lib_error_monad/error_table.mli b/src/lib_error_monad/error_table.mli index be11409f8606df33592d54f0d98ac3299e8dd52d..7594c352e52f901fadf59958ec7ff89290af1752 100644 --- a/src/lib_error_monad/error_table.mli +++ b/src/lib_error_monad/error_table.mli @@ -46,14 +46,16 @@ module type S = sig the table. The next call to [find_or_make] with the same key causes the provided [gen] function to be called. *) - type key + type 'a t - val create: int -> 'a t + + val create : int -> 'a t + val clear : 'a t -> unit + val reset : 'a t -> unit - val find_or_make : 'a t -> key -> (unit -> 'a tzresult Lwt.t) -> 'a tzresult Lwt.t (** [find_or_make t k gen] is [p] if [k] is already bound to [k] in [t]. In this case, no side-effect is performed. @@ -61,37 +63,42 @@ module type S = sig ()]. In this case, [r] becomes bound to [k] in [t]. In addition, a listener is added to [r] so that if [r] resolves to [Error _], the binding is removed. *) + val find_or_make : + 'a t -> key -> (unit -> 'a tzresult Lwt.t) -> 'a tzresult Lwt.t - val remove: 'a t -> key -> unit + val remove : 'a t -> key -> unit - val find_opt: 'a t -> key -> 'a tzresult Lwt.t option (** [find_opt t k] is [None] if there are no bindings for [k] in [t], and [Some p] if [p] is bound to [k] in [t]. *) + val find_opt : 'a t -> key -> 'a tzresult Lwt.t option val mem : 'a t -> key -> bool - val iter_s: (key -> 'a -> unit Lwt.t) -> 'a t -> unit Lwt.t - val iter_p: (key -> 'a -> unit Lwt.t) -> 'a t -> unit Lwt.t + + val iter_s : (key -> 'a -> unit Lwt.t) -> 'a t -> unit Lwt.t + (** [iter_{s,p} f t] iterates [f] over the promises of [t]. It blocks on unresolved promises and only applies the function on the ones that resolve successfully. 
*) + val iter_p : (key -> 'a -> unit Lwt.t) -> 'a t -> unit Lwt.t - val fold: (key -> 'a -> 'b -> 'b Lwt.t) -> 'a t -> 'b -> 'b Lwt.t (** [fold f t init] folds [f] over the successfully resolving promises of [t]. I.e., it goes through the promises in the table and waits for each of the promise to resolve in order to fold over it. *) + val fold : (key -> 'a -> 'b -> 'b Lwt.t) -> 'a t -> 'b -> 'b Lwt.t - val fold_promises: (key -> 'a tzresult Lwt.t -> 'b -> 'b) -> 'a t -> 'b -> 'b (** [fold_promises f t init] folds [f] over the promises of [t]. *) + val fold_promises : + (key -> 'a tzresult Lwt.t -> 'b -> 'b) -> 'a t -> 'b -> 'b - val fold_resolved: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b (** [fold_resolved f t init] folds [f] over the successfully resolved promises of [t]. *) + val fold_resolved : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b - val fold_keys: (key -> 'b -> 'b) -> 'a t -> 'b -> 'b (** [fold_keys f t init] folds [f] over the keys bound in [t]. *) + val fold_keys : (key -> 'b -> 'b) -> 'a t -> 'b -> 'b - val length: 'a t -> int + val length : 'a t -> int end -module Make(T: Hashtbl.S) : S with type key = T.key (** Intended use: [Make(Hashtbl.Make(M))]. *) +module Make (T : Hashtbl.S) : S with type key = T.key diff --git a/src/lib_error_monad/test/assert.ml b/src/lib_error_monad/test/assert.ml index 5930d3990bc8d713cc099ae87b7e4bc09ae29c8b..e176720f8f53cd4e3ca9580911379e29d20116e3 100644 --- a/src/lib_error_monad/test/assert.ml +++ b/src/lib_error_monad/test/assert.ml @@ -24,11 +24,11 @@ (*****************************************************************************) let fail expected given msg = - Format.kasprintf failwith - "@[%s@ expected: %s@ got: %s@]" msg expected given + Format.kasprintf failwith "@[%s@ expected: %s@ got: %s@]" msg expected given + let fail_msg fmt = Format.kasprintf (fail "" "") fmt let default_printer _ = "" -let equal ?(eq=(=)) ?(prn=default_printer) ?(msg="") x y = +let equal ?(eq = ( = )) ?(prn = default_printer) ?(msg = "") x y = if not (eq x y) then fail (prn x) (prn y) msg diff --git a/src/lib_error_monad/test/test_error_tables.ml b/src/lib_error_monad/test/test_error_tables.ml index 9b0446ac9c41e54bfb816ad9c3fb3de876655f06..7554f35d4c75009b40abf058455f75b9a329572b 100644 --- a/src/lib_error_monad/test/test_error_tables.ml +++ b/src/lib_error_monad/test/test_error_tables.ml @@ -25,125 +25,152 @@ open Lwt.Infix -module IntErrorTable = - Error_table.Make(Hashtbl.Make(struct - type t = int - let equal x y = x = y - let hash x = x - end)) +module IntErrorTable = Error_table.Make (Hashtbl.Make (struct + type t = int + + let equal x y = x = y + + let hash x = x +end)) let test_add_remove _ _ = let t = IntErrorTable.create 2 in - IntErrorTable.find_or_make t 0 (fun () -> Error_monad.return 0) >>= function - | Error _ -> Assert.fail "Ok 0" "Error _" "find_or_make" - | Ok n -> + IntErrorTable.find_or_make t 0 (fun () -> Error_monad.return 0) + >>= function + | Error _ -> + Assert.fail "Ok 0" "Error _" "find_or_make" + | Ok n -> ( if not (n = 0) then Assert.fail "Ok 0" (Format.asprintf "Ok %d" n) "find_or_make" else match IntErrorTable.find_opt t 0 with - | None -> Assert.fail "Some (Ok 0)" "None" "find_opt" - | Some p -> - p >>= function - | Error _ -> Assert.fail "Some (Ok 0)" "Some (Error _)" "find_opt" + | None -> + Assert.fail "Some (Ok 0)" "None" "find_opt" + | Some p -> ( + p + >>= function + | Error _ -> + Assert.fail "Some (Ok 0)" "Some (Error _)" "find_opt" | Ok n -> if not (n = 0) then - Assert.fail "Some (Ok 0)" (Format.asprintf "Some (Ok %d)" 
n) "find_opt" - else begin + Assert.fail + "Some (Ok 0)" + (Format.asprintf "Some (Ok %d)" n) + "find_opt" + else ( IntErrorTable.remove t 0 ; match IntErrorTable.find_opt t 0 with - | Some _ -> Assert.fail "None" "Some _" "remove;find_opt" - | None -> Lwt.return_unit - end + | Some _ -> + Assert.fail "None" "Some _" "remove;find_opt" + | None -> + Lwt.return_unit ) ) ) let test_add_add _ _ = let t = IntErrorTable.create 2 in - IntErrorTable.find_or_make t 0 (fun () -> Error_monad.return 0) >>= fun _ -> - IntErrorTable.find_or_make t 0 (fun () -> Error_monad.return 1) >>= fun _ -> + IntErrorTable.find_or_make t 0 (fun () -> Error_monad.return 0) + >>= fun _ -> + IntErrorTable.find_or_make t 0 (fun () -> Error_monad.return 1) + >>= fun _ -> match IntErrorTable.find_opt t 0 with - | None -> Assert.fail "Some (Ok 0)" "None" "find_opt" - | Some p -> - p >>= function - | Error _ -> Assert.fail "Some (Ok 0)" "Some (Error _)" "find_opt" + | None -> + Assert.fail "Some (Ok 0)" "None" "find_opt" + | Some p -> ( + p + >>= function + | Error _ -> + Assert.fail "Some (Ok 0)" "Some (Error _)" "find_opt" | Ok n -> if not (n = 0) then - Assert.fail "Some (Ok 0)" (Format.asprintf "Some (Ok %d)" n) "find_opt" - else - Lwt.return_unit + Assert.fail + "Some (Ok 0)" + (Format.asprintf "Some (Ok %d)" n) + "find_opt" + else Lwt.return_unit ) let test_length _ _ = let t = IntErrorTable.create 2 in - IntErrorTable.find_or_make t 0 (fun () -> Error_monad.return 0) >>= fun _ -> - IntErrorTable.find_or_make t 1 (fun () -> Error_monad.return 1) >>= fun _ -> - IntErrorTable.find_or_make t 2 (fun () -> Error_monad.return 2) >>= fun _ -> - IntErrorTable.find_or_make t 3 (fun () -> Error_monad.return 3) >>= fun _ -> + IntErrorTable.find_or_make t 0 (fun () -> Error_monad.return 0) + >>= fun _ -> + IntErrorTable.find_or_make t 1 (fun () -> Error_monad.return 1) + >>= fun _ -> + IntErrorTable.find_or_make t 2 (fun () -> Error_monad.return 2) + >>= fun _ -> + IntErrorTable.find_or_make t 3 (fun () -> Error_monad.return 3) + >>= fun _ -> let l = IntErrorTable.length t in - if not (l = 4) then - Assert.fail "4" (Format.asprintf "%d" l) "length" - else - Lwt.return_unit + if not (l = 4) then Assert.fail "4" (Format.asprintf "%d" l) "length" + else Lwt.return_unit let test_self_clean _ _ = let t = IntErrorTable.create 2 in - IntErrorTable.find_or_make t 0 (fun () -> Lwt.return (Ok 0)) >>= fun _ -> - IntErrorTable.find_or_make t 1 (fun () -> Lwt.return (Error [])) >>= fun _ -> - IntErrorTable.find_or_make t 2 (fun () -> Lwt.return (Error [])) >>= fun _ -> - IntErrorTable.find_or_make t 3 (fun () -> Lwt.return (Ok 3)) >>= fun _ -> - IntErrorTable.find_or_make t 4 (fun () -> Lwt.return (Ok 4)) >>= fun _ -> - IntErrorTable.find_or_make t 5 (fun () -> Lwt.return (Error [])) >>= fun _ -> + IntErrorTable.find_or_make t 0 (fun () -> Lwt.return (Ok 0)) + >>= fun _ -> + IntErrorTable.find_or_make t 1 (fun () -> Lwt.return (Error [])) + >>= fun _ -> + IntErrorTable.find_or_make t 2 (fun () -> Lwt.return (Error [])) + >>= fun _ -> + IntErrorTable.find_or_make t 3 (fun () -> Lwt.return (Ok 3)) + >>= fun _ -> + IntErrorTable.find_or_make t 4 (fun () -> Lwt.return (Ok 4)) + >>= fun _ -> + IntErrorTable.find_or_make t 5 (fun () -> Lwt.return (Error [])) + >>= fun _ -> let l = IntErrorTable.length t in - if not (l = 3) then - Assert.fail "3" (Format.asprintf "%d" l) "length" - else - Lwt.return_unit + if not (l = 3) then Assert.fail "3" (Format.asprintf "%d" l) "length" + else Lwt.return_unit let test_order _ _ = let t = IntErrorTable.create 2 
in - let wter, wker = Lwt.task () in + let (wter, wker) = Lwt.task () in let world = ref [] in (* PROMISE A *) let p_a = IntErrorTable.find_or_make t 0 (fun () -> - wter >>= fun r -> - world := "a_inner" :: !world; - Lwt.return r) >>= fun r_a -> - world := "a_outer" :: !world; - Lwt.return r_a in - Lwt_main.yield () >>= fun () -> - + wter + >>= fun r -> + world := "a_inner" :: !world ; + Lwt.return r) + >>= fun r_a -> + world := "a_outer" :: !world ; + Lwt.return r_a + in + Lwt_main.yield () + >>= fun () -> (* PROMISE B *) let p_b = IntErrorTable.find_or_make t 0 (fun () -> - world := "b_inner" :: !world; - Lwt.return (Ok 1024)) >>= fun r_b -> - world := "b_outer" :: !world; - Lwt.return r_b in - Lwt_main.yield () >>= fun () -> - + world := "b_inner" :: !world ; + Lwt.return (Ok 1024)) + >>= fun r_b -> + world := "b_outer" :: !world ; + Lwt.return r_b + in + Lwt_main.yield () + >>= fun () -> (* Wake up A *) Lwt.wakeup wker (Ok 0) ; - (* Check that both A and B get expected results *) - p_a >>= begin function - | Error _ -> Assert.fail "Ok 0" "Error _" "find_or_make(a)" - | Ok n -> - if not (n = 0) then - Assert.fail "Ok 0" (Format.asprintf "Ok %d" n) "find_or_make(a)" - else - Lwt.return_unit - end >>= fun () -> - p_b >>= begin function - | Error _ -> Assert.fail "Ok 0" "Error _" "find_or_make(b)" - | Ok n -> - if not (n = 0) then - Assert.fail "Ok 0" (Format.asprintf "Ok %d" n) "find_or_make(b)" - else - Lwt.return_unit - end >>= fun () -> - + p_a + >>= (function + | Error _ -> + Assert.fail "Ok 0" "Error _" "find_or_make(a)" + | Ok n -> + if not (n = 0) then + Assert.fail "Ok 0" (Format.asprintf "Ok %d" n) "find_or_make(a)" + else Lwt.return_unit) + >>= fun () -> + p_b + >>= (function + | Error _ -> + Assert.fail "Ok 0" "Error _" "find_or_make(b)" + | Ok n -> + if not (n = 0) then + Assert.fail "Ok 0" (Format.asprintf "Ok %d" n) "find_or_make(b)" + else Lwt.return_unit) + >>= fun () -> (* Check that the `world` record is as expected *) match !world with - | "b_outer" :: "a_outer" :: "a_inner" :: [] - | "a_outer" :: "b_outer" :: "a_inner" :: [] -> + | ["b_outer"; "a_outer"; "a_inner"] | ["a_outer"; "b_outer"; "a_inner"] -> Lwt.return () | world -> Assert.fail @@ -151,16 +178,11 @@ let test_order _ _ = Format.(asprintf "[%a]" (pp_print_list pp_print_string) world) "world" +let tests = + [ Alcotest_lwt.test_case "add_remove" `Quick test_add_remove; + Alcotest_lwt.test_case "add_add" `Quick test_add_add; + Alcotest_lwt.test_case "length" `Quick test_length; + Alcotest_lwt.test_case "self_clean" `Quick test_length; + Alcotest_lwt.test_case "order" `Quick test_order ] -let tests = [ - Alcotest_lwt.test_case "add_remove" `Quick test_add_remove ; - Alcotest_lwt.test_case "add_add" `Quick test_add_add ; - Alcotest_lwt.test_case "length" `Quick test_length ; - Alcotest_lwt.test_case "self_clean" `Quick test_length ; - Alcotest_lwt.test_case "order" `Quick test_order ; -] - -let () = - Alcotest.run "error_tables" [ - "error_tables", tests ; - ] +let () = Alcotest.run "error_tables" [("error_tables", tests)] diff --git a/src/lib_event_logging/.ocamlformat b/src/lib_event_logging/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_event_logging/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always 
+parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_event_logging/internal_event.ml b/src/lib_event_logging/internal_event.ml index 513f4af437a161d644b78dac4894937948f21b9b..cc1fb46113e40acd70f192aa9c51deddc7aea8a3 100644 --- a/src/lib_event_logging/internal_event.ml +++ b/src/lib_event_logging/internal_event.ml @@ -24,70 +24,91 @@ (*****************************************************************************) open Error_monad + module List = struct include List include Tezos_stdlib.TzList end + module String = struct include String include Tezos_stdlib.TzString end - let valid_char c = match c with - | '0' .. '9' | 'a' .. 'z' | 'A' .. 'Z' - | '@' | '-' | '_' | '+' | '=' | '~' -> true - | _ -> false + | '0' .. '9' | 'a' .. 'z' | 'A' .. 'Z' | '@' | '-' | '_' | '+' | '=' | '~' -> + true + | _ -> + false let check_name_exn : string -> (string -> char -> exn) -> unit = - fun name make_exn -> - String.iter - (fun c -> if valid_char c then () else raise (make_exn name c)) - name ; - () - + fun name make_exn -> + String.iter + (fun c -> if valid_char c then () else raise (make_exn name c)) + name ; + () type level = Lwt_log_core.level = - Debug | Info | Notice | Warning | Error | Fatal + | Debug + | Info + | Notice + | Warning + | Error + | Fatal + module Level = struct type t = level + let default = Info + let to_lwt_log t = t + let to_string = Lwt_log_core.string_of_level + let of_string = Lwt_log_core.level_of_string + let encoding = let open Data_encoding in string_enum (List.map - (fun l -> to_string l, l) - [ Debug ; Info ; Notice ; Warning ; Error ; Fatal ]) + (fun l -> (to_string l, l)) + [Debug; Info; Notice; Warning; Error; Fatal]) end -module Section: sig +module Section : sig type t = private string list + val empty : t + val make : string list -> t + val make_sanitized : string list -> t + val to_lwt_log : t -> Lwt_log_core.section + val encoding : t Data_encoding.t + val to_string_list : t -> string list end = struct type t = string list + let empty = [] let make sl = List.iter (fun s -> - check_name_exn s (fun name char -> - Printf.ksprintf (fun s -> Invalid_argument s) - "Internal_event.Section: invalid name %S (contains %c)" name char)) - sl; + check_name_exn s (fun name char -> + Printf.ksprintf + (fun s -> Invalid_argument s) + "Internal_event.Section: invalid name %S (contains %c)" + name + char)) + sl ; sl let make_sanitized sl = - List.map - (String.map (fun c -> if valid_char c then c else '_')) sl |> make + List.map (String.map (fun c -> if valid_char c then c else '_')) sl |> make let to_lwt_log s = Lwt_log_core.Section.make (String.concat "." 
s) @@ -119,13 +140,12 @@ end module type EVENT = sig include EVENT_DEFINITION - val emit : ?section : Section.t -> (unit -> t) -> unit tzresult Lwt.t + val emit : ?section:Section.t -> (unit -> t) -> unit tzresult Lwt.t end type 'a event_definition = (module EVENT_DEFINITION with type t = 'a) module type SINK = sig - type t val uri_scheme : string @@ -133,8 +153,11 @@ module type SINK = sig val configure : Uri.t -> t tzresult Lwt.t val handle : - t -> 'a event_definition -> - ?section : Section.t -> (unit -> 'a) -> unit tzresult Lwt.t + t -> + 'a event_definition -> + ?section:Section.t -> + (unit -> 'a) -> + unit tzresult Lwt.t val close : t -> unit tzresult Lwt.t end @@ -142,15 +165,19 @@ end type 'a sink_definition = (module SINK with type t = 'a) module All_sinks = struct - - type registered = | Registered : - { scheme : string ; definition : 'a sink_definition } -> registered + { scheme : string; + definition : 'a sink_definition } + -> registered type active = - | Active : { scheme : string ; configuration : Uri.t ; - sink : 'a ; definition : 'a sink_definition } -> active + | Active : + { scheme : string; + configuration : Uri.t; + sink : 'a; + definition : 'a sink_definition } + -> active let registered : registered list ref = ref [] @@ -158,9 +185,7 @@ module All_sinks = struct let find_registered_exn scheme_to_find = List.find - (function - | Registered { scheme ; _ } -> - String.equal scheme scheme_to_find) + (function Registered {scheme; _} -> String.equal scheme scheme_to_find) !registered let register (type a) m = @@ -168,83 +193,91 @@ module All_sinks = struct match find_registered_exn S.uri_scheme with | exception _ -> registered := - Registered { scheme = S.uri_scheme ; definition = m } :: !registered + Registered {scheme = S.uri_scheme; definition = m} :: !registered | _ -> (* This should be considered a programming error: *) - Printf.ksprintf Pervasives.invalid_arg - "Internal_event: registering duplicate URI scheme: %S" S.uri_scheme + Printf.ksprintf + Pervasives.invalid_arg + "Internal_event: registering duplicate URI scheme: %S" + S.uri_scheme type activation_error_reason = | Missing_uri_scheme of string | Uri_scheme_not_registered of string + type error += Activation_error of activation_error_reason let () = let description = - "Activation of an Internal Event SINK with an URI failed" in + "Activation of an Internal Event SINK with an URI failed" + in let title = "Internal Event Sink: Wrong Activation URI" in - register_error_kind `Permanent ~id:"internal-event-activation-error" ~title + register_error_kind + `Permanent + ~id:"internal-event-activation-error" + ~title ~description - ~pp:(fun ppf -> function - | Missing_uri_scheme uri -> - Format.fprintf ppf "%s: Missing URI scheme %S" title uri - | Uri_scheme_not_registered uri -> - Format.fprintf ppf "%s: URI scheme not registered %S" title uri) + ~pp:(fun ppf -> function Missing_uri_scheme uri -> + Format.fprintf ppf "%s: Missing URI scheme %S" title uri + | Uri_scheme_not_registered uri -> + Format.fprintf ppf "%s: URI scheme not registered %S" title uri) Data_encoding.( - union [ - case ~title:"missing-uri-scheme" - (Tag 0) - (obj1 (req "missing-uri-scheme" (obj1 (req "uri" string)))) - (function Missing_uri_scheme uri -> Some uri | _ -> None) - (fun uri -> Missing_uri_scheme uri) ; - case ~title:"non-registered-uri-scheme" - (Tag 2) - (obj1 (req "non-registered-uri-scheme" (obj1 (req "uri" string)))) - (function Uri_scheme_not_registered uri -> Some uri | _ -> None) - (fun uri -> Uri_scheme_not_registered uri) ; 
- ]) - (function - | Activation_error reason -> Some reason | _ -> None) + union + [ case + ~title:"missing-uri-scheme" + (Tag 0) + (obj1 (req "missing-uri-scheme" (obj1 (req "uri" string)))) + (function Missing_uri_scheme uri -> Some uri | _ -> None) + (fun uri -> Missing_uri_scheme uri); + case + ~title:"non-registered-uri-scheme" + (Tag 2) + (obj1 (req "non-registered-uri-scheme" (obj1 (req "uri" string)))) + (function + | Uri_scheme_not_registered uri -> Some uri | _ -> None) + (fun uri -> Uri_scheme_not_registered uri) ]) + (function Activation_error reason -> Some reason | _ -> None) (fun reason -> Activation_error reason) let activate uri = - begin match Uri.scheme uri with - | None -> fail (Activation_error (Missing_uri_scheme (Uri.to_string uri))) - | Some scheme_to_activate -> - let activate (type a) scheme definition = - let module S = (val definition : SINK with type t = a) in - S.configure uri >>=? fun sink -> - return (Active { scheme ; configuration = uri ; definition ; sink }) - in - begin match find_registered_exn scheme_to_activate with - | Registered { scheme ; definition } -> - activate scheme definition - | exception _ -> - fail (Activation_error - (Uri_scheme_not_registered (Uri.to_string uri))) - end - >>=? fun act -> - active := act :: !active ; - return_unit - end + match Uri.scheme uri with + | None -> + fail (Activation_error (Missing_uri_scheme (Uri.to_string uri))) + | Some scheme_to_activate -> + let activate (type a) scheme definition = + let module S = (val definition : SINK with type t = a) in + S.configure uri + >>=? fun sink -> + return (Active {scheme; configuration = uri; definition; sink}) + in + ( match find_registered_exn scheme_to_activate with + | Registered {scheme; definition} -> + activate scheme definition + | exception _ -> + fail + (Activation_error (Uri_scheme_not_registered (Uri.to_string uri))) + ) + >>=? fun act -> + active := act :: !active ; + return_unit let close () = let close_one (type a) sink definition = let module S = (val definition : SINK with type t = a) in - S.close sink in + S.close sink + in iter_s - (fun (Active { sink ; definition ; _ }) -> close_one sink definition) + (fun (Active {sink; definition; _}) -> close_one sink definition) !active - let handle def section v = let handle (type a) sink definition = let module S = (val definition : SINK with type t = a) in - S.handle ?section sink def v in + S.handle ?section sink def v + in List.fold_left - (fun prev -> function Active { sink ; definition ; _ } -> - prev >>=? fun () -> - handle sink definition) + (fun prev -> function Active {sink; definition; _} -> + prev >>=? 
fun () -> handle sink definition) return_unit !active @@ -254,7 +287,7 @@ module All_sinks = struct pp_open_box fmt 2 ; pp_print_if_newline fmt () ; pp_print_string fmt "* " ; - fprintf fmt "%s: [" name; + fprintf fmt "%s: [" name ; pp_print_cut fmt () ; pp_print_list ~pp_sep:(fun fmt () -> pp_print_string fmt "," ; pp_print_space fmt ()) @@ -263,40 +296,43 @@ module All_sinks = struct list ; pp_close_box fmt () ; pp_print_cut fmt () ; - pp_print_string fmt "]" ; + pp_print_string fmt "]" in pp_open_box fmt 0 ; - pp_list_of_sinks "Registered sinks" !registered - (fun fmt (Registered { scheme ; _ }) -> - fprintf fmt "\"%s://..\"" scheme) ; + pp_list_of_sinks + "Registered sinks" + !registered + (fun fmt (Registered {scheme; _}) -> fprintf fmt "\"%s://..\"" scheme) ; pp_print_break fmt 2 0 ; - pp_list_of_sinks "Active sinks" !active - (fun fmt (Active { configuration ; _ }) -> - fprintf fmt "\"%a\"" Uri.pp_hum configuration) ; + pp_list_of_sinks + "Active sinks" + !active + (fun fmt (Active {configuration; _}) -> + fprintf fmt "\"%a\"" Uri.pp_hum configuration) ; pp_print_cut fmt () ; pp_close_box fmt () ; () end module Generic = struct + type definition = Definition : (string * 'a event_definition) -> definition - type definition = - | Definition: (string * 'a event_definition) -> definition - - type event = - | Event: (string * 'a event_definition * 'a) -> event + type event = Event : (string * 'a event_definition * 'a) -> event - type with_name = < doc : string; name : string > + type with_name = < doc : string ; name : string > - let json_schema (Definition (_, d)) - : < schema : Json_schema.schema ; with_name > = - let aux (type a) (ev : a event_definition) = + let json_schema (Definition (_, d)) : + < schema : Json_schema.schema ; with_name > = + let aux (type a) (ev : a event_definition) = let module E = (val ev : EVENT_DEFINITION with type t = a) in object method name = E.name + method doc = E.doc + method schema = Data_encoding.Json.schema E.encoding - end in + end + in aux d let explode_event (Event (_, def, ev)) = @@ -304,15 +340,18 @@ module Generic = struct let module M = (val def : EVENT_DEFINITION with type t = a) in object method name = M.name + method doc = M.doc + method pp fmt () = M.pp fmt ev + method json = Data_encoding.Json.construct M.encoding ev - end in + end + in aux def ev end module All_definitions = struct - open Generic let all : definition list ref = ref [] @@ -320,8 +359,8 @@ module All_definitions = struct let registration_exn fmt = Format.kasprintf (fun s -> - (* This should be considered a programming error: *) - Invalid_argument ("Internal_event registration error: " ^ s)) + (* This should be considered a programming error: *) + Invalid_argument ("Internal_event registration error: " ^ s)) fmt let add (type a) ev = @@ -330,7 +369,8 @@ module All_definitions = struct | _ -> raise (registration_exn "duplicate Event name: %S" E.name) | exception _ -> - check_name_exn E.name + check_name_exn + E.name (registration_exn "invalid event name: %S contains '%c'") ; all := Definition (E.name, ev) :: !all @@ -338,9 +378,10 @@ module All_definitions = struct let find match_name = match List.find (function Definition (n, _) -> match_name n) !all with - | s -> Some s - | exception _ -> None - + | s -> + Some s + | exception _ -> + None end module Make (E : EVENT_DEFINITION) : EVENT with type t = E.t = struct @@ -356,98 +397,128 @@ module Make (E : EVENT_DEFINITION) : EVENT with type t = E.t = struct end module Legacy_logging = struct - let sections = ref [] module type 
LOG = sig - val debug: ('a, Format.formatter, unit, unit) format4 -> 'a - val log_info: ('a, Format.formatter, unit, unit) format4 -> 'a - val log_notice: ('a, Format.formatter, unit, unit) format4 -> 'a - val warn: ('a, Format.formatter, unit, unit) format4 -> 'a - val log_error: ('a, Format.formatter, unit, unit) format4 -> 'a - val fatal_error: ('a, Format.formatter, unit, unit) format4 -> 'a - - val lwt_debug: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_log_info: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_log_notice: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_warn: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_log_error: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_fatal_error: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + val debug : ('a, Format.formatter, unit, unit) format4 -> 'a + + val log_info : ('a, Format.formatter, unit, unit) format4 -> 'a + + val log_notice : ('a, Format.formatter, unit, unit) format4 -> 'a + + val warn : ('a, Format.formatter, unit, unit) format4 -> 'a + + val log_error : ('a, Format.formatter, unit, unit) format4 -> 'a + + val fatal_error : ('a, Format.formatter, unit, unit) format4 -> 'a + + val lwt_debug : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_log_info : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_log_notice : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_warn : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_log_error : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_fatal_error : + ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a end open Tezos_stdlib + type ('a, 'b) msgf = (('a, Format.formatter, unit, 'b) format4 -> ?tags:Tag.set -> 'a) -> - ?tags:Tag.set -> 'b + ?tags:Tag.set -> + 'b + type ('a, 'b) log = ('a, 'b) msgf -> 'b + module type SEMLOG = sig module Tag = Tag - val debug: ('a, unit) log - val log_info: ('a, unit) log - val log_notice: ('a, unit) log - val warn: ('a, unit) log - val log_error: ('a, unit) log - val fatal_error: ('a, unit) log - val lwt_debug: ('a, unit Lwt.t) log - val lwt_log_info: ('a, unit Lwt.t) log - val lwt_log_notice: ('a, unit Lwt.t) log - val lwt_warn: ('a, unit Lwt.t) log - val lwt_log_error: ('a, unit Lwt.t) log - val lwt_fatal_error: ('a, unit Lwt.t) log + + val debug : ('a, unit) log + + val log_info : ('a, unit) log + + val log_notice : ('a, unit) log + + val warn : ('a, unit) log + + val log_error : ('a, unit) log + + val fatal_error : ('a, unit) log + + val lwt_debug : ('a, unit Lwt.t) log + + val lwt_log_info : ('a, unit Lwt.t) log + + val lwt_log_notice : ('a, unit Lwt.t) log + + val lwt_warn : ('a, unit Lwt.t) log + + val lwt_log_error : ('a, unit Lwt.t) log + + val lwt_fatal_error : ('a, unit Lwt.t) log + val event : string Tag.def + val exn : exn Tag.def end - module Make_event (P : sig val name : string end) = struct + module Make_event (P : sig + val name : string + end) = + struct let name_split = String.split_on_char '.' 
P.name + let section = Section.make name_split module Definition = struct let name = "legacy_logging_event-" ^ String.concat "-" name_split type t = { - message : string ; - section : Section.t ; - level : level ; - tags : Tag.set ; + message : string; + section : Section.t; + level : level; + tags : Tag.set } let make ?(tags = Tag.empty) level message = - { message ; section ; level ; tags } - + {message; section; level; tags} let v0_encoding = let open Data_encoding in conv - (fun { message ; section ; level ; tags } -> - (message, section, level, tags)) + (fun {message; section; level; tags} -> + (message, section, level, tags)) (fun (message, section, level, tags) -> - { message ; section ; level ; tags }) + {message; section; level; tags}) (obj4 (req "message" string) (req "section" Section.encoding) (req "level" Level.encoding) - (dft "tags" + (dft + "tags" (conv (fun tags -> Format.asprintf "%a" Tag.pp_set tags) (fun _ -> Tag.empty) string) - Tag.empty) - ) + Tag.empty)) let encoding = Data_encoding.With_version.(encoding ~name (first_version v0_encoding)) - let pp ppf { message ; _ } = + let pp ppf {message; _} = let open Format in fprintf ppf "%s" message let doc = "Generic event legacy / string-based information logging." - let level { level ; _ } = level + let level {level; _} = level end - let () = sections := P.name :: !sections module Event = Make (Definition) @@ -455,69 +526,111 @@ module Legacy_logging = struct let emit_async level fmt ?tags = Format.kasprintf (fun message -> - Lwt.ignore_result - (Event.emit ~section (fun () -> Definition.make ?tags level message))) + Lwt.ignore_result + (Event.emit ~section (fun () -> + Definition.make ?tags level message))) fmt + let emit_lwt level fmt ?tags = Format.kasprintf (fun message -> - Event.emit ~section (fun () -> Definition.make ?tags level message) - >>= function - | Ok () -> Lwt.return_unit - | Error el -> Format.kasprintf Lwt.fail_with "%a" pp_print_error el) + Event.emit ~section (fun () -> Definition.make ?tags level message) + >>= function + | Ok () -> + Lwt.return_unit + | Error el -> + Format.kasprintf Lwt.fail_with "%a" pp_print_error el) fmt end - module Make (P : sig val name : string end) = struct - include Make_event(P) + module Make (P : sig + val name : string + end) = + struct + include Make_event (P) + let emit_async = emit_async ?tags:None + let debug f = emit_async Debug f + let log_info f = emit_async Info f + let log_notice f = emit_async Notice f + let warn f = emit_async Warning f + let log_error f = emit_async Error f + let fatal_error f = emit_async Fatal f + let emit_lwt = emit_lwt ?tags:None + let lwt_debug f = emit_lwt Debug f + let lwt_log_info f = emit_lwt Info f + let lwt_log_notice f = emit_lwt Notice f + let lwt_warn f = emit_lwt Warning f + let lwt_log_error f = emit_lwt Error f + let lwt_fatal_error f = emit_lwt Fatal f end - module Make_semantic (P : sig val name : string end) = struct - include Make_event(P) - let debug (f: ('a, unit) msgf) = f (emit_async Debug) ?tags:None - let log_info f = f (emit_async Info) ?tags:None - let log_notice f = f (emit_async Notice) ?tags:None - let warn f = f (emit_async Warning) ?tags:None - let log_error f = f (emit_async Error) ?tags:None - let fatal_error f = f (emit_async Fatal) ?tags:None - let lwt_debug f = f (emit_lwt Debug) ?tags:None - let lwt_log_info f = f (emit_lwt Info) ?tags:None - let lwt_log_notice f = f (emit_lwt Notice) ?tags:None - let lwt_warn f = f (emit_lwt Warning) ?tags:None - let lwt_log_error f = f (emit_lwt Error) ?tags:None + + 
module Make_semantic (P : sig + val name : string + end) = + struct + include Make_event (P) + + let debug (f : ('a, unit) msgf) = f (emit_async Debug) ?tags:None + + let log_info f = f (emit_async Info) ?tags:None + + let log_notice f = f (emit_async Notice) ?tags:None + + let warn f = f (emit_async Warning) ?tags:None + + let log_error f = f (emit_async Error) ?tags:None + + let fatal_error f = f (emit_async Fatal) ?tags:None + + let lwt_debug f = f (emit_lwt Debug) ?tags:None + + let lwt_log_info f = f (emit_lwt Info) ?tags:None + + let lwt_log_notice f = f (emit_lwt Notice) ?tags:None + + let lwt_warn f = f (emit_lwt Warning) ?tags:None + + let lwt_log_error f = f (emit_lwt Error) ?tags:None + let lwt_fatal_error f = f (emit_lwt Fatal) ?tags:None + module Tag = Tag + let event = - Tag.def ~doc:"String identifier for the class of event being logged" - "event" Format.pp_print_text + Tag.def + ~doc:"String identifier for the class of event being logged" + "event" + Format.pp_print_text + let exn = - Tag.def ~doc:"Exception which was detected" - "exception" (fun f e -> Format.pp_print_text f (Printexc.to_string e)) + Tag.def ~doc:"Exception which was detected" "exception" (fun f e -> + Format.pp_print_text f (Printexc.to_string e)) end end module Error_event = struct type t = { - message : string option ; - severity : [ `Fatal | `Recoverable ] ; - trace : Error_monad.error list ; + message : string option; + severity : [`Fatal | `Recoverable]; + trace : Error_monad.error list } let make ?message ?(severity = `Recoverable) trace () = - { message ; trace; severity } + {message; trace; severity} module Definition = struct let name = "error-event" @@ -528,26 +641,28 @@ module Error_event = struct let open Data_encoding in let v0_encoding = conv - (fun { message ; trace ; severity } -> (message, severity, trace)) - (fun (message, severity, trace) -> { message ; severity ; trace }) + (fun {message; trace; severity} -> (message, severity, trace)) + (fun (message, severity, trace) -> {message; severity; trace}) (obj3 (opt "message" string) - (req "severity" - (string_enum ["fatal", `Fatal; "recoverable", `Recoverable])) + (req + "severity" + (string_enum [("fatal", `Fatal); ("recoverable", `Recoverable)])) (req "trace" (list Error_monad.error_encoding))) in With_version.(encoding ~name (first_version v0_encoding)) let pp f x = - Format.fprintf f "%s:@ %s" name + Format.fprintf + f + "%s:@ %s" + name (match x.message with Some x -> x | None -> "") let doc = "Generic event for any kind of error." - let level { severity ; _ } = - match severity with - | `Fatal -> Fatal - | `Recoverable -> Error + let level {severity; _} = + match severity with `Fatal -> Fatal | `Recoverable -> Error end include (Make (Definition) : EVENT with type t := t) @@ -555,27 +670,31 @@ module Error_event = struct let log_error_and_recover ?section ?message ?severity f = f () >>= function - | Ok () -> Lwt.return_unit - | Error el -> + | Ok () -> + Lwt.return_unit + | Error el -> ( emit ?section (fun () -> make ?message ?severity el ()) >>= function - | Ok () -> Lwt.return_unit + | Ok () -> + Lwt.return_unit | Error el -> - Format.kasprintf (Lwt_log.error) + Format.kasprintf + Lwt_log.error "Error while emitting error logging event !! 
%a" - pp_print_error el + pp_print_error + el ) end module Debug_event = struct - type t = { message : string ; attachment : Data_encoding.Json.t } + type t = {message : string; attachment : Data_encoding.Json.t} - let make ?(attach = `Null) message () = { message ; attachment = attach } + let make ?(attach = `Null) message () = {message; attachment = attach} let v0_encoding = let open Data_encoding in conv - (fun { message ; attachment } -> (message, attachment)) - (fun (message, attachment) -> { message ; attachment }) + (fun {message; attachment} -> (message, attachment)) + (fun (message, attachment) -> {message; attachment}) (obj2 (req "message" string) (req "attachment" json)) module Definition = struct @@ -586,7 +705,7 @@ module Debug_event = struct let encoding = Data_encoding.With_version.(encoding ~name (first_version v0_encoding)) - let pp ppf { message ; attachment } = + let pp ppf {message; attachment} = let open Format in fprintf ppf "%s:@ %s@ %a" name message Data_encoding.Json.pp attachment @@ -599,33 +718,38 @@ module Debug_event = struct end module Lwt_worker_event = struct - type t = { name : string ; event : [ `Started | `Ended | `Failed of string ] } + type t = {name : string; event : [`Started | `Ended | `Failed of string]} let v0_encoding = let open Data_encoding in conv - (fun { name ; event } -> (name, event)) - (fun (name, event) -> { name ; event }) + (fun {name; event} -> (name, event)) + (fun (name, event) -> {name; event}) (obj2 (req "name" string) - (req "event" - (union [ - case ~title:"started" (Tag 0) - (obj1 (req "kind" (constant "started"))) - (function `Started -> Some () | _ -> None) - (fun () -> `Started) ; - case ~title:"ended" (Tag 1) - (obj1 (req "kind" (constant "ended"))) - (function `Ended -> Some () | _ -> None) - (fun () -> `Ended) ; - case ~title:"failed" (Tag 2) - (obj2 - (req "kind" (constant "failed")) - (req "exception" string)) - (function `Failed s -> Some ((), s) | _ -> None) - (fun ((), s) -> `Failed s) ; - ]) - )) + (req + "event" + (union + [ case + ~title:"started" + (Tag 0) + (obj1 (req "kind" (constant "started"))) + (function `Started -> Some () | _ -> None) + (fun () -> `Started); + case + ~title:"ended" + (Tag 1) + (obj1 (req "kind" (constant "ended"))) + (function `Ended -> Some () | _ -> None) + (fun () -> `Ended); + case + ~title:"failed" + (Tag 2) + (obj2 + (req "kind" (constant "failed")) + (req "exception" string)) + (function `Failed s -> Some ((), s) | _ -> None) + (fun ((), s) -> `Failed s) ]))) module Definition = struct let name = "lwt-worker-event" @@ -635,37 +759,33 @@ module Lwt_worker_event = struct let encoding = Data_encoding.With_version.(encoding ~name (first_version v0_encoding)) - let pp ppf { name ; event } = + let pp ppf {name; event} = let open Format in - fprintf ppf "Worker %s:@ %a" name - (fun fmt -> function - | `Failed msg -> fprintf ppf "Failed with %s" msg - | `Ended -> fprintf fmt "Ended" - | `Started -> fprintf fmt "Started") + fprintf + ppf + "Worker %s:@ %a" + name + (fun fmt -> function `Failed msg -> fprintf ppf "Failed with %s" msg + | `Ended -> fprintf fmt "Ended" | `Started -> fprintf fmt "Started") event let doc = "Generic event for callers of the function Lwt_utils.worker." 
- let level { event ; _ } = - match event with - | `Failed _ -> Error - | `Started | `Ended -> Info + let level {event; _} = + match event with `Failed _ -> Error | `Started | `Ended -> Info end include (Make (Definition) : EVENT with type t := t) let on_event name event = - let section = Section.make_sanitized [ "lwt-worker"; name ] in + let section = Section.make_sanitized ["lwt-worker"; name] in Error_event.log_error_and_recover ~message:(Printf.sprintf "Trying to emit worker event for %S" name) ~severity:`Fatal - (fun () -> emit ~section (fun () -> { name ; event })) + (fun () -> emit ~section (fun () -> {name; event})) end - - module Lwt_log_sink = struct - (* let default_template = "$(date) - $(section): $(message)" *) let default_section = Lwt_log_core.Section.main @@ -679,25 +799,22 @@ module Lwt_log_sink = struct let handle (type a) () m ?section (v : unit -> a) = let module M = (val m : EVENT_DEFINITION with type t = a) in - protect - (fun () -> - let ev = v () in - let section = - Option.unopt_map ~f:Section.to_lwt_log - section ~default:default_section in - let level = M.level ev in - Format.kasprintf - (Lwt_log_core.log ~section ~level) - "%a" M.pp ev - >>= fun () -> return_unit) - - let close _ = - Lwt_log.close !Lwt_log.default - >>= fun () -> - return_unit + protect (fun () -> + let ev = v () in + let section = + Option.unopt_map + ~f:Section.to_lwt_log + section + ~default:default_section + in + let level = M.level ev in + Format.kasprintf (Lwt_log_core.log ~section ~level) "%a" M.pp ev + >>= fun () -> return_unit) + + let close _ = Lwt_log.close !Lwt_log.default >>= fun () -> return_unit end + include Sink let () = All_sinks.register (module Sink) - end diff --git a/src/lib_event_logging/internal_event.mli b/src/lib_event_logging/internal_event.mli index 9c3fc17b7442bb306d4bb3e1790d97d7ea7d1778..52c9d05c1cd91ad3be1c6c2107cb66b534f19cbe 100644 --- a/src/lib_event_logging/internal_event.mli +++ b/src/lib_event_logging/internal_event.mli @@ -39,64 +39,69 @@ open Error_monad (** {3 Events Definitions and Registration } *) -type level = Debug | Info | Notice | Warning | Error | Fatal -(** The relative importance of a particular event (compatible with +type level = + | Debug + | Info + | Notice + | Warning + | Error + | Fatal + (** The relative importance of a particular event (compatible with traditional logging systems, cf. {!Lwt_log_core.level}). *) (** Module to manipulate values of type {!level}. *) module Level : sig - - type t = level (** Alias of {!level}. *) + type t = level - val default : t (** The default level is {!Info}, it is used in {!Event_defaults}. *) + val default : t - val to_lwt_log : t -> Lwt_log_core.level (** Cast the level to a value of {!Lwt_log_core.level}. *) + val to_lwt_log : t -> Lwt_log_core.level val to_string : t -> string + val of_string : string -> t option + val encoding : t Data_encoding.t end - (** Sections are a simple way of classifying events at the time of their emission. *) -module Section: sig +module Section : sig type t = private string list val empty : t - val make_sanitized : string list -> t (** Build a {!Section.t} by replacing special characters with ['_']. *) + val make_sanitized : string list -> t - val to_lwt_log : t -> Lwt_log_core.section (** Make the equivalent {!Lwt_log} section. *) + val to_lwt_log : t -> Lwt_log_core.section val encoding : t Data_encoding.t val to_string_list : t -> string list - end (** Parameters defining an inspectable type of events. 
*) module type EVENT_DEFINITION = sig type t - val name : string (** Defines the identifier for the event. Names should be unique and are restricted to alphanumeric characters or [".@-_+=,~"].*) + val name : string - val doc : string (** A display-friendly test which describes what the event means. *) + val doc : string val pp : Format.formatter -> t -> unit val encoding : t Data_encoding.t - val level : t -> level (** Return the prefered {!level} for a given event instance. *) + val level : t -> level end (** Default values for fields in {!EVENT_DEFINITION}. *) @@ -110,16 +115,16 @@ end module type EVENT = sig include EVENT_DEFINITION - val emit : ?section: Section.t -> (unit -> t) -> unit tzresult Lwt.t (** Output an event of type {!t}, if no sinks are listening the function won't be applied. *) + val emit : ?section:Section.t -> (unit -> t) -> unit tzresult Lwt.t end (** Build an event from an event-definition. *) -module Make(E: EVENT_DEFINITION): EVENT with type t = E.t +module Make (E : EVENT_DEFINITION) : EVENT with type t = E.t -type 'a event_definition = (module EVENT_DEFINITION with type t = 'a) (** [event_definition] wraps {!EVENT_DEFINITION} as a first class module. *) +type 'a event_definition = (module EVENT_DEFINITION with type t = 'a) (** Helper functions to manipulate all kinds of events in a generic way. *) module Generic : sig @@ -127,28 +132,28 @@ module Generic : sig type event = Event : (string * 'a event_definition * 'a) -> event - type with_name = < doc : string; name : string > + type with_name = < doc : string ; name : string > - val json_schema : definition -> < schema : Json_schema.schema ; with_name > (** Get the JSON schema (together with [name] and [doc]) of a given event definition. *) + val json_schema : definition -> < schema : Json_schema.schema ; with_name > - val explode_event : event -> - < pp : Format.formatter -> unit -> unit ; - json : Data_encoding.json ; - with_name > - (** Get the JSON representation and a pretty-printer for a given + (** Get the JSON representation and a pretty-printer for a given event {i instance}. *) + val explode_event : + event -> + < pp : Format.formatter -> unit -> unit + ; json : Data_encoding.json + ; with_name > end (** Access to all the event definitions registered with {!Make}. *) module All_definitions : sig - (** Get the list of all the known definitions. *) val get : unit -> Generic.definition list (** Find the definition matching on the given name. *) - val find: (string -> bool) -> Generic.definition option + val find : (string -> bool) -> Generic.definition option end (** {3 Sink Definitions and Registration } *) @@ -157,52 +162,53 @@ end events, for instance, a sink could be output to a file, to a database, or a simple "memory-less" forwarding mechanism. *) module type SINK = sig - (** A sink can store any required state, e.g. a database handle, in a value of the [t] type see {!configure}. *) type t - val uri_scheme : string (** Registered sinks are a distinguished by their URI scheme. *) + val uri_scheme : string - val configure : Uri.t -> t tzresult Lwt.t (** When a registered sink is activated the {!configure} function is called to initialize it. The parameters should be encoded or obtained from the URI (the scheme of the URI is already {!uri_scheme}). *) + val configure : Uri.t -> t tzresult Lwt.t - val handle : - t -> 'a event_definition -> - ?section: Section.t -> (unit -> 'a) -> unit tzresult Lwt.t (** A sink's main function is to {!handle} incoming events from the code base. 
*) + val handle : + t -> + 'a event_definition -> + ?section:Section.t -> + (unit -> 'a) -> + unit tzresult Lwt.t - val close : t -> unit tzresult Lwt.t (** A function to be called on graceful termination of processes (e.g. to flush file-descriptors, etc.). *) + val close : t -> unit tzresult Lwt.t end -type 'a sink_definition = (module SINK with type t = 'a) (** [sink_definition] wraps {!SINK_DEFINITION} as a first class module. *) +type 'a sink_definition = (module SINK with type t = 'a) (** Use {!All_sinks.register} to add a new {i inactive} sink, then {!All_sinks.activate} to make it handle events. *) module All_sinks : sig - - val register : 'a sink_definition -> unit (** Register a new sink (e.g. [let () = Internal_event.All_sinks.register (module Sink_implementation)]) for it to be available (but inactive) in the framework. *) + val register : 'a sink_definition -> unit - val activate : Uri.t -> unit tzresult Lwt.t (** Make a registered sink active: the function finds it by URI scheme and calls {!configure}. *) + val activate : Uri.t -> unit tzresult Lwt.t - val close : unit -> unit tzresult Lwt.t (** Call [close] on all the sinks. *) + val close : unit -> unit tzresult Lwt.t - val pp_state : Format.formatter -> unit -> unit (** Display the state of registered/active sinks. *) + val pp_state : Format.formatter -> unit -> unit end (** {3 Common Event Definitions } *) @@ -210,57 +216,56 @@ end (** {!Error_event.t} is a generic event to emit values of type {!Error_monad.error list}. *) module Error_event : sig - type t = { - message : string option ; - severity : [ `Fatal | `Recoverable ] ; - trace : Error_monad.error list ; - } (** Errors mainly store {!Error_monad.error list} values. One can attach a message and a severity (the default is [`Recoverable] which corresponds to the {!Error} {!level}, while [`Fatal] corresponds to {!Fatal}). *) + type t = { + message : string option; + severity : [`Fatal | `Recoverable]; + trace : Error_monad.error list + } val make : - ?message: string -> - ?severity:[ `Fatal | `Recoverable ] -> + ?message:string -> + ?severity:[`Fatal | `Recoverable] -> Error_monad.error list -> - unit -> t + unit -> + t include EVENT with type t := t + (** [log_error_and_recover f] calls [f ()] and emits an {!Error_event.t} + event if it results in an error. It then continues in the [_ Lwt.t] + monad (e.g. there is no call to [Lwt.fail]). *) val log_error_and_recover : ?section:Section.t -> ?message:string -> - ?severity:[ `Fatal | `Recoverable ] -> - (unit -> (unit, error list) result Lwt.t) -> unit Lwt.t - (** [log_error_and_recover f] calls [f ()] and emits an {!Error_event.t} - event if it results in an error. It then continues in the [_ Lwt.t] - monad (e.g. there is no call to [Lwt.fail]). *) + ?severity:[`Fatal | `Recoverable] -> + (unit -> (unit, error list) result Lwt.t) -> + unit Lwt.t end (** The debug-event is meant for emitting (temporarily) semi-structured data in the event stream. *) module Debug_event : sig - type t = { - message : string ; - attachment : Data_encoding.Json.t ; - } - val make : ?attach: Data_encoding.Json.t -> string -> unit -> t + type t = {message : string; attachment : Data_encoding.Json.t} + + val make : ?attach:Data_encoding.Json.t -> string -> unit -> t + include EVENT with type t := t end (** The worker event is meant for use with {!Lwt_utils.worker}. 
*) module Lwt_worker_event : sig - type t = { - name : string; - event : [ `Ended | `Failed of string | `Started ]; - } + type t = {name : string; event : [`Ended | `Failed of string | `Started]} + include EVENT with type t := t - val on_event : - string -> [ `Ended | `Failed of string | `Started ] -> unit Lwt.t - (** [on_event msg status] emits an event of type [t] and matches + (** [on_event msg status] emits an event of type [t] and matches the signature required by {!Lwt_utils.worker}. *) + val on_event : + string -> [`Ended | `Failed of string | `Started] -> unit Lwt.t end (** {3 Compatibility With Legacy Logging } *) @@ -270,57 +275,97 @@ end into the event-logging framework. {b Please do not use for new modules.} *) module Legacy_logging : sig - module type LOG = sig - val debug: ('a, Format.formatter, unit, unit) format4 -> 'a - val log_info: ('a, Format.formatter, unit, unit) format4 -> 'a - val log_notice: ('a, Format.formatter, unit, unit) format4 -> 'a - val warn: ('a, Format.formatter, unit, unit) format4 -> 'a - val log_error: ('a, Format.formatter, unit, unit) format4 -> 'a - val fatal_error: ('a, Format.formatter, unit, unit) format4 -> 'a - - val lwt_debug: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_log_info: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_log_notice: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_warn: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_log_error: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a - val lwt_fatal_error: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + val debug : ('a, Format.formatter, unit, unit) format4 -> 'a + + val log_info : ('a, Format.formatter, unit, unit) format4 -> 'a + + val log_notice : ('a, Format.formatter, unit, unit) format4 -> 'a + + val warn : ('a, Format.formatter, unit, unit) format4 -> 'a + + val log_error : ('a, Format.formatter, unit, unit) format4 -> 'a + + val fatal_error : ('a, Format.formatter, unit, unit) format4 -> 'a + + val lwt_debug : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_log_info : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_log_notice : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_warn : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_log_error : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + + val lwt_fatal_error : + ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a end + open Tezos_stdlib + type ('a, 'b) msgf = (('a, Format.formatter, unit, 'b) format4 -> ?tags:Tag.set -> 'a) -> - ?tags:Tag.set -> 'b + ?tags:Tag.set -> + 'b + type ('a, 'b) log = ('a, 'b) msgf -> 'b + module type SEMLOG = sig module Tag = Tag - val debug: ('a, unit) log - val log_info: ('a, unit) log - val log_notice: ('a, unit) log - val warn: ('a, unit) log - val log_error: ('a, unit) log - val fatal_error: ('a, unit) log - val lwt_debug: ('a, unit Lwt.t) log - val lwt_log_info: ('a, unit Lwt.t) log - val lwt_log_notice: ('a, unit Lwt.t) log - val lwt_warn: ('a, unit Lwt.t) log - val lwt_log_error: ('a, unit Lwt.t) log - val lwt_fatal_error: ('a, unit Lwt.t) log + + val debug : ('a, unit) log + + val log_info : ('a, unit) log + + val log_notice : ('a, unit) log + + val warn : ('a, unit) log + + val log_error : ('a, unit) log + + val fatal_error : ('a, unit) log + + val lwt_debug : ('a, unit Lwt.t) log + + val lwt_log_info : ('a, unit Lwt.t) log + + val lwt_log_notice : ('a, unit Lwt.t) log + + val lwt_warn : ('a, unit Lwt.t) log + + 
val lwt_log_error : ('a, unit Lwt.t) log + + val lwt_fatal_error : ('a, unit Lwt.t) log + val event : string Tag.def + val exn : exn Tag.def end - module Make : (sig val name : string end) -> sig - module Event : EVENT - include LOG - end - module Make_semantic : (sig val name : string end) -> sig - module Event : EVENT - include SEMLOG - end + + module Make : functor + (_ : sig + val name : string + end) + -> sig + module Event : EVENT + + include LOG + end + + module Make_semantic : functor + (_ : sig + val name : string + end) + -> sig + module Event : EVENT + + include SEMLOG + end val sections : string list ref end - (** {3 Common Event-Sink Definitions } *) (** The lwt-sink outputs pretty-printed renderings of events to the @@ -332,6 +377,6 @@ end terminal, one needs to use the [TEZOS_LOG] variable (see also the module {!Lwt_log_sink_unix}). *) -module Lwt_log_sink: sig +module Lwt_log_sink : sig val uri_scheme : string end diff --git a/src/lib_micheline/.ocamlformat b/src/lib_micheline/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_micheline/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_micheline/micheline.ml b/src/lib_micheline/micheline.ml index 0cff3eabd188ef0fb8831d593a92182c5c5bcb87..3d68ca2b943a64c7765288df66536e173774844a 100644 --- a/src/lib_micheline/micheline.ml +++ b/src/lib_micheline/micheline.ml @@ -40,32 +40,44 @@ let canonical_location_encoding = let open Data_encoding in def "micheline.location" - ~title: - "Canonical location in a Micheline expression" + ~title:"Canonical location in a Micheline expression" ~description: - "The location of a node in a Micheline expression tree \ - in prefix order, with zero being the root and adding one \ - for every basic node, sequence and primitive application." @@ - int31 + "The location of a node in a Micheline expression tree in prefix order, \ + with zero being the root and adding one for every basic node, sequence \ + and primitive application." 
+ @@ int31 let location = function - | Int (loc, _) -> loc - | String (loc, _) -> loc - | Bytes (loc, _) -> loc - | Seq (loc, _) -> loc - | Prim (loc, _, _, _) -> loc + | Int (loc, _) -> + loc + | String (loc, _) -> + loc + | Bytes (loc, _) -> + loc + | Seq (loc, _) -> + loc + | Prim (loc, _, _, _) -> + loc let annotations = function - | Int (_, _) -> [] - | String (_, _) -> [] - | Bytes (_, _) -> [] - | Seq (_, _) -> [] - | Prim (_, _, _, annots) -> annots + | Int (_, _) -> + [] + | String (_, _) -> + [] + | Bytes (_, _) -> + [] + | Seq (_, _) -> + [] + | Prim (_, _, _, annots) -> + annots let root (Canonical expr) = expr let strip_locations root = - let id = let id = ref (-1) in fun () -> incr id ; !id in + let id = + let id = ref (-1) in + fun () -> incr id ; !id + in let rec strip_locations l = let id = id () in match l with @@ -78,11 +90,15 @@ let strip_locations root = | Seq (_, seq) -> Seq (id, List.map strip_locations seq) | Prim (_, name, seq, annots) -> - Prim (id, name, List.map strip_locations seq, annots) in + Prim (id, name, List.map strip_locations seq, annots) + in Canonical (strip_locations root) let extract_locations root = - let id = let id = ref (-1) in fun () -> incr id ; !id in + let id = + let id = ref (-1) in + fun () -> incr id ; !id + in let loc_table = ref [] in let rec strip_locations l = let id = id () in @@ -101,9 +117,10 @@ let extract_locations root = Seq (id, List.map strip_locations seq) | Prim (loc, name, seq, annots) -> loc_table := (id, loc) :: !loc_table ; - Prim (id, name, List.map strip_locations seq, annots) in + Prim (id, name, List.map strip_locations seq, annots) + in let stripped = strip_locations root in - Canonical stripped, List.rev !loc_table + (Canonical stripped, List.rev !loc_table) let inject_locations lookup (Canonical root) = let rec inject_locations l = @@ -117,16 +134,19 @@ let inject_locations lookup (Canonical root) = | Seq (loc, seq) -> Seq (lookup loc, List.map inject_locations seq) | Prim (loc, name, seq, annots) -> - Prim (lookup loc, name, List.map inject_locations seq, annots) in + Prim (lookup loc, name, List.map inject_locations seq, annots) + in inject_locations root let map f (Canonical expr) = let rec map_node f = function - | Int _ | String _ | Bytes _ as node -> node + | (Int _ | String _ | Bytes _) as node -> + node | Seq (loc, seq) -> Seq (loc, List.map (map_node f) seq) | Prim (loc, name, seq, annots) -> - Prim (loc, f name, List.map (map_node f) seq, annots) in + Prim (loc, f name, List.map (map_node f) seq, annots) + in Canonical (map_node f expr) let rec map_node fl fp = function @@ -145,127 +165,163 @@ type semantics = V0 | V1 let internal_canonical_encoding ~semantics ~variant prim_encoding = let open Data_encoding in - let int_encoding = - obj1 (req "int" z) in - let string_encoding = - obj1 (req "string" string) in - let bytes_encoding = - obj1 (req "bytes" bytes) in + let int_encoding = obj1 (req "int" z) in + let string_encoding = obj1 (req "string" string) in + let bytes_encoding = obj1 (req "bytes" bytes) in let int_encoding tag = - case tag int_encoding + case + tag + int_encoding ~title:"Int" (function Int (_, v) -> Some v | _ -> None) - (fun v -> Int (0, v)) in + (fun v -> Int (0, v)) + in let string_encoding tag = - case tag string_encoding + case + tag + string_encoding ~title:"String" (function String (_, v) -> Some v | _ -> None) - (fun v -> String (0, v)) in + (fun v -> String (0, v)) + in let bytes_encoding tag = - case tag bytes_encoding + case + tag + bytes_encoding ~title:"Bytes" (function 
Bytes (_, v) -> Some v | _ -> None) - (fun v -> Bytes (0, v)) in + (fun v -> Bytes (0, v)) + in let seq_encoding tag expr_encoding = - case tag (list expr_encoding) + case + tag + (list expr_encoding) ~title:"Sequence" (function Seq (_, v) -> Some v | _ -> None) - (fun args -> Seq (0, args)) in + (fun args -> Seq (0, args)) + in let annots_encoding = let split s = if s = "" && semantics <> V0 then [] else let annots = String.split_on_char ' ' s in - List.iter (fun a -> - if String.length a > 255 then failwith "Oversized annotation" - ) annots; + List.iter + (fun a -> + if String.length a > 255 then failwith "Oversized annotation") + annots ; if String.concat " " annots <> s then - failwith "Invalid annotation string, \ - must be a sequence of valid annotations with spaces" ; - annots in + failwith + "Invalid annotation string, must be a sequence of valid \ + annotations with spaces" ; + annots + in splitted ~json:(list (Bounded.string 255)) - ~binary:(conv (String.concat " ") split string) in + ~binary:(conv (String.concat " ") split string) + in let application_encoding tag expr_encoding = - case tag + case + tag ~title:"Generic prim (any number of args with or without annot)" - (obj3 (req "prim" prim_encoding) + (obj3 + (req "prim" prim_encoding) (dft "args" (list expr_encoding) []) (dft "annots" annots_encoding [])) - (function Prim (_, prim, args, annots) -> Some (prim, args, annots) - | _ -> None) - (fun (prim, args, annots) -> Prim (0, prim, args, annots)) in - let node_encoding = mu ("micheline." ^ variant ^ ".expression") (fun expr_encoding -> - splitted - ~json:(union ~tag_size:`Uint8 - [ int_encoding Json_only; - string_encoding Json_only ; - bytes_encoding Json_only ; - seq_encoding Json_only expr_encoding ; - application_encoding Json_only expr_encoding ]) - ~binary:(union ~tag_size:`Uint8 - [ int_encoding (Tag 0) ; - string_encoding (Tag 1) ; - seq_encoding (Tag 2) expr_encoding ; - (* No args, no annot *) - case (Tag 3) - ~title:"Prim (no args, annot)" - (obj1 (req "prim" prim_encoding)) - (function Prim (_, v, [], []) -> Some v - | _ -> None) - (fun v -> Prim (0, v, [], [])) ; - (* No args, with annots *) - case (Tag 4) - ~title:"Prim (no args + annot)" - (obj2 (req "prim" prim_encoding) - (req "annots" annots_encoding)) - (function - | Prim (_, v, [], annots) -> Some (v, annots) - | _ -> None) - (function (prim, annots) -> Prim (0, prim, [], annots)) ; - (* Single arg, no annot *) - case (Tag 5) - ~title:"Prim (1 arg, no annot)" - (obj2 (req "prim" prim_encoding) - (req "arg" expr_encoding)) - (function - | Prim (_, v, [ arg ], []) -> Some (v, arg) - | _ -> None) - (function (prim, arg) -> Prim (0, prim, [ arg ], [])) ; - (* Single arg, with annot *) - case (Tag 6) - ~title:"Prim (1 arg + annot)" - (obj3 (req "prim" prim_encoding) - (req "arg" expr_encoding) - (req "annots" annots_encoding)) - (function - | Prim (_, prim, [ arg ], annots) -> Some (prim, arg, annots) - | _ -> None) - (fun (prim, arg, annots) -> Prim (0, prim, [ arg ], annots)) ; - (* Two args, no annot *) - case (Tag 7) - ~title:"Prim (2 args, no annot)" - (obj3 (req "prim" prim_encoding) - (req "arg1" expr_encoding) - (req "arg2" expr_encoding)) - (function - | Prim (_, prim, [ arg1 ; arg2 ], []) -> Some (prim, arg1, arg2) - | _ -> None) - (fun (prim, arg1, arg2) -> Prim (0, prim, [ arg1 ; arg2 ], [])) ; - (* Two args, with annots *) - case (Tag 8) - ~title:"Prim (2 args + annot)" - (obj4 (req "prim" prim_encoding) - (req "arg1" expr_encoding) - (req "arg2" expr_encoding) - (req "annots" annots_encoding)) - 
(function - | Prim (_, prim, [ arg1 ; arg2 ], annots) -> Some (prim, arg1, arg2, annots) - | _ -> None) - (fun (prim, arg1, arg2, annots) -> Prim (0, prim, [ arg1 ; arg2 ], annots)) ; - (* General case *) - application_encoding (Tag 9) expr_encoding ; - bytes_encoding (Tag 10) ])) + (function + | Prim (_, prim, args, annots) -> Some (prim, args, annots) | _ -> None) + (fun (prim, args, annots) -> Prim (0, prim, args, annots)) + in + let node_encoding = + mu + ("micheline." ^ variant ^ ".expression") + (fun expr_encoding -> + splitted + ~json: + (union + ~tag_size:`Uint8 + [ int_encoding Json_only; + string_encoding Json_only; + bytes_encoding Json_only; + seq_encoding Json_only expr_encoding; + application_encoding Json_only expr_encoding ]) + ~binary: + (union + ~tag_size:`Uint8 + [ int_encoding (Tag 0); + string_encoding (Tag 1); + seq_encoding (Tag 2) expr_encoding; + (* No args, no annot *) + case + (Tag 3) + ~title:"Prim (no args, annot)" + (obj1 (req "prim" prim_encoding)) + (function Prim (_, v, [], []) -> Some v | _ -> None) + (fun v -> Prim (0, v, [], [])); + (* No args, with annots *) + case + (Tag 4) + ~title:"Prim (no args + annot)" + (obj2 + (req "prim" prim_encoding) + (req "annots" annots_encoding)) + (function + | Prim (_, v, [], annots) -> Some (v, annots) | _ -> None) + (function (prim, annots) -> Prim (0, prim, [], annots)); + (* Single arg, no annot *) + case + (Tag 5) + ~title:"Prim (1 arg, no annot)" + (obj2 (req "prim" prim_encoding) (req "arg" expr_encoding)) + (function + | Prim (_, v, [arg], []) -> Some (v, arg) | _ -> None) + (function (prim, arg) -> Prim (0, prim, [arg], [])); + (* Single arg, with annot *) + case + (Tag 6) + ~title:"Prim (1 arg + annot)" + (obj3 + (req "prim" prim_encoding) + (req "arg" expr_encoding) + (req "annots" annots_encoding)) + (function + | Prim (_, prim, [arg], annots) -> + Some (prim, arg, annots) + | _ -> + None) + (fun (prim, arg, annots) -> Prim (0, prim, [arg], annots)); + (* Two args, no annot *) + case + (Tag 7) + ~title:"Prim (2 args, no annot)" + (obj3 + (req "prim" prim_encoding) + (req "arg1" expr_encoding) + (req "arg2" expr_encoding)) + (function + | Prim (_, prim, [arg1; arg2], []) -> + Some (prim, arg1, arg2) + | _ -> + None) + (fun (prim, arg1, arg2) -> Prim (0, prim, [arg1; arg2], [])); + (* Two args, with annots *) + case + (Tag 8) + ~title:"Prim (2 args + annot)" + (obj4 + (req "prim" prim_encoding) + (req "arg1" expr_encoding) + (req "arg2" expr_encoding) + (req "annots" annots_encoding)) + (function + | Prim (_, prim, [arg1; arg2], annots) -> + Some (prim, arg1, arg2, annots) + | _ -> + None) + (fun (prim, arg1, arg2, annots) -> + Prim (0, prim, [arg1; arg2], annots)); + (* General case *) + application_encoding (Tag 9) expr_encoding; + bytes_encoding (Tag 10) ])) in conv (function Canonical node -> node) @@ -274,8 +330,10 @@ let internal_canonical_encoding ~semantics ~variant prim_encoding = let canonical_encoding ~variant prim_encoding = internal_canonical_encoding ~semantics:V1 ~variant prim_encoding + let canonical_encoding_v1 ~variant prim_encoding = internal_canonical_encoding ~semantics:V1 ~variant prim_encoding + let canonical_encoding_v0 ~variant prim_encoding = internal_canonical_encoding ~semantics:V0 ~variant prim_encoding @@ -283,12 +341,12 @@ let table_encoding ~variant location_encoding prim_encoding = let open Data_encoding in conv (fun node -> - let canon, assoc = extract_locations node in - let _, table = List.split assoc in - (canon, table)) + let (canon, assoc) = extract_locations node in + 
let (_, table) = List.split assoc in + (canon, table)) (fun (canon, table) -> - let table = Array.of_list table in - inject_locations (fun i -> table.(i)) canon) + let table = Array.of_list table in + inject_locations (fun i -> table.(i)) canon) (obj2 (req "expression" (canonical_encoding ~variant prim_encoding)) (req "locations" (list location_encoding))) diff --git a/src/lib_micheline/micheline.mli b/src/lib_micheline/micheline.mli index 49cb40c56c4fc32fad24f032e98fb6b364e46e08..3d7d501dd878b80569e84751bd027a5cadef6a4e 100644 --- a/src/lib_micheline/micheline.mli +++ b/src/lib_micheline/micheline.mli @@ -38,16 +38,21 @@ type ('l, 'p) node = (** Encoding for expressions, as their {!canonical} encoding. Locations are stored in a side table. See {!canonical_encoding} for the [variant] parameter. *) -val table_encoding : variant:string -> - 'l Data_encoding.encoding -> 'p Data_encoding.encoding -> +val table_encoding : + variant:string -> + 'l Data_encoding.encoding -> + 'p Data_encoding.encoding -> ('l, 'p) node Data_encoding.encoding (** Encoding for expressions, as their {!canonical} encoding. Locations are erased when serialized, and restored to a provided default value when deserialized. See {!canonical_encoding} for the [variant] parameter. *) -val erased_encoding : variant:string -> - 'l -> 'p Data_encoding.encoding -> ('l, 'p) node Data_encoding.encoding +val erased_encoding : + variant:string -> + 'l -> + 'p Data_encoding.encoding -> + ('l, 'p) node Data_encoding.encoding (** Extract the location of the node. *) val location : ('l, 'p) node -> 'l @@ -73,14 +78,23 @@ val canonical_location_encoding : canonical_location Data_encoding.encoding is a name used to produce named definitions in the schemas. Make sure to use different names if two expression variants with different primitive encodings are used in the same schema. *) -val canonical_encoding : variant:string -> 'l Data_encoding.encoding -> 'l canonical Data_encoding.encoding +val canonical_encoding : + variant:string -> + 'l Data_encoding.encoding -> + 'l canonical Data_encoding.encoding (** Old version of {!canonical_encoding} for retrocompatibility. Do not use in new code. *) -val canonical_encoding_v0 : variant:string -> 'l Data_encoding.encoding -> 'l canonical Data_encoding.encoding +val canonical_encoding_v0 : + variant:string -> + 'l Data_encoding.encoding -> + 'l canonical Data_encoding.encoding (** Alias for {!canonical_encoding}. *) -val canonical_encoding_v1 : variant:string -> 'l Data_encoding.encoding -> 'l canonical Data_encoding.encoding +val canonical_encoding_v1 : + variant:string -> + 'l Data_encoding.encoding -> + 'l canonical Data_encoding.encoding (** Compute the canonical form of an expression. Drops the concrete locations completely. *) @@ -91,14 +105,17 @@ val root : 'p canonical -> (canonical_location, 'p) node (** Compute the canonical form of an expression. Saves the concrete locations in an association list. *) -val extract_locations : ('l, 'p) node -> 'p canonical * (canonical_location * 'l) list +val extract_locations : + ('l, 'p) node -> 'p canonical * (canonical_location * 'l) list (** Transforms an expression in canonical form into a polymorphic one. Takes a mapping function to inject the concrete locations. *) -val inject_locations : (canonical_location -> 'l) -> 'p canonical -> ('l, 'p) node +val inject_locations : + (canonical_location -> 'l) -> 'p canonical -> ('l, 'p) node (** Copies the tree, updating its primitives. 
*) val map : ('a -> 'b) -> 'a canonical -> 'b canonical (** Copies the tree, updating its primitives and locations. *) -val map_node : ('la -> 'lb) -> ('pa -> 'pb) -> ('la, 'pa) node -> ('lb, 'pb) node +val map_node : + ('la -> 'lb) -> ('pa -> 'pb) -> ('la, 'pa) node -> ('lb, 'pb) node diff --git a/src/lib_micheline/micheline_parser.ml b/src/lib_micheline/micheline_parser.ml index 316e9f16dd93e0060462356f5a700fd16cdde88e..0df713014c842a3cff13003e46d089503d7d8279 100644 --- a/src/lib_micheline/micheline_parser.ml +++ b/src/lib_micheline/micheline_parser.ml @@ -29,45 +29,31 @@ open Micheline type 'a parsing_result = 'a * error list -type point = - { point : int ; - byte : int ; - line : int ; - column : int } - -let point_zero = - { point = 0 ; - byte = 0 ; - line = 0 ; - column = 0 } +type point = {point : int; byte : int; line : int; column : int} + +let point_zero = {point = 0; byte = 0; line = 0; column = 0} let point_encoding = let open Data_encoding in conv - (fun { line ; column ; point ; byte } -> (line, column, point, byte)) - (fun (line, column, point, byte) -> { line ; column ; point ; byte }) + (fun {line; column; point; byte} -> (line, column, point, byte)) + (fun (line, column, point, byte) -> {line; column; point; byte}) (obj4 (req "line" uint16) (req "column" uint16) (req "point" uint16) (req "byte" uint16)) -type location = - { start : point ; - stop : point } +type location = {start : point; stop : point} -let location_zero = - { start = point_zero ; - stop = point_zero } +let location_zero = {start = point_zero; stop = point_zero} let location_encoding = let open Data_encoding in conv - (fun { start ; stop } -> (start, stop)) - (fun (start, stop) -> { start ; stop }) - (obj2 - (req "start" point_encoding) - (req "stop" point_encoding)) + (fun {start; stop} -> (start, stop)) + (fun (start, stop) -> {start; stop}) + (obj2 (req "start" point_encoding) (req "stop" point_encoding)) type token_value = | String of string @@ -78,76 +64,96 @@ type token_value = | Comment of string | Eol_comment of string | Semi - | Open_paren | Close_paren - | Open_brace | Close_brace + | Open_paren + | Close_paren + | Open_brace + | Close_brace let token_value_encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"String" (obj1 (req "string" string)) (function String s -> Some s | _ -> None) - (fun s -> String s) ; - case (Tag 1) + (fun s -> String s); + case + (Tag 1) ~title:"Int" (obj1 (req "int" string)) (function Int s -> Some s | _ -> None) - (fun s -> Int s) ; - case (Tag 2) + (fun s -> Int s); + case + (Tag 2) ~title:"Annot" (obj1 (req "annot" string)) (function Annot s -> Some s | _ -> None) - (fun s -> Annot s) ; - case (Tag 3) + (fun s -> Annot s); + case + (Tag 3) ~title:"Comment" (obj2 (req "comment" string) (dft "end_of_line" bool false)) (function - | Comment s -> Some (s, false) - | Eol_comment s -> Some (s, true) | _ -> None) - (function - | (s, false) -> Comment s - | (s, true) -> Eol_comment s) ; - case (Tag 4) + | Comment s -> + Some (s, false) + | Eol_comment s -> + Some (s, true) + | _ -> + None) + (function (s, false) -> Comment s | (s, true) -> Eol_comment s); + case + (Tag 4) ~title:"Punctuation" - (obj1 (req "punctuation" (string_enum [ - "(", Open_paren ; - ")", Close_paren ; - "{", Open_brace ; - "}", Close_brace ; - ";", Semi ]))) - (fun t -> Some t) (fun t -> t) ; - case (Tag 5) + (obj1 + (req + "punctuation" + (string_enum + [ ("(", Open_paren); + (")", Close_paren); + ("{", Open_brace); + ("}", Close_brace); + (";", Semi) ]))) + (fun t 
-> Some t) + (fun t -> t); + case + (Tag 5) ~title:"Bytes" (obj1 (req "bytes" string)) (function Bytes s -> Some s | _ -> None) (fun s -> Bytes s) ] -type token = - { token : token_value ; - loc : location } +type token = {token : token_value; loc : location} let max_annot_length = 255 type error += Invalid_utf8_sequence of point * string + type error += Unexpected_character of point * string + type error += Undefined_escape_sequence of point * string + type error += Missing_break_after_number of point + type error += Unterminated_string of location + type error += Unterminated_integer of location + type error += Odd_lengthed_bytes of location + type error += Unterminated_comment of location + type error += Annotation_length of location let tokenize source = let decoder = Uutf.decoder ~encoding:`UTF_8 (`String source) in let here () = - { point = Uutf.decoder_count decoder ; - byte = Uutf.decoder_byte_count decoder ; - line = Uutf.decoder_line decoder ; - column = Uutf.decoder_col decoder } in - let tok start stop token = - { loc = { start ; stop } ; token } in + { point = Uutf.decoder_count decoder; + byte = Uutf.decoder_byte_count decoder; + line = Uutf.decoder_line decoder; + column = Uutf.decoder_col decoder } + in + let tok start stop token = {loc = {start; stop}; token} in let stack = ref [] in let errors = ref [] in let rec next () = @@ -155,193 +161,215 @@ let tokenize source = | charloc :: charlocs -> stack := charlocs ; charloc - | [] -> + | [] -> ( let loc = here () in match Uutf.decode decoder with - | `Await -> assert false + | `Await -> + assert false | `Malformed s -> errors := Invalid_utf8_sequence (loc, s) :: !errors ; next () - | `Uchar _ | `End as other -> other, loc in - let back charloc = - stack := charloc :: !stack in + | (`Uchar _ | `End) as other -> + (other, loc) ) + in + let back charloc = stack := charloc :: !stack in let uchar_to_char c = - if Uchar.is_char c then - Some (Uchar.to_char c) - else - None in + if Uchar.is_char c then Some (Uchar.to_char c) else None + in let allowed_ident_char c = match uchar_to_char c with - | Some ('a'..'z' | 'A'..'Z' | '_' | '0'..'9') -> true - | Some _ | None -> false in + | Some ('a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9') -> + true + | Some _ | None -> + false + in let allowed_annot_char c = match uchar_to_char c with - | Some ('a'..'z' | 'A'..'Z' | '_' | '.' | '%' | '@' | '0'..'9') -> true - | Some _ | None -> false in + | Some ('a' .. 'z' | 'A' .. 'Z' | '_' | '.' | '%' | '@' | '0' .. '9') -> + true + | Some _ | None -> + false + in let rec skip acc = match next () with - | `End, _ -> List.rev acc - | `Uchar c, start -> - begin match uchar_to_char c with - | Some ('a'..'z' | 'A'..'Z') -> ident acc start (fun s _ -> Ident s) - | Some ('@' | ':' | '$' | '&' | '%' | '!' 
| '?') -> - annot acc start - (fun str stop -> - if String.length str > max_annot_length - then errors := (Annotation_length { start ; stop }) :: !errors ; - Annot str) - | Some '-' -> - begin match next () with - | `End, stop -> - errors := Unterminated_integer { start ; stop } :: !errors ; - List.rev acc - | `Uchar c, stop as first -> - begin match uchar_to_char c with - | Some '0' -> base acc start - | Some ('1'..'9') -> integer acc start - | Some _ | None -> - errors := Unterminated_integer { start ; stop } :: !errors ; - back first ; - skip acc - end - end - | Some '0' -> base acc start - | Some ('1'..'9') -> integer acc start - | Some (' ' | '\n') -> skip acc - | Some ';' -> skip (tok start (here ()) Semi :: acc) - | Some '{' -> skip (tok start (here ()) Open_brace :: acc) - | Some '}' -> skip (tok start (here ()) Close_brace :: acc) - | Some '(' -> skip (tok start (here ()) Open_paren :: acc) - | Some ')' -> skip (tok start (here ()) Close_paren :: acc) - | Some '"' -> string acc [] start - | Some '#' -> eol_comment acc start - | Some '/' -> - begin match next () with - | `Uchar c, _ when Uchar.equal c (Uchar.of_char '*') -> - comment acc start 0 - | (`Uchar _ | `End), _ as charloc -> - errors := Unexpected_character (start, "/") :: !errors ; - back charloc ; - skip acc - end + | (`End, _) -> + List.rev acc + | (`Uchar c, start) -> ( + match uchar_to_char c with + | Some ('a' .. 'z' | 'A' .. 'Z') -> + ident acc start (fun s _ -> Ident s) + | Some ('@' | ':' | '$' | '&' | '%' | '!' | '?') -> + annot acc start (fun str stop -> + if String.length str > max_annot_length then + errors := Annotation_length {start; stop} :: !errors ; + Annot str) + | Some '-' -> ( + match next () with + | (`End, stop) -> + errors := Unterminated_integer {start; stop} :: !errors ; + List.rev acc + | (`Uchar c, stop) as first -> ( + match uchar_to_char c with + | Some '0' -> + base acc start + | Some '1' .. '9' -> + integer acc start | Some _ | None -> - let byte = Uutf.decoder_byte_count decoder in - let s = String.sub source start.byte (byte - start.byte) in - errors := Unexpected_character (start, s) :: !errors ; - skip acc - end + errors := Unterminated_integer {start; stop} :: !errors ; + back first ; + skip acc ) ) + | Some '0' -> + base acc start + | Some '1' .. '9' -> + integer acc start + | Some (' ' | '\n') -> + skip acc + | Some ';' -> + skip (tok start (here ()) Semi :: acc) + | Some '{' -> + skip (tok start (here ()) Open_brace :: acc) + | Some '}' -> + skip (tok start (here ()) Close_brace :: acc) + | Some '(' -> + skip (tok start (here ()) Open_paren :: acc) + | Some ')' -> + skip (tok start (here ()) Close_paren :: acc) + | Some '"' -> + string acc [] start + | Some '#' -> + eol_comment acc start + | Some '/' -> ( + match next () with + | (`Uchar c, _) when Uchar.equal c (Uchar.of_char '*') -> + comment acc start 0 + | ((`Uchar _ | `End), _) as charloc -> + errors := Unexpected_character (start, "/") :: !errors ; + back charloc ; + skip acc ) + | Some _ | None -> + let byte = Uutf.decoder_byte_count decoder in + let s = String.sub source start.byte (byte - start.byte) in + errors := Unexpected_character (start, s) :: !errors ; + skip acc ) and base acc start = match next () with - | (`Uchar c, stop) as charloc -> - begin match uchar_to_char c with - | Some ('0'.. 
'9') -> integer acc start - | Some 'x' -> bytes acc start - | Some ('a'..'w' | 'y' | 'z' | 'A'..'Z') -> - errors := Missing_break_after_number stop :: !errors ; - back charloc ; - skip (tok start stop (Int "0") :: acc) - | Some _ | None -> - back charloc ; - skip (tok start stop (Int "0") :: acc) - end + | (`Uchar c, stop) as charloc -> ( + match uchar_to_char c with + | Some '0' .. '9' -> + integer acc start + | Some 'x' -> + bytes acc start + | Some ('a' .. 'w' | 'y' | 'z' | 'A' .. 'Z') -> + errors := Missing_break_after_number stop :: !errors ; + back charloc ; + skip (tok start stop (Int "0") :: acc) + | Some _ | None -> + back charloc ; + skip (tok start stop (Int "0") :: acc) ) | (_, stop) as other -> back other ; skip (tok start stop (Int "0") :: acc) and integer acc start = let tok stop = - let value = - String.sub source start.byte (stop.byte - start.byte) in - tok start stop (Int value) in + let value = String.sub source start.byte (stop.byte - start.byte) in + tok start stop (Int value) + in match next () with - | (`Uchar c, stop) as charloc -> + | (`Uchar c, stop) as charloc -> ( let missing_break () = errors := Missing_break_after_number stop :: !errors ; back charloc ; - skip (tok stop :: acc) in - begin match Uchar.to_char c with - | ('0'.. '9') -> - integer acc start - | ('a'..'z' | 'A'..'Z') -> - missing_break () - | _ -> - back charloc ; - skip (tok stop :: acc) - end + skip (tok stop :: acc) + in + match Uchar.to_char c with + | '0' .. '9' -> + integer acc start + | 'a' .. 'z' | 'A' .. 'Z' -> + missing_break () + | _ -> + back charloc ; + skip (tok stop :: acc) ) | (`End, stop) as other -> back other ; skip (tok stop :: acc) and bytes acc start = let tok stop = - let value = - String.sub source start.byte (stop.byte - start.byte) in - tok start stop (Bytes value) in + let value = String.sub source start.byte (stop.byte - start.byte) in + tok start stop (Bytes value) + in match next () with - | (`Uchar c, stop) as charloc -> + | (`Uchar c, stop) as charloc -> ( let missing_break () = errors := Missing_break_after_number stop :: !errors ; back charloc ; - skip (tok stop :: acc) in - begin match Uchar.to_char c with - | ('0'..'9' | 'a'..'f' | 'A'..'F') -> - bytes acc start - | ('g'..'z' | 'G'..'Z') -> - missing_break () - | _ -> - back charloc ; - skip (tok stop :: acc) - end + skip (tok stop :: acc) + in + match Uchar.to_char c with + | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' -> + bytes acc start + | 'g' .. 'z' | 'G' .. 
'Z' -> + missing_break () + | _ -> + back charloc ; + skip (tok stop :: acc) ) | (`End, stop) as other -> back other ; skip (tok stop :: acc) and string acc sacc start = let tok () = - tok start (here ()) (String (String.concat "" (List.rev sacc))) in + tok start (here ()) (String (String.concat "" (List.rev sacc))) + in match next () with - | `End, stop -> - errors := Unterminated_string { start ; stop } :: !errors ; + | (`End, stop) -> + errors := Unterminated_string {start; stop} :: !errors ; skip (tok () :: acc) - | `Uchar c, stop -> - match uchar_to_char c with - | Some '"' -> skip (tok () :: acc) - | Some ('\n' | '\r') -> - errors := Unterminated_string { start ; stop } :: !errors ; + | (`Uchar c, stop) -> ( + match uchar_to_char c with + | Some '"' -> + skip (tok () :: acc) + | Some ('\n' | '\r') -> + errors := Unterminated_string {start; stop} :: !errors ; + skip (tok () :: acc) + | Some '\\' -> ( + match next () with + | (`End, stop) -> + errors := Unterminated_string {start; stop} :: !errors ; skip (tok () :: acc) - | Some '\\' -> - begin match next () with - | `End, stop -> - errors := Unterminated_string { start ; stop } :: !errors ; - skip (tok () :: acc) - | `Uchar c, loc -> - match uchar_to_char c with - | Some '"' -> string acc ("\"" :: sacc) start - | Some 'r' -> string acc ("\r" :: sacc) start - | Some 'n' -> string acc ("\n" :: sacc) start - | Some 't' -> string acc ("\t" :: sacc) start - | Some 'b' -> string acc ("\b" :: sacc) start - | Some '\\' -> string acc ("\\" :: sacc) start - | Some _ | None -> - let byte = Uutf.decoder_byte_count decoder in - let s = String.sub source loc.byte (byte - loc.byte) in - errors := Undefined_escape_sequence (loc, s) :: !errors ; - string acc sacc start - end - | Some _ | None -> - let byte = Uutf.decoder_byte_count decoder in - let s = String.sub source stop.byte (byte - stop.byte) in - string acc (s :: sacc) start - and generic_ident allow_char acc start (ret : string -> point -> token_value) = + | (`Uchar c, loc) -> ( + match uchar_to_char c with + | Some '"' -> + string acc ("\"" :: sacc) start + | Some 'r' -> + string acc ("\r" :: sacc) start + | Some 'n' -> + string acc ("\n" :: sacc) start + | Some 't' -> + string acc ("\t" :: sacc) start + | Some 'b' -> + string acc ("\b" :: sacc) start + | Some '\\' -> + string acc ("\\" :: sacc) start + | Some _ | None -> + let byte = Uutf.decoder_byte_count decoder in + let s = String.sub source loc.byte (byte - loc.byte) in + errors := Undefined_escape_sequence (loc, s) :: !errors ; + string acc sacc start ) ) + | Some _ | None -> + let byte = Uutf.decoder_byte_count decoder in + let s = String.sub source stop.byte (byte - stop.byte) in + string acc (s :: sacc) start ) + and generic_ident allow_char acc start (ret : string -> point -> token_value) + = let tok stop = - let name = - String.sub source start.byte (stop.byte - start.byte) in - tok start stop (ret name stop) in + let name = String.sub source start.byte (stop.byte - start.byte) in + tok start stop (ret name stop) + in match next () with | (`Uchar c, stop) as charloc -> - if allow_char c then - generic_ident allow_char acc start ret - else begin + if allow_char c then generic_ident allow_char acc start ret + else ( back charloc ; - skip (tok stop :: acc) - end + skip (tok stop :: acc) ) | (_, stop) as other -> back other ; skip (tok stop :: acc) @@ -349,75 +377,82 @@ let tokenize source = and annot acc start ret = generic_ident allowed_annot_char acc start ret and comment acc start lvl = match next () with - | `End, stop -> - 
errors := Unterminated_comment { start ; stop } :: !errors ; + | (`End, stop) -> + errors := Unterminated_comment {start; stop} :: !errors ; let text = String.sub source start.byte (stop.byte - start.byte) in skip (tok start stop (Comment text) :: acc) - | `Uchar c, _ -> - begin match uchar_to_char c with - | Some '*' -> - begin match next () with - | `Uchar c, _ when Uchar.equal c (Uchar.of_char '/') -> - if lvl = 0 then - let stop = here () in - let text = - String.sub source start.byte (stop.byte - start.byte) in - skip (tok start stop (Comment text) :: acc) - else - comment acc start (lvl - 1) - | other -> - back other ; - comment acc start lvl - end - | Some '/' -> - begin match next () with - | `Uchar c, _ when Uchar.equal c (Uchar.of_char '*') -> - comment acc start (lvl + 1) - | other -> - back other ; - comment acc start lvl - end - | Some _ | None -> comment acc start lvl - end + | (`Uchar c, _) -> ( + match uchar_to_char c with + | Some '*' -> ( + match next () with + | (`Uchar c, _) when Uchar.equal c (Uchar.of_char '/') -> + if lvl = 0 then + let stop = here () in + let text = + String.sub source start.byte (stop.byte - start.byte) + in + skip (tok start stop (Comment text) :: acc) + else comment acc start (lvl - 1) + | other -> + back other ; comment acc start lvl ) + | Some '/' -> ( + match next () with + | (`Uchar c, _) when Uchar.equal c (Uchar.of_char '*') -> + comment acc start (lvl + 1) + | other -> + back other ; comment acc start lvl ) + | Some _ | None -> + comment acc start lvl ) and eol_comment acc start = let tok stop = let text = String.sub source start.byte (stop.byte - start.byte) in - tok start stop (Eol_comment text) in + tok start stop (Eol_comment text) + in match next () with - | `Uchar c, stop -> - begin match uchar_to_char c with - | Some '\n' -> skip (tok stop :: acc) - | Some _ | None -> eol_comment acc start - end + | (`Uchar c, stop) -> ( + match uchar_to_char c with + | Some '\n' -> + skip (tok stop :: acc) + | Some _ | None -> + eol_comment acc start ) | (_, stop) as other -> back other ; - skip (tok stop :: acc) in + skip (tok stop :: acc) + in let tokens = skip [] in - tokens, List.rev !errors + (tokens, List.rev !errors) type node = (location, string) Micheline.node let node_encoding = - Micheline.table_encoding ~variant:"generic" location_encoding Data_encoding.string + Micheline.table_encoding + ~variant:"generic" + location_encoding + Data_encoding.string (* Beginning of a sequence of consecutive primitives *) let min_point : node list -> point = function - | [] -> point_zero - | Int ({ start ; _ }, _) :: _ - | String ({ start ; _ }, _) :: _ - | Bytes ({ start ; _ }, _) :: _ - | Prim ({ start ; _ }, _, _, _) :: _ - | Seq ({ start ; _ }, _) :: _ -> start + | [] -> + point_zero + | Int ({start; _}, _) :: _ + | String ({start; _}, _) :: _ + | Bytes ({start; _}, _) :: _ + | Prim ({start; _}, _, _, _) :: _ + | Seq ({start; _}, _) :: _ -> + start (* End of a sequence of consecutive primitives *) -let rec max_point : node list -> point = function - | [] -> point_zero - | _ :: (_ :: _ as rest) -> max_point rest - | Int ({ stop ; _ }, _) :: [] - | String ({ stop ; _ }, _) :: [] - | Bytes ({ stop ; _ }, _) :: [] - | Prim ({ stop ; _ }, _, _, _) :: [] - | Seq ({ stop ; _ }, _) :: [] -> stop +let rec max_point : node list -> point = function + | [] -> + point_zero + | _ :: (_ :: _ as rest) -> + max_point rest + | [Int ({stop; _}, _)] + | [String ({stop; _}, _)] + | [Bytes ({stop; _}, _)] + | [Prim ({stop; _}, _, _, _)] + | [Seq ({stop; _}, _)] -> + stop 
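(* Editor's note: illustrative sketch only, not part of this patch.
   It shows how the entry points of this module are meant to compose:
   [tokenize] produces located tokens, [parse_toplevel] (defined below)
   folds them through the parser's mode stack, and [no_parsing_error]
   lifts the (result, error list) pair into a [tzresult]. The wiring and
   the helper name [parse_source] are assumptions; only the signatures
   come from micheline_parser.mli.

   let parse_source (src : string) =
     let (tokens, lexing_errors) = tokenize src in
     let (ast, parsing_errors) = parse_toplevel ~check:true tokens in
     no_parsing_error (ast, lexing_errors @ parsing_errors)
*)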
(* An item in the parser's state stack. Not every value of type [mode list] is a valid parsing context. @@ -435,26 +470,27 @@ type mode = | Wrapped of token * string * node list * string list (* Enter a new parsing state. *) -let push_mode mode stack = - mode :: stack +let push_mode mode stack = mode :: stack (* Leave a parsing state. *) -let pop_mode = function - | [] -> assert false - | _ :: rest -> rest +let pop_mode = function [] -> assert false | _ :: rest -> rest (* Usually after a [pop_mode], jump back into the previous parsing state, injecting the current reduction (insert the just parsed item of a sequence or argument of a primitive application). *) let fill_mode result = function - | [] -> assert false - | Expression _ :: _ :: _ -> assert false - | Expression (Some _) :: [] -> assert false - | Toplevel _ :: _ :: _ -> assert false - | Expression None :: [] -> - Expression (Some result) :: [] - | Toplevel exprs :: [] -> - Toplevel (result :: exprs) :: [] + | [] -> + assert false + | Expression _ :: _ :: _ -> + assert false + | [Expression (Some _)] -> + assert false + | Toplevel _ :: _ :: _ -> + assert false + | [Expression None] -> + [Expression (Some result)] + | [Toplevel exprs] -> + [Toplevel (result :: exprs)] | Sequence (token, exprs) :: rest -> Sequence (token, result :: exprs) :: rest | Wrapped (token, name, exprs, annot) :: rest -> @@ -463,417 +499,536 @@ let fill_mode result = function Unwrapped (start, name, result :: exprs, annot) :: rest type error += Unclosed of token + type error += Unexpected of token + type error += Extra of token + type error += Misaligned of node + type error += Empty let rec annots = function - | { token = Annot annot ; _ } :: rest -> - let annots, rest = annots rest in - annot :: annots, rest - | rest -> [], rest + | {token = Annot annot; _} :: rest -> + let (annots, rest) = annots rest in + (annot :: annots, rest) + | rest -> + ([], rest) let rec parse ?(check = true) errors tokens stack = (* Two steps: - 1. parse without checking indentation [parse] - 2. check indentation [check] (inlined in 1) *) - match stack, tokens with + match (stack, tokens) with (* Start by preventing all absurd cases, so now the pattern matching exhaustivity can tell us that we treater all possible tokens for all possible valid states. 
*) - | [], _ - | [ Wrapped _ ], _ - | [ Unwrapped _ ], _ - | Unwrapped _ :: Unwrapped _ :: _, _ - | Unwrapped _ :: Wrapped _ :: _, _ - | Toplevel _ :: _ :: _, _ - | Expression _ :: _ :: _, _ -> + | ([], _) + | ([Wrapped _], _) + | ([Unwrapped _], _) + | (Unwrapped _ :: Unwrapped _ :: _, _) + | (Unwrapped _ :: Wrapped _ :: _, _) + | (Toplevel _ :: _ :: _, _) + | (Expression _ :: _ :: _, _) -> assert false (* Return *) - | Expression (Some result) :: _, [] -> - [ result ], List.rev errors - | Expression (Some _) :: _, token :: rem -> + | (Expression (Some result) :: _, []) -> + ([result], List.rev errors) + | (Expression (Some _) :: _, token :: rem) -> let errors = Unexpected token :: errors in parse ~check errors rem (* skip *) stack - | Expression None :: _, [] -> + | (Expression None :: _, []) -> let errors = Empty :: errors in - let ghost = { start = point_zero ; stop = point_zero} in - [ Seq (ghost, []) ], List.rev errors - | Toplevel [ Seq (_, exprs) as expr ] :: [], - [] -> - let errors = if check then do_check ~toplevel: false errors expr else errors in - exprs, List.rev errors - | Toplevel exprs :: [], - [] -> + let ghost = {start = point_zero; stop = point_zero} in + ([Seq (ghost, [])], List.rev errors) + | ([Toplevel [(Seq (_, exprs) as expr)]], []) -> + let errors = + if check then do_check ~toplevel:false errors expr else errors + in + (exprs, List.rev errors) + | ([Toplevel exprs], []) -> let exprs = List.rev exprs in - let loc = { start = min_point exprs ; stop = max_point exprs } in + let loc = {start = min_point exprs; stop = max_point exprs} in let expr = Seq (loc, exprs) in - let errors = if check then do_check ~toplevel: true errors expr else errors in - exprs, List.rev errors + let errors = + if check then do_check ~toplevel:true errors expr else errors + in + (exprs, List.rev errors) (* Ignore comments *) - | _, - { token = Eol_comment _ | Comment _ ; _ } :: rest -> + | (_, {token = Eol_comment _ | Comment _; _} :: rest) -> parse ~check errors rest stack - | (Expression None | Sequence _ | Toplevel _) :: _, - ({ token = Int _ | String _ | Bytes _ ; _ } as token):: { token = Eol_comment _ | Comment _ ; _ } :: rest - | (Wrapped _ | Unwrapped _) :: _, - ({ token = Open_paren ; _ } as token) - :: { token = Eol_comment _ | Comment _ ; _ } :: rest -> + | ( (Expression None | Sequence _ | Toplevel _) :: _, + ({token = Int _ | String _ | Bytes _; _} as token) + :: {token = Eol_comment _ | Comment _; _} :: rest ) + | ( (Wrapped _ | Unwrapped _) :: _, + ({token = Open_paren; _} as token) + :: {token = Eol_comment _ | Comment _; _} :: rest ) -> parse ~check errors (token :: rest) stack (* Erroneous states *) - | (Wrapped _ | Unwrapped _) :: _ , - ({ token = Open_paren ; _ } as token) - :: { token = Open_paren | Open_brace ; _ } :: rem - | Unwrapped _ :: Expression _ :: _ , - ({ token = Semi | Close_brace | Close_paren ; _ } as token) :: rem - | Expression None :: _ , - ({ token = Semi | Close_brace | Close_paren | Open_paren ; _ } as token) :: rem -> + | ( (Wrapped _ | Unwrapped _) :: _, + ({token = Open_paren; _} as token) + :: {token = Open_paren | Open_brace; _} :: rem ) + | ( Unwrapped _ :: Expression _ :: _, + ({token = Semi | Close_brace | Close_paren; _} as token) :: rem ) + | ( Expression None :: _, + ({token = Semi | Close_brace | Close_paren | Open_paren; _} as token) + :: rem ) -> let errors = Unexpected token :: errors in parse ~check errors rem (* skip *) stack - | (Sequence _ | Toplevel _) :: _ , - ({ token = Semi ; _ } as valid) :: ({ token = Semi ; _ } as token) :: 
rem -> + | ( (Sequence _ | Toplevel _) :: _, + ({token = Semi; _} as valid) :: ({token = Semi; _} as token) :: rem ) -> let errors = Extra token :: errors in - parse ~check errors (valid (* skip *) :: rem) stack - | (Wrapped _ | Unwrapped _) :: _ , - { token = Open_paren ; _ } - :: ({ token = Int _ | String _ | Bytes _ | Annot _ | Close_paren ; _ } as token) :: rem - | (Expression None | Sequence _ | Toplevel _) :: _, - { token = Int _ | String _ | Bytes _ ; _ } :: ({ token = Ident _ | Int _ | String _ | Bytes _ | Annot _ | Close_paren | Open_paren | Open_brace ; _ } as token) :: rem - | Unwrapped (_, _, _, _) :: Toplevel _ :: _, - ({ token = Close_brace ; _ } as token) :: rem - | Unwrapped (_, _, _, _) :: _, - ({ token = Close_paren ; _ } as token) :: rem - | Toplevel _ :: [], - ({ token = Close_paren ; _ } as token) :: rem - | Toplevel _ :: [], - ({ token = Open_paren ; _ } as token) :: rem - | Toplevel _ :: [], - ({ token = Close_brace ; _ } as token) :: rem - | Sequence _ :: _, - ({ token = Open_paren ; _ } as token) :: rem - | Sequence _ :: _, - ({ token = Close_paren ; _ } as token :: rem) - | (Wrapped _ | Unwrapped _) :: _, - ({ token = Open_paren ; _ } as token) :: ({ token = Close_brace | Semi ; _ } :: _ | [] as rem) - | _, - ({ token = Annot _ ; _ } as token) :: rem -> + parse ~check errors ((* skip *) valid :: rem) stack + | ( (Wrapped _ | Unwrapped _) :: _, + {token = Open_paren; _} + :: ( {token = Int _ | String _ | Bytes _ | Annot _ | Close_paren; _} as + token ) + :: rem ) + | ( (Expression None | Sequence _ | Toplevel _) :: _, + {token = Int _ | String _ | Bytes _; _} + :: ( { token = + ( Ident _ + | Int _ + | String _ + | Bytes _ + | Annot _ + | Close_paren + | Open_paren + | Open_brace ); + _ } as token ) + :: rem ) + | ( Unwrapped (_, _, _, _) :: Toplevel _ :: _, + ({token = Close_brace; _} as token) :: rem ) + | (Unwrapped (_, _, _, _) :: _, ({token = Close_paren; _} as token) :: rem) + | ([Toplevel _], ({token = Close_paren; _} as token) :: rem) + | ([Toplevel _], ({token = Open_paren; _} as token) :: rem) + | ([Toplevel _], ({token = Close_brace; _} as token) :: rem) + | (Sequence _ :: _, ({token = Open_paren; _} as token) :: rem) + | (Sequence _ :: _, ({token = Close_paren; _} as token) :: rem) + | ( (Wrapped _ | Unwrapped _) :: _, + ({token = Open_paren; _} as token) + :: (({token = Close_brace | Semi; _} :: _ | []) as rem) ) + | (_, ({token = Annot _; _} as token) :: rem) -> let errors = Unexpected token :: errors in parse ~check errors rem (* skip *) stack - | Wrapped (token, _, _, _) :: _, ([] | { token = Close_brace | Semi ; _ } :: _) -> + | (Wrapped (token, _, _, _) :: _, ([] | {token = Close_brace | Semi; _} :: _)) + -> let errors = Unclosed token :: errors in - let fake = { token with token = Close_paren } in - let tokens = (* insert *) fake :: tokens in + let fake = {token with token = Close_paren} in + let tokens = (* insert *) fake :: tokens in parse ~check errors tokens stack - | (Sequence (token, _) :: _ | Unwrapped _ :: Sequence (token, _) :: _), [] -> + | ((Sequence (token, _) :: _ | Unwrapped _ :: Sequence (token, _) :: _), []) + -> let errors = Unclosed token :: errors in - let fake = { token with token = Close_brace } in - let tokens = (* insert *) fake :: tokens in + let fake = {token with token = Close_brace} in + let tokens = (* insert *) fake :: tokens in parse ~check errors tokens stack (* Valid states *) - | (Toplevel _ | Sequence (_, _)) :: _ , - { token = Ident name ; loc } :: ({ token = Annot _ ; _ } :: _ as rest) -> - let annots, rest = 
annots rest in + | ( (Toplevel _ | Sequence (_, _)) :: _, + {token = Ident name; loc} :: ({token = Annot _; _} :: _ as rest) ) -> + let (annots, rest) = annots rest in let mode = Unwrapped (loc, name, [], annots) in parse ~check errors rest (push_mode mode stack) - | (Expression None | Toplevel _ | Sequence (_, _)) :: _ , - { token = Ident name ; loc } :: rest -> + | ( (Expression None | Toplevel _ | Sequence (_, _)) :: _, + {token = Ident name; loc} :: rest ) -> let mode = Unwrapped (loc, name, [], []) in parse ~check errors rest (push_mode mode stack) - | (Unwrapped _ | Wrapped _) :: _, - { token = Int value ; loc } :: rest - | (Expression None | Sequence _ | Toplevel _) :: _, - { token = Int value ; loc } :: ([] | { token = Semi | Close_brace; _ } :: _ as rest) -> + | ((Unwrapped _ | Wrapped _) :: _, {token = Int value; loc} :: rest) + | ( (Expression None | Sequence _ | Toplevel _) :: _, + {token = Int value; loc} + :: (([] | {token = Semi | Close_brace; _} :: _) as rest) ) -> let expr : node = Int (loc, Z.of_string value) in - let errors = if check then do_check ~toplevel: false errors expr else errors in + let errors = + if check then do_check ~toplevel:false errors expr else errors + in parse ~check errors rest (fill_mode expr stack) - | (Unwrapped _ | Wrapped _) :: _, - { token = String contents ; loc } :: rest - | (Expression None | Sequence _ | Toplevel _) :: _, - { token = String contents ; loc } :: ([] | { token = Semi | Close_brace; _ } :: _ as rest) -> + | ((Unwrapped _ | Wrapped _) :: _, {token = String contents; loc} :: rest) + | ( (Expression None | Sequence _ | Toplevel _) :: _, + {token = String contents; loc} + :: (([] | {token = Semi | Close_brace; _} :: _) as rest) ) -> let expr : node = String (loc, contents) in - let errors = if check then do_check ~toplevel: false errors expr else errors in + let errors = + if check then do_check ~toplevel:false errors expr else errors + in parse ~check errors rest (fill_mode expr stack) - | (Unwrapped _ | Wrapped _) :: _, - { token = Bytes contents ; loc } :: rest - | (Expression None | Sequence _ | Toplevel _) :: _, - { token = Bytes contents ; loc } :: ([] | { token = Semi | Close_brace; _ } :: _ as rest) -> - let errors, contents = if String.length contents mod 2 <> 0 then - Odd_lengthed_bytes loc :: errors, contents ^ "0" - else errors, contents in + | ((Unwrapped _ | Wrapped _) :: _, {token = Bytes contents; loc} :: rest) + | ( (Expression None | Sequence _ | Toplevel _) :: _, + {token = Bytes contents; loc} + :: (([] | {token = Semi | Close_brace; _} :: _) as rest) ) -> + let (errors, contents) = + if String.length contents mod 2 <> 0 then + (Odd_lengthed_bytes loc :: errors, contents ^ "0") + else (errors, contents) + in let bytes = - MBytes.of_hex (`Hex (String.sub contents 2 (String.length contents - 2))) in + MBytes.of_hex + (`Hex (String.sub contents 2 (String.length contents - 2))) + in let expr : node = Bytes (loc, bytes) in - let errors = if check then do_check ~toplevel: false errors expr else errors in + let errors = + if check then do_check ~toplevel:false errors expr else errors + in parse ~check errors rest (fill_mode expr stack) - | Sequence ({ loc = { start ; _ } ; _ }, exprs) :: _ , - { token = Close_brace ; loc = { stop ; _ } } :: rest -> + | ( Sequence ({loc = {start; _}; _}, exprs) :: _, + {token = Close_brace; loc = {stop; _}} :: rest ) -> let exprs = List.rev exprs in - let expr = Micheline.Seq ({ start ; stop }, exprs) in - let errors = if check then do_check ~toplevel: false errors expr else errors in 
+ let expr = Micheline.Seq ({start; stop}, exprs) in + let errors = + if check then do_check ~toplevel:false errors expr else errors + in parse ~check errors rest (fill_mode expr (pop_mode stack)) - | (Sequence _ | Toplevel _) :: _ , - { token = Semi ; _ } :: rest -> + | ((Sequence _ | Toplevel _) :: _, {token = Semi; _} :: rest) -> parse ~check errors rest stack - | Unwrapped ({ start ; stop }, name, exprs, annot) :: Expression _ :: _, - ([] as rest) - | Unwrapped ({ start ; stop }, name, exprs, annot) :: Toplevel _ :: _, - ({ token = Semi ; _ } :: _ | [] as rest) - | Unwrapped ({ start ; stop }, name, exprs, annot) :: Sequence _ :: _ , - ({ token = Close_brace | Semi ; _ } :: _ as rest) - | Wrapped ({ loc = { start ; stop } ; _ }, name, exprs, annot) :: _ , - { token = Close_paren ; _ } :: rest -> + | ( Unwrapped ({start; stop}, name, exprs, annot) :: Expression _ :: _, + ([] as rest) ) + | ( Unwrapped ({start; stop}, name, exprs, annot) :: Toplevel _ :: _, + (({token = Semi; _} :: _ | []) as rest) ) + | ( Unwrapped ({start; stop}, name, exprs, annot) :: Sequence _ :: _, + ({token = Close_brace | Semi; _} :: _ as rest) ) + | ( Wrapped ({loc = {start; stop}; _}, name, exprs, annot) :: _, + {token = Close_paren; _} :: rest ) -> let exprs = List.rev exprs in let stop = if exprs = [] then stop else max_point exprs in - let expr = Micheline.Prim ({ start ; stop }, name, exprs, annot) in - let errors = if check then do_check ~toplevel: false errors expr else errors in + let expr = Micheline.Prim ({start; stop}, name, exprs, annot) in + let errors = + if check then do_check ~toplevel:false errors expr else errors + in parse ~check errors rest (fill_mode expr (pop_mode stack)) - | (Wrapped _ | Unwrapped _) :: _ , - ({ token = Open_paren ; _ } as token) :: { token = Ident name ; _ } :: ({ token = Annot _ ; _ } :: _ as rest) -> - let annots, rest = annots rest in + | ( (Wrapped _ | Unwrapped _) :: _, + ({token = Open_paren; _} as token) + :: {token = Ident name; _} :: ({token = Annot _; _} :: _ as rest) ) -> + let (annots, rest) = annots rest in let mode = Wrapped (token, name, [], annots) in parse ~check errors rest (push_mode mode stack) - | (Wrapped _ | Unwrapped _) :: _ , - ({ token = Open_paren ; _ } as token) :: { token = Ident name ; _ } :: rest -> + | ( (Wrapped _ | Unwrapped _) :: _, + ({token = Open_paren; _} as token) :: {token = Ident name; _} :: rest ) + -> let mode = Wrapped (token, name, [], []) in parse ~check errors rest (push_mode mode stack) - | (Wrapped _ | Unwrapped _) :: _ , - { token = Ident name ; loc } :: rest -> + | ((Wrapped _ | Unwrapped _) :: _, {token = Ident name; loc} :: rest) -> let expr = Micheline.Prim (loc, name, [], []) in - let errors = if check then do_check ~toplevel: false errors expr else errors in + let errors = + if check then do_check ~toplevel:false errors expr else errors + in parse ~check errors rest (fill_mode expr stack) - | (Wrapped _ | Unwrapped _ | Toplevel _ | Sequence _ | Expression None) :: _ , - ({ token = Open_brace ; _ } as token) :: rest -> + | ( (Wrapped _ | Unwrapped _ | Toplevel _ | Sequence _ | Expression None) :: _, + ({token = Open_brace; _} as token) :: rest ) -> let mode = Sequence (token, []) in parse ~check errors rest (push_mode mode stack) + (* indentation checker *) and do_check ?(toplevel = false) errors = function - | Seq ({ start ; stop }, []) as expr -> - if start.column >= stop.column then - Misaligned expr :: errors - else errors - | Prim ({ start ; stop }, _, first :: rest, _) - | Seq ({ start ; stop }, first :: rest) 
as expr -> - let { column = first_column ; line = first_line ; _ } = - min_point [ first ] in - if start.column >= stop.column then - Misaligned expr :: errors - else if not toplevel && start.column >= first_column then + | Seq ({start; stop}, []) as expr -> + if start.column >= stop.column then Misaligned expr :: errors else errors + | ( Prim ({start; stop}, _, first :: rest, _) + | Seq ({start; stop}, first :: rest) ) as expr -> + let {column = first_column; line = first_line; _} = min_point [first] in + if start.column >= stop.column then Misaligned expr :: errors + else if (not toplevel) && start.column >= first_column then Misaligned expr :: errors else (* In a sequence or in the arguments of a primitive, we require all items to be aligned, but we relax the rule to allow consecutive items to be writtem on the same line. *) let rec in_line_or_aligned prev_start_line errors = function - | [] -> errors + | [] -> + errors | expr :: rest -> - let { column ; line = start_line ; _ } = min_point [ expr ] in - let { line = stop_line ; _ } = max_point [ expr ] in + let {column; line = start_line; _} = min_point [expr] in + let {line = stop_line; _} = max_point [expr] in let errors = - if stop_line <> prev_start_line - && column <> first_column then + if stop_line <> prev_start_line && column <> first_column then Misaligned expr :: errors - else - errors in - in_line_or_aligned start_line errors rest in + else errors + in + in_line_or_aligned start_line errors rest + in in_line_or_aligned first_line errors rest - | Prim (_, _, [], _) | String _ | Int _ | Bytes _ -> errors + | Prim (_, _, [], _) | String _ | Int _ | Bytes _ -> + errors let parse_expression ?check tokens = - let result = match tokens with - | ({ token = Open_paren ; _ } as token) :: { token = Ident name ; _ } :: { token = Annot annot ; _ } :: rest -> - let annots, rest = annots rest in + let result = + match tokens with + | ({token = Open_paren; _} as token) + :: {token = Ident name; _} :: {token = Annot annot; _} :: rest -> + let (annots, rest) = annots rest in let mode = Wrapped (token, name, [], annot :: annots) in - parse ?check [] rest [ mode ; Expression None ] - | ({ token = Open_paren ; _ } as token) :: { token = Ident name ; _ } :: rest -> + parse ?check [] rest [mode; Expression None] + | ({token = Open_paren; _} as token) :: {token = Ident name; _} :: rest -> let mode = Wrapped (token, name, [], []) in - parse ?check [] rest [ mode ; Expression None ] + parse ?check [] rest [mode; Expression None] | _ -> - parse ?check [] tokens [ Expression None ] in + parse ?check [] tokens [Expression None] + in match result with - | [ single ], errors -> single, errors - | _ -> assert false + | ([single], errors) -> + (single, errors) + | _ -> + assert false -let parse_toplevel ?check tokens = - parse ?check [] tokens [ Toplevel [] ] +let parse_toplevel ?check tokens = parse ?check [] tokens [Toplevel []] -let print_point ppf { line ; column ; _ } = - Format.fprintf ppf - "At line %d character %d" - line column +let print_point ppf {line; column; _} = + Format.fprintf ppf "At line %d character %d" line column let print_token_kind ppf = function - | Open_paren | Close_paren -> Format.fprintf ppf "parenthesis" - | Open_brace | Close_brace -> Format.fprintf ppf "curly brace" - | String _ -> Format.fprintf ppf "string constant" - | Bytes _ -> Format.fprintf ppf "bytes constant" - | Int _ -> Format.fprintf ppf "integer constant" - | Ident _ -> Format.fprintf ppf "identifier" - | Annot _ -> Format.fprintf ppf "annotation" - | Comment _ | 
Eol_comment _ -> Format.fprintf ppf "comment" - | Semi -> Format.fprintf ppf "semi colon" + | Open_paren | Close_paren -> + Format.fprintf ppf "parenthesis" + | Open_brace | Close_brace -> + Format.fprintf ppf "curly brace" + | String _ -> + Format.fprintf ppf "string constant" + | Bytes _ -> + Format.fprintf ppf "bytes constant" + | Int _ -> + Format.fprintf ppf "integer constant" + | Ident _ -> + Format.fprintf ppf "identifier" + | Annot _ -> + Format.fprintf ppf "annotation" + | Comment _ | Eol_comment _ -> + Format.fprintf ppf "comment" + | Semi -> + Format.fprintf ppf "semi colon" let print_location ppf loc = if loc.start.line = loc.stop.line then if loc.start.column = loc.stop.column then - Format.fprintf ppf + Format.fprintf + ppf "At line %d character %d" - loc.start.line loc.start.column + loc.start.line + loc.start.column else - Format.fprintf ppf + Format.fprintf + ppf "At line %d characters %d to %d" - loc.start.line loc.start.column loc.stop.column + loc.start.line + loc.start.column + loc.stop.column else - Format.fprintf ppf + Format.fprintf + ppf "From line %d character %d to line %d character %d" - loc.start.line loc.start.column loc.stop.line loc.stop.column + loc.start.line + loc.start.column + loc.stop.line + loc.stop.column let no_parsing_error (ast, errors) = - match errors with - | [] -> ok ast - | errors -> Error errors + match errors with [] -> ok ast | errors -> Error errors let () = - register_error_kind `Permanent - ~id: "micheline.parse_error.invalid_utf8_sequence" - ~title: "Micheline parser error: invalid UTF-8 sequence" - ~description: "While parsing a piece of Micheline source, \ - a sequence of bytes that is not valid UTF-8 \ - was encountered." - ~pp:(fun ppf (point, str) -> Format.fprintf ppf "%a, invalid UTF-8 sequence %S" print_point point str) + register_error_kind + `Permanent + ~id:"micheline.parse_error.invalid_utf8_sequence" + ~title:"Micheline parser error: invalid UTF-8 sequence" + ~description: + "While parsing a piece of Micheline source, a sequence of bytes that is \ + not valid UTF-8 was encountered." + ~pp:(fun ppf (point, str) -> + Format.fprintf ppf "%a, invalid UTF-8 sequence %S" print_point point str) Data_encoding.(obj2 (req "point" point_encoding) (req "sequence" string)) - (function Invalid_utf8_sequence (point, str) -> Some (point, str) | _ -> None) + (function + | Invalid_utf8_sequence (point, str) -> Some (point, str) | _ -> None) (fun (point, str) -> Invalid_utf8_sequence (point, str)) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.unexpected_character" - ~title: "Micheline parser error: unexpected character" - ~description: "While parsing a piece of Micheline source, \ - an unexpected character was encountered." - ~pp:(fun ppf (point, str) -> Format.fprintf ppf "%a, unexpected character %s" print_point point str) + register_error_kind + `Permanent + ~id:"micheline.parse_error.unexpected_character" + ~title:"Micheline parser error: unexpected character" + ~description: + "While parsing a piece of Micheline source, an unexpected character was \ + encountered." 
+ ~pp:(fun ppf (point, str) -> + Format.fprintf ppf "%a, unexpected character %s" print_point point str) Data_encoding.(obj2 (req "point" point_encoding) (req "character" string)) - (function Unexpected_character (point, str) -> Some (point, str) | _ -> None) + (function + | Unexpected_character (point, str) -> Some (point, str) | _ -> None) (fun (point, str) -> Unexpected_character (point, str)) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.undefined_escape_sequence" - ~title: "Micheline parser error: undefined escape sequence" - ~description: "While parsing a piece of Micheline source, \ - an unexpected escape sequence was encountered in a string." - ~pp:(fun ppf (point, str) -> Format.fprintf ppf "%a, undefined escape sequence \"%s\"" print_point point str) + register_error_kind + `Permanent + ~id:"micheline.parse_error.undefined_escape_sequence" + ~title:"Micheline parser error: undefined escape sequence" + ~description: + "While parsing a piece of Micheline source, an unexpected escape \ + sequence was encountered in a string." + ~pp:(fun ppf (point, str) -> + Format.fprintf + ppf + "%a, undefined escape sequence \"%s\"" + print_point + point + str) Data_encoding.(obj2 (req "point" point_encoding) (req "sequence" string)) - (function Undefined_escape_sequence (point, str) -> Some (point, str) | _ -> None) + (function + | Undefined_escape_sequence (point, str) -> Some (point, str) | _ -> None) (fun (point, str) -> Undefined_escape_sequence (point, str)) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.missing_break_after_number" - ~title: "Micheline parser error: missing break after number" - ~description: "While parsing a piece of Micheline source, \ - a number was not visually separated from \ - its follower token, leading to misreadability." - ~pp:(fun ppf point -> Format.fprintf ppf "%a, missing break after number" print_point point) + register_error_kind + `Permanent + ~id:"micheline.parse_error.missing_break_after_number" + ~title:"Micheline parser error: missing break after number" + ~description: + "While parsing a piece of Micheline source, a number was not visually \ + separated from its follower token, leading to misreadability." + ~pp:(fun ppf point -> + Format.fprintf ppf "%a, missing break after number" print_point point) Data_encoding.(obj1 (req "point" point_encoding)) (function Missing_break_after_number point -> Some point | _ -> None) (fun point -> Missing_break_after_number point) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.unterminated_string" - ~title: "Micheline parser error: unterminated string" - ~description: "While parsing a piece of Micheline source, \ - a string was not terminated." - ~pp:(fun ppf loc -> Format.fprintf ppf "%a, unterminated string" print_location loc) + register_error_kind + `Permanent + ~id:"micheline.parse_error.unterminated_string" + ~title:"Micheline parser error: unterminated string" + ~description: + "While parsing a piece of Micheline source, a string was not terminated." + ~pp:(fun ppf loc -> + Format.fprintf ppf "%a, unterminated string" print_location loc) Data_encoding.(obj1 (req "location" location_encoding)) (function Unterminated_string loc -> Some loc | _ -> None) (fun loc -> Unterminated_string loc) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.unterminated_integer" - ~title: "Micheline parser error: unterminated integer" - ~description: "While parsing a piece of Micheline source, \ - an integer was not terminated." 
- ~pp:(fun ppf loc -> Format.fprintf ppf "%a, unterminated integer" print_location loc) + register_error_kind + `Permanent + ~id:"micheline.parse_error.unterminated_integer" + ~title:"Micheline parser error: unterminated integer" + ~description: + "While parsing a piece of Micheline source, an integer was not \ + terminated." + ~pp:(fun ppf loc -> + Format.fprintf ppf "%a, unterminated integer" print_location loc) Data_encoding.(obj1 (req "location" location_encoding)) (function Unterminated_integer loc -> Some loc | _ -> None) (fun loc -> Unterminated_integer loc) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.odd_lengthed_bytes" - ~title: "Micheline parser error: odd lengthed bytes" - ~description: "While parsing a piece of Micheline source, the \ - length of a byte sequence (0x...) was not a \ - multiple of two, leaving a trailing half byte." - ~pp:(fun ppf loc -> Format.fprintf ppf "%a, odd_lengthed bytes" print_location loc) + register_error_kind + `Permanent + ~id:"micheline.parse_error.odd_lengthed_bytes" + ~title:"Micheline parser error: odd lengthed bytes" + ~description: + "While parsing a piece of Micheline source, the length of a byte \ + sequence (0x...) was not a multiple of two, leaving a trailing half \ + byte." + ~pp:(fun ppf loc -> + Format.fprintf ppf "%a, odd_lengthed bytes" print_location loc) Data_encoding.(obj1 (req "location" location_encoding)) (function Odd_lengthed_bytes loc -> Some loc | _ -> None) (fun loc -> Odd_lengthed_bytes loc) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.unterminated_comment" - ~title: "Micheline parser error: unterminated comment" - ~description: "While parsing a piece of Micheline source, \ - a commentX was not terminated." - ~pp:(fun ppf loc -> Format.fprintf ppf "%a, unterminated comment" print_location loc) + register_error_kind + `Permanent + ~id:"micheline.parse_error.unterminated_comment" + ~title:"Micheline parser error: unterminated comment" + ~description: + "While parsing a piece of Micheline source, a comment was not \ + terminated." + ~pp:(fun ppf loc -> + Format.fprintf ppf "%a, unterminated comment" print_location loc) Data_encoding.(obj1 (req "location" location_encoding)) (function Unterminated_comment loc -> Some loc | _ -> None) (fun loc -> Unterminated_comment loc) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.annotation_exceeds_max_length" - ~title: "Micheline parser error: annotation exceeds max length" - ~description: (Format.sprintf - "While parsing a piece of Micheline source, \ - an annotation exceeded the maximum length (%d)." max_annot_length) - ~pp:(fun ppf loc -> Format.fprintf ppf "%a, annotation exceeded maximum length (%d chars)" - print_location - loc max_annot_length) + register_error_kind + `Permanent + ~id:"micheline.parse_error.annotation_exceeds_max_length" + ~title:"Micheline parser error: annotation exceeds max length" + ~description: + (Format.sprintf + "While parsing a piece of Micheline source, an annotation exceeded \ + the maximum length (%d)."
+ max_annot_length) + ~pp:(fun ppf loc -> + Format.fprintf + ppf + "%a, annotation exceeded maximum length (%d chars)" + print_location + loc + max_annot_length) Data_encoding.(obj1 (req "location" location_encoding)) (function Annotation_length loc -> Some loc | _ -> None) (fun loc -> Annotation_length loc) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.unclosed_token" - ~title: "Micheline parser error: unclosed token" - ~description: "While parsing a piece of Micheline source, \ - a parenthesis or a brace was unclosed." + register_error_kind + `Permanent + ~id:"micheline.parse_error.unclosed_token" + ~title:"Micheline parser error: unclosed token" + ~description: + "While parsing a piece of Micheline source, a parenthesis or a brace \ + was unclosed." ~pp:(fun ppf (loc, token) -> - Format.fprintf ppf "%a, unclosed %a" print_location loc print_token_kind token) - Data_encoding.(obj2 - (req "location"location_encoding) - (req "token" token_value_encoding)) - (function Unclosed { loc ; token } -> Some (loc, token) | _ -> None) - (fun (loc, token) -> Unclosed { loc ; token }) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.unexpected_token" - ~title: "Micheline parser error: unexpected token" - ~description: "While parsing a piece of Micheline source, \ - an unexpected token was encountered." + Format.fprintf + ppf + "%a, unclosed %a" + print_location + loc + print_token_kind + token) + Data_encoding.( + obj2 + (req "location" location_encoding) + (req "token" token_value_encoding)) + (function Unclosed {loc; token} -> Some (loc, token) | _ -> None) + (fun (loc, token) -> Unclosed {loc; token}) ; + register_error_kind + `Permanent + ~id:"micheline.parse_error.unexpected_token" + ~title:"Micheline parser error: unexpected token" + ~description: + "While parsing a piece of Micheline source, an unexpected token was \ + encountered." ~pp:(fun ppf (loc, token) -> - Format.fprintf ppf "%a, unexpected %a" print_location loc print_token_kind token) - Data_encoding.(obj2 - (req "location"location_encoding) - (req "token" token_value_encoding)) - (function Unexpected { loc ; token } -> Some (loc, token) | _ -> None) - (fun (loc, token) -> Unexpected { loc ; token }) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.extra_token" - ~title: "Micheline parser error: extra token" - ~description: "While parsing a piece of Micheline source, \ - an extra semi colon or parenthesis was encountered." + Format.fprintf + ppf + "%a, unexpected %a" + print_location + loc + print_token_kind + token) + Data_encoding.( + obj2 + (req "location" location_encoding) + (req "token" token_value_encoding)) + (function Unexpected {loc; token} -> Some (loc, token) | _ -> None) + (fun (loc, token) -> Unexpected {loc; token}) ; + register_error_kind + `Permanent + ~id:"micheline.parse_error.extra_token" + ~title:"Micheline parser error: extra token" + ~description: + "While parsing a piece of Micheline source, an extra semi colon or \ + parenthesis was encountered." 
~pp:(fun ppf (loc, token) -> - Format.fprintf ppf "%a, extra %a" print_location loc print_token_kind token) - Data_encoding.(obj2 - (req "location"location_encoding) - (req "token" token_value_encoding)) - (function Extra { loc ; token } -> Some (loc, token) | _ -> None) - (fun (loc, token) -> Extra { loc ; token }) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.misaligned_node" - ~title: "Micheline parser error: misaligned node" - ~description: "While parsing a piece of Micheline source, \ - an expression was not aligned with its \ - siblings of the same mother application \ - or sequence." + Format.fprintf + ppf + "%a, extra %a" + print_location + loc + print_token_kind + token) + Data_encoding.( + obj2 + (req "location" location_encoding) + (req "token" token_value_encoding)) + (function Extra {loc; token} -> Some (loc, token) | _ -> None) + (fun (loc, token) -> Extra {loc; token}) ; + register_error_kind + `Permanent + ~id:"micheline.parse_error.misaligned_node" + ~title:"Micheline parser error: misaligned node" + ~description: + "While parsing a piece of Micheline source, an expression was not \ + aligned with its siblings of the same mother application or sequence." + ~pp:(fun ppf node -> + Format.fprintf + ppf + "%a, misaligned expression" + print_location + (location node)) + Data_encoding.(obj1 (req "expression" node_encoding)) + (function Misaligned node -> Some node | _ -> None) + (fun node -> Misaligned node) ; - register_error_kind `Permanent - ~id: "micheline.parse_error.empty_expression" - ~title: "Micheline parser error: empty_expression" - ~description: "Tried to interpret an empty piece or \ - Micheline source as a single expression." + register_error_kind + `Permanent + ~id:"micheline.parse_error.empty_expression" + ~title:"Micheline parser error: empty_expression" + ~description: + "Tried to interpret an empty piece of Micheline source as a single \ + expression."
~pp:(fun ppf () -> Format.fprintf ppf "empty expression") Data_encoding.empty (function Empty -> Some () | _ -> None) diff --git a/src/lib_micheline/micheline_parser.mli b/src/lib_micheline/micheline_parser.mli index 2e50f4cd4cb76fd9b583e7fa61f7b9cdd41dcaa4..5a339856e8a002e468e3657290a4ec9838eee183 100644 --- a/src/lib_micheline/micheline_parser.mli +++ b/src/lib_micheline/micheline_parser.mli @@ -29,17 +29,11 @@ type 'a parsing_result = 'a * error list val no_parsing_error : 'a parsing_result -> 'a tzresult -type point = - { point : int ; - byte : int ; - line : int ; - column : int } +type point = {point : int; byte : int; line : int; column : int} val point_zero : point -type location = - { start : point ; - stop : point } +type location = {start : point; stop : point} val location_zero : location @@ -56,12 +50,12 @@ type token_value = | Comment of string | Eol_comment of string | Semi - | Open_paren | Close_paren - | Open_brace | Close_brace + | Open_paren + | Close_paren + | Open_brace + | Close_brace -type token = - { token : token_value ; - loc : location } +type token = {token : token_value; loc : location} val tokenize : string -> token list parsing_result @@ -78,18 +72,31 @@ val max_annot_length : int val node_encoding : node Data_encoding.encoding type error += Invalid_utf8_sequence of point * string + type error += Unexpected_character of point * string + type error += Undefined_escape_sequence of point * string + type error += Missing_break_after_number of point + type error += Unterminated_string of location + type error += Unterminated_integer of location + type error += Odd_lengthed_bytes of location + type error += Unterminated_comment of location + type error += Unclosed of token + type error += Unexpected of token + type error += Extra of token + type error += Misaligned of node + type error += Empty + type error += Annotation_length of location val parse_toplevel : ?check:bool -> token list -> node list parsing_result diff --git a/src/lib_micheline/micheline_printer.ml b/src/lib_micheline/micheline_printer.ml index eb2b0176b5bb3883a0efcf1fc6101e85767ef5f5..322790f989d9852cb20faa2dd51003d1a744876d 100644 --- a/src/lib_micheline/micheline_printer.ml +++ b/src/lib_micheline/micheline_printer.ml @@ -25,15 +25,12 @@ open Micheline -type location = { comment : string option } +type location = {comment : string option} type node = (location, string) Micheline.node -let printable - ?(comment = (fun _ -> None)) - map_prim expr = - let map_loc loc = - { comment = comment loc } in +let printable ?(comment = fun _ -> None) map_prim expr = + let map_loc loc = {comment = comment loc} in map_node map_loc map_prim (root expr) let print_comment ppf text = @@ -41,14 +38,22 @@ let print_comment ppf text = let print_string ppf text = Format.fprintf ppf "\"" ; - String.iter (function - | '"' -> Format.fprintf ppf "\\\"" - | '\n' -> Format.fprintf ppf "\\n" - | '\r' -> Format.fprintf ppf "\\r" - | '\b' -> Format.fprintf ppf "\\b" - | '\t' -> Format.fprintf ppf "\\t" - | '\\' -> Format.fprintf ppf "\\\\" - | c -> Format.fprintf ppf "%c" c) + String.iter + (function + | '"' -> + Format.fprintf ppf "\\\"" + | '\n' -> + Format.fprintf ppf "\\n" + | '\r' -> + Format.fprintf ppf "\\r" + | '\b' -> + Format.fprintf ppf "\\b" + | '\t' -> + Format.fprintf ppf "\\t" + | '\\' -> + Format.fprintf ppf "\\\\" + | c -> + Format.fprintf ppf "%c" c) text ; Format.fprintf ppf "\"" @@ -57,113 +62,142 @@ let print_annotations = let preformat root = let preformat_loc = function - | { comment = None } -> + | 
{comment = None} -> (false, 0) - | { comment = Some text } -> - (String.contains text '\n', String.length text + 1) in + | {comment = Some text} -> + (String.contains text '\n', String.length text + 1) + in let preformat_annots = function - | [] -> 0 - | annots -> String.length (String.concat " " annots) + 2 in + | [] -> + 0 + | annots -> + String.length (String.concat " " annots) + 2 + in let rec preformat_expr = function | Int (loc, value) -> - let cml, csz = preformat_loc loc in + let (cml, csz) = preformat_loc loc in Int ((cml, String.length (Z.to_string value) + csz, loc), value) | String (loc, value) -> - let cml, csz = preformat_loc loc in + let (cml, csz) = preformat_loc loc in String ((cml, String.length value + csz, loc), value) | Bytes (loc, value) -> - let cml, csz = preformat_loc loc in - Bytes ((cml, MBytes.length value * 2 + 2 + csz, loc), value) + let (cml, csz) = preformat_loc loc in + Bytes ((cml, (MBytes.length value * 2) + 2 + csz, loc), value) | Prim (loc, name, items, annots) -> - let cml, csz = preformat_loc loc in + let (cml, csz) = preformat_loc loc in let asz = preformat_annots annots in let items = List.map preformat_expr items in - let ml, sz = + let (ml, sz) = List.fold_left (fun (tml, tsz) e -> - let (ml, sz, _) = location e in - (tml || ml, tsz + 1 + sz)) + let (ml, sz, _) = location e in + (tml || ml, tsz + 1 + sz)) (cml, String.length name + csz + asz) - items in + items + in Prim ((ml, sz, loc), name, items, annots) | Seq (loc, items) -> - let cml, csz = preformat_loc loc in + let (cml, csz) = preformat_loc loc in let items = List.map preformat_expr items in - let ml, sz = + let (ml, sz) = List.fold_left (fun (tml, tsz) e -> - let (ml, sz, _) = location e in - (tml || ml, tsz + 3 + sz)) + let (ml, sz, _) = location e in + (tml || ml, tsz + 3 + sz)) (cml, 4 + csz) - items in - Seq ((ml, sz, loc), items) in + items + in + Seq ((ml, sz, loc), items) + in preformat_expr root let rec print_expr_unwrapped ppf = function - | Prim ((ml, s, { comment }), name, args, annot) -> - let name = match annot with - | [] -> name + | Prim ((ml, s, {comment}), name, args, annot) -> + let name = + match annot with + | [] -> + name | annots -> - Format.asprintf "%s @[<h>%a@]" name print_annotations annots in - if not ml && s < 80 then begin - if args = [] then - Format.fprintf ppf "%s" name + Format.asprintf "%s @[<h>%a@]" name print_annotations annots + in + if (not ml) && s < 80 then ( + if args = [] then Format.fprintf ppf "%s" name else - Format.fprintf ppf "@[<h>%s %a@]" name (Format.pp_print_list ~pp_sep:Format.pp_print_space print_expr) args ; - begin match comment with - | None -> () - | Some text -> Format.fprintf ppf "@ /* %s */" text - end ; - end else begin - if args = [] then - Format.fprintf ppf "%s" name + Format.fprintf + ppf + "@[<h>%s %a@]" + name + (Format.pp_print_list ~pp_sep:Format.pp_print_space print_expr) + args ; + match comment with + | None -> + () + | Some text -> + Format.fprintf ppf "@ /* %s */" text ) + else ( + if args = [] then Format.fprintf ppf "%s" name else if String.length name <= 4 then - Format.fprintf ppf "%s @[<v 0>%a@]" name (Format.pp_print_list print_expr) args + Format.fprintf + ppf + "%s @[<v 0>%a@]" + name + (Format.pp_print_list print_expr) + args else - Format.fprintf ppf "@[<v 2>%s@,%a@]" name (Format.pp_print_list print_expr) args ; - begin match comment with - | None -> () - | Some comment -> Format.fprintf ppf "@ %a" print_comment comment - end - end - | Int ((_, _, { comment }), value) -> - begin match comment with - | 
None -> Format.fprintf ppf "%s" (Z.to_string value) - | Some comment -> Format.fprintf ppf "%s@ %a" (Z.to_string value) print_comment comment - end - | String ((_, _, { comment }), value) -> - begin match comment with - | None -> print_string ppf value - | Some comment -> Format.fprintf ppf "%a@ %a" print_string value print_comment comment - end - | Bytes ((_, _, { comment }), value) -> - begin match comment with - | None -> Format.fprintf ppf "0x%a" MBytes.pp_hex value - | Some comment -> Format.fprintf ppf "0x%a@ %a" MBytes.pp_hex value print_comment comment - end - | Seq ((_, _, { comment = None }), []) -> + Format.fprintf + ppf + "@[<v 2>%s@,%a@]" + name + (Format.pp_print_list print_expr) + args ; + match comment with + | None -> + () + | Some comment -> + Format.fprintf ppf "@ %a" print_comment comment ) + | Int ((_, _, {comment}), value) -> ( + match comment with + | None -> + Format.fprintf ppf "%s" (Z.to_string value) + | Some comment -> + Format.fprintf ppf "%s@ %a" (Z.to_string value) print_comment comment ) + | String ((_, _, {comment}), value) -> ( + match comment with + | None -> + print_string ppf value + | Some comment -> + Format.fprintf ppf "%a@ %a" print_string value print_comment comment ) + | Bytes ((_, _, {comment}), value) -> ( + match comment with + | None -> + Format.fprintf ppf "0x%a" MBytes.pp_hex value + | Some comment -> + Format.fprintf ppf "0x%a@ %a" MBytes.pp_hex value print_comment comment + ) + | Seq ((_, _, {comment = None}), []) -> Format.fprintf ppf "{}" - | Seq ((ml, s, { comment }), items) -> - if not ml && s < 80 then - Format.fprintf ppf "{ @[<h 0>" - else - Format.fprintf ppf "{ @[<v 0>" ; - begin match comment, items with - | None, _ -> () - | Some comment, [] -> Format.fprintf ppf "%a" print_comment comment - | Some comment, _ -> Format.fprintf ppf "%a@ " print_comment comment - end ; + | Seq ((ml, s, {comment}), items) -> + if (not ml) && s < 80 then Format.fprintf ppf "{ @[<h 0>" + else Format.fprintf ppf "{ @[<v 0>" ; + ( match (comment, items) with + | (None, _) -> + () + | (Some comment, []) -> + Format.fprintf ppf "%a" print_comment comment + | (Some comment, _) -> + Format.fprintf ppf "%a@ " print_comment comment ) ; Format.pp_print_list ~pp_sep:(fun ppf () -> Format.fprintf ppf " ;@ ") print_expr_unwrapped - ppf items ; + ppf + items ; Format.fprintf ppf "@] }" and print_expr ppf = function - | Prim (_, _, _ :: _, _) - | Prim (_, _, [], _ :: _) as expr -> + | (Prim (_, _, _ :: _, _) | Prim (_, _, [], _ :: _)) as expr -> Format.fprintf ppf "(%a)" print_expr_unwrapped expr - | expr -> print_expr_unwrapped ppf expr + | expr -> + print_expr_unwrapped ppf expr let with_unbounded_formatter ppf f x = let buf = Buffer.create 10000 in @@ -174,7 +208,11 @@ let with_unbounded_formatter ppf f x = f sppf x ; Format.fprintf sppf "%!" 
; let lines = String.split_on_char '\n' (Buffer.contents buf) in - Format.pp_print_list ~pp_sep:Format.pp_force_newline Format.pp_print_string ppf lines + Format.pp_print_list + ~pp_sep:Format.pp_force_newline + Format.pp_print_string + ppf + lines let print_expr_unwrapped ppf expr = with_unbounded_formatter ppf print_expr_unwrapped (preformat expr) diff --git a/src/lib_micheline/micheline_printer.mli b/src/lib_micheline/micheline_printer.mli index 05b5a38be7168c2c5241ee9b61726b51e2ff0109..54b725021be5cf7f50cd0f706c3d4336caa8b226 100644 --- a/src/lib_micheline/micheline_printer.mli +++ b/src/lib_micheline/micheline_printer.mli @@ -27,13 +27,17 @@ open Micheline val print_string : Format.formatter -> string -> unit -type location = { comment : string option } +type location = {comment : string option} type node = (location, string) Micheline.node val print_expr : Format.formatter -> (location, string) Micheline.node -> unit -val print_expr_unwrapped : Format.formatter -> (location, string) Micheline.node -> unit + +val print_expr_unwrapped : + Format.formatter -> (location, string) Micheline.node -> unit val printable : - ?comment: (int -> string option) -> - ('p -> string) -> 'p canonical -> (location, string) Micheline.node + ?comment:(int -> string option) -> + ('p -> string) -> + 'p canonical -> + (location, string) Micheline.node diff --git a/src/lib_micheline/test/assert.ml b/src/lib_micheline/test/assert.ml index a1276d310c7be5cc6a44045e4fa82daf1d9339ed..4a7a861513845ba2fd7062b6e5bb946e82377bae 100644 --- a/src/lib_micheline/test/assert.ml +++ b/src/lib_micheline/test/assert.ml @@ -26,25 +26,33 @@ (* Mini compatibility layer to avoid circular dependency *) module Compat = struct let failwith fmt = Format.kasprintf (fun s -> Lwt.return_error s) fmt + let return_unit = Lwt.return_ok () - let (>>=) = Lwt.bind - let (>>=?) v f = - v >>= function - | Error _ as err -> Lwt.return err - | Ok v -> f v + + let ( >>= ) = Lwt.bind + + let ( >>=? 
) v f = + v >>= function Error _ as err -> Lwt.return err | Ok v -> f v + let rec iter2_p f l1 l2 = - match l1, l2 with - | [], [] -> return_unit - | [], _ | _, [] -> invalid_arg "Error_monad.iter2_p" - | x1 :: l1 , x2 :: l2 -> + match (l1, l2) with + | ([], []) -> + return_unit + | ([], _) | (_, []) -> + invalid_arg "Error_monad.iter2_p" + | (x1 :: l1, x2 :: l2) -> ( let tx = f x1 x2 and tl = iter2_p f l1 l2 in - tx >>= fun tx_res -> - tl >>= fun tl_res -> - match tx_res, tl_res with - | Ok (), Ok () -> Lwt.return_ok () - | Error exn1, Error exn2 -> failwith "%s -- %s" exn1 exn2 - | Ok (), Error exn - | Error exn, Ok () -> Lwt.return_error exn + tx + >>= fun tx_res -> + tl + >>= fun tl_res -> + match (tx_res, tl_res) with + | (Ok (), Ok ()) -> + Lwt.return_ok () + | (Error exn1, Error exn2) -> + failwith "%s -- %s" exn1 exn2 + | (Ok (), Error exn) | (Error exn, Ok ()) -> + Lwt.return_error exn ) end open Compat @@ -52,46 +60,73 @@ open Compat let fail loc printer given expected msg = failwith "@[<v 2> On %s : %s@ @[Given:\t%a@]@ @[Expected:\t%a@]@]" - loc msg printer given printer expected + loc + msg + printer + given + printer + expected let default_printer fmt _ = Format.fprintf fmt "" -let equal ~loc ?(eq=(=)) ?(printer=default_printer) ?(msg="") given expected = - if not (eq given expected) then - fail loc printer given expected msg - else - return_unit +let equal ~loc ?(eq = ( = )) ?(printer = default_printer) ?(msg = "") given + expected = + if not (eq given expected) then fail loc printer given expected msg + else return_unit -let not_equal ~loc ?(eq=(=)) ?(printer=default_printer) ?(msg="") given expected = - if eq given expected then - fail loc printer given expected msg - else - return_unit +let not_equal ~loc ?(eq = ( = )) ?(printer = default_printer) ?(msg = "") given + expected = + if eq given expected then fail loc printer given expected msg + else return_unit let pp_tokens fmt tokens = let token_value_printer fmt token_value = - Format.fprintf fmt "@[%s@]" + Format.fprintf + fmt + "@[%s@]" (let open Micheline_parser in - match token_value with - String s -> Format.sprintf "String %S" s - | Bytes s -> Format.sprintf "Bytes %S" s - | Int s -> Format.sprintf "Int %S" s - | Ident s -> Format.sprintf "Ident %S" s - | Annot s -> Format.sprintf "Annot %S" s - | Comment s -> Format.sprintf "Comment %S" s - | Eol_comment s -> Format.sprintf "Eol_comment %S" s - | Semi -> Format.sprintf "Semi" - | Open_paren -> Format.sprintf "Open_paren" - | Close_paren -> Format.sprintf "Close_paren" - | Open_brace -> Format.sprintf "Open_brace" - | Close_brace -> Format.sprintf "Close_brace" - ) in - Format.fprintf fmt "%a" - (Format.pp_print_list token_value_printer) - tokens + match token_value with + | String s -> + Format.sprintf "String %S" s + | Bytes s -> + Format.sprintf "Bytes %S" s + | Int s -> + Format.sprintf "Int %S" s + | Ident s -> + Format.sprintf "Ident %S" s + | Annot s -> + Format.sprintf "Annot %S" s + | Comment s -> + Format.sprintf "Comment %S" s + | Eol_comment s -> + Format.sprintf "Eol_comment %S" s + | Semi -> + Format.sprintf "Semi" + | Open_paren -> + Format.sprintf "Open_paren" + | Close_paren -> + Format.sprintf "Close_paren" + | Open_brace -> + Format.sprintf "Open_brace" + | Close_brace -> + Format.sprintf "Close_brace") + in + Format.fprintf fmt "%a" (Format.pp_print_list token_value_printer) tokens let equal_tokens ~loc given expected = - equal ~loc ~eq:(=) ~printer:pp_tokens ~msg:"Tokens are not equal" given expected + equal + ~loc + ~eq:( = ) + 
~printer:pp_tokens + ~msg:"Tokens are not equal" + given + expected let not_equal_tokens ~loc given expected = - not_equal ~loc ~eq:(=) ~printer:pp_tokens ~msg:"Tokens are equal" given expected + not_equal + ~loc + ~eq:( = ) + ~printer:pp_tokens + ~msg:"Tokens are equal" + given + expected diff --git a/src/lib_micheline/test/test_parser.ml b/src/lib_micheline/test/test_parser.ml index 278bf9d7466db3f73c98ba42c52199bc5b2f017d..8c826478c84e4f5fbf1a50970d610838437db8ba 100644 --- a/src/lib_micheline/test/test_parser.ml +++ b/src/lib_micheline/test/test_parser.ml @@ -31,188 +31,341 @@ open Assert.Compat let assert_tokenize ~loc given expected = match Micheline_parser.tokenize given with - | tokens, [] -> - let tokens_got = - List.map (fun x -> x.Micheline_parser.token) tokens - in + | (tokens, []) -> + let tokens_got = List.map (fun x -> x.Micheline_parser.token) tokens in Assert.equal_tokens ~loc tokens_got expected - | _, _ -> failwith "%s - Cannot tokenize %s" loc given + | (_, _) -> + failwith "%s - Cannot tokenize %s" loc given let assert_tokenize_error ~loc given expected = match Micheline_parser.tokenize given with - | tokens, [] -> - let tokens_got = - List.map (fun x -> x.Micheline_parser.token) tokens - in + | (tokens, []) -> + let tokens_got = List.map (fun x -> x.Micheline_parser.token) tokens in Assert.not_equal_tokens ~loc tokens_got expected - | _, _ -> return_unit + | (_, _) -> + return_unit let test_tokenize_basic () = (* String *) - assert_tokenize ~loc:__LOC__ "\"abc\"" [ String "abc" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "\"abc\t\"" [ String "abc\t" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "\"abc\b\"" [ String "abc\b" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "\"abc\\n\"" [ String "abc\n" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "\"abc\\r\"" [ String "abc\r" ] >>=? fun () -> + assert_tokenize ~loc:__LOC__ "\"abc\"" [String "abc"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "\"abc\t\"" [String "abc\t"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "\"abc\b\"" [String "abc\b"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "\"abc\\n\"" [String "abc\n"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "\"abc\\r\"" [String "abc\r"] + >>=? fun () -> (*fail*) - assert_tokenize_error ~loc:__LOC__ "\"abc\n\"" [ String "abc\n" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "\"abc\\\"" [ String "abc\\" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "\"abc\"" [ String "abc\n" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "\"abc\r\"" [ String "abc\r" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "abc\r" [ String "abc\r" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "\"abc\"\r" [ String "abc\r" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "\"abc" [ String "abc" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "abc\"" [ String "abc" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "\"\"\"" [ String "" ] >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "\"abc\n\"" [String "abc\n"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "\"abc\\\"" [String "abc\\"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "\"abc\"" [String "abc\n"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "\"abc\r\"" [String "abc\r"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "abc\r" [String "abc\r"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "\"abc\"\r" [String "abc\r"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "\"abc" [String "abc"] + >>=? 
fun () -> + assert_tokenize_error ~loc:__LOC__ "abc\"" [String "abc"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "\"\"\"" [String ""] + >>=? fun () -> (* Bytes *) - assert_tokenize ~loc:__LOC__ "0xabc" [ Bytes "0xabc" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "0x" [ Bytes "0x" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "0x1" [ Bytes "0x1" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "xabc" [ Bytes "xabc" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "1xabc" [ Bytes "1xabc" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "1c" [ Bytes "1c" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "0c" [ Bytes "0c" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "0xx" [ Bytes "0xx" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "0b" [ Bytes "0b" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "0xg" [ Bytes "0xg" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "0X" [ Bytes "0X" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "1x" [ Bytes "1x" ] >>=? fun () -> + assert_tokenize ~loc:__LOC__ "0xabc" [Bytes "0xabc"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "0x" [Bytes "0x"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "0x1" [Bytes "0x1"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "xabc" [Bytes "xabc"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "1xabc" [Bytes "1xabc"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "1c" [Bytes "1c"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "0c" [Bytes "0c"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "0xx" [Bytes "0xx"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "0b" [Bytes "0b"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "0xg" [Bytes "0xg"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "0X" [Bytes "0X"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "1x" [Bytes "1x"] + >>=? fun () -> (* Int *) - assert_tokenize ~loc:__LOC__ "10" [ Int "10" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "0" [ Int "0" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "00" [ Int "00" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "001" [ Int "001" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "-0" [ Int "0" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "-1" [ Int "-1" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "1" [ Int "1" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "-10" [ Int "-10" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ ".1000" [ Int ".1000" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "10_00" [ Int "10_00" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "1,000" [ Int "1,000" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "1000.000" [ Int "1000.000" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "-0" [ Int "-0" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "--0" [ Int "0" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "+0" [ Int "0" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "a" [ Int "a" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "0a" [ Int "0a" ] >>=? fun () -> + assert_tokenize ~loc:__LOC__ "10" [Int "10"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "0" [Int "0"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "00" [Int "00"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "001" [Int "001"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "-0" [Int "0"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "-1" [Int "-1"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "1" [Int "1"] + >>=? 
fun () -> + assert_tokenize ~loc:__LOC__ "-10" [Int "-10"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ ".1000" [Int ".1000"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "10_00" [Int "10_00"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "1,000" [Int "1,000"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "1000.000" [Int "1000.000"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "-0" [Int "-0"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "--0" [Int "0"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "+0" [Int "0"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "a" [Int "a"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "0a" [Int "0a"] + >>=? fun () -> (* Ident *) - assert_tokenize ~loc:__LOC__ "string" [ Ident "string" ] >>=? fun () -> + assert_tokenize ~loc:__LOC__ "string" [Ident "string"] + >>=? fun () -> (* Annotation *) - assert_tokenize ~loc:__LOC__ "@my_pair" [ Annot "@my_pair" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "@@my_pair" [ Annot "@@my_pair" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "$t" [ Annot "$t" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "&t" [ Annot "&t" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ ":t" [ Annot ":t" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ ":_" [ Annot ":_" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ ":0" [ Annot ":0" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ ":%" [ Annot ":%" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ ":%%" [ Annot ":%%" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ ":%@" [ Annot ":%@" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ ":%@_" [ Annot ":%@_" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ ":%@_0" [ Annot ":%@_0" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "%from" [ Annot "%from" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "%@from" [ Annot "%@from" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "%from_a" [ Annot "%from_a" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "%from.a" [ Annot "%from.a" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "%From.a" [ Annot "%From.a" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "%0From.a" [ Annot "%0From.a" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "?t" [ Annot "?t" ] >>=? fun () -> + assert_tokenize ~loc:__LOC__ "@my_pair" [Annot "@my_pair"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "@@my_pair" [Annot "@@my_pair"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "$t" [Annot "$t"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "&t" [Annot "&t"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ ":t" [Annot ":t"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ ":_" [Annot ":_"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ ":0" [Annot ":0"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ ":%" [Annot ":%"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ ":%%" [Annot ":%%"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ ":%@" [Annot ":%@"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ ":%@_" [Annot ":%@_"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ ":%@_0" [Annot ":%@_0"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "%from" [Annot "%from"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "%@from" [Annot "%@from"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "%from_a" [Annot "%from_a"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "%from.a" [Annot "%from.a"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "%From.a" [Annot "%From.a"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "%0From.a" [Annot "%0From.a"] + >>=? 
fun () -> + assert_tokenize ~loc:__LOC__ "?t" [Annot "?t"] + >>=? fun () -> (*fail*) - assert_tokenize_error ~loc:__LOC__ "??t" [ Annot "??t" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "&&t" [ Annot "&&t" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "$$t" [ Annot "$$t" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "_from" [ Annot "_from" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ ".from" [ Annot ".from" ] >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "??t" [Annot "??t"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "&&t" [Annot "&&t"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "$$t" [Annot "$$t"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "_from" [Annot "_from"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ ".from" [Annot ".from"] + >>=? fun () -> (*NOTE: the cases below fail because ':' is used in the middle of the annotation. *) - assert_tokenize_error ~loc:__LOC__ "%:from" [ Annot "%:from" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "%:@from" [ Annot "%:@from" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "::t" [ Annot "::t" ] >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "%:from" [Annot "%:from"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "%:@from" [Annot "%:@from"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "::t" [Annot "::t"] + >>=? fun () -> (* Comment *) - assert_tokenize ~loc:__LOC__ - "/*\"/**/\"*/" [Comment "/*\"/**/\"*/"] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "/* /* /* */ */ */" [Comment "/* /* /* */ */ */"] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "/*parse 1" [Comment "/*parse 1"] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "parse 1*/" [Comment "parse 1*/"] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "/* */*/" [Comment "/* */*/"] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "/*/* */" [Comment "/*/* */"] >>=? fun () -> + assert_tokenize ~loc:__LOC__ "/*\"/**/\"*/" [Comment "/*\"/**/\"*/"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "/* /* /* */ */ */" [Comment "/* /* /* */ */ */"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "/*parse 1" [Comment "/*parse 1"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "parse 1*/" [Comment "parse 1*/"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "/* */*/" [Comment "/* */*/"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "/*/* */" [Comment "/*/* */"] + >>=? fun () -> (* EOL *) - assert_tokenize ~loc:__LOC__ "#Access" [ Eol_comment "#Access" ] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "##Access" [ Eol_comment "##Access" ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "?Access" [ Eol_comment "?Access" ] >>=? fun () -> + assert_tokenize ~loc:__LOC__ "#Access" [Eol_comment "#Access"] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "##Access" [Eol_comment "##Access"] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "?Access" [Eol_comment "?Access"] + >>=? fun () -> (* SKIP *) - assert_tokenize ~loc:__LOC__ ";" [ Semi] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "{" [ Open_brace] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "}" [ Close_brace] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "(" [ Open_paren] >>=? fun () -> - assert_tokenize ~loc:__LOC__ ")" [ Close_paren] >>=? fun () -> + assert_tokenize ~loc:__LOC__ ";" [Semi] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "{" [Open_brace] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "}" [Close_brace] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "(" [Open_paren] + >>=? 
fun () -> + assert_tokenize ~loc:__LOC__ ")" [Close_paren] + >>=? fun () -> (*fail*) - assert_tokenize_error ~loc:__LOC__ "{" [ Semi ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ ";" [ Open_brace ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "}" [ Open_brace ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ "(" [ Close_paren ] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ ")" [ Open_paren ] + assert_tokenize_error ~loc:__LOC__ "{" [Semi] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ ";" [Open_brace] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "}" [Open_brace] + >>=? fun () -> + assert_tokenize_error ~loc:__LOC__ "(" [Close_paren] + >>=? fun () -> assert_tokenize_error ~loc:__LOC__ ")" [Open_paren] (*********************) (* One line contracts *) let test_one_line_contract () = - assert_tokenize ~loc:__LOC__ "(option int)" - [Open_paren; Ident "option"; Ident "int"; Close_paren] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "DIP {ADD}" - [Ident "DIP"; Open_brace; Ident "ADD"; Close_brace] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "parameter int;" - [Ident "parameter"; Ident "int"; Semi] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "PUSH string \"abc\";" - [Ident "PUSH"; Ident "string"; String "abc"; Semi] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "DROP; SWAP" - [Ident "DROP"; Semi; Ident "SWAP"] >>=? fun () -> + assert_tokenize + ~loc:__LOC__ + "(option int)" + [Open_paren; Ident "option"; Ident "int"; Close_paren] + >>=? fun () -> + assert_tokenize + ~loc:__LOC__ + "DIP {ADD}" + [Ident "DIP"; Open_brace; Ident "ADD"; Close_brace] + >>=? fun () -> + assert_tokenize + ~loc:__LOC__ + "parameter int;" + [Ident "parameter"; Ident "int"; Semi] + >>=? fun () -> + assert_tokenize + ~loc:__LOC__ + "PUSH string \"abc\";" + [Ident "PUSH"; Ident "string"; String "abc"; Semi] + >>=? fun () -> + assert_tokenize ~loc:__LOC__ "DROP; SWAP" [Ident "DROP"; Semi; Ident "SWAP"] + >>=? fun () -> (* NOTE: the cases below do not fail because we only do tokenization. *) - assert_tokenize ~loc:__LOC__ "DIP {ADD" - [Ident "DIP"; Open_brace; Ident "ADD"] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "(option int" - [Open_paren; Ident "option"; Ident "int"] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "parameter int}" - [Ident "parameter"; Ident "int"; Close_brace] >>=? fun () -> - assert_tokenize ~loc:__LOC__ "}{}{}{" + assert_tokenize ~loc:__LOC__ "DIP {ADD" [Ident "DIP"; Open_brace; Ident "ADD"] + >>=? fun () -> + assert_tokenize + ~loc:__LOC__ + "(option int" + [Open_paren; Ident "option"; Ident "int"] + >>=? fun () -> + assert_tokenize + ~loc:__LOC__ + "parameter int}" + [Ident "parameter"; Ident "int"; Close_brace] + >>=? 
fun () -> + assert_tokenize + ~loc:__LOC__ + "}{}{}{" [Close_brace; Open_brace; Close_brace; Open_brace; Close_brace; Open_brace] (*********************************) (* Conditional contracts *) let test_condition_contract () = - assert_tokenize ~loc:__LOC__ - "parameter (or string (option int));\ - storage unit;\ - return string;\ - code {CAR;\ - IF_LEFT{}\ - {IF_NONE {FAIL}\ - {PUSH int 0; CMPGT; \ - IF {FAIL}{PUSH string \"\"}}};\ - UNIT; SWAP; PAIR}" - [Ident "parameter"; Open_paren; Ident "or"; Ident "string"; Open_paren; - Ident "option"; Ident "int"; Close_paren; Close_paren; Semi; - Ident "storage"; Ident "unit"; Semi; - Ident "return"; Ident "string"; Semi; - Ident "code"; Open_brace; Ident "CAR"; Semi; - Ident "IF_LEFT"; Open_brace; Close_brace; - Open_brace; Ident "IF_NONE"; Open_brace; Ident "FAIL"; Close_brace; - Open_brace; Ident "PUSH"; Ident "int"; Int "0"; Semi; Ident "CMPGT"; Semi; - Ident "IF"; Open_brace; Ident "FAIL"; Close_brace; - Open_brace; Ident "PUSH"; Ident "string"; String ""; - Close_brace; Close_brace; Close_brace; Semi; - Ident "UNIT"; Semi; Ident "SWAP"; Semi; Ident "PAIR"; Close_brace - ] >>=? fun () -> + assert_tokenize + ~loc:__LOC__ + "parameter (or string (option int));storage unit;return string;code \ + {CAR;IF_LEFT{}{IF_NONE {FAIL}{PUSH int 0; CMPGT; IF {FAIL}{PUSH string \ + \"\"}}};UNIT; SWAP; PAIR}" + [ Ident "parameter"; + Open_paren; + Ident "or"; + Ident "string"; + Open_paren; + Ident "option"; + Ident "int"; + Close_paren; + Close_paren; + Semi; + Ident "storage"; + Ident "unit"; + Semi; + Ident "return"; + Ident "string"; + Semi; + Ident "code"; + Open_brace; + Ident "CAR"; + Semi; + Ident "IF_LEFT"; + Open_brace; + Close_brace; + Open_brace; + Ident "IF_NONE"; + Open_brace; + Ident "FAIL"; + Close_brace; + Open_brace; + Ident "PUSH"; + Ident "int"; + Int "0"; + Semi; + Ident "CMPGT"; + Semi; + Ident "IF"; + Open_brace; + Ident "FAIL"; + Close_brace; + Open_brace; + Ident "PUSH"; + Ident "string"; + String ""; + Close_brace; + Close_brace; + Close_brace; + Semi; + Ident "UNIT"; + Semi; + Ident "SWAP"; + Semi; + Ident "PAIR"; + Close_brace ] + >>=? fun () -> (* NOTE: the cases below do not fail because we only do tokenization. *) - assert_tokenize ~loc:__LOC__ + assert_tokenize + ~loc:__LOC__ "parameter (or string (option int);" - [Ident "parameter"; Open_paren; Ident "or"; Ident "string"; Open_paren; - Ident "option"; Ident "int"; Close_paren; Semi] >>=? fun () -> - assert_tokenize ~loc:__LOC__ + [ Ident "parameter"; + Open_paren; + Ident "or"; + Ident "string"; + Open_paren; + Ident "option"; + Ident "int"; + Close_paren; + Semi ] + >>=? fun () -> + assert_tokenize + ~loc:__LOC__ "parameter (or)" - [Ident "parameter"; Open_paren; Ident "or"; Close_paren] >>=? fun () -> - assert_tokenize_error ~loc:__LOC__ + [Ident "parameter"; Open_paren; Ident "or"; Close_paren] + >>=? fun () -> + assert_tokenize_error + ~loc:__LOC__ "parameter (or" [Ident "parameter"; Open_paren; Ident "or"; Close_paren] @@ -222,196 +375,266 @@ let test_condition_contract () = let assert_toplevel_parsing ~loc source expected = match Micheline_parser.tokenize source with - | _, (_::_) -> failwith "%s - Cannot tokenize %s" loc source - | tokens, [] -> - match Micheline_parser.parse_toplevel tokens with - | _, (_::_) -> failwith "%s - Cannot parse_toplevel %s" loc source - | ast, [] -> - let ast = List.map Micheline.strip_locations ast in - let expected = List.map Micheline.strip_locations expected in - Assert.equal ~loc (List.length ast) (List.length expected) >>=? 
fun () -> - iter2_p (Assert.equal ~loc) ast expected >>=? fun () -> - return_unit + | (_, _ :: _) -> + failwith "%s - Cannot tokenize %s" loc source + | (tokens, []) -> ( + match Micheline_parser.parse_toplevel tokens with + | (_, _ :: _) -> + failwith "%s - Cannot parse_toplevel %s" loc source + | (ast, []) -> + let ast = List.map Micheline.strip_locations ast in + let expected = List.map Micheline.strip_locations expected in + Assert.equal ~loc (List.length ast) (List.length expected) + >>=? fun () -> + iter2_p (Assert.equal ~loc) ast expected >>=? fun () -> return_unit ) let assert_toplevel_parsing_error ~loc source expected = match Micheline_parser.tokenize source with - | _, (_::_) -> return_unit - | tokens, [] -> - match Micheline_parser.parse_toplevel tokens with - | _, (_::_) -> return_unit - | ast, [] -> - let ast = List.map Micheline.strip_locations ast in - let expected = List.map Micheline.strip_locations expected in - Assert.equal ~loc (List.length ast) (List.length expected) >>=? fun () -> - iter2_p (Assert.not_equal ~loc) ast expected + | (_, _ :: _) -> + return_unit + | (tokens, []) -> ( + match Micheline_parser.parse_toplevel tokens with + | (_, _ :: _) -> + return_unit + | (ast, []) -> + let ast = List.map Micheline.strip_locations ast in + let expected = List.map Micheline.strip_locations expected in + Assert.equal ~loc (List.length ast) (List.length expected) + >>=? fun () -> iter2_p (Assert.not_equal ~loc) ast expected ) let test_basic_parsing () = - assert_toplevel_parsing ~loc:__LOC__ "parameter unit;" - [Prim ((), "parameter", - [Prim ((), "unit", [], [])], - [])] >>=? fun () -> + assert_toplevel_parsing + ~loc:__LOC__ + "parameter unit;" + [Prim ((), "parameter", [Prim ((), "unit", [], [])], [])] + >>=? fun () -> (* Sequence *) - assert_toplevel_parsing ~loc:__LOC__ "code {}" - [Prim ((), "code", - [ Seq ((), [])], [])] >>=? fun () -> + assert_toplevel_parsing + ~loc:__LOC__ + "code {}" + [Prim ((), "code", [Seq ((), [])], [])] + >>=? fun () -> (* Int *) - assert_toplevel_parsing ~loc:__LOC__ "PUSH int 100" - [Prim ((), "PUSH", - [Prim ((), "int", [], []); - Int ((), Z.of_int 100)], - [])] >>=? fun () -> + assert_toplevel_parsing + ~loc:__LOC__ + "PUSH int 100" + [Prim ((), "PUSH", [Prim ((), "int", [], []); Int ((), Z.of_int 100)], [])] + >>=? fun () -> (*NOTE: this case doesn't fail because we don't type check *) - assert_toplevel_parsing ~loc:__LOC__ "PUSH string 100" - [Prim ((), "PUSH", - [Prim ((), "string", [], []); - Int ((), Z.of_int 100)], - [])] >>=? fun () -> - assert_toplevel_parsing_error ~loc:__LOC__ "PUSH int 100_000" - [Prim ((), "PUSH", - [Prim ((), "string", [], []); - Int ((), Z.of_int 100_000)], - [])] >>=? fun () -> - assert_toplevel_parsing_error ~loc:__LOC__ "PUSH int 100" - [Prim ((), "PUSH", - [Prim ((), "int", [], []); - Int ((), Z.of_int 1000)], - [])] >>=? fun () -> - assert_toplevel_parsing_error ~loc:__LOC__ "PUSH int 100" - [Prim ((), "PUSH", - [Prim ((), "string", [], []); - Int ((), Z.of_int 100)], - [])] >>=? fun () -> - assert_toplevel_parsing_error ~loc:__LOC__ "PUSH int \"100\"" - [Prim ((), "PUSH", - [Prim ((), "string", [], []); - Int ((), Z.of_int 100)], - [])] >>=? fun () -> + assert_toplevel_parsing + ~loc:__LOC__ + "PUSH string 100" + [ Prim + ((), "PUSH", [Prim ((), "string", [], []); Int ((), Z.of_int 100)], []) + ] + >>=? fun () -> + assert_toplevel_parsing_error + ~loc:__LOC__ + "PUSH int 100_000" + [ Prim + ( (), + "PUSH", + [Prim ((), "string", [], []); Int ((), Z.of_int 100_000)], + [] ) ] + >>=? 
fun () -> + assert_toplevel_parsing_error + ~loc:__LOC__ + "PUSH int 100" + [Prim ((), "PUSH", [Prim ((), "int", [], []); Int ((), Z.of_int 1000)], [])] + >>=? fun () -> + assert_toplevel_parsing_error + ~loc:__LOC__ + "PUSH int 100" + [ Prim + ((), "PUSH", [Prim ((), "string", [], []); Int ((), Z.of_int 100)], []) + ] + >>=? fun () -> + assert_toplevel_parsing_error + ~loc:__LOC__ + "PUSH int \"100\"" + [ Prim + ((), "PUSH", [Prim ((), "string", [], []); Int ((), Z.of_int 100)], []) + ] + >>=? fun () -> (* String *) - assert_toplevel_parsing ~loc:__LOC__ "Pair False \"abc\"" - [Prim ( - (), "Pair", - [Prim ( - (), "False", [], []); - String ((), "abc")], [] - )] >>=? fun () -> - assert_toplevel_parsing_error ~loc:__LOC__ "Pair False \"ab\"" - [Prim ( - (), "Pair", - [Prim ( - (), "False", [], []); - String ((), "abc")], [] - )] >>=? fun () -> - assert_toplevel_parsing_error ~loc:__LOC__ "Pair False abc\"" - [Prim ( - (), "Pair", - [Prim ( - (), "False", [], []); - String ((), "abc")], [] - )] >>=? fun () -> + assert_toplevel_parsing + ~loc:__LOC__ + "Pair False \"abc\"" + [Prim ((), "Pair", [Prim ((), "False", [], []); String ((), "abc")], [])] + >>=? fun () -> + assert_toplevel_parsing_error + ~loc:__LOC__ + "Pair False \"ab\"" + [Prim ((), "Pair", [Prim ((), "False", [], []); String ((), "abc")], [])] + >>=? fun () -> + assert_toplevel_parsing_error + ~loc:__LOC__ + "Pair False abc\"" + [Prim ((), "Pair", [Prim ((), "False", [], []); String ((), "abc")], [])] + >>=? fun () -> (* annotations *) - assert_toplevel_parsing ~loc:__LOC__ "NIL @annot string; #comment\n" - [Prim ((), "NIL", [Prim ((), "string", [], [])], ["@annot"])] >>=? fun () -> - assert_toplevel_parsing_error ~loc:__LOC__ "NIL @annot string; #comment\n" - [Prim ((), "NIL", [Prim ((), "string", [], [])], [])] >>=? fun () -> - assert_toplevel_parsing ~loc:__LOC__ "IF_NONE {FAIL} {}" - [Prim ((), "IF_NONE", [ Seq ((), [ Prim ((), "FAIL", [], [])]); - Seq ((), [])], [])] >>=? fun () -> - assert_toplevel_parsing ~loc:__LOC__ "PUSH (map int bool) (Map (Item 100 False))" - [Prim ((), "PUSH", [Prim ((), "map", [Prim ((), "int", [], []); - Prim ((), "bool", [], [])], []); - Prim ((), "Map", [Prim ((), "Item", - [Int((), Z.of_int 100); - Prim ((), "False", [], []) - ], []); - ], []) - ] - , [])] >>=? fun () -> - assert_toplevel_parsing ~loc:__LOC__ "LAMDA @name int int {}" - [Prim ((), "LAMDA", [Prim ((), "int", [], []); - Prim ((), "int", [], []); - Seq ((), [])], ["@name"])] >>=? fun () -> - assert_toplevel_parsing ~loc:__LOC__ "code {DUP @test; DROP}" - [Prim ((), "code", [Seq ((), [Prim ((), "DUP", [], ["@test"]); - Prim ((), "DROP", [], [])])], [])] + assert_toplevel_parsing + ~loc:__LOC__ + "NIL @annot string; #comment\n" + [Prim ((), "NIL", [Prim ((), "string", [], [])], ["@annot"])] + >>=? fun () -> + assert_toplevel_parsing_error + ~loc:__LOC__ + "NIL @annot string; #comment\n" + [Prim ((), "NIL", [Prim ((), "string", [], [])], [])] + >>=? fun () -> + assert_toplevel_parsing + ~loc:__LOC__ + "IF_NONE {FAIL} {}" + [ Prim + ( (), + "IF_NONE", + [Seq ((), [Prim ((), "FAIL", [], [])]); Seq ((), [])], + [] ) ] + >>=? fun () -> + assert_toplevel_parsing + ~loc:__LOC__ + "PUSH (map int bool) (Map (Item 100 False))" + [ Prim + ( (), + "PUSH", + [ Prim + ( (), + "map", + [Prim ((), "int", [], []); Prim ((), "bool", [], [])], + [] ); + Prim + ( (), + "Map", + [ Prim + ( (), + "Item", + [Int ((), Z.of_int 100); Prim ((), "False", [], [])], + [] ) ], + [] ) ], + [] ) ] + >>=? 
fun () -> + assert_toplevel_parsing + ~loc:__LOC__ + "LAMDA @name int int {}" + [ Prim + ( (), + "LAMDA", + [Prim ((), "int", [], []); Prim ((), "int", [], []); Seq ((), [])], + ["@name"] ) ] + >>=? fun () -> + assert_toplevel_parsing + ~loc:__LOC__ + "code {DUP @test; DROP}" + [ Prim + ( (), + "code", + [ Seq + ((), [Prim ((), "DUP", [], ["@test"]); Prim ((), "DROP", [], [])]) + ], + [] ) ] let test_condition_contract_parsing () = - assert_toplevel_parsing ~loc:__LOC__ "parameter unit;\ - return unit;\ - storage tez; #How much you have to send me \n\ - code {CDR; DUP;\ - AMOUNT; CMPLT;\ - IF {FAIL}}" - [Prim ((), "parameter", [ Prim ((), "unit", [],[])], []); - Prim ((), "return", [Prim ((), "unit", [], [])], []); - Prim ((), "storage", [Prim ((), "tez", [], [])], []); - Prim ((), "code", [Seq ((), [Prim ((), "CDR", [], []); - Prim ((), "DUP", [], []); - Prim ((), "AMOUNT", [], []); - Prim ((), "CMPLT", [], []); - Prim ((), "IF", - [Seq ((), - [Prim ((), "FAIL", [], [])])] - , [])])], - []) - ] + assert_toplevel_parsing + ~loc:__LOC__ + "parameter unit;return unit;storage tez; #How much you have to send me \n\ + code {CDR; DUP;AMOUNT; CMPLT;IF {FAIL}}" + [ Prim ((), "parameter", [Prim ((), "unit", [], [])], []); + Prim ((), "return", [Prim ((), "unit", [], [])], []); + Prim ((), "storage", [Prim ((), "tez", [], [])], []); + Prim + ( (), + "code", + [ Seq + ( (), + [ Prim ((), "CDR", [], []); + Prim ((), "DUP", [], []); + Prim ((), "AMOUNT", [], []); + Prim ((), "CMPLT", [], []); + Prim ((), "IF", [Seq ((), [Prim ((), "FAIL", [], [])])], []) + ] ) ], + [] ) ] let test_list_append_parsing () = - assert_toplevel_parsing ~loc:__LOC__ "parameter (pair (list int)(list int));\ - return (list int);\ - storage unit;\ - code { CAR; DUP; DIP{CDR}; CAR;\ - NIL int; SWAP;\ - LAMDA (pair int (list int))\ - (list int)\ - {DUP; CAR; DIP {CDR}; CONS};\ - REDUCE;\ - LAMDA (pair int (list int))\ - (list int)\ - {DUP; CAR; DIP{CDR}; CONS};\ - UNIT; SWAP; PAIR}" - [Prim ((), "parameter", - [Prim ((), "pair", - [Prim ((), "list", [Prim ((), "int", [], [])], []); - Prim ((), "list", [Prim ((), "int", [], [])], [])], [])], []); - Prim ((), "return", [Prim ((), "list", [Prim ((), "int", [], [])], [])], []); - Prim ((), "storage", [Prim ((), "unit", [], [])], []); - Prim ((), "code", - [Seq ((), - [Prim ((), "CAR", [], []); + assert_toplevel_parsing + ~loc:__LOC__ + "parameter (pair (list int)(list int));return (list int);storage \ + unit;code { CAR; DUP; DIP{CDR}; CAR;NIL int; SWAP;LAMDA (pair int (list \ + int))(list int){DUP; CAR; DIP {CDR}; CONS};REDUCE;LAMDA (pair int (list \ + int))(list int){DUP; CAR; DIP{CDR}; CONS};UNIT; SWAP; PAIR}" + [ Prim + ( (), + "parameter", + [ Prim + ( (), + "pair", + [ Prim ((), "list", [Prim ((), "int", [], [])], []); + Prim ((), "list", [Prim ((), "int", [], [])], []) ], + [] ) ], + [] ); + Prim + ((), "return", [Prim ((), "list", [Prim ((), "int", [], [])], [])], []); + Prim ((), "storage", [Prim ((), "unit", [], [])], []); + Prim + ( (), + "code", + [ Seq + ( (), + [ Prim ((), "CAR", [], []); Prim ((), "DUP", [], []); Prim ((), "DIP", [Seq ((), [Prim ((), "CDR", [], [])])], []); Prim ((), "CAR", [], []); Prim ((), "NIL", [Prim ((), "int", [], [])], []); Prim ((), "SWAP", [], []); - Prim ((), "LAMDA", - [Prim ((), "pair", - [Prim ((), "int", [], []); - Prim ((), "list", - [Prim ((), "int", [], [])], []) - ], []); - Prim ((), "list", [Prim ((), "int", [], [])], []); - Seq ((), [Prim ((), "DUP", [], []); - Prim ((), "CAR", [], []); - Prim ((), "DIP", [Seq ((), [Prim ((), "CDR", 
[], [])])], []); - Prim ((), "CONS", [], [])]) - ], []); + Prim + ( (), + "LAMDA", + [ Prim + ( (), + "pair", + [ Prim ((), "int", [], []); + Prim ((), "list", [Prim ((), "int", [], [])], []) + ], + [] ); + Prim ((), "list", [Prim ((), "int", [], [])], []); + Seq + ( (), + [ Prim ((), "DUP", [], []); + Prim ((), "CAR", [], []); + Prim + ( (), + "DIP", + [Seq ((), [Prim ((), "CDR", [], [])])], + [] ); + Prim ((), "CONS", [], []) ] ) ], + [] ); Prim ((), "REDUCE", [], []); - Prim ((), "LAMDA", - [Prim ((), "pair", - [Prim ((), "int", [], []); - Prim ((), "list", - [Prim ((), "int", [], [])], []) - ], []); - Prim ((), "list", [Prim ((), "int", [], [])], []); - Seq ((), [Prim ((), "DUP", [], []); - Prim ((), "CAR", [], []); - Prim ((), "DIP", [Seq ((), [Prim ((), "CDR", [], [])])], []); - Prim ((), "CONS", [], [])]) - ], []); + Prim + ( (), + "LAMDA", + [ Prim + ( (), + "pair", + [ Prim ((), "int", [], []); + Prim ((), "list", [Prim ((), "int", [], [])], []) + ], + [] ); + Prim ((), "list", [Prim ((), "int", [], [])], []); + Seq + ( (), + [ Prim ((), "DUP", [], []); + Prim ((), "CAR", [], []); + Prim + ( (), + "DIP", + [Seq ((), [Prim ((), "CDR", [], [])])], + [] ); + Prim ((), "CONS", [], []) ] ) ], + [] ); Prim ((), "UNIT", [], []); Prim ((), "SWAP", [], []); - Prim ((), "PAIR", [], []) - ])], [])] + Prim ((), "PAIR", [], []) ] ) ], + [] ) ] (****************************************************************************) (* Expression parsing tests *) @@ -419,47 +642,52 @@ let test_list_append_parsing () = let assert_expression_parsing ~loc source expected = match Micheline_parser.tokenize source with - | _, (_::_) -> failwith "%s - Cannot tokenize %s" loc source - | tokens, [] -> - match Micheline_parser.parse_expression tokens with - | _, (_::_) -> failwith "%s - Cannot parse_expression %s" loc source - | ast, [] -> - let ast = Micheline.strip_locations ast in - let expected = Micheline.strip_locations expected in - Assert.equal ~loc ast expected + | (_, _ :: _) -> + failwith "%s - Cannot tokenize %s" loc source + | (tokens, []) -> ( + match Micheline_parser.parse_expression tokens with + | (_, _ :: _) -> + failwith "%s - Cannot parse_expression %s" loc source + | (ast, []) -> + let ast = Micheline.strip_locations ast in + let expected = Micheline.strip_locations expected in + Assert.equal ~loc ast expected ) let test_parses_expression () = (* String *) - assert_expression_parsing ~loc:__LOC__ "Pair False \"abc\"" - (Prim ((), "Pair", [Prim ((), "False", [], []); - String ((), "abc")], [])) >>=? fun () -> + assert_expression_parsing + ~loc:__LOC__ + "Pair False \"abc\"" + (Prim ((), "Pair", [Prim ((), "False", [], []); String ((), "abc")], [])) + >>=? fun () -> (* Int *) - assert_expression_parsing ~loc:__LOC__ "Item 100" - (Prim ((), "Item", [Int ((), Z.of_int 100)], [])) >>=? fun () -> + assert_expression_parsing + ~loc:__LOC__ + "Item 100" + (Prim ((), "Item", [Int ((), Z.of_int 100)], [])) + >>=? 
fun () -> (* Sequence *) - assert_expression_parsing ~loc:__LOC__ "{}" - (Seq ((), [])) + assert_expression_parsing ~loc:__LOC__ "{}" (Seq ((), [])) (****************************************************************************) -let tests = [ - "tokenize", (fun _ -> test_tokenize_basic ()) ; - "test one line contract", (fun _ -> test_one_line_contract ()) ; - "test_condition_contract", (fun _ -> test_condition_contract ()) ; - "test_basic_parsing", (fun _ -> test_basic_parsing ()) ; - "test_condition_contract_parsing", (fun _ -> test_condition_contract_parsing ()) ; - "test_list_append_parsing", (fun _ -> test_list_append_parsing ()) ; - "test_parses_expression", (fun _ -> test_parses_expression ()) ; -] +let tests = + [ ("tokenize", fun _ -> test_tokenize_basic ()); + ("test one line contract", fun _ -> test_one_line_contract ()); + ("test_condition_contract", fun _ -> test_condition_contract ()); + ("test_basic_parsing", fun _ -> test_basic_parsing ()); + ( "test_condition_contract_parsing", + fun _ -> test_condition_contract_parsing () ); + ("test_list_append_parsing", fun _ -> test_list_append_parsing ()); + ("test_parses_expression", fun _ -> test_parses_expression ()) ] let wrap (n, f) = - Alcotest_lwt.test_case n `Quick begin fun _ () -> - f () >>= function - | Ok () -> Lwt.return_unit - | Error err -> Lwt.fail_with err - end + Alcotest_lwt.test_case n `Quick (fun _ () -> + f () + >>= function Ok () -> Lwt.return_unit | Error err -> Lwt.fail_with err) let () = - Alcotest.run ~argv:[|""|] "tezos-lib-micheline" [ - "micheline", List.map wrap tests - ] + Alcotest.run + ~argv:[|""|] + "tezos-lib-micheline" + [("micheline", List.map wrap tests)] diff --git a/src/lib_network_sandbox/console.ml b/src/lib_network_sandbox/console.ml index 4de364d4ea5ade30133ded7b92434012a6dd79ed..6db75c1feeea65e6ce64f50142923de81a0e3552 100644 --- a/src/lib_network_sandbox/console.ml +++ b/src/lib_network_sandbox/console.ml @@ -30,11 +30,10 @@ let make with_timestamp color = (fun tag -> match color_of_tag tag with | Some c -> fprintf formatter "%s" c - | None -> () ) + | None -> ()) ; print_close_tag= (fun tag -> - if color_of_tag tag <> None then fprintf formatter "%s" reset ) - } ; + if color_of_tag tag <> None then fprintf formatter "%s" reset) } ; pp_set_tags formatter true) ) ; {color; buffer= b; channel; formatter; with_timestamp} @@ -77,8 +76,10 @@ let sayf (o : _ Base_state.t) (fmt : Format.formatter -> unit -> unit) : (_, _) Asynchronous_result.t = let date = if o#console.with_timestamp then - let date = Tezos_stdlib_unix.Systime_os.now () - |> Tezos_base.Time.System.to_notation in + let date = + Tezos_stdlib_unix.Systime_os.now () + |> Tezos_base.Time.System.to_notation + in sprintf "[%s]" date else "" in @@ -98,8 +99,10 @@ let sayf (o : _ Base_state.t) (fmt : Format.formatter -> unit -> unit) : let say (o : _ Base_state.t) ef : (_, _) Asynchronous_result.t = let date = if o#console.with_timestamp then - let date = Tezos_stdlib_unix.Systime_os.now () - |> Tezos_base.Time.System.to_notation in + let date = + Tezos_stdlib_unix.Systime_os.now () + |> Tezos_base.Time.System.to_notation + in sprintf "[%s]" date else "" in @@ -148,7 +151,7 @@ module Prompt = struct | Ok (List (Atom c :: more)) -> ( match List.find commands ~f:(fun m -> - List.mem m.commands c ~equal:String.equal ) + List.mem m.commands c ~equal:String.equal) with | Some {action; _} -> ( Asynchronous_result.bind_on_error (action more) @@ -158,12 +161,12 @@ module Prompt = struct desc (shout "Error in action:") (custom (fun ppf -> 
Attached_result.pp ppf result (* Error.pp ppf err *) - ~pp_error:(fun fmt -> function + ~pp_error:(fun fmt -> + function | `Lwt_exn _ as e -> Lwt_exception.pp fmt e | `Command_line s -> - Format.fprintf fmt "Wrong command line: %s" s - ) ))) - >>= fun () -> return `Loop ) + Format.fprintf fmt "Wrong command line: %s" s)))) + >>= fun () -> return `Loop) >>= function | `Loop -> loop () | `Help -> @@ -184,7 +187,7 @@ module Prompt = struct ~param: {default_label with space_after_label= false} (cmdlist (List.map ~f:(af "%S") commands)) - (list [haf "->"; doc]) )))) + (list [haf "->"; doc]))))) >>= fun () -> loop () | `Quit -> return () ) | None -> @@ -192,7 +195,8 @@ module Prompt = struct EF.( desc (ksprintf shout "Error, unknown command: %S" c) - (custom (fun fmt -> Base.Sexp.pp_hum_indent 4 fmt (List more)))) + (custom (fun fmt -> + Base.Sexp.pp_hum_indent 4 fmt (List more)))) >>= fun () -> loop () ) | Ok other -> say state @@ -207,7 +211,7 @@ module Prompt = struct desc (shout "Error: ") (custom (fun fmt -> Parsexp.Parse_error.report fmt ~filename:"<command-line>" - err ))) + err))) >>= fun () -> loop () in loop () diff --git a/src/lib_network_sandbox/helpers.ml b/src/lib_network_sandbox/helpers.ml index 8e421f039c0023b693f257e790ce471e3d655d47..71b85ce18fc8aefd4a7954577acb06bd937d0bd9 100644 --- a/src/lib_network_sandbox/helpers.ml +++ b/src/lib_network_sandbox/helpers.ml @@ -73,14 +73,14 @@ module Counter_log = struct let total = "**Total:**" in let longest = List.fold !t ~init:total ~f:(fun p (n, _) -> - if String.length p < String.length n then n else p ) + if String.length p < String.length n then n else p) in List.rev_map ((total, sum t) :: !t) ~f:(fun (cmt, n) -> sprintf "| %s %s|% 8d|" cmt (String.make (String.length longest - String.length cmt + 2) '.') - n ) + n) |> String.concat ~sep:"\n" end @@ -110,7 +110,7 @@ module System_dependencies = struct >>= fun result -> match result#status with | Unix.WEXITED 0 -> return prev - | _ -> return (`Missing_exec (cmd, result) :: prev) ) + | _ -> return (`Missing_exec (cmd, result) :: prev)) >>= fun errors_or_warnings -> List.fold protocol_paths ~init:(return errors_or_warnings) ~f:(fun prev_m path -> @@ -119,7 +119,7 @@ module System_dependencies = struct Lwt_exception.catch Lwt_unix.file_exists (path // "TEZOS_PROTOCOL") >>= function | true -> return prev - | false -> return (`Not_a_protocol_path path :: prev) ) + | false -> return (`Not_a_protocol_path path :: prev)) >>= fun errors_or_warnings -> match (errors_or_warnings, how_to_react) with | [], _ -> return () @@ -142,7 +142,7 @@ module System_dependencies = struct | `Not_a_protocol_path path -> pp_print_text ppf (sprintf "Not a protocol path: `%s`." path) ) ; - pp_close_box ppf () ; pp_print_space ppf () ) ; + pp_close_box ppf () ; pp_print_space ppf ()) ; pp_close_box ppf ()) >>= fun () -> failf "Error/Warnings were raised during precheck." 
end diff --git a/src/lib_network_sandbox/interactive_test.ml b/src/lib_network_sandbox/interactive_test.ml index fa3bcc4c65ac2af16b29bc491d7966aa739b9a46..08da8d798eaf92e13a39c42a12c51596f732cae2 100644 --- a/src/lib_network_sandbox/interactive_test.ml +++ b/src/lib_network_sandbox/interactive_test.ml @@ -12,7 +12,7 @@ module Commands = struct let unit_loop_no_args doc opts f = Prompt.unit_and_loop doc opts (fun sexps -> - no_args sexps >>= fun () -> f () ) + no_args sexps >>= fun () -> f ()) module Sexp_options = struct let option_doc pattern doc = EF.(desc (haf "`%s`:" pattern) doc) @@ -64,7 +64,7 @@ module Commands = struct EF.( desc (haf "Disk-Usage:") (af "%s" (String.concat ~sep:" " du#out))) - | false -> return () ) + | false -> return ()) let processes state = Prompt.unit_and_loop @@ -73,7 +73,7 @@ module Commands = struct ["p"; "processes"] (fun sxp -> let all = flag "all" sxp in - say state (Running_processes.ef ~all state) ) + say state (Running_processes.ef ~all state)) let curl ?(jq = ".") state ~port ~path = Running_processes.run_cmdf state "curl http://localhost:%d%s | jq %s" port @@ -100,7 +100,7 @@ module Commands = struct EF.( desc (af "Curl-Node :%d" port) (af "\"%s\"" (String.concat ~sep:"\n" res))) - | `Error -> return () ) + | `Error -> return ()) let curl_metadata state ~default_port = curl_unit_display state ["m"; "metadata"] ~default_port @@ -134,7 +134,7 @@ module Commands = struct | `Failed -> af "Failed" | `Level i -> af "[%d]" i | `Null -> af "{Null}" - | `Unknown s -> af "¿%s?" s ) )))) ) + | `Unknown s -> af "¿%s?" s )))))) let show_process state = Prompt.unit_and_loop @@ -144,7 +144,7 @@ module Commands = struct | [Atom name] -> let prefix = String.lowercase name in Running_processes.find_process_by_id state ~f:(fun n -> - String.is_prefix (String.lowercase n) ~prefix ) + String.is_prefix (String.lowercase n) ~prefix) >>= fun procs -> List.fold procs ~init:(return []) ~f:(fun prevm {process; lwt} -> prevm @@ -163,9 +163,9 @@ module Commands = struct [ desc (af "out: %s" out) (ocaml_string_list tailout#out) ; desc (af "err: %s" err) (ocaml_string_list tailerr#out) ] - :: prev) ) + :: prev)) >>= fun ef -> say state EF.(list ef) - | _other -> cmdline_fail "command expects 1 argument: name-prefix" ) + | _other -> cmdline_fail "command expects 1 argument: name-prefix") let kill_all state = unit_loop_no_args @@ -189,7 +189,7 @@ module Commands = struct [ atom (name acc) ; af "Pub:%s" (pubkey acc) ; af "Hash:%s" (pubkey_hash acc) - ; atom (private_key acc) ] )))) ) + ; atom (private_key acc) ]))))) let show_connections state nodes = unit_loop_no_args @@ -220,7 +220,7 @@ module Commands = struct | `A sl -> List.map sl ~f:(function | `String s -> s - | _ -> failwith "Not a string list" ) + | _ -> failwith "Not a string list") | _ -> failwith "Not a string list" in return contracts @@ -232,7 +232,7 @@ module Commands = struct [ desc (af "output") (ocaml_string_list res) ; desc (af "exn") (exn e) ]) >>= fun () -> return [] ) - | `Error -> return [] ) + | `Error -> return []) >>= fun contracts -> let balance block contract = let path = @@ -250,7 +250,7 @@ module Commands = struct balance "1" hsh >>= fun init -> balance "head" hsh - >>= fun current -> return ((hsh, init, current) :: prev) ) + >>= fun current -> return ((hsh, init, current) :: prev)) >>= fun results -> say state EF.( @@ -260,7 +260,7 @@ module Commands = struct desc (haf "%S" hsh) (af "%s → %s" (Option.value init ~default:"???") - (Option.value cur ~default:"???")) ))) ) + (Option.value cur 
~default:"???")))))) let arbitrary_command_on_clients ?make_admin ?(command_names = ["cc"; "client-command"]) state ~clients = @@ -299,13 +299,13 @@ module Commands = struct | other -> ksprintf failwith "Option `only` only accepts a list of names: %s" - (to_string_hum other) )) - | _ -> None ) + (to_string_hum other))) + | _ -> None) |> function | None -> clients | Some more -> List.filter clients ~f:(fun c -> - List.mem more c.Tezos_client.id ~equal:String.equal ) + List.mem more c.Tezos_client.id ~equal:String.equal) in let use_admin = match make_admin with @@ -333,11 +333,11 @@ module Commands = struct display_errors_of_command state res >>= function | true -> return ((client, String.concat ~sep:"\n" res#out) :: prev) - | false -> return prev ) + | false -> return prev) >>= fun results -> let different_results = List.dedup_and_sort results ~compare:(fun (_, a) (_, b) -> - String.compare a b ) + String.compare a b) in say state EF.( @@ -354,14 +354,14 @@ module Commands = struct let clients = List.filter_map results ~f:(function | c, r when res = r -> Some c.Tezos_client.id - | _ -> None ) + | _ -> None) in desc (haf "Client%s %s:" ( if List.length subset_of_clients = 1 then "" else "s" ) (String.concat ~sep:", " clients)) - (markdown_verbatim res) ))) ]) ) + (markdown_verbatim res)))) ])) let all_defaults state ~nodes = let default_port = (List.hd_exn nodes).Tezos_node.rpc_port in @@ -398,7 +398,7 @@ module Interactivity = struct | true, _, _ -> `Full | false, true, _ -> `At_end | false, false, true -> `At_end - | false, false, false -> `None ) + | false, false, false -> `None) $ Arg.( value & opt bool @@ -464,7 +464,7 @@ module Pauser = struct let _ = Lwt_unix.on_signal Sys.sigint (fun i -> Printf.eprintf "SIGINTED (%d)\n%!" i ; - Lwt_condition.broadcast cond `Sigint ) + Lwt_condition.broadcast cond `Sigint) in let wait () = Lwt_exception.catch Lwt_condition.wait cond @@ -488,5 +488,5 @@ module Pauser = struct [ haf "Last pause before the test will Kill 'Em All and Quit." ; desc (shout "Error:") (af "%a" pp_error error_value) ] >>= fun () -> - finish () >>= fun () -> fail error_value ~attach:result.attachments ) + finish () >>= fun () -> fail error_value ~attach:result.attachments) end diff --git a/src/lib_network_sandbox/internal_pervasives.ml b/src/lib_network_sandbox/internal_pervasives.ml index bf876287b454b6645c693fdb0ce418771496b6a8..207cc1a234244f2dde92ff01d2a718276866d4d3 100644 --- a/src/lib_network_sandbox/internal_pervasives.ml +++ b/src/lib_network_sandbox/internal_pervasives.ml @@ -126,7 +126,7 @@ module Attached_result = struct fprintf ppf "%s:@ " k ; match v with | `Text s -> pp_print_text ppf s - | `String_value s -> fprintf ppf "%S" s ) + | `String_value s -> fprintf ppf "%S" s) end (** A wrapper around [('ok, 'a Error.t) result Lwt.t]. 
*) @@ -217,11 +217,11 @@ module Asynchronous_result = struct module List_sequential = struct let iter l ~f = List.fold l ~init:(return ()) ~f:(fun pm x -> - pm >>= fun () -> (f x : (_, _) t) ) + pm >>= fun () -> (f x : (_, _) t)) let iteri l ~f = List.fold l ~init:(return 0) ~f:(fun pm x -> - pm >>= fun n -> (f n x : (_, _) t) >>= fun () -> return (n + 1) ) + pm >>= fun n -> (f n x : (_, _) t) >>= fun () -> return (n + 1)) >>= fun _ -> return () end @@ -256,14 +256,14 @@ module Asynchronous_result = struct | Ok x -> f x elt | Error _ -> error := Some prevm ; - Lwt.fail Not_found ) - stream (Attached_result.ok init) ) + Lwt.fail Not_found) + stream (Attached_result.ok init)) (fun e -> match !error with | Some res -> Lwt.return res | None -> (* `f` threw a forbidden exception! *) - Lwt.fail e ) + Lwt.fail e) end let run_application r = @@ -351,14 +351,13 @@ module System = struct Lwt_exception.catch (fun () -> Lwt_io.with_file ?perm ~mode:Lwt_io.output path (fun out -> - Lwt_io.write out content ) ) + Lwt_io.write out content)) () let read_file (_state : _ Base_state.t) path = Lwt_exception.catch (fun () -> - Lwt_io.with_file ~mode:Lwt_io.input path (fun out -> Lwt_io.read out) - ) + Lwt_io.with_file ~mode:Lwt_io.input path (fun out -> Lwt_io.read out)) () end diff --git a/src/lib_network_sandbox/kiln.ml b/src/lib_network_sandbox/kiln.ml index 492b4f3b7d6ce4bb1efeeb60abc07ee2c159ba38..dc13e5c4cdea33dfc28348eb055ce9fbde3ccc0f 100644 --- a/src/lib_network_sandbox/kiln.ml +++ b/src/lib_network_sandbox/kiln.ml @@ -79,7 +79,7 @@ module Configuration_directory = struct ; ( "baker-endorser-paths" , list (fun (p, bak, endo) -> - strings [p; absolutize bak; absolutize endo] ) + strings [p; absolutize bak; absolutize endo]) protocol_execs ) ] |> to_string ~minify:false) >>= fun () -> @@ -90,7 +90,7 @@ module Configuration_directory = struct let open Cmdliner in Term.( pure (fun x clean -> - Option.map x ~f:(fun (path, p2p_port) -> {path; p2p_port; clean}) ) + Option.map x ~f:(fun (path, p2p_port) -> {path; p2p_port; clean})) $ Arg.( value (opt diff --git a/src/lib_network_sandbox/log_recorder.ml b/src/lib_network_sandbox/log_recorder.ml index 2c8451a6a85a146adb62979925a5de4a87ae9e3d..460af9643ebea19f532885098984a5cd65e4399b 100644 --- a/src/lib_network_sandbox/log_recorder.ml +++ b/src/lib_network_sandbox/log_recorder.ml @@ -33,7 +33,7 @@ module Operations = struct [af "→ %s" msg; ocaml_string_list res] | `Bake (n, msg, res) -> desc_list (haf "Node-baked: %S" n) - [af "→ %s" msg; ocaml_string_list res] ))) ) + [af "→ %s" msg; ocaml_string_list res])))) let bake state ~client ~output msg = let t = from_state state in diff --git a/src/lib_network_sandbox/running_processes.ml b/src/lib_network_sandbox/running_processes.ml index 1e34f13e7687dc324fde30270176db5aace49adb..24aa35ebac3cc2d3ab2b93eed982f2031fc0eafd 100644 --- a/src/lib_network_sandbox/running_processes.ml +++ b/src/lib_network_sandbox/running_processes.ml @@ -72,7 +72,7 @@ let ef_procesess state processes = desc_list (af "P:%s" process.id) [ desc (af "out") (atom (output_path state process `Stdout)) ; desc (af "err") (atom (output_path state process `Stderr)) - ; desc (af "pid") (af "%d" lwt#pid) ] ))) + ; desc (af "pid") (af "%d" lwt#pid) ]))) let unix_status_to_string (p : Unix.process_status) = match p with @@ -103,7 +103,7 @@ let ef ?(all = false) state = | `Process_group -> af "process-group" | `Process_group_script _ -> af "shell-script" ) ] ) :: prev - | _, _ -> prev ) + | _, _ -> prev) (State.processes state) [] |> List.sort 
~compare:(fun (a, _) (b, _) -> String.compare a b) |> List.map ~f:snd @@ -156,7 +156,7 @@ let start t process = |> String.concat ~sep:"; " ) sep in - Lwt_io.write chan msg ) ) + Lwt_io.write chan msg)) () >>= fun () -> let proc = @@ -191,7 +191,7 @@ let kill _t {lwt; process} = ( try Unix.kill pid signal with | Unix.Unix_error (Unix.ESRCH, _, _) -> () | e -> raise e ) ; - Lwt.return () ) + Lwt.return ()) () | `Docker name -> ( Lwt_exception.catch Lwt_unix.system (sprintf "docker kill %s" name) @@ -213,13 +213,13 @@ let wait_all t = State.all_processes t >>= fun all -> List.fold all ~init:(return ()) ~f:(fun prevm one -> - prevm >>= fun () -> wait t one >>= fun _ -> return () ) + prevm >>= fun () -> wait t one >>= fun _ -> return ()) let kill_all t = State.all_processes t >>= fun all -> List.fold all ~init:(return ()) ~f:(fun prevm one -> - prevm >>= fun () -> kill t one ) + prevm >>= fun () -> kill t one) let find_process_by_id ?(only_running = false) t ~f = State.all_processes t @@ -227,7 +227,7 @@ let find_process_by_id ?(only_running = false) t ~f = return (List.filter all ~f:(fun {process= {id; _}; lwt} -> if only_running && not (lwt#state = Lwt_process.Running) then false - else f id )) + else f id)) let cmds = ref 0 @@ -267,7 +267,7 @@ let run_cmdf state fmt = method err = err_lines method status = status - end) ) + end)) fmt let run_async_cmdf state f fmt = @@ -279,7 +279,7 @@ let run_async_cmdf state f fmt = >>= fun (proc_state, proc) -> f proc >>= fun res -> - wait state proc_state >>= fun status -> return (status, res) ) + wait state proc_state >>= fun status -> return (status, res)) fmt let run_successful_cmdf state fmt = @@ -289,7 +289,7 @@ let run_successful_cmdf state fmt = >>= fun res -> Process_result.Error.fail_if_non_zero res (sprintf "Shell command: %S" cmd) - >>= fun () -> return res ) + >>= fun () -> return res) fmt let run_genspio state name genspio = diff --git a/src/lib_network_sandbox/test_command_line.ml b/src/lib_network_sandbox/test_command_line.ml index 52ea3392a7cc8cec517be5ef2cec69c808e98e39..55e39ad96cf5477b568e7474c8b1f84c4dd2f3ce 100644 --- a/src/lib_network_sandbox/test_command_line.ml +++ b/src/lib_network_sandbox/test_command_line.ml @@ -10,7 +10,7 @@ module Run_command = struct (Console.say state EF.( custom (fun ppf -> Attached_result.pp ppf result ~pp_error))) - >>= fun () -> die 2 ) ) + >>= fun () -> die 2)) let term ~pp_error () = Cmdliner.Term.pure (fun (state, run) -> or_hard_fail state run ~pp_error) diff --git a/src/lib_network_sandbox/test_scenario.ml b/src/lib_network_sandbox/test_scenario.ml index 24689d301d5513f60e5f4c521b87f7950105e39b..6774b249cc76bbdb0a0aad312002aa9f2a5f2caa 100644 --- a/src/lib_network_sandbox/test_scenario.ml +++ b/src/lib_network_sandbox/test_scenario.ml @@ -93,7 +93,7 @@ module Topology = struct in let peers = List.filter_map peers ~f:(fun p -> - if p <> id then Some (p2p p) else None ) + if p <> id then Some (p2p p) else None) in Tezos_node.make ?protocol ~exec id ~expected_connections ~rpc_port ~p2p_port @@ -104,8 +104,8 @@ module Topology = struct (String.concat ~sep:"\n " (List.map names ~f:(fun n -> sprintf "%s:%d" n (p2p n)))) in - let rec make : type a. - ?extra_peers:string list -> prefix:string -> a network -> a = + let rec make : + type a. 
?extra_peers:string list -> prefix:string -> a network -> a = fun ?(extra_peers = []) ~prefix network -> let prefix = prefix ^ network.name in let make ?extra_peers n = make ?extra_peers ~prefix n in @@ -170,10 +170,10 @@ module Network = struct match String.split line ~on:' ' |> List.filter_map ~f:(fun s -> - match String.strip s with "" -> None | s -> Some s ) + match String.strip s with "" -> None | s -> Some s) with | ("tcp" | "tcp6") :: _ as row -> Some (`Tcp (idx, row)) - | _ -> Some (`Wrong (idx, line)) ) + | _ -> Some (`Wrong (idx, line))) in return rows @@ -183,7 +183,7 @@ module Network = struct match String.split addr ~on:':' with | [_; port] -> ( try Some (Int.of_string port, row) with _ -> None ) | _ -> None ) - | _ -> None ) + | _ -> None) let netstat_listening_ports state = netstat state @@ -211,7 +211,7 @@ module Network = struct match (taken rpc_port, taken p2p_port) with | None, None -> return () | Some p, _ -> if time_wait p then return () else fail "RPC" p - | _, Some p -> if time_wait p then return () else fail "P2P" p ) + | _, Some p -> if time_wait p then return () else fail "P2P" p) else return () ) >>= fun () -> let protocols = @@ -226,7 +226,7 @@ module Network = struct prev_m >>= fun () -> Running_processes.start state (Tezos_node.process state node) - >>= fun _ -> return () ) + >>= fun _ -> return ()) >>= fun () -> let node_0 = List.hd_exn nodes in let client = Tezos_client.of_node node_0 ~exec:client_exec in @@ -238,7 +238,7 @@ module Network = struct Dbg.e EF.(af "Waiting for all nodes to be bootstrapped") ; List_sequential.iter nodes ~f:(fun node -> let client = Tezos_client.of_node node ~exec:client_exec in - Tezos_client.bootstrapped client ~state ) + Tezos_client.bootstrapped client ~state) end let network_with_protocol ?external_peer_ports ?base_port ?(size = 5) ?protocol @@ -278,7 +278,7 @@ module Queries = struct match res with "null" -> `Null | unknown -> `Unknown unknown ) in return ((id, parsed) :: prev) - | false -> return ((id, `Failed) :: prev) ) + | false -> return ((id, `Failed) :: prev)) >>= fun results -> let sorted = List.sort results ~compare:(fun (a, _) (b, _) -> String.compare a b) @@ -322,9 +322,9 @@ module Queries = struct let not_readys = List.filter_map results ~f:(function | _, `Level n when check_level n -> None - | id, res -> Some (id, res) ) + | id, res -> Some (id, res)) in match not_readys with | [] -> return (`Done ()) - | ids -> return (`Not_done (msg ids)) ) + | ids -> return (`Not_done (msg ids))) end diff --git a/src/lib_network_sandbox/tezos_admin_client.ml b/src/lib_network_sandbox/tezos_admin_client.ml index fff5ad98d059d9a0c0a76ee1c0a27bf62eee4ac1..030fa3bfc50b43fcf739fbef76b684bc27b35787 100644 --- a/src/lib_network_sandbox/tezos_admin_client.ml +++ b/src/lib_network_sandbox/tezos_admin_client.ml @@ -30,7 +30,7 @@ module Command_error = struct Format.fprintf fmt "Admin-command-error:@ %s%s" msg (Option.value_map args ~default:"" ~f:(fun l -> sprintf " (args: %s)" - (List.map ~f:(sprintf "%S") l |> String.concat ~sep:", ") )) + (List.map ~f:(sprintf "%S") l |> String.concat ~sep:", "))) end open Command_error @@ -55,5 +55,5 @@ let inject_protocol admin state ~path = | _ :: _ :: hash :: _ when hash.[0] = 'P' -> return hash | _ -> failf "inject protocol: cannot parse hash of protocol: %s" - (String.concat ~sep:", " (List.map ~f:(sprintf "%S") res#out)) ) + (String.concat ~sep:", " (List.map ~f:(sprintf "%S") res#out))) >>= fun hash -> return (res, hash) diff --git a/src/lib_network_sandbox/tezos_admin_client.mli 
b/src/lib_network_sandbox/tezos_admin_client.mli index 2a3a342df0a8b5f36b399b393bd3637f2211cc16..2bf7bcf18cad2f519ce0b50612e3eb22ab86264c 100644 --- a/src/lib_network_sandbox/tezos_admin_client.mli +++ b/src/lib_network_sandbox/tezos_admin_client.mli @@ -1,8 +1,8 @@ -(** Wrapper around the [tezos-admin-client] application. *) open Internal_pervasives +(** Wrapper around the [tezos-admin-client] application. *) -(** [t] is very similar to {!Tezos_client.t}. *) type t = private {id: string; port: int; exec: Tezos_executable.t} +(** [t] is very similar to {!Tezos_client.t}. *) val of_client : exec:Tezos_executable.t -> Tezos_client.t -> t val of_node : exec:Tezos_executable.t -> Tezos_node.t -> t diff --git a/src/lib_network_sandbox/tezos_client.ml b/src/lib_network_sandbox/tezos_client.ml index b607caa892fc1093fc29101cf3b0438699f39b61..d727d86ab57c8f6c1a0ff7d0aaa65869266e12e9 100644 --- a/src/lib_network_sandbox/tezos_client.ml +++ b/src/lib_network_sandbox/tezos_client.ml @@ -24,7 +24,7 @@ let bootstrapped_script t ~state = let cmd = loop_until_true ~attempts:5 ~sleep:1 ~on_failed_attempt:(fun _ -> - eprintf (str "Bootstrap attempt failed\\n") [] ) + eprintf (str "Bootstrap attempt failed\\n") []) (succeeds (client_command t ~state ["bootstrapped"])) in seq @@ -99,7 +99,7 @@ module Command_error = struct Format.fprintf fmt "Client-command-error:@ %s%s" msg (Option.value_map args ~default:"" ~f:(fun l -> sprintf " (args: %s)" - (List.map ~f:(sprintf "%S") l |> String.concat ~sep:", ") )) + (List.map ~f:(sprintf "%S") l |> String.concat ~sep:", "))) end open Command_error @@ -168,8 +168,7 @@ let find_applied_in_mempool state ~client ~f = let mempool_has_operation state ~client ~kind = find_applied_in_mempool state ~client ~f:(fun o -> Jqo.field o ~k:"contents" - |> Jqo.list_exists ~f:(fun op -> Jqo.field op ~k:"kind" = `String kind) - ) + |> Jqo.list_exists ~f:(fun op -> Jqo.field op ~k:"kind" = `String kind)) >>= fun found_or_not -> return (found_or_not <> None) let block_has_operation state ~client ~level ~kind = @@ -183,7 +182,7 @@ let block_has_operation state ~client ~level ~kind = Jqo.list_exists olist ~f:(fun o -> Jqo.field o ~k:"contents" |> Jqo.list_exists ~f:(fun op -> - Jqo.field op ~k:"kind" = `String kind ) ) ) + Jqo.field op ~k:"kind" = `String kind))) in say state EF.( diff --git a/src/lib_network_sandbox/tezos_client.mli b/src/lib_network_sandbox/tezos_client.mli index 4355373a319719a79dab5e0fd3b1c9c5c13471f4..7f6b7173c7e5edfba5021c3061412b56502a1fda 100644 --- a/src/lib_network_sandbox/tezos_client.mli +++ b/src/lib_network_sandbox/tezos_client.mli @@ -1,5 +1,5 @@ -(** Wrapper around the main ["tezos-client"] application. *) open Internal_pervasives +(** Wrapper around the main ["tezos-client"] application. 
*) type t = {id: string; port: int; exec: Tezos_executable.t} type client = t diff --git a/src/lib_network_sandbox/tezos_executable.ml b/src/lib_network_sandbox/tezos_executable.ml index 9f65f492228f6f5cce3359879d56837e7fa470c2..0343e844aee63d0db86bae6a25624ac58729a486 100644 --- a/src/lib_network_sandbox/tezos_executable.ml +++ b/src/lib_network_sandbox/tezos_executable.ml @@ -45,7 +45,7 @@ let call t ~path args = ~var:(str "TEZOS_EVENTS_CONFIG") (ksprintf str "unix-files://%s?level-at-least=%s" (path // "events") level_at_least) ] - | _other -> assert false ) + | _other -> assert false) @ [ exec ["mkdir"; "-p"; path] ; write_stdout ~path:(path // "last-cmd" |> str) @@ -59,7 +59,7 @@ let cli_term kind prefix = { kind ; binary ; unix_files_sink= Some Unix_files_sink.all_info - ; environment= [] } ) + ; environment= [] }) $ Arg.( value & opt (some string) None diff --git a/src/lib_network_sandbox/tezos_executable.mli b/src/lib_network_sandbox/tezos_executable.mli index ebb366e2d72befd72f27dd5c12ed3a8410b4a0d4..569c93b5b8cfe57632ce4ab576a9c82a3a347f3a 100644 --- a/src/lib_network_sandbox/tezos_executable.mli +++ b/src/lib_network_sandbox/tezos_executable.mli @@ -17,15 +17,15 @@ module Unix_files_sink : sig val all_info : t end -(** The type [kind] is used to distinguish ['a t] executables. *) type kind = [`Node | `Baker | `Endorser | `Accuser | `Client | `Admin] +(** The type [kind] is used to distinguish ['a t] executables. *) -(** The wrapper of the tezos-executable. *) type t = private { kind: kind ; binary: string option ; unix_files_sink: Unix_files_sink.t option ; environment: (string * string) list } +(** The wrapper of the tezos-executable. *) val make : ?binary:string diff --git a/src/lib_network_sandbox/tezos_node.ml b/src/lib_network_sandbox/tezos_node.ml index 56186a5cedc3c2ffd50d198b4ee74b47fd78a232..078591283a8d39a694f151cf8c80e0baa12b3850 100644 --- a/src/lib_network_sandbox/tezos_node.ml +++ b/src/lib_network_sandbox/tezos_node.ml @@ -115,7 +115,7 @@ let connections node_list = List.find node_list ~f:(fun {p2p_port; _} -> p2p_port = p2p) with | None -> `Unknown p2p - | Some n -> `Peer n ) + | Some n -> `Peer n) in List.iter peer_nodes ~f:(fun peer_opt -> let conn = @@ -126,5 +126,5 @@ let connections node_list = `Duplex (node, peer) else `From_to (node, peer) in - res := Connection_set.add conn !res ) ) ; + res := Connection_set.add conn !res)) ; Connection_set.elements !res diff --git a/src/lib_network_sandbox/tezos_protocol.ml b/src/lib_network_sandbox/tezos_protocol.ml index 178d89c3f12b461ee638dcab9037bfe83d5b3a65..0a6374a7f3f6a7dc0e4f4085e3ef5de9a2105560 100644 --- a/src/lib_network_sandbox/tezos_protocol.ml +++ b/src/lib_network_sandbox/tezos_protocol.ml @@ -231,7 +231,7 @@ let default () = { id= "default-bootstrap" ; bootstrap_accounts= List.init 4 ~f:(fun n -> - (Account.of_namef "bootacc-%d" n, 4_000_000_000_000L) ) + (Account.of_namef "bootacc-%d" n, 4_000_000_000_000L)) ; dictator ; bootstrap_contracts= [(dictator, 10_000_000, `Sandbox_faucet)] ; expected_pow= 1 @@ -313,11 +313,11 @@ let cli_term () = let open Term in pure (fun remove_default_bas - (`Blocks_per_voting_period bpvp) - (`Protocol_hash hashopt) - (`Time_between_blocks tbb) - add_bootstraps - -> + (`Blocks_per_voting_period bpvp) + (`Protocol_hash hashopt) + (`Time_between_blocks tbb) + add_bootstraps + -> let d = default () in let id = if add_bootstraps = [] && remove_default_bas = false then d.id @@ -339,7 +339,7 @@ let cli_term () = ; hash ; bootstrap_accounts ; time_between_blocks - ; 
blocks_per_voting_period } ) + ; blocks_per_voting_period }) $ Arg.( value (flag @@ -370,8 +370,7 @@ let cli_term () = $ Arg.( pure (fun l -> List.map l ~f:(fun ((name, pubkey, pubkey_hash, private_key), tez) -> - (Account.key_pair name ~pubkey ~pubkey_hash ~private_key, tez) ) - ) + (Account.key_pair name ~pubkey ~pubkey_hash ~private_key, tez))) $ value (opt_all (pair ~sep:'@' (t4 ~sep:',' string string string string) int64) diff --git a/src/lib_network_sandbox/tezos_protocol.mli b/src/lib_network_sandbox/tezos_protocol.mli index 4a7f547608988d781be7c76192f4fb557bb15bd6..2724c544a9233193357538255650f15998f25bcc 100644 --- a/src/lib_network_sandbox/tezos_protocol.mli +++ b/src/lib_network_sandbox/tezos_protocol.mli @@ -74,7 +74,6 @@ module Voting_period : sig val to_string : t -> string end -(** [t] wraps bootstrap parameters for sandboxed protocols. *) type t = { id: string ; bootstrap_accounts: (Account.t * Int64.t) list @@ -89,6 +88,7 @@ type t = ; blocks_per_cycle: int ; preserved_cycles: int ; proof_of_work_threshold: int } +(** [t] wraps bootstrap parameters for sandboxed protocols. *) val compare : t -> t -> int val default : unit -> t diff --git a/src/lib_p2p/.ocamlformat b/src/lib_p2p/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_p2p/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_p2p/p2p.ml b/src/lib_p2p/p2p.ml index 549bd5b80d206e48364f450e9e0fb2d1bb603c6e..a0a5c5823db01a8dab35e2d70ef0a75d6226fa85 100644 --- a/src/lib_p2p/p2p.ml +++ b/src/lib_p2p/p2p.ml @@ -24,93 +24,85 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make(struct - let name = "p2p" - end) +include Internal_event.Legacy_logging.Make (struct + let name = "p2p" +end) type 'peer_meta peer_meta_config = 'peer_meta P2p_pool.peer_meta_config = { - peer_meta_encoding : 'peer_meta Data_encoding.t ; - peer_meta_initial : unit -> 'peer_meta ; - score : 'peer_meta -> float ; + peer_meta_encoding : 'peer_meta Data_encoding.t; + peer_meta_initial : unit -> 'peer_meta; + score : 'peer_meta -> float } type 'conn_meta conn_meta_config = 'conn_meta P2p_socket.metadata_config = { - conn_meta_encoding : 'conn_meta Data_encoding.t ; - conn_meta_value : P2p_peer.Id.t -> 'conn_meta ; - private_node : 'conn_meta -> bool ; + conn_meta_encoding : 'conn_meta Data_encoding.t; + conn_meta_value : P2p_peer.Id.t -> 'conn_meta; + private_node : 'conn_meta -> bool } type 'msg app_message_encoding = 'msg P2p_message.encoding = - Encoding : { - tag: int ; - title: string ; - encoding: 'a Data_encoding.t ; - wrap: 'a -> 'msg ; - unwrap: 'msg -> 'a option ; - max_length: int option ; - } -> 'msg app_message_encoding + | Encoding : + { tag : int; + title : string; + encoding : 'a Data_encoding.t; + wrap : 'a -> 'msg; + unwrap : 'msg -> 'a option; + max_length : int option } + -> 'msg app_message_encoding type 'msg message_config = 'msg P2p_pool.message_config = { - encoding : 'msg app_message_encoding list ; - chain_name : Distributed_db_version.name ; - distributed_db_versions : Distributed_db_version.t list ; + encoding : 'msg app_message_encoding list; + chain_name : 
Distributed_db_version.name; + distributed_db_versions : Distributed_db_version.t list } type config = { - listening_port : P2p_addr.port option ; - listening_addr : P2p_addr.t option ; - discovery_port : P2p_addr.port option ; - discovery_addr : Ipaddr.V4.t option ; - trusted_points : P2p_point.Id.t list ; - peers_file : string ; - private_mode : bool ; - identity : P2p_identity.t ; - proof_of_work_target : Crypto_box.target ; - disable_mempool : bool ; - trust_discovered_peers : bool ; - disable_testchain : bool ; - greylisting_config : P2p_point_state.Info.greylisting_config ; + listening_port : P2p_addr.port option; + listening_addr : P2p_addr.t option; + discovery_port : P2p_addr.port option; + discovery_addr : Ipaddr.V4.t option; + trusted_points : P2p_point.Id.t list; + peers_file : string; + private_mode : bool; + identity : P2p_identity.t; + proof_of_work_target : Crypto_box.target; + disable_mempool : bool; + trust_discovered_peers : bool; + disable_testchain : bool; + greylisting_config : P2p_point_state.Info.greylisting_config } type limits = { - - connection_timeout : Time.System.Span.t ; - authentication_timeout : Time.System.Span.t ; - greylist_timeout : Time.System.Span.t ; - maintenance_idle_time : Time.System.Span.t ; - - min_connections : int ; - expected_connections : int ; - max_connections : int ; - - backlog : int ; - max_incoming_connections : int ; - - max_download_speed : int option ; - max_upload_speed : int option ; - - read_buffer_size : int ; - read_queue_size : int option ; - write_queue_size : int option ; - incoming_app_message_queue_size : int option ; - incoming_message_queue_size : int option ; - outgoing_message_queue_size : int option ; - - known_peer_ids_history_size : int ; - known_points_history_size : int ; - max_known_peer_ids : (int * int) option ; - max_known_points : (int * int) option ; - - swap_linger : Time.System.Span.t ; - - binary_chunks_size : int option ; + connection_timeout : Time.System.Span.t; + authentication_timeout : Time.System.Span.t; + greylist_timeout : Time.System.Span.t; + maintenance_idle_time : Time.System.Span.t; + min_connections : int; + expected_connections : int; + max_connections : int; + backlog : int; + max_incoming_connections : int; + max_download_speed : int option; + max_upload_speed : int option; + read_buffer_size : int; + read_queue_size : int option; + write_queue_size : int option; + incoming_app_message_queue_size : int option; + incoming_message_queue_size : int option; + outgoing_message_queue_size : int option; + known_peer_ids_history_size : int; + known_points_history_size : int; + max_known_peer_ids : (int * int) option; + max_known_points : (int * int) option; + swap_linger : Time.System.Span.t; + binary_chunks_size : int option } let create_scheduler limits = - let max_upload_speed = - Option.map limits.max_upload_speed ~f:(( * ) 1024) in + let max_upload_speed = Option.map limits.max_upload_speed ~f:(( * ) 1024) in let max_download_speed = - Option.map limits.max_upload_speed ~f:(( * ) 1024) in + Option.map limits.max_upload_speed ~f:(( * ) 1024) + in P2p_io_scheduler.create ~read_buffer_size:limits.read_buffer_size ?max_upload_speed @@ -119,126 +111,123 @@ let create_scheduler limits = ?write_queue_size:limits.write_queue_size () -let create_connection_pool config limits meta_cfg conn_meta_cfg msg_cfg io_sched = - let pool_cfg = { - P2p_pool.identity = config.identity ; - proof_of_work_target = config.proof_of_work_target ; - listening_port = config.listening_port ; - trusted_points = 
config.trusted_points ; - peers_file = config.peers_file ; - private_mode = config.private_mode ; - greylisting_config = config.greylisting_config ; - min_connections = limits.min_connections ; - max_connections = limits.max_connections ; - max_incoming_connections = limits.max_incoming_connections ; - connection_timeout = limits.connection_timeout ; - authentication_timeout = limits.authentication_timeout ; - incoming_app_message_queue_size = limits.incoming_app_message_queue_size ; - incoming_message_queue_size = limits.incoming_message_queue_size ; - outgoing_message_queue_size = limits.outgoing_message_queue_size ; - known_peer_ids_history_size = limits.known_peer_ids_history_size ; - known_points_history_size = limits.known_points_history_size ; - max_known_points = limits.max_known_points ; - max_known_peer_ids = limits.max_known_peer_ids ; - swap_linger = limits.swap_linger ; - binary_chunks_size = limits.binary_chunks_size ; - } +let create_connection_pool config limits meta_cfg conn_meta_cfg msg_cfg + io_sched = + let pool_cfg = + { P2p_pool.identity = config.identity; + proof_of_work_target = config.proof_of_work_target; + listening_port = config.listening_port; + trusted_points = config.trusted_points; + peers_file = config.peers_file; + private_mode = config.private_mode; + greylisting_config = config.greylisting_config; + min_connections = limits.min_connections; + max_connections = limits.max_connections; + max_incoming_connections = limits.max_incoming_connections; + connection_timeout = limits.connection_timeout; + authentication_timeout = limits.authentication_timeout; + incoming_app_message_queue_size = limits.incoming_app_message_queue_size; + incoming_message_queue_size = limits.incoming_message_queue_size; + outgoing_message_queue_size = limits.outgoing_message_queue_size; + known_peer_ids_history_size = limits.known_peer_ids_history_size; + known_points_history_size = limits.known_points_history_size; + max_known_points = limits.max_known_points; + max_known_peer_ids = limits.max_known_peer_ids; + swap_linger = limits.swap_linger; + binary_chunks_size = limits.binary_chunks_size } in let pool = - P2p_pool.create pool_cfg meta_cfg conn_meta_cfg msg_cfg io_sched in + P2p_pool.create pool_cfg meta_cfg conn_meta_cfg msg_cfg io_sched + in pool let may_create_discovery_worker _limits config pool = - match (config.listening_port, config.discovery_port, config.discovery_addr) with + match + (config.listening_port, config.discovery_port, config.discovery_addr) + with | (Some listening_port, Some discovery_port, Some discovery_addr) -> - Some (P2p_discovery.create pool - config.identity.peer_id - ~listening_port - ~discovery_port ~discovery_addr - ~trust_discovered_peers:config.trust_discovered_peers) + Some + (P2p_discovery.create + pool + config.identity.peer_id + ~listening_port + ~discovery_port + ~discovery_addr + ~trust_discovered_peers:config.trust_discovered_peers) | (_, _, _) -> None let bounds ~min ~expected ~max = assert (min <= expected) ; assert (expected <= max) ; - let step_min = - (expected - min) / 3 - and step_max = - (max - expected) / 3 in - { P2p_maintenance.min_threshold = min + step_min ; - min_target = min + 2 * step_min ; - max_target = max - 2 * step_max ; - max_threshold = max - step_max ; - } + let step_min = (expected - min) / 3 and step_max = (max - expected) / 3 in + { P2p_maintenance.min_threshold = min + step_min; + min_target = min + (2 * step_min); + max_target = max - (2 * step_max); + max_threshold = max - step_max } let 
create_maintenance_worker limits pool config = let bounds = bounds ~min:limits.min_connections ~expected:limits.expected_connections - ~max:limits.max_connections in - let maintenance_config = { - P2p_maintenance. - maintenance_idle_time = limits.maintenance_idle_time ; - greylist_timeout = limits.greylist_timeout ; - private_mode = config.private_mode ; - } in + ~max:limits.max_connections + in + let maintenance_config = + { P2p_maintenance.maintenance_idle_time = limits.maintenance_idle_time; + greylist_timeout = limits.greylist_timeout; + private_mode = config.private_mode } + in let discovery = may_create_discovery_worker limits config pool in P2p_maintenance.create ?discovery maintenance_config bounds pool let may_create_welcome_worker config limits pool = match config.listening_port with - | None -> Lwt.return_none + | None -> + Lwt.return_none | Some port -> P2p_welcome.create - ~backlog:limits.backlog pool + ~backlog:limits.backlog + pool ?addr:config.listening_addr - port >>= fun w -> - Lwt.return_some w + port + >>= fun w -> Lwt.return_some w type ('msg, 'peer_meta, 'conn_meta) connection = ('msg, 'peer_meta, 'conn_meta) P2p_pool.connection module Real = struct - type ('msg, 'peer_meta, 'conn_meta) net = { - config: config ; - limits: limits ; - io_sched: P2p_io_scheduler.t ; - pool: ('msg, 'peer_meta, 'conn_meta) P2p_pool.t ; - maintenance: 'peer_meta P2p_maintenance.t ; - welcome: P2p_welcome.t option ; + config : config; + limits : limits; + io_sched : P2p_io_scheduler.t; + pool : ('msg, 'peer_meta, 'conn_meta) P2p_pool.t; + maintenance : 'peer_meta P2p_maintenance.t; + welcome : P2p_welcome.t option } let create ~config ~limits meta_cfg conn_meta_cfg msg_cfg = let io_sched = create_scheduler limits in create_connection_pool - config limits meta_cfg conn_meta_cfg msg_cfg io_sched >>= fun pool -> + config + limits + meta_cfg + conn_meta_cfg + msg_cfg + io_sched + >>= fun pool -> let maintenance = create_maintenance_worker limits pool config in - may_create_welcome_worker config limits pool >>= fun welcome -> - return { - config ; - limits ; - io_sched ; - pool ; - maintenance ; - welcome ; - } + may_create_welcome_worker config limits pool + >>= fun welcome -> + return {config; limits; io_sched; pool; maintenance; welcome} - let peer_id { config ; _ } = config.identity.peer_id + let peer_id {config; _} = config.identity.peer_id - - let maintain { maintenance ; _ } () = - P2p_maintenance.maintain maintenance + let maintain {maintenance; _} () = P2p_maintenance.maintain maintenance let activate t () = - log_info "activate"; - begin - match t.welcome with - | None -> () - | Some w -> P2p_welcome.activate w - end ; + log_info "activate" ; + (match t.welcome with None -> () | Some w -> P2p_welcome.activate w) ; P2p_maintenance.activate t.maintenance ; Lwt.async (fun () -> P2p_maintenance.maintain t.maintenance) ; () @@ -248,175 +237,192 @@ module Real = struct (* returns when all workers have shutted down in the opposite creation order. 
*) let shutdown net () = - Lwt_utils.may ~f:P2p_welcome.shutdown net.welcome >>= fun () -> - P2p_maintenance.shutdown net.maintenance >>= fun () -> - P2p_pool.destroy net.pool >>= fun () -> - P2p_io_scheduler.shutdown ~timeout:3.0 net.io_sched - - let connections { pool ; _ } () = - P2p_pool.Connection.fold pool - ~init:[] ~f:(fun _peer_id c acc -> c :: acc) - let find_connection { pool ; _ } peer_id = + Lwt_utils.may ~f:P2p_welcome.shutdown net.welcome + >>= fun () -> + P2p_maintenance.shutdown net.maintenance + >>= fun () -> + P2p_pool.destroy net.pool + >>= fun () -> P2p_io_scheduler.shutdown ~timeout:3.0 net.io_sched + + let connections {pool; _} () = + P2p_pool.Connection.fold pool ~init:[] ~f:(fun _peer_id c acc -> c :: acc) + + let find_connection {pool; _} peer_id = P2p_pool.Connection.find_by_peer_id pool peer_id - let disconnect ?wait conn = - P2p_pool.disconnect ?wait conn - let connection_info _net conn = - P2p_pool.Connection.info conn + + let disconnect ?wait conn = P2p_pool.disconnect ?wait conn + + let connection_info _net conn = P2p_pool.Connection.info conn + let connection_local_metadata _net conn = P2p_pool.Connection.local_metadata conn + let connection_remote_metadata _net conn = P2p_pool.Connection.remote_metadata conn - let connection_stat _net conn = - P2p_pool.Connection.stat conn - let global_stat { pool ; _ } () = - P2p_pool.pool_stat pool - let set_peer_metadata { pool ; _ } conn meta = + + let connection_stat _net conn = P2p_pool.Connection.stat conn + + let global_stat {pool; _} () = P2p_pool.pool_stat pool + + let set_peer_metadata {pool; _} conn meta = P2p_pool.Peers.set_peer_metadata pool conn meta - let get_peer_metadata { pool ; _ } conn = + + let get_peer_metadata {pool; _} conn = P2p_pool.Peers.get_peer_metadata pool conn let recv _net conn = - P2p_pool.read conn >>=? fun msg -> - lwt_debug "message read from %a" + P2p_pool.read conn + >>=? 
fun msg -> + lwt_debug + "message read from %a" P2p_peer.Id.pp - (P2p_pool.Connection.info conn).peer_id >>= fun () -> - return msg + (P2p_pool.Connection.info conn).peer_id + >>= fun () -> return msg let rec recv_any net () = let pipes = - P2p_pool.Connection.fold - net.pool ~init:[] - ~f:begin fun _peer_id conn acc -> - (P2p_pool.is_readable conn >>= function - | Ok () -> Lwt.return_some conn - | Error _ -> Lwt_utils.never_ending ()) :: acc - end in - Lwt.pick ( - ( P2p_pool.Pool_event.wait_new_connection net.pool >>= fun () -> - Lwt.return_none ):: - pipes) >>= function - | None -> recv_any net () - | Some conn -> - P2p_pool.read conn >>= function + P2p_pool.Connection.fold net.pool ~init:[] ~f:(fun _peer_id conn acc -> + ( P2p_pool.is_readable conn + >>= function + | Ok () -> + Lwt.return_some conn + | Error _ -> + Lwt_utils.never_ending () ) + :: acc) + in + Lwt.pick + ( ( P2p_pool.Pool_event.wait_new_connection net.pool + >>= fun () -> Lwt.return_none ) + :: pipes ) + >>= function + | None -> + recv_any net () + | Some conn -> ( + P2p_pool.read conn + >>= function | Ok msg -> - lwt_debug "message read from %a" + lwt_debug + "message read from %a" P2p_peer.Id.pp - (P2p_pool.Connection.info conn).peer_id >>= fun () -> - Lwt.return (conn, msg) + (P2p_pool.Connection.info conn).peer_id + >>= fun () -> Lwt.return (conn, msg) | Error _ -> - lwt_debug "error reading message from %a" + lwt_debug + "error reading message from %a" P2p_peer.Id.pp - (P2p_pool.Connection.info conn).peer_id >>= fun () -> - Lwt_unix.yield () >>= fun () -> - recv_any net () + (P2p_pool.Connection.info conn).peer_id + >>= fun () -> Lwt_unix.yield () >>= fun () -> recv_any net () ) let send _net conn m = - P2p_pool.write conn m >>= function + P2p_pool.write conn m + >>= function | Ok () -> - lwt_debug "message sent to %a" + lwt_debug + "message sent to %a" P2p_peer.Id.pp - (P2p_pool.Connection.info conn).peer_id >>= fun () -> - return_unit + (P2p_pool.Connection.info conn).peer_id + >>= fun () -> return_unit | Error err -> - lwt_debug "error sending message from %a: %a" + lwt_debug + "error sending message from %a: %a" P2p_peer.Id.pp (P2p_pool.Connection.info conn).peer_id - pp_print_error err >>= fun () -> - Lwt.return_error err + pp_print_error + err + >>= fun () -> Lwt.return_error err let try_send _net conn v = match P2p_pool.write_now conn v with | Ok v -> - debug "message trysent to %a" + debug + "message trysent to %a" P2p_peer.Id.pp (P2p_pool.Connection.info conn).peer_id ; v | Error err -> - debug "error trysending message to %a@ %a" + debug + "error trysending message to %a@ %a" P2p_peer.Id.pp (P2p_pool.Connection.info conn).peer_id - pp_print_error err ; + pp_print_error + err ; false - let broadcast { pool ; _ } msg = + let broadcast {pool; _} msg = P2p_pool.write_all pool msg ; debug "message broadcasted" - let fold_connections { pool ; _ } ~init ~f = + let fold_connections {pool; _} ~init ~f = P2p_pool.Connection.fold pool ~init ~f - let iter_connections { pool ; _ } f = - P2p_pool.Connection.fold pool - ~init:() - ~f:(fun gid conn () -> f gid conn) - - let on_new_connection { pool ; _ } f = - P2p_pool.on_new_connection pool f + let iter_connections {pool; _} f = + P2p_pool.Connection.fold pool ~init:() ~f:(fun gid conn () -> f gid conn) + let on_new_connection {pool; _} f = P2p_pool.on_new_connection pool f end module Fake = struct - let id = P2p_identity.generate (Crypto_box.make_target 0.) 
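(* A minimal usage sketch, not part of this patch: how application code might
   combine the non-blocking [try_send] with the blocking [send] shown above.
   [net], [conn] and [msg] are assumed to be supplied by the caller. *)
let send_best_effort net conn msg =
  if P2p.try_send net conn msg then Lwt.return_unit
  else
    (* write queue full or closed: fall back to the blocking write and
       ignore a possible [Error], since gossip messages may be dropped *)
    Lwt.map ignore (P2p.send net conn msg)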
- let empty_stat = { - P2p_stat.total_sent = 0L ; - total_recv = 0L ; - current_inflow = 0 ; - current_outflow = 0 ; - } - let connection_info announced_version faked_metadata = { - P2p_connection.Info.incoming = false ; - peer_id = id.peer_id ; - id_point = (Ipaddr.V6.unspecified, None) ; - remote_socket_port = 0 ; - announced_version ; - local_metadata = faked_metadata ; - remote_metadata = faked_metadata ; - private_node = false ; - } + let empty_stat = + { P2p_stat.total_sent = 0L; + total_recv = 0L; + current_inflow = 0; + current_outflow = 0 } + + let connection_info announced_version faked_metadata = + { P2p_connection.Info.incoming = false; + peer_id = id.peer_id; + id_point = (Ipaddr.V6.unspecified, None); + remote_socket_port = 0; + announced_version; + local_metadata = faked_metadata; + remote_metadata = faked_metadata; + private_node = false } end type ('msg, 'peer_meta, 'conn_meta) t = { - announced_version : Network_version.t ; - peer_id : P2p_peer.Id.t ; - maintain : unit -> unit Lwt.t ; - roll : unit -> unit Lwt.t ; - shutdown : unit -> unit Lwt.t ; - connections : unit -> ('msg, 'peer_meta, 'conn_meta) connection list ; + announced_version : Network_version.t; + peer_id : P2p_peer.Id.t; + maintain : unit -> unit Lwt.t; + roll : unit -> unit Lwt.t; + shutdown : unit -> unit Lwt.t; + connections : unit -> ('msg, 'peer_meta, 'conn_meta) connection list; find_connection : - P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection option ; + P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection option; disconnect : - ?wait:bool -> ('msg, 'peer_meta, 'conn_meta) connection -> unit Lwt.t ; + ?wait:bool -> ('msg, 'peer_meta, 'conn_meta) connection -> unit Lwt.t; connection_info : - ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta P2p_connection.Info.t ; + ('msg, 'peer_meta, 'conn_meta) connection -> + 'conn_meta P2p_connection.Info.t; connection_local_metadata : - ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta ; + ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta; connection_remote_metadata : - ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta ; - connection_stat : ('msg, 'peer_meta, 'conn_meta) connection -> P2p_stat.t ; - global_stat : unit -> P2p_stat.t ; - get_peer_metadata : P2p_peer.Id.t -> 'peer_meta ; - set_peer_metadata : P2p_peer.Id.t -> 'peer_meta -> unit ; - recv : ('msg, 'peer_meta, 'conn_meta) connection -> 'msg tzresult Lwt.t ; - recv_any : unit -> (('msg, 'peer_meta, 'conn_meta) connection * 'msg) Lwt.t ; + ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta; + connection_stat : ('msg, 'peer_meta, 'conn_meta) connection -> P2p_stat.t; + global_stat : unit -> P2p_stat.t; + get_peer_metadata : P2p_peer.Id.t -> 'peer_meta; + set_peer_metadata : P2p_peer.Id.t -> 'peer_meta -> unit; + recv : ('msg, 'peer_meta, 'conn_meta) connection -> 'msg tzresult Lwt.t; + recv_any : unit -> (('msg, 'peer_meta, 'conn_meta) connection * 'msg) Lwt.t; send : - ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> unit tzresult Lwt.t ; - try_send : ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> bool ; - broadcast : 'msg -> unit ; - pool : ('msg, 'peer_meta, 'conn_meta) P2p_pool.t option ; + ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> unit tzresult Lwt.t; + try_send : ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> bool; + broadcast : 'msg -> unit; + pool : ('msg, 'peer_meta, 'conn_meta) P2p_pool.t option; fold_connections : - 'a. 
init: 'a -> - f:(P2p_peer.Id.t -> - ('msg, 'peer_meta, 'conn_meta) connection -> 'a -> 'a) -> 'a ; + 'a. init:'a -> + f:(P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> 'a -> 'a) -> + 'a; iter_connections : - (P2p_peer.Id.t -> - ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> unit ; + (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> + unit; on_new_connection : - (P2p_peer.Id.t -> - ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> unit ; - activate : unit -> unit ; + (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> + unit; + activate : unit -> unit } + type ('msg, 'peer_meta, 'conn_meta) net = ('msg, 'peer_meta, 'conn_meta) t let announced_version net = net.announced_version @@ -425,142 +431,162 @@ let pool net = net.pool let check_limits = let fail_1 v orig = - if not (Ptime.Span.compare v Ptime.Span.zero <= 0) then - return_unit + if not (Ptime.Span.compare v Ptime.Span.zero <= 0) then return_unit else - Error_monad.failwith "value of option %S cannot be negative or null@." + Error_monad.failwith + "value of option %S cannot be negative or null@." orig in let fail_2 v orig = - if not (v < 0) then - return_unit - else - Error_monad.failwith "value of option %S cannot be negative@." orig + if not (v < 0) then return_unit + else Error_monad.failwith "value of option %S cannot be negative@." orig in fun c -> - fail_1 c.authentication_timeout - "authentication-timeout" >>=? fun () -> - fail_2 c.min_connections - "min-connections" >>=? fun () -> - fail_2 c.expected_connections - "expected-connections" >>=? fun () -> - fail_2 c.max_connections - "max-connections" >>=? fun () -> - fail_2 c.max_incoming_connections - "max-incoming-connections" >>=? fun () -> - fail_2 c.read_buffer_size - "read-buffer-size" >>=? fun () -> - fail_2 c.known_peer_ids_history_size - "known-peer-ids-history-size" >>=? fun () -> - fail_2 c.known_points_history_size - "known-points-history-size" >>=? fun () -> - fail_1 c.swap_linger - "swap-linger" >>=? fun () -> - begin - match c.binary_chunks_size with - | None -> return_unit - | Some size -> P2p_socket.check_binary_chunks_size size - end >>=? fun () -> - return_unit + fail_1 c.authentication_timeout "authentication-timeout" + >>=? fun () -> + fail_2 c.min_connections "min-connections" + >>=? fun () -> + fail_2 c.expected_connections "expected-connections" + >>=? fun () -> + fail_2 c.max_connections "max-connections" + >>=? fun () -> + fail_2 c.max_incoming_connections "max-incoming-connections" + >>=? fun () -> + fail_2 c.read_buffer_size "read-buffer-size" + >>=? fun () -> + fail_2 c.known_peer_ids_history_size "known-peer-ids-history-size" + >>=? fun () -> + fail_2 c.known_points_history_size "known-points-history-size" + >>=? fun () -> + fail_1 c.swap_linger "swap-linger" + >>=? fun () -> + ( match c.binary_chunks_size with + | None -> + return_unit + | Some size -> + P2p_socket.check_binary_chunks_size size ) + >>=? fun () -> return_unit let create ~config ~limits peer_cfg conn_cfg msg_cfg = - check_limits limits >>=? fun () -> - Real.create ~config ~limits peer_cfg conn_cfg msg_cfg >>=? 
fun net -> - return { - announced_version = - Network_version.announced - ~chain_name: msg_cfg.chain_name - ~distributed_db_versions: msg_cfg.distributed_db_versions - ~p2p_versions: P2p_version.supported ; - peer_id = Real.peer_id net ; - maintain = Real.maintain net ; - roll = Real.roll net ; - shutdown = Real.shutdown net ; - connections = Real.connections net ; - find_connection = Real.find_connection net ; - disconnect = Real.disconnect ; - connection_info = Real.connection_info net ; - connection_local_metadata = Real.connection_local_metadata net ; - connection_remote_metadata = Real.connection_remote_metadata net ; - connection_stat = Real.connection_stat net ; - global_stat = Real.global_stat net ; - get_peer_metadata = Real.get_peer_metadata net ; - set_peer_metadata = Real.set_peer_metadata net ; - recv = Real.recv net ; - recv_any = Real.recv_any net ; - send = Real.send net ; - try_send = Real.try_send net ; - broadcast = Real.broadcast net ; - pool = Some net.pool ; - fold_connections = (fun ~init ~f -> Real.fold_connections net ~init ~f) ; - iter_connections = Real.iter_connections net ; - on_new_connection = Real.on_new_connection net ; - activate = Real.activate net ; - } + check_limits limits + >>=? fun () -> + Real.create ~config ~limits peer_cfg conn_cfg msg_cfg + >>=? fun net -> + return + { announced_version = + Network_version.announced + ~chain_name:msg_cfg.chain_name + ~distributed_db_versions:msg_cfg.distributed_db_versions + ~p2p_versions:P2p_version.supported; + peer_id = Real.peer_id net; + maintain = Real.maintain net; + roll = Real.roll net; + shutdown = Real.shutdown net; + connections = Real.connections net; + find_connection = Real.find_connection net; + disconnect = Real.disconnect; + connection_info = Real.connection_info net; + connection_local_metadata = Real.connection_local_metadata net; + connection_remote_metadata = Real.connection_remote_metadata net; + connection_stat = Real.connection_stat net; + global_stat = Real.global_stat net; + get_peer_metadata = Real.get_peer_metadata net; + set_peer_metadata = Real.set_peer_metadata net; + recv = Real.recv net; + recv_any = Real.recv_any net; + send = Real.send net; + try_send = Real.try_send net; + broadcast = Real.broadcast net; + pool = Some net.pool; + fold_connections = (fun ~init ~f -> Real.fold_connections net ~init ~f); + iter_connections = Real.iter_connections net; + on_new_connection = Real.on_new_connection net; + activate = Real.activate net } let activate t = - log_info "activate P2P layer !"; + log_info "activate P2P layer !" 
; t.activate () let faked_network (msg_cfg : 'msg message_config) peer_cfg faked_metadata = let announced_version = Network_version.announced - ~chain_name: msg_cfg.chain_name - ~distributed_db_versions: msg_cfg.distributed_db_versions - ~p2p_versions: P2p_version.supported in - { - announced_version ; - peer_id = Fake.id.peer_id ; - maintain = Lwt.return ; - roll = Lwt.return ; - shutdown = Lwt.return ; - connections = (fun () -> []) ; - find_connection = (fun _ -> None) ; - disconnect = (fun ?wait:_ _ -> Lwt.return_unit) ; + ~chain_name:msg_cfg.chain_name + ~distributed_db_versions:msg_cfg.distributed_db_versions + ~p2p_versions:P2p_version.supported + in + { announced_version; + peer_id = Fake.id.peer_id; + maintain = Lwt.return; + roll = Lwt.return; + shutdown = Lwt.return; + connections = (fun () -> []); + find_connection = (fun _ -> None); + disconnect = (fun ?wait:_ _ -> Lwt.return_unit); connection_info = - (fun _ -> Fake.connection_info announced_version faked_metadata) ; - connection_local_metadata = (fun _ -> faked_metadata) ; - connection_remote_metadata = (fun _ -> faked_metadata) ; - connection_stat = (fun _ -> Fake.empty_stat) ; - global_stat = (fun () -> Fake.empty_stat) ; - get_peer_metadata = (fun _ -> peer_cfg.peer_meta_initial ()) ; - set_peer_metadata = (fun _ _ -> ()) ; - recv = (fun _ -> Lwt_utils.never_ending ()) ; - recv_any = (fun () -> Lwt_utils.never_ending ()) ; - send = (fun _ _ -> fail P2p_errors.Connection_closed) ; - try_send = (fun _ _ -> false) ; - fold_connections = (fun ~init ~f:_ -> init) ; - iter_connections = (fun _f -> ()) ; - on_new_connection = (fun _f -> ()) ; - broadcast = ignore ; - pool = None ; - activate = (fun _ -> ()) ; - } + (fun _ -> Fake.connection_info announced_version faked_metadata); + connection_local_metadata = (fun _ -> faked_metadata); + connection_remote_metadata = (fun _ -> faked_metadata); + connection_stat = (fun _ -> Fake.empty_stat); + global_stat = (fun () -> Fake.empty_stat); + get_peer_metadata = (fun _ -> peer_cfg.peer_meta_initial ()); + set_peer_metadata = (fun _ _ -> ()); + recv = (fun _ -> Lwt_utils.never_ending ()); + recv_any = (fun () -> Lwt_utils.never_ending ()); + send = (fun _ _ -> fail P2p_errors.Connection_closed); + try_send = (fun _ _ -> false); + fold_connections = (fun ~init ~f:_ -> init); + iter_connections = (fun _f -> ()); + on_new_connection = (fun _f -> ()); + broadcast = ignore; + pool = None; + activate = (fun _ -> ()) } let peer_id net = net.peer_id + let maintain net = net.maintain () + let roll net = net.roll () + let shutdown net = net.shutdown () + let connections net = net.connections () + let disconnect net = net.disconnect + let find_connection net = net.find_connection + let connection_info net = net.connection_info + let connection_local_metadata net = net.connection_local_metadata + let connection_remote_metadata net = net.connection_remote_metadata + let connection_stat net = net.connection_stat + let global_stat net = net.global_stat () + let get_peer_metadata net = net.get_peer_metadata + let set_peer_metadata net = net.set_peer_metadata + let recv net = net.recv + let recv_any net = net.recv_any () + let send net = net.send + let try_send net = net.try_send + let broadcast net = net.broadcast + let fold_connections net = net.fold_connections + let iter_connections net = net.iter_connections + let on_new_connection net = net.on_new_connection let greylist_addr net addr = Option.iter net.pool ~f:(fun pool -> P2p_pool.greylist_addr pool addr) + let greylist_peer net peer_id = 
Option.iter net.pool ~f:(fun pool -> P2p_pool.greylist_peer pool peer_id) diff --git a/src/lib_p2p/p2p.mli b/src/lib_p2p/p2p.mli index 15843292b9fd015bc2a1404c56075a280acbccc6..1931a5b6d07fcfe6fd58697959db9816604bf4e2 100644 --- a/src/lib_p2p/p2p.mli +++ b/src/lib_p2p/p2p.mli @@ -35,134 +35,101 @@ type 'peer_meta peer_meta_config = { peer_meta_encoding : 'peer_meta Data_encoding.t; peer_meta_initial : unit -> 'peer_meta; - score : 'peer_meta -> float ; + score : 'peer_meta -> float } type 'conn_meta conn_meta_config = { conn_meta_encoding : 'conn_meta Data_encoding.t; - conn_meta_value : P2p_peer.Id.t -> 'conn_meta ; - private_node : 'conn_meta -> bool ; + conn_meta_value : P2p_peer.Id.t -> 'conn_meta; + private_node : 'conn_meta -> bool } type 'msg message_config = { - encoding : 'msg P2p_message.encoding list ; - chain_name : Distributed_db_version.name ; - distributed_db_versions : Distributed_db_version.t list ; + encoding : 'msg P2p_message.encoding list; + chain_name : Distributed_db_version.name; + distributed_db_versions : Distributed_db_version.t list } (** Network configuration *) type config = { - listening_port : P2p_addr.port option; - (** Tells if incoming connections accepted, precising the TCP port + (** Tells if incoming connections accepted, precising the TCP port on which the peer can be reached (default: [9732])*) - listening_addr : P2p_addr.t option; - (** When incoming connections are accepted, precise on which + (** When incoming connections are accepted, precise on which IP adddress the node listen (default: [[::]]). *) - discovery_port : P2p_addr.port option; - (** Tells if local peer discovery is enabled, precising the TCP port + (** Tells if local peer discovery is enabled, precising the TCP port on which the peer can be reached (default: [10732]) *) - discovery_addr : Ipaddr.V4.t option; - (** When local peer discovery is enabled, precise on which + (** When local peer discovery is enabled, precise on which IP address messages are broadcasted (default: [255.255.255.255]). *) - - trusted_points : P2p_point.Id.t list ; - (** List of hard-coded known peers to bootstrap the network from. *) - - peers_file : string ; - (** The path to the JSON file where the metadata associated to + trusted_points : P2p_point.Id.t list; + (** List of hard-coded known peers to bootstrap the network from. *) + peers_file : string; + (** The path to the JSON file where the metadata associated to peer_ids are loaded / stored. *) - - private_mode : bool ; - (** If [true], only open outgoing/accept incoming connections + private_mode : bool; + (** If [true], only open outgoing/accept incoming connections to/from peers whose addresses are in [trusted_peers], and inform these peers that the identity of this node should be revealed to the rest of the network. *) - - identity : P2p_identity.t ; - (** Cryptographic identity of the peer. *) - - proof_of_work_target : Crypto_box.target ; - (** Expected level of proof of work of peers' identity. *) - - disable_mempool : bool ; - (** If [true], all non-empty mempools will be ignored. *) - - trust_discovered_peers : bool ; - (** If [true], peers discovered on the local network will be trusted. *) - - disable_testchain : bool ; - (** If [true], testchain related messages will be ignored. *) - - greylisting_config : P2p_point_state.Info.greylisting_config ; - (** The greylisting configuration. *) + identity : P2p_identity.t; (** Cryptographic identity of the peer. 
*) + proof_of_work_target : Crypto_box.target; + (** Expected level of proof of work of peers' identity. *) + disable_mempool : bool; + (** If [true], all non-empty mempools will be ignored. *) + trust_discovered_peers : bool; + (** If [true], peers discovered on the local network will be trusted. *) + disable_testchain : bool; + (** If [true], testchain related messages will be ignored. *) + greylisting_config : P2p_point_state.Info.greylisting_config + (** The greylisting configuration. *) } (** Network capacities *) type limits = { - - connection_timeout : Time.System.Span.t ; - (** Maximum time allowed to the establishment of a connection. *) - - authentication_timeout : Time.System.Span.t ; - (** Delay granted to a peer to perform authentication, in seconds. *) - - greylist_timeout : Time.System.Span.t ; - (** GC delay for the grelists tables, in seconds. *) - - maintenance_idle_time: Time.System.Span.t ; - (** How long to wait at most, in seconds, before running a maintenance loop. *) - - min_connections : int ; - (** Strict minimum number of connections (triggers an urgent maintenance) *) - - expected_connections : int ; - (** Targeted number of connections to reach when bootstrapping / maintaining *) - - max_connections : int ; - (** Maximum number of connections (exceeding peers are disconnected) *) - - backlog : int ; - (** Argument of [Lwt_unix.accept].*) - - max_incoming_connections : int ; - (** Maximum not-yet-authenticated incoming connections. *) - - max_download_speed : int option ; - (** Hard-limit in the number of bytes received per second. *) - - max_upload_speed : int option ; - (** Hard-limit in the number of bytes sent per second. *) - - read_buffer_size : int ; - (** Size in bytes of the buffer passed to [Lwt_unix.read]. *) - - read_queue_size : int option ; - write_queue_size : int option ; - incoming_app_message_queue_size : int option ; - incoming_message_queue_size : int option ; - outgoing_message_queue_size : int option ; - (** Various bounds for internal queues. *) - - known_peer_ids_history_size : int ; - known_points_history_size : int ; - (** Size of circular log buffers, in number of events recorded. *) - - max_known_peer_ids : (int * int) option ; - max_known_points : (int * int) option ; - (** Optional limitation of internal hashtables (max, target) *) - - swap_linger : Time.System.Span.t ; - (** Peer swapping does not occur more than once during a timespan of + connection_timeout : Time.System.Span.t; + (** Maximum time allowed to the establishment of a connection. *) + authentication_timeout : Time.System.Span.t; + (** Delay granted to a peer to perform authentication, in seconds. *) + greylist_timeout : Time.System.Span.t; + (** GC delay for the grelists tables, in seconds. *) + maintenance_idle_time : Time.System.Span.t; + (** How long to wait at most, in seconds, before running a maintenance loop. *) + min_connections : int; + (** Strict minimum number of connections (triggers an urgent maintenance) *) + expected_connections : int; + (** Targeted number of connections to reach when bootstrapping / maintaining *) + max_connections : int; + (** Maximum number of connections (exceeding peers are disconnected) *) + backlog : int; (** Argument of [Lwt_unix.accept].*) + max_incoming_connections : int; + (** Maximum not-yet-authenticated incoming connections. *) + max_download_speed : int option; + (** Hard-limit in the number of bytes received per second. *) + max_upload_speed : int option; + (** Hard-limit in the number of bytes sent per second. 
*) + read_buffer_size : int; + (** Size in bytes of the buffer passed to [Lwt_unix.read]. *) + read_queue_size : int option; + write_queue_size : int option; + incoming_app_message_queue_size : int option; + incoming_message_queue_size : int option; + outgoing_message_queue_size : int option; + (** Various bounds for internal queues. *) + known_peer_ids_history_size : int; + known_points_history_size : int; + (** Size of circular log buffers, in number of events recorded. *) + max_known_peer_ids : (int * int) option; + max_known_points : (int * int) option; + (** Optional limitation of internal hashtables (max, target) *) + swap_linger : Time.System.Span.t; + (** Peer swapping does not occur more than once during a timespan of [swap_linger] seconds. *) - - binary_chunks_size : int option ; - (** Size (in bytes) of binary blocks that are sent to other + binary_chunks_size : int option + (** Size (in bytes) of binary blocks that are sent to other peers. Default value is 64 kB. Max value is 64kB. *) - } (** Type of a P2P layer instance, parametrized by: @@ -171,11 +138,14 @@ type limits = { ['conn_meta]: type of the metadata associated with connection (ack_cfg) *) type ('msg, 'peer_meta, 'conn_meta) t + type ('msg, 'peer_meta, 'conn_meta) net = ('msg, 'peer_meta, 'conn_meta) t -val announced_version : ('msg, 'peer_meta, 'conn_meta) net -> Network_version.t +val announced_version : ('msg, 'peer_meta, 'conn_meta) net -> Network_version.t -val pool : ('msg, 'peer_meta, 'conn_meta) net -> ('msg, 'peer_meta, 'conn_meta) P2p_pool.t option +val pool : + ('msg, 'peer_meta, 'conn_meta) net -> + ('msg, 'peer_meta, 'conn_meta) P2p_pool.t option (** A faked p2p layer, which do not initiate any connection nor open any listening socket *) @@ -187,9 +157,12 @@ val faked_network : (** Main network initialisation function *) val create : - config:config -> limits:limits -> - 'peer_meta peer_meta_config -> 'conn_meta conn_meta_config -> - 'msg message_config -> ('msg, 'peer_meta, 'conn_meta) net tzresult Lwt.t + config:config -> + limits:limits -> + 'peer_meta peer_meta_config -> + 'conn_meta conn_meta_config -> + 'msg message_config -> + ('msg, 'peer_meta, 'conn_meta) net tzresult Lwt.t val activate : ('msg, 'peer_meta, 'conn_meta) net -> unit @@ -224,14 +197,17 @@ val connection_info : ('msg, 'peer_meta, 'conn_meta) net -> ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta P2p_connection.Info.t + val connection_local_metadata : ('msg, 'peer_meta, 'conn_meta) net -> ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta + val connection_remote_metadata : ('msg, 'peer_meta, 'conn_meta) net -> ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta + val connection_stat : ('msg, 'peer_meta, 'conn_meta) net -> ('msg, 'peer_meta, 'conn_meta) connection -> @@ -249,6 +225,7 @@ val global_stat : ('msg, 'peer_meta, 'conn_meta) net -> P2p_stat.t (** Accessors for meta information about a global identifier *) val get_peer_metadata : ('msg, 'peer_meta, 'conn_meta) net -> P2p_peer.Id.t -> 'peer_meta + val set_peer_metadata : ('msg, 'peer_meta, 'conn_meta) net -> P2p_peer.Id.t -> 'peer_meta -> unit @@ -290,11 +267,14 @@ val fold_connections : val iter_connections : ('msg, 'peer_meta, 'conn_meta) net -> - (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> unit + (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> + unit val on_new_connection : ('msg, 'peer_meta, 'conn_meta) net -> - (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> unit + (P2p_peer.Id.t 
-> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> + unit val greylist_addr : ('msg, 'peer_meta, 'conn_meta) net -> P2p_addr.t -> unit + val greylist_peer : ('msg, 'peer_meta, 'conn_meta) net -> P2p_peer.Id.t -> unit diff --git a/src/lib_p2p/p2p_acl.ml b/src/lib_p2p/p2p_acl.ml index 070fa0e4b258a91415eca7d905ef1c0b9e8c92d8..34533f3939eba1caf7c92f7d119370f1bce8611d 100644 --- a/src/lib_p2p/p2p_acl.ml +++ b/src/lib_p2p/p2p_acl.ml @@ -23,43 +23,44 @@ (* *) (*****************************************************************************) -module PeerRing = Ring.MakeTable(struct - include P2p_peer.Id - end) +module PeerRing = Ring.MakeTable (struct + include P2p_peer.Id +end) -module PatriciaTree(V:HashPtree.Value) = struct +module PatriciaTree (V : HashPtree.Value) = struct module Size = struct let size = 128 end - module Bits = HashPtree.Bits(Size) - module M = HashPtree.Make_BE_sized(V)(Size) + + module Bits = HashPtree.Bits (Size) + module M = HashPtree.Make_BE_sized (V) (Size) type t = M.t + let empty = M.empty (* take into consideration the fact that the int64 * returned by Ipaddr.V6.to_int64 is signed *) let z_of_bytes i = let i = Z.of_int64 i in - Z.(if i < zero then i + of_int 2 ** 64 else i) + Z.(if i < zero then i + (of_int 2 ** 64) else i) let z_of_ipv6 ip = - let hi_x, lo_x = Ipaddr.V6.to_int64 ip in + let (hi_x, lo_x) = Ipaddr.V6.to_int64 ip in let hi = z_of_bytes hi_x in let lo = z_of_bytes lo_x in Z.((hi lsl 64) + lo) - let key_of_ipv6 ip = - Bits.of_z (z_of_ipv6 ip) + let key_of_ipv6 ip = Bits.of_z (z_of_ipv6 ip) let z_mask_of_ipv6_prefix p = let ip = Ipaddr.V6.Prefix.network p in let len = Ipaddr.V6.Prefix.bits p in - z_of_ipv6 ip, Z.(lsl) Z.one (128 - len) + (z_of_ipv6 ip, Z.( lsl ) Z.one (128 - len)) let key_mask_of_ipv6_prefix p = - let z, m = z_mask_of_ipv6_prefix p in - Bits.of_z z, Bits.of_z m + let (z, m) = z_mask_of_ipv6_prefix p in + (Bits.of_z z, Bits.of_z m) let z_to_ipv6 z = (* assumes z is a 128 bit value *) @@ -68,23 +69,20 @@ module PatriciaTree(V:HashPtree.Value) = struct if Z.(hi_z >= of_int 2 ** 63) then (* If overflows int64, then returns the bit equivalent representation (which is negative) *) - Int64.add 0x8000000000000000L - ((Z.(to_int64 (hi_z - (of_int 2 ** 63))))) - else - Z.(to_int64 hi_z) + Int64.add 0x8000000000000000L Z.(to_int64 (hi_z - (of_int 2 ** 63))) + else Z.(to_int64 hi_z) in - let lo = Z.(to_int64 (z mod (pow ~$2 64))) in + let lo = Z.(to_int64 (z mod pow ~$2 64)) in Ipaddr.V6.of_int64 (hi, lo) - let remove key t = - M.remove (key_of_ipv6 key) t + let remove key t = M.remove (key_of_ipv6 key) t let remove_prefix prefix t = - let key, mask = key_mask_of_ipv6_prefix prefix in + let (key, mask) = key_mask_of_ipv6_prefix prefix in M.remove_prefix key mask t let add_prefix prefix value t = - let key, mask = key_mask_of_ipv6_prefix prefix in + let (key, mask) = key_mask_of_ipv6_prefix prefix in M.add (fun _ v -> v) ~key ~value ~mask t let add key value t = @@ -96,7 +94,7 @@ module PatriciaTree(V:HashPtree.Value) = struct let key_mask_to_prefix key mask = let len = if Bits.(equal mask zero) then 0 - else 128 - (Z.trailing_zeros (Bits.to_z mask)) + else 128 - Z.trailing_zeros (Bits.to_z mask) in Ipaddr.V6.Prefix.make len (z_to_ipv6 (Bits.to_z key)) @@ -109,74 +107,73 @@ module PatriciaTree(V:HashPtree.Value) = struct let pp ppf t = let lst = fold (fun p _ l -> p :: l) t [] in - Format.fprintf ppf "@[<2>[%a]@]" - Format.(pp_print_list ~pp_sep:(fun ppf () -> fprintf ppf ";@ ") - Ipaddr.V6.Prefix.pp) + Format.fprintf + ppf + "@[<2>[%a]@]" + 
Format.( + pp_print_list + ~pp_sep:(fun ppf () -> fprintf ppf ";@ ") + Ipaddr.V6.Prefix.pp) lst - end (* patricia trees using IpV6 addresses as keys *) module IpSet = struct - - include PatriciaTree(Time.System) + include PatriciaTree (Time.System) let remove_old t ~older_than = - let module MI = - struct + let module MI = struct type result = Time.System.t + let default = Ptime.max + let map _t _key value = value - let reduce _t left right = Time.System.(min left right) - end - in - let module MR = M.Map_Reduce(MI) in - MR.filter (fun addtime -> - Time.System.(older_than <= addtime) - ) t + let reduce _t left right = Time.System.(min left right) + end in + let module MR = M.Map_Reduce (MI) in + MR.filter (fun addtime -> Time.System.(older_than <= addtime)) t end -module IpTable = Hashtbl.Make(struct - type t = Ipaddr.V6.t - let hash = Hashtbl.hash - let equal x y = Ipaddr.V6.compare x y = 0 - end) +module IpTable = Hashtbl.Make (struct + type t = Ipaddr.V6.t + + let hash = Hashtbl.hash + + let equal x y = Ipaddr.V6.compare x y = 0 +end) type t = { - mutable greylist_ips : IpSet.t ; - greylist_peers : PeerRing.t ; - banned_ips : unit IpTable.t ; - banned_peers : unit P2p_peer.Table.t ; + mutable greylist_ips : IpSet.t; + greylist_peers : PeerRing.t; + banned_ips : unit IpTable.t; + banned_peers : unit P2p_peer.Table.t } -let create size = { - greylist_ips = IpSet.empty; - greylist_peers = PeerRing.create size; - banned_ips = IpTable.create 53; - banned_peers = P2p_peer.Table.create 53; -} +let create size = + { greylist_ips = IpSet.empty; + greylist_peers = PeerRing.create size; + banned_ips = IpTable.create 53; + banned_peers = P2p_peer.Table.create 53 } (* check if an ip is banned. priority is for static blacklist, then in the greylist *) let banned_addr acl addr = - IpTable.mem acl.banned_ips addr || - IpSet.mem addr acl.greylist_ips + IpTable.mem acl.banned_ips addr || IpSet.mem addr acl.greylist_ips (* Check is the peer_id is in the banned ring. It might be possible that a peer ID that is not banned, but its ip address is. 
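Such a connection can then only be rejected by the address-level check ([banned_addr] above).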
*) let banned_peer acl peer_id = - P2p_peer.Table.mem acl.banned_peers peer_id || - PeerRing.mem acl.greylist_peers peer_id + P2p_peer.Table.mem acl.banned_peers peer_id + || PeerRing.mem acl.greylist_peers peer_id let clear acl = - acl.greylist_ips <- IpSet.empty; - P2p_peer.Table.clear acl.banned_peers; - IpTable.clear acl.banned_ips; + acl.greylist_ips <- IpSet.empty ; + P2p_peer.Table.clear acl.banned_peers ; + IpTable.clear acl.banned_ips ; PeerRing.clear acl.greylist_peers module IPGreylist = struct - let add acl addr time = acl.greylist_ips <- IpSet.add addr time acl.greylist_ips @@ -190,42 +187,26 @@ module IPGreylist = struct acl.greylist_ips <- IpSet.remove_old acl.greylist_ips ~older_than let encoding = Data_encoding.(list P2p_addr.encoding) - end module IPBlacklist = struct + let add acl addr = IpTable.add acl.banned_ips addr () - let add acl addr = - IpTable.add acl.banned_ips addr () - - let remove acl addr = - IpTable.remove acl.banned_ips addr - - let mem acl addr = - IpTable.mem acl.banned_ips addr + let remove acl addr = IpTable.remove acl.banned_ips addr + let mem acl addr = IpTable.mem acl.banned_ips addr end module PeerBlacklist = struct + let add acl addr = P2p_peer.Table.add acl.banned_peers addr () - let add acl addr = - P2p_peer.Table.add acl.banned_peers addr () - - let remove acl addr = - P2p_peer.Table.remove acl.banned_peers addr - - let mem acl addr = - P2p_peer.Table.mem acl.banned_peers addr + let remove acl addr = P2p_peer.Table.remove acl.banned_peers addr + let mem acl addr = P2p_peer.Table.mem acl.banned_peers addr end module PeerGreylist = struct + let add acl peer_id = PeerRing.add acl.greylist_peers peer_id - let add acl peer_id = - PeerRing.add acl.greylist_peers peer_id - - let mem acl peer_id = - PeerRing.mem acl.greylist_peers peer_id - + let mem acl peer_id = PeerRing.mem acl.greylist_peers peer_id end - diff --git a/src/lib_p2p/p2p_acl.mli b/src/lib_p2p/p2p_acl.mli index 785a45c0046ee222fffe4bbd407055118a0d983d..4b4197ae2391786523c157f74543017aa06626cf 100644 --- a/src/lib_p2p/p2p_acl.mli +++ b/src/lib_p2p/p2p_acl.mli @@ -57,42 +57,38 @@ val banned_peer : t -> P2p_peer.Id.t -> bool val clear : t -> unit module IPGreylist : sig - (** [add t addr] adds [addr] to the address greylist. *) - val add: t -> P2p_addr.t -> Time.System.t -> unit + val add : t -> P2p_addr.t -> Time.System.t -> unit (** [remove_old t ~older_than] removes all banned peers older than the given time. 
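More precisely, it drops the greylisted addresses whose greylisting timestamp is older than [older_than]; the static blacklists are left untouched.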
*) - val remove_old: t -> older_than:Time.System.t -> unit - - val mem: t -> P2p_addr.t -> bool + val remove_old : t -> older_than:Time.System.t -> unit - val encoding: P2p_addr.t list Data_encoding.t + val mem : t -> P2p_addr.t -> bool + val encoding : P2p_addr.t list Data_encoding.t end module IPBlacklist : sig + val add : t -> P2p_addr.t -> unit - val add: t -> P2p_addr.t -> unit - val remove: t -> P2p_addr.t -> unit - val mem: t -> P2p_addr.t -> bool + val remove : t -> P2p_addr.t -> unit + val mem : t -> P2p_addr.t -> bool end module PeerBlacklist : sig + val add : t -> P2p_peer.Id.t -> unit - val add: t -> P2p_peer.Id.t -> unit - val remove: t -> P2p_peer.Id.t -> unit - val mem: t -> P2p_peer.Id.t -> bool + val remove : t -> P2p_peer.Id.t -> unit + val mem : t -> P2p_peer.Id.t -> bool end - module PeerGreylist : sig + val add : t -> P2p_peer.Id.t -> unit - val add: t -> P2p_peer.Id.t -> unit - val mem: t -> P2p_peer.Id.t -> bool - + val mem : t -> P2p_peer.Id.t -> bool end (** / *) @@ -101,14 +97,23 @@ module PeerRing : Ring.TABLE with type v = P2p_peer.Id.t module IpSet : sig type t - val empty: t + + val empty : t + val add : Ipaddr.V6.t -> Time.System.t -> t -> t + val add_prefix : Ipaddr.V6.Prefix.t -> Time.System.t -> t -> t + val remove : Ipaddr.V6.t -> t -> t + val remove_prefix : Ipaddr.V6.Prefix.t -> t -> t + val mem : Ipaddr.V6.t -> t -> bool - val fold: (Ipaddr.V6.Prefix.t -> Time.System.t -> 'a -> 'a) -> t -> 'a -> 'a + + val fold : (Ipaddr.V6.Prefix.t -> Time.System.t -> 'a -> 'a) -> t -> 'a -> 'a + val pp : Format.formatter -> t -> unit + val remove_old : t -> older_than:Time.System.t -> t end diff --git a/src/lib_p2p/p2p_answerer.ml b/src/lib_p2p/p2p_answerer.ml index b321f64ed0b3a1a88387a664e6db32449b71d432..3b8c2b379ba0d506fd331f270ac5c7d44ea419ce 100644 --- a/src/lib_p2p/p2p_answerer.ml +++ b/src/lib_p2p/p2p_answerer.ml @@ -24,84 +24,77 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make (struct let name = "p2p.answerer" end) +include Internal_event.Legacy_logging.Make (struct + let name = "p2p.answerer" +end) type 'msg callback = { - bootstrap: unit -> P2p_point.Id.t list Lwt.t ; - advertise: P2p_point.Id.t list -> unit Lwt.t ; - message: int -> 'msg -> unit Lwt.t ; - swap_request: P2p_point.Id.t -> P2p_peer.Id.t -> unit Lwt.t ; - swap_ack: P2p_point.Id.t -> P2p_peer.Id.t -> unit Lwt.t ; + bootstrap : unit -> P2p_point.Id.t list Lwt.t; + advertise : P2p_point.Id.t list -> unit Lwt.t; + message : int -> 'msg -> unit Lwt.t; + swap_request : P2p_point.Id.t -> P2p_peer.Id.t -> unit Lwt.t; + swap_ack : P2p_point.Id.t -> P2p_peer.Id.t -> unit Lwt.t } type ('msg, 'meta) t = { - canceler: Lwt_canceler.t ; - conn: ('msg P2p_message.t, 'meta) P2p_socket.t ; - callback: 'msg callback ; - mutable worker: unit Lwt.t ; + canceler : Lwt_canceler.t; + conn : ('msg P2p_message.t, 'meta) P2p_socket.t; + callback : 'msg callback; + mutable worker : unit Lwt.t } let rec worker_loop st = - Lwt_unix.yield () >>= fun () -> - protect ~canceler:st.canceler begin fun () -> - P2p_socket.read st.conn - end >>= function - | Ok (_, Bootstrap) -> begin + Lwt_unix.yield () + >>= fun () -> + protect ~canceler:st.canceler (fun () -> P2p_socket.read st.conn) + >>= function + | Ok (_, Bootstrap) -> ( (* st.callback.bootstrap will return an empty list if the node is in private mode *) - st.callback.bootstrap () >>= function + st.callback.bootstrap () + >>= function | [] -> worker_loop st - | points -> - match 
P2p_socket.write_now st.conn (Advertise points) with - | Ok _sent -> - (* if not sent then ?? TODO count dropped message ?? *) - worker_loop st - | Error _ -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit - end + | points -> ( + match P2p_socket.write_now st.conn (Advertise points) with + | Ok _sent -> + (* if not sent then ?? TODO count dropped message ?? *) + worker_loop st + | Error _ -> + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit ) ) | Ok (_, Advertise points) -> (* st.callback.advertise will ignore the points if the node is in private mode *) - st.callback.advertise points >>= fun () -> - worker_loop st + st.callback.advertise points >>= fun () -> worker_loop st | Ok (_, Swap_request (point, peer)) -> - st.callback.swap_request point peer >>= fun () -> - worker_loop st + st.callback.swap_request point peer >>= fun () -> worker_loop st | Ok (_, Swap_ack (point, peer)) -> - st.callback.swap_ack point peer >>= fun () -> - worker_loop st + st.callback.swap_ack point peer >>= fun () -> worker_loop st | Ok (size, Message msg) -> - st.callback.message size msg >>= fun () -> - worker_loop st + st.callback.message size msg >>= fun () -> worker_loop st | Ok (_, Disconnect) | Error [P2p_errors.Connection_closed] -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit | Error [P2p_errors.Decoding_error] -> (* TODO: Penalize peer... *) - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit - | Error [ Canceled ] -> + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit + | Error [Canceled] -> Lwt.return_unit | Error err -> - lwt_log_error "@[Answerer unexpected error:@ %a@]" - Error_monad.pp_print_error err >>= fun () -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit + lwt_log_error + "@[Answerer unexpected error:@ %a@]" + Error_monad.pp_print_error + err + >>= fun () -> + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit let run conn canceler callback = - let st = { - canceler ; conn ; callback ; - worker = Lwt.return_unit ; - } in + let st = {canceler; conn; callback; worker = Lwt.return_unit} in st.worker <- - Lwt_utils.worker "answerer" + Lwt_utils.worker + "answerer" ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop st) ~cancel:(fun () -> Lwt_canceler.cancel canceler) ; st -let shutdown st = - Lwt_canceler.cancel st.canceler >>= fun () -> - st.worker +let shutdown st = Lwt_canceler.cancel st.canceler >>= fun () -> st.worker diff --git a/src/lib_p2p/p2p_answerer.mli b/src/lib_p2p/p2p_answerer.mli index b284faab2c43f8a19deaa511bd6f4ca5b1c148e8..a531ca9323918e433aab56d994ca31e7d58713b2 100644 --- a/src/lib_p2p/p2p_answerer.mli +++ b/src/lib_p2p/p2p_answerer.mli @@ -28,16 +28,19 @@ using callbacks. 
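For orientation, a minimal sketch of wiring an answerer; the [conn] socket, the [canceler] and the no-op callbacks below are placeholders supplied by the caller:

{[
  let callback =
    { bootstrap = (fun () -> Lwt.return []);
      advertise = (fun _points -> Lwt.return_unit);
      message = (fun _size _msg -> Lwt.return_unit);
      swap_request = (fun _point _peer_id -> Lwt.return_unit);
      swap_ack = (fun _point _peer_id -> Lwt.return_unit) }
  in
  let answerer = run conn canceler callback in
  (* ... the worker now serves [conn] ... *)
  shutdown answerer
]}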
*) type 'msg callback = { - bootstrap: unit -> P2p_point.Id.t list Lwt.t ; - advertise: P2p_point.Id.t list -> unit Lwt.t ; - message: int -> 'msg -> unit Lwt.t ; - swap_request: P2p_point.Id.t -> P2p_peer.Id.t -> unit Lwt.t ; - swap_ack: P2p_point.Id.t -> P2p_peer.Id.t -> unit Lwt.t ; + bootstrap : unit -> P2p_point.Id.t list Lwt.t; + advertise : P2p_point.Id.t list -> unit Lwt.t; + message : int -> 'msg -> unit Lwt.t; + swap_request : P2p_point.Id.t -> P2p_peer.Id.t -> unit Lwt.t; + swap_ack : P2p_point.Id.t -> P2p_peer.Id.t -> unit Lwt.t } type ('msg, 'meta) t val shutdown : ('msg, 'meta) t -> unit Lwt.t -val run : ('msg P2p_message.t, 'meta) P2p_socket.t -> - Lwt_canceler.t -> 'msg callback -> ('msg, 'meta) t +val run : + ('msg P2p_message.t, 'meta) P2p_socket.t -> + Lwt_canceler.t -> + 'msg callback -> + ('msg, 'meta) t diff --git a/src/lib_p2p/p2p_discovery.ml b/src/lib_p2p/p2p_discovery.ml index 78654d4270da7e4caeac3a30bdb3c9cbc7761290..f9170c7c31fbfa3a071c722a0e81f6337e70e1cf 100644 --- a/src/lib_p2p/p2p_discovery.ml +++ b/src/lib_p2p/p2p_discovery.ml @@ -24,14 +24,13 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make(struct - let name = "p2p.discovery" - end) +include Internal_event.Legacy_logging.Make (struct + let name = "p2p.discovery" +end) type pool = Pool : ('msg, 'meta, 'meta_conn) P2p_pool.t -> pool module Message = struct - let encoding = Data_encoding.(tup3 (Fixed.string 10) P2p_peer.Id.encoding int16) @@ -41,239 +40,229 @@ module Message = struct let make peer_id port = Data_encoding.Binary.to_bytes_exn encoding (key, peer_id, port) - end module Answer = struct - type t = { - my_peer_id: P2p_peer.Id.t ; - pool: pool ; - discovery_port: int ; - canceler: Lwt_canceler.t ; - trust_discovered_peers: bool ; - mutable worker: unit Lwt.t ; + my_peer_id : P2p_peer.Id.t; + pool : pool; + discovery_port : int; + canceler : Lwt_canceler.t; + trust_discovered_peers : bool; + mutable worker : unit Lwt.t } let create_socket st = Lwt.catch - begin fun () -> + (fun () -> let socket = Lwt_unix.socket PF_INET SOCK_DGRAM 0 in Lwt_canceler.on_cancel st.canceler (fun () -> - Lwt_utils_unix.safe_close socket - ) ; + Lwt_utils_unix.safe_close socket) ; Lwt_unix.setsockopt socket SO_BROADCAST true ; Lwt_unix.setsockopt socket SO_REUSEADDR true ; - let addr = Lwt_unix.ADDR_INET (Unix.inet_addr_any, st.discovery_port) in - Lwt_unix.bind socket addr >>= fun () -> - Lwt.return socket - end - begin fun exn -> - lwt_debug "Error creating a socket" >>= fun () -> - Lwt.fail exn - end + let addr = + Lwt_unix.ADDR_INET (Unix.inet_addr_any, st.discovery_port) + in + Lwt_unix.bind socket addr >>= fun () -> Lwt.return socket) + (fun exn -> + lwt_debug "Error creating a socket" >>= fun () -> Lwt.fail exn) let loop st = - protect ~canceler:st.canceler begin fun () -> - create_socket st >>= fun socket -> - return socket - end >>=? fun socket -> + protect ~canceler:st.canceler (fun () -> + create_socket st >>= fun socket -> return socket) + >>=? fun socket -> (* Infinite loop, should never exit. *) let rec aux () = let buf = MBytes.create Message.length in - protect ~canceler:st.canceler begin fun () -> - Lwt_bytes.recvfrom socket buf 0 Message.length [] >>= fun content -> - lwt_debug "Received discovery message..." >>= fun () -> - return content - end >>=? function + protect ~canceler:st.canceler (fun () -> + Lwt_bytes.recvfrom socket buf 0 Message.length [] + >>= fun content -> + lwt_debug "Received discovery message..." 
+ >>= fun () -> return content) + >>=? function | (len, Lwt_unix.ADDR_INET (remote_addr, _)) - when Compare.Int.equal len Message.length -> - begin match Data_encoding.Binary.of_bytes Message.encoding buf with - | Some (key, remote_peer_id, remote_port) - when Compare.String.equal key Message.key - && not (P2p_peer.Id.equal remote_peer_id st.my_peer_id) -> - let s_addr = Unix.string_of_inet_addr remote_addr in - begin match P2p_addr.of_string_opt s_addr with - | None -> - lwt_debug "Failed to parse %S\n@." s_addr >>= fun () -> - aux () - | Some addr -> - let Pool pool = st.pool in - lwt_log_info "Registering new point %a:%d" - P2p_addr.pp addr remote_port >>= fun () -> - P2p_pool.register_new_point - ~trusted:st.trust_discovered_peers - pool st.my_peer_id - (addr, remote_port) ; - aux () - end - | _ -> aux () - end - | _ -> aux () - in aux () + when Compare.Int.equal len Message.length -> ( + match Data_encoding.Binary.of_bytes Message.encoding buf with + | Some (key, remote_peer_id, remote_port) + when Compare.String.equal key Message.key + && not (P2p_peer.Id.equal remote_peer_id st.my_peer_id) -> ( + let s_addr = Unix.string_of_inet_addr remote_addr in + match P2p_addr.of_string_opt s_addr with + | None -> + lwt_debug "Failed to parse %S\n@." s_addr >>= fun () -> aux () + | Some addr -> + let (Pool pool) = st.pool in + lwt_log_info + "Registering new point %a:%d" + P2p_addr.pp + addr + remote_port + >>= fun () -> + P2p_pool.register_new_point + ~trusted:st.trust_discovered_peers + pool + st.my_peer_id + (addr, remote_port) ; + aux () ) + | _ -> + aux () ) + | _ -> + aux () + in + aux () let worker_loop st = - loop st >>= function - | Error [ Canceled ] -> + loop st + >>= function + | Error [Canceled] -> Lwt.return_unit | Error err -> lwt_log_error "@[<v 2>Unexpected error in answer worker@ %a@]" - pp_print_error err >>= fun () -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit + pp_print_error + err + >>= fun () -> + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit | Ok () -> - lwt_log_error - "@[<v 2>Unexpected exit in answer worker@]" >>= fun () -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit + lwt_log_error "@[<v 2>Unexpected exit in answer worker@]" + >>= fun () -> + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit - let create my_peer_id pool ~trust_discovered_peers ~discovery_port = { - canceler = Lwt_canceler.create () ; - my_peer_id ; - discovery_port ; - trust_discovered_peers ; - pool = Pool pool ; - worker = Lwt.return_unit ; - } + let create my_peer_id pool ~trust_discovered_peers ~discovery_port = + { canceler = Lwt_canceler.create (); + my_peer_id; + discovery_port; + trust_discovered_peers; + pool = Pool pool; + worker = Lwt.return_unit } let activate st = st.worker <- - Lwt_utils.worker "discovery_answer" + Lwt_utils.worker + "discovery_answer" ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop st) ~cancel:(fun () -> Lwt_canceler.cancel st.canceler) - end (* ************************************************************ *) (* Sender *) module Sender = struct - type t = { - canceler: Lwt_canceler.t ; - my_peer_id: P2p_peer.Id.t ; - listening_port: int ; - discovery_port: int ; - discovery_addr: Ipaddr.V4.t ; - pool: pool ; - restart_discovery: unit Lwt_condition.t ; - mutable worker: unit Lwt.t ; + canceler : Lwt_canceler.t; + my_peer_id : P2p_peer.Id.t; + listening_port : int; + discovery_port : int; + discovery_addr : Ipaddr.V4.t; + pool : pool; + restart_discovery : unit 
Lwt_condition.t; + mutable worker : unit Lwt.t } module Config = struct - type t = { - delay: float; - loop: int; - } - let initial = { - delay = 0.1 ; - loop = 0 ; - } - let increase_delay config = { config with delay = 2.0 *. config.delay ; } + type t = {delay : float; loop : int} + + let initial = {delay = 0.1; loop = 0} + + let increase_delay config = {config with delay = 2.0 *. config.delay} + let max_loop = 10 end let broadcast_message st = let msg = Message.make st.my_peer_id st.listening_port in Lwt.catch - begin fun () -> + (fun () -> let socket = Lwt_unix.(socket PF_INET SOCK_DGRAM 0) in Lwt_canceler.on_cancel st.canceler (fun () -> - Lwt_utils_unix.safe_close socket - ) ; + Lwt_utils_unix.safe_close socket) ; Lwt_unix.setsockopt socket Lwt_unix.SO_BROADCAST true ; let broadcast_ipv4 = Ipaddr_unix.V4.to_inet_addr st.discovery_addr in let addr = Lwt_unix.ADDR_INET (broadcast_ipv4, st.discovery_port) in - Lwt_unix.connect socket addr >>= fun () -> - lwt_debug "Broadcasting discovery message..." >>= fun () -> - Lwt_bytes.sendto socket msg 0 Message.length [] addr >>= fun _len -> - Lwt_utils_unix.safe_close socket - end - begin fun _exn -> - lwt_debug "Error broadcasting a discovery request" >>= fun () -> - Lwt.return_unit - end + Lwt_unix.connect socket addr + >>= fun () -> + lwt_debug "Broadcasting discovery message..." + >>= fun () -> + Lwt_bytes.sendto socket msg 0 Message.length [] addr + >>= fun _len -> Lwt_utils_unix.safe_close socket) + (fun _exn -> + lwt_debug "Error broadcasting a discovery request" + >>= fun () -> Lwt.return_unit) let rec worker_loop sender_config st = - begin - protect ~canceler:st.canceler begin fun () -> - broadcast_message st >>= fun () -> - return_unit - end >>=? fun () -> - protect ~canceler:st.canceler begin fun () -> - Lwt.pick [ - begin - Lwt_condition.wait st.restart_discovery >>= fun () -> - return Config.initial - end ; - begin - Lwt_unix.sleep sender_config.Config.delay >>= fun () -> - return { sender_config with Config.loop = succ sender_config.loop ; } - end ; - ] - end - end >>= function + protect ~canceler:st.canceler (fun () -> + broadcast_message st >>= fun () -> return_unit) + >>=? 
(fun () -> + protect ~canceler:st.canceler (fun () -> + Lwt.pick + [ ( Lwt_condition.wait st.restart_discovery + >>= fun () -> return Config.initial ); + ( Lwt_unix.sleep sender_config.Config.delay + >>= fun () -> + return + {sender_config with Config.loop = succ sender_config.loop} + ) ])) + >>= function | Ok config when config.Config.loop = Config.max_loop -> - let new_sender_config = { - config with Config.loop = pred config.loop ; - } in + let new_sender_config = {config with Config.loop = pred config.loop} in worker_loop new_sender_config st | Ok config -> let new_sender_config = Config.increase_delay config in worker_loop new_sender_config st - | Error [ Canceled ] -> + | Error [Canceled] -> Lwt.return_unit | Error err -> lwt_log_error "@[<v 2>Unexpected error in sender worker@ %a@]" - pp_print_error err >>= fun () -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit + pp_print_error + err + >>= fun () -> + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit - let create my_peer_id pool ~listening_port ~discovery_port ~discovery_addr = { - canceler = Lwt_canceler.create () ; - my_peer_id ; - listening_port ; - discovery_port ; - discovery_addr ; - restart_discovery = Lwt_condition.create () ; - pool = Pool pool ; - worker = Lwt.return_unit ; - } + let create my_peer_id pool ~listening_port ~discovery_port ~discovery_addr = + { canceler = Lwt_canceler.create (); + my_peer_id; + listening_port; + discovery_port; + discovery_addr; + restart_discovery = Lwt_condition.create (); + pool = Pool pool; + worker = Lwt.return_unit } let activate st = st.worker <- - Lwt_utils.worker "discovery_sender" + Lwt_utils.worker + "discovery_sender" ~on_event:Internal_event.Lwt_worker_event.on_event - ~run:begin fun () -> worker_loop Config.initial st end - ~cancel:begin fun () -> Lwt_canceler.cancel st.canceler end - + ~run:(fun () -> worker_loop Config.initial st) + ~cancel:(fun () -> Lwt_canceler.cancel st.canceler) end (* ********************************************************************** *) -type t = { - answer: Answer.t ; - sender: Sender.t ; -} +type t = {answer : Answer.t; sender : Sender.t} -let create ~listening_port ~discovery_port ~discovery_addr ~trust_discovered_peers pool my_peer_id = - let answer = Answer.create my_peer_id pool ~discovery_port ~trust_discovered_peers in +let create ~listening_port ~discovery_port ~discovery_addr + ~trust_discovered_peers pool my_peer_id = + let answer = + Answer.create my_peer_id pool ~discovery_port ~trust_discovered_peers + in let sender = Sender.create - my_peer_id pool ~listening_port ~discovery_port ~discovery_addr in - { answer ; sender } + my_peer_id + pool + ~listening_port + ~discovery_port + ~discovery_addr + in + {answer; sender} -let activate { answer ; sender } = - Answer.activate answer ; - Sender.activate sender +let activate {answer; sender} = Answer.activate answer ; Sender.activate sender let wakeup t = Lwt_condition.signal t.sender.restart_discovery () let shutdown t = - Lwt.join [ - Lwt_canceler.cancel t.answer.canceler ; - Lwt_canceler.cancel t.sender.canceler ; - ] + Lwt.join + [ Lwt_canceler.cancel t.answer.canceler; + Lwt_canceler.cancel t.sender.canceler ] diff --git a/src/lib_p2p/p2p_discovery.mli b/src/lib_p2p/p2p_discovery.mli index 01a3e0b2b7c02fc18679271b7250db1fe233ce38..efb5cb0ed6efa64c34fdf1fc236c605727d42da3 100644 --- a/src/lib_p2p/p2p_discovery.mli +++ b/src/lib_p2p/p2p_discovery.mli @@ -24,7 +24,6 @@ (* *) (*****************************************************************************) - (** 
Local peer discovery. This module manages the discovery of local peers through UDP broadcasting. @@ -45,9 +44,11 @@ type t the [listening_port] through the address [discovery_addr:discovery_port]. *) val create : listening_port:int -> - discovery_port:int -> discovery_addr:Ipaddr.V4.t -> + discovery_port:int -> + discovery_addr:Ipaddr.V4.t -> trust_discovered_peers:bool -> - ('a, 'b, 'c) P2p_pool.t -> P2p_peer.Table.key -> + ('a, 'b, 'c) P2p_pool.t -> + P2p_peer.Table.key -> t val activate : t -> unit diff --git a/src/lib_p2p/p2p_errors.ml b/src/lib_p2p/p2p_errors.ml index 6668da8bf80d2ccb27c0beec395659f78851fe53..9a3eaceb27fa9e55f08d806b9e0e479079070510 100644 --- a/src/lib_p2p/p2p_errors.ml +++ b/src/lib_p2p/p2p_errors.ml @@ -34,7 +34,8 @@ let () = ~id:"node.p2p_io_scheduler.connection_closed" ~title:"Connection closed" ~description:"IO error: connection with a peer is closed." - ~pp:(fun ppf () -> Format.fprintf ppf "IO error: connection with a peer is closed.") + ~pp:(fun ppf () -> + Format.fprintf ppf "IO error: connection with a peer is closed.") Data_encoding.empty (function Connection_closed -> Some () | _ -> None) (fun () -> Connection_closed) @@ -42,15 +43,24 @@ let () = (***************************** p2p socket *********************************) type error += Decipher_error + type error += Invalid_message_size + type error += Encoding_error + type error += Rejected_socket_connection -type error += Rejected_no_common_protocol of { announced : Network_version.t } + +type error += Rejected_no_common_protocol of {announced : Network_version.t} + type error += Decoding_error + type error += Myself of P2p_connection.Id.t + type error += Not_enough_proof_of_work of P2p_peer.Id.t + type error += Invalid_auth -type error += Invalid_chunks_size of { value: int ; min: int ; max: int } + +type error += Invalid_chunks_size of {value : int; min : int; max : int} let () = (* Decipher error *) @@ -59,7 +69,8 @@ let () = ~id:"node.p2p_socket.decipher_error" ~title:"Decipher error" ~description:"An error occurred while deciphering." - ~pp:(fun ppf () -> Format.fprintf ppf "An error occurred while deciphering.") + ~pp:(fun ppf () -> + Format.fprintf ppf "An error occurred while deciphering.") Data_encoding.empty (function Decipher_error -> Some () | _ -> None) (fun () -> Decipher_error) ; @@ -69,7 +80,8 @@ let () = ~id:"node.p2p_socket.invalid_message_size" ~title:"Invalid message size" ~description:"The size of the message to be written is invalid." - ~pp:(fun ppf () -> Format.fprintf ppf "The size of the message to be written is invalid.") + ~pp:(fun ppf () -> + Format.fprintf ppf "The size of the message to be written is invalid.") Data_encoding.empty (function Invalid_message_size -> Some () | _ -> None) (fun () -> Invalid_message_size) ; @@ -89,7 +101,10 @@ let () = ~id:"node.p2p_socket.rejected_socket_connection" ~title:"Rejected socket connection" ~description:"Rejected peer connection: rejected socket connection." 
- ~pp:(fun ppf () -> Format.fprintf ppf "Rejected peer connection: rejected socket connection.") + ~pp:(fun ppf () -> + Format.fprintf + ppf + "Rejected peer connection: rejected socket connection.") Data_encoding.empty (function Rejected_socket_connection -> Some () | _ -> None) (fun () -> Rejected_socket_connection) ; @@ -98,16 +113,17 @@ let () = `Permanent ~id:"node.p2p_socket.rejected_no_common_protocol" ~title:"Rejected socket connection - no common network protocol" - ~description:"Rejected peer connection: \ - rejected socket connection as we have no common \ - network protocol with the peer." - ~pp:(fun ppf _lst -> Format.fprintf ppf - "Rejected peer connection: no common network protocol.") + ~description: + "Rejected peer connection: rejected socket connection as we have no \ + common network protocol with the peer." + ~pp:(fun ppf _lst -> + Format.fprintf + ppf + "Rejected peer connection: no common network protocol.") Data_encoding.(obj1 (req "announced_version" Network_version.encoding)) (function - | Rejected_no_common_protocol { announced } -> Some announced - | _ -> None) - (fun announced -> Rejected_no_common_protocol { announced }); + | Rejected_no_common_protocol {announced} -> Some announced | _ -> None) + (fun announced -> Rejected_no_common_protocol {announced}) ; (* Decoding error *) register_error_kind `Permanent @@ -124,9 +140,12 @@ let () = ~id:"node.p2p_socket.myself" ~title:"Myself" ~description:"Remote peer is actually yourself." - ~pp:(fun ppf id -> Format.fprintf ppf - "Remote peer %a cannot be authenticated: peer is actually yourself." - P2p_connection.Id.pp id) + ~pp:(fun ppf id -> + Format.fprintf + ppf + "Remote peer %a cannot be authenticated: peer is actually yourself." + P2p_connection.Id.pp + id) Data_encoding.(obj1 (req "connection id" P2p_connection.Id.encoding)) (function Myself id -> Some id | _ -> None) (fun id -> Myself id) ; @@ -135,11 +154,14 @@ let () = `Permanent ~id:"node.p2p_socket.not_enough_proof_of_work" ~title:"Not enough proof of work" - ~description:"Remote peer cannot be authenticated: not enough proof of work." + ~description: + "Remote peer cannot be authenticated: not enough proof of work." ~pp:(fun ppf id -> - Format.fprintf ppf - "Remote peer %a cannot be authenticated: not enough proof of work." - P2p_peer.Id.pp id) + Format.fprintf + ppf + "Remote peer %a cannot be authenticated: not enough proof of work." + P2p_peer.Id.pp + id) Data_encoding.(obj1 (req "peer id" P2p_peer.Id.encoding)) (function Not_enough_proof_of_work id -> Some id | _ -> None) (fun id -> Not_enough_proof_of_work id) ; @@ -149,7 +171,8 @@ let () = ~id:"node.p2p_socket.invalid_auth" ~title:"Invalid authentication" ~description:"Rejected peer connection: invalid authentication." - ~pp:(fun ppf () -> Format.fprintf ppf "Rejected peer connection: invalid authentication.") + ~pp:(fun ppf () -> + Format.fprintf ppf "Rejected peer connection: invalid authentication.") Data_encoding.empty (function Invalid_auth -> Some () | _ -> None) (fun () -> Invalid_auth) ; @@ -160,24 +183,37 @@ let () = ~title:"Invalid chunks size" ~description:"Size of chunks is not valid." 
~pp:(fun ppf (value, min, max) -> - Format.fprintf ppf "Size of chunks is invalid: should be between %d and %d but is %d" min max value) - Data_encoding.(obj3 - (req "value" int31) - (req "min" int31) - (req "max" int31)) - (function Invalid_chunks_size { value ; min ; max } - -> Some (value, min, max) | _ -> None) - (fun (value, min, max) -> Invalid_chunks_size { value ; min ; max }) + Format.fprintf + ppf + "Size of chunks is invalid: should be between %d and %d but is %d" + min + max + value) + Data_encoding.( + obj3 (req "value" int31) (req "min" int31) (req "max" int31)) + (function + | Invalid_chunks_size {value; min; max} -> + Some (value, min, max) + | _ -> + None) + (fun (value, min, max) -> Invalid_chunks_size {value; min; max}) (***************************** p2p pool ***********************************) type error += Pending_connection + type error += Connected + type error += Connection_refused + type error += Rejected of P2p_peer.Id.t + type error += Too_many_connections + type error += Private_mode + type error += Point_banned of P2p_point.Id.t + type error += Peer_banned of P2p_peer.Id.t let () = @@ -186,8 +222,12 @@ let () = `Permanent ~id:"node.p2p_pool.pending_connection" ~title:"Pending connection" - ~description:"Fail to connect with a peer: a connection is already pending." - ~pp:(fun ppf () -> Format.fprintf ppf "Fail to connect with a peer: a connection is already pending.") + ~description: + "Fail to connect with a peer: a connection is already pending." + ~pp:(fun ppf () -> + Format.fprintf + ppf + "Fail to connect with a peer: a connection is already pending.") Data_encoding.empty (function Pending_connection -> Some () | _ -> None) (fun () -> Pending_connection) ; @@ -196,8 +236,12 @@ let () = `Permanent ~id:"node.p2p_pool.connected" ~title:"Connected" - ~description:"Fail to connect with a peer: a connection is already established." - ~pp:(fun ppf () -> Format.fprintf ppf "Fail to connect with a peer: a connection is already established.") + ~description: + "Fail to connect with a peer: a connection is already established." + ~pp:(fun ppf () -> + Format.fprintf + ppf + "Fail to connect with a peer: a connection is already established.") Data_encoding.empty (function Connected -> Some () | _ -> None) (fun () -> Connected) ; @@ -218,7 +262,11 @@ let () = ~title:"Rejected peer" ~description:"Connection to peer was rejected." ~pp:(fun ppf id -> - Format.fprintf ppf "Connection to peer %a was rejected." P2p_peer.Id.pp id) + Format.fprintf + ppf + "Connection to peer %a was rejected." + P2p_peer.Id.pp + id) Data_encoding.(obj1 (req "peer id" P2p_peer.Id.encoding)) (function Rejected id -> Some id | _ -> None) (fun id -> Rejected id) ; @@ -249,9 +297,11 @@ let () = ~title:"Point Banned" ~description:"The addr you tried to connect is banned." ~pp:(fun ppf (addr, _port) -> - Format.fprintf ppf - "The addr you tried to connect (%a) is banned." - P2p_addr.pp addr) + Format.fprintf + ppf + "The addr you tried to connect (%a) is banned." + P2p_addr.pp + addr) Data_encoding.(obj1 (req "point" P2p_point.Id.encoding)) (function Point_banned point -> Some point | _ -> None) (fun point -> Point_banned point) ; @@ -262,9 +312,11 @@ let () = ~title:"Peer Banned" ~description:"The peer identity you tried to connect is banned." ~pp:(fun ppf peer_id -> - Format.fprintf ppf - "The peer identity you tried to connect (%a) is banned." - P2p_peer.Id.pp peer_id) + Format.fprintf + ppf + "The peer identity you tried to connect (%a) is banned." 
+ P2p_peer.Id.pp + peer_id) Data_encoding.(obj1 (req "peer" P2p_peer.Id.encoding)) (function Peer_banned peer_id -> Some peer_id | _ -> None) (fun peer_id -> Peer_banned peer_id) diff --git a/src/lib_p2p/p2p_errors.mli b/src/lib_p2p/p2p_errors.mli index 2c1070512c3b8f8b1460227afa949d1d32313ab5..0aca73e622a73b08142dd68e32d53dcd42311c4f 100644 --- a/src/lib_p2p/p2p_errors.mli +++ b/src/lib_p2p/p2p_errors.mli @@ -30,23 +30,39 @@ type error += Connection_closed (* p2p socket *) type error += Decipher_error + type error += Invalid_message_size + type error += Encoding_error + type error += Rejected_socket_connection -type error += Rejected_no_common_protocol of { announced : Network_version.t } + +type error += Rejected_no_common_protocol of {announced : Network_version.t} + type error += Decoding_error + type error += Myself of P2p_connection.Id.t + type error += Not_enough_proof_of_work of P2p_peer.Id.t + type error += Invalid_auth -type error += Invalid_chunks_size of { value: int ; min: int ; max: int } + +type error += Invalid_chunks_size of {value : int; min : int; max : int} (* p2p pool *) type error += Pending_connection + type error += Connected + type error += Connection_refused + type error += Rejected of P2p_peer.Id.t + type error += Too_many_connections + type error += Private_mode + type error += Point_banned of P2p_point.Id.t + type error += Peer_banned of P2p_peer.Id.t diff --git a/src/lib_p2p/p2p_fd.ml b/src/lib_p2p/p2p_fd.ml index a1714ae5899fd92dccb8d5310a786f40ead1d9ce..a7b1976936821972b831534c9fbc8f8bbc3cfa53 100644 --- a/src/lib_p2p/p2p_fd.ml +++ b/src/lib_p2p/p2p_fd.ml @@ -26,36 +26,37 @@ (* logging facility to monitor sockets *) let is_not_windows = Sys.os_type <> "Win32" + let () = (* Otherwise some writes trigger a SIGPIPE instead of raising an Lwt_unit exception. In the node, this is already done by Cohttp, so this is only useful when using the P2P layer as a stand alone library. *) - if is_not_windows then - Sys.(set_signal sigpipe Signal_ignore) + if is_not_windows then Sys.(set_signal sigpipe Signal_ignore) (* Logging facility for the P2P layer *) -module Log = Internal_event.Legacy_logging.Make(struct let name = "p2p.fd" end) +module Log = Internal_event.Legacy_logging.Make (struct + let name = "p2p.fd" +end) type t = { - fd : Lwt_unix.file_descr ; - id : int ; - mutable nread : int ; - mutable nwrit : int ; + fd : Lwt_unix.file_descr; + id : int; + mutable nread : int; + mutable nwrit : int } (* we use a prefix ' cnx:' that allows easy grepping in the log to lookup everything related to a particular connection. 
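For example, grepping for "cnx:42:" isolates everything read or written on connection number 42.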
*) -let log t fmt = - Format.kasprintf (fun s -> Log.debug "cnx:%d:%s" t.id s) fmt +let log t fmt = Format.kasprintf (fun s -> Log.debug "cnx:%d:%s" t.id s) fmt let create = let counter = ref 0 in - function fd -> - incr counter; - let t = { fd ; id = !counter ; nread = 0 ; nwrit = 0 } in - log t "create: fd %d" t.id ; - t + function + | fd -> + incr counter ; + let t = {fd; id = !counter; nread = 0; nwrit = 0} in + log t "create: fd %d" t.id ; t let string_of_sockaddr addr = match addr with @@ -66,42 +67,44 @@ let string_of_sockaddr addr = let id t = t.id -let socket proto kind arg = - create (Lwt_unix.socket proto kind arg) +let socket proto kind arg = create (Lwt_unix.socket proto kind arg) let close t = log t "close: stats %d/%d" t.nread t.nwrit ; Lwt_utils_unix.safe_close t.fd let read t buf pos len = - log t "try-read: %d" len; - Lwt_bytes.read t.fd buf pos len >>= fun nread -> + log t "try-read: %d" len ; + Lwt_bytes.read t.fd buf pos len + >>= fun nread -> t.nread <- t.nread + nread ; log t "read: %d (%d)" nread t.nread ; Lwt.return nread let write t buf = let len = MBytes.length buf in - log t "try-write: %d" len; - Lwt_utils_unix.write_mbytes t.fd buf >>= fun () -> + log t "try-write: %d" len ; + Lwt_utils_unix.write_mbytes t.fd buf + >>= fun () -> t.nwrit <- t.nwrit + len ; log t "written: %d (%d)" len t.nwrit ; Lwt.return_unit let connect t saddr = - log t "connect: %s" (string_of_sockaddr saddr); + log t "connect: %s" (string_of_sockaddr saddr) ; Lwt_unix.connect t.fd saddr let accept sock = - Lwt_unix.accept sock >>= fun (fd, saddr) -> + Lwt_unix.accept sock + >>= fun (fd, saddr) -> let t = create fd in - log t "accept: %s" (string_of_sockaddr saddr); + log t "accept: %s" (string_of_sockaddr saddr) ; Lwt.return (t, saddr) -module Table = - Hashtbl.Make(struct - type nonrec t = t - let equal { id = x ; _ } { id = y ; _ } = x = y - let hash { id ; _ } = Hashtbl.hash id - end) +module Table = Hashtbl.Make (struct + type nonrec t = t + + let equal {id = x; _} {id = y; _} = x = y + let hash {id; _} = Hashtbl.hash id +end) diff --git a/src/lib_p2p/p2p_fd.mli b/src/lib_p2p/p2p_fd.mli index a8a3adb1c656fdaccc523b6368fcc3a896f5b787..1c6ff5eb7e73a2aea24e7f74b1a6f3b5b8cbd7f8 100644 --- a/src/lib_p2p/p2p_fd.mli +++ b/src/lib_p2p/p2p_fd.mli @@ -29,11 +29,17 @@ type t val id : t -> int + val read : t -> Lwt_bytes.t -> int -> int -> int Lwt.t + val close : t -> unit Lwt.t + val write : t -> MBytes.t -> unit Lwt.t + val socket : Lwt_unix.socket_domain -> Lwt_unix.socket_type -> int -> t + val connect : t -> Lwt_unix.sockaddr -> unit Lwt.t + val accept : Lwt_unix.file_descr -> (t * Lwt_unix.sockaddr) Lwt.t module Table : Hashtbl.S with type key = t diff --git a/src/lib_p2p/p2p_io_scheduler.ml b/src/lib_p2p/p2p_io_scheduler.ml index 6c5375b1425bf0f30c8953e3f37fa997506f45b0..3330f8a877744bf9d247e14577ff1bdabeba26c5 100644 --- a/src/lib_p2p/p2p_io_scheduler.ml +++ b/src/lib_p2p/p2p_io_scheduler.ml @@ -26,142 +26,148 @@ (* TODO decide whether we need to preallocate buffers or not. 
*) include Internal_event.Legacy_logging.Make (struct - let name = "p2p.io-scheduler" - end) + let name = "p2p.io-scheduler" +end) let alpha = 0.2 module type IO = sig - val name: string + val name : string + type in_param - val pop: in_param -> MBytes.t tzresult Lwt.t + + val pop : in_param -> MBytes.t tzresult Lwt.t + type out_param - val push: out_param -> MBytes.t -> unit tzresult Lwt.t - val close: out_param -> error list -> unit Lwt.t -end -module Scheduler(IO : IO) = struct + val push : out_param -> MBytes.t -> unit tzresult Lwt.t + val close : out_param -> error list -> unit Lwt.t +end + +module Scheduler (IO : IO) = struct [@@@ocaml.warning "-30"] type t = { - canceler: Lwt_canceler.t ; - mutable worker: unit Lwt.t ; - counter: Moving_average.t ; - max_speed: int option ; - mutable quota: int ; - quota_updated: unit Lwt_condition.t ; - readys: unit Lwt_condition.t ; - readys_high: (connection * MBytes.t tzresult) Queue.t ; - readys_low: (connection * MBytes.t tzresult) Queue.t ; + canceler : Lwt_canceler.t; + mutable worker : unit Lwt.t; + counter : Moving_average.t; + max_speed : int option; + mutable quota : int; + quota_updated : unit Lwt_condition.t; + readys : unit Lwt_condition.t; + readys_high : (connection * MBytes.t tzresult) Queue.t; + readys_low : (connection * MBytes.t tzresult) Queue.t } and connection = { - id: int ; - mutable closed: bool ; - canceler: Lwt_canceler.t ; - in_param: IO.in_param ; - out_param: IO.out_param ; - mutable current_pop: MBytes.t tzresult Lwt.t ; - mutable current_push: unit tzresult Lwt.t ; - counter: Moving_average.t ; - mutable quota: int ; - mutable last_quota: int ; + id : int; + mutable closed : bool; + canceler : Lwt_canceler.t; + in_param : IO.in_param; + out_param : IO.out_param; + mutable current_pop : MBytes.t tzresult Lwt.t; + mutable current_push : unit tzresult Lwt.t; + counter : Moving_average.t; + mutable quota : int; + mutable last_quota : int } let cancel (conn : connection) err = - Lwt_utils.unless conn.closed begin fun () -> - lwt_debug "Connection closed (%d, %s) " conn.id IO.name >>= fun () -> - conn.closed <- true ; - Lwt.catch - (fun () -> IO.close conn.out_param err) - (fun _ -> Lwt.return_unit) >>= fun () -> - Lwt_canceler.cancel conn.canceler - end + Lwt_utils.unless conn.closed (fun () -> + lwt_debug "Connection closed (%d, %s) " conn.id IO.name + >>= fun () -> + conn.closed <- true ; + Lwt.catch + (fun () -> IO.close conn.out_param err) + (fun _ -> Lwt.return_unit) + >>= fun () -> Lwt_canceler.cancel conn.canceler) let waiter st conn = assert (Lwt.state conn.current_pop <> Sleep) ; conn.current_pop <- IO.pop conn.in_param ; - Lwt.async begin fun () -> - conn.current_pop >>= fun res -> - conn.current_push >>= fun _ -> - let was_empty = - Queue.is_empty st.readys_high && Queue.is_empty st.readys_low in - if conn.quota > 0 then - Queue.push (conn, res) st.readys_high - else - Queue.push (conn, res) st.readys_low ; - if was_empty then Lwt_condition.broadcast st.readys () ; - Lwt.return_unit - end + Lwt.async (fun () -> + conn.current_pop + >>= fun res -> + conn.current_push + >>= fun _ -> + let was_empty = + Queue.is_empty st.readys_high && Queue.is_empty st.readys_low + in + if conn.quota > 0 then Queue.push (conn, res) st.readys_high + else Queue.push (conn, res) st.readys_low ; + if was_empty then Lwt_condition.broadcast st.readys () ; + Lwt.return_unit) let wait_data st = let is_empty = - Queue.is_empty st.readys_high && Queue.is_empty st.readys_low in + Queue.is_empty st.readys_high && Queue.is_empty 
st.readys_low + in if is_empty then Lwt_condition.wait st.readys else Lwt.return_unit let check_quota st = - if st.max_speed <> None && st.quota < 0 then begin - lwt_debug "scheduler.wait_quota(%s)" IO.name >>= fun () -> - Lwt_condition.wait st.quota_updated - end else - Lwt_unix.yield () + if st.max_speed <> None && st.quota < 0 then + lwt_debug "scheduler.wait_quota(%s)" IO.name + >>= fun () -> Lwt_condition.wait st.quota_updated + else Lwt_unix.yield () let rec worker_loop st = - check_quota st >>= fun () -> - lwt_debug "scheduler.wait(%s)" IO.name >>= fun () -> - Lwt.pick [ - Lwt_canceler.cancelation st.canceler ; - wait_data st - ] >>= fun () -> - if Lwt_canceler.canceled st.canceler then - Lwt.return_unit + check_quota st + >>= fun () -> + lwt_debug "scheduler.wait(%s)" IO.name + >>= fun () -> + Lwt.pick [Lwt_canceler.cancelation st.canceler; wait_data st] + >>= fun () -> + if Lwt_canceler.canceled st.canceler then Lwt.return_unit else - let prio, (conn, msg) = + let (prio, (conn, msg)) = if not (Queue.is_empty st.readys_high) then - true, (Queue.pop st.readys_high) - else - false, (Queue.pop st.readys_low) + (true, Queue.pop st.readys_high) + else (false, Queue.pop st.readys_low) in match msg with - | Error [ Canceled ] -> - worker_loop st - | Error ([P2p_errors.Connection_closed | - Exn ( Lwt_pipe.Closed | - Unix.Unix_error ((EBADF | ETIMEDOUT), _, _) )] - as err) -> - lwt_debug "Connection closed (pop: %d, %s)" - conn.id IO.name >>= fun () -> - cancel conn err >>= fun () -> + | Error [Canceled] -> worker_loop st + | Error + ( [ ( P2p_errors.Connection_closed + | Exn + ( Lwt_pipe.Closed + | Unix.Unix_error ((EBADF | ETIMEDOUT), _, _) ) ) ] as err ) + -> + lwt_debug "Connection closed (pop: %d, %s)" conn.id IO.name + >>= fun () -> cancel conn err >>= fun () -> worker_loop st | Error err -> lwt_log_error "@[Unexpected error in connection (pop: %d, %s):@ %a@]" - conn.id IO.name pp_print_error err >>= fun () -> - cancel conn err >>= fun () -> - worker_loop st + conn.id + IO.name + pp_print_error + err + >>= fun () -> cancel conn err >>= fun () -> worker_loop st | Ok msg -> - conn.current_push <- begin - IO.push conn.out_param msg >>= function - | Ok () - | Error [ Canceled ] -> - return_unit - | Error ([P2p_errors.Connection_closed | - Exn (Unix.Unix_error (EBADF, _, _) | - Lwt_pipe.Closed)] as err) -> - lwt_debug "Connection closed (push: %d, %s)" - conn.id IO.name >>= fun () -> - cancel conn err >>= fun () -> + conn.current_push <- + ( IO.push conn.out_param msg + >>= function + | Ok () | Error [Canceled] -> return_unit + | Error + ( [ ( P2p_errors.Connection_closed + | Exn (Unix.Unix_error (EBADF, _, _) | Lwt_pipe.Closed) ) + ] as err ) -> + lwt_debug "Connection closed (push: %d, %s)" conn.id IO.name + >>= fun () -> cancel conn err >>= fun () -> return_unit | Error err -> lwt_log_error "@[Unexpected error in connection (push: %d, %s):@ %a@]" - conn.id IO.name pp_print_error err >>= fun () -> - cancel conn err >>= fun () -> - Lwt.return_error err - end ; + conn.id + IO.name + pp_print_error + err + >>= fun () -> + cancel conn err >>= fun () -> Lwt.return_error err ) ; let len = MBytes.length msg in - lwt_debug "Handle: %d (%d, %s)" len conn.id IO.name >>= fun () -> + lwt_debug "Handle: %d (%d, %s)" len conn.id IO.name + >>= fun () -> Moving_average.add st.counter len ; st.quota <- st.quota - len ; Moving_average.add conn.counter len ; @@ -170,18 +176,20 @@ module Scheduler(IO : IO) = struct worker_loop st let create max_speed = - let st = { - canceler = Lwt_canceler.create () ; - 
worker = Lwt.return_unit ; - counter = Moving_average.create ~init:0 ~alpha ; - max_speed ; quota = Option.unopt ~default:0 max_speed ; - quota_updated = Lwt_condition.create () ; - readys = Lwt_condition.create () ; - readys_high = Queue.create () ; - readys_low = Queue.create () ; - } in + let st = + { canceler = Lwt_canceler.create (); + worker = Lwt.return_unit; + counter = Moving_average.create ~init:0 ~alpha; + max_speed; + quota = Option.unopt ~default:0 max_speed; + quota_updated = Lwt_condition.create (); + readys = Lwt_condition.create (); + readys_high = Queue.create (); + readys_low = Queue.create () } + in st.worker <- - Lwt_utils.worker IO.name + Lwt_utils.worker + IO.name ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop st) ~cancel:(fun () -> Lwt_canceler.cancel st.canceler) ; @@ -190,217 +198,236 @@ module Scheduler(IO : IO) = struct let create_connection st in_param out_param canceler id = debug "scheduler(%s).create_connection (%d)" IO.name id ; let conn = - { id ; closed = false ; - canceler ; - in_param ; out_param ; - current_pop = Lwt.fail Not_found (* dummy *) ; - current_push = return_unit ; - counter = Moving_average.create ~init:0 ~alpha ; - quota = 0 ; last_quota = 0 ; - } in - waiter st conn ; - conn + { id; + closed = false; + canceler; + in_param; + out_param; + current_pop = Lwt.fail Not_found (* dummy *); + current_push = return_unit; + counter = Moving_average.create ~init:0 ~alpha; + quota = 0; + last_quota = 0 } + in + waiter st conn ; conn let update_quota st = debug "scheduler(%s).update_quota" IO.name ; - Option.iter st.max_speed ~f:begin fun quota -> - st.quota <- (min st.quota 0) + quota ; - Lwt_condition.broadcast st.quota_updated () - end ; - if not (Queue.is_empty st.readys_low) then begin + Option.iter st.max_speed ~f:(fun quota -> + st.quota <- min st.quota 0 + quota ; + Lwt_condition.broadcast st.quota_updated ()) ; + if not (Queue.is_empty st.readys_low) then ( let tmp = Queue.create () in Queue.iter - (fun ((conn : connection), _ as msg) -> - if conn.quota > 0 then - Queue.push msg st.readys_high - else - Queue.push msg tmp) + (fun (((conn : connection), _) as msg) -> + if conn.quota > 0 then Queue.push msg st.readys_high + else Queue.push msg tmp) st.readys_low ; Queue.clear st.readys_low ; - Queue.transfer tmp st.readys_low ; - end + Queue.transfer tmp st.readys_low ) let shutdown st = - lwt_debug "--> scheduler(%s).shutdown" IO.name >>= fun () -> - Lwt_canceler.cancel st.canceler >>= fun () -> - st.worker >>= fun () -> - lwt_debug "<-- scheduler(%s).shutdown" IO.name >>= fun () -> - Lwt.return_unit - - + lwt_debug "--> scheduler(%s).shutdown" IO.name + >>= fun () -> + Lwt_canceler.cancel st.canceler + >>= fun () -> + st.worker + >>= fun () -> + lwt_debug "<-- scheduler(%s).shutdown" IO.name + >>= fun () -> Lwt.return_unit end -module ReadScheduler = Scheduler(struct - let name = "io_scheduler(read)" - type in_param = P2p_fd.t * int - let pop (fd, maxlen) = - Lwt.catch - (fun () -> - let buf = MBytes.create maxlen in - P2p_fd.read fd buf 0 maxlen >>= fun len -> - if len = 0 then - fail P2p_errors.Connection_closed - else - return (MBytes.sub buf 0 len) ) - (function - | Unix.Unix_error(Unix.ECONNRESET, _, _) -> - fail P2p_errors.Connection_closed - | exn -> - Lwt.return (error_exn exn)) - type out_param = MBytes.t tzresult Lwt_pipe.t - let push p msg = - Lwt.catch - (fun () -> Lwt_pipe.push p (Ok msg) >>= return) - (fun exn -> fail (Exn exn)) - let close p err = - Lwt.catch - (fun () -> Lwt_pipe.push p (Error 
err)) - (fun _ -> Lwt.return_unit) - end) - -module WriteScheduler = Scheduler(struct - let name = "io_scheduler(write)" - type in_param = MBytes.t Lwt_pipe.t - let pop p = - Lwt.catch - (fun () -> Lwt_pipe.pop p >>= return) - (fun _ -> fail (Exn Lwt_pipe.Closed)) - type out_param = P2p_fd.t - let push fd buf = - Lwt.catch - (fun () -> - P2p_fd.write fd buf >>= return) - (function - | Unix.Unix_error(Unix.ECONNRESET, _, _) - | Unix.Unix_error(Unix.EPIPE, _, _) - | Lwt.Canceled - | End_of_file -> - fail P2p_errors.Connection_closed - | exn -> - Lwt.return (error_exn exn)) - let close _p _err = Lwt.return_unit - end) +module ReadScheduler = Scheduler (struct + let name = "io_scheduler(read)" + + type in_param = P2p_fd.t * int + + let pop (fd, maxlen) = + Lwt.catch + (fun () -> + let buf = MBytes.create maxlen in + P2p_fd.read fd buf 0 maxlen + >>= fun len -> + if len = 0 then fail P2p_errors.Connection_closed + else return (MBytes.sub buf 0 len)) + (function + | Unix.Unix_error (Unix.ECONNRESET, _, _) -> + fail P2p_errors.Connection_closed + | exn -> + Lwt.return (error_exn exn)) + + type out_param = MBytes.t tzresult Lwt_pipe.t + + let push p msg = + Lwt.catch + (fun () -> Lwt_pipe.push p (Ok msg) >>= return) + (fun exn -> fail (Exn exn)) + + let close p err = + Lwt.catch + (fun () -> Lwt_pipe.push p (Error err)) + (fun _ -> Lwt.return_unit) +end) + +module WriteScheduler = Scheduler (struct + let name = "io_scheduler(write)" + + type in_param = MBytes.t Lwt_pipe.t + + let pop p = + Lwt.catch + (fun () -> Lwt_pipe.pop p >>= return) + (fun _ -> fail (Exn Lwt_pipe.Closed)) + + type out_param = P2p_fd.t + + let push fd buf = + Lwt.catch + (fun () -> P2p_fd.write fd buf >>= return) + (function + | Unix.Unix_error (Unix.ECONNRESET, _, _) + | Unix.Unix_error (Unix.EPIPE, _, _) + | Lwt.Canceled + | End_of_file -> + fail P2p_errors.Connection_closed + | exn -> + Lwt.return (error_exn exn)) + + let close _p _err = Lwt.return_unit +end) type connection = { - sched: t ; - conn: P2p_fd.t; - canceler: Lwt_canceler.t ; - read_conn: ReadScheduler.connection ; - read_queue: MBytes.t tzresult Lwt_pipe.t ; - write_conn: WriteScheduler.connection ; - write_queue: MBytes.t Lwt_pipe.t ; - mutable partial_read: MBytes.t option ; + sched : t; + conn : P2p_fd.t; + canceler : Lwt_canceler.t; + read_conn : ReadScheduler.connection; + read_queue : MBytes.t tzresult Lwt_pipe.t; + write_conn : WriteScheduler.connection; + write_queue : MBytes.t Lwt_pipe.t; + mutable partial_read : MBytes.t option } and t = { - mutable closed: bool ; - connected: connection P2p_fd.Table.t ; - read_scheduler: ReadScheduler.t ; - write_scheduler: WriteScheduler.t ; - max_upload_speed: int option ; (* bytes per second. *) - max_download_speed: int option ; - read_buffer_size: int ; - read_queue_size: int option ; - write_queue_size: int option ; + mutable closed : bool; + connected : connection P2p_fd.Table.t; + read_scheduler : ReadScheduler.t; + write_scheduler : WriteScheduler.t; + max_upload_speed : int option; + (* bytes per second. 
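The same unit applies to [max_download_speed] just below.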
*) + max_download_speed : int option; + read_buffer_size : int; + read_queue_size : int option; + write_queue_size : int option } let reset_quota st = debug "--> reset quota" ; - let { Moving_average.average = current_inflow ; _ } = + let {Moving_average.average = current_inflow; _} = Moving_average.stat st.read_scheduler.counter - and { Moving_average.average = current_outflow ; _ } = - Moving_average.stat st.write_scheduler.counter in + and {Moving_average.average = current_outflow; _} = + Moving_average.stat st.write_scheduler.counter + in let nb_conn = P2p_fd.Table.length st.connected in - if nb_conn > 0 then begin + ( if nb_conn > 0 then let fair_read_quota = current_inflow / nb_conn and fair_write_quota = current_outflow / nb_conn in P2p_fd.Table.iter (fun _id conn -> - conn.read_conn.last_quota <- fair_read_quota ; - conn.read_conn.quota <- - (min conn.read_conn.quota 0) + fair_read_quota ; - conn.write_conn.last_quota <- fair_write_quota ; - conn.write_conn.quota <- - (min conn.write_conn.quota 0) + fair_write_quota ; ) - st.connected - end ; + conn.read_conn.last_quota <- fair_read_quota ; + conn.read_conn.quota <- min conn.read_conn.quota 0 + fair_read_quota ; + conn.write_conn.last_quota <- fair_write_quota ; + conn.write_conn.quota <- min conn.write_conn.quota 0 + fair_write_quota) + st.connected ) ; ReadScheduler.update_quota st.read_scheduler ; WriteScheduler.update_quota st.write_scheduler -let create - ?max_upload_speed ?max_download_speed - ?read_queue_size ?write_queue_size - ~read_buffer_size - () = +let create ?max_upload_speed ?max_download_speed ?read_queue_size + ?write_queue_size ~read_buffer_size () = log_info "--> create" ; - let st = { - closed = false ; - connected = P2p_fd.Table.create 53 ; - read_scheduler = ReadScheduler.create max_download_speed ; - write_scheduler = WriteScheduler.create max_upload_speed ; - max_upload_speed ; - max_download_speed ; - read_buffer_size ; - read_queue_size ; - write_queue_size ; - } in + let st = + { closed = false; + connected = P2p_fd.Table.create 53; + read_scheduler = ReadScheduler.create max_download_speed; + write_scheduler = WriteScheduler.create max_upload_speed; + max_upload_speed; + max_download_speed; + read_buffer_size; + read_queue_size; + write_queue_size } + in Moving_average.on_update (fun () -> reset_quota st) ; st exception Closed let read_size = function - | Ok buf -> (Sys.word_size / 8) * 8 + MBytes.length buf + Lwt_pipe.push_overhead - | Error _ -> 0 (* we push Error only when we close the socket, + | Ok buf -> + (Sys.word_size / 8 * 8) + MBytes.length buf + Lwt_pipe.push_overhead + | Error _ -> + 0 + +(* we push Error only when we close the socket, we don't fear memory leaks in that case... 
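Hence the [Error] case can safely be counted as 0 bytes in the bounded read queue.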
*) let write_size mbytes = - (Sys.word_size / 8) * 6 + MBytes.length mbytes + Lwt_pipe.push_overhead + (Sys.word_size / 8 * 6) + MBytes.length mbytes + Lwt_pipe.push_overhead let register st conn = - if st.closed then begin + if st.closed then ( Lwt.async (fun () -> P2p_fd.close conn) ; - raise Closed - end else begin + raise Closed ) + else let id = P2p_fd.id conn in let canceler = Lwt_canceler.create () in let read_size = - Option.map st.read_queue_size ~f:(fun v -> v, read_size) in + Option.map st.read_queue_size ~f:(fun v -> (v, read_size)) + in let write_size = - Option.map st.write_queue_size ~f:(fun v -> v, write_size) in + Option.map st.write_queue_size ~f:(fun v -> (v, write_size)) + in let read_queue = Lwt_pipe.create ?size:read_size () in let write_queue = Lwt_pipe.create ?size:write_size () in let read_conn = ReadScheduler.create_connection - st.read_scheduler (conn, st.read_buffer_size) read_queue canceler id + st.read_scheduler + (conn, st.read_buffer_size) + read_queue + canceler + id and write_conn = WriteScheduler.create_connection - st.write_scheduler write_queue conn canceler id in - Lwt_canceler.on_cancel canceler begin fun () -> - P2p_fd.Table.remove st.connected conn ; - Moving_average.destroy read_conn.counter ; - Moving_average.destroy write_conn.counter ; - Lwt_pipe.close write_queue ; - Lwt_pipe.close read_queue ; - P2p_fd.close conn - end ; - let conn = { - sched = st ; conn ; canceler ; - read_queue ; read_conn ; - write_queue ; write_conn ; - partial_read = None ; - } in + st.write_scheduler + write_queue + conn + canceler + id + in + Lwt_canceler.on_cancel canceler (fun () -> + P2p_fd.Table.remove st.connected conn ; + Moving_average.destroy read_conn.counter ; + Moving_average.destroy write_conn.counter ; + Lwt_pipe.close write_queue ; + Lwt_pipe.close read_queue ; + P2p_fd.close conn) ; + let conn = + { sched = st; + conn; + canceler; + read_queue; + read_conn; + write_queue; + write_conn; + partial_read = None } + in P2p_fd.Table.add st.connected conn.conn conn ; log_info "--> register (%d)" id ; conn - end -let write ?canceler { write_queue ; _ } msg = - trace P2p_errors.Connection_closed @@ - protect ?canceler begin fun () -> - Lwt_pipe.push write_queue msg >>= return - end -let write_now { write_queue ; _ } msg = Lwt_pipe.push_now write_queue msg +let write ?canceler {write_queue; _} msg = + trace P2p_errors.Connection_closed + @@ protect ?canceler (fun () -> Lwt_pipe.push write_queue msg >>= return) + +let write_now {write_queue; _} msg = Lwt_pipe.push_now write_queue msg let read_from conn ?pos ?len buf msg = let maxlen = MBytes.length buf in @@ -425,12 +452,12 @@ let read_now conn ?pos ?len buf = | Some msg -> conn.partial_read <- None ; Some (read_from conn ?pos ?len buf (Ok msg)) - | None -> - try - Option.map - ~f:(read_from conn ?pos ?len buf) - (Lwt_pipe.pop_now conn.read_queue) - with Lwt_pipe.Closed -> Some (Error [P2p_errors.Connection_closed]) + | None -> ( + try + Option.map + ~f:(read_from conn ?pos ?len buf) + (Lwt_pipe.pop_now conn.read_queue) + with Lwt_pipe.Closed -> Some (Error [P2p_errors.Connection_closed]) ) let read ?canceler conn ?pos ?len buf = match conn.partial_read with @@ -440,10 +467,8 @@ let read ?canceler conn ?pos ?len buf = | None -> Lwt.catch (fun () -> - protect ?canceler begin fun () -> - Lwt_pipe.pop conn.read_queue - end >|= fun msg -> - read_from conn ?pos ?len buf msg) + protect ?canceler (fun () -> Lwt_pipe.pop conn.read_queue) + >|= fun msg -> read_from conn ?pos ?len buf msg) (fun _ -> fail 
P2p_errors.Connection_closed) let read_full ?canceler conn ?pos ?len buf = @@ -453,62 +478,62 @@ let read_full ?canceler conn ?pos ?len buf = assert (0 <= pos && pos < maxlen) ; assert (len <= maxlen - pos) ; let rec loop pos len = - if len = 0 then - return_unit + if len = 0 then return_unit else - read ?canceler conn ~pos ~len buf >>=? fun read_len -> - loop (pos + read_len) (len - read_len) in + read ?canceler conn ~pos ~len buf + >>=? fun read_len -> loop (pos + read_len) (len - read_len) + in loop pos len let convert ~ws ~rs = - { P2p_stat.total_sent = ws.Moving_average.total ; - total_recv = rs.Moving_average.total ; - current_outflow = ws.average ; - current_inflow = rs.average ; - } + { P2p_stat.total_sent = ws.Moving_average.total; + total_recv = rs.Moving_average.total; + current_outflow = ws.average; + current_inflow = rs.average } -let global_stat { read_scheduler ; write_scheduler ; _ } = +let global_stat {read_scheduler; write_scheduler; _} = let rs = Moving_average.stat read_scheduler.counter and ws = Moving_average.stat write_scheduler.counter in convert ~rs ~ws -let stat { read_conn ; write_conn ; _ } = +let stat {read_conn; write_conn; _} = let rs = Moving_average.stat read_conn.counter and ws = Moving_average.stat write_conn.counter in convert ~rs ~ws let close ?timeout conn = let id = P2p_fd.id conn.conn in - lwt_log_info "--> close (%d)" id >>= fun () -> + lwt_log_info "--> close (%d)" id + >>= fun () -> P2p_fd.Table.remove conn.sched.connected conn.conn ; Lwt_pipe.close conn.write_queue ; - begin - match timeout with - | None -> - return (Lwt_canceler.cancelation conn.canceler) - | Some timeout -> - with_timeout - ~canceler:conn.canceler - (Lwt_unix.sleep timeout) - (fun canceler -> return (Lwt_canceler.cancelation canceler)) - end >>=? fun _ -> - conn.write_conn.current_push >>= fun res -> - lwt_log_info "<-- close (%d)" id >>= fun () -> - Lwt.return res - -let iter_connection { connected ; _ } f = + ( match timeout with + | None -> + return (Lwt_canceler.cancelation conn.canceler) + | Some timeout -> + with_timeout + ~canceler:conn.canceler + (Lwt_unix.sleep timeout) + (fun canceler -> return (Lwt_canceler.cancelation canceler)) ) + >>=? fun _ -> + conn.write_conn.current_push + >>= fun res -> lwt_log_info "<-- close (%d)" id >>= fun () -> Lwt.return res + +let iter_connection {connected; _} f = P2p_fd.Table.iter (fun _ conn -> f conn) connected let shutdown ?timeout st = - lwt_log_info "--> shutdown" >>= fun () -> + lwt_log_info "--> shutdown" + >>= fun () -> st.closed <- true ; - ReadScheduler.shutdown st.read_scheduler >>= fun () -> + ReadScheduler.shutdown st.read_scheduler + >>= fun () -> P2p_fd.Table.fold (fun _peer_id conn acc -> close ?timeout conn >>= fun _ -> acc) st.connected - Lwt.return_unit >>= fun () -> - WriteScheduler.shutdown st.write_scheduler >>= fun () -> - lwt_log_info "<-- shutdown" >>= fun () -> - Lwt.return_unit + Lwt.return_unit + >>= fun () -> + WriteScheduler.shutdown st.write_scheduler + >>= fun () -> lwt_log_info "<-- shutdown" >>= fun () -> Lwt.return_unit let id conn = P2p_fd.id conn.conn diff --git a/src/lib_p2p/p2p_io_scheduler.mli b/src/lib_p2p/p2p_io_scheduler.mli index a259d5f3f5ea5c565d83f75168068cc79b17fd6b..7af11ad354d3fd4ede978e7b98f50e138381114e 100644 --- a/src/lib_p2p/p2p_io_scheduler.mli +++ b/src/lib_p2p/p2p_io_scheduler.mli @@ -40,73 +40,81 @@ num_connections). *) -type connection (** Type of a connection. *) +type connection -type t (** Type of an IO scheduler. 
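   A scheduler multiplexes the reads and writes of all registered
   connections and periodically redistributes the configured global
   upload/download budget fairly among them.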
*) +type t -val create: +(** [create ~max_upload_speed ~max_download_speed ~read_queue_size + ~write_queue_size ()] is an IO scheduler with specified (global) + max upload (resp. download) speed, and specified read + (resp. write) queue sizes (in bytes) for connections. *) +val create : ?max_upload_speed:int -> ?max_download_speed:int -> ?read_queue_size:int -> ?write_queue_size:int -> read_buffer_size:int -> - unit -> t -(** [create ~max_upload_speed ~max_download_speed ~read_queue_size - ~write_queue_size ()] is an IO scheduler with specified (global) - max upload (resp. download) speed, and specified read - (resp. write) queue sizes (in bytes) for connections. *) + unit -> + t -val register: t -> P2p_fd.t -> connection (** [register sched fd] is a [connection] managed by [sched]. *) +val register : t -> P2p_fd.t -> connection -val write: - ?canceler:Lwt_canceler.t -> - connection -> MBytes.t -> unit tzresult Lwt.t (** [write conn msg] returns [Ok ()] when [msg] has been added to [conn]'s write queue, or fail with an error. *) +val write : + ?canceler:Lwt_canceler.t -> connection -> MBytes.t -> unit tzresult Lwt.t -val write_now: connection -> MBytes.t -> bool (** [write_now conn msg] is [true] iff [msg] has been (immediately) added to [conn]'s write queue, [false] if it has been dropped. *) +val write_now : connection -> MBytes.t -> bool -val read_now: - connection -> ?pos:int -> ?len:int -> MBytes.t -> int tzresult option (** [read_now conn ~pos ~len buf] blits at most [len] bytes from [conn]'s read queue and returns the number of bytes written in [buf] starting at [pos]. *) +val read_now : + connection -> ?pos:int -> ?len:int -> MBytes.t -> int tzresult option -val read: - ?canceler:Lwt_canceler.t -> - connection -> ?pos:int -> ?len:int -> MBytes.t -> int tzresult Lwt.t (** Like [read_now], but waits till [conn] read queue has at least one element instead of failing. *) - -val read_full: +val read : ?canceler:Lwt_canceler.t -> - connection -> ?pos:int -> ?len:int -> MBytes.t -> unit tzresult Lwt.t + connection -> + ?pos:int -> + ?len:int -> + MBytes.t -> + int tzresult Lwt.t + (** Like [read], but blits exactly [len] bytes in [buf]. *) +val read_full : + ?canceler:Lwt_canceler.t -> + connection -> + ?pos:int -> + ?len:int -> + MBytes.t -> + unit tzresult Lwt.t -val stat: connection -> P2p_stat.t (** [stat conn] is a snapshot of current bandwidth usage for [conn]. *) +val stat : connection -> P2p_stat.t -val global_stat: t -> P2p_stat.t (** [global_stat sched] is a snapshot of [sched]'s bandwidth usage (sum of [stat conn] for each [conn] in [sched]). *) +val global_stat : t -> P2p_stat.t -val iter_connection: t -> (connection -> unit) -> unit (** [iter_connection sched f] applies [f] on each connection managed by [sched]. *) +val iter_connection : t -> (connection -> unit) -> unit -val close: ?timeout:float -> connection -> unit tzresult Lwt.t (** [close conn] cancels [conn] and returns after any pending data has been sent. *) +val close : ?timeout:float -> connection -> unit tzresult Lwt.t -val shutdown: ?timeout:float -> t -> unit Lwt.t (** [shutdown sched] returns after all connections managed by [sched] have been closed and [sched]'s inner worker has successfully canceled. 
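   A typical life cycle, as an illustrative sketch only (not part of
   this change; [fd : P2p_fd.t] and [msg : MBytes.t] are assumed to be
   provided by the caller, and the usual error-monad operators are in
   scope):
   {[
     let sched = create ~read_buffer_size:(1 lsl 14) () in
     let conn = register sched fd in
     write conn msg
     >>=? fun () ->
     close conn
     >>=? fun () -> shutdown sched >>= fun () -> return_unit
   ]}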
*) +val shutdown : ?timeout:float -> t -> unit Lwt.t val id : connection -> int diff --git a/src/lib_p2p/p2p_maintenance.ml b/src/lib_p2p/p2p_maintenance.ml index 48f6c11c710f30b7cb88976c9bcdd43061d9b324..410c644749be3984b1731695493ac995e24c8d76 100644 --- a/src/lib_p2p/p2p_maintenance.ml +++ b/src/lib_p2p/p2p_maintenance.ml @@ -24,32 +24,34 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make (struct let name = "p2p.maintenance" end) +include Internal_event.Legacy_logging.Make (struct + let name = "p2p.maintenance" +end) type bounds = { - min_threshold: int ; - min_target: int ; - max_target: int ; - max_threshold: int ; + min_threshold : int; + min_target : int; + max_target : int; + max_threshold : int } type config = { - maintenance_idle_time: Time.System.Span.t ; - greylist_timeout: Time.System.Span.t ; - private_mode: bool ; + maintenance_idle_time : Time.System.Span.t; + greylist_timeout : Time.System.Span.t; + private_mode : bool } type 'meta pool = Pool : ('msg, 'meta, 'meta_conn) P2p_pool.t -> 'meta pool type 'meta t = { - canceler: Lwt_canceler.t ; - config: config ; - bounds: bounds ; - pool: 'meta pool ; - discovery: P2p_discovery.t option ; - just_maintained: unit Lwt_condition.t ; - please_maintain: unit Lwt_condition.t ; - mutable maintain_worker: unit Lwt.t ; + canceler : Lwt_canceler.t; + config : config; + bounds : bounds; + pool : 'meta pool; + discovery : P2p_discovery.t option; + just_maintained : unit Lwt_condition.t; + please_maintain : unit Lwt_condition.t; + mutable maintain_worker : unit Lwt.t } (** Select [expected] points among the disconnected known points. @@ -58,209 +60,216 @@ type 'meta t = { first selects points with the oldest last tentative. Non-trusted points are also ignored if option --private-mode is set. 
*) let connectable st start_time expected seen_points = - let Pool pool = st.pool in + let (Pool pool) = st.pool in let now = Systime_os.now () in - let module Bounded_point_info = - List.Bounded(struct - type t = (Time.System.t option * P2p_point.Id.t) - let compare (t1, _) (t2, _) = - match t1, t2 with - | None, None -> 0 - | None, Some _ -> 1 - | Some _, None -> -1 - | Some t1, Some t2 -> Time.System.compare t2 t1 - end) in + let module Bounded_point_info = List.Bounded (struct + type t = Time.System.t option * P2p_point.Id.t + + let compare (t1, _) (t2, _) = + match (t1, t2) with + | (None, None) -> + 0 + | (None, Some _) -> + 1 + | (Some _, None) -> + -1 + | (Some t1, Some t2) -> + Time.System.compare t2 t1 + end) in let acc = Bounded_point_info.create expected in let seen_points = - P2p_pool.Points.fold_known pool ~init:seen_points - ~f:begin fun point pi seen_points -> + P2p_pool.Points.fold_known + pool + ~init:seen_points + ~f:(fun point pi seen_points -> (* consider the point only if: - it is not in seen_points and - it is not banned, and - it is trusted if we are in `closed` mode *) - if P2p_point.Set.mem point seen_points || - P2p_pool.Points.banned pool point || - (st.config.private_mode && not (P2p_point_state.Info.trusted pi)) - then - seen_points + if + P2p_point.Set.mem point seen_points + || P2p_pool.Points.banned pool point + || (st.config.private_mode && not (P2p_point_state.Info.trusted pi)) + then seen_points else let seen_points = P2p_point.Set.add point seen_points in match P2p_point_state.get pi with - | Disconnected -> begin - match P2p_point_state.Info.last_miss pi with - | Some last when Time.System.(start_time < last) - || P2p_point_state.Info.greylisted ~now pi -> - seen_points - | last -> - Bounded_point_info.insert (last, point) acc ; - seen_points - end - | _ -> seen_points - end + | Disconnected -> ( + match P2p_point_state.Info.last_miss pi with + | Some last + when Time.System.(start_time < last) + || P2p_point_state.Info.greylisted ~now pi -> + seen_points + | last -> + Bounded_point_info.insert (last, point) acc ; + seen_points ) + | _ -> + seen_points) in - List.map snd (Bounded_point_info.get acc), seen_points + (List.map snd (Bounded_point_info.get acc), seen_points) (** Try to create connections to new peers. It tries to create at least [min_to_contact] connections, and will never creates more than [max_to_contact]. But, if after trying once all disconnected peers, it returns [false]. 
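   In other words, a [false] result means that a single pass over all
   the known disconnected points did not yield the requested
   [min_to_contact] new connections.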
*) -let rec try_to_contact - st ?(start_time = Systime_os.now ()) ~seen_points +let rec try_to_contact st ?(start_time = Systime_os.now ()) ~seen_points min_to_contact max_to_contact = - let Pool pool = st.pool in - if min_to_contact <= 0 then - Lwt.return_true + let (Pool pool) = st.pool in + if min_to_contact <= 0 then Lwt.return_true else - let contactable, seen_points = - connectable st start_time max_to_contact seen_points in - if contactable = [] then - Lwt_unix.yield () >>= fun () -> - Lwt.return_false + let (contactable, seen_points) = + connectable st start_time max_to_contact seen_points + in + if contactable = [] then Lwt_unix.yield () >>= fun () -> Lwt.return_false else List.fold_left (fun acc point -> - protect ~canceler:st.canceler begin fun () -> - P2p_pool.connect pool point - end >>= function - | Ok _ -> acc >|= succ - | Error _ -> acc) + protect ~canceler:st.canceler (fun () -> P2p_pool.connect pool point) + >>= function Ok _ -> acc >|= succ | Error _ -> acc) (Lwt.return 0) - contactable >>= fun established -> - try_to_contact st ~start_time ~seen_points - (min_to_contact - established) (max_to_contact - established) + contactable + >>= fun established -> + try_to_contact + st + ~start_time + ~seen_points + (min_to_contact - established) + (max_to_contact - established) (** Do a maintenance step. It will terminate only when the number of connections is between `min_threshold` and `max_threshold`. Do a pass in the list of banned peers and remove all peers that have been banned for more then xxx seconds *) let rec maintain st = - let Pool pool = st.pool in + let (Pool pool) = st.pool in let n_connected = P2p_pool.active_connections pool in let older_than = Option.unopt_exn (Failure "P2p_maintenance.maintain: time overflow") - (Ptime.add_span (Systime_os.now ()) (Ptime.Span.neg st.config.greylist_timeout)) + (Ptime.add_span + (Systime_os.now ()) + (Ptime.Span.neg st.config.greylist_timeout)) in P2p_pool.gc_greylist pool ~older_than ; if n_connected < st.bounds.min_threshold then too_few_connections st n_connected else if st.bounds.max_threshold < n_connected then too_many_connections st n_connected - else begin + else ( (* end of maintenance when enough users have been reached *) Lwt_condition.broadcast st.just_maintained () ; - lwt_debug "Maintenance step ended" >>= fun () -> - return_unit - end + lwt_debug "Maintenance step ended" >>= fun () -> return_unit ) and too_few_connections st n_connected = - let Pool pool = st.pool in + let (Pool pool) = st.pool in (* too few connections, try and contact many peers *) - lwt_log_notice "Too few connections (%d)" n_connected >>= fun () -> + lwt_log_notice "Too few connections (%d)" n_connected + >>= fun () -> let min_to_contact = st.bounds.min_target - n_connected in let max_to_contact = st.bounds.max_target - n_connected in try_to_contact - st min_to_contact max_to_contact ~seen_points:P2p_point.Set.empty >>= - fun success -> - if success then begin - maintain st - end else begin + st + min_to_contact + max_to_contact + ~seen_points:P2p_point.Set.empty + >>= fun success -> + if success then maintain st + else ( (* not enough contacts, ask the pals of our pals, discover the local network and then wait *) P2p_pool.broadcast_bootstrap_msg pool ; Option.iter ~f:P2p_discovery.wakeup st.discovery ; - protect ~canceler:st.canceler begin fun () -> - Lwt.pick [ - P2p_pool.Pool_event.wait_new_peer pool ; - P2p_pool.Pool_event.wait_new_point pool ; - Lwt_unix.sleep 5.0 (* TODO exponential back-off ?? 
+ protect ~canceler:st.canceler (fun () -> + Lwt.pick + [ P2p_pool.Pool_event.wait_new_peer pool; + P2p_pool.Pool_event.wait_new_point pool; + Lwt_unix.sleep 5.0 + (* TODO exponential back-off ?? or wait for the existence of a non grey-listed peer ?? *) - ] >>= return - end >>=? fun () -> - maintain st - end + ] + >>= return) + >>=? fun () -> maintain st ) and too_many_connections st n_connected = - let Pool pool = st.pool in + let (Pool pool) = st.pool in (* too many connections, start the russian roulette *) let to_kill = n_connected - st.bounds.max_target in - lwt_log_notice "Too many connections, will kill %d" to_kill >>= fun () -> - let connections = TzList.rev_sub - (TzList.shuffle @@ - P2p_pool.Connection.fold pool - ~init:[] - ~f:(fun _ conn acc -> - if (P2p_pool.Connection.private_node conn - && P2p_pool.Connection.trusted_node conn) then - acc - else - conn::acc)) + lwt_log_notice "Too many connections, will kill %d" to_kill + >>= fun () -> + let connections = + TzList.rev_sub + ( TzList.shuffle + @@ P2p_pool.Connection.fold pool ~init:[] ~f:(fun _ conn acc -> + if + P2p_pool.Connection.private_node conn + && P2p_pool.Connection.trusted_node conn + then acc + else conn :: acc) ) to_kill in - Lwt_list.iter_p P2p_pool.disconnect connections - >>= fun () -> - maintain st + Lwt_list.iter_p P2p_pool.disconnect connections >>= fun () -> maintain st let rec worker_loop st = - let Pool pool = st.pool in - begin - protect ~canceler:st.canceler begin fun () -> - Lwt.pick [ - Systime_os.sleep st.config.maintenance_idle_time ; (* default: every two minutes *) - Lwt_condition.wait st.please_maintain ; (* when asked *) - P2p_pool.Pool_event.wait_too_few_connections pool ; (* limits *) - P2p_pool.Pool_event.wait_too_many_connections pool ; - ] >>= fun () -> - return_unit - end >>=? fun () -> - let n_connected = P2p_pool.active_connections pool in - if n_connected < st.bounds.min_threshold - || st.bounds.max_threshold < n_connected then - maintain st - else begin - P2p_pool.send_swap_request pool ; - return_unit - end - end >>= function - | Ok () -> worker_loop st - | Error [ Canceled ] -> Lwt.return_unit - | Error _ -> Lwt.return_unit + let (Pool pool) = st.pool in + protect ~canceler:st.canceler (fun () -> + Lwt.pick + [ Systime_os.sleep st.config.maintenance_idle_time; + (* default: every two minutes *) + Lwt_condition.wait st.please_maintain; + (* when asked *) + P2p_pool.Pool_event.wait_too_few_connections pool; + (* limits *) + P2p_pool.Pool_event.wait_too_many_connections pool ] + >>= fun () -> return_unit) + >>=? 
(fun () -> + let n_connected = P2p_pool.active_connections pool in + if + n_connected < st.bounds.min_threshold + || st.bounds.max_threshold < n_connected + then maintain st + else ( + P2p_pool.send_swap_request pool ; + return_unit )) + >>= function + | Ok () -> + worker_loop st + | Error [Canceled] -> + Lwt.return_unit + | Error _ -> + Lwt.return_unit -let create ?discovery config bounds pool = { - canceler = Lwt_canceler.create () ; - config ; - bounds ; - discovery ; - pool = Pool pool ; - just_maintained = Lwt_condition.create () ; - please_maintain = Lwt_condition.create () ; - maintain_worker = Lwt.return_unit ; -} +let create ?discovery config bounds pool = + { canceler = Lwt_canceler.create (); + config; + bounds; + discovery; + pool = Pool pool; + just_maintained = Lwt_condition.create (); + please_maintain = Lwt_condition.create (); + maintain_worker = Lwt.return_unit } let activate st = st.maintain_worker <- - Lwt_utils.worker "maintenance" + Lwt_utils.worker + "maintenance" ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop st) ~cancel:(fun () -> Lwt_canceler.cancel st.canceler) ; Option.iter st.discovery ~f:P2p_discovery.activate -let maintain { just_maintained ; please_maintain ; _ } = +let maintain {just_maintained; please_maintain; _} = let wait = Lwt_condition.wait just_maintained in Lwt_condition.broadcast please_maintain () ; wait -let shutdown { - canceler ; - discovery ; - maintain_worker ; - just_maintained ; - _ ; - } = - Lwt_canceler.cancel canceler >>= fun () -> - Lwt_utils.may ~f:P2p_discovery.shutdown discovery >>= fun () -> - maintain_worker >>= fun () -> +let shutdown {canceler; discovery; maintain_worker; just_maintained; _} = + Lwt_canceler.cancel canceler + >>= fun () -> + Lwt_utils.may ~f:P2p_discovery.shutdown discovery + >>= fun () -> + maintain_worker + >>= fun () -> Lwt_condition.broadcast just_maintained () ; Lwt.return_unit diff --git a/src/lib_p2p/p2p_maintenance.mli b/src/lib_p2p/p2p_maintenance.mli index 7058192d1670ac0766e5691870aa4cb391ed7e55..5698e2b9f634dd0c3fb118301c3aaab400d5da18 100644 --- a/src/lib_p2p/p2p_maintenance.mli +++ b/src/lib_p2p/p2p_maintenance.mli @@ -44,49 +44,45 @@ might ask its actual peers for new peers. *) type bounds = { - min_threshold: int ; - min_target: int ; - max_target: int ; - max_threshold: int ; + min_threshold : int; + min_target : int; + max_target : int; + max_threshold : int } type config = { - - maintenance_idle_time: Time.System.Span.t ; - (** How long to wait at most, in seconds, before running a maintenance loop. *) - - greylist_timeout: Time.System.Span.t ; - (** GC delay for the greylists tables, in seconds. *) - - private_mode: bool ; - (** If [true], only open outgoing/accept incoming connections + maintenance_idle_time : Time.System.Span.t; + (** How long to wait at most, in seconds, before running a maintenance loop. *) + greylist_timeout : Time.System.Span.t; + (** GC delay for the greylists tables, in seconds. *) + private_mode : bool + (** If [true], only open outgoing/accept incoming connections to/from peers whose addresses are in [trusted_peers], and inform these peers that the identity of this node should be revealed to the rest of the network. *) - } - -type 'meta t (** Type of a maintenance worker. 
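   The worker steers the number of active connections back between the
   configured [bounds]; an illustrative (purely hypothetical) setting
   would be
   {[
     { min_threshold = 10; min_target = 20; max_target = 40;
       max_threshold = 60 }
   ]}
   i.e. maintenance is triggered below 10 or above 60 connections and
   aims for a count between 20 and 40.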
*) +type 'meta t -val create: - ?discovery:P2p_discovery.t -> - config -> bounds -> - ('msg, 'meta, 'meta_conn) P2p_pool.t -> - 'meta t (** [run ?discovery config bounds pool] returns a maintenance worker, with the [discovery] worker if present, for [pool] with connection targets specified in [bounds]. *) +val create : + ?discovery:P2p_discovery.t -> + config -> + bounds -> + ('msg, 'meta, 'meta_conn) P2p_pool.t -> + 'meta t -val activate: 'meta t -> unit (** [activate t] start the worker that will maintain connections *) +val activate : 'meta t -> unit -val maintain: 'meta t -> unit Lwt.t (** [maintain t] gives a hint to maintenance worker [t] that maintenance is needed and returns whenever [t] has done a maintenance cycle. *) +val maintain : 'meta t -> unit Lwt.t -val shutdown: 'meta t -> unit Lwt.t (** [shutdown t] is a thread that returns whenever [t] has successfully shut down. *) +val shutdown : 'meta t -> unit Lwt.t diff --git a/src/lib_p2p/p2p_message.ml b/src/lib_p2p/p2p_message.ml index ca04e8617451d72b1c2abca0b35f774d12f584f2..b5912ad6beda97314a69f19bbd226d2bb7786fe6 100644 --- a/src/lib_p2p/p2p_message.ml +++ b/src/lib_p2p/p2p_message.ml @@ -24,16 +24,15 @@ (* *) (*****************************************************************************) - -type 'msg encoding = Encoding : { - tag: int ; - title: string ; - encoding: 'a Data_encoding.t ; - wrap: 'a -> 'msg ; - unwrap: 'msg -> 'a option ; - max_length: int option ; - } -> 'msg encoding - +type 'msg encoding = + | Encoding : + { tag : int; + title : string; + encoding : 'a Data_encoding.t; + wrap : 'a -> 'msg; + unwrap : 'msg -> 'a option; + max_length : int option } + -> 'msg encoding type 'msg t = | Bootstrap @@ -43,49 +42,64 @@ type 'msg t = | Message of 'msg | Disconnect - let encoding msg_encoding = let open Data_encoding in - dynamic_size @@ - union ~tag_size:`Uint16 - ([ case (Tag 0x01) ~title:"Disconnect" - (obj1 (req "kind" (constant "Disconnect"))) - (function Disconnect -> Some () | _ -> None) - (fun () -> Disconnect); - case (Tag 0x02) ~title:"Bootstrap" - (obj1 (req "kind" (constant "Bootstrap"))) - (function Bootstrap -> Some () | _ -> None) - (fun () -> Bootstrap); - case (Tag 0x03) ~title:"Advertise" - (obj2 - (req "id" (Variable.list P2p_point.Id.encoding)) - (req "kind" (constant "Advertise"))) - (function Advertise points -> Some (points, ()) | _ -> None) - (fun (points, ()) -> Advertise points); - case (Tag 0x04) ~title:"Swap_request" - (obj3 - (req "point" P2p_point.Id.encoding) - (req "peer_id" P2p_peer.Id.encoding) - (req "kind" (constant "Swap_request"))) - (function - | Swap_request (point, peer_id) -> Some (point, peer_id, ()) - | _ -> None) - (fun (point, peer_id, ()) -> Swap_request (point, peer_id)) ; - case (Tag 0x05) - ~title:"Swap_ack" - (obj3 - (req "point" P2p_point.Id.encoding) - (req "peer_id" P2p_peer.Id.encoding) - (req "kind" (constant "Swap_ack"))) - (function - | Swap_ack (point, peer_id) -> Some (point, peer_id, ()) - | _ -> None) - (fun (point, peer_id, ()) -> Swap_ack (point, peer_id)) ; - ] @ - ListLabels.map msg_encoding - ~f:(function Encoding { tag ; title ; encoding ; wrap ; unwrap ; max_length = _ (* ?? 
*) } -> - Data_encoding.case (Tag tag) - ~title - encoding - (function Message msg -> unwrap msg | _ -> None) - (fun msg -> Message (wrap msg)))) + dynamic_size + @@ union + ~tag_size:`Uint16 + ( [ case + (Tag 0x01) + ~title:"Disconnect" + (obj1 (req "kind" (constant "Disconnect"))) + (function Disconnect -> Some () | _ -> None) + (fun () -> Disconnect); + case + (Tag 0x02) + ~title:"Bootstrap" + (obj1 (req "kind" (constant "Bootstrap"))) + (function Bootstrap -> Some () | _ -> None) + (fun () -> Bootstrap); + case + (Tag 0x03) + ~title:"Advertise" + (obj2 + (req "id" (Variable.list P2p_point.Id.encoding)) + (req "kind" (constant "Advertise"))) + (function Advertise points -> Some (points, ()) | _ -> None) + (fun (points, ()) -> Advertise points); + case + (Tag 0x04) + ~title:"Swap_request" + (obj3 + (req "point" P2p_point.Id.encoding) + (req "peer_id" P2p_peer.Id.encoding) + (req "kind" (constant "Swap_request"))) + (function + | Swap_request (point, peer_id) -> + Some (point, peer_id, ()) + | _ -> + None) + (fun (point, peer_id, ()) -> Swap_request (point, peer_id)); + case + (Tag 0x05) + ~title:"Swap_ack" + (obj3 + (req "point" P2p_point.Id.encoding) + (req "peer_id" P2p_peer.Id.encoding) + (req "kind" (constant "Swap_ack"))) + (function + | Swap_ack (point, peer_id) -> + Some (point, peer_id, ()) + | _ -> + None) + (fun (point, peer_id, ()) -> Swap_ack (point, peer_id)) ] + @ ListLabels.map msg_encoding ~f:(function + | Encoding + {tag; title; encoding; wrap; unwrap; max_length = _ (* ?? *)} + -> + Data_encoding.case + (Tag tag) + ~title + encoding + (function Message msg -> unwrap msg | _ -> None) + (fun msg -> Message (wrap msg))) ) diff --git a/src/lib_p2p/p2p_message.mli b/src/lib_p2p/p2p_message.mli index e7249ca78bf23ebacb14d0b5ff14a09a83e34417..7813934265cd171ec3ecb6aec841b0041cca8f1d 100644 --- a/src/lib_p2p/p2p_message.mli +++ b/src/lib_p2p/p2p_message.mli @@ -45,27 +45,25 @@ mechanism on an actual network. Is it the added complexity worth it, wouldn't it be enough to rely on [Advertise]? *) -type 'msg encoding = Encoding : { - tag: int ; - title: string ; - encoding: 'a Data_encoding.t ; - wrap: 'a -> 'msg ; - unwrap: 'msg -> 'a option ; - max_length: int option ; - } -> 'msg encoding +type 'msg encoding = + | Encoding : + { tag : int; + title : string; + encoding : 'a Data_encoding.t; + wrap : 'a -> 'msg; + unwrap : 'msg -> 'a option; + max_length : int option } + -> 'msg encoding type 'msg t = - | Bootstrap - (** Welcome message sent by a peer upon connection *) + | Bootstrap (** Welcome message sent by a peer upon connection *) | Advertise of P2p_point.Id.t list - (** Response to a [Bootstrap] message, contains list of known points *) + (** Response to a [Bootstrap] message, contains list of known points *) | Swap_request of P2p_point.Id.t * P2p_peer.Id.t - (** Propose new peer/point and ask a peer/point to swap with *) + (** Propose new peer/point and ask a peer/point to swap with *) | Swap_ack of P2p_point.Id.t * P2p_peer.Id.t - (** Response to a swap request and propose peer/point to swap with. *) - | Message of 'msg - (** Generic upper-layer message *) - | Disconnect - (** Ending of connection *) + (** Response to a swap request and propose peer/point to swap with. 
*) + | Message of 'msg (** Generic upper-layer message *) + | Disconnect (** Ending of connection *) val encoding : 'a encoding list -> 'a t Data_encoding.t diff --git a/src/lib_p2p/p2p_peer_state.ml b/src/lib_p2p/p2p_peer_state.ml index efbdf3d8425cb6001baeedbf2c6e0b8cab60303a..44467323631d1415326d055d1dc9f426cf8abf2b 100644 --- a/src/lib_p2p/p2p_peer_state.ml +++ b/src/lib_p2p/p2p_peer_state.ml @@ -26,119 +26,160 @@ open P2p_peer type ('conn, 'conn_meta) t = - | Accepted of { current_point: P2p_connection.Id.t ; - cancel: Lwt_canceler.t } - | Running of { data: 'conn ; - conn_metadata: 'conn_meta ; - current_point: P2p_connection.Id.t } + | Accepted of {current_point : P2p_connection.Id.t; cancel : Lwt_canceler.t} + | Running of + { data : 'conn; + conn_metadata : 'conn_meta; + current_point : P2p_connection.Id.t } | Disconnected + type ('conn, 'conn_meta) state = ('conn, 'conn_meta) t let pp ppf = function - | Accepted { current_point ; _ } -> + | Accepted {current_point; _} -> Format.fprintf ppf "accepted %a" P2p_connection.Id.pp current_point - | Running { current_point ; _ } -> + | Running {current_point; _} -> Format.fprintf ppf "running %a" P2p_connection.Id.pp current_point | Disconnected -> Format.fprintf ppf "disconnected" module Info = struct - type ('conn, 'peer_meta, 'conn_meta) t = { - peer_id : Id.t ; - created : Time.System.t ; - mutable state : ('conn, 'conn_meta) state ; - mutable peer_metadata : 'peer_meta ; - mutable trusted : bool ; - mutable last_failed_connection : (P2p_connection.Id.t * Time.System.t) option ; - mutable last_rejected_connection : (P2p_connection.Id.t * Time.System.t) option ; - mutable last_established_connection : (P2p_connection.Id.t * Time.System.t) option ; - mutable last_disconnection : (P2p_connection.Id.t * Time.System.t) option ; - events : Pool_event.t Ring.t ; - watchers : Pool_event.t Lwt_watcher.input ; + peer_id : Id.t; + created : Time.System.t; + mutable state : ('conn, 'conn_meta) state; + mutable peer_metadata : 'peer_meta; + mutable trusted : bool; + mutable last_failed_connection : + (P2p_connection.Id.t * Time.System.t) option; + mutable last_rejected_connection : + (P2p_connection.Id.t * Time.System.t) option; + mutable last_established_connection : + (P2p_connection.Id.t * Time.System.t) option; + mutable last_disconnection : (P2p_connection.Id.t * Time.System.t) option; + events : Pool_event.t Ring.t; + watchers : Pool_event.t Lwt_watcher.input } - type ('conn, 'peer_meta, 'conn_meta) peer_info = ('conn, 'peer_meta, 'conn_meta) t + + type ('conn, 'peer_meta, 'conn_meta) peer_info = + ('conn, 'peer_meta, 'conn_meta) t let compare gi1 gi2 = Id.compare gi1.peer_id gi2.peer_id let log_size = 100 - let create ?(created = Systime_os.now ()) ?(trusted = false) ~peer_metadata peer_id = - { peer_id ; - created ; - state = Disconnected ; - peer_metadata ; - trusted ; - last_failed_connection = None ; - last_rejected_connection = None ; - last_established_connection = None ; - last_disconnection = None ; - events = Ring.create log_size ; - watchers = Lwt_watcher.create_input () ; - } + let create ?(created = Systime_os.now ()) ?(trusted = false) ~peer_metadata + peer_id = + { peer_id; + created; + state = Disconnected; + peer_metadata; + trusted; + last_failed_connection = None; + last_rejected_connection = None; + last_established_connection = None; + last_disconnection = None; + events = Ring.create log_size; + watchers = Lwt_watcher.create_input () } let encoding peer_metadata_encoding = let open Data_encoding in conv - (fun { 
peer_id ; trusted ; peer_metadata ; events ; created ; - last_failed_connection ; last_rejected_connection ; - last_established_connection ; last_disconnection ; _ } -> - (peer_id, created, trusted, peer_metadata, Ring.elements events, - last_failed_connection, last_rejected_connection, - last_established_connection, last_disconnection)) - (fun (peer_id, created, trusted, peer_metadata, event_list, - last_failed_connection, last_rejected_connection, - last_established_connection, last_disconnection) -> + (fun { peer_id; + trusted; + peer_metadata; + events; + created; + last_failed_connection; + last_rejected_connection; + last_established_connection; + last_disconnection; + _ } -> + ( peer_id, + created, + trusted, + peer_metadata, + Ring.elements events, + last_failed_connection, + last_rejected_connection, + last_established_connection, + last_disconnection )) + (fun ( peer_id, + created, + trusted, + peer_metadata, + event_list, + last_failed_connection, + last_rejected_connection, + last_established_connection, + last_disconnection ) -> let info = create ~trusted ~peer_metadata peer_id in let events = Ring.create log_size in Ring.add_list info.events event_list ; - { state = Disconnected ; - trusted ; peer_id ; peer_metadata ; created ; - last_failed_connection ; - last_rejected_connection ; - last_established_connection ; - last_disconnection ; - events ; - watchers = Lwt_watcher.create_input () ; - }) + { state = Disconnected; + trusted; + peer_id; + peer_metadata; + created; + last_failed_connection; + last_rejected_connection; + last_established_connection; + last_disconnection; + events; + watchers = Lwt_watcher.create_input () }) (obj9 (req "peer_id" Id.encoding) (req "created" Time.System.encoding) (dft "trusted" bool false) (req "peer_metadata" peer_metadata_encoding) (dft "events" (list Pool_event.encoding) []) - (opt "last_failed_connection" + (opt + "last_failed_connection" (tup2 P2p_connection.Id.encoding Time.System.encoding)) - (opt "last_rejected_connection" + (opt + "last_rejected_connection" (tup2 P2p_connection.Id.encoding Time.System.encoding)) - (opt "last_established_connection" + (opt + "last_established_connection" (tup2 P2p_connection.Id.encoding Time.System.encoding)) - (opt "last_disconnection" + (opt + "last_disconnection" (tup2 P2p_connection.Id.encoding Time.System.encoding))) - let peer_id { peer_id ; _ } = peer_id - let created { created ; _ } = created - let peer_metadata { peer_metadata ; _ } = peer_metadata + let peer_id {peer_id; _} = peer_id + + let created {created; _} = created + + let peer_metadata {peer_metadata; _} = peer_metadata + let set_peer_metadata gi peer_metadata = gi.peer_metadata <- peer_metadata - let trusted { trusted ; _ } = trusted + + let trusted {trusted; _} = trusted + let set_trusted gi = gi.trusted <- true + let unset_trusted gi = gi.trusted <- false + let last_established_connection s = s.last_established_connection + let last_disconnection s = s.last_disconnection + let last_failed_connection s = s.last_failed_connection + let last_rejected_connection s = s.last_rejected_connection let last_seen s = Time.System.recent s.last_established_connection (Time.System.recent s.last_rejected_connection s.last_disconnection) + let last_miss s = Time.System.recent s.last_failed_connection (Time.System.recent s.last_rejected_connection s.last_disconnection) - let log { events ; watchers ; _ } ?(timestamp = Systime_os.now ()) point kind = - let event = { Pool_event.kind ; timestamp ; point } in + let log {events; watchers; _} 
?(timestamp = Systime_os.now ()) point kind = + let event = {Pool_event.kind; timestamp; point} in Ring.add events event ; Lwt_watcher.notify watchers event @@ -146,74 +187,67 @@ module Info = struct log peer_info ?timestamp point Rejecting_request module File = struct - let load path peer_metadata_encoding = - let enc = - Data_encoding.list (encoding peer_metadata_encoding) in + let enc = Data_encoding.list (encoding peer_metadata_encoding) in if path <> "/dev/null" && Sys.file_exists path then - Lwt_utils_unix.Json.read_file path >>=? fun json -> - return (Data_encoding.Json.destruct enc json) - else - return_nil + Lwt_utils_unix.Json.read_file path + >>=? fun json -> return (Data_encoding.Json.destruct enc json) + else return_nil let save path peer_metadata_encoding peers = let open Data_encoding in - Lwt_utils_unix.Json.write_file path @@ - Json.construct (list (encoding peer_metadata_encoding)) peers - + Lwt_utils_unix.Json.write_file path + @@ Json.construct (list (encoding peer_metadata_encoding)) peers end - let watch { watchers ; _ } = Lwt_watcher.create_stream watchers - let fold { events ; _ } ~init ~f = Ring.fold events ~init ~f + let watch {watchers; _} = Lwt_watcher.create_stream watchers + let fold {events; _} ~init ~f = Ring.fold events ~init ~f end -let get { Info.state ; _ } = state +let get {Info.state; _} = state -let is_disconnected { Info.state ; _ } = - match state with - | Disconnected -> true - | Accepted _ | Running _ -> false +let is_disconnected {Info.state; _} = + match state with Disconnected -> true | Accepted _ | Running _ -> false -let set_accepted - ?(timestamp = Systime_os.now ()) - peer_info current_point cancel = - assert begin +let set_accepted ?(timestamp = Systime_os.now ()) peer_info current_point + cancel = + assert ( match peer_info.Info.state with - | Accepted _ | Running _ -> false - | Disconnected -> true - end ; - peer_info.state <- Accepted { current_point ; cancel } ; + | Accepted _ | Running _ -> + false + | Disconnected -> + true ) ; + peer_info.state <- Accepted {current_point; cancel} ; Info.log peer_info ~timestamp current_point Accepting_request -let set_running - ?(timestamp = Systime_os.now ()) - peer_info point data conn_metadata = - assert begin +let set_running ?(timestamp = Systime_os.now ()) peer_info point data + conn_metadata = + assert ( match peer_info.Info.state with - | Disconnected -> true (* request to unknown peer_id. *) - | Running _ -> false - | Accepted { current_point ; _ } -> - P2p_connection.Id.equal point current_point - end ; - peer_info.state <- Running { data ; conn_metadata ; current_point = point } ; + | Disconnected -> + true (* request to unknown peer_id. 
*) + | Running _ -> + false + | Accepted {current_point; _} -> + P2p_connection.Id.equal point current_point ) ; + peer_info.state <- Running {data; conn_metadata; current_point = point} ; peer_info.last_established_connection <- Some (point, timestamp) ; Info.log peer_info ~timestamp point Connection_established -let set_disconnected - ?(timestamp = Systime_os.now ()) ?(requested = false) peer_info = - let current_point, (event : Pool_event.kind) = +let set_disconnected ?(timestamp = Systime_os.now ()) ?(requested = false) + peer_info = + let (current_point, (event : Pool_event.kind)) = match peer_info.Info.state with - | Accepted { current_point ; _ } -> - peer_info.last_rejected_connection <- - Some (current_point, timestamp) ; - current_point, Request_rejected - | Running { current_point ; _ } -> - peer_info.last_disconnection <- - Some (current_point, timestamp) ; - current_point, - if requested then Disconnection else External_disconnection - | Disconnected -> assert false + | Accepted {current_point; _} -> + peer_info.last_rejected_connection <- Some (current_point, timestamp) ; + (current_point, Request_rejected) + | Running {current_point; _} -> + peer_info.last_disconnection <- Some (current_point, timestamp) ; + ( current_point, + if requested then Disconnection else External_disconnection ) + | Disconnected -> + assert false in peer_info.state <- Disconnected ; Info.log peer_info ~timestamp current_point event diff --git a/src/lib_p2p/p2p_peer_state.mli b/src/lib_p2p/p2p_peer_state.mli index edd5add2ce6c3b3f324d65b683c72bb4a82f7a2f..d23f127cd19748d0a142124c0c4fec74a8e34baf 100644 --- a/src/lib_p2p/p2p_peer_state.mli +++ b/src/lib_p2p/p2p_peer_state.mli @@ -26,91 +26,119 @@ open P2p_peer type ('conn, 'conn_meta) t = - | Accepted of { current_point: P2p_connection.Id.t ; - cancel: Lwt_canceler.t } - (** We accepted a incoming connection, we greeted back and + | Accepted of {current_point : P2p_connection.Id.t; cancel : Lwt_canceler.t} + (** We accepted a incoming connection, we greeted back and we are waiting for an acknowledgement. *) - | Running of { data: 'conn ; - conn_metadata: 'conn_meta ; - current_point: P2p_connection.Id.t } - (** Successfully authentificated connection, normal business. *) - | Disconnected - (** No connection established currently. *) + | Running of + { data : 'conn; + conn_metadata : 'conn_meta; + current_point : P2p_connection.Id.t } + (** Successfully authentificated connection, normal business. *) + | Disconnected (** No connection established currently. *) + type ('conn, 'conn_meta) state = ('conn, 'conn_meta) t val pp : Format.formatter -> ('conn, 'conn_meta) t -> unit module Info : sig - type ('conn, 'peer_meta, 'conn_meta) t - type ('conn, 'peer_meta, 'conn_meta) peer_info = ('conn, 'peer_meta, 'conn_meta) t - val compare : ('conn, 'peer_meta, 'conn_meta) t -> ('conn, 'peer_meta, 'conn_meta) t -> int + type ('conn, 'peer_meta, 'conn_meta) peer_info = + ('conn, 'peer_meta, 'conn_meta) t + + val compare : + ('conn, 'peer_meta, 'conn_meta) t -> + ('conn, 'peer_meta, 'conn_meta) t -> + int + (** [create ~trusted ~meta peer_id] is a freshly minted peer_id info for + [peer_id]. *) val create : ?created:Time.System.t -> ?trusted:bool -> peer_metadata:'peer_meta -> - Id.t -> ('conn, 'peer_meta, 'conn_meta) peer_info - (** [create ~trusted ~meta peer_id] is a freshly minted peer_id info for - [peer_id]. 
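   For instance (an illustrative sketch; [meta] and [peer_id] are
   assumed to be supplied by the caller, and note that the actual label
   of the metadata argument is [~peer_metadata]):
   {[
     let info = create ~trusted:false ~peer_metadata:meta peer_id
   ]}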
*) + Id.t -> + ('conn, 'peer_meta, 'conn_meta) peer_info val peer_id : ('conn, 'peer_meta, 'conn_meta) peer_info -> Id.t val created : ('conn, 'peer_meta, 'conn_meta) peer_info -> Time.System.t + val peer_metadata : ('conn, 'peer_meta, 'conn_meta) peer_info -> 'peer_meta - val set_peer_metadata : ('conn, 'peer_meta, 'conn_meta) peer_info -> 'peer_meta -> unit + + val set_peer_metadata : + ('conn, 'peer_meta, 'conn_meta) peer_info -> 'peer_meta -> unit val trusted : ('conn, 'peer_meta, 'conn_meta) peer_info -> bool + val set_trusted : ('conn, 'peer_meta, 'conn_meta) peer_info -> unit + val unset_trusted : ('conn, 'peer_meta, 'conn_meta) peer_info -> unit val last_failed_connection : - ('conn, 'peer_meta, 'conn_meta) peer_info -> (P2p_connection.Id.t * Time.System.t) option + ('conn, 'peer_meta, 'conn_meta) peer_info -> + (P2p_connection.Id.t * Time.System.t) option + val last_rejected_connection : - ('conn, 'peer_meta, 'conn_meta) peer_info -> (P2p_connection.Id.t * Time.System.t) option + ('conn, 'peer_meta, 'conn_meta) peer_info -> + (P2p_connection.Id.t * Time.System.t) option + val last_established_connection : - ('conn, 'peer_meta, 'conn_meta) peer_info -> (P2p_connection.Id.t * Time.System.t) option + ('conn, 'peer_meta, 'conn_meta) peer_info -> + (P2p_connection.Id.t * Time.System.t) option + val last_disconnection : - ('conn, 'peer_meta, 'conn_meta) peer_info -> (P2p_connection.Id.t * Time.System.t) option + ('conn, 'peer_meta, 'conn_meta) peer_info -> + (P2p_connection.Id.t * Time.System.t) option - val last_seen : - ('conn, 'peer_meta, 'conn_meta) peer_info -> (P2p_connection.Id.t * Time.System.t) option (** [last_seen gi] is the most recent of: * last established connection * last rejected connection * last disconnection *) + val last_seen : + ('conn, 'peer_meta, 'conn_meta) peer_info -> + (P2p_connection.Id.t * Time.System.t) option - val last_miss : - ('conn, 'peer_meta, 'conn_meta) peer_info -> (P2p_connection.Id.t * Time.System.t) option (** [last_miss gi] is the most recent of: * last failed connection * last rejected connection * last disconnection *) + val last_miss : + ('conn, 'peer_meta, 'conn_meta) peer_info -> + (P2p_connection.Id.t * Time.System.t) option val log_incoming_rejection : ?timestamp:Time.System.t -> - ('conn, 'peer_meta, 'conn_meta) peer_info -> P2p_connection.Id.t -> unit + ('conn, 'peer_meta, 'conn_meta) peer_info -> + P2p_connection.Id.t -> + unit module File : sig val load : - string -> 'peer_meta Data_encoding.t -> + string -> + 'peer_meta Data_encoding.t -> ('conn, 'peer_meta, 'conn_meta) peer_info list tzresult Lwt.t + val save : - string -> 'peer_meta Data_encoding.t -> - ('conn, 'peer_meta, 'conn_meta) peer_info list -> unit tzresult Lwt.t + string -> + 'peer_meta Data_encoding.t -> + ('conn, 'peer_meta, 'conn_meta) peer_info list -> + unit tzresult Lwt.t end val fold : - ('conn, 'peer_meta, 'conn_meta) t -> init:'a -> f:('a -> Pool_event.t -> 'a) -> 'a + ('conn, 'peer_meta, 'conn_meta) t -> + init:'a -> + f:('a -> Pool_event.t -> 'a) -> + 'a val watch : - ('conn, 'peer_meta, 'conn_meta) t -> Pool_event.t Lwt_stream.t * Lwt_watcher.stopper - + ('conn, 'peer_meta, 'conn_meta) t -> + Pool_event.t Lwt_stream.t * Lwt_watcher.stopper end val get : ('conn, 'peer_meta, 'conn_meta) Info.t -> ('conn, 'conn_meta) state @@ -119,13 +147,21 @@ val is_disconnected : ('conn, 'peer_meta, 'conn_meta) Info.t -> bool val set_accepted : ?timestamp:Time.System.t -> - ('conn, 'peer_meta, 'conn_meta) Info.t -> P2p_connection.Id.t -> Lwt_canceler.t -> unit + ('conn, 
'peer_meta, 'conn_meta) Info.t -> + P2p_connection.Id.t -> + Lwt_canceler.t -> + unit val set_running : ?timestamp:Time.System.t -> - ('conn, 'peer_meta, 'conn_meta) Info.t -> P2p_connection.Id.t -> 'conn -> 'conn_meta -> unit + ('conn, 'peer_meta, 'conn_meta) Info.t -> + P2p_connection.Id.t -> + 'conn -> + 'conn_meta -> + unit val set_disconnected : ?timestamp:Time.System.t -> ?requested:bool -> - ('conn, 'peer_meta, 'conn_meta) Info.t -> unit + ('conn, 'peer_meta, 'conn_meta) Info.t -> + unit diff --git a/src/lib_p2p/p2p_point_state.ml b/src/lib_p2p/p2p_point_state.ml index 2ffa079547e050bb8cb00de43fb7b1c015421e7a..9ea701997c5a7256aa80c699b023f8d10124ffb3 100644 --- a/src/lib_p2p/p2p_point_state.ml +++ b/src/lib_p2p/p2p_point_state.ml @@ -26,137 +26,158 @@ open P2p_point type 'data t = - | Requested of { cancel: Lwt_canceler.t } - | Accepted of { current_peer_id: P2p_peer.Id.t ; - cancel: Lwt_canceler.t } - | Running of { data: 'data ; - current_peer_id: P2p_peer.Id.t } + | Requested of {cancel : Lwt_canceler.t} + | Accepted of {current_peer_id : P2p_peer.Id.t; cancel : Lwt_canceler.t} + | Running of {data : 'data; current_peer_id : P2p_peer.Id.t} | Disconnected + type 'data state = 'data t let pp ppf = function | Requested _ -> Format.fprintf ppf "requested" - | Accepted { current_peer_id ; _ } -> + | Accepted {current_peer_id; _} -> Format.fprintf ppf "accepted %a" P2p_peer.Id.pp current_peer_id - | Running { current_peer_id ; _ } -> + | Running {current_peer_id; _} -> Format.fprintf ppf "running %a" P2p_peer.Id.pp current_peer_id | Disconnected -> Format.fprintf ppf "disconnected" module Info = struct - type greylisting_config = { - factor: float ; - initial_delay: Time.System.Span.t ; - disconnection_delay: Time.System.Span.t ; - increase_cap: Time.System.Span.t ; + factor : float; + initial_delay : Time.System.Span.t; + disconnection_delay : Time.System.Span.t; + increase_cap : Time.System.Span.t } type 'data t = { - point : Id.t ; - mutable trusted : bool ; - mutable state : 'data state ; - mutable last_failed_connection : Time.System.t option ; - mutable last_rejected_connection : (P2p_peer.Id.t * Time.System.t) option ; - mutable last_established_connection : (P2p_peer.Id.t * Time.System.t) option ; - mutable known_public : bool ; - mutable last_disconnection : (P2p_peer.Id.t * Time.System.t) option ; - mutable greylisting_delay : Time.System.Span.t ; - mutable greylisting_end : Time.System.t ; - events : Pool_event.t Ring.t ; - watchers : Pool_event.t Lwt_watcher.input ; + point : Id.t; + mutable trusted : bool; + mutable state : 'data state; + mutable last_failed_connection : Time.System.t option; + mutable last_rejected_connection : (P2p_peer.Id.t * Time.System.t) option; + mutable last_established_connection : + (P2p_peer.Id.t * Time.System.t) option; + mutable known_public : bool; + mutable last_disconnection : (P2p_peer.Id.t * Time.System.t) option; + mutable greylisting_delay : Time.System.Span.t; + mutable greylisting_end : Time.System.t; + events : Pool_event.t Ring.t; + watchers : Pool_event.t Lwt_watcher.input } + type 'data point_info = 'data t let compare pi1 pi2 = Id.compare pi1.point pi2.point let log_size = 100 - let default_greylisting_config = { - factor = 1.2 ; - initial_delay = Ptime.Span.of_int_s 1 ; - disconnection_delay = Ptime.Span.of_int_s 60 ; - increase_cap = Ptime.Span.of_int_s 172800 (* 2 days *) ; - } + let default_greylisting_config = + { factor = 1.2; + initial_delay = Ptime.Span.of_int_s 1; + disconnection_delay = Ptime.Span.of_int_s 60; + 
increase_cap = Ptime.Span.of_int_s 172800 (* 2 days *) } + let greylisting_config_encoding = let open Data_encoding in conv - (fun { factor ; initial_delay ; disconnection_delay ; increase_cap ; } -> - (factor, initial_delay, disconnection_delay, increase_cap)) + (fun {factor; initial_delay; disconnection_delay; increase_cap} -> + (factor, initial_delay, disconnection_delay, increase_cap)) (fun (factor, initial_delay, disconnection_delay, increase_cap) -> - { factor ; initial_delay ; disconnection_delay ; increase_cap ; }) + {factor; initial_delay; disconnection_delay; increase_cap}) (obj4 - (dft "factor" - ~description: "The factor by which the greylisting delay is \ - increased when an already greylisted peer is \ - greylisted again. This value should be set to 1 for \ - a linear back-off and to >1 for an exponential \ - back-off." - float default_greylisting_config.factor) - (dft "initial-delay" - ~description: "The span of time a peer is greylisted for when it \ - is first greylisted." - Time.System.Span.encoding default_greylisting_config.initial_delay) - (dft "disconnection-delay" - ~description: "The span of time a peer is greylisted for when it \ - is greylisted as the result of an abrupt \ - disconnection." + (dft + "factor" + ~description: + "The factor by which the greylisting delay is increased when an \ + already greylisted peer is greylisted again. This value should \ + be set to 1 for a linear back-off and to >1 for an exponential \ + back-off." + float + default_greylisting_config.factor) + (dft + "initial-delay" + ~description: + "The span of time a peer is greylisted for when it is first \ + greylisted." + Time.System.Span.encoding + default_greylisting_config.initial_delay) + (dft + "disconnection-delay" + ~description: + "The span of time a peer is greylisted for when it is \ + greylisted as the result of an abrupt disconnection." Time.System.Span.encoding default_greylisting_config.disconnection_delay) - (dft "increase-cap" - ~description: "The maximum amount by which the greylisting is \ - extended. This limits the rate of the exponential \ - back-off, which eventually becomes linear when it \ - reaches this limit. This limit is set to avoid \ - reaching the End-of-Time when repeatedly \ - greylisting a peer." - Time.System.Span.encoding default_greylisting_config.increase_cap) - ) + (dft + "increase-cap" + ~description: + "The maximum amount by which the greylisting is extended. This \ + limits the rate of the exponential back-off, which eventually \ + becomes linear when it reaches this limit. This limit is set \ + to avoid reaching the End-of-Time when repeatedly greylisting \ + a peer." 
+ Time.System.Span.encoding + default_greylisting_config.increase_cap)) - let create ?(trusted = false) addr port = { - point = (addr, port) ; - trusted ; - state = Disconnected ; - last_failed_connection = None ; - last_rejected_connection = None ; - last_established_connection = None ; - last_disconnection = None ; - known_public = false ; - events = Ring.create log_size ; - greylisting_delay = Ptime.Span.of_int_s 1 ; - greylisting_end = Time.System.epoch ; - watchers = Lwt_watcher.create_input () ; - } + let create ?(trusted = false) addr port = + { point = (addr, port); + trusted; + state = Disconnected; + last_failed_connection = None; + last_rejected_connection = None; + last_established_connection = None; + last_disconnection = None; + known_public = false; + events = Ring.create log_size; + greylisting_delay = Ptime.Span.of_int_s 1; + greylisting_end = Time.System.epoch; + watchers = Lwt_watcher.create_input () } let point s = s.point + let trusted s = s.trusted + let set_trusted gi = gi.trusted <- true + let unset_trusted gi = gi.trusted <- false + let last_established_connection s = s.last_established_connection + let last_disconnection s = s.last_disconnection + let last_failed_connection s = s.last_failed_connection + let last_rejected_connection s = s.last_rejected_connection + let known_public s = s.known_public + let greylisted ?(now = Systime_os.now ()) s = Time.System.compare now s.greylisting_end <= 0 + let greylisted_until s = s.greylisting_end let last_seen s = - Time.System.recent s.last_rejected_connection + Time.System.recent + s.last_rejected_connection (Time.System.recent s.last_established_connection s.last_disconnection) + let last_miss s = match - s.last_failed_connection, - (Option.map ~f:(fun (_, time) -> time) @@ - Time.System.recent s.last_rejected_connection s.last_disconnection) with - | (None, None) -> None - | (None, (Some _ as a)) - | (Some _ as a, None) -> a - | (Some t1 as a1 , (Some t2 as a2)) -> + ( s.last_failed_connection, + Option.map ~f:(fun (_, time) -> time) + @@ Time.System.recent s.last_rejected_connection s.last_disconnection + ) + with + | (None, None) -> + None + | (None, (Some _ as a)) | ((Some _ as a), None) -> + a + | ((Some t1 as a1), (Some t2 as a2)) -> if Time.System.compare t1 t2 < 0 then a2 else a1 - let log { events ; watchers ; _ } ?timestamp kind = + let log {events; watchers; _} ?timestamp kind = let time = Option.unopt ~default:(Systime_os.now ()) timestamp in let event = Time.System.stamp ~time kind in Ring.add events event ; @@ -165,83 +186,79 @@ module Info = struct let log_incoming_rejection ?timestamp point_info peer_id = log point_info ?timestamp (Rejecting_request peer_id) + let fold {events; _} ~init ~f = Ring.fold events ~init ~f - let fold { events ; _ } ~init ~f = Ring.fold events ~init ~f - - let watch { watchers ; _ } = Lwt_watcher.create_stream watchers - + let watch {watchers; _} = Lwt_watcher.create_stream watchers end -let get { Info.state ; _ } = state +let get {Info.state; _} = state -let is_disconnected { Info.state ; _ } = +let is_disconnected {Info.state; _} = match state with - | Disconnected -> true - | Requested _ | Accepted _ | Running _ -> false + | Disconnected -> + true + | Requested _ | Accepted _ | Running _ -> + false let set_requested ?timestamp point_info cancel = - assert begin + assert ( match point_info.Info.state with - | Requested _ -> true - | Accepted _ | Running _ -> false - | Disconnected -> true - end ; - point_info.state <- Requested { cancel } ; + | Requested _ -> + true + | 
Accepted _ | Running _ -> + false + | Disconnected -> + true ) ; + point_info.state <- Requested {cancel} ; Info.log point_info ?timestamp Outgoing_request -let set_accepted - ?(timestamp = Systime_os.now ()) - point_info current_peer_id cancel = +let set_accepted ?(timestamp = Systime_os.now ()) point_info current_peer_id + cancel = (* log_notice "SET_ACCEPTED %a@." P2p_point.pp point_info.point ; *) - assert begin + assert ( match point_info.Info.state with - | Accepted _ | Running _ -> false - | Requested _ | Disconnected -> true - end ; - point_info.state <- Accepted { current_peer_id ; cancel } ; + | Accepted _ | Running _ -> + false + | Requested _ | Disconnected -> + true ) ; + point_info.state <- Accepted {current_peer_id; cancel} ; Info.log point_info ~timestamp (Accepting_request current_peer_id) let set_private point_info known_private = point_info.Info.known_public <- not known_private -let set_running - ?(timestamp = Systime_os.now ()) - point_info peer_id data = - assert begin +let set_running ?(timestamp = Systime_os.now ()) point_info peer_id data = + assert ( match point_info.Info.state with - | Disconnected -> true (* request to unknown peer_id. *) - | Running _ -> false - | Accepted { current_peer_id ; _ } -> P2p_peer.Id.equal peer_id current_peer_id - | Requested _ -> true - end ; - point_info.state <- Running { data ; current_peer_id = peer_id } ; + | Disconnected -> + true (* request to unknown peer_id. *) + | Running _ -> + false + | Accepted {current_peer_id; _} -> + P2p_peer.Id.equal peer_id current_peer_id + | Requested _ -> + true ) ; + point_info.state <- Running {data; current_peer_id = peer_id} ; point_info.last_established_connection <- Some (peer_id, timestamp) ; Info.log point_info ~timestamp (Connection_established peer_id) let maxed_time_add t s = - match Ptime.add_span t s with - | Some t -> t - | None -> Ptime.max + match Ptime.add_span t s with Some t -> t | None -> Ptime.max let set_greylisted greylisting_config timestamp point_info = point_info.Info.greylisting_end <- - maxed_time_add - timestamp - point_info.Info.greylisting_delay ; + maxed_time_add timestamp point_info.Info.greylisting_delay ; point_info.greylisting_delay <- - begin - let new_delay = - Time.System.Span.multiply_exn - greylisting_config.Info.factor - point_info.greylisting_delay in - if Ptime.Span.compare greylisting_config.Info.increase_cap new_delay > 0 then - new_delay - else - greylisting_config.Info.increase_cap - end - -let set_disconnected - ?(timestamp = Systime_os.now ()) ?(requested = false) + (let new_delay = + Time.System.Span.multiply_exn + greylisting_config.Info.factor + point_info.greylisting_delay + in + if Ptime.Span.compare greylisting_config.Info.increase_cap new_delay > 0 + then new_delay + else greylisting_config.Info.increase_cap) + +let set_disconnected ?(timestamp = Systime_os.now ()) ?(requested = false) greylisting_config point_info = let event : Pool_event.kind = match point_info.Info.state with @@ -249,21 +266,16 @@ let set_disconnected set_greylisted greylisting_config timestamp point_info ; point_info.last_failed_connection <- Some timestamp ; Request_rejected None - | Accepted { current_peer_id ; _ } -> + | Accepted {current_peer_id; _} -> set_greylisted greylisting_config timestamp point_info ; - point_info.last_rejected_connection <- - Some (current_peer_id, timestamp) ; + point_info.last_rejected_connection <- Some (current_peer_id, timestamp) ; Request_rejected (Some current_peer_id) - | Running { current_peer_id ; _ } -> - 
point_info.greylisting_delay <- - greylisting_config.Info.initial_delay ; + | Running {current_peer_id; _} -> + point_info.greylisting_delay <- greylisting_config.Info.initial_delay ; point_info.greylisting_end <- - maxed_time_add - timestamp - greylisting_config.Info.disconnection_delay ; + maxed_time_add timestamp greylisting_config.Info.disconnection_delay ; point_info.last_disconnection <- Some (current_peer_id, timestamp) ; - if requested - then Disconnection current_peer_id + if requested then Disconnection current_peer_id else External_disconnection current_peer_id | Disconnected -> assert false diff --git a/src/lib_p2p/p2p_point_state.mli b/src/lib_p2p/p2p_point_state.mli index a57e0060e1155d0fb9834a4b923fbc4b3e9c60dc..2958e3846e49030d155ff0d2591901e4095ad3ed 100644 --- a/src/lib_p2p/p2p_point_state.mli +++ b/src/lib_p2p/p2p_point_state.mli @@ -26,85 +26,81 @@ open P2p_point type 'conn t = - | Requested of { cancel: Lwt_canceler.t } - (** We initiated a connection. *) - | Accepted of { current_peer_id: P2p_peer.Id.t ; - cancel: Lwt_canceler.t } - (** We accepted a incoming connection. *) - | Running of { data: 'conn ; - current_peer_id: P2p_peer.Id.t } - (** Successfully authentificated connection, normal business. *) - | Disconnected - (** No connection established currently. *) + | Requested of {cancel : Lwt_canceler.t} (** We initiated a connection. *) + | Accepted of {current_peer_id : P2p_peer.Id.t; cancel : Lwt_canceler.t} + (** We accepted a incoming connection. *) + | Running of {data : 'conn; current_peer_id : P2p_peer.Id.t} + (** Successfully authentificated connection, normal business. *) + | Disconnected (** No connection established currently. *) + type 'conn state = 'conn t val pp : Format.formatter -> 'conn t -> unit module Info : sig - type 'conn t - type 'conn point_info = 'conn t + (** Type of info associated to a point. *) + type 'conn point_info = 'conn t val compare : 'conn point_info -> 'conn point_info -> int type greylisting_config = { - factor: float ; - initial_delay: Time.System.Span.t ; - disconnection_delay: Time.System.Span.t ; - increase_cap: Time.System.Span.t ; + factor : float; + initial_delay : Time.System.Span.t; + disconnection_delay : Time.System.Span.t; + increase_cap : Time.System.Span.t } val default_greylisting_config : greylisting_config + val greylisting_config_encoding : greylisting_config Data_encoding.encoding - val create : - ?trusted:bool -> - P2p_addr.t -> P2p_addr.port -> 'conn point_info (** [create ~trusted addr port] is a freshly minted point_info. If [trusted] is true, this point is considered trusted and will be treated as such. *) + val create : ?trusted:bool -> P2p_addr.t -> P2p_addr.port -> 'conn point_info - val trusted : 'conn point_info -> bool (** [trusted pi] is [true] iff [pi] has is trusted, i.e. "whitelisted". *) + val trusted : 'conn point_info -> bool - val known_public : 'conn point_info -> bool (** Points can announce themself as either public or private. Private points will not be advertized to other nodes. 
*) + val known_public : 'conn point_info -> bool val set_trusted : 'conn point_info -> unit + val unset_trusted : 'conn point_info -> unit - val last_failed_connection : - 'conn point_info -> Time.System.t option + val last_failed_connection : 'conn point_info -> Time.System.t option + val last_rejected_connection : 'conn point_info -> (P2p_peer.Id.t * Time.System.t) option + val last_established_connection : 'conn point_info -> (P2p_peer.Id.t * Time.System.t) option + val last_disconnection : 'conn point_info -> (P2p_peer.Id.t * Time.System.t) option - val last_seen : - 'conn point_info -> (P2p_peer.Id.t * Time.System.t) option (** [last_seen pi] is the most recent of: * last established connection * last rejected connection * last disconnection *) + val last_seen : 'conn point_info -> (P2p_peer.Id.t * Time.System.t) option - val last_miss : - 'conn point_info -> Time.System.t option (** [last_miss pi] is the most recent of: * last failed connection * last rejected connection * last disconnection *) + val last_miss : 'conn point_info -> Time.System.t option - val greylisted : - ?now:Time.System.t -> 'conn point_info -> bool + val greylisted : ?now:Time.System.t -> 'conn point_info -> bool val greylisted_until : 'conn point_info -> Time.System.t @@ -113,11 +109,9 @@ module Info : sig val log_incoming_rejection : ?timestamp:Time.System.t -> 'conn point_info -> P2p_peer.Id.t -> unit - val fold : - 'conn t -> init:'a -> f:('a -> Pool_event.t -> 'a) -> 'a + val fold : 'conn t -> init:'a -> f:('a -> Pool_event.t -> 'a) -> 'a - val watch : - 'conn t -> Pool_event.t Lwt_stream.t * Lwt_watcher.stopper + val watch : 'conn t -> Pool_event.t Lwt_stream.t * Lwt_watcher.stopper end val get : 'conn Info.t -> 'conn t @@ -125,20 +119,23 @@ val get : 'conn Info.t -> 'conn t val is_disconnected : 'conn Info.t -> bool val set_requested : - ?timestamp:Time.System.t -> - 'conn Info.t -> Lwt_canceler.t -> unit + ?timestamp:Time.System.t -> 'conn Info.t -> Lwt_canceler.t -> unit val set_accepted : ?timestamp:Time.System.t -> - 'conn Info.t -> P2p_peer.Id.t -> Lwt_canceler.t -> unit + 'conn Info.t -> + P2p_peer.Id.t -> + Lwt_canceler.t -> + unit val set_running : - ?timestamp:Time.System.t -> - 'conn Info.t -> P2p_peer.Id.t -> 'conn -> unit + ?timestamp:Time.System.t -> 'conn Info.t -> P2p_peer.Id.t -> 'conn -> unit val set_private : 'conn Info.t -> bool -> unit val set_disconnected : - ?timestamp:Time.System.t -> ?requested:bool -> - Info.greylisting_config -> 'conn Info.t -> unit - + ?timestamp:Time.System.t -> + ?requested:bool -> + Info.greylisting_config -> + 'conn Info.t -> + unit diff --git a/src/lib_p2p/p2p_pool.ml b/src/lib_p2p/p2p_pool.ml index 07c3c97e800889d99fa3dd89772fc19c5aec2c43..14b2791fbe4c2fb2bef0a37c5960916e13cb9d0e 100644 --- a/src/lib_p2p/p2p_pool.ml +++ b/src/lib_p2p/p2p_pool.ml @@ -32,106 +32,110 @@ (* TODO allow to track "requested peer_ids" when we reconnect to a point. 
*) -include Internal_event.Legacy_logging.Make (struct let name = "p2p.connection-pool" end) +include Internal_event.Legacy_logging.Make (struct + let name = "p2p.connection-pool" +end) type config = { - - identity : P2p_identity.t ; - proof_of_work_target : Crypto_box.target ; - - trusted_points : P2p_point.Id.t list ; - peers_file : string ; - private_mode : bool ; - - greylisting_config : P2p_point_state.Info.greylisting_config ; - - listening_port : P2p_addr.port option ; - min_connections : int ; - max_connections : int ; - max_incoming_connections : int ; - connection_timeout : Time.System.Span.t ; - authentication_timeout : Time.System.Span.t ; - - incoming_app_message_queue_size : int option ; - incoming_message_queue_size : int option ; - outgoing_message_queue_size : int option ; - - known_peer_ids_history_size : int ; - known_points_history_size : int ; - max_known_points : (int * int) option ; (* max, gc target *) - max_known_peer_ids : (int * int) option ; (* max, gc target *) - - swap_linger : Time.System.Span.t ; - - binary_chunks_size : int option ; - + identity : P2p_identity.t; + proof_of_work_target : Crypto_box.target; + trusted_points : P2p_point.Id.t list; + peers_file : string; + private_mode : bool; + greylisting_config : P2p_point_state.Info.greylisting_config; + listening_port : P2p_addr.port option; + min_connections : int; + max_connections : int; + max_incoming_connections : int; + connection_timeout : Time.System.Span.t; + authentication_timeout : Time.System.Span.t; + incoming_app_message_queue_size : int option; + incoming_message_queue_size : int option; + outgoing_message_queue_size : int option; + known_peer_ids_history_size : int; + known_points_history_size : int; + max_known_points : (int * int) option; + (* max, gc target *) + max_known_peer_ids : (int * int) option; + (* max, gc target *) + swap_linger : Time.System.Span.t; + binary_chunks_size : int option } type 'peer_meta peer_meta_config = { - peer_meta_encoding : 'peer_meta Data_encoding.t ; - peer_meta_initial : unit -> 'peer_meta ; - score : 'peer_meta -> float ; + peer_meta_encoding : 'peer_meta Data_encoding.t; + peer_meta_initial : unit -> 'peer_meta; + score : 'peer_meta -> float } type 'msg message_config = { - encoding : 'msg P2p_message.encoding list ; - chain_name : Distributed_db_version.name ; - distributed_db_versions : Distributed_db_version.t list ; + encoding : 'msg P2p_message.encoding list; + chain_name : Distributed_db_version.name; + distributed_db_versions : Distributed_db_version.t list } type ('msg, 'peer_meta, 'conn_meta) t = { - config : config ; - announced_version : Network_version.t ; - custom_p2p_versions : P2p_version.t list ; - greylisting_config : P2p_point_state.Info.greylisting_config ; - peer_meta_config : 'peer_meta peer_meta_config ; - conn_meta_config : 'conn_meta P2p_socket.metadata_config ; - message_config : 'msg message_config ; - my_id_points : unit P2p_point.Table.t ; + config : config; + announced_version : Network_version.t; + custom_p2p_versions : P2p_version.t list; + greylisting_config : P2p_point_state.Info.greylisting_config; + peer_meta_config : 'peer_meta peer_meta_config; + conn_meta_config : 'conn_meta P2p_socket.metadata_config; + message_config : 'msg message_config; + my_id_points : unit P2p_point.Table.t; known_peer_ids : - (('msg, 'peer_meta, 'conn_meta) connection, - 'peer_meta, - 'conn_meta) P2p_peer_state.Info.t P2p_peer.Table.t ; + ( ('msg, 'peer_meta, 'conn_meta) connection, + 'peer_meta, + 'conn_meta ) + P2p_peer_state.Info.t + 
P2p_peer.Table.t; connected_peer_ids : - (('msg, 'peer_meta, 'conn_meta) connection, - 'peer_meta, - 'conn_meta) P2p_peer_state.Info.t P2p_peer.Table.t ; + ( ('msg, 'peer_meta, 'conn_meta) connection, + 'peer_meta, + 'conn_meta ) + P2p_peer_state.Info.t + P2p_peer.Table.t; known_points : - ('msg, 'peer_meta, 'conn_meta) connection P2p_point_state.Info.t P2p_point.Table.t ; + ('msg, 'peer_meta, 'conn_meta) connection P2p_point_state.Info.t + P2p_point.Table.t; connected_points : - ('msg, 'peer_meta, 'conn_meta) connection P2p_point_state.Info.t P2p_point.Table.t ; - incoming : Lwt_canceler.t P2p_point.Table.t ; - io_sched : P2p_io_scheduler.t ; - encoding : 'msg P2p_message.t Data_encoding.t ; - events : events ; - watcher : P2p_connection.Pool_event.t Lwt_watcher.input ; - acl : P2p_acl.t ; + ('msg, 'peer_meta, 'conn_meta) connection P2p_point_state.Info.t + P2p_point.Table.t; + incoming : Lwt_canceler.t P2p_point.Table.t; + io_sched : P2p_io_scheduler.t; + encoding : 'msg P2p_message.t Data_encoding.t; + events : events; + watcher : P2p_connection.Pool_event.t Lwt_watcher.input; + acl : P2p_acl.t; mutable new_connection_hook : - (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) list ; - mutable latest_accepted_swap : Time.System.t ; - mutable latest_succesfull_swap : Time.System.t ; + (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) list; + mutable latest_accepted_swap : Time.System.t; + mutable latest_succesfull_swap : Time.System.t } and events = { - too_few_connections : unit Lwt_condition.t ; - too_many_connections : unit Lwt_condition.t ; - new_peer : unit Lwt_condition.t ; - new_point : unit Lwt_condition.t ; - new_connection : unit Lwt_condition.t ; + too_few_connections : unit Lwt_condition.t; + too_many_connections : unit Lwt_condition.t; + new_peer : unit Lwt_condition.t; + new_point : unit Lwt_condition.t; + new_connection : unit Lwt_condition.t } and ('msg, 'peer_meta, 'conn_meta) connection = { - canceler : Lwt_canceler.t ; - messages : (int * 'msg) Lwt_pipe.t ; - conn : ('msg P2p_message.t, 'conn_meta) P2p_socket.t ; + canceler : Lwt_canceler.t; + messages : (int * 'msg) Lwt_pipe.t; + conn : ('msg P2p_message.t, 'conn_meta) P2p_socket.t; peer_info : - (('msg, 'peer_meta, 'conn_meta) connection, 'peer_meta, 'conn_meta) P2p_peer_state.Info.t ; + ( ('msg, 'peer_meta, 'conn_meta) connection, + 'peer_meta, + 'conn_meta ) + P2p_peer_state.Info.t; point_info : - ('msg, 'peer_meta, 'conn_meta) connection P2p_point_state.Info.t option ; - negotiated_version : Network_version.t ; - answerer : ('msg, 'conn_meta) P2p_answerer.t Lazy.t ; - mutable last_sent_swap_request : (Time.System.t * P2p_peer.Id.t) option ; - mutable wait_close : bool ; + ('msg, 'peer_meta, 'conn_meta) connection P2p_point_state.Info.t option; + negotiated_version : Network_version.t; + answerer : ('msg, 'conn_meta) P2p_answerer.t Lazy.t; + mutable last_sent_swap_request : (Time.System.t * P2p_peer.Id.t) option; + mutable wait_close : bool } type ('msg, 'peer_meta, 'conn_meta) pool = ('msg, 'peer_meta, 'conn_meta) t @@ -139,78 +143,85 @@ type ('msg, 'peer_meta, 'conn_meta) pool = ('msg, 'peer_meta, 'conn_meta) t module Pool_event = struct let wait_too_few_connections pool = Lwt_condition.wait pool.events.too_few_connections + let wait_too_many_connections pool = Lwt_condition.wait pool.events.too_many_connections - let wait_new_peer pool = - Lwt_condition.wait pool.events.new_peer - let wait_new_point pool = - Lwt_condition.wait pool.events.new_point - let wait_new_connection 
pool = - Lwt_condition.wait pool.events.new_connection + + let wait_new_peer pool = Lwt_condition.wait pool.events.new_peer + + let wait_new_point pool = Lwt_condition.wait pool.events.new_point + + let wait_new_connection pool = Lwt_condition.wait pool.events.new_connection end -let watch { watcher ; _ } = Lwt_watcher.create_stream watcher -let log { watcher ; _ } event = Lwt_watcher.notify watcher event +let watch {watcher; _} = Lwt_watcher.create_stream watcher + +let log {watcher; _} event = Lwt_watcher.notify watcher event + let private_node_warn fmt = Format.kasprintf (fun s -> lwt_warn "[private node] %s" s) fmt -module Gc_point_set = List.Bounded(struct - type t = Time.System.t * P2p_point.Id.t - let compare (x, _) (y, _) = - (Time.System.compare x y) - end) +module Gc_point_set = List.Bounded (struct + type t = Time.System.t * P2p_point.Id.t -let gc_points ({ config = { max_known_points ; _ } ; known_points ; _ } as pool) = + let compare (x, _) (y, _) = -Time.System.compare x y +end) + +let gc_points ({config = {max_known_points; _}; known_points; _} as pool) = match max_known_points with - | None -> () + | None -> + () | Some (_, target) -> let current_size = P2p_point.Table.length known_points in - if current_size > target then + if current_size > target then ( let to_remove_target = current_size - target in - let now = Systime_os.now () in (* TODO: maybe time of discovery? *) + let now = Systime_os.now () in + (* TODO: maybe time of discovery? *) let table = Gc_point_set.create to_remove_target in - P2p_point.Table.iter (fun p point_info -> + P2p_point.Table.iter + (fun p point_info -> if P2p_point_state.is_disconnected point_info then let time = match P2p_point_state.Info.last_miss point_info with - | None -> now - | Some t -> t in - Gc_point_set.insert (time, p) table - ) known_points ; + | None -> + now + | Some t -> + t + in + Gc_point_set.insert (time, p) table) + known_points ; let to_remove = Gc_point_set.get table in - ListLabels.iter to_remove ~f:begin fun (_, p) -> - P2p_point.Table.remove known_points p - end ; - log pool Gc_points + ListLabels.iter to_remove ~f:(fun (_, p) -> + P2p_point.Table.remove known_points p) ; + log pool Gc_points ) -let register_point ?trusted pool _source_peer_id (addr, port as point) = +let register_point ?trusted pool _source_peer_id ((addr, port) as point) = match P2p_point.Table.find_opt pool.known_points point with | None -> - let point_info = - P2p_point_state.Info.create - ?trusted - addr port in - Option.iter pool.config.max_known_points ~f:begin fun (max, _) -> - if P2p_point.Table.length pool.known_points >= max then gc_points pool - end ; + let point_info = P2p_point_state.Info.create ?trusted addr port in + Option.iter pool.config.max_known_points ~f:(fun (max, _) -> + if P2p_point.Table.length pool.known_points >= max then + gc_points pool) ; P2p_point.Table.add pool.known_points point point_info ; Lwt_condition.broadcast pool.events.new_point () ; log pool (New_point point) ; point_info | Some point_info -> - begin - match trusted with - | Some true -> P2p_point_state.Info.set_trusted point_info ; - | Some false -> P2p_point_state.Info.unset_trusted point_info ; - | None -> () - end ; + ( match trusted with + | Some true -> + P2p_point_state.Info.set_trusted point_info + | Some false -> + P2p_point_state.Info.unset_trusted point_info + | None -> + () ) ; point_info let may_register_my_id_point pool = function | [P2p_errors.Myself (addr, Some port)] -> P2p_point.Table.add pool.my_id_points (addr, port) () ; 
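(* Aside (illustration only, not part of this patch): the [(max, gc target)]
   pairs consumed by [register_point] / [gc_points] and [register_peer] /
   [gc_peer_ids] above act as a hysteresis band -- registration may grow the
   table up to [max], after which a GC pass shrinks it back to the smaller
   gc target.  A stand-alone sketch of that policy over a plain list, where
   [worth] is a hypothetical eviction key standing in for the last-miss
   timestamp (points) or the peer score (peers): *)
let gc_sketch ~max_size ~gc_target ~worth entries =
  if List.length entries < max_size then entries
  else
    entries
    (* keep the entries the caller rates highest ... *)
    |> List.sort (fun a b -> compare (worth b) (worth a))
    (* ... and trim the table back down to the GC target *)
    |> List.filteri (fun i _ -> i < gc_target)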
P2p_point.Table.remove pool.known_points (addr, port) - | _ -> () - + | _ -> + () (* Bounded table used to garbage collect peer_id infos when needed. The strategy used is to remove the info of the peer_id with the lowest @@ -219,190 +230,203 @@ let may_register_my_id_point pool = function case of a flood attack, the newly added infos will probably belong to peer_ids with the same (low) score and removing the most recent ones ensure that older (and probably legit) peer_id infos are kept. *) -module Gc_peer_set = List.Bounded(struct - type t = float * Time.System.t * P2p_peer.Id.t - let compare (s, t, _) (s', t', _) = - let score_cmp = Pervasives.compare s s' in - if score_cmp = 0 then Time.System.compare t t' else - score_cmp - end) - -let gc_peer_ids ({ peer_meta_config = { score ; _ } ; - config = { max_known_peer_ids ; _ } ; - known_peer_ids ; _ } as pool) = +module Gc_peer_set = List.Bounded (struct + type t = float * Time.System.t * P2p_peer.Id.t + + let compare (s, t, _) (s', t', _) = + let score_cmp = Pervasives.compare s s' in + if score_cmp = 0 then Time.System.compare t t' else -score_cmp +end) + +let gc_peer_ids + ( { peer_meta_config = {score; _}; + config = {max_known_peer_ids; _}; + known_peer_ids; + _ } as pool ) = match max_known_peer_ids with - | None -> () + | None -> + () | Some (_, target) -> let current_size = P2p_peer.Table.length known_peer_ids in - if current_size > target then + if current_size > target then ( let to_remove_target = current_size - target in let table = Gc_peer_set.create to_remove_target in - P2p_peer.Table.iter (fun peer_id peer_info -> + P2p_peer.Table.iter + (fun peer_id peer_info -> let created = P2p_peer_state.Info.created peer_info in let score = score @@ P2p_peer_state.Info.peer_metadata peer_info in if P2p_peer_state.is_disconnected peer_info then - Gc_peer_set.insert (score, created, peer_id) table - ) known_peer_ids ; + Gc_peer_set.insert (score, created, peer_id) table) + known_peer_ids ; let to_remove = Gc_peer_set.get table in - ListLabels.iter to_remove ~f:begin fun (_, _, peer_id) -> - P2p_peer.Table.remove known_peer_ids peer_id - end ; - log pool Gc_peer_ids + ListLabels.iter to_remove ~f:(fun (_, _, peer_id) -> + P2p_peer.Table.remove known_peer_ids peer_id) ; + log pool Gc_peer_ids ) let register_peer pool peer_id = match P2p_peer.Table.find_opt pool.known_peer_ids peer_id with | None -> Lwt_condition.broadcast pool.events.new_peer () ; let peer = - P2p_peer_state.Info.create peer_id - ~peer_metadata:(pool.peer_meta_config.peer_meta_initial ()) in - Option.iter pool.config.max_known_peer_ids ~f:begin fun (max, _) -> - if P2p_peer.Table.length pool.known_peer_ids >= max then gc_peer_ids pool - end ; + P2p_peer_state.Info.create + peer_id + ~peer_metadata:(pool.peer_meta_config.peer_meta_initial ()) + in + Option.iter pool.config.max_known_peer_ids ~f:(fun (max, _) -> + if P2p_peer.Table.length pool.known_peer_ids >= max then + gc_peer_ids pool) ; P2p_peer.Table.add pool.known_peer_ids peer_id peer ; log pool (New_peer peer_id) ; peer - | Some peer -> peer - + | Some peer -> + peer (***************************************************************************) -let read { messages ; conn ; _ } = +let read {messages; conn; _} = Lwt.catch (fun () -> - Lwt_pipe.pop messages >>= fun (s, msg) -> - lwt_debug "%d bytes message popped from queue %a\027[0m" - s P2p_peer.Id.pp (P2p_socket.info conn).peer_id >>= fun () -> - return msg) + Lwt_pipe.pop messages + >>= fun (s, msg) -> + lwt_debug + "%d bytes message popped from queue %a\027[0m" + s 
+ P2p_peer.Id.pp + (P2p_socket.info conn).peer_id + >>= fun () -> return msg) (fun _ (* Closed *) -> fail P2p_errors.Connection_closed) -let is_readable { messages ; _ } = +let is_readable {messages; _} = Lwt.catch (fun () -> Lwt_pipe.values_available messages >>= return) (fun _ (* Closed *) -> fail P2p_errors.Connection_closed) -let write { conn ; _ } msg = - P2p_socket.write conn (Message msg) +let write {conn; _} msg = P2p_socket.write conn (Message msg) -let write_sync { conn ; _ } msg = - P2p_socket.write_sync conn (Message msg) +let write_sync {conn; _} msg = P2p_socket.write_sync conn (Message msg) -let raw_write_sync { conn ; _ } buf = - P2p_socket.raw_write_sync conn buf +let raw_write_sync {conn; _} buf = P2p_socket.raw_write_sync conn buf -let write_now { conn ; _ } msg = - P2p_socket.write_now conn (Message msg) +let write_now {conn; _} msg = P2p_socket.write_now conn (Message msg) let write_all pool msg = P2p_peer.Table.iter (fun _peer_id peer_info -> - match P2p_peer_state.get peer_info with - | Running { data = conn ; _ } -> - ignore (write_now conn msg : bool tzresult ) - | _ -> ()) + match P2p_peer_state.get peer_info with + | Running {data = conn; _} -> + ignore (write_now conn msg : bool tzresult) + | _ -> + ()) pool.connected_peer_ids let broadcast_bootstrap_msg pool = if not pool.config.private_mode then P2p_peer.Table.iter (fun _peer_id peer_info -> - match P2p_peer_state.get peer_info with - | Running { data = { conn ; _ } ; _ } -> - (* should not ask private nodes for the list of their + match P2p_peer_state.get peer_info with + | Running {data = {conn; _}; _} -> + (* should not ask private nodes for the list of their known peers*) - if not (P2p_socket.private_node conn) then - ignore (P2p_socket.write_now conn Bootstrap : bool tzresult ) - | _ -> ()) + if not (P2p_socket.private_node conn) then + ignore (P2p_socket.write_now conn Bootstrap : bool tzresult) + | _ -> + ()) pool.connected_peer_ids - (***************************************************************************) (* this function duplicates bit of code from the modules below to avoid creating mutually recursive modules *) let connection_of_peer_id pool peer_id = Option.apply - (P2p_peer.Table.find_opt pool.known_peer_ids peer_id) ~f:begin fun p -> - match P2p_peer_state.get p with - | Running { data ; _ } -> Some data - | _ -> None - end + (P2p_peer.Table.find_opt pool.known_peer_ids peer_id) + ~f:(fun p -> + match P2p_peer_state.get p with + | Running {data; _} -> + Some data + | _ -> + None) (* Every running connection matching the point's ip address is returned. 
*) let connections_of_addr pool addr = P2p_point.Table.fold (fun (addr', _) p acc -> - if Ipaddr.V6.compare addr addr' = 0 - then - match P2p_point_state.get p with - | P2p_point_state.Running { data ; _ } -> data :: acc - | _ -> acc - else acc - ) pool.connected_points [] + if Ipaddr.V6.compare addr addr' = 0 then + match P2p_point_state.get p with + | P2p_point_state.Running {data; _} -> + data :: acc + | _ -> + acc + else acc) + pool.connected_points + [] let get_addr pool peer_id = - Option.map (connection_of_peer_id pool peer_id) ~f:begin fun ci -> - (P2p_socket.info ci.conn).id_point - end + Option.map (connection_of_peer_id pool peer_id) ~f:(fun ci -> + (P2p_socket.info ci.conn).id_point) module Points = struct - type ('msg, 'peer_meta, 'conn_meta) info = ('msg, 'peer_meta, 'conn_meta) connection P2p_point_state.Info.t - let info { known_points ; _ } point = + let info {known_points; _} point = P2p_point.Table.find_opt known_points point let get_trusted pool point = - Option.unopt_map ~default:false ~f:P2p_point_state.Info.trusted + Option.unopt_map + ~default:false + ~f:P2p_point_state.Info.trusted (P2p_point.Table.find_opt pool.known_points point) let set_trusted pool point = - ignore @@ register_point ~trusted:true pool pool.config.identity.peer_id point + ignore + @@ register_point ~trusted:true pool pool.config.identity.peer_id point let unset_trusted pool point = - Option.iter ~f:P2p_point_state.Info.unset_trusted + Option.iter + ~f:P2p_point_state.Info.unset_trusted (P2p_point.Table.find_opt pool.known_points point) - let fold_known pool ~init ~f = - P2p_point.Table.fold f pool.known_points init + let fold_known pool ~init ~f = P2p_point.Table.fold f pool.known_points init let fold_connected pool ~init ~f = P2p_point.Table.fold f pool.connected_points init - let banned pool (addr, _port) = - P2p_acl.banned_addr pool.acl addr + let banned pool (addr, _port) = P2p_acl.banned_addr pool.acl addr let ban pool (addr, _port) = P2p_acl.IPBlacklist.add pool.acl addr ; (* Kick [addr]:* if it is in `Running` state. 
*) - List.iter (fun conn -> + List.iter + (fun conn -> conn.wait_close <- false ; - Lwt.async (fun () -> P2p_answerer.shutdown (Lazy.force conn.answerer)) - ) (connections_of_addr pool addr) + Lwt.async (fun () -> P2p_answerer.shutdown (Lazy.force conn.answerer))) + (connections_of_addr pool addr) - let unban pool (addr, _port) = - P2p_acl.IPBlacklist.remove pool.acl addr + let unban pool (addr, _port) = P2p_acl.IPBlacklist.remove pool.acl addr let trust pool ((addr, _port) as point) = P2p_acl.IPBlacklist.remove pool.acl addr ; set_trusted pool point - let untrust pool point = - unset_trusted pool point - + let untrust pool point = unset_trusted pool point end module Peers = struct - type ('msg, 'peer_meta, 'conn_meta) info = - (('msg, 'peer_meta, 'conn_meta) connection, 'peer_meta, 'conn_meta) P2p_peer_state.Info.t + ( ('msg, 'peer_meta, 'conn_meta) connection, + 'peer_meta, + 'conn_meta ) + P2p_peer_state.Info.t - let info { known_peer_ids ; _ } peer_id = + let info {known_peer_ids; _} peer_id = try Some (P2p_peer.Table.find known_peer_ids peer_id) with Not_found -> None let get_peer_metadata pool peer_id = - try P2p_peer_state.Info.peer_metadata (P2p_peer.Table.find pool.known_peer_ids peer_id) + try + P2p_peer_state.Info.peer_metadata + (P2p_peer.Table.find pool.known_peer_ids peer_id) with Not_found -> pool.peer_meta_config.peer_meta_initial () let get_score pool peer_id = @@ -412,7 +436,9 @@ module Peers = struct P2p_peer_state.Info.set_peer_metadata (register_peer pool peer_id) data let get_trusted pool peer_id = - try P2p_peer_state.Info.trusted (P2p_peer.Table.find pool.known_peer_ids peer_id) + try + P2p_peer_state.Info.trusted + (P2p_peer.Table.find pool.known_peer_ids peer_id) with Not_found -> false let set_trusted pool peer_id = @@ -420,11 +446,12 @@ module Peers = struct with Not_found -> () let unset_trusted pool peer_id = - try P2p_peer_state.Info.unset_trusted (P2p_peer.Table.find pool.known_peer_ids peer_id) + try + P2p_peer_state.Info.unset_trusted + (P2p_peer.Table.find pool.known_peer_ids peer_id) with Not_found -> () - let fold_known pool ~init ~f = - P2p_peer.Table.fold f pool.known_peer_ids init + let fold_known pool ~init ~f = P2p_peer.Table.fold f pool.known_peer_ids init let fold_connected pool ~init ~f = P2p_peer.Table.fold f pool.connected_peer_ids init @@ -432,142 +459,135 @@ module Peers = struct let ban pool peer = P2p_acl.PeerBlacklist.add pool.acl peer ; (* Kick [peer] if it is in `Running` state. 
*) - Option.iter (connection_of_peer_id pool peer) ~f:begin fun conn -> - conn.wait_close <- false ; - Lwt.async (fun () -> P2p_answerer.shutdown (Lazy.force conn.answerer)) - end - - let unban pool peer = - P2p_acl.PeerBlacklist.remove pool.acl peer + Option.iter (connection_of_peer_id pool peer) ~f:(fun conn -> + conn.wait_close <- false ; + Lwt.async (fun () -> P2p_answerer.shutdown (Lazy.force conn.answerer))) - let trust pool peer = - unban pool peer ; - set_trusted pool peer + let unban pool peer = P2p_acl.PeerBlacklist.remove pool.acl peer - let untrust pool peer = - unset_trusted pool peer + let trust pool peer = unban pool peer ; set_trusted pool peer - let banned pool peer = - P2p_acl.banned_peer pool.acl peer + let untrust pool peer = unset_trusted pool peer + let banned pool peer = P2p_acl.banned_peer pool.acl peer end module Connection = struct - let trusted_node conn = - P2p_peer_state.Info.trusted conn.peer_info || - Option.unopt_map - ~default:false - ~f:P2p_point_state.Info.trusted - conn.point_info + P2p_peer_state.Info.trusted conn.peer_info + || Option.unopt_map + ~default:false + ~f:P2p_point_state.Info.trusted + conn.point_info let private_node conn = P2p_socket.private_node conn.conn let fold pool ~init ~f = - Peers.fold_connected pool ~init ~f:begin fun peer_id peer_info acc -> - match P2p_peer_state.get peer_info with - | Running { data ; _ } -> f peer_id data acc - | _ -> acc - end + Peers.fold_connected pool ~init ~f:(fun peer_id peer_info acc -> + match P2p_peer_state.get peer_info with + | Running {data; _} -> + f peer_id data acc + | _ -> + acc) let list pool = fold pool ~init:[] ~f:(fun peer_id c acc -> (peer_id, c) :: acc) let random ?different_than ~no_private pool = let candidates = - fold pool ~init:[] ~f:begin fun _peer conn acc -> - if no_private && (private_node conn) then - acc - else - match different_than with - | Some excluded_conn - when P2p_socket.equal conn.conn excluded_conn.conn -> acc - | Some _ | None -> conn :: acc - end in + fold pool ~init:[] ~f:(fun _peer conn acc -> + if no_private && private_node conn then acc + else + match different_than with + | Some excluded_conn + when P2p_socket.equal conn.conn excluded_conn.conn -> + acc + | Some _ | None -> + conn :: acc) + in match candidates with - | [] -> None + | [] -> + None | _ :: _ -> Some (List.nth candidates (Random.int @@ List.length candidates)) let random_lowid ?different_than ~no_private pool = let candidates = - fold pool ~init:[] ~f:begin fun _peer conn acc -> - if no_private && (private_node conn) then - acc - else - match different_than with - | Some excluded_conn - when P2p_socket.equal conn.conn excluded_conn.conn -> acc - | Some _ | None -> - let ci = P2p_socket.info conn.conn in - match ci.id_point with - | _, None -> acc - | addr, Some port -> ((addr, port), ci.peer_id, conn) :: acc - end in + fold pool ~init:[] ~f:(fun _peer conn acc -> + if no_private && private_node conn then acc + else + match different_than with + | Some excluded_conn + when P2p_socket.equal conn.conn excluded_conn.conn -> + acc + | Some _ | None -> ( + let ci = P2p_socket.info conn.conn in + match ci.id_point with + | (_, None) -> + acc + | (addr, Some port) -> + ((addr, port), ci.peer_id, conn) :: acc )) + in match candidates with - | [] -> None + | [] -> + None | _ :: _ -> Some (List.nth candidates (Random.int @@ List.length candidates)) - let stat { conn ; _ } = - P2p_socket.stat conn + let stat {conn; _} = P2p_socket.stat conn - let info { conn ; _ } = - P2p_socket.info conn + let info {conn; _} 
= P2p_socket.info conn - let local_metadata { conn ; _ } = - P2p_socket.local_metadata conn + let local_metadata {conn; _} = P2p_socket.local_metadata conn - let remote_metadata { conn ; _ } = - P2p_socket.remote_metadata conn + let remote_metadata {conn; _} = P2p_socket.remote_metadata conn let find_by_peer_id pool peer_id = - Option.apply - (Peers.info pool peer_id) - ~f:(fun p -> - match P2p_peer_state.get p with - | Running { data ; _ } -> Some data - | _ -> None) + Option.apply (Peers.info pool peer_id) ~f:(fun p -> + match P2p_peer_state.get p with + | Running {data; _} -> + Some data + | _ -> + None) let find_by_point pool point = - Option.apply - (Points.info pool point) - ~f:(fun p -> - match P2p_point_state.get p with - | Running { data ; _ } -> Some data - | _ -> None) - + Option.apply (Points.info pool point) ~f:(fun p -> + match P2p_point_state.get p with + | Running {data; _} -> + Some data + | _ -> + None) end let greylist_addr pool addr = P2p_acl.IPGreylist.add pool.acl addr (Systime_os.now ()) let greylist_peer pool peer = - Option.iter (get_addr pool peer) ~f:begin fun (addr, _port) -> - greylist_addr pool addr ; - P2p_acl.PeerGreylist.add pool.acl peer - end + Option.iter (get_addr pool peer) ~f:(fun (addr, _port) -> + greylist_addr pool addr ; + P2p_acl.PeerGreylist.add pool.acl peer) -let acl_clear pool = - P2p_acl.clear pool.acl +let acl_clear pool = P2p_acl.clear pool.acl let gc_greylist ~older_than pool = P2p_acl.IPGreylist.remove_old ~older_than pool.acl -let pool_stat { io_sched ; _ } = - P2p_io_scheduler.global_stat io_sched +let pool_stat {io_sched; _} = P2p_io_scheduler.global_stat io_sched -let config { config ; _ } = config +let config {config; _} = config -let score { peer_meta_config = { score ; _ } ; _ } meta = score meta +let score {peer_meta_config = {score; _}; _} meta = score meta (***************************************************************************) let fail_unless_disconnected_point point_info = match P2p_point_state.get point_info with - | Disconnected -> return_unit - | Requested _ | Accepted _ -> fail P2p_errors.Pending_connection - | Running _ -> fail P2p_errors.Connected + | Disconnected -> + return_unit + | Requested _ | Accepted _ -> + fail P2p_errors.Pending_connection + | Running _ -> + fail P2p_errors.Connected (* [sample best other points] return a list of elements selected in [points]. The [best] first elements are taken, then [other] elements are chosen @@ -576,29 +596,31 @@ let fail_unless_disconnected_point point_info = close to the end of the list is picked multiple times. 
*) let sample best other points = let l = List.length points in - if l <= best + other then - points + if l <= best + other then points else let best_indexes = List.init best (fun i -> i) in let other_indexes = List.sort compare - @@ List.init other (fun _ -> best + Random.int (l - best)) in + @@ List.init other (fun _ -> best + Random.int (l - best)) + in let indexes = best_indexes @ other_indexes in (* Note: we are doing a [fold_left_i] by hand, passing [i] manually *) - (fun (_, _, result) -> result) @@ List.fold_left - (fun (i, indexes, acc) point -> - match indexes with - | [] -> (0, [], acc) (* TODO: early return *) - | index :: indexes when i >= index -> - (* We compare `i >= index` (rather than `i = index`) to avoid a + (fun (_, _, result) -> result) + @@ List.fold_left + (fun (i, indexes, acc) point -> + match indexes with + | [] -> + (0, [], acc) (* TODO: early return *) + | index :: indexes when i >= index -> + (* We compare `i >= index` (rather than `i = index`) to avoid a corner case whereby two identical `index`es are present in the list. In that case, using `>=` makes it so that if `i` overtakes `index` we still pick elements. *) - (succ i, indexes, point :: acc) - | _ -> - (succ i, indexes, acc)) - (0, indexes, []) - points + (succ i, indexes, point :: acc) + | _ -> + (succ i, indexes, acc)) + (0, indexes, []) + points let compare_known_point_info p1 p2 = (* The most-recently disconnected peers are greater. *) @@ -606,90 +628,123 @@ let compare_known_point_info p1 p2 = let disconnected1 = P2p_point_state.is_disconnected p1 and disconnected2 = P2p_point_state.is_disconnected p2 in let compare_last_seen p1 p2 = - match P2p_point_state.Info.last_seen p1, P2p_point_state.Info.last_seen p2 with - | None, None -> Random.int 2 * 2 - 1 (* HACK... *) - | Some _, None -> 1 - | None, Some _ -> -1 - | Some (_, time1), Some (_, time2) -> - match compare time1 time2 with - | 0 -> Random.int 2 * 2 - 1 (* HACK... *) - | x -> x in - match disconnected1, disconnected2 with - | false, false -> compare_last_seen p1 p2 - | false, true -> -1 - | true, false -> 1 - | true, true -> compare_last_seen p2 p1 + match + (P2p_point_state.Info.last_seen p1, P2p_point_state.Info.last_seen p2) + with + | (None, None) -> + (Random.int 2 * 2) - 1 (* HACK... *) + | (Some _, None) -> + 1 + | (None, Some _) -> + -1 + | (Some (_, time1), Some (_, time2)) -> ( + match compare time1 time2 with + | 0 -> + (Random.int 2 * 2) - 1 (* HACK... *) + | x -> + x ) + in + match (disconnected1, disconnected2) with + | (false, false) -> + compare_last_seen p1 p2 + | (false, true) -> + -1 + | (true, false) -> + 1 + | (true, true) -> + compare_last_seen p2 p1 let rec connect ?timeout pool point = - fail_when (Points.banned pool point) - (P2p_errors.Point_banned point) >>=? fun () -> - let timeout = - Option.unopt ~default:pool.config.connection_timeout timeout in + fail_when (Points.banned pool point) (P2p_errors.Point_banned point) + >>=? fun () -> + let timeout = Option.unopt ~default:pool.config.connection_timeout timeout in fail_unless (active_connections pool <= pool.config.max_connections) - P2p_errors.Too_many_connections >>=? fun () -> + P2p_errors.Too_many_connections + >>=? 
fun () -> let canceler = Lwt_canceler.create () in - with_timeout ~canceler (Systime_os.sleep timeout) begin fun canceler -> - let point_info = - register_point pool pool.config.identity.peer_id point in - let addr, port as point = P2p_point_state.Info.point point_info in - fail_unless - (not pool.config.private_mode || P2p_point_state.Info.trusted point_info) - P2p_errors.Private_mode >>=? fun () -> - fail_unless_disconnected_point point_info >>=? fun () -> - P2p_point_state.set_requested point_info canceler ; - let fd = P2p_fd.socket PF_INET6 SOCK_STREAM 0 in - let uaddr = - Lwt_unix.ADDR_INET (Ipaddr_unix.V6.to_inet_addr addr, port) in - lwt_debug "connect: %a" P2p_point.Id.pp point >>= fun () -> - protect ~canceler begin fun () -> - log pool (Outgoing_connection point) ; - P2p_fd.connect fd uaddr >>= fun () -> - return_unit - end ~on_error: begin fun err -> - lwt_debug "connect: %a -> disconnect" P2p_point.Id.pp point >>= fun () -> - P2p_point_state.set_disconnected pool.greylisting_config point_info ; - P2p_fd.close fd >>= fun () -> - match err with - | [Exn (Unix.Unix_error (Unix.ECONNREFUSED, _, _))] -> - fail P2p_errors.Connection_refused - | err -> Lwt.return_error err - end >>=? fun () -> - lwt_debug "connect: %a -> authenticate" P2p_point.Id.pp point >>= fun () -> - authenticate pool ~point_info canceler fd point - end + with_timeout ~canceler (Systime_os.sleep timeout) (fun canceler -> + let point_info = + register_point pool pool.config.identity.peer_id point + in + let ((addr, port) as point) = P2p_point_state.Info.point point_info in + fail_unless + ( (not pool.config.private_mode) + || P2p_point_state.Info.trusted point_info ) + P2p_errors.Private_mode + >>=? fun () -> + fail_unless_disconnected_point point_info + >>=? fun () -> + P2p_point_state.set_requested point_info canceler ; + let fd = P2p_fd.socket PF_INET6 SOCK_STREAM 0 in + let uaddr = + Lwt_unix.ADDR_INET (Ipaddr_unix.V6.to_inet_addr addr, port) + in + lwt_debug "connect: %a" P2p_point.Id.pp point + >>= fun () -> + protect + ~canceler + (fun () -> + log pool (Outgoing_connection point) ; + P2p_fd.connect fd uaddr >>= fun () -> return_unit) + ~on_error:(fun err -> + lwt_debug "connect: %a -> disconnect" P2p_point.Id.pp point + >>= fun () -> + P2p_point_state.set_disconnected pool.greylisting_config point_info ; + P2p_fd.close fd + >>= fun () -> + match err with + | [Exn (Unix.Unix_error (Unix.ECONNREFUSED, _, _))] -> + fail P2p_errors.Connection_refused + | err -> + Lwt.return_error err) + >>=? fun () -> + lwt_debug "connect: %a -> authenticate" P2p_point.Id.pp point + >>= fun () -> authenticate pool ~point_info canceler fd point) and authenticate pool ?point_info canceler fd point = let fd = P2p_io_scheduler.register pool.io_sched fd in - raw_authenticate pool ?point_info canceler fd point >>= function - | Ok connection -> return connection + raw_authenticate pool ?point_info canceler fd point + >>= function + | Ok connection -> + return connection | Error _ as err -> - P2p_io_scheduler.close fd >>=? fun () -> - Lwt.return err + P2p_io_scheduler.close fd >>=? 
fun () -> Lwt.return err and raw_authenticate pool ?point_info canceler fd point = let incoming = point_info = None in - lwt_debug "authenticate: %a%s" - P2p_point.Id.pp point - (if incoming then " incoming" else "") >>= fun () -> - protect ~canceler begin fun () -> - P2p_socket.authenticate - ~canceler - ~proof_of_work_target:pool.config.proof_of_work_target - ~incoming fd point - ?listening_port:pool.config.listening_port - pool.config.identity pool.announced_version - pool.conn_meta_config - end ~on_error: begin fun err -> - begin match err with - | [ Canceled ] -> + lwt_debug + "authenticate: %a%s" + P2p_point.Id.pp + point + (if incoming then " incoming" else "") + >>= fun () -> + protect + ~canceler + (fun () -> + P2p_socket.authenticate + ~canceler + ~proof_of_work_target:pool.config.proof_of_work_target + ~incoming + fd + point + ?listening_port:pool.config.listening_port + pool.config.identity + pool.announced_version + pool.conn_meta_config) + ~on_error:(fun err -> + ( match err with + | [Canceled] -> (* Currently only on time out *) - lwt_debug "authenticate: %a%s -> canceled" - P2p_point.Id.pp point + lwt_debug + "authenticate: %a%s -> canceled" + P2p_point.Id.pp + point (if incoming then " incoming" else "") - | err -> begin + | err -> (* Authentication incorrect! Temp ban the offending points/peers *) - List.iter (function + List.iter + (function | P2p_errors.Not_enough_proof_of_work _ | P2p_errors.Invalid_auth | P2p_errors.Decipher_error @@ -698,72 +753,92 @@ and raw_authenticate pool ?point_info canceler fd point = | P2p_errors.Decoding_error | P2p_errors.Invalid_chunks_size _ -> greylist_addr pool (fst point) - | _ -> () - ) err ; - lwt_debug "@[authenticate: %a%s -> failed@ %a@]" - P2p_point.Id.pp point + | _ -> + ()) + err ; + lwt_debug + "@[authenticate: %a%s -> failed@ %a@]" + P2p_point.Id.pp + point (if incoming then " incoming" else "") - pp_print_error err - end - end >>= fun () -> - may_register_my_id_point pool err ; - log pool (Authentication_failed point) ; - if incoming then - P2p_point.Table.remove pool.incoming point - else - Option.iter - ~f:(P2p_point_state.set_disconnected pool.greylisting_config) - point_info ; - Lwt.return_error err - end >>=? fun (info, auth_fd) -> + pp_print_error + err ) + >>= fun () -> + may_register_my_id_point pool err ; + log pool (Authentication_failed point) ; + if incoming then P2p_point.Table.remove pool.incoming point + else + Option.iter + ~f:(P2p_point_state.set_disconnected pool.greylisting_config) + point_info ; + Lwt.return_error err) + >>=? fun (info, auth_fd) -> (* Authentication correct! *) - lwt_debug "authenticate: %a -> auth %a" - P2p_point.Id.pp point - P2p_peer.Id.pp info.peer_id >>= fun () -> - fail_when (Peers.banned pool info.peer_id) - (P2p_errors.Peer_banned info.peer_id) >>=? fun () -> + lwt_debug + "authenticate: %a -> auth %a" + P2p_point.Id.pp + point + P2p_peer.Id.pp + info.peer_id + >>= fun () -> + fail_when + (Peers.banned pool info.peer_id) + (P2p_errors.Peer_banned info.peer_id) + >>=? 
fun () -> let remote_point_info = match info.id_point with - | addr, Some port + | (addr, Some port) when not (P2p_point.Table.mem pool.my_id_points (addr, port)) -> Some (register_point pool info.peer_id (addr, port)) - | _ -> None in + | _ -> + None + in let connection_point_info = - match point_info, remote_point_info with - | None, None -> None - | Some _ as point_info, _ | _, (Some _ as point_info) -> point_info in + match (point_info, remote_point_info) with + | (None, None) -> + None + | ((Some _ as point_info), _) | (_, (Some _ as point_info)) -> + point_info + in let peer_info = register_peer pool info.peer_id in let acceptable_version = Network_version.select ~chain_name:pool.message_config.chain_name ~distributed_db_versions:pool.message_config.distributed_db_versions ~p2p_versions:pool.custom_p2p_versions - info.announced_version in + info.announced_version + in let acceptable_point = - Option.unopt_map connection_point_info + Option.unopt_map + connection_point_info ~default:(not pool.config.private_mode) - ~f:begin fun connection_point_info -> + ~f:(fun connection_point_info -> match P2p_point_state.get connection_point_info with - | Requested _ -> not incoming + | Requested _ -> + not incoming | Disconnected -> let unexpected = pool.config.private_mode && not (P2p_point_state.Info.trusted connection_point_info) in if unexpected then - warn "[private node] incoming connection from untrused \ - peer rejected!"; + warn + "[private node] incoming connection from untrused peer \ + rejected!" ; not unexpected - | Accepted _ | Running _ -> false - end + | Accepted _ | Running _ -> + false) in let acceptable_peer_id = match P2p_peer_state.get peer_info with | Accepted _ -> (* TODO: in some circumstances cancel and accept... *) false - | Running _ -> false - | Disconnected -> true in + | Running _ -> + false + | Disconnected -> + true + in (* To Verify : the thread must ? not be interrupted between point removal from incoming and point registration into active connection to prevent flooding attack. @@ -774,189 +849,227 @@ and raw_authenticate pool ?point_info canceler fd point = by giving late Nack. 
*) if incoming then P2p_point.Table.remove pool.incoming point ; - Option.iter connection_point_info - ~f:(fun point_info -> - (* set the point to private or not, depending on the [info] gethered + Option.iter connection_point_info ~f:(fun point_info -> + (* set the point to private or not, depending on the [info] gethered during authentication *) - P2p_point_state.set_private point_info info.private_node) ; + P2p_point_state.set_private point_info info.private_node) ; match acceptable_version with - | Some version when acceptable_peer_id && acceptable_point -> begin + | Some version when acceptable_peer_id && acceptable_point -> log pool (Accepting_request (point, info.id_point, info.peer_id)) ; - Option.iter connection_point_info - ~f:(fun point_info -> - P2p_point_state.set_accepted point_info info.peer_id canceler) ; + Option.iter connection_point_info ~f:(fun point_info -> + P2p_point_state.set_accepted point_info info.peer_id canceler) ; P2p_peer_state.set_accepted peer_info info.id_point canceler ; - lwt_debug "authenticate: %a -> accept %a" - P2p_point.Id.pp point - P2p_peer.Id.pp info.peer_id >>= fun () -> - protect ~canceler begin fun () -> - P2p_socket.accept - ?incoming_message_queue_size:pool.config.incoming_message_queue_size - ?outgoing_message_queue_size:pool.config.outgoing_message_queue_size - ?binary_chunks_size:pool.config.binary_chunks_size - ~canceler - auth_fd pool.encoding >>=? fun conn -> - lwt_debug "authenticate: %a -> Connected %a" - P2p_point.Id.pp point - P2p_peer.Id.pp info.peer_id >>= fun () -> - return conn - end ~on_error: begin fun err -> - if incoming then - log pool - (Request_rejected (point, Some (info.id_point, info.peer_id))) ; - lwt_debug "authenticate: %a -> rejected %a" - P2p_point.Id.pp point - P2p_peer.Id.pp info.peer_id >>= fun () -> - Option.iter connection_point_info - ~f:(P2p_point_state.set_disconnected pool.greylisting_config) ; - P2p_peer_state.set_disconnected peer_info ; - Lwt.return_error err - end >>=? fun conn -> + lwt_debug + "authenticate: %a -> accept %a" + P2p_point.Id.pp + point + P2p_peer.Id.pp + info.peer_id + >>= fun () -> + protect + ~canceler + (fun () -> + P2p_socket.accept + ?incoming_message_queue_size: + pool.config.incoming_message_queue_size + ?outgoing_message_queue_size: + pool.config.outgoing_message_queue_size + ?binary_chunks_size:pool.config.binary_chunks_size + ~canceler + auth_fd + pool.encoding + >>=? fun conn -> + lwt_debug + "authenticate: %a -> Connected %a" + P2p_point.Id.pp + point + P2p_peer.Id.pp + info.peer_id + >>= fun () -> return conn) + ~on_error:(fun err -> + if incoming then + log + pool + (Request_rejected (point, Some (info.id_point, info.peer_id))) ; + lwt_debug + "authenticate: %a -> rejected %a" + P2p_point.Id.pp + point + P2p_peer.Id.pp + info.peer_id + >>= fun () -> + Option.iter + connection_point_info + ~f:(P2p_point_state.set_disconnected pool.greylisting_config) ; + P2p_peer_state.set_disconnected peer_info ; + Lwt.return_error err) + >>=? 
fun conn -> let id_point = - match info.id_point, Option.map ~f:P2p_point_state.Info.point point_info with - | (addr, _), Some (_, port) -> addr, Some port - | id_point, None -> id_point in + match + (info.id_point, Option.map ~f:P2p_point_state.Info.point point_info) + with + | ((addr, _), Some (_, port)) -> + (addr, Some port) + | (id_point, None) -> + id_point + in return (create_connection - pool conn - id_point connection_point_info peer_info version) - end - | _ -> begin + pool + conn + id_point + connection_point_info + peer_info + version) + | _ -> ( log pool (Rejecting_request (point, info.id_point, info.peer_id)) ; - lwt_debug "authenticate: %a -> kick %a point: %B peer_id: %B" - P2p_point.Id.pp point - P2p_peer.Id.pp info.peer_id - acceptable_point acceptable_peer_id >>= fun () -> - P2p_socket.kick auth_fd >>= fun () -> - if not incoming then begin + lwt_debug + "authenticate: %a -> kick %a point: %B peer_id: %B" + P2p_point.Id.pp + point + P2p_peer.Id.pp + info.peer_id + acceptable_point + acceptable_peer_id + >>= fun () -> + P2p_socket.kick auth_fd + >>= fun () -> + if not incoming then Option.iter ~f:(P2p_point_state.set_disconnected pool.greylisting_config) - point_info ; - (* FIXME P2p_peer_state.set_disconnected ~requested:true peer_info ; *) - end ; + point_info + (* FIXME P2p_peer_state.set_disconnected ~requested:true peer_info ; *) ; match acceptable_version with - | None -> begin - lwt_debug "No common protocol@.\ - (chains: local %a - remote %a)@.\ - (db_versions: local [%a] - remote %a)@.\ - (p2p_versions: local [%a] - remote %a)" - Distributed_db_version.pp_name pool.message_config.chain_name - Distributed_db_version.pp_name info.announced_version.chain_name - (Format.pp_print_list Distributed_db_version.pp) pool.message_config.distributed_db_versions - Distributed_db_version.pp info.announced_version.distributed_db_version - (Format.pp_print_list P2p_version.pp) pool.custom_p2p_versions - P2p_version.pp info.announced_version.p2p_version + | None -> + lwt_debug + "No common protocol@.(chains: local %a - remote \ + %a)@.(db_versions: local [%a] - remote %a)@.(p2p_versions: local \ + [%a] - remote %a)" + Distributed_db_version.pp_name + pool.message_config.chain_name + Distributed_db_version.pp_name + info.announced_version.chain_name + (Format.pp_print_list Distributed_db_version.pp) + pool.message_config.distributed_db_versions + Distributed_db_version.pp + info.announced_version.distributed_db_version + (Format.pp_print_list P2p_version.pp) + pool.custom_p2p_versions + P2p_version.pp + info.announced_version.p2p_version >>= fun () -> - fail (P2p_errors.Rejected_no_common_protocol - { announced = info.announced_version }) - end - | Some _ -> fail (P2p_errors.Rejected info.peer_id) - end + fail + (P2p_errors.Rejected_no_common_protocol + {announced = info.announced_version}) + | Some _ -> + fail (P2p_errors.Rejected info.peer_id) ) -and create_connection pool p2p_conn id_point point_info peer_info negotiated_version = +and create_connection pool p2p_conn id_point point_info peer_info + negotiated_version = let peer_id = P2p_peer_state.Info.peer_id peer_info in let canceler = Lwt_canceler.create () in let size = - Option.map pool.config.incoming_app_message_queue_size - ~f:(fun qs -> qs, fun (size, _) -> - (Sys.word_size / 8) * 11 + size + Lwt_pipe.push_overhead) in + Option.map pool.config.incoming_app_message_queue_size ~f:(fun qs -> + ( qs, + fun (size, _) -> + (Sys.word_size / 8 * 11) + size + Lwt_pipe.push_overhead )) + in let messages = Lwt_pipe.create ?size 
() in - let rec callback_default = { P2p_answerer.message = - (fun size msg -> Lwt_pipe.push messages (size, msg)) ; + (fun size msg -> Lwt_pipe.push messages (size, msg)); advertise = - (fun points -> register_new_points pool conn points ; Lwt.return_unit) ; - bootstrap = - (fun () -> list_known_points ~ignore_private:true pool conn) ; + (fun points -> + register_new_points pool conn points ; + Lwt.return_unit); + bootstrap = (fun () -> list_known_points ~ignore_private:true pool conn); swap_request = - (fun point peer_id -> swap_request pool conn point peer_id) ; - swap_ack = - (fun point peer_id -> swap_ack pool conn point peer_id) ; - } - + (fun point peer_id -> swap_request pool conn point peer_id); + swap_ack = (fun point peer_id -> swap_ack pool conn point peer_id) } (* when the node is in private mode: deactivate advertising, peers_swap and sending list of peers in callback *) and callback_private = { P2p_answerer.message = - (fun size msg -> Lwt_pipe.push messages (size, msg)) ; + (fun size msg -> Lwt_pipe.push messages (size, msg)); advertise = (fun _points -> - private_node_warn - "Received new peers addresses from %a" - P2p_peer.Id.pp peer_id >>= fun () -> - Lwt.return_unit - ) ; + private_node_warn + "Received new peers addresses from %a" + P2p_peer.Id.pp + peer_id + >>= fun () -> Lwt.return_unit); bootstrap = (fun () -> - private_node_warn - "Receive requests for peers addresses from %a" - P2p_peer.Id.pp peer_id >>= fun () -> - Lwt.return_nil - ) ; + private_node_warn + "Receive requests for peers addresses from %a" + P2p_peer.Id.pp + peer_id + >>= fun () -> Lwt.return_nil); swap_request = (fun _point _peer_id -> - private_node_warn - "Received swap requests from %a" - P2p_peer.Id.pp peer_id >>= fun () -> - Lwt.return_unit - ) ; + private_node_warn + "Received swap requests from %a" + P2p_peer.Id.pp + peer_id + >>= fun () -> Lwt.return_unit); swap_ack = (fun _point _peer_id -> - private_node_warn - "Received swap ack from %a" - P2p_peer.Id.pp peer_id >>= fun () -> - Lwt.return_unit - ) ; - } - + private_node_warn "Received swap ack from %a" P2p_peer.Id.pp peer_id + >>= fun () -> Lwt.return_unit) } and answerer = - lazy ( - P2p_answerer.run p2p_conn canceler @@ + lazy + ( P2p_answerer.run p2p_conn canceler + @@ if pool.config.private_mode then callback_private else callback_default - ) - + ) and conn = - { conn = p2p_conn ; point_info ; peer_info ; - messages ; canceler ; answerer ; wait_close = false ; - last_sent_swap_request = None ; - negotiated_version } in + { conn = p2p_conn; + point_info; + peer_info; + messages; + canceler; + answerer; + wait_close = false; + last_sent_swap_request = None; + negotiated_version } + in ignore (Lazy.force answerer) ; let conn_meta = P2p_socket.remote_metadata p2p_conn in - Option.iter point_info ~f:begin fun point_info -> - let point = P2p_point_state.Info.point point_info in - P2p_point_state.set_running - point_info peer_id conn; - P2p_point.Table.add pool.connected_points point point_info ; - end ; + Option.iter point_info ~f:(fun point_info -> + let point = P2p_point_state.Info.point point_info in + P2p_point_state.set_running point_info peer_id conn ; + P2p_point.Table.add pool.connected_points point point_info) ; log pool (Connection_established (id_point, peer_id)) ; P2p_peer_state.set_running peer_info id_point conn conn_meta ; P2p_peer.Table.add pool.connected_peer_ids peer_id peer_info ; Lwt_condition.broadcast pool.events.new_connection () ; - Lwt_canceler.on_cancel canceler begin fun () -> - lwt_debug "Disconnect: %a (%a)" - 
P2p_peer.Id.pp peer_id P2p_connection.Id.pp id_point >>= fun () -> - Option.iter - ~f:(P2p_point_state.set_disconnected pool.greylisting_config) - point_info ; - log pool (Disconnection peer_id) ; - P2p_peer_state.set_disconnected peer_info ; - Option.iter point_info ~f:begin fun point_info -> - P2p_point.Table.remove pool.connected_points (P2p_point_state.Info.point point_info) ; - end ; - P2p_peer.Table.remove pool.connected_peer_ids peer_id ; - if pool.config.max_connections <= active_connections pool then begin - Lwt_condition.broadcast pool.events.too_many_connections () ; - log pool Too_many_connections ; - end ; - Lwt_pipe.close messages ; - P2p_socket.close ~wait:conn.wait_close conn.conn - end ; + Lwt_canceler.on_cancel canceler (fun () -> + lwt_debug + "Disconnect: %a (%a)" + P2p_peer.Id.pp + peer_id + P2p_connection.Id.pp + id_point + >>= fun () -> + Option.iter + ~f:(P2p_point_state.set_disconnected pool.greylisting_config) + point_info ; + log pool (Disconnection peer_id) ; + P2p_peer_state.set_disconnected peer_info ; + Option.iter point_info ~f:(fun point_info -> + P2p_point.Table.remove + pool.connected_points + (P2p_point_state.Info.point point_info)) ; + P2p_peer.Table.remove pool.connected_peer_ids peer_id ; + if pool.config.max_connections <= active_connections pool then ( + Lwt_condition.broadcast pool.events.too_many_connections () ; + log pool Too_many_connections ) ; + Lwt_pipe.close messages ; + P2p_socket.close ~wait:conn.wait_close conn.conn) ; List.iter (fun f -> f peer_id conn) pool.new_connection_hook ; - if active_connections pool < pool.config.min_connections then begin + if active_connections pool < pool.config.min_connections then ( Lwt_condition.broadcast pool.events.too_few_connections () ; - log pool Too_few_connections ; - end ; + log pool Too_few_connections ) ; conn and disconnect ?(wait = false) conn = @@ -974,18 +1087,21 @@ and register_new_point ?trusted pool source_peer_id point = and list_known_points ?(ignore_private = false) pool conn = if Connection.private_node conn then - private_node_warn "Private peer (%a) asked other peers addresses" - P2p_peer.Id.pp (P2p_peer_state.Info.peer_id conn.peer_info) >>= fun () -> - Lwt.return_nil + private_node_warn + "Private peer (%a) asked other peers addresses" + P2p_peer.Id.pp + (P2p_peer_state.Info.peer_id conn.peer_info) + >>= fun () -> Lwt.return_nil else P2p_point.Table.fold (fun point_id point_info acc -> - if (ignore_private && - not (P2p_point_state.Info.known_public point_info)) - || Points.banned pool point_id - then acc - else point_info :: acc) - pool.known_points [] + if + (ignore_private && not (P2p_point_state.Info.known_public point_info)) + || Points.banned pool point_id + then acc + else point_info :: acc) + pool.known_points + [] |> List.sort compare_known_point_info |> sample 30 20 |> List.map P2p_point_state.Info.point @@ -995,80 +1111,93 @@ and active_connections pool = P2p_peer.Table.length pool.connected_peer_ids and swap_request pool conn new_point _new_peer_id = let source_peer_id = P2p_peer_state.Info.peer_id conn.peer_info in - log pool (Swap_request_received { source = source_peer_id }) ; - lwt_log_info - "Swap request received from %a" P2p_peer.Id.pp source_peer_id >>= fun () -> + log pool (Swap_request_received {source = source_peer_id}) ; + lwt_log_info "Swap request received from %a" P2p_peer.Id.pp source_peer_id + >>= fun () -> (* Ignore if already connected to peer or already swapped less than <swap_linger> seconds ago. 
*) let span_since_last_swap = Ptime.diff (Systime_os.now ()) - (Time.System.max pool.latest_succesfull_swap pool.latest_accepted_swap) in + (Time.System.max pool.latest_succesfull_swap pool.latest_accepted_swap) + in let new_point_info = register_point pool source_peer_id new_point in - if Ptime.Span.compare span_since_last_swap pool.config.swap_linger < 0 - || not (P2p_point_state.is_disconnected new_point_info) then begin - log pool (Swap_request_ignored { source = source_peer_id }) ; + if + Ptime.Span.compare span_since_last_swap pool.config.swap_linger < 0 + || not (P2p_point_state.is_disconnected new_point_info) + then ( + log pool (Swap_request_ignored {source = source_peer_id}) ; lwt_log_info "Ignoring swap request from %a" P2p_peer.Id.pp source_peer_id - end else begin + ) + else match Connection.random_lowid pool ~no_private:true with | None -> - lwt_log_info - "No swap candidate for %a" P2p_peer.Id.pp source_peer_id - | Some (proposed_point, proposed_peer_id, _proposed_conn) -> - match P2p_socket.write_now - conn.conn (Swap_ack (proposed_point, proposed_peer_id)) with - | Ok true -> - log pool (Swap_ack_sent { source = source_peer_id }) ; - swap pool conn proposed_peer_id new_point >>= fun () -> - Lwt.return_unit - | Ok false -> - log pool (Swap_request_received { source = source_peer_id }) ; - Lwt.return_unit - | Error _ -> - log pool (Swap_request_received { source = source_peer_id }) ; - Lwt.return_unit - end + lwt_log_info "No swap candidate for %a" P2p_peer.Id.pp source_peer_id + | Some (proposed_point, proposed_peer_id, _proposed_conn) -> ( + match + P2p_socket.write_now + conn.conn + (Swap_ack (proposed_point, proposed_peer_id)) + with + | Ok true -> + log pool (Swap_ack_sent {source = source_peer_id}) ; + swap pool conn proposed_peer_id new_point + >>= fun () -> Lwt.return_unit + | Ok false -> + log pool (Swap_request_received {source = source_peer_id}) ; + Lwt.return_unit + | Error _ -> + log pool (Swap_request_received {source = source_peer_id}) ; + Lwt.return_unit ) and swap_ack pool conn new_point _new_peer_id = let source_peer_id = P2p_peer_state.Info.peer_id conn.peer_info in - log pool (Swap_ack_received { source = source_peer_id }) ; - lwt_log_info - "Swap ack received from %a" P2p_peer.Id.pp source_peer_id >>= fun () -> + log pool (Swap_ack_received {source = source_peer_id}) ; + lwt_log_info "Swap ack received from %a" P2p_peer.Id.pp source_peer_id + >>= fun () -> match conn.last_sent_swap_request with - | None -> Lwt.return_unit (* ignore *) - | Some (_time, proposed_peer_id) -> - match Connection.find_by_peer_id pool proposed_peer_id with - | None -> - swap pool conn proposed_peer_id new_point >>= fun () -> - Lwt.return_unit - | Some _ -> - Lwt.return_unit + | None -> + Lwt.return_unit (* ignore *) + | Some (_time, proposed_peer_id) -> ( + match Connection.find_by_peer_id pool proposed_peer_id with + | None -> + swap pool conn proposed_peer_id new_point >>= fun () -> Lwt.return_unit + | Some _ -> + Lwt.return_unit ) and swap pool conn current_peer_id new_point = let source_peer_id = P2p_peer_state.Info.peer_id conn.peer_info in pool.latest_accepted_swap <- Systime_os.now () ; - connect pool new_point >>= function - | Ok _new_conn -> begin + connect pool new_point + >>= function + | Ok _new_conn -> ( pool.latest_succesfull_swap <- Systime_os.now () ; - log pool (Swap_success { source = source_peer_id }) ; - lwt_log_info "Swap to %a succeeded" P2p_point.Id.pp new_point >>= fun () -> + log pool (Swap_success {source = source_peer_id}) ; + lwt_log_info "Swap to %a 
succeeded" P2p_point.Id.pp new_point + >>= fun () -> match Connection.find_by_peer_id pool current_peer_id with - | None -> Lwt.return_unit - | Some conn -> - disconnect conn >>= fun () -> + | None -> Lwt.return_unit - end - | Error err -> begin + | Some conn -> + disconnect conn >>= fun () -> Lwt.return_unit ) + | Error err -> ( pool.latest_accepted_swap <- pool.latest_succesfull_swap ; - log pool (Swap_failure { source = source_peer_id }) ; + log pool (Swap_failure {source = source_peer_id}) ; match err with - | [ Timeout ] -> - lwt_debug "Swap to %a was interrupted: %a" - P2p_point.Id.pp new_point pp_print_error err + | [Timeout] -> + lwt_debug + "Swap to %a was interrupted: %a" + P2p_point.Id.pp + new_point + pp_print_error + err | _ -> - lwt_log_error "Swap to %a failed: %a" - P2p_point.Id.pp new_point pp_print_error err - end + lwt_log_error + "Swap to %a failed: %a" + P2p_point.Id.pp + new_point + pp_print_error + err ) let accept pool fd point = log pool (Incoming_connection point) ; @@ -1076,125 +1205,137 @@ let accept pool fd point = if Random.bool () then (* randomly allow one additional incoming connection *) pool.config.max_connections + 1 - else - pool.config.max_connections in - if pool.config.max_incoming_connections <= P2p_point.Table.length pool.incoming - || max_active_conns <= active_connections pool - (* silently ignore banned points *) - || (P2p_acl.banned_addr pool.acl (fst point)) then - Lwt.async (fun () -> P2p_fd.close fd) + else pool.config.max_connections + in + if + pool.config.max_incoming_connections + <= P2p_point.Table.length pool.incoming + || max_active_conns <= active_connections pool + (* silently ignore banned points *) + || P2p_acl.banned_addr pool.acl (fst point) + then Lwt.async (fun () -> P2p_fd.close fd) else let canceler = Lwt_canceler.create () in P2p_point.Table.add pool.incoming point canceler ; - Lwt.async begin fun () -> - with_timeout - ~canceler (Systime_os.sleep pool.config.authentication_timeout) - (fun canceler -> authenticate pool canceler fd point) - end + Lwt.async (fun () -> + with_timeout + ~canceler + (Systime_os.sleep pool.config.authentication_timeout) + (fun canceler -> authenticate pool canceler fd point)) let send_swap_request pool = match Connection.random ~no_private:true pool with - | Some recipient when not pool.config.private_mode -> begin + | Some recipient when not pool.config.private_mode -> ( let recipient_peer_id = (Connection.info recipient).peer_id in match - Connection.random_lowid - ~different_than:recipient - ~no_private:true pool + Connection.random_lowid ~different_than:recipient ~no_private:true pool with - | None -> () + | None -> + () | Some (proposed_point, proposed_peer_id, _proposed_conn) -> - log pool (Swap_request_sent { source = recipient_peer_id }) ; + log pool (Swap_request_sent {source = recipient_peer_id}) ; recipient.last_sent_swap_request <- Some (Systime_os.now (), proposed_peer_id) ; - ignore (P2p_socket.write_now recipient.conn - (Swap_request (proposed_point, proposed_peer_id))) - end - | Some _ | None -> () + ignore + (P2p_socket.write_now + recipient.conn + (Swap_request (proposed_point, proposed_peer_id))) ) + | Some _ | None -> + () (***************************************************************************) -let create - ?(p2p_versions = P2p_version.supported) - config peer_meta_config conn_meta_config message_config io_sched = - let events = { - too_few_connections = Lwt_condition.create () ; - too_many_connections = Lwt_condition.create () ; - new_peer = Lwt_condition.create () ; - 
new_point = Lwt_condition.create () ; - new_connection = Lwt_condition.create () ; - } in - let pool = { - config ; peer_meta_config ; conn_meta_config ; message_config ; - greylisting_config = config.greylisting_config ; - announced_version = - Network_version.announced - ~chain_name: message_config.chain_name - ~distributed_db_versions: message_config.distributed_db_versions - ~p2p_versions ; - custom_p2p_versions = p2p_versions ; - my_id_points = P2p_point.Table.create 7 ; - known_peer_ids = P2p_peer.Table.create 53 ; - connected_peer_ids = P2p_peer.Table.create 53 ; - known_points = P2p_point.Table.create 53 ; - connected_points = P2p_point.Table.create 53 ; - incoming = P2p_point.Table.create 53 ; - io_sched ; - encoding = P2p_message.encoding message_config.encoding ; - events ; - watcher = Lwt_watcher.create_input () ; - acl = P2p_acl.create 1023; - new_connection_hook = [] ; - latest_accepted_swap = Ptime.epoch ; - latest_succesfull_swap = Ptime.epoch ; - } in +let create ?(p2p_versions = P2p_version.supported) config peer_meta_config + conn_meta_config message_config io_sched = + let events = + { too_few_connections = Lwt_condition.create (); + too_many_connections = Lwt_condition.create (); + new_peer = Lwt_condition.create (); + new_point = Lwt_condition.create (); + new_connection = Lwt_condition.create () } + in + let pool = + { config; + peer_meta_config; + conn_meta_config; + message_config; + greylisting_config = config.greylisting_config; + announced_version = + Network_version.announced + ~chain_name:message_config.chain_name + ~distributed_db_versions:message_config.distributed_db_versions + ~p2p_versions; + custom_p2p_versions = p2p_versions; + my_id_points = P2p_point.Table.create 7; + known_peer_ids = P2p_peer.Table.create 53; + connected_peer_ids = P2p_peer.Table.create 53; + known_points = P2p_point.Table.create 53; + connected_points = P2p_point.Table.create 53; + incoming = P2p_point.Table.create 53; + io_sched; + encoding = P2p_message.encoding message_config.encoding; + events; + watcher = Lwt_watcher.create_input (); + acl = P2p_acl.create 1023; + new_connection_hook = []; + latest_accepted_swap = Ptime.epoch; + latest_succesfull_swap = Ptime.epoch } + in List.iter (Points.set_trusted pool) config.trusted_points ; P2p_peer_state.Info.File.load config.peers_file - peer_meta_config.peer_meta_encoding >>= function + peer_meta_config.peer_meta_encoding + >>= function | Ok peer_ids -> List.iter (fun peer_info -> - let peer_id = P2p_peer_state.Info.peer_id peer_info in - P2p_peer.Table.add pool.known_peer_ids peer_id peer_info) + let peer_id = P2p_peer_state.Info.peer_id peer_info in + P2p_peer.Table.add pool.known_peer_ids peer_id peer_info) peer_ids ; Lwt.return pool | Error err -> - log_error "@[Failed to parse peers file:@ %a@]" - pp_print_error err ; + log_error "@[Failed to parse peers file:@ %a@]" pp_print_error err ; Lwt.return pool -let destroy ({ config ; peer_meta_config ; _ } as pool) = - lwt_log_info "Saving metadata in %s" config.peers_file >>= fun () -> - begin - P2p_peer_state.Info.File.save - config.peers_file - peer_meta_config.peer_meta_encoding - (P2p_peer.Table.fold (fun _ a b -> a::b) pool.known_peer_ids []) >>= function - | Error err -> - log_error "@[Failed to save peers file:@ %a@]" - pp_print_error err; - Lwt.return_unit - | Ok ()-> Lwt.return_unit - end >>= fun () -> - P2p_point.Table.fold (fun _point point_info acc -> +let destroy ({config; peer_meta_config; _} as pool) = + lwt_log_info "Saving metadata in %s" config.peers_file + >>= fun 
() -> + P2p_peer_state.Info.File.save + config.peers_file + peer_meta_config.peer_meta_encoding + (P2p_peer.Table.fold (fun _ a b -> a :: b) pool.known_peer_ids []) + >>= (function + | Error err -> + log_error "@[Failed to save peers file:@ %a@]" pp_print_error err ; + Lwt.return_unit + | Ok () -> + Lwt.return_unit) + >>= fun () -> + P2p_point.Table.fold + (fun _point point_info acc -> match P2p_point_state.get point_info with - | Requested { cancel } | Accepted { cancel ; _ } -> + | Requested {cancel} | Accepted {cancel; _} -> Lwt_canceler.cancel cancel >>= fun () -> acc - | Running { data = conn ; _ } -> + | Running {data = conn; _} -> disconnect conn >>= fun () -> acc - | Disconnected -> acc) - pool.known_points @@ - P2p_peer.Table.fold (fun _peer_id peer_info acc -> - match P2p_peer_state.get peer_info with - | Accepted { cancel ; _ } -> - Lwt_canceler.cancel cancel >>= fun () -> acc - | Running { data = conn ; _ } -> - disconnect conn >>= fun () -> acc - | Disconnected -> acc) - pool.known_peer_ids @@ - P2p_point.Table.fold (fun _point canceler acc -> - Lwt_canceler.cancel canceler >>= fun () -> acc) - pool.incoming Lwt.return_unit + | Disconnected -> + acc) + pool.known_points + @@ P2p_peer.Table.fold + (fun _peer_id peer_info acc -> + match P2p_peer_state.get peer_info with + | Accepted {cancel; _} -> + Lwt_canceler.cancel cancel >>= fun () -> acc + | Running {data = conn; _} -> + disconnect conn >>= fun () -> acc + | Disconnected -> + acc) + pool.known_peer_ids + @@ P2p_point.Table.fold + (fun _point canceler acc -> + Lwt_canceler.cancel canceler >>= fun () -> acc) + pool.incoming + Lwt.return_unit let on_new_connection pool f = pool.new_connection_hook <- f :: pool.new_connection_hook diff --git a/src/lib_p2p/p2p_pool.mli b/src/lib_p2p/p2p_pool.mli index d5e71b613b9d44a7204ad55c41ee9fc59b27bb22..a052a9159e6f6ccf25e77dff78c508f15c7901ed 100644 --- a/src/lib_p2p/p2p_pool.mli +++ b/src/lib_p2p/p2p_pool.mli @@ -44,382 +44,391 @@ type ('msg, 'peer_meta, 'conn_meta) t -type ('msg, 'peer_meta, 'conn_meta) pool = ('msg, 'peer_meta, 'conn_meta) t (** The type of a pool of connections, parametrized by resp. the type of messages and the meta-informations associated to an identity and a connection. *) +type ('msg, 'peer_meta, 'conn_meta) pool = ('msg, 'peer_meta, 'conn_meta) t type config = { - - identity : P2p_identity.t ; - (** Our identity. *) - - proof_of_work_target : Crypto_box.target ; - (** The proof of work target we require from peers. *) - - trusted_points : P2p_point.Id.t list ; - (** List of hard-coded known peers to bootstrap the network from. *) - - peers_file : string ; - (** The path to the JSON file where the metadata associated to + identity : P2p_identity.t; (** Our identity. *) + proof_of_work_target : Crypto_box.target; + (** The proof of work target we require from peers. *) + trusted_points : P2p_point.Id.t list; + (** List of hard-coded known peers to bootstrap the network from. *) + peers_file : string; + (** The path to the JSON file where the metadata associated to peer_ids are loaded / stored. *) - - private_mode : bool ; - (** If [true], only open outgoing/accept incoming connections + private_mode : bool; + (** If [true], only open outgoing/accept incoming connections to/from peers whose addresses are in [trusted_peers], and inform these peers that the identity of this node should be revealed to the rest of the network. *) - - greylisting_config : P2p_point_state.Info.greylisting_config ; - (** The greylisting configuration. 
*) - - listening_port : P2p_addr.port option ; - (** If provided, it will be passed to [P2p_connection.authenticate] + greylisting_config : P2p_point_state.Info.greylisting_config; + (** The greylisting configuration. *) + listening_port : P2p_addr.port option; + (** If provided, it will be passed to [P2p_connection.authenticate] when we authenticate against a new peer. *) - - min_connections : int ; - (** Strict minimum number of connections + min_connections : int; + (** Strict minimum number of connections (triggers [LogEvent.too_few_connections]). *) - - max_connections : int ; - (** Max number of connections. If it's reached, [connect] and + max_connections : int; + (** Max number of connections. If it's reached, [connect] and [accept] will fail, i.e. not add more connections (also triggers [LogEvent.too_many_connections]). *) - - max_incoming_connections : int ; - (** Max not-yet-authentified incoming connections. + max_incoming_connections : int; + (** Max not-yet-authentified incoming connections. Above this number, [accept] will start dropping incoming connections. *) - - connection_timeout : Time.System.Span.t ; - (** Maximum time allowed to the establishment of a connection. *) - - authentication_timeout : Time.System.Span.t ; - (** Delay granted to a peer to perform authentication, in seconds. *) - - incoming_app_message_queue_size : int option ; - (** Size of the message queue for user messages (messages returned + connection_timeout : Time.System.Span.t; + (** Maximum time allowed to the establishment of a connection. *) + authentication_timeout : Time.System.Span.t; + (** Delay granted to a peer to perform authentication, in seconds. *) + incoming_app_message_queue_size : int option; + (** Size of the message queue for user messages (messages returned by this module's [read] function. *) - - incoming_message_queue_size : int option ; - (** Size of the incoming message queue internal of a peer's Reader + incoming_message_queue_size : int option; + (** Size of the incoming message queue internal of a peer's Reader (See [P2p_connection.accept]). *) - - outgoing_message_queue_size : int option ; - (** Size of the outgoing message queue internal to a peer's Writer + outgoing_message_queue_size : int option; + (** Size of the outgoing message queue internal to a peer's Writer (See [P2p_connection.accept]). *) - - known_peer_ids_history_size : int ; - (** Size of the known peer_ids log buffer (default: 50) *) - - known_points_history_size : int ; - (** Size of the known points log buffer (default: 50) *) - - max_known_points : (int * int) option ; - (** Parameters for the the garbage collection of known points. If + known_peer_ids_history_size : int; + (** Size of the known peer_ids log buffer (default: 50) *) + known_points_history_size : int; + (** Size of the known points log buffer (default: 50) *) + max_known_points : (int * int) option; + (** Parameters for the the garbage collection of known points. If None, no garbage collection is performed. Otherwise, the first integer of the couple limits the size of the "known points" table. When this number is reached, the table is expurged from disconnected points, older first, to try to reach the amount of connections indicated by the second integer. *) - - max_known_peer_ids : (int * int) option ; - (** Like [max_known_points], but for known peer_ids. 
*) - - swap_linger : Time.System.Span.t ; - (** Peer swapping does not occur more than once during a timespan of + max_known_peer_ids : (int * int) option; + (** Like [max_known_points], but for known peer_ids. *) + swap_linger : Time.System.Span.t; + (** Peer swapping does not occur more than once during a timespan of [spap_linger] seconds. *) - - binary_chunks_size : int option ; - (** Size (in bytes) of binary blocks that are sent to other + binary_chunks_size : int option + (** Size (in bytes) of binary blocks that are sent to other peers. Default value is 64 kB. *) } type 'peer_meta peer_meta_config = { - peer_meta_encoding : 'peer_meta Data_encoding.t ; - peer_meta_initial : unit -> 'peer_meta ; - score : 'peer_meta -> float ; + peer_meta_encoding : 'peer_meta Data_encoding.t; + peer_meta_initial : unit -> 'peer_meta; + score : 'peer_meta -> float } type 'msg message_config = { - encoding : 'msg P2p_message.encoding list ; - chain_name : Distributed_db_version.name ; - distributed_db_versions : Distributed_db_version.t list ; + encoding : 'msg P2p_message.encoding list; + chain_name : Distributed_db_version.name; + distributed_db_versions : Distributed_db_version.t list } -val create: - ?p2p_versions: P2p_version.t list -> +(** [create config meta_cfg msg_cfg io_sched] is a freshly minted + pool. *) +val create : + ?p2p_versions:P2p_version.t list -> config -> 'peer_meta peer_meta_config -> 'conn_meta P2p_socket.metadata_config -> 'msg message_config -> P2p_io_scheduler.t -> - ('msg, 'peer_meta,'conn_meta) pool Lwt.t -(** [create config meta_cfg msg_cfg io_sched] is a freshly minted - pool. *) + ('msg, 'peer_meta, 'conn_meta) pool Lwt.t -val destroy: ('msg, 'peer_meta,'conn_meta) pool -> unit Lwt.t (** [destroy pool] returns when member connections are either disconnected or canceled. *) +val destroy : ('msg, 'peer_meta, 'conn_meta) pool -> unit Lwt.t -val active_connections: ('msg, 'peer_meta,'conn_meta) pool -> int (** [active_connections pool] is the number of connections inside [pool]. *) +val active_connections : ('msg, 'peer_meta, 'conn_meta) pool -> int -val pool_stat: ('msg, 'peer_meta,'conn_meta) pool -> P2p_stat.t (** [pool_stat pool] is a snapshot of current bandwidth usage for the entire [pool]. *) +val pool_stat : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_stat.t -val config : _ pool -> config (** [config pool] is the [config] argument passed to [pool] at creation. *) +val config : _ pool -> config -val send_swap_request: ('msg, 'peer_meta,'conn_meta) pool -> unit (** [send_swap_request pool] given two connected peers pi and pj (pi <> pj), suggest swap with pi for the peer pj. This behaviour is disabled in private mode *) +val send_swap_request : ('msg, 'peer_meta, 'conn_meta) pool -> unit -val score: ('msg, 'peer_meta,'conn_meta) pool -> 'peer_meta -> float (** [score pool peer_meta] returns the score of a peer in the pool whose peer_meta is provided *) +val score : ('msg, 'peer_meta, 'conn_meta) pool -> 'peer_meta -> float (** {2 Pool events} *) module Pool_event : sig - - val wait_too_few_connections: ('msg, 'peer_meta,'conn_meta) pool -> unit Lwt.t (** [wait_too_few_connections pool] is determined when the number of connections drops below the desired level. *) + val wait_too_few_connections : + ('msg, 'peer_meta, 'conn_meta) pool -> unit Lwt.t - val wait_too_many_connections: ('msg, 'peer_meta,'conn_meta) pool -> unit Lwt.t (** [wait_too_many_connections pool] is determined when the number of connections exceeds the desired level. 
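(* A minimal lifecycle sketch against the vals above, assuming the
   configuration records are built elsewhere: create the pool, wait once for
   the "too few connections" event, report the connection count, then shut
   the pool down.  Illustrative only. *)
let lifecycle_sketch config peer_meta_cfg conn_meta_cfg msg_cfg io_sched =
  P2p_pool.create config peer_meta_cfg conn_meta_cfg msg_cfg io_sched
  >>= fun pool ->
  P2p_pool.Pool_event.wait_too_few_connections pool
  >>= fun () ->
  Format.eprintf
    "only %d active connections@."
    (P2p_pool.active_connections pool) ;
  P2p_pool.destroy pool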
*) + val wait_too_many_connections : + ('msg, 'peer_meta, 'conn_meta) pool -> unit Lwt.t - val wait_new_peer: ('msg, 'peer_meta,'conn_meta) pool -> unit Lwt.t (** [wait_new_peer pool] is determined when a new peer (i.e. authentication successful) gets added to the pool. *) + val wait_new_peer : ('msg, 'peer_meta, 'conn_meta) pool -> unit Lwt.t - val wait_new_point: ('msg, 'peer_meta,'conn_meta) pool -> unit Lwt.t (** [wait_new_point pool] is determined when a new point gets registered to the pool. *) + val wait_new_point : ('msg, 'peer_meta, 'conn_meta) pool -> unit Lwt.t - val wait_new_connection: ('msg, 'peer_meta,'conn_meta) pool -> unit Lwt.t (** [wait_new_connection pool] is determined when a new connection is successfully established in the pool. *) - + val wait_new_connection : ('msg, 'peer_meta, 'conn_meta) pool -> unit Lwt.t end - (** {1 Connections management} *) -type ('msg, 'peer_meta,'conn_meta) connection (** Type of a connection to a peer, parametrized by the type of messages exchanged as well as meta-information associated to a peer and a connection. It mostly wraps [P2p_connection.connection], adding meta-information and data-structures describing a more fine-grained logical state of the connection. *) +type ('msg, 'peer_meta, 'conn_meta) connection -val connect: - ?timeout:Time.System.Span.t -> - ('msg, 'peer_meta,'conn_meta) pool -> P2p_point.Id.t -> - ('msg, 'peer_meta,'conn_meta) connection tzresult Lwt.t (** [connect ?timeout pool point] tries to add a connection to [point] in [pool] in less than [timeout] seconds. *) +val connect : + ?timeout:Time.System.Span.t -> + ('msg, 'peer_meta, 'conn_meta) pool -> + P2p_point.Id.t -> + ('msg, 'peer_meta, 'conn_meta) connection tzresult Lwt.t -val accept: - ('msg, 'peer_meta,'conn_meta) pool -> P2p_fd.t -> P2p_point.Id.t -> unit (** [accept pool fd point] instructs [pool] to start the process of accepting a connection from [fd]. Used by [P2p_welcome]. *) +val accept : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_fd.t -> P2p_point.Id.t -> unit -val register_new_point: - ?trusted:bool -> - ('a, 'b, 'c) pool -> P2p_peer.Table.key -> P2p_point.Id.t -> unit (** [register_new_point pool source_peer_id point] tries to register [point] in pool's internal peer table. *) +val register_new_point : + ?trusted:bool -> + ('a, 'b, 'c) pool -> + P2p_peer.Table.key -> + P2p_point.Id.t -> + unit -val disconnect: - ?wait:bool -> ('msg, 'peer_meta,'conn_meta) connection -> unit Lwt.t (** [disconnect conn] cleanly closes [conn] and returns after [conn]'s internal worker has returned. *) +val disconnect : + ?wait:bool -> ('msg, 'peer_meta, 'conn_meta) connection -> unit Lwt.t module Connection : sig + val info : + ('msg, 'peer_meta, 'conn_meta) connection -> + 'conn_meta P2p_connection.Info.t + + val local_metadata : ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta - val info: ('msg, 'peer_meta,'conn_meta) connection -> 'conn_meta P2p_connection.Info.t - val local_metadata: ('msg, 'peer_meta,'conn_meta) connection -> 'conn_meta - val remote_metadata: ('msg, 'peer_meta,'conn_meta) connection -> 'conn_meta + val remote_metadata : ('msg, 'peer_meta, 'conn_meta) connection -> 'conn_meta - val stat: ('msg, 'peer_meta,'conn_meta) connection -> P2p_stat.t (** [stat conn] is a snapshot of current bandwidth usage for [conn]. 
*) + val stat : ('msg, 'peer_meta, 'conn_meta) connection -> P2p_stat.t - val fold: - ('msg, 'peer_meta,'conn_meta) pool -> + val fold : + ('msg, 'peer_meta, 'conn_meta) pool -> init:'a -> - f:(P2p_peer.Id.t -> ('msg, 'peer_meta,'conn_meta) connection -> 'a -> 'a) -> + f:(P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> 'a -> 'a) -> 'a - val list: - ('msg, 'peer_meta,'conn_meta) pool -> - (P2p_peer.Id.t * ('msg, 'peer_meta,'conn_meta) connection) list + val list : + ('msg, 'peer_meta, 'conn_meta) pool -> + (P2p_peer.Id.t * ('msg, 'peer_meta, 'conn_meta) connection) list - val find_by_point: - ('msg, 'peer_meta,'conn_meta) pool -> + val find_by_point : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> - ('msg, 'peer_meta,'conn_meta) connection option + ('msg, 'peer_meta, 'conn_meta) connection option - val find_by_peer_id: - ('msg, 'peer_meta,'conn_meta) pool -> + val find_by_peer_id : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> - ('msg, 'peer_meta,'conn_meta) connection option + ('msg, 'peer_meta, 'conn_meta) connection option - val private_node: ('msg, 'peer_meta,'conn_meta) connection -> bool (** [private_node conn] returns 'true' if the node assocoatied to this connection is in private mode *) + val private_node : ('msg, 'peer_meta, 'conn_meta) connection -> bool - val trusted_node: ('msg, 'peer_meta,'conn_meta) connection -> bool (** [trusted_node conn] returns 'true' if the node assocoatied to this connection is trusted *) - + val trusted_node : ('msg, 'peer_meta, 'conn_meta) connection -> bool end -val on_new_connection: - ('msg, 'peer_meta,'conn_meta) pool -> - (P2p_peer.Id.t -> ('msg, 'peer_meta,'conn_meta) connection -> unit) -> unit (** [on_new_connection pool f] installs [f] as a hook for new connections in [pool]. *) +val on_new_connection : + ('msg, 'peer_meta, 'conn_meta) pool -> + (P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) connection -> unit) -> + unit (** {1 I/O on connections} *) -val read: ('msg, 'peer_meta,'conn_meta) connection -> 'msg tzresult Lwt.t (** [read conn] returns a message popped from [conn]'s app message queue, or fails with [Connection_closed]. *) +val read : ('msg, 'peer_meta, 'conn_meta) connection -> 'msg tzresult Lwt.t -val is_readable: ('msg, 'peer_meta,'conn_meta) connection -> unit tzresult Lwt.t (** [is_readable conn] returns when there is at least one message ready to be read. *) +val is_readable : + ('msg, 'peer_meta, 'conn_meta) connection -> unit tzresult Lwt.t -val write: - ('msg, 'peer_meta,'conn_meta) connection -> 'msg -> unit tzresult Lwt.t (** [write conn msg] is [P2p_connection.write conn' msg] where [conn'] is the internal [P2p_connection.t] inside [conn]. *) +val write : + ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> unit tzresult Lwt.t -val write_sync: - ('msg, 'peer_meta,'conn_meta) connection -> 'msg -> unit tzresult Lwt.t (** [write_sync conn msg] is [P2p_connection.write_sync conn' msg] where [conn'] is the internal [P2p_connection.t] inside [conn]. *) +val write_sync : + ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> unit tzresult Lwt.t (**/**) -val raw_write_sync: - ('msg, 'peer_meta,'conn_meta) connection -> MBytes.t -> unit tzresult Lwt.t + +val raw_write_sync : + ('msg, 'peer_meta, 'conn_meta) connection -> MBytes.t -> unit tzresult Lwt.t + (**/**) -val write_now: ('msg, 'peer_meta,'conn_meta) connection -> 'msg -> bool tzresult (** [write_now conn msg] is [P2p_connection.write_now conn' msg] where [conn'] is the internal [P2p_connection.t] inside [conn]. 
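(* A sketch of one round trip using the vals above: connect to a point, send
   one message synchronously, wait for a reply, then disconnect.  Assumes the
   tzresult/Lwt operators used throughout this codebase. *)
let ping pool point msg =
  P2p_pool.connect pool point
  >>=? fun conn ->
  P2p_pool.write_sync conn msg
  >>=? fun () ->
  P2p_pool.read conn
  >>=? fun reply -> P2p_pool.disconnect conn >>= fun () -> return reply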
*) +val write_now : + ('msg, 'peer_meta, 'conn_meta) connection -> 'msg -> bool tzresult (** {2 Broadcast functions} *) -val write_all: ('msg, 'peer_meta,'conn_meta) pool -> 'msg -> unit (** [write_all pool msg] is [write_now conn msg] for all member connections to [pool] in [Running] state. *) +val write_all : ('msg, 'peer_meta, 'conn_meta) pool -> 'msg -> unit -val broadcast_bootstrap_msg: ('msg, 'peer_meta,'conn_meta) pool -> unit (** [broadcast_bootstrap_msg pool] is [P2P_connection.write_now conn Bootstrap] for all member connections to [pool] in [Running] state. This behavior is deactivated if the node is in private mode *) +val broadcast_bootstrap_msg : ('msg, 'peer_meta, 'conn_meta) pool -> unit -val greylist_addr : ('msg, 'peer_meta,'conn_meta) pool -> P2p_addr.t -> unit (** [greylist_addr pool addr] adds [addr] to [pool]'s IP greylist. *) +val greylist_addr : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_addr.t -> unit -val greylist_peer : ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> unit (** [greylist_peer pool peer] adds [peer] to [pool]'s peer greylist and [peer]'s address to [pool]'s IP greylist. *) +val greylist_peer : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> unit -val gc_greylist: older_than:Time.System.t -> ('msg, 'peer_meta,'conn_meta) pool -> unit (** [gc_greylist ~older_than pool] *) +val gc_greylist : + older_than:Time.System.t -> ('msg, 'peer_meta, 'conn_meta) pool -> unit -val acl_clear : ('msg, 'peer_meta,'conn_meta) pool -> unit (** [acl_clear pool] clears ACL tables. *) +val acl_clear : ('msg, 'peer_meta, 'conn_meta) pool -> unit (** {1 Functions on [Peer_id]} *) module Peers : sig + type ('msg, 'peer_meta, 'conn_meta) info = + ( ('msg, 'peer_meta, 'conn_meta) connection, + 'peer_meta, + 'conn_meta ) + P2p_peer_state.Info.t + + val info : + ('msg, 'peer_meta, 'conn_meta) pool -> + P2p_peer.Id.t -> + ('msg, 'peer_meta, 'conn_meta) info option - type ('msg, 'peer_meta,'conn_meta) info = - (('msg, 'peer_meta,'conn_meta) connection, 'peer_meta,'conn_meta) P2p_peer_state.Info.t + val get_peer_metadata : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> 'peer_meta - val info: - ('msg, 'peer_meta,'conn_meta) pool -> - P2p_peer.Id.t -> - ('msg, 'peer_meta,'conn_meta) info option + val set_peer_metadata : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> 'peer_meta -> unit - val get_peer_metadata: - ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> 'peer_meta - val set_peer_metadata: - ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> 'peer_meta -> unit - val get_score: ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> float + val get_score : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> float - val get_trusted: ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> bool - val set_trusted: ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> unit - val unset_trusted: ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> unit + val get_trusted : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> bool - val fold_known: - ('msg, 'peer_meta,'conn_meta) pool -> + val set_trusted : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> unit + + val unset_trusted : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> unit + + val fold_known : + ('msg, 'peer_meta, 'conn_meta) pool -> init:'a -> - f:(P2p_peer.Id.t -> ('msg, 'peer_meta,'conn_meta) info -> 'a -> 'a) -> + f:(P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) info -> 'a -> 'a) -> 'a - val fold_connected: - ('msg, 'peer_meta,'conn_meta) pool 
-> + val fold_connected : + ('msg, 'peer_meta, 'conn_meta) pool -> init:'a -> - f:(P2p_peer.Id.t -> ('msg, 'peer_meta,'conn_meta) info -> 'a -> 'a) -> + f:(P2p_peer.Id.t -> ('msg, 'peer_meta, 'conn_meta) info -> 'a -> 'a) -> 'a - val ban : ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> unit - val unban : ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> unit - val trust : ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> unit - val untrust : ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> unit - val banned : ('msg, 'peer_meta,'conn_meta) pool -> P2p_peer.Id.t -> bool + val ban : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> unit + + val unban : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> unit + + val trust : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> unit + val untrust : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> unit + + val banned : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_peer.Id.t -> bool end (** {1 Functions on [Points]} *) module Points : sig + type ('msg, 'peer_meta, 'conn_meta) info = + ('msg, 'peer_meta, 'conn_meta) connection P2p_point_state.Info.t - type ('msg, 'peer_meta,'conn_meta) info = - ('msg, 'peer_meta,'conn_meta) connection P2p_point_state.Info.t - - val info: - ('msg, 'peer_meta,'conn_meta) pool -> + val info : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> - ('msg, 'peer_meta,'conn_meta) info option + ('msg, 'peer_meta, 'conn_meta) info option + + val get_trusted : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> bool - val get_trusted: ('msg, 'peer_meta,'conn_meta) pool -> P2p_point.Id.t -> bool - val set_trusted: ('msg, 'peer_meta,'conn_meta) pool -> P2p_point.Id.t -> unit - val unset_trusted: ('msg, 'peer_meta,'conn_meta) pool -> P2p_point.Id.t -> unit + val set_trusted : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> unit - val fold_known: - ('msg, 'peer_meta,'conn_meta) pool -> + val unset_trusted : + ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> unit + + val fold_known : + ('msg, 'peer_meta, 'conn_meta) pool -> init:'a -> - f:(P2p_point.Id.t -> ('msg, 'peer_meta,'conn_meta) info -> 'a -> 'a) -> + f:(P2p_point.Id.t -> ('msg, 'peer_meta, 'conn_meta) info -> 'a -> 'a) -> 'a - val fold_connected: - ('msg, 'peer_meta,'conn_meta) pool -> + val fold_connected : + ('msg, 'peer_meta, 'conn_meta) pool -> init:'a -> - f:(P2p_point.Id.t -> ('msg, 'peer_meta,'conn_meta) info -> 'a -> 'a) -> + f:(P2p_point.Id.t -> ('msg, 'peer_meta, 'conn_meta) info -> 'a -> 'a) -> 'a - val ban : ('msg, 'peer_meta,'conn_meta) pool -> P2p_point.Id.t -> unit - val unban : ('msg, 'peer_meta,'conn_meta) pool -> P2p_point.Id.t -> unit - val trust : ('msg, 'peer_meta,'conn_meta) pool -> P2p_point.Id.t -> unit - val untrust : ('msg, 'peer_meta,'conn_meta) pool -> P2p_point.Id.t -> unit - val banned : ('msg, 'peer_meta,'conn_meta) pool -> P2p_point.Id.t -> bool + val ban : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> unit + + val unban : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> unit + + val trust : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> unit + val untrust : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> unit + + val banned : ('msg, 'peer_meta, 'conn_meta) pool -> P2p_point.Id.t -> bool end -val watch: - ('msg, 'peer_meta,'conn_meta) pool -> - P2p_connection.Pool_event.t Lwt_stream.t * Lwt_watcher.stopper (** [watch pool] is a [stream, close] a [stream] of events and a [close] function for this stream. 
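(* Combining the ACL helpers declared above: ban a misbehaving peer, greylist
   it as well, and check the result.  Illustrative only. *)
let punish pool peer_id =
  P2p_pool.Peers.ban pool peer_id ;
  P2p_pool.greylist_peer pool peer_id ;
  if P2p_pool.Peers.banned pool peer_id then
    Format.eprintf "%a is now banned@." P2p_peer.Id.pp peer_id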
*) +val watch : + ('msg, 'peer_meta, 'conn_meta) pool -> + P2p_connection.Pool_event.t Lwt_stream.t * Lwt_watcher.stopper diff --git a/src/lib_p2p/p2p_socket.ml b/src/lib_p2p/p2p_socket.ml index 3b33d903fe673d115748b396842aac7427142620..a206d38610da96ae1c3c28dc02085b15b931cdc3 100644 --- a/src/lib_p2p/p2p_socket.ml +++ b/src/lib_p2p/p2p_socket.ml @@ -26,24 +26,28 @@ (* TODO test `close ~wait:true`. *) -include Internal_event.Legacy_logging.Make(struct let name = "p2p.connection" end) +include Internal_event.Legacy_logging.Make (struct + let name = "p2p.connection" +end) module Crypto = struct - (* maximal size of the buffer *) - let bufsize = 1 lsl 16 - 1 + let bufsize = (1 lsl 16) - 1 + let header_length = 2 + let max_content_length = bufsize - Crypto_box.zerobytes (* The size of extra data added by encryption. *) let boxextrabytes = Crypto_box.zerobytes - Crypto_box.boxzerobytes + (* The number of bytes added by encryption + header *) let extrabytes = header_length + boxextrabytes type data = { - channel_key : Crypto_box.channel_key ; - mutable local_nonce : Crypto_box.nonce ; - mutable remote_nonce : Crypto_box.nonce ; + channel_key : Crypto_box.channel_key; + mutable local_nonce : Crypto_box.nonce; + mutable remote_nonce : Crypto_box.nonce } (* We do the following assumptions on the NaCl library. Note that @@ -54,15 +58,14 @@ module Crypto = struct let write_chunk ?canceler fd cryptobox_data msg = let msglen = MBytes.length msg in - fail_unless - (msglen <= max_content_length) P2p_errors.Invalid_message_size >>=? fun () -> + fail_unless (msglen <= max_content_length) P2p_errors.Invalid_message_size + >>=? fun () -> let buf_length = msglen + Crypto_box.zerobytes in let buf = MBytes.make buf_length '\x00' in MBytes.blit msg 0 buf Crypto_box.zerobytes msglen ; let local_nonce = cryptobox_data.local_nonce in cryptobox_data.local_nonce <- Crypto_box.increment_nonce local_nonce ; - Crypto_box.fast_box_noalloc - cryptobox_data.channel_key local_nonce buf ; + Crypto_box.fast_box_noalloc cryptobox_data.channel_key local_nonce buf ; let encrypted_length = buf_length - Crypto_box.boxzerobytes in let header_pos = Crypto_box.boxzerobytes - header_length in MBytes.set_int16 buf header_pos encrypted_length ; @@ -71,24 +74,34 @@ module Crypto = struct let read_chunk ?canceler fd cryptobox_data = let header_buf = MBytes.create header_length in - P2p_io_scheduler.read_full ?canceler ~len:header_length fd header_buf >>=? fun () -> + P2p_io_scheduler.read_full ?canceler ~len:header_length fd header_buf + >>=? fun () -> let encrypted_length = MBytes.get_uint16 header_buf 0 in let buf_length = encrypted_length + Crypto_box.boxzerobytes in let buf = MBytes.make buf_length '\x00' in - P2p_io_scheduler.read_full ?canceler - ~pos:Crypto_box.boxzerobytes ~len:encrypted_length fd buf >>=? fun () -> + P2p_io_scheduler.read_full + ?canceler + ~pos:Crypto_box.boxzerobytes + ~len:encrypted_length + fd + buf + >>=? 
fun () -> let remote_nonce = cryptobox_data.remote_nonce in cryptobox_data.remote_nonce <- Crypto_box.increment_nonce remote_nonce ; match Crypto_box.fast_box_open_noalloc - cryptobox_data.channel_key remote_nonce buf + cryptobox_data.channel_key + remote_nonce + buf with | false -> fail P2p_errors.Decipher_error | true -> - return (MBytes.sub buf Crypto_box.zerobytes - (buf_length - Crypto_box.zerobytes)) - + return + (MBytes.sub + buf + Crypto_box.zerobytes + (buf_length - Crypto_box.zerobytes)) end (* Note: there is an inconsistency here, since we display an error in @@ -98,37 +111,28 @@ end let check_binary_chunks_size size = let value = size - Crypto.extrabytes in fail_unless - (value > 0 && - value <= Crypto.max_content_length) + (value > 0 && value <= Crypto.max_content_length) (P2p_errors.Invalid_chunks_size - { value = size ; - min = Crypto.extrabytes + 1 ; - max = Crypto.bufsize ; - }) + {value = size; min = Crypto.extrabytes + 1; max = Crypto.bufsize}) module Connection_message = struct - type t = { - port : int option ; - public_key : Crypto_box.public_key ; - proof_of_work_stamp : Crypto_box.nonce ; - message_nonce : Crypto_box.nonce ; - version : Network_version.t ; + port : int option; + public_key : Crypto_box.public_key; + proof_of_work_stamp : Crypto_box.nonce; + message_nonce : Crypto_box.nonce; + version : Network_version.t } let encoding = let open Data_encoding in conv - (fun { port ; public_key ; proof_of_work_stamp ; - message_nonce ; version } -> + (fun {port; public_key; proof_of_work_stamp; message_nonce; version} -> let port = match port with None -> 0 | Some port -> port in - (port, public_key, proof_of_work_stamp, - message_nonce, version)) - (fun (port, public_key, proof_of_work_stamp, - message_nonce, version) -> + (port, public_key, proof_of_work_stamp, message_nonce, version)) + (fun (port, public_key, proof_of_work_stamp, message_nonce, version) -> let port = if port = 0 then None else Some port in - { port ; public_key ; proof_of_work_stamp ; - message_nonce ; version }) + {port; public_key; proof_of_work_stamp; message_nonce; version}) (obj5 (req "port" uint16) (req "pubkey" Crypto_box.public_key_encoding) @@ -137,87 +141,90 @@ module Connection_message = struct (req "version" Network_version.encoding)) let write ~canceler fd message = - let encoded_message_len = - Data_encoding.Binary.length encoding message in + let encoded_message_len = Data_encoding.Binary.length encoding message in fail_unless (encoded_message_len < 1 lsl (Crypto.header_length * 8)) - P2p_errors.Encoding_error >>=? fun () -> + P2p_errors.Encoding_error + >>=? fun () -> let len = Crypto.header_length + encoded_message_len in let buf = MBytes.create len in - match Data_encoding.Binary.write - encoding message buf Crypto.header_length len with + match + Data_encoding.Binary.write encoding message buf Crypto.header_length len + with | None -> fail P2p_errors.Encoding_error | Some last -> - fail_unless (last = len) P2p_errors.Encoding_error >>=? fun () -> + fail_unless (last = len) P2p_errors.Encoding_error + >>=? fun () -> MBytes.set_int16 buf 0 encoded_message_len ; - P2p_io_scheduler.write ~canceler fd buf >>=? fun () -> + P2p_io_scheduler.write ~canceler fd buf + >>=? fun () -> (* We return the raw message as it is used later to compute the nonces *) return buf let read ~canceler fd = let header_buf = MBytes.create Crypto.header_length in - P2p_io_scheduler.read_full ~canceler - ~len:Crypto.header_length fd header_buf >>=? 
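(* Size bookkeeping behind the chunk framing above, assuming the [Crypto_box]
   constants used in this file: a chunk on the wire is a 2-byte length header
   followed by the ciphertext, and encryption grows the plaintext by
   [zerobytes - boxzerobytes] bytes.  A sketch of the arithmetic only. *)
let max_plaintext_per_chunk =
  (* bufsize - Crypto_box.zerobytes, i.e. Crypto.max_content_length *)
  ((1 lsl 16) - 1) - Crypto_box.zerobytes

let wire_size_of_plaintext len =
  (* header_length + len + boxextrabytes *)
  2 + len + (Crypto_box.zerobytes - Crypto_box.boxzerobytes)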
fun () -> + P2p_io_scheduler.read_full + ~canceler + ~len:Crypto.header_length + fd + header_buf + >>=? fun () -> let len = MBytes.get_uint16 header_buf 0 in let pos = Crypto.header_length in let buf = MBytes.create (pos + len) in MBytes.set_int16 buf 0 len ; - P2p_io_scheduler.read_full ~canceler ~len ~pos fd buf >>=? fun () -> + P2p_io_scheduler.read_full ~canceler ~len ~pos fd buf + >>=? fun () -> match Data_encoding.Binary.read encoding buf pos len with | None -> fail P2p_errors.Decoding_error | Some (next_pos, message) -> - if next_pos <> pos+len then - fail P2p_errors.Decoding_error - else - return (message, buf) - + if next_pos <> pos + len then fail P2p_errors.Decoding_error + else return (message, buf) end type 'meta metadata_config = { - conn_meta_encoding : 'meta Data_encoding.t ; - conn_meta_value : P2p_peer.Id.t -> 'meta ; - private_node : 'meta -> bool ; + conn_meta_encoding : 'meta Data_encoding.t; + conn_meta_value : P2p_peer.Id.t -> 'meta; + private_node : 'meta -> bool } module Metadata = struct - let write ~canceler metadata_config cryptobox_data fd message = let encoded_message_len = - Data_encoding.Binary.length metadata_config.conn_meta_encoding message in + Data_encoding.Binary.length metadata_config.conn_meta_encoding message + in let buf = MBytes.create encoded_message_len in match Data_encoding.Binary.write - metadata_config.conn_meta_encoding message buf 0 encoded_message_len + metadata_config.conn_meta_encoding + message + buf + 0 + encoded_message_len with | None -> fail P2p_errors.Encoding_error | Some last -> - fail_unless (last = encoded_message_len) - P2p_errors.Encoding_error >>=? fun () -> - Crypto.write_chunk ~canceler cryptobox_data fd buf + fail_unless (last = encoded_message_len) P2p_errors.Encoding_error + >>=? fun () -> Crypto.write_chunk ~canceler cryptobox_data fd buf let read ~canceler metadata_config fd cryptobox_data = - Crypto.read_chunk ~canceler fd cryptobox_data >>=? fun buf -> + Crypto.read_chunk ~canceler fd cryptobox_data + >>=? 
fun buf -> let length = MBytes.length buf in let encoding = metadata_config.conn_meta_encoding in - match - Data_encoding.Binary.read encoding buf 0 length - with + match Data_encoding.Binary.read encoding buf 0 length with | None -> fail P2p_errors.Decoding_error | Some (read_len, message) -> - if read_len <> length then - fail P2p_errors.Decoding_error - else - return message - + if read_len <> length then fail P2p_errors.Decoding_error + else return message end module Ack = struct - type t = Ack | Nack let encoding = @@ -225,274 +232,305 @@ module Ack = struct let ack_encoding = obj1 (req "ack" empty) in let nack_encoding = obj1 (req "nack" empty) in let ack_case tag = - case tag ack_encoding + case + tag + ack_encoding ~title:"Ack" - (function - | Ack -> Some () - | _ -> None) - (fun () -> Ack) in + (function Ack -> Some () | _ -> None) + (fun () -> Ack) + in let nack_case tag = - case tag nack_encoding + case + tag + nack_encoding ~title:"Nack" - (function - | Nack -> Some () - | _ -> None - ) - (fun _ -> Nack) in - union [ - ack_case (Tag 0) ; - nack_case (Tag 255) ; - ] + (function Nack -> Some () | _ -> None) + (fun _ -> Nack) + in + union [ack_case (Tag 0); nack_case (Tag 255)] let write ?canceler fd cryptobox_data message = - let encoded_message_len = - Data_encoding.Binary.length encoding message in + let encoded_message_len = Data_encoding.Binary.length encoding message in let buf = MBytes.create encoded_message_len in - match Data_encoding.Binary.write encoding message buf 0 encoded_message_len with + match + Data_encoding.Binary.write encoding message buf 0 encoded_message_len + with | None -> fail P2p_errors.Encoding_error | Some last -> - fail_unless (last = encoded_message_len) - P2p_errors.Encoding_error >>=? fun () -> - Crypto.write_chunk ?canceler fd cryptobox_data buf + fail_unless (last = encoded_message_len) P2p_errors.Encoding_error + >>=? fun () -> Crypto.write_chunk ?canceler fd cryptobox_data buf let read ?canceler fd cryptobox_data = - Crypto.read_chunk ?canceler fd cryptobox_data >>=? fun buf -> + Crypto.read_chunk ?canceler fd cryptobox_data + >>=? fun buf -> let length = MBytes.length buf in match Data_encoding.Binary.read encoding buf 0 length with | None -> fail P2p_errors.Decoding_error | Some (read_len, message) -> - if read_len <> length then - fail P2p_errors.Decoding_error - else - return message - + if read_len <> length then fail P2p_errors.Decoding_error + else return message end type 'meta authenticated_connection = { - fd: P2p_io_scheduler.connection ; - info: 'meta P2p_connection.Info.t ; - cryptobox_data: Crypto.data ; + fd : P2p_io_scheduler.connection; + info : 'meta P2p_connection.Info.t; + cryptobox_data : Crypto.data } -let kick { fd ; cryptobox_data ; _ } = - Ack.write fd cryptobox_data Nack >>= fun _ -> - P2p_io_scheduler.close fd >>= fun _ -> - Lwt.return_unit +let kick {fd; cryptobox_data; _} = + Ack.write fd cryptobox_data Nack + >>= fun _ -> P2p_io_scheduler.close fd >>= fun _ -> Lwt.return_unit (* First step: write and read credentials, makes no difference whether we're trying to connect to a peer or checking an incoming connection, both parties must first introduce themselves. 
*) -let authenticate - ~canceler - ~proof_of_work_target - ~incoming fd (remote_addr, remote_socket_port as point) - ?listening_port identity announced_version metadata_config = +let authenticate ~canceler ~proof_of_work_target ~incoming fd + ((remote_addr, remote_socket_port) as point) ?listening_port identity + announced_version metadata_config = let local_nonce_seed = Crypto_box.random_nonce () in - lwt_debug "Sending authenfication to %a" P2p_point.Id.pp point >>= fun () -> - Connection_message.write ~canceler fd - { public_key = identity.P2p_identity.public_key ; - proof_of_work_stamp = identity.proof_of_work_stamp ; - message_nonce = local_nonce_seed ; - port = listening_port ; - version = announced_version } >>=? fun sent_msg -> - Connection_message.read ~canceler fd >>=? fun (msg, recv_msg) -> + lwt_debug "Sending authenfication to %a" P2p_point.Id.pp point + >>= fun () -> + Connection_message.write + ~canceler + fd + { public_key = identity.P2p_identity.public_key; + proof_of_work_stamp = identity.proof_of_work_stamp; + message_nonce = local_nonce_seed; + port = listening_port; + version = announced_version } + >>=? fun sent_msg -> + Connection_message.read ~canceler fd + >>=? fun (msg, recv_msg) -> let remote_listening_port = - if incoming then msg.port else Some remote_socket_port in - let id_point = remote_addr, remote_listening_port in + if incoming then msg.port else Some remote_socket_port + in + let id_point = (remote_addr, remote_listening_port) in let remote_peer_id = Crypto_box.hash msg.public_key in fail_unless (remote_peer_id <> identity.P2p_identity.peer_id) - (P2p_errors.Myself id_point) >>=? fun () -> + (P2p_errors.Myself id_point) + >>=? fun () -> fail_unless (Crypto_box.check_proof_of_work - msg.public_key msg.proof_of_work_stamp proof_of_work_target) - (P2p_errors.Not_enough_proof_of_work remote_peer_id) >>=? fun () -> + msg.public_key + msg.proof_of_work_stamp + proof_of_work_target) + (P2p_errors.Not_enough_proof_of_work remote_peer_id) + >>=? fun () -> let channel_key = - Crypto_box.precompute identity.P2p_identity.secret_key msg.public_key in + Crypto_box.precompute identity.P2p_identity.secret_key msg.public_key + in let (local_nonce, remote_nonce) = - Crypto_box.generate_nonces ~incoming ~sent_msg ~recv_msg in - let cryptobox_data = { Crypto.channel_key ; local_nonce ; remote_nonce } in + Crypto_box.generate_nonces ~incoming ~sent_msg ~recv_msg + in + let cryptobox_data = {Crypto.channel_key; local_nonce; remote_nonce} in let local_metadata = metadata_config.conn_meta_value remote_peer_id in - Metadata.write ~canceler metadata_config fd cryptobox_data local_metadata >>=? fun () -> - Metadata.read ~canceler metadata_config fd cryptobox_data >>=? fun remote_metadata -> + Metadata.write ~canceler metadata_config fd cryptobox_data local_metadata + >>=? fun () -> + Metadata.read ~canceler metadata_config fd cryptobox_data + >>=? 
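(* How the two phases fit together for an incoming socket, assuming
   [authenticate] and [accept] are exposed by this module with the signatures
   defined in this file: authenticate first, then upgrade the authenticated
   connection with [accept].  Parameter values are left to the caller;
   a hedged sketch, not the module's own wiring. *)
let handshake ~canceler ~proof_of_work_target ~identity ~announced_version
    ~metadata_config ~encoding fd point =
  P2p_socket.authenticate
    ~canceler
    ~proof_of_work_target
    ~incoming:true
    fd
    point
    identity
    announced_version
    metadata_config
  >>=? fun (_info, auth_conn) -> P2p_socket.accept ~canceler auth_conn encoding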
fun remote_metadata -> let info = - { P2p_connection.Info.peer_id = remote_peer_id ; - announced_version = msg.version ; incoming ; - id_point ; remote_socket_port ; - private_node = metadata_config.private_node remote_metadata ; - local_metadata ; - remote_metadata ; - } in - return (info, { fd ; info ; cryptobox_data }) + { P2p_connection.Info.peer_id = remote_peer_id; + announced_version = msg.version; + incoming; + id_point; + remote_socket_port; + private_node = metadata_config.private_node remote_metadata; + local_metadata; + remote_metadata } + in + return (info, {fd; info; cryptobox_data}) module Reader = struct - type ('msg, 'meta) t = { - canceler: Lwt_canceler.t ; - conn: 'meta authenticated_connection ; - encoding: 'msg Data_encoding.t ; - messages: (int * 'msg) tzresult Lwt_pipe.t ; - mutable worker: unit Lwt.t ; + canceler : Lwt_canceler.t; + conn : 'meta authenticated_connection; + encoding : 'msg Data_encoding.t; + messages : (int * 'msg) tzresult Lwt_pipe.t; + mutable worker : unit Lwt.t } let read_message st init = let rec loop status = - Lwt_unix.yield () >>= fun () -> + Lwt_unix.yield () + >>= fun () -> let open Data_encoding.Binary in match status with - | Success { result ; size ; stream } -> + | Success {result; size; stream} -> return_some (result, size, stream) | Error _err -> - lwt_debug "[read_message] incremental decoding error" >>= fun () -> - return_none + lwt_debug "[read_message] incremental decoding error" + >>= fun () -> return_none | Await decode_next_buf -> - Crypto.read_chunk ~canceler:st.canceler - st.conn.fd st.conn.cryptobox_data >>=? fun buf -> + Crypto.read_chunk + ~canceler:st.canceler + st.conn.fd + st.conn.cryptobox_data + >>=? fun buf -> lwt_debug "reading %d bytes from %a" - (MBytes.length buf) P2p_peer.Id.pp st.conn.info.peer_id >>= fun () -> - loop (decode_next_buf buf) in + (MBytes.length buf) + P2p_peer.Id.pp + st.conn.info.peer_id + >>= fun () -> loop (decode_next_buf buf) + in loop (Data_encoding.Binary.read_stream ?init st.encoding) - let rec worker_loop st stream = - begin - read_message st stream >>=? fun msg -> - match msg with - | None -> - protect ~canceler:st.canceler begin fun () -> - Lwt_pipe.push st.messages (Error [P2p_errors.Decoding_error]) >>= fun () -> - return_none - end - | Some (msg, size, stream) -> - protect ~canceler:st.canceler begin fun () -> - Lwt_pipe.push st.messages (Ok (size, msg)) >>= fun () -> - return_some stream - end - end >>= function + read_message st stream + >>=? 
(fun msg -> + match msg with + | None -> + protect ~canceler:st.canceler (fun () -> + Lwt_pipe.push + st.messages + (Error [P2p_errors.Decoding_error]) + >>= fun () -> return_none) + | Some (msg, size, stream) -> + protect ~canceler:st.canceler (fun () -> + Lwt_pipe.push st.messages (Ok (size, msg)) + >>= fun () -> return_some stream)) + >>= function | Ok (Some stream) -> worker_loop st (Some stream) | Ok None -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit - | Error [Canceled | Exn Lwt_pipe.Closed] -> - lwt_debug "connection closed to %a" - P2p_peer.Id.pp st.conn.info.peer_id >>= fun () -> - Lwt.return_unit + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit + | Error [(Canceled | Exn Lwt_pipe.Closed)] -> + lwt_debug "connection closed to %a" P2p_peer.Id.pp st.conn.info.peer_id + >>= fun () -> Lwt.return_unit | Error _ as err -> Lwt_pipe.safe_push_now st.messages err ; - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit let run ?size conn encoding canceler = let compute_size = function - | Ok (size, _) -> (Sys.word_size / 8) * 11 + size + Lwt_pipe.push_overhead - | Error _ -> 0 (* we push Error only when we close the socket, - we don't fear memory leaks in that case... *) in + | Ok (size, _) -> + (Sys.word_size / 8 * 11) + size + Lwt_pipe.push_overhead + | Error _ -> + 0 + (* we push Error only when we close the socket, + we don't fear memory leaks in that case... *) + in let size = Option.map size ~f:(fun max -> (max, compute_size)) in let st = - { canceler ; conn ; encoding ; - messages = Lwt_pipe.create ?size () ; - worker = Lwt.return_unit ; - } in - Lwt_canceler.on_cancel st.canceler begin fun () -> - Lwt_pipe.close st.messages ; - Lwt.return_unit - end ; + { canceler; + conn; + encoding; + messages = Lwt_pipe.create ?size (); + worker = Lwt.return_unit } + in + Lwt_canceler.on_cancel st.canceler (fun () -> + Lwt_pipe.close st.messages ; Lwt.return_unit) ; st.worker <- - Lwt_utils.worker "reader" + Lwt_utils.worker + "reader" ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop st None) ~cancel:(fun () -> Lwt_canceler.cancel st.canceler) ; st - let shutdown st = - Lwt_canceler.cancel st.canceler >>= fun () -> - st.worker - + let shutdown st = Lwt_canceler.cancel st.canceler >>= fun () -> st.worker end module Writer = struct - type ('msg, 'meta) t = { - canceler: Lwt_canceler.t ; - conn: 'meta authenticated_connection ; - encoding: 'msg Data_encoding.t ; - messages: (MBytes.t list * unit tzresult Lwt.u option) Lwt_pipe.t ; - mutable worker: unit Lwt.t ; - binary_chunks_size: int ; (* in bytes *) + canceler : Lwt_canceler.t; + conn : 'meta authenticated_connection; + encoding : 'msg Data_encoding.t; + messages : (MBytes.t list * unit tzresult Lwt.u option) Lwt_pipe.t; + mutable worker : unit Lwt.t; + binary_chunks_size : int (* in bytes *) } let send_message st buf = let rec loop = function - | [] -> return_unit + | [] -> + return_unit | buf :: l -> - Crypto.write_chunk ~canceler:st.canceler - st.conn.fd st.conn.cryptobox_data buf >>=? fun () -> - lwt_debug "writing %d bytes to %a" - (MBytes.length buf) P2p_peer.Id.pp st.conn.info.peer_id >>= fun () -> - loop l in + Crypto.write_chunk + ~canceler:st.canceler + st.conn.fd + st.conn.cryptobox_data + buf + >>=? 
fun () -> + lwt_debug + "writing %d bytes to %a" + (MBytes.length buf) + P2p_peer.Id.pp + st.conn.info.peer_id + >>= fun () -> loop l + in loop buf let encode_message st msg = - try ok (MBytes.cut - st.binary_chunks_size - (Data_encoding.Binary.to_bytes_exn st.encoding msg)) + try + ok + (MBytes.cut + st.binary_chunks_size + (Data_encoding.Binary.to_bytes_exn st.encoding msg)) with Data_encoding.Binary.Write_error _ -> error P2p_errors.Encoding_error let rec worker_loop st = - Lwt_unix.yield () >>= fun () -> - protect ~canceler:st.canceler begin fun () -> - Lwt_pipe.pop st.messages >>= return - end >>= function - | Error [Canceled | Exn Lwt_pipe.Closed] -> - lwt_debug "connection closed to %a" - P2p_peer.Id.pp st.conn.info.peer_id >>= fun () -> - Lwt.return_unit + Lwt_unix.yield () + >>= fun () -> + protect ~canceler:st.canceler (fun () -> + Lwt_pipe.pop st.messages >>= return) + >>= function + | Error [(Canceled | Exn Lwt_pipe.Closed)] -> + lwt_debug "connection closed to %a" P2p_peer.Id.pp st.conn.info.peer_id + >>= fun () -> Lwt.return_unit | Error err -> lwt_log_error "@[<v 2>error writing to %a@ %a@]" - P2p_peer.Id.pp st.conn.info.peer_id pp_print_error err >>= fun () -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit - | Ok (buf, wakener) -> - send_message st buf >>= fun res -> + P2p_peer.Id.pp + st.conn.info.peer_id + pp_print_error + err + >>= fun () -> + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit + | Ok (buf, wakener) -> ( + send_message st buf + >>= fun res -> match res with | Ok () -> Option.iter wakener ~f:(fun u -> Lwt.wakeup_later u res) ; worker_loop st - | Error err -> - Option.iter wakener - ~f:(fun u -> - Lwt.wakeup_later u - (Error [P2p_errors.Connection_closed])) ; + | Error err -> ( + Option.iter wakener ~f:(fun u -> + Lwt.wakeup_later u (Error [P2p_errors.Connection_closed])) ; match err with - | [ Canceled | Exn Lwt_pipe.Closed ] -> - lwt_debug "connection closed to %a" - P2p_peer.Id.pp st.conn.info.peer_id >>= fun () -> - Lwt.return_unit + | [(Canceled | Exn Lwt_pipe.Closed)] -> + lwt_debug + "connection closed to %a" + P2p_peer.Id.pp + st.conn.info.peer_id + >>= fun () -> Lwt.return_unit | P2p_errors.Connection_closed :: _ -> - lwt_debug "connection closed to %a" - P2p_peer.Id.pp st.conn.info.peer_id >>= fun () -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit + lwt_debug + "connection closed to %a" + P2p_peer.Id.pp + st.conn.info.peer_id + >>= fun () -> + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit | err -> lwt_log_error "@[<v 2>error writing to %a@ %a@]" - P2p_peer.Id.pp st.conn.info.peer_id - pp_print_error err >>= fun () -> - Lwt_canceler.cancel st.canceler >>= fun () -> - Lwt.return_unit - - let run - ?size ?binary_chunks_size - conn encoding canceler = + P2p_peer.Id.pp + st.conn.info.peer_id + pp_print_error + err + >>= fun () -> + Lwt_canceler.cancel st.canceler >>= fun () -> Lwt.return_unit ) + ) + + let run ?size ?binary_chunks_size conn encoding canceler = let binary_chunks_size = match binary_chunks_size with - | None -> Crypto.max_content_length + | None -> + Crypto.max_content_length | Some size -> let size = size - Crypto.extrabytes in assert (size > 0) ; @@ -502,172 +540,184 @@ module Writer = struct let compute_size = let buf_list_size = List.fold_left - (fun sz buf -> - sz + MBytes.length buf + 2 * Sys.word_size) 0 + (fun sz buf -> sz + MBytes.length buf + (2 * Sys.word_size)) + 0 in function - | buf_l, None -> + | (buf_l, None) -> Sys.word_size + buf_list_size buf_l + 
Lwt_pipe.push_overhead - | buf_l, Some _ -> - 2 * Sys.word_size + buf_list_size buf_l + Lwt_pipe.push_overhead + | (buf_l, Some _) -> + (2 * Sys.word_size) + buf_list_size buf_l + Lwt_pipe.push_overhead in - let size = Option.map size ~f:(fun max -> max, compute_size) in + let size = Option.map size ~f:(fun max -> (max, compute_size)) in let st = - { canceler ; conn ; encoding ; - messages = Lwt_pipe.create ?size () ; - worker = Lwt.return_unit ; - binary_chunks_size = binary_chunks_size ; - } in - Lwt_canceler.on_cancel st.canceler begin fun () -> - Lwt_pipe.close st.messages ; - while not (Lwt_pipe.is_empty st.messages) do - let _, w = Lwt_pipe.pop_now_exn st.messages in - Option.iter w - ~f:(fun u -> Lwt.wakeup_later u (Error [Exn Lwt_pipe.Closed])) - done ; - Lwt.return_unit - end ; + { canceler; + conn; + encoding; + messages = Lwt_pipe.create ?size (); + worker = Lwt.return_unit; + binary_chunks_size } + in + Lwt_canceler.on_cancel st.canceler (fun () -> + Lwt_pipe.close st.messages ; + while not (Lwt_pipe.is_empty st.messages) do + let (_, w) = Lwt_pipe.pop_now_exn st.messages in + Option.iter w ~f:(fun u -> + Lwt.wakeup_later u (Error [Exn Lwt_pipe.Closed])) + done ; + Lwt.return_unit) ; st.worker <- - Lwt_utils.worker "writer" + Lwt_utils.worker + "writer" ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop st) ~cancel:(fun () -> Lwt_canceler.cancel st.canceler) ; st - let shutdown st = - Lwt_canceler.cancel st.canceler >>= fun () -> - st.worker - + let shutdown st = Lwt_canceler.cancel st.canceler >>= fun () -> st.worker end type ('msg, 'meta) t = { - conn : 'meta authenticated_connection ; - reader : ('msg, 'meta) Reader.t ; - writer : ('msg, 'meta) Writer.t ; + conn : 'meta authenticated_connection; + reader : ('msg, 'meta) Reader.t; + writer : ('msg, 'meta) Writer.t } -let equal { conn = { fd = fd2 ; _ } ; _ } { conn = { fd = fd1 ; _ } ; _ } = +let equal {conn = {fd = fd2; _}; _} {conn = {fd = fd1; _}; _} = P2p_io_scheduler.id fd1 = P2p_io_scheduler.id fd2 -let pp ppf { conn ; _ } = P2p_connection.Info.pp (fun _ _ -> ()) ppf conn.info -let info { conn ; _ } = conn.info -let local_metadata { conn ; _ } = conn.info.local_metadata -let remote_metadata { conn ; _ } = conn.info.remote_metadata -let private_node { conn ; _ } = conn.info.private_node - -let accept - ?incoming_message_queue_size ?outgoing_message_queue_size - ?binary_chunks_size - ~canceler - conn - encoding = - protect begin fun () -> - Ack.write ~canceler conn.fd conn.cryptobox_data Ack >>=? fun () -> - Ack.read ~canceler conn.fd conn.cryptobox_data - end ~on_error:begin fun err -> - P2p_io_scheduler.close conn.fd >>= fun _ -> - match err with - | [ P2p_errors.Connection_closed ] -> fail P2p_errors.Rejected_socket_connection - | [ P2p_errors.Decipher_error ] -> fail P2p_errors.Invalid_auth - | err -> Lwt.return_error err - end >>=? function +let pp ppf {conn; _} = P2p_connection.Info.pp (fun _ _ -> ()) ppf conn.info + +let info {conn; _} = conn.info + +let local_metadata {conn; _} = conn.info.local_metadata + +let remote_metadata {conn; _} = conn.info.remote_metadata + +let private_node {conn; _} = conn.info.private_node + +let accept ?incoming_message_queue_size ?outgoing_message_queue_size + ?binary_chunks_size ~canceler conn encoding = + protect + (fun () -> + Ack.write ~canceler conn.fd conn.cryptobox_data Ack + >>=? 
fun () -> Ack.read ~canceler conn.fd conn.cryptobox_data) + ~on_error:(fun err -> + P2p_io_scheduler.close conn.fd + >>= fun _ -> + match err with + | [P2p_errors.Connection_closed] -> + fail P2p_errors.Rejected_socket_connection + | [P2p_errors.Decipher_error] -> + fail P2p_errors.Invalid_auth + | err -> + Lwt.return_error err) + >>=? function | Ack -> let canceler = Lwt_canceler.create () in let reader = Reader.run ?size:incoming_message_queue_size conn encoding canceler and writer = Writer.run - ?size:outgoing_message_queue_size ?binary_chunks_size - conn encoding canceler + ?size:outgoing_message_queue_size + ?binary_chunks_size + conn + encoding + canceler in - let conn = { conn ; reader ; writer } in - Lwt_canceler.on_cancel canceler begin fun () -> - P2p_io_scheduler.close conn.conn.fd >>= fun _ -> - Lwt.return_unit - end ; + let conn = {conn; reader; writer} in + Lwt_canceler.on_cancel canceler (fun () -> + P2p_io_scheduler.close conn.conn.fd >>= fun _ -> Lwt.return_unit) ; return conn | Nack -> fail P2p_errors.Rejected_socket_connection let catch_closed_pipe f = - Lwt.catch f begin function - | Lwt_pipe.Closed -> fail P2p_errors.Connection_closed - | exn -> fail (Exn exn) - end >>= function + Lwt.catch f (function + | Lwt_pipe.Closed -> + fail P2p_errors.Connection_closed + | exn -> + fail (Exn exn)) + >>= function | Error [Exn Lwt_pipe.Closed] -> fail P2p_errors.Connection_closed - | Error _ | Ok _ as v -> Lwt.return v + | (Error _ | Ok _) as v -> + Lwt.return v let pp_json encoding ppf msg = - Data_encoding.Json.pp ppf - (Data_encoding.Json.construct encoding msg) - -let write { writer ; conn ; _ } msg = - catch_closed_pipe begin fun () -> - debug "Sending message to %a: %a" - P2p_peer.Id.pp_short conn.info.peer_id (pp_json writer.encoding) msg ; - Lwt.return (Writer.encode_message writer msg) >>=? fun buf -> - Lwt_pipe.push writer.messages (buf, None) >>= return - end - -let write_sync { writer ; conn ; _ } msg = - catch_closed_pipe begin fun () -> - let waiter, wakener = Lwt.wait () in - debug "Sending message to %a: %a" - P2p_peer.Id.pp_short conn.info.peer_id ( pp_json writer.encoding ) msg ; - Lwt.return (Writer.encode_message writer msg) >>=? fun buf -> - Lwt_pipe.push writer.messages (buf, Some wakener) >>= fun () -> - waiter - end - -let write_now { writer ; conn ; _ } msg = - debug "Try sending message to %a: %a" - P2p_peer.Id.pp_short conn.info.peer_id (pp_json writer.encoding) msg ; - Writer.encode_message writer msg >>? fun buf -> + Data_encoding.Json.pp ppf (Data_encoding.Json.construct encoding msg) + +let write {writer; conn; _} msg = + catch_closed_pipe (fun () -> + debug + "Sending message to %a: %a" + P2p_peer.Id.pp_short + conn.info.peer_id + (pp_json writer.encoding) + msg ; + Lwt.return (Writer.encode_message writer msg) + >>=? fun buf -> Lwt_pipe.push writer.messages (buf, None) >>= return) + +let write_sync {writer; conn; _} msg = + catch_closed_pipe (fun () -> + let (waiter, wakener) = Lwt.wait () in + debug + "Sending message to %a: %a" + P2p_peer.Id.pp_short + conn.info.peer_id + (pp_json writer.encoding) + msg ; + Lwt.return (Writer.encode_message writer msg) + >>=? fun buf -> + Lwt_pipe.push writer.messages (buf, Some wakener) >>= fun () -> waiter) + +let write_now {writer; conn; _} msg = + debug + "Try sending message to %a: %a" + P2p_peer.Id.pp_short + conn.info.peer_id + (pp_json writer.encoding) + msg ; + Writer.encode_message writer msg + >>? 
fun buf -> try Ok (Lwt_pipe.push_now writer.messages (buf, None)) with Lwt_pipe.Closed -> Error [P2p_errors.Connection_closed] let rec split_bytes size bytes = - if MBytes.length bytes <= size then - [bytes] + if MBytes.length bytes <= size then [bytes] else - MBytes.sub bytes 0 size :: - split_bytes size (MBytes.sub bytes size (MBytes.length bytes - size)) + MBytes.sub bytes 0 size + :: split_bytes size (MBytes.sub bytes size (MBytes.length bytes - size)) -let raw_write_sync { writer ; _ } bytes = +let raw_write_sync {writer; _} bytes = let bytes = split_bytes writer.binary_chunks_size bytes in - catch_closed_pipe begin fun () -> - let waiter, wakener = Lwt.wait () in - Lwt_pipe.push writer.messages (bytes, Some wakener) >>= fun () -> - waiter - end - -let is_readable { reader ; _ } = - not (Lwt_pipe.is_empty reader.messages) -let wait_readable { reader ; _ } = - catch_closed_pipe begin fun () -> - Lwt_pipe.values_available reader.messages >>= return - end -let read { reader ; _ } = - catch_closed_pipe begin fun () -> - Lwt_pipe.pop reader.messages - end -let read_now { reader ; _ } = + catch_closed_pipe (fun () -> + let (waiter, wakener) = Lwt.wait () in + Lwt_pipe.push writer.messages (bytes, Some wakener) >>= fun () -> waiter) + +let is_readable {reader; _} = not (Lwt_pipe.is_empty reader.messages) + +let wait_readable {reader; _} = + catch_closed_pipe (fun () -> + Lwt_pipe.values_available reader.messages >>= return) + +let read {reader; _} = + catch_closed_pipe (fun () -> Lwt_pipe.pop reader.messages) + +let read_now {reader; _} = try Lwt_pipe.pop_now reader.messages with Lwt_pipe.Closed -> Some (Error [P2p_errors.Connection_closed]) -let stat { conn = { fd ; _ } ; _ } = P2p_io_scheduler.stat fd +let stat {conn = {fd; _}; _} = P2p_io_scheduler.stat fd let close ?(wait = false) st = - begin - if not wait then Lwt.return_unit - else begin - Lwt_pipe.close st.reader.messages ; - Lwt_pipe.close st.writer.messages ; - st.writer.worker - end - end >>= fun () -> - Reader.shutdown st.reader >>= fun () -> - Writer.shutdown st.writer >>= fun () -> - P2p_io_scheduler.close st.conn.fd >>= fun _ -> - Lwt.return_unit + ( if not wait then Lwt.return_unit + else ( + Lwt_pipe.close st.reader.messages ; + Lwt_pipe.close st.writer.messages ; + st.writer.worker ) ) + >>= fun () -> + Reader.shutdown st.reader + >>= fun () -> + Writer.shutdown st.writer + >>= fun () -> P2p_io_scheduler.close st.conn.fd >>= fun _ -> Lwt.return_unit diff --git a/src/lib_p2p/p2p_socket.mli b/src/lib_p2p/p2p_socket.mli index d85585c4f6075f18249e80a818e0ca6e9d83d841..34271f4bed665272f4cf40e033fae245da0c2fe7 100644 --- a/src/lib_p2p/p2p_socket.mli +++ b/src/lib_p2p/p2p_socket.mli @@ -36,111 +36,118 @@ (** {1 Types} *) +(** Type for the parameter negotiation mechanism. *) type 'meta metadata_config = { - conn_meta_encoding : 'meta Data_encoding.t ; - conn_meta_value : P2p_peer.Id.t -> 'meta ; - private_node : 'meta -> bool ; + conn_meta_encoding : 'meta Data_encoding.t; + conn_meta_value : P2p_peer.Id.t -> 'meta; + private_node : 'meta -> bool } -(** Type for the parameter negotiation mechanism. *) -type 'meta authenticated_connection (** Type of a connection that successfully passed the authentication phase, but has not been accepted yet. Parametrized by the type of expected parameter in the `ack` message. *) +type 'meta authenticated_connection -type ('msg, 'meta) t (** Type of an accepted connection, parametrized by the type of messages exchanged between peers. 
*) +type ('msg, 'meta) t -val equal: ('mst, 'meta) t -> ('msg, 'meta) t -> bool +val equal : ('mst, 'meta) t -> ('msg, 'meta) t -> bool -val pp: Format.formatter -> ('msg, 'meta) t -> unit -val info: ('msg, 'meta) t -> 'meta P2p_connection.Info.t -val local_metadata: ('msg, 'meta) t -> 'meta -val remote_metadata: ('msg, 'meta) t -> 'meta -val private_node: ('msg, 'meta) t -> bool +val pp : Format.formatter -> ('msg, 'meta) t -> unit + +val info : ('msg, 'meta) t -> 'meta P2p_connection.Info.t + +val local_metadata : ('msg, 'meta) t -> 'meta + +val remote_metadata : ('msg, 'meta) t -> 'meta + +val private_node : ('msg, 'meta) t -> bool (** {1 Low-level functions (do not use directly)} *) -val authenticate: +(** (Low-level) (Cancelable) Authentication function of a remote + peer. Used in [P2p_pool], to promote a + [P2P_io_scheduler.connection] into an [authenticated_connection] (auth + correct, acceptation undecided). *) +val authenticate : canceler:Lwt_canceler.t -> proof_of_work_target:Crypto_box.target -> incoming:bool -> - P2p_io_scheduler.connection -> P2p_point.Id.t -> - ?listening_port: int -> - P2p_identity.t -> Network_version.t -> + P2p_io_scheduler.connection -> + P2p_point.Id.t -> + ?listening_port:int -> + P2p_identity.t -> + Network_version.t -> 'meta metadata_config -> ('meta P2p_connection.Info.t * 'meta authenticated_connection) tzresult Lwt.t -(** (Low-level) (Cancelable) Authentication function of a remote - peer. Used in [P2p_pool], to promote a - [P2P_io_scheduler.connection] into an [authenticated_connection] (auth - correct, acceptation undecided). *) -val kick: 'meta authenticated_connection -> unit Lwt.t (** (Low-level) (Cancelable) [kick afd] notifies the remote peer that we refuse this connection and then closes [afd]. Used in [P2p_pool] to reject an [authenticated_connection] which we do not want to connect to for some reason. *) +val kick : 'meta authenticated_connection -> unit Lwt.t -val accept: +(** (Low-level) (Cancelable) Accepts a remote peer given an + authenticated_connection. Used in [P2p_pool], to promote an + [authenticated_connection] to the status of an active peer. *) +val accept : ?incoming_message_queue_size:int -> ?outgoing_message_queue_size:int -> - ?binary_chunks_size: int -> + ?binary_chunks_size:int -> canceler:Lwt_canceler.t -> 'meta authenticated_connection -> - 'msg Data_encoding.t -> ('msg, 'meta) t tzresult Lwt.t -(** (Low-level) (Cancelable) Accepts a remote peer given an - authenticated_connection. Used in [P2p_pool], to promote an - [authenticated_connection] to the status of an active peer. *) + 'msg Data_encoding.t -> + ('msg, 'meta) t tzresult Lwt.t -val check_binary_chunks_size: int -> unit tzresult Lwt.t (** Precheck for the [?binary_chunks_size] parameter of [accept]. *) +val check_binary_chunks_size : int -> unit tzresult Lwt.t (** {1 IO functions on connections} *) (** {2 Output functions} *) -val write: ('msg, 'meta) t -> 'msg -> unit tzresult Lwt.t (** [write conn msg] returns when [msg] has successfully been added to [conn]'s internal write queue or fails with a corresponding error. *) +val write : ('msg, 'meta) t -> 'msg -> unit tzresult Lwt.t -val write_now: ('msg, 'meta) t -> 'msg -> bool tzresult (** [write_now conn msg] is [Ok true] if [msg] has been added to [conn]'s internal write queue, [Ok false] if [msg] has been dropped, or fails with a corresponding error otherwise. 
*) +val write_now : ('msg, 'meta) t -> 'msg -> bool tzresult -val write_sync: ('msg, 'meta) t -> 'msg -> unit tzresult Lwt.t (** [write_sync conn msg] returns when [msg] has been successfully sent to the remote end of [conn], or fails accordingly. *) +val write_sync : ('msg, 'meta) t -> 'msg -> unit tzresult Lwt.t (** {2 Input functions} *) -val is_readable: ('msg, 'meta) t -> bool (** [is_readable conn] is [true] iff [conn] internal read queue is not empty. *) +val is_readable : ('msg, 'meta) t -> bool -val wait_readable: ('msg, 'meta) t -> unit tzresult Lwt.t (** (Cancelable) [wait_readable conn] returns when [conn]'s internal read queue becomes readable (i.e. not empty). *) +val wait_readable : ('msg, 'meta) t -> unit tzresult Lwt.t -val read: ('msg, 'meta) t -> (int * 'msg) tzresult Lwt.t (** [read conn msg] returns when [msg] has successfully been popped from [conn]'s internal read queue or fails with a corresponding error. *) +val read : ('msg, 'meta) t -> (int * 'msg) tzresult Lwt.t -val read_now: ('msg, 'meta) t -> (int * 'msg) tzresult option (** [read_now conn msg] is [Some msg] if [conn]'s internal read queue is not empty, [None] if it is empty, or fails with a corresponding error otherwise. *) +val read_now : ('msg, 'meta) t -> (int * 'msg) tzresult option -val stat: ('msg, 'meta) t -> P2p_stat.t (** [stat conn] is a snapshot of current bandwidth usage for [conn]. *) +val stat : ('msg, 'meta) t -> P2p_stat.t -val close: ?wait:bool -> ('msg, 'meta) t -> unit Lwt.t +val close : ?wait:bool -> ('msg, 'meta) t -> unit Lwt.t (**/**) (** for testing only *) -val raw_write_sync: ('msg, 'meta) t -> MBytes.t -> unit tzresult Lwt.t +val raw_write_sync : ('msg, 'meta) t -> MBytes.t -> unit tzresult Lwt.t diff --git a/src/lib_p2p/p2p_welcome.ml b/src/lib_p2p/p2p_welcome.ml index 3ca4eecd712e643da3f834cfc316fe6903cfadaa..4a2ec8c43ad89796abc0a74ea7372b5b9f56a654 100644 --- a/src/lib_p2p/p2p_welcome.ml +++ b/src/lib_p2p/p2p_welcome.ml @@ -23,117 +23,133 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make (struct let name = "p2p.welcome" end) +include Internal_event.Legacy_logging.Make (struct + let name = "p2p.welcome" +end) type pool = Pool : ('msg, 'meta, 'meta_conn) P2p_pool.t -> pool type t = { - socket: Lwt_unix.file_descr ; - canceler: Lwt_canceler.t ; - pool: pool ; - mutable worker: unit Lwt.t ; + socket : Lwt_unix.file_descr; + canceler : Lwt_canceler.t; + pool : pool; + mutable worker : unit Lwt.t } let rec worker_loop st = - let Pool pool = st.pool in - Lwt_unix.yield () >>= fun () -> - protect ~canceler:st.canceler begin fun () -> - P2p_fd.accept st.socket >>= return - end >>= function + let (Pool pool) = st.pool in + Lwt_unix.yield () + >>= fun () -> + protect ~canceler:st.canceler (fun () -> P2p_fd.accept st.socket >>= return) + >>= function | Ok (fd, addr) -> let point = match addr with - | Lwt_unix.ADDR_UNIX _ -> assert false + | Lwt_unix.ADDR_UNIX _ -> + assert false | Lwt_unix.ADDR_INET (addr, port) -> - (Ipaddr_unix.V6.of_inet_addr_exn addr, port) in + (Ipaddr_unix.V6.of_inet_addr_exn addr, port) + in P2p_pool.accept pool fd point ; worker_loop st - (* Unix errors related to the failure to create one connection, No reason to abort just now, but we want to stress out that we have a problem preventing us from accepting new connections. 
*) - | Error (((Exn (Unix.Unix_error (( - EMFILE (* Too many open files by the process *) - | ENFILE (* Too many open files in the system *) - | ENETDOWN (* Network is down *) - ), _ , _)) - ) :: _) as err) -> - lwt_log_error "@[<v 2>Incoming connection failed with %a in the - Welcome worker. Resuming in 5s.@]" - pp_print_error err >>= fun () -> + | Error + ( Exn + (Unix.Unix_error + ( ( EMFILE (* Too many open files by the process *) + | ENFILE (* Too many open files in the system *) + | ENETDOWN (* Network is down *) ), + _, + _ )) + :: _ as err ) -> + lwt_log_error + "@[<v 2>Incoming connection failed with %a in the\n\ + \ Welcome worker. Resuming in 5s.@]" + pp_print_error + err + >>= fun () -> (* These are temporary system errors, giving some time for the system to recover *) - Lwt_unix.sleep 5. >>= fun () -> - worker_loop st - | Error (((Exn (Unix.Unix_error (( - EAGAIN (* Resource temporarily unavailable; try again *) - | EWOULDBLOCK (* Operation would block *) - | ENOPROTOOPT (* Protocol not available *) - | EOPNOTSUPP (* Operation not supported on socket *) - | ENETUNREACH (* Network is unreachable *) - | ECONNABORTED (* Software caused connection abort *) - | ECONNRESET (* Connection reset by peer *) - | ETIMEDOUT (* Connection timed out *) - | EHOSTDOWN (* Host is down *) - | EHOSTUNREACH (* No route to host *) - (* Ugly hack to catch EPROTO and ENONET, Protocol error, which are not + Lwt_unix.sleep 5. >>= fun () -> worker_loop st + | Error + ( Exn + (Unix.Unix_error + ( ( EAGAIN (* Resource temporarily unavailable; try again *) + | EWOULDBLOCK (* Operation would block *) + | ENOPROTOOPT (* Protocol not available *) + | EOPNOTSUPP (* Operation not supported on socket *) + | ENETUNREACH (* Network is unreachable *) + | ECONNABORTED (* Software caused connection abort *) + | ECONNRESET (* Connection reset by peer *) + | ETIMEDOUT (* Connection timed out *) + | EHOSTDOWN (* Host is down *) + | EHOSTUNREACH (* No route to host *) + (* Ugly hack to catch EPROTO and ENONET, Protocol error, which are not defined in the Unix module (which is 20 years late on the POSIX standard). A better solution is to use the package ocaml-unix-errno or redo the work *) - | EUNKNOWNERR (71|64) - (* On Linux EPROTO is 71, ENONET is 64 + | EUNKNOWNERR (71 | 64) + (* On Linux EPROTO is 71, ENONET is 64 On BSD systems, accept cannot raise EPROTO. 71 is EREMOTE for openBSD, NetBSD, Darwin, which is irrelevant here 64 is EHOSTDOWN for openBSD, NetBSD, Darwin, which is already caught *) - ), _ , _)) - ) :: _) as err) -> + ), + _, + _ )) + :: _ as err ) -> (* These are socket-specific errors, ignoring. 
*) - lwt_log_error "@[<v 2>Incoming connection failed with %a in the Welcome worker@]" - pp_print_error err >>= fun () -> - worker_loop st + lwt_log_error + "@[<v 2>Incoming connection failed with %a in the Welcome worker@]" + pp_print_error + err + >>= fun () -> worker_loop st | Error (Canceled :: _) -> Lwt.return_unit | Error err -> - lwt_log_error "@[<v 2>Unexpected error in the Welcome worker@ %a@]" - pp_print_error err >>= fun () -> - Lwt.return_unit + lwt_log_error + "@[<v 2>Unexpected error in the Welcome worker@ %a@]" + pp_print_error + err + >>= fun () -> Lwt.return_unit let create_listening_socket ~backlog ?(addr = Ipaddr.V6.unspecified) port = let main_socket = Lwt_unix.(socket PF_INET6 SOCK_STREAM 0) in Lwt_unix.(setsockopt main_socket SO_REUSEADDR true) ; - Lwt_unix.bind main_socket - Unix.(ADDR_INET (Ipaddr_unix.V6.to_inet_addr addr, port)) >>= fun () -> + Lwt_unix.bind + main_socket + Unix.(ADDR_INET (Ipaddr_unix.V6.to_inet_addr addr, port)) + >>= fun () -> Lwt_unix.listen main_socket backlog ; Lwt.return main_socket let create ?addr ~backlog pool port = - Lwt.catch begin fun () -> - create_listening_socket - ~backlog ?addr port >>= fun socket -> - let canceler = Lwt_canceler.create () in - Lwt_canceler.on_cancel canceler begin fun () -> - Lwt_utils_unix.safe_close socket - end ; - let st = { - socket ; canceler ; pool = Pool pool ; - worker = Lwt.return_unit ; - } in - Lwt.return st - end begin fun exn -> - lwt_log_error - "@[<v 2>Cannot accept incoming connections@ %a@]" - pp_exn exn >>= fun () -> - Lwt.fail exn - end + Lwt.catch + (fun () -> + create_listening_socket ~backlog ?addr port + >>= fun socket -> + let canceler = Lwt_canceler.create () in + Lwt_canceler.on_cancel canceler (fun () -> + Lwt_utils_unix.safe_close socket) ; + let st = + {socket; canceler; pool = Pool pool; worker = Lwt.return_unit} + in + Lwt.return st) + (fun exn -> + lwt_log_error + "@[<v 2>Cannot accept incoming connections@ %a@]" + pp_exn + exn + >>= fun () -> Lwt.fail exn) let activate st = st.worker <- - Lwt_utils.worker "welcome" + Lwt_utils.worker + "welcome" ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop st) ~cancel:(fun () -> Lwt_canceler.cancel st.canceler) -let shutdown st = - Lwt_canceler.cancel st.canceler >>= fun () -> - st.worker +let shutdown st = Lwt_canceler.cancel st.canceler >>= fun () -> st.worker diff --git a/src/lib_p2p/p2p_welcome.mli b/src/lib_p2p/p2p_welcome.mli index c1b81c18be85da93a9f634262f19df1f80bcc68b..865a4106a7cf1037ac14535d9d56a91ec498cf3b 100644 --- a/src/lib_p2p/p2p_welcome.mli +++ b/src/lib_p2p/p2p_welcome.mli @@ -28,18 +28,21 @@ Accept incoming connections and add them to the pool. *) -type t (** Type of a welcome worker. *) +type t -val create : - ?addr:P2p_addr.t -> backlog:int -> - ('msg, 'meta, 'meta_conn) P2p_pool.t -> P2p_addr.port -> t Lwt.t (** [create ?addr ~backlog pool port] returns a running welcome worker adding connections into [pool] listening on [addr:port]. [backlog] is passed to [Lwt_unix.listen]. *) +val create : + ?addr:P2p_addr.t -> + backlog:int -> + ('msg, 'meta, 'meta_conn) P2p_pool.t -> + P2p_addr.port -> + t Lwt.t -val activate : t -> unit (** [activate t] start the worker that will accept connections *) +val activate : t -> unit -val shutdown: t -> unit Lwt.t (** [shutdown t] returns when [t] has completed shutdown. 
*) +val shutdown : t -> unit Lwt.t diff --git a/src/lib_p2p/test/process.ml b/src/lib_p2p/test/process.ml index 5b5222da5cd42093ee2c7ac82c8dd577006b9537..7462c249539a1a64b65bd762b9ac055a82f737ca 100644 --- a/src/lib_p2p/test/process.ml +++ b/src/lib_p2p/test/process.ml @@ -28,35 +28,42 @@ open Error_monad let () = Lwt_unix.set_default_async_method Async_none let section = Lwt_log.Section.make "process" + let log_f ~level format = if level < Lwt_log.Section.level section then Format.ikfprintf (fun _ -> Lwt.return_unit) Format.std_formatter format - else - Format.kasprintf (fun msg -> Lwt_log.log ~section ~level msg) format + else Format.kasprintf (fun msg -> Lwt_log.log ~section ~level msg) format + let lwt_debug fmt = log_f ~level:Lwt_log.Debug fmt + let lwt_log_notice fmt = log_f ~level:Lwt_log.Notice fmt + let lwt_log_info fmt = log_f ~level:Lwt_log.Info fmt + let lwt_log_error fmt = log_f ~level:Lwt_log.Error fmt exception Exited of int + exception Signaled of int + exception Stopped of int let handle_error f = - Lwt.catch - f - (fun exn -> Lwt.return_error [Exn exn]) >>= function - | Ok () -> Lwt.return_unit + Lwt.catch f (fun exn -> Lwt.return_error [Exn exn]) + >>= function + | Ok () -> + Lwt.return_unit | Error err -> - lwt_debug "%a" pp_print_error err >>= fun () -> - exit 1 + lwt_debug "%a" pp_print_error err >>= fun () -> exit 1 module Channel = struct - type ('a, 'b) t = (Lwt_io.input_channel * Lwt_io.output_channel) + type ('a, 'b) t = Lwt_io.input_channel * Lwt_io.output_channel + let push (_, outch) v = Lwt.catch (fun () -> Lwt_io.write_value outch v >>= Lwt.return_ok) (fun exn -> Lwt.return_error [Exn exn]) + let pop (inch, _) = Lwt.catch (fun () -> Lwt_io.read_value inch >>= Lwt.return_ok) @@ -66,121 +73,135 @@ end let wait pid = Lwt.catch (fun () -> - Lwt_unix.waitpid [] pid >>= function - | (_,Lwt_unix.WEXITED 0) -> - Lwt.return_ok () - | (_,Lwt_unix.WEXITED n) -> - Lwt.return_error [Exn (Exited n)] - | (_,Lwt_unix.WSIGNALED n) -> - Lwt.return_error [Exn (Signaled n)] - | (_,Lwt_unix.WSTOPPED n) -> - Lwt.return_error [Exn (Stopped n)]) + Lwt_unix.waitpid [] pid + >>= function + | (_, Lwt_unix.WEXITED 0) -> + Lwt.return_ok () + | (_, Lwt_unix.WEXITED n) -> + Lwt.return_error [Exn (Exited n)] + | (_, Lwt_unix.WSIGNALED n) -> + Lwt.return_error [Exn (Signaled n)] + | (_, Lwt_unix.WSTOPPED n) -> + Lwt.return_error [Exn (Stopped n)]) (function | Lwt.Canceled -> - Unix.kill pid Sys.sigkill ; - Lwt.return_ok () + Unix.kill pid Sys.sigkill ; Lwt.return_ok () | exn -> Lwt.return_error [Exn exn]) type ('a, 'b) t = { - termination: unit tzresult Lwt.t ; - channel: ('b, 'a) Channel.t ; + termination : unit tzresult Lwt.t; + channel : ('b, 'a) Channel.t } let template = "$(date) - $(section): $(message)" let detach ?(prefix = "") f = - Lwt_io.flush_all () >>= fun () -> - let main_in, child_out = Lwt_io.pipe () in - let child_in, main_out = Lwt_io.pipe () in + Lwt_io.flush_all () + >>= fun () -> + let (main_in, child_out) = Lwt_io.pipe () in + let (child_in, main_out) = Lwt_io.pipe () in match Lwt_unix.fork () with | 0 -> Lwt_log.default := Lwt_log.channel ~template ~close_mode:`Keep ~channel:Lwt_io.stderr () ; Random.self_init () ; let template = Format.asprintf "%s$(message)" prefix in - Lwt_main.run begin - Lwt_io.close main_in >>= fun () -> - Lwt_io.close main_out >>= fun () -> + Lwt_main.run + ( Lwt_io.close main_in + >>= fun () -> + Lwt_io.close main_out + >>= fun () -> Lwt_log.default := Lwt_log.channel ~template ~close_mode:`Keep ~channel:Lwt_io.stderr () ; - lwt_log_notice 
"PID: %d" (Unix.getpid ()) >>= fun () -> - handle_error (fun () -> f (child_in, child_out)) - end ; + lwt_log_notice "PID: %d" (Unix.getpid ()) + >>= fun () -> handle_error (fun () -> f (child_in, child_out)) ) ; exit 0 | pid -> let termination = wait pid in - Lwt_io.close child_in >>= fun () -> - Lwt_io.close child_out >>= fun () -> - Lwt.return ({ termination ; channel = (main_in, main_out) }) + Lwt_io.close child_in + >>= fun () -> + Lwt_io.close child_out + >>= fun () -> Lwt.return {termination; channel = (main_in, main_out)} let signal_name = let names = - [ Sys.sigabrt, "ABRT" ; - Sys.sigalrm, "ALRM" ; - Sys.sigfpe, "FPE" ; - Sys.sighup, "HUP" ; - Sys.sigill, "ILL" ; - Sys.sigint, "INT" ; - Sys.sigkill, "KILL" ; - Sys.sigpipe, "PIPE" ; - Sys.sigquit, "QUIT" ; - Sys.sigsegv, "SEGV" ; - Sys.sigterm, "TERM" ; - Sys.sigusr1, "USR1" ; - Sys.sigusr2, "USR2" ; - Sys.sigchld, "CHLD" ; - Sys.sigcont, "CONT" ; - Sys.sigstop, "STOP" ; - Sys.sigtstp, "TSTP" ; - Sys.sigttin, "TTIN" ; - Sys.sigttou, "TTOU" ; - Sys.sigvtalrm, "VTALRM" ; - Sys.sigprof, "PROF" ; - Sys.sigbus, "BUS" ; - Sys.sigpoll, "POLL" ; - Sys.sigsys, "SYS" ; - Sys.sigtrap, "TRAP" ; - Sys.sigurg, "URG" ; - Sys.sigxcpu, "XCPU" ; - Sys.sigxfsz, "XFSZ" ] in + [ (Sys.sigabrt, "ABRT"); + (Sys.sigalrm, "ALRM"); + (Sys.sigfpe, "FPE"); + (Sys.sighup, "HUP"); + (Sys.sigill, "ILL"); + (Sys.sigint, "INT"); + (Sys.sigkill, "KILL"); + (Sys.sigpipe, "PIPE"); + (Sys.sigquit, "QUIT"); + (Sys.sigsegv, "SEGV"); + (Sys.sigterm, "TERM"); + (Sys.sigusr1, "USR1"); + (Sys.sigusr2, "USR2"); + (Sys.sigchld, "CHLD"); + (Sys.sigcont, "CONT"); + (Sys.sigstop, "STOP"); + (Sys.sigtstp, "TSTP"); + (Sys.sigttin, "TTIN"); + (Sys.sigttou, "TTOU"); + (Sys.sigvtalrm, "VTALRM"); + (Sys.sigprof, "PROF"); + (Sys.sigbus, "BUS"); + (Sys.sigpoll, "POLL"); + (Sys.sigsys, "SYS"); + (Sys.sigtrap, "TRAP"); + (Sys.sigurg, "URG"); + (Sys.sigxcpu, "XCPU"); + (Sys.sigxfsz, "XFSZ") ] + in fun n -> List.assoc n names let wait_all processes = let rec loop processes = match processes with - | [] -> Lwt.return_none - | processes -> - Lwt.nchoose_split processes >>= function + | [] -> + Lwt.return_none + | processes -> ( + Lwt.nchoose_split processes + >>= function | (finished, remaining) -> let rec handle = function - | [] -> loop remaining - | Ok () :: finished -> handle finished + | [] -> + loop remaining + | Ok () :: finished -> + handle finished | Error err :: _ -> - Lwt.return_some (err, remaining) in - handle finished in - loop (List.map (fun p -> p.termination) processes) >>= function + Lwt.return_some (err, remaining) + in + handle finished ) + in + loop (List.map (fun p -> p.termination) processes) + >>= function | None -> - lwt_log_info "All done!" >>= fun () -> - Lwt.return_ok () + lwt_log_info "All done!" >>= fun () -> Lwt.return_ok () | Some ([Exn (Exited n)], remaining) -> - lwt_log_error "Early error!" >>= fun () -> + lwt_log_error "Early error!" + >>= fun () -> List.iter Lwt.cancel remaining ; - join remaining >>= fun _ -> - failwith "A process finished with error %d !" n + join remaining + >>= fun _ -> failwith "A process finished with error %d !" n | Some ([Exn (Signaled n)], remaining) -> - lwt_log_error "Early error!" >>= fun () -> + lwt_log_error "Early error!" + >>= fun () -> List.iter Lwt.cancel remaining ; - join remaining >>= fun _ -> - failwith "A process was killed by a SIG%s !" (signal_name n) + join remaining + >>= fun _ -> failwith "A process was killed by a SIG%s !" (signal_name n) | Some ([Exn (Stopped n)], remaining) -> - lwt_log_error "Early error!" 
>>= fun () -> + lwt_log_error "Early error!" + >>= fun () -> List.iter Lwt.cancel remaining ; - join remaining >>= fun _ -> + join remaining + >>= fun _ -> failwith "A process was stopped by a SIG%s !" (signal_name n) | Some (err, remaining) -> - lwt_log_error "@[<v 2>Unexpected error!@,%a@]" - pp_print_error err >>= fun () -> + lwt_log_error "@[<v 2>Unexpected error!@,%a@]" pp_print_error err + >>= fun () -> List.iter Lwt.cancel remaining ; - join remaining >>= fun _ -> - failwith "A process finished with an unexpected error !" + join remaining + >>= fun _ -> failwith "A process finished with an unexpected error !" diff --git a/src/lib_p2p/test/process.mli b/src/lib_p2p/test/process.mli index f8ec3a2113559f36719e3f650ece881725ee7b9d..89756e816670a4344edf1b01660e1acb4a734674 100644 --- a/src/lib_p2p/test/process.mli +++ b/src/lib_p2p/test/process.mli @@ -29,18 +29,20 @@ exception Exited of int module Channel : sig type ('a, 'b) t - val push: ('a, 'b) t -> 'a -> unit tzresult Lwt.t - val pop: ('a, 'b) t -> 'b tzresult Lwt.t + + val push : ('a, 'b) t -> 'a -> unit tzresult Lwt.t + + val pop : ('a, 'b) t -> 'b tzresult Lwt.t end type ('a, 'b) t = { - termination: unit tzresult Lwt.t ; - channel: ('b, 'a) Channel.t ; + termination : unit tzresult Lwt.t; + channel : ('b, 'a) Channel.t } -val detach: +val detach : ?prefix:string -> (('a, 'b) Channel.t -> unit tzresult Lwt.t) -> ('a, 'b) t Lwt.t -val wait_all: ('a, 'b) t list -> unit tzresult Lwt.t +val wait_all : ('a, 'b) t list -> unit tzresult Lwt.t diff --git a/src/lib_p2p/test/test_p2p_banned_peers.ml b/src/lib_p2p/test/test_p2p_banned_peers.ml index 0ef1a2b92bdcca4489414018f95d60d0a1d3a9f5..c4f0a0b0a9a5893cf1b0d5fe21667e474baee614 100644 --- a/src/lib_p2p/test/test_p2p_banned_peers.ml +++ b/src/lib_p2p/test/test_p2p_banned_peers.ml @@ -23,63 +23,69 @@ (* *) (*****************************************************************************) -include - Internal_event.Legacy_logging.Make (struct - let name = "test-p2p-banned_peers" - end) +include Internal_event.Legacy_logging.Make (struct + let name = "test-p2p-banned_peers" +end) -let assert_equal_bool ~msg a b = - if a <> b then Alcotest.fail msg +let assert_equal_bool ~msg a b = if a <> b then Alcotest.fail msg -let a = fun (peer,addr) -> +let a (peer, addr) = (P2p_peer.Id.hash_string [peer], Ipaddr.V6.of_string_exn addr) -let foo = a ("foo","ffff::3") -let bar = a ("bar","ffff:00::ff") -let baz = a ("baz","a::2") -let peers = [foo;bar;baz] +let foo = a ("foo", "ffff::3") + +let bar = a ("bar", "ffff:00::ff") + +let baz = a ("baz", "a::2") + +let peers = [foo; bar; baz] let test_empty _ = let empty = P2p_acl.create 10 in - List.iter (fun (_peer,addr) -> - assert_equal_bool ~msg:__LOC__ false (P2p_acl.banned_addr empty addr) - ) peers ; + List.iter + (fun (_peer, addr) -> + assert_equal_bool ~msg:__LOC__ false (P2p_acl.banned_addr empty addr)) + peers ; Lwt.return_unit -;; let test_ban _ = let set = P2p_acl.create 10 in - List.iter (fun (_,addr) -> P2p_acl.IPGreylist.add set addr Ptime.epoch) peers; - List.iter (fun (_,addr) -> - assert_equal_bool ~msg:__LOC__ true (P2p_acl.banned_addr set addr) - ) peers ; + List.iter + (fun (_, addr) -> P2p_acl.IPGreylist.add set addr Ptime.epoch) + peers ; + List.iter + (fun (_, addr) -> + assert_equal_bool ~msg:__LOC__ true (P2p_acl.banned_addr set addr)) + peers ; Lwt.return_unit -;; let test_gc _ = let set = P2p_acl.create 10 in - List.iter (fun (_,addr) -> P2p_acl.IPGreylist.add set addr Ptime.epoch) peers; - List.iter (fun (_peer,addr) -> - 
assert_equal_bool ~msg:__LOC__ true (P2p_acl.banned_addr set addr) - ) peers ; + List.iter + (fun (_, addr) -> P2p_acl.IPGreylist.add set addr Ptime.epoch) + peers ; + List.iter + (fun (_peer, addr) -> + assert_equal_bool ~msg:__LOC__ true (P2p_acl.banned_addr set addr)) + peers ; (* remove all peers *) P2p_acl.IPGreylist.remove_old set ~older_than:Ptime.max ; - List.iter (fun (_peer,addr) -> - assert_equal_bool ~msg:__LOC__ false (P2p_acl.banned_addr set addr) - ) peers ; + List.iter + (fun (_peer, addr) -> + assert_equal_bool ~msg:__LOC__ false (P2p_acl.banned_addr set addr)) + peers ; Lwt.return_unit let () = - let init_logs = lazy (Internal_event_unix.init ()) in + let init_logs = lazy (Internal_event_unix.init ()) in let wrap (n, f) = - Alcotest_lwt.test_case n `Quick begin fun _ () -> - Lazy.force init_logs >>= fun () -> - f () end in - Alcotest.run ~argv:[|""|] "tezos-p2p" [ - "p2p.peerset", - List.map wrap [ - "empty", test_empty ; - "ban", test_ban; - "gc", test_gc; - ] - ] + Alcotest_lwt.test_case n `Quick (fun _ () -> + Lazy.force init_logs >>= fun () -> f ()) + in + Alcotest.run + ~argv:[|""|] + "tezos-p2p" + [ ( "p2p.peerset", + List.map + wrap + [("empty", test_empty); ("ban", test_ban); ("gc", test_gc)] ) ] diff --git a/src/lib_p2p/test/test_p2p_io_scheduler.ml b/src/lib_p2p/test/test_p2p_io_scheduler.ml index d1273006558ed55156d09e9460a4ab62a99119e5..3ee648bb2b5a74b70631ffecf74f49835d511f36 100644 --- a/src/lib_p2p/test/test_p2p_io_scheduler.ml +++ b/src/lib_p2p/test/test_p2p_io_scheduler.ml @@ -24,238 +24,249 @@ (*****************************************************************************) include Internal_event.Legacy_logging.Make (struct - let name = "test-p2p-io-scheduler" - end) + let name = "test-p2p-io-scheduler" +end) exception Error of error list let rec listen ?port addr = let tentative_port = - match port with - | None -> 1024 + Random.int 8192 - | Some port -> port in + match port with None -> 1024 + Random.int 8192 | Some port -> port + in let uaddr = Ipaddr_unix.V6.to_inet_addr addr in let main_socket = Lwt_unix.(socket PF_INET6 SOCK_STREAM 0) in Lwt_unix.(setsockopt main_socket SO_REUSEADDR true) ; - Lwt.catch begin fun () -> - Lwt_unix.bind main_socket - (ADDR_INET (uaddr, tentative_port)) >>= fun () -> - Lwt_unix.listen main_socket 50 ; - Lwt.return (main_socket, tentative_port) - end begin function - | Unix.Unix_error - ((Unix.EADDRINUSE | Unix.EADDRNOTAVAIL), _, _) when port = None -> - listen addr - | exn -> Lwt.fail exn - end + Lwt.catch + (fun () -> + Lwt_unix.bind main_socket (ADDR_INET (uaddr, tentative_port)) + >>= fun () -> + Lwt_unix.listen main_socket 50 ; + Lwt.return (main_socket, tentative_port)) + (function + | Unix.Unix_error ((Unix.EADDRINUSE | Unix.EADDRNOTAVAIL), _, _) + when port = None -> + listen addr + | exn -> + Lwt.fail exn) let accept main_socket = - P2p_fd.accept main_socket >>= fun (fd, _sockaddr) -> - return fd + P2p_fd.accept main_socket >>= fun (fd, _sockaddr) -> return fd let rec accept_n main_socket n = - if n <= 0 then - return_nil + if n <= 0 then return_nil else - accept_n main_socket (n-1) >>=? fun acc -> - accept main_socket >>=? fun conn -> - return (conn :: acc) + accept_n main_socket (n - 1) + >>=? fun acc -> accept main_socket >>=? 
fun conn -> return (conn :: acc) let connect addr port = let fd = P2p_fd.socket PF_INET6 SOCK_STREAM 0 in - let uaddr = - Lwt_unix.ADDR_INET (Ipaddr_unix.V6.to_inet_addr addr, port) in - P2p_fd.connect fd uaddr >>= fun () -> - return fd + let uaddr = Lwt_unix.ADDR_INET (Ipaddr_unix.V6.to_inet_addr addr, port) in + P2p_fd.connect fd uaddr >>= fun () -> return fd let simple_msgs = - [| - MBytes.create (1 lsl 6) ; - MBytes.create (1 lsl 7) ; - MBytes.create (1 lsl 8) ; - MBytes.create (1 lsl 9) ; - MBytes.create (1 lsl 10) ; - MBytes.create (1 lsl 11) ; - MBytes.create (1 lsl 12) ; - MBytes.create (1 lsl 13) ; - MBytes.create (1 lsl 14) ; - MBytes.create (1 lsl 15) ; - MBytes.create (1 lsl 16) ; - |] + [| MBytes.create (1 lsl 6); + MBytes.create (1 lsl 7); + MBytes.create (1 lsl 8); + MBytes.create (1 lsl 9); + MBytes.create (1 lsl 10); + MBytes.create (1 lsl 11); + MBytes.create (1 lsl 12); + MBytes.create (1 lsl 13); + MBytes.create (1 lsl 14); + MBytes.create (1 lsl 15); + MBytes.create (1 lsl 16) |] + let nb_simple_msgs = Array.length simple_msgs let receive conn = let buf = MBytes.create (1 lsl 16) in let rec loop () = - P2p_io_scheduler.read conn buf >>= function - | Ok _ -> loop () + P2p_io_scheduler.read conn buf + >>= function + | Ok _ -> + loop () | Error [P2p_errors.Connection_closed] -> Lwt.return_unit - | Error err -> Lwt.fail (Error err) + | Error err -> + Lwt.fail (Error err) in loop () -let server - ?(display_client_stat = true) - ?max_download_speed ?read_queue_size ~read_buffer_size - main_socket n = +let server ?(display_client_stat = true) ?max_download_speed ?read_queue_size + ~read_buffer_size main_socket n = let sched = P2p_io_scheduler.create ?max_download_speed ?read_queue_size ~read_buffer_size - () in - Moving_average.on_update begin fun () -> - log_notice "Stat: %a" P2p_stat.pp (P2p_io_scheduler.global_stat sched) ; - if display_client_stat then - P2p_io_scheduler.iter_connection sched - (fun conn -> - log_notice - " client(%d) %a" - (P2p_io_scheduler.id conn) - P2p_stat.pp (P2p_io_scheduler.stat conn)) ; - end ; + () + in + Moving_average.on_update (fun () -> + log_notice "Stat: %a" P2p_stat.pp (P2p_io_scheduler.global_stat sched) ; + if display_client_stat then + P2p_io_scheduler.iter_connection sched (fun conn -> + log_notice + " client(%d) %a" + (P2p_io_scheduler.id conn) + P2p_stat.pp + (P2p_io_scheduler.stat conn))) ; (* Accept and read message until the connection is closed. *) - accept_n main_socket n >>=? fun conns -> + accept_n main_socket n + >>=? fun conns -> let conns = List.map (P2p_io_scheduler.register sched) conns in - Lwt.join (List.map receive conns) >>= fun () -> - iter_p P2p_io_scheduler.close conns >>=? fun () -> + Lwt.join (List.map receive conns) + >>= fun () -> + iter_p P2p_io_scheduler.close conns + >>=? fun () -> log_notice "OK %a" P2p_stat.pp (P2p_io_scheduler.global_stat sched) ; return_unit let max_size ?max_upload_speed () = match max_upload_speed with - | None -> nb_simple_msgs + | None -> + nb_simple_msgs | Some max_upload_speed -> let rec loop n = if n <= 1 then 1 - else if MBytes.length simple_msgs.(n-1) <= max_upload_speed then n + else if MBytes.length simple_msgs.(n - 1) <= max_upload_speed then n else loop (n - 1) in loop nb_simple_msgs let rec send conn nb_simple_msgs = - Lwt_main.yield () >>= fun () -> + Lwt_main.yield () + >>= fun () -> let msg = simple_msgs.(Random.int nb_simple_msgs) in - P2p_io_scheduler.write conn msg >>=? fun () -> - send conn nb_simple_msgs + P2p_io_scheduler.write conn msg >>=? 
fun () -> send conn nb_simple_msgs let client ?max_upload_speed ?write_queue_size addr port time _n = let sched = P2p_io_scheduler.create - ?max_upload_speed ?write_queue_size ~read_buffer_size:(1 lsl 12) () in - connect addr port >>=? fun conn -> + ?max_upload_speed + ?write_queue_size + ~read_buffer_size:(1 lsl 12) + () + in + connect addr port + >>=? fun conn -> let conn = P2p_io_scheduler.register sched conn in let nb_simple_msgs = max_size ?max_upload_speed () in - Lwt.pick [ send conn nb_simple_msgs ; - Lwt_unix.sleep time >>= return ] >>=? fun () -> - P2p_io_scheduler.close conn >>=? fun () -> + Lwt.pick [send conn nb_simple_msgs; Lwt_unix.sleep time >>= return] + >>=? fun () -> + P2p_io_scheduler.close conn + >>=? fun () -> let stat = P2p_io_scheduler.stat conn in - lwt_log_notice "Client OK %a" P2p_stat.pp stat >>= fun () -> - return_unit - -let run - ?display_client_stat - ?max_download_speed ?max_upload_speed - ~read_buffer_size ?read_queue_size ?write_queue_size - addr port time n = - Internal_event_unix.init () >>= fun () -> - listen ?port addr >>= fun (main_socket, port) -> - Process.detach ~prefix:"server: " begin fun _ -> - server - ?display_client_stat ?max_download_speed - ~read_buffer_size ?read_queue_size - main_socket n - end >>= fun server_node -> + lwt_log_notice "Client OK %a" P2p_stat.pp stat >>= fun () -> return_unit + +let run ?display_client_stat ?max_download_speed ?max_upload_speed + ~read_buffer_size ?read_queue_size ?write_queue_size addr port time n = + Internal_event_unix.init () + >>= fun () -> + listen ?port addr + >>= fun (main_socket, port) -> + Process.detach ~prefix:"server: " (fun _ -> + server + ?display_client_stat + ?max_download_speed + ~read_buffer_size + ?read_queue_size + main_socket + n) + >>= fun server_node -> let client n = let prefix = Printf.sprintf "client(%d): " n in - Process.detach ~prefix begin fun _ -> - Lwt_utils_unix.safe_close main_socket >>= fun () -> - client ?max_upload_speed ?write_queue_size addr port time n - end in - Lwt_list.map_p client (1 -- n) >>= fun client_nodes -> - Process.wait_all (server_node :: client_nodes) + Process.detach ~prefix (fun _ -> + Lwt_utils_unix.safe_close main_socket + >>= fun () -> + client ?max_upload_speed ?write_queue_size addr port time n) + in + Lwt_list.map_p client (1 -- n) + >>= fun client_nodes -> Process.wait_all (server_node :: client_nodes) let () = Random.self_init () let addr = ref Ipaddr.V6.localhost + let port = ref None let max_download_speed = ref None + let max_upload_speed = ref None let read_buffer_size = ref (1 lsl 14) + let read_queue_size = ref (Some (1 lsl 14)) + let write_queue_size = ref (Some (1 lsl 14)) let delay = ref 60. 
+ let clients = ref 8 let display_client_stat = ref None let spec = - Arg.[ - - "--port", Int (fun p -> port := Some p), " Listening port"; - - "--addr", String (fun p -> addr := Ipaddr.V6.of_string_exn p), - " Listening addr"; - - "--max-download-speed", Int (fun i -> max_download_speed := Some i), - " Max download speed in B/s (default: unbounded)"; - - "--max-upload-speed", Int (fun i -> max_upload_speed := Some i), - " Max upload speed in B/s (default: unbounded)"; - - "--read-buffer-size", Set_int read_buffer_size, - " Size of the read buffers"; - - "--read-queue-size", Int (fun i -> - read_queue_size := if i <= 0 then None else Some i), - " Size of the read queue (0=unbounded)"; - - "--write-queue-size", Int (fun i -> - write_queue_size := if i <= 0 then None else Some i), - " Size of the write queue (0=unbounded)"; - - "--delay", Set_float delay, " Client execution time."; - "--clients", Set_int clients, " Number of concurrent clients."; - - "--hide-clients-stat", Unit (fun () -> display_client_stat := Some false), - " Hide the client bandwidth statistic." ; - - "--display_clients_stat", Unit (fun () -> display_client_stat := Some true), - " Display the client bandwidth statistic." ; - - ] + Arg. + [ ("--port", Int (fun p -> port := Some p), " Listening port"); + ( "--addr", + String (fun p -> addr := Ipaddr.V6.of_string_exn p), + " Listening addr" ); + ( "--max-download-speed", + Int (fun i -> max_download_speed := Some i), + " Max download speed in B/s (default: unbounded)" ); + ( "--max-upload-speed", + Int (fun i -> max_upload_speed := Some i), + " Max upload speed in B/s (default: unbounded)" ); + ( "--read-buffer-size", + Set_int read_buffer_size, + " Size of the read buffers" ); + ( "--read-queue-size", + Int (fun i -> read_queue_size := if i <= 0 then None else Some i), + " Size of the read queue (0=unbounded)" ); + ( "--write-queue-size", + Int (fun i -> write_queue_size := if i <= 0 then None else Some i), + " Size of the write queue (0=unbounded)" ); + ("--delay", Set_float delay, " Client execution time."); + ("--clients", Set_int clients, " Number of concurrent clients."); + ( "--hide-clients-stat", + Unit (fun () -> display_client_stat := Some false), + " Hide the client bandwidth statistic." ); + ( "--display_clients_stat", + Unit (fun () -> display_client_stat := Some true), + " Display the client bandwidth statistic." 
) ] let () = let anon_fun _num_peers = raise (Arg.Bad "No anonymous argument.") in let usage_msg = "Usage: %s <num_peers>.\nArguments are:" in Arg.parse spec anon_fun usage_msg -let init_logs = lazy (Internal_event_unix.init ()) +let init_logs = lazy (Internal_event_unix.init ()) let wrap n f = - Alcotest_lwt.test_case n `Quick begin fun _ () -> - Lazy.force init_logs >>= fun () -> - f () >>= function - | Ok () -> Lwt.return_unit - | Error error -> - Format.kasprintf Pervasives.failwith "%a" pp_print_error error - end + Alcotest_lwt.test_case n `Quick (fun _ () -> + Lazy.force init_logs + >>= fun () -> + f () + >>= function + | Ok () -> + Lwt.return_unit + | Error error -> + Format.kasprintf Pervasives.failwith "%a" pp_print_error error) let () = - Alcotest.run ~argv:[|""|] "tezos-p2p" [ - "p2p.io-scheduler", [ - wrap "trivial-quota" (fun () -> - run - ?display_client_stat:!display_client_stat - ?max_download_speed:!max_download_speed - ?max_upload_speed:!max_upload_speed - ~read_buffer_size:!read_buffer_size - ?read_queue_size:!read_queue_size - ?write_queue_size:!write_queue_size - !addr !port !delay !clients) - ] - ] + Alcotest.run + ~argv:[|""|] + "tezos-p2p" + [ ( "p2p.io-scheduler", + [ wrap "trivial-quota" (fun () -> + run + ?display_client_stat:!display_client_stat + ?max_download_speed:!max_download_speed + ?max_upload_speed:!max_upload_speed + ~read_buffer_size:!read_buffer_size + ?read_queue_size:!read_queue_size + ?write_queue_size:!write_queue_size + !addr + !port + !delay + !clients) ] ) ] diff --git a/src/lib_p2p/test/test_p2p_ipv6set.ml b/src/lib_p2p/test/test_p2p_ipv6set.ml index 436a64d3da6c55dfc54fc25fa94490521966f191..bc3fa037549d07e361d15743a9df4187b1a2a685 100644 --- a/src/lib_p2p/test/test_p2p_ipv6set.ml +++ b/src/lib_p2p/test/test_p2p_ipv6set.ml @@ -23,135 +23,151 @@ (* *) (*****************************************************************************) -include - Internal_event.Legacy_logging.Make - (struct let name = "test-p2p-banned_ip" end) - -let assert_equal ?(eq = (=)) ?prn ~msg a b = - let msg = match prn with - | None -> msg +include Internal_event.Legacy_logging.Make (struct + let name = "test-p2p-banned_ip" +end) + +let assert_equal ?(eq = ( = )) ?prn ~msg a b = + let msg = + match prn with + | None -> + msg | Some prn -> - Format.asprintf "@[<v 2>%s@,n(%a)@,<>@,(%a)@]" msg prn a prn b in + Format.asprintf "@[<v 2>%s@,n(%a)@,<>@,(%a)@]" msg prn a prn b + in if not (eq a b) then Alcotest.fail msg let assert_equal_bool = assert_equal let a = Ipaddr.V6.of_string_exn + let p = Ipaddr.V6.Prefix.of_string_exn let timenow = Systime_os.now () let of_list l = - List.fold_left (fun acc k -> - P2p_acl.IpSet.add_prefix k timenow acc - ) P2p_acl.IpSet.empty l + List.fold_left + (fun acc k -> P2p_acl.IpSet.add_prefix k timenow acc) + P2p_acl.IpSet.empty + l let test_empty _ = - let addrs = List.map a [ "::" ; "ffff::" ; "a::2" ; ] in - List.iter (fun addr -> - assert_equal_bool ~msg:__LOC__ false (P2p_acl.IpSet.mem addr P2p_acl.IpSet.empty) - ) addrs + let addrs = List.map a ["::"; "ffff::"; "a::2"] in + List.iter + (fun addr -> + assert_equal_bool + ~msg:__LOC__ + false + (P2p_acl.IpSet.mem addr P2p_acl.IpSet.empty)) + addrs let test_inclusion _ = - let set = P2p_acl.IpSet.add_prefix (p "ffff::/16") timenow P2p_acl.IpSet.empty in - let included = List.map a [ "ffff::3" ; "ffff:ffff::" ; "ffff:00::ff" ; ] in - let not_included = List.map a [ "fffe::3" ; "ffee:ffff::" ; "::" ; ] in - List.iter (fun addr -> - assert_equal_bool ~msg:__LOC__ true (P2p_acl.IpSet.mem addr set) 
- ) included ; - List.iter (fun addr -> - assert_equal_bool ~msg:__LOC__ false (P2p_acl.IpSet.mem addr set) - ) not_included; - - let set = P2p_acl.IpSet.add_prefix (p "f000::/4") timenow P2p_acl.IpSet.empty in + let set = + P2p_acl.IpSet.add_prefix (p "ffff::/16") timenow P2p_acl.IpSet.empty + in + let included = List.map a ["ffff::3"; "ffff:ffff::"; "ffff:00::ff"] in + let not_included = List.map a ["fffe::3"; "ffee:ffff::"; "::"] in + List.iter + (fun addr -> + assert_equal_bool ~msg:__LOC__ true (P2p_acl.IpSet.mem addr set)) + included ; + List.iter + (fun addr -> + assert_equal_bool ~msg:__LOC__ false (P2p_acl.IpSet.mem addr set)) + not_included ; + let set = + P2p_acl.IpSet.add_prefix (p "f000::/4") timenow P2p_acl.IpSet.empty + in assert_equal ~msg:__LOC__ false (P2p_acl.IpSet.mem (a "e000::") set) ; - (* Add one IP *) - let set = P2p_acl.IpSet.add_prefix (p "::/128") timenow P2p_acl.IpSet.empty in + let set = + P2p_acl.IpSet.add_prefix (p "::/128") timenow P2p_acl.IpSet.empty + in assert_equal ~msg:__LOC__ false (P2p_acl.IpSet.mem (a "1::") set) ; - - let set = P2p_acl.IpSet.add_prefix (p "ffff:eeee::/32") timenow P2p_acl.IpSet.empty in + let set = + P2p_acl.IpSet.add_prefix (p "ffff:eeee::/32") timenow P2p_acl.IpSet.empty + in assert_equal ~msg:__LOC__ false (P2p_acl.IpSet.mem (a "eeee:ffff::1") set) ; assert_equal ~msg:__LOC__ true (P2p_acl.IpSet.mem (a "ffff:eeee::1") set) ; - let set = P2p_acl.IpSet.add_prefix (p "::/17") timenow P2p_acl.IpSet.empty in assert_equal ~msg:__LOC__ true (P2p_acl.IpSet.mem (a "0000:0000::") set) ; assert_equal ~msg:__LOC__ true (P2p_acl.IpSet.mem (a "0000:7000::") set) ; assert_equal ~msg:__LOC__ false (P2p_acl.IpSet.mem (a "0000:8000::1") set) ; - - let setlist = [p "e000::/4" ; p "a000::/4" ; p "ffff::/16"] in + let setlist = [p "e000::/4"; p "a000::/4"; p "ffff::/16"] in let set = of_list setlist in assert_equal ~msg:__LOC__ true (P2p_acl.IpSet.mem (a "ffff::1") set) ; assert_equal ~msg:__LOC__ true (P2p_acl.IpSet.mem (a "a111:8000::1") set) ; - - let set = of_list [p "e000::/4" ; p "a000::/4" ; - p "1234:5678::1/128"; p "ffff::/16"] in + let set = + of_list [p "e000::/4"; p "a000::/4"; p "1234:5678::1/128"; p "ffff::/16"] + in assert_equal ~msg:__LOC__ true (P2p_acl.IpSet.mem (a "1234:5678::1") set) ; assert_equal ~msg:__LOC__ true (P2p_acl.IpSet.mem (a "a111:8000::1") set) ; assert_equal ~msg:__LOC__ false (P2p_acl.IpSet.mem (a "b111:8000::1") set) ; assert_equal ~msg:__LOC__ false (P2p_acl.IpSet.mem (a "1234:5678::100") set) - let test_contiguous _ = - let set = of_list [p "::/1" ; p "8000::/1"] in - List.iter (fun addr -> - assert_equal ~msg:__LOC__ true (P2p_acl.IpSet.mem addr set) - ) [a "00::" ; a "01::" ; a "ff::" ] + let set = of_list [p "::/1"; p "8000::/1"] in + List.iter + (fun addr -> assert_equal ~msg:__LOC__ true (P2p_acl.IpSet.mem addr set)) + [a "00::"; a "01::"; a "ff::"] -module PSet = Set.Make(Ipaddr.V6.Prefix) +module PSet = Set.Make (Ipaddr.V6.Prefix) let test_fold _ = - let addr_list = [p "::/1" ; p "8000::/1" ; p "ffff:ffff::/32" ; ] in + let addr_list = [p "::/1"; p "8000::/1"; p "ffff:ffff::/32"] in let pset = PSet.of_list addr_list in let ipv6set = - P2p_acl.IpSet.fold (fun prefix _value s -> - PSet.add prefix s - ) (of_list addr_list) PSet.empty ; + P2p_acl.IpSet.fold + (fun prefix _value s -> PSet.add prefix s) + (of_list addr_list) + PSet.empty in assert_equal ~eq:PSet.equal ~msg:__LOC__ ipv6set pset let print_pset ppf pset = - PSet.iter (fun p -> - Format.fprintf ppf "%a " Ipaddr.V6.Prefix.pp p - ) pset + PSet.iter 
(fun p -> Format.fprintf ppf "%a " Ipaddr.V6.Prefix.pp p) pset let print_list ppf l = - List.iter (fun p -> - Format.fprintf ppf "%a " Ipaddr.V6.Prefix.pp p - ) l + List.iter (fun p -> Format.fprintf ppf "%a " Ipaddr.V6.Prefix.pp p) l let test_to_list _ = - let to_list s = P2p_acl.IpSet.fold (fun k _v acc -> k::acc) s [] in + let to_list s = P2p_acl.IpSet.fold (fun k _v acc -> k :: acc) s [] in let list_eq = List.for_all2 (fun x y -> Ipaddr.V6.Prefix.compare x y = 0) in let assert_equal_set ~msg a b = let a = List.sort compare a in let b = List.sort compare (to_list b) in - assert_equal ~prn:print_list ~eq:list_eq ~msg a b in - + assert_equal ~prn:print_list ~eq:list_eq ~msg a b + in let set = P2p_acl.IpSet.add_prefix (p "::/0") timenow P2p_acl.IpSet.empty in - assert_equal ~eq:list_eq ~prn:print_list ~msg:__LOC__ [p "::/0"] (to_list set) ; - - let set = of_list [p "::/1" ; p "8000::/1"] in - assert_equal ~eq:list_eq ~prn:print_list ~msg:__LOC__ [p "8000::/1"; p "::/1" ] (to_list set) ; - + assert_equal + ~eq:list_eq + ~prn:print_list + ~msg:__LOC__ + [p "::/0"] + (to_list set) ; + let set = of_list [p "::/1"; p "8000::/1"] in + assert_equal + ~eq:list_eq + ~prn:print_list + ~msg:__LOC__ + [p "8000::/1"; p "::/1"] + (to_list set) ; let setlist = [p "1234:5678::/32"] in let set = of_list setlist in assert_equal_set ~msg:__LOC__ setlist set ; - - let setlist = [p "e000::/4" ; p "a000::/4" ; - p "ffff::/16" ; - p "1234:5678::/32" ; - ] in + let setlist = + [p "e000::/4"; p "a000::/4"; p "ffff::/16"; p "1234:5678::/32"] + in let set = of_list setlist in assert_equal_set ~msg:__LOC__ setlist set let () = - Alcotest.run ~argv:[|""|] "tezos-p2p" [ - "p2p.ipv6set", [ - "empty", `Quick, test_empty ; - "inclusion", `Quick, test_inclusion ; - "contiguous", `Quick, test_contiguous ; - "test_fold", `Quick, test_fold ; - "to_list", `Quick, test_to_list ; - ] - ] + Alcotest.run + ~argv:[|""|] + "tezos-p2p" + [ ( "p2p.ipv6set", + [ ("empty", `Quick, test_empty); + ("inclusion", `Quick, test_inclusion); + ("contiguous", `Quick, test_contiguous); + ("test_fold", `Quick, test_fold); + ("to_list", `Quick, test_to_list) ] ) ] diff --git a/src/lib_p2p/test/test_p2p_peerset.ml b/src/lib_p2p/test/test_p2p_peerset.ml index 84991283f00f3d1f26645c9db948d83f3c79cc23..875d62055324e276ff8a4faccf45cfd21aaa2264 100644 --- a/src/lib_p2p/test/test_p2p_peerset.ml +++ b/src/lib_p2p/test/test_p2p_peerset.ml @@ -23,55 +23,56 @@ (* *) (*****************************************************************************) -include - Internal_event.Legacy_logging.Make - (struct let name = "test-p2p-banned_peers" end) +include Internal_event.Legacy_logging.Make (struct + let name = "test-p2p-banned_peers" +end) -let assert_equal_bool ~msg a b = - if a <> b then Alcotest.fail msg +let assert_equal_bool ~msg a b = if a <> b then Alcotest.fail msg -let a = fun s -> P2p_peer.Id.hash_string [s] +let a s = P2p_peer.Id.hash_string [s] let test_empty _ = - let peers = List.map a [ "foo"; "bar"; "baz" ; ] in + let peers = List.map a ["foo"; "bar"; "baz"] in let empty = P2p_acl.PeerRing.create 10 in - List.iter (fun peer -> - assert_equal_bool ~msg:__LOC__ false (P2p_acl.PeerRing.mem empty peer) - ) peers + List.iter + (fun peer -> + assert_equal_bool ~msg:__LOC__ false (P2p_acl.PeerRing.mem empty peer)) + peers let test_add _ = - let peers = List.map a [ "foo"; "bar"; "baz" ; ] in + let peers = List.map a ["foo"; "bar"; "baz"] in let set = P2p_acl.PeerRing.create 10 in - List.iter (fun peer -> P2p_acl.PeerRing.add set peer) peers; - List.iter (fun 
peer -> - assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set peer) - ) peers + List.iter (fun peer -> P2p_acl.PeerRing.add set peer) peers ; + List.iter + (fun peer -> + assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set peer)) + peers let test_remove _ = - let peers = List.map a [ "foo"; "bar"; "baz" ; ] in + let peers = List.map a ["foo"; "bar"; "baz"] in let set = P2p_acl.PeerRing.create 10 in - List.iter (fun peer -> P2p_acl.PeerRing.add set peer) peers; - assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set (a "bar")); - P2p_acl.PeerRing.remove set (a "bar"); + List.iter (fun peer -> P2p_acl.PeerRing.add set peer) peers ; + assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set (a "bar")) ; + P2p_acl.PeerRing.remove set (a "bar") ; assert_equal_bool ~msg:__LOC__ false (P2p_acl.PeerRing.mem set (a "bar")) let test_overflow _ = - let peers = List.map a [ "foo"; "bar"; "baz" ; ] in + let peers = List.map a ["foo"; "bar"; "baz"] in let set = P2p_acl.PeerRing.create 3 in - List.iter (fun peer -> P2p_acl.PeerRing.add set peer) peers; - assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set (a "baz")); - P2p_acl.PeerRing.add set (a "zor"); - assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set (a "zor")); - assert_equal_bool ~msg:__LOC__ false (P2p_acl.PeerRing.mem set (a "foo")); - assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set (a "bar")); + List.iter (fun peer -> P2p_acl.PeerRing.add set peer) peers ; + assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set (a "baz")) ; + P2p_acl.PeerRing.add set (a "zor") ; + assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set (a "zor")) ; + assert_equal_bool ~msg:__LOC__ false (P2p_acl.PeerRing.mem set (a "foo")) ; + assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set (a "bar")) ; assert_equal_bool ~msg:__LOC__ true (P2p_acl.PeerRing.mem set (a "baz")) let () = - Alcotest.run ~argv:[|""|] "tezos-p2p" [ - "p2p.peerset", [ - "empty", `Quick, test_empty ; - "add", `Quick, test_add; - "overflow", `Quick, test_overflow; - "remove", `Quick, test_remove; - ] - ] + Alcotest.run + ~argv:[|""|] + "tezos-p2p" + [ ( "p2p.peerset", + [ ("empty", `Quick, test_empty); + ("add", `Quick, test_add); + ("overflow", `Quick, test_overflow); + ("remove", `Quick, test_remove) ] ) ] diff --git a/src/lib_p2p/test/test_p2p_pool.ml b/src/lib_p2p/test/test_p2p_pool.ml index 7d4bfe7dfc97aa70f381f45adabdb1af7e67ccb5..a6fa869df50493a5b83928ee5df5b0c6750277a5 100644 --- a/src/lib_p2p/test/test_p2p_pool.ml +++ b/src/lib_p2p/test/test_p2p_pool.ml @@ -24,212 +24,222 @@ (* *) (*****************************************************************************) -include - Internal_event.Legacy_logging.Make - (struct let name = "test.p2p.connection-pool" end) - -type message = - | Ping - -let msg_config : message P2p_pool.message_config = { - encoding = [ - P2p_message.Encoding { - tag = 0x10 ; - title = "Ping" ; - encoding = Data_encoding.empty ; - wrap = (function () -> Ping) ; - unwrap = (function Ping -> Some ()) ; - max_length = None ; - } ; - ] ; - chain_name = Distributed_db_version.sandboxed_chain_name ; - distributed_db_versions = [ Distributed_db_version.zero ] ; -} +include Internal_event.Legacy_logging.Make (struct + let name = "test.p2p.connection-pool" +end) + +type message = Ping + +let msg_config : message P2p_pool.message_config = + { encoding = + [ P2p_message.Encoding + { tag = 0x10; + title = "Ping"; + encoding = Data_encoding.empty; + wrap = (function () -> Ping); + unwrap = 
(function Ping -> Some ()); + max_length = None } ]; + chain_name = Distributed_db_version.sandboxed_chain_name; + distributed_db_versions = [Distributed_db_version.zero] } type metadata = unit -let peer_meta_config : metadata P2p_pool.peer_meta_config = { - peer_meta_encoding = Data_encoding.empty ; - peer_meta_initial = (fun _ -> ()) ; - score = fun () -> 0. ; -} +let peer_meta_config : metadata P2p_pool.peer_meta_config = + { peer_meta_encoding = Data_encoding.empty; + peer_meta_initial = (fun _ -> ()); + score = (fun () -> 0.) } -let conn_meta_config : metadata P2p_socket.metadata_config = { - conn_meta_encoding = Data_encoding.empty ; - conn_meta_value = (fun _ -> ()) ; - private_node = (fun _ -> false) ; -} +let conn_meta_config : metadata P2p_socket.metadata_config = + { conn_meta_encoding = Data_encoding.empty; + conn_meta_value = (fun _ -> ()); + private_node = (fun _ -> false) } let sync ch = - Process.Channel.push ch () >>=? fun () -> - Process.Channel.pop ch >>=? fun () -> - return_unit + Process.Channel.push ch () + >>=? fun () -> Process.Channel.pop ch >>=? fun () -> return_unit let rec sync_nodes nodes = - iter_p - (fun { Process.channel ; _ } -> Process.Channel.pop channel) - nodes >>=? fun () -> - iter_p - (fun { Process.channel ; _ } -> Process.Channel.push channel ()) - nodes >>=? fun () -> - sync_nodes nodes + iter_p (fun {Process.channel; _} -> Process.Channel.pop channel) nodes + >>=? fun () -> + iter_p (fun {Process.channel; _} -> Process.Channel.push channel ()) nodes + >>=? fun () -> sync_nodes nodes let sync_nodes nodes = - sync_nodes nodes >>= function + sync_nodes nodes + >>= function | Ok () | Error (Exn End_of_file :: _) -> return_unit | Error _ as err -> Lwt.return err let detach_node f points n = - let (addr, port), points = List.select n points in + let ((addr, port), points) = List.select n points in let proof_of_work_target = Crypto_box.make_target 0. in let identity = P2p_identity.generate proof_of_work_target in let nb_points = List.length points in - let config = P2p_pool.{ - identity ; - proof_of_work_target ; - trusted_points = points ; - peers_file = "/dev/null" ; - private_mode = true ; - listening_port = Some port ; - min_connections = nb_points ; - max_connections = nb_points ; - max_incoming_connections = nb_points ; - connection_timeout = Time.System.Span.of_seconds_exn 10. ; - authentication_timeout = Time.System.Span.of_seconds_exn 2. ; - incoming_app_message_queue_size = None ; - incoming_message_queue_size = None ; - outgoing_message_queue_size = None ; - known_peer_ids_history_size = 100 ; - known_points_history_size = 100 ; - max_known_points = None ; - max_known_peer_ids = None ; - swap_linger = Time.System.Span.of_seconds_exn 0. ; - binary_chunks_size = None ; - greylisting_config = P2p_point_state.Info.default_greylisting_config ; - } in + let config = + P2p_pool. 
+ { identity; + proof_of_work_target; + trusted_points = points; + peers_file = "/dev/null"; + private_mode = true; + listening_port = Some port; + min_connections = nb_points; + max_connections = nb_points; + max_incoming_connections = nb_points; + connection_timeout = Time.System.Span.of_seconds_exn 10.; + authentication_timeout = Time.System.Span.of_seconds_exn 2.; + incoming_app_message_queue_size = None; + incoming_message_queue_size = None; + outgoing_message_queue_size = None; + known_peer_ids_history_size = 100; + known_points_history_size = 100; + max_known_points = None; + max_known_peer_ids = None; + swap_linger = Time.System.Span.of_seconds_exn 0.; + binary_chunks_size = None; + greylisting_config = P2p_point_state.Info.default_greylisting_config } + in Process.detach ~prefix:(Format.asprintf "%a: " P2p_peer.Id.pp_short identity.peer_id) - begin fun channel -> + (fun channel -> let sched = P2p_io_scheduler.create ~read_buffer_size:(1 lsl 12) () in - P2p_pool.create - config peer_meta_config conn_meta_config msg_config sched >>= fun pool -> - P2p_welcome.create ~backlog:10 pool ~addr port >>= fun welcome -> - P2p_welcome.activate welcome; - lwt_log_info "Node ready (port: %d)" port >>= fun () -> - sync channel >>=? fun () -> - f channel pool points >>=? fun () -> - lwt_log_info "Shutting down..." >>= fun () -> - P2p_welcome.shutdown welcome >>= fun () -> - P2p_pool.destroy pool >>= fun () -> - P2p_io_scheduler.shutdown sched >>= fun () -> - lwt_log_info "Bye." >>= fun () -> - return_unit - end + P2p_pool.create config peer_meta_config conn_meta_config msg_config sched + >>= fun pool -> + P2p_welcome.create ~backlog:10 pool ~addr port + >>= fun welcome -> + P2p_welcome.activate welcome ; + lwt_log_info "Node ready (port: %d)" port + >>= fun () -> + sync channel + >>=? fun () -> + f channel pool points + >>=? fun () -> + lwt_log_info "Shutting down..." + >>= fun () -> + P2p_welcome.shutdown welcome + >>= fun () -> + P2p_pool.destroy pool + >>= fun () -> + P2p_io_scheduler.shutdown sched + >>= fun () -> lwt_log_info "Bye." >>= fun () -> return_unit) let detach_nodes run_node points = let clients = List.length points in - Lwt_list.map_p - (detach_node run_node points) (0 -- (clients - 1)) >>= fun nodes -> + Lwt_list.map_p (detach_node run_node points) (0 -- (clients - 1)) + >>= fun nodes -> Lwt.ignore_result (sync_nodes nodes) ; Process.wait_all nodes type error += Connect | Write | Read module Simple = struct - let rec connect ~timeout pool point = - lwt_log_info "Connect to %a" P2p_point.Id.pp point >>= fun () -> - P2p_pool.connect pool point ~timeout >>= function - | Error [P2p_errors.Connected] -> begin - match P2p_pool.Connection.find_by_point pool point with - | Some conn -> return conn - | None -> failwith "Woops..." 
- end - | Error ([ P2p_errors.Connection_refused - | P2p_errors.Pending_connection - | P2p_errors.Rejected_socket_connection - | Canceled - | Timeout - | P2p_errors.Rejected _ as err ]) -> - lwt_log_info "Connection to %a failed (%a)" - P2p_point.Id.pp point - (fun ppf err -> match err with - | P2p_errors.Connection_refused -> - Format.fprintf ppf "connection refused" - | P2p_errors.Pending_connection -> - Format.fprintf ppf "pending connection" - | P2p_errors.Rejected_socket_connection -> - Format.fprintf ppf "rejected" - | Canceled -> - Format.fprintf ppf "canceled" - | Timeout -> - Format.fprintf ppf "timeout" - | P2p_errors.Rejected peer -> - Format.fprintf ppf "rejected (%a)" P2p_peer.Id.pp peer - | _ -> assert false) err >>= fun () -> - Lwt_unix.sleep (0.5 +. Random.float 2.) >>= fun () -> - connect ~timeout pool point - | Ok _ | Error _ as res -> Lwt.return res - - let connect_all ~timeout pool points = - map_p (connect ~timeout pool) points - - let write_all conns msg = - iter_p - (fun conn -> - trace Write @@ P2p_pool.write_sync conn msg) - conns + lwt_log_info "Connect to %a" P2p_point.Id.pp point + >>= fun () -> + P2p_pool.connect pool point ~timeout + >>= function + | Error [P2p_errors.Connected] -> ( + match P2p_pool.Connection.find_by_point pool point with + | Some conn -> + return conn + | None -> + failwith "Woops..." ) + | Error + [ ( ( P2p_errors.Connection_refused + | P2p_errors.Pending_connection + | P2p_errors.Rejected_socket_connection + | Canceled + | Timeout + | P2p_errors.Rejected _ ) as err ) ] -> + lwt_log_info + "Connection to %a failed (%a)" + P2p_point.Id.pp + point + (fun ppf err -> + match err with + | P2p_errors.Connection_refused -> + Format.fprintf ppf "connection refused" + | P2p_errors.Pending_connection -> + Format.fprintf ppf "pending connection" + | P2p_errors.Rejected_socket_connection -> + Format.fprintf ppf "rejected" + | Canceled -> + Format.fprintf ppf "canceled" + | Timeout -> + Format.fprintf ppf "timeout" + | P2p_errors.Rejected peer -> + Format.fprintf ppf "rejected (%a)" P2p_peer.Id.pp peer + | _ -> + assert false) + err + >>= fun () -> + Lwt_unix.sleep (0.5 +. Random.float 2.) + >>= fun () -> connect ~timeout pool point + | (Ok _ | Error _) as res -> + Lwt.return res + + let connect_all ~timeout pool points = map_p (connect ~timeout pool) points + + let write_all conns msg = + iter_p (fun conn -> trace Write @@ P2p_pool.write_sync conn msg) conns let read_all conns = iter_p (fun conn -> - trace Read @@ P2p_pool.read conn >>=? fun Ping -> - return_unit) + trace Read @@ P2p_pool.read conn >>=? fun Ping -> return_unit) conns - let close_all conns = - Lwt_list.iter_p P2p_pool.disconnect conns + let close_all conns = Lwt_list.iter_p P2p_pool.disconnect conns let node channel pool points = - connect_all ~timeout:(Time.System.Span.of_seconds_exn 2.) pool points >>=? fun conns -> - lwt_log_info "Bootstrap OK" >>= fun () -> - sync channel >>=? fun () -> - write_all conns Ping >>=? fun () -> - lwt_log_info "Sent all messages." >>= fun () -> - sync channel >>=? fun () -> - read_all conns >>=? fun () -> - lwt_log_info "Read all messages." >>= fun () -> - sync channel >>=? fun () -> - close_all conns >>= fun () -> - lwt_log_info "All connections successfully closed." >>= fun () -> - return_unit + connect_all ~timeout:(Time.System.Span.of_seconds_exn 2.) pool points + >>=? fun conns -> + lwt_log_info "Bootstrap OK" + >>= fun () -> + sync channel + >>=? fun () -> + write_all conns Ping + >>=? fun () -> + lwt_log_info "Sent all messages." 
+ >>= fun () -> + sync channel + >>=? fun () -> + read_all conns + >>=? fun () -> + lwt_log_info "Read all messages." + >>= fun () -> + sync channel + >>=? fun () -> + close_all conns + >>= fun () -> + lwt_log_info "All connections successfully closed." + >>= fun () -> return_unit let run points = detach_nodes node points - end module Random_connections = struct - let rec connect_random pool total rem point n = - Lwt_unix.sleep (0.2 +. Random.float 1.0) >>= fun () -> - (trace Connect @@ Simple.connect ~timeout:(Time.System.Span.of_seconds_exn 2.) pool point) >>=? fun conn -> - (trace Write @@ P2p_pool.write conn Ping) >>= fun _ -> - (trace Read @@ P2p_pool.read conn) >>=? fun Ping -> - Lwt_unix.sleep (0.2 +. Random.float 1.0) >>= fun () -> - P2p_pool.disconnect conn >>= fun () -> - begin - decr rem ; - if !rem mod total = 0 then - lwt_log_info "Remaining: %d." (!rem / total) - else - Lwt.return_unit - end >>= fun () -> - if n > 1 then - connect_random pool total rem point (pred n) - else - return_unit + Lwt_unix.sleep (0.2 +. Random.float 1.0) + >>= fun () -> + trace Connect + @@ Simple.connect ~timeout:(Time.System.Span.of_seconds_exn 2.) pool point + >>=? fun conn -> + trace Write @@ P2p_pool.write conn Ping + >>= fun _ -> + trace Read @@ P2p_pool.read conn + >>=? fun Ping -> + Lwt_unix.sleep (0.2 +. Random.float 1.0) + >>= fun () -> + P2p_pool.disconnect conn + >>= fun () -> + ( decr rem ; + if !rem mod total = 0 then lwt_log_info "Remaining: %d." (!rem / total) + else Lwt.return_unit ) + >>= fun () -> + if n > 1 then connect_random pool total rem point (pred n) else return_unit let connect_random_all pool points n = let total = List.length points in @@ -237,20 +247,21 @@ module Random_connections = struct iter_p (fun point -> connect_random pool total rem point n) points let node repeat _channel pool points = - lwt_log_info "Begin random connections." >>= fun () -> - connect_random_all pool points repeat >>=? fun () -> - lwt_log_info "Random connections OK." >>= fun () -> - return_unit + lwt_log_info "Begin random connections." + >>= fun () -> + connect_random_all pool points repeat + >>=? fun () -> + lwt_log_info "Random connections OK." >>= fun () -> return_unit let run points repeat = detach_nodes (node repeat) points - end module Garbled = struct - let is_connection_closed = function - | Error ((Write | Read) :: P2p_errors.Connection_closed :: _) -> true - | Ok _ -> false + | Error ((Write | Read) :: P2p_errors.Connection_closed :: _) -> + true + | Ok _ -> + false | Error err -> log_info "Unexpected error: %a" pp_print_error err ; false @@ -258,85 +269,97 @@ module Garbled = struct let write_bad_all conns = let bad_msg = MBytes.of_string (String.make 16 '\000') in iter_p - (fun conn -> - trace Write @@ P2p_pool.raw_write_sync conn bad_msg) + (fun conn -> trace Write @@ P2p_pool.raw_write_sync conn bad_msg) conns let node ch pool points = - Simple.connect_all ~timeout:(Time.System.Span.of_seconds_exn 2.) pool points >>=? fun conns -> - sync ch >>=? fun () -> - begin - write_bad_all conns >>=? fun () -> - Simple.read_all conns - end >>= fun err -> - _assert (is_connection_closed err) __LOC__ "" + Simple.connect_all + ~timeout:(Time.System.Span.of_seconds_exn 2.) + pool + points + >>=? fun conns -> + sync ch + >>=? fun () -> + write_bad_all conns + >>=? 
(fun () -> Simple.read_all conns) + >>= fun err -> _assert (is_connection_closed err) __LOC__ "" let run points = detach_nodes node points - end let () = Random.self_init () let addr = ref Ipaddr.V6.localhost -let port = ref (1024 + Random.int 8192) -let clients = ref 10 -let repeat_connections = ref 5 -let log_config = ref None - -let spec = Arg.[ - - "--port", Int (fun p -> port := p), " Listening port of the first peer."; - - "--addr", String (fun p -> addr := Ipaddr.V6.of_string_exn p), - " Listening addr"; - - "--clients", Set_int clients, " Number of concurrent clients." ; - - "--repeat", Set_int repeat_connections, - " Number of connections/disconnections." ; +let port = ref (1024 + Random.int 8192) - "-v", Unit (fun () -> - log_config := Some ( - Lwt_log_sink_unix.create_cfg - ~rules:("test.p2p.connection-pool -> info; p2p.connection-pool -> info") - () )), - " Log up to info msgs" ; +let clients = ref 10 - "-vv", Unit (fun () -> - log_config := Some ( - Lwt_log_sink_unix.create_cfg - ~rules:("test.p2p.connection-pool -> debug; p2p.connection-pool -> debug") - () )), - " Log up to debug msgs"; +let repeat_connections = ref 5 - ] +let log_config = ref None -let init_logs = lazy (Internal_event_unix.init ?lwt_log_sink:!log_config ()) +let spec = + Arg. + [ ("--port", Int (fun p -> port := p), " Listening port of the first peer."); + ( "--addr", + String (fun p -> addr := Ipaddr.V6.of_string_exn p), + " Listening addr" ); + ("--clients", Set_int clients, " Number of concurrent clients."); + ( "--repeat", + Set_int repeat_connections, + " Number of connections/disconnections." ); + ( "-v", + Unit + (fun () -> + log_config := + Some + (Lwt_log_sink_unix.create_cfg + ~rules: + "test.p2p.connection-pool -> info; p2p.connection-pool \ + -> info" + ())), + " Log up to info msgs" ); + ( "-vv", + Unit + (fun () -> + log_config := + Some + (Lwt_log_sink_unix.create_cfg + ~rules: + "test.p2p.connection-pool -> debug; p2p.connection-pool \ + -> debug" + ())), + " Log up to debug msgs" ) ] + +let init_logs = lazy (Internal_event_unix.init ?lwt_log_sink:!log_config ()) let wrap n f = - Alcotest_lwt.test_case n `Quick begin fun _ () -> - Lazy.force init_logs >>= fun () -> - f () >>= function - | Ok () -> Lwt.return_unit - | Error error -> - Format.kasprintf Pervasives.failwith "%a" pp_print_error error - end + Alcotest_lwt.test_case n `Quick (fun _ () -> + Lazy.force init_logs + >>= fun () -> + f () + >>= function + | Ok () -> + Lwt.return_unit + | Error error -> + Format.kasprintf Pervasives.failwith "%a" pp_print_error error) let main () = let anon_fun _num_peers = raise (Arg.Bad "No anonymous argument.") in let usage_msg = "Usage: %s <num_peers>.\nArguments are:" in Arg.parse spec anon_fun usage_msg ; let ports = !port -- (!port + !clients - 1) in - let points = List.map (fun port -> !addr, port) ports in - Alcotest.run ~argv:[|""|] "tezos-p2p" [ - "p2p-connection-pool", [ - wrap "simple" (fun _ -> Simple.run points) ; - wrap "random" (fun _ -> Random_connections.run points !repeat_connections) ; - wrap "garbled" (fun _ -> Garbled.run points) ; - ] - ] + let points = List.map (fun port -> (!addr, port)) ports in + Alcotest.run + ~argv:[|""|] + "tezos-p2p" + [ ( "p2p-connection-pool", + [ wrap "simple" (fun _ -> Simple.run points); + wrap "random" (fun _ -> + Random_connections.run points !repeat_connections); + wrap "garbled" (fun _ -> Garbled.run points) ] ) ] + let () = Sys.catch_break true ; - try main () - with _ -> () + try main () with _ -> () diff --git 
a/src/lib_p2p/test/test_p2p_socket.ml b/src/lib_p2p/test/test_p2p_socket.ml index 1c8499932189098f47efa6e7c3ec9f0f78eaa4d7..98549a37e670bb5b6f4e3936a6c86b1d67b9346b 100644 --- a/src/lib_p2p/test/test_p2p_socket.ml +++ b/src/lib_p2p/test/test_p2p_socket.ml @@ -24,16 +24,18 @@ (* *) (*****************************************************************************) -include - Internal_event.Legacy_logging.Make - (struct let name = "test.p2p.connection" end) +include Internal_event.Legacy_logging.Make (struct + let name = "test.p2p.connection" +end) let addr = ref Ipaddr.V6.localhost let canceler = Lwt_canceler.create () (* unused *) let proof_of_work_target = Crypto_box.make_target 16. + let id1 = P2p_identity.generate proof_of_work_target + let id2 = P2p_identity.generate proof_of_work_target let id0 = @@ -41,354 +43,377 @@ let id0 = P2p_identity.generate (Crypto_box.make_target 0.) let version = - { Network_version. - chain_name = Distributed_db_version.sandboxed_chain_name ; - distributed_db_version = Distributed_db_version.zero ; - p2p_version = P2p_version.zero ; - } + { Network_version.chain_name = Distributed_db_version.sandboxed_chain_name; + distributed_db_version = Distributed_db_version.zero; + p2p_version = P2p_version.zero } type metadata = unit -let conn_meta_config : metadata P2p_socket.metadata_config = { - conn_meta_encoding = Data_encoding.empty ; - conn_meta_value = (fun _ -> ()) ; - private_node = (fun _ -> false) ; -} + +let conn_meta_config : metadata P2p_socket.metadata_config = + { conn_meta_encoding = Data_encoding.empty; + conn_meta_value = (fun _ -> ()); + private_node = (fun _ -> false) } let rec listen ?port addr = let tentative_port = - match port with - | None -> 1024 + Random.int 8192 - | Some port -> port in + match port with None -> 1024 + Random.int 8192 | Some port -> port + in let uaddr = Ipaddr_unix.V6.to_inet_addr addr in let main_socket = Lwt_unix.(socket PF_INET6 SOCK_STREAM 0) in Lwt_unix.(setsockopt main_socket SO_REUSEADDR true) ; - Lwt.catch begin fun () -> - Lwt_unix.bind main_socket - (ADDR_INET (uaddr, tentative_port)) >>= fun () -> - Lwt_unix.listen main_socket 1 ; - Lwt.return (main_socket, tentative_port) - end begin function - | Unix.Unix_error - ((Unix.EADDRINUSE | Unix.EADDRNOTAVAIL), _, _) when port = None -> - listen addr - | exn -> Lwt.fail exn - end + Lwt.catch + (fun () -> + Lwt_unix.bind main_socket (ADDR_INET (uaddr, tentative_port)) + >>= fun () -> + Lwt_unix.listen main_socket 1 ; + Lwt.return (main_socket, tentative_port)) + (function + | Unix.Unix_error ((Unix.EADDRINUSE | Unix.EADDRNOTAVAIL), _, _) + when port = None -> + listen addr + | exn -> + Lwt.fail exn) let sync ch = - Process.Channel.push ch () >>=? fun () -> - Process.Channel.pop ch >>=? fun () -> - return_unit + Process.Channel.push ch () + >>=? fun () -> Process.Channel.pop ch >>=? fun () -> return_unit let rec sync_nodes nodes = - iter_p - (fun { Process.channel ; _ } -> Process.Channel.pop channel) - nodes >>=? fun () -> - iter_p - (fun { Process.channel ; _ } -> Process.Channel.push channel ()) - nodes >>=? fun () -> - sync_nodes nodes + iter_p (fun {Process.channel; _} -> Process.Channel.pop channel) nodes + >>=? fun () -> + iter_p (fun {Process.channel; _} -> Process.Channel.push channel ()) nodes + >>=? 
fun () -> sync_nodes nodes let sync_nodes nodes = - sync_nodes nodes >>= function + sync_nodes nodes + >>= function | Ok () | Error (Exn End_of_file :: _) -> return_unit | Error _ as err -> Lwt.return err let run_nodes client server = - listen !addr >>= fun (main_socket, port) -> - Process.detach ~prefix:"server: " begin fun channel -> - let sched = P2p_io_scheduler.create ~read_buffer_size:(1 lsl 12) () in - server channel sched main_socket >>=? fun () -> - P2p_io_scheduler.shutdown sched >>= fun () -> - return_unit - end >>= fun server_node -> - Process.detach ~prefix:"client: " begin fun channel -> - Lwt_utils_unix.safe_close main_socket >>= fun () -> - let sched = P2p_io_scheduler.create ~read_buffer_size:(1 lsl 12) () in - client channel sched !addr port >>=? fun () -> - P2p_io_scheduler.shutdown sched >>= fun () -> - return_unit - end >>= fun client_node -> - let nodes = [ server_node ; client_node ] in + listen !addr + >>= fun (main_socket, port) -> + Process.detach ~prefix:"server: " (fun channel -> + let sched = P2p_io_scheduler.create ~read_buffer_size:(1 lsl 12) () in + server channel sched main_socket + >>=? fun () -> P2p_io_scheduler.shutdown sched >>= fun () -> return_unit) + >>= fun server_node -> + Process.detach ~prefix:"client: " (fun channel -> + Lwt_utils_unix.safe_close main_socket + >>= fun () -> + let sched = P2p_io_scheduler.create ~read_buffer_size:(1 lsl 12) () in + client channel sched !addr port + >>=? fun () -> P2p_io_scheduler.shutdown sched >>= fun () -> return_unit) + >>= fun client_node -> + let nodes = [server_node; client_node] in Lwt.ignore_result (sync_nodes nodes) ; Process.wait_all nodes let raw_accept sched main_socket = - P2p_fd.accept main_socket >>= fun (fd, sockaddr) -> + P2p_fd.accept main_socket + >>= fun (fd, sockaddr) -> let fd = P2p_io_scheduler.register sched fd in let point = match sockaddr with - | Lwt_unix.ADDR_UNIX _ -> assert false + | Lwt_unix.ADDR_UNIX _ -> + assert false | Lwt_unix.ADDR_INET (addr, port) -> - Ipaddr_unix.V6.of_inet_addr_exn addr, port in + (Ipaddr_unix.V6.of_inet_addr_exn addr, port) + in Lwt.return (fd, point) let accept sched main_socket = - raw_accept sched main_socket >>= fun (fd, point) -> + raw_accept sched main_socket + >>= fun (fd, point) -> P2p_socket.authenticate ~canceler ~proof_of_work_target - ~incoming:true fd point id1 version + ~incoming:true + fd + point + id1 + version conn_meta_config let raw_connect sched addr port = let fd = P2p_fd.socket PF_INET6 SOCK_STREAM 0 in - let uaddr = - Lwt_unix.ADDR_INET (Ipaddr_unix.V6.to_inet_addr addr, port) in - P2p_fd.connect fd uaddr >>= fun () -> + let uaddr = Lwt_unix.ADDR_INET (Ipaddr_unix.V6.to_inet_addr addr, port) in + P2p_fd.connect fd uaddr + >>= fun () -> let fd = P2p_io_scheduler.register sched fd in Lwt.return fd let connect sched addr port id = - raw_connect sched addr port >>= fun fd -> + raw_connect sched addr port + >>= fun fd -> P2p_socket.authenticate ~canceler ~proof_of_work_target - ~incoming:false fd - (addr, port) id version - conn_meta_config >>=? fun (info, auth_fd) -> - _assert (not info.incoming) __LOC__ "" >>=? fun () -> - _assert (P2p_peer.Id.compare info.peer_id id1.peer_id = 0) - __LOC__ "" >>=? fun () -> - return auth_fd + ~incoming:false + fd + (addr, port) + id + version + conn_meta_config + >>=? fun (info, auth_fd) -> + _assert (not info.incoming) __LOC__ "" + >>=? fun () -> + _assert (P2p_peer.Id.compare info.peer_id id1.peer_id = 0) __LOC__ "" + >>=? 
fun () -> return auth_fd let is_connection_closed = function - | Error [P2p_errors.Connection_closed] -> true - | Ok _ -> false + | Error [P2p_errors.Connection_closed] -> + true + | Ok _ -> + false | Error err -> log_notice "Error: %a" pp_print_error err ; false let is_decoding_error = function - | Error [P2p_errors.Decoding_error] -> true - | Ok _ -> false + | Error [P2p_errors.Decoding_error] -> + true + | Ok _ -> + false | Error err -> log_notice "Error: %a" pp_print_error err ; false module Low_level = struct - let simple_msg = Rand.generate (1 lsl 4) let client _ch sched addr port = let msg = MBytes.create (MBytes.length simple_msg) in - raw_connect sched addr port >>= fun fd -> - P2p_io_scheduler.read_full fd msg >>=? fun () -> - _assert (MBytes.compare simple_msg msg = 0) __LOC__ "" >>=? fun () -> - P2p_io_scheduler.close fd >>=? fun () -> - return_unit + raw_connect sched addr port + >>= fun fd -> + P2p_io_scheduler.read_full fd msg + >>=? fun () -> + _assert (MBytes.compare simple_msg msg = 0) __LOC__ "" + >>=? fun () -> P2p_io_scheduler.close fd >>=? fun () -> return_unit let server _ch sched socket = - raw_accept sched socket >>= fun (fd, _point) -> - P2p_io_scheduler.write fd simple_msg >>=? fun () -> - P2p_io_scheduler.close fd >>=? fun _ -> - return_unit + raw_accept sched socket + >>= fun (fd, _point) -> + P2p_io_scheduler.write fd simple_msg + >>=? fun () -> P2p_io_scheduler.close fd >>=? fun _ -> return_unit let run _dir = run_nodes client server - end module Kick = struct - let encoding = Data_encoding.bytes let is_rejected = function - | Error [P2p_errors.Rejected_socket_connection] -> true - | Ok _ -> false + | Error [P2p_errors.Rejected_socket_connection] -> + true + | Ok _ -> + false | Error err -> log_notice "Error: %a" pp_print_error err ; false let server _ch sched socket = - accept sched socket >>=? fun (info, auth_fd) -> - _assert (info.incoming) __LOC__ "" >>=? fun () -> - _assert (P2p_peer.Id.compare info.peer_id id2.peer_id = 0) - __LOC__ "" >>=? fun () -> - P2p_socket.kick auth_fd >>= fun () -> - return_unit + accept sched socket + >>=? fun (info, auth_fd) -> + _assert info.incoming __LOC__ "" + >>=? fun () -> + _assert (P2p_peer.Id.compare info.peer_id id2.peer_id = 0) __LOC__ "" + >>=? fun () -> P2p_socket.kick auth_fd >>= fun () -> return_unit let client _ch sched addr port = - connect sched addr port id2 >>=? fun auth_fd -> - P2p_socket.accept ~canceler auth_fd encoding >>= fun conn -> - _assert (is_rejected conn) __LOC__ "" >>=? fun () -> - return_unit + connect sched addr port id2 + >>=? fun auth_fd -> + P2p_socket.accept ~canceler auth_fd encoding + >>= fun conn -> + _assert (is_rejected conn) __LOC__ "" >>=? fun () -> return_unit let run _dir = run_nodes client server - end module Kicked = struct - let encoding = Data_encoding.bytes let server _ch sched socket = - accept sched socket >>=? fun (_info, auth_fd) -> - P2p_socket.accept ~canceler auth_fd encoding >>= fun conn -> - _assert (Kick.is_rejected conn) __LOC__ "" >>=? fun () -> - return_unit + accept sched socket + >>=? fun (_info, auth_fd) -> + P2p_socket.accept ~canceler auth_fd encoding + >>= fun conn -> + _assert (Kick.is_rejected conn) __LOC__ "" >>=? fun () -> return_unit let client _ch sched addr port = - connect sched addr port id2 >>=? fun auth_fd -> - P2p_socket.kick auth_fd >>= fun () -> - return_unit + connect sched addr port id2 + >>=? 
fun auth_fd -> P2p_socket.kick auth_fd >>= fun () -> return_unit let run _dir = run_nodes client server - end module Simple_message = struct - let encoding = Data_encoding.bytes let simple_msg = Rand.generate (1 lsl 4) + let simple_msg2 = Rand.generate (1 lsl 4) let server ch sched socket = - accept sched socket >>=? fun (_info, auth_fd) -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - P2p_socket.write_sync conn simple_msg >>=? fun () -> - P2p_socket.read conn >>=? fun (_msg_size, msg) -> - _assert (MBytes.compare simple_msg2 msg = 0) __LOC__ "" >>=? fun () -> - sync ch >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + accept sched socket + >>=? fun (_info, auth_fd) -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + P2p_socket.write_sync conn simple_msg + >>=? fun () -> + P2p_socket.read conn + >>=? fun (_msg_size, msg) -> + _assert (MBytes.compare simple_msg2 msg = 0) __LOC__ "" + >>=? fun () -> + sync ch >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let client ch sched addr port = - connect sched addr port id2 >>=? fun auth_fd -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - P2p_socket.write_sync conn simple_msg2 >>=? fun () -> - P2p_socket.read conn >>=? fun (_msg_size, msg) -> - _assert (MBytes.compare simple_msg msg = 0) __LOC__ "" >>=? fun () -> - sync ch >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + connect sched addr port id2 + >>=? fun auth_fd -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + P2p_socket.write_sync conn simple_msg2 + >>=? fun () -> + P2p_socket.read conn + >>=? fun (_msg_size, msg) -> + _assert (MBytes.compare simple_msg msg = 0) __LOC__ "" + >>=? fun () -> + sync ch >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let run _dir = run_nodes client server - end module Chunked_message = struct - let encoding = Data_encoding.bytes let simple_msg = Rand.generate (1 lsl 8) + let simple_msg2 = Rand.generate (1 lsl 8) let server ch sched socket = - accept sched socket >>=? fun (_info, auth_fd) -> - P2p_socket.accept - ~canceler - ~binary_chunks_size:21 auth_fd encoding >>=? fun conn -> - P2p_socket.write_sync conn simple_msg >>=? fun () -> - P2p_socket.read conn >>=? fun (_msg_size, msg) -> - _assert (MBytes.compare simple_msg2 msg = 0) __LOC__ "" >>=? fun () -> - sync ch >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + accept sched socket + >>=? fun (_info, auth_fd) -> + P2p_socket.accept ~canceler ~binary_chunks_size:21 auth_fd encoding + >>=? fun conn -> + P2p_socket.write_sync conn simple_msg + >>=? fun () -> + P2p_socket.read conn + >>=? fun (_msg_size, msg) -> + _assert (MBytes.compare simple_msg2 msg = 0) __LOC__ "" + >>=? fun () -> + sync ch >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let client ch sched addr port = - connect sched addr port id2 >>=? fun auth_fd -> - P2p_socket.accept - ~canceler - ~binary_chunks_size:21 auth_fd encoding >>=? fun conn -> - P2p_socket.write_sync conn simple_msg2 >>=? fun () -> - P2p_socket.read conn >>=? fun (_msg_size, msg) -> - _assert (MBytes.compare simple_msg msg = 0) __LOC__ "" >>=? fun () -> - sync ch >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + connect sched addr port id2 + >>=? fun auth_fd -> + P2p_socket.accept ~canceler ~binary_chunks_size:21 auth_fd encoding + >>=? fun conn -> + P2p_socket.write_sync conn simple_msg2 + >>=? fun () -> + P2p_socket.read conn + >>=? 
fun (_msg_size, msg) -> + _assert (MBytes.compare simple_msg msg = 0) __LOC__ "" + >>=? fun () -> + sync ch >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let run _dir = run_nodes client server - end module Oversized_message = struct - let encoding = Data_encoding.bytes let simple_msg = Rand.generate (1 lsl 17) + let simple_msg2 = Rand.generate (1 lsl 17) let server ch sched socket = - accept sched socket >>=? fun (_info, auth_fd) -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - P2p_socket.write_sync conn simple_msg >>=? fun () -> - P2p_socket.read conn >>=? fun (_msg_size, msg) -> - _assert (MBytes.compare simple_msg2 msg = 0) __LOC__ "" >>=? fun () -> - sync ch >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + accept sched socket + >>=? fun (_info, auth_fd) -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + P2p_socket.write_sync conn simple_msg + >>=? fun () -> + P2p_socket.read conn + >>=? fun (_msg_size, msg) -> + _assert (MBytes.compare simple_msg2 msg = 0) __LOC__ "" + >>=? fun () -> + sync ch >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let client ch sched addr port = - connect sched addr port id2 >>=? fun auth_fd -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - P2p_socket.write_sync conn simple_msg2 >>=? fun () -> - P2p_socket.read conn >>=? fun (_msg_size, msg) -> - _assert (MBytes.compare simple_msg msg = 0) __LOC__ "" >>=? fun () -> - sync ch >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + connect sched addr port id2 + >>=? fun auth_fd -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + P2p_socket.write_sync conn simple_msg2 + >>=? fun () -> + P2p_socket.read conn + >>=? fun (_msg_size, msg) -> + _assert (MBytes.compare simple_msg msg = 0) __LOC__ "" + >>=? fun () -> + sync ch >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let run _dir = run_nodes client server - end module Close_on_read = struct - let encoding = Data_encoding.bytes let simple_msg = Rand.generate (1 lsl 4) let server ch sched socket = - accept sched socket >>=? fun (_info, auth_fd) -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - sync ch >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + accept sched socket + >>=? fun (_info, auth_fd) -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + sync ch >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let client ch sched addr port = - connect sched addr port id2 >>=? fun auth_fd -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - sync ch >>=? fun () -> - P2p_socket.read conn >>= fun err -> - _assert (is_connection_closed err) __LOC__ "" >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + connect sched addr port id2 + >>=? fun auth_fd -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + sync ch + >>=? fun () -> + P2p_socket.read conn + >>= fun err -> + _assert (is_connection_closed err) __LOC__ "" + >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let run _dir = run_nodes client server - end module Close_on_write = struct - let encoding = Data_encoding.bytes let simple_msg = Rand.generate (1 lsl 4) let server ch sched socket = - accept sched socket >>=? fun (_info, auth_fd) -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - P2p_socket.close conn >>= fun _stat -> - sync ch >>=? fun ()-> - return_unit + accept sched socket + >>=? 
fun (_info, auth_fd) -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + P2p_socket.close conn >>= fun _stat -> sync ch >>=? fun () -> return_unit let client ch sched addr port = - connect sched addr port id2 >>=? fun auth_fd -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - sync ch >>=? fun ()-> - Lwt_unix.sleep 0.1 >>= fun () -> - P2p_socket.write_sync conn simple_msg >>= fun err -> - _assert (is_connection_closed err) __LOC__ "" >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + connect sched addr port id2 + >>=? fun auth_fd -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + sync ch + >>=? fun () -> + Lwt_unix.sleep 0.1 + >>= fun () -> + P2p_socket.write_sync conn simple_msg + >>= fun err -> + _assert (is_connection_closed err) __LOC__ "" + >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let run _dir = run_nodes client server - end module Garbled_data = struct - let encoding = let open Data_encoding in dynamic_size @@ option @@ string @@ -397,86 +422,95 @@ module Garbled_data = struct _', which blocks 'make test' *) let garbled_msg = let buf = MBytes.create (1 lsl 4) in - MBytes.set_int32 buf 0 (Int32.of_int 4); - MBytes.set_int32 buf 4 (Int32.of_int (-1)); - MBytes.set_int32 buf 8 (Int32.of_int (-1)); - MBytes.set_int32 buf 12 (Int32.of_int (-1)); + MBytes.set_int32 buf 0 (Int32.of_int 4) ; + MBytes.set_int32 buf 4 (Int32.of_int (-1)) ; + MBytes.set_int32 buf 8 (Int32.of_int (-1)) ; + MBytes.set_int32 buf 12 (Int32.of_int (-1)) ; buf let server _ch sched socket = - accept sched socket >>=? fun (_info, auth_fd) -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - P2p_socket.raw_write_sync conn garbled_msg >>=? fun () -> - P2p_socket.read conn >>= fun err -> - _assert (is_connection_closed err) __LOC__ "" >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + accept sched socket + >>=? fun (_info, auth_fd) -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + P2p_socket.raw_write_sync conn garbled_msg + >>=? fun () -> + P2p_socket.read conn + >>= fun err -> + _assert (is_connection_closed err) __LOC__ "" + >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let client _ch sched addr port = - connect sched addr port id2 >>=? fun auth_fd -> - P2p_socket.accept ~canceler auth_fd encoding >>=? fun conn -> - P2p_socket.read conn >>= fun err -> - _assert (is_decoding_error err) __LOC__ "" >>=? fun () -> - P2p_socket.close conn >>= fun _stat -> - return_unit + connect sched addr port id2 + >>=? fun auth_fd -> + P2p_socket.accept ~canceler auth_fd encoding + >>=? fun conn -> + P2p_socket.read conn + >>= fun err -> + _assert (is_decoding_error err) __LOC__ "" + >>=? fun () -> P2p_socket.close conn >>= fun _stat -> return_unit let run _dir = run_nodes client server - end let log_config = ref None -let spec = Arg.[ - - "--addr", String (fun p -> addr := Ipaddr.V6.of_string_exn p), - " Listening addr"; - - "-v", Unit (fun () -> - log_config := Some ( - Lwt_log_sink_unix.create_cfg - ~rules:("test.p2p.connection -> info; p2p.connection -> info") - () )), - " Log up to info msgs" ; - - "-vv", Unit (fun () -> - log_config := Some ( - Lwt_log_sink_unix.create_cfg - ~rules:("test.p2p.connection -> debug; p2p.connection -> debug") - () )), - " Log up to debug msgs"; - - ] - -let init_logs = lazy (Internal_event_unix.init ?lwt_log_sink:!log_config ()) +let spec = + Arg. 
+ [ ( "--addr", + String (fun p -> addr := Ipaddr.V6.of_string_exn p), + " Listening addr" ); + ( "-v", + Unit + (fun () -> + log_config := + Some + (Lwt_log_sink_unix.create_cfg + ~rules:"test.p2p.connection -> info; p2p.connection -> info" + ())), + " Log up to info msgs" ); + ( "-vv", + Unit + (fun () -> + log_config := + Some + (Lwt_log_sink_unix.create_cfg + ~rules: + "test.p2p.connection -> debug; p2p.connection -> debug" + ())), + " Log up to debug msgs" ) ] + +let init_logs = lazy (Internal_event_unix.init ?lwt_log_sink:!log_config ()) let wrap n f = - Alcotest_lwt.test_case n `Quick begin fun _ () -> - Lazy.force init_logs >>= fun () -> - f () >>= function - | Ok () -> Lwt.return_unit - | Error error -> - Format.kasprintf Pervasives.failwith "%a" pp_print_error error - end + Alcotest_lwt.test_case n `Quick (fun _ () -> + Lazy.force init_logs + >>= fun () -> + f () + >>= function + | Ok () -> + Lwt.return_unit + | Error error -> + Format.kasprintf Pervasives.failwith "%a" pp_print_error error) let main () = let anon_fun _num_peers = raise (Arg.Bad "No anonymous argument.") in let usage_msg = "Usage: %s.\nArguments are:" in Arg.parse spec anon_fun usage_msg ; - Alcotest.run ~argv:[|""|] "tezos-p2p" [ - "p2p-connection.", [ - wrap "low-level" Low_level.run ; - wrap "kick" Kick.run ; - wrap "kicked" Kicked.run ; - wrap "simple-message" Simple_message.run ; - wrap "chunked-message" Chunked_message.run ; - wrap "oversized-message" Oversized_message.run ; - wrap "close-on-read" Close_on_read.run ; - wrap "close-on-write" Close_on_write.run ; - wrap "garbled-data" Garbled_data.run ; - ] - ] + Alcotest.run + ~argv:[|""|] + "tezos-p2p" + [ ( "p2p-connection.", + [ wrap "low-level" Low_level.run; + wrap "kick" Kick.run; + wrap "kicked" Kicked.run; + wrap "simple-message" Simple_message.run; + wrap "chunked-message" Chunked_message.run; + wrap "oversized-message" Oversized_message.run; + wrap "close-on-read" Close_on_read.run; + wrap "close-on-write" Close_on_write.run; + wrap "garbled-data" Garbled_data.run ] ) ] let () = Sys.catch_break true ; - try main () - with _ -> () + try main () with _ -> () diff --git a/src/lib_protocol_compiler/.ocamlformat b/src/lib_protocol_compiler/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_protocol_compiler/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_protocol_compiler/byte.ml b/src/lib_protocol_compiler/byte.ml index c8382ee5dda8bd9c9c43e001125bdd84cece5fb7..e3424b348b31dbc9c7a3734d0512503e90deb542 100644 --- a/src/lib_protocol_compiler/byte.ml +++ b/src/lib_protocol_compiler/byte.ml @@ -36,27 +36,29 @@ let pack_objects output objects = let output = output ^ ".cmo" in - Compmisc.init_path true; + Compmisc.init_path true ; Bytepackager.package_files - Format.err_formatter Env.initial_safe_string objects output ; + Format.err_formatter + Env.initial_safe_string + objects + output ; Warnings.check_fatal () ; output let link_shared output objects = Compenv.(readenv Format.err_formatter Before_link) ; - Compmisc.init_path true; + Compmisc.init_path true ; Bytelink.link Format.err_formatter objects output ; Warnings.check_fatal () let compile_ml ?for_pack ml = let target = 
Filename.chop_extension ml in Clflags.for_package := for_pack ; - Compenv.(readenv Format.err_formatter (Before_compile ml)); + Compenv.(readenv Format.err_formatter (Before_compile ml)) ; Compile.implementation Format.err_formatter ml target ; Clflags.for_package := None ; target ^ ".cmo" -let () = - Clflags.native_code := false +let () = Clflags.native_code := false -let driver = Compiler.{ compile_ml ; link_shared ; pack_objects } +let driver = Compiler.{compile_ml; link_shared; pack_objects} diff --git a/src/lib_protocol_compiler/byte.mli b/src/lib_protocol_compiler/byte.mli index ab47182b443cdb4358becf635f05a0bb29acfd91..7061ab00f949d80bea9ec06a75dbb6f6e4442866 100644 --- a/src/lib_protocol_compiler/byte.mli +++ b/src/lib_protocol_compiler/byte.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val driver: Compiler.driver +val driver : Compiler.driver diff --git a/src/lib_protocol_compiler/compiler.ml b/src/lib_protocol_compiler/compiler.ml index 4b35acb7e28ef55f25ab3b146d66aaa66032f395..67b423dca5ae750feb896b6a3b732f13c4ad68bc 100644 --- a/src/lib_protocol_compiler/compiler.ml +++ b/src/lib_protocol_compiler/compiler.ml @@ -24,10 +24,10 @@ (*****************************************************************************) let warnings = "+a-4-6-7-9-29-40..42-44-45-48" + let warn_error = "-a+8" -let () = - Clflags.unsafe_string := false +let () = Clflags.unsafe_string := false (** Override the default 'Env.Persistent_signature.load' with a lookup in locally defined hashtable. @@ -39,16 +39,16 @@ let preloaded_cmis : (string, Env.Persistent_signature.t) Hashtbl.t = (* Set hook *) let () = Env.Persistent_signature.load := - (fun ~unit_name -> - try Some (Hashtbl.find preloaded_cmis (String.capitalize_ascii unit_name)) - with Not_found -> None) + fun ~unit_name -> + try + Some (Hashtbl.find preloaded_cmis (String.capitalize_ascii unit_name)) + with Not_found -> None let load_cmi_from_file file = - Hashtbl.add preloaded_cmis + Hashtbl.add + preloaded_cmis (String.capitalize_ascii Filename.(basename (chop_extension file))) - { filename = file ; - cmi = Cmi_format.read_cmi file ; - } + {filename = file; cmi = Cmi_format.read_cmi file} let load_embeded_cmi (unit_name, content) = let content = Bytes.of_string content in @@ -62,16 +62,15 @@ let load_embeded_cmi (unit_name, content) = let pos = pos + Marshal.total_size content pos in (* Read cmi_crcs *) let cmi_crcs = Marshal.from_bytes content pos in - let pos = pos + Marshal.total_size content pos in + let pos = pos + Marshal.total_size content pos in (* Read cmi_flags *) let cmi_flags = Marshal.from_bytes content pos in (* TODO check crcrs... 
*) Hashtbl.add preloaded_cmis (String.capitalize_ascii unit_name) - { filename = unit_name ^ ".cmi" ; - cmi = { cmi_name; cmi_sign; cmi_crcs; cmi_flags } ; - } + { filename = unit_name ^ ".cmi"; + cmi = {cmi_name; cmi_sign; cmi_crcs; cmi_flags} } let load_embeded_cmis cmis = List.iter load_embeded_cmi cmis @@ -88,42 +87,36 @@ let load_embeded_cmis cmis = List.iter load_embeded_cmi cmis *) - let tezos_protocol_env = let open Embedded_cmis in - [ - "CamlinternalFormatBasics", camlinternalFormatBasics_cmi ; - "Tezos_protocol_environment_sigs", tezos_protocol_environment_sigs_cmi ; - "Tezos_protocol_environment_sigs__V1", tezos_protocol_environment_sigs__V1_cmi ; - ] + [ ("CamlinternalFormatBasics", camlinternalFormatBasics_cmi); + ("Tezos_protocol_environment_sigs", tezos_protocol_environment_sigs_cmi); + ( "Tezos_protocol_environment_sigs__V1", + tezos_protocol_environment_sigs__V1_cmi ) ] let register_env = let open Embedded_cmis in - [ - "tezos_protocol_registerer__Registerer", tezos_protocol_registerer__Registerer_cmi ; - ] - + [ ( "tezos_protocol_registerer__Registerer", + tezos_protocol_registerer__Registerer_cmi ) ] (** Helpers *) -let (//) = Filename.concat +let ( // ) = Filename.concat let create_file ?(perm = 0o644) name content = let open Unix in let fd = openfile name [O_TRUNC; O_CREAT; O_WRONLY] perm in - ignore(write_substring fd content 0 (String.length content)); + ignore (write_substring fd content 0 (String.length content)) ; close fd let safe_unlink file = - try Unix.unlink file - with Unix.Unix_error(Unix.ENOENT, _, _) -> () + try Unix.unlink file with Unix.Unix_error (Unix.ENOENT, _, _) -> () -let unlink_cmi dir (file, _) = - safe_unlink (dir // file ^ ".cmi") +let unlink_cmi dir (file, _) = safe_unlink ((dir // file) ^ ".cmi") let unlink_object obj = - safe_unlink obj; - safe_unlink (Filename.chop_suffix obj ".cmx" ^ ".cmi"); + safe_unlink obj ; + safe_unlink (Filename.chop_suffix obj ".cmx" ^ ".cmi") ; safe_unlink (Filename.chop_suffix obj ".cmx" ^ ".o") let debug_flag = ref false @@ -133,18 +126,18 @@ let debug fmt = else Format.ifprintf Format.err_formatter fmt let mktemp_dir () = - Filename.get_temp_dir_name () // - Printf.sprintf "tezos-protocol-build-%06X" (Random.int 0xFFFFFF) + Filename.get_temp_dir_name () + // Printf.sprintf "tezos-protocol-build-%06X" (Random.int 0xFFFFFF) (** Main *) type driver = { - compile_ml: ?for_pack:string -> string -> string ; - pack_objects: string -> string list -> string ; - link_shared: string -> string list -> unit ; + compile_ml : ?for_pack:string -> string -> string; + pack_objects : string -> string list -> string; + link_shared : string -> string list -> unit } -let main { compile_ml ; pack_objects ; link_shared } = +let main {compile_ml; pack_objects; link_shared} = Random.self_init () ; let anonymous = ref [] and static = ref false @@ -154,130 +147,145 @@ let main { compile_ml ; pack_objects ; link_shared } = and output_dep = ref false and hash_only = ref false and check_protocol_hash = ref true in - let args_spec = [ - "-o", Arg.String (fun s -> output_file := Some s), "" ; - "-hash-only", Arg.Set hash_only, " Only display the hash of the protocol and don't compile" ; - "-no-hash-check", Arg.Clear check_protocol_hash, " Don't check that TEZOS_PROTOCOL declares the expected protocol hash (if existent)" ; - "-static", Arg.Set static, " Only build the static library (no .cmxs)" ; - "-register", Arg.Set register, " Generate the `Registerer` module" ; - "-bin-annot", Arg.Set Clflags.binary_annotations, " (see ocamlopt)" ; - 
"-g", Arg.Set Clflags.debug, " (see ocamlopt)" ; - "-output-dep", Arg.Set output_dep, " ..." ; - "-build-dir", Arg.String (fun s -> build_dir := Some s), - "use custom build directory and preserve build artifacts" - ] in + let args_spec = + [ ("-o", Arg.String (fun s -> output_file := Some s), ""); + ( "-hash-only", + Arg.Set hash_only, + " Only display the hash of the protocol and don't compile" ); + ( "-no-hash-check", + Arg.Clear check_protocol_hash, + " Don't check that TEZOS_PROTOCOL declares the expected protocol hash \ + (if existent)" ); + ("-static", Arg.Set static, " Only build the static library (no .cmxs)"); + ("-register", Arg.Set register, " Generate the `Registerer` module"); + ("-bin-annot", Arg.Set Clflags.binary_annotations, " (see ocamlopt)"); + ("-g", Arg.Set Clflags.debug, " (see ocamlopt)"); + ("-output-dep", Arg.Set output_dep, " ..."); + ( "-build-dir", + Arg.String (fun s -> build_dir := Some s), + "use custom build directory and preserve build artifacts" ) ] + in let usage_msg = - Printf.sprintf - "Usage: %s [options] <srcdir>\nOptions are:" - Sys.argv.(0) in + Printf.sprintf "Usage: %s [options] <srcdir>\nOptions are:" Sys.argv.(0) + in Arg.parse args_spec (fun s -> anonymous := s :: !anonymous) usage_msg ; let source_dir = match List.rev !anonymous with - | [ protocol_dir ] -> protocol_dir - | _ -> Arg.usage args_spec usage_msg ; Pervasives.exit 1 in - let announced_hash, protocol = + | [protocol_dir] -> + protocol_dir + | _ -> + Arg.usage args_spec usage_msg ; + Pervasives.exit 1 + in + let (announced_hash, protocol) = match Lwt_main.run (Lwt_utils_unix.Protocol.read_dir source_dir) with - | Ok (hash, proto) -> (hash, proto) + | Ok (hash, proto) -> + (hash, proto) | Error err -> - Format.eprintf - "Failed to read TEZOS_PROTOCOL: %a" pp_print_error err ; - exit 2 in + Format.eprintf "Failed to read TEZOS_PROTOCOL: %a" pp_print_error err ; + exit 2 + in let real_hash = Protocol.hash protocol in - if !hash_only then begin + if !hash_only then ( Format.printf "%a@." Protocol_hash.pp real_hash ; - exit 0 ; - end ; + exit 0 ) ; let hash = match announced_hash with - | None -> real_hash + | None -> + real_hash | Some hash when !check_protocol_hash && not (Protocol_hash.equal real_hash hash) -> Format.eprintf "Inconsistent hash for protocol in TEZOS_PROTOCOL.@\n\ Found: %a@\n\ Expected: %a@." 
- Protocol_hash.pp hash - Protocol_hash.pp real_hash ; + Protocol_hash.pp + hash + Protocol_hash.pp + real_hash ; exit 2 - | Some hash -> hash in + | Some hash -> + hash + in let build_dir = match !build_dir with | None -> let dir = mktemp_dir () in at_exit (fun () -> Lwt_main.run (Lwt_utils_unix.remove_dir dir)) ; dir - | Some dir -> dir in + | Some dir -> + dir + in let output = match !output_file with - | Some output -> output - | None -> Format.asprintf "proto_%a" Protocol_hash.pp hash in + | Some output -> + output + | None -> + Format.asprintf "proto_%a" Protocol_hash.pp hash + in Lwt_main.run (Lwt_utils_unix.create_dir ~perm:0o755 build_dir) ; - Lwt_main.run (Lwt_utils_unix.create_dir ~perm:0o755 (Filename.dirname output)) ; + Lwt_main.run + (Lwt_utils_unix.create_dir ~perm:0o755 (Filename.dirname output)) ; (* Generate the 'functor' *) let functor_file = build_dir // "functor.ml" in let oc = open_out functor_file in - Packer.dump oc hash + Packer.dump + oc + hash (Array.map - begin fun { Protocol.name ; _ } -> + (fun {Protocol.name; _} -> let name_lowercase = String.uncapitalize_ascii name in - source_dir // name_lowercase ^ ".ml" - end + (source_dir // name_lowercase) ^ ".ml") (Array.of_list protocol.components)) ; close_out oc ; (* Compile the protocol *) let proto_cmi = Filename.chop_extension functor_file ^ ".cmi" in let functor_unit = - String.capitalize_ascii - Filename.(basename (chop_extension functor_file)) in + String.capitalize_ascii Filename.(basename (chop_extension functor_file)) + in let for_pack = String.capitalize_ascii (Filename.basename output) in (* Initialize the compilers *) - Compenv.(readenv Format.err_formatter Before_args); - Clflags.nopervasives := true; + Compenv.(readenv Format.err_formatter Before_args) ; + Clflags.nopervasives := true ; Clflags.no_std_include := true ; Clflags.include_dirs := [Filename.dirname functor_file] ; Warnings.parse_options false warnings ; Warnings.parse_options true warn_error ; - load_embeded_cmis tezos_protocol_env ; let packed_protocol_object = compile_ml ~for_pack functor_file in - let register_objects = - if not !register then - [] - else begin + if not !register then [] + else ( load_embeded_cmis register_env ; load_cmi_from_file proto_cmi ; (* Compiler the 'registering module' *) let register_file = Filename.dirname functor_file // "register.ml" in - create_file register_file + create_file + register_file (Printf.sprintf "module Name = struct let name = %S end\n\ - \ let () = Tezos_protocol_registerer__Registerer.register Name.name (module %s.Make)" + \ let () = Tezos_protocol_registerer__Registerer.register \ + Name.name (module %s.Make)" (Protocol_hash.to_b58check hash) functor_unit) ; let register_object = compile_ml ~for_pack register_file in - [ register_object ] - end + [register_object] ) in - let resulting_object = - pack_objects output (packed_protocol_object :: register_objects) in - + pack_objects output (packed_protocol_object :: register_objects) + in (* Create the final [cmxs] *) - if not !static then begin + if not !static then ( Clflags.link_everything := true ; - link_shared (output ^ ".cmxs") [resulting_object] ; - end ; - - if !output_dep then begin + link_shared (output ^ ".cmxs") [resulting_object] ) ; + if !output_dep then ( let dsrc = Digest.file functor_file in let dimpl = Digest.file resulting_object in - let dintf = Digest.file (Filename.chop_extension resulting_object ^ ".cmi") in + let dintf = + Digest.file (Filename.chop_extension resulting_object ^ ".cmi") + in Format.printf "module Toto 
= struct include %s end ;; \n" for_pack ; Format.printf "let src_digest = %S ;;\n" (Digest.to_hex dsrc) ; Format.printf "let impl_digest = %S ;;\n" (Digest.to_hex dimpl) ; - Format.printf "let intf_digest = %S ;;\n" (Digest.to_hex dintf) - end ; - + Format.printf "let intf_digest = %S ;;\n" (Digest.to_hex dintf) ) ; Format.printf "Success: %a.@." Protocol_hash.pp hash - diff --git a/src/lib_protocol_compiler/embedded_cmis.mli b/src/lib_protocol_compiler/embedded_cmis.mli index a4759b727e7895cebd442808deac56ce3a477a3d..d9f1612feb2f8b5aaf600433dbb1291dca6ef0f1 100644 --- a/src/lib_protocol_compiler/embedded_cmis.mli +++ b/src/lib_protocol_compiler/embedded_cmis.mli @@ -23,7 +23,10 @@ (* *) (*****************************************************************************) -val camlinternalFormatBasics_cmi: string -val tezos_protocol_environment_sigs_cmi: string -val tezos_protocol_environment_sigs__V1_cmi: string -val tezos_protocol_registerer__Registerer_cmi: string +val camlinternalFormatBasics_cmi : string + +val tezos_protocol_environment_sigs_cmi : string + +val tezos_protocol_environment_sigs__V1_cmi : string + +val tezos_protocol_registerer__Registerer_cmi : string diff --git a/src/lib_protocol_compiler/main_byte.ml b/src/lib_protocol_compiler/main_byte.ml index a5c99cec3e2bf469fb4e4582216f661d1bc0fcc0..336af4159d8a8a04b5b9fe48b562ea6b92b072b4 100644 --- a/src/lib_protocol_compiler/main_byte.ml +++ b/src/lib_protocol_compiler/main_byte.ml @@ -29,5 +29,5 @@ let () = Tezos_protocol_compiler_byte.Byte.driver ; Pervasives.exit 0 with exn -> - Format.eprintf "%a\n%!" Errors.report_error exn; + Format.eprintf "%a\n%!" Errors.report_error exn ; Pervasives.exit 1 diff --git a/src/lib_protocol_compiler/main_embedded_packer.ml b/src/lib_protocol_compiler/main_embedded_packer.ml index 7cdc23082dc58f8ff134f089e20ef11d035d1588..b5356adee49900ce6bf7004d2699b01aa0a91efa 100644 --- a/src/lib_protocol_compiler/main_embedded_packer.ml +++ b/src/lib_protocol_compiler/main_embedded_packer.ml @@ -24,26 +24,29 @@ (*****************************************************************************) let srcdir = Sys.argv.(1) + let version = Sys.argv.(2) let srcdir = - if Filename.basename srcdir = "TEZOS_PROTOCOL" then - Filename.dirname srcdir - else - srcdir + if Filename.basename srcdir = "TEZOS_PROTOCOL" then Filename.dirname srcdir + else srcdir -let hash, sources = +let (hash, sources) = match Lwt_main.run (Lwt_utils_unix.Protocol.read_dir srcdir) with | Ok (None, proto) -> (Protocol.hash proto, proto) | Ok (Some hash, proto) -> (hash, proto) | Error err -> - Format.kasprintf Pervasives.failwith - "Failed to read TEZOS_PROTOCOL: %a" pp_print_error err + Format.kasprintf + Pervasives.failwith + "Failed to read TEZOS_PROTOCOL: %a" + pp_print_error + err let () = - Format.printf {| + Format.printf + {| module Source = struct let hash = Some (Tezos_crypto.Protocol_hash.of_b58check_exn %S) @@ -51,14 +54,17 @@ module Source = struct end @.|} (Protocol_hash.to_b58check hash) - Protocol.pp_ocaml sources + Protocol.pp_ocaml + sources let () = - Format.printf {| + Format.printf + {| module Registered = Tezos_protocol_updater.Registered_protocol.Register_embedded (Tezos_embedded_protocol_environment_%s.Environment) (Tezos_embedded_raw_protocol_%s.Main) (Source) @.|} - version version + version + version diff --git a/src/lib_protocol_compiler/main_native.ml b/src/lib_protocol_compiler/main_native.ml index 5c11b6783f037ce92417307d00b93d04a1be6dd8..68a3907c3b83aff06072d247105da2cca3c3a825 100644 --- 
a/src/lib_protocol_compiler/main_native.ml +++ b/src/lib_protocol_compiler/main_native.ml @@ -29,5 +29,5 @@ let () = Tezos_protocol_compiler_native.Native.driver ; Pervasives.exit 0 with exn -> - Format.eprintf "%a\n%!" Opterrors.report_error exn; + Format.eprintf "%a\n%!" Opterrors.report_error exn ; Pervasives.exit 1 diff --git a/src/lib_protocol_compiler/main_packer.ml b/src/lib_protocol_compiler/main_packer.ml index 80212dcabdd3576f130eda652651309e8efdfd26..ed54509324daa57924df65dbd1571bd7bbcddace 100644 --- a/src/lib_protocol_compiler/main_packer.ml +++ b/src/lib_protocol_compiler/main_packer.ml @@ -23,35 +23,43 @@ (* *) (*****************************************************************************) -let (//) = Filename.concat +let ( // ) = Filename.concat let () = Random.self_init () ; let anonymous = ref [] in - let args_spec = [ ] in - let usage_msg = - Printf.sprintf "Usage: %s [options] <srcdir>" Sys.argv.(0) in + let args_spec = [] in + let usage_msg = Printf.sprintf "Usage: %s [options] <srcdir>" Sys.argv.(0) in Arg.parse args_spec (fun s -> anonymous := s :: !anonymous) usage_msg ; let source_dir = match List.rev !anonymous with - | [ source_dir ] when Filename.basename source_dir = "TEZOS_PROTOCOL"-> + | [source_dir] when Filename.basename source_dir = "TEZOS_PROTOCOL" -> Filename.dirname source_dir - | [ source_dir ] -> source_dir - | _ -> Arg.usage args_spec usage_msg ; Pervasives.exit 1 in - let hash, protocol = + | [source_dir] -> + source_dir + | _ -> + Arg.usage args_spec usage_msg ; + Pervasives.exit 1 + in + let (hash, protocol) = match Lwt_main.run (Lwt_utils_unix.Protocol.read_dir source_dir) with | Ok (None, proto) -> (Protocol.hash proto, proto) | Ok (Some hash, proto) -> (hash, proto) | Error err -> - Format.kasprintf Pervasives.failwith - "Failed to read TEZOS_PROTOCOL: %a" pp_print_error err in + Format.kasprintf + Pervasives.failwith + "Failed to read TEZOS_PROTOCOL: %a" + pp_print_error + err + in (* Generate the 'functor' *) - Packer.dump stdout hash + Packer.dump + stdout + hash (Array.map - begin fun { Protocol.name ; _ } -> + (fun {Protocol.name; _} -> let name_lowercase = String.uncapitalize_ascii name in - source_dir // name_lowercase ^ ".ml" - end + (source_dir // name_lowercase) ^ ".ml") (Array.of_list protocol.components)) diff --git a/src/lib_protocol_compiler/native.ml b/src/lib_protocol_compiler/native.ml index 75e723c73201e038b7dd51cd7e185809694092bf..c32bead7da924be107510c95d7d6a961ac6f61dd 100644 --- a/src/lib_protocol_compiler/native.ml +++ b/src/lib_protocol_compiler/native.ml @@ -40,12 +40,15 @@ module Backend = struct (* See backend_intf.mli. 
*) let symbol_for_global' = Compilenv.symbol_for_global' + let closure_symbol = Compilenv.closure_symbol let really_import_approx = Import_approx.really_import_approx + let import_symbol = Import_approx.import_symbol let size_int = Arch.size_int + let big_endian = Arch.big_endian let max_sensible_number_of_arguments = @@ -59,27 +62,30 @@ let backend = (module Backend : Backend_intf.S) let pack_objects output objects = let output = output ^ ".cmx" in - Compmisc.init_path true; + Compmisc.init_path true ; Asmpackager.package_files - ~backend Format.err_formatter Env.initial_safe_string objects output ; + ~backend + Format.err_formatter + Env.initial_safe_string + objects + output ; Warnings.check_fatal () ; output let link_shared output objects = Compenv.(readenv Format.err_formatter Before_link) ; - Compmisc.init_path true; + Compmisc.init_path true ; Asmlink.link_shared Format.err_formatter objects output ; Warnings.check_fatal () let compile_ml ?for_pack ml = let target = Filename.chop_extension ml in Clflags.for_package := for_pack ; - Compenv.(readenv Format.err_formatter (Before_compile ml)); + Compenv.(readenv Format.err_formatter (Before_compile ml)) ; Optcompile.implementation ~backend Format.err_formatter ml target ; Clflags.for_package := None ; target ^ ".cmx" -let () = - Clflags.native_code := true +let () = Clflags.native_code := true -let driver = { compile_ml ; link_shared ; pack_objects } +let driver = {compile_ml; link_shared; pack_objects} diff --git a/src/lib_protocol_compiler/native.mli b/src/lib_protocol_compiler/native.mli index ab47182b443cdb4358becf635f05a0bb29acfd91..7061ab00f949d80bea9ec06a75dbb6f6e4442866 100644 --- a/src/lib_protocol_compiler/native.mli +++ b/src/lib_protocol_compiler/native.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val driver: Compiler.driver +val driver : Compiler.driver diff --git a/src/lib_protocol_compiler/packer.ml b/src/lib_protocol_compiler/packer.ml index 855dc604453f52b555106aeaee06cb4da35d067c..355bf6933f8b1c878429d7d8d12906bc1259be5a 100644 --- a/src/lib_protocol_compiler/packer.ml +++ b/src/lib_protocol_compiler/packer.ml @@ -29,51 +29,53 @@ let dump_file oc file = let buf = Bytes.create buflen in let rec loop () = let len = input ic buf 0 buflen in - if len <> 0 then begin - Printf.fprintf oc "%s" - (if len = buflen then Bytes.unsafe_to_string buf else Bytes.sub_string buf 0 len) ; - loop () - end + if len <> 0 then ( + Printf.fprintf + oc + "%s" + ( if len = buflen then Bytes.unsafe_to_string buf + else Bytes.sub_string buf 0 len ) ; + loop () ) in - loop () ; - close_in ic + loop () ; close_in ic let include_ml oc file = let unit = - String.capitalize_ascii - (Filename.chop_extension (Filename.basename file)) in + String.capitalize_ascii (Filename.chop_extension (Filename.basename file)) + in (* FIXME insert .mli... *) Printf.fprintf oc "module %s " unit ; - if Sys.file_exists (file ^ "i") then begin + if Sys.file_exists (file ^ "i") then ( Printf.fprintf oc ": sig\n" ; - Printf.fprintf oc "# 1 %S\n" (file ^ "i"); + Printf.fprintf oc "# 1 %S\n" (file ^ "i") ; dump_file oc (file ^ "i") ; - Printf.fprintf oc "end " ; - end ; + Printf.fprintf oc "end " ) ; Printf.fprintf oc "= struct\n" ; Printf.fprintf oc "# 1 %S\n" file ; dump_file oc file ; Printf.fprintf oc "end\n%!" 
-let opened_modules = [ - "Tezos_protocol_environment" ; - "Pervasives" ; - "Error_monad" ; - "Logging" ; -] +let opened_modules = + ["Tezos_protocol_environment"; "Pervasives"; "Error_monad"; "Logging"] let dump oc hash files = - Printf.fprintf oc - "module Make (Tezos_protocol_environment : Tezos_protocol_environment_sigs__V1.T) = struct\n" ; + Printf.fprintf + oc + "module Make (Tezos_protocol_environment : \ + Tezos_protocol_environment_sigs__V1.T) = struct\n" ; Printf.fprintf oc "[@@@ocaml.warning \"-33\"]\n" ; List.iter (Printf.fprintf oc "open %s\n") opened_modules ; Printf.fprintf oc "[@@@ocaml.warning \"+33\"]\n" ; - Printf.fprintf oc "let hash = Protocol_hash.of_b58check_exn %S;;\n" + Printf.fprintf + oc + "let hash = Protocol_hash.of_b58check_exn %S;;\n" (Protocol_hash.to_b58check hash) ; for i = 0 to Array.length files - 1 do - include_ml oc files.(i) ; + include_ml oc files.(i) done ; - Printf.fprintf oc " include %s\n" + Printf.fprintf + oc + " include %s\n" (String.capitalize_ascii (Filename.basename (Filename.chop_extension files.(Array.length files - 1)))) ; diff --git a/src/lib_protocol_compiler/packer.mli b/src/lib_protocol_compiler/packer.mli index 1822ef528039051922c447d5748584e9bd2f933e..dbc515becf59df91440ac367f51f059e344dcfb2 100644 --- a/src/lib_protocol_compiler/packer.mli +++ b/src/lib_protocol_compiler/packer.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val dump: out_channel -> Protocol_hash.t -> string array -> unit +val dump : out_channel -> Protocol_hash.t -> string array -> unit diff --git a/src/lib_protocol_compiler/registerer.ml b/src/lib_protocol_compiler/registerer.ml index 375df655f30decfc1a577f264c095916576da239..4e4fdb245807fe0a4348e43d28a7e4a1021ad8d8 100644 --- a/src/lib_protocol_compiler/registerer.ml +++ b/src/lib_protocol_compiler/registerer.ml @@ -23,13 +23,13 @@ (* *) (*****************************************************************************) -module type PROTOCOL_V1 = - functor (Env : Tezos_protocol_environment_sigs.V1.T) -> Env.Updater.PROTOCOL +module type PROTOCOL_V1 = functor + (Env : Tezos_protocol_environment_sigs.V1.T) + -> Env.Updater.PROTOCOL module VersionTable = Protocol_hash.Table -let versions : (module PROTOCOL_V1) VersionTable.t = - VersionTable.create 20 +let versions : (module PROTOCOL_V1) VersionTable.t = VersionTable.create 20 let register hash proto = let hash = Protocol_hash.of_b58check_exn hash in @@ -38,7 +38,5 @@ let register hash proto = let mem hash = VersionTable.mem versions hash let get_exn hash = VersionTable.find versions hash -let get hash = - try Some (get_exn hash) - with Not_found -> None +let get hash = try Some (get_exn hash) with Not_found -> None diff --git a/src/lib_protocol_compiler/registerer.mli b/src/lib_protocol_compiler/registerer.mli index 45e322e8ed2486c0de2c6d8db400ddd423dc3932..869ed7546b0c716edd57cb5a322e7fb7c31025b8 100644 --- a/src/lib_protocol_compiler/registerer.mli +++ b/src/lib_protocol_compiler/registerer.mli @@ -23,11 +23,14 @@ (* *) (*****************************************************************************) -module type PROTOCOL_V1 = - functor (Env : Tezos_protocol_environment_sigs.V1.T) -> Env.Updater.PROTOCOL +module type PROTOCOL_V1 = functor + (Env : Tezos_protocol_environment_sigs.V1.T) + -> Env.Updater.PROTOCOL -val register: string -> (module PROTOCOL_V1) -> unit +val register : string -> (module PROTOCOL_V1) -> unit -val mem: Protocol_hash.t -> bool -val get: Protocol_hash.t -> (module PROTOCOL_V1) option 
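(* A minimal usage sketch for the registerer above, assuming a hypothetical
   packed protocol functor [Demo_protocol.Make] matching PROTOCOL_V1 and a
   placeholder string standing in for a valid Base58Check protocol hash;
   none of these names come from the patch. *)
let () =
  Registerer.register
    "ProtoDemo...placeholder-b58check-hash..."
    (module Demo_protocol.Make : Registerer.PROTOCOL_V1)

let find_protocol hash =
  match Registerer.get hash with
  | Some proto -> proto
  | None -> Pervasives.failwith "protocol not registered"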
-val get_exn: Protocol_hash.t -> (module PROTOCOL_V1) +val mem : Protocol_hash.t -> bool + +val get : Protocol_hash.t -> (module PROTOCOL_V1) option + +val get_exn : Protocol_hash.t -> (module PROTOCOL_V1) diff --git a/src/lib_protocol_compiler/replace.ml b/src/lib_protocol_compiler/replace.ml index b6a5b8ce61c8e657a07c4e2415fa2016dd43c518..e584871c549256df0c60a1dcd56b8bfc4e80f102 100644 --- a/src/lib_protocol_compiler/replace.ml +++ b/src/lib_protocol_compiler/replace.ml @@ -23,7 +23,7 @@ (* *) (*****************************************************************************) -module StringMap = Map.Make(String) +module StringMap = Map.Make (String) let regexp = Str.regexp "%%[^%]*%%" @@ -39,16 +39,18 @@ let guess_version () = String.sub dirname x (n - x) else let updir = Filename.dirname dir in - if updir = dir then begin + if updir = dir then ( Format.eprintf "Cannot guess protocol version in path!@.Looking for `%s*` in `%s`@." - prefix current_dir ; - exit 1 - end; - loop updir in + prefix + current_dir ; + exit 1 ) ; + loop updir + in loop (Sys.getcwd ()) -let warning_message = {| +let warning_message = + {| ; ; /!\ /!\ Do not modify this file /!\ /!\ @@ -66,25 +68,25 @@ let replace ~template ~destination vars = while true do let line = input_line inch in let line = - Str.global_substitute regexp begin fun s -> - let matched = Str.matched_string s in - let var = String.sub matched 2 (String.length matched - 4) in - match StringMap.find_opt var vars with - | Some value -> value - | None -> - prerr_endline ("Unknown variable: " ^ var) ; - exit 1 - end line in - output_string outch line ; - output_string outch "\n" ; - done ; - with End_of_file -> - flush outch ; - close_out outch ; - () + Str.global_substitute + regexp + (fun s -> + let matched = Str.matched_string s in + let var = String.sub matched 2 (String.length matched - 4) in + match StringMap.find_opt var vars with + | Some value -> + value + | None -> + prerr_endline ("Unknown variable: " ^ var) ; + exit 1) + line + in + output_string outch line ; output_string outch "\n" + done + with End_of_file -> flush outch ; close_out outch ; () + +let module_name (c : Protocol.component) = String.capitalize_ascii c.name -let module_name (c : Protocol.component) = - String.capitalize_ascii c.name let sources_name (c : Protocol.component) = let name = String.lowercase_ascii c.name in match c.interface with @@ -96,41 +98,42 @@ let sources_name (c : Protocol.component) = let process ~template ~destination (protocol : Protocol.t) lib_version = let version = String.concat "-" (String.split_on_char '_' lib_version) in let vars = - StringMap.empty |> - StringMap.add "VERSION" version |> - StringMap.add "LIB_VERSION" lib_version |> - StringMap.add "MODULES" - (String.concat " " (List.map module_name protocol.components)) |> - StringMap.add "SOURCES" - (String.concat " " (List.map sources_name protocol.components)) in + StringMap.empty + |> StringMap.add "VERSION" version + |> StringMap.add "LIB_VERSION" lib_version + |> StringMap.add + "MODULES" + (String.concat " " (List.map module_name protocol.components)) + |> StringMap.add + "SOURCES" + (String.concat " " (List.map sources_name protocol.components)) + in replace ~template ~destination vars let read_proto destination = let source_dir = if Filename.is_relative destination then - Filename.concat - current_dir (Filename.dirname destination) - else - Filename.dirname destination in + Filename.concat current_dir (Filename.dirname destination) + else Filename.dirname destination + in match 
Lwt_main.run (Lwt_utils_unix.Protocol.read_dir source_dir) with | Ok (None, proto) -> (Protocol.hash proto, proto) | Ok (Some hash, proto) -> (hash, proto) | Error err -> - Format.kasprintf Pervasives.failwith + Format.kasprintf + Pervasives.failwith "Failed to read TEZOS_PROTOCOL in %s:@ %a" source_dir - pp_print_error err + pp_print_error + err let main () = let template = Sys.argv.(1) in let destination = Sys.argv.(2) in - let version = - try Sys.argv.(3) - with _ -> guess_version () in - let _hash, proto = read_proto destination in + let version = try Sys.argv.(3) with _ -> guess_version () in + let (_hash, proto) = read_proto destination in process ~template ~destination proto version -let () = - main () +let () = main () diff --git a/src/lib_protocol_environment/.ocamlformat b/src/lib_protocol_environment/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_protocol_environment/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_protocol_environment/sigs/v1/RPC_answer.mli b/src/lib_protocol_environment/sigs/v1/RPC_answer.mli index 1632702ea028dcf5ee4da744a762fa7260acf617..ea5efb297f518c3ddc8f42cda39277dfca37959a 100644 --- a/src/lib_protocol_environment/sigs/v1/RPC_answer.mli +++ b/src/lib_protocol_environment/sigs/v1/RPC_answer.mli @@ -33,15 +33,14 @@ type 'o t = | `Forbidden of error list option (* 403 *) | `Not_found of error list option (* 404 *) | `Conflict of error list option (* 409 *) - | `Error of error list option (* 500 *) - ] + | `Error of error list option (* 500 *) ] -and 'a stream = { - next: unit -> 'a option Lwt.t ; - shutdown: unit -> unit ; -} +and 'a stream = {next : unit -> 'a option Lwt.t; shutdown : unit -> unit} -val return: 'o -> 'o t Lwt.t -val return_stream: 'o stream -> 'o t Lwt.t -val not_found: 'o t Lwt.t -val fail: error list -> 'a t Lwt.t +val return : 'o -> 'o t Lwt.t + +val return_stream : 'o stream -> 'o t Lwt.t + +val not_found : 'o t Lwt.t + +val fail : error list -> 'a t Lwt.t diff --git a/src/lib_protocol_environment/sigs/v1/RPC_arg.mli b/src/lib_protocol_environment/sigs/v1/RPC_arg.mli index 98218d4274c406fd9a9909e3abc52661ae769b2e..2779143c92288c29f5e9a6ac281a54ec7e374566 100644 --- a/src/lib_protocol_environment/sigs/v1/RPC_arg.mli +++ b/src/lib_protocol_environment/sigs/v1/RPC_arg.mli @@ -24,27 +24,33 @@ (*****************************************************************************) type 'a t + type 'a arg = 'a t -val make: + +val make : ?descr:string -> name:string -> destruct:(string -> ('a, string) result) -> construct:('a -> string) -> - unit -> 'a arg + unit -> + 'a arg + +type descr = {name : string; descr : string option} + +val descr : 'a arg -> descr -type descr = { - name: string ; - descr: string option ; -} -val descr: 'a arg -> descr +val int : int arg -val int: int arg -val int32: int32 arg -val int64: int64 arg -val float: float arg -val string: string arg +val int32 : int32 arg -val like: 'a arg -> ?descr:string -> string -> 'a arg +val int64 : int64 arg + +val float : float arg + +val string : string arg + +val like : 'a arg -> ?descr:string -> string -> 'a arg type ('a, 'b) eq = Eq : ('a, 'a) eq -val eq: 'a arg -> 'b arg -> ('a, 'b) eq option + +val eq 
: 'a arg -> 'b arg -> ('a, 'b) eq option diff --git a/src/lib_protocol_environment/sigs/v1/RPC_context.mli b/src/lib_protocol_environment/sigs/v1/RPC_context.mli index 83d8013c5221ec1d17fe65087e2792cabdd21e83..1826b752b89baa357002090933dc5d67962aefdc 100644 --- a/src/lib_protocol_environment/sigs/v1/RPC_context.mli +++ b/src/lib_protocol_environment/sigs/v1/RPC_context.mli @@ -25,54 +25,113 @@ type t = Updater.rpc_context -class type ['pr] simple = object - method call_proto_service0 : - 'm 'q 'i 'o. - ([< RPC_service.meth ] as 'm, t, t, 'q, 'i, 'o) RPC_service.t -> - 'pr -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t - method call_proto_service1 : - 'm 'a 'q 'i 'o. - ([< RPC_service.meth ] as 'm, t, t * 'a, 'q, 'i, 'o) RPC_service.t -> - 'pr -> 'a -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t - method call_proto_service2 : - 'm 'a 'b 'q 'i 'o. - ([< RPC_service.meth ] as 'm, t, (t * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> - 'pr -> 'a -> 'b -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t - method call_proto_service3 : - 'm 'a 'b 'c 'q 'i 'o. - ([< RPC_service.meth ] as 'm, t, ((t * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> - 'pr -> 'a -> 'b -> 'c -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t -end +class type ['pr] simple = + object + method call_proto_service0 : + 'm 'q 'i 'o. + (([< RPC_service.meth] as 'm), t, t, 'q, 'i, 'o) RPC_service.t -> 'pr -> + 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t -val make_call0: - ([< RPC_service.meth ], t, t, 'q, 'i, 'o) RPC_service.t -> - 'pr #simple -> 'pr -> 'q -> 'i -> 'o shell_tzresult Lwt.t + method call_proto_service1 : + 'm 'a 'q 'i 'o. + (([< RPC_service.meth] as 'm), t, t * 'a, 'q, 'i, 'o) RPC_service.t -> + 'pr -> 'a -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t -val make_call1: - ([< RPC_service.meth ], t, t * 'a, 'q, 'i, 'o) RPC_service.t -> - 'pr #simple -> 'pr -> 'a -> 'q -> 'i -> 'o shell_tzresult Lwt.t + method call_proto_service2 : + 'm 'a 'b 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + t, + (t * 'a) * 'b, + 'q, + 'i, + 'o ) + RPC_service.t -> 'pr -> 'a -> 'b -> 'q -> 'i -> + 'o Error_monad.shell_tzresult Lwt.t -val make_call2: - ([< RPC_service.meth ], t, (t * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> - 'pr #simple -> 'pr -> 'a -> 'b -> 'q -> 'i -> 'o shell_tzresult Lwt.t + method call_proto_service3 : + 'm 'a 'b 'c 'q 'i 'o. 
+ ( ([< RPC_service.meth] as 'm), + t, + ((t * 'a) * 'b) * 'c, + 'q, + 'i, + 'o ) + RPC_service.t -> 'pr -> 'a -> 'b -> 'c -> 'q -> 'i -> + 'o Error_monad.shell_tzresult Lwt.t + end -val make_call3: - ([< RPC_service.meth ], t, ((t * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> - 'pr #simple -> 'pr -> 'a -> 'b -> 'c -> 'q -> 'i -> 'o shell_tzresult Lwt.t +val make_call0 : + ([< RPC_service.meth], t, t, 'q, 'i, 'o) RPC_service.t -> + 'pr #simple -> + 'pr -> + 'q -> + 'i -> + 'o shell_tzresult Lwt.t +val make_call1 : + ([< RPC_service.meth], t, t * 'a, 'q, 'i, 'o) RPC_service.t -> + 'pr #simple -> + 'pr -> + 'a -> + 'q -> + 'i -> + 'o shell_tzresult Lwt.t -val make_opt_call0: - ([< RPC_service.meth ], t, t, 'q, 'i, 'o) RPC_service.t -> - 'pr #simple -> 'pr -> 'q -> 'i -> 'o option shell_tzresult Lwt.t +val make_call2 : + ([< RPC_service.meth], t, (t * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> + 'pr #simple -> + 'pr -> + 'a -> + 'b -> + 'q -> + 'i -> + 'o shell_tzresult Lwt.t -val make_opt_call1: - ([< RPC_service.meth ], t, t * 'a, 'q, 'i, 'o) RPC_service.t -> - 'pr #simple -> 'pr -> 'a -> 'q -> 'i -> 'o option shell_tzresult Lwt.t +val make_call3 : + ([< RPC_service.meth], t, ((t * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> + 'pr #simple -> + 'pr -> + 'a -> + 'b -> + 'c -> + 'q -> + 'i -> + 'o shell_tzresult Lwt.t -val make_opt_call2: - ([< RPC_service.meth ], t, (t * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> - 'pr #simple -> 'pr -> 'a -> 'b -> 'q -> 'i -> 'o option shell_tzresult Lwt.t +val make_opt_call0 : + ([< RPC_service.meth], t, t, 'q, 'i, 'o) RPC_service.t -> + 'pr #simple -> + 'pr -> + 'q -> + 'i -> + 'o option shell_tzresult Lwt.t -val make_opt_call3: - ([< RPC_service.meth ], t, ((t * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> - 'pr #simple -> 'pr -> 'a -> 'b -> 'c -> 'q -> 'i -> 'o option shell_tzresult Lwt.t +val make_opt_call1 : + ([< RPC_service.meth], t, t * 'a, 'q, 'i, 'o) RPC_service.t -> + 'pr #simple -> + 'pr -> + 'a -> + 'q -> + 'i -> + 'o option shell_tzresult Lwt.t + +val make_opt_call2 : + ([< RPC_service.meth], t, (t * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> + 'pr #simple -> + 'pr -> + 'a -> + 'b -> + 'q -> + 'i -> + 'o option shell_tzresult Lwt.t + +val make_opt_call3 : + ([< RPC_service.meth], t, ((t * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> + 'pr #simple -> + 'pr -> + 'a -> + 'b -> + 'c -> + 'q -> + 'i -> + 'o option shell_tzresult Lwt.t diff --git a/src/lib_protocol_environment/sigs/v1/RPC_directory.mli b/src/lib_protocol_environment/sigs/v1/RPC_directory.mli index 17b15c6de356bf4113cde86ccb495c1c079777b6..68780b106277ac635ca5c9dcacd5d1418ffc4428 100644 --- a/src/lib_protocol_environment/sigs/v1/RPC_directory.mli +++ b/src/lib_protocol_environment/sigs/v1/RPC_directory.mli @@ -25,15 +25,17 @@ (** Dispatch tree *) type 'prefix t + type 'prefix directory = 'prefix t (** Empty list of dispatch trees *) -val empty: 'prefix directory +val empty : 'prefix directory + +val map : ('a -> 'b Lwt.t) -> 'b directory -> 'a directory -val map: ('a -> 'b Lwt.t) -> 'b directory -> 'a directory +val prefix : ('pr, 'p) RPC_path.path -> 'p directory -> 'pr directory -val prefix: ('pr, 'p) RPC_path.path -> 'p directory -> 'pr directory -val merge: 'a directory -> 'a directory -> 'a directory +val merge : 'a directory -> 'a directory -> 'a directory (** Possible error while registring services. 
*) type step = @@ -42,32 +44,35 @@ type step = | DynamicTail of RPC_arg.descr type conflict = - | CService of RPC_service.meth | CDir | CBuilder | CTail - | CTypes of RPC_arg.descr * - RPC_arg.descr + | CService of RPC_service.meth + | CDir + | CBuilder + | CTail + | CTypes of RPC_arg.descr * RPC_arg.descr | CType of RPC_arg.descr * string list + exception Conflict of step list * conflict (** Registring handler in service tree. *) -val register: +val register : 'prefix directory -> ('meth, 'prefix, 'params, 'query, 'input, 'output) RPC_service.t -> ('params -> 'query -> 'input -> 'output tzresult Lwt.t) -> 'prefix directory -val opt_register: +val opt_register : 'prefix directory -> ('meth, 'prefix, 'params, 'query, 'input, 'output) RPC_service.t -> ('params -> 'query -> 'input -> 'output option tzresult Lwt.t) -> 'prefix directory -val gen_register: +val gen_register : 'prefix directory -> ('meth, 'prefix, 'params, 'query, 'input, 'output) RPC_service.t -> - ('params -> 'query -> 'input -> [< 'output RPC_answer.t ] Lwt.t) -> + ('params -> 'query -> 'input -> [< 'output RPC_answer.t] Lwt.t) -> 'prefix directory -val lwt_register: +val lwt_register : 'prefix directory -> ('meth, 'prefix, 'params, 'query, 'input, 'output) RPC_service.t -> ('params -> 'query -> 'input -> 'output Lwt.t) -> @@ -75,153 +80,178 @@ val lwt_register: (** Registring handler in service tree. Curryfied variant. *) -val register0: +val register0 : unit directory -> ('m, unit, unit, 'q, 'i, 'o) RPC_service.t -> ('q -> 'i -> 'o tzresult Lwt.t) -> unit directory -val register1: +val register1 : 'prefix directory -> - ('m, 'prefix, unit * 'a, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, unit * 'a, 'q, 'i, 'o) RPC_service.t -> ('a -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val register2: +val register2 : 'prefix directory -> - ('m, 'prefix, (unit * 'a) * 'b, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val register3: +val register3 : 'prefix directory -> - ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val register4: +val register4 : 'prefix directory -> - ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val register5: +val register5 : 'prefix directory -> - ('m, 'prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'q , 'i, 'o) RPC_service.t -> + ( 'm, + 'prefix, + ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, + 'q, + 'i, + 'o ) + RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val opt_register0: +val opt_register0 : unit directory -> ('m, unit, unit, 'q, 'i, 'o) RPC_service.t -> ('q -> 'i -> 'o option tzresult Lwt.t) -> unit directory -val opt_register1: +val opt_register1 : 'prefix directory -> - ('m, 'prefix, unit * 'a, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, unit * 'a, 'q, 'i, 'o) RPC_service.t -> ('a -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory -val opt_register2: +val opt_register2 : 'prefix directory -> - ('m, 'prefix, (unit * 'a) * 'b, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix 
directory -val opt_register3: +val opt_register3 : 'prefix directory -> - ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory -val opt_register4: +val opt_register4 : 'prefix directory -> - ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory -val opt_register5: +val opt_register5 : 'prefix directory -> - ('m, 'prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'q , 'i, 'o) RPC_service.t -> + ( 'm, + 'prefix, + ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, + 'q, + 'i, + 'o ) + RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory -val gen_register0: +val gen_register0 : unit directory -> ('m, unit, unit, 'q, 'i, 'o) RPC_service.t -> - ('q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> unit directory -val gen_register1: +val gen_register1 : 'prefix directory -> - ('m, 'prefix, unit * 'a, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('m, 'prefix, unit * 'a, 'q, 'i, 'o) RPC_service.t -> + ('a -> 'q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> 'prefix directory -val gen_register2: +val gen_register2 : 'prefix directory -> - ('m, 'prefix, (unit * 'a) * 'b, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'b -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('m, 'prefix, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> + ('a -> 'b -> 'q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> 'prefix directory -val gen_register3: +val gen_register3 : 'prefix directory -> - ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'b -> 'c -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> + ('a -> 'b -> 'c -> 'q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> 'prefix directory -val gen_register4: +val gen_register4 : 'prefix directory -> - ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q, 'i, 'o) RPC_service.t -> + ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> 'prefix directory -val gen_register5: +val gen_register5 : 'prefix directory -> - ('m, 'prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ( 'm, + 'prefix, + ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, + 'q, + 'i, + 'o ) + RPC_service.t -> + ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> 'prefix directory -val lwt_register0: +val lwt_register0 : unit directory -> ('m, unit, unit, 'q, 'i, 'o) RPC_service.t -> ('q -> 'i -> 'o Lwt.t) -> unit directory -val lwt_register1: +val lwt_register1 : 'prefix directory -> - ('m, 'prefix, unit * 'a, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, unit * 'a, 'q, 'i, 'o) RPC_service.t -> ('a -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory -val lwt_register2: +val lwt_register2 : 'prefix directory -> - ('m, 'prefix, (unit * 'a) * 'b, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory -val lwt_register3: +val lwt_register3 : 
'prefix directory -> - ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory -val lwt_register4: +val lwt_register4 : 'prefix directory -> - ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory -val lwt_register5: +val lwt_register5 : 'prefix directory -> - ('m, 'prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'q , 'i, 'o) RPC_service.t -> + ( 'm, + 'prefix, + ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, + 'q, + 'i, + 'o ) + RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory (** Registring dynamic subtree. *) -val register_dynamic_directory: +val register_dynamic_directory : ?descr:string -> 'prefix directory -> - ('prefix, 'a) RPC_path.t -> ('a -> 'a directory Lwt.t) -> + ('prefix, 'a) RPC_path.t -> + ('a -> 'a directory Lwt.t) -> 'prefix directory diff --git a/src/lib_protocol_environment/sigs/v1/RPC_path.mli b/src/lib_protocol_environment/sigs/v1/RPC_path.mli index be5d2b36cfc14715b4614bab319bcaa11588dbb4..4cfe075e3334f54011f8f7a0b50abbd23eac1a02 100644 --- a/src/lib_protocol_environment/sigs/v1/RPC_path.mli +++ b/src/lib_protocol_environment/sigs/v1/RPC_path.mli @@ -24,23 +24,27 @@ (*****************************************************************************) type ('prefix, 'params) t + type ('prefix, 'params) path = ('prefix, 'params) t + type 'prefix context = ('prefix, 'prefix) path -val root: unit context -val open_root: 'a context +val root : unit context + +val open_root : 'a context -val add_suffix: - ('prefix, 'params) path -> string -> ('prefix, 'params) path -val (/): - ('prefix, 'params) path -> string -> ('prefix, 'params) path +val add_suffix : ('prefix, 'params) path -> string -> ('prefix, 'params) path -val add_arg: +val ( / ) : ('prefix, 'params) path -> string -> ('prefix, 'params) path + +val add_arg : ('prefix, 'params) path -> 'a RPC_arg.t -> ('prefix, 'params * 'a) path -val (/:): + +val ( /: ) : ('prefix, 'params) path -> 'a RPC_arg.t -> ('prefix, 'params * 'a) path -val add_final_args: +val add_final_args : ('prefix, 'params) path -> 'a RPC_arg.t -> ('prefix, 'params * 'a list) path -val (/:*): + +val ( /:* ) : ('prefix, 'params) path -> 'a RPC_arg.t -> ('prefix, 'params * 'a list) path diff --git a/src/lib_protocol_environment/sigs/v1/RPC_query.mli b/src/lib_protocol_environment/sigs/v1/RPC_query.mli index 76573c0df7bb820e4222ac20704aff050c47afa9..b0e15c3121a1ea358459748ac2053f60be23abc9 100644 --- a/src/lib_protocol_environment/sigs/v1/RPC_query.mli +++ b/src/lib_protocol_environment/sigs/v1/RPC_query.mli @@ -24,31 +24,43 @@ (*****************************************************************************) type 'a t + type 'a query = 'a t -val empty: unit query +val empty : unit query type ('a, 'b) field -val field: - ?descr: string -> - string -> 'a RPC_arg.t -> 'a -> ('b -> 'a) -> ('b, 'a) field -val opt_field: - ?descr: string -> - string -> 'a RPC_arg.t -> ('b -> 'a option) -> ('b, 'a option) field -val flag: - ?descr: string -> - string -> ('b -> bool) -> ('b, bool) field -val multi_field: - ?descr: string -> - string -> 'a RPC_arg.t -> ('b -> 'a list) -> ('b, 'a list) field + +val field : + ?descr:string -> string -> 'a RPC_arg.t -> 'a -> ('b -> 'a) -> ('b, 'a) field + +val opt_field : + ?descr:string -> + string -> 
+ 'a RPC_arg.t -> + ('b -> 'a option) -> + ('b, 'a option) field + +val flag : ?descr:string -> string -> ('b -> bool) -> ('b, bool) field + +val multi_field : + ?descr:string -> + string -> + 'a RPC_arg.t -> + ('b -> 'a list) -> + ('b, 'a list) field type ('a, 'b, 'c) open_query -val query: 'b -> ('a, 'b, 'b) open_query -val (|+): - ('a, 'b, 'c -> 'd) open_query -> - ('a, 'c) field -> ('a, 'b, 'd) open_query -val seal: ('a, 'b, 'a) open_query -> 'a t + +val query : 'b -> ('a, 'b, 'b) open_query + +val ( |+ ) : + ('a, 'b, 'c -> 'd) open_query -> ('a, 'c) field -> ('a, 'b, 'd) open_query + +val seal : ('a, 'b, 'a) open_query -> 'a t type untyped = (string * string) list + exception Invalid of string -val parse: 'a query -> untyped -> 'a + +val parse : 'a query -> untyped -> 'a diff --git a/src/lib_protocol_environment/sigs/v1/RPC_service.mli b/src/lib_protocol_environment/sigs/v1/RPC_service.mli index bf61a15a43b7b52554870701c7ac2ec46648fdd4..828749243485a2bc981831db696bd66b86d734e6 100644 --- a/src/lib_protocol_environment/sigs/v1/RPC_service.mli +++ b/src/lib_protocol_environment/sigs/v1/RPC_service.mli @@ -24,53 +24,48 @@ (*****************************************************************************) (** HTTP methods. *) -type meth = [ - | `GET - | `POST - | `DELETE - | `PUT - | `PATCH -] +type meth = [`GET | `POST | `DELETE | `PUT | `PATCH] type (+'meth, 'prefix, 'params, 'query, 'input, 'output) t - constraint 'meth = [< meth ] + constraint 'meth = [< meth] + type (+'meth, 'prefix, 'params, 'query, 'input, 'output) service = ('meth, 'prefix, 'params, 'query, 'input, 'output) t -val get_service: - ?description: string -> - query: 'query RPC_query.t -> - output: 'output Data_encoding.t -> +val get_service : + ?description:string -> + query:'query RPC_query.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `GET ], 'prefix, 'params, 'query, unit, 'output) service + ([`GET], 'prefix, 'params, 'query, unit, 'output) service -val post_service: - ?description: string -> +val post_service : + ?description:string -> query:'query RPC_query.t -> - input: 'input Data_encoding.t -> - output: 'output Data_encoding.t -> + input:'input Data_encoding.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `POST ], 'prefix, 'params, 'query, 'input, 'output) service + ([`POST], 'prefix, 'params, 'query, 'input, 'output) service -val delete_service: - ?description: string -> +val delete_service : + ?description:string -> query:'query RPC_query.t -> - output: 'output Data_encoding.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `DELETE ], 'prefix, 'params, 'query, unit, 'output) service + ([`DELETE], 'prefix, 'params, 'query, unit, 'output) service -val patch_service: - ?description: string -> +val patch_service : + ?description:string -> query:'query RPC_query.t -> - input: 'input Data_encoding.t -> - output: 'output Data_encoding.t -> + input:'input Data_encoding.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `PATCH ], 'prefix, 'params, 'query, 'input, 'output) service + ([`PATCH], 'prefix, 'params, 'query, 'input, 'output) service -val put_service: - ?description: string -> +val put_service : + ?description:string -> query:'query RPC_query.t -> - input: 'input Data_encoding.t -> - output: 'output Data_encoding.t -> + input:'input Data_encoding.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `PUT ], 'prefix, 'params, 'query, 'input, 'output) service + ([`PUT], 
'prefix, 'params, 'query, 'input, 'output) service diff --git a/src/lib_protocol_environment/sigs/v1/base58.mli b/src/lib_protocol_environment/sigs/v1/base58.mli index 3d7198c7c845797f815a01844d1e1e28e2b444f3..145c91b6e4efafb96944eacb05412df928a798c9 100644 --- a/src/lib_protocol_environment/sigs/v1/base58.mli +++ b/src/lib_protocol_environment/sigs/v1/base58.mli @@ -25,19 +25,20 @@ type 'a encoding -val simple_decode: 'a encoding -> string -> 'a option -val simple_encode: 'a encoding -> 'a -> string +val simple_decode : 'a encoding -> string -> 'a option + +val simple_encode : 'a encoding -> 'a -> string type data = .. -val register_encoding: - prefix: string -> - length: int -> - to_raw: ('a -> string) -> - of_raw: (string -> 'a option) -> - wrap: ('a -> data) -> +val register_encoding : + prefix:string -> + length:int -> + to_raw:('a -> string) -> + of_raw:(string -> 'a option) -> + wrap:('a -> data) -> 'a encoding -val check_encoded_prefix: 'a encoding -> string -> int -> unit +val check_encoded_prefix : 'a encoding -> string -> int -> unit -val decode: string -> data option +val decode : string -> data option diff --git a/src/lib_protocol_environment/sigs/v1/blake2B.mli b/src/lib_protocol_environment/sigs/v1/blake2B.mli index b1b53be8d78451a0b305c8bd6ebf3d02b02285f6..efdbfbe9ccdd268eea94191c31e7ea9a67eeb735 100644 --- a/src/lib_protocol_environment/sigs/v1/blake2B.mli +++ b/src/lib_protocol_environment/sigs/v1/blake2B.mli @@ -31,24 +31,27 @@ module type Name = sig val name : string + val title : string + val size : int option end module type PrefixedName = sig include Name + val b58check_prefix : string end module Make_minimal (Name : Name) : S.MINIMAL_HASH -module Make - (Register : sig - val register_encoding: - prefix: string -> - length: int -> - to_raw: ('a -> string) -> - of_raw: (string -> 'a option) -> - wrap: ('a -> Base58.data) -> - 'a Base58.encoding - end) - (Name : PrefixedName) : S.HASH + +module Make (Register : sig + val register_encoding : + prefix:string -> + length:int -> + to_raw:('a -> string) -> + of_raw:(string -> 'a option) -> + wrap:('a -> Base58.data) -> + 'a Base58.encoding +end) +(Name : PrefixedName) : S.HASH diff --git a/src/lib_protocol_environment/sigs/v1/block_header.mli b/src/lib_protocol_environment/sigs/v1/block_header.mli index 2b250d696d4c0aba46be90521356dd94b556da7e..5be2956365da88154c31d6021a20da63f7948d47 100644 --- a/src/lib_protocol_environment/sigs/v1/block_header.mli +++ b/src/lib_protocol_environment/sigs/v1/block_header.mli @@ -24,26 +24,22 @@ (*****************************************************************************) type shell_header = { - level: Int32.t ; - (** The number of preceding block in this chain, i.e. the genesis + level : Int32.t; + (** The number of preceding block in this chain, i.e. the genesis has level 0. *) - proto_level: int ; - (** The number of preceding protocol change in the chain (modulo 256), + proto_level : int; + (** The number of preceding protocol change in the chain (modulo 256), i.e the genesis has proto_level 0. 
*) - predecessor: Block_hash.t ; - timestamp: Time.t ; - validation_passes: int ; - operations_hash: Operation_list_list_hash.t ; - fitness: MBytes.t list ; - context: Context_hash.t ; + predecessor : Block_hash.t; + timestamp : Time.t; + validation_passes : int; + operations_hash : Operation_list_list_hash.t; + fitness : MBytes.t list; + context : Context_hash.t } -val shell_header_encoding: shell_header Data_encoding.t +val shell_header_encoding : shell_header Data_encoding.t -type t = { - shell: shell_header ; - protocol_data: MBytes.t ; -} +type t = {shell : shell_header; protocol_data : MBytes.t} -include S.HASHABLE with type t := t - and type hash := Block_hash.t +include S.HASHABLE with type t := t and type hash := Block_hash.t diff --git a/src/lib_protocol_environment/sigs/v1/compare.mli b/src/lib_protocol_environment/sigs/v1/compare.mli index 8a67a866e75bd804ac560231b5ff5145d710f570..fac841f138e619129ce761c9227748a31f522079 100644 --- a/src/lib_protocol_environment/sigs/v1/compare.mli +++ b/src/lib_protocol_environment/sigs/v1/compare.mli @@ -25,35 +25,56 @@ module type COMPARABLE = sig type t + val compare : t -> t -> int end module type S = sig type t - val (=) : t -> t -> bool - val (<>) : t -> t -> bool - val (<) : t -> t -> bool - val (<=) : t -> t -> bool - val (>=) : t -> t -> bool - val (>) : t -> t -> bool + + val ( = ) : t -> t -> bool + + val ( <> ) : t -> t -> bool + + val ( < ) : t -> t -> bool + + val ( <= ) : t -> t -> bool + + val ( >= ) : t -> t -> bool + + val ( > ) : t -> t -> bool + val compare : t -> t -> int + val equal : t -> t -> bool + val max : t -> t -> t + val min : t -> t -> t end module Make (P : COMPARABLE) : S with type t := P.t module Char : S with type t = char + module Bool : S with type t = bool + module Int : S with type t = int + module Int32 : S with type t = int32 + module Uint32 : S with type t = int32 + module Int64 : S with type t = int64 + module Uint64 : S with type t = int64 + module Float : S with type t = float + module String : S with type t = string + module Z : S with type t = Z.t module List (P : COMPARABLE) : S with type t = P.t list + module Option (P : COMPARABLE) : S with type t = P.t option diff --git a/src/lib_protocol_environment/sigs/v1/context.mli b/src/lib_protocol_environment/sigs/v1/context.mli index df262e1ec715e3c579c62409bceaacd985d62fe1..b21bedd73928a907598529f4db998cfe0797127a 100644 --- a/src/lib_protocol_environment/sigs/v1/context.mli +++ b/src/lib_protocol_environment/sigs/v1/context.mli @@ -34,29 +34,33 @@ type key = string list (** Values in (kex x value) database implementations *) type value = MBytes.t -val mem: t -> key -> bool Lwt.t -val dir_mem: t -> key -> bool Lwt.t +val mem : t -> key -> bool Lwt.t -val get: t -> key -> value option Lwt.t +val dir_mem : t -> key -> bool Lwt.t -val set: t -> key -> value -> t Lwt.t +val get : t -> key -> value option Lwt.t + +val set : t -> key -> value -> t Lwt.t (** [copy] returns None if the [from] key is not bound *) -val copy: t -> from:key -> to_:key -> t option Lwt.t +val copy : t -> from:key -> to_:key -> t option Lwt.t + +val del : t -> key -> t Lwt.t -val del: t -> key -> t Lwt.t -val remove_rec: t -> key -> t Lwt.t +val remove_rec : t -> key -> t Lwt.t -val fold: - t -> key -> init:'a -> - f:([ `Key of key | `Dir of key ] -> 'a -> 'a Lwt.t) -> +val fold : + t -> + key -> + init:'a -> + f:([`Key of key | `Dir of key] -> 'a -> 'a Lwt.t) -> 'a Lwt.t -val keys: t -> key -> key list Lwt.t -val fold_keys: - t -> key -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a 
Lwt.t +val keys : t -> key -> key list Lwt.t + +val fold_keys : t -> key -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t -val register_resolver: +val register_resolver : 'a Base58.encoding -> (t -> string -> 'a list Lwt.t) -> unit -val complete: t -> string -> string list Lwt.t +val complete : t -> string -> string list Lwt.t diff --git a/src/lib_protocol_environment/sigs/v1/data_encoding.mli b/src/lib_protocol_environment/sigs/v1/data_encoding.mli index 68c8b6ac48e4b0882a072fe349d54d996f6883a0..083a116f00592c08e85e10dff6d5dfe0a097ed7e 100644 --- a/src/lib_protocol_environment/sigs/v1/data_encoding.mli +++ b/src/lib_protocol_environment/sigs/v1/data_encoding.mli @@ -35,207 +35,333 @@ type json = type json_schema type 'a t + type 'a encoding = 'a t -val classify : 'a encoding -> [ `Fixed of int | `Dynamic | `Variable ] +val classify : 'a encoding -> [`Fixed of int | `Dynamic | `Variable] val splitted : json:'a encoding -> binary:'a encoding -> 'a encoding val null : unit encoding + val empty : unit encoding + val unit : unit encoding + val constant : string -> unit encoding + val int8 : int encoding + val uint8 : int encoding + val int16 : int encoding + val uint16 : int encoding + val int31 : int encoding + val int32 : int32 encoding + val int64 : int64 encoding + val n : Z.t encoding + val z : Z.t encoding + val bool : bool encoding + val string : string encoding + val bytes : MBytes.t encoding + val float : float encoding + val option : 'a encoding -> 'a option encoding + val string_enum : (string * 'a) list -> 'a encoding module Fixed : sig val string : int -> string encoding + val bytes : int -> MBytes.t encoding + val add_padding : 'a encoding -> int -> 'a encoding end module Variable : sig val string : string encoding + val bytes : MBytes.t encoding - val array : ?max_length: int -> 'a encoding -> 'a array encoding - val list : ?max_length: int -> 'a encoding -> 'a list encoding + + val array : ?max_length:int -> 'a encoding -> 'a array encoding + + val list : ?max_length:int -> 'a encoding -> 'a list encoding end module Bounded : sig val string : int -> string encoding + val bytes : int -> MBytes.t encoding end val dynamic_size : - ?kind: [ `Uint30 | `Uint16 | `Uint8 ] -> - 'a encoding -> 'a encoding + ?kind:[`Uint30 | `Uint16 | `Uint8] -> 'a encoding -> 'a encoding val json : json encoding + val json_schema : json_schema encoding type 'a field + val req : - ?title:string -> ?description:string -> - string -> 't encoding -> 't field + ?title:string -> ?description:string -> string -> 't encoding -> 't field + val opt : - ?title:string -> ?description:string -> - string -> 't encoding -> 't option field + ?title:string -> + ?description:string -> + string -> + 't encoding -> + 't option field + val varopt : - ?title:string -> ?description:string -> - string -> 't encoding -> 't option field + ?title:string -> + ?description:string -> + string -> + 't encoding -> + 't option field + val dft : - ?title:string -> ?description:string -> - string -> 't encoding -> 't -> 't field - -val obj1 : - 'f1 field -> 'f1 encoding -val obj2 : - 'f1 field -> 'f2 field -> ('f1 * 'f2) encoding -val obj3 : - 'f1 field -> 'f2 field -> 'f3 field -> ('f1 * 'f2 * 'f3) encoding + ?title:string -> + ?description:string -> + string -> + 't encoding -> + 't -> + 't field + +val obj1 : 'f1 field -> 'f1 encoding + +val obj2 : 'f1 field -> 'f2 field -> ('f1 * 'f2) encoding + +val obj3 : 'f1 field -> 'f2 field -> 'f3 field -> ('f1 * 'f2 * 'f3) encoding + val obj4 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 
field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> ('f1 * 'f2 * 'f3 * 'f4) encoding + val obj5 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5) encoding + val obj6 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> 'f6 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6) encoding + val obj7 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7) encoding + val obj8 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> 'f8 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> + 'f8 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8) encoding + val obj9 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> 'f8 field -> 'f9 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> + 'f8 field -> + 'f9 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9) encoding + val obj10 : - 'f1 field -> 'f2 field -> 'f3 field -> 'f4 field -> 'f5 field -> - 'f6 field -> 'f7 field -> 'f8 field -> 'f9 field -> 'f10 field -> + 'f1 field -> + 'f2 field -> + 'f3 field -> + 'f4 field -> + 'f5 field -> + 'f6 field -> + 'f7 field -> + 'f8 field -> + 'f9 field -> + 'f10 field -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9 * 'f10) encoding -val tup1 : - 'f1 encoding -> - 'f1 encoding -val tup2 : - 'f1 encoding -> 'f2 encoding -> - ('f1 * 'f2) encoding +val tup1 : 'f1 encoding -> 'f1 encoding + +val tup2 : 'f1 encoding -> 'f2 encoding -> ('f1 * 'f2) encoding + val tup3 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> - ('f1 * 'f2 * 'f3) encoding + 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> ('f1 * 'f2 * 'f3) encoding + val tup4 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> ('f1 * 'f2 * 'f3 * 'f4) encoding + val tup5 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> 'f5 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5) encoding + val tup6 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6) encoding + val tup7 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7) encoding + val tup8 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> 'f8 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> + 'f8 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8) encoding + val tup9 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 
'f5 encoding -> 'f6 encoding -> 'f7 encoding -> 'f8 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> + 'f8 encoding -> 'f9 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9) encoding + val tup10 : - 'f1 encoding -> 'f2 encoding -> 'f3 encoding -> 'f4 encoding -> - 'f5 encoding -> 'f6 encoding -> 'f7 encoding -> 'f8 encoding -> - 'f9 encoding -> 'f10 encoding -> + 'f1 encoding -> + 'f2 encoding -> + 'f3 encoding -> + 'f4 encoding -> + 'f5 encoding -> + 'f6 encoding -> + 'f7 encoding -> + 'f8 encoding -> + 'f9 encoding -> + 'f10 encoding -> ('f1 * 'f2 * 'f3 * 'f4 * 'f5 * 'f6 * 'f7 * 'f8 * 'f9 * 'f10) encoding val merge_objs : 'o1 encoding -> 'o2 encoding -> ('o1 * 'o2) encoding + val merge_tups : 'a1 encoding -> 'a2 encoding -> ('a1 * 'a2) encoding -val array : ?max_length: int -> 'a encoding -> 'a array encoding -val list : ?max_length: int -> 'a encoding -> 'a list encoding +val array : ?max_length:int -> 'a encoding -> 'a array encoding + +val list : ?max_length:int -> 'a encoding -> 'a list encoding val assoc : 'a encoding -> (string * 'a) list encoding type case_tag = Tag of int | Json_only type 't case + val case : title:string -> ?description:string -> - case_tag -> 'a encoding -> ('t -> 'a option) -> ('a -> 't) -> 't case + case_tag -> + 'a encoding -> + ('t -> 'a option) -> + ('a -> 't) -> + 't case -val union : - ?tag_size:[ `Uint8 | `Uint16 ] -> 't case list -> 't encoding +val union : ?tag_size:[`Uint8 | `Uint16] -> 't case list -> 't encoding val def : - string -> - ?title:string -> - ?description:string -> - 't encoding ->'t encoding + string -> ?title:string -> ?description:string -> 't encoding -> 't encoding val conv : - ('a -> 'b) -> ('b -> 'a) -> - ?schema:json_schema -> - 'b encoding -> 'a encoding + ('a -> 'b) -> ('b -> 'a) -> ?schema:json_schema -> 'b encoding -> 'a encoding val mu : string -> ?title:string -> ?description:string -> - ('a encoding -> 'a encoding) -> 'a encoding + ('a encoding -> 'a encoding) -> + 'a encoding type 'a lazy_t val lazy_encoding : 'a encoding -> 'a lazy_t encoding + val force_decode : 'a lazy_t -> 'a option + val force_bytes : 'a lazy_t -> MBytes.t + val make_lazy : 'a encoding -> 'a -> 'a lazy_t + val apply_lazy : - fun_value:('a -> 'b) -> fun_bytes:(MBytes.t -> 'b) -> fun_combine:('b -> 'b -> 'b) -> - 'a lazy_t -> 'b + fun_value:('a -> 'b) -> + fun_bytes:(MBytes.t -> 'b) -> + fun_combine:('b -> 'b -> 'b) -> + 'a lazy_t -> + 'b module Json : sig - val schema : ?definitions_path:string -> 'a encoding -> json_schema + val construct : 't encoding -> 't -> json + val destruct : 't encoding -> json -> 't (** JSON Error *) type path = path_item list + and path_item = - [ `Field of string - (** A field in an object. *) - | `Index of int - (** An index in an array. *) - | `Star - (** Any / every field or index. *) - | `Next - (** The next element after an array. *) ] + [ `Field of string (** A field in an object. *) + | `Index of int (** An index in an array. *) + | `Star (** Any / every field or index. *) + | `Next (** The next element after an array. *) ] (** Exception raised by destructors, with the location in the original JSON structure and the specific error. 
*) @@ -257,30 +383,37 @@ module Json : sig exception Unexpected_field of string val print_error : - ?print_unknown: (Format.formatter -> exn -> unit) -> - Format.formatter -> exn -> unit + ?print_unknown:(Format.formatter -> exn -> unit) -> + Format.formatter -> + exn -> + unit (** Helpers for writing encoders. *) val cannot_destruct : ('a, Format.formatter, unit, 'b) format4 -> 'a + val wrap_error : ('a -> 'b) -> 'a -> 'b val pp : Format.formatter -> json -> unit - end module Binary : sig - val length : 'a encoding -> 'a -> int + val fixed_length : 'a encoding -> int option + val read : 'a encoding -> MBytes.t -> int -> int -> (int * 'a) option + val write : 'a encoding -> 'a -> MBytes.t -> int -> int -> int option + val to_bytes : 'a encoding -> 'a -> MBytes.t option + val to_bytes_exn : 'a encoding -> 'a -> MBytes.t + val of_bytes : 'a encoding -> MBytes.t -> 'a option type write_error - exception Write_error of write_error + exception Write_error of write_error end (** [check_size size encoding] ensures that the binary encoding diff --git a/src/lib_protocol_environment/sigs/v1/error_monad.mli b/src/lib_protocol_environment/sigs/v1/error_monad.mli index b2fdf39e8320fe3c8cac84ddb01ada2df2a56bfd..7f80eb5b6e46430b4214d02944566a95022b22ad 100644 --- a/src/lib_protocol_environment/sigs/v1/error_monad.mli +++ b/src/lib_protocol_environment/sigs/v1/error_monad.mli @@ -29,10 +29,9 @@ (** Categories of error *) type error_category = - [ `Branch (** Errors that may not happen in another context *) - | `Temporary (** Errors that may not happen in a later context *) - | `Permanent (** Errors that will happen no matter the context *) - ] + [ `Branch (** Errors that may not happen in another context *) + | `Temporary (** Errors that may not happen in a later context *) + | `Permanent (** Errors that will happen no matter the context *) ] (** Custom error handling for economic protocols. *) @@ -42,18 +41,21 @@ val pp : Format.formatter -> error -> unit (** A JSON error serializer *) val error_encoding : error Data_encoding.t + val json_of_error : error -> Data_encoding.json + val error_of_json : Data_encoding.json -> error (** Error information *) -type error_info = - { category : error_category ; - id: string ; - title : string ; - description : string ; - schema : Data_encoding.json_schema } +type error_info = { + category : error_category; + id : string; + title : string; + description : string; + schema : Data_encoding.json_schema +} -val pp_info: Format.formatter -> error_info -> unit +val pp_info : Format.formatter -> error_info -> unit (** Retrieves information of registered errors *) val get_registered_errors : unit -> error_info list @@ -61,10 +63,13 @@ val get_registered_errors : unit -> error_info list (** For other modules to register specialized error serializers *) val register_error_kind : error_category -> - id:string -> title:string -> description:string -> + id:string -> + title:string -> + description:string -> ?pp:(Format.formatter -> 'err -> unit) -> 'err Data_encoding.t -> - (error -> 'err option) -> ('err -> error) -> + (error -> 'err option) -> + ('err -> error) -> unit (** Classify an error using the registered kinds *) @@ -113,20 +118,22 @@ val error : error -> 'a tzresult val fail : error -> 'a tzresult Lwt.t (** Non-Lwt bind operator *) -val (>>?) : 'a tzresult -> ('a -> 'b tzresult) -> 'b tzresult +val ( >>? ) : 'a tzresult -> ('a -> 'b tzresult) -> 'b tzresult (** Bind operator *) -val (>>=?) 
: 'a tzresult Lwt.t -> ('a -> 'b tzresult Lwt.t) -> 'b tzresult Lwt.t +val ( >>=? ) : + 'a tzresult Lwt.t -> ('a -> 'b tzresult Lwt.t) -> 'b tzresult Lwt.t (** Lwt's bind reexported *) -val (>>=) : 'a Lwt.t -> ('a -> 'b Lwt.t) -> 'b Lwt.t -val (>|=) : 'a Lwt.t -> ('a -> 'b) -> 'b Lwt.t +val ( >>= ) : 'a Lwt.t -> ('a -> 'b Lwt.t) -> 'b Lwt.t + +val ( >|= ) : 'a Lwt.t -> ('a -> 'b) -> 'b Lwt.t (** To operator *) -val (>>|?) : 'a tzresult Lwt.t -> ('a -> 'b) -> 'b tzresult Lwt.t +val ( >>|? ) : 'a tzresult Lwt.t -> ('a -> 'b) -> 'b tzresult Lwt.t (** Non-Lwt to operator *) -val (>|?) : 'a tzresult -> ('a -> 'b) -> 'b tzresult +val ( >|? ) : 'a tzresult -> ('a -> 'b) -> 'b tzresult (** Enrich an error report (or do nothing on a successful result) manually *) val record_trace : error -> 'a tzresult -> 'a tzresult @@ -138,7 +145,8 @@ val trace : error -> 'b tzresult Lwt.t -> 'b tzresult Lwt.t val record_trace_eval : (unit -> error tzresult) -> 'a tzresult -> 'a tzresult (** Same as trace, for unevaluated Lwt error *) -val trace_eval : (unit -> error tzresult Lwt.t) -> 'b tzresult Lwt.t -> 'b tzresult Lwt.t +val trace_eval : + (unit -> error tzresult Lwt.t) -> 'b tzresult Lwt.t -> 'b tzresult Lwt.t (** Erroneous return on failed assertion *) val fail_unless : bool -> error -> unit tzresult Lwt.t @@ -150,32 +158,38 @@ val fail_when : bool -> error -> unit tzresult Lwt.t (** A {!List.iter} in the monad *) val iter_s : ('a -> unit tzresult Lwt.t) -> 'a list -> unit tzresult Lwt.t + val iter_p : ('a -> unit tzresult Lwt.t) -> 'a list -> unit tzresult Lwt.t (** A {!List.map} in the monad *) val map_s : ('a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t + val map_p : ('a -> 'b tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t (** A {!List.map2} in the monad *) -val map2 : - ('a -> 'b -> 'c tzresult) -> 'a list -> 'b list -> 'c list tzresult +val map2 : ('a -> 'b -> 'c tzresult) -> 'a list -> 'b list -> 'c list tzresult (** A {!List.map2} in the monad *) val map2_s : - ('a -> 'b -> 'c tzresult Lwt.t) -> 'a list -> 'b list -> + ('a -> 'b -> 'c tzresult Lwt.t) -> + 'a list -> + 'b list -> 'c list tzresult Lwt.t (** A {!List.filter_map} in the monad *) -val filter_map_s : ('a -> 'b option tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t +val filter_map_s : + ('a -> 'b option tzresult Lwt.t) -> 'a list -> 'b list tzresult Lwt.t (** A {!List.fold_left} in the monad *) -val fold_left_s : ('a -> 'b -> 'a tzresult Lwt.t) -> 'a -> 'b list -> 'a tzresult Lwt.t +val fold_left_s : + ('a -> 'b -> 'a tzresult Lwt.t) -> 'a -> 'b list -> 'a tzresult Lwt.t (** A {!List.fold_right} in the monad *) -val fold_right_s : ('a -> 'b -> 'b tzresult Lwt.t) -> 'a list -> 'b -> 'b tzresult Lwt.t - +val fold_right_s : + ('a -> 'b -> 'b tzresult Lwt.t) -> 'a list -> 'b -> 'b tzresult Lwt.t (**/**) type shell_error + type 'a shell_tzresult = ('a, shell_error list) result diff --git a/src/lib_protocol_environment/sigs/v1/fitness.mli b/src/lib_protocol_environment/sigs/v1/fitness.mli index 426e3adce82d45802ae8e246092df55fa1f34875..a45316be04c387ccf2da977d924ee6b93a7700d2 100644 --- a/src/lib_protocol_environment/sigs/v1/fitness.mli +++ b/src/lib_protocol_environment/sigs/v1/fitness.mli @@ -25,4 +25,5 @@ (** The fitness of a block is defined as a list of bytes, compared in a lexicographical order (longer list are greater). 
*) -include S.T with type t = MBytes.t list +include + S.T with type t = MBytes.t list diff --git a/src/lib_protocol_environment/sigs/v1/format.mli b/src/lib_protocol_environment/sigs/v1/format.mli index 73cc075b816277d0d30f9c3063c2f58bb143f07b..c035ef981aade481826ce45ff5c52a7a2455caef 100644 --- a/src/lib_protocol_environment/sigs/v1/format.mli +++ b/src/lib_protocol_environment/sigs/v1/format.mli @@ -120,9 +120,9 @@ *) -type formatter (** Abstract data corresponding to a pretty-printer (also called a formatter) and all its machinery. See also {!section:formatter}. *) +type formatter (** {1:boxes Pretty-printing boxes} *) @@ -148,7 +148,6 @@ type formatter split the line. *) -val pp_open_box : formatter -> int -> unit (** [pp_open_box ppf d] opens a new compacting pretty-printing box with offset [d] in the formatter [ppf]. @@ -168,12 +167,11 @@ val pp_open_box : formatter -> int -> unit If the pretty-printer splits the line in the box, offset [d] is added to the current indentation. *) +val pp_open_box : formatter -> int -> unit - -val pp_close_box : formatter -> unit -> unit (** Closes the most recently open pretty-printing box. *) +val pp_close_box : formatter -> unit -> unit -val pp_open_hbox : formatter -> unit -> unit (** [pp_open_hbox ppf ()] opens a new 'horizontal' pretty-printing box. This box prints material on a single line. @@ -181,8 +179,8 @@ val pp_open_hbox : formatter -> unit -> unit Break hints in a horizontal box never split the line. (Line splitting may still occur inside boxes nested deeper). *) +val pp_open_hbox : formatter -> unit -> unit -val pp_open_vbox : formatter -> int -> unit (** [pp_open_vbox ppf d] opens a new 'vertical' pretty-printing box with offset [d]. @@ -193,8 +191,8 @@ val pp_open_vbox : formatter -> int -> unit If the pretty-printer splits the line in the box, [d] is added to the current indentation. *) +val pp_open_vbox : formatter -> int -> unit -val pp_open_hvbox : formatter -> int -> unit (** [pp_open_hvbox ppf d] opens a new 'horizontal/vertical' pretty-printing box with offset [d]. @@ -204,8 +202,8 @@ val pp_open_hvbox : formatter -> int -> unit If the pretty-printer splits the line in the box, [d] is added to the current indentation. *) +val pp_open_hvbox : formatter -> int -> unit -val pp_open_hovbox : formatter -> int -> unit (** [pp_open_hovbox ppf d] opens a new 'horizontal-or-vertical' pretty-printing box with offset [d]. @@ -217,28 +215,29 @@ val pp_open_hovbox : formatter -> int -> unit If the pretty-printer splits the line in the box, [d] is added to the current indentation. *) +val pp_open_hovbox : formatter -> int -> unit (** {1 Formatting functions} *) -val pp_print_string : formatter -> string -> unit (** [pp_print_string ppf s] prints [s] in the current pretty-printing box. *) +val pp_print_string : formatter -> string -> unit -val pp_print_as : formatter -> int -> string -> unit (** [pp_print_as ppf len s] prints [s] in the current pretty-printing box. The pretty-printer formats [s] as if it were of length [len]. *) +val pp_print_as : formatter -> int -> string -> unit -val pp_print_int : formatter -> int -> unit (** Print an integer in the current pretty-printing box. *) +val pp_print_int : formatter -> int -> unit -val pp_print_float : formatter -> float -> unit (** Print a floating point number in the current pretty-printing box. *) +val pp_print_float : formatter -> float -> unit -val pp_print_char : formatter -> char -> unit (** Print a character in the current pretty-printing box. 
*) +val pp_print_char : formatter -> char -> unit -val pp_print_bool : formatter -> bool -> unit (** Print a boolean in the current pretty-printing box. *) +val pp_print_bool : formatter -> bool -> unit (** {1:breaks Break hints} *) @@ -261,23 +260,22 @@ val pp_print_bool : formatter -> bool -> unit means printing a newline character (ASCII code 10). *) -val pp_print_space : formatter -> unit -> unit (** [pp_print_space ppf ()] emits a 'space' break hint: the pretty-printer may split the line at this point, otherwise it prints one space. [pp_print_space ppf ()] is equivalent to [pp_print_break ppf 1 0]. *) +val pp_print_space : formatter -> unit -> unit -val pp_print_cut : formatter -> unit -> unit (** [pp_print_cut ppf ()] emits a 'cut' break hint: the pretty-printer may split the line at this point, otherwise it prints nothing. [pp_print_cut ppf ()] is equivalent to [pp_print_break ppf 0 0]. *) +val pp_print_cut : formatter -> unit -> unit -val pp_print_break : formatter -> int -> int -> unit (** [pp_print_break ppf nspaces offset] emits a 'full' break hint: the pretty-printer may split the line at this point, otherwise it prints [nspaces] spaces. @@ -285,8 +283,8 @@ val pp_print_break : formatter -> int -> int -> unit If the pretty-printer splits the line, [offset] is added to the current indentation. *) +val pp_print_break : formatter -> int -> int -> unit -val pp_force_newline : formatter -> unit -> unit (** Force a new line in the current pretty-printing box. The pretty-printer must split the line at this point, @@ -296,16 +294,16 @@ val pp_force_newline : formatter -> unit -> unit Using break hints within an enclosing vertical box is a better alternative. *) +val pp_force_newline : formatter -> unit -> unit -val pp_print_if_newline : formatter -> unit -> unit (** Execute the next formatting command if the preceding line has just been split. Otherwise, ignore the next formatting command. *) +val pp_print_if_newline : formatter -> unit -> unit (** {1 Pretty-printing termination} *) -val pp_print_flush : formatter -> unit -> unit (** End of pretty-printing: resets the pretty-printer to initial state. All open pretty-printing boxes are closed, all pending text is printed. @@ -329,8 +327,8 @@ val pp_print_flush : formatter -> unit -> unit buffering strategy of output channels and could dramatically impact efficiency. *) +val pp_print_flush : formatter -> unit -> unit -val pp_print_newline : formatter -> unit -> unit (** End of pretty-printing: resets the pretty-printer to initial state. All open pretty-printing boxes are closed, all pending text is printed. @@ -342,10 +340,10 @@ val pp_print_newline : formatter -> unit -> unit the preferred method is using break hints within a vertical pretty-printing box. *) +val pp_print_newline : formatter -> unit -> unit (** {1 Margin} *) -val pp_set_margin : formatter -> int -> unit (** [pp_set_margin ppf d] sets the right margin to [d] (in characters): the pretty-printer splits lines that overflow the right margin according to the break hints given. @@ -357,13 +355,13 @@ val pp_set_margin : formatter -> int -> unit a minimal ratio [max_indent/margin>=50%] and if possible the current difference [margin - max_indent]. *) +val pp_set_margin : formatter -> int -> unit -val pp_get_margin : formatter -> unit -> int (** Returns the position of the right margin. 
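The box and break-hint primitives above combine as follows. A minimal sketch of a printer for a key/value pair, using only functions from this interface; the layout chosen is arbitrary:

let pp_binding ppf (key, value) =
  Format.pp_open_hvbox ppf 2 ;      (* horizontal-or-vertical box, offset 2 *)
  Format.pp_print_string ppf key ;
  Format.pp_print_string ppf " =" ;
  Format.pp_print_space ppf () ;    (* break hint: space, or split + indent *)
  Format.pp_print_int ppf value ;
  Format.pp_close_box ppf ()

When the binding fits within the margin it prints on one line; otherwise the space hint splits the line and the value is indented by two columns.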
*) +val pp_get_margin : formatter -> unit -> int (** {1 Maximum indentation limit} *) -val pp_set_max_indent : formatter -> int -> unit (** [pp_set_max_indent ppf d] sets the maximum indentation limit of lines to [d] (in characters): once this limit is reached, new pretty-printing boxes are rejected to the @@ -376,9 +374,10 @@ val pp_set_max_indent : formatter -> int -> unit If [d] is greater or equal than the current margin, it is ignored, and the current maximum indentation limit is kept. *) +val pp_set_max_indent : formatter -> int -> unit -val pp_get_max_indent : formatter -> unit -> int (** Return the maximum indentation limit (in characters). *) +val pp_get_max_indent : formatter -> unit -> int (** {1 Maximum formatting depth} *) @@ -389,7 +388,6 @@ val pp_get_max_indent : formatter -> unit -> int precisely as the text returned by {!get_ellipsis_text} [()]). *) -val pp_set_max_boxes : formatter -> int -> unit (** [pp_set_max_boxes ppf max] sets the maximum number of pretty-printing boxes simultaneously open. @@ -398,16 +396,17 @@ val pp_set_max_boxes : formatter -> int -> unit Nothing happens if [max] is smaller than 2. *) +val pp_set_max_boxes : formatter -> int -> unit -val pp_get_max_boxes : formatter -> unit -> int (** Returns the maximum number of pretty-printing boxes allowed before ellipsis. *) +val pp_get_max_boxes : formatter -> unit -> int -val pp_over_max_boxes : formatter -> unit -> bool (** Tests if the maximum number of pretty-printing boxes allowed have already been opened. *) +val pp_over_max_boxes : formatter -> unit -> bool (** {1 Tabulation boxes} *) @@ -427,7 +426,6 @@ val pp_over_max_boxes : formatter -> unit -> bool module {!Format}. *) -val pp_open_tbox : formatter -> unit -> unit (** [open_tbox ()] opens a new tabulation box. This box prints lines separated into cells of fixed width. @@ -440,22 +438,22 @@ val pp_open_tbox : formatter -> unit -> unit tabulation marker or split the line. Function {!Format.print_tbreak} prints a tabulation break. *) +val pp_open_tbox : formatter -> unit -> unit -val pp_close_tbox : formatter -> unit -> unit (** Closes the most recently opened tabulation box. *) +val pp_close_tbox : formatter -> unit -> unit -val pp_set_tab : formatter -> unit -> unit (** Sets a tabulation marker at current insertion point. *) +val pp_set_tab : formatter -> unit -> unit -val pp_print_tab : formatter -> unit -> unit (** [print_tab ()] emits a 'next' tabulation break hint: if not already set on a tabulation marker, the insertion point moves to the first tabulation marker on the right, or the pretty-printer splits the line and insertion point moves to the leftmost tabulation marker. It is equivalent to [print_tbreak 0 0]. *) +val pp_print_tab : formatter -> unit -> unit -val pp_print_tbreak : formatter -> int -> int -> unit (** [print_tbreak nspaces offset] emits a 'full' tabulation break hint. If not already set on a tabulation marker, the insertion point moves to the @@ -469,16 +467,17 @@ val pp_print_tbreak : formatter -> int -> int -> unit If the pretty-printer splits the line, [offset] is added to the current indentation. *) +val pp_print_tbreak : formatter -> int -> int -> unit (** {1 Ellipsis} *) -val pp_set_ellipsis_text : formatter -> string -> unit (** Set the text of the ellipsis printed when too many pretty-printing boxes are open (a single dot, [.], by default). *) +val pp_set_ellipsis_text : formatter -> string -> unit -val pp_get_ellipsis_text : formatter -> unit -> string (** Return the text of the ellipsis. 
*) +val pp_get_ellipsis_text : formatter -> unit -> string (** {1:tags Semantic tags} *) @@ -554,44 +553,41 @@ type tag = string Tag-printing operations may be set on or off with {!set_print_tags}. *) -val pp_open_tag : formatter -> string -> unit (** [pp_open_tag ppf t] opens the semantic tag named [t]. The [print_open_tag] tag-printing function of the formatter is called with [t] as argument; then the opening tag marker for [t], as given by [mark_open_tag t], is written into the output device of the formatter. *) +val pp_open_tag : formatter -> string -> unit -val pp_close_tag : formatter -> unit -> unit (** [pp_close_tag ppf ()] closes the most recently opened semantic tag [t]. The closing tag marker, as given by [mark_close_tag t], is written into the output device of the formatter; then the [print_close_tag] tag-printing function of the formatter is called with [t] as argument. *) +val pp_close_tag : formatter -> unit -> unit -val pp_set_tags : formatter -> bool -> unit (** [pp_set_tags ppf b] turns on or off the treatment of semantic tags (default is off). *) +val pp_set_tags : formatter -> bool -> unit -val pp_set_print_tags : formatter -> bool -> unit (** [pp_set_print_tags ppf b] turns on or off the tag-printing operations. *) +val pp_set_print_tags : formatter -> bool -> unit -val pp_set_mark_tags : formatter -> bool -> unit (** [pp_set_mark_tags ppf b] turns on or off the tag-marking operations. *) +val pp_set_mark_tags : formatter -> bool -> unit -val pp_get_print_tags : formatter -> unit -> bool (** Return the current status of tag-printing operations. *) +val pp_get_print_tags : formatter -> unit -> bool -val pp_get_mark_tags : formatter -> unit -> bool (** Return the current status of tag-marking operations. *) +val pp_get_mark_tags : formatter -> unit -> bool (** {1 Convenience formatting functions.} *) -val pp_print_list: - ?pp_sep:(formatter -> unit -> unit) -> - (formatter -> 'a -> unit) -> (formatter -> 'a list -> unit) (** [pp_print_list ?pp_sep pp_v ppf l] prints items of list [l], using [pp_v] to print each item, and calling [pp_sep] between items ([pp_sep] defaults to {!pp_print_cut}. @@ -599,13 +595,19 @@ val pp_print_list: @since 4.02.0 *) +val pp_print_list : + ?pp_sep:(formatter -> unit -> unit) -> + (formatter -> 'a -> unit) -> + formatter -> + 'a list -> + unit -val pp_print_text : formatter -> string -> unit (** [pp_print_text ppf s] prints [s] with spaces and newlines respectively printed using {!pp_print_space} and {!pp_force_newline}. @since 4.02.0 *) +val pp_print_text : formatter -> string -> unit (** {1:fpp Formatted pretty-printing} *) @@ -695,7 +697,6 @@ val fprintf : formatter -> ('a, formatter, unit) format -> 'a *) -val sprintf : ('a, unit, string) format -> 'a (** Same as [printf] above, but instead of printing on a formatter, returns a string containing the result of formatting the arguments. Note that the pretty-printer queue is flushed at the end of {e each @@ -710,8 +711,8 @@ val sprintf : ('a, unit, string) format -> 'a buffer of your own: flushing the formatter and the buffer at the end of pretty-printing returns the desired string. *) +val sprintf : ('a, unit, string) format -> 'a -val asprintf : ('a, formatter, unit, string) format4 -> 'a (** Same as [printf] above, but instead of printing on a formatter, returns a string containing the result of formatting the arguments. 
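The convenience printers combine naturally with [asprintf]. A short sketch that renders a list of names as a comma-separated string; the data is illustrative:

let pp_comma ppf () = Format.pp_print_string ppf ", "

let names_to_string names =
  Format.asprintf
    "[%a]"
    (Format.pp_print_list ~pp_sep:pp_comma Format.pp_print_string)
    names

(* names_to_string ["alpha"; "beta"] = "[alpha, beta]" *)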
The type of [asprintf] is general enough to interact nicely with [%a] @@ -719,38 +720,37 @@ val asprintf : ('a, formatter, unit, string) format4 -> 'a @since 4.01.0 *) +val asprintf : ('a, formatter, unit, string) format4 -> 'a -val ifprintf : formatter -> ('a, formatter, unit) format -> 'a (** Same as [fprintf] above, but does not print anything. Useful to ignore some material when conditionally printing. @since 3.10.0 *) +val ifprintf : formatter -> ('a, formatter, unit) format -> 'a (** Formatted Pretty-Printing with continuations. *) -val kfprintf : - (formatter -> 'a) -> formatter -> - ('b, formatter, unit, 'a) format4 -> 'b (** Same as [fprintf] above, but instead of returning immediately, passes the formatter to its first argument at the end of printing. *) +val kfprintf : + (formatter -> 'a) -> formatter -> ('b, formatter, unit, 'a) format4 -> 'b -val ikfprintf : - (formatter -> 'a) -> formatter -> - ('b, formatter, unit, 'a) format4 -> 'b (** Same as [kfprintf] above, but does not print anything. Useful to ignore some material when conditionally printing. @since 3.12.0 *) +val ikfprintf : + (formatter -> 'a) -> formatter -> ('b, formatter, unit, 'a) format4 -> 'b -val ksprintf : (string -> 'a) -> ('b, unit, string, 'a) format4 -> 'b (** Same as [sprintf] above, but instead of returning the string, passes it to the first argument. *) +val ksprintf : (string -> 'a) -> ('b, unit, string, 'a) format4 -> 'b -val kasprintf : (string -> 'a) -> ('b, formatter, unit, 'a) format4 -> 'b (** Same as [asprintf] above, but instead of returning the string, passes it to the first argument. @since 4.03 *) +val kasprintf : (string -> 'a) -> ('b, formatter, unit, 'a) format4 -> 'b diff --git a/src/lib_protocol_environment/sigs/v1/int32.mli b/src/lib_protocol_environment/sigs/v1/int32.mli index 4b85af947f1820911e67e2804045b4ecb51ae0e6..08c2554f732e39d448cdbd164228e9dcabe238d2 100644 --- a/src/lib_protocol_environment/sigs/v1/int32.mli +++ b/src/lib_protocol_environment/sigs/v1/int32.mli @@ -33,107 +33,105 @@ [int32] are generally slower than those on [int]. Use [int32] only when the application requires exact 32-bit arithmetic. *) -val zero : int32 (** The 32-bit integer 0. *) +val zero : int32 -val one : int32 (** The 32-bit integer 1. *) +val one : int32 -val minus_one : int32 (** The 32-bit integer -1. *) +val minus_one : int32 -external neg : int32 -> int32 = "%int32_neg" (** Unary negation. *) +external neg : int32 -> int32 = "%int32_neg" -external add : int32 -> int32 -> int32 = "%int32_add" (** Addition. *) +external add : int32 -> int32 -> int32 = "%int32_add" -external sub : int32 -> int32 -> int32 = "%int32_sub" (** Subtraction. *) +external sub : int32 -> int32 -> int32 = "%int32_sub" -external mul : int32 -> int32 -> int32 = "%int32_mul" (** Multiplication. *) +external mul : int32 -> int32 -> int32 = "%int32_mul" -external div : int32 -> int32 -> int32 = "%int32_div" (** Integer division. Raise [Division_by_zero] if the second argument is zero. This division rounds the real quotient of its arguments towards zero, as specified for {!Pervasives.(/)}. *) +external div : int32 -> int32 -> int32 = "%int32_div" -external rem : int32 -> int32 -> int32 = "%int32_mod" (** Integer remainder. If [y] is not zero, the result of [Int32.rem x y] satisfies the following property: [x = Int32.add (Int32.mul (Int32.div x y) y) (Int32.rem x y)]. If [y = 0], [Int32.rem x y] raises [Division_by_zero]. *) +external rem : int32 -> int32 -> int32 = "%int32_mod" -val succ : int32 -> int32 (** Successor. 
[Int32.succ x] is [Int32.add x Int32.one]. *) +val succ : int32 -> int32 -val pred : int32 -> int32 (** Predecessor. [Int32.pred x] is [Int32.sub x Int32.one]. *) +val pred : int32 -> int32 -val abs : int32 -> int32 (** Return the absolute value of its argument. *) +val abs : int32 -> int32 -val max_int : int32 (** The greatest representable 32-bit integer, 2{^31} - 1. *) +val max_int : int32 -val min_int : int32 (** The smallest representable 32-bit integer, -2{^31}. *) +val min_int : int32 - -external logand : int32 -> int32 -> int32 = "%int32_and" (** Bitwise logical and. *) +external logand : int32 -> int32 -> int32 = "%int32_and" -external logor : int32 -> int32 -> int32 = "%int32_or" (** Bitwise logical or. *) +external logor : int32 -> int32 -> int32 = "%int32_or" -external logxor : int32 -> int32 -> int32 = "%int32_xor" (** Bitwise logical exclusive or. *) +external logxor : int32 -> int32 -> int32 = "%int32_xor" -val lognot : int32 -> int32 (** Bitwise logical negation. *) +val lognot : int32 -> int32 -external shift_left : int32 -> int -> int32 = "%int32_lsl" (** [Int32.shift_left x y] shifts [x] to the left by [y] bits. The result is unspecified if [y < 0] or [y >= 32]. *) +external shift_left : int32 -> int -> int32 = "%int32_lsl" -external shift_right : int32 -> int -> int32 = "%int32_asr" (** [Int32.shift_right x y] shifts [x] to the right by [y] bits. This is an arithmetic shift: the sign bit of [x] is replicated and inserted in the vacated bits. The result is unspecified if [y < 0] or [y >= 32]. *) +external shift_right : int32 -> int -> int32 = "%int32_asr" -external shift_right_logical : int32 -> int -> int32 = "%int32_lsr" (** [Int32.shift_right_logical x y] shifts [x] to the right by [y] bits. This is a logical shift: zeroes are inserted in the vacated bits regardless of the sign of [x]. The result is unspecified if [y < 0] or [y >= 32]. *) +external shift_right_logical : int32 -> int -> int32 = "%int32_lsr" -external of_int : int -> int32 = "%int32_of_int" (** Convert the given integer (type [int]) to a 32-bit integer (type [int32]). *) +external of_int : int -> int32 = "%int32_of_int" -external to_int : int32 -> int = "%int32_to_int" (** Convert the given 32-bit integer (type [int32]) to an integer (type [int]). On 32-bit platforms, the 32-bit integer is taken modulo 2{^31}, i.e. the high-order bit is lost during the conversion. On 64-bit platforms, the conversion is exact. *) +external to_int : int32 -> int = "%int32_to_int" -external of_float : float -> int32 - = "caml_int32_of_float" "caml_int32_of_float_unboxed" -[@@unboxed] [@@noalloc] (** Convert the given floating-point number to a 32-bit integer, discarding the fractional part (truncate towards 0). The result of the conversion is undefined if, after truncation, the number is outside the range \[{!Int32.min_int}, {!Int32.max_int}\]. *) +external of_float : float -> int32 + = "caml_int32_of_float" "caml_int32_of_float_unboxed" + [@@unboxed] [@@noalloc] +(** Convert the given 32-bit integer to a floating-point number. *) external to_float : int32 -> float = "caml_int32_to_float" "caml_int32_to_float_unboxed" -[@@unboxed] [@@noalloc] -(** Convert the given 32-bit integer to a floating-point number. *) + [@@unboxed] [@@noalloc] -external of_string : string -> int32 = "caml_int32_of_string" (** Convert the given string to a 32-bit integer. 
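To make the exact 32-bit semantics concrete, a small sketch combining the arithmetic, logical and shift operations above; nothing here is specific to the tree:

let masked_double (x : int32) : int32 =
  (* shift left by one (multiply by two modulo 2^32), keep the low 16 bits *)
  Int32.logand (Int32.shift_left x 1) 0xFFFFl

let () =
  assert (Int32.equal (masked_double 0x7FFFl) 0xFFFEl) ;
  (* max_int + 1 wraps around to min_int *)
  assert (Int32.equal (Int32.add Int32.max_int Int32.one) Int32.min_int)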
The string is read in decimal (by default, or if the string begins with [0u]) or in hexadecimal, octal or binary if the @@ -149,40 +147,40 @@ external of_string : string -> int32 = "caml_int32_of_string" Raise [Failure "Int32.of_string"] if the given string is not a valid representation of an integer, or if the integer represented exceeds the range of integers representable in type [int32]. *) +external of_string : string -> int32 = "caml_int32_of_string" -val of_string_opt: string -> int32 option (** Same as [of_string], but return [None] instead of raising. @since 4.05 *) +val of_string_opt : string -> int32 option - -val to_string : int32 -> string (** Return the string representation of its argument, in signed decimal. *) +val to_string : int32 -> string -external bits_of_float : float -> int32 - = "caml_int32_bits_of_float" "caml_int32_bits_of_float_unboxed" -[@@unboxed] [@@noalloc] (** Return the internal representation of the given float according to the IEEE 754 floating-point 'single format' bit layout. Bit 31 of the result represents the sign of the float; bits 30 to 23 represent the (biased) exponent; bits 22 to 0 represent the mantissa. *) +external bits_of_float : float -> int32 + = "caml_int32_bits_of_float" "caml_int32_bits_of_float_unboxed" + [@@unboxed] [@@noalloc] -external float_of_bits : int32 -> float - = "caml_int32_float_of_bits" "caml_int32_float_of_bits_unboxed" -[@@unboxed] [@@noalloc] (** Return the floating-point number whose internal representation, according to the IEEE 754 floating-point 'single format' bit layout, is the given [int32]. *) +external float_of_bits : int32 -> float + = "caml_int32_float_of_bits" "caml_int32_float_of_bits_unboxed" + [@@unboxed] [@@noalloc] -type t = int32 (** An alias for the type of 32-bit integers. *) +type t = int32 -val compare: t -> t -> int (** The comparison function for 32-bit integers, with the same specification as {!Pervasives.compare}. Along with the type [t], this function [compare] allows the module [Int32] to be passed as argument to the functors {!Set.Make} and {!Map.Make}. *) +val compare : t -> t -> int -val equal: t -> t -> bool (** The equal function for int32s. @since 4.03.0 *) +val equal : t -> t -> bool diff --git a/src/lib_protocol_environment/sigs/v1/int64.mli b/src/lib_protocol_environment/sigs/v1/int64.mli index 0ea0ec32cd7d1c787d3b68a610be08372808d1be..3c1a11567dc2b9ff3ed3b591b82a2344f1dbb558 100644 --- a/src/lib_protocol_environment/sigs/v1/int64.mli +++ b/src/lib_protocol_environment/sigs/v1/int64.mli @@ -33,128 +33,126 @@ only when the application requires exact 64-bit arithmetic. *) -val zero : int64 (** The 64-bit integer 0. *) +val zero : int64 -val one : int64 (** The 64-bit integer 1. *) +val one : int64 -val minus_one : int64 (** The 64-bit integer -1. *) +val minus_one : int64 -external neg : int64 -> int64 = "%int64_neg" (** Unary negation. *) +external neg : int64 -> int64 = "%int64_neg" -external add : int64 -> int64 -> int64 = "%int64_add" (** Addition. *) +external add : int64 -> int64 -> int64 = "%int64_add" -external sub : int64 -> int64 -> int64 = "%int64_sub" (** Subtraction. *) +external sub : int64 -> int64 -> int64 = "%int64_sub" -external mul : int64 -> int64 -> int64 = "%int64_mul" (** Multiplication. *) +external mul : int64 -> int64 -> int64 = "%int64_mul" -external div : int64 -> int64 -> int64 = "%int64_div" (** Integer division. Raise [Division_by_zero] if the second argument is zero. 
This division rounds the real quotient of its arguments towards zero, as specified for {!Pervasives.(/)}. *) +external div : int64 -> int64 -> int64 = "%int64_div" -external rem : int64 -> int64 -> int64 = "%int64_mod" (** Integer remainder. If [y] is not zero, the result of [Int64.rem x y] satisfies the following property: [x = Int64.add (Int64.mul (Int64.div x y) y) (Int64.rem x y)]. If [y = 0], [Int64.rem x y] raises [Division_by_zero]. *) +external rem : int64 -> int64 -> int64 = "%int64_mod" -val succ : int64 -> int64 (** Successor. [Int64.succ x] is [Int64.add x Int64.one]. *) +val succ : int64 -> int64 -val pred : int64 -> int64 (** Predecessor. [Int64.pred x] is [Int64.sub x Int64.one]. *) +val pred : int64 -> int64 -val abs : int64 -> int64 (** Return the absolute value of its argument. *) +val abs : int64 -> int64 -val max_int : int64 (** The greatest representable 64-bit integer, 2{^63} - 1. *) +val max_int : int64 -val min_int : int64 (** The smallest representable 64-bit integer, -2{^63}. *) +val min_int : int64 -external logand : int64 -> int64 -> int64 = "%int64_and" (** Bitwise logical and. *) +external logand : int64 -> int64 -> int64 = "%int64_and" -external logor : int64 -> int64 -> int64 = "%int64_or" (** Bitwise logical or. *) +external logor : int64 -> int64 -> int64 = "%int64_or" -external logxor : int64 -> int64 -> int64 = "%int64_xor" (** Bitwise logical exclusive or. *) +external logxor : int64 -> int64 -> int64 = "%int64_xor" -val lognot : int64 -> int64 (** Bitwise logical negation. *) +val lognot : int64 -> int64 -external shift_left : int64 -> int -> int64 = "%int64_lsl" (** [Int64.shift_left x y] shifts [x] to the left by [y] bits. The result is unspecified if [y < 0] or [y >= 64]. *) +external shift_left : int64 -> int -> int64 = "%int64_lsl" -external shift_right : int64 -> int -> int64 = "%int64_asr" (** [Int64.shift_right x y] shifts [x] to the right by [y] bits. This is an arithmetic shift: the sign bit of [x] is replicated and inserted in the vacated bits. The result is unspecified if [y < 0] or [y >= 64]. *) +external shift_right : int64 -> int -> int64 = "%int64_asr" -external shift_right_logical : int64 -> int -> int64 = "%int64_lsr" (** [Int64.shift_right_logical x y] shifts [x] to the right by [y] bits. This is a logical shift: zeroes are inserted in the vacated bits regardless of the sign of [x]. The result is unspecified if [y < 0] or [y >= 64]. *) +external shift_right_logical : int64 -> int -> int64 = "%int64_lsr" -external of_int : int -> int64 = "%int64_of_int" (** Convert the given integer (type [int]) to a 64-bit integer (type [int64]). *) +external of_int : int -> int64 = "%int64_of_int" -external to_int : int64 -> int = "%int64_to_int" (** Convert the given 64-bit integer (type [int64]) to an integer (type [int]). On 64-bit platforms, the 64-bit integer is taken modulo 2{^63}, i.e. the high-order bit is lost during the conversion. On 32-bit platforms, the 64-bit integer is taken modulo 2{^31}, i.e. the top 33 bits are lost during the conversion. *) +external to_int : int64 -> int = "%int64_to_int" -external of_float : float -> int64 - = "caml_int64_of_float" "caml_int64_of_float_unboxed" -[@@unboxed] [@@noalloc] (** Convert the given floating-point number to a 64-bit integer, discarding the fractional part (truncate towards 0). The result of the conversion is undefined if, after truncation, the number is outside the range \[{!Int64.min_int}, {!Int64.max_int}\]. 
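The documented relationship between [div] and [rem] (rounding towards zero) can be checked directly; a minimal sketch, not tied to anything in the tree:

(* For any y <> 0L:  x = (x / y) * y + (x rem y), rounding towards zero. *)
let check_div_rem x y =
  Int64.equal x (Int64.add (Int64.mul (Int64.div x y) y) (Int64.rem x y))

let () =
  assert (check_div_rem 7L 2L) ;      (* 7 = 3 * 2 + 1 *)
  assert (check_div_rem (-7L) 2L)     (* -7 = (-3) * 2 + (-1): towards zero *)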
*) +external of_float : float -> int64 + = "caml_int64_of_float" "caml_int64_of_float_unboxed" + [@@unboxed] [@@noalloc] +(** Convert the given 64-bit integer to a floating-point number. *) external to_float : int64 -> float = "caml_int64_to_float" "caml_int64_to_float_unboxed" -[@@unboxed] [@@noalloc] -(** Convert the given 64-bit integer to a floating-point number. *) - + [@@unboxed] [@@noalloc] -external of_int32 : int32 -> int64 = "%int64_of_int32" (** Convert the given 32-bit integer (type [int32]) to a 64-bit integer (type [int64]). *) +external of_int32 : int32 -> int64 = "%int64_of_int32" -external to_int32 : int64 -> int32 = "%int64_to_int32" (** Convert the given 64-bit integer (type [int64]) to a 32-bit integer (type [int32]). The 64-bit integer is taken modulo 2{^32}, i.e. the top 32 bits are lost during the conversion. *) +external to_int32 : int64 -> int32 = "%int64_to_int32" -external of_nativeint : nativeint -> int64 = "%int64_of_nativeint" (** Convert the given native integer (type [nativeint]) to a 64-bit integer (type [int64]). *) +external of_nativeint : nativeint -> int64 = "%int64_of_nativeint" -external to_nativeint : int64 -> nativeint = "%int64_to_nativeint" (** Convert the given 64-bit integer (type [int64]) to a native integer. On 32-bit platforms, the 64-bit integer is taken modulo 2{^32}. On 64-bit platforms, the conversion is exact. *) +external to_nativeint : int64 -> nativeint = "%int64_to_nativeint" -external of_string : string -> int64 = "caml_int64_of_string" (** Convert the given string to a 64-bit integer. The string is read in decimal (by default, or if the string begins with [0u]) or in hexadecimal, octal or binary if the @@ -170,39 +168,40 @@ external of_string : string -> int64 = "caml_int64_of_string" Raise [Failure "Int64.of_string"] if the given string is not a valid representation of an integer, or if the integer represented exceeds the range of integers representable in type [int64]. *) +external of_string : string -> int64 = "caml_int64_of_string" -val of_string_opt: string -> int64 option (** Same as [of_string], but return [None] instead of raising. @since 4.05 *) +val of_string_opt : string -> int64 option -val to_string : int64 -> string (** Return the string representation of its argument, in decimal. *) +val to_string : int64 -> string -external bits_of_float : float -> int64 - = "caml_int64_bits_of_float" "caml_int64_bits_of_float_unboxed" -[@@unboxed] [@@noalloc] (** Return the internal representation of the given float according to the IEEE 754 floating-point 'double format' bit layout. Bit 63 of the result represents the sign of the float; bits 62 to 52 represent the (biased) exponent; bits 51 to 0 represent the mantissa. *) +external bits_of_float : float -> int64 + = "caml_int64_bits_of_float" "caml_int64_bits_of_float_unboxed" + [@@unboxed] [@@noalloc] -external float_of_bits : int64 -> float - = "caml_int64_float_of_bits" "caml_int64_float_of_bits_unboxed" -[@@unboxed] [@@noalloc] (** Return the floating-point number whose internal representation, according to the IEEE 754 floating-point 'double format' bit layout, is the given [int64]. *) +external float_of_bits : int64 -> float + = "caml_int64_float_of_bits" "caml_int64_float_of_bits_unboxed" + [@@unboxed] [@@noalloc] -type t = int64 (** An alias for the type of 64-bit integers. *) +type t = int64 -val compare: t -> t -> int (** The comparison function for 64-bit integers, with the same specification as {!Pervasives.compare}. 
Along with the type [t], this function [compare] allows the module [Int64] to be passed as argument to the functors {!Set.Make} and {!Map.Make}. *) +val compare : t -> t -> int -val equal: t -> t -> bool (** The equal function for int64s. @since 4.03.0 *) +val equal : t -> t -> bool diff --git a/src/lib_protocol_environment/sigs/v1/json.mli b/src/lib_protocol_environment/sigs/v1/json.mli index c84efdacad3332166768dd64368aa2441d4030ea..5af3ac389f4f6997b18bbc8cf4e821861eb51471 100644 --- a/src/lib_protocol_environment/sigs/v1/json.mli +++ b/src/lib_protocol_environment/sigs/v1/json.mli @@ -41,4 +41,5 @@ val to_string : json -> string (** Helpers for [Data_encoding] *) val cannot_destruct : ('a, Format.formatter, unit, 'b) format4 -> 'a + val wrap_error : ('a -> 'b) -> 'a -> 'b diff --git a/src/lib_protocol_environment/sigs/v1/list.mli b/src/lib_protocol_environment/sigs/v1/list.mli index 1e37377d8922919da3ac36bf6190b3c665a7b1fc..cff950af020f115ad6ff8b5e3a909106894f1ea8 100644 --- a/src/lib_protocol_environment/sigs/v1/list.mli +++ b/src/lib_protocol_environment/sigs/v1/list.mli @@ -26,208 +26,197 @@ longer than about 10000 elements. *) -val length : 'a list -> int (** Return the length (number of elements) of the given list. *) +val length : 'a list -> int -val compare_lengths : 'a list -> 'b list -> int (** Compare the lengths of two lists. [compare_lengths l1 l2] is equivalent to [compare (length l1) (length l2)], except that the computation stops after itering on the shortest list. @since 4.05.0 *) +val compare_lengths : 'a list -> 'b list -> int -val compare_length_with : 'a list -> int -> int (** Compare the length of a list to an integer. [compare_length_with l n] is equivalent to [compare (length l) n], except that the computation stops after at most [n] iterations on the list. @since 4.05.0 *) +val compare_length_with : 'a list -> int -> int -val cons : 'a -> 'a list -> 'a list (** [cons x xs] is [x :: xs] @since 4.03.0 *) +val cons : 'a -> 'a list -> 'a list -val hd : 'a list -> 'a (** Return the first element of the given list. Raise [Failure "hd"] if the list is empty. *) +val hd : 'a list -> 'a -val tl : 'a list -> 'a list (** Return the given list without its first element. Raise [Failure "tl"] if the list is empty. *) +val tl : 'a list -> 'a list -val nth_opt : 'a list -> int -> 'a option (** Return the [n]-th element of the given list. The first element (head of the list) is at position 0. Return [None] if the list is too short. Raise [Invalid_argument "List.nth"] if [n] is negative. @since 4.05 *) +val nth_opt : 'a list -> int -> 'a option -val rev : 'a list -> 'a list (** List reversal. *) +val rev : 'a list -> 'a list -val init : int -> (int -> 'a) -> 'a list (** [List.init len f] is [f 0; f 1; ...; f (len-1)], evaluated left to right. @raise Invalid_argument if len < 0. @since 4.06.0 *) +val init : int -> (int -> 'a) -> 'a list -val append : 'a list -> 'a list -> 'a list (** Concatenate two lists. Same as the infix operator [@]. Not tail-recursive (length of the first argument). *) +val append : 'a list -> 'a list -> 'a list -val rev_append : 'a list -> 'a list -> 'a list (** [List.rev_append l1 l2] reverses [l1] and concatenates it to [l2]. This is equivalent to {!List.rev}[ l1 @ l2], but [rev_append] is tail-recursive and more efficient. *) +val rev_append : 'a list -> 'a list -> 'a list -val concat : 'a list list -> 'a list (** Concatenate a list of lists. The elements of the argument are all concatenated together (in the same order) to give the result. 
Not tail-recursive (length of the argument + length of the longest sub-list). *) +val concat : 'a list list -> 'a list -val flatten : 'a list list -> 'a list (** An alias for [concat]. *) - +val flatten : 'a list list -> 'a list (** {1 Iterators} *) - -val iter : ('a -> unit) -> 'a list -> unit (** [List.iter f [a1; ...; an]] applies function [f] in turn to [a1; ...; an]. It is equivalent to [begin f a1; f a2; ...; f an; () end]. *) +val iter : ('a -> unit) -> 'a list -> unit -val iteri : (int -> 'a -> unit) -> 'a list -> unit (** Same as {!List.iter}, but the function is applied to the index of the element as first argument (counting from 0), and the element itself as second argument. @since 4.00.0 *) +val iteri : (int -> 'a -> unit) -> 'a list -> unit -val map : ('a -> 'b) -> 'a list -> 'b list (** [List.map f [a1; ...; an]] applies function [f] to [a1, ..., an], and builds the list [[f a1; ...; f an]] with the results returned by [f]. Not tail-recursive. *) +val map : ('a -> 'b) -> 'a list -> 'b list -val mapi : (int -> 'a -> 'b) -> 'a list -> 'b list (** Same as {!List.map}, but the function is applied to the index of the element as first argument (counting from 0), and the element itself as second argument. Not tail-recursive. @since 4.00.0 *) +val mapi : (int -> 'a -> 'b) -> 'a list -> 'b list -val rev_map : ('a -> 'b) -> 'a list -> 'b list (** [List.rev_map f l] gives the same result as {!List.rev}[ (]{!List.map}[ f l)], but is tail-recursive and more efficient. *) +val rev_map : ('a -> 'b) -> 'a list -> 'b list -val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b list -> 'a (** [List.fold_left f a [b1; ...; bn]] is [f (... (f (f a b1) b2) ...) bn]. *) +val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b list -> 'a -val fold_right : ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b (** [List.fold_right f [a1; ...; an] b] is [f a1 (f a2 (... (f an b) ...))]. Not tail-recursive. *) - +val fold_right : ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b (** {1 Iterators on two lists} *) - -val iter2 : ('a -> 'b -> unit) -> 'a list -> 'b list -> unit (** [List.iter2 f [a1; ...; an] [b1; ...; bn]] calls in turn [f a1 b1; ...; f an bn]. Raise [Invalid_argument] if the two lists are determined to have different lengths. *) +val iter2 : ('a -> 'b -> unit) -> 'a list -> 'b list -> unit -val map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list (** [List.map2 f [a1; ...; an] [b1; ...; bn]] is [[f a1 b1; ...; f an bn]]. Raise [Invalid_argument] if the two lists are determined to have different lengths. Not tail-recursive. *) +val map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list -val rev_map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list (** [List.rev_map2 f l1 l2] gives the same result as {!List.rev}[ (]{!List.map2}[ f l1 l2)], but is tail-recursive and more efficient. *) +val rev_map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list -val fold_left2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b list -> 'c list -> 'a (** [List.fold_left2 f a [b1; ...; bn] [c1; ...; cn]] is [f (... (f (f a b1 c1) b2 c2) ...) bn cn]. Raise [Invalid_argument] if the two lists are determined to have different lengths. *) +val fold_left2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b list -> 'c list -> 'a -val fold_right2 : ('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> 'c -> 'c (** [List.fold_right2 f [a1; ...; an] [b1; ...; bn] c] is [f a1 b1 (f a2 b2 (... (f an bn c) ...))]. Raise [Invalid_argument] if the two lists are determined to have different lengths. Not tail-recursive. 
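A short sketch exercising the iterators above ([map], [fold_left], [rev_map]); the data is illustrative:

let squares xs = List.map (fun x -> x * x) xs

let sum xs = List.fold_left ( + ) 0 xs

(* [rev_map] avoids the non-tail-recursive [map] on long lists,
   at the cost of producing the results in reverse order. *)
let squares_rev xs = List.rev_map (fun x -> x * x) xs

let () = assert (sum (squares [1; 2; 3]) = 14)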
*) - +val fold_right2 : ('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> 'c -> 'c (** {1 List scanning} *) - -val for_all : ('a -> bool) -> 'a list -> bool (** [for_all p [a1; ...; an]] checks if all elements of the list satisfy the predicate [p]. That is, it returns [(p a1) && (p a2) && ... && (p an)]. *) +val for_all : ('a -> bool) -> 'a list -> bool -val exists : ('a -> bool) -> 'a list -> bool (** [exists p [a1; ...; an]] checks if at least one element of the list satisfies the predicate [p]. That is, it returns [(p a1) || (p a2) || ... || (p an)]. *) +val exists : ('a -> bool) -> 'a list -> bool -val for_all2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool (** Same as {!List.for_all}, but for a two-argument predicate. Raise [Invalid_argument] if the two lists are determined to have different lengths. *) +val for_all2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool -val exists2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool (** Same as {!List.exists}, but for a two-argument predicate. Raise [Invalid_argument] if the two lists are determined to have different lengths. *) +val exists2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool -val mem : 'a -> 'a list -> bool (** [mem a l] is true if and only if [a] is equal to an element of [l]. *) +val mem : 'a -> 'a list -> bool -val memq : 'a -> 'a list -> bool (** Same as {!List.mem}, but uses physical equality instead of structural equality to compare list elements. *) - +val memq : 'a -> 'a list -> bool (** {1 List searching} *) - -val find_opt: ('a -> bool) -> 'a list -> 'a option (** [find_opt p l] returns the first element of the list [l] that satisfies the predicate [p], or [None] if there is no value that satisfies [p] in the list [l]. @since 4.05 *) +val find_opt : ('a -> bool) -> 'a list -> 'a option -val filter : ('a -> bool) -> 'a list -> 'a list (** [filter p l] returns all the elements of the list [l] that satisfy the predicate [p]. The order of the elements in the input list is preserved. *) +val filter : ('a -> bool) -> 'a list -> 'a list -val find_all : ('a -> bool) -> 'a list -> 'a list (** [find_all] is another name for {!List.filter}. *) +val find_all : ('a -> bool) -> 'a list -> 'a list -val partition : ('a -> bool) -> 'a list -> 'a list * 'a list (** [partition p l] returns a pair of lists [(l1, l2)], where [l1] is the list of all the elements of [l] that satisfy the predicate [p], and [l2] is the list of all the elements of [l] that do not satisfy [p]. The order of the elements in the input list is preserved. *) - +val partition : ('a -> bool) -> 'a list -> 'a list * 'a list (** {1 Association lists} *) - -val assoc_opt: 'a -> ('a * 'b) list -> 'b option (** [assoc_opt a l] returns the value associated with key [a] in the list of pairs [l]. That is, [assoc_opt a [ ...; (a,b); ...] = b] @@ -235,51 +224,47 @@ val assoc_opt: 'a -> ('a * 'b) list -> 'b option Returns [None] if there is no value associated with [a] in the list [l]. @since 4.05 *) +val assoc_opt : 'a -> ('a * 'b) list -> 'b option -val assq_opt : 'a -> ('a * 'b) list -> 'b option (** Same as {!List.assoc_opt}, but uses physical equality instead of structural equality to compare keys. @since 4.05 *) +val assq_opt : 'a -> ('a * 'b) list -> 'b option -val mem_assoc : 'a -> ('a * 'b) list -> bool (** Same as {!List.assoc}, but simply return true if a binding exists, and false if no bindings exist for the given key. 
*) +val mem_assoc : 'a -> ('a * 'b) list -> bool -val mem_assq : 'a -> ('a * 'b) list -> bool (** Same as {!List.mem_assoc}, but uses physical equality instead of structural equality to compare keys. *) +val mem_assq : 'a -> ('a * 'b) list -> bool -val remove_assoc : 'a -> ('a * 'b) list -> ('a * 'b) list (** [remove_assoc a l] returns the list of pairs [l] without the first pair with key [a], if any. Not tail-recursive. *) +val remove_assoc : 'a -> ('a * 'b) list -> ('a * 'b) list -val remove_assq : 'a -> ('a * 'b) list -> ('a * 'b) list (** Same as {!List.remove_assoc}, but uses physical equality instead of structural equality to compare keys. Not tail-recursive. *) - +val remove_assq : 'a -> ('a * 'b) list -> ('a * 'b) list (** {1 Lists of pairs} *) - -val split : ('a * 'b) list -> 'a list * 'b list (** Transform a list of pairs into a pair of lists: [split [(a1,b1); ...; (an,bn)]] is [([a1; ...; an], [b1; ...; bn])]. Not tail-recursive. *) +val split : ('a * 'b) list -> 'a list * 'b list -val combine : 'a list -> 'b list -> ('a * 'b) list (** Transform a pair of lists into a list of pairs: [combine [a1; ...; an] [b1; ...; bn]] is [[(a1,b1); ...; (an,bn)]]. Raise [Invalid_argument] if the two lists have different lengths. Not tail-recursive. *) - +val combine : 'a list -> 'b list -> ('a * 'b) list (** {1 Sorting} *) - -val sort : ('a -> 'a -> int) -> 'a list -> 'a list (** Sort a list in increasing order according to a comparison function. The comparison function must return 0 if its arguments compare as equal, a positive integer if the first is greater, @@ -294,8 +279,8 @@ val sort : ('a -> 'a -> int) -> 'a list -> 'a list The current implementation uses Merge Sort. It runs in constant heap space and logarithmic stack space. *) +val sort : ('a -> 'a -> int) -> 'a list -> 'a list -val stable_sort : ('a -> 'a -> int) -> 'a list -> 'a list (** Same as {!List.sort}, but the sorting algorithm is guaranteed to be stable (i.e. elements that compare equal are kept in their original order) . @@ -303,16 +288,16 @@ val stable_sort : ('a -> 'a -> int) -> 'a list -> 'a list The current implementation uses Merge Sort. It runs in constant heap space and logarithmic stack space. *) +val stable_sort : ('a -> 'a -> int) -> 'a list -> 'a list -val fast_sort : ('a -> 'a -> int) -> 'a list -> 'a list (** Same as {!List.sort} or {!List.stable_sort}, whichever is faster on typical input. *) +val fast_sort : ('a -> 'a -> int) -> 'a list -> 'a list -val sort_uniq : ('a -> 'a -> int) -> 'a list -> 'a list (** Same as {!List.sort}, but also remove duplicates. @since 4.02.0 *) +val sort_uniq : ('a -> 'a -> int) -> 'a list -> 'a list -val merge : ('a -> 'a -> int) -> 'a list -> 'a list -> 'a list (** Merge two lists: Assuming that [l1] and [l2] are sorted according to the comparison function [cmp], [merge cmp l1 l2] will return a @@ -321,3 +306,4 @@ val merge : ('a -> 'a -> int) -> 'a list -> 'a list -> 'a list before the elements of [l2]. Not tail-recursive (sum of the lengths of the arguments). 
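A companion sketch for the searching, association-list and sorting functions; names and data are purely illustrative:

let balances = [("alice", 10); ("bob", 3); ("alice", 99)]

(* [assoc_opt] returns the first binding, or [None]. *)
let alice = List.assoc_opt "alice" balances   (* Some 10 *)

(* Keep strictly positive balances, largest first, dropping duplicates. *)
let positive_sorted =
  balances
  |> List.filter (fun (_, b) -> b > 0)
  |> List.map snd
  |> List.sort_uniq (fun a b -> compare b a)

(* positive_sorted = [99; 10; 3] *)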
*) +val merge : ('a -> 'a -> int) -> 'a list -> 'a list -> 'a list diff --git a/src/lib_protocol_environment/sigs/v1/logging.mli b/src/lib_protocol_environment/sigs/v1/logging.mli index 2432fe4c1a71d43414f257a45a513f6ed2ebc017..445fbc1ffc3a456b18aa8022a7e162888062d4f3 100644 --- a/src/lib_protocol_environment/sigs/v1/logging.mli +++ b/src/lib_protocol_environment/sigs/v1/logging.mli @@ -23,15 +23,24 @@ (* *) (*****************************************************************************) -val debug: ('a, Format.formatter, unit, unit) format4 -> 'a -val log_info: ('a, Format.formatter, unit, unit) format4 -> 'a -val log_notice: ('a, Format.formatter, unit, unit) format4 -> 'a -val warn: ('a, Format.formatter, unit, unit) format4 -> 'a -val log_error: ('a, Format.formatter, unit, unit) format4 -> 'a -val fatal_error: ('a, Format.formatter, unit, unit) format4 -> 'a - -val lwt_debug: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a -val lwt_log_info: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a -val lwt_log_notice: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a -val lwt_warn: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a -val lwt_log_error: ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a +val debug : ('a, Format.formatter, unit, unit) format4 -> 'a + +val log_info : ('a, Format.formatter, unit, unit) format4 -> 'a + +val log_notice : ('a, Format.formatter, unit, unit) format4 -> 'a + +val warn : ('a, Format.formatter, unit, unit) format4 -> 'a + +val log_error : ('a, Format.formatter, unit, unit) format4 -> 'a + +val fatal_error : ('a, Format.formatter, unit, unit) format4 -> 'a + +val lwt_debug : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + +val lwt_log_info : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + +val lwt_log_notice : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + +val lwt_warn : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a + +val lwt_log_error : ('a, Format.formatter, unit, unit Lwt.t) format4 -> 'a diff --git a/src/lib_protocol_environment/sigs/v1/lwt.mli b/src/lib_protocol_environment/sigs/v1/lwt.mli index 97fafb14cae6f6205f99b4e0b25ce13d8a2e9f44..8cbdc9a933b6a5cb69cd6159261b144d5231b2d1 100644 --- a/src/lib_protocol_environment/sigs/v1/lwt.mli +++ b/src/lib_protocol_environment/sigs/v1/lwt.mli @@ -20,7 +20,7 @@ * License along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA * 02111-1307, USA. -*) + *) (* TEZOS CHANGES @@ -36,7 +36,6 @@ * lwt exceptions *) - (** Module [Lwt]: cooperative light-weight threads. *) (** This module defines {e cooperative light-weight threads} with @@ -61,17 +60,16 @@ (** {2 Definitions and basics} *) -type +'a t (** The type of threads returning a result of type ['a]. *) +type +'a t -val return : 'a -> 'a t (** [return e] is a thread whose return value is the value of the expression [e]. *) +val return : 'a -> 'a t (* val fail : exn -> 'a t *) (* (\** [fail e] is a thread that fails with the exception [e]. *\) *) -val bind : 'a t -> ('a -> 'b t) -> 'b t (** [bind t f] is a thread which first waits for the thread [t] to terminate and then, if the thread succeeds, behaves as the application of function [f] to the return value of [t]. If the @@ -86,39 +84,40 @@ val bind : 'a t -> ('a -> 'b t) -> 'b t purpose: [t'] will not execute before [t] is terminated. The result of a thread can be bound several time. 
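The logging functions shown above in logging.mli are ordinary format-string printers; a sketch of how they read in protocol code, with made-up message content (the [lwt_*] variants return [unit Lwt.t] instead of [unit]):

let report_block level n_ops =
  log_info "validated block at level %ld with %d operations" level n_ops

let report_missing hash_b58 =
  lwt_log_error "cannot fetch block %s" hash_b58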
*) +val bind : 'a t -> ('a -> 'b t) -> 'b t -val (>>=) : 'a t -> ('a -> 'b t) -> 'b t (** [t >>= f] is an alternative notation for [bind t f]. *) +val ( >>= ) : 'a t -> ('a -> 'b t) -> 'b t -val (=<<) : ('a -> 'b t) -> 'a t -> 'b t (** [f =<< t] is [t >>= f] *) +val ( =<< ) : ('a -> 'b t) -> 'a t -> 'b t -val map : ('a -> 'b) -> 'a t -> 'b t (** [map f m] map the result of a thread. This is the same as [bind m (fun x -> return (f x))] *) +val map : ('a -> 'b) -> 'a t -> 'b t -val (>|=) : 'a t -> ('a -> 'b) -> 'b t (** [m >|= f] is [map f m] *) +val ( >|= ) : 'a t -> ('a -> 'b) -> 'b t -val (=|<) : ('a -> 'b) -> 'a t -> 'b t (** [f =|< m] is [map f m] *) +val ( =|< ) : ('a -> 'b) -> 'a t -> 'b t (** {3 Pre-allocated threads} *) -val return_unit : unit t (** [return_unit = return ()] *) +val return_unit : unit t -val return_none : 'a option t (** [return_none = return None] *) +val return_none : 'a option t -val return_nil : 'a list t (** [return_nil = return \[\]] *) +val return_nil : 'a list t -val return_true : bool t (** [return_true = return true] *) +val return_true : bool t -val return_false : bool t (** [return_false = return false] *) +val return_false : bool t (* (\** {2 Thread storage} *\) *) @@ -222,19 +221,19 @@ val return_false : bool t (* (\** [nchoose_split l] does the same as {!nchoose} but also retrurns *) (* the list of threads that have not yet terminated. *\) *) -val join : unit t list -> unit t (** [join l] waits for all threads in [l] to terminate. If one of the threads fails, then [join l] will fails with the same exception as the first one to terminate. Note: {!join} leaves the local values of the current thread unchanged. *) +val join : unit t list -> unit t (* val ( <?> ) : 'a t -> 'a t -> 'a t *) (* (\** [t <?> t'] is the same as [choose [t; t']] *\) *) -val ( <&> ) : unit t -> unit t -> unit t (** [t <&> t'] is the same as [join [t; t']] *) +val ( <&> ) : unit t -> unit t -> unit t (* val async : (unit -> 'a t) -> unit *) (* (\** [async f] starts a thread without waiting for the result. If it *) diff --git a/src/lib_protocol_environment/sigs/v1/lwt_list.mli b/src/lib_protocol_environment/sigs/v1/lwt_list.mli index 40282d776e6281fb401cd6d8426ab421939b1be8..b6d43389f0b94a25387ca33a4b27836e45aa6d13 100644 --- a/src/lib_protocol_environment/sigs/v1/lwt_list.mli +++ b/src/lib_protocol_environment/sigs/v1/lwt_list.mli @@ -18,7 +18,7 @@ * License along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA * 02111-1307, USA. 
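A minimal sketch of the Lwt combinators above ([bind]/[>>=], [>|=] and [join]); [fetch] is an assumed function of type [string -> int Lwt.t]:

let total_lwt fetch =
  let open Lwt in
  fetch "a" >>= fun a ->
  fetch "b" >|= fun b ->
  a + b

(* Wait for two unit threads, failing if either fails. *)
let both t1 t2 = Lwt.join [t1; t2]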
-*) + *) (** List helpers *) @@ -34,12 +34,15 @@ (** {2 List iterators} *) val map_s : ('a -> 'b Lwt.t) -> 'a list -> 'b list Lwt.t + val map_p : ('a -> 'b Lwt.t) -> 'a list -> 'b list Lwt.t val mapi_s : (int -> 'a -> 'b Lwt.t) -> 'a list -> 'b list Lwt.t + val mapi_p : (int -> 'a -> 'b Lwt.t) -> 'a list -> 'b list Lwt.t val rev_map_s : ('a -> 'b Lwt.t) -> 'a list -> 'b list Lwt.t + val rev_map_p : ('a -> 'b Lwt.t) -> 'a list -> 'b list Lwt.t val fold_left_s : ('a -> 'b -> 'a Lwt.t) -> 'a -> 'b list -> 'a Lwt.t @@ -49,9 +52,11 @@ val fold_right_s : ('a -> 'b -> 'b Lwt.t) -> 'a list -> 'b -> 'b Lwt.t (** {2 List scanning} *) val for_all_s : ('a -> bool Lwt.t) -> 'a list -> bool Lwt.t + val for_all_p : ('a -> bool Lwt.t) -> 'a list -> bool Lwt.t val exists_s : ('a -> bool Lwt.t) -> 'a list -> bool Lwt.t + val exists_p : ('a -> bool Lwt.t) -> 'a list -> bool Lwt.t (** {2 List searching} *) @@ -59,10 +64,13 @@ val exists_p : ('a -> bool Lwt.t) -> 'a list -> bool Lwt.t val find_s : ('a -> bool Lwt.t) -> 'a list -> 'a Lwt.t val filter_s : ('a -> bool Lwt.t) -> 'a list -> 'a list Lwt.t + val filter_p : ('a -> bool Lwt.t) -> 'a list -> 'a list Lwt.t val filter_map_s : ('a -> 'b option Lwt.t) -> 'a list -> 'b list Lwt.t + val filter_map_p : ('a -> 'b option Lwt.t) -> 'a list -> 'b list Lwt.t val partition_s : ('a -> bool Lwt.t) -> 'a list -> ('a list * 'a list) Lwt.t + val partition_p : ('a -> bool Lwt.t) -> 'a list -> ('a list * 'a list) Lwt.t diff --git a/src/lib_protocol_environment/sigs/v1/mBytes.mli b/src/lib_protocol_environment/sigs/v1/mBytes.mli index fcfac0f76dc4bc12805bd338c99fd9aa733a9961..b56657e1e77382f3878da8c260dee18022d2988c 100644 --- a/src/lib_protocol_environment/sigs/v1/mBytes.mli +++ b/src/lib_protocol_environment/sigs/v1/mBytes.mli @@ -25,124 +25,126 @@ type t -val create: int -> t +val create : int -> t -val length: t -> int +val length : t -> int -val copy: t -> t +val copy : t -> t -val sub: t -> int -> int -> t (** [sub src ofs len] extract a sub-array of [src] starting at [ofs] and of length [len]. No copying of elements is involved: the sub-array and the original array share the same storage space. *) +val sub : t -> int -> int -> t -val blit: t -> int -> t -> int -> int -> unit (** [blit src ofs_src dst ofs_dst len] copy [len] bytes from [src] starting at [ofs_src] into [dst] starting at [ofs_dst]. *) +val blit : t -> int -> t -> int -> int -> unit -val blit_of_string: string -> int -> t -> int -> int -> unit (** See [blit] *) +val blit_of_string : string -> int -> t -> int -> int -> unit -val blit_to_bytes: t -> int -> bytes -> int -> int -> unit (** See [blit] *) +val blit_to_bytes : t -> int -> bytes -> int -> int -> unit -val of_string: string -> t (** [of_string s] create an byte array filled with the same content than [s]. *) +val of_string : string -> t -val to_string: t -> string (** [to_string b] dump the array content in a [string]. *) +val to_string : t -> string -val sub_string: t -> int -> int -> string (** [sub_string b ofs len] is equivalent to [to_string (sub b ofs len)]. *) - - +val sub_string : t -> int -> int -> string (** Functions reading and writing bytes *) -val get_char: t -> int -> char (** [get_char buff i] reads 1 byte at offset i as a char *) +val get_char : t -> int -> char -val get_uint8: t -> int -> int (** [get_uint8 buff i] reads 1 byte at offset i as an unsigned int of 8 bits. i.e. 
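In Lwt_list the [_s] suffix means the list is traversed sequentially and [_p] means the calls are issued in parallel. A sketch using [filter_map_s] with an assumed lookup function [find_opt_lwt : key -> value option Lwt.t]:

(* Resolve keys one after the other, keeping only the hits. *)
let resolve_all find_opt_lwt keys =
  Lwt_list.filter_map_s find_opt_lwt keys

(* The parallel variant issues every lookup at once. *)
let resolve_all_p find_opt_lwt keys =
  Lwt_list.filter_map_p find_opt_lwt keys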
It returns a value between 0 and 2^8-1 *) +val get_uint8 : t -> int -> int -val get_int8: t -> int -> int (** [get_int8 buff i] reads 1 byte at offset i as a signed int of 8 bits. i.e. It returns a value between -2^7 and 2^7-1 *) +val get_int8 : t -> int -> int -val set_char: t -> int -> char -> unit (** [set_char buff i v] writes [v] to [buff] at offset [i] *) +val set_char : t -> int -> char -> unit -val set_int8: t -> int -> int -> unit (** [set_int8 buff i v] writes the least significant 8 bits of [v] to [buff] at offset [i] *) +val set_int8 : t -> int -> int -> unit (** Functions reading according to Big Endian byte order *) -val get_uint16: t -> int -> int (** [get_uint16 buff i] reads 2 bytes at offset i as an unsigned int of 16 bits. i.e. It returns a value between 0 and 2^16-1 *) +val get_uint16 : t -> int -> int -val get_int16: t -> int -> int (** [get_int16 buff i] reads 2 byte at offset i as a signed int of 16 bits. i.e. It returns a value between -2^15 and 2^15-1 *) +val get_int16 : t -> int -> int -val get_int32: t -> int -> int32 (** [get_int32 buff i] reads 4 bytes at offset i as an int32. *) +val get_int32 : t -> int -> int32 -val get_int64: t -> int -> int64 (** [get_int64 buff i] reads 8 bytes at offset i as an int64. *) +val get_int64 : t -> int -> int64 -val set_int16: t -> int -> int -> unit (** [set_int16 buff i v] writes the least significant 16 bits of [v] to [buff] at offset [i] *) +val set_int16 : t -> int -> int -> unit -val set_int32: t -> int -> int32 -> unit (** [set_int32 buff i v] writes [v] to [buff] at offset [i] *) +val set_int32 : t -> int -> int32 -> unit -val set_int64: t -> int -> int64 -> unit (** [set_int64 buff i v] writes [v] to [buff] at offset [i] *) +val set_int64 : t -> int -> int64 -> unit - -module LE: sig - +module LE : sig (** Functions reading according to Little Endian byte order *) - val get_uint16: t -> int -> int (** [get_uint16 buff i] reads 2 bytes at offset i as an unsigned int of 16 bits. i.e. It returns a value between 0 and 2^16-1 *) + val get_uint16 : t -> int -> int - val get_int16: t -> int -> int (** [get_int16 buff i] reads 2 byte at offset i as a signed int of 16 bits. i.e. It returns a value between -2^15 and 2^15-1 *) + val get_int16 : t -> int -> int - val get_int32: t -> int -> int32 (** [get_int32 buff i] reads 4 bytes at offset i as an int32. *) + val get_int32 : t -> int -> int32 - val get_int64: t -> int -> int64 (** [get_int64 buff i] reads 8 bytes at offset i as an int64. 
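A sketch of the fixed-width MBytes accessors above, writing a big-endian 32-bit length prefix followed by a raw payload; the framing itself is made up for illustration:

let frame payload =
  let len = MBytes.length payload in
  let buf = MBytes.create (4 + len) in
  (* big-endian length prefix, then the payload bytes *)
  MBytes.set_int32 buf 0 (Int32.of_int len) ;
  MBytes.blit payload 0 buf 4 len ;
  buf

let payload_length framed = Int32.to_int (MBytes.get_int32 framed 0)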
*) + val get_int64 : t -> int -> int64 - val set_int16: t -> int -> int -> unit (** [set_int16 buff i v] writes the least significant 16 bits of [v] to [buff] at offset [i] *) + val set_int16 : t -> int -> int -> unit - val set_int32: t -> int -> int32 -> unit (** [set_int32 buff i v] writes [v] to [buff] at offset [i] *) + val set_int32 : t -> int -> int32 -> unit - val set_int64: t -> int -> int64 -> unit (** [set_int64 buff i v] writes [v] to [buff] at offset [i] *) - + val set_int64 : t -> int -> int64 -> unit end -val (=) : t -> t -> bool -val (<>) : t -> t -> bool -val (<) : t -> t -> bool -val (<=) : t -> t -> bool -val (>=) : t -> t -> bool -val (>) : t -> t -> bool +val ( = ) : t -> t -> bool + +val ( <> ) : t -> t -> bool + +val ( < ) : t -> t -> bool + +val ( <= ) : t -> t -> bool + +val ( >= ) : t -> t -> bool + +val ( > ) : t -> t -> bool + val compare : t -> t -> int -val concat: string -> t list -> t +val concat : string -> t list -> t + +val to_hex : t -> [`Hex of string] -val to_hex: t -> [ `Hex of string ] -val of_hex: [ `Hex of string ] -> t +val of_hex : [`Hex of string] -> t diff --git a/src/lib_protocol_environment/sigs/v1/map.mli b/src/lib_protocol_environment/sigs/v1/map.mli index 754fdba48d776bf77a8946645a39330643ad1a83..93c17e9b9a36fa5d9965e1c6e827045360e43468 100644 --- a/src/lib_protocol_environment/sigs/v1/map.mli +++ b/src/lib_protocol_environment/sigs/v1/map.mli @@ -43,12 +43,11 @@ values so its type is [string PairsMap.t]. *) -module type OrderedType = -sig - type t +(** Input signature of the functor {!Map.Make}. *) +module type OrderedType = sig (** The type of the map keys. *) + type t - val compare : t -> t -> int (** A total ordering function over the keys. This is a two-argument function [f] such that [f e1 e2] is zero if the keys [e1] and [e2] are equal, @@ -56,9 +55,9 @@ sig and [f e1 e2] is strictly positive if [e1] is greater than [e2]. Example: a suitable ordering function is the generic structural comparison function {!Pervasives.compare}. *) + val compare : t -> t -> int end -(** Input signature of the functor {!Map.Make}. *) -module Make (Ord : OrderedType) : S.MAP with type key = Ord.t (** Functor building an implementation of the map structure given a totally ordered type. 
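The [Map.Make] functor above builds a map from any [OrderedType]. A minimal sketch with string keys, assuming the usual [empty], [add] and [find_opt] operations from [S.MAP]; the module name [StringMap] is arbitrary:

module StringMap = Map.Make (String)

let counts =
  StringMap.empty
  |> StringMap.add "alpha" 1
  |> StringMap.add "beta" 2

let alpha_count = StringMap.find_opt "alpha" counts   (* Some 1 *)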
*) +module Make (Ord : OrderedType) : S.MAP with type key = Ord.t diff --git a/src/lib_protocol_environment/sigs/v1/micheline.mli b/src/lib_protocol_environment/sigs/v1/micheline.mli index ba2609374e4921bf05add509f4daca8024e804bd..a81748ef456353fe75045b41b2ec43f2aebf1bb0 100644 --- a/src/lib_protocol_environment/sigs/v1/micheline.mli +++ b/src/lib_protocol_environment/sigs/v1/micheline.mli @@ -33,19 +33,35 @@ type ('l, 'p) node = | Seq of 'l * ('l, 'p) node list type 'p canonical + type canonical_location = int val root : 'p canonical -> (canonical_location, 'p) node + val canonical_location_encoding : canonical_location Data_encoding.encoding -val canonical_encoding : variant:string -> 'l Data_encoding.encoding -> 'l canonical Data_encoding.encoding -val canonical_encoding_v1 : variant:string -> 'l Data_encoding.encoding -> 'l canonical Data_encoding.encoding + +val canonical_encoding : + variant:string -> + 'l Data_encoding.encoding -> + 'l canonical Data_encoding.encoding + +val canonical_encoding_v1 : + variant:string -> + 'l Data_encoding.encoding -> + 'l canonical Data_encoding.encoding + (* val erased_encoding : variant:string -> 'l -> 'p Data_encoding.encoding -> ('l, 'p) node Data_encoding.encoding val table_encoding : variant:string -> 'l Data_encoding.encoding -> 'p Data_encoding.encoding -> ('l, 'p) node Data_encoding.encoding *) val location : ('l, 'p) node -> 'l + val annotations : ('l, 'p) node -> string list val strip_locations : (_, 'p) node -> 'p canonical -val extract_locations : ('l, 'p) node -> 'p canonical * (canonical_location * 'l) list -val inject_locations : (canonical_location -> 'l) -> 'p canonical -> ('l, 'p) node + +val extract_locations : + ('l, 'p) node -> 'p canonical * (canonical_location * 'l) list + +val inject_locations : + (canonical_location -> 'l) -> 'p canonical -> ('l, 'p) node diff --git a/src/lib_protocol_environment/sigs/v1/operation.mli b/src/lib_protocol_environment/sigs/v1/operation.mli index 6a594ce57c32437dbba43ca48668c4d9558008ca..a93048f1976fbbd39962b9be0aa21089d35b12c9 100644 --- a/src/lib_protocol_environment/sigs/v1/operation.mli +++ b/src/lib_protocol_environment/sigs/v1/operation.mli @@ -26,16 +26,13 @@ (** Tezos operations. *) type shell_header = { - branch: Block_hash.t ; - (** The operation is only valid in a branch containing the + branch : Block_hash.t + (** The operation is only valid in a branch containing the block [branch]. *) } -val shell_header_encoding: shell_header Data_encoding.t -type t = { - shell: shell_header ; - proto: MBytes.t ; -} +val shell_header_encoding : shell_header Data_encoding.t + +type t = {shell : shell_header; proto : MBytes.t} -include S.HASHABLE with type t := t - and type hash := Operation_hash.t +include S.HASHABLE with type t := t and type hash := Operation_hash.t diff --git a/src/lib_protocol_environment/sigs/v1/operation_list_hash.mli b/src/lib_protocol_environment/sigs/v1/operation_list_hash.mli index a020291babb641beda9e3189bfd8e5c0e1601539..20d19d4cdb9931d29a57d8bb4d8e76a8a0ba43f5 100644 --- a/src/lib_protocol_environment/sigs/v1/operation_list_hash.mli +++ b/src/lib_protocol_environment/sigs/v1/operation_list_hash.mli @@ -24,5 +24,5 @@ (*****************************************************************************) (** Blocks hashes / IDs. 
*) -include S.MERKLE_TREE with type elt = Operation_hash.t - +include + S.MERKLE_TREE with type elt = Operation_hash.t diff --git a/src/lib_protocol_environment/sigs/v1/operation_list_list_hash.mli b/src/lib_protocol_environment/sigs/v1/operation_list_list_hash.mli index 949a197839ea63e7c37d81080ceccf1df6835731..b6ee37aab54a75b872553ab21b74acd32d936165 100644 --- a/src/lib_protocol_environment/sigs/v1/operation_list_list_hash.mli +++ b/src/lib_protocol_environment/sigs/v1/operation_list_list_hash.mli @@ -24,4 +24,5 @@ (*****************************************************************************) (** Blocks hashes / IDs. *) -include S.MERKLE_TREE with type elt = Operation_list_hash.t +include + S.MERKLE_TREE with type elt = Operation_list_hash.t diff --git a/src/lib_protocol_environment/sigs/v1/option.mli b/src/lib_protocol_environment/sigs/v1/option.mli index e7cbdc2241ca9d064c6d7c4f7b72df17838e6381..89baeed585d4309130aa6d2857cb6453ee7e0f20 100644 --- a/src/lib_protocol_environment/sigs/v1/option.mli +++ b/src/lib_protocol_environment/sigs/v1/option.mli @@ -23,17 +23,17 @@ (* *) (*****************************************************************************) -val map: f:('a -> 'b) -> 'a option -> 'b option +val map : f:('a -> 'b) -> 'a option -> 'b option -val apply: f:('a -> 'b option) -> 'a option -> 'b option +val apply : f:('a -> 'b option) -> 'a option -> 'b option -val iter: f:('a -> unit) -> 'a option -> unit +val iter : f:('a -> unit) -> 'a option -> unit -val unopt: default:'a -> 'a option -> 'a +val unopt : default:'a -> 'a option -> 'a -val unopt_map: f:('a -> 'b) -> default:'b -> 'a option -> 'b +val unopt_map : f:('a -> 'b) -> default:'b -> 'a option -> 'b -val first_some: 'a option -> 'a option -> 'a option +val first_some : 'a option -> 'a option -> 'a option val try_with : (unit -> 'a) -> 'a option diff --git a/src/lib_protocol_environment/sigs/v1/pervasives.mli b/src/lib_protocol_environment/sigs/v1/pervasives.mli index ccf079c5107f983b69d34e19c6053ff3530c1738..d5c078cbd821cac792ba9709dbfdf3e86143a3ae 100644 --- a/src/lib_protocol_environment/sigs/v1/pervasives.mli +++ b/src/lib_protocol_environment/sigs/v1/pervasives.mli @@ -24,7 +24,6 @@ *) - (** The initially opened module. This module provides the basic operations over the built-in types @@ -36,75 +35,71 @@ name, without prefixing them by [Pervasives]. *) - (** {1 Exceptions} *) -external raise : exn -> 'a = "%raise" (** Raise the given exception value *) +external raise : exn -> 'a = "%raise" -external raise_notrace : exn -> 'a = "%raise_notrace" (** A faster version [raise] which does not record the backtrace. @since 4.02.0 *) +external raise_notrace : exn -> 'a = "%raise_notrace" -val invalid_arg : string -> 'a (** Raise exception [Invalid_argument] with the given string. *) +val invalid_arg : string -> 'a -val failwith : string -> 'a (** Raise exception [Failure] with the given string. *) +val failwith : string -> 'a -exception Exit (** The [Exit] exception is not raised by any library function. It is provided for use in your programs. *) - +exception Exit (** {1 Boolean operations} *) -external not : bool -> bool = "%boolnot" (** The boolean negation. *) +external not : bool -> bool = "%boolnot" -external ( && ) : bool -> bool -> bool = "%sequand" (** The boolean 'and'. Evaluation is sequential, left-to-right: in [e1 && e2], [e1] is evaluated first, and if it returns [false], [e2] is not evaluated at all. Right-associative operator at precedence level 3/11. 
*) +external ( && ) : bool -> bool -> bool = "%sequand" - -external ( || ) : bool -> bool -> bool = "%sequor" (** The boolean 'or'. Evaluation is sequential, left-to-right: in [e1 || e2], [e1] is evaluated first, and if it returns [true], [e2] is not evaluated at all. Right-associative operator at precedence level 2/11. *) +external ( || ) : bool -> bool -> bool = "%sequor" (** {1 Debugging} *) -external __LOC__ : string = "%loc_LOC" (** [__LOC__] returns the location at which this expression appears in the file currently being parsed by the compiler, with the standard error format of OCaml: "File %S, line %d, characters %d-%d". @since 4.02.0 *) +external __LOC__ : string = "%loc_LOC" -external __FILE__ : string = "%loc_FILE" (** [__FILE__] returns the name of the file currently being parsed by the compiler. @since 4.02.0 *) +external __FILE__ : string = "%loc_FILE" -external __LINE__ : int = "%loc_LINE" (** [__LINE__] returns the line number at which this expression appears in the file currently being parsed by the compiler. @since 4.02.0 *) +external __LINE__ : int = "%loc_LINE" -external __MODULE__ : string = "%loc_MODULE" (** [__MODULE__] returns the module name of the file being parsed by the compiler. @since 4.02.0 *) +external __MODULE__ : string = "%loc_MODULE" -external __POS__ : string * int * int * int = "%loc_POS" (** [__POS__] returns a tuple [(file,lnum,cnum,enum)], corresponding to the location at which this expression appears in the file currently being parsed by the compiler. [file] is the current @@ -112,23 +107,23 @@ external __POS__ : string * int * int * int = "%loc_POS" the line and [enum] the last character position in the line. @since 4.02.0 *) +external __POS__ : string * int * int * int = "%loc_POS" -external __LOC_OF__ : 'a -> string * 'a = "%loc_LOC" (** [__LOC_OF__ expr] returns a pair [(loc, expr)] where [loc] is the location of [expr] in the file currently being parsed by the compiler, with the standard error format of OCaml: "File %S, line %d, characters %d-%d". @since 4.02.0 *) +external __LOC_OF__ : 'a -> string * 'a = "%loc_LOC" -external __LINE_OF__ : 'a -> int * 'a = "%loc_LINE" (** [__LINE__ expr] returns a pair [(line, expr)], where [line] is the line number at which the expression [expr] appears in the file currently being parsed by the compiler. @since 4.02.0 *) +external __LINE_OF__ : 'a -> int * 'a = "%loc_LINE" -external __POS_OF__ : 'a -> (string * int * int * int) * 'a = "%loc_POS" (** [__POS_OF__ expr] returns a pair [(loc,expr)], where [loc] is a tuple [(file,lnum,cnum,enum)] corresponding to the location at which the expression [expr] appears in the file currently being @@ -137,22 +132,23 @@ external __POS_OF__ : 'a -> (string * int * int * int) * 'a = "%loc_POS" the last character position in the line. @since 4.02.0 *) +external __POS_OF__ : 'a -> (string * int * int * int) * 'a = "%loc_POS" (** {1 Composition operators} *) -external ( |> ) : 'a -> ('a -> 'b) -> 'b = "%revapply" (** Reverse-application operator: [x |> f |> g] is exactly equivalent to [g (f (x))]. Left-associative operator at precedence level 4/11. @since 4.01 *) +external ( |> ) : 'a -> ('a -> 'b) -> 'b = "%revapply" -external ( @@ ) : ('a -> 'b) -> 'a -> 'b = "%apply" (** Application operator: [g @@ f @@ x] is exactly equivalent to [g (f (x))]. Right-associative operator at precedence level 5/11. 
@since 4.01 *) +external ( @@ ) : ('a -> 'b) -> 'a -> 'b = "%apply" (** {1 Integer arithmetic} *) @@ -160,37 +156,36 @@ external ( @@ ) : ('a -> 'b) -> 'a -> 'b = "%apply" All operations are taken modulo 2{^31} (or 2{^63}). They do not fail on overflow. *) -external ( ~- ) : int -> int = "%negint" (** Unary negation. You can also write [- e] instead of [~- e]. Unary operator at precedence level 9/11 for [- e] and 11/11 for [~- e]. *) +external ( ~- ) : int -> int = "%negint" -external ( ~+ ) : int -> int = "%identity" (** Unary addition. You can also write [+ e] instead of [~+ e]. Unary operator at precedence level 9/11 for [+ e] and 11/11 for [~+ e]. @since 3.12.0 *) +external ( ~+ ) : int -> int = "%identity" -external succ : int -> int = "%succint" (** [succ x] is [x + 1]. *) +external succ : int -> int = "%succint" -external pred : int -> int = "%predint" (** [pred x] is [x - 1]. *) +external pred : int -> int = "%predint" -external ( + ) : int -> int -> int = "%addint" (** Integer addition. Left-associative operator at precedence level 6/11. *) +external ( + ) : int -> int -> int = "%addint" -external ( - ) : int -> int -> int = "%subint" (** Integer subtraction. Left-associative operator at precedence level 6/11. *) +external ( - ) : int -> int -> int = "%subint" -external ( * ) : int -> int -> int = "%mulint" (** Integer multiplication. Left-associative operator at precedence level 7/11. *) +external ( * ) : int -> int -> int = "%mulint" -external ( / ) : int -> int -> int = "%divint" (** Integer division. Raise [Division_by_zero] if the second argument is 0. Integer division rounds the real quotient of its arguments towards zero. @@ -198,8 +193,8 @@ external ( / ) : int -> int -> int = "%divint" less than or equal to the real quotient of [x] by [y]. Moreover, [(- x) / y = x / (- y) = - (x / y)]. Left-associative operator at precedence level 7/11. *) +external ( / ) : int -> int -> int = "%divint" -external ( mod ) : int -> int -> int = "%modint" (** Integer remainder. If [y] is not zero, the result of [x mod y] satisfies the following properties: [x = (x / y) * y + x mod y] and @@ -208,109 +203,104 @@ external ( mod ) : int -> int -> int = "%modint" Note that [x mod y] is negative only if [x < 0]. Raise [Division_by_zero] if [y] is zero. Left-associative operator at precedence level 7/11. *) +external ( mod ) : int -> int -> int = "%modint" -val abs : int -> int (** Return the absolute value of the argument. Note that this may be negative if the argument is [min_int]. *) +val abs : int -> int -val max_int : int (** The greatest representable integer. *) +val max_int : int -val min_int : int (** The smallest representable integer. *) - +val min_int : int (** {2 Bitwise operations} *) -external ( land ) : int -> int -> int = "%andint" (** Bitwise logical and. Left-associative operator at precedence level 7/11. *) +external ( land ) : int -> int -> int = "%andint" -external ( lor ) : int -> int -> int = "%orint" (** Bitwise logical or. Left-associative operator at precedence level 7/11. *) +external ( lor ) : int -> int -> int = "%orint" -external ( lxor ) : int -> int -> int = "%xorint" (** Bitwise logical exclusive or. Left-associative operator at precedence level 7/11. *) +external ( lxor ) : int -> int -> int = "%xorint" -val lnot : int -> int (** Bitwise logical negation. *) +val lnot : int -> int -external ( lsl ) : int -> int -> int = "%lslint" (** [n lsl m] shifts [n] to the left by [m] bits. 
The result is unspecified if [m < 0] or [m >= bitsize], where [bitsize] is [32] on a 32-bit platform and [64] on a 64-bit platform. Right-associative operator at precedence level 8/11. *) +external ( lsl ) : int -> int -> int = "%lslint" -external ( lsr ) : int -> int -> int = "%lsrint" (** [n lsr m] shifts [n] to the right by [m] bits. This is a logical shift: zeroes are inserted regardless of the sign of [n]. The result is unspecified if [m < 0] or [m >= bitsize]. Right-associative operator at precedence level 8/11. *) +external ( lsr ) : int -> int -> int = "%lsrint" -external ( asr ) : int -> int -> int = "%asrint" (** [n asr m] shifts [n] to the right by [m] bits. This is an arithmetic shift: the sign bit of [n] is replicated. The result is unspecified if [m < 0] or [m >= bitsize]. Right-associative operator at precedence level 8/11. *) - +external ( asr ) : int -> int -> int = "%asrint" (** {1 String operations} More string operations are provided in module {!String}. *) -val ( ^ ) : string -> string -> string (** String concatenation. Right-associative operator at precedence level 5/11. *) - +val ( ^ ) : string -> string -> string (** {1 Character operations} More character operations are provided in module {!Char}. *) -external int_of_char : char -> int = "%identity" (** Return the ASCII code of the argument. *) +external int_of_char : char -> int = "%identity" -val char_of_int : int -> char (** Return the character with the given ASCII code. Raise [Invalid_argument "char_of_int"] if the argument is outside the range 0--255. *) - +val char_of_int : int -> char (** {1 Unit operations} *) -external ignore : 'a -> unit = "%ignore" (** Discard the value of its argument and return [()]. For instance, [ignore(f x)] discards the result of the side-effecting function [f]. It is equivalent to [f x; ()], except that the latter may generate a compiler warning; writing [ignore(f x)] instead avoids the warning. *) - +external ignore : 'a -> unit = "%ignore" (** {1 String conversion functions} *) -val string_of_bool : bool -> string (** Return the string representation of a boolean. As the returned values may be shared, the user should not modify them directly. *) +val string_of_bool : bool -> string -val bool_of_string_opt: string -> bool option (** Convert the given string to a boolean. Return [None] if the string is not ["true"] or ["false"]. @since 4.05 *) +val bool_of_string_opt : string -> bool option -val string_of_int : int -> string (** Return the string representation of an integer, in decimal. *) +val string_of_int : int -> string -val int_of_string_opt: string -> int option (** Convert the given string to an integer. The string is read in decimal (by default, or if the string begins with [0u]), in hexadecimal (if it begins with [0x] or @@ -330,57 +320,56 @@ val int_of_string_opt: string -> int option integers representable in type [int]. @since 4.05 *) +val int_of_string_opt : string -> int option (** {1 Pair operations} *) -external fst : 'a * 'b -> 'a = "%field0" (** Return the first component of a pair. *) +external fst : 'a * 'b -> 'a = "%field0" -external snd : 'a * 'b -> 'b = "%field1" (** Return the second component of a pair. *) - +external snd : 'a * 'b -> 'b = "%field1" (** {1 List operations} More list operations are provided in module {!List}. *) -val ( @ ) : 'a list -> 'a list -> 'a list (** List concatenation. Not tail-recursive (length of the first argument). Right-associative operator at precedence level 5/11. 
*) - +val ( @ ) : 'a list -> 'a list -> 'a list (** {1 References} *) -type 'a ref = { mutable contents : 'a } (** The type of references (mutable indirection cells) containing a value of type ['a]. *) +type 'a ref = {mutable contents : 'a} -external ref : 'a -> 'a ref = "%makemutable" (** Return a fresh reference containing the given value. *) +external ref : 'a -> 'a ref = "%makemutable" -external ( ! ) : 'a ref -> 'a = "%field0" (** [!r] returns the current contents of reference [r]. Equivalent to [fun r -> r.contents]. Unary operator at precedence level 11/11.*) +external ( ! ) : 'a ref -> 'a = "%field0" -external ( := ) : 'a ref -> 'a -> unit = "%setfield0" (** [r := a] stores the value of [a] in reference [r]. Equivalent to [fun r v -> r.contents <- v]. Right-associative operator at precedence level 1/11. *) +external ( := ) : 'a ref -> 'a -> unit = "%setfield0" -external incr : int ref -> unit = "%incr" (** Increment the integer contained in the given reference. Equivalent to [fun r -> r := succ !r]. *) +external incr : int ref -> unit = "%incr" -external decr : int ref -> unit = "%decr" (** Decrement the integer contained in the given reference. Equivalent to [fun r -> r := pred !r]. *) +external decr : int ref -> unit = "%decr" (** {1 Result type} *) (** @since 4.03.0 *) -type ('a,'b) result = Ok of 'a | Error of 'b +type ('a, 'b) result = Ok of 'a | Error of 'b (** {1 Operations on format strings} *) @@ -459,26 +448,26 @@ type ('a, 'b, 'c, 'd) format4 = ('a, 'b, 'c, 'c, 'c, 'd) format6 type ('a, 'b, 'c) format = ('a, 'b, 'c, 'c) format4 -val string_of_format : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> string (** Converts a format string into a string. *) +val string_of_format : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> string -external format_of_string : - ('a, 'b, 'c, 'd, 'e, 'f) format6 -> - ('a, 'b, 'c, 'd, 'e, 'f) format6 = "%identity" (** [format_of_string s] returns a format string read from the string literal [s]. Note: [format_of_string] can not convert a string argument that is not a literal. If you need this functionality, use the more general {!Scanf.format_from_string} function. *) +external format_of_string : + ('a, 'b, 'c, 'd, 'e, 'f) format6 -> ('a, 'b, 'c, 'd, 'e, 'f) format6 + = "%identity" -val ( ^^ ) : - ('a, 'b, 'c, 'd, 'e, 'f) format6 -> - ('f, 'b, 'c, 'e, 'g, 'h) format6 -> - ('a, 'b, 'c, 'd, 'g, 'h) format6 (** [f1 ^^ f2] catenates format strings [f1] and [f2]. The result is a format string that behaves as the concatenation of format strings [f1] and [f2]: in case of formatted output, it accepts arguments from [f1], then arguments from [f2]; in case of formatted input, it returns results from [f1], then results from [f2]. Right-associative operator at precedence level 5/11. *) +val ( ^^ ) : + ('a, 'b, 'c, 'd, 'e, 'f) format6 -> + ('f, 'b, 'c, 'e, 'g, 'h) format6 -> + ('a, 'b, 'c, 'd, 'g, 'h) format6 diff --git a/src/lib_protocol_environment/sigs/v1/protocol.mli b/src/lib_protocol_environment/sigs/v1/protocol.mli index 9a8fb6541bffa2e5c58c45bbf78a4e4a4dec2dbf..8d3099b85e583cc87383fee8ef7fda8aee2e36f2 100644 --- a/src/lib_protocol_environment/sigs/v1/protocol.mli +++ b/src/lib_protocol_environment/sigs/v1/protocol.mli @@ -23,25 +23,22 @@ (* *) (*****************************************************************************) -type t = { - expected_env: env_version ; - components: component list ; -} +type t = {expected_env : env_version; components : component list} (** An OCaml source component of a protocol implementation. *) and component = { (* The OCaml module name. 
*) - name : string ; + name : string; (* The OCaml interface source code *) - interface : string option ; + interface : string option; (* The OCaml source code *) - implementation : string ; + implementation : string } and env_version = V1 -val component_encoding: component Data_encoding.t -val env_version_encoding: env_version Data_encoding.t +val component_encoding : component Data_encoding.t + +val env_version_encoding : env_version Data_encoding.t -include S.HASHABLE with type t := t - and type hash := Protocol_hash.t +include S.HASHABLE with type t := t and type hash := Protocol_hash.t diff --git a/src/lib_protocol_environment/sigs/v1/s.mli b/src/lib_protocol_environment/sigs/v1/s.mli index fc82d2c719c3401e34b896c8e8362f252e7a139e..bf925e3e8f4406449a96139c1f64e1df0aa42092 100644 --- a/src/lib_protocol_environment/sigs/v1/s.mli +++ b/src/lib_protocol_environment/sigs/v1/s.mli @@ -26,28 +26,29 @@ (** Generic interface for a datatype with comparison, pretty-printer and serialization functions. *) module type T = sig - type t + include Compare.S with type t := t - val pp: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit - val encoding: t Data_encoding.t - val to_bytes: t -> MBytes.t - val of_bytes: MBytes.t -> t option + val encoding : t Data_encoding.t + val to_bytes : t -> MBytes.t + + val of_bytes : MBytes.t -> t option end (** Generic interface for a datatype with comparison, pretty-printer, serialization functions and a hashing function. *) module type HASHABLE = sig - include T type hash - val hash: t -> hash - val hash_raw: MBytes.t -> hash + val hash : t -> hash + + val hash_raw : MBytes.t -> hash end (** {2 Hash Types} *) @@ -59,211 +60,295 @@ end or in memory sets and maps. *) module type MINIMAL_HASH = sig - type t - val name: string - val title: string + val name : string + + val title : string + + val pp : Format.formatter -> t -> unit - val pp: Format.formatter -> t -> unit - val pp_short: Format.formatter -> t -> unit + val pp_short : Format.formatter -> t -> unit include Compare.S with type t := t - val hash_bytes: ?key:MBytes.t -> MBytes.t list -> t - val hash_string: ?key:string -> string list -> t + val hash_bytes : ?key:MBytes.t -> MBytes.t list -> t - val zero: t + val hash_string : ?key:string -> string list -> t + val zero : t end module type RAW_DATA = sig type t - val size: int (* in bytes *) - val to_bytes: t -> MBytes.t - val of_bytes_opt: MBytes.t -> t option - val of_bytes_exn: MBytes.t -> t + + val size : int (* in bytes *) + + val to_bytes : t -> MBytes.t + + val of_bytes_opt : MBytes.t -> t option + + val of_bytes_exn : MBytes.t -> t end module type B58_DATA = sig - type t - val to_b58check: t -> string - val to_short_b58check: t -> string + val to_b58check : t -> string + + val to_short_b58check : t -> string + + val of_b58check_exn : string -> t - val of_b58check_exn: string -> t - val of_b58check_opt: string -> t option + val of_b58check_opt : string -> t option type Base58.data += Data of t - val b58check_encoding: t Base58.encoding + val b58check_encoding : t Base58.encoding end module type ENCODER = sig type t - val encoding: t Data_encoding.t - val rpc_arg: t RPC_arg.t + + val encoding : t Data_encoding.t + + val rpc_arg : t RPC_arg.t end module type SET = sig type elt + type t - val empty: t - val is_empty: t -> bool - val mem: elt -> t -> bool - val add: elt -> t -> t - val singleton: elt -> t - val remove: elt -> t -> t - val union: t -> t -> t - val inter: t -> t -> t - val diff: t -> t -> t - val compare: t -> t -> int - 
val equal: t -> t -> bool - val subset: t -> t -> bool - val iter: (elt -> unit) -> t -> unit - val map: (elt -> elt) -> t -> t - val fold: (elt -> 'a -> 'a) -> t -> 'a -> 'a - val for_all: (elt -> bool) -> t -> bool - val exists: (elt -> bool) -> t -> bool - val filter: (elt -> bool) -> t -> t - val partition: (elt -> bool) -> t -> t * t - val cardinal: t -> int - val elements: t -> elt list - val min_elt_opt: t -> elt option - val max_elt_opt: t -> elt option - val choose_opt: t -> elt option - val split: elt -> t -> t * bool * t - val find_opt: elt -> t -> elt option - val find_first_opt: (elt -> bool) -> t -> elt option - val find_last_opt: (elt -> bool) -> t -> elt option - val of_list: elt list -> t + + val empty : t + + val is_empty : t -> bool + + val mem : elt -> t -> bool + + val add : elt -> t -> t + + val singleton : elt -> t + + val remove : elt -> t -> t + + val union : t -> t -> t + + val inter : t -> t -> t + + val diff : t -> t -> t + + val compare : t -> t -> int + + val equal : t -> t -> bool + + val subset : t -> t -> bool + + val iter : (elt -> unit) -> t -> unit + + val map : (elt -> elt) -> t -> t + + val fold : (elt -> 'a -> 'a) -> t -> 'a -> 'a + + val for_all : (elt -> bool) -> t -> bool + + val exists : (elt -> bool) -> t -> bool + + val filter : (elt -> bool) -> t -> t + + val partition : (elt -> bool) -> t -> t * t + + val cardinal : t -> int + + val elements : t -> elt list + + val min_elt_opt : t -> elt option + + val max_elt_opt : t -> elt option + + val choose_opt : t -> elt option + + val split : elt -> t -> t * bool * t + + val find_opt : elt -> t -> elt option + + val find_first_opt : (elt -> bool) -> t -> elt option + + val find_last_opt : (elt -> bool) -> t -> elt option + + val of_list : elt list -> t end module type MAP = sig type key - type (+'a) t - val empty: 'a t - val is_empty: 'a t -> bool - val mem: key -> 'a t -> bool - val add: key -> 'a -> 'a t -> 'a t - val update: key -> ('a option -> 'a option) -> 'a t -> 'a t - val singleton: key -> 'a -> 'a t - val remove: key -> 'a t -> 'a t - val merge: + + type +'a t + + val empty : 'a t + + val is_empty : 'a t -> bool + + val mem : key -> 'a t -> bool + + val add : key -> 'a -> 'a t -> 'a t + + val update : key -> ('a option -> 'a option) -> 'a t -> 'a t + + val singleton : key -> 'a -> 'a t + + val remove : key -> 'a t -> 'a t + + val merge : (key -> 'a option -> 'b option -> 'c option) -> 'a t -> 'b t -> 'c t - val union: (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t - val compare: ('a -> 'a -> int) -> 'a t -> 'a t -> int - val equal: ('a -> 'a -> bool) -> 'a t -> 'a t -> bool - val iter: (key -> 'a -> unit) -> 'a t -> unit - val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b - val for_all: (key -> 'a -> bool) -> 'a t -> bool - val exists: (key -> 'a -> bool) -> 'a t -> bool - val filter: (key -> 'a -> bool) -> 'a t -> 'a t - val partition: (key -> 'a -> bool) -> 'a t -> 'a t * 'a t - val cardinal: 'a t -> int - val bindings: 'a t -> (key * 'a) list - val min_binding_opt: 'a t -> (key * 'a) option - val max_binding_opt: 'a t -> (key * 'a) option - val choose_opt: 'a t -> (key * 'a) option - val split: key -> 'a t -> 'a t * 'a option * 'a t - val find_opt: key -> 'a t -> 'a option - val find_first_opt: (key -> bool) -> 'a t -> (key * 'a) option - val find_last_opt: (key -> bool) -> 'a t -> (key * 'a) option - val map: ('a -> 'b) -> 'a t -> 'b t - val mapi: (key -> 'a -> 'b) -> 'a t -> 'b t + + val union : (key -> 'a -> 'a -> 'a option) -> 'a t -> 'a t -> 'a t + + val compare : ('a -> 
'a -> int) -> 'a t -> 'a t -> int + + val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool + + val iter : (key -> 'a -> unit) -> 'a t -> unit + + val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + + val for_all : (key -> 'a -> bool) -> 'a t -> bool + + val exists : (key -> 'a -> bool) -> 'a t -> bool + + val filter : (key -> 'a -> bool) -> 'a t -> 'a t + + val partition : (key -> 'a -> bool) -> 'a t -> 'a t * 'a t + + val cardinal : 'a t -> int + + val bindings : 'a t -> (key * 'a) list + + val min_binding_opt : 'a t -> (key * 'a) option + + val max_binding_opt : 'a t -> (key * 'a) option + + val choose_opt : 'a t -> (key * 'a) option + + val split : key -> 'a t -> 'a t * 'a option * 'a t + + val find_opt : key -> 'a t -> 'a option + + val find_first_opt : (key -> bool) -> 'a t -> (key * 'a) option + + val find_last_opt : (key -> bool) -> 'a t -> (key * 'a) option + + val map : ('a -> 'b) -> 'a t -> 'b t + + val mapi : (key -> 'a -> 'b) -> 'a t -> 'b t end module type INDEXES = sig - type t - val to_path: t -> string list -> string list - val of_path: string list -> t option - val of_path_exn: string list -> t + val to_path : t -> string list -> string list + + val of_path : string list -> t option + + val of_path_exn : string list -> t - val prefix_path: string -> string list - val path_length: int + val prefix_path : string -> string list + + val path_length : int module Set : sig include Stdlib.Set.S with type elt = t - val encoding: t Data_encoding.t + + val encoding : t Data_encoding.t end module Map : sig include Stdlib.Map.S with type key = t - val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t - end + val encoding : 'a Data_encoding.t -> 'a t Data_encoding.t + end end module type HASH = sig include MINIMAL_HASH + include RAW_DATA with type t := t + include B58_DATA with type t := t + include ENCODER with type t := t + include INDEXES with type t := t end module type MERKLE_TREE = sig type elt + include HASH - val compute: elt list -> t - val empty: t - type path = - | Left of path * t - | Right of t * path - | Op - val compute_path: elt list -> int -> path - val check_path: path -> elt -> t * int - val path_encoding: path Data_encoding.t + + val compute : elt list -> t + + val empty : t + + type path = Left of path * t | Right of t * path | Op + + val compute_path : elt list -> int -> path + + val check_path : path -> elt -> t * int + + val path_encoding : path Data_encoding.t end module type SIGNATURE = sig - module Public_key_hash : sig - type t - val pp: Format.formatter -> t -> unit - val pp_short: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + + val pp_short : Format.formatter -> t -> unit + include Compare.S with type t := t + include RAW_DATA with type t := t + include B58_DATA with type t := t + include ENCODER with type t := t - include INDEXES with type t := t - val zero: t + include INDEXES with type t := t + val zero : t end module Public_key : sig - type t - val pp: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + include Compare.S with type t := t + include B58_DATA with type t := t - include ENCODER with type t := t - val hash: t -> Public_key_hash.t + include ENCODER with type t := t + val hash : t -> Public_key_hash.t end type t - val pp: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + include RAW_DATA with type t := t + include Compare.S with type t := t + include B58_DATA with type t := t + include ENCODER with type t := t - val zero: t + val zero : t type watermark (** Check 
a signature *) - val check: ?watermark:watermark -> Public_key.t -> t -> MBytes.t -> bool - + val check : ?watermark:watermark -> Public_key.t -> t -> MBytes.t -> bool end - diff --git a/src/lib_protocol_environment/sigs/v1/set.mli b/src/lib_protocol_environment/sigs/v1/set.mli index c4c20d4a72250c81012fee79da2d8c527e63dc68..7e18ef767dcaf841d83665be030775a6b354b8ea 100644 --- a/src/lib_protocol_environment/sigs/v1/set.mli +++ b/src/lib_protocol_environment/sigs/v1/set.mli @@ -44,12 +44,11 @@ of sets of [int * int]. *) -module type OrderedType = -sig - type t +(** Input signature of the functor {!Set.Make}. *) +module type OrderedType = sig (** The type of the set elements. *) + type t - val compare : t -> t -> int (** A total ordering function over the set elements. This is a two-argument function [f] such that [f e1 e2] is zero if the elements [e1] and [e2] are equal, @@ -57,9 +56,9 @@ sig and [f e1 e2] is strictly positive if [e1] is greater than [e2]. Example: a suitable ordering function is the generic structural comparison function {!Pervasives.compare}. *) + val compare : t -> t -> int end -(** Input signature of the functor {!Set.Make}. *) -module Make (Ord : OrderedType) : S.SET with type elt = Ord.t (** Functor building an implementation of the set structure given a totally ordered type. *) +module Make (Ord : OrderedType) : S.SET with type elt = Ord.t diff --git a/src/lib_protocol_environment/sigs/v1/signature.mli b/src/lib_protocol_environment/sigs/v1/signature.mli index 9ad32456696ded41c2559430ac2be06732c33dee..21e6ae5e230c77cba2caa9e981cd291054996c36 100644 --- a/src/lib_protocol_environment/sigs/v1/signature.mli +++ b/src/lib_protocol_environment/sigs/v1/signature.mli @@ -39,6 +39,8 @@ type watermark = | Generic_operation | Custom of MBytes.t -include S.SIGNATURE with type Public_key_hash.t = public_key_hash - and type Public_key.t = public_key - and type watermark := watermark +include + S.SIGNATURE + with type Public_key_hash.t = public_key_hash + and type Public_key.t = public_key + and type watermark := watermark diff --git a/src/lib_protocol_environment/sigs/v1/string.mli b/src/lib_protocol_environment/sigs/v1/string.mli index 2113f9ffa3b4ddc67882b15fae8e38e0eda16fe5..3baf5b6640af17c4cbb2db46fc0f9aa441168fb3 100644 --- a/src/lib_protocol_environment/sigs/v1/string.mli +++ b/src/lib_protocol_environment/sigs/v1/string.mli @@ -57,23 +57,21 @@ *) -external length : string -> int = "%string_length" (** Return the length (number of characters) of the given string. *) +external length : string -> int = "%string_length" -external get : string -> int -> char = "%string_safe_get" (** [String.get s n] returns the character at index [n] in string [s]. You can also write [s.[n]] instead of [String.get s n]. Raise [Invalid_argument] if [n] not a valid index in [s]. *) +external get : string -> int -> char = "%string_safe_get" - -val make : int -> char -> string (** [String.make n c] returns a fresh string of length [n], filled with the character [c]. Raise [Invalid_argument] if [n < 0] or [n > ]{!Sys.max_string_length}. *) +val make : int -> char -> string -val init : int -> (int -> char) -> string (** [String.init n f] returns a string of length [n], with character [i] initialized to the result of [f i] (called in increasing index order). 
@@ -82,57 +80,57 @@ val init : int -> (int -> char) -> string @since 4.02.0 *) +val init : int -> (int -> char) -> string -val sub : string -> int -> int -> string (** [String.sub s start len] returns a fresh string of length [len], containing the substring of [s] that starts at position [start] and has length [len]. Raise [Invalid_argument] if [start] and [len] do not designate a valid substring of [s]. *) +val sub : string -> int -> int -> string -val blit : string -> int -> bytes -> int -> int -> unit (** Same as {!Bytes.blit_string}. *) +val blit : string -> int -> bytes -> int -> int -> unit -val concat : string -> string list -> string (** [String.concat sep sl] concatenates the list of strings [sl], inserting the separator string [sep] between each. Raise [Invalid_argument] if the result is longer than {!Sys.max_string_length} bytes. *) +val concat : string -> string list -> string -val iter : (char -> unit) -> string -> unit (** [String.iter f s] applies function [f] in turn to all the characters of [s]. It is equivalent to [f s.[0]; f s.[1]; ...; f s.[String.length s - 1]; ()]. *) +val iter : (char -> unit) -> string -> unit -val iteri : (int -> char -> unit) -> string -> unit (** Same as {!String.iter}, but the function is applied to the index of the element as first argument (counting from 0), and the character itself as second argument. @since 4.00.0 *) +val iteri : (int -> char -> unit) -> string -> unit -val map : (char -> char) -> string -> string (** [String.map f s] applies function [f] in turn to all the characters of [s] (in increasing index order) and stores the results in a new string that is returned. @since 4.00.0 *) +val map : (char -> char) -> string -> string -val mapi : (int -> char -> char) -> string -> string (** [String.mapi f s] calls [f] with each character of [s] and its index (in increasing index order) and stores the results in a new string that is returned. @since 4.02.0 *) +val mapi : (int -> char -> char) -> string -> string -val trim : string -> string (** Return a copy of the argument, without leading and trailing whitespace. The characters regarded as whitespace are: [' '], ['\012'], ['\n'], ['\r'], and ['\t']. If there is neither leading nor trailing whitespace character in the argument, return the original string itself, not a copy. @since 4.00.0 *) +val trim : string -> string -val escaped : string -> string (** Return a copy of the argument, with special characters represented by escape sequences, following the lexical conventions of OCaml. @@ -148,20 +146,20 @@ val escaped : string -> string The function {!Scanf.unescaped} is a left inverse of [escaped], i.e. [Scanf.unescaped (escaped s) = s] for any string [s] (unless [escape s] fails). *) +val escaped : string -> string -val index_opt: string -> char -> int option (** [String.index_opt s c] returns the index of the first occurrence of character [c] in string [s], or [None] if [c] does not occur in [s]. @since 4.05 *) +val index_opt : string -> char -> int option -val rindex_opt: string -> char -> int option (** [String.rindex_opt s c] returns the index of the last occurrence of character [c] in string [s], or [None] if [c] does not occur in [s]. @since 4.05 *) +val rindex_opt : string -> char -> int option -val index_from_opt: string -> int -> char -> int option (** [String.index_from_opt s i c] returns the index of the first occurrence of character [c] in string [s] after position [i] or [None] if [c] does not occur in [s] after position [i]. 
@@ -171,8 +169,8 @@ val index_from_opt: string -> int -> char -> int option @since 4.05 *) +val index_from_opt : string -> int -> char -> int option -val rindex_from_opt: string -> int -> char -> int option (** [String.rindex_from_opt s i c] returns the index of the last occurrence of character [c] in string [s] before position [i+1] or [None] if [c] does not occur in [s] before position [i+1]. @@ -184,60 +182,60 @@ val rindex_from_opt: string -> int -> char -> int option @since 4.05 *) +val rindex_from_opt : string -> int -> char -> int option -val contains : string -> char -> bool (** [String.contains s c] tests if character [c] appears in the string [s]. *) +val contains : string -> char -> bool -val contains_from : string -> int -> char -> bool (** [String.contains_from s start c] tests if character [c] appears in [s] after position [start]. [String.contains s c] is equivalent to [String.contains_from s 0 c]. Raise [Invalid_argument] if [start] is not a valid position in [s]. *) +val contains_from : string -> int -> char -> bool -val rcontains_from : string -> int -> char -> bool (** [String.rcontains_from s stop c] tests if character [c] appears in [s] before position [stop+1]. Raise [Invalid_argument] if [stop < 0] or [stop+1] is not a valid position in [s]. *) +val rcontains_from : string -> int -> char -> bool -val uppercase_ascii : string -> string (** Return a copy of the argument, with all lowercase letters translated to uppercase, using the US-ASCII character set. @since 4.03.0 *) +val uppercase_ascii : string -> string -val lowercase_ascii : string -> string (** Return a copy of the argument, with all uppercase letters translated to lowercase, using the US-ASCII character set. @since 4.03.0 *) +val lowercase_ascii : string -> string -val capitalize_ascii : string -> string (** Return a copy of the argument, with the first character set to uppercase, using the US-ASCII character set. @since 4.03.0 *) +val capitalize_ascii : string -> string -val uncapitalize_ascii : string -> string (** Return a copy of the argument, with the first character set to lowercase, using the US-ASCII character set. @since 4.03.0 *) +val uncapitalize_ascii : string -> string -type t = string (** An alias for the type of strings. *) +type t = string -val compare: t -> t -> int (** The comparison function for strings, with the same specification as {!Pervasives.compare}. Along with the type [t], this function [compare] allows the module [String] to be passed as argument to the functors {!Set.Make} and {!Map.Make}. *) +val compare : t -> t -> int -val equal: t -> t -> bool (** The equal function for strings. @since 4.03.0 *) +val equal : t -> t -> bool -val split_on_char: char -> string -> string list (** [String.split_on_char sep s] returns the list of all (possibly empty) substrings of [s] that are delimited by the [sep] character. @@ -251,52 +249,51 @@ val split_on_char: char -> string -> string list @since 4.04.0 *) +val split_on_char : char -> string -> string list (** Functions reading bytes *) -val get_char: t -> int -> char (** [get_char buff i] reads 1 byte at offset i as a char *) +val get_char : t -> int -> char -val get_uint8: t -> int -> int (** [get_uint8 buff i] reads 1 byte at offset i as an unsigned int of 8 bits. i.e. It returns a value between 0 and 2^8-1 *) +val get_uint8 : t -> int -> int -val get_int8: t -> int -> int (** [get_int8 buff i] reads 1 byte at offset i as a signed int of 8 bits. i.e. 
It returns a value between -2^7 and 2^7-1 *) +val get_int8 : t -> int -> int (** Functions reading according to Big Endian byte order *) -val get_uint16: t -> int -> int (** [get_uint16 buff i] reads 2 bytes at offset i as an unsigned int of 16 bits. i.e. It returns a value between 0 and 2^16-1 *) +val get_uint16 : t -> int -> int -val get_int16: t -> int -> int (** [get_int16 buff i] reads 2 byte at offset i as a signed int of 16 bits. i.e. It returns a value between -2^15 and 2^15-1 *) +val get_int16 : t -> int -> int -val get_int32: t -> int -> int32 (** [get_int32 buff i] reads 4 bytes at offset i as an int32. *) +val get_int32 : t -> int -> int32 -val get_int64: t -> int -> int64 (** [get_int64 buff i] reads 8 bytes at offset i as an int64. *) +val get_int64 : t -> int -> int64 -module LE: sig - +module LE : sig (** Functions reading according to Little Endian byte order *) - val get_uint16: t -> int -> int (** [get_uint16 buff i] reads 2 bytes at offset i as an unsigned int of 16 bits. i.e. It returns a value between 0 and 2^16-1 *) + val get_uint16 : t -> int -> int - val get_int16: t -> int -> int (** [get_int16 buff i] reads 2 byte at offset i as a signed int of 16 bits. i.e. It returns a value between -2^15 and 2^15-1 *) + val get_int16 : t -> int -> int - val get_int32: t -> int -> int32 (** [get_int32 buff i] reads 4 bytes at offset i as an int32. *) + val get_int32 : t -> int -> int32 - val get_int64: t -> int -> int64 (** [get_int64 buff i] reads 8 bytes at offset i as an int64. *) - + val get_int64 : t -> int -> int64 end diff --git a/src/lib_protocol_environment/sigs/v1/tezos_data.mli b/src/lib_protocol_environment/sigs/v1/tezos_data.mli index 3ba261040570a7e75a17c8df317153ca65144d32..f14e14044b891a7e1c0a5d8b81ce62bfda92d2f3 100644 --- a/src/lib_protocol_environment/sigs/v1/tezos_data.mli +++ b/src/lib_protocol_environment/sigs/v1/tezos_data.mli @@ -23,6 +23,4 @@ (* *) (*****************************************************************************) -module Protocol : sig - -end +module Protocol : sig end diff --git a/src/lib_protocol_environment/sigs/v1/time.mli b/src/lib_protocol_environment/sigs/v1/time.mli index 54a8e134784fff3adef488d4bf24bba198c5fb79..d555494d2b7a807e69da8f43edf2fd1a2428dd1c 100644 --- a/src/lib_protocol_environment/sigs/v1/time.mli +++ b/src/lib_protocol_environment/sigs/v1/time.mli @@ -24,22 +24,25 @@ (*****************************************************************************) type t + include Compare.S with type t := t val add : t -> int64 -> t + val diff : t -> t -> int64 val of_seconds : int64 -> t + val to_seconds : t -> int64 val of_notation : string -> t option + val of_notation_exn : string -> t + val to_notation : t -> string val encoding : t Data_encoding.t + val rfc_encoding : t Data_encoding.t val pp_hum : Format.formatter -> t -> unit - - - diff --git a/src/lib_protocol_environment/sigs/v1/updater.mli b/src/lib_protocol_environment/sigs/v1/updater.mli index 252fc531e1ff1e16f7debf19f7d7d39dbada1367..208f451ffca5b24aa99e8116dc5559fe801bea3a 100644 --- a/src/lib_protocol_environment/sigs/v1/updater.mli +++ b/src/lib_protocol_environment/sigs/v1/updater.mli @@ -28,69 +28,62 @@ (** Validation result: the record returned by the protocol on the successfull validation of a block. *) type validation_result = { - - context: Context.t ; - (** The resulting context, it will be used for the next block. 
*) - - fitness: Fitness.t ; - (** The effective fitness of the block (to be compared with + context : Context.t; + (** The resulting context, it will be used for the next block. *) + fitness : Fitness.t; + (** The effective fitness of the block (to be compared with the 'announced' one in the block header. *) - - message: string option ; - (** An optional informative message to be used as in the 'git + message : string option; + (** An optional informative message to be used as in the 'git commit' of the block's context. *) - - max_operations_ttl: int ; - (** The "time-to-live" of operation for the next block: any + max_operations_ttl : int; + (** The "time-to-live" of operation for the next block: any operations whose 'branch' is older than 'ttl' blocks in the past cannot be included in the next block. *) - - last_allowed_fork_level: Int32.t ; - (** The level of the last block for which the node might consider an + last_allowed_fork_level : Int32.t + (** The level of the last block for which the node might consider an alternate branch. The shell should consider as invalid any branch whose fork point is older than the given level *) - } type quota = { - max_size: int ; - (** The maximum size (in bytes) of the serialized list of + max_size : int; + (** The maximum size (in bytes) of the serialized list of operations. *) - max_op: int option ; - (** The maximum number of operation. + max_op : int option + (** The maximum number of operation. [None] means no limit. *) } type rpc_context = { - block_hash: Block_hash.t ; - block_header: Block_header.shell_header ; - context: Context.t ; + block_hash : Block_hash.t; + block_header : Block_header.shell_header; + context : Context.t } (** This is the signature of a Tezos protocol implementation. It has access to the standard library and the Environment module. *) module type PROTOCOL = sig - (** The maximum size of a block header in bytes. *) - val max_block_length: int + val max_block_length : int (** The maximum size of an operation in bytes. *) - val max_operation_data_length: int + val max_operation_data_length : int (** The number of validation passes (length of the list) and the operation's quota for each pass. *) - val validation_passes: quota list + val validation_passes : quota list (** The version specific type of blocks. *) type block_header_data (** Encoding for version specific part of block headers. *) - val block_header_data_encoding: block_header_data Data_encoding.t + val block_header_data_encoding : block_header_data Data_encoding.t (** A fully parsed block header. *) type block_header = { - shell: Block_header.shell_header ; - protocol_data: block_header_data ; + shell : Block_header.shell_header; + protocol_data : block_header_data } (** Version-specific side information computed by the protocol @@ -101,7 +94,7 @@ module type PROTOCOL = sig type block_header_metadata (** Encoding for version-specific block metadata. *) - val block_header_metadata_encoding: block_header_metadata Data_encoding.t + val block_header_metadata_encoding : block_header_metadata Data_encoding.t (** The version specific type of operations. *) type operation_data @@ -113,29 +106,29 @@ module type PROTOCOL = sig (** A fully parsed operation. *) type operation = { - shell: Operation.shell_header ; - protocol_data: operation_data ; + shell : Operation.shell_header; + protocol_data : operation_data } (** Encoding for version-specific operation data. 
*) - val operation_data_encoding: operation_data Data_encoding.t + val operation_data_encoding : operation_data Data_encoding.t (** Encoding for version-specific operation receipts. *) - val operation_receipt_encoding: operation_receipt Data_encoding.t + val operation_receipt_encoding : operation_receipt Data_encoding.t (** Encoding that mixes an operation data and its receipt. *) - val operation_data_and_receipt_encoding: + val operation_data_and_receipt_encoding : (operation_data * operation_receipt) Data_encoding.t (** The Validation passes in which an operation can appear. For instance [[0]] if it only belongs to the first pass. An answer of [[]] means that the operation is ill-formed and cannot be included at all. *) - val acceptable_passes: operation -> int list + val acceptable_passes : operation -> int list (** Basic ordering of operations. [compare_operations op1 op2] means that [op1] should appear before [op2] in a block. *) - val compare_operations: operation -> operation -> int + val compare_operations : operation -> operation -> int (** A functional state that is transmitted through the steps of a block validation sequence. It must retain the current state of @@ -148,7 +141,7 @@ module type PROTOCOL = sig type validation_state (** Access the context at a given validation step. *) - val current_context: validation_state -> Context.t tzresult Lwt.t + val current_context : validation_state -> Context.t tzresult Lwt.t (** Checks that a block is well formed in a given context. This function should run quickly, as its main use is to reject bad @@ -159,11 +152,11 @@ module type PROTOCOL = sig The resulting `validation_state` will be used for multi-pass validation. *) - val begin_partial_application: - chain_id: Chain_id.t -> - ancestor_context: Context.t -> - predecessor_timestamp: Time.t -> - predecessor_fitness: Fitness.t -> + val begin_partial_application : + chain_id:Chain_id.t -> + ancestor_context:Context.t -> + predecessor_timestamp:Time.t -> + predecessor_fitness:Fitness.t -> block_header -> validation_state tzresult Lwt.t @@ -173,11 +166,11 @@ module type PROTOCOL = sig function {!precheck_block} may not have been called before [begin_application], so all the check performed by the former must be repeated in the latter. *) - val begin_application: - chain_id: Chain_id.t -> - predecessor_context: Context.t -> - predecessor_timestamp: Time.t -> - predecessor_fitness: Fitness.t -> + val begin_application : + chain_id:Chain_id.t -> + predecessor_context:Context.t -> + predecessor_timestamp:Time.t -> + predecessor_fitness:Fitness.t -> block_header -> validation_state tzresult Lwt.t @@ -190,20 +183,21 @@ module type PROTOCOL = sig an "equivalent" (but complete) header. For instance, if the block header usually includes a signature, the header provided to {!begin_construction} should includes a faked signature. 
*) - val begin_construction: - chain_id: Chain_id.t -> - predecessor_context: Context.t -> - predecessor_timestamp: Time.t -> - predecessor_level: Int32.t -> - predecessor_fitness: Fitness.t -> - predecessor: Block_hash.t -> - timestamp: Time.t -> - ?protocol_data: block_header_data -> - unit -> validation_state tzresult Lwt.t + val begin_construction : + chain_id:Chain_id.t -> + predecessor_context:Context.t -> + predecessor_timestamp:Time.t -> + predecessor_level:Int32.t -> + predecessor_fitness:Fitness.t -> + predecessor:Block_hash.t -> + timestamp:Time.t -> + ?protocol_data:block_header_data -> + unit -> + validation_state tzresult Lwt.t (** Called after {!begin_application} (or {!begin_construction}) and before {!finalize_block}, with each operation in the block. *) - val apply_operation: + val apply_operation : validation_state -> operation -> (validation_state * operation_receipt) tzresult Lwt.t @@ -211,32 +205,31 @@ module type PROTOCOL = sig (** The last step in a block validation sequence. It produces the context that will be used as input for the validation of its successor block candidates. *) - val finalize_block: + val finalize_block : validation_state -> (validation_result * block_header_metadata) tzresult Lwt.t (** The list of remote procedures exported by this implementation *) - val rpc_services: rpc_context RPC_directory.t + val rpc_services : rpc_context RPC_directory.t (** Initialize the context (or upgrade the context after a protocol amendment). This function receives the context resulting of the application of a block that triggered the amendment. It also receives the header of the block that triggered the amendment. *) - val init: + val init : Context.t -> Block_header.shell_header -> validation_result tzresult Lwt.t - end (** Activates a given protocol version from a given context. This means that the context used for the next block will use this version (this is not an immediate change). The version must have been previously compiled successfully. *) -val activate: Context.t -> Protocol_hash.t -> Context.t Lwt.t +val activate : Context.t -> Protocol_hash.t -> Context.t Lwt.t (** Fork a test chain. The forkerd chain will use the current block as genesis, and [protocol] as economic protocol. The chain will be destroyed when a (successor) block will have a timestamp greater than [expiration]. The protocol must have been previously compiled successfully. *) -val fork_test_chain: +val fork_test_chain : Context.t -> protocol:Protocol_hash.t -> expiration:Time.t -> Context.t Lwt.t diff --git a/src/lib_protocol_environment/sigs/v1/z.mli b/src/lib_protocol_environment/sigs/v1/z.mli index 48fd674f6b9ac4664027b1ad126d5b04f694591c..e706b2b5226f92d31dbe6be49fbf99412b841758 100644 --- a/src/lib_protocol_environment/sigs/v1/z.mli +++ b/src/lib_protocol_environment/sigs/v1/z.mli @@ -26,64 +26,65 @@ (** Tezos Protocol Environment - Arbitrary precision arithmetic. *) type t -val zero: t -val one: t -val succ: t -> t +val zero : t + +val one : t + (** Returns its argument plus one. *) +val succ : t -> t -val abs: t -> t (** Absolute value. *) +val abs : t -> t -val neg: t -> t (** Unary negation. *) +val neg : t -> t -val add: t -> t -> t (** Addition. *) +val add : t -> t -> t -val sub: t -> t -> t (** Subtraction. *) +val sub : t -> t -> t -val mul: t -> t -> t (** Multiplication. *) +val mul : t -> t -> t -val ediv_rem: t -> t -> (t * t) (** Euclidean division and remainder. [ediv_rem a b] returns a pair [(q, r)] such that [a = b * q + r] and [0 <= r < |b|]. 
Raises [Division_by_zero] if [b = 0]. *) +val ediv_rem : t -> t -> t * t -val logand: t -> t -> t (** Bitwise logical and. *) +val logand : t -> t -> t -val logor: t -> t -> t (** Bitwise logical or. *) +val logor : t -> t -> t -val logxor: t -> t -> t (** Bitwise logical exclusive or. *) +val logxor : t -> t -> t -val lognot: t -> t (** Bitwise logical negation. The identity [lognot a]=[-a-1] always hold. *) +val lognot : t -> t -val shift_left: t -> int -> t (** Shifts to the left. Equivalent to a multiplication by a power of 2. The second argument must be non-negative. *) +val shift_left : t -> int -> t -val shift_right: t -> int -> t (** Shifts to the right. This is an arithmetic shift, equivalent to a division by a power of 2 with rounding towards -oo. The second argument must be non-negative. *) +val shift_right : t -> int -> t -val to_string: t -> string (** Gives a human-readable, decimal string representation of the argument. *) +val to_string : t -> string -val of_string: string -> t (** Converts a string to an integer. An optional [-] prefix indicates a negative number, while a [+] prefix is ignored. @@ -95,28 +96,31 @@ val of_string: string -> t Raises an [Invalid_argument] exception if the string is not a syntactically correct representation of an integer. *) +val of_string : string -> t -val to_int64: t -> int64 (** Converts to a 64-bit integer. May raise [Overflow]. *) +val to_int64 : t -> int64 -val of_int64: int64 -> t (** Converts from a 64-bit integer. *) +val of_int64 : int64 -> t -val to_int: t -> int (** Converts to a base integer. May raise an [Overflow]. *) +val to_int : t -> int -val of_int: int -> t (** Converts from a base integer. *) +val of_int : int -> t + +val to_bits : ?pad_to:int -> t -> MBytes.t + +val of_bits : MBytes.t -> t -val to_bits: ?pad_to:int -> t -> MBytes.t -val of_bits: MBytes.t -> t +val equal : t -> t -> bool -val equal: t -> t -> bool -val compare: t -> t -> int +val compare : t -> t -> int -val numbits: t -> int (** Returns the number of significant bits in the given number. If [x] is zero, [numbits x] returns 0. Otherwise, [numbits x] returns a positive integer [n] such that [2^{n-1} <= |x| < 2^n]. Note that [numbits] is defined for negative arguments, and that [numbits (-x) = numbits x]. 
*) +val numbits : t -> int diff --git a/src/lib_protocol_environment/sigs_packer/sigs_packer.ml b/src/lib_protocol_environment/sigs_packer/sigs_packer.ml index 98f3ffaa800a7ccce9695ad94e965c6ca22e8463..b3d6f4e348e0371f86cafa148c9155fb8be935e8 100644 --- a/src/lib_protocol_environment/sigs_packer/sigs_packer.ml +++ b/src/lib_protocol_environment/sigs_packer/sigs_packer.ml @@ -29,37 +29,35 @@ let dump_file oc file = let buf = Bytes.create buflen in let rec loop () = let len = input ic buf 0 buflen in - if len <> 0 then begin - Printf.fprintf oc "%s" (Bytes.to_string (if len = buflen then buf else Bytes.sub buf 0 len)) ; - loop () - end + if len <> 0 then ( + Printf.fprintf + oc + "%s" + (Bytes.to_string (if len = buflen then buf else Bytes.sub buf 0 len)) ; + loop () ) in - loop () ; - close_in ic + loop () ; close_in ic -let opened_modules = [ - "Pervasives" ; - "Error_monad" ; -] +let opened_modules = ["Pervasives"; "Error_monad"] let include_mli oc file = let unit = - String.capitalize_ascii - (Filename.chop_extension (Filename.basename file)) in + String.capitalize_ascii (Filename.chop_extension (Filename.basename file)) + in Printf.fprintf oc "module %s : sig\n" unit ; Printf.fprintf oc "# 1 %S\n" file ; dump_file oc file ; Printf.fprintf oc "end\n" ; if unit = "Result" then - Printf.fprintf oc - "type ('a, 'b) result = ('a, 'b) Result.result = \ - \ Ok of 'a | Error of 'b\n" ; + Printf.fprintf + oc + "type ('a, 'b) result = ('a, 'b) Result.result = Ok of 'a | Error of 'b\n" ; if List.mem unit opened_modules then Printf.fprintf oc "open %s\n" unit let () = Printf.fprintf stdout "module type T = sig\n" ; for i = 1 to Array.length Sys.argv - 1 do let file = Sys.argv.(i) in - include_mli stdout file ; + include_mli stdout file done ; Printf.fprintf stdout "end\n%!" 
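A minimal sketch of what the packer driver above prints, assuming two hypothetical inputs, pervasives.mli and result.mli (the module bodies are placeholders; only the wrapping produced by include_mli is meant to be faithful: one module per input file, a "# 1 <file>" line directive before the dumped contents, the re-exported result type after the Result unit, and an open for each unit listed in opened_modules):

(* Sketch of the generated packed signature; indentation added for
   readability, the real output is printed flush left. *)
module type T = sig
  module Pervasives : sig
# 1 "pervasives.mli"
    (* ... dumped contents of the hypothetical pervasives.mli ... *)
  end

  open Pervasives

  module Result : sig
# 1 "result.mli"
    (* ... dumped contents of the hypothetical result.mli; assumed to
       declare [result] so the re-export below is well formed ... *)
    type ('a, 'b) result = Ok of 'a | Error of 'b
  end

  type ('a, 'b) result = ('a, 'b) Result.result = Ok of 'a | Error of 'b
end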
diff --git a/src/lib_protocol_environment/test/assert.ml b/src/lib_protocol_environment/test/assert.ml index 8e8fc024c61d442cdb001491158dd1fcc57b3eb8..8f942691bda9cb51751e33b7aa7eacaeac86e6cf 100644 --- a/src/lib_protocol_environment/test/assert.ml +++ b/src/lib_protocol_environment/test/assert.ml @@ -24,43 +24,47 @@ (*****************************************************************************) let fail expected given msg = - Format.kasprintf Pervasives.failwith - "@[%s@ expected: %s@ got: %s@]" msg expected given + Format.kasprintf + Pervasives.failwith + "@[%s@ expected: %s@ got: %s@]" + msg + expected + given + let fail_msg fmt = Format.kasprintf (fail "" "") fmt let default_printer _ = "" -let equal ?(eq=(=)) ?(prn=default_printer) ?(msg="") x y = +let equal ?(eq = ( = )) ?(prn = default_printer) ?(msg = "") x y = if not (eq x y) then fail (prn x) (prn y) msg let equal_string_option ?msg o1 o2 = - let prn = function - | None -> "None" - | Some s -> s in + let prn = function None -> "None" | Some s -> s in equal ?msg ~prn o1 o2 -let is_none ?(msg="") x = - if x <> None then fail "None" "Some _" msg +let is_none ?(msg = "") x = if x <> None then fail "None" "Some _" msg -let make_equal_list eq prn ?(msg="") x y = +let make_equal_list eq prn ?(msg = "") x y = let rec iter i x y = - match x, y with - | hd_x :: tl_x, hd_y :: tl_y -> - if eq hd_x hd_y then - iter (succ i) tl_x tl_y + match (x, y) with + | (hd_x :: tl_x, hd_y :: tl_y) -> + if eq hd_x hd_y then iter (succ i) tl_x tl_y else let fm = Printf.sprintf "%s (at index %d)" msg i in fail (prn hd_x) (prn hd_y) fm - | _ :: _, [] | [], _ :: _ -> + | (_ :: _, []) | ([], _ :: _) -> let fm = Printf.sprintf "%s (lists of different sizes)" msg in fail_msg "%s" fm - | [], [] -> - () in + | ([], []) -> + () + in iter 0 x y let equal_string_list_list ?msg l1 l2 = let pr_persist l = let res = - String.concat ";" (List.map (fun s -> Printf.sprintf "%S" s) l) in - Printf.sprintf "[%s]" res in - make_equal_list ?msg (=) pr_persist l1 l2 + String.concat ";" (List.map (fun s -> Printf.sprintf "%S" s) l) + in + Printf.sprintf "[%s]" res + in + make_equal_list ?msg ( = ) pr_persist l1 l2 diff --git a/src/lib_protocol_environment/test/test.ml b/src/lib_protocol_environment/test/test.ml index c671d778a2f52b463705afb3e7373aebdb759c0b..ba39851ee47beb23b01fb0fabe21d849234037e0 100644 --- a/src/lib_protocol_environment/test/test.ml +++ b/src/lib_protocol_environment/test/test.ml @@ -24,6 +24,6 @@ (*****************************************************************************) let () = - Alcotest.run "tezos-protocol-environment-shell" [ - "mem_context", Test_mem_context.tests ; - ] + Alcotest.run + "tezos-protocol-environment-shell" + [("mem_context", Test_mem_context.tests)] diff --git a/src/lib_protocol_environment/test/test_mem_context.ml b/src/lib_protocol_environment/test/test_mem_context.ml index 5fbaf781d59525f0bd012ab91c4c6fe19fdebd15..214d96d31d59b8b16b17b410d49639337fb0c505 100644 --- a/src/lib_protocol_environment/test/test_mem_context.ml +++ b/src/lib_protocol_environment/test/test_mem_context.ml @@ -28,136 +28,169 @@ open Tezos_protocol_environment_memory (** Context creation *) let create_block2 ctxt = - Context.set ctxt ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt -> - Context.set ctxt ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt -> - Context.set ctxt ["version";] (MBytes.of_string "0.0") >>= fun ctxt -> - Lwt.return ctxt + Context.set ctxt ["a"; "b"] (MBytes.of_string "Novembre") + >>= fun ctxt -> + Context.set ctxt ["a"; "c"] 
(MBytes.of_string "Juin") + >>= fun ctxt -> + Context.set ctxt ["version"] (MBytes.of_string "0.0") + >>= fun ctxt -> Lwt.return ctxt let create_block3a ctxt = - Context.del ctxt ["a"; "b"] >>= fun ctxt -> - Context.set ctxt ["a"; "d"] (MBytes.of_string "Mars") >>= fun ctxt -> - Lwt.return ctxt + Context.del ctxt ["a"; "b"] + >>= fun ctxt -> + Context.set ctxt ["a"; "d"] (MBytes.of_string "Mars") + >>= fun ctxt -> Lwt.return ctxt let create_block3b ctxt = - Context.del ctxt ["a"; "c"] >>= fun ctxt -> - Context.set ctxt ["a"; "d"] (MBytes.of_string "Février") >>= fun ctxt -> - Lwt.return ctxt + Context.del ctxt ["a"; "c"] + >>= fun ctxt -> + Context.set ctxt ["a"; "d"] (MBytes.of_string "Février") + >>= fun ctxt -> Lwt.return ctxt type t = { - genesis: Context.t ; - block2: Context.t ; - block3a: Context.t ; - block3b: Context.t ; + genesis : Context.t; + block2 : Context.t; + block3a : Context.t; + block3b : Context.t } let wrap_context_init f _ () = let genesis = Context.empty in - create_block2 genesis >>= fun block2 -> - create_block3a block2 >>= fun block3a -> - create_block3b block2 >>= fun block3b -> - f { genesis; block2 ; block3a; block3b } >>= fun result -> - Lwt.return result + create_block2 genesis + >>= fun block2 -> + create_block3a block2 + >>= fun block3a -> + create_block3b block2 + >>= fun block3b -> + f {genesis; block2; block3a; block3b} >>= fun result -> Lwt.return result (** Simple test *) -let c = function - | None -> None - | Some s -> Some (MBytes.to_string s) +let c = function None -> None | Some s -> Some (MBytes.to_string s) -let test_simple { block2 = ctxt ; _ } = - Context.get ctxt ["version"] >>= fun version -> +let test_simple {block2 = ctxt; _} = + Context.get ctxt ["version"] + >>= fun version -> Assert.equal_string_option ~msg:__LOC__ (c version) (Some "0.0") ; - Context.get ctxt ["a";"b"] >>= fun novembre -> + Context.get ctxt ["a"; "b"] + >>= fun novembre -> Assert.equal_string_option (Some "Novembre") (c novembre) ; - Context.get ctxt ["a";"c"] >>= fun juin -> + Context.get ctxt ["a"; "c"] + >>= fun juin -> Assert.equal_string_option ~msg:__LOC__ (Some "Juin") (c juin) ; Lwt.return_unit -let test_continuation { block3a = ctxt ; _ } = - Context.get ctxt ["version"] >>= fun version -> +let test_continuation {block3a = ctxt; _} = + Context.get ctxt ["version"] + >>= fun version -> Assert.equal_string_option ~msg:__LOC__ (Some "0.0") (c version) ; - Context.get ctxt ["a";"b"] >>= fun novembre -> + Context.get ctxt ["a"; "b"] + >>= fun novembre -> Assert.is_none ~msg:__LOC__ (c novembre) ; - Context.get ctxt ["a";"c"] >>= fun juin -> + Context.get ctxt ["a"; "c"] + >>= fun juin -> Assert.equal_string_option ~msg:__LOC__ (Some "Juin") (c juin) ; - Context.get ctxt ["a";"d"] >>= fun mars -> - Assert.equal_string_option ~msg:__LOC__ (Some "Mars") (c mars) ; + Context.get ctxt ["a"; "d"] + >>= fun mars -> + Assert.equal_string_option ~msg:__LOC__ (Some "Mars") (c mars) ; Lwt.return_unit -let test_fork { block3b = ctxt ; _ } = - Context.get ctxt ["version"] >>= fun version -> +let test_fork {block3b = ctxt; _} = + Context.get ctxt ["version"] + >>= fun version -> Assert.equal_string_option ~msg:__LOC__ (Some "0.0") (c version) ; - Context.get ctxt ["a";"b"] >>= fun novembre -> + Context.get ctxt ["a"; "b"] + >>= fun novembre -> Assert.equal_string_option ~msg:__LOC__ (Some "Novembre") (c novembre) ; - Context.get ctxt ["a";"c"] >>= fun juin -> + Context.get ctxt ["a"; "c"] + >>= fun juin -> Assert.is_none ~msg:__LOC__ (c juin) ; - Context.get ctxt ["a";"d"] >>= 
fun mars -> + Context.get ctxt ["a"; "d"] + >>= fun mars -> Assert.equal_string_option ~msg:__LOC__ (Some "Février") (c mars) ; Lwt.return_unit -let test_replay { genesis = ctxt0 ; _ } = - Context.set ctxt0 ["version"] (MBytes.of_string "0.0") >>= fun ctxt1 -> - Context.set ctxt1 ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt2 -> - Context.set ctxt2 ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt3 -> - Context.set ctxt3 ["a"; "d"] (MBytes.of_string "July") >>= fun ctxt4a -> - Context.set ctxt3 ["a"; "d"] (MBytes.of_string "Juillet") >>= fun ctxt4b -> - Context.set ctxt4a ["a"; "b"] (MBytes.of_string "November") >>= fun ctxt5a -> - Context.get ctxt4a ["a";"b"] >>= fun novembre -> +let test_replay {genesis = ctxt0; _} = + Context.set ctxt0 ["version"] (MBytes.of_string "0.0") + >>= fun ctxt1 -> + Context.set ctxt1 ["a"; "b"] (MBytes.of_string "Novembre") + >>= fun ctxt2 -> + Context.set ctxt2 ["a"; "c"] (MBytes.of_string "Juin") + >>= fun ctxt3 -> + Context.set ctxt3 ["a"; "d"] (MBytes.of_string "July") + >>= fun ctxt4a -> + Context.set ctxt3 ["a"; "d"] (MBytes.of_string "Juillet") + >>= fun ctxt4b -> + Context.set ctxt4a ["a"; "b"] (MBytes.of_string "November") + >>= fun ctxt5a -> + Context.get ctxt4a ["a"; "b"] + >>= fun novembre -> Assert.equal_string_option ~msg:__LOC__ (Some "Novembre") (c novembre) ; - Context.get ctxt5a ["a";"b"] >>= fun november -> + Context.get ctxt5a ["a"; "b"] + >>= fun november -> Assert.equal_string_option ~msg:__LOC__ (Some "November") (c november) ; - Context.get ctxt5a ["a";"d"] >>= fun july -> + Context.get ctxt5a ["a"; "d"] + >>= fun july -> Assert.equal_string_option ~msg:__LOC__ (Some "July") (c july) ; - Context.get ctxt4b ["a";"b"] >>= fun novembre -> + Context.get ctxt4b ["a"; "b"] + >>= fun novembre -> Assert.equal_string_option ~msg:__LOC__ (Some "Novembre") (c novembre) ; - Context.get ctxt4b ["a";"d"] >>= fun juillet -> + Context.get ctxt4b ["a"; "d"] + >>= fun juillet -> Assert.equal_string_option ~msg:__LOC__ (Some "Juillet") (c juillet) ; Lwt.return_unit let fold_keys s k ~init ~f = let rec loop k acc = - Context.fold s k ~init:acc - ~f:(fun file acc -> - match file with - | `Key k -> f k acc - | `Dir k -> loop k acc) in + Context.fold s k ~init:acc ~f:(fun file acc -> + match file with `Key k -> f k acc | `Dir k -> loop k acc) + in loop k init + let keys t = fold_keys t ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc)) -let test_fold { genesis = ctxt ; _ } = - Context.set ctxt ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt -> - Context.set ctxt ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt -> - Context.set ctxt ["a"; "d"; "e"] (MBytes.of_string "Septembre") >>= fun ctxt -> - Context.set ctxt ["f";] (MBytes.of_string "Avril") >>= fun ctxt -> - Context.set ctxt ["g"; "h"] (MBytes.of_string "Avril") >>= fun ctxt -> - keys ctxt [] >>= fun l -> - Assert.equal_string_list_list ~msg:__LOC__ - [["a";"b"]; - ["a";"c"]; - ["a";"d";"e"]; - ["f"]; - ["g";"h"]] (List.sort compare l) ; - keys ctxt ["a"] >>= fun l -> +let test_fold {genesis = ctxt; _} = + Context.set ctxt ["a"; "b"] (MBytes.of_string "Novembre") + >>= fun ctxt -> + Context.set ctxt ["a"; "c"] (MBytes.of_string "Juin") + >>= fun ctxt -> + Context.set ctxt ["a"; "d"; "e"] (MBytes.of_string "Septembre") + >>= fun ctxt -> + Context.set ctxt ["f"] (MBytes.of_string "Avril") + >>= fun ctxt -> + Context.set ctxt ["g"; "h"] (MBytes.of_string "Avril") + >>= fun ctxt -> + keys ctxt [] + >>= fun l -> + Assert.equal_string_list_list + ~msg:__LOC__ + [["a"; "b"]; ["a"; "c"]; ["a"; "d"; 
"e"]; ["f"]; ["g"; "h"]] + (List.sort compare l) ; + keys ctxt ["a"] + >>= fun l -> Assert.equal_string_list_list - ~msg:__LOC__ [["a";"b"]; ["a";"c"]; ["a";"d";"e"]] + ~msg:__LOC__ + [["a"; "b"]; ["a"; "c"]; ["a"; "d"; "e"]] (List.sort compare l) ; - keys ctxt ["f"] >>= fun l -> + keys ctxt ["f"] + >>= fun l -> Assert.equal_string_list_list ~msg:__LOC__ [] l ; - keys ctxt ["g"] >>= fun l -> - Assert.equal_string_list_list ~msg:__LOC__ [["g";"h"]] l ; - keys ctxt ["i"] >>= fun l -> + keys ctxt ["g"] + >>= fun l -> + Assert.equal_string_list_list ~msg:__LOC__ [["g"; "h"]] l ; + keys ctxt ["i"] + >>= fun l -> Assert.equal_string_list_list ~msg:__LOC__ [] l ; Lwt.return_unit (******************************************************************************) -let tests = [ - "simple", test_simple ; - "continuation", test_continuation ; - "fork", test_fork ; - "replay", test_replay ; - "fold", test_fold ; -] +let tests = + [ ("simple", test_simple); + ("continuation", test_continuation); + ("fork", test_fork); + ("replay", test_replay); + ("fold", test_fold) ] let tests = List.map diff --git a/src/lib_protocol_environment/tezos_protocol_environment.ml b/src/lib_protocol_environment/tezos_protocol_environment.ml index 1f7b3af1bf737ab54b819442704ee04d7a7d00cf..c0a770f0d1772a9c6fa9d5f06377a0d1bd7a5c98 100644 --- a/src/lib_protocol_environment/tezos_protocol_environment.ml +++ b/src/lib_protocol_environment/tezos_protocol_environment.ml @@ -27,202 +27,254 @@ open Error_monad module type CONTEXT = sig type t + type key = string list + type value = MBytes.t - val mem: t -> key -> bool Lwt.t - val dir_mem: t -> key -> bool Lwt.t - val get: t -> key -> value option Lwt.t - val set: t -> key -> value -> t Lwt.t - val copy: t -> from:key -> to_:key -> t option Lwt.t - val del: t -> key -> t Lwt.t - val remove_rec: t -> key -> t Lwt.t - val fold: - t -> key -> init:'a -> - f:([ `Key of key | `Dir of key ] -> 'a -> 'a Lwt.t) -> + + val mem : t -> key -> bool Lwt.t + + val dir_mem : t -> key -> bool Lwt.t + + val get : t -> key -> value option Lwt.t + + val set : t -> key -> value -> t Lwt.t + + val copy : t -> from:key -> to_:key -> t option Lwt.t + + val del : t -> key -> t Lwt.t + + val remove_rec : t -> key -> t Lwt.t + + val fold : + t -> + key -> + init:'a -> + f:([`Key of key | `Dir of key] -> 'a -> 'a Lwt.t) -> 'a Lwt.t - val set_protocol: t -> Protocol_hash.t -> t Lwt.t - val fork_test_chain: + + val set_protocol : t -> Protocol_hash.t -> t Lwt.t + + val fork_test_chain : t -> protocol:Protocol_hash.t -> expiration:Time.Protocol.t -> t Lwt.t end module Make (Context : CONTEXT) = struct - type validation_result = { - context: Context.t ; - fitness: Fitness.t ; - message: string option ; - max_operations_ttl: int ; - last_allowed_fork_level: Int32.t ; + context : Context.t; + fitness : Fitness.t; + message : string option; + max_operations_ttl : int; + last_allowed_fork_level : Int32.t } - type quota = { - max_size: int ; - max_op: int option ; - } + type quota = {max_size : int; max_op : int option} type rpc_context = { - block_hash: Block_hash.t ; - block_header: Block_header.shell_header ; - context: Context.t ; + block_hash : Block_hash.t; + block_header : Block_header.shell_header; + context : Context.t } module type T = sig type context + type quota + type validation_result + type rpc_context + type 'a tzresult - val max_block_length: int - val max_operation_data_length: int - val validation_passes: quota list + + val max_block_length : int + + val max_operation_data_length : int + + val 
validation_passes : quota list + type block_header_data - val block_header_data_encoding: block_header_data Data_encoding.t + + val block_header_data_encoding : block_header_data Data_encoding.t + type block_header = { - shell: Block_header.shell_header ; - protocol_data: block_header_data ; + shell : Block_header.shell_header; + protocol_data : block_header_data } + type block_header_metadata - val block_header_metadata_encoding: block_header_metadata Data_encoding.t + + val block_header_metadata_encoding : block_header_metadata Data_encoding.t + type operation_data + type operation_receipt + type operation = { - shell: Operation.shell_header ; - protocol_data: operation_data ; + shell : Operation.shell_header; + protocol_data : operation_data } - val operation_data_encoding: operation_data Data_encoding.t - val operation_receipt_encoding: operation_receipt Data_encoding.t - val operation_data_and_receipt_encoding: + + val operation_data_encoding : operation_data Data_encoding.t + + val operation_receipt_encoding : operation_receipt Data_encoding.t + + val operation_data_and_receipt_encoding : (operation_data * operation_receipt) Data_encoding.t - val acceptable_passes: operation -> int list - val compare_operations: operation -> operation -> int + + val acceptable_passes : operation -> int list + + val compare_operations : operation -> operation -> int + type validation_state - val current_context: validation_state -> context tzresult Lwt.t - val begin_partial_application: - chain_id: Chain_id.t -> - ancestor_context: context -> - predecessor_timestamp: Time.Protocol.t -> - predecessor_fitness: Fitness.t -> + + val current_context : validation_state -> context tzresult Lwt.t + + val begin_partial_application : + chain_id:Chain_id.t -> + ancestor_context:context -> + predecessor_timestamp:Time.Protocol.t -> + predecessor_fitness:Fitness.t -> block_header -> validation_state tzresult Lwt.t - val begin_application: - chain_id: Chain_id.t -> - predecessor_context: context -> - predecessor_timestamp: Time.Protocol.t -> - predecessor_fitness: Fitness.t -> + + val begin_application : + chain_id:Chain_id.t -> + predecessor_context:context -> + predecessor_timestamp:Time.Protocol.t -> + predecessor_fitness:Fitness.t -> block_header -> validation_state tzresult Lwt.t - val begin_construction: - chain_id: Chain_id.t -> - predecessor_context: context -> - predecessor_timestamp: Time.Protocol.t -> - predecessor_level: Int32.t -> - predecessor_fitness: Fitness.t -> - predecessor: Block_hash.t -> - timestamp: Time.Protocol.t -> - ?protocol_data: block_header_data -> - unit -> validation_state tzresult Lwt.t - val apply_operation: - validation_state -> operation -> + + val begin_construction : + chain_id:Chain_id.t -> + predecessor_context:context -> + predecessor_timestamp:Time.Protocol.t -> + predecessor_level:Int32.t -> + predecessor_fitness:Fitness.t -> + predecessor:Block_hash.t -> + timestamp:Time.Protocol.t -> + ?protocol_data:block_header_data -> + unit -> + validation_state tzresult Lwt.t + + val apply_operation : + validation_state -> + operation -> (validation_state * operation_receipt) tzresult Lwt.t - val finalize_block: + + val finalize_block : validation_state -> (validation_result * block_header_metadata) tzresult Lwt.t - val rpc_services: rpc_context RPC_directory.t - val init: + + val rpc_services : rpc_context RPC_directory.t + + val init : context -> Block_header.shell_header -> validation_result tzresult Lwt.t end module type PROTOCOL = - T with type context := Context.t + T + with 
type context := Context.t and type quota := quota and type validation_result := validation_result and type rpc_context := rpc_context and type 'a tzresult := 'a Error_monad.tzresult module type V1 = sig - - include Tezos_protocol_environment_sigs.V1.T - with type Format.formatter = Format.formatter - and type 'a Data_encoding.t = 'a Data_encoding.t - and type 'a Data_encoding.lazy_t = 'a Data_encoding.lazy_t - and type 'a Lwt.t = 'a Lwt.t - and type ('a, 'b) Pervasives.result = ('a, 'b) result - and type Chain_id.t = Chain_id.t - and type Block_hash.t = Block_hash.t - and type Operation_hash.t = Operation_hash.t - and type Operation_list_hash.t = Operation_list_hash.t - and type Operation_list_list_hash.t = Operation_list_list_hash.t - and type Context.t = Context.t - and type Context_hash.t = Context_hash.t - and type Protocol_hash.t = Protocol_hash.t - and type Time.t = Time.Protocol.t - and type MBytes.t = MBytes.t - and type Operation.shell_header = Operation.shell_header - and type Operation.t = Operation.t - and type Block_header.shell_header = Block_header.shell_header - and type Block_header.t = Block_header.t - and type 'a RPC_directory.t = 'a RPC_directory.t - and type Ed25519.Public_key_hash.t = Ed25519.Public_key_hash.t - and type Ed25519.Public_key.t = Ed25519.Public_key.t - and type Ed25519.t = Ed25519.t - and type Secp256k1.Public_key_hash.t = Secp256k1.Public_key_hash.t - and type Secp256k1.Public_key.t = Secp256k1.Public_key.t - and type Secp256k1.t = Secp256k1.t - and type P256.Public_key_hash.t = P256.Public_key_hash.t - and type P256.Public_key.t = P256.Public_key.t - and type P256.t = P256.t - and type Signature.public_key_hash = Signature.public_key_hash - and type Signature.public_key = Signature.public_key - and type Signature.t = Signature.t - and type Signature.watermark = Signature.watermark - and type 'a Micheline.canonical = 'a Micheline.canonical - and type ('a, 'b) RPC_path.t = ('a, 'b) RPC_path.t - and type Z.t = Z.t - and type ('a, 'b) Micheline.node = ('a, 'b) Micheline.node - and type Data_encoding.json_schema = Data_encoding.json_schema - and type RPC_service.meth = RPC_service.meth - and type (+'m,'pr,'p,'q,'i,'o) RPC_service.t = ('m,'pr,'p,'q,'i,'o) RPC_service.t - and type Error_monad.shell_error = Error_monad.error - and type Z.t = Z.t + include + Tezos_protocol_environment_sigs.V1.T + with type Format.formatter = Format.formatter + and type 'a Data_encoding.t = 'a Data_encoding.t + and type 'a Data_encoding.lazy_t = 'a Data_encoding.lazy_t + and type 'a Lwt.t = 'a Lwt.t + and type ('a, 'b) Pervasives.result = ('a, 'b) result + and type Chain_id.t = Chain_id.t + and type Block_hash.t = Block_hash.t + and type Operation_hash.t = Operation_hash.t + and type Operation_list_hash.t = Operation_list_hash.t + and type Operation_list_list_hash.t = Operation_list_list_hash.t + and type Context.t = Context.t + and type Context_hash.t = Context_hash.t + and type Protocol_hash.t = Protocol_hash.t + and type Time.t = Time.Protocol.t + and type MBytes.t = MBytes.t + and type Operation.shell_header = Operation.shell_header + and type Operation.t = Operation.t + and type Block_header.shell_header = Block_header.shell_header + and type Block_header.t = Block_header.t + and type 'a RPC_directory.t = 'a RPC_directory.t + and type Ed25519.Public_key_hash.t = Ed25519.Public_key_hash.t + and type Ed25519.Public_key.t = Ed25519.Public_key.t + and type Ed25519.t = Ed25519.t + and type Secp256k1.Public_key_hash.t = Secp256k1.Public_key_hash.t + and type 
Secp256k1.Public_key.t = Secp256k1.Public_key.t + and type Secp256k1.t = Secp256k1.t + and type P256.Public_key_hash.t = P256.Public_key_hash.t + and type P256.Public_key.t = P256.Public_key.t + and type P256.t = P256.t + and type Signature.public_key_hash = Signature.public_key_hash + and type Signature.public_key = Signature.public_key + and type Signature.t = Signature.t + and type Signature.watermark = Signature.watermark + and type 'a Micheline.canonical = 'a Micheline.canonical + and type ('a, 'b) RPC_path.t = ('a, 'b) RPC_path.t + and type Z.t = Z.t + and type ('a, 'b) Micheline.node = ('a, 'b) Micheline.node + and type Data_encoding.json_schema = Data_encoding.json_schema + and type RPC_service.meth = RPC_service.meth + and type (+'m, 'pr, 'p, 'q, 'i, 'o) RPC_service.t = + ('m, 'pr, 'p, 'q, 'i, 'o) RPC_service.t + and type Error_monad.shell_error = Error_monad.error + and type Z.t = Z.t type error += Ecoproto_error of Error_monad.error + val wrap_error : 'a Error_monad.tzresult -> 'a tzresult - module Lift (P : Updater.PROTOCOL) : PROTOCOL - with type block_header_data = P.block_header_data - and type block_header = P.block_header - and type operation_data = P.operation_data - and type operation_receipt = P.operation_receipt - and type operation = P.operation - and type validation_state = P.validation_state + module Lift (P : Updater.PROTOCOL) : + PROTOCOL + with type block_header_data = P.block_header_data + and type block_header = P.block_header + and type operation_data = P.operation_data + and type operation_receipt = P.operation_receipt + and type operation = P.operation + and type validation_state = P.validation_state class ['chain, 'block] proto_rpc_context : - Tezos_rpc.RPC_context.t -> (unit, (unit * 'chain) * 'block) RPC_path.t -> - [('chain * 'block)] RPC_context.simple + Tezos_rpc.RPC_context.t + -> (unit, (unit * 'chain) * 'block) RPC_path.t + -> ['chain * 'block] RPC_context.simple class ['block] proto_rpc_context_of_directory : - ('block -> RPC_context.t) -> RPC_context.t RPC_directory.t -> - ['block] RPC_context.simple - + ('block -> RPC_context.t) + -> RPC_context.t RPC_directory.t + -> ['block] RPC_context.simple end - module MakeV1 (Param : sig val name: string end) () = struct - + module MakeV1 (Param : sig + val name : string + end) + () = + struct include Pervasives module Pervasives = Pervasives module Compare = Compare module Array = Array module List = List + module Bytes = struct include Bytes include EndianBytes.BigEndian module LE = EndianBytes.LittleEndian end + module String = struct include String include EndianString.BigEndian module LE = EndianString.LittleEndian end + module Set = Set module Map = Map module Int32 = Int32 @@ -232,26 +284,31 @@ module Make (Context : CONTEXT) = struct module Format = Format module Option = Option module MBytes = MBytes + module Raw_hashes = struct let sha256 msg = Hacl.Hash.SHA256.digest msg + let sha512 msg = Hacl.Hash.SHA512.digest msg - let blake2b msg = Blake2B.to_bytes (Blake2B.hash_bytes [ msg ]) + + let blake2b msg = Blake2B.to_bytes (Blake2B.hash_bytes [msg]) end + module Z = struct include Z + let to_bits ?(pad_to = 0) z = let bits = to_bits z in let len = Pervasives.((numbits z + 7) / 8) in let full_len = Compare.Int.max pad_to len in - if full_len = 0 then - MBytes.empty + if full_len = 0 then MBytes.empty else let res = MBytes.make full_len '\000' in MBytes.blit_of_string bits 0 res 0 len ; res - let of_bits bytes = - of_bits (MBytes.to_string bytes) + + let of_bits bytes = of_bits (MBytes.to_string 
bytes) end + module Lwt_sequence = Lwt_sequence module Lwt = Lwt module Lwt_list = Lwt_list @@ -262,160 +319,208 @@ module Make (Context : CONTEXT) = struct module Secp256k1 = Secp256k1 module P256 = P256 module Signature = Signature + module S = struct module type T = Tezos_base.S.T + module type HASHABLE = Tezos_base.S.HASHABLE + module type MINIMAL_HASH = S.MINIMAL_HASH - module type B58_DATA = sig + module type B58_DATA = sig type t - val to_b58check: t -> string - val to_short_b58check: t -> string + val to_b58check : t -> string + + val to_short_b58check : t -> string - val of_b58check_exn: string -> t - val of_b58check_opt: string -> t option + val of_b58check_exn : string -> t + + val of_b58check_opt : string -> t option type Base58.data += Data of t - val b58check_encoding: t Base58.encoding + val b58check_encoding : t Base58.encoding end + module type RAW_DATA = sig type t - val size: int (* in bytes *) - val to_bytes: t -> MBytes.t - val of_bytes_opt: MBytes.t -> t option - val of_bytes_exn: MBytes.t -> t + + val size : int (* in bytes *) + + val to_bytes : t -> MBytes.t + + val of_bytes_opt : MBytes.t -> t option + + val of_bytes_exn : MBytes.t -> t end + module type ENCODER = sig type t - val encoding: t Data_encoding.t - val rpc_arg: t RPC_arg.t + + val encoding : t Data_encoding.t + + val rpc_arg : t RPC_arg.t end + module type SET = Tezos_base.S.SET + module type MAP = Tezos_base.S.MAP - module type INDEXES = sig + module type INDEXES = sig type t - val to_path: t -> string list -> string list - val of_path: string list -> t option - val of_path_exn: string list -> t + val to_path : t -> string list -> string list + + val of_path : string list -> t option + + val of_path_exn : string list -> t - val prefix_path: string -> string list - val path_length: int + val prefix_path : string -> string list + + val path_length : int module Set : sig include Set.S with type elt = t - val encoding: t Data_encoding.t + + val encoding : t Data_encoding.t end module Map : sig include Map.S with type key = t - val encoding: 'a Data_encoding.t -> 'a t Data_encoding.t - end + val encoding : 'a Data_encoding.t -> 'a t Data_encoding.t + end end + module type HASH = sig include MINIMAL_HASH + include RAW_DATA with type t := t + include B58_DATA with type t := t + include ENCODER with type t := t + include INDEXES with type t := t end module type MERKLE_TREE = sig type elt + include HASH - val compute: elt list -> t - val empty: t - type path = - | Left of path * t - | Right of t * path - | Op - val compute_path: elt list -> int -> path - val check_path: path -> elt -> t * int - val path_encoding: path Data_encoding.t + + val compute : elt list -> t + + val empty : t + + type path = Left of path * t | Right of t * path | Op + + val compute_path : elt list -> int -> path + + val check_path : path -> elt -> t * int + + val path_encoding : path Data_encoding.t end module type SIGNATURE = sig - module Public_key_hash : sig - type t - val pp: Format.formatter -> t -> unit - val pp_short: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + + val pp_short : Format.formatter -> t -> unit + include Compare.S with type t := t + include RAW_DATA with type t := t + include B58_DATA with type t := t + include ENCODER with type t := t - include INDEXES with type t := t - val zero: t + include INDEXES with type t := t + val zero : t end module Public_key : sig - type t - val pp: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + include Compare.S with type t := t + 
include B58_DATA with type t := t - include ENCODER with type t := t - val hash: t -> Public_key_hash.t + include ENCODER with type t := t + val hash : t -> Public_key_hash.t end type t - val pp: Format.formatter -> t -> unit + val pp : Format.formatter -> t -> unit + include RAW_DATA with type t := t + include Compare.S with type t := t + include B58_DATA with type t := t + include ENCODER with type t := t - val zero: t + val zero : t type watermark (** Check a signature *) - val check: ?watermark:watermark -> Public_key.t -> t -> MBytes.t -> bool - + val check : + ?watermark:watermark -> Public_key.t -> t -> MBytes.t -> bool end - end + module Error_monad = struct type 'a shell_tzresult = 'a Error_monad.tzresult + type shell_error = Error_monad.error = .. - type error_category = [ `Branch | `Temporary | `Permanent ] - include Error_monad.Make(struct let id = Format.asprintf "proto.%s." Param.name end) + + type error_category = [`Branch | `Temporary | `Permanent] + + include Error_monad.Make (struct + let id = Format.asprintf "proto.%s." Param.name + end) end type error += Ecoproto_error of Error_monad.error module Wrapped_error_monad = struct type unwrapped = Error_monad.error = .. + include (Error_monad : Error_monad_sig.S with type error := unwrapped) + let unwrap = function - | Ecoproto_error ecoerror -> Some ecoerror - | _ -> None - let wrap ecoerror = - Ecoproto_error ecoerror + | Ecoproto_error ecoerror -> + Some ecoerror + | _ -> + None + + let wrap ecoerror = Ecoproto_error ecoerror end let () = let id = Format.asprintf "proto.%s.wrapper" Param.name in register_wrapped_error_kind (module Wrapped_error_monad) - ~id ~title: ("Error returned by protocol " ^ Param.name) - ~description: ("Wrapped error for economic protocol " ^ Param.name ^ ".") + ~id + ~title:("Error returned by protocol " ^ Param.name) + ~description:("Wrapped error for economic protocol " ^ Param.name ^ ".") let wrap_error = function - | Ok _ as ok -> ok - | Error errors -> Error (List.map (fun error -> Ecoproto_error error) errors) + | Ok _ as ok -> + ok + | Error errors -> + Error (List.map (fun error -> Ecoproto_error error) errors) module Chain_id = Chain_id module Block_hash = Block_hash @@ -433,8 +538,8 @@ module Make (Context : CONTEXT) = struct module RPC_path = RPC_path module RPC_query = RPC_query module RPC_service = RPC_service - module RPC_answer = struct + module RPC_answer = struct type 'o t = [ `Ok of 'o (* 200 *) | `OkStream of 'o stream (* 200 *) @@ -444,339 +549,475 @@ module Make (Context : CONTEXT) = struct | `Forbidden of Error_monad.error list option (* 403 *) | `Not_found of Error_monad.error list option (* 404 *) | `Conflict of Error_monad.error list option (* 409 *) - | `Error of Error_monad.error list option (* 500 *) - ] + | `Error of Error_monad.error list option (* 500 *) ] and 'a stream = 'a Resto_directory.Answer.stream = { - next: unit -> 'a option Lwt.t ; - shutdown: unit -> unit ; + next : unit -> 'a option Lwt.t; + shutdown : unit -> unit } let return x = Lwt.return (`Ok x) + let return_stream x = Lwt.return (`OkStream x) + let not_found = Lwt.return (`Not_found None) let fail err = Lwt.return (`Error (Some err)) end + module RPC_directory = struct include RPC_directory + let gen_register dir service handler = - gen_register dir service - (fun p q i -> - handler p q i >>= function - | `Ok o -> RPC_answer.return o - | `OkStream s -> RPC_answer.return_stream s - | `Created s -> Lwt.return (`Created s) - | `No_content -> Lwt.return (`No_content) - | `Unauthorized e -> - let e = 
Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) in - Lwt.return (`Unauthorized e) - | `Forbidden e -> - let e = Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) in - Lwt.return (`Forbidden e) - | `Not_found e -> - let e = Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) in - Lwt.return (`Not_found e) - | `Conflict e -> - let e = Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) in - Lwt.return (`Conflict e) - | `Error e -> - let e = Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) in - Lwt.return (`Error e)) + gen_register dir service (fun p q i -> + handler p q i + >>= function + | `Ok o -> + RPC_answer.return o + | `OkStream s -> + RPC_answer.return_stream s + | `Created s -> + Lwt.return (`Created s) + | `No_content -> + Lwt.return `No_content + | `Unauthorized e -> + let e = + Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) + in + Lwt.return (`Unauthorized e) + | `Forbidden e -> + let e = + Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) + in + Lwt.return (`Forbidden e) + | `Not_found e -> + let e = + Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) + in + Lwt.return (`Not_found e) + | `Conflict e -> + let e = + Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) + in + Lwt.return (`Conflict e) + | `Error e -> + let e = + Option.map e ~f:(List.map (fun e -> Ecoproto_error e)) + in + Lwt.return (`Error e)) let register dir service handler = - gen_register dir service - (fun p q i -> - handler p q i >>= function - | Ok o -> RPC_answer.return o - | Error e -> RPC_answer.fail e) + gen_register dir service (fun p q i -> + handler p q i + >>= function + | Ok o -> RPC_answer.return o | Error e -> RPC_answer.fail e) let opt_register dir service handler = - gen_register dir service - (fun p q i -> - handler p q i >>= function - | Ok (Some o) -> RPC_answer.return o - | Ok None -> RPC_answer.not_found - | Error e -> RPC_answer.fail e) + gen_register dir service (fun p q i -> + handler p q i + >>= function + | Ok (Some o) -> + RPC_answer.return o + | Ok None -> + RPC_answer.not_found + | Error e -> + RPC_answer.fail e) let lwt_register dir service handler = - gen_register dir service - (fun p q i -> - handler p q i >>= fun o -> - RPC_answer.return o) + gen_register dir service (fun p q i -> + handler p q i >>= fun o -> RPC_answer.return o) open Curry let register0 root s f = register root s (curry Z f) + let register1 root s f = register root s (curry (S Z) f) + let register2 root s f = register root s (curry (S (S Z)) f) + let register3 root s f = register root s (curry (S (S (S Z))) f) + let register4 root s f = register root s (curry (S (S (S (S Z)))) f) + let register5 root s f = register root s (curry (S (S (S (S (S Z))))) f) let opt_register0 root s f = opt_register root s (curry Z f) + let opt_register1 root s f = opt_register root s (curry (S Z) f) + let opt_register2 root s f = opt_register root s (curry (S (S Z)) f) + let opt_register3 root s f = opt_register root s (curry (S (S (S Z))) f) - let opt_register4 root s f = opt_register root s (curry (S (S (S (S Z)))) f) - let opt_register5 root s f = opt_register root s (curry (S (S (S (S (S Z))))) f) + + let opt_register4 root s f = + opt_register root s (curry (S (S (S (S Z)))) f) + + let opt_register5 root s f = + opt_register root s (curry (S (S (S (S (S Z))))) f) let gen_register0 root s f = gen_register root s (curry Z f) + let gen_register1 root s f = gen_register root s (curry (S Z) f) + let gen_register2 root s f = gen_register root s (curry (S (S Z)) f) + let gen_register3 root 
s f = gen_register root s (curry (S (S (S Z))) f) - let gen_register4 root s f = gen_register root s (curry (S (S (S (S Z)))) f) - let gen_register5 root s f = gen_register root s (curry (S (S (S (S (S Z))))) f) + + let gen_register4 root s f = + gen_register root s (curry (S (S (S (S Z)))) f) + + let gen_register5 root s f = + gen_register root s (curry (S (S (S (S (S Z))))) f) let lwt_register0 root s f = lwt_register root s (curry Z f) + let lwt_register1 root s f = lwt_register root s (curry (S Z) f) + let lwt_register2 root s f = lwt_register root s (curry (S (S Z)) f) + let lwt_register3 root s f = lwt_register root s (curry (S (S (S Z))) f) - let lwt_register4 root s f = lwt_register root s (curry (S (S (S (S Z)))) f) - let lwt_register5 root s f = lwt_register root s (curry (S (S (S (S (S Z))))) f) + let lwt_register4 root s f = + lwt_register root s (curry (S (S (S (S Z)))) f) + + let lwt_register5 root s f = + lwt_register root s (curry (S (S (S (S (S Z))))) f) end - module RPC_context = struct + module RPC_context = struct type t = rpc_context - class type ['pr] simple = object - method call_proto_service0 : - 'm 'q 'i 'o. - ([< RPC_service.meth ] as 'm, t, t, 'q, 'i, 'o) RPC_service.t -> - 'pr -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t - method call_proto_service1 : - 'm 'a 'q 'i 'o. - ([< RPC_service.meth ] as 'm, t, t * 'a, 'q, 'i, 'o) RPC_service.t -> - 'pr -> 'a -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t - method call_proto_service2 : - 'm 'a 'b 'q 'i 'o. - ([< RPC_service.meth ] as 'm, t, (t * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> - 'pr -> 'a -> 'b -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t - method call_proto_service3 : - 'm 'a 'b 'c 'q 'i 'o. - ([< RPC_service.meth ] as 'm, t, ((t * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> - 'pr -> 'a -> 'b -> 'c -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t - end + class type ['pr] simple = + object + method call_proto_service0 : + 'm 'q 'i 'o. + (([< RPC_service.meth] as 'm), t, t, 'q, 'i, 'o) RPC_service.t -> + 'pr -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t + + method call_proto_service1 : + 'm 'a 'q 'i 'o. + (([< RPC_service.meth] as 'm), t, t * 'a, 'q, 'i, 'o) RPC_service.t -> + 'pr -> 'a -> 'q -> 'i -> 'o Error_monad.shell_tzresult Lwt.t + + method call_proto_service2 : + 'm 'a 'b 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + t, + (t * 'a) * 'b, + 'q, + 'i, + 'o ) + RPC_service.t -> 'pr -> 'a -> 'b -> 'q -> 'i -> + 'o Error_monad.shell_tzresult Lwt.t + + method call_proto_service3 : + 'm 'a 'b 'c 'q 'i 'o. 
+ ( ([< RPC_service.meth] as 'm), + t, + ((t * 'a) * 'b) * 'c, + 'q, + 'i, + 'o ) + RPC_service.t -> 'pr -> 'a -> 'b -> 'c -> 'q -> 'i -> + 'o Error_monad.shell_tzresult Lwt.t + end + + let make_call0 s (ctxt : _ simple) = ctxt#call_proto_service0 s - let make_call0 s (ctxt : _ simple) = - ctxt#call_proto_service0 s let make_call0 = (make_call0 : _ -> _ simple -> _ :> _ -> _ #simple -> _) - let make_call1 s (ctxt: _ simple) = - ctxt#call_proto_service1 s + let make_call1 s (ctxt : _ simple) = ctxt#call_proto_service1 s + let make_call1 = (make_call1 : _ -> _ simple -> _ :> _ -> _ #simple -> _) - let make_call2 s (ctxt: _ simple) = - ctxt#call_proto_service2 s + let make_call2 s (ctxt : _ simple) = ctxt#call_proto_service2 s + let make_call2 = (make_call2 : _ -> _ simple -> _ :> _ -> _ #simple -> _) - let make_call3 s (ctxt: _ simple) = - ctxt#call_proto_service3 s + let make_call3 s (ctxt : _ simple) = ctxt#call_proto_service3 s + let make_call3 = (make_call3 : _ -> _ simple -> _ :> _ -> _ #simple -> _) let make_opt_call0 s ctxt block q i = - make_call0 s ctxt block q i >>= function - | Error [RPC_context.Not_found _] -> Lwt.return_ok None - | Error _ as v -> Lwt.return v - | Ok v -> Lwt.return_ok (Some v) + make_call0 s ctxt block q i + >>= function + | Error [RPC_context.Not_found _] -> + Lwt.return_ok None + | Error _ as v -> + Lwt.return v + | Ok v -> + Lwt.return_ok (Some v) let make_opt_call1 s ctxt block a1 q i = - make_call1 s ctxt block a1 q i >>= function - | Error [RPC_context.Not_found _] -> Lwt.return_ok None - | Error _ as v -> Lwt.return v - | Ok v -> Lwt.return_ok (Some v) + make_call1 s ctxt block a1 q i + >>= function + | Error [RPC_context.Not_found _] -> + Lwt.return_ok None + | Error _ as v -> + Lwt.return v + | Ok v -> + Lwt.return_ok (Some v) let make_opt_call2 s ctxt block a1 a2 q i = - make_call2 s ctxt block a1 a2 q i >>= function - | Error [RPC_context.Not_found _] -> Lwt.return_ok None - | Error _ as v -> Lwt.return v - | Ok v -> Lwt.return_ok (Some v) + make_call2 s ctxt block a1 a2 q i + >>= function + | Error [RPC_context.Not_found _] -> + Lwt.return_ok None + | Error _ as v -> + Lwt.return v + | Ok v -> + Lwt.return_ok (Some v) let make_opt_call3 s ctxt block a1 a2 a3 q i = - make_call3 s ctxt block a1 a2 a3 q i >>= function - | Error [RPC_context.Not_found _] -> Lwt.return_ok None - | Error _ as v -> Lwt.return v - | Ok v -> Lwt.return_ok (Some v) - + make_call3 s ctxt block a1 a2 a3 q i + >>= function + | Error [RPC_context.Not_found _] -> + Lwt.return_ok None + | Error _ as v -> + Lwt.return v + | Ok v -> + Lwt.return_ok (Some v) end + module Micheline = struct include Micheline + let canonical_encoding_v1 = canonical_encoding_v1 + let canonical_encoding = canonical_encoding_v0 end - module Logging = Internal_event.Legacy_logging.Make(Param) - module Updater = struct + module Logging = Internal_event.Legacy_logging.Make (Param) + module Updater = struct type nonrec validation_result = validation_result = { - context: Context.t ; - fitness: Fitness.t ; - message: string option ; - max_operations_ttl: int ; - last_allowed_fork_level: Int32.t ; + context : Context.t; + fitness : Fitness.t; + message : string option; + max_operations_ttl : int; + last_allowed_fork_level : Int32.t } - type nonrec quota = quota = { - max_size: int ; - max_op: int option ; - } + type nonrec quota = quota = {max_size : int; max_op : int option} type nonrec rpc_context = rpc_context = { - block_hash: Block_hash.t ; - block_header: Block_header.shell_header ; - context: Context.t ; 
+ block_hash : Block_hash.t; + block_header : Block_header.shell_header; + context : Context.t } let activate = Context.set_protocol + let fork_test_chain = Context.fork_test_chain module type PROTOCOL = - T with type context := Context.t + T + with type context := Context.t and type quota := quota and type validation_result := validation_result and type rpc_context := rpc_context and type 'a tzresult := 'a Error_monad.tzresult - end + module Base58 = struct include Tezos_crypto.Base58 + let simple_encode enc s = simple_encode enc s + let simple_decode enc s = simple_decode enc s - include Make(struct type context = Context.t end) + + include Make (struct + type context = Context.t + end) + let decode s = decode s end + module Context = struct include Context let fold_keys s k ~init ~f = let rec loop k acc = - fold s k ~init:acc - ~f:(fun file acc -> - match file with - | `Key k -> f k acc - | `Dir k -> loop k acc) in + fold s k ~init:acc ~f:(fun file acc -> + match file with `Key k -> f k acc | `Dir k -> loop k acc) + in loop k init let keys t = fold_keys t ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc)) let register_resolver = Base58.register_resolver + let complete ctxt s = Base58.complete ctxt s end - module Lift(P : Updater.PROTOCOL) = struct + module Lift (P : Updater.PROTOCOL) = struct include P - let begin_partial_application - ~chain_id ~ancestor_context ~predecessor_timestamp ~predecessor_fitness - raw_block = + + let begin_partial_application ~chain_id ~ancestor_context + ~predecessor_timestamp ~predecessor_fitness raw_block = begin_partial_application - ~chain_id ~ancestor_context ~predecessor_timestamp ~predecessor_fitness - raw_block >|= wrap_error - let begin_application - ~chain_id ~predecessor_context ~predecessor_timestamp + ~chain_id + ~ancestor_context + ~predecessor_timestamp ~predecessor_fitness - raw_block = + raw_block + >|= wrap_error + + let begin_application ~chain_id ~predecessor_context + ~predecessor_timestamp ~predecessor_fitness raw_block = begin_application - ~chain_id ~predecessor_context ~predecessor_timestamp + ~chain_id + ~predecessor_context + ~predecessor_timestamp ~predecessor_fitness - raw_block >|= wrap_error - let begin_construction - ~chain_id ~predecessor_context ~predecessor_timestamp - ~predecessor_level ~predecessor_fitness + raw_block + >|= wrap_error + + let begin_construction ~chain_id ~predecessor_context + ~predecessor_timestamp ~predecessor_level ~predecessor_fitness ~predecessor ~timestamp ?protocol_data () = begin_construction - ~chain_id ~predecessor_context ~predecessor_timestamp - ~predecessor_level ~predecessor_fitness - ~predecessor ~timestamp ?protocol_data () >|= wrap_error - let current_context c = - current_context c >|= wrap_error - let apply_operation c o = - apply_operation c o >|= wrap_error + ~chain_id + ~predecessor_context + ~predecessor_timestamp + ~predecessor_level + ~predecessor_fitness + ~predecessor + ~timestamp + ?protocol_data + () + >|= wrap_error + + let current_context c = current_context c >|= wrap_error + + let apply_operation c o = apply_operation c o >|= wrap_error + let finalize_block c = finalize_block c >|= wrap_error + let init c bh = init c bh >|= wrap_error end - class ['chain, 'block] proto_rpc_context - (t : Tezos_rpc.RPC_context.t) - (prefix : (unit, (unit * 'chain) * 'block) RPC_path.t) = + class ['chain, 'block] proto_rpc_context (t : Tezos_rpc.RPC_context.t) + (prefix : (unit, (unit * 'chain) * 'block) RPC_path.t) = object method call_proto_service0 - : 'm 'q 'i 'o. 
- ([< RPC_service.meth ] as 'm, RPC_context.t, - RPC_context.t, 'q, 'i, 'o) RPC_service.t -> - ('chain * 'block) -> 'q -> 'i -> 'o tzresult Lwt.t - = fun s (chain, block) q i -> + : 'm 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + RPC_context.t, + RPC_context.t, + 'q, + 'i, + 'o ) + RPC_service.t -> 'chain * 'block -> 'q -> 'i -> 'o tzresult Lwt.t + = + fun s (chain, block) q i -> let s = RPC_service.subst0 s in let s = RPC_service.prefix prefix s in t#call_service s (((), chain), block) q i + method call_proto_service1 - : 'm 'a 'q 'i 'o. - ([< RPC_service.meth ] as 'm, RPC_context.t, - RPC_context.t * 'a, 'q, 'i, 'o) RPC_service.t -> - ('chain * 'block) -> 'a -> 'q -> 'i -> 'o tzresult Lwt.t - = fun s (chain, block) a1 q i -> + : 'm 'a 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + RPC_context.t, + RPC_context.t * 'a, + 'q, + 'i, + 'o ) + RPC_service.t -> 'chain * 'block -> 'a -> 'q -> 'i -> + 'o tzresult Lwt.t = + fun s (chain, block) a1 q i -> let s = RPC_service.subst1 s in let s = RPC_service.prefix prefix s in t#call_service s ((((), chain), block), a1) q i + method call_proto_service2 - : 'm 'a 'b 'q 'i 'o. - ([< RPC_service.meth ] as 'm, RPC_context.t, - (RPC_context.t * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> - ('chain * 'block) -> 'a -> 'b -> 'q -> 'i -> 'o tzresult Lwt.t - = fun s (chain, block) a1 a2 q i -> + : 'm 'a 'b 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + RPC_context.t, + (RPC_context.t * 'a) * 'b, + 'q, + 'i, + 'o ) + RPC_service.t -> 'chain * 'block -> 'a -> 'b -> 'q -> 'i -> + 'o tzresult Lwt.t = + fun s (chain, block) a1 a2 q i -> let s = RPC_service.subst2 s in let s = RPC_service.prefix prefix s in t#call_service s (((((), chain), block), a1), a2) q i + method call_proto_service3 - : 'm 'a 'b 'c 'q 'i 'o. - ([< RPC_service.meth ] as 'm, RPC_context.t, - ((RPC_context.t * 'a) * 'b) * 'c, - 'q, 'i, 'o) RPC_service.t -> - ('chain * 'block) -> 'a -> 'b -> 'c -> 'q -> 'i -> 'o tzresult Lwt.t - = fun s (chain, block) a1 a2 a3 q i -> + : 'm 'a 'b 'c 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + RPC_context.t, + ((RPC_context.t * 'a) * 'b) * 'c, + 'q, + 'i, + 'o ) + RPC_service.t -> 'chain * 'block -> 'a -> 'b -> 'c -> 'q -> 'i -> + 'o tzresult Lwt.t = + fun s (chain, block) a1 a2 a3 q i -> let s = RPC_service.subst3 s in let s = RPC_service.prefix prefix s in t#call_service s ((((((), chain), block), a1), a2), a3) q i end - class ['block] proto_rpc_context_of_directory conv dir : ['block] RPC_context.simple = + class ['block] proto_rpc_context_of_directory conv dir : + ['block] RPC_context.simple = let lookup = new Tezos_rpc.RPC_context.of_directory dir in object method call_proto_service0 - : 'm 'q 'i 'o. - ([< RPC_service.meth ] as 'm, RPC_context.t, - RPC_context.t, 'q, 'i, 'o) RPC_service.t -> - 'block -> 'q -> 'i -> 'o tzresult Lwt.t - = fun s block q i -> + : 'm 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + RPC_context.t, + RPC_context.t, + 'q, + 'i, + 'o ) + RPC_service.t -> 'block -> 'q -> 'i -> 'o tzresult Lwt.t = + fun s block q i -> let rpc_context = conv block in lookup#call_service s rpc_context q i + method call_proto_service1 - : 'm 'a 'q 'i 'o. - ([< RPC_service.meth ] as 'm, RPC_context.t, - RPC_context.t * 'a, 'q, 'i, 'o) RPC_service.t -> - 'block -> 'a -> 'q -> 'i -> 'o tzresult Lwt.t - = fun s block a1 q i -> + : 'm 'a 'q 'i 'o. 
+ ( ([< RPC_service.meth] as 'm), + RPC_context.t, + RPC_context.t * 'a, + 'q, + 'i, + 'o ) + RPC_service.t -> 'block -> 'a -> 'q -> 'i -> 'o tzresult Lwt.t = + fun s block a1 q i -> let rpc_context = conv block in lookup#call_service s (rpc_context, a1) q i + method call_proto_service2 - : 'm 'a 'b 'q 'i 'o. - ([< RPC_service.meth ] as 'm, RPC_context.t, - (RPC_context.t * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> - 'block -> 'a -> 'b -> 'q -> 'i -> 'o tzresult Lwt.t - = fun s block a1 a2 q i -> + : 'm 'a 'b 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + RPC_context.t, + (RPC_context.t * 'a) * 'b, + 'q, + 'i, + 'o ) + RPC_service.t -> 'block -> 'a -> 'b -> 'q -> 'i -> + 'o tzresult Lwt.t = + fun s block a1 a2 q i -> let rpc_context = conv block in lookup#call_service s ((rpc_context, a1), a2) q i + method call_proto_service3 - : 'm 'a 'b 'c 'q 'i 'o. - ([< RPC_service.meth ] as 'm, RPC_context.t, - ((RPC_context.t * 'a) * 'b) * 'c, - 'q, 'i, 'o) RPC_service.t -> - 'block -> 'a -> 'b -> 'c -> 'q -> 'i -> 'o tzresult Lwt.t - = fun s block a1 a2 a3 q i -> + : 'm 'a 'b 'c 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + RPC_context.t, + ((RPC_context.t * 'a) * 'b) * 'c, + 'q, + 'i, + 'o ) + RPC_service.t -> 'block -> 'a -> 'b -> 'c -> 'q -> 'i -> + 'o tzresult Lwt.t = + fun s block a1 a2 a3 q i -> let rpc_context = conv block in lookup#call_service s (((rpc_context, a1), a2), a3) q i end - end - end diff --git a/src/lib_protocol_environment/tezos_protocol_environment.mli b/src/lib_protocol_environment/tezos_protocol_environment.mli index df1efcae8e02928ef0f93021ec11ef9c927fc98c..ae5beb91714a58151b9db825b0e2e623450a2cc6 100644 --- a/src/lib_protocol_environment/tezos_protocol_environment.mli +++ b/src/lib_protocol_environment/tezos_protocol_environment.mli @@ -1,192 +1,239 @@ - open Error_monad - module type CONTEXT = sig type t + type key = string list + type value = MBytes.t - val mem: t -> key -> bool Lwt.t - val dir_mem: t -> key -> bool Lwt.t - val get: t -> key -> value option Lwt.t - val set: t -> key -> value -> t Lwt.t - val copy: t -> from:key -> to_:key -> t option Lwt.t - val del: t -> key -> t Lwt.t - val remove_rec: t -> key -> t Lwt.t - val fold: - t -> key -> init:'a -> - f:([ `Key of key | `Dir of key ] -> 'a -> 'a Lwt.t) -> + + val mem : t -> key -> bool Lwt.t + + val dir_mem : t -> key -> bool Lwt.t + + val get : t -> key -> value option Lwt.t + + val set : t -> key -> value -> t Lwt.t + + val copy : t -> from:key -> to_:key -> t option Lwt.t + + val del : t -> key -> t Lwt.t + + val remove_rec : t -> key -> t Lwt.t + + val fold : + t -> + key -> + init:'a -> + f:([`Key of key | `Dir of key] -> 'a -> 'a Lwt.t) -> 'a Lwt.t - val set_protocol: t -> Protocol_hash.t -> t Lwt.t - val fork_test_chain: + + val set_protocol : t -> Protocol_hash.t -> t Lwt.t + + val fork_test_chain : t -> protocol:Protocol_hash.t -> expiration:Time.Protocol.t -> t Lwt.t end module Make (Context : CONTEXT) : sig - type validation_result = { - context: Context.t ; - fitness: Fitness.t ; - message: string option ; - max_operations_ttl: int ; - last_allowed_fork_level: Int32.t ; + context : Context.t; + fitness : Fitness.t; + message : string option; + max_operations_ttl : int; + last_allowed_fork_level : Int32.t } - type quota = { - max_size: int ; - max_op: int option ; - } + type quota = {max_size : int; max_op : int option} type rpc_context = { - block_hash: Block_hash.t ; - block_header: Block_header.shell_header ; - context: Context.t ; + block_hash : Block_hash.t; + block_header : 
Block_header.shell_header; + context : Context.t } module type T = sig type context + type quota + type validation_result + type rpc_context + type 'a tzresult - val max_block_length: int - val max_operation_data_length: int - val validation_passes: quota list + + val max_block_length : int + + val max_operation_data_length : int + + val validation_passes : quota list + type block_header_data - val block_header_data_encoding: block_header_data Data_encoding.t + + val block_header_data_encoding : block_header_data Data_encoding.t + type block_header = { - shell: Block_header.shell_header ; - protocol_data: block_header_data ; + shell : Block_header.shell_header; + protocol_data : block_header_data } + type block_header_metadata - val block_header_metadata_encoding: block_header_metadata Data_encoding.t + + val block_header_metadata_encoding : block_header_metadata Data_encoding.t + type operation_data + type operation_receipt + type operation = { - shell: Operation.shell_header ; - protocol_data: operation_data ; + shell : Operation.shell_header; + protocol_data : operation_data } - val operation_data_encoding: operation_data Data_encoding.t - val operation_receipt_encoding: operation_receipt Data_encoding.t - val operation_data_and_receipt_encoding: + + val operation_data_encoding : operation_data Data_encoding.t + + val operation_receipt_encoding : operation_receipt Data_encoding.t + + val operation_data_and_receipt_encoding : (operation_data * operation_receipt) Data_encoding.t - val acceptable_passes: operation -> int list - val compare_operations: operation -> operation -> int + + val acceptable_passes : operation -> int list + + val compare_operations : operation -> operation -> int + type validation_state - val current_context: validation_state -> context tzresult Lwt.t - val begin_partial_application: - chain_id: Chain_id.t -> - ancestor_context: context -> - predecessor_timestamp: Time.Protocol.t -> - predecessor_fitness: Fitness.t -> + + val current_context : validation_state -> context tzresult Lwt.t + + val begin_partial_application : + chain_id:Chain_id.t -> + ancestor_context:context -> + predecessor_timestamp:Time.Protocol.t -> + predecessor_fitness:Fitness.t -> block_header -> validation_state tzresult Lwt.t - val begin_application: - chain_id: Chain_id.t -> - predecessor_context: context -> - predecessor_timestamp: Time.Protocol.t -> - predecessor_fitness: Fitness.t -> + + val begin_application : + chain_id:Chain_id.t -> + predecessor_context:context -> + predecessor_timestamp:Time.Protocol.t -> + predecessor_fitness:Fitness.t -> block_header -> validation_state tzresult Lwt.t - val begin_construction: - chain_id: Chain_id.t -> - predecessor_context: context -> - predecessor_timestamp: Time.Protocol.t -> - predecessor_level: Int32.t -> - predecessor_fitness: Fitness.t -> - predecessor: Block_hash.t -> - timestamp: Time.Protocol.t -> - ?protocol_data: block_header_data -> - unit -> validation_state tzresult Lwt.t - val apply_operation: - validation_state -> operation -> + + val begin_construction : + chain_id:Chain_id.t -> + predecessor_context:context -> + predecessor_timestamp:Time.Protocol.t -> + predecessor_level:Int32.t -> + predecessor_fitness:Fitness.t -> + predecessor:Block_hash.t -> + timestamp:Time.Protocol.t -> + ?protocol_data:block_header_data -> + unit -> + validation_state tzresult Lwt.t + + val apply_operation : + validation_state -> + operation -> (validation_state * operation_receipt) tzresult Lwt.t - val finalize_block: + + val finalize_block : 
validation_state -> (validation_result * block_header_metadata) tzresult Lwt.t - val rpc_services: rpc_context RPC_directory.t - val init: + + val rpc_services : rpc_context RPC_directory.t + + val init : context -> Block_header.shell_header -> validation_result tzresult Lwt.t end module type PROTOCOL = - T with type context := Context.t + T + with type context := Context.t and type quota := quota and type validation_result := validation_result and type rpc_context := rpc_context and type 'a tzresult := 'a Error_monad.tzresult module type V1 = sig - - include Tezos_protocol_environment_sigs.V1.T - with type Format.formatter = Format.formatter - and type 'a Data_encoding.t = 'a Data_encoding.t - and type 'a Data_encoding.lazy_t = 'a Data_encoding.lazy_t - and type 'a Lwt.t = 'a Lwt.t - and type ('a, 'b) Pervasives.result = ('a, 'b) result - and type Chain_id.t = Chain_id.t - and type Block_hash.t = Block_hash.t - and type Operation_hash.t = Operation_hash.t - and type Operation_list_hash.t = Operation_list_hash.t - and type Operation_list_list_hash.t = Operation_list_list_hash.t - and type Context.t = Context.t - and type Context_hash.t = Context_hash.t - and type Protocol_hash.t = Protocol_hash.t - and type Time.t = Time.Protocol.t - and type MBytes.t = MBytes.t - and type Operation.shell_header = Operation.shell_header - and type Operation.t = Operation.t - and type Block_header.shell_header = Block_header.shell_header - and type Block_header.t = Block_header.t - and type 'a RPC_directory.t = 'a RPC_directory.t - and type Ed25519.Public_key_hash.t = Ed25519.Public_key_hash.t - and type Ed25519.Public_key.t = Ed25519.Public_key.t - and type Ed25519.t = Ed25519.t - and type Secp256k1.Public_key_hash.t = Secp256k1.Public_key_hash.t - and type Secp256k1.Public_key.t = Secp256k1.Public_key.t - and type Secp256k1.t = Secp256k1.t - and type P256.Public_key_hash.t = P256.Public_key_hash.t - and type P256.Public_key.t = P256.Public_key.t - and type P256.t = P256.t - and type Signature.public_key_hash = Signature.public_key_hash - and type Signature.public_key = Signature.public_key - and type Signature.t = Signature.t - and type Signature.watermark = Signature.watermark - and type 'a Micheline.canonical = 'a Micheline.canonical - and type Z.t = Z.t - and type ('a, 'b) Micheline.node = ('a, 'b) Micheline.node - and type Data_encoding.json_schema = Data_encoding.json_schema - and type ('a, 'b) RPC_path.t = ('a, 'b) RPC_path.t - and type RPC_service.meth = RPC_service.meth - and type (+'m,'pr,'p,'q,'i,'o) RPC_service.t = ('m,'pr,'p,'q,'i,'o) RPC_service.t - and type Error_monad.shell_error = Error_monad.error - and type Z.t = Z.t + include + Tezos_protocol_environment_sigs.V1.T + with type Format.formatter = Format.formatter + and type 'a Data_encoding.t = 'a Data_encoding.t + and type 'a Data_encoding.lazy_t = 'a Data_encoding.lazy_t + and type 'a Lwt.t = 'a Lwt.t + and type ('a, 'b) Pervasives.result = ('a, 'b) result + and type Chain_id.t = Chain_id.t + and type Block_hash.t = Block_hash.t + and type Operation_hash.t = Operation_hash.t + and type Operation_list_hash.t = Operation_list_hash.t + and type Operation_list_list_hash.t = Operation_list_list_hash.t + and type Context.t = Context.t + and type Context_hash.t = Context_hash.t + and type Protocol_hash.t = Protocol_hash.t + and type Time.t = Time.Protocol.t + and type MBytes.t = MBytes.t + and type Operation.shell_header = Operation.shell_header + and type Operation.t = Operation.t + and type Block_header.shell_header = 
Block_header.shell_header + and type Block_header.t = Block_header.t + and type 'a RPC_directory.t = 'a RPC_directory.t + and type Ed25519.Public_key_hash.t = Ed25519.Public_key_hash.t + and type Ed25519.Public_key.t = Ed25519.Public_key.t + and type Ed25519.t = Ed25519.t + and type Secp256k1.Public_key_hash.t = Secp256k1.Public_key_hash.t + and type Secp256k1.Public_key.t = Secp256k1.Public_key.t + and type Secp256k1.t = Secp256k1.t + and type P256.Public_key_hash.t = P256.Public_key_hash.t + and type P256.Public_key.t = P256.Public_key.t + and type P256.t = P256.t + and type Signature.public_key_hash = Signature.public_key_hash + and type Signature.public_key = Signature.public_key + and type Signature.t = Signature.t + and type Signature.watermark = Signature.watermark + and type 'a Micheline.canonical = 'a Micheline.canonical + and type Z.t = Z.t + and type ('a, 'b) Micheline.node = ('a, 'b) Micheline.node + and type Data_encoding.json_schema = Data_encoding.json_schema + and type ('a, 'b) RPC_path.t = ('a, 'b) RPC_path.t + and type RPC_service.meth = RPC_service.meth + and type (+'m, 'pr, 'p, 'q, 'i, 'o) RPC_service.t = + ('m, 'pr, 'p, 'q, 'i, 'o) RPC_service.t + and type Error_monad.shell_error = Error_monad.error + and type Z.t = Z.t type error += Ecoproto_error of Error_monad.error + val wrap_error : 'a Error_monad.tzresult -> 'a tzresult - module Lift (P : Updater.PROTOCOL) : PROTOCOL - with type block_header_data = P.block_header_data - and type block_header = P.block_header - and type operation_data = P.operation_data - and type operation_receipt = P.operation_receipt - and type operation = P.operation - and type validation_state = P.validation_state + module Lift (P : Updater.PROTOCOL) : + PROTOCOL + with type block_header_data = P.block_header_data + and type block_header = P.block_header + and type operation_data = P.operation_data + and type operation_receipt = P.operation_receipt + and type operation = P.operation + and type validation_state = P.validation_state class ['chain, 'block] proto_rpc_context : - Tezos_rpc.RPC_context.t -> (unit, (unit * 'chain) * 'block) RPC_path.t -> - [('chain * 'block)] RPC_context.simple + Tezos_rpc.RPC_context.t + -> (unit, (unit * 'chain) * 'block) RPC_path.t + -> ['chain * 'block] RPC_context.simple class ['block] proto_rpc_context_of_directory : - ('block -> RPC_context.t) -> RPC_context.t RPC_directory.t -> - ['block] RPC_context.simple - + ('block -> RPC_context.t) + -> RPC_context.t RPC_directory.t + -> ['block] RPC_context.simple end - module MakeV1 (Param : sig val name: string end)() - : V1 with type Context.t = Context.t - and type Updater.validation_result = validation_result - and type Updater.quota = quota - and type Updater.rpc_context = rpc_context - + module MakeV1 (Param : sig + val name : string + end) + () : + V1 + with type Context.t = Context.t + and type Updater.validation_result = validation_result + and type Updater.quota = quota + and type Updater.rpc_context = rpc_context end diff --git a/src/lib_protocol_environment/tezos_protocol_environment_faked.ml b/src/lib_protocol_environment/tezos_protocol_environment_faked.ml index fc0cbbc57d92e7c2ef83932a2738c8be7c6967e1..9d91556410dd89187e567e87a1839122d440b1de 100644 --- a/src/lib_protocol_environment/tezos_protocol_environment_faked.ml +++ b/src/lib_protocol_environment/tezos_protocol_environment_faked.ml @@ -27,21 +27,32 @@ module Context = struct type t type key = string list + type value = MBytes.t + let mem _ _ = assert false + let dir_mem _ _ = assert false + let 
get _ _ = assert false + let set _ _ _ = assert false + let copy _ ~from:_ ~to_:_ = assert false + let del _ _ = assert false + let remove_rec _ _ = assert false + let fold _ _ ~init:_ ~f:_ = assert false + let keys _ _ = assert false + let fold_keys _ _ ~init:_ ~f:_ = assert false let set_protocol _ _ = assert false - let fork_test_chain _ ~protocol:_ ~expiration:_ = assert false + let fork_test_chain _ ~protocol:_ ~expiration:_ = assert false end -include Tezos_protocol_environment.Make(Context) +include Tezos_protocol_environment.Make (Context) diff --git a/src/lib_protocol_environment/tezos_protocol_environment_memory.ml b/src/lib_protocol_environment/tezos_protocol_environment_memory.ml index 0a0d8e0b6633aa6fc7d0b2eb29a8f7c2c30708a1..52892211caeaf859462d2bb52dac6e146b148e0f 100644 --- a/src/lib_protocol_environment/tezos_protocol_environment_memory.ml +++ b/src/lib_protocol_environment/tezos_protocol_environment_memory.ml @@ -24,105 +24,125 @@ (*****************************************************************************) module Context = struct - - module StringMap = Map.Make(String) + module StringMap = Map.Make (String) type key = string list + type value = MBytes.t - type t = - | Dir of t StringMap.t - | Key of value + type t = Dir of t StringMap.t | Key of value let empty = Dir StringMap.empty let rec raw_get m k = - match k, m with - | [], m -> Some m - | n :: k, Dir m -> begin - match StringMap.find_opt n m with - | Some res -> raw_get res k - | None -> None - end - | _ :: _, Key _ -> None + match (k, m) with + | ([], m) -> + Some m + | (n :: k, Dir m) -> ( + match StringMap.find_opt n m with + | Some res -> + raw_get res k + | None -> + None ) + | (_ :: _, Key _) -> + None let rec raw_set m k v = - match k, m, v with - | [], (Key _ as m), Some v -> + match (k, m, v) with + | ([], (Key _ as m), Some v) -> if m = v then None else Some v - | [], (Dir _ as m), Some v -> + | ([], (Dir _ as m), Some v) -> if m == v then None else Some v - | [], (Key _ | Dir _), None -> Some empty - | n :: k, Dir m, _ -> begin - match raw_set (Option.unopt ~default:empty - (StringMap.find_opt n m)) k v with - | None -> None - | Some rm when rm = empty -> - Some (Dir (StringMap.remove n m)) - | Some rm -> - Some (Dir (StringMap.add n rm m)) - end - | _ :: _, Key _, None -> None - | _ :: _, Key _, Some _ -> + | ([], (Key _ | Dir _), None) -> + Some empty + | (n :: k, Dir m, _) -> ( + match + raw_set (Option.unopt ~default:empty (StringMap.find_opt n m)) k v + with + | None -> + None + | Some rm when rm = empty -> + Some (Dir (StringMap.remove n m)) + | Some rm -> + Some (Dir (StringMap.add n rm m)) ) + | (_ :: _, Key _, None) -> + None + | (_ :: _, Key _, Some _) -> Pervasives.failwith "Mem_context.set" let mem m k = match raw_get m k with - | Some (Key _) -> Lwt.return_true - | Some (Dir _) | None -> Lwt.return_false + | Some (Key _) -> + Lwt.return_true + | Some (Dir _) | None -> + Lwt.return_false let dir_mem m k = match raw_get m k with - | Some (Dir _) -> Lwt.return_true - | Some (Key _) | None -> Lwt.return_false + | Some (Dir _) -> + Lwt.return_true + | Some (Key _) | None -> + Lwt.return_false let get m k = match raw_get m k with - | Some (Key v) -> Lwt.return_some v - | Some (Dir _) | None -> Lwt.return_none + | Some (Key v) -> + Lwt.return_some v + | Some (Dir _) | None -> + Lwt.return_none let set m k v = match raw_set m k (Some (Key v)) with - | None -> Lwt.return m - | Some m -> Lwt.return m + | None -> + Lwt.return m + | Some m -> + Lwt.return m + let del m k = (* TODO assert key *) - 
match raw_set m k None with - | None -> Lwt.return m - | Some m -> Lwt.return m + match raw_set m k None with None -> Lwt.return m | Some m -> Lwt.return m + let remove_rec m k = - match raw_set m k None with - | None -> Lwt.return m - | Some m -> Lwt.return m + match raw_set m k None with None -> Lwt.return m | Some m -> Lwt.return m + let copy m ~from ~to_ = match raw_get m from with - | None -> Lwt.return_none - | Some v -> Lwt.return (raw_set m to_ (Some v)) + | None -> + Lwt.return_none + | Some v -> + Lwt.return (raw_set m to_ (Some v)) let fold m k ~init ~f = match raw_get m k with - | None -> Lwt.return init - | Some (Key _) -> Lwt.return init + | None -> + Lwt.return init + | Some (Key _) -> + Lwt.return init | Some (Dir m) -> StringMap.fold (fun n m acc -> - acc >>= fun acc -> - match m with - | Key _ -> f (`Key (k @ [n])) acc - | Dir _ -> f (`Dir (k @ [n])) acc) - m (Lwt.return init) + acc + >>= fun acc -> + match m with + | Key _ -> + f (`Key (k @ [n])) acc + | Dir _ -> + f (`Dir (k @ [n])) acc) + m + (Lwt.return init) let rec pp ppf m = match m with - | Key s -> Format.fprintf ppf "%s" (MBytes.to_string s) + | Key s -> + Format.fprintf ppf "%s" (MBytes.to_string s) | Dir m -> StringMap.iter (fun n m -> - match m with - | Key s -> - Format.fprintf ppf "- %s: %s@ " n (MBytes.to_string s) - | Dir m -> - Format.fprintf ppf "- %s:@[<v 2>@ %a@]@ " n pp (Dir m)) + match m with + | Key s -> + Format.fprintf ppf "- %s: %s@ " n (MBytes.to_string s) + | Dir m -> + Format.fprintf ppf "- %s:@[<v 2>@ %a@]@ " n pp (Dir m)) m let dump m = Format.eprintf "@[<v>%a@]" pp m @@ -130,18 +150,18 @@ module Context = struct let current_protocol_key = ["protocol"] let get_protocol v = - raw_get v current_protocol_key |> function - | Some (Key data) -> Lwt.return (Protocol_hash.of_bytes_exn data) - | _ -> assert false + raw_get v current_protocol_key + |> function + | Some (Key data) -> + Lwt.return (Protocol_hash.of_bytes_exn data) + | _ -> + assert false let set_protocol v key = - raw_set v current_protocol_key (Some (Key (Protocol_hash.to_bytes key))) |> function - | Some m -> Lwt.return m - | None -> assert false - + raw_set v current_protocol_key (Some (Key (Protocol_hash.to_bytes key))) + |> function Some m -> Lwt.return m | None -> assert false let fork_test_chain c ~protocol:_ ~expiration:_ = Lwt.return c - end -include Tezos_protocol_environment.Make(Context) +include Tezos_protocol_environment.Make (Context) diff --git a/src/lib_protocol_environment/tezos_protocol_environment_shell.ml b/src/lib_protocol_environment/tezos_protocol_environment_shell.ml index cad98cd65f60dc4c95ae40170495dba650122f76..3725c8543a53d9c59290556e3998f15e10a105d5 100644 --- a/src/lib_protocol_environment/tezos_protocol_environment_shell.ml +++ b/src/lib_protocol_environment/tezos_protocol_environment_shell.ml @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -include Tezos_protocol_environment.Make(Tezos_storage.Context) +include Tezos_protocol_environment.Make (Tezos_storage.Context) diff --git a/src/lib_protocol_updater/.ocamlformat b/src/lib_protocol_updater/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_protocol_updater/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always 
+parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_protocol_updater/registered_protocol.ml b/src/lib_protocol_updater/registered_protocol.ml index 1e70ae6afd5049584bc23450c74d1154ef268c46..f29520fcba81fe1bb88ce73e9316b8fcf49c5fbb 100644 --- a/src/lib_protocol_updater/registered_protocol.ml +++ b/src/lib_protocol_updater/registered_protocol.ml @@ -25,12 +25,19 @@ module type T = sig module P : sig - val hash: Protocol_hash.t + val hash : Protocol_hash.t + include Tezos_protocol_environment_shell.PROTOCOL end - include (module type of (struct include P end)) - module Block_services : - (module type of (struct include Block_services.Make(P)(P) end)) + + include module type of struct + include P + end + + module Block_services : module type of struct + include Block_services.Make (P) (P) + end + val complete_b58prefix : Context.t -> string -> string list Lwt.t end @@ -41,29 +48,31 @@ let build_v1 hash = let module Name = struct let name = Protocol_hash.to_b58check hash end in - let module Env = Tezos_protocol_environment_shell.MakeV1(Name)() in - (module struct - module Raw = F(Env) + let module Env = Tezos_protocol_environment_shell.MakeV1 (Name) () in + ( module struct + module Raw = F (Env) + module P = struct let hash = hash - include Env.Lift(Raw) + + include Env.Lift (Raw) end + include P - module Block_services = Block_services.Make(P)(P) + module Block_services = Block_services.Make (P) (P) + let complete_b58prefix = Env.Context.complete - end : T) + end : T ) module VersionTable = Protocol_hash.Table -let versions : (module T) VersionTable.t = - VersionTable.create 20 +let versions : (module T) VersionTable.t = VersionTable.create 20 -let sources : Protocol.t VersionTable.t = - VersionTable.create 20 +let sources : Protocol.t VersionTable.t = VersionTable.create 20 let mem hash = - VersionTable.mem versions hash || - Tezos_protocol_registerer.Registerer.mem hash + VersionTable.mem versions hash + || Tezos_protocol_registerer.Registerer.mem hash let get_exn hash = try VersionTable.find versions hash @@ -72,49 +81,48 @@ let get_exn hash = VersionTable.add versions hash proto ; proto -let get hash = - try Some (get_exn hash) - with Not_found -> None +let get hash = try Some (get_exn hash) with Not_found -> None -let list () = - VersionTable.fold (fun _ p acc -> p :: acc) versions [] +let list () = VersionTable.fold (fun _ p acc -> p :: acc) versions [] -let list_embedded () = - VersionTable.fold (fun k _ acc -> k :: acc) sources [] +let list_embedded () = VersionTable.fold (fun k _ acc -> k :: acc) sources [] -let get_embedded_sources_exn hash = - VersionTable.find sources hash +let get_embedded_sources_exn hash = VersionTable.find sources hash let get_embedded_sources hash = - try Some (get_embedded_sources_exn hash) - with Not_found -> None + try Some (get_embedded_sources_exn hash) with Not_found -> None module Register_embedded (Env : Tezos_protocol_environment_shell.V1) - (Proto : Env.Updater.PROTOCOL) - (Source : sig - val hash: Protocol_hash.t option - val sources: Protocol.t - end) = struct + (Proto : Env.Updater.PROTOCOL) (Source : sig + val hash : Protocol_hash.t option + val sources : Protocol.t + end) = +struct let hash = match Source.hash with - | None -> Protocol.hash Source.sources - | Some hash -> hash + | None -> + Protocol.hash Source.sources + | Some hash -> + hash + module Self = struct module P = struct let hash = hash - include Env.Lift(Proto) + + include Env.Lift (Proto) end + include P - module Block_services = 
Block_services.Make(P)(P) + module Block_services = Block_services.Make (P) (P) + let complete_b58prefix = Env.Context.complete end + let () = - VersionTable.add - sources hash Source.sources ; - VersionTable.add - versions hash (module Self : T) + VersionTable.add sources hash Source.sources ; + VersionTable.add versions hash (module Self : T) include Self end diff --git a/src/lib_protocol_updater/registered_protocol.mli b/src/lib_protocol_updater/registered_protocol.mli index 040dd6aae1056b2cafb82ff24e7d6825a676b285..4a09f0a3226aacde1000229bea0bc5a7c3968fad 100644 --- a/src/lib_protocol_updater/registered_protocol.mli +++ b/src/lib_protocol_updater/registered_protocol.mli @@ -25,37 +25,47 @@ module type T = sig module P : sig - val hash: Protocol_hash.t + val hash : Protocol_hash.t + include Tezos_protocol_environment_shell.PROTOCOL end - include (module type of (struct include P end)) - module Block_services : - (module type of (struct include Block_services.Make(P)(P) end)) + + include module type of struct + include P + end + + module Block_services : module type of struct + include Block_services.Make (P) (P) + end + val complete_b58prefix : Context.t -> string -> string list Lwt.t end type t = (module T) -val mem: Protocol_hash.t -> bool +val mem : Protocol_hash.t -> bool + +val list : unit -> t list -val list: unit -> t list +val get : Protocol_hash.t -> t option -val get: Protocol_hash.t -> t option -val get_exn: Protocol_hash.t -> t +val get_exn : Protocol_hash.t -> t -val list_embedded: unit -> Protocol_hash.t list +val list_embedded : unit -> Protocol_hash.t list -val get_embedded_sources: Protocol_hash.t -> Protocol.t option -val get_embedded_sources_exn: Protocol_hash.t -> Protocol.t +val get_embedded_sources : Protocol_hash.t -> Protocol.t option + +val get_embedded_sources_exn : Protocol_hash.t -> Protocol.t module Register_embedded (Env : Tezos_protocol_environment_shell.V1) - (Proto : Env.Updater.PROTOCOL) - (Source : sig - val hash: Protocol_hash.t option - val sources: Protocol.t - end) : - T with type P.block_header_data = Proto.block_header_data + (Proto : Env.Updater.PROTOCOL) (Source : sig + val hash : Protocol_hash.t option + + val sources : Protocol.t + end) : + T + with type P.block_header_data = Proto.block_header_data and type P.operation_data = Proto.operation_data and type P.operation_receipt = Proto.operation_receipt and type P.validation_state = Proto.validation_state diff --git a/src/lib_protocol_updater/updater.ml b/src/lib_protocol_updater/updater.ml index 3d8d9a0ddbb9d5a502b809880af087bdebc8c10a..9f147073b92235fafd53e83bb00201393d12ad55 100644 --- a/src/lib_protocol_updater/updater.ml +++ b/src/lib_protocol_updater/updater.ml @@ -25,20 +25,21 @@ open Updater_logging -let (//) = Filename.concat +let ( // ) = Filename.concat (** Compiler *) let datadir = ref None + let get_datadir () = match !datadir with | None -> fatal_error "Node not initialized" ; Lwt_exit.exit 1 - | Some m -> m + | Some m -> + m -let init dir = - datadir := Some dir +let init dir = datadir := Some dir let compiler_name = "tezos-protocol-compiler" @@ -47,42 +48,54 @@ let do_compile hash p = let datadir = get_datadir () in let source_dir = datadir // Protocol_hash.to_short_b58check hash // "src" in let log_file = datadir // Protocol_hash.to_short_b58check hash // "LOG" in - let plugin_file = datadir // Protocol_hash.to_short_b58check hash // - Format.asprintf "protocol_%a" Protocol_hash.pp hash + let plugin_file = + datadir + // Protocol_hash.to_short_b58check hash + // Format.asprintf 
"protocol_%a" Protocol_hash.pp hash in - begin - Lwt_utils_unix.Protocol.write_dir source_dir ~hash p >>=? fun () -> - let compiler_command = - (Sys.executable_name, - Array.of_list [ compiler_name ; "-register" ; "-o" ; plugin_file ; source_dir]) in - let fd = Unix.(openfile log_file [O_WRONLY; O_CREAT; O_TRUNC] 0o644) in - Lwt_process.exec - ~stdin:`Close ~stdout:(`FD_copy fd) ~stderr:(`FD_move fd) - compiler_command >>= return - end >>= function + Lwt_utils_unix.Protocol.write_dir source_dir ~hash p + >>=? (fun () -> + let compiler_command = + ( Sys.executable_name, + Array.of_list + [compiler_name; "-register"; "-o"; plugin_file; source_dir] ) + in + let fd = + Unix.(openfile log_file [O_WRONLY; O_CREAT; O_TRUNC] 0o644) + in + Lwt_process.exec + ~stdin:`Close + ~stdout:(`FD_copy fd) + ~stderr:(`FD_move fd) + compiler_command + >>= return) + >>= function | Error err -> log_error "Error %a" pp_print_error err ; Lwt.return_false | Ok (Unix.WSIGNALED _ | Unix.WSTOPPED _) -> - log_error "INTERRUPTED COMPILATION (%s)" log_file; + log_error "INTERRUPTED COMPILATION (%s)" log_file ; Lwt.return_false | Ok (Unix.WEXITED x) when x <> 0 -> - log_error "COMPILATION ERROR (%s)" log_file; + log_error "COMPILATION ERROR (%s)" log_file ; Lwt.return_false - | Ok (Unix.WEXITED _) -> - try Dynlink.loadfile_private (plugin_file ^ ".cmxs"); Lwt.return_true - with Dynlink.Error err -> - log_error "Can't load plugin: %s (%s)" - (Dynlink.error_message err) plugin_file; - Lwt.return_false + | Ok (Unix.WEXITED _) -> ( + try + Dynlink.loadfile_private (plugin_file ^ ".cmxs") ; + Lwt.return_true + with Dynlink.Error err -> + log_error + "Can't load plugin: %s (%s)" + (Dynlink.error_message err) + plugin_file ; + Lwt.return_false ) let compile hash p = - if Tezos_protocol_registerer.Registerer.mem hash then - Lwt.return_true - else begin - do_compile hash p >>= fun success -> + if Tezos_protocol_registerer.Registerer.mem hash then Lwt.return_true + else + do_compile hash p + >>= fun success -> let loaded = Tezos_protocol_registerer.Registerer.mem hash in if success && not loaded then - log_error "Internal error while compiling %a" Protocol_hash.pp hash; + log_error "Internal error while compiling %a" Protocol_hash.pp hash ; Lwt.return loaded - end diff --git a/src/lib_protocol_updater/updater.mli b/src/lib_protocol_updater/updater.mli index f39051970749e1c247af6b41a8f9d8e58e602c97..d0cb5d5f5d4dbe2d70f0815c5774950c885ccd27 100644 --- a/src/lib_protocol_updater/updater.mli +++ b/src/lib_protocol_updater/updater.mli @@ -23,8 +23,8 @@ (* *) (*****************************************************************************) -val compile: Protocol_hash.t -> Protocol.t -> bool Lwt.t +val compile : Protocol_hash.t -> Protocol.t -> bool Lwt.t -val init: string -> unit +val init : string -> unit -val compiler_name: string +val compiler_name : string diff --git a/src/lib_protocol_updater/updater_logging.ml b/src/lib_protocol_updater/updater_logging.ml index 85882d4062c03d1c538bbd52fc2c77d6882971c5..538b2ccb05f8fdbd1e075e8679a5d8d99e3a1936 100644 --- a/src/lib_protocol_updater/updater_logging.ml +++ b/src/lib_protocol_updater/updater_logging.ml @@ -23,4 +23,6 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make(struct let name = "updater" end) +include Internal_event.Legacy_logging.Make (struct + let name = "updater" +end) diff --git a/src/lib_rpc/.ocamlformat b/src/lib_rpc/.ocamlformat new file mode 100644 index 
0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_rpc/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_rpc/RPC_answer.ml b/src/lib_rpc/RPC_answer.ml index 873bde1ae9f87d314bb9255e3a9274770499e45c..c93ead98cb7fc7d4a987fd0e4a8b41238df085a3 100644 --- a/src/lib_rpc/RPC_answer.ml +++ b/src/lib_rpc/RPC_answer.ml @@ -33,17 +33,19 @@ type 'o t = | `Forbidden of RPC_service.error option (* 403 *) | `Not_found of RPC_service.error option (* 404 *) | `Conflict of RPC_service.error option (* 409 *) - | `Error of RPC_service.error option (* 500 *) - ] + | `Error of RPC_service.error option (* 500 *) ] and 'a stream = 'a Resto_directory.Answer.stream = { - next: unit -> 'a option Lwt.t ; - shutdown: unit -> unit ; + next : unit -> 'a option Lwt.t; + shutdown : unit -> unit } let return x = Lwt.return (`Ok x) + let return_unit = Lwt.return (`Ok ()) + let return_stream x = Lwt.return (`OkStream x) let not_found = Lwt.return (`Not_found None) + let fail err = Lwt.return (`Error (Some err)) diff --git a/src/lib_rpc/RPC_answer.mli b/src/lib_rpc/RPC_answer.mli index 26bbc359fe2b5ba2f16e601c5c492ada30d1e072..c084d932d6a0dc64eafe7c6451b6ea26f11bf7c9 100644 --- a/src/lib_rpc/RPC_answer.mli +++ b/src/lib_rpc/RPC_answer.mli @@ -33,17 +33,19 @@ type 'o t = | `Forbidden of RPC_service.error option (* 403 *) | `Not_found of RPC_service.error option (* 404 *) | `Conflict of RPC_service.error option (* 409 *) - | `Error of RPC_service.error option (* 500 *) - ] + | `Error of RPC_service.error option (* 500 *) ] and 'a stream = 'a Resto_directory.Answer.stream = { - next: unit -> 'a option Lwt.t ; - shutdown: unit -> unit ; + next : unit -> 'a option Lwt.t; + shutdown : unit -> unit } -val return: 'o -> 'o t Lwt.t -val return_unit: unit t Lwt.t -val return_stream: 'o stream -> 'o t Lwt.t -val not_found: 'o t Lwt.t +val return : 'o -> 'o t Lwt.t -val fail: Error_monad.error list -> 'a t Lwt.t +val return_unit : unit t Lwt.t + +val return_stream : 'o stream -> 'o t Lwt.t + +val not_found : 'o t Lwt.t + +val fail : Error_monad.error list -> 'a t Lwt.t diff --git a/src/lib_rpc/RPC_arg.ml b/src/lib_rpc/RPC_arg.ml index 1255a33b309b71851a3b5ebb96754e4f9c0f8ed4..ee946afe5a30965709c796026589b64918296b13 100644 --- a/src/lib_rpc/RPC_arg.ml +++ b/src/lib_rpc/RPC_arg.ml @@ -24,4 +24,5 @@ (*****************************************************************************) type ('i, 'j) eq = ('i, 'j) Resto.eq = Eq : ('a, 'a) eq + include Resto.Arg diff --git a/src/lib_rpc/RPC_arg.mli b/src/lib_rpc/RPC_arg.mli index 49c341437d60d6681e75ee1f006745cc5978c854..0118407f4a6c3815a78434ebd708aa373da0c3ca 100644 --- a/src/lib_rpc/RPC_arg.mli +++ b/src/lib_rpc/RPC_arg.mli @@ -24,4 +24,7 @@ (*****************************************************************************) type ('i, 'j) eq = ('i, 'j) Resto.eq = Eq : ('a, 'a) eq -include (module type of struct include Resto.Arg end) + +include module type of struct + include Resto.Arg +end diff --git a/src/lib_rpc/RPC_context.ml b/src/lib_rpc/RPC_context.ml index e540ff1b8e566ee2e142ed7378c229cde54c9c58..ea842a14779a98c31c5152412539674261c6c9df 100644 --- a/src/lib_rpc/RPC_context.ml +++ b/src/lib_rpc/RPC_context.ml @@ -25,39 +25,46 @@ open Error_monad -class type 
['pr] gen_simple = object - method call_service : - 'm 'p 'q 'i 'o. - ([< Resto.meth ] as 'm, 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> - 'p -> 'q -> 'i -> 'o tzresult Lwt.t -end - -class type ['pr] gen_streamed = object - method call_streamed_service : - 'm 'p 'q 'i 'o. - ([< Resto.meth ] as 'm, 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> - on_chunk: ('o -> unit) -> - on_close: (unit -> unit) -> - 'p -> 'q -> 'i -> (unit -> unit) tzresult Lwt.t -end - -class type ['pr] gen = object - inherit ['pr] gen_simple - inherit ['pr] gen_streamed -end - -class type simple = object - inherit [unit] gen_simple -end - -class type streamed = object - inherit [unit] gen_streamed -end - -class type t = object - inherit simple - inherit streamed -end +class type ['pr] gen_simple = + object + method call_service : + 'm 'p 'q 'i 'o. + (([< Resto.meth] as 'm), 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> 'p -> + 'q -> 'i -> 'o tzresult Lwt.t + end + +class type ['pr] gen_streamed = + object + method call_streamed_service : + 'm 'p 'q 'i 'o. + (([< Resto.meth] as 'm), 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> + on_chunk:('o -> unit) -> on_close:(unit -> unit) -> 'p -> 'q -> 'i -> + (unit -> unit) tzresult Lwt.t + end + +class type ['pr] gen = + object + inherit ['pr] gen_simple + + inherit ['pr] gen_streamed + end + +class type simple = + object + inherit [unit] gen_simple + end + +class type streamed = + object + inherit [unit] gen_streamed + end + +class type t = + object + inherit simple + + inherit streamed + end type ('o, 'e) rest_result = [ `Ok of 'o @@ -65,123 +72,147 @@ type ('o, 'e) rest_result = | `Error of 'e | `Forbidden of 'e | `Not_found of 'e - | `Unauthorized of 'e ] tzresult + | `Unauthorized of 'e ] + tzresult -class type json = object - inherit t - method generic_json_call : - RPC_service.meth -> - ?body:Data_encoding.json -> - Uri.t -> - (Data_encoding.json, Data_encoding.json option) - rest_result Lwt.t - method base : Uri.t -end +class type json = + object + inherit t + method generic_json_call : + RPC_service.meth -> + ?body:Data_encoding.json -> + Uri.t -> + (Data_encoding.json, Data_encoding.json option) rest_result Lwt.t + + method base : Uri.t + end type error += - | Not_found of { meth: RPC_service.meth ; - uri: Uri.t } - | Generic_error of { meth: RPC_service.meth ; - uri: Uri.t } + | Not_found of {meth : RPC_service.meth; uri : Uri.t} + | Generic_error of {meth : RPC_service.meth; uri : Uri.t} let base = Uri.make ~scheme:"ocaml" () + let not_found s p q = - let { RPC_service.meth ; uri ; _ } = - RPC_service.forge_partial_request s ~base p q in - fail (Not_found { meth ; uri }) + let {RPC_service.meth; uri; _} = + RPC_service.forge_partial_request s ~base p q + in + fail (Not_found {meth; uri}) let generic_error s p q = - let { RPC_service.meth ; uri ; _ } = - RPC_service.forge_partial_request s ~base p q in - fail (Generic_error { meth ; uri }) - -class ['pr] of_directory (dir : 'pr RPC_directory.t) = object - method call_service : 'm 'p 'q 'i 'o. 
- ([< Resto.meth ] as 'm, 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> - 'p -> 'q -> 'i -> 'o tzresult Lwt.t = - fun s p q i -> - RPC_directory.transparent_lookup dir s p q i >>= function - | `Ok v -> return v - | `OkStream { next ; shutdown } -> begin - next () >>= function - | Some v -> shutdown () ; return v - | None -> shutdown () ; not_found s p q - end - | `Not_found None -> not_found s p q - | `Unauthorized (Some err) - | `Forbidden (Some err) - | `Not_found (Some err) - | `Conflict (Some err) - | `Error (Some err) -> Lwt.return_error err - | `Unauthorized None - | `Error None - | `Forbidden None - | `Created _ - | `Conflict None - | `No_content -> generic_error s p q - method call_streamed_service : 'm 'p 'q 'i 'o. - ([< Resto.meth ] as 'm, 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> - on_chunk: ('o -> unit) -> - on_close: (unit -> unit) -> - 'p -> 'q -> 'i -> (unit -> unit) tzresult Lwt.t = - fun s ~on_chunk ~on_close p q i -> - RPC_directory.transparent_lookup dir s p q i >>= function - | `OkStream { next; shutdown } -> - let rec loop () = - next () >>= function - | None -> on_close () ; Lwt.return_unit - | Some v -> on_chunk v ; loop () in - let _ = loop () in - return shutdown - | `Ok v -> - on_chunk v ; on_close () ; - return (fun () -> ()) - | `Not_found None -> not_found s p q - | `Unauthorized (Some err) - | `Forbidden (Some err) - | `Not_found (Some err) - | `Conflict (Some err) - | `Error (Some err) -> Lwt.return_error err - | `Unauthorized None - | `Error None - | `Forbidden None - | `Created _ - | `Conflict None - | `No_content -> generic_error s p q -end + let {RPC_service.meth; uri; _} = + RPC_service.forge_partial_request s ~base p q + in + fail (Generic_error {meth; uri}) + +class ['pr] of_directory (dir : 'pr RPC_directory.t) = + object + method call_service + : 'm 'p 'q 'i 'o. + (([< Resto.meth] as 'm), 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> 'p -> + 'q -> 'i -> 'o tzresult Lwt.t = + fun s p q i -> + RPC_directory.transparent_lookup dir s p q i + >>= function + | `Ok v -> + return v + | `OkStream {next; shutdown} -> ( + next () + >>= function + | Some v -> + shutdown () ; return v + | None -> + shutdown () ; not_found s p q ) + | `Not_found None -> + not_found s p q + | `Unauthorized (Some err) + | `Forbidden (Some err) + | `Not_found (Some err) + | `Conflict (Some err) + | `Error (Some err) -> + Lwt.return_error err + | `Unauthorized None + | `Error None + | `Forbidden None + | `Created _ + | `Conflict None + | `No_content -> + generic_error s p q + + method call_streamed_service + : 'm 'p 'q 'i 'o. 
+ (([< Resto.meth] as 'm), 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> + on_chunk:('o -> unit) -> on_close:(unit -> unit) -> 'p -> 'q -> 'i -> + (unit -> unit) tzresult Lwt.t = + fun s ~on_chunk ~on_close p q i -> + RPC_directory.transparent_lookup dir s p q i + >>= function + | `OkStream {next; shutdown} -> + let rec loop () = + next () + >>= function + | None -> + on_close () ; Lwt.return_unit + | Some v -> + on_chunk v ; loop () + in + let _ = loop () in + return shutdown + | `Ok v -> + on_chunk v ; + on_close () ; + return (fun () -> ()) + | `Not_found None -> + not_found s p q + | `Unauthorized (Some err) + | `Forbidden (Some err) + | `Not_found (Some err) + | `Conflict (Some err) + | `Error (Some err) -> + Lwt.return_error err + | `Unauthorized None + | `Error None + | `Forbidden None + | `Created _ + | `Conflict None + | `No_content -> + generic_error s p q + end let make_call s (ctxt : #simple) = ctxt#call_service s + let make_call1 s ctxt x = make_call s ctxt ((), x) + let make_call2 s ctxt x y = make_call s ctxt (((), x), y) + let make_call3 s ctxt x y z = make_call s ctxt ((((), x), y), z) type stopper = unit -> unit let make_streamed_call s (ctxt : #streamed) p q i = - let stream, push = Lwt_stream.create () in - let on_chunk v = push (Some v) - and on_close () = push None in - ctxt#call_streamed_service s ~on_chunk ~on_close p q i >>=? fun close -> - return (stream, close) + let (stream, push) = Lwt_stream.create () in + let on_chunk v = push (Some v) and on_close () = push None in + ctxt#call_streamed_service s ~on_chunk ~on_close p q i + >>=? fun close -> return (stream, close) let () = let open Data_encoding in - let uri_encoding = - conv - Uri.to_string - Uri.of_string - string in + let uri_encoding = conv Uri.to_string Uri.of_string string in register_error_kind `Branch ~id:"RPC_context.Not_found" ~title:"RPC lookup failed" - ~description:"RPC lookup failed. No RPC exists at the URL or the RPC tried to access non-existent data." - (obj2 - (req "method" RPC_service.meth_encoding) - (req "uri" uri_encoding)) + ~description: + "RPC lookup failed. No RPC exists at the URL or the RPC tried to access \ + non-existent data." + (obj2 (req "method" RPC_service.meth_encoding) (req "uri" uri_encoding)) ~pp:(fun ppf (meth, uri) -> - Format.fprintf ppf "Did not find service: %s %a" (RPC_service.string_of_meth meth) Uri.pp_hum uri) - (function Not_found { meth ; uri } -> Some (meth, uri) - | _ -> None) - (fun (meth, uri) -> Not_found { meth ; uri }) + Format.fprintf + ppf + "Did not find service: %s %a" + (RPC_service.string_of_meth meth) + Uri.pp_hum + uri) + (function Not_found {meth; uri} -> Some (meth, uri) | _ -> None) + (fun (meth, uri) -> Not_found {meth; uri}) diff --git a/src/lib_rpc/RPC_context.mli b/src/lib_rpc/RPC_context.mli index d85f3b8e9d593c99d7f470352f6e5321f73f44fd..e5568d04a588a490e88f48a2edf757dbaa9a8db2 100644 --- a/src/lib_rpc/RPC_context.mli +++ b/src/lib_rpc/RPC_context.mli @@ -25,39 +25,46 @@ open Error_monad -class type ['pr] gen_simple = object - method call_service : - 'm 'p 'q 'i 'o. - ([< Resto.meth ] as 'm, 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> - 'p -> 'q -> 'i -> 'o tzresult Lwt.t -end - -class type ['pr] gen_streamed = object - method call_streamed_service : - 'm 'p 'q 'i 'o. 
- ([< Resto.meth ] as 'm, 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> - on_chunk: ('o -> unit) -> - on_close: (unit -> unit) -> - 'p -> 'q -> 'i -> (unit -> unit) tzresult Lwt.t -end - -class type ['pr] gen = object - inherit ['pr] gen_simple - inherit ['pr] gen_streamed -end - -class type simple = object - inherit [unit] gen_simple -end - -class type streamed = object - inherit [unit] gen_streamed -end - -class type t = object - inherit simple - inherit streamed -end +class type ['pr] gen_simple = + object + method call_service : + 'm 'p 'q 'i 'o. + (([< Resto.meth] as 'm), 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> 'p -> + 'q -> 'i -> 'o tzresult Lwt.t + end + +class type ['pr] gen_streamed = + object + method call_streamed_service : + 'm 'p 'q 'i 'o. + (([< Resto.meth] as 'm), 'pr, 'p, 'q, 'i, 'o) RPC_service.t -> + on_chunk:('o -> unit) -> on_close:(unit -> unit) -> 'p -> 'q -> 'i -> + (unit -> unit) tzresult Lwt.t + end + +class type ['pr] gen = + object + inherit ['pr] gen_simple + + inherit ['pr] gen_streamed + end + +class type simple = + object + inherit [unit] gen_simple + end + +class type streamed = + object + inherit [unit] gen_streamed + end + +class type t = + object + inherit simple + + inherit streamed + end type ('o, 'e) rest_result = [ `Ok of 'o @@ -65,47 +72,69 @@ type ('o, 'e) rest_result = | `Error of 'e | `Forbidden of 'e | `Not_found of 'e - | `Unauthorized of 'e ] tzresult - -class type json = object - inherit t - method generic_json_call : - RPC_service.meth -> - ?body:Data_encoding.json -> - Uri.t -> - (Data_encoding.json, Data_encoding.json option) - rest_result Lwt.t - method base : Uri.t -end + | `Unauthorized of 'e ] + tzresult + +class type json = + object + inherit t + + method generic_json_call : + RPC_service.meth -> + ?body:Data_encoding.json -> + Uri.t -> + (Data_encoding.json, Data_encoding.json option) rest_result Lwt.t + + method base : Uri.t + end class ['pr] of_directory : 'pr RPC_directory.t -> ['pr] gen type error += - | Not_found of { meth: RPC_service.meth ; - uri: Uri.t } - | Generic_error of { meth: RPC_service.meth ; - uri: Uri.t } + | Not_found of {meth : RPC_service.meth; uri : Uri.t} + | Generic_error of {meth : RPC_service.meth; uri : Uri.t} val make_call : - ([< Resto.meth ], unit, 'p, 'q, 'i, 'o) RPC_service.t -> - #simple -> 'p -> 'q -> 'i -> 'o tzresult Lwt.t + ([< Resto.meth], unit, 'p, 'q, 'i, 'o) RPC_service.t -> + #simple -> + 'p -> + 'q -> + 'i -> + 'o tzresult Lwt.t val make_call1 : - ([< Resto.meth ], unit, unit * 'a, 'q, 'i, 'o) RPC_service.t -> - #simple -> 'a -> 'q -> 'i -> 'o tzresult Lwt.t + ([< Resto.meth], unit, unit * 'a, 'q, 'i, 'o) RPC_service.t -> + #simple -> + 'a -> + 'q -> + 'i -> + 'o tzresult Lwt.t val make_call2 : - ([< Resto.meth ], unit, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> - #simple -> 'a -> 'b -> 'q -> 'i -> 'o tzresult Lwt.t + ([< Resto.meth], unit, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> + #simple -> + 'a -> + 'b -> + 'q -> + 'i -> + 'o tzresult Lwt.t val make_call3 : - ([< Resto.meth ], unit, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> - #simple -> 'a -> 'b -> 'c -> 'q -> 'i -> 'o tzresult Lwt.t + ([< Resto.meth], unit, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> + #simple -> + 'a -> + 'b -> + 'c -> + 'q -> + 'i -> + 'o tzresult Lwt.t type stopper = unit -> unit val make_streamed_call : - ([< Resto.meth ], unit, 'p, 'q, 'i, 'o) RPC_service.t -> - #streamed -> 'p -> 'q -> 'i -> + ([< Resto.meth], unit, 'p, 'q, 'i, 'o) RPC_service.t -> + #streamed -> + 'p -> + 'q -> + 'i -> ('o 
Lwt_stream.t * stopper) tzresult Lwt.t - diff --git a/src/lib_rpc/RPC_description.ml b/src/lib_rpc/RPC_description.ml index b50dd708bdd30576fac6169a347c54882a54ffd7..b2fca070ef495488c9c8938b3707f7f9f3f13b4b 100644 --- a/src/lib_rpc/RPC_description.ml +++ b/src/lib_rpc/RPC_description.ml @@ -26,5 +26,4 @@ include Resto.Description let describe ctxt ?(recurse = false) path = - RPC_context.make_call1 - RPC_service.description_service ctxt path { recurse } () + RPC_context.make_call1 RPC_service.description_service ctxt path {recurse} () diff --git a/src/lib_rpc/RPC_description.mli b/src/lib_rpc/RPC_description.mli index ba3703aed5a2709ce1aa88abd13d956244a54cdf..909c9e8ac6afa9cc070492586f0a5732ac31581a 100644 --- a/src/lib_rpc/RPC_description.mli +++ b/src/lib_rpc/RPC_description.mli @@ -25,11 +25,12 @@ open Error_monad -include (module type of struct include Resto.Description end) +include module type of struct + include Resto.Description +end -val describe: +val describe : #RPC_context.simple -> ?recurse:bool -> string list -> RPC_encoding.schema directory tzresult Lwt.t - diff --git a/src/lib_rpc/RPC_directory.ml b/src/lib_rpc/RPC_directory.ml index 912beb47ccce0df114f1c175229be0e502aad20c..bba7b8611228653544d1cb184d463fbea629d3b5 100644 --- a/src/lib_rpc/RPC_directory.ml +++ b/src/lib_rpc/RPC_directory.ml @@ -24,70 +24,89 @@ (*****************************************************************************) open Error_monad - -include Resto_directory.Make(RPC_encoding) +include Resto_directory.Make (RPC_encoding) let gen_register dir service handler = - register dir service - (fun p q i -> - Lwt.catch - (fun () -> handler p q i) - (function - | Not_found -> RPC_answer.not_found - | exn -> RPC_answer.fail [Exn exn])) + register dir service (fun p q i -> + Lwt.catch + (fun () -> handler p q i) + (function + | Not_found -> RPC_answer.not_found | exn -> RPC_answer.fail [Exn exn])) let gen_register = - (gen_register - : _ -> _ -> (_ -> _ -> _ -> _ RPC_answer.t Lwt.t) -> _ - :> _ -> _ -> (_ -> _ -> _ -> [< _ RPC_answer.t ] Lwt.t) -> _) + ( gen_register + : _ -> _ -> (_ -> _ -> _ -> _ RPC_answer.t Lwt.t) -> _ + :> _ -> _ -> (_ -> _ -> _ -> [< _ RPC_answer.t] Lwt.t) -> _ ) let register dir service handler = - gen_register dir service - (fun p q i -> - handler p q i >>= function - | Ok o -> RPC_answer.return o - | Error e -> RPC_answer.fail e) + gen_register dir service (fun p q i -> + handler p q i + >>= function Ok o -> RPC_answer.return o | Error e -> RPC_answer.fail e) let opt_register dir service handler = - gen_register dir service - (fun p q i -> - handler p q i >>= function - | Ok (Some o) -> RPC_answer.return o - | Ok None -> RPC_answer.not_found - | Error e -> RPC_answer.fail e) + gen_register dir service (fun p q i -> + handler p q i + >>= function + | Ok (Some o) -> + RPC_answer.return o + | Ok None -> + RPC_answer.not_found + | Error e -> + RPC_answer.fail e) let lwt_register dir service handler = - gen_register dir service - (fun p q i -> - handler p q i >>= fun o -> - RPC_answer.return o) + gen_register dir service (fun p q i -> + handler p q i >>= fun o -> RPC_answer.return o) open Curry let register0 root s f = register root s (curry Z f) + let register1 root s f = register root s (curry (S Z) f) + let register2 root s f = register root s (curry (S (S Z)) f) + let register3 root s f = register root s (curry (S (S (S Z))) f) + let register4 root s f = register root s (curry (S (S (S (S Z)))) f) + let register5 root s f = register root s (curry (S (S (S (S (S Z))))) f) let opt_register0 
root s f = opt_register root s (curry Z f) + let opt_register1 root s f = opt_register root s (curry (S Z) f) + let opt_register2 root s f = opt_register root s (curry (S (S Z)) f) + let opt_register3 root s f = opt_register root s (curry (S (S (S Z))) f) + let opt_register4 root s f = opt_register root s (curry (S (S (S (S Z)))) f) -let opt_register5 root s f = opt_register root s (curry (S (S (S (S (S Z))))) f) + +let opt_register5 root s f = + opt_register root s (curry (S (S (S (S (S Z))))) f) let gen_register0 root s f = gen_register root s (curry Z f) + let gen_register1 root s f = gen_register root s (curry (S Z) f) + let gen_register2 root s f = gen_register root s (curry (S (S Z)) f) + let gen_register3 root s f = gen_register root s (curry (S (S (S Z))) f) + let gen_register4 root s f = gen_register root s (curry (S (S (S (S Z)))) f) -let gen_register5 root s f = gen_register root s (curry (S (S (S (S (S Z))))) f) + +let gen_register5 root s f = + gen_register root s (curry (S (S (S (S (S Z))))) f) let lwt_register0 root s f = lwt_register root s (curry Z f) + let lwt_register1 root s f = lwt_register root s (curry (S Z) f) + let lwt_register2 root s f = lwt_register root s (curry (S (S Z)) f) + let lwt_register3 root s f = lwt_register root s (curry (S (S (S Z))) f) + let lwt_register4 root s f = lwt_register root s (curry (S (S (S (S Z)))) f) -let lwt_register5 root s f = lwt_register root s (curry (S (S (S (S (S Z))))) f) + +let lwt_register5 root s f = + lwt_register root s (curry (S (S (S (S (S Z))))) f) diff --git a/src/lib_rpc/RPC_directory.mli b/src/lib_rpc/RPC_directory.mli index 1d94618ca8faeefa786a09fa6982e3cf06800d86..51bc0c084dca748d5d78925cb531d892fe85c168 100644 --- a/src/lib_rpc/RPC_directory.mli +++ b/src/lib_rpc/RPC_directory.mli @@ -25,180 +25,201 @@ open Error_monad -include module type of (struct include Resto_directory.Make(RPC_encoding) end) +include module type of struct + include Resto_directory.Make (RPC_encoding) +end (** Registring handler in service tree. *) -val register: +val register : 'prefix directory -> - ([< Resto.meth ], 'prefix, 'p, 'q, 'i, 'o) RPC_service.t -> + ([< Resto.meth], 'prefix, 'p, 'q, 'i, 'o) RPC_service.t -> ('p -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val opt_register: +val opt_register : 'prefix directory -> - ([< Resto.meth ], 'prefix, 'p, 'q, 'i, 'o) RPC_service.t -> + ([< Resto.meth], 'prefix, 'p, 'q, 'i, 'o) RPC_service.t -> ('p -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory -val gen_register: +val gen_register : 'prefix directory -> ('meth, 'prefix, 'params, 'query, 'input, 'output) RPC_service.t -> - ('params -> 'query -> 'input -> [< 'output RPC_answer.t ] Lwt.t) -> + ('params -> 'query -> 'input -> [< 'output RPC_answer.t] Lwt.t) -> 'prefix directory -val lwt_register: +val lwt_register : 'prefix directory -> - ([< Resto.meth ], 'prefix, 'p, 'q, 'i, 'o) RPC_service.t -> + ([< Resto.meth], 'prefix, 'p, 'q, 'i, 'o) RPC_service.t -> ('p -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory (** Registring handler in service tree. Curryfied variant. 
*) -val register0: +val register0 : unit directory -> ('m, unit, unit, 'q, 'i, 'o) RPC_service.t -> ('q -> 'i -> 'o tzresult Lwt.t) -> unit directory -val register1: +val register1 : 'prefix directory -> - ('m, 'prefix, unit * 'a, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, unit * 'a, 'q, 'i, 'o) RPC_service.t -> ('a -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val register2: +val register2 : 'prefix directory -> - ('m, 'prefix, (unit * 'a) * 'b, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val register3: +val register3 : 'prefix directory -> - ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val register4: +val register4 : 'prefix directory -> - ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory -val register5: +val register5 : 'prefix directory -> - ('m, 'prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'q , 'i, 'o) RPC_service.t -> + ( 'm, + 'prefix, + ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, + 'q, + 'i, + 'o ) + RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> 'o tzresult Lwt.t) -> 'prefix directory - -val opt_register0: +val opt_register0 : unit directory -> ('m, unit, unit, 'q, 'i, 'o) RPC_service.t -> ('q -> 'i -> 'o option tzresult Lwt.t) -> unit directory -val opt_register1: +val opt_register1 : 'prefix directory -> - ('m, 'prefix, unit * 'a, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, unit * 'a, 'q, 'i, 'o) RPC_service.t -> ('a -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory -val opt_register2: +val opt_register2 : 'prefix directory -> - ('m, 'prefix, (unit * 'a) * 'b, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory -val opt_register3: +val opt_register3 : 'prefix directory -> - ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory -val opt_register4: +val opt_register4 : 'prefix directory -> - ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory -val opt_register5: +val opt_register5 : 'prefix directory -> - ('m, 'prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'q , 'i, 'o) RPC_service.t -> + ( 'm, + 'prefix, + ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, + 'q, + 'i, + 'o ) + RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> 'o option tzresult Lwt.t) -> 'prefix directory - -val gen_register0: +val gen_register0 : unit directory -> ('m, unit, unit, 'q, 'i, 'o) RPC_service.t -> - ('q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> unit directory -val gen_register1: +val gen_register1 : 'prefix directory -> - ('m, 'prefix, unit * 'a, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('m, 'prefix, unit * 'a, 'q, 'i, 'o) RPC_service.t -> + ('a -> 'q -> 'i -> [< 'o 
RPC_answer.t] Lwt.t) -> 'prefix directory -val gen_register2: +val gen_register2 : 'prefix directory -> - ('m, 'prefix, (unit * 'a) * 'b, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'b -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('m, 'prefix, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> + ('a -> 'b -> 'q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> 'prefix directory -val gen_register3: +val gen_register3 : 'prefix directory -> - ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'b -> 'c -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> + ('a -> 'b -> 'c -> 'q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> 'prefix directory -val gen_register4: +val gen_register4 : 'prefix directory -> - ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q, 'i, 'o) RPC_service.t -> + ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> 'prefix directory -val gen_register5: +val gen_register5 : 'prefix directory -> - ('m, 'prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'q , 'i, 'o) RPC_service.t -> - ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> [< 'o RPC_answer.t ] Lwt.t) -> + ( 'm, + 'prefix, + ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, + 'q, + 'i, + 'o ) + RPC_service.t -> + ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> [< 'o RPC_answer.t] Lwt.t) -> 'prefix directory - -val lwt_register0: +val lwt_register0 : unit directory -> ('m, unit, unit, 'q, 'i, 'o) RPC_service.t -> ('q -> 'i -> 'o Lwt.t) -> unit directory -val lwt_register1: +val lwt_register1 : 'prefix directory -> - ('m, 'prefix, unit * 'a, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, unit * 'a, 'q, 'i, 'o) RPC_service.t -> ('a -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory -val lwt_register2: +val lwt_register2 : 'prefix directory -> - ('m, 'prefix, (unit * 'a) * 'b, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (unit * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory -val lwt_register3: +val lwt_register3 : 'prefix directory -> - ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, ((unit * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory -val lwt_register4: +val lwt_register4 : 'prefix directory -> - ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q , 'i, 'o) RPC_service.t -> + ('m, 'prefix, (((unit * 'a) * 'b) * 'c) * 'd, 'q, 'i, 'o) RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory -val lwt_register5: +val lwt_register5 : 'prefix directory -> - ('m, 'prefix, ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, 'q , 'i, 'o) RPC_service.t -> + ( 'm, + 'prefix, + ((((unit * 'a) * 'b) * 'c) * 'd) * 'e, + 'q, + 'i, + 'o ) + RPC_service.t -> ('a -> 'b -> 'c -> 'd -> 'e -> 'q -> 'i -> 'o Lwt.t) -> 'prefix directory - - diff --git a/src/lib_rpc/RPC_encoding.ml b/src/lib_rpc/RPC_encoding.ml index 40bc43cc9f2e119ad3e586d249d096b4bd062e2e..b67be07ecab418380e0b4632c1810becd934d688 100644 --- a/src/lib_rpc/RPC_encoding.ml +++ b/src/lib_rpc/RPC_encoding.ml @@ -24,13 +24,18 @@ (*****************************************************************************) type 'a t = 'a Data_encoding.t + type schema = Data_encoding.json_schema * Data_encoding.Binary_schema.t + let unit = Data_encoding.empty + let untyped = Data_encoding.(obj1 (req "untyped" string)) + let conv f g t = 
Data_encoding.conv ~schema:(Data_encoding.Json.schema t) f g t + let schema ?definitions_path t = - (Data_encoding.Json.schema ?definitions_path t, - Data_encoding.Binary.describe t) + ( Data_encoding.Json.schema ?definitions_path t, + Data_encoding.Binary.describe t ) let schema_encoding = let open Data_encoding in @@ -43,74 +48,88 @@ module StringMap = Resto.StringMap let arg_encoding = let open Data_encoding in conv - (fun {Resto.Arg.name; descr} -> ((),name, descr)) - (fun ((),name, descr) -> {name; descr}) - (obj3 (req "id" (constant "single")) (req "name" string) (opt "descr" string)) + (fun {Resto.Arg.name; descr} -> ((), name, descr)) + (fun ((), name, descr) -> {name; descr}) + (obj3 + (req "id" (constant "single")) + (req "name" string) + (opt "descr" string)) let multi_arg_encoding = let open Data_encoding in conv - (fun {Resto.Arg.name; descr} -> ((),name, descr)) - (fun ((),name, descr) -> {name; descr}) - (obj3 (req "id" (constant "multiple")) (req "name" string) (opt "descr" string)) + (fun {Resto.Arg.name; descr} -> ((), name, descr)) + (fun ((), name, descr) -> {name; descr}) + (obj3 + (req "id" (constant "multiple")) + (req "name" string) + (opt "descr" string)) open Resto.Description let meth_encoding = Data_encoding.string_enum - [ "GET", `GET ; - "POST", `POST ; - "DELETE", `DELETE ; - "PUT", `PUT ; - "PATCH", `PATCH ] + [ ("GET", `GET); + ("POST", `POST); + ("DELETE", `DELETE); + ("PUT", `PUT); + ("PATCH", `PATCH) ] let path_item_encoding = let open Data_encoding in - union [ - case (Tag 0) string - ~title:"PStatic" - (function PStatic s -> Some s | _ -> None) - (fun s -> PStatic s) ; - case (Tag 1) arg_encoding - ~title:"PDynamic" - (function PDynamic s -> Some s | _ -> None) - (fun s -> PDynamic s) ; - case (Tag 2) multi_arg_encoding - ~title:"PDynamicTail" - (function PDynamicTail s -> Some s | _ -> None) - (fun s -> PDynamicTail s) ; - ] + union + [ case + (Tag 0) + string + ~title:"PStatic" + (function PStatic s -> Some s | _ -> None) + (fun s -> PStatic s); + case + (Tag 1) + arg_encoding + ~title:"PDynamic" + (function PDynamic s -> Some s | _ -> None) + (fun s -> PDynamic s); + case + (Tag 2) + multi_arg_encoding + ~title:"PDynamicTail" + (function PDynamicTail s -> Some s | _ -> None) + (fun s -> PDynamicTail s) ] let query_kind_encoding = let open Data_encoding in - union [ - case (Tag 0) - ~title:"Single" - (obj1 (req "single" arg_encoding)) - (function Single s -> Some s | _ -> None) - (fun s -> Single s) ; - case (Tag 1) - ~title:"Optional" - (obj1 (req "optional" arg_encoding)) - (function Optional s -> Some s | _ -> None) - (fun s -> Optional s) ; - case (Tag 2) - ~title:"Flag" - (obj1 (req "flag" empty)) - (function Flag -> Some () | _ -> None) - (fun () -> Flag) ; - case (Tag 3) - ~title:"Multi" - (obj1 (req "multi" arg_encoding)) - (function Multi s -> Some s | _ -> None) - (fun s -> Multi s) ; - ] + union + [ case + (Tag 0) + ~title:"Single" + (obj1 (req "single" arg_encoding)) + (function Single s -> Some s | _ -> None) + (fun s -> Single s); + case + (Tag 1) + ~title:"Optional" + (obj1 (req "optional" arg_encoding)) + (function Optional s -> Some s | _ -> None) + (fun s -> Optional s); + case + (Tag 2) + ~title:"Flag" + (obj1 (req "flag" empty)) + (function Flag -> Some () | _ -> None) + (fun () -> Flag); + case + (Tag 3) + ~title:"Multi" + (obj1 (req "multi" arg_encoding)) + (function Multi s -> Some s | _ -> None) + (fun s -> Multi s) ] let query_item_encoding = let open Data_encoding in conv - (fun { name ; description ; kind } -> (name, 
description, kind)) - (fun (name, description, kind) -> { name ; description ; kind }) + (fun {name; description; kind} -> (name, description, kind)) + (fun (name, description, kind) -> {name; description; kind}) (obj3 (req "name" string) (opt "description" string) @@ -119,10 +138,10 @@ let query_item_encoding = let service_descr_encoding = let open Data_encoding in conv - (fun { meth ; path ; description ; query ; input ; output ; error } -> - (meth, path, description, query, input, output, error)) + (fun {meth; path; description; query; input; output; error} -> + (meth, path, description, query, input, output, error)) (fun (meth, path, description, query, input, output, error) -> - { meth ; path ; description ; query ; input ; output ; error }) + {meth; path; description; query; input; output; error}) (obj7 (req "meth" meth_encoding) (req "path" (list path_item_encoding)) @@ -134,74 +153,88 @@ let service_descr_encoding = let directory_descr_encoding = let open Data_encoding in - mu "service_tree" @@ fun directory_descr_encoding -> + mu "service_tree" + @@ fun directory_descr_encoding -> let static_subdirectories_descr_encoding = - union [ - case (Tag 0) - ~title:"Suffixes" - (obj1 (req "suffixes" - (list (obj2 (req "name" string) - (req "tree" directory_descr_encoding))))) - (function Suffixes map -> - Some (StringMap.bindings map) | _ -> None) - (fun m -> - let add acc (n,t) = StringMap.add n t acc in - Suffixes (List.fold_left add StringMap.empty m)) ; - case (Tag 1) - ~title:"Arg" - (obj1 (req "dynamic_dispatch" - (obj2 - (req "arg" arg_encoding) - (req "tree" directory_descr_encoding)))) - (function Arg (ty, tree) -> Some (ty, tree) | _ -> None) - (fun (ty, tree) -> Arg (ty, tree)) - ] in - + union + [ case + (Tag 0) + ~title:"Suffixes" + (obj1 + (req + "suffixes" + (list + (obj2 + (req "name" string) + (req "tree" directory_descr_encoding))))) + (function + | Suffixes map -> Some (StringMap.bindings map) | _ -> None) + (fun m -> + let add acc (n, t) = StringMap.add n t acc in + Suffixes (List.fold_left add StringMap.empty m)); + case + (Tag 1) + ~title:"Arg" + (obj1 + (req + "dynamic_dispatch" + (obj2 + (req "arg" arg_encoding) + (req "tree" directory_descr_encoding)))) + (function Arg (ty, tree) -> Some (ty, tree) | _ -> None) + (fun (ty, tree) -> Arg (ty, tree)) ] + in let static_directory_descr_encoding = conv - (fun { services ; subdirs } -> - let find s = - try Some (Resto.MethMap.find s services) with Not_found -> None in - (find `GET, find `POST, find `DELETE, - find `PUT, find `PATCH, subdirs)) + (fun {services; subdirs} -> + let find s = + try Some (Resto.MethMap.find s services) with Not_found -> None + in + (find `GET, find `POST, find `DELETE, find `PUT, find `PATCH, subdirs)) (fun (get, post, delete, put, patch, subdirs) -> - let add meth s services = - match s with - | None -> services - | Some s -> Resto.MethMap.add meth s services in - let services = - Resto.MethMap.empty - |> add `GET get - |> add `POST post - |> add `DELETE delete - |> add `PUT put - |> add `PATCH patch in - { services ; subdirs }) + let add meth s services = + match s with + | None -> + services + | Some s -> + Resto.MethMap.add meth s services + in + let services = + Resto.MethMap.empty + |> add `GET get + |> add `POST post + |> add `DELETE delete + |> add `PUT put + |> add `PATCH patch + in + {services; subdirs}) (obj6 (opt "get_service" service_descr_encoding) (opt "post_service" service_descr_encoding) (opt "delete_service" service_descr_encoding) (opt "put_service" service_descr_encoding) 
(opt "patch_service" service_descr_encoding) - (opt "subdirs" static_subdirectories_descr_encoding)) in - union [ - case (Tag 0) - ~title:"Static" - (obj1 (req "static" static_directory_descr_encoding)) - (function Static descr -> Some descr | _ -> None) - (fun descr -> Static descr) ; - case (Tag 1) - ~title:"Dynamic" - (obj1 (req "dynamic" (option string))) - (function Dynamic descr -> Some descr | _ -> None) - (fun descr -> Dynamic descr) ; - ] + (opt "subdirs" static_subdirectories_descr_encoding)) + in + union + [ case + (Tag 0) + ~title:"Static" + (obj1 (req "static" static_directory_descr_encoding)) + (function Static descr -> Some descr | _ -> None) + (fun descr -> Static descr); + case + (Tag 1) + ~title:"Dynamic" + (obj1 (req "dynamic" (option string))) + (function Dynamic descr -> Some descr | _ -> None) + (fun descr -> Dynamic descr) ] let description_request_encoding = let open Data_encoding in conv - (fun { recurse } -> recurse) - (function recurse -> { recurse }) + (fun {recurse} -> recurse) + (function recurse -> {recurse}) (obj1 (dft "recursive" bool false)) let description_answer_encoding = directory_descr_encoding diff --git a/src/lib_rpc/RPC_encoding.mli b/src/lib_rpc/RPC_encoding.mli index 7e48521df09cfecf6acaa0d898c96e1ca44a6f8e..ef50b5bbaedbcb5458a0d6ee6df5380322771602 100644 --- a/src/lib_rpc/RPC_encoding.mli +++ b/src/lib_rpc/RPC_encoding.mli @@ -25,6 +25,5 @@ type schema = Data_encoding.json_schema * Data_encoding.Binary_schema.t -include Resto.ENCODING with type 'a t = 'a Data_encoding.t - and type schema := schema - +include + Resto.ENCODING with type 'a t = 'a Data_encoding.t and type schema := schema diff --git a/src/lib_rpc/RPC_error.ml b/src/lib_rpc/RPC_error.ml index 94cb6ca84285e684a3401d36ec44d56154f47abd..1a0a1f4c9402ff83d21e2bf45aa0daf03869e817 100644 --- a/src/lib_rpc/RPC_error.ml +++ b/src/lib_rpc/RPC_error.ml @@ -23,7 +23,6 @@ (* *) (*****************************************************************************) -let list ctxt = - RPC_context.make_call RPC_service.error_service ctxt () () () +let list ctxt = RPC_context.make_call RPC_service.error_service ctxt () () () let encoding = RPC_service.error_encoding diff --git a/src/lib_rpc/RPC_error.mli b/src/lib_rpc/RPC_error.mli index 9a8fc13f12f209685a6cab0ba60d3bab073b652c..15d3a304d17e08fc4e9802d8d0d10085df6fdc09 100644 --- a/src/lib_rpc/RPC_error.mli +++ b/src/lib_rpc/RPC_error.mli @@ -25,6 +25,6 @@ open Error_monad -val list: #RPC_context.simple -> Json_schema.schema tzresult Lwt.t +val list : #RPC_context.simple -> Json_schema.schema tzresult Lwt.t -val encoding: error list Data_encoding.t +val encoding : error list Data_encoding.t diff --git a/src/lib_rpc/RPC_path.mli b/src/lib_rpc/RPC_path.mli index e43ea6e480557c24e21a7aa34bac4becb046a4ca..ee3f32ef1d9ce8ef176841df34c192e63f1c5c9d 100644 --- a/src/lib_rpc/RPC_path.mli +++ b/src/lib_rpc/RPC_path.mli @@ -23,4 +23,6 @@ (* *) (*****************************************************************************) -include (module type of struct include Resto.Path end) +include module type of struct + include Resto.Path +end diff --git a/src/lib_rpc/RPC_query.mli b/src/lib_rpc/RPC_query.mli index 648623a93e8ef36fa43b62548d1db1bcbebd3324..7904063cb7ce63d7c9764635394cdd5f2a36e699 100644 --- a/src/lib_rpc/RPC_query.mli +++ b/src/lib_rpc/RPC_query.mli @@ -23,4 +23,6 @@ (* *) (*****************************************************************************) -include (module type of struct include Resto.Query end) +include module type of struct + include Resto.Query 
+end diff --git a/src/lib_rpc/RPC_service.ml b/src/lib_rpc/RPC_service.ml index 437bd5cd2578ebd758851ed5ee1592aff667cdf6..8851c2e4bf1cfbee5518e8ef186d17cc2abf651c 100644 --- a/src/lib_rpc/RPC_service.ml +++ b/src/lib_rpc/RPC_service.ml @@ -23,9 +23,10 @@ (* *) (*****************************************************************************) -type meth = [ `GET | `POST | `DELETE | `PUT | `PATCH ] +type meth = [`GET | `POST | `DELETE | `PUT | `PATCH] let string_of_meth = Resto.string_of_meth + let meth_of_string = Resto.meth_of_string let meth_encoding = @@ -33,74 +34,83 @@ let meth_encoding = conv string_of_meth (fun m -> - match meth_of_string m with - | None -> Pervasives.failwith "Cannot parse methods" - | Some s -> s) + match meth_of_string m with + | None -> + Pervasives.failwith "Cannot parse methods" + | Some s -> + s) string module MethMap = Resto.MethMap -type (+'m,'pr,'p,'q,'i,'o, 'e) raw = - ('m,'pr,'p,'q,'i,'o, 'e) Resto.MakeService(RPC_encoding).t - constraint 'meth = [< meth ] +type (+'m, 'pr, 'p, 'q, 'i, 'o, 'e) raw = + ('m, 'pr, 'p, 'q, 'i, 'o, 'e) Resto.MakeService(RPC_encoding).t + constraint 'meth = [< meth] type error = Error_monad.error list type (+'meth, 'prefix, 'params, 'query, 'input, 'output) t = ('meth, 'prefix, 'params, 'query, 'input, 'output, error) raw - constraint 'meth = [< meth ] + constraint 'meth = [< meth] type (+'meth, 'prefix, 'params, 'query, 'input, 'output) service = ('meth, 'prefix, 'params, 'query, 'input, 'output, error) raw - constraint 'meth = [< meth ] - -include (Resto.MakeService(RPC_encoding) - : (module type of struct include Resto.MakeService(RPC_encoding) end - with type (+'m,'pr,'p,'q,'i,'o, 'e) t := ('m,'pr,'p,'q,'i,'o, 'e) raw - and type (+'m,'pr,'p,'q,'i,'o, 'e) service := ('m,'pr,'p,'q,'i,'o, 'e) raw) - ) - + constraint 'meth = [< meth] + +include ( + Resto.MakeService + (RPC_encoding) : + module type of struct + include Resto.MakeService (RPC_encoding) + end + with type (+'m, 'pr, 'p, 'q, 'i, 'o, 'e) t := + ('m, 'pr, 'p, 'q, 'i, 'o, 'e) raw + and type (+'m, 'pr, 'p, 'q, 'i, 'o, 'e) service := + ('m, 'pr, 'p, 'q, 'i, 'o, 'e) raw ) let error_path = ref None let error_encoding = let open Data_encoding in - delayed begin fun () -> - let { meth ; uri ; _ } = - match !error_path with - | None -> assert false - | Some p -> p in - def - "error" - ~description: - (Printf.sprintf - "The full list of error is available with \ - the global RPC `%s %s`" - (string_of_meth meth) (Uri.path_and_query uri)) @@ - conv - ~schema:Json_schema.any - (fun exn -> `A (List.map Error_monad.json_of_error exn)) - (function `A exns -> List.map Error_monad.error_of_json exns | _ -> []) - json - end + delayed (fun () -> + let {meth; uri; _} = + match !error_path with None -> assert false | Some p -> p + in + def + "error" + ~description: + (Printf.sprintf + "The full list of error is available with the global RPC `%s %s`" + (string_of_meth meth) + (Uri.path_and_query uri)) + @@ conv + ~schema:Json_schema.any + (fun exn -> `A (List.map Error_monad.json_of_error exn)) + (function + | `A exns -> List.map Error_monad.error_of_json exns | _ -> []) + json) let get_service = get_service ~error:error_encoding + let post_service = post_service ~error:error_encoding + let delete_service = delete_service ~error:error_encoding + let patch_service = patch_service ~error:error_encoding + let put_service = put_service ~error:error_encoding let error_service = get_service - ~description: "Schema for all the RPC errors from the shell" - ~query: RPC_query.empty - ~output: 
Data_encoding.json_schema + ~description:"Schema for all the RPC errors from the shell" + ~query:RPC_query.empty + ~output:Data_encoding.json_schema RPC_path.(root / "errors") let () = error_path := Some (forge_request error_service () ()) let description_service = description_service - ~description: "RPCs documentation and input/output schema" + ~description:"RPCs documentation and input/output schema" error_encoding RPC_path.(root / "describe") diff --git a/src/lib_rpc/RPC_service.mli b/src/lib_rpc/RPC_service.mli index a7b0ac934b396f37b232a1c8a3fd762e937cffc9..c9f1b5ee1a09afc8f3717a55d8b52290d20f4bbe 100644 --- a/src/lib_rpc/RPC_service.mli +++ b/src/lib_rpc/RPC_service.mli @@ -23,78 +23,88 @@ (* *) (*****************************************************************************) -type meth = [ `GET | `POST | `DELETE | `PUT | `PATCH ] +type meth = [`GET | `POST | `DELETE | `PUT | `PATCH] -val string_of_meth: [< meth ] -> string -val meth_of_string: string -> [> meth ] option -val meth_encoding: meth Data_encoding.t +val string_of_meth : [< meth] -> string + +val meth_of_string : string -> [> meth] option + +val meth_encoding : meth Data_encoding.t module MethMap = Resto.MethMap -type (+'m,'pr,'p,'q,'i,'o, 'e) raw = - ('m,'pr,'p,'q,'i,'o, 'e) Resto.MakeService(RPC_encoding).t - constraint 'meth = [< meth ] +type (+'m, 'pr, 'p, 'q, 'i, 'o, 'e) raw = + ('m, 'pr, 'p, 'q, 'i, 'o, 'e) Resto.MakeService(RPC_encoding).t + constraint 'meth = [< meth] type error = Error_monad.error list type (+'meth, 'prefix, 'params, 'query, 'input, 'output) t = ('meth, 'prefix, 'params, 'query, 'input, 'output, error) raw - constraint 'meth = [< meth ] + constraint 'meth = [< meth] type (+'meth, 'prefix, 'params, 'query, 'input, 'output) service = ('meth, 'prefix, 'params, 'query, 'input, 'output, error) raw - constraint 'meth = [< meth ] - -include (module type of struct include Resto.MakeService(RPC_encoding) end - with type (+'m,'pr,'p,'q,'i,'o, 'e) t := ('m,'pr,'p,'q,'i,'o, 'e) raw - and type (+'m,'pr,'p,'q,'i,'o, 'e) service := ('m,'pr,'p,'q,'i,'o, 'e) raw) - -val get_service: - ?description: string -> - query: 'query RPC_query.t -> - output: 'output Data_encoding.t -> + constraint 'meth = [< meth] + +include module type of struct + include Resto.MakeService (RPC_encoding) + end + with type (+'m, 'pr, 'p, 'q, 'i, 'o, 'e) t := + ('m, 'pr, 'p, 'q, 'i, 'o, 'e) raw + and type (+'m, 'pr, 'p, 'q, 'i, 'o, 'e) service := + ('m, 'pr, 'p, 'q, 'i, 'o, 'e) raw + +val get_service : + ?description:string -> + query:'query RPC_query.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `GET ], 'prefix, 'params, 'query, unit, 'output) service + ([`GET], 'prefix, 'params, 'query, unit, 'output) service -val post_service: - ?description: string -> +val post_service : + ?description:string -> query:'query RPC_query.t -> - input: 'input Data_encoding.t -> - output: 'output Data_encoding.t -> + input:'input Data_encoding.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `POST ], 'prefix, 'params, 'query, 'input, 'output) service + ([`POST], 'prefix, 'params, 'query, 'input, 'output) service -val delete_service: - ?description: string -> +val delete_service : + ?description:string -> query:'query RPC_query.t -> - output: 'output Data_encoding.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `DELETE ], 'prefix, 'params, 'query, unit, 'output) service + ([`DELETE], 'prefix, 'params, 'query, unit, 'output) service -val patch_service: - ?description: 
string -> +val patch_service : + ?description:string -> query:'query RPC_query.t -> - input: 'input Data_encoding.t -> - output: 'output Data_encoding.t -> + input:'input Data_encoding.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `PATCH ], 'prefix, 'params, 'query, 'input, 'output) service + ([`PATCH], 'prefix, 'params, 'query, 'input, 'output) service -val put_service: - ?description: string -> +val put_service : + ?description:string -> query:'query RPC_query.t -> - input: 'input Data_encoding.t -> - output: 'output Data_encoding.t -> + input:'input Data_encoding.t -> + output:'output Data_encoding.t -> ('prefix, 'params) RPC_path.t -> - ([ `PUT ], 'prefix, 'params, 'query, 'input, 'output) service - + ([`PUT], 'prefix, 'params, 'query, 'input, 'output) service (**/**) -val description_service: - ([ `GET ], unit, unit * string list, Resto.Description.request, - unit, RPC_encoding.schema Resto.Description.directory) service +val description_service : + ( [`GET], + unit, + unit * string list, + Resto.Description.request, + unit, + RPC_encoding.schema Resto.Description.directory ) + service -val error_service: - ([ `GET ], unit, unit, unit, unit, Json_schema.schema) service +val error_service : + ([`GET], unit, unit, unit, unit, Json_schema.schema) service -val error_encoding: error Data_encoding.t +val error_encoding : error Data_encoding.t diff --git a/src/lib_rpc_http/.ocamlformat b/src/lib_rpc_http/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_rpc_http/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_rpc_http/RPC_client.ml b/src/lib_rpc_http/RPC_client.ml index 3c323455fe03ad5b7ffda9bd043a754789b0fe30..8e4bc3b48ebb893d7a5e23876f1a9e50c34353f7 100644 --- a/src/lib_rpc_http/RPC_client.ml +++ b/src/lib_rpc_http/RPC_client.ml @@ -23,12 +23,16 @@ (* *) (*****************************************************************************) -module Client = Resto_cohttp.Client.Make(RPC_encoding) +module Client = Resto_cohttp.Client.Make (RPC_encoding) module type LOGGER = Client.LOGGER + type logger = (module LOGGER) + let null_logger = Client.null_logger + let timings_logger = Client.timings_logger + let full_logger = Client.full_logger type rpc_error = @@ -37,69 +41,77 @@ type rpc_error = | Bad_request of string | Method_not_allowed of RPC_service.meth list | Unsupported_media_type of string option - | Not_acceptable of { proposed: string ; acceptable: string } - | Unexpected_status_code of { code: Cohttp.Code.status_code ; - content: string ; - media_type: string option } - | Unexpected_content_type of { received: string ; - acceptable: string list ; - body : string} - | Unexpected_content of { content: string ; - media_type: string ; - error: string } + | Not_acceptable of {proposed : string; acceptable : string} + | Unexpected_status_code of + { code : Cohttp.Code.status_code; + content : string; + media_type : string option } + | Unexpected_content_type of + { received : string; + acceptable : string list; + body : string } + | Unexpected_content of + { content : string; + media_type : string; + error : string } | OCaml_exception of string | Unauthorized_host of string option let 
rpc_error_encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Empty_answer" - (obj1 - (req "kind" (constant "empty_answer"))) + (obj1 (req "kind" (constant "empty_answer"))) (function Empty_answer -> Some () | _ -> None) - (fun () -> Empty_answer) ; - case (Tag 1) + (fun () -> Empty_answer); + case + (Tag 1) ~title:"Connection_failed" (obj2 (req "kind" (constant "connection_failed")) (req "message" string)) (function Connection_failed msg -> Some ((), msg) | _ -> None) - (function (), msg -> Connection_failed msg) ; - case (Tag 2) + (function ((), msg) -> Connection_failed msg); + case + (Tag 2) ~title:"Bad_request" - (obj2 - (req "kind" (constant "bad_request")) - (req "message" string)) + (obj2 (req "kind" (constant "bad_request")) (req "message" string)) (function Bad_request msg -> Some ((), msg) | _ -> None) - (function (), msg -> Bad_request msg) ; - case (Tag 3) + (function ((), msg) -> Bad_request msg); + case + (Tag 3) ~title:"Method_not_allowed" (obj2 (req "kind" (constant "method_not_allowed")) (req "allowed" (list RPC_service.meth_encoding))) (function Method_not_allowed meths -> Some ((), meths) | _ -> None) - (function ((), meths) -> Method_not_allowed meths) ; - case (Tag 4) + (function ((), meths) -> Method_not_allowed meths); + case + (Tag 4) ~title:"Unsupported_media_type" (obj2 (req "kind" (constant "unsupported_media_type")) (opt "content_type" string)) (function Unsupported_media_type m -> Some ((), m) | _ -> None) - (function ((), m) -> Unsupported_media_type m) ; - case (Tag 5) + (function ((), m) -> Unsupported_media_type m); + case + (Tag 5) ~title:"Not_acceptable" (obj3 (req "kind" (constant "not_acceptable")) (req "proposed" string) (req "acceptable" string)) (function - | Not_acceptable { proposed ; acceptable } -> + | Not_acceptable {proposed; acceptable} -> Some ((), proposed, acceptable) - | _ -> None) - (function ((), proposed, acceptable) -> - Not_acceptable { proposed ; acceptable }) ; - case (Tag 6) + | _ -> + None) + (function + | ((), proposed, acceptable) -> Not_acceptable {proposed; acceptable}); + case + (Tag 6) ~title:"Unexpected_status_code" (obj4 (req "kind" (constant "unexpected_status_code")) @@ -107,13 +119,16 @@ let rpc_error_encoding = (req "content" string) (opt "media_type" string)) (function - | Unexpected_status_code { code ; content ; media_type } -> + | Unexpected_status_code {code; content; media_type} -> Some ((), Cohttp.Code.code_of_status code, content, media_type) - | _ -> None) - (function ((), code, content, media_type) -> - let code = Cohttp.Code.status_of_code code in - Unexpected_status_code { code ; content ; media_type }) ; - case (Tag 7) + | _ -> + None) + (function + | ((), code, content, media_type) -> + let code = Cohttp.Code.status_of_code code in + Unexpected_status_code {code; content; media_type}); + case + (Tag 7) ~title:"Unexpected_content_type" (obj4 (req "kind" (constant "unexpected_content_type")) @@ -121,12 +136,15 @@ let rpc_error_encoding = (req "acceptable" (list string)) (req "body" string)) (function - | Unexpected_content_type { received ; acceptable ; body } -> + | Unexpected_content_type {received; acceptable; body} -> Some ((), received, acceptable, body) - | _ -> None) - (function ((), received, acceptable, body) -> - Unexpected_content_type { received ; acceptable ; body }) ; - case (Tag 8) + | _ -> + None) + (function + | ((), received, acceptable, body) -> + Unexpected_content_type {received; acceptable; body}); + case + (Tag 8) ~title:"Unexpected_content" (obj4 
(req "kind" (constant "unexpected_content")) @@ -134,131 +152,152 @@ let rpc_error_encoding = (req "media_type" string) (req "error" string)) (function - | Unexpected_content { content ; media_type ; error } -> + | Unexpected_content {content; media_type; error} -> Some ((), content, media_type, error) - | _ -> None) - (function ((), content, media_type, error) -> - Unexpected_content { content ; media_type ; error }) ; - case (Tag 9) + | _ -> + None) + (function + | ((), content, media_type, error) -> + Unexpected_content {content; media_type; error}); + case + (Tag 9) ~title:"OCaml_exception" - (obj2 - (req "kind" (constant "ocaml_exception")) - (req "content" string)) + (obj2 (req "kind" (constant "ocaml_exception")) (req "content" string)) (function OCaml_exception msg -> Some ((), msg) | _ -> None) - (function ((), msg) -> OCaml_exception msg) ; - ] + (function ((), msg) -> OCaml_exception msg) ] let pp_rpc_error ppf err = match err with | Empty_answer -> - Format.fprintf ppf - "The server answered with an empty response." + Format.fprintf ppf "The server answered with an empty response." | Connection_failed msg -> - Format.fprintf ppf - "Unable to connect to the node: \"%s\"" msg + Format.fprintf ppf "Unable to connect to the node: \"%s\"" msg | Bad_request msg -> - Format.fprintf ppf + Format.fprintf + ppf "@[<v 2>Oups! It looks like we forged an invalid HTTP request.@,%s@]" msg | Method_not_allowed meths -> - Format.fprintf ppf + Format.fprintf + ppf "@[<v 2>The requested service only accepts the following method:@ %a@]" - (Format.pp_print_list - (fun ppf m -> Format.pp_print_string ppf (RPC_service.string_of_meth m))) + (Format.pp_print_list (fun ppf m -> + Format.pp_print_string ppf (RPC_service.string_of_meth m))) meths | Unsupported_media_type None -> - Format.fprintf ppf + Format.fprintf + ppf "@[<v 2>The server wants to known the media type we used.@]" | Unsupported_media_type (Some media) -> - Format.fprintf ppf + Format.fprintf + ppf "@[<v 2>The server does not support the media type we used: %s.@]" media - | Not_acceptable { proposed ; acceptable } -> - Format.fprintf ppf - "@[<v 2>No intersection between the media types we accept and \ - \ the ones the server is able to send.@,\ + | Not_acceptable {proposed; acceptable} -> + Format.fprintf + ppf + "@[<v 2>No intersection between the media types we accept and the \ + ones the server is able to send.@,\ \ We proposed: %s@,\ \ The server is only able to serve: %s." 
- proposed acceptable - | Unexpected_status_code { code ; content ; _ } -> - Format.fprintf ppf + proposed + acceptable + | Unexpected_status_code {code; content; _} -> + Format.fprintf + ppf "@[<v 2>Unexpected error %d:@,%S" - (Cohttp.Code.code_of_status code) content - | Unexpected_content_type { received ; acceptable = _ ; body } -> - Format.fprintf ppf - "@[<v 0>The server answered with a media type we do not understand: %s.@,\ + (Cohttp.Code.code_of_status code) + content + | Unexpected_content_type {received; acceptable = _; body} -> + Format.fprintf + ppf + "@[<v 0>The server answered with a media type we do not understand: \ + %s.@,\ The response body was:@,\ - %s@]" received body - | Unexpected_content { content ; media_type ; error } -> - Format.fprintf ppf - "@[<v 2>Failed to parse the answer (%s):@,@[<v 2>error:@ %s@]@,@[<v 2>content:@ %S@]@]" - media_type error content + %s@]" + received + body + | Unexpected_content {content; media_type; error} -> + Format.fprintf + ppf + "@[<v 2>Failed to parse the answer (%s):@,\ + @[<v 2>error:@ %s@]@,\ + @[<v 2>content:@ %S@]@]" + media_type + error + content | OCaml_exception msg -> - Format.fprintf ppf + Format.fprintf + ppf "@[<v 2>The server failed with an unexpected exception:@ %s@]" msg | Unauthorized_host host -> - Format.fprintf ppf - "@[<v 2>The server refused connection to host \"%s\", \ - please check the node settings for CORS allowed origins.@]" + Format.fprintf + ppf + "@[<v 2>The server refused connection to host \"%s\", please check \ + the node settings for CORS allowed origins.@]" (Option.unopt ~default:"" host) type error += - | Request_failed of { meth: RPC_service.meth ; - uri: Uri.t ; - error: rpc_error } + | Request_failed of {meth : RPC_service.meth; uri : Uri.t; error : rpc_error} let uri_encoding = let open Data_encoding in - conv - Uri.to_string - Uri.of_string - string + conv Uri.to_string Uri.of_string string let () = - register_error_kind `Permanent + register_error_kind + `Permanent ~id:"rpc_client.request_failed" ~title:"" ~description:"" ~pp:(fun ppf (meth, uri, error) -> - Format.fprintf ppf - "@[<v 2>Rpc request failed:@ \ - \ - meth: %s@ \ - \ - uri: %s@ \ - \ - error: %a@]" - (RPC_service.string_of_meth meth) - (Uri.to_string uri) - pp_rpc_error error) - Data_encoding.(obj3 - (req "meth" RPC_service.meth_encoding) - (req "uri" uri_encoding) - (req "error" rpc_error_encoding)) + Format.fprintf + ppf + "@[<v 2>Rpc request failed:@ - meth: %s@ - uri: %s@ - error: %a@]" + (RPC_service.string_of_meth meth) + (Uri.to_string uri) + pp_rpc_error + error) + Data_encoding.( + obj3 + (req "meth" RPC_service.meth_encoding) + (req "uri" uri_encoding) + (req "error" rpc_error_encoding)) (function - | Request_failed { uri ; error ; meth } -> Some (meth, uri, error) - | _ -> None) - (fun (meth, uri, error) -> Request_failed { uri ; meth ; error }) + | Request_failed {uri; error; meth} -> + Some (meth, uri, error) + | _ -> + None) + (fun (meth, uri, error) -> Request_failed {uri; meth; error}) let request_failed meth uri error = - let meth = ( meth : [< RPC_service.meth ] :> RPC_service.meth) in - fail (Request_failed { meth ; uri ; error }) + let meth = (meth : [< RPC_service.meth] :> RPC_service.meth) in + fail (Request_failed {meth; uri; error}) + +type content_type = string * string -type content_type = (string * string) type content = Cohttp_lwt.Body.t * content_type option * Media_type.t option -let generic_call ?logger ?headers ?accept ?body ?media meth uri : (content, content) RPC_context.rest_result Lwt.t = - 
Client.generic_call meth ?logger ?headers ?accept ?body ?media uri >>= function - | `Ok (Some v) -> return (`Ok v) - | `Ok None -> request_failed meth uri Empty_answer - | `Conflict _ - | `Error _ - | `Forbidden _ - | `Unauthorized _ - | `Not_found _ as v -> return v +let generic_call ?logger ?headers ?accept ?body ?media meth uri : + (content, content) RPC_context.rest_result Lwt.t = + Client.generic_call meth ?logger ?headers ?accept ?body ?media uri + >>= function + | `Ok (Some v) -> + return (`Ok v) + | `Ok None -> + request_failed meth uri Empty_answer + | (`Conflict _ | `Error _ | `Forbidden _ | `Unauthorized _ | `Not_found _) as + v -> + return v | `Unexpected_status_code (code, (content, _, media_type)) -> let media_type = Option.map media_type ~f:Media_type.name in - Cohttp_lwt.Body.to_string content >>= fun content -> - request_failed meth uri - (Unexpected_status_code { code ; content ; media_type }) + Cohttp_lwt.Body.to_string content + >>= fun content -> + request_failed + meth + uri + (Unexpected_status_code {code; content; media_type}) | `Method_not_allowed allowed -> let allowed = List.filter_map RPC_service.meth_of_string allowed in request_failed meth uri (Method_not_allowed allowed) @@ -267,8 +306,9 @@ let generic_call ?logger ?headers ?accept ?body ?media meth uri : (content, cont request_failed meth uri (Unsupported_media_type media) | `Not_acceptable acceptable -> let proposed = - Option.unopt_map accept ~default:"" ~f:Media_type.accept_header in - request_failed meth uri (Not_acceptable { proposed ; acceptable }) + Option.unopt_map accept ~default:"" ~f:Media_type.accept_header + in + request_failed meth uri (Not_acceptable {proposed; acceptable}) | `Bad_request msg -> request_failed meth uri (Bad_request msg) | `Connection_failed msg -> @@ -279,67 +319,98 @@ let generic_call ?logger ?headers ?accept ?body ?media meth uri : (content, cont request_failed meth uri (Unauthorized_host host) let handle_error meth uri (body, media, _) f = - Cohttp_lwt.Body.is_empty body >>= fun empty -> - if empty then - return (f None) + Cohttp_lwt.Body.is_empty body + >>= fun empty -> + if empty then return (f None) else match media with - | Some ("application", "json") | None -> begin - Cohttp_lwt.Body.to_string body >>= fun body -> + | Some ("application", "json") | None -> ( + Cohttp_lwt.Body.to_string body + >>= fun body -> match Data_encoding.Json.from_string body with - | Ok body -> return (f (Some body)) + | Ok body -> + return (f (Some body)) | Error msg -> - request_failed meth uri - (Unexpected_content { content = body ; - media_type = Media_type.(name json) ; - error = msg }) - end + request_failed + meth + uri + (Unexpected_content + { content = body; + media_type = Media_type.(name json); + error = msg }) ) | Some (l, r) -> - Cohttp_lwt.Body.to_string body >>= fun body -> - request_failed meth uri - (Unexpected_content_type { received = l^"/"^r ; - acceptable = [Media_type.(name json)] ; - body }) + Cohttp_lwt.Body.to_string body + >>= fun body -> + request_failed + meth + uri + (Unexpected_content_type + { received = l ^ "/" ^ r; + acceptable = [Media_type.(name json)]; + body }) -let generic_json_call ?logger ?headers ?body meth uri : (Data_encoding.json, Data_encoding.json option) RPC_context.rest_result Lwt.t = +let generic_json_call ?logger ?headers ?body meth uri : + (Data_encoding.json, Data_encoding.json option) RPC_context.rest_result + Lwt.t = let body = - Option.map body ~f:begin fun b -> - (Cohttp_lwt.Body.of_string (Data_encoding.Json.to_string b)) - end in + 
Option.map body ~f:(fun b -> + Cohttp_lwt.Body.of_string (Data_encoding.Json.to_string b)) + in let media = Media_type.json in - generic_call meth ?logger ?headers ~accept:Media_type.[bson ; json] ?body ~media uri >>=? function - | `Ok (body, (Some ("application", "json") | None), _) -> begin - Cohttp_lwt.Body.to_string body >>= fun body -> + generic_call + meth + ?logger + ?headers + ~accept:Media_type.[bson; json] + ?body + ~media + uri + >>=? function + | `Ok (body, (Some ("application", "json") | None), _) -> ( + Cohttp_lwt.Body.to_string body + >>= fun body -> match Data_encoding.Json.from_string body with - | Ok json -> return (`Ok json) + | Ok json -> + return (`Ok json) | Error msg -> - request_failed meth uri - (Unexpected_content { content = body ; - media_type = Media_type.(name json) ; - error = msg }) - end - | `Ok (body, Some ("application", "bson"), _) -> begin - Cohttp_lwt.Body.to_string body >>= fun body -> - match Json_repr_bson.bytes_to_bson ~laziness:false ~copy:false - (Bytes.unsafe_of_string body) with + request_failed + meth + uri + (Unexpected_content + { content = body; + media_type = Media_type.(name json); + error = msg }) ) + | `Ok (body, Some ("application", "bson"), _) -> ( + Cohttp_lwt.Body.to_string body + >>= fun body -> + match + Json_repr_bson.bytes_to_bson + ~laziness:false + ~copy:false + (Bytes.unsafe_of_string body) + with | exception Json_repr_bson.Bson_decoding_error (msg, _, pos) -> let error = Format.asprintf "(at offset: %d) %s" pos msg in - request_failed meth uri - (Unexpected_content { content = body ; - media_type = Media_type.(name bson) ; - error }) + request_failed + meth + uri + (Unexpected_content + {content = body; media_type = Media_type.(name bson); error}) | bson -> - return (`Ok (Json_repr.convert - (module Json_repr_bson.Repr) - (module Json_repr.Ezjsonm) - bson)) - end + return + (`Ok + (Json_repr.convert + (module Json_repr_bson.Repr) + (module Json_repr.Ezjsonm) + bson)) ) | `Ok (body, Some (l, r), _) -> - Cohttp_lwt.Body.to_string body >>= fun body -> - request_failed meth uri - (Unexpected_content_type { received = l^"/"^r ; - acceptable = [Media_type.(name json)] ; - body }) + Cohttp_lwt.Body.to_string body + >>= fun body -> + request_failed + meth + uri + (Unexpected_content_type + {received = l ^ "/" ^ r; acceptable = [Media_type.(name json)]; body}) | `Conflict body -> handle_error meth uri body (fun v -> `Conflict v) | `Error body -> @@ -353,46 +424,63 @@ let generic_json_call ?logger ?headers ?body meth uri : (Data_encoding.json, Dat let handle accept (meth, uri, ans) = match ans with - | `Ok (Some v) -> return v - | `Ok None -> request_failed meth uri Empty_answer - | `Not_found None -> fail (RPC_context.Not_found { meth ; uri }) - | `Conflict (Some err) | `Error (Some err) - | `Forbidden (Some err) | `Unauthorized (Some err) - | `Not_found (Some err) -> Lwt.return_error err + | `Ok (Some v) -> + return v + | `Ok None -> + request_failed meth uri Empty_answer + | `Not_found None -> + fail (RPC_context.Not_found {meth; uri}) + | `Conflict (Some err) + | `Error (Some err) + | `Forbidden (Some err) + | `Unauthorized (Some err) + | `Not_found (Some err) -> + Lwt.return_error err | `Conflict None | `Error None | `Forbidden None | `Unauthorized None -> - fail (RPC_context.Generic_error { meth ; uri }) + fail (RPC_context.Generic_error {meth; uri}) | `Unexpected_status_code (code, (content, _, media_type)) -> let media_type = Option.map media_type ~f:Media_type.name in - Cohttp_lwt.Body.to_string content >>= fun content -> - 
request_failed meth uri (Unexpected_status_code { code ; content ; media_type }) + Cohttp_lwt.Body.to_string content + >>= fun content -> + request_failed + meth + uri + (Unexpected_status_code {code; content; media_type}) | `Method_not_allowed allowed -> let allowed = List.filter_map RPC_service.meth_of_string allowed in request_failed meth uri (Method_not_allowed allowed) | `Unsupported_media_type -> let name = match Media_type.first_complete_media accept with - | None -> None - | Some ((l, r), _) -> Some (l^"/"^r) in + | None -> + None + | Some ((l, r), _) -> + Some (l ^ "/" ^ r) + in request_failed meth uri (Unsupported_media_type name) | `Not_acceptable acceptable -> let proposed = - Option.unopt_map (Some accept) ~default:"" ~f:Media_type.accept_header in - request_failed meth uri (Not_acceptable { proposed ; acceptable }) + Option.unopt_map (Some accept) ~default:"" ~f:Media_type.accept_header + in + request_failed meth uri (Not_acceptable {proposed; acceptable}) | `Bad_request msg -> request_failed meth uri (Bad_request msg) | `Unexpected_content ((content, media_type), error) | `Unexpected_error_content ((content, media_type), error) -> let media_type = Media_type.name media_type in - request_failed meth uri (Unexpected_content { content ; media_type ; error }) + request_failed meth uri (Unexpected_content {content; media_type; error}) | `Unexpected_error_content_type (body, media) | `Unexpected_content_type (body, media) -> - Cohttp_lwt.Body.to_string body >>= fun body -> + Cohttp_lwt.Body.to_string body + >>= fun body -> let received = - Option.unopt_map media ~default:"" ~f:(fun (l, r) -> l^"/"^r) in - request_failed meth uri - (Unexpected_content_type { received ; - acceptable = List.map Media_type.name accept ; - body}) + Option.unopt_map media ~default:"" ~f:(fun (l, r) -> l ^ "/" ^ r) + in + request_failed + meth + uri + (Unexpected_content_type + {received; acceptable = List.map Media_type.name accept; body}) | `Connection_failed msg -> request_failed meth uri (Connection_failed msg) | `OCaml_exception msg -> @@ -400,48 +488,39 @@ let handle accept (meth, uri, ans) = | `Unauthorized_host host -> request_failed meth uri (Unauthorized_host host) -let call_streamed_service - (type p q i o ) - accept ?logger ?headers ~base (service : (_,_,p,q,i,o) RPC_service.t) - ~on_chunk ~on_close +let call_streamed_service (type p q i o) accept ?logger ?headers ~base + (service : (_, _, p, q, i, o) RPC_service.t) ~on_chunk ~on_close (params : p) (query : q) (body : i) : (unit -> unit) tzresult Lwt.t = Client.call_streamed_service - accept ?logger ?headers ~base ~on_chunk ~on_close - service params query body >>= fun ans -> - handle accept ans + accept + ?logger + ?headers + ~base + ~on_chunk + ~on_close + service + params + query + body + >>= fun ans -> handle accept ans -let call_service - (type p q i o ) - accept ?logger ?headers ~base (service : (_,_,p,q,i,o) RPC_service.t) - (params : p) - (query : q) (body : i) : o tzresult Lwt.t = - Client.call_service - ?logger ?headers ~base accept service params query body >>= fun ans -> - handle accept ans +let call_service (type p q i o) accept ?logger ?headers ~base + (service : (_, _, p, q, i, o) RPC_service.t) (params : p) (query : q) + (body : i) : o tzresult Lwt.t = + Client.call_service ?logger ?headers ~base accept service params query body + >>= fun ans -> handle accept ans -type config = { - host : string ; - port : int ; - tls : bool ; - logger : logger ; -} +type config = {host : string; port : int; tls : bool; logger : logger} let 
config_encoding = let open Data_encoding in conv - (fun { host ; port ; tls ; logger = _ } -> (host, port, tls)) - (fun (host, port, tls) -> { host ; port ; tls ; logger = null_logger }) - (obj3 - (req "host" string) - (req "port" uint16) - (req "tls" bool)) + (fun {host; port; tls; logger = _} -> (host, port, tls)) + (fun (host, port, tls) -> {host; port; tls; logger = null_logger}) + (obj3 (req "host" string) (req "port" uint16) (req "tls" bool)) -let default_config = { - host = "localhost" ; - port = 8732 ; - tls = false ; - logger = null_logger ; -} +let default_config = + {host = "localhost"; port = 8732; tls = false; logger = null_logger} class http_ctxt config media_types : RPC_context.json = let base = @@ -449,7 +528,8 @@ class http_ctxt config media_types : RPC_context.json = ~scheme:(if config.tls then "https" else "http") ~host:config.host ~port:config.port - () in + () + in let logger = config.logger in object method generic_json_call meth ?body uri = @@ -457,21 +537,30 @@ class http_ctxt config media_types : RPC_context.json = let uri = Uri.with_path base path in let uri = Uri.with_query uri query in generic_json_call ~logger meth ?body uri + method call_service - : 'm 'p 'q 'i 'o. - ([< Resto.meth ] as 'm, unit, 'p, 'q, 'i, 'o) RPC_service.t -> - 'p -> 'q -> 'i -> 'o tzresult Lwt.t = + : 'm 'p 'q 'i 'o. + (([< Resto.meth] as 'm), unit, 'p, 'q, 'i, 'o) RPC_service.t -> 'p -> + 'q -> 'i -> 'o tzresult Lwt.t = fun service params query body -> - call_service media_types - ~logger ~base service params query body + call_service media_types ~logger ~base service params query body + method call_streamed_service - : 'm 'p 'q 'i 'o. - ([< Resto.meth ] as 'm, unit, 'p, 'q, 'i, 'o) RPC_service.t -> - on_chunk: ('o -> unit) -> - on_close: (unit -> unit) -> - 'p -> 'q -> 'i -> (unit -> unit) tzresult Lwt.t = + : 'm 'p 'q 'i 'o. 
+ (([< Resto.meth] as 'm), unit, 'p, 'q, 'i, 'o) RPC_service.t -> + on_chunk:('o -> unit) -> on_close:(unit -> unit) -> 'p -> 'q -> 'i -> + (unit -> unit) tzresult Lwt.t = fun service ~on_chunk ~on_close params query body -> - call_streamed_service media_types service - ~logger ~base ~on_chunk ~on_close params query body + call_streamed_service + media_types + service + ~logger + ~base + ~on_chunk + ~on_close + params + query + body + method base = base end diff --git a/src/lib_rpc_http/RPC_client.mli b/src/lib_rpc_http/RPC_client.mli index 1624efbfa89c9cf2865faa3ec97ae22800335821..8dc28a1d6a099ada81dd89935b43bb8d88f5ab28 100644 --- a/src/lib_rpc_http/RPC_client.mli +++ b/src/lib_rpc_http/RPC_client.mli @@ -25,29 +25,38 @@ module type LOGGER = sig type request - val log_empty_request: Uri.t -> request Lwt.t - val log_request: - ?media:Media_type.t -> 'a Data_encoding.t -> - Uri.t -> string -> request Lwt.t - val log_response: - request -> ?media:Media_type.t -> 'a Data_encoding.t -> - Cohttp.Code.status_code -> string Lwt.t Lazy.t -> unit Lwt.t + + val log_empty_request : Uri.t -> request Lwt.t + + val log_request : + ?media:Media_type.t -> + 'a Data_encoding.t -> + Uri.t -> + string -> + request Lwt.t + + val log_response : + request -> + ?media:Media_type.t -> + 'a Data_encoding.t -> + Cohttp.Code.status_code -> + string Lwt.t Lazy.t -> + unit Lwt.t end type logger = (module LOGGER) -val null_logger: logger -val timings_logger: Format.formatter -> logger -val full_logger: Format.formatter -> logger +val null_logger : logger + +val timings_logger : Format.formatter -> logger -type config = { - host : string ; - port : int ; - tls : bool ; - logger : logger ; -} -val config_encoding: config Data_encoding.t -val default_config: config +val full_logger : Format.formatter -> logger + +type config = {host : string; port : int; tls : bool; logger : logger} + +val config_encoding : config Data_encoding.t + +val default_config : config class http_ctxt : config -> Media_type.t list -> RPC_context.json @@ -57,23 +66,24 @@ type rpc_error = | Bad_request of string | Method_not_allowed of RPC_service.meth list | Unsupported_media_type of string option - | Not_acceptable of { proposed: string ; acceptable: string } - | Unexpected_status_code of { code: Cohttp.Code.status_code ; - content: string ; - media_type: string option } - | Unexpected_content_type of { received: string ; - acceptable: string list ; - body : string } - | Unexpected_content of { content: string ; - media_type: string ; - error: string } + | Not_acceptable of {proposed : string; acceptable : string} + | Unexpected_status_code of + { code : Cohttp.Code.status_code; + content : string; + media_type : string option } + | Unexpected_content_type of + { received : string; + acceptable : string list; + body : string } + | Unexpected_content of + { content : string; + media_type : string; + error : string } | OCaml_exception of string | Unauthorized_host of string option type error += - | Request_failed of { meth: RPC_service.meth ; - uri: Uri.t ; - error: rpc_error } + | Request_failed of {meth : RPC_service.meth; uri : Uri.t; error : rpc_error} (**/**) @@ -82,27 +92,35 @@ val call_service : ?logger:logger -> ?headers:(string * string) list -> base:Uri.t -> - ([< Resto.meth ], unit, 'p, 'q, 'i, 'o) RPC_service.t -> - 'p -> 'q -> 'i -> 'o tzresult Lwt.t + ([< Resto.meth], unit, 'p, 'q, 'i, 'o) RPC_service.t -> + 'p -> + 'q -> + 'i -> + 'o tzresult Lwt.t val call_streamed_service : Media_type.t list -> ?logger:logger -> ?headers:(string * 
string) list -> base:Uri.t -> - ([< Resto.meth ], unit, 'p, 'q, 'i, 'o) RPC_service.t -> - on_chunk: ('o -> unit) -> - on_close: (unit -> unit) -> - 'p -> 'q -> 'i -> (unit -> unit) tzresult Lwt.t + ([< Resto.meth], unit, 'p, 'q, 'i, 'o) RPC_service.t -> + on_chunk:('o -> unit) -> + on_close:(unit -> unit) -> + 'p -> + 'q -> + 'i -> + (unit -> unit) tzresult Lwt.t val generic_json_call : ?logger:logger -> ?headers:(string * string) list -> ?body:Data_encoding.json -> - [< RPC_service.meth ] -> Uri.t -> + [< RPC_service.meth] -> + Uri.t -> (Data_encoding.json, Data_encoding.json option) RPC_context.rest_result Lwt.t -type content_type = (string * string) +type content_type = string * string + type content = Cohttp_lwt.Body.t * content_type option * Media_type.t option val generic_call : @@ -111,7 +129,8 @@ val generic_call : ?accept:Media_type.t list -> ?body:Cohttp_lwt.Body.t -> ?media:Media_type.t -> - [< RPC_service.meth ] -> - Uri.t -> (content, content) RPC_context.rest_result Lwt.t + [< RPC_service.meth] -> + Uri.t -> + (content, content) RPC_context.rest_result Lwt.t -val uri_encoding: Uri.t Data_encoding.t +val uri_encoding : Uri.t Data_encoding.t diff --git a/src/lib_rpc_http/RPC_logging.ml b/src/lib_rpc_http/RPC_logging.ml index 26ab83b33dffa13f9e864e7a95d8d91f184fdfb3..00d3fc5909a844e87b05d475477c03f9aac57bcf 100644 --- a/src/lib_rpc_http/RPC_logging.ml +++ b/src/lib_rpc_http/RPC_logging.ml @@ -23,4 +23,6 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make(struct let name = "rpc" end) +include Internal_event.Legacy_logging.Make (struct + let name = "rpc" +end) diff --git a/src/lib_rpc_http/RPC_server.ml b/src/lib_rpc_http/RPC_server.ml index aaa9a7228d881350a46b032a12fd07d14df68411..2be7332ae186f65e4ff105599fce85afb023be98 100644 --- a/src/lib_rpc_http/RPC_server.ml +++ b/src/lib_rpc_http/RPC_server.ml @@ -24,8 +24,8 @@ (*****************************************************************************) type cors = Resto_cohttp.Cors.t = { - allowed_headers : string list ; - allowed_origins : string list ; + allowed_headers : string list; + allowed_origins : string list } -include Resto_cohttp.Server.Make(RPC_encoding)(RPC_logging) +include Resto_cohttp.Server.Make (RPC_encoding) (RPC_logging) diff --git a/src/lib_rpc_http/RPC_server.mli b/src/lib_rpc_http/RPC_server.mli index 9cce101d544b4b9b420d6f4a36f5e713dbd53957..514cb9f80a43fbdfe0ea3dd7089a4127718bf380 100644 --- a/src/lib_rpc_http/RPC_server.mli +++ b/src/lib_rpc_http/RPC_server.mli @@ -25,10 +25,7 @@ (** Typed RPC services: server implementation. *) -type cors = { - allowed_headers : string list ; - allowed_origins : string list ; -} +type cors = {allowed_headers : string list; allowed_origins : string list} (** A handle on the server worker. 
*) type server diff --git a/src/lib_rpc_http/media_type.ml b/src/lib_rpc_http/media_type.ml index f0c86b5f7262475dd58522376914e637e46a4a51..ea751831a03ae9c2c1e1c6fc48762d79b5b579b2 100644 --- a/src/lib_rpc_http/media_type.ml +++ b/src/lib_rpc_http/media_type.ml @@ -23,96 +23,108 @@ (* *) (*****************************************************************************) -include Resto_cohttp.Media_type.Make(RPC_encoding) +include Resto_cohttp.Media_type.Make (RPC_encoding) -let json = { - name = Cohttp.Accept.MediaType ("application", "json") ; - q = Some 1000 ; - pp = begin fun _enc ppf raw -> - match Data_encoding.Json.from_string raw with - | Error err -> - Format.fprintf ppf - "@[Invalid JSON:@ \ - \ - @[<v 2>Error:@ %s@]\ - \ - @[<v 2>Raw data:@ %s@]@]" - err raw - | Ok json -> - Data_encoding.Json.pp ppf json - end ; - construct = begin fun enc v -> - Data_encoding.Json.to_string ~newline:true ~minify:true @@ - Data_encoding.Json.construct enc v - end ; - destruct = begin fun enc body -> - match Data_encoding.Json.from_string body with - | Error _ as err -> err - | Ok json -> - try Ok (Data_encoding.Json.destruct enc json) - with Data_encoding.Json.Cannot_destruct (_, exn) -> - Error (Format.asprintf "%a" - (fun fmt -> Data_encoding.Json.print_error fmt) - exn) - end ; -} +let json = + { name = Cohttp.Accept.MediaType ("application", "json"); + q = Some 1000; + pp = + (fun _enc ppf raw -> + match Data_encoding.Json.from_string raw with + | Error err -> + Format.fprintf + ppf + "@[Invalid JSON:@ - @[<v 2>Error:@ %s@] - @[<v 2>Raw data:@ \ + %s@]@]" + err + raw + | Ok json -> + Data_encoding.Json.pp ppf json); + construct = + (fun enc v -> + Data_encoding.Json.to_string ~newline:true ~minify:true + @@ Data_encoding.Json.construct enc v); + destruct = + (fun enc body -> + match Data_encoding.Json.from_string body with + | Error _ as err -> + err + | Ok json -> ( + try Ok (Data_encoding.Json.destruct enc json) + with Data_encoding.Json.Cannot_destruct (_, exn) -> + Error + (Format.asprintf + "%a" + (fun fmt -> Data_encoding.Json.print_error fmt) + exn) )) } +let bson = + { name = Cohttp.Accept.MediaType ("application", "bson"); + q = Some 100; + pp = + (fun _enc ppf raw -> + match + Json_repr_bson.bytes_to_bson + ~laziness:false + ~copy:false + (Bytes.unsafe_of_string raw) + with + | exception Json_repr_bson.Bson_decoding_error (msg, _, _) -> + Format.fprintf ppf "@[Invalid BSON:@ %s@]" msg + | bson -> + let json = + Json_repr.convert + (module Json_repr_bson.Repr) + (module Json_repr.Ezjsonm) + bson + in + Data_encoding.Json.pp ppf json); + construct = + (fun enc v -> + Bytes.unsafe_to_string @@ Json_repr_bson.bson_to_bytes + @@ Data_encoding.Bson.construct enc v); + destruct = + (fun enc body -> + match + Json_repr_bson.bytes_to_bson + ~laziness:false + ~copy:false + (Bytes.unsafe_of_string body) + with + | exception Json_repr_bson.Bson_decoding_error (msg, _, pos) -> + Error (Format.asprintf "(at offset: %d) %s" pos msg) + | bson -> ( + try Ok (Data_encoding.Bson.destruct enc bson) + with Data_encoding.Json.Cannot_destruct (_, exn) -> + Error + (Format.asprintf + "%a" + (fun fmt -> Data_encoding.Json.print_error fmt) + exn) )) } -let bson = { - name = Cohttp.Accept.MediaType ("application", "bson") ; - q = Some 100 ; - pp = begin fun _enc ppf raw -> - match Json_repr_bson.bytes_to_bson ~laziness:false ~copy:false - (Bytes.unsafe_of_string raw) with - | exception Json_repr_bson.Bson_decoding_error (msg, _, _) -> - Format.fprintf ppf - "@[Invalid BSON:@ %s@]" - msg - | bson -> - let json = 
- Json_repr.convert - (module Json_repr_bson.Repr) - (module Json_repr.Ezjsonm) - bson in - Data_encoding.Json.pp ppf json - end ; - construct = begin fun enc v -> - Bytes.unsafe_to_string @@ - Json_repr_bson.bson_to_bytes @@ - Data_encoding.Bson.construct enc v - end ; - destruct = begin fun enc body -> - match Json_repr_bson.bytes_to_bson ~laziness:false ~copy:false - (Bytes.unsafe_of_string body) with - | exception Json_repr_bson.Bson_decoding_error (msg, _, pos) -> - Error (Format.asprintf "(at offset: %d) %s" pos msg) - | bson -> - try Ok (Data_encoding.Bson.destruct enc bson) - with Data_encoding.Json.Cannot_destruct (_, exn) -> - Error (Format.asprintf "%a" - (fun fmt -> Data_encoding.Json.print_error fmt) - exn) - end ; -} +let octet_stream = + { name = Cohttp.Accept.MediaType ("application", "octet-stream"); + q = Some 200; + pp = + (fun enc ppf raw -> + match Data_encoding.Binary.of_bytes enc (MBytes.of_string raw) with + | None -> + Format.fprintf ppf "Invalid binary data." + | Some v -> + Format.fprintf + ppf + ";; binary equivalent of the following json@.%a" + Data_encoding.Json.pp + (Data_encoding.Json.construct enc v)); + construct = + (fun enc v -> + MBytes.to_string @@ Data_encoding.Binary.to_bytes_exn enc v); + destruct = + (fun enc s -> + match Data_encoding.Binary.of_bytes enc (MBytes.of_string s) with + | None -> + Error "Failed to parse binary data." + | Some data -> + Ok data) } -let octet_stream = { - name = Cohttp.Accept.MediaType ("application", "octet-stream") ; - q = Some 200 ; - pp = begin fun enc ppf raw -> - match Data_encoding.Binary.of_bytes enc (MBytes.of_string raw) with - | None -> Format.fprintf ppf "Invalid binary data." - | Some v -> - Format.fprintf ppf - ";; binary equivalent of the following json@.%a" - Data_encoding.Json.pp (Data_encoding.Json.construct enc v) - end ; - construct = begin fun enc v -> - MBytes.to_string @@ - Data_encoding.Binary.to_bytes_exn enc v - end ; - destruct = begin fun enc s -> - match Data_encoding.Binary.of_bytes enc (MBytes.of_string s) with - | None -> Error "Failed to parse binary data." - | Some data -> Ok data - end ; -} - -let all_media_types = [ json ; bson ; octet_stream ] +let all_media_types = [json; bson; octet_stream] diff --git a/src/lib_rpc_http/media_type.mli b/src/lib_rpc_http/media_type.mli index 84203f112a41aefff580e0606341b3a72061c5cd..48b2437c218fc5026c6428c3cc613d93a7570901 100644 --- a/src/lib_rpc_http/media_type.mli +++ b/src/lib_rpc_http/media_type.mli @@ -24,21 +24,23 @@ (*****************************************************************************) type t = Resto_cohttp.Media_type.Make(RPC_encoding).t = { - name: Cohttp.Accept.media_range ; - q: int option ; - pp: 'a. 'a Data_encoding.t -> Format.formatter -> string -> unit ; - construct: 'a. 'a Data_encoding.t -> 'a -> string ; - destruct: 'a. 'a Data_encoding.t -> string -> ('a, string) result ; + name : Cohttp.Accept.media_range; + q : int option; + pp : 'a. 'a Data_encoding.t -> Format.formatter -> string -> unit; + construct : 'a. 'a Data_encoding.t -> 'a -> string; + destruct : 'a. 
'a Data_encoding.t -> string -> ('a, string) result } val name : t -> string val json : t + val bson : t + val octet_stream : t val all_media_types : t list - val accept_header : t list -> string + val first_complete_media : t list -> ((string * string) * t) option diff --git a/src/lib_shell/.ocamlformat b/src/lib_shell/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_shell/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_shell/bench/bench_simple.ml b/src/lib_shell/bench/bench_simple.ml index 0c1f70880fdca79e477d5f6172812b46d7fecdd6..6865a4b23ebcb1a90cc09d73d9081c4b2f9a964e 100644 --- a/src/lib_shell/bench/bench_simple.ml +++ b/src/lib_shell/bench/bench_simple.ml @@ -25,37 +25,24 @@ let make_simple blocks = let rec loop pred n = - if n <= 0 then - return pred - else - Block.bake pred >>=? fun block -> - loop block (n - 1) in - Context.init 5 >>=? fun (genesis, _) -> - loop genesis blocks + if n <= 0 then return pred + else Block.bake pred >>=? fun block -> loop block (n - 1) + in + Context.init 5 >>=? fun (genesis, _) -> loop genesis blocks -type args = { - blocks : int ; - accounts : int ; -} +type args = {blocks : int; accounts : int} -let default_args = { - blocks = 1000 ; - accounts = 5 ; -} +let default_args = {blocks = 1000; accounts = 5} -let set_blocks cf blocks = - cf := { !cf with blocks } +let set_blocks cf blocks = cf := {!cf with blocks} -let set_accounts cf accounts = - cf := { !cf with accounts } +let set_accounts cf accounts = cf := {!cf with accounts} let read_args () = let args = ref default_args in let specific = - [ - ("--blocks", Arg.Int (set_blocks args), "number of blocks"); - ("--accounts", Arg.Int (set_accounts args), "number of acount"); - ] + [ ("--blocks", Arg.Int (set_blocks args), "number of blocks"); + ("--accounts", Arg.Int (set_accounts args), "number of acount") ] in let usage = "Usage: [--blocks n] [--accounts n] " in Arg.parse specific (fun _ -> ()) usage ; @@ -65,8 +52,7 @@ let () = let args = read_args () in match Lwt_main.run (make_simple args.blocks) with | Ok _head -> - Format.printf "Success.@." ; - exit 0 + Format.printf "Success.@." ; exit 0 | Error err -> Format.eprintf "%a@." 
pp_print_error err ; exit 1 diff --git a/src/lib_shell/bench/bench_tool.ml b/src/lib_shell/bench/bench_tool.ml index 710234f6aa67b24085907231c3e1862fa9dc13ec..bc1162da9aa43ba582032829199d7f93cf405a6b 100644 --- a/src/lib_shell/bench/bench_tool.ml +++ b/src/lib_shell/bench/bench_tool.ml @@ -32,338 +32,392 @@ open Alpha_context (** Args *) type args = { - mutable length : int ; - mutable seed : int ; - mutable accounts : int ; - mutable nb_commitments : int ; - mutable params : Parameters_repr.t; + mutable length : int; + mutable seed : int; + mutable accounts : int; + mutable nb_commitments : int; + mutable params : Parameters_repr.t } -let default_args = { - length = 100 ; - seed = 0; - accounts = 100 ; - nb_commitments = 200 ; - params = { bootstrap_accounts = [] ; - commitments = [] ; - bootstrap_contracts = [] ; - constants = Tezos_protocol_alpha_parameters.Default_parameters.constants_mainnet ; - security_deposit_ramp_up_cycles = None ; - no_reward_cycles = None ; - } -} +let default_args = + { length = 100; + seed = 0; + accounts = 100; + nb_commitments = 200; + params = + { bootstrap_accounts = []; + commitments = []; + bootstrap_contracts = []; + constants = + Tezos_protocol_alpha_parameters.Default_parameters.constants_mainnet; + security_deposit_ramp_up_cycles = None; + no_reward_cycles = None } } let debug = ref false -let if_debug k = - if !debug then k () +let if_debug k = if !debug then k () -let if_debug_s k = - if !debug then k () else return_unit +let if_debug_s k = if !debug then k () else return_unit let args = default_args let parse_param_file name = if not (Sys.file_exists name) then failwith "Parameters : Inexistent JSON file" - else begin - Tezos_stdlib_unix.Lwt_utils_unix.Json.read_file name >>=? fun json -> + else + Tezos_stdlib_unix.Lwt_utils_unix.Json.read_file name + >>=? fun json -> match Data_encoding.Json.destruct Parameters_repr.encoding json with | exception exn -> failwith "Parameters : Invalid JSON file - %a" Error_monad.pp_exn exn - | param -> return param - end + | param -> + return param let read_args () = let parse_param name = - parse_param_file name >>= begin function - | Ok p -> Lwt.return p - | Error errs -> - Format.printf "Parameters parsing error : %a ==> using \ - default parameters\n%!" Error_monad.pp_print_error errs ; - Lwt.return default_args.params end |> Lwt_main.run + parse_param_file name + >>= (function + | Ok p -> + Lwt.return p + | Error errs -> + Format.printf + "Parameters parsing error : %a ==> using default parameters\n\ + %!" + Error_monad.pp_print_error + errs ; + Lwt.return default_args.params) + |> Lwt_main.run in - let specific = - [ - ("--length", Arg.Int (fun n -> args.length <- n), "Length of the chain (nb of blocks)") ; - ("--seed", Arg.Int (fun n -> args.seed <- n), "Used seed (default 0)") ; - ("--random-commitments", Arg.Int (fun n -> args.nb_commitments <- n), - "Number of randomly generated commitments. Defaults to 200. If \ - less than 0, commitments in protocol parameter files are used.") ; - ("--accounts", Arg.Int (fun n -> args.accounts <- n), - "Number of initial randomly generated accounts. 
Still adds \ - bootstrap account if present in the parameters file.") ; - ("--parameters", Arg.String (fun s -> args.params <- parse_param s), "JSON protocol parameters file") ; - - ("--debug", Arg.Set debug, "Print more info") ; - ] + [ ( "--length", + Arg.Int (fun n -> args.length <- n), + "Length of the chain (nb of blocks)" ); + ("--seed", Arg.Int (fun n -> args.seed <- n), "Used seed (default 0)"); + ( "--random-commitments", + Arg.Int (fun n -> args.nb_commitments <- n), + "Number of randomly generated commitments. Defaults to 200. If less \ + than 0, commitments in protocol parameter files are used." ); + ( "--accounts", + Arg.Int (fun n -> args.accounts <- n), + "Number of initial randomly generated accounts. Still adds bootstrap \ + account if present in the parameters file." ); + ( "--parameters", + Arg.String (fun s -> args.params <- parse_param s), + "JSON protocol parameters file" ); + ("--debug", Arg.Set debug, "Print more info") ] + in + let usage = + "Usage: [--length n] [--seed n] [--accounts n] [--parameters json_file]" in - let usage = "Usage: [--length n] [--seed n] [--accounts n] [--parameters json_file]" in Arg.parse specific (fun _ -> ()) usage (** Utils *) let choose_exp_nat n = (* seems fine *) - let lambda = 1. /. (log (float n)) in + let lambda = 1. /. log (float n) in let u = Random.float 1. in - (-. (log u)) /. lambda |> int_of_float + -.log u /. lambda |> int_of_float let pi = 3.1415926502 + let two_pi = 2. *. 3.1415926502 + let round x = x +. 0.5 |> int_of_float let rec choose_gaussian_nat (a, b) = - assert (b >= a); + assert (b >= a) ; let sigma = 4. in - let mu = ((b - a) / 2 + a) |> float in + let mu = ((b - a) / 2) + a |> float in let gauss () = let u1 = Random.float 1. (* |> fun x -> 1. -. x *) in let u2 = Random.float 1. in - let r = sqrt (-. (2. *. log u1)) in + let r = sqrt (-.(2. *. log u1)) in let theta = cos (two_pi *. u2) in r *. theta in let z = gauss () in - let z = z *. sigma +. mu |> round in + let z = (z *. sigma) +. mu |> round in if z > a && z < b then z else choose_gaussian_nat (a, b) let list_shuffle l = - List.map (fun c -> (Random.bits (), c)) l |> - List.sort compare |> List.map snd + List.map (fun c -> (Random.bits (), c)) l + |> List.sort compare |> List.map snd (******************************************************************) -type gen_state = { mutable possible_transfers : (Account.t * Account.t) list ; - mutable remaining_transfers : (Account.t * Account.t) list ; - mutable remaining_activations : (Account.t * Commitment_repr.t) list; - mutable nonce_to_reveal : (Cycle.t * Raw_level.t * Nonce.t) list ; - } +type gen_state = { + mutable possible_transfers : (Account.t * Account.t) list; + mutable remaining_transfers : (Account.t * Account.t) list; + mutable remaining_activations : (Account.t * Commitment_repr.t) list; + mutable nonce_to_reveal : (Cycle.t * Raw_level.t * Nonce.t) list +} let generate_random_endorsement ctxt n = let slot = n in - Context.get_endorser ctxt slot >>=? fun delegate -> - Op.endorsement ~delegate ctxt [ slot ] + Context.get_endorser ctxt slot + >>=? fun delegate -> Op.endorsement ~delegate ctxt [slot] let generate_and_add_random_endorsements inc = let pred inc = Incremental.predecessor inc in let nb_endorsements = let n = args.params.constants.endorsers_per_block in - n - (choose_exp_nat n) + n - choose_exp_nat n in - if_debug begin fun () -> - Format.printf "[DEBUG] Generating up to %d endorsements...\n%!" 
nb_endorsements end; - - map_s (generate_random_endorsement (B (pred inc))) (0-- (nb_endorsements -1)) >>=? fun endorsements -> - + if_debug (fun () -> + Format.printf + "[DEBUG] Generating up to %d endorsements...\n%!" + nb_endorsements) ; + map_s + (generate_random_endorsement (B (pred inc))) + (0 -- (nb_endorsements - 1)) + >>=? fun endorsements -> let compare op1 op2 = Operation_hash.compare (Operation.hash op1) (Operation.hash op2) in - let endorsements = List.sort_uniq compare endorsements in let endorsements = List.map Operation.pack endorsements in fold_left_s Incremental.add_operation inc endorsements let regenerate_transfers = ref false -let generate_random_activation ({ remaining_activations ; _ } as gen_state) inc = + +let generate_random_activation ({remaining_activations; _} as gen_state) inc = regenerate_transfers := true ; let open Account in match remaining_activations with - | [] -> assert false - | (({ pkh ; _ } as account), _)::l -> - if_debug begin fun () -> - Format.printf "[DEBUG] Generating an activation.\n%!" end; + | [] -> + assert false + | (({pkh; _} as account), _) :: l -> + if_debug (fun () -> + Format.printf "[DEBUG] Generating an activation.\n%!") ; gen_state.remaining_activations <- l ; - add_account account; + add_account account ; Op.activation inc pkh Account.commitment_secret exception No_transfer_left -let rec generate_random_transfer ({ remaining_transfers ; _ } as gen_state) ctxt = - if remaining_transfers = [] then raise No_transfer_left; + +let rec generate_random_transfer ({remaining_transfers; _} as gen_state) ctxt = + if remaining_transfers = [] then raise No_transfer_left ; let (a1, a2) = List.hd remaining_transfers in - gen_state.remaining_transfers <- List.tl remaining_transfers; + gen_state.remaining_transfers <- List.tl remaining_transfers ; let open Account in let c1 = Alpha_context.Contract.implicit_contract a1.pkh in let c2 = Alpha_context.Contract.implicit_contract a2.pkh in - Context.Contract.balance ctxt c1 >>=? fun b1 -> - if Tez.(b1 < Tez.one) then - generate_random_transfer gen_state ctxt - else - Op.transaction ctxt c1 c2 Tez.one - + Context.Contract.balance ctxt c1 + >>=? fun b1 -> + if Tez.(b1 < Tez.one) then generate_random_transfer gen_state ctxt + else Op.transaction ctxt c1 c2 Tez.one let generate_random_operation (inc : Incremental.t) gen_state = let rnd = Random.int 100 in match rnd with | x when x < 2 && gen_state.remaining_activations <> [] -> generate_random_activation gen_state (I inc) - | _ -> generate_random_transfer gen_state (I inc) + | _ -> + generate_random_transfer gen_state (I inc) (* Build a random block *) let step gen_state blk : Block.t tzresult Lwt.t = let priority = choose_exp_nat 5 in (* let nb_operations_per_block = choose_gaussian_nat (10, List.length (Account.get_known_accounts ())) in *) let nb_operations_per_block = choose_gaussian_nat (10, 100) in - - if !regenerate_transfers then begin - let l = Signature.Public_key_hash.Table.fold - (fun _ v acc -> v::acc ) Account.known_accounts [] in + if !regenerate_transfers then ( + let l = + Signature.Public_key_hash.Table.fold + (fun _ v acc -> v :: acc) + Account.known_accounts + [] + in (* TODO : make possible transfer computations efficient.. 
*) - gen_state.possible_transfers <- List.product l l |> List.filter (fun (a,b) -> a <> b); - regenerate_transfers := false - end; + gen_state.possible_transfers <- + List.product l l |> List.filter (fun (a, b) -> a <> b) ; + regenerate_transfers := false ) ; gen_state.remaining_transfers <- list_shuffle gen_state.possible_transfers ; - let nb_operations = min nb_operations_per_block (List.length gen_state.remaining_transfers) in (* Nonce *) - begin Alpha_services.Helpers.current_level ~offset:1l (Block.rpc_ctxt) blk >>|? function - | Level.{ expected_commitment = true ; cycle ; level ; _ } -> - if_debug begin fun () -> Format.printf "[DEBUG] Commiting a nonce\n%!" end; - begin - let (hash, nonce) = - Helpers_Nonce.generate () in - gen_state.nonce_to_reveal <- (cycle, level, nonce) :: gen_state.nonce_to_reveal; - Some hash - end - | _ -> None - end >>=? fun seed_nonce_hash -> - - Incremental.begin_construction ~priority ?seed_nonce_hash blk >>=? fun inc -> + Alpha_services.Helpers.current_level ~offset:1l Block.rpc_ctxt blk + >>|? (function + | Level.{expected_commitment = true; cycle; level; _} -> + if_debug (fun () -> Format.printf "[DEBUG] Commiting a nonce\n%!") ; + let (hash, nonce) = Helpers_Nonce.generate () in + gen_state.nonce_to_reveal <- + (cycle, level, nonce) :: gen_state.nonce_to_reveal ; + Some hash + | _ -> + None) + >>=? fun seed_nonce_hash -> + Incremental.begin_construction ~priority ?seed_nonce_hash blk + >>=? fun inc -> let open Cycle in - - if_debug begin fun () -> Format.printf "[DEBUG] Generating %d random operations...\n%!" nb_operations end; - + if_debug (fun () -> + Format.printf + "[DEBUG] Generating %d random operations...\n%!" + nb_operations) ; (* Generate random operations *) fold_left_s (fun inc _ -> - try - generate_random_operation inc gen_state >>=? fun op -> - Incremental.add_operation inc op - with No_transfer_left -> return inc - ) - inc (1 -- nb_operations) >>=? fun inc -> - + try + generate_random_operation inc gen_state + >>=? fun op -> Incremental.add_operation inc op + with No_transfer_left -> return inc) + inc + (1 -- nb_operations) + >>=? fun inc -> (* Endorsements *) - generate_and_add_random_endorsements inc >>=? fun inc -> - + generate_and_add_random_endorsements inc + >>=? fun inc -> (* Revelations *) (* TODO debug cycle *) - begin Alpha_services.Helpers.current_level ~offset:1l Incremental.rpc_ctxt inc >>|? function { cycle ; level ; _ } -> - if_debug begin fun () -> Format.printf "[DEBUG] Current cycle : %a\n%!" Cycle.pp cycle end ; - if_debug begin fun () -> Format.printf "[DEBUG] Current level : %a\n%!" Raw_level.pp level end ; - begin match gen_state.nonce_to_reveal with - | ((pred_cycle, _, _)::_) as l when succ pred_cycle = cycle -> - if_debug begin fun () -> Format.printf "[DEBUG] Seed nonce revelation : %d nonces to reveal.\n%!" - @@ List.length l end; - gen_state.nonce_to_reveal <- [] ; - (* fold_left_s (fun inc (_, level, nonce) -> *) - (* Op.seed_nonce_revelation inc level nonce >>=? fun op -> - * Incremental.add_operation inc op *) - (* return *) inc (* TODO reactivate the seeds *) - (* ) inc l *) - | _ -> inc - end - end >>=? fun inc -> + Alpha_services.Helpers.current_level ~offset:1l Incremental.rpc_ctxt inc + >>|? (function + | {cycle; level; _} -> ( + if_debug (fun () -> + Format.printf "[DEBUG] Current cycle : %a\n%!" Cycle.pp cycle) ; + if_debug (fun () -> + Format.printf + "[DEBUG] Current level : %a\n%!" 
+ Raw_level.pp + level) ; + match gen_state.nonce_to_reveal with + | (pred_cycle, _, _) :: _ as l when succ pred_cycle = cycle -> + if_debug (fun () -> + Format.printf + "[DEBUG] Seed nonce revelation : %d nonces to reveal.\n\ + %!" + @@ List.length l) ; + gen_state.nonce_to_reveal <- [] ; + (* fold_left_s (fun inc (_, level, nonce) -> *) + (* Op.seed_nonce_revelation inc level nonce >>=? fun op -> + * Incremental.add_operation inc op *) + (* return *) + inc + (* TODO reactivate the seeds *) + (* ) inc l *) + | _ -> + inc )) + >>=? fun inc -> (* (\* Shuffle the operations a bit (why not) *\) * let operations = endorsements @ operations |> list_shuffle in *) - Incremental.finalize_block inc let init () = Random.init args.seed ; let parameters = args.params in - (* keys randomness is delegated to module Signature's bindings *) (* TODO : distribute the tokens randomly *) (* Right now, we split half of 80.000 rolls between generated accounts *) (* TODO : ensure we don't overflow with the underlying commitments *) Tez_repr.( - Lwt.return @@ Alpha_environment.wrap_error @@ - args.params.Parameters_repr.constants.Constants_repr.tokens_per_roll - *? 80_000L >>=? fun total_amount -> - Lwt.return @@ Alpha_environment.wrap_error @@ - total_amount /? 2L >>=? fun amount -> - Lwt.return @@ Alpha_environment.wrap_error @@ - amount /? (Int64.of_int args.accounts) ) >>=? fun initial_amount -> - + Lwt.return @@ Alpha_environment.wrap_error + @@ args.params.Parameters_repr.constants.Constants_repr.tokens_per_roll + *? 80_000L + >>=? fun total_amount -> + Lwt.return @@ Alpha_environment.wrap_error @@ (total_amount /? 2L) + >>=? fun amount -> + Lwt.return @@ Alpha_environment.wrap_error + @@ (amount /? Int64.of_int args.accounts)) + >>=? fun initial_amount -> (* Ensure a deterministic run *) let new_seed () : MBytes.t = - String.(make 32 '\000' |> map (fun _ -> Random.int 0x100 |> char_of_int)) |> - MBytes.of_string + String.(make 32 '\000' |> map (fun _ -> Random.int 0x100 |> char_of_int)) + |> MBytes.of_string in - map_s - (fun _ -> return (Account.new_account ~seed:(new_seed ()) (), initial_amount)) - (1--args.accounts) >>=? fun initial_accounts -> - if_debug begin fun () -> - List.iter - (fun (Account.{pkh; _ },_) -> Format.printf "[DEBUG] Account %a created\n%!" Signature.Public_key_hash.pp_short pkh ) - initial_accounts end; - + (fun _ -> + return (Account.new_account ~seed:(new_seed ()) (), initial_amount)) + (1 -- args.accounts) + >>=? fun initial_accounts -> + if_debug (fun () -> + List.iter + (fun (Account.{pkh; _}, _) -> + Format.printf + "[DEBUG] Account %a created\n%!" + Signature.Public_key_hash.pp_short + pkh) + initial_accounts) ; let possible_transfers = let l = List.map fst initial_accounts in - List.product l l |> List.filter (fun (a,b) -> a <> b) + List.product l l |> List.filter (fun (a, b) -> a <> b) in - - begin match args.nb_commitments with - | x when x < 0 -> return ([], parameters) - | x -> - map_s - (fun _ -> Account.new_commitment ~seed:(new_seed ()) ()) (1 -- x) >>=? fun commitments -> - return (commitments, { parameters with commitments = List.map snd commitments }) - end >>=? 
fun (remaining_activations, { bootstrap_accounts=_ ; commitments ; - constants ; security_deposit_ramp_up_cycles ; - no_reward_cycles ; _ }) -> - let gen_state = { possible_transfers ; remaining_transfers = [] ; - nonce_to_reveal = [] ; remaining_activations } in - - Block.genesis_with_parameters constants + ( match args.nb_commitments with + | x when x < 0 -> + return ([], parameters) + | x -> + map_s (fun _ -> Account.new_commitment ~seed:(new_seed ()) ()) (1 -- x) + >>=? fun commitments -> + return + (commitments, {parameters with commitments = List.map snd commitments}) + ) + >>=? fun ( remaining_activations, + { bootstrap_accounts = _; + commitments; + constants; + security_deposit_ramp_up_cycles; + no_reward_cycles; + _ } ) -> + let gen_state = + { possible_transfers; + remaining_transfers = []; + nonce_to_reveal = []; + remaining_activations } + in + Block.genesis_with_parameters + constants ~commitments ~security_deposit_ramp_up_cycles - ~no_reward_cycles initial_accounts + ~no_reward_cycles + initial_accounts >>=? fun genesis -> - - if_debug_s begin fun () -> - iter_s (let open Account in fun ({ pkh ; _ } as acc, _) -> - let contract = Alpha_context.Contract.implicit_contract acc.pkh in - Context.Contract.manager (B genesis) contract >>=? fun { pkh = pkh' ; _ } -> - Context.Contract.balance (B genesis) contract >>=? fun balance -> - return @@ Format.printf "[DEBUG] %a's manager is %a with a balance of %a\n%!" - Signature.Public_key_hash.pp_short pkh - Signature.Public_key_hash.pp_short pkh' - Tez.pp balance - ) initial_accounts end >>=? fun () -> - - if_debug begin fun () -> - Format.printf "[DEBUG] Constants : %a\n%!" - Data_encoding.Json.pp - (Data_encoding.Json.construct - Constants_repr.parametric_encoding parameters.Parameters_repr.constants) - end; - - Format.printf "@[<v 2>Starting generation with :@ \ - @[length = %d@]@ \ - @[seed = %d@]@ \ - @[nb_commi. = %d@]@ \ - @[#accounts = %d@]@ @]@." args.length args.seed args.nb_commitments args.accounts; + if_debug_s (fun () -> + iter_s + (let open Account in + fun (({pkh; _} as acc), _) -> + let contract = Alpha_context.Contract.implicit_contract acc.pkh in + Context.Contract.manager (B genesis) contract + >>=? fun {pkh = pkh'; _} -> + Context.Contract.balance (B genesis) contract + >>=? fun balance -> + return + @@ Format.printf + "[DEBUG] %a's manager is %a with a balance of %a\n%!" + Signature.Public_key_hash.pp_short + pkh + Signature.Public_key_hash.pp_short + pkh' + Tez.pp + balance) + initial_accounts) + >>=? fun () -> + if_debug (fun () -> + Format.printf + "[DEBUG] Constants : %a\n%!" + Data_encoding.Json.pp + (Data_encoding.Json.construct + Constants_repr.parametric_encoding + parameters.Parameters_repr.constants)) ; + Format.printf + "@[<v 2>Starting generation with :@ @[length = %d@]@ @[seed = \ + %d@]@ @[nb_commi. = %d@]@ @[#accounts = %d@]@ @]@." + args.length + args.seed + args.nb_commitments + args.accounts ; let rec loop gen_state blk = function - | 0 -> return (gen_state, blk) - | n -> begin - Block.print_block blk; - step gen_state blk >>=? fun blk' -> - loop gen_state blk' (n-1) - end + | 0 -> + return (gen_state, blk) + | n -> + Block.print_block blk ; + step gen_state blk >>=? fun blk' -> loop gen_state blk' (n - 1) in return (loop gen_state genesis args.length) let () = - Lwt_main.run (read_args (); init ()) |> function + Lwt_main.run (read_args () ; init ()) + |> function | Ok _head -> - Format.printf "Success.@." ; - exit 0 + Format.printf "Success.@." ; exit 0 | Error err -> Format.eprintf "%a@." 
pp_print_error err ; exit 1 diff --git a/src/lib_shell/bench/helpers/account.ml b/src/lib_shell/bench/helpers/account.ml index 6acb5e5ba7e955b46c25b077214c71b3f94404ae..6555aad41c08cd8f9fc07297c68a18586c15540f 100644 --- a/src/lib_shell/bench/helpers/account.ml +++ b/src/lib_shell/bench/helpers/account.ml @@ -26,10 +26,11 @@ open Proto_alpha type t = { - pkh : Signature.Public_key_hash.t ; - pk : Signature.Public_key.t ; - sk : Signature.Secret_key.t ; + pkh : Signature.Public_key_hash.t; + pk : Signature.Public_key.t; + sk : Signature.Secret_key.t } + type account = t let commitment_secret = @@ -40,11 +41,11 @@ let known_accounts = Signature.Public_key_hash.Table.create 17 let new_account ?seed () = let (pkh, pk, sk) = Signature.generate_key ?seed ~algo:Ed25519 () in - let account = { pkh ; pk ; sk } in + let account = {pkh; pk; sk} in Signature.Public_key_hash.Table.add known_accounts pkh account ; account -let add_account ({ pkh ; _ } as account) = +let add_account ({pkh; _} as account) = Signature.Public_key_hash.Table.add known_accounts pkh account let dictator_account = new_account () @@ -59,8 +60,8 @@ let find_alternate pkh = try Signature.Public_key_hash.Table.iter (fun pkh' account -> - if not (Signature.Public_key_hash.equal pkh pkh') then - raise (Found account)) + if not (Signature.Public_key_hash.equal pkh pkh') then + raise (Found account)) known_accounts ; raise Not_found with Found account -> account @@ -69,21 +70,22 @@ let dummy_account = new_account () let new_commitment ?seed () = let (pkh, pk, sk) = Signature.generate_key ?seed ~algo:Ed25519 () in - let unactivated_account = { pkh; pk; sk } in + let unactivated_account = {pkh; pk; sk} in let open Proto_alpha in let open Commitment_repr in let pkh = match pkh with Ed25519 pkh -> pkh | _ -> assert false in let bpkh = Blinded_public_key_hash.of_ed25519_pkh commitment_secret pkh in - Lwt.return @@ Alpha_environment.wrap_error @@ - Tez_repr.(one *? 4_000L) >>=? fun amount -> - return @@ (unactivated_account, { blinded_public_key_hash = bpkh ; amount }) + (Lwt.return @@ Alpha_environment.wrap_error @@ Tez_repr.(one *? 4_000L)) + >>=? 
fun amount -> + return @@ (unactivated_account, {blinded_public_key_hash = bpkh; amount}) let generate_accounts n : (t * Tez_repr.t) list = Signature.Public_key_hash.Table.clear known_accounts ; let amount = Tez_repr.of_mutez_exn 4_000_000_000_000L in - List.map (fun _ -> + List.map + (fun _ -> let (pkh, pk, sk) = Signature.generate_key () in - let account = { pkh ; pk ; sk } in + let account = {pkh; pk; sk} in Signature.Public_key_hash.Table.add known_accounts pkh account ; - account, amount) - (0--(n-1)) + (account, amount)) + (0 -- (n - 1)) diff --git a/src/lib_shell/bench/helpers/account.mli b/src/lib_shell/bench/helpers/account.mli index 0e880114764ae528fac03fddcf8c0313945294a8..68c291c7305725585e42d9fb32ff7f63cf29ad0e 100644 --- a/src/lib_shell/bench/helpers/account.mli +++ b/src/lib_shell/bench/helpers/account.mli @@ -26,28 +26,31 @@ open Proto_alpha type t = { - pkh : Signature.Public_key_hash.t ; - pk : Signature.Public_key.t ; - sk : Signature.Secret_key.t ; + pkh : Signature.Public_key_hash.t; + pk : Signature.Public_key.t; + sk : Signature.Secret_key.t } + type account = t val commitment_secret : Blinded_public_key_hash.activation_code -val dictator_account: account -val dummy_account: account +val dictator_account : account + +val dummy_account : account -val new_account: ?seed : MBytes.t -> unit -> account +val new_account : ?seed:MBytes.t -> unit -> account -val new_commitment : ?seed:MBytes.t -> unit -> - (account * Commitment_repr.t) tzresult Lwt.t +val new_commitment : + ?seed:MBytes.t -> unit -> (account * Commitment_repr.t) tzresult Lwt.t val add_account : t -> unit val known_accounts : t Signature.Public_key_hash.Table.t -val find: Signature.Public_key_hash.t -> t tzresult Lwt.t -val find_alternate: Signature.Public_key_hash.t -> t +val find : Signature.Public_key_hash.t -> t tzresult Lwt.t + +val find_alternate : Signature.Public_key_hash.t -> t (** [generate_accounts n] : generates [n] random accounts with 4.000.000.000 tz and add them to the global account state *) diff --git a/src/lib_shell/bench/helpers/assert.ml b/src/lib_shell/bench/helpers/assert.ml index 1a5795def2276fabcdb34c1e734c81f7650b5554..52c783d70aa3f2e6c5eb86aa0e7ad6ab1c3cab23 100644 --- a/src/lib_shell/bench/helpers/assert.ml +++ b/src/lib_shell/bench/helpers/assert.ml @@ -35,45 +35,44 @@ let error ~loc v f = failwith "@[Unexpected error (%s): %a@]" loc pp_print_error err let proto_error ~loc v f = - error ~loc v - (function - | Alpha_environment.Ecoproto_error err -> f err - | _ -> false) + error ~loc v (function + | Alpha_environment.Ecoproto_error err -> + f err + | _ -> + false) -let equal ~loc (cmp : 'a -> 'a -> bool) msg pp a b = +let equal ~loc (cmp : 'a -> 'a -> bool) msg pp a b = if not (cmp a b) then failwith "@[@[[%s]@] - @[%s : %a is not equal to %a@]@]" loc msg pp a pp b - else - return_unit + else return_unit -let not_equal ~loc (cmp : 'a -> 'a -> bool) msg pp a b = +let not_equal ~loc (cmp : 'a -> 'a -> bool) msg pp a b = if cmp a b then failwith "@[@[[%s]@] - @[%s : %a is equal to %a@]@]" loc msg pp a pp b - else - return_unit + else return_unit -let equal_tez ~loc (a:Alpha_context.Tez.t) (b:Alpha_context.Tez.t) = +let equal_tez ~loc (a : Alpha_context.Tez.t) (b : Alpha_context.Tez.t) = let open Alpha_context in - equal ~loc Tez.(=) "Tez aren't equal" Tez.pp a b + equal ~loc Tez.( = ) "Tez aren't equal" Tez.pp a b -let not_equal_tez ~loc (a:Alpha_context.Tez.t) (b:Alpha_context.Tez.t) = +let not_equal_tez ~loc (a : Alpha_context.Tez.t) (b : Alpha_context.Tez.t) = let open 
Alpha_context in - not_equal ~loc Tez.(=) "Tez are equal" Tez.pp a b + not_equal ~loc Tez.( = ) "Tez are equal" Tez.pp a b -let equal_int ~loc (a:int) (b:int) = - equal ~loc (=) "Integers aren't equal" Format.pp_print_int a b +let equal_int ~loc (a : int) (b : int) = + equal ~loc ( = ) "Integers aren't equal" Format.pp_print_int a b -let not_equal_int ~loc (a:int) (b:int) = - not_equal ~loc (=) "Integers are equal" Format.pp_print_int a b +let not_equal_int ~loc (a : int) (b : int) = + not_equal ~loc ( = ) "Integers are equal" Format.pp_print_int a b -let equal_bool ~loc (a:bool) (b:bool) = - equal ~loc (=) "Booleans aren't equal" Format.pp_print_bool a b - -let not_equal_bool ~loc (a:bool) (b:bool) = - not_equal ~loc (=) "Booleans are equal" Format.pp_print_bool a b +let equal_bool ~loc (a : bool) (b : bool) = + equal ~loc ( = ) "Booleans aren't equal" Format.pp_print_bool a b +let not_equal_bool ~loc (a : bool) (b : bool) = + not_equal ~loc ( = ) "Booleans are equal" Format.pp_print_bool a b open Context + (* Some asserts for account operations *) (** [balance_is b c amount] checks that the current balance of contract [c] is @@ -81,32 +80,38 @@ open Context Default balance type is [Main], pass [~kind] with [Deposit], [Fees] or [Rewards] for the others. *) let balance_is ~loc b contract ?(kind = Contract.Main) expected = - Contract.balance b contract ~kind >>=? fun balance -> - equal_tez ~loc balance expected + Contract.balance b contract ~kind + >>=? fun balance -> equal_tez ~loc balance expected (** [balance_was_operated ~operand b c old_balance amount] checks that the current balance of contract [c] is [operand old_balance amount] and returns the current balance. Default balance type is [Main], pass [~kind] with [Deposit], [Fees] or [Rewards] for the others. *) -let balance_was_operated ~(operand) ~loc b contract ?(kind = Contract.Main) old_balance amount = - operand old_balance amount |> - Alpha_environment.wrap_error |> Lwt.return >>=? fun expected -> - balance_is ~loc b contract ~kind expected - -let balance_was_credited = balance_was_operated ~operand:Alpha_context.Tez.(+?) +let balance_was_operated ~operand ~loc b contract ?(kind = Contract.Main) + old_balance amount = + operand old_balance amount |> Alpha_environment.wrap_error |> Lwt.return + >>=? fun expected -> balance_is ~loc b contract ~kind expected -let balance_was_debited = balance_was_operated ~operand:Alpha_context.Tez.(-?) +let balance_was_credited = + balance_was_operated ~operand:Alpha_context.Tez.( +? ) +let balance_was_debited = + balance_was_operated ~operand:Alpha_context.Tez.( -? ) (* debug *) let print_balances ctxt id = - Contract.balance ~kind:Main ctxt id >>=? fun main -> - Contract.balance ~kind:Deposit ctxt id >>=? fun deposit -> - Contract.balance ~kind:Fees ctxt id >>=? fun fees -> - Contract.balance ~kind:Rewards ctxt id >>|? fun rewards -> - Format.printf "\nMain: %s\nDeposit: %s\nFees: %s\nRewards: %s\n" + Contract.balance ~kind:Main ctxt id + >>=? fun main -> + Contract.balance ~kind:Deposit ctxt id + >>=? fun deposit -> + Contract.balance ~kind:Fees ctxt id + >>=? fun fees -> + Contract.balance ~kind:Rewards ctxt id + >>|? 
fun rewards -> + Format.printf + "\nMain: %s\nDeposit: %s\nFees: %s\nRewards: %s\n" (Alpha_context.Tez.to_string main) (Alpha_context.Tez.to_string deposit) (Alpha_context.Tez.to_string fees) diff --git a/src/lib_shell/bench/helpers/block.ml b/src/lib_shell/bench/helpers/block.ml index ca6b538dbffe07142773310dc076fc34390c3cea..c32ac4fdf91326dc92aa752bc949039b0c3c21d9 100644 --- a/src/lib_shell/bench/helpers/block.ml +++ b/src/lib_shell/bench/helpers/block.ml @@ -25,26 +25,28 @@ open Proto_alpha module Proto_Nonce = Nonce (* Renamed otherwise is masked by Alpha_context *) + open Alpha_context (* This type collects a block and the context that results from its application *) type t = { - hash : Block_hash.t ; - header : Block_header.t ; - operations : Operation.packed list ; - context : Tezos_protocol_environment_memory.Context.t ; + hash : Block_hash.t; + header : Block_header.t; + operations : Operation.packed list; + context : Tezos_protocol_environment_memory.Context.t } + type block = t -let rpc_context block = { - Alpha_environment.Updater.block_hash = block.hash ; - block_header = block.header.shell ; - context = block.context ; -} +let rpc_context block = + { Alpha_environment.Updater.block_hash = block.hash; + block_header = block.header.shell; + context = block.context } let rpc_ctxt = new Alpha_environment.proto_rpc_context_of_directory - rpc_context Proto_alpha.rpc_services + rpc_context + Proto_alpha.rpc_services (******** Policies ***********) @@ -58,213 +60,230 @@ type baker_policy = | Excluding of public_key_hash list let get_next_baker_by_priority priority block = - Alpha_services.Delegate.Baking_rights.get rpc_ctxt + Alpha_services.Delegate.Baking_rights.get + rpc_ctxt ~all:true - ~max_priority:(priority+1) block >>=? fun bakers -> - let { Alpha_services.Delegate.Baking_rights.delegate = pkh ; - timestamp; _ } = List.find (fun { Alpha_services.Delegate.Baking_rights.priority = p ; _ } -> p = priority) bakers in + ~max_priority:(priority + 1) + block + >>=? fun bakers -> + let {Alpha_services.Delegate.Baking_rights.delegate = pkh; timestamp; _} = + List.find + (fun {Alpha_services.Delegate.Baking_rights.priority = p; _} -> + p = priority) + bakers + in return (pkh, priority, Option.unopt_exn (Failure "") timestamp) let get_next_baker_by_account pkh block = - Alpha_services.Delegate.Baking_rights.get rpc_ctxt + Alpha_services.Delegate.Baking_rights.get + rpc_ctxt ~delegates:[pkh] - ~max_priority:256 block >>=? fun bakers -> - let { Alpha_services.Delegate.Baking_rights.delegate = pkh ; - timestamp ; priority ; _ } = List.hd bakers in + ~max_priority:256 + block + >>=? fun bakers -> + let { Alpha_services.Delegate.Baking_rights.delegate = pkh; + timestamp; + priority; + _ } = + List.hd bakers + in return (pkh, priority, Option.unopt_exn (Failure "") timestamp) let get_next_baker_excluding excludes block = - Alpha_services.Delegate.Baking_rights.get rpc_ctxt - ~max_priority:256 block >>=? fun bakers -> - let { Alpha_services.Delegate.Baking_rights.delegate = pkh ; - timestamp ; priority ; _ } = + Alpha_services.Delegate.Baking_rights.get rpc_ctxt ~max_priority:256 block + >>=? 
fun bakers -> + let { Alpha_services.Delegate.Baking_rights.delegate = pkh; + timestamp; + priority; + _ } = List.find - (fun { Alpha_services.Delegate.Baking_rights.delegate ; _ } -> - not (List.mem delegate excludes)) - bakers in + (fun {Alpha_services.Delegate.Baking_rights.delegate; _} -> + not (List.mem delegate excludes)) + bakers + in return (pkh, priority, Option.unopt_exn (Failure "") timestamp) let dispatch_policy = function - | By_priority p -> get_next_baker_by_priority p - | By_account a -> get_next_baker_by_account a - | Excluding al -> get_next_baker_excluding al + | By_priority p -> + get_next_baker_by_priority p + | By_account a -> + get_next_baker_by_account a + | Excluding al -> + get_next_baker_excluding al let get_next_baker ?(policy = By_priority 0) = dispatch_policy policy module Forge = struct - type header = { - baker : public_key_hash ; (* the signer of the block *) - shell : Block_header.shell_header ; - contents : Block_header.contents ; + baker : public_key_hash; + (* the signer of the block *) + shell : Block_header.shell_header; + contents : Block_header.contents } let default_proof_of_work_nonce = MBytes.create Constants.proof_of_work_nonce_size - let make_contents - ?(proof_of_work_nonce = default_proof_of_work_nonce) + let make_contents ?(proof_of_work_nonce = default_proof_of_work_nonce) ~priority ~seed_nonce_hash () = - Block_header.{ priority ; - proof_of_work_nonce ; - seed_nonce_hash } - - let make_shell - ~level ~predecessor ~timestamp ~fitness ~operations_hash = - Tezos_base.Block_header.{ - level ; - predecessor ; - timestamp ; - fitness ; - operations_hash ; - (* We don't care of the following values, only the shell validates them. *) - proto_level = 0 ; - validation_passes = 0 ; - context = Context_hash.zero ; - } - - let set_seed_nonce_hash seed_nonce_hash { baker ; shell ; contents } = - { baker ; shell ; contents = { contents with seed_nonce_hash } } - - let set_baker baker header = { header with baker } - - let sign_header { baker ; shell ; contents } = - Account.find baker >>=? fun delegate -> + Block_header.{priority; proof_of_work_nonce; seed_nonce_hash} + + let make_shell ~level ~predecessor ~timestamp ~fitness ~operations_hash = + Tezos_base.Block_header. + { level; + predecessor; + timestamp; + fitness; + operations_hash; + (* We don't care of the following values, only the shell validates them. *) + proto_level = 0; + validation_passes = 0; + context = Context_hash.zero } + + let set_seed_nonce_hash seed_nonce_hash {baker; shell; contents} = + {baker; shell; contents = {contents with seed_nonce_hash}} + + let set_baker baker header = {header with baker} + + let sign_header {baker; shell; contents} = + Account.find baker + >>=? fun delegate -> let unsigned_bytes = Data_encoding.Binary.to_bytes_exn Block_header.unsigned_encoding - (shell, contents) in + (shell, contents) + in let signature = Signature.sign ~watermark:(Signature.Block_header Chain_id.zero) - delegate.sk unsigned_bytes in - Block_header.{ shell ; protocol_data = { contents ; signature } } |> - return - - let forge_header - ?(policy = By_priority 0) - ?(operations = []) pred = - dispatch_policy policy pred >>=? fun (pkh, priority, timestamp) -> + delegate.sk + unsigned_bytes + in + Block_header.{shell; protocol_data = {contents; signature}} |> return + + let forge_header ?(policy = By_priority 0) ?(operations = []) pred = + dispatch_policy policy pred + >>=? 
fun (pkh, priority, timestamp) -> let level = Int32.succ pred.header.shell.level in - begin - match Fitness_repr.to_int64 pred.header.shell.fitness with - | Ok old_fitness -> - return (Fitness_repr.from_int64 - (Int64.add (Int64.of_int 1) old_fitness)) - | Error _ -> assert false - end >>=? fun fitness -> - begin - Alpha_services.Helpers.current_level ~offset:1l (rpc_ctxt) pred >>|? function - | { expected_commitment = true ; _ } -> Some (fst (Proto_Nonce.generate ())) - | { expected_commitment = false ; _ } -> None - end >>=? fun seed_nonce_hash -> + ( match Fitness_repr.to_int64 pred.header.shell.fitness with + | Ok old_fitness -> + return + (Fitness_repr.from_int64 (Int64.add (Int64.of_int 1) old_fitness)) + | Error _ -> + assert false ) + >>=? fun fitness -> + Alpha_services.Helpers.current_level ~offset:1l rpc_ctxt pred + >>|? (function + | {expected_commitment = true; _} -> + Some (fst (Proto_Nonce.generate ())) + | {expected_commitment = false; _} -> + None) + >>=? fun seed_nonce_hash -> let hashes = List.map Operation.hash_packed operations in - let operations_hash = Operation_list_list_hash.compute - [Operation_list_hash.compute hashes] in - let shell = make_shell ~level ~predecessor:pred.hash - ~timestamp ~fitness ~operations_hash in + let operations_hash = + Operation_list_list_hash.compute [Operation_list_hash.compute hashes] + in + let shell = + make_shell + ~level + ~predecessor:pred.hash + ~timestamp + ~fitness + ~operations_hash + in let contents = make_contents ~priority ~seed_nonce_hash () in - return { baker = pkh ; shell ; contents } + return {baker = pkh; shell; contents} (* compatibility only, needed by incremental *) - let contents - ?(proof_of_work_nonce = default_proof_of_work_nonce) + let contents ?(proof_of_work_nonce = default_proof_of_work_nonce) ?(priority = 0) ?seed_nonce_hash () = - { Block_header.priority ; - proof_of_work_nonce ; - seed_nonce_hash ; - } - + {Block_header.priority; proof_of_work_nonce; seed_nonce_hash} end (********* Genesis creation *************) (* Hard-coded context key *) -let protocol_param_key = [ "protocol_parameters" ] +let protocol_param_key = ["protocol_parameters"] let check_constants_consistency constants = let open Constants_repr in - let { blocks_per_cycle ; blocks_per_commitment ; - blocks_per_roll_snapshot ; _ } = constants in - Error_monad.unless (blocks_per_commitment <= blocks_per_cycle) - (fun () -> failwith "Inconsistent constants : blocks per commitment must be \ - less than blocks per cycle") >>=? fun () -> - Error_monad.unless (blocks_per_cycle >= blocks_per_roll_snapshot) - (fun () -> failwith "Inconsistent constants : blocks per cycle \ - must be superior than blocks per roll snapshot") >>=? - return - -let initial_context + let {blocks_per_cycle; blocks_per_commitment; blocks_per_roll_snapshot; _} = constants - header - commitments - initial_accounts - security_deposit_ramp_up_cycles - no_reward_cycles - = + in + Error_monad.unless (blocks_per_commitment <= blocks_per_cycle) (fun () -> + failwith + "Inconsistent constants : blocks per commitment must be less than \ + blocks per cycle") + >>=? fun () -> + Error_monad.unless (blocks_per_cycle >= blocks_per_roll_snapshot) (fun () -> + failwith + "Inconsistent constants : blocks per cycle must be superior than \ + blocks per roll snapshot") + >>=? 
return + +let initial_context constants header commitments initial_accounts + security_deposit_ramp_up_cycles no_reward_cycles = let bootstrap_accounts = - List.map (fun (Account.{ pk = public_key ; pkh = public_key_hash ; _ }, amount) -> - Parameters_repr.{ public_key = Some public_key ; public_key_hash ; amount } - ) initial_accounts + List.map + (fun (Account.{pk = public_key; pkh = public_key_hash; _}, amount) -> + Parameters_repr.{public_key = Some public_key; public_key_hash; amount}) + initial_accounts in let json = Data_encoding.Json.construct Parameters_repr.encoding - Parameters_repr.{ - bootstrap_accounts ; - bootstrap_contracts = [] ; - commitments ; - constants ; - security_deposit_ramp_up_cycles ; - no_reward_cycles ; - } + Parameters_repr. + { bootstrap_accounts; + bootstrap_contracts = []; + commitments; + constants; + security_deposit_ramp_up_cycles; + no_reward_cycles } in let proto_params = Data_encoding.Binary.to_bytes_exn Data_encoding.json json in Tezos_protocol_environment_memory.Context.( - set empty protocol_param_key proto_params - ) >>= fun ctxt -> - Main.init ctxt header - >|= Alpha_environment.wrap_error >>=? fun { context; _ } -> - return context + set empty protocol_param_key proto_params) + >>= fun ctxt -> + Main.init ctxt header >|= Alpha_environment.wrap_error + >>=? fun {context; _} -> return context -let genesis_with_parameters - constants - ?(commitments = []) - ?(security_deposit_ramp_up_cycles = None) - ?(no_reward_cycles = None) - (initial_accounts : (Account.t * Tez_repr.t) list) - = +let genesis_with_parameters constants ?(commitments = []) + ?(security_deposit_ramp_up_cycles = None) ?(no_reward_cycles = None) + (initial_accounts : (Account.t * Tez_repr.t) list) = if initial_accounts = [] then - Pervasives.failwith "Must have one account with a roll to bake"; - + Pervasives.failwith "Must have one account with a roll to bake" ; (* Check there is at least one roll *) - begin try + ( try let open Test_utils in - fold_left_s (fun acc (_, amount) -> - Alpha_environment.wrap_error @@ - Tez_repr.(+?) acc amount >>?= fun acc -> - if acc >= constants.Constants_repr.tokens_per_roll then - raise Exit - else return acc - ) Tez_repr.zero initial_accounts >>=? fun _ -> + fold_left_s + (fun acc (_, amount) -> + Alpha_environment.wrap_error @@ Tez_repr.( +? ) acc amount + >>?= fun acc -> + if acc >= constants.Constants_repr.tokens_per_roll then raise Exit + else return acc) + Tez_repr.zero + initial_accounts + >>=? fun _ -> failwith "Insufficient tokens in initial accounts to create one roll" - with Exit -> return_unit - end >>=? fun () -> - check_constants_consistency constants >>=? fun () -> + with Exit -> return_unit ) + >>=? fun () -> + check_constants_consistency constants + >>=? fun () -> let hash = - Block_hash.of_b58check_exn "BLockGenesisGenesisGenesisGenesisGenesisCCCCCeZiLHU" + Block_hash.of_b58check_exn + "BLockGenesisGenesisGenesisGenesisGenesisCCCCCeZiLHU" in - let shell = Forge.make_shell + let shell = + Forge.make_shell ~level:0l ~predecessor:hash ~timestamp:Time.Protocol.epoch - ~fitness: (Fitness_repr.from_int64 0L) - ~operations_hash: Operation_list_list_hash.zero in - let contents = Forge.make_contents - ~priority:0 - ~seed_nonce_hash:None () in + ~fitness:(Fitness_repr.from_int64 0L) + ~operations_hash:Operation_list_list_hash.zero + in + let contents = Forge.make_contents ~priority:0 ~seed_nonce_hash:None () in initial_context constants shell @@ -274,17 +293,10 @@ let genesis_with_parameters no_reward_cycles >>=? 
fun context -> let block = - { hash ; - header = { - shell = shell ; - protocol_data = { - contents = contents ; - signature = Signature.zero ; - } ; - }; - operations = [] ; - context ; - } + { hash; + header = {shell; protocol_data = {contents; signature = Signature.zero}}; + operations = []; + context } in return block @@ -293,8 +305,8 @@ open Tezos_protocol_alpha_parameters.Default_parameters let default = constants_mainnet [@@@ocaml.warning "-23"] -let genesis - ?(preserved_cycles = default.preserved_cycles) + +let genesis ?(preserved_cycles = default.preserved_cycles) ?(blocks_per_cycle = default.blocks_per_cycle) ?(blocks_per_commitment = default.blocks_per_commitment) ?(blocks_per_roll_snapshot = default.blocks_per_roll_snapshot) @@ -313,107 +325,112 @@ let genesis ?(block_reward = default.block_reward) ?(endorsement_reward = default.endorsement_reward) ?(cost_per_byte = default.cost_per_byte) - ?(hard_storage_limit_per_operation = default.hard_storage_limit_per_operation) + ?(hard_storage_limit_per_operation = + default.hard_storage_limit_per_operation) ?(test_chain_duration = default.test_chain_duration) (initial_accounts : (Account.t * Tez_repr.t) list) = - let constants : Constants_repr.parametric = { - default with - preserved_cycles ; - blocks_per_cycle ; - blocks_per_commitment ; - blocks_per_roll_snapshot ; - blocks_per_voting_period ; - time_between_blocks ; - endorsers_per_block ; - hard_gas_limit_per_operation ; - hard_gas_limit_per_block ; - proof_of_work_threshold ; - tokens_per_roll ; - michelson_maximum_type_size ; - seed_nonce_revelation_tip ; - origination_size ; - block_security_deposit ; - endorsement_security_deposit ; - block_reward ; - endorsement_reward ; - cost_per_byte ; - hard_storage_limit_per_operation ; - test_chain_duration ; - } in + let constants : Constants_repr.parametric = + { default with + preserved_cycles; + blocks_per_cycle; + blocks_per_commitment; + blocks_per_roll_snapshot; + blocks_per_voting_period; + time_between_blocks; + endorsers_per_block; + hard_gas_limit_per_operation; + hard_gas_limit_per_block; + proof_of_work_threshold; + tokens_per_roll; + michelson_maximum_type_size; + seed_nonce_revelation_tip; + origination_size; + block_security_deposit; + endorsement_security_deposit; + block_reward; + endorsement_reward; + cost_per_byte; + hard_storage_limit_per_operation; + test_chain_duration } + in genesis_with_parameters constants initial_accounts (********* Baking *************) let apply header ?(operations = []) pred = - begin - let open Alpha_environment.Error_monad in - Proto_alpha.Main.begin_application - ~chain_id:Chain_id.zero - ~predecessor_context: pred.context - ~predecessor_fitness: pred.header.shell.fitness - ~predecessor_timestamp: pred.header.shell.timestamp - header >>=? fun vstate -> - fold_left_s - (fun vstate op -> - Proto_alpha.apply_operation vstate op >>=? fun (state, _result) -> - return state) - vstate operations >>=? fun vstate -> - Proto_alpha.Main.finalize_block vstate >>=? fun (validation, _result) -> - return validation.context - end >|= Alpha_environment.wrap_error >>|? fun context -> + (let open Alpha_environment.Error_monad in + Proto_alpha.Main.begin_application + ~chain_id:Chain_id.zero + ~predecessor_context:pred.context + ~predecessor_fitness:pred.header.shell.fitness + ~predecessor_timestamp:pred.header.shell.timestamp + header + >>=? fun vstate -> + fold_left_s + (fun vstate op -> + Proto_alpha.apply_operation vstate op + >>=? fun (state, _result) -> return state) + vstate + operations + >>=? 
fun vstate -> + Proto_alpha.Main.finalize_block vstate + >>=? fun (validation, _result) -> return validation.context) + >|= Alpha_environment.wrap_error + >>|? fun context -> let hash = Block_header.hash header in - { hash ; header ; operations ; context } + {hash; header; operations; context} let bake ?policy ?operation ?operations pred = let operations = - match operation,operations with - | Some op, Some ops -> Some (op::ops) - | Some op, None -> Some [op] - | None, Some ops -> Some ops - | None, None -> None + match (operation, operations) with + | (Some op, Some ops) -> + Some (op :: ops) + | (Some op, None) -> + Some [op] + | (None, Some ops) -> + Some ops + | (None, None) -> + None in - Forge.forge_header ?policy ?operations pred >>=? fun header -> - Forge.sign_header header >>=? fun header -> - apply header ?operations pred + Forge.forge_header ?policy ?operations pred + >>=? fun header -> + Forge.sign_header header >>=? fun header -> apply header ?operations pred (* This function is duplicated from Context to avoid a cyclic dependency *) -let get_constants b = - Alpha_services.Constants.all rpc_ctxt b +let get_constants b = Alpha_services.Constants.all rpc_ctxt b (********** Cycles ****************) let bake_n ?policy n b = - Error_monad.fold_left_s - (fun b _ -> bake ?policy b) b (1 -- n) + Error_monad.fold_left_s (fun b _ -> bake ?policy b) b (1 -- n) let bake_until_cycle_end ?policy b = - get_constants b >>=? fun Constants.{ parametric = { blocks_per_cycle ; _ } ; _ } -> + get_constants b + >>=? fun Constants.{parametric = {blocks_per_cycle; _}; _} -> let current_level = b.header.shell.level in let current_level = Int32.rem current_level blocks_per_cycle in let delta = Int32.sub blocks_per_cycle current_level in bake_n ?policy (Int32.to_int delta) b let bake_until_n_cycle_end ?policy n b = - Error_monad.fold_left_s - (fun b _ -> bake_until_cycle_end ?policy b) b (1 -- n) + Error_monad.fold_left_s (fun b _ -> bake_until_cycle_end ?policy b) b (1 -- n) -let bake_until_cycle ?policy cycle (b:t) = - get_constants b >>=? fun Constants.{ parametric = { blocks_per_cycle ; _ } ; _ } -> - let rec loop (b:t) = +let bake_until_cycle ?policy cycle (b : t) = + get_constants b + >>=? fun Constants.{parametric = {blocks_per_cycle; _}; _} -> + let rec loop (b : t) = let current_cycle = let current_level = b.header.shell.level in let current_cycle = Int32.div current_level blocks_per_cycle in current_cycle in - if Int32.equal (Cycle.to_int32 cycle) current_cycle then - return b - else - bake_until_cycle_end ?policy b >>=? fun b -> - loop b + if Int32.equal (Cycle.to_int32 cycle) current_cycle then return b + else bake_until_cycle_end ?policy b >>=? fun b -> loop b in loop b let print_block block = - Format.printf "@[%6i %s@]\n%!" - (Int32.to_int (block.header.shell.level)) - (Block_hash.to_b58check (block.hash)) + Format.printf + "@[%6i %s@]\n%!" 
+ (Int32.to_int block.header.shell.level) + (Block_hash.to_b58check block.hash) diff --git a/src/lib_shell/bench/helpers/block.mli b/src/lib_shell/bench/helpers/block.mli index c9f48acd98fde42f4ad2e095b20159e0dd76a7be..e61b2d95b2e65e1192c1d9daa6289281e2b2fa3a 100644 --- a/src/lib_shell/bench/helpers/block.mli +++ b/src/lib_shell/bench/helpers/block.mli @@ -27,14 +27,16 @@ open Proto_alpha open Alpha_context type t = { - hash : Block_hash.t ; - header : Block_header.t ; - operations : Operation.packed list ; - context : Tezos_protocol_environment_memory.Context.t ; (** Resulting context *) + hash : Block_hash.t; + header : Block_header.t; + operations : Operation.packed list; + context : Tezos_protocol_environment_memory.Context.t + (** Resulting context *) } + type block = t -val rpc_ctxt: t Alpha_environment.RPC_context.simple +val rpc_ctxt : t Alpha_environment.RPC_context.simple (** Policies to select the next baker: - [By_priority p] selects the baker at priority [p] @@ -48,55 +50,53 @@ type baker_policy = (** Returns (account, priority, timestamp) of the next baker given a policy, defaults to By_priority 0. *) -val get_next_baker: +val get_next_baker : ?policy:baker_policy -> - t -> (public_key_hash * int * Time.Protocol.t) tzresult Lwt.t + t -> + (public_key_hash * int * Time.Protocol.t) tzresult Lwt.t module Forge : sig - - val contents: + val contents : ?proof_of_work_nonce:MBytes.t -> ?priority:int -> - ?seed_nonce_hash: Nonce_hash.t -> - unit -> Block_header.contents + ?seed_nonce_hash:Nonce_hash.t -> + unit -> + Block_header.contents type header (** Forges a correct header following the policy. The header can then be modified and applied with [apply]. *) - val forge_header: + val forge_header : ?policy:baker_policy -> - ?operations: Operation.packed list -> - t -> header tzresult Lwt.t + ?operations:Operation.packed list -> + t -> + header tzresult Lwt.t (** Sets uniquely seed_nonce_hash of a header *) - val set_seed_nonce_hash: - Nonce_hash.t option -> header -> header + val set_seed_nonce_hash : Nonce_hash.t option -> header -> header (** Sets the baker that will sign the header to an arbitrary pkh *) - val set_baker: - public_key_hash -> header -> header + val set_baker : public_key_hash -> header -> header (** Signs the header with the key of the baker configured in the header. The header can no longer be modified, only applied. *) - val sign_header: - header -> - Block_header.block_header tzresult Lwt.t - + val sign_header : header -> Block_header.block_header tzresult Lwt.t end val genesis_with_parameters : Constants_repr.parametric -> - ?commitments: Commitment_repr.t list -> + ?commitments:Commitment_repr.t list -> ?security_deposit_ramp_up_cycles:int option -> ?no_reward_cycles:int option -> - (Account.t * Proto_alpha.Tez_repr.t) list -> block tzresult Lwt.t + (Account.t * Proto_alpha.Tez_repr.t) list -> + block tzresult Lwt.t (** [genesis <opts> accounts] : generates an initial block with the given constants [<opts>] and initializes [accounts] with their associated amounts. 
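    A minimal usage sketch, assuming the [Account] and [Block] helpers above
    are in scope; the account count and the baking policy are illustrative
    only, not part of this interface:

      let short_chain () =
        let accounts = Account.generate_accounts 5 in
        Block.genesis accounts >>=? fun genesis ->
        (* bake one block at priority 0, then two more with the default policy *)
        Block.bake ~policy:(Block.By_priority 0) genesis >>=? fun b ->
        Block.bake_n 2 b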
*) -val genesis: +val genesis : ?preserved_cycles:int -> ?blocks_per_cycle:int32 -> ?blocks_per_commitment:int32 -> @@ -115,17 +115,19 @@ val genesis: ?endorsement_security_deposit:Tez_repr.tez -> ?block_reward:Tez_repr.tez -> ?endorsement_reward:Tez_repr.tez -> - ?cost_per_byte: Tez_repr.t -> - ?hard_storage_limit_per_operation: Z.t -> - ?test_chain_duration: Int64.t -> - (Account.t * Tez_repr.tez) list -> block tzresult Lwt.t + ?cost_per_byte:Tez_repr.t -> + ?hard_storage_limit_per_operation:Z.t -> + ?test_chain_duration:Int64.t -> + (Account.t * Tez_repr.tez) list -> + block tzresult Lwt.t (** Applies a signed header and its operations to a block and obtains a new block *) -val apply: +val apply : Block_header.block_header -> - ?operations: Operation.packed list -> - t -> t tzresult Lwt.t + ?operations:Operation.packed list -> + t -> + t tzresult Lwt.t (** [bake b] returns a block [b'] which has as predecessor block [b]. @@ -135,11 +137,12 @@ val apply: testing together with setters for properties of the headers. For examples see seed.ml or double_baking.ml *) -val bake: - ?policy: baker_policy -> - ?operation: Operation.packed -> - ?operations: Operation.packed list -> - t -> t tzresult Lwt.t +val bake : + ?policy:baker_policy -> + ?operation:Operation.packed -> + ?operations:Operation.packed list -> + t -> + t tzresult Lwt.t (** Bakes [n] blocks. *) val bake_n : ?policy:baker_policy -> int -> t -> block tzresult Lwt.t @@ -149,9 +152,10 @@ val bake_n : ?policy:baker_policy -> int -> t -> block tzresult Lwt.t val bake_until_cycle_end : ?policy:baker_policy -> t -> t tzresult Lwt.t (** Bakes enough blocks to end [n] cycles. *) -val bake_until_n_cycle_end : ?policy:baker_policy -> int -> t -> t tzresult Lwt.t +val bake_until_n_cycle_end : + ?policy:baker_policy -> int -> t -> t tzresult Lwt.t (** Bakes enough blocks to reach the cycle. *) val bake_until_cycle : ?policy:baker_policy -> Cycle.t -> t -> t tzresult Lwt.t -val print_block: t -> unit +val print_block : t -> unit diff --git a/src/lib_shell/bench/helpers/context.ml b/src/lib_shell/bench/helpers/context.ml index 8d6cc9f86255d216f668f63571136bf6c6b0901c..63a6f13c57051a71cd6773cd1b974b30ebb1a50b 100644 --- a/src/lib_shell/bench/helpers/context.ml +++ b/src/lib_shell/bench/helpers/context.ml @@ -26,169 +26,195 @@ open Proto_alpha open Alpha_context -type t = - | B of Block.t - | I of Incremental.t +type t = B of Block.t | I of Incremental.t -let branch = function - | B b -> b.hash - | I i -> (Incremental.predecessor i).hash +let branch = function B b -> b.hash | I i -> (Incremental.predecessor i).hash -let level = function - | B b -> b.header.shell.level - | I i -> (Incremental.level i) +let level = function B b -> b.header.shell.level | I i -> Incremental.level i let get_level ctxt = - level ctxt - |> Raw_level.of_int32 - |> Alpha_environment.wrap_error + level ctxt |> Raw_level.of_int32 |> Alpha_environment.wrap_error |> Lwt.return -let rpc_ctxt = object - method call_proto_service0 : - 'm 'q 'i 'o. - ([< RPC_service.meth ] as 'm, Alpha_environment.RPC_context.t, Alpha_environment.RPC_context.t, 'q, 'i, 'o) RPC_service.t -> - t -> 'q -> 'i -> 'o tzresult Lwt.t = - fun s pr q i -> - match pr with - | B b -> Block.rpc_ctxt#call_proto_service0 s b q i - | I b -> Incremental.rpc_ctxt#call_proto_service0 s b q i - method call_proto_service1 : - 'm 'a 'q 'i 'o. 
- ([< RPC_service.meth ] as 'm, Alpha_environment.RPC_context.t, Alpha_environment.RPC_context.t * 'a, 'q, 'i, 'o) RPC_service.t -> - t -> 'a -> 'q -> 'i -> 'o tzresult Lwt.t = - fun s pr a q i -> - match pr with - | B bl -> Block.rpc_ctxt#call_proto_service1 s bl a q i - | I bl -> Incremental.rpc_ctxt#call_proto_service1 s bl a q i - method call_proto_service2 : - 'm 'a 'b 'q 'i 'o. - ([< RPC_service.meth ] as 'm, Alpha_environment.RPC_context.t, (Alpha_environment.RPC_context.t * 'a) * 'b, 'q, 'i, 'o) RPC_service.t -> - t -> 'a -> 'b -> 'q -> 'i -> 'o tzresult Lwt.t = - fun s pr a b q i -> - match pr with - | B bl -> Block.rpc_ctxt#call_proto_service2 s bl a b q i - | I bl -> Incremental.rpc_ctxt#call_proto_service2 s bl a b q i - method call_proto_service3 : - 'm 'a 'b 'c 'q 'i 'o. - ([< RPC_service.meth ] as 'm, Alpha_environment.RPC_context.t, ((Alpha_environment.RPC_context.t * 'a) * 'b) * 'c, 'q, 'i, 'o) RPC_service.t -> - t -> 'a -> 'b -> 'c -> 'q -> 'i -> 'o tzresult Lwt.t = - fun s pr a b c q i -> - match pr with - | B bl -> Block.rpc_ctxt#call_proto_service3 s bl a b c q i - | I bl -> Incremental.rpc_ctxt#call_proto_service3 s bl a b c q i -end +let rpc_ctxt = + object + method call_proto_service0 + : 'm 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + Alpha_environment.RPC_context.t, + Alpha_environment.RPC_context.t, + 'q, + 'i, + 'o ) + RPC_service.t -> t -> 'q -> 'i -> 'o tzresult Lwt.t = + fun s pr q i -> + match pr with + | B b -> + Block.rpc_ctxt#call_proto_service0 s b q i + | I b -> + Incremental.rpc_ctxt#call_proto_service0 s b q i + + method call_proto_service1 + : 'm 'a 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + Alpha_environment.RPC_context.t, + Alpha_environment.RPC_context.t * 'a, + 'q, + 'i, + 'o ) + RPC_service.t -> t -> 'a -> 'q -> 'i -> 'o tzresult Lwt.t = + fun s pr a q i -> + match pr with + | B bl -> + Block.rpc_ctxt#call_proto_service1 s bl a q i + | I bl -> + Incremental.rpc_ctxt#call_proto_service1 s bl a q i + + method call_proto_service2 + : 'm 'a 'b 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + Alpha_environment.RPC_context.t, + (Alpha_environment.RPC_context.t * 'a) * 'b, + 'q, + 'i, + 'o ) + RPC_service.t -> t -> 'a -> 'b -> 'q -> 'i -> 'o tzresult Lwt.t = + fun s pr a b q i -> + match pr with + | B bl -> + Block.rpc_ctxt#call_proto_service2 s bl a b q i + | I bl -> + Incremental.rpc_ctxt#call_proto_service2 s bl a b q i + + method call_proto_service3 + : 'm 'a 'b 'c 'q 'i 'o. + ( ([< RPC_service.meth] as 'm), + Alpha_environment.RPC_context.t, + ((Alpha_environment.RPC_context.t * 'a) * 'b) * 'c, + 'q, + 'i, + 'o ) + RPC_service.t -> t -> 'a -> 'b -> 'c -> 'q -> 'i -> 'o tzresult Lwt.t + = + fun s pr a b c q i -> + match pr with + | B bl -> + Block.rpc_ctxt#call_proto_service3 s bl a b c q i + | I bl -> + Incremental.rpc_ctxt#call_proto_service3 s bl a b c q i + end let get_endorsers ctxt = Alpha_services.Delegate.Endorsing_rights.get rpc_ctxt ctxt let get_endorser ctxt slot = - Alpha_services.Delegate.Endorsing_rights.get - rpc_ctxt ctxt >>=? fun endorsers -> - try return (List.find (fun { Alpha_services.Delegate.Endorsing_rights.slots ; _ } -> List.mem slot slots) endorsers).delegate + Alpha_services.Delegate.Endorsing_rights.get rpc_ctxt ctxt + >>=? fun endorsers -> + try + return + (List.find + (fun {Alpha_services.Delegate.Endorsing_rights.slots; _} -> + List.mem slot slots) + endorsers) + .delegate with _ -> - failwith "Failed to lookup endorsers for ctxt %a, slot %d." 
- Block_hash.pp_short (branch ctxt) slot + failwith + "Failed to lookup endorsers for ctxt %a, slot %d." + Block_hash.pp_short + (branch ctxt) + slot let get_bakers ctxt = - Alpha_services.Delegate.Baking_rights.get - ~max_priority:256 - rpc_ctxt ctxt >>=? fun bakers -> - return (List.map - (fun p -> p.Alpha_services.Delegate.Baking_rights.delegate) - bakers) + Alpha_services.Delegate.Baking_rights.get ~max_priority:256 rpc_ctxt ctxt + >>=? fun bakers -> + return + (List.map + (fun p -> p.Alpha_services.Delegate.Baking_rights.delegate) + bakers) -let get_constants b = - Alpha_services.Constants.all rpc_ctxt b +let get_constants b = Alpha_services.Constants.all rpc_ctxt b module Contract = struct - - let pkh c = Alpha_context.Contract.is_implicit c |> function - | Some p -> return p - | None -> failwith "pkh: only for implicit contracts" + let pkh c = + Alpha_context.Contract.is_implicit c + |> function + | Some p -> return p | None -> failwith "pkh: only for implicit contracts" type balance_kind = Main | Deposit | Fees | Rewards let balance ?(kind = Main) ctxt contract = - begin match kind with - | Main -> - Alpha_services.Contract.balance rpc_ctxt ctxt contract - | _ -> - match Alpha_context.Contract.is_implicit contract with - | None -> - invalid_arg - "get_balance: no frozen accounts for an originated contract." - | Some pkh -> - Alpha_services.Delegate.frozen_balance_by_cycle - rpc_ctxt ctxt pkh >>=? fun map -> - Lwt.return @@ - Cycle.Map.fold - (fun _cycle { Delegate.deposit ; fees ; rewards } acc -> - acc >>?fun acc -> - match kind with - | Deposit -> Test_tez.Tez.(acc +? deposit) - | Fees -> Test_tez.Tez.(acc +? fees) - | Rewards -> Test_tez.Tez.(acc +? rewards) - | _ -> assert false) - map - (Ok Tez.zero) - end + match kind with + | Main -> + Alpha_services.Contract.balance rpc_ctxt ctxt contract + | _ -> ( + match Alpha_context.Contract.is_implicit contract with + | None -> + invalid_arg + "get_balance: no frozen accounts for an originated contract." + | Some pkh -> + Alpha_services.Delegate.frozen_balance_by_cycle rpc_ctxt ctxt pkh + >>=? fun map -> + Lwt.return + @@ Cycle.Map.fold + (fun _cycle {Delegate.deposit; fees; rewards} acc -> + acc + >>? fun acc -> + match kind with + | Deposit -> + Test_tez.Tez.(acc +? deposit) + | Fees -> + Test_tez.Tez.(acc +? fees) + | Rewards -> + Test_tez.Tez.(acc +? rewards) + | _ -> + assert false) + map + (Ok Tez.zero) ) let counter ctxt contract = Alpha_services.Contract.counter rpc_ctxt ctxt contract let manager ctxt contract = - Alpha_services.Contract.manager rpc_ctxt ctxt contract >>=? fun pkh -> - Account.find pkh + Alpha_services.Contract.manager rpc_ctxt ctxt contract + >>=? fun pkh -> Account.find pkh let is_manager_key_revealed ctxt contract = - Alpha_services.Contract.manager_key rpc_ctxt ctxt contract >>=? fun (_, res) -> - return (res <> None) + Alpha_services.Contract.manager_key rpc_ctxt ctxt contract + >>=? 
fun (_, res) -> return (res <> None) let delegate_opt ctxt contract = Alpha_services.Contract.delegate_opt rpc_ctxt ctxt contract - end module Delegate = struct - type info = Delegate_services.info = { - balance: Tez.t ; - frozen_balance: Tez.t ; - frozen_balance_by_cycle: Delegate.frozen_balance Cycle.Map.t ; - staking_balance: Tez.t ; - delegated_contracts: Contract_hash.t list ; - delegated_balance: Tez.t ; - deactivated: bool ; - grace_period: Cycle.t ; + balance : Tez.t; + frozen_balance : Tez.t; + frozen_balance_by_cycle : Delegate.frozen_balance Cycle.Map.t; + staking_balance : Tez.t; + delegated_contracts : Contract_hash.t list; + delegated_balance : Tez.t; + deactivated : bool; + grace_period : Cycle.t } - let info ctxt pkh = - Alpha_services.Delegate.info rpc_ctxt ctxt pkh - + let info ctxt pkh = Alpha_services.Delegate.info rpc_ctxt ctxt pkh end -let init - ?(slow=false) - ?preserved_cycles - ?endorsers_per_block - n = +let init ?(slow = false) ?preserved_cycles ?endorsers_per_block n = let accounts = Account.generate_accounts n in - let contracts = List.map (fun (a, _) -> - Alpha_context.Contract.implicit_contract Account.(a.pkh)) accounts in - begin - if slow then - Block.genesis - ?preserved_cycles - ?endorsers_per_block - accounts - else - Block.genesis - ?preserved_cycles - ~blocks_per_cycle:32l - ~blocks_per_commitment:4l - ~blocks_per_roll_snapshot:8l - ?endorsers_per_block - accounts - end >>=? fun blk -> - return (blk, contracts) + let contracts = + List.map + (fun (a, _) -> Alpha_context.Contract.implicit_contract Account.(a.pkh)) + accounts + in + ( if slow then Block.genesis ?preserved_cycles ?endorsers_per_block accounts + else + Block.genesis + ?preserved_cycles + ~blocks_per_cycle:32l + ~blocks_per_commitment:4l + ~blocks_per_roll_snapshot:8l + ?endorsers_per_block + accounts ) + >>=? fun blk -> return (blk, contracts) diff --git a/src/lib_shell/bench/helpers/context.mli b/src/lib_shell/bench/helpers/context.mli index e16b2d3b5d7b0fd84c763b322819a483806a7969..35001570d8d3040c3ed90008018deedf225f75ed 100644 --- a/src/lib_shell/bench/helpers/context.mli +++ b/src/lib_shell/bench/helpers/context.mli @@ -26,63 +26,61 @@ open Proto_alpha open Alpha_context -type t = - | B of Block.t - | I of Incremental.t +type t = B of Block.t | I of Incremental.t -val branch: t -> Block_hash.t +val branch : t -> Block_hash.t -val get_level: t -> Raw_level.t tzresult Lwt.t +val get_level : t -> Raw_level.t tzresult Lwt.t -val get_endorsers: t -> Alpha_services.Delegate.Endorsing_rights.t list tzresult Lwt.t +val get_endorsers : + t -> Alpha_services.Delegate.Endorsing_rights.t list tzresult Lwt.t -val get_endorser: t -> int -> public_key_hash tzresult Lwt.t +val get_endorser : t -> int -> public_key_hash tzresult Lwt.t -val get_bakers: t -> public_key_hash list tzresult Lwt.t +val get_bakers : t -> public_key_hash list tzresult Lwt.t (** Returns all the constants of the protocol *) -val get_constants: t -> Constants.t tzresult Lwt.t +val get_constants : t -> Constants.t tzresult Lwt.t module Contract : sig - - val pkh: Contract.t -> public_key_hash tzresult Lwt.t + val pkh : Contract.t -> public_key_hash tzresult Lwt.t type balance_kind = Main | Deposit | Fees | Rewards (** Returns the balance of a contract, by default the main balance. If the contract is implicit the frozen balances are available too: deposit, fees ot rewards. 
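    A minimal sketch of how these accessors compose when used from a test,
    assuming [Context.init], the constructors of [t] above and the [Assert]
    helpers; the account count is illustrative:

      let first_contract_balances () =
        Context.init 1 >>=? fun (genesis, contracts) ->
        let c = List.hd contracts in
        (* main balance of the first bootstrap contract *)
        Context.Contract.balance (Context.B genesis) c >>=? fun main ->
        (* frozen deposits of the same implicit contract *)
        Context.Contract.balance ~kind:Context.Contract.Deposit (Context.B genesis) c
        >>=? fun deposit ->
        (* the assertion helpers take the same context value *)
        Assert.balance_is ~loc:__LOC__ (Context.B genesis) c main
        >>=? fun () -> return (main, deposit)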
*) - val balance: ?kind:balance_kind -> t -> Contract.t -> Tez.t tzresult Lwt.t + val balance : ?kind:balance_kind -> t -> Contract.t -> Tez.t tzresult Lwt.t + + val counter : t -> Contract.t -> Z.t tzresult Lwt.t - val counter: t -> Contract.t -> Z.t tzresult Lwt.t - val manager: t -> Contract.t -> Account.t tzresult Lwt.t - val is_manager_key_revealed: t -> Contract.t -> bool tzresult Lwt.t + val manager : t -> Contract.t -> Account.t tzresult Lwt.t - val delegate_opt: t -> Contract.t -> public_key_hash option tzresult Lwt.t + val is_manager_key_revealed : t -> Contract.t -> bool tzresult Lwt.t + val delegate_opt : t -> Contract.t -> public_key_hash option tzresult Lwt.t end module Delegate : sig - type info = Delegate_services.info = { - balance: Tez.t ; - frozen_balance: Tez.t ; - frozen_balance_by_cycle: Delegate.frozen_balance Cycle.Map.t ; - staking_balance: Tez.t ; - delegated_contracts: Contract_hash.t list ; - delegated_balance: Tez.t ; - deactivated: bool ; - grace_period: Cycle.t ; + balance : Tez.t; + frozen_balance : Tez.t; + frozen_balance_by_cycle : Delegate.frozen_balance Cycle.Map.t; + staking_balance : Tez.t; + delegated_contracts : Contract_hash.t list; + delegated_balance : Tez.t; + deactivated : bool; + grace_period : Cycle.t } - val info: t -> public_key_hash -> Delegate_services.info tzresult Lwt.t - + val info : t -> public_key_hash -> Delegate_services.info tzresult Lwt.t end (** [init n] : returns an initial block with [n] initialized accounts and the associated implicit contracts *) -val init: - ?slow: bool -> +val init : + ?slow:bool -> ?preserved_cycles:int -> ?endorsers_per_block:int -> - int -> (Block.t * Alpha_context.Contract.t list) tzresult Lwt.t + int -> + (Block.t * Alpha_context.Contract.t list) tzresult Lwt.t diff --git a/src/lib_shell/bench/helpers/incremental.ml b/src/lib_shell/bench/helpers/incremental.ml index fbb711d6db0d44b15a2e0543e9a67b05a78bb5a6..e7c862e93e06a6bbef1a14c5dafdbf283d301541 100644 --- a/src/lib_shell/bench/helpers/incremental.ml +++ b/src/lib_shell/bench/helpers/incremental.ml @@ -27,96 +27,84 @@ open Proto_alpha open Alpha_context type t = { - predecessor: Block.t ; - state: M.validation_state ; - rev_operations: Operation.packed list ; - header: Block_header.t ; - delegate: Account.t ; + predecessor : Block.t; + state : M.validation_state; + rev_operations : Operation.packed list; + header : Block_header.t; + delegate : Account.t } + type incremental = t -let predecessor { predecessor ; _ } = predecessor +let predecessor {predecessor; _} = predecessor let level st = st.header.shell.level let rpc_context st = let result = Alpha_context.finalize st.state.ctxt in - { - Alpha_environment.Updater.block_hash = Block_hash.zero ; - block_header = { st.header.shell with fitness = result.fitness } ; - context = result.context ; - } + { Alpha_environment.Updater.block_hash = Block_hash.zero; + block_header = {st.header.shell with fitness = result.fitness}; + context = result.context } let rpc_ctxt = new Alpha_environment.proto_rpc_context_of_directory - rpc_context Proto_alpha.rpc_services + rpc_context + Proto_alpha.rpc_services -let begin_construction ?(priority=0) ?timestamp ?seed_nonce_hash (predecessor : Block.t) = - Block.get_next_baker ~policy:(Block.By_priority priority) - predecessor >>=? fun (delegate, priority, real_timestamp) -> - Account.find delegate >>=? 
fun delegate -> +let begin_construction ?(priority = 0) ?timestamp ?seed_nonce_hash + (predecessor : Block.t) = + Block.get_next_baker ~policy:(Block.By_priority priority) predecessor + >>=? fun (delegate, priority, real_timestamp) -> + Account.find delegate + >>=? fun delegate -> let timestamp = Option.unopt ~default:real_timestamp timestamp in let contents = Block.Forge.contents ~priority ?seed_nonce_hash () in - let protocol_data = { - Block_header.contents ; - signature = Signature.zero ; - } in - let header = { - Block_header.shell = { - predecessor = predecessor.hash ; - proto_level = predecessor.header.shell.proto_level ; - validation_passes = predecessor.header.shell.validation_passes ; - fitness = predecessor.header.shell.fitness ; - timestamp ; - level = predecessor.header.shell.level ; - context = Context_hash.zero ; - operations_hash = Operation_list_list_hash.zero ; - } ; - protocol_data = { - contents ; - signature = Signature.zero ; - } ; - } in + let protocol_data = {Block_header.contents; signature = Signature.zero} in + let header = + { Block_header.shell = + { predecessor = predecessor.hash; + proto_level = predecessor.header.shell.proto_level; + validation_passes = predecessor.header.shell.validation_passes; + fitness = predecessor.header.shell.fitness; + timestamp; + level = predecessor.header.shell.level; + context = Context_hash.zero; + operations_hash = Operation_list_list_hash.zero }; + protocol_data = {contents; signature = Signature.zero} } + in M.begin_construction ~chain_id:Chain_id.zero - ~predecessor_context: predecessor.context - ~predecessor_timestamp: predecessor.header.shell.timestamp - ~predecessor_fitness: predecessor.header.shell.fitness - ~predecessor_level: predecessor.header.shell.level + ~predecessor_context:predecessor.context + ~predecessor_timestamp:predecessor.header.shell.timestamp + ~predecessor_fitness:predecessor.header.shell.fitness + ~predecessor_level:predecessor.header.shell.level ~predecessor:predecessor.hash ~timestamp ~protocol_data - () >>=? fun state -> - return { - predecessor ; - state ; - rev_operations = [] ; - header ; - delegate ; - } + () + >>=? fun state -> + return {predecessor; state; rev_operations = []; header; delegate} let add_operation st op = - M.apply_operation st.state op >>=? fun (state, _result) -> - return { st with state ; rev_operations = op :: st.rev_operations } + M.apply_operation st.state op + >>=? fun (state, _result) -> + return {st with state; rev_operations = op :: st.rev_operations} let finalize_block st = - M.finalize_block st.state >>=? fun (result, _) -> + M.finalize_block st.state + >>=? 
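(* A note on the [>>=?] chains re-indented throughout these helpers:
   [>>=?] is the bind of the error monad (a [result] threaded through Lwt),
   and the new layout simply docks the operator at the start of the
   continuation line.  Below is a stdlib-only sketch of the same chaining
   style over plain [result]; the real helpers layer the identical idea
   over [Lwt.t], which is left out here to keep the sketch self-contained. *)
let ( >>=? ) r f = match r with Ok x -> f x | Error e -> Error e

let return x = Ok x

let checked_div x y = if y = 0 then Error "division by zero" else Ok (x / y)

let chained =
  checked_div 12 3
  >>=? fun q ->
  checked_div q 2
  >>=? fun q' -> return (q + q')

(* [chained] is [Ok 6]; making the first divisor 0 would short-circuit the
   whole chain to [Error "division by zero"] without running the rest. *)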
fun (result, _) -> let operations = List.rev st.rev_operations in let operations_hash = - Operation_list_list_hash.compute [ - Operation_list_hash.compute (List.map Operation.hash_packed operations) - ] in + Operation_list_list_hash.compute + [Operation_list_hash.compute (List.map Operation.hash_packed operations)] + in let header = { st.header with - shell = { - st.header.shell with - operations_hash ; fitness = result.fitness ; - level = Int32.succ st.header.shell.level - } } in + shell = + { st.header.shell with + operations_hash; + fitness = result.fitness; + level = Int32.succ st.header.shell.level } } + in let hash = Block_header.hash header in - return { - Block.hash ; - header ; - operations ; - context = result.context ; - } + return {Block.hash; header; operations; context = result.context} diff --git a/src/lib_shell/bench/helpers/incremental.mli b/src/lib_shell/bench/helpers/incremental.mli index 5ff563c395cf606dbc7997f800605dd70abefc00..735fe318677a23b290ec44274cc6304005e235a5 100644 --- a/src/lib_shell/bench/helpers/incremental.mli +++ b/src/lib_shell/bench/helpers/incremental.mli @@ -27,21 +27,23 @@ open Proto_alpha open Alpha_context type t + type incremental = t -val predecessor: incremental -> Block.t +val predecessor : incremental -> Block.t -val level: incremental -> int32 +val level : incremental -> int32 -val begin_construction: +val begin_construction : ?priority:int -> ?timestamp:Time.Protocol.t -> - ?seed_nonce_hash: Nonce_hash.t -> - Block.t -> incremental tzresult Lwt.t + ?seed_nonce_hash:Nonce_hash.t -> + Block.t -> + incremental tzresult Lwt.t -val add_operation: +val add_operation : incremental -> Operation.packed -> incremental tzresult Lwt.t -val finalize_block: incremental -> Block.t tzresult Lwt.t +val finalize_block : incremental -> Block.t tzresult Lwt.t -val rpc_ctxt: incremental Alpha_environment.RPC_context.simple +val rpc_ctxt : incremental Alpha_environment.RPC_context.simple diff --git a/src/lib_shell/bench/helpers/nonce.ml b/src/lib_shell/bench/helpers/nonce.ml index c5a42bf921d08ef0791c9a7fff70e9a62695d11e..6f5aff1fe6145659836178c29f3aa76bac27c1c2 100644 --- a/src/lib_shell/bench/helpers/nonce.ml +++ b/src/lib_shell/bench/helpers/nonce.ml @@ -25,25 +25,28 @@ open Proto_alpha -module Table = Hashtbl.Make(struct - type t = Nonce_hash.t - let hash h = - Int32.to_int (MBytes.get_int32 (Nonce_hash.to_bytes h) 0) - let equal = Nonce_hash.equal - end) +module Table = Hashtbl.Make (struct + type t = Nonce_hash.t + + let hash h = Int32.to_int (MBytes.get_int32 (Nonce_hash.to_bytes h) 0) + + let equal = Nonce_hash.equal +end) let known_nonces = Table.create 17 let generate () = match - Alpha_context.Nonce.of_bytes @@ - Rand.generate Alpha_context.Constants.nonce_length + Alpha_context.Nonce.of_bytes + @@ Rand.generate Alpha_context.Constants.nonce_length with | Ok nonce -> let hash = Alpha_context.Nonce.hash nonce in Table.add known_nonces hash nonce ; (hash, nonce) - | Error _ -> assert false + | Error _ -> + assert false let forget_all () = Table.clear known_nonces + let get hash = Table.find known_nonces hash diff --git a/src/lib_shell/bench/helpers/nonce.mli b/src/lib_shell/bench/helpers/nonce.mli index d959911573c1b31a68858357f6cafc19c7455c79..f5e838442dbb44c0b07a53bcb6bc6d22cd7b35b9 100644 --- a/src/lib_shell/bench/helpers/nonce.mli +++ b/src/lib_shell/bench/helpers/nonce.mli @@ -26,6 +26,8 @@ open Proto_alpha (** Returns a fresh nonce and its corresponding hash (and stores them). 
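(* A minimal, self-contained sketch of the memoisation pattern used in
   helpers/nonce.ml above: a [Hashtbl.Make] table keyed by the hash of a
   randomly generated value, with [generate] / [get] / [forget_all]
   analogues.  Everything below is plain stdlib; [Digest] and [Random] are
   stand-ins for Nonce_hash and Rand, not the APIs of the real code. *)
module Table = Hashtbl.Make (struct
  type t = string (* stand-in for Nonce_hash.t *)

  let equal = String.equal

  let hash = Hashtbl.hash
end)

let known_nonces : string Table.t = Table.create 17

let generate () =
  (* 32 random bytes play the role of Alpha_context.Nonce. *)
  let nonce = String.init 32 (fun _ -> Char.chr (Random.int 256)) in
  let hash = Digest.to_hex (Digest.string nonce) in
  Table.add known_nonces hash nonce ;
  (hash, nonce)

let get hash = Table.find known_nonces hash

let forget_all () = Table.clear known_nonces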
*) -val generate: unit -> Nonce_hash.t * Alpha_context.Nonce.t -val get: Nonce_hash.t -> Alpha_context.Nonce.t -val forget_all: unit -> unit +val generate : unit -> Nonce_hash.t * Alpha_context.Nonce.t + +val get : Nonce_hash.t -> Alpha_context.Nonce.t + +val forget_all : unit -> unit diff --git a/src/lib_shell/bench/helpers/op.ml b/src/lib_shell/bench/helpers/op.ml index ae1c69f5bb48d05fef12aa45b405b81b377d4ed3..dd8a8b1dc29bedfc4eba5c172da5718ce4ebd30e 100644 --- a/src/lib_shell/bench/helpers/op.ml +++ b/src/lib_shell/bench/helpers/op.ml @@ -26,201 +26,197 @@ open Proto_alpha open Alpha_context -let sign ?(watermark = Signature.Generic_operation) - sk ctxt contents = +let sign ?(watermark = Signature.Generic_operation) sk ctxt contents = let branch = Context.branch ctxt in let unsigned = Data_encoding.Binary.to_bytes_exn Operation.unsigned_encoding - ({ branch }, Contents_list contents) in + ({branch}, Contents_list contents) + in let signature = Some (Signature.sign ~watermark sk unsigned) in - ({ shell = { branch } ; - protocol_data = { - contents ; - signature ; - } ; - } : _ Operation.t) - -let endorsement ?delegate ?level ctxt = - fun ?(signing_context=ctxt) slots -> - begin - match delegate with - | None -> Context.get_endorser ctxt (List.hd slots) - | Some delegate -> return delegate - end >>=? fun delegate_pkh -> - Account.find delegate_pkh >>=? fun delegate -> - begin - match level with - | None -> Context.get_level ctxt - | Some level -> return level - end >>=? fun level -> - let op = - Single - (Endorsement { level }) in - return (sign ~watermark:(Signature.Endorsement Chain_id.zero) - delegate.sk signing_context op) + ({shell = {branch}; protocol_data = {contents; signature}} : _ Operation.t) + +let endorsement ?delegate ?level ctxt ?(signing_context = ctxt) slots = + ( match delegate with + | None -> + Context.get_endorser ctxt (List.hd slots) + | Some delegate -> + return delegate ) + >>=? fun delegate_pkh -> + Account.find delegate_pkh + >>=? fun delegate -> + ( match level with + | None -> + Context.get_level ctxt + | Some level -> + return level ) + >>=? fun level -> + let op = Single (Endorsement {level}) in + return + (sign + ~watermark:(Signature.Endorsement Chain_id.zero) + delegate.sk + signing_context + op) let sign ?watermark sk ctxt (Contents_list contents) = Operation.pack (sign ?watermark sk ctxt contents) open Tezos_protocol_alpha_parameters.Default_parameters -let manager_operation - ?(fee = Tez.zero) - ?(gas_limit = constants_mainnet.Constants_repr.hard_gas_limit_per_operation) - ?(storage_limit = constants_mainnet.Constants_repr.hard_storage_limit_per_operation) +let manager_operation ?(fee = Tez.zero) + ?(gas_limit = + constants_mainnet.Constants_repr.hard_gas_limit_per_operation) + ?(storage_limit = + constants_mainnet.Constants_repr.hard_storage_limit_per_operation) ?public_key ~source ctxt operation = - Context.Contract.counter ctxt source >>=? fun counter -> - Context.Contract.manager ctxt source >>=? fun account -> + Context.Contract.counter ctxt source + >>=? fun counter -> + Context.Contract.manager ctxt source + >>=? fun account -> let public_key = Option.unopt ~default:account.pk public_key in let counter = Z.succ counter in - Context.Contract.is_manager_key_revealed ctxt source >>=? function + Context.Contract.is_manager_key_revealed ctxt source + >>=? 
function | true -> let op = - Manager_operation { - source ; - fee ; - counter ; - operation ; - gas_limit ; - storage_limit ; - } in + Manager_operation + {source; fee; counter; operation; gas_limit; storage_limit} + in return (Contents_list (Single op)) | false -> let op_reveal = - Manager_operation { - source ; - fee = Tez.zero ; - counter ; - operation = Reveal public_key ; - gas_limit = Z.of_int 20 ; - storage_limit = Z.zero ; - } in + Manager_operation + { source; + fee = Tez.zero; + counter; + operation = Reveal public_key; + gas_limit = Z.of_int 20; + storage_limit = Z.zero } + in let op = - Manager_operation { - source ; - fee ; - counter = Z.succ counter ; - operation ; - gas_limit ; - storage_limit ; - } in + Manager_operation + { source; + fee; + counter = Z.succ counter; + operation; + gas_limit; + storage_limit } + in return (Contents_list (Cons (op_reveal, Single op))) let revelation ctxt public_key = let pkh = Signature.Public_key.hash public_key in let source = Contract.implicit_contract pkh in - Context.Contract.counter ctxt source >>=? fun counter -> - Context.Contract.manager ctxt source >>=? fun account -> + Context.Contract.counter ctxt source + >>=? fun counter -> + Context.Contract.manager ctxt source + >>=? fun account -> let counter = Z.succ counter in let sop = Contents_list (Single - (Manager_operation { - source ; - fee = Tez.zero ; - counter ; - operation = Reveal public_key ; - gas_limit = Z.of_int 20 ; - storage_limit = Z.zero ; - })) in + (Manager_operation + { source; + fee = Tez.zero; + counter; + operation = Reveal public_key; + gas_limit = Z.of_int 20; + storage_limit = Z.zero })) + in return @@ sign account.sk ctxt sop -let originated_contract (op: Operation.packed) = +let originated_contract (op : Operation.packed) = let nonce = Contract.initial_origination_nonce (Operation.hash_packed op) in Contract.originated_contract nonce exception Impossible -let origination ?delegate ?script - ?(spendable = true) ?(delegatable = true) ?(preorigination = None) - ?public_key ?manager ?credit ?fee ?gas_limit ?storage_limit ctxt source = - Context.Contract.manager ctxt source >>=? fun account -> +let origination ?delegate ?script ?(spendable = true) ?(delegatable = true) + ?(preorigination = None) ?public_key ?manager ?credit ?fee ?gas_limit + ?storage_limit ctxt source = + Context.Contract.manager ctxt source + >>=? fun account -> let manager = Option.unopt ~default:account.pkh manager in let default_credit = Tez.of_mutez @@ Int64.of_int 1000001 in let default_credit = Option.unopt_exn Impossible default_credit in let credit = Option.unopt ~default:default_credit credit in let operation = - Origination { - manager ; - delegate ; - script ; - spendable ; - delegatable ; - credit ; - preorigination ; - } in - manager_operation ?public_key ?fee ?gas_limit ?storage_limit - ~source ctxt operation >>=? fun sop -> + Origination + { manager; + delegate; + script; + spendable; + delegatable; + credit; + preorigination } + in + manager_operation + ?public_key + ?fee + ?gas_limit + ?storage_limit + ~source + ctxt + operation + >>=? fun sop -> let op = sign account.sk ctxt sop in - return (op , originated_contract op) + return (op, originated_contract op) let miss_signed_endorsement ?level ctxt slot = - begin - match level with - | None -> Context.get_level ctxt - | Some level -> return level - end >>=? fun level -> - Context.get_endorser ctxt slot >>=? fun real_delegate_pkh -> + ( match level with + | None -> + Context.get_level ctxt + | Some level -> + return level ) + >>=? 
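(* [manager_operation] above silently prepends a [Reveal] when the source's
   public key is not yet revealed, and exposes fee / limits as optional
   arguments with defaults.  A stdlib-only sketch of that shape — the record
   fields, numbers and key string are illustrative only, not the protocol's
   encoding: *)
type payload = Reveal of string | Transfer of int

type op = {fee : int; payload : payload}

let manager_batch ?(fee = 0) ~key_revealed ~public_key payload =
  let op = {fee; payload} in
  if key_revealed then [op]
  else {fee = 0; payload = Reveal public_key} :: [op]

let () =
  (* Unrevealed source: the batch carries the reveal first, then the payload. *)
  match manager_batch ~key_revealed:false ~public_key:"edpk..." (Transfer 42) with
  | [{payload = Reveal _; _}; {payload = Transfer 42; _}] -> ()
  | _ -> assert false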
fun level -> + Context.get_endorser ctxt slot + >>=? fun real_delegate_pkh -> let delegate = Account.find_alternate real_delegate_pkh in endorsement ~delegate:delegate.pkh ~level ctxt [slot] let transaction ?fee ?gas_limit ?storage_limit ?parameters ctxt - (src:Contract.t) (dst:Contract.t) - (amount:Tez.t) = - let top = Transaction { - amount; - parameters; - destination=dst; - } in - manager_operation ?fee ?gas_limit ?storage_limit - ~source:src ctxt top >>=? fun sop -> - Context.Contract.manager ctxt src >>=? fun account -> - return @@ sign account.sk ctxt sop + (src : Contract.t) (dst : Contract.t) (amount : Tez.t) = + let top = Transaction {amount; parameters; destination = dst} in + manager_operation ?fee ?gas_limit ?storage_limit ~source:src ctxt top + >>=? fun sop -> + Context.Contract.manager ctxt src + >>=? fun account -> return @@ sign account.sk ctxt sop let delegation ?fee ctxt source dst = let top = Delegation dst in - manager_operation ?fee ~source ctxt top >>=? fun sop -> - Context.Contract.manager ctxt source >>=? fun account -> - return @@ sign account.sk ctxt sop + manager_operation ?fee ~source ctxt top + >>=? fun sop -> + Context.Contract.manager ctxt source + >>=? fun account -> return @@ sign account.sk ctxt sop let activation ctxt (pkh : Signature.Public_key_hash.t) activation_code = - begin match pkh with - | Ed25519 edpkh -> return edpkh - | _ -> failwith "Wrong public key hash : %a - Commitments must be activated with an Ed25519 \ - encrypted public key hash" Signature.Public_key_hash.pp pkh - end >>=? fun id -> - let contents = - Single (Activate_account { id ; activation_code } ) in + ( match pkh with + | Ed25519 edpkh -> + return edpkh + | _ -> + failwith + "Wrong public key hash : %a - Commitments must be activated with an \ + Ed25519 encrypted public key hash" + Signature.Public_key_hash.pp + pkh ) + >>=? 
fun id -> + let contents = Single (Activate_account {id; activation_code}) in let branch = Context.branch ctxt in - return { - shell = { branch } ; - protocol_data = Operation_data { - contents ; - signature = None ; - } ; - } + return + { shell = {branch}; + protocol_data = Operation_data {contents; signature = None} } let double_endorsement ctxt op1 op2 = - let contents = - Single (Double_endorsement_evidence {op1 ; op2}) in + let contents = Single (Double_endorsement_evidence {op1; op2}) in let branch = Context.branch ctxt in - return { - shell = { branch } ; - protocol_data = Operation_data { - contents ; - signature = None ; - } ; - } + return + { shell = {branch}; + protocol_data = Operation_data {contents; signature = None} } let double_baking ctxt bh1 bh2 = - let contents = - Single (Double_baking_evidence {bh1 ; bh2}) in + let contents = Single (Double_baking_evidence {bh1; bh2}) in let branch = Context.branch ctxt in - return { - shell = { branch } ; - protocol_data = Operation_data { - contents ; - signature = None ; - } ; - } + return + { shell = {branch}; + protocol_data = Operation_data {contents; signature = None} } diff --git a/src/lib_shell/bench/helpers/op.mli b/src/lib_shell/bench/helpers/op.mli index d32266c8e2113939e219e7bfaa28e85aef3dac95..ac3dd8c4ad72a74650355a33b969c7f97d72e236 100644 --- a/src/lib_shell/bench/helpers/op.mli +++ b/src/lib_shell/bench/helpers/op.mli @@ -26,17 +26,21 @@ open Proto_alpha open Alpha_context -val endorsement: +val endorsement : ?delegate:public_key_hash -> ?level:Raw_level.t -> - Context.t -> ?signing_context:Context.t -> - int list -> Kind.endorsement Operation.t tzresult Lwt.t + Context.t -> + ?signing_context:Context.t -> + int list -> + Kind.endorsement Operation.t tzresult Lwt.t -val miss_signed_endorsement: +val miss_signed_endorsement : ?level:Raw_level.t -> - Context.t -> int -> Kind.endorsement Operation.t tzresult Lwt.t + Context.t -> + int -> + Kind.endorsement Operation.t tzresult Lwt.t -val transaction: +val transaction : ?fee:Tez.tez -> ?gas_limit:Z.t -> ?storage_limit:Z.t -> @@ -47,20 +51,21 @@ val transaction: Tez.t -> Operation.packed tzresult Lwt.t -val delegation: - ?fee:Tez.tez -> Context.t -> - Contract.t -> public_key_hash option -> +val delegation : + ?fee:Tez.tez -> + Context.t -> + Contract.t -> + public_key_hash option -> Operation.packed tzresult Lwt.t -val revelation: - Context.t -> public_key -> Operation.packed tzresult Lwt.t +val revelation : Context.t -> public_key -> Operation.packed tzresult Lwt.t -val origination: +val origination : ?delegate:public_key_hash -> ?script:Script.t -> ?spendable:bool -> ?delegatable:bool -> - ?preorigination: Contract.contract option -> + ?preorigination:Contract.contract option -> ?public_key:public_key -> ?manager:public_key_hash -> ?credit:Tez.tez -> @@ -71,22 +76,22 @@ val origination: Contract.contract -> (Operation.packed * Contract.contract) tzresult Lwt.t -val originated_contract: - Operation.packed -> Contract.contract +val originated_contract : Operation.packed -> Contract.contract -val double_endorsement: +val double_endorsement : Context.t -> Kind.endorsement Operation.t -> Kind.endorsement Operation.t -> Operation.packed tzresult Lwt.t -val double_baking: +val double_baking : Context.t -> Block_header.block_header -> Block_header.block_header -> Operation.packed tzresult Lwt.t -val activation: +val activation : Context.t -> - Signature.Public_key_hash.t -> Blinded_public_key_hash.activation_code -> + Signature.Public_key_hash.t -> + 
Blinded_public_key_hash.activation_code -> Operation.packed tzresult Lwt.t diff --git a/src/lib_shell/bench/helpers/proto_alpha.ml b/src/lib_shell/bench/helpers/proto_alpha.ml index f3a9b4d7886bf4113c3353ee9e7b8b8de91cfa04..dfdfa68a0cd71b471b6e41bf3f5d57e7fbf25452 100644 --- a/src/lib_shell/bench/helpers/proto_alpha.ml +++ b/src/lib_shell/bench/helpers/proto_alpha.ml @@ -24,21 +24,29 @@ (*****************************************************************************) module Proto = Tezos_protocol_alpha_parameters.Proto_alpha + module Block_services = struct include Block_services - include Block_services.Make(Proto)(Proto) + include Block_services.Make (Proto) (Proto) end + include Proto type alpha_error = Alpha_environment.Error_monad.error + type 'a alpha_tzresult = 'a Alpha_environment.Error_monad.tzresult -module M = Alpha_environment.Lift(Main) +module M = Alpha_environment.Lift (Main) -let register_error_kind - category ~id ~title ~description ?pp - encoding from_error to_error = +let register_error_kind category ~id ~title ~description ?pp encoding + from_error to_error = let id = "client." ^ Name.name ^ "." ^ id in register_error_kind - category ~id ~title ~description ?pp - encoding from_error to_error + category + ~id + ~title + ~description + ?pp + encoding + from_error + to_error diff --git a/src/lib_shell/bench/helpers/test.ml b/src/lib_shell/bench/helpers/test.ml index 0482df845ffba0bfd6da379705e2663606ee294d..827be14c206d768de41d873fd67b4ad1e20f92e1 100644 --- a/src/lib_shell/bench/helpers/test.ml +++ b/src/lib_shell/bench/helpers/test.ml @@ -25,11 +25,13 @@ (* Wraps an alcotest so that it prints correcly errors from the Error_monad. *) let tztest name speed f = - Alcotest_lwt.test_case name speed begin fun _sw () -> - f () >>= function - | Ok () -> Lwt.return_unit - | Error err -> - Tezos_stdlib_unix.Internal_event_unix.close () >>= fun () -> - Format.eprintf "WWW %a@." pp_print_error err ; - Lwt.fail Alcotest.Test_error - end + Alcotest_lwt.test_case name speed (fun _sw () -> + f () + >>= function + | Ok () -> + Lwt.return_unit + | Error err -> + Tezos_stdlib_unix.Internal_event_unix.close () + >>= fun () -> + Format.eprintf "WWW %a@." pp_print_error err ; + Lwt.fail Alcotest.Test_error) diff --git a/src/lib_shell/bench/helpers/test_tez.ml b/src/lib_shell/bench/helpers/test_tez.ml index 5d2aeb67a949c175fbdb17535bad0002227e604e..bcf535dbfd294d5a49f0a02134e2bef982fb484d 100644 --- a/src/lib_shell/bench/helpers/test_tez.ml +++ b/src/lib_shell/bench/helpers/test_tez.ml @@ -31,19 +31,25 @@ open Alpha_environment module Tez = struct include Tez - let ( +? ) t1 t2 = (t1 +? t2) |> wrap_error - let ( -? ) t1 t2 = (t1 -? t2) |> wrap_error - let ( *? ) t1 t2 = (t1 *? t2) |> wrap_error - let ( /? ) t1 t2 = (t1 /? t2) |> wrap_error + let ( +? ) t1 t2 = t1 +? t2 |> wrap_error + + let ( -? ) t1 t2 = t1 -? t2 |> wrap_error + + let ( *? ) t1 t2 = t1 *? t2 |> wrap_error + + let ( /? ) t1 t2 = t1 /? t2 |> wrap_error let ( + ) t1 t2 = match t1 +? 
t2 with - | Ok r -> r + | Ok r -> + r | Error _ -> Pervasives.failwith "adding tez" let of_int x = match Tez.of_mutez (Int64.mul (Int64.of_int x) 1_000_000L) with - | None -> invalid_arg "tez_of_int" - | Some x -> x + | None -> + invalid_arg "tez_of_int" + | Some x -> + x end diff --git a/src/lib_shell/bench/helpers/test_utils.ml b/src/lib_shell/bench/helpers/test_utils.ml index e71947bc7bf07b61be0f3b777ae03ec8f707e5f9..84c519a678cb3a42d4099227ce56612613b0dd88 100644 --- a/src/lib_shell/bench/helpers/test_utils.ml +++ b/src/lib_shell/bench/helpers/test_utils.ml @@ -25,19 +25,18 @@ (* This file should not depend on any other file from tests. *) -let (>>?=) x y = match x with - | Ok(a) -> y a - | Error(b) -> fail @@ List.hd b +let ( >>?= ) x y = match x with Ok a -> y a | Error b -> fail @@ List.hd b (** Like List.find but returns the index of the found element *) let findi p = let rec aux p i = function - | [] -> raise Not_found - | x :: l -> if p x then (x,i) else aux p (i+1) l + | [] -> + raise Not_found + | x :: l -> + if p x then (x, i) else aux p (i + 1) l in aux p 0 exception Pair_of_list -let pair_of_list = function - | [a;b] -> a,b - | _ -> raise Pair_of_list + +let pair_of_list = function [a; b] -> (a, b) | _ -> raise Pair_of_list diff --git a/src/lib_shell/block_directory.ml b/src/lib_shell/block_directory.ml index e1daa32153c250c56dc53d74602993cb02aa8892..69e3f90dd49e69d60dde26068ca4f13a467f59b5 100644 --- a/src/lib_shell/block_directory.ml +++ b/src/lib_shell/block_directory.ml @@ -25,456 +25,466 @@ let rec read_partial_context context path depth = (* non tail-recursive *) - if depth = 0 then - Lwt.return Block_services.Cut + if depth = 0 then Lwt.return Block_services.Cut else (* try to read as file *) - Context.get context path >>= function + Context.get context path + >>= function | Some v -> Lwt.return (Block_services.Key v) | None -> (* try to read as directory *) - Context.fold context path ~init:[] ~f: begin fun k acc -> - match k with - | `Key k | `Dir k -> - read_partial_context context k (depth-1) >>= fun v -> - let k = List.nth k ((List.length k)-1) in - Lwt.return ((k,v)::acc) - end >>= fun l -> - Lwt.return (Block_services.Dir (List.rev l)) + Context.fold context path ~init:[] ~f:(fun k acc -> + match k with + | `Key k | `Dir k -> + read_partial_context context k (depth - 1) + >>= fun v -> + let k = List.nth k (List.length k - 1) in + Lwt.return ((k, v) :: acc)) + >>= fun l -> Lwt.return (Block_services.Dir (List.rev l)) let build_raw_header_rpc_directory (module Proto : Block_services.PROTO) = - - let dir : (State.Chain.t * Block_hash.t * Block_header.t) RPC_directory.t ref = - ref RPC_directory.empty in - + let dir : (State.Chain.t * Block_hash.t * Block_header.t) RPC_directory.t ref + = + ref RPC_directory.empty + in let register0 s f = dir := - RPC_directory.register !dir (RPC_service.subst0 s) - (fun block p q -> f block p q) in - - let module Block_services = Block_services.Make(Proto)(Proto) in + RPC_directory.register !dir (RPC_service.subst0 s) (fun block p q -> + f block p q) + in + let module Block_services = Block_services.Make (Proto) (Proto) in let module S = Block_services.S in - - register0 S.hash begin fun (_, hash, _) () () -> - return hash - end ; - + register0 S.hash (fun (_, hash, _) () () -> return hash) ; (* block header *) - - register0 S.header begin fun (chain_state, hash, header) () () -> - let protocol_data = - Data_encoding.Binary.of_bytes_exn - Proto.block_header_data_encoding - header.protocol_data in - return { Block_services.hash ; 
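(* Stepping back to helpers/test_tez.ml above: the parentheses dropped
   around [(t1 +? t2)] are harmless, because an OCaml operator whose name
   starts with '+' binds tighter than one starting with '|', so
   [t1 +? t2 |> wrap_error] still parses as [(t1 +? t2) |> wrap_error].
   A stdlib-only sketch of the same pattern; [wrap_error] below is a
   stand-in over [option], not Alpha_environment's wrap_error: *)
module Checked_int64 = struct
  let wrap_error = function Some x -> Ok x | None -> Error "overflow"

  let ( +? ) a b =
    let s = Int64.add a b in
    (* detect signed overflow *)
    if (a > 0L && b > 0L && s < 0L) || (a < 0L && b < 0L && s >= 0L) then None
    else Some s

  (* parses as [(a +? b) |> wrap_error] *)
  let ( +! ) a b = a +? b |> wrap_error
end

let () =
  assert (Checked_int64.(1L +! 2L) = Ok 3L) ;
  assert (Checked_int64.(Int64.max_int +! 1L) = Error "overflow")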
chain_id = State.Chain.id chain_state ; - shell = header.shell ; protocol_data } - end ; - - register0 S.raw_header begin fun (_, _, header) () () -> - return (Data_encoding.Binary.to_bytes_exn Block_header.encoding header) - end ; - register0 S.Header.shell_header begin fun (_, _, header) () () -> - return header.shell - end ; - - register0 S.Header.protocol_data begin fun (_, _, header) () () -> - return - (Data_encoding.Binary.of_bytes_exn - Proto.block_header_data_encoding - header.protocol_data) - end ; - register0 S.Header.raw_protocol_data begin fun (_, _, header) () () -> - return header.protocol_data - end ; - + register0 S.header (fun (chain_state, hash, header) () () -> + let protocol_data = + Data_encoding.Binary.of_bytes_exn + Proto.block_header_data_encoding + header.protocol_data + in + return + { Block_services.hash; + chain_id = State.Chain.id chain_state; + shell = header.shell; + protocol_data }) ; + register0 S.raw_header (fun (_, _, header) () () -> + return (Data_encoding.Binary.to_bytes_exn Block_header.encoding header)) ; + register0 S.Header.shell_header (fun (_, _, header) () () -> + return header.shell) ; + register0 S.Header.protocol_data (fun (_, _, header) () () -> + return + (Data_encoding.Binary.of_bytes_exn + Proto.block_header_data_encoding + header.protocol_data)) ; + register0 S.Header.raw_protocol_data (fun (_, _, header) () () -> + return header.protocol_data) ; (* helpers *) - - register0 S.Helpers.Forge.block_header begin fun _block () header -> - return (Data_encoding.Binary.to_bytes_exn Block_header.encoding header) - end ; - + register0 S.Helpers.Forge.block_header (fun _block () header -> + return (Data_encoding.Binary.to_bytes_exn Block_header.encoding header)) ; (* protocols *) - - register0 S.protocols begin fun (chain_state, _hash, header) () () -> - State.Chain.get_level_indexed_protocol chain_state header >>= fun next_protocol_hash -> - State.Block.header_of_hash chain_state header.shell.predecessor >>= function - | None -> - return { - Tezos_shell_services.Block_services.current_protocol = next_protocol_hash ; - next_protocol = next_protocol_hash ; - } - | Some pred_header -> - State.Chain.get_level_indexed_protocol chain_state pred_header >>= fun protocol_hash -> - return { - Tezos_shell_services.Block_services.current_protocol = protocol_hash ; - next_protocol = next_protocol_hash ; - } - end ; - + register0 S.protocols (fun (chain_state, _hash, header) () () -> + State.Chain.get_level_indexed_protocol chain_state header + >>= fun next_protocol_hash -> + State.Block.header_of_hash chain_state header.shell.predecessor + >>= function + | None -> + return + { Tezos_shell_services.Block_services.current_protocol = + next_protocol_hash; + next_protocol = next_protocol_hash } + | Some pred_header -> + State.Chain.get_level_indexed_protocol chain_state pred_header + >>= fun protocol_hash -> + return + { Tezos_shell_services.Block_services.current_protocol = + protocol_hash; + next_protocol = next_protocol_hash }) ; !dir -let build_raw_rpc_directory - (module Proto : Block_services.PROTO) +let build_raw_rpc_directory (module Proto : Block_services.PROTO) (module Next_proto : Registered_protocol.T) = - - let dir : State.Block.block RPC_directory.t ref = - ref RPC_directory.empty in - + let dir : State.Block.block RPC_directory.t ref = ref RPC_directory.empty in let merge d = dir := RPC_directory.merge d !dir in let register0 s f = dir := - RPC_directory.register !dir (RPC_service.subst0 s) - (fun block p q -> f block p q) in + 
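(* [read_partial_context] at the top of block_directory.ml walks the context
   as a tree, returning [Cut] once the requested depth is exhausted, a [Key]
   for leaves and a [Dir] of recursively read children otherwise.  A purely
   in-memory sketch of the same traversal; the [tree] type below is an
   assumption standing in for the real storage API: *)
type tree = Leaf of string | Node of (string * tree) list

type partial = Cut | Key of string | Dir of (string * partial) list

let rec read_partial tree depth =
  if depth = 0 then Cut
  else
    match tree with
    | Leaf v -> Key v
    | Node children ->
        Dir
          (List.map
             (fun (name, child) -> (name, read_partial child (depth - 1)))
             children)

let () =
  let ctxt = Node [("contracts", Node [("tz1...", Leaf "balance")])] in
  assert (read_partial ctxt 1 = Dir [("contracts", Cut)]) ;
  assert (read_partial ctxt 2 = Dir [("contracts", Dir [("tz1...", Cut)])])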
RPC_directory.register !dir (RPC_service.subst0 s) (fun block p q -> + f block p q) + in let register1 s f = dir := - RPC_directory.register !dir (RPC_service.subst1 s) - (fun (block, a) p q -> f block a p q) in + RPC_directory.register !dir (RPC_service.subst1 s) (fun (block, a) p q -> + f block a p q) + in let register2 s f = dir := - RPC_directory.register !dir (RPC_service.subst2 s) - (fun ((block, a), b) p q -> f block a b p q) in - - let module Block_services = Block_services.Make(Proto)(Next_proto) in + RPC_directory.register + !dir + (RPC_service.subst2 s) + (fun ((block, a), b) p q -> f block a b p q) + in + let module Block_services = Block_services.Make (Proto) (Next_proto) in let module S = Block_services.S in - - register0 S.live_blocks begin fun block () () -> - State.Block.max_operations_ttl block >>=? fun max_op_ttl -> - Chain_traversal.live_blocks - block - max_op_ttl >>=? fun (live_blocks, _) -> - return live_blocks - end ; - + register0 S.live_blocks (fun block () () -> + State.Block.max_operations_ttl block + >>=? fun max_op_ttl -> + Chain_traversal.live_blocks block max_op_ttl + >>=? fun (live_blocks, _) -> return live_blocks) ; (* block metadata *) - let metadata block = - State.Block.metadata block >>=? fun metadata -> + State.Block.metadata block + >>=? fun metadata -> let protocol_data = Data_encoding.Binary.of_bytes_exn Proto.block_header_metadata_encoding - metadata in - State.Block.test_chain block >>= fun (test_chain_status, _) -> - State.Block.max_operations_ttl block >>=? fun max_operations_ttl -> - return { - Block_services.protocol_data ; - test_chain_status ; - max_operations_ttl ; - max_operation_data_length = Next_proto.max_operation_data_length ; - max_block_header_length = Next_proto.max_block_length ; - operation_list_quota = - List.map - (fun { Tezos_protocol_environment_shell.max_size; max_op } -> - { Tezos_shell_services.Block_services.max_size ; max_op } ) - Next_proto.validation_passes ; - } in - - register0 S.metadata begin fun block () () -> - metadata block - end ; - + metadata + in + State.Block.test_chain block + >>= fun (test_chain_status, _) -> + State.Block.max_operations_ttl block + >>=? 
fun max_operations_ttl -> + return + { Block_services.protocol_data; + test_chain_status; + max_operations_ttl; + max_operation_data_length = Next_proto.max_operation_data_length; + max_block_header_length = Next_proto.max_block_length; + operation_list_quota = + List.map + (fun {Tezos_protocol_environment_shell.max_size; max_op} -> + {Tezos_shell_services.Block_services.max_size; max_op}) + Next_proto.validation_passes } + in + register0 S.metadata (fun block () () -> metadata block) ; (* operations *) - let convert chain_id (op : Operation.t) metadata : Block_services.operation = let protocol_data = - Data_encoding.Binary.of_bytes_exn - Proto.operation_data_encoding - op.proto in + Data_encoding.Binary.of_bytes_exn Proto.operation_data_encoding op.proto + in let receipt = Data_encoding.Binary.of_bytes_exn Proto.operation_receipt_encoding - metadata in - { Block_services.chain_id ; - hash = Operation.hash op ; - shell = op.shell ; - protocol_data ; - receipt ; - } in - + metadata + in + { Block_services.chain_id; + hash = Operation.hash op; + shell = op.shell; + protocol_data; + receipt } + in let operations block = - State.Block.all_operations block >>= fun ops -> - State.Block.all_operations_metadata block >>= fun metadata -> - let chain_id = State.Block.chain_id block in - return (List.map2 (List.map2 (convert chain_id)) ops metadata) in - - register0 S.Operations.operations begin fun block () () -> - operations block - end ; - - register1 S.Operations.operations_in_pass begin fun block i () () -> - let chain_id = State.Block.chain_id block in - try - State.Block.operations block i >>= fun (ops, _path) -> - State.Block.operations_metadata block i >>= fun metadata -> - return (List.map2 (convert chain_id) ops metadata) - with _ -> Lwt.fail Not_found - end ; - - register2 S.Operations.operation begin fun block i j () () -> + State.Block.all_operations block + >>= fun ops -> + State.Block.all_operations_metadata block + >>= fun metadata -> let chain_id = State.Block.chain_id block in - begin try - State.Block.operations block i >>= fun (ops, _path) -> - State.Block.operations_metadata block i >>= fun metadata -> - Lwt.return (List.nth ops j, List.nth metadata j) - with _ -> Lwt.fail Not_found end >>= fun (op, md) -> - return (convert chain_id op md) - end ; - + return (List.map2 (List.map2 (convert chain_id)) ops metadata) + in + register0 S.Operations.operations (fun block () () -> operations block) ; + register1 S.Operations.operations_in_pass (fun block i () () -> + let chain_id = State.Block.chain_id block in + try + State.Block.operations block i + >>= fun (ops, _path) -> + State.Block.operations_metadata block i + >>= fun metadata -> return (List.map2 (convert chain_id) ops metadata) + with _ -> Lwt.fail Not_found) ; + register2 S.Operations.operation (fun block i j () () -> + let chain_id = State.Block.chain_id block in + ( try + State.Block.operations block i + >>= fun (ops, _path) -> + State.Block.operations_metadata block i + >>= fun metadata -> Lwt.return (List.nth ops j, List.nth metadata j) + with _ -> Lwt.fail Not_found ) + >>= fun (op, md) -> return (convert chain_id op md)) ; (* operation_hashes *) - - register0 S.Operation_hashes.operation_hashes begin fun block () () -> - State.Block.all_operation_hashes block >>= return - end ; - - register1 S.Operation_hashes.operation_hashes_in_pass begin fun block i () () -> - State.Block.operation_hashes block i >>= fun (ops, _) -> - return ops - end ; - - register2 S.Operation_hashes.operation_hash begin fun block i j () () -> - 
begin try - State.Block.operation_hashes block i >>= fun (ops, _) -> - Lwt.return (List.nth ops j) - with _ -> Lwt.fail Not_found end >>= fun op -> - return op - end ; - + register0 S.Operation_hashes.operation_hashes (fun block () () -> + State.Block.all_operation_hashes block >>= return) ; + register1 S.Operation_hashes.operation_hashes_in_pass (fun block i () () -> + State.Block.operation_hashes block i >>= fun (ops, _) -> return ops) ; + register2 S.Operation_hashes.operation_hash (fun block i j () () -> + ( try + State.Block.operation_hashes block i + >>= fun (ops, _) -> Lwt.return (List.nth ops j) + with _ -> Lwt.fail Not_found ) + >>= fun op -> return op) ; (* context *) - - register1 S.Context.read begin fun block path q () -> - let depth = Option.unopt ~default:max_int q#depth in - fail_unless (depth >= 0) - (Tezos_shell_services.Block_services.Invalid_depth_arg depth) >>=? fun () -> - State.Block.context block >>= fun context -> - Context.mem context path >>= fun mem -> - Context.dir_mem context path >>= fun dir_mem -> - if not (mem || dir_mem) then - Lwt.fail Not_found - else - read_partial_context context path depth >>= fun dir -> - return dir - end ; - + register1 S.Context.read (fun block path q () -> + let depth = Option.unopt ~default:max_int q#depth in + fail_unless + (depth >= 0) + (Tezos_shell_services.Block_services.Invalid_depth_arg depth) + >>=? fun () -> + State.Block.context block + >>= fun context -> + Context.mem context path + >>= fun mem -> + Context.dir_mem context path + >>= fun dir_mem -> + if not (mem || dir_mem) then Lwt.fail Not_found + else read_partial_context context path depth >>= fun dir -> return dir) ; (* info *) - - register0 S.info begin fun block () () -> - let chain_id = State.Block.chain_id block in - let hash = State.Block.hash block in - let header = State.Block.header block in - let shell = header.shell in - let protocol_data = - Data_encoding.Binary.of_bytes_exn - Proto.block_header_data_encoding - header.protocol_data in - metadata block >>=? fun metadata -> - operations block >>=? fun operations -> - return { Block_services.hash ; chain_id ; - header = { shell ; protocol_data } ; - metadata ; operations } - end ; - + register0 S.info (fun block () () -> + let chain_id = State.Block.chain_id block in + let hash = State.Block.hash block in + let header = State.Block.header block in + let shell = header.shell in + let protocol_data = + Data_encoding.Binary.of_bytes_exn + Proto.block_header_data_encoding + header.protocol_data + in + metadata block + >>=? fun metadata -> + operations block + >>=? 
fun operations -> + return + { Block_services.hash; + chain_id; + header = {shell; protocol_data}; + metadata; + operations }) ; (* helpers *) - - register0 S.Helpers.Preapply.block begin fun block q p -> - let timestamp = - match q#timestamp with - | None -> Time.System.to_protocol (Systime_os.now ()) - | Some time -> time in - let protocol_data = - Data_encoding.Binary.to_bytes_exn - Next_proto.block_header_data_encoding - p.protocol_data in - let operations = - List.map - (List.map - (fun op -> - let proto = - Data_encoding.Binary.to_bytes_exn - Next_proto.operation_data_encoding - op.Next_proto.protocol_data in - { Operation.shell = op.shell ; proto })) - p.operations in - Prevalidation.preapply - ~predecessor:block - ~timestamp - ~protocol_data - operations - end ; - - register0 S.Helpers.Preapply.operations begin fun block () ops -> - State.Block.context block >>= fun ctxt -> - let predecessor = State.Block.hash block in - let header = State.Block.shell_header block in - Next_proto.begin_construction - ~chain_id: (State.Block.chain_id block) - ~predecessor_context:ctxt - ~predecessor_timestamp:header.timestamp - ~predecessor_level:header.level - ~predecessor_fitness:header.fitness - ~predecessor - ~timestamp:(Time.System.to_protocol (Systime_os.now ())) () >>=? fun state -> - fold_left_s - (fun (state, acc) op -> - Next_proto.apply_operation state op >>=? fun (state, result) -> - return (state, (op.protocol_data, result) :: acc)) - (state, []) ops >>=? fun (state, acc) -> - Next_proto.finalize_block state >>=? fun _ -> - return (List.rev acc) - end ; - - register1 S.Helpers.complete begin fun block prefix () () -> - State.Block.context block >>= fun ctxt -> - Base58.complete prefix >>= fun l1 -> - Next_proto.complete_b58prefix ctxt prefix >>= fun l2 -> - return (l1 @ l2) - end ; - + register0 S.Helpers.Preapply.block (fun block q p -> + let timestamp = + match q#timestamp with + | None -> + Time.System.to_protocol (Systime_os.now ()) + | Some time -> + time + in + let protocol_data = + Data_encoding.Binary.to_bytes_exn + Next_proto.block_header_data_encoding + p.protocol_data + in + let operations = + List.map + (List.map (fun op -> + let proto = + Data_encoding.Binary.to_bytes_exn + Next_proto.operation_data_encoding + op.Next_proto.protocol_data + in + {Operation.shell = op.shell; proto})) + p.operations + in + Prevalidation.preapply + ~predecessor:block + ~timestamp + ~protocol_data + operations) ; + register0 S.Helpers.Preapply.operations (fun block () ops -> + State.Block.context block + >>= fun ctxt -> + let predecessor = State.Block.hash block in + let header = State.Block.shell_header block in + Next_proto.begin_construction + ~chain_id:(State.Block.chain_id block) + ~predecessor_context:ctxt + ~predecessor_timestamp:header.timestamp + ~predecessor_level:header.level + ~predecessor_fitness:header.fitness + ~predecessor + ~timestamp:(Time.System.to_protocol (Systime_os.now ())) + () + >>=? fun state -> + fold_left_s + (fun (state, acc) op -> + Next_proto.apply_operation state op + >>=? fun (state, result) -> + return (state, (op.protocol_data, result) :: acc)) + (state, []) + ops + >>=? fun (state, acc) -> + Next_proto.finalize_block state >>=? fun _ -> return (List.rev acc)) ; + register1 S.Helpers.complete (fun block prefix () () -> + State.Block.context block + >>= fun ctxt -> + Base58.complete prefix + >>= fun l1 -> + Next_proto.complete_b58prefix ctxt prefix >>= fun l2 -> return (l1 @ l2)) ; (* merge protocol rpcs... 
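(* [Helpers.Preapply.operations] above threads a validation state through
   the operation list with [fold_left_s], accumulating per-operation results
   and aborting on the first error.  The same control flow over plain
   [result] (no Lwt), with a toy "state" that just counts applied
   operations — an illustration of the fold, not the validator's logic: *)
let fold_left_e f init l =
  List.fold_left
    (fun acc x -> match acc with Error _ as e -> e | Ok acc -> f acc x)
    (Ok init)
    l

let apply_operation state op =
  if op = "bad" then Error ("refused: " ^ op) else Ok (state + 1, "applied " ^ op)

let preapply ops =
  match
    fold_left_e
      (fun (state, acc) op ->
        match apply_operation state op with
        | Ok (state, receipt) -> Ok (state, receipt :: acc)
        | Error e -> Error e)
      (0, [])
      ops
  with
  | Ok (_state, acc) -> Ok (List.rev acc)
  | Error e -> Error e

let () =
  assert (preapply ["a"; "b"] = Ok ["applied a"; "applied b"]) ;
  assert (preapply ["a"; "bad"] = Error "refused: bad")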
*) - merge (RPC_directory.map (fun block -> - let chain_state = State.Block.chain_state block in - let hash = State.Block.hash block in - let header = State.Block.header block in - Lwt.return (chain_state, hash, header)) + let chain_state = State.Block.chain_state block in + let hash = State.Block.hash block in + let header = State.Block.header block in + Lwt.return (chain_state, hash, header)) (build_raw_header_rpc_directory (module Proto))) ; - merge (RPC_directory.map (fun block -> - State.Block.context block >|= fun context -> - { Tezos_protocol_environment_shell. - block_hash = State.Block.hash block ; - block_header = State.Block.shell_header block ; - context }) + State.Block.context block + >|= fun context -> + { Tezos_protocol_environment_shell.block_hash = State.Block.hash block; + block_header = State.Block.shell_header block; + context }) Next_proto.rpc_services) ; - !dir let get_protocol hash = match Registered_protocol.get hash with - | None -> raise Not_found - | Some protocol -> protocol + | None -> + raise Not_found + | Some protocol -> + protocol let get_directory chain_state block = - State.Block.get_rpc_directory block >>= function - | Some dir -> Lwt.return dir - | None -> - State.Block.protocol_hash block >>= fun next_protocol_hash -> + State.Block.get_rpc_directory block + >>= function + | Some dir -> + Lwt.return dir + | None -> ( + State.Block.protocol_hash block + >>= fun next_protocol_hash -> let next_protocol = get_protocol next_protocol_hash in - State.Block.predecessor block >>= function + State.Block.predecessor block + >>= function | None -> - Lwt.return (build_raw_rpc_directory - (module Block_services.Fake_protocol) - next_protocol) - | Some pred -> - State.Chain.save_point chain_state >>= fun (save_point_level, _) -> - begin - if Compare.Int32.(State.Block.level pred < save_point_level) then - State.Chain.get_level_indexed_protocol - chain_state (State.Block.header pred) - else - State.Block.protocol_hash pred - end >>= fun protocol_hash -> + Lwt.return + (build_raw_rpc_directory + (module Block_services.Fake_protocol) + next_protocol) + | Some pred -> ( + State.Chain.save_point chain_state + >>= fun (save_point_level, _) -> + ( if Compare.Int32.(State.Block.level pred < save_point_level) then + State.Chain.get_level_indexed_protocol + chain_state + (State.Block.header pred) + else State.Block.protocol_hash pred ) + >>= fun protocol_hash -> let (module Proto) = get_protocol protocol_hash in - State.Block.get_rpc_directory block >>= function - | Some dir -> Lwt.return dir + State.Block.get_rpc_directory block + >>= function + | Some dir -> + Lwt.return dir | None -> let dir = build_raw_rpc_directory (module Proto) next_protocol in - State.Block.set_rpc_directory block dir >>= fun () -> - Lwt.return dir + State.Block.set_rpc_directory block dir + >>= fun () -> Lwt.return dir ) ) let get_header_directory chain_state header = State.Block.header_of_hash chain_state header.Block_header.shell.predecessor >>= function - | None -> (* should not happen *) + | None -> + (* should not happen *) Lwt.fail Not_found - | Some pred -> - State.Chain.get_level_indexed_protocol - chain_state pred >>= fun protocol_hash -> + | Some pred -> ( + State.Chain.get_level_indexed_protocol chain_state pred + >>= fun protocol_hash -> let (module Proto) = get_protocol protocol_hash in - State.Block.get_header_rpc_directory chain_state header >>= function + State.Block.get_header_rpc_directory chain_state header + >>= function | Some dir -> Lwt.return dir | None -> let dir = 
build_raw_header_rpc_directory (module Proto) in - State.Block.set_header_rpc_directory - chain_state header dir >>= fun () -> - Lwt.return dir + State.Block.set_header_rpc_directory chain_state header dir + >>= fun () -> Lwt.return dir ) let get_block chain_state = function | `Genesis -> - Chain.genesis chain_state >>= fun genesis -> - Lwt.return_some genesis + Chain.genesis chain_state >>= fun genesis -> Lwt.return_some genesis | `Head n -> - Chain.head chain_state >>= fun head -> - if n < 0 then - Lwt.return_none - else if n = 0 then - Lwt.return_some head + Chain.head chain_state + >>= fun head -> + if n < 0 then Lwt.return_none + else if n = 0 then Lwt.return_some head else State.Block.read_predecessor - chain_state ~pred:n ~below_save_point:true (State.Block.hash head) - | `Alias (_, n) | `Hash (_, n) as b -> - begin match b with - | `Alias (`Checkpoint, _) -> - State.Chain.checkpoint chain_state >>= fun checkpoint -> - Lwt.return (Block_header.hash checkpoint) - | `Alias (`Save_point, _) -> - State.Chain.save_point chain_state >>= fun (_, save_point) -> - Lwt.return save_point - | `Alias (`Caboose, _) -> - State.Chain.caboose chain_state >>= fun (_, caboose) -> - Lwt.return caboose - | `Hash (h, _) -> Lwt.return h - end >>= fun hash -> + chain_state + ~pred:n + ~below_save_point:true + (State.Block.hash head) + | (`Alias (_, n) | `Hash (_, n)) as b -> + ( match b with + | `Alias (`Checkpoint, _) -> + State.Chain.checkpoint chain_state + >>= fun checkpoint -> Lwt.return (Block_header.hash checkpoint) + | `Alias (`Save_point, _) -> + State.Chain.save_point chain_state + >>= fun (_, save_point) -> Lwt.return save_point + | `Alias (`Caboose, _) -> + State.Chain.caboose chain_state + >>= fun (_, caboose) -> Lwt.return caboose + | `Hash (h, _) -> + Lwt.return h ) + >>= fun hash -> if n < 0 then - State.Block.read_opt chain_state hash >|= Option.unopt_assert ~loc:__POS__ >>= fun block -> - Chain.head chain_state >>= fun head -> + State.Block.read_opt chain_state hash + >|= Option.unopt_assert ~loc:__POS__ + >>= fun block -> + Chain.head chain_state + >>= fun head -> let head_level = State.Block.level head in let block_level = State.Block.level block in let target = - Int32.(to_int (sub head_level (sub block_level (of_int n)))) in - begin if target < 0 then - Lwt.return_none - else - State.Block.read_predecessor - chain_state ~pred:target ~below_save_point:true (State.Block.hash head) end + Int32.(to_int (sub head_level (sub block_level (of_int n)))) + in + if target < 0 then Lwt.return_none + else + State.Block.read_predecessor + chain_state + ~pred:target + ~below_save_point:true + (State.Block.hash head) else if n = 0 then - Chain.genesis chain_state >>= fun genesis -> + Chain.genesis chain_state + >>= fun genesis -> let genesis_hash = State.Block.hash genesis in - if Block_hash.equal hash genesis_hash then - Lwt.return_some genesis + if Block_hash.equal hash genesis_hash then Lwt.return_some genesis else State.Block.read_predecessor - chain_state ~pred:0 ~below_save_point:true hash + chain_state + ~pred:0 + ~below_save_point:true + hash else State.Block.read_predecessor - chain_state ~pred:n ~below_save_point:true hash + chain_state + ~pred:n + ~below_save_point:true + hash | `Level i -> - Chain.head chain_state >>= fun head -> + Chain.head chain_state + >>= fun head -> let target = Int32.(to_int (sub (State.Block.level head) i)) in - if target < 0 then - Lwt.fail Not_found + if target < 0 then Lwt.fail Not_found else State.Block.read_predecessor - chain_state ~pred:target 
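(* In [get_block] above, a negative offset on [`Hash (h, n)] or [`Alias (_, n)]
   is resolved as a block above the referenced one, expressed as a number of
   predecessors of the current head: target = head_level - (block_level - n).
   A quick numeric check of that formula (the levels are made up): *)
let () =
  let head_level = 1000l and block_level = 990l and n = -3 in
  let target =
    Int32.(to_int (sub head_level (sub block_level (of_int n))))
  in
  (* block_level - n = 993, i.e. 3 levels above the referenced block, and
     1000 - 993 = 7 predecessors below the head. *)
  assert (target = 7)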
~below_save_point:true (State.Block.hash head) + chain_state + ~pred:target + ~below_save_point:true + (State.Block.hash head) let build_rpc_directory chain_state block = - get_block chain_state block >>= function + get_block chain_state block + >>= function | None -> Lwt.fail Not_found | Some b -> - State.Chain.save_point chain_state >>= fun (save_point_level, _) -> + State.Chain.save_point chain_state + >>= fun (save_point_level, _) -> let block_level = State.Block.level b in let block_hash = State.Block.hash b in let genesis = State.Chain.genesis chain_state in - if block_level >= save_point_level || Block_hash.equal block_hash genesis.block then - get_directory chain_state b >>= fun dir -> + if + block_level >= save_point_level + || Block_hash.equal block_hash genesis.block + then + get_directory chain_state b + >>= fun dir -> Lwt.return (RPC_directory.map (fun _ -> Lwt.return b) dir) else let header = State.Block.header b in let hash = State.Block.hash b in - get_header_directory chain_state header >>= fun dir -> - Lwt.return (RPC_directory.map (fun _ -> Lwt.return (chain_state, hash, header)) dir) + get_header_directory chain_state header + >>= fun dir -> + Lwt.return + (RPC_directory.map + (fun _ -> Lwt.return (chain_state, hash, header)) + dir) diff --git a/src/lib_shell/block_directory.mli b/src/lib_shell/block_directory.mli index cdef813ec5b57a169f5ff1286e00f45da600bed6..ca3cd29e33bf825451f9bd79f9244c49d761faef 100644 --- a/src/lib_shell/block_directory.mli +++ b/src/lib_shell/block_directory.mli @@ -23,14 +23,13 @@ (* *) (*****************************************************************************) -val get_block: State.Chain.t -> Block_services.block -> State.Block.t option Lwt.t +val get_block : + State.Chain.t -> Block_services.block -> State.Block.t option Lwt.t -val build_raw_rpc_directory: +val build_raw_rpc_directory : (module Block_services.PROTO) -> (module Registered_protocol.T) -> State.Block.t RPC_directory.directory -val build_rpc_directory: - State.Chain.t -> - Block_services.block -> - 'a RPC_directory.t Lwt.t +val build_rpc_directory : + State.Chain.t -> Block_services.block -> 'a RPC_directory.t Lwt.t diff --git a/src/lib_shell/block_validator.ml b/src/lib_shell/block_validator.ml index 83fa15f0611ab90b8989aded21282da3e1e12aec..5e6449f5532be7023aa7b547636466497c5ee888 100644 --- a/src/lib_shell/block_validator.ml +++ b/src/lib_shell/block_validator.ml @@ -28,8 +28,8 @@ open Block_validator_worker_state open Block_validator_errors type limits = { - protocol_timeout: Time.System.Span.t ; - worker_limits : Worker_types.limits ; + protocol_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } type validator_kind = Block_validator_process.validator_kind = @@ -37,165 +37,207 @@ type validator_kind = Block_validator_process.validator_kind = module Name = struct type t = unit + let encoding = Data_encoding.empty - let base = [ "validator.block" ] + + let base = ["validator.block"] + let pp _ () = () end module Types = struct include Worker_state + type state = { - protocol_validator: Protocol_validator.t ; - validation_process: Block_validator_process.t ; - limits : limits ; - start_testchain : bool ; + protocol_validator : Protocol_validator.t; + validation_process : Block_validator_process.t; + limits : limits; + start_testchain : bool } - type parameters = limits * bool * Distributed_db.t * Block_validator_process.validator_kind + + type parameters = + limits * bool * Distributed_db.t * Block_validator_process.validator_kind + let view _state 
_parameters = () end module Request = struct include Request + type 'a t = - | Request_validation : { - chain_db: Distributed_db.chain_db ; - notify_new_block: State.Block.t -> unit ; - canceler: Lwt_canceler.t option ; - peer: P2p_peer.Id.t option ; - hash: Block_hash.t ; - header: Block_header.t ; - operations: Operation.t list list ; - } -> State.Block.t option tzresult t - let view - : type a. a t -> view - = fun (Request_validation { chain_db ; peer ; hash ; _ }) -> - let chain_id = chain_db |> Distributed_db.chain_state |> State.Chain.id in - { chain_id ; block = hash ; peer = peer } + | Request_validation : + { chain_db : Distributed_db.chain_db; + notify_new_block : State.Block.t -> unit; + canceler : Lwt_canceler.t option; + peer : P2p_peer.Id.t option; + hash : Block_hash.t; + header : Block_header.t; + operations : Operation.t list list } + -> State.Block.t option tzresult t + + let view : type a. a t -> view = + fun (Request_validation {chain_db; peer; hash; _}) -> + let chain_id = chain_db |> Distributed_db.chain_state |> State.Chain.id in + {chain_id; block = hash; peer} end module Worker = Worker.Make (Name) (Event) (Request) (Types) type t = Worker.infinite Worker.queue Worker.t -let debug w = - Format.kasprintf (fun msg -> Worker.record_event w (Debug msg)) +let debug w = Format.kasprintf (fun msg -> Worker.record_event w (Debug msg)) -let check_chain_liveness chain_db hash (header: Block_header.t) = +let check_chain_liveness chain_db hash (header : Block_header.t) = let chain_state = Distributed_db.chain_state chain_db in match State.Chain.expiration chain_state with | Some eol when Time.Protocol.(eol <= header.shell.timestamp) -> - fail @@ invalid_block hash @@ - Expired_chain { chain_id = State.Chain.id chain_state ; - expiration = eol ; - timestamp = header.shell.timestamp } - | None | Some _ -> return_unit - -let on_request - : type r. t -> r Request.t -> r tzresult Lwt.t - = fun w - (Request.Request_validation - { chain_db ; notify_new_block ; canceler ; - peer ; hash ; header ; operations }) -> - let bv = Worker.state w in - let chain_state = Distributed_db.chain_state chain_db in - State.Block.read_opt chain_state hash >>= function - | Some block -> - debug w "previously validated block %a (after pipe)" - Block_hash.pp_short hash ; - Protocol_validator.prefetch_and_compile_protocols - bv.protocol_validator - ?peer ~timeout:bv.limits.protocol_timeout - block ; - return (Ok None) - | None -> - State.Block.read_invalid chain_state hash >>= function - | Some { errors ; _ } -> - return (Error errors) - | None -> - State.Chain.save_point chain_state >>= fun (save_point_lvl, _) -> - (* Safety and late workers in partial mode. *) - if Compare.Int32.(header.shell.level < save_point_lvl) then - return (Ok None) - else - begin - debug w "validating block %a" Block_hash.pp_short hash ; - State.Block.read - chain_state header.shell.predecessor >>=? fun pred -> - (* TODO also protect with [Worker.canceler w]. *) - protect ?canceler begin fun () -> - begin Block_validator_process.apply_block - bv.validation_process - ~predecessor:pred - header operations >>= function - | Ok x -> return x - | Error [ Missing_test_protocol protocol ] -> - Protocol_validator.fetch_and_compile_protocol - bv.protocol_validator - ?peer ~timeout:bv.limits.protocol_timeout - protocol >>=? fun _ -> - Block_validator_process.apply_block - bv.validation_process - ~predecessor:pred - header operations - | Error _ as x -> Lwt.return x - end >>=? 
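(* The block validator's [Request.t] above is a GADT: its single
   [Request_validation] constructor fixes the reply type to
   [State.Block.t option tzresult], and [view] uses a locally abstract type
   so it can inspect a request of any reply type.  A self-contained sketch
   of that pattern with toy requests (the constructors below are invented
   for illustration): *)
type _ request =
  | Validate : {hash : string; level : int} -> bool request
  | Shutdown : unit request

type view = {description : string}

let view : type a. a request -> view = function
  | Validate {hash; level} ->
      {description = Printf.sprintf "validate %s at level %d" hash level}
  | Shutdown -> {description = "shutdown"}

let handle : type a. a request -> a = function
  | Validate {level; _} -> level >= 0 (* reply type is bool *)
  | Shutdown -> () (* reply type is unit *)

let () =
  assert ((view (Validate {hash = "B..."; level = 5})).description <> "") ;
  assert (handle (Validate {hash = "B..."; level = 5}))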
fun { validation_result ; block_metadata ; - ops_metadata ; context_hash ; forking_testchain } -> + fail @@ invalid_block hash + @@ Expired_chain + { chain_id = State.Chain.id chain_state; + expiration = eol; + timestamp = header.shell.timestamp } + | None | Some _ -> + return_unit + +let on_request : type r. t -> r Request.t -> r tzresult Lwt.t = + fun w + (Request.Request_validation + {chain_db; notify_new_block; canceler; peer; hash; header; operations}) -> + let bv = Worker.state w in + let chain_state = Distributed_db.chain_state chain_db in + State.Block.read_opt chain_state hash + >>= function + | Some block -> + debug + w + "previously validated block %a (after pipe)" + Block_hash.pp_short + hash ; + Protocol_validator.prefetch_and_compile_protocols + bv.protocol_validator + ?peer + ~timeout:bv.limits.protocol_timeout + block ; + return (Ok None) + | None -> ( + State.Block.read_invalid chain_state hash + >>= function + | Some {errors; _} -> + return (Error errors) + | None -> ( + State.Chain.save_point chain_state + >>= fun (save_point_lvl, _) -> + (* Safety and late workers in partial mode. *) + if Compare.Int32.(header.shell.level < save_point_lvl) then + return (Ok None) + else + ( debug w "validating block %a" Block_hash.pp_short hash ; + State.Block.read chain_state header.shell.predecessor + >>=? fun pred -> + (* TODO also protect with [Worker.canceler w]. *) + protect ?canceler (fun () -> + Block_validator_process.apply_block + bv.validation_process + ~predecessor:pred + header + operations + >>= (function + | Ok x -> + return x + | Error [Missing_test_protocol protocol] -> + Protocol_validator.fetch_and_compile_protocol + bv.protocol_validator + ?peer + ~timeout:bv.limits.protocol_timeout + protocol + >>=? fun _ -> + Block_validator_process.apply_block + bv.validation_process + ~predecessor:pred + header + operations + | Error _ as x -> + Lwt.return x) + >>=? fun { validation_result; + block_metadata; + ops_metadata; + context_hash; + forking_testchain } -> let validation_store = - ({ context_hash ; - message = validation_result.message ; - max_operations_ttl = validation_result.max_operations_ttl ; - last_allowed_fork_level = validation_result.last_allowed_fork_level} : - State.Block.validation_store) in + ( { context_hash; + message = validation_result.message; + max_operations_ttl = + validation_result.max_operations_ttl; + last_allowed_fork_level = + validation_result.last_allowed_fork_level } + : State.Block.validation_store ) + in Distributed_db.commit_block - chain_db hash - header block_metadata operations ops_metadata + chain_db + hash + header + block_metadata + operations + ops_metadata validation_store - ~forking_testchain >>=? function - | None -> assert false (* should not happen *) - | Some block -> return block - end - end >>= function - | Ok block -> - Protocol_validator.prefetch_and_compile_protocols - bv.protocol_validator - ?peer ~timeout:bv.limits.protocol_timeout - block ; - notify_new_block block ; - return (Ok (Some block)) - | Error [Canceled | Unavailable_protocol _ | Missing_test_protocol _ | System_error _ ] as err -> - (* FIXME: Canceled can escape. Canceled is not registered. BOOM! *) - return err - | Error errors -> - Worker.protect w begin fun () -> + ~forking_testchain + >>=? 
function + | None -> + assert false (* should not happen *) + | Some block -> + return block) ) + >>= function + | Ok block -> + Protocol_validator.prefetch_and_compile_protocols + bv.protocol_validator + ?peer + ~timeout:bv.limits.protocol_timeout + block ; + notify_new_block block ; + return (Ok (Some block)) + | Error + [ ( Canceled + | Unavailable_protocol _ + | Missing_test_protocol _ + | System_error _ ) ] as err -> + (* FIXME: Canceled can escape. Canceled is not registered. BOOM! *) + return err + | Error errors -> + Worker.protect w (fun () -> Distributed_db.commit_invalid_block - chain_db hash header errors - end >>=? fun commited -> - assert commited ; - return (Error errors) + chain_db + hash + header + errors) + >>=? fun commited -> + assert commited ; + return (Error errors) ) ) let on_launch _ _ (limits, start_testchain, db, validation_kind) = let protocol_validator = Protocol_validator.create db in - Block_validator_process.init validation_kind >>= fun validation_process -> - return { Types.protocol_validator ; validation_process ; limits ; start_testchain } + Block_validator_process.init validation_kind + >>= fun validation_process -> + return {Types.protocol_validator; validation_process; limits; start_testchain} let on_error w r st errs = Worker.record_event w (Validation_failure (r, st, errs)) ; Lwt.return_error errs -let on_completion - : type a. t -> a Request.t -> a -> Worker_types.request_status -> unit Lwt.t - = fun w (Request.Request_validation _ as r) v st -> - match v with - | Ok (Some _) -> - Worker.record_event w - (Event.Validation_success (Request.view r, st)) ; - Lwt.return_unit - | Ok None -> - Lwt.return_unit - | Error errs -> - Worker.record_event w - (Event.Validation_failure (Request.view r, st, errs)) ; - Lwt.return_unit +let on_completion : + type a. 
t -> a Request.t -> a -> Worker_types.request_status -> unit Lwt.t + = + fun w (Request.Request_validation _ as r) v st -> + match v with + | Ok (Some _) -> + Worker.record_event w (Event.Validation_success (Request.view r, st)) ; + Lwt.return_unit + | Ok None -> + Lwt.return_unit + | Error errs -> + Worker.record_event + w + (Event.Validation_failure (Request.view r, st, errs)) ; + Lwt.return_unit let on_close w = let bv = Worker.state w in @@ -206,11 +248,17 @@ let table = Worker.create_table Queue let create limits db validation_process_kind ~start_testchain = let module Handlers = struct type self = t + let on_launch = on_launch + let on_request = on_request + let on_close = on_close + let on_error = on_error + let on_completion = on_completion + let on_no_request _ = return_unit end in Worker.launch @@ -222,42 +270,58 @@ let create limits db validation_process_kind ~start_testchain = let shutdown = Worker.shutdown -let validate w - ?canceler ?peer ?(notify_new_block = fun _ -> ()) - chain_db hash (header : Block_header.t) operations = +let validate w ?canceler ?peer ?(notify_new_block = fun _ -> ()) chain_db hash + (header : Block_header.t) operations = let bv = Worker.state w in let chain_state = Distributed_db.chain_state chain_db in - State.Block.read_opt chain_state hash >>= function + State.Block.read_opt chain_state hash + >>= function | Some block -> - debug w "previously validated block %a (before pipe)" - Block_hash.pp_short hash ; + debug + w + "previously validated block %a (before pipe)" + Block_hash.pp_short + hash ; Protocol_validator.prefetch_and_compile_protocols bv.protocol_validator - ?peer ~timeout:bv.limits.protocol_timeout + ?peer + ~timeout:bv.limits.protocol_timeout block ; return_none | None -> - map_p (map_p (fun op -> - let op_hash = Operation.hash op in - return op_hash)) - operations >>=? fun hashes -> + map_p + (map_p (fun op -> + let op_hash = Operation.hash op in + return op_hash)) + operations + >>=? fun hashes -> let computed_hash = Operation_list_list_hash.compute - (List.map Operation_list_hash.compute hashes) in + (List.map Operation_list_hash.compute hashes) + in fail_when - (Operation_list_list_hash.compare - computed_hash header.shell.operations_hash <> 0) - (Inconsistent_operations_hash { - block = hash ; - expected = header.shell.operations_hash ; - found = computed_hash ; - }) >>=? fun () -> - check_chain_liveness chain_db hash header >>=? fun () -> - Worker.Queue.push_request_and_wait w + ( Operation_list_list_hash.compare + computed_hash + header.shell.operations_hash + <> 0 ) + (Inconsistent_operations_hash + { block = hash; + expected = header.shell.operations_hash; + found = computed_hash }) + >>=? fun () -> + check_chain_liveness chain_db hash header + >>=? fun () -> + Worker.Queue.push_request_and_wait + w (Request_validation - { chain_db ; notify_new_block ; canceler ; - peer ; hash ; header ; operations }) >>=? fun result -> - Lwt.return result + { chain_db; + notify_new_block; + canceler; + peer; + hash; + header; + operations }) + >>=? 
fun result -> Lwt.return result let fetch_and_compile_protocol w = let bv = Worker.state w in @@ -267,8 +331,10 @@ let status = Worker.status let running_worker () = match Worker.list table with - | (_, single) :: _ -> single - | [] -> raise Not_found + | (_, single) :: _ -> + single + | [] -> + raise Not_found let pending_requests t = Worker.Queue.pending_requests t diff --git a/src/lib_shell/block_validator.mli b/src/lib_shell/block_validator.mli index 5d286359d03d77540c311649b5628eb79701d8d1..cd01bafa3d528ab8c61677909c834691fd36e575 100644 --- a/src/lib_shell/block_validator.mli +++ b/src/lib_shell/block_validator.mli @@ -27,38 +27,50 @@ type t type limits = { - protocol_timeout: Time.System.Span.t ; - worker_limits : Worker_types.limits ; + protocol_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } -type validator_kind = - | Internal of Context.index +type validator_kind = Internal of Context.index -val create: - limits -> Distributed_db.t -> validator_kind -> +val create : + limits -> + Distributed_db.t -> + validator_kind -> start_testchain:bool -> t tzresult Lwt.t -val validate: +val validate : t -> ?canceler:Lwt_canceler.t -> ?peer:P2p_peer.Id.t -> ?notify_new_block:(State.Block.t -> unit) -> Distributed_db.chain_db -> - Block_hash.t -> Block_header.t -> Operation.t list list -> + Block_hash.t -> + Block_header.t -> + Operation.t list list -> State.Block.t option tzresult Lwt.t -val fetch_and_compile_protocol: +val fetch_and_compile_protocol : t -> ?peer:P2p_peer.Id.t -> ?timeout:Time.System.Span.t -> - Protocol_hash.t -> Registered_protocol.t tzresult Lwt.t + Protocol_hash.t -> + Registered_protocol.t tzresult Lwt.t -val shutdown: t -> unit Lwt.t +val shutdown : t -> unit Lwt.t -val running_worker: unit -> t -val status: t -> Worker_types.worker_status +val running_worker : unit -> t -val pending_requests : t -> (Time.System.t * Block_validator_worker_state.Request.view) list -val current_request : t -> (Time.System.t * Time.System.t * Block_validator_worker_state.Request.view) option -val last_events : t -> (Internal_event.level * Block_validator_worker_state.Event.t list) list +val status : t -> Worker_types.worker_status + +val pending_requests : + t -> (Time.System.t * Block_validator_worker_state.Request.view) list + +val current_request : + t -> + (Time.System.t * Time.System.t * Block_validator_worker_state.Request.view) + option + +val last_events : + t -> (Internal_event.level * Block_validator_worker_state.Event.t list) list diff --git a/src/lib_shell/block_validator_process.ml b/src/lib_shell/block_validator_process.ml index a20a952190680431f4a6a172f1b74249c406a12b..cbab80a1ea15fac5d28efb2354595002a07020e5 100644 --- a/src/lib_shell/block_validator_process.ml +++ b/src/lib_shell/block_validator_process.ml @@ -25,40 +25,34 @@ (*****************************************************************************) let get_context index hash = - Context.checkout index hash >>= function - | None -> fail (Block_validator_errors.Failed_to_checkout_context hash) - | Some ctx -> return ctx + Context.checkout index hash + >>= function + | None -> + fail (Block_validator_errors.Failed_to_checkout_context hash) + | Some ctx -> + return ctx (** The standard block validation method *) module Seq_validator = struct - include Internal_event.Legacy_logging.Make (struct - let name = "validation_process.sequential" - end) + let name = "validation_process.sequential" + end) - type validation_context = { - context_index : Context.index ; - } + type validation_context = 
{context_index : Context.index} type t = validation_context let init context_index = - lwt_log_notice "Initialized" >>= fun () -> - Lwt.return { context_index } + lwt_log_notice "Initialized" >>= fun () -> Lwt.return {context_index} - let close _ = - lwt_log_notice "Shutting down..." >>= fun () -> - Lwt.return_unit + let close _ = lwt_log_notice "Shutting down..." >>= fun () -> Lwt.return_unit - let apply_block - validator_process - chain_id - ~max_operations_ttl - ~(predecessor_block_header : Block_header.t) - ~block_header - operations = - get_context validator_process.context_index - predecessor_block_header.shell.context >>=? fun predecessor_context -> + let apply_block validator_process chain_id ~max_operations_ttl + ~(predecessor_block_header : Block_header.t) ~block_header operations = + get_context + validator_process.context_index + predecessor_block_header.shell.context + >>=? fun predecessor_context -> Block_validation.apply chain_id ~max_operations_ttl @@ -66,42 +60,43 @@ module Seq_validator = struct ~predecessor_context ~block_header operations - end -type validator_kind = - | Internal of Context.index +type validator_kind = Internal of Context.index -type t = - | Sequential of Seq_validator.t +type t = Sequential of Seq_validator.t let init = function | Internal index -> - Seq_validator.init index >>= fun v -> - Lwt.return (Sequential v) + Seq_validator.init index >>= fun v -> Lwt.return (Sequential v) -let close = function - | Sequential vp -> Seq_validator.close vp +let close = function Sequential vp -> Seq_validator.close vp let apply_block bvp ~predecessor block_header operations = let chain_state = State.Block.chain_state predecessor in let chain_id = State.Block.chain_id predecessor in let predecessor_block_header = State.Block.header predecessor in - State.Block.max_operations_ttl predecessor >>=? fun max_operations_ttl -> + State.Block.max_operations_ttl predecessor + >>=? fun max_operations_ttl -> let block_hash = Block_header.hash block_header in - begin - Chain.data chain_state >>= fun chain_data -> - if State.Block.equal chain_data.current_head predecessor then - return (chain_data.live_blocks, chain_data.live_operations) - else - Chain_traversal.live_blocks - predecessor max_operations_ttl - end >>=? fun (live_blocks, live_operations) -> + Chain.data chain_state + >>= (fun chain_data -> + if State.Block.equal chain_data.current_head predecessor then + return (chain_data.live_blocks, chain_data.live_operations) + else Chain_traversal.live_blocks predecessor max_operations_ttl) + >>=? fun (live_blocks, live_operations) -> Block_validation.check_liveness - ~live_operations ~live_blocks block_hash operations >>=? fun () -> + ~live_operations + ~live_blocks + block_hash + operations + >>=? 
fun () -> match bvp with | Sequential vp -> - Seq_validator.apply_block vp + Seq_validator.apply_block + vp ~max_operations_ttl - chain_id ~predecessor_block_header - ~block_header operations + chain_id + ~predecessor_block_header + ~block_header + operations diff --git a/src/lib_shell/block_validator_process.mli b/src/lib_shell/block_validator_process.mli index dacb85125adbca832be629cf77ffa86989d1f8da..365adc2e84451ce64d52438d88e175cc2a5ecebb 100644 --- a/src/lib_shell/block_validator_process.mli +++ b/src/lib_shell/block_validator_process.mli @@ -24,12 +24,12 @@ (* *) (*****************************************************************************) -type validator_kind = - | Internal of Context.index +type validator_kind = Internal of Context.index type t val init : validator_kind -> t Lwt.t + val close : t -> unit Lwt.t val apply_block : diff --git a/src/lib_shell/bootstrap_pipeline.ml b/src/lib_shell/bootstrap_pipeline.ml index a7905809a21c21724f88c1ec4c9f8c91513bf563..9ec6b169f95a1a6c2b080937e2b1a2f777104916 100644 --- a/src/lib_shell/bootstrap_pipeline.ml +++ b/src/lib_shell/bootstrap_pipeline.ml @@ -23,11 +23,13 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make_semantic - (struct let name = "node.validator.bootstrap_pipeline" end) +include Internal_event.Legacy_logging.Make_semantic (struct + let name = "node.validator.bootstrap_pipeline" +end) let node_time_tag = Tag.def ~doc:"local time at this node" "node_time" Time.System.pp_hum + let block_time_tag = Tag.def ~doc:"claimed creation time of block" @@ -37,374 +39,440 @@ let block_time_tag = open Validation_errors type t = { - canceler: Lwt_canceler.t ; - block_header_timeout: Time.System.Span.t ; - block_operations_timeout: Time.System.Span.t ; - mutable headers_fetch_worker: unit Lwt.t ; - mutable operations_fetch_worker: unit Lwt.t ; - mutable validation_worker: unit Lwt.t ; - peer_id: P2p_peer.Id.t ; - chain_db: Distributed_db.chain_db ; - locator: Block_locator.t ; - block_validator: Block_validator.t ; - notify_new_block: State.Block.t -> unit ; - fetched_headers: - (Block_hash.t * Block_header.t) Lwt_pipe.t ; - fetched_blocks: - (Block_hash.t * Block_header.t * Operation.t list list tzresult Lwt.t) Lwt_pipe.t ; + canceler : Lwt_canceler.t; + block_header_timeout : Time.System.Span.t; + block_operations_timeout : Time.System.Span.t; + mutable headers_fetch_worker : unit Lwt.t; + mutable operations_fetch_worker : unit Lwt.t; + mutable validation_worker : unit Lwt.t; + peer_id : P2p_peer.Id.t; + chain_db : Distributed_db.chain_db; + locator : Block_locator.t; + block_validator : Block_validator.t; + notify_new_block : State.Block.t -> unit; + fetched_headers : (Block_hash.t * Block_header.t) Lwt_pipe.t; + fetched_blocks : + (Block_hash.t * Block_header.t * Operation.t list list tzresult Lwt.t) + Lwt_pipe.t; (* HACK, a worker should be able to return the 'error'. 
*) - mutable errors: Error_monad.error list ; + mutable errors : Error_monad.error list } -let operations_index_tag = Tag.def ~doc:"Operations index" "operations_index" Format.pp_print_int +let operations_index_tag = + Tag.def ~doc:"Operations index" "operations_index" Format.pp_print_int let assert_acceptable_header pipeline hash (header : Block_header.t) = let chain_state = Distributed_db.chain_state pipeline.chain_db in let time_now = Systime_os.now () in fail_unless - (Time.Protocol.compare - (Time.Protocol.add (Time.System.to_protocol (Systime_os.now ())) 15L) - header.shell.timestamp - >= 0) - (Future_block_header { block = hash; time = time_now; - block_time = header.shell.timestamp }) >>=? fun () -> - State.Chain.checkpoint chain_state >>= fun checkpoint -> + ( Time.Protocol.compare + (Time.Protocol.add (Time.System.to_protocol (Systime_os.now ())) 15L) + header.shell.timestamp + >= 0 ) + (Future_block_header + {block = hash; time = time_now; block_time = header.shell.timestamp}) + >>=? fun () -> + State.Chain.checkpoint chain_state + >>= fun checkpoint -> fail_when - (Int32.equal header.shell.level checkpoint.shell.level && - not (Block_header.equal checkpoint header)) - (Checkpoint_error (hash, Some pipeline.peer_id)) >>=? fun () -> - Chain.head chain_state >>= fun head -> + ( Int32.equal header.shell.level checkpoint.shell.level + && not (Block_header.equal checkpoint header) ) + (Checkpoint_error (hash, Some pipeline.peer_id)) + >>=? fun () -> + Chain.head chain_state + >>= fun head -> let checkpoint_reached = - (State.Block.header head).shell.level >= checkpoint.shell.level in + (State.Block.header head).shell.level >= checkpoint.shell.level + in if checkpoint_reached then (* If reached the checkpoint, every block before the checkpoint must be part of the chain. *) if header.shell.level <= checkpoint.shell.level then - Chain.mem chain_state hash >>= fun in_chain -> - fail_unless in_chain - (Checkpoint_error (hash, Some pipeline.peer_id)) >>=? fun () -> - return_unit - else - return_unit - else - return_unit + Chain.mem chain_state hash + >>= fun in_chain -> + fail_unless in_chain (Checkpoint_error (hash, Some pipeline.peer_id)) + >>=? fun () -> return_unit + else return_unit + else return_unit -let fetch_step pipeline (step : Block_locator.step) = - lwt_log_info Tag.DSL.(fun f -> - f "fetching step %a -> %a (%a) from peer %a." - -% t event "fetching_step_from_peer" - -% a Block_hash.Logging.tag step.block - -% a Block_hash.Logging.predecessor_tag step.predecessor - -% a (Tag.def ~doc:"" "" Block_locator.pp_step) step - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> +let fetch_step pipeline (step : Block_locator.step) = + lwt_log_info + Tag.DSL.( + fun f -> + f "fetching step %a -> %a (%a) from peer %a." + -% t event "fetching_step_from_peer" + -% a Block_hash.Logging.tag step.block + -% a Block_hash.Logging.predecessor_tag step.predecessor + -% a (Tag.def ~doc:"" "" Block_locator.pp_step) step + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> let rec fetch_loop acc hash cpt = - Lwt_unix.yield () >>= fun () -> + Lwt_unix.yield () + >>= fun () -> if cpt < 0 then - lwt_log_info Tag.DSL.(fun f -> - f "invalid step from peer %a (too long)." - -% t event "step_too_long" - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> - fail (Invalid_locator (pipeline.peer_id, pipeline.locator)) + lwt_log_info + Tag.DSL.( + fun f -> + f "invalid step from peer %a (too long)." 
+ -% t event "step_too_long" + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> fail (Invalid_locator (pipeline.peer_id, pipeline.locator)) else if Block_hash.equal hash step.predecessor then if step.strict_step && cpt <> 0 then - lwt_log_info Tag.DSL.(fun f -> - f "invalid step from peer %a (too short)." - -% t event "step_too_short" - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> + lwt_log_info + Tag.DSL.( + fun f -> + f "invalid step from peer %a (too short)." + -% t event "step_too_short" + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> fail (Invalid_locator (pipeline.peer_id, pipeline.locator)) - else - return acc + else return acc else - lwt_debug Tag.DSL.(fun f -> - f "fetching block header %a from peer %a." - -% t event "fetching_block_header_from_peer" - -% a Block_hash.Logging.tag hash - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> - protect ~canceler:pipeline.canceler begin fun () -> - Distributed_db.Block_header.fetch - ~timeout:pipeline.block_header_timeout - pipeline.chain_db ~peer:pipeline.peer_id - hash () - end >>=? fun header -> - assert_acceptable_header pipeline hash header >>=? fun () -> - lwt_debug Tag.DSL.(fun f -> - f "fetched block header %a from peer %a." - -% t event "fetched_block_header_from_peer" - -% a Block_hash.Logging.tag hash - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> + lwt_debug + Tag.DSL.( + fun f -> + f "fetching block header %a from peer %a." + -% t event "fetching_block_header_from_peer" + -% a Block_hash.Logging.tag hash + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> + protect ~canceler:pipeline.canceler (fun () -> + Distributed_db.Block_header.fetch + ~timeout:pipeline.block_header_timeout + pipeline.chain_db + ~peer:pipeline.peer_id + hash + ()) + >>=? fun header -> + assert_acceptable_header pipeline hash header + >>=? fun () -> + lwt_debug + Tag.DSL.( + fun f -> + f "fetched block header %a from peer %a." + -% t event "fetched_block_header_from_peer" + -% a Block_hash.Logging.tag hash + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> fetch_loop ((hash, header) :: acc) header.shell.predecessor (cpt - 1) in - fetch_loop [] step.block step.step >>=? fun headers -> + fetch_loop [] step.block step.step + >>=? fun headers -> iter_s - begin fun header -> - protect ~canceler:pipeline.canceler begin fun () -> - Lwt_pipe.push pipeline.fetched_headers header >>= return - end - end - headers >>=? fun () -> - return_unit + (fun header -> + protect ~canceler:pipeline.canceler (fun () -> + Lwt_pipe.push pipeline.fetched_headers header >>= return)) + headers + >>=? 
fun () -> return_unit let headers_fetch_worker_loop pipeline = - begin - let sender_id = Distributed_db.my_peer_id pipeline.chain_db in - (* sender and receiver are inverted here because they are from + (let sender_id = Distributed_db.my_peer_id pipeline.chain_db in + (* sender and receiver are inverted here because they are from the point of view of the node sending the locator *) - let seed = {Block_locator.sender_id=pipeline.peer_id; receiver_id=sender_id } in - let chain_state = Distributed_db.chain_state pipeline.chain_db in - let state = State.Chain.global_state chain_state in - State.history_mode state >>= fun history_mode -> - begin match history_mode with - | History_mode.Archive -> - Lwt.return_none - | Full | Rolling -> - let chain_state = Distributed_db.chain_state pipeline.chain_db in - State.Chain.save_point chain_state >>= Lwt.return_some - end >>= begin fun save_point -> - let steps = match save_point with - | None -> - Block_locator.to_steps seed pipeline.locator - | Some (save_point_level, save_point) -> - let head, _ = (pipeline.locator : Block_locator.t :> _ * _) in - let head_level = head.shell.level in - let truncate_limit = Int32.(sub head_level save_point_level) in - Block_locator.to_steps_truncate ~limit:(Int32.to_int truncate_limit) - ~save_point seed pipeline.locator - in - match steps with - | [] -> - fail (Too_short_locator (sender_id, pipeline.locator)) - | { Block_locator.predecessor ; _ } :: _ -> - State.Block.known chain_state predecessor >>= fun predecessor_known -> - (* Check that the locator is anchored in a block locally known *) - fail_unless - predecessor_known - (Too_short_locator (sender_id, pipeline.locator)) >>=? fun () -> - iter_s (fetch_step pipeline) steps - end >>=? fun () -> - return_unit - end >>= function + let seed = + {Block_locator.sender_id = pipeline.peer_id; receiver_id = sender_id} + in + let chain_state = Distributed_db.chain_state pipeline.chain_db in + let state = State.Chain.global_state chain_state in + State.history_mode state + >>= fun history_mode -> + ( match history_mode with + | History_mode.Archive -> + Lwt.return_none + | Full | Rolling -> + let chain_state = Distributed_db.chain_state pipeline.chain_db in + State.Chain.save_point chain_state >>= Lwt.return_some ) + >>= (fun save_point -> + let steps = + match save_point with + | None -> + Block_locator.to_steps seed pipeline.locator + | Some (save_point_level, save_point) -> + let (head, _) = (pipeline.locator : Block_locator.t :> _ * _) in + let head_level = head.shell.level in + let truncate_limit = Int32.(sub head_level save_point_level) in + Block_locator.to_steps_truncate + ~limit:(Int32.to_int truncate_limit) + ~save_point + seed + pipeline.locator + in + match steps with + | [] -> + fail (Too_short_locator (sender_id, pipeline.locator)) + | {Block_locator.predecessor; _} :: _ -> + State.Block.known chain_state predecessor + >>= fun predecessor_known -> + (* Check that the locator is anchored in a block locally known *) + fail_unless + predecessor_known + (Too_short_locator (sender_id, pipeline.locator)) + >>=? fun () -> iter_s (fetch_step pipeline) steps) + >>=? fun () -> return_unit) + >>= function | Ok () -> - lwt_log_info Tag.DSL.(fun f -> - f "fetched all steps from peer %a." - -% t event "fetched_all_steps_from_peer" - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> + lwt_log_info + Tag.DSL.( + fun f -> + f "fetched all steps from peer %a." 
+ -% t event "fetched_all_steps_from_peer" + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> Lwt_pipe.close pipeline.fetched_headers ; Lwt.return_unit - | Error [Exn Lwt.Canceled | Canceled | Exn Lwt_pipe.Closed] -> + | Error [(Exn Lwt.Canceled | Canceled | Exn Lwt_pipe.Closed)] -> Lwt.return_unit - | Error [ Distributed_db.Block_header.Timeout bh ] -> - lwt_log_info Tag.DSL.(fun f -> - f "request for header %a from peer %a timed out." - -% t event "header_request_timeout" - -% a Block_hash.Logging.tag bh - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> - Lwt_canceler.cancel pipeline.canceler >>= fun () -> - Lwt.return_unit - | Error [ Future_block_header { block; block_time; time } ] -> - lwt_log_notice Tag.DSL.(fun f -> - f "Block locator %a from peer %a contains future blocks. \ - local time: %a, block time: %a" - -% t event "locator_contains_future_blocks" - -% a Block_hash.Logging.tag block - -% a P2p_peer.Id.Logging.tag pipeline.peer_id - -% a node_time_tag time - -% a block_time_tag block_time) >>= fun () -> - Lwt_canceler.cancel pipeline.canceler >>= fun () -> - Lwt.return_unit - | Error ([ Too_short_locator _ ] as err) -> + | Error [Distributed_db.Block_header.Timeout bh] -> + lwt_log_info + Tag.DSL.( + fun f -> + f "request for header %a from peer %a timed out." + -% t event "header_request_timeout" + -% a Block_hash.Logging.tag bh + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> + Lwt_canceler.cancel pipeline.canceler >>= fun () -> Lwt.return_unit + | Error [Future_block_header {block; block_time; time}] -> + lwt_log_notice + Tag.DSL.( + fun f -> + f + "Block locator %a from peer %a contains future blocks. local \ + time: %a, block time: %a" + -% t event "locator_contains_future_blocks" + -% a Block_hash.Logging.tag block + -% a P2p_peer.Id.Logging.tag pipeline.peer_id + -% a node_time_tag time + -% a block_time_tag block_time) + >>= fun () -> + Lwt_canceler.cancel pipeline.canceler >>= fun () -> Lwt.return_unit + | Error ([Too_short_locator _] as err) -> pipeline.errors <- pipeline.errors @ err ; - lwt_log_info Tag.DSL.(fun f -> - f "Too short locator received" - -% t event "too_short_locator" - ) >>= fun () -> - Lwt_canceler.cancel pipeline.canceler >>= fun () -> - Lwt.return_unit + lwt_log_info + Tag.DSL.( + fun f -> + f "Too short locator received" -% t event "too_short_locator") + >>= fun () -> + Lwt_canceler.cancel pipeline.canceler >>= fun () -> Lwt.return_unit | Error err -> pipeline.errors <- pipeline.errors @ err ; - lwt_log_error Tag.DSL.(fun f -> - f "@[Unexpected error (headers fetch):@ %a@]" - -% t event "unexpected_error" - -% a errs_tag err) >>= fun () -> - Lwt_canceler.cancel pipeline.canceler >>= fun () -> - Lwt.return_unit + lwt_log_error + Tag.DSL.( + fun f -> + f "@[Unexpected error (headers fetch):@ %a@]" + -% t event "unexpected_error" -% a errs_tag err) + >>= fun () -> + Lwt_canceler.cancel pipeline.canceler >>= fun () -> Lwt.return_unit let rec operations_fetch_worker_loop pipeline = - begin - Lwt_unix.yield () >>= fun () -> - protect ~canceler:pipeline.canceler begin fun () -> - Lwt_pipe.pop pipeline.fetched_headers >>= return - end >>=? fun (hash, header) -> - lwt_log_info Tag.DSL.(fun f -> - f "fetching operations of block %a from peer %a." 
- -% t event "fetching_operations" - -% a Block_hash.Logging.tag hash - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> - let operations = - map_p - (fun i -> - protect ~canceler:pipeline.canceler begin fun () -> - Distributed_db.Operations.fetch - ~timeout:pipeline.block_operations_timeout - pipeline.chain_db ~peer:pipeline.peer_id - (hash, i) header.shell.operations_hash - end) - (0 -- (header.shell.validation_passes - 1)) >>=? fun operations -> - lwt_log_info Tag.DSL.(fun f -> - f "fetched operations of block %a from peer %a." - -% t event "fetched_operations" - -% a Block_hash.Logging.tag hash - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> - return operations in - protect ~canceler:pipeline.canceler begin fun () -> - Lwt_pipe.push pipeline.fetched_blocks - (hash, header, operations) >>= return - end - end >>= function + Lwt_unix.yield () + >>= (fun () -> + protect ~canceler:pipeline.canceler (fun () -> + Lwt_pipe.pop pipeline.fetched_headers >>= return) + >>=? fun (hash, header) -> + lwt_log_info + Tag.DSL.( + fun f -> + f "fetching operations of block %a from peer %a." + -% t event "fetching_operations" + -% a Block_hash.Logging.tag hash + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> + let operations = + map_p + (fun i -> + protect ~canceler:pipeline.canceler (fun () -> + Distributed_db.Operations.fetch + ~timeout:pipeline.block_operations_timeout + pipeline.chain_db + ~peer:pipeline.peer_id + (hash, i) + header.shell.operations_hash)) + (0 -- (header.shell.validation_passes - 1)) + >>=? fun operations -> + lwt_log_info + Tag.DSL.( + fun f -> + f "fetched operations of block %a from peer %a." + -% t event "fetched_operations" + -% a Block_hash.Logging.tag hash + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> return operations + in + protect ~canceler:pipeline.canceler (fun () -> + Lwt_pipe.push pipeline.fetched_blocks (hash, header, operations) + >>= return)) + >>= function | Ok () -> operations_fetch_worker_loop pipeline - | Error [Exn Lwt.Canceled | Canceled | Exn Lwt_pipe.Closed] -> + | Error [(Exn Lwt.Canceled | Canceled | Exn Lwt_pipe.Closed)] -> Lwt_pipe.close pipeline.fetched_blocks ; Lwt.return_unit - | Error [ Distributed_db.Operations.Timeout (bh, n) ] -> - lwt_log_info Tag.DSL.(fun f -> - f "request for operations %a:%d from peer %a timed out." - -% t event "request_operations_timeout" - -% a Block_hash.Logging.tag bh - -% s operations_index_tag n - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> - Lwt_canceler.cancel pipeline.canceler >>= fun () -> - Lwt.return_unit + | Error [Distributed_db.Operations.Timeout (bh, n)] -> + lwt_log_info + Tag.DSL.( + fun f -> + f "request for operations %a:%d from peer %a timed out." 
+ -% t event "request_operations_timeout" + -% a Block_hash.Logging.tag bh + -% s operations_index_tag n + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> + Lwt_canceler.cancel pipeline.canceler >>= fun () -> Lwt.return_unit | Error err -> pipeline.errors <- pipeline.errors @ err ; - lwt_log_error Tag.DSL.(fun f -> - f "@[Unexpected error (operations fetch):@ %a@]" - -% t event "unexpected_error" - -% a errs_tag err) >>= fun () -> - Lwt_canceler.cancel pipeline.canceler >>= fun () -> - Lwt.return_unit + lwt_log_error + Tag.DSL.( + fun f -> + f "@[Unexpected error (operations fetch):@ %a@]" + -% t event "unexpected_error" -% a errs_tag err) + >>= fun () -> + Lwt_canceler.cancel pipeline.canceler >>= fun () -> Lwt.return_unit let rec validation_worker_loop pipeline = - begin - Lwt_unix.yield () >>= fun () -> - protect ~canceler:pipeline.canceler begin fun () -> - Lwt_pipe.pop pipeline.fetched_blocks >>= return - end >>=? fun (hash, header, operations) -> - lwt_log_info Tag.DSL.(fun f -> - f "requesting validation for block %a from peer %a." - -% t event "requesting_validation" - -% a Block_hash.Logging.tag hash - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> - operations >>=? fun operations -> - protect ~canceler:pipeline.canceler begin fun () -> - Block_validator.validate - ~canceler:pipeline.canceler - ~notify_new_block:pipeline.notify_new_block - pipeline.block_validator - pipeline.chain_db hash header operations - end >>=? fun _block -> - lwt_log_info Tag.DSL.(fun f -> - f "validated block %a from peer %a." - -% t event "validated_block" - -% a Block_hash.Logging.tag hash - -% a P2p_peer.Id.Logging.tag pipeline.peer_id) >>= fun () -> - return_unit - end >>= function - | Ok () -> validation_worker_loop pipeline - | Error [Exn Lwt.Canceled | Canceled | Exn Lwt_pipe.Closed] -> + Lwt_unix.yield () + >>= (fun () -> + protect ~canceler:pipeline.canceler (fun () -> + Lwt_pipe.pop pipeline.fetched_blocks >>= return) + >>=? fun (hash, header, operations) -> + lwt_log_info + Tag.DSL.( + fun f -> + f "requesting validation for block %a from peer %a." + -% t event "requesting_validation" + -% a Block_hash.Logging.tag hash + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> + operations + >>=? fun operations -> + protect ~canceler:pipeline.canceler (fun () -> + Block_validator.validate + ~canceler:pipeline.canceler + ~notify_new_block:pipeline.notify_new_block + pipeline.block_validator + pipeline.chain_db + hash + header + operations) + >>=? fun _block -> + lwt_log_info + Tag.DSL.( + fun f -> + f "validated block %a from peer %a." + -% t event "validated_block" + -% a Block_hash.Logging.tag hash + -% a P2p_peer.Id.Logging.tag pipeline.peer_id) + >>= fun () -> return_unit) + >>= function + | Ok () -> + validation_worker_loop pipeline + | Error [(Exn Lwt.Canceled | Canceled | Exn Lwt_pipe.Closed)] -> Lwt.return_unit - | Error ([ Block_validator_errors.Invalid_block _ - | Block_validator_errors.Unavailable_protocol _ - | Block_validator_errors.System_error _ - | Timeout] as err ) -> + | Error + ( [ ( Block_validator_errors.Invalid_block _ + | Block_validator_errors.Unavailable_protocol _ + | Block_validator_errors.System_error _ + | Timeout ) ] as err ) -> (* Propagate the error to the peer validator. 
*) pipeline.errors <- pipeline.errors @ err ; - Lwt_canceler.cancel pipeline.canceler >>= fun () -> - Lwt.return_unit + Lwt_canceler.cancel pipeline.canceler >>= fun () -> Lwt.return_unit | Error err -> pipeline.errors <- pipeline.errors @ err ; - lwt_log_error Tag.DSL.(fun f -> - f "@[Unexpected error (validator):@ %a@]" - -% t event "unexpected_error" - -% a errs_tag err) >>= fun () -> - Lwt_canceler.cancel pipeline.canceler >>= fun () -> - Lwt.return_unit + lwt_log_error + Tag.DSL.( + fun f -> + f "@[Unexpected error (validator):@ %a@]" + -% t event "unexpected_error" -% a errs_tag err) + >>= fun () -> + Lwt_canceler.cancel pipeline.canceler >>= fun () -> Lwt.return_unit -let create - ?(notify_new_block = fun _ -> ()) - ~block_header_timeout ~block_operations_timeout - block_validator peer_id chain_db locator = +let create ?(notify_new_block = fun _ -> ()) ~block_header_timeout + ~block_operations_timeout block_validator peer_id chain_db locator = let canceler = Lwt_canceler.create () in - let fetched_headers = - Lwt_pipe.create ~size:(1024, fun _ -> 1) () in - let fetched_blocks = - Lwt_pipe.create ~size:(128, fun _ -> 1) () in - let pipeline = { - canceler ; - block_header_timeout ; block_operations_timeout ; - headers_fetch_worker = Lwt.return_unit ; - operations_fetch_worker = Lwt.return_unit ; - validation_worker = Lwt.return_unit ; - notify_new_block ; - peer_id ; chain_db ; locator ; - block_validator ; - fetched_headers ; fetched_blocks ; - errors = [] ; - } in - Lwt_canceler.on_cancel pipeline.canceler begin fun () -> - Lwt_pipe.close fetched_blocks ; - Lwt_pipe.close fetched_headers ; - (* TODO proper cleanup of ressources... *) - Lwt.return_unit - end ; - let head, _ = (pipeline.locator : Block_locator.t :> _ * _) in + let fetched_headers = Lwt_pipe.create ~size:(1024, fun _ -> 1) () in + let fetched_blocks = Lwt_pipe.create ~size:(128, fun _ -> 1) () in + let pipeline = + { canceler; + block_header_timeout; + block_operations_timeout; + headers_fetch_worker = Lwt.return_unit; + operations_fetch_worker = Lwt.return_unit; + validation_worker = Lwt.return_unit; + notify_new_block; + peer_id; + chain_db; + locator; + block_validator; + fetched_headers; + fetched_blocks; + errors = [] } + in + Lwt_canceler.on_cancel pipeline.canceler (fun () -> + Lwt_pipe.close fetched_blocks ; + Lwt_pipe.close fetched_headers ; + (* TODO proper cleanup of ressources... 
*) + Lwt.return_unit) ; + let (head, _) = (pipeline.locator : Block_locator.t :> _ * _) in let hash = Block_header.hash head in pipeline.headers_fetch_worker <- Lwt_utils.worker - (Format.asprintf "bootstrap_pipeline-headers_fetch.%a.%a" - P2p_peer.Id.pp_short peer_id Block_hash.pp_short hash) + (Format.asprintf + "bootstrap_pipeline-headers_fetch.%a.%a" + P2p_peer.Id.pp_short + peer_id + Block_hash.pp_short + hash) ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> headers_fetch_worker_loop pipeline) ~cancel:(fun () -> Lwt_canceler.cancel pipeline.canceler) ; pipeline.operations_fetch_worker <- Lwt_utils.worker - (Format.asprintf "bootstrap_pipeline-operations_fetch.%a.%a" - P2p_peer.Id.pp_short peer_id Block_hash.pp_short hash) + (Format.asprintf + "bootstrap_pipeline-operations_fetch.%a.%a" + P2p_peer.Id.pp_short + peer_id + Block_hash.pp_short + hash) ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> operations_fetch_worker_loop pipeline) ~cancel:(fun () -> Lwt_canceler.cancel pipeline.canceler) ; pipeline.validation_worker <- Lwt_utils.worker - (Format.asprintf "bootstrap_pipeline-validation.%a.%a" - P2p_peer.Id.pp_short peer_id Block_hash.pp_short hash) + (Format.asprintf + "bootstrap_pipeline-validation.%a.%a" + P2p_peer.Id.pp_short + peer_id + Block_hash.pp_short + hash) ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> validation_worker_loop pipeline) ~cancel:(fun () -> Lwt_canceler.cancel pipeline.canceler) ; pipeline let wait_workers pipeline = - pipeline.headers_fetch_worker >>= fun () -> - pipeline.operations_fetch_worker >>= fun () -> - pipeline.validation_worker >>= fun () -> - Lwt.return_unit + pipeline.headers_fetch_worker + >>= fun () -> + pipeline.operations_fetch_worker + >>= fun () -> pipeline.validation_worker >>= fun () -> Lwt.return_unit let wait pipeline = - wait_workers pipeline >>= fun () -> + wait_workers pipeline + >>= fun () -> match pipeline.errors with - | [] -> return_unit - | errors -> Lwt.return_error errors + | [] -> + return_unit + | errors -> + Lwt.return_error errors let cancel pipeline = - Lwt_canceler.cancel pipeline.canceler >>= fun () -> - wait_workers pipeline + Lwt_canceler.cancel pipeline.canceler >>= fun () -> wait_workers pipeline let length pipeline = - Peer_validator_worker_state.Worker_state.{ - fetched_header_length = Lwt_pipe.length pipeline.fetched_headers; - fetched_block_length = Lwt_pipe.length pipeline.fetched_blocks ; - } + Peer_validator_worker_state.Worker_state. + { fetched_header_length = Lwt_pipe.length pipeline.fetched_headers; + fetched_block_length = Lwt_pipe.length pipeline.fetched_blocks } let length_zero = - Peer_validator_worker_state.Worker_state.{ - fetched_header_length = 0 ; - fetched_block_length = 0 ; - } + Peer_validator_worker_state.Worker_state. 
+ {fetched_header_length = 0; fetched_block_length = 0} diff --git a/src/lib_shell/bootstrap_pipeline.mli b/src/lib_shell/bootstrap_pipeline.mli index fb1bd9f1d846ab76cf68feeacf8f898662167c75..c002358b4f8d1cee7cda2d87be3697036d40e459 100644 --- a/src/lib_shell/bootstrap_pipeline.mli +++ b/src/lib_shell/bootstrap_pipeline.mli @@ -25,17 +25,20 @@ type t -val create: - ?notify_new_block: (State.Block.t -> unit) -> +val create : + ?notify_new_block:(State.Block.t -> unit) -> block_header_timeout:Time.System.Span.t -> - block_operations_timeout: Time.System.Span.t -> + block_operations_timeout:Time.System.Span.t -> Block_validator.t -> - P2p_peer.Id.t -> Distributed_db.chain_db -> - Block_locator.t -> t + P2p_peer.Id.t -> + Distributed_db.chain_db -> + Block_locator.t -> + t -val wait: t -> unit tzresult Lwt.t +val wait : t -> unit tzresult Lwt.t -val cancel: t -> unit Lwt.t +val cancel : t -> unit Lwt.t val length : t -> Peer_validator_worker_state.Worker_state.pipeline_length + val length_zero : Peer_validator_worker_state.Worker_state.pipeline_length diff --git a/src/lib_shell/chain.ml b/src/lib_shell/chain.ml index aa85c0781696533517e0af09888fb52e5b366bb0..995254debbfa889dce0feade1ba20e21fdea6cd5 100644 --- a/src/lib_shell/chain.ml +++ b/src/lib_shell/chain.ml @@ -31,113 +31,118 @@ let genesis chain_state = >|= Option.unopt_assert ~loc:__POS__ let known_heads chain_state = - State.read_chain_data chain_state begin fun chain_store _data -> - Store.Chain_data.Known_heads.elements chain_store - end >>= fun hashes -> + State.read_chain_data chain_state (fun chain_store _data -> + Store.Chain_data.Known_heads.elements chain_store) + >>= fun hashes -> Lwt_list.map_p - (fun h -> State.Block.read_opt chain_state h >|= Option.unopt_assert ~loc:__POS__) hashes + (fun h -> + State.Block.read_opt chain_state h >|= Option.unopt_assert ~loc:__POS__) + hashes let head chain_state = - State.read_chain_data chain_state begin fun _chain_store data -> - Lwt.return data.current_head - end + State.read_chain_data chain_state (fun _chain_store data -> + Lwt.return data.current_head) let mem chain_state hash = - State.read_chain_data chain_state begin fun chain_store data -> - if Block_hash.equal (State.Block.hash data.current_head) hash then - Lwt.return_true - else - Store.Chain_data.In_main_branch.known (chain_store, hash) - end + State.read_chain_data chain_state (fun chain_store data -> + if Block_hash.equal (State.Block.hash data.current_head) hash then + Lwt.return_true + else Store.Chain_data.In_main_branch.known (chain_store, hash)) type data = State.chain_data = { - current_head: State.Block.t ; - current_mempool: Mempool.t ; - live_blocks: Block_hash.Set.t ; - live_operations: Operation_hash.Set.t ; - test_chain: Chain_id.t option ; - save_point: Int32.t * Block_hash.t ; - caboose: Int32.t * Block_hash.t ; + current_head : State.Block.t; + current_mempool : Mempool.t; + live_blocks : Block_hash.Set.t; + live_operations : Operation_hash.Set.t; + test_chain : Chain_id.t option; + save_point : Int32.t * Block_hash.t; + caboose : Int32.t * Block_hash.t } let data chain_state = - State.read_chain_data chain_state begin fun _chain_store data -> - Lwt.return data - end + State.read_chain_data chain_state (fun _chain_store data -> Lwt.return data) let locator chain_state seed = - data chain_state >>= fun data -> - State.compute_locator chain_state data.current_head seed + data chain_state + >>= fun data -> State.compute_locator chain_state data.current_head seed let locked_set_head chain_store data block 
live_blocks live_operations = let rec pop_blocks ancestor block = let hash = State.Block.hash block in - if Block_hash.equal hash ancestor then - Lwt.return_unit + if Block_hash.equal hash ancestor then Lwt.return_unit else - lwt_debug Tag.DSL.(fun f -> - f "pop_block %a" - -% t event "pop_block" - -% a Block_hash.Logging.tag hash) >>= fun () -> - Store.Chain_data.In_main_branch.remove (chain_store, hash) >>= fun () -> - State.Block.predecessor block >>= function + lwt_debug + Tag.DSL.( + fun f -> + f "pop_block %a" -% t event "pop_block" + -% a Block_hash.Logging.tag hash) + >>= fun () -> + Store.Chain_data.In_main_branch.remove (chain_store, hash) + >>= fun () -> + State.Block.predecessor block + >>= function | Some predecessor -> pop_blocks ancestor predecessor - | None -> assert false (* Cannot pop the genesis... *) + | None -> + assert false + (* Cannot pop the genesis... *) in let push_block pred_hash block = let hash = State.Block.hash block in - lwt_debug Tag.DSL.(fun f -> - f "push_block %a" - -% t event "push_block" - -% a Block_hash.Logging.tag hash) >>= fun () -> - Store.Chain_data.In_main_branch.store - (chain_store, pred_hash) hash >>= fun () -> - Lwt.return hash + lwt_debug + Tag.DSL.( + fun f -> + f "push_block %a" -% t event "push_block" + -% a Block_hash.Logging.tag hash) + >>= fun () -> + Store.Chain_data.In_main_branch.store (chain_store, pred_hash) hash + >>= fun () -> Lwt.return hash in - Chain_traversal.new_blocks - ~from_block:data.current_head ~to_block:block >>= fun (ancestor, path) -> + Chain_traversal.new_blocks ~from_block:data.current_head ~to_block:block + >>= fun (ancestor, path) -> let ancestor = State.Block.hash ancestor in - pop_blocks ancestor data.current_head >>= fun () -> - Lwt_list.fold_left_s push_block ancestor path >>= fun _ -> - Store.Chain_data.Current_head.store chain_store (State.Block.hash block) >>= fun () -> + pop_blocks ancestor data.current_head + >>= fun () -> + Lwt_list.fold_left_s push_block ancestor path + >>= fun _ -> + Store.Chain_data.Current_head.store chain_store (State.Block.hash block) + >>= fun () -> (* TODO more optimized updated of live_{blocks/operations} when the new head is a direct successor of the current head... Make sure to do the live blocks computation in `init_head` when this TODO is resolved. *) - Lwt.return { data with current_head = block ; - current_mempool = Mempool.empty ; - live_blocks ; - live_operations ; - } + Lwt.return + { data with + current_head = block; + current_mempool = Mempool.empty; + live_blocks; + live_operations } let set_head chain_state block = - State.Block.max_operations_ttl block >>=? fun max_op_ttl -> - Chain_traversal.live_blocks - block max_op_ttl >>=? fun (live_blocks, - live_operations) -> - State.update_chain_data chain_state begin fun chain_store data -> - locked_set_head - chain_store data block live_blocks live_operations >>= fun new_chain_data -> - Lwt.return (Some new_chain_data, data.current_head) - end >>= fun chain_state -> - return chain_state + State.Block.max_operations_ttl block + >>=? fun max_op_ttl -> + Chain_traversal.live_blocks block max_op_ttl + >>=? fun (live_blocks, live_operations) -> + State.update_chain_data chain_state (fun chain_store data -> + locked_set_head chain_store data block live_blocks live_operations + >>= fun new_chain_data -> + Lwt.return (Some new_chain_data, data.current_head)) + >>= fun chain_state -> return chain_state let test_and_set_head chain_state ~old block = - State.Block.max_operations_ttl block >>=? 
fun max_op_ttl -> - Chain_traversal.live_blocks - block max_op_ttl >>=? fun (live_blocks, live_operations) -> - State.update_chain_data chain_state begin fun chain_store data -> - if not (State.Block.equal data.current_head old) then - Lwt.return (None, false) - else - locked_set_head - chain_store data block live_blocks live_operations >>= fun new_chain_data -> - Lwt.return (Some new_chain_data, true) - end >>= fun chain_state -> - return chain_state + State.Block.max_operations_ttl block + >>=? fun max_op_ttl -> + Chain_traversal.live_blocks block max_op_ttl + >>=? fun (live_blocks, live_operations) -> + State.update_chain_data chain_state (fun chain_store data -> + if not (State.Block.equal data.current_head old) then + Lwt.return (None, false) + else + locked_set_head chain_store data block live_blocks live_operations + >>= fun new_chain_data -> Lwt.return (Some new_chain_data, true)) + >>= fun chain_state -> return chain_state let init_head chain_state = - head chain_state >>= fun block -> - set_head chain_state block >>=? fun (_ : State.Block.t) -> - return_unit + head chain_state + >>= fun block -> + set_head chain_state block >>=? fun (_ : State.Block.t) -> return_unit diff --git a/src/lib_shell/chain.mli b/src/lib_shell/chain.mli index bc5c21c2e83bb0f2b8af8bdfe024743392490350..780fcc94ccf89bb83b911dc5fc9d2338c325ba01 100644 --- a/src/lib_shell/chain.mli +++ b/src/lib_shell/chain.mli @@ -27,42 +27,43 @@ (** The genesis block of the chain. On a test chain, the test protocol has been promoted as "main" protocol. *) -val genesis: State.Chain.t -> State.Block.t Lwt.t +val genesis : State.Chain.t -> State.Block.t Lwt.t (** The current head of the chain. *) -val head: State.Chain.t -> State.Block.t Lwt.t -val locator: State.Chain.t -> Block_locator.seed -> Block_locator.t Lwt.t +val head : State.Chain.t -> State.Block.t Lwt.t + +val locator : State.Chain.t -> Block_locator.seed -> Block_locator.t Lwt.t (** All the available chain data. *) type data = { - current_head: State.Block.t ; - current_mempool: Mempool.t ; - live_blocks: Block_hash.Set.t ; - live_operations: Operation_hash.Set.t ; - test_chain: Chain_id.t option ; - save_point: Int32.t * Block_hash.t ; - caboose: Int32.t * Block_hash.t ; + current_head : State.Block.t; + current_mempool : Mempool.t; + live_blocks : Block_hash.Set.t; + live_operations : Operation_hash.Set.t; + test_chain : Chain_id.t option; + save_point : Int32.t * Block_hash.t; + caboose : Int32.t * Block_hash.t } (** Reading atomically all the chain data. *) -val data: State.Chain.t -> data Lwt.t +val data : State.Chain.t -> data Lwt.t (** The current head and all the known (valid) alternate heads. *) -val known_heads: State.Chain.t -> State.Block.t list Lwt.t +val known_heads : State.Chain.t -> State.Block.t list Lwt.t (** Test whether a block belongs to the current mainchain. *) -val mem: State.Chain.t -> Block_hash.t -> bool Lwt.t +val mem : State.Chain.t -> Block_hash.t -> bool Lwt.t (** Record a block as the current head of the chain. It returns the previous head. *) -val set_head: State.Chain.t -> State.Block.t -> State.Block.t tzresult Lwt.t +val set_head : State.Chain.t -> State.Block.t -> State.Block.t tzresult Lwt.t (** Atomically change the current head of the chain. This returns [true] whenever the change succeeded, or [false] when the current head os not equal to the [old] argument. 
*) -val test_and_set_head: +val test_and_set_head : State.Chain.t -> old:State.Block.t -> State.Block.t -> bool tzresult Lwt.t (** Restores the data about the current head at startup (recomputes the sets of live blocks and operations). *) -val init_head: State.Chain.t -> unit tzresult Lwt.t +val init_head : State.Chain.t -> unit tzresult Lwt.t diff --git a/src/lib_shell/chain_directory.ml b/src/lib_shell/chain_directory.ml index d211589e27a700bf087ced872e5b32de9075545f..d06caa1ae255a193643b985506a265d227ed83fb 100644 --- a/src/lib_shell/chain_directory.ml +++ b/src/lib_shell/chain_directory.ml @@ -27,13 +27,14 @@ open Chain_services let get_chain_id state = function - | `Main -> Lwt.return (State.Chain.main state) - | `Test -> begin - State.Chain.get_exn state (State.Chain.main state) >>= fun main_chain -> - State.Chain.test main_chain >>= function - | None -> Lwt.fail Not_found - | Some chain_id -> Lwt.return chain_id - end + | `Main -> + Lwt.return (State.Chain.main state) + | `Test -> ( + State.Chain.get_exn state (State.Chain.main state) + >>= fun main_chain -> + State.Chain.test main_chain + >>= function + | None -> Lwt.fail Not_found | Some chain_id -> Lwt.return chain_id ) | `Hash chain_id -> Lwt.return chain_id @@ -43,149 +44,146 @@ let get_chain_id_opt state chain = (fun _exn -> Lwt.return_none) let get_chain state chain = - get_chain_id state chain >>= fun chain_id -> - State.Chain.get_exn state chain_id + get_chain_id state chain + >>= fun chain_id -> State.Chain.get_exn state chain_id let get_checkpoint state (chain : Chain_services.chain) = - get_chain state chain >>= fun chain -> - State.Chain.checkpoint chain >>= fun header -> - Lwt.return (Block_header.hash header) + get_chain state chain + >>= fun chain -> + State.Chain.checkpoint chain + >>= fun header -> Lwt.return (Block_header.hash header) let predecessors ignored length head = let rec loop acc length block = - if length <= 0 then - Lwt.return (List.rev acc) + if length <= 0 then Lwt.return (List.rev acc) else - State.Block.predecessor block >>= function + State.Block.predecessor block + >>= function | None -> Lwt.return (List.rev acc) | Some pred -> if Block_hash.Set.mem (State.Block.hash block) ignored then Lwt.return (List.rev acc) - else - loop (State.Block.hash pred :: acc) (length-1) pred + else loop (State.Block.hash pred :: acc) (length - 1) pred in - loop [State.Block.hash head] (length-1) head + loop [State.Block.hash head] (length - 1) head let list_blocks chain_state ?(length = 1) ?min_date heads = - begin - match heads with - | [] -> - Chain.known_heads chain_state >>= fun heads -> - let heads = - match min_date with - | None -> heads - | Some min_date -> - List.fold_left - (fun acc block -> - let timestamp = State.Block.timestamp block in - if Time.Protocol.(min_date <= timestamp) then block :: acc - else acc) - [] heads in - let sorted_heads = - List.sort - (fun b1 b2 -> - let f1 = State.Block.fitness b1 in - let f2 = State.Block.fitness b2 in - ~- (Fitness.compare f1 f2)) - heads in - Lwt.return (List.map (fun b -> Some b) sorted_heads) - | _ :: _ as heads -> - Lwt_list.map_p (State.Block.read_opt chain_state) heads - end >>= fun requested_heads -> + ( match heads with + | [] -> + Chain.known_heads chain_state + >>= fun heads -> + let heads = + match min_date with + | None -> + heads + | Some min_date -> + List.fold_left + (fun acc block -> + let timestamp = State.Block.timestamp block in + if Time.Protocol.(min_date <= timestamp) then block :: acc + else acc) + [] + heads + in + let sorted_heads = + 
List.sort + (fun b1 b2 -> + let f1 = State.Block.fitness b1 in + let f2 = State.Block.fitness b2 in + ~-(Fitness.compare f1 f2)) + heads + in + Lwt.return (List.map (fun b -> Some b) sorted_heads) + | _ :: _ as heads -> + Lwt_list.map_p (State.Block.read_opt chain_state) heads ) + >>= fun requested_heads -> Lwt_list.fold_left_s (fun (ignored, acc) head -> - match head with - | None -> Lwt.return (ignored, []) - | Some block -> - predecessors ignored length block >>= fun predecessors -> - let ignored = List.fold_left (fun acc v -> Block_hash.Set.add v acc) - ignored predecessors in - Lwt.return (ignored, predecessors :: acc)) + match head with + | None -> + Lwt.return (ignored, []) + | Some block -> + predecessors ignored length block + >>= fun predecessors -> + let ignored = + List.fold_left + (fun acc v -> Block_hash.Set.add v acc) + ignored + predecessors + in + Lwt.return (ignored, predecessors :: acc)) (Block_hash.Set.empty, []) - requested_heads >>= fun (_, blocks) -> - return (List.rev blocks) + requested_heads + >>= fun (_, blocks) -> return (List.rev blocks) let rpc_directory = - - let dir : State.Chain.t RPC_directory.t ref = - ref RPC_directory.empty in - + let dir : State.Chain.t RPC_directory.t ref = ref RPC_directory.empty in let register0 s f = dir := - RPC_directory.register !dir (RPC_service.subst0 s) - (fun chain p q -> f chain p q) in + RPC_directory.register !dir (RPC_service.subst0 s) (fun chain p q -> + f chain p q) + in let register1 s f = dir := - RPC_directory.register !dir (RPC_service.subst1 s) - (fun (chain, a) p q -> f chain a p q) in - + RPC_directory.register !dir (RPC_service.subst1 s) (fun (chain, a) p q -> + f chain a p q) + in let register_dynamic_directory2 ?descr s f = dir := RPC_directory.register_dynamic_directory - !dir ?descr (RPC_path.subst1 s) - (fun (chain, a) -> f chain a) in - - register0 S.chain_id begin fun chain () () -> - return (State.Chain.id chain) - end ; - - register0 S.checkpoint begin fun chain () () -> - State.Chain.checkpoint chain >>= fun checkpoint -> - State.Chain.save_point chain >>= fun (save_point, _) -> - State.Chain.caboose chain >>= fun (caboose, _) -> - State.history_mode (State.Chain.global_state chain) >>= fun history_mode -> - return (checkpoint, save_point, caboose, history_mode) - end ; - + !dir + ?descr + (RPC_path.subst1 s) + (fun (chain, a) -> f chain a) + in + register0 S.chain_id (fun chain () () -> return (State.Chain.id chain)) ; + register0 S.checkpoint (fun chain () () -> + State.Chain.checkpoint chain + >>= fun checkpoint -> + State.Chain.save_point chain + >>= fun (save_point, _) -> + State.Chain.caboose chain + >>= fun (caboose, _) -> + State.history_mode (State.Chain.global_state chain) + >>= fun history_mode -> + return (checkpoint, save_point, caboose, history_mode)) ; (* blocks *) - - register0 S.Blocks.list begin fun chain q () -> - list_blocks chain ?length:q#length ?min_date:q#min_date q#heads - end ; - + register0 S.Blocks.list (fun chain q () -> + list_blocks chain ?length:q#length ?min_date:q#min_date q#heads) ; register_dynamic_directory2 Block_services.path Block_directory.build_rpc_directory ; - (* invalid_blocks *) - - register0 S.Invalid_blocks.list begin fun chain () () -> - let convert (hash, level, errors) = { hash ; level ; errors } in - State.Block.list_invalid chain >>= fun blocks -> - return (List.map convert blocks) - end ; - - register1 S.Invalid_blocks.get begin fun chain hash () () -> - State.Block.read_invalid chain hash >>= function - | None -> Lwt.fail Not_found - | Some { 
level ; errors } -> return { hash ; level ; errors } - end ; - - register1 S.Invalid_blocks.delete begin fun chain hash () () -> - State.Block.unmark_invalid chain hash - end ; - + register0 S.Invalid_blocks.list (fun chain () () -> + let convert (hash, level, errors) = {hash; level; errors} in + State.Block.list_invalid chain + >>= fun blocks -> return (List.map convert blocks)) ; + register1 S.Invalid_blocks.get (fun chain hash () () -> + State.Block.read_invalid chain hash + >>= function + | None -> + Lwt.fail Not_found + | Some {level; errors} -> + return {hash; level; errors}) ; + register1 S.Invalid_blocks.delete (fun chain hash () () -> + State.Block.unmark_invalid chain hash) ; !dir let build_rpc_directory validator = - let distributed_db = Validator.distributed_db validator in let state = Distributed_db.state distributed_db in - let dir = ref rpc_directory in - (* Mempool *) - let merge d = dir := RPC_directory.merge !dir d in merge (RPC_directory.map (fun chain -> - match Validator.get validator (State.Chain.id chain) with - | Error _ -> Lwt.fail Not_found - | Ok chain_validator -> - Lwt.return (Chain_validator.prevalidator chain_validator)) + match Validator.get validator (State.Chain.id chain) with + | Error _ -> + Lwt.fail Not_found + | Ok chain_validator -> + Lwt.return (Chain_validator.prevalidator chain_validator)) Prevalidator.rpc_directory) ; - - RPC_directory.prefix Chain_services.path @@ - RPC_directory.map (fun ((), chain) -> get_chain state chain) !dir + RPC_directory.prefix Chain_services.path + @@ RPC_directory.map (fun ((), chain) -> get_chain state chain) !dir diff --git a/src/lib_shell/chain_directory.mli b/src/lib_shell/chain_directory.mli index 7230df36978ce78d78c983f618d0fcc79b47f749..592c542077a1a36ed2219b8aa286e407071469cd 100644 --- a/src/lib_shell/chain_directory.mli +++ b/src/lib_shell/chain_directory.mli @@ -23,13 +23,15 @@ (* *) (*****************************************************************************) -val get_chain_id: State.t -> Chain_services.chain -> Chain_id.t Lwt.t -val get_chain_id_opt: State.t -> Chain_services.chain -> Chain_id.t option Lwt.t -val get_chain: State.t -> Chain_services.chain -> State.Chain.t Lwt.t +val get_chain_id : State.t -> Chain_services.chain -> Chain_id.t Lwt.t -val get_checkpoint: State.t -> Chain_services.chain -> - Block_hash.t Lwt.t +val get_chain_id_opt : + State.t -> Chain_services.chain -> Chain_id.t option Lwt.t -val rpc_directory: State.Chain.t RPC_directory.t +val get_chain : State.t -> Chain_services.chain -> State.Chain.t Lwt.t -val build_rpc_directory: Validator.t -> unit RPC_directory.t +val get_checkpoint : State.t -> Chain_services.chain -> Block_hash.t Lwt.t + +val rpc_directory : State.Chain.t RPC_directory.t + +val build_rpc_directory : Validator.t -> unit RPC_directory.t diff --git a/src/lib_shell/chain_traversal.ml b/src/lib_shell/chain_traversal.ml index f737a0d5854694a0f8bb0b8ca1b89eba15f87a63..0bdb06c48d7f2532ea2a547787765d04545677ab 100644 --- a/src/lib_shell/chain_traversal.ml +++ b/src/lib_shell/chain_traversal.ml @@ -25,132 +25,161 @@ open State -let path (b1: Block.t) (b2: Block.t) = +let path (b1 : Block.t) (b2 : Block.t) = if not (Chain_id.equal (Block.chain_id b1) (Block.chain_id b2)) then invalid_arg "Chain_traversal.path" ; let rec loop acc current = - if Block.equal b1 current then - Lwt.return_some acc + if Block.equal b1 current then Lwt.return_some acc else - Block.predecessor current >>= function - | Some pred -> loop (current :: acc) pred - | None -> Lwt.return_none in + 
Block.predecessor current + >>= function + | Some pred -> loop (current :: acc) pred | None -> Lwt.return_none + in loop [] b2 -let common_ancestor (b1: Block.t) (b2: Block.t) = - if not ( Chain_id.equal (Block.chain_id b1) (Block.chain_id b2)) then +let common_ancestor (b1 : Block.t) (b2 : Block.t) = + if not (Chain_id.equal (Block.chain_id b1) (Block.chain_id b2)) then invalid_arg "Chain_traversal.path" ; - let rec loop (b1: Block.t) (b2: Block.t) = - if Block.equal b1 b2 then - Lwt.return b1 + let rec loop (b1 : Block.t) (b2 : Block.t) = + if Block.equal b1 b2 then Lwt.return b1 else if Time.Protocol.(Block.timestamp b1 <= Block.timestamp b2) then - Block.predecessor b2 >>= function - | None -> assert false - | Some b2 -> loop b1 b2 + Block.predecessor b2 + >>= function None -> assert false | Some b2 -> loop b1 b2 else - Block.predecessor b1 >>= function - | None -> assert false - | Some b1 -> loop b1 b2 in + Block.predecessor b1 + >>= function None -> assert false | Some b1 -> loop b1 b2 + in loop b1 b2 let iter_predecessors ?max ?min_fitness ?min_date heads ~f = - let module Local = struct exception Exit end in + let module Local = struct + exception Exit + end in let compare b1 b2 = match Fitness.compare (Block.fitness b1) (Block.fitness b2) with - | 0 -> begin - match Time.Protocol.compare (Block.timestamp b1) (Block.timestamp b2) with - | 0 -> Block.compare b1 b2 - | res -> res - end - | res -> res in - let pop, push = + | 0 -> ( + match + Time.Protocol.compare (Block.timestamp b1) (Block.timestamp b2) + with + | 0 -> + Block.compare b1 b2 + | res -> + res ) + | res -> + res + in + let (pop, push) = (* Poor-man priority queue *) let queue : Block.t list ref = ref [] in let pop () = match !queue with - | [] -> None - | b :: bs -> queue := bs ; Some b in + | [] -> + None + | b :: bs -> + queue := bs ; + Some b + in let push b = let rec loop = function - | [] -> [b] + | [] -> + [b] | b' :: bs' as bs -> let cmp = compare b b' in - if cmp = 0 then - bs - else if cmp < 0 then - b' :: loop bs' - else - b :: bs in - queue := loop !queue in - pop, push in + if cmp = 0 then bs else if cmp < 0 then b' :: loop bs' else b :: bs + in + queue := loop !queue + in + (pop, push) + in let check_count = match max with - | None -> (fun () -> ()) + | None -> + fun () -> () | Some max -> let cpt = ref 0 in fun () -> if !cpt >= max then raise Local.Exit ; - incr cpt in + incr cpt + in let check_fitness = match min_fitness with - | None -> (fun _ -> true) + | None -> + fun _ -> true | Some min_fitness -> - (fun b -> Fitness.compare min_fitness (Block.fitness b) <= 0) in + fun b -> Fitness.compare min_fitness (Block.fitness b) <= 0 + in let check_date = match min_date with - | None -> (fun _ -> true) + | None -> + fun _ -> true | Some min_date -> - (fun b -> Time.Protocol.(min_date <= Block.timestamp b)) in + fun b -> Time.Protocol.(min_date <= Block.timestamp b) + in let rec loop () = match pop () with - | None -> Lwt.return_unit - | Some b -> + | None -> + Lwt.return_unit + | Some b -> ( check_count () ; - f b >>= fun () -> - Block.predecessor b >>= function - | None -> loop () + f b + >>= fun () -> + Block.predecessor b + >>= function + | None -> + loop () | Some p -> if check_fitness p && check_date p then push p ; - loop () in + loop () ) + in List.iter push heads ; try loop () with Local.Exit -> Lwt.return_unit let iter_predecessors ?max ?min_fitness ?min_date heads ~f = match heads with - | [] -> Lwt.return_unit + | [] -> + Lwt.return_unit | b :: _ -> let chain_id = Block.chain_id b in - if not 
(List.for_all (fun b -> Chain_id.equal chain_id (Block.chain_id b)) heads) then - invalid_arg "State.Helpers.iter_predecessors" ; + if + not + (List.for_all + (fun b -> Chain_id.equal chain_id (Block.chain_id b)) + heads) + then invalid_arg "State.Helpers.iter_predecessors" ; iter_predecessors ?max ?min_fitness ?min_date heads ~f let new_blocks ~from_block ~to_block = - common_ancestor from_block to_block >>= fun ancestor -> - path ancestor to_block >>= function - | None -> assert false - | Some path -> Lwt.return (ancestor, path) + common_ancestor from_block to_block + >>= fun ancestor -> + path ancestor to_block + >>= function None -> assert false | Some path -> Lwt.return (ancestor, path) let live_blocks block n = let rec loop bacc oacc chain_state block_head n = - Block.all_operation_hashes block_head >>= fun hashes -> + Block.all_operation_hashes block_head + >>= fun hashes -> let oacc = List.fold_left - (List.fold_left - (fun oacc op -> Operation_hash.Set.add op oacc)) - oacc hashes in + (List.fold_left (fun oacc op -> Operation_hash.Set.add op oacc)) + oacc + hashes + in let bacc = Block_hash.Set.add (Block.hash block_head) bacc in if n = 0 then return (bacc, oacc) else - State.Block.predecessor block_head >>= function + State.Block.predecessor block_head + >>= function | None -> let genesis_hash = (State.Chain.genesis chain_state).block in let block_hash = Block.hash block_head in - if Block_hash.equal genesis_hash block_hash - then return (bacc, oacc) + if Block_hash.equal genesis_hash block_hash then return (bacc, oacc) else fail (State.Block_not_found block_hash) | Some predecessor -> - loop bacc oacc chain_state predecessor (pred n) in + loop bacc oacc chain_state predecessor (pred n) + in loop - Block_hash.Set.empty Operation_hash.Set.empty - (Block.chain_state block) block + Block_hash.Set.empty + Operation_hash.Set.empty + (Block.chain_state block) + block n diff --git a/src/lib_shell/chain_traversal.mli b/src/lib_shell/chain_traversal.mli index a8df5ece78fd6196cbb8fd2585a7a95a04a39e32..18ab749c5e6fa60ccfac083f870bf41af0708440 100644 --- a/src/lib_shell/chain_traversal.mli +++ b/src/lib_shell/chain_traversal.mli @@ -27,22 +27,15 @@ open State -val path: Block.t -> Block.t -> Block.t list option Lwt.t (** If [h1] is an ancestor of [h2] in the current [state], then [path state h1 h2] returns the chain of block from [h1] (excluded) to [h2] (included). Returns [None] otherwise. *) +val path : Block.t -> Block.t -> Block.t list option Lwt.t -val common_ancestor: Block.t -> Block.t -> Block.t Lwt.t (** [common_ancestor state h1 h2] returns the first common ancestors in the history of blocks [h1] and [h2]. *) +val common_ancestor : Block.t -> Block.t -> Block.t Lwt.t -val iter_predecessors: - ?max:int -> - ?min_fitness:Fitness.t -> - ?min_date:Time.Protocol.t -> - Block.t list -> - f:(Block.t -> unit Lwt.t) -> - unit Lwt.t (** [iter_predecessors state blocks f] iter [f] on [blocks] and their recursive predecessors. Blocks are visited with a decreasing fitness (then decreasing timestamp). If the optional @@ -50,21 +43,27 @@ val iter_predecessors: visited block. If [min_fitness] id provided, blocks with a fitness lower than [min_fitness] are ignored. If [min_date], blocks with a fitness lower than [min_date] are ignored. 
*) +val iter_predecessors : + ?max:int -> + ?min_fitness:Fitness.t -> + ?min_date:Time.Protocol.t -> + Block.t list -> + f:(Block.t -> unit Lwt.t) -> + unit Lwt.t -val new_blocks: - from_block:Block.t -> to_block:Block.t -> - (Block.t * Block.t list) Lwt.t (** [new_blocks ~from_block ~to_block] returns a pair [(ancestor, path)], where [ancestor] is the common ancestor of [from_block] and [to_block] and where [path] is the chain from [ancestor] (excluded) to [to_block] (included). The function raises an exception when the two provided blocks do not belong the the same [chain]. *) +val new_blocks : + from_block:Block.t -> to_block:Block.t -> (Block.t * Block.t list) Lwt.t -val live_blocks: - Block.t -> int -> (Block_hash.Set.t * Operation_hash.Set.t) tzresult Lwt.t (** [live_blocks b n] return a pair [(blocks,operations)] where [blocks] is the set of arity [n], that contains [b] and its [n-1] predecessors. And where [operations] is the set of operations included in those blocks. *) +val live_blocks : + Block.t -> int -> (Block_hash.Set.t * Operation_hash.Set.t) tzresult Lwt.t diff --git a/src/lib_shell/chain_validator.ml b/src/lib_shell/chain_validator.ml index d3bf3b1a2f128c49425ebc0006b6c6f88af0050d..e854739d61d8be75b0b44b91ad5452de717f55dc 100644 --- a/src/lib_shell/chain_validator.ml +++ b/src/lib_shell/chain_validator.ml @@ -26,237 +26,270 @@ open Chain_validator_worker_state -module Log = - Internal_event.Legacy_logging.Make(struct let name = "node.chain_validator" end) +module Log = Internal_event.Legacy_logging.Make (struct + let name = "node.chain_validator" +end) module Name = struct type t = Chain_id.t + let encoding = Chain_id.encoding - let base = [ "validator.chain" ] + + let base = ["validator.chain"] + let pp = Chain_id.pp_short end module Request = struct include Request + type _ t = Validated : State.Block.t -> Event.update t - let view (type a) (Validated block : a t) : view = - State.Block.hash block + + let view (type a) (Validated block : a t) : view = State.Block.hash block end -type limits = { - bootstrap_threshold: int ; - worker_limits: Worker_types.limits ; -} +type limits = {bootstrap_threshold : int; worker_limits : Worker_types.limits} module Types = struct include Worker_state type parameters = { - parent: Name.t option ; - db: Distributed_db.t ; - chain_state: State.Chain.t ; - chain_db: Distributed_db.chain_db ; - block_validator: Block_validator.t ; - global_valid_block_input: State.Block.t Lwt_watcher.input ; - global_chains_input: (Chain_id.t * bool) Lwt_watcher.input ; - - prevalidator_limits: Prevalidator.limits ; - peer_validator_limits: Peer_validator.limits ; - max_child_ttl: int option ; - limits: limits; + parent : Name.t option; + db : Distributed_db.t; + chain_state : State.Chain.t; + chain_db : Distributed_db.chain_db; + block_validator : Block_validator.t; + global_valid_block_input : State.Block.t Lwt_watcher.input; + global_chains_input : (Chain_id.t * bool) Lwt_watcher.input; + prevalidator_limits : Prevalidator.limits; + peer_validator_limits : Peer_validator.limits; + max_child_ttl : int option; + limits : limits } type state = { - parameters: parameters ; - - mutable bootstrapped: bool ; - bootstrapped_waiter: unit Lwt.t ; - bootstrapped_wakener: unit Lwt.u ; - valid_block_input: State.Block.t Lwt_watcher.input ; - new_head_input: State.Block.t Lwt_watcher.input ; - - mutable child: - (state * (unit -> unit Lwt.t (* shutdown *))) option ; - mutable prevalidator: Prevalidator.t option ; - active_peers: Peer_validator.t 
P2p_peer.InitializationTable.t ; - bootstrapped_peers: unit P2p_peer.Table.t ; + parameters : parameters; + mutable bootstrapped : bool; + bootstrapped_waiter : unit Lwt.t; + bootstrapped_wakener : unit Lwt.u; + valid_block_input : State.Block.t Lwt_watcher.input; + new_head_input : State.Block.t Lwt_watcher.input; + mutable child : (state * (unit -> unit Lwt.t (* shutdown *))) option; + mutable prevalidator : Prevalidator.t option; + active_peers : Peer_validator.t P2p_peer.InitializationTable.t; + bootstrapped_peers : unit P2p_peer.Table.t } let view (state : state) _ : view = - let { bootstrapped ; active_peers ; bootstrapped_peers ; _ } = state in - { bootstrapped ; + let {bootstrapped; active_peers; bootstrapped_peers; _} = state in + { bootstrapped; active_peers = - P2p_peer.InitializationTable.fold_keys (fun id l -> id :: l) active_peers [] ; + P2p_peer.InitializationTable.fold_keys + (fun id l -> id :: l) + active_peers + []; bootstrapped_peers = P2p_peer.Table.fold (fun id _ l -> id :: l) bootstrapped_peers [] } end module Worker = Worker.Make (Name) (Event) (Request) (Types) - open Types type t = Worker.infinite Worker.queue Worker.t let table = Worker.create_table Queue -let shutdown w = - Worker.shutdown w +let shutdown w = Worker.shutdown w let shutdown_child nv active_chains = - Lwt_utils.may ~f:(fun - ({ parameters = { chain_state ; global_chains_input ; _ } ; _ }, shutdown) -> - Lwt_watcher.notify global_chains_input (State.Chain.id chain_state, false) ; - Chain_id.Table.remove active_chains (State.Chain.id chain_state) ; - State.update_chain_data nv.parameters.chain_state begin fun _ chain_data -> - Lwt.return (Some { chain_data with test_chain = None }, ()) - end >>= fun () -> - shutdown () >>= fun () -> - nv.child <- None ; - Lwt.return_unit - ) nv.child + Lwt_utils.may + ~f: + (fun ({parameters = {chain_state; global_chains_input; _}; _}, shutdown) -> + Lwt_watcher.notify global_chains_input (State.Chain.id chain_state, false) ; + Chain_id.Table.remove active_chains (State.Chain.id chain_state) ; + State.update_chain_data nv.parameters.chain_state (fun _ chain_data -> + Lwt.return (Some {chain_data with test_chain = None}, ())) + >>= fun () -> + shutdown () + >>= fun () -> + nv.child <- None ; + Lwt.return_unit) + nv.child let notify_new_block w block = let nv = Worker.state w in - Option.iter nv.parameters.parent - ~f:(fun id -> try - let w = List.assoc id (Worker.list table) in - let nv = Worker.state w in - Lwt_watcher.notify nv.valid_block_input block - with Not_found -> ()) ; + Option.iter nv.parameters.parent ~f:(fun id -> + try + let w = List.assoc id (Worker.list table) in + let nv = Worker.state w in + Lwt_watcher.notify nv.valid_block_input block + with Not_found -> ()) ; Lwt_watcher.notify nv.valid_block_input block ; Lwt_watcher.notify nv.parameters.global_valid_block_input block ; Worker.Queue.push_request_now w (Validated block) let may_toggle_bootstrapped_chain w = let nv = Worker.state w in - if not nv.bootstrapped && - P2p_peer.Table.length nv.bootstrapped_peers >= nv.parameters.limits.bootstrap_threshold - then begin - Log.log_info "bootstrapped"; + if + (not nv.bootstrapped) + && P2p_peer.Table.length nv.bootstrapped_peers + >= nv.parameters.limits.bootstrap_threshold + then ( + Log.log_info "bootstrapped" ; nv.bootstrapped <- true ; - Lwt.wakeup_later nv.bootstrapped_wakener () ; - end + Lwt.wakeup_later nv.bootstrapped_wakener () ) let with_activated_peer_validator w peer_id f = let nv = Worker.state w in - begin - 
P2p_peer.InitializationTable.find_or_make - nv.active_peers - peer_id - (fun () -> - Peer_validator.create - ~notify_new_block:(notify_new_block w) - ~notify_bootstrapped: begin fun () -> - P2p_peer.Table.add nv.bootstrapped_peers peer_id () ; - may_toggle_bootstrapped_chain w - end - ~notify_termination: begin fun _pv -> - P2p_peer.InitializationTable.remove nv.active_peers peer_id ; - P2p_peer.Table.remove nv.bootstrapped_peers peer_id ; - end - nv.parameters.peer_validator_limits - nv.parameters.block_validator - nv.parameters.chain_db - peer_id) - end >>=? fun pv -> + P2p_peer.InitializationTable.find_or_make nv.active_peers peer_id (fun () -> + Peer_validator.create + ~notify_new_block:(notify_new_block w) + ~notify_bootstrapped:(fun () -> + P2p_peer.Table.add nv.bootstrapped_peers peer_id () ; + may_toggle_bootstrapped_chain w) + ~notify_termination:(fun _pv -> + P2p_peer.InitializationTable.remove nv.active_peers peer_id ; + P2p_peer.Table.remove nv.bootstrapped_peers peer_id) + nv.parameters.peer_validator_limits + nv.parameters.block_validator + nv.parameters.chain_db + peer_id) + >>=? fun pv -> match Peer_validator.status pv with - | Worker_types.Running _ -> f pv + | Worker_types.Running _ -> + f pv | Worker_types.Closing (_, _) | Worker_types.Closed (_, _, _) - | Worker_types.Launching _ -> return_unit + | Worker_types.Launching _ -> + return_unit let may_update_checkpoint chain_state new_head = - State.Chain.checkpoint chain_state >>= fun checkpoint -> - State.Block.last_allowed_fork_level new_head >>=? fun new_level -> - if new_level <= checkpoint.shell.level then - return_unit + State.Chain.checkpoint chain_state + >>= fun checkpoint -> + State.Block.last_allowed_fork_level new_head + >>=? fun new_level -> + if new_level <= checkpoint.shell.level then return_unit else let state = State.Chain.global_state chain_state in - State.history_mode state >>= fun history_mode -> + State.history_mode state + >>= fun history_mode -> let head_level = State.Block.level new_head in - State.Block.predecessor_n new_head - (Int32.to_int (Int32.sub head_level new_level)) >>= function - | None -> assert false (* should not happen *) - | Some new_checkpoint -> - State.Block.read_opt chain_state new_checkpoint >>= function - | None -> assert false (* should not happen *) - | Some new_checkpoint -> - Log.log_notice "@[Update to checkpoint %a (running in mode %a).@]" - Block_hash.pp (State.Block.hash new_checkpoint) - History_mode.pp history_mode ; + State.Block.predecessor_n + new_head + (Int32.to_int (Int32.sub head_level new_level)) + >>= function + | None -> + assert false (* should not happen *) + | Some new_checkpoint -> ( + State.Block.read_opt chain_state new_checkpoint + >>= function + | None -> + assert false (* should not happen *) + | Some new_checkpoint -> ( + Log.log_notice + "@[Update to checkpoint %a (running in mode %a).@]" + Block_hash.pp + (State.Block.hash new_checkpoint) + History_mode.pp + history_mode ; let new_checkpoint = State.Block.header new_checkpoint in - begin match history_mode with - | History_mode.Archive -> - State.Chain.set_checkpoint chain_state new_checkpoint >>= fun () -> - return_unit - | Full -> - State.Chain.set_checkpoint_then_purge_full chain_state new_checkpoint - | Rolling -> - State.Chain.set_checkpoint_then_purge_rolling chain_state new_checkpoint - end + match history_mode with + | History_mode.Archive -> + State.Chain.set_checkpoint chain_state new_checkpoint + >>= fun () -> return_unit + | Full -> + State.Chain.set_checkpoint_then_purge_full + 
chain_state + new_checkpoint + | Rolling -> + State.Chain.set_checkpoint_then_purge_rolling + chain_state + new_checkpoint ) ) let may_switch_test_chain w active_chains spawn_child block = let nv = Worker.state w in let create_child block protocol expiration forking_block = let block_header = State.Block.header block in - let genesis = Context.compute_testchain_genesis (State.Block.hash forking_block) in + let genesis = + Context.compute_testchain_genesis (State.Block.hash forking_block) + in let chain_id = Context.compute_testchain_chain_id genesis in let activated = match nv.child with - | None -> false - | Some (child , _) -> + | None -> + false + | Some (child, _) -> Block_hash.equal (State.Chain.genesis child.parameters.chain_state).block - genesis in - begin - match nv.parameters.max_child_ttl with - | None -> Lwt.return_false - | Some ttl -> - let forking_block_timestamp = - (State.Block.shell_header forking_block).Block_header.timestamp - in - let expiration = - let open Time.Protocol in - min expiration (add forking_block_timestamp (Int64.of_int ttl)) in - Lwt.return (expiration < block_header.shell.timestamp) - end >>= fun locally_expired -> + genesis + in + ( match nv.parameters.max_child_ttl with + | None -> + Lwt.return_false + | Some ttl -> + let forking_block_timestamp = + (State.Block.shell_header forking_block).Block_header.timestamp + in + let expiration = + let open Time.Protocol in + min expiration (add forking_block_timestamp (Int64.of_int ttl)) + in + Lwt.return (expiration < block_header.shell.timestamp) ) + >>= fun locally_expired -> if locally_expired && activated then shutdown_child nv active_chains >>= return - else if activated - || locally_expired - || not (State.Chain.allow_forked_chain nv.parameters.chain_state) then - return_unit - else begin - begin - State.Chain.get - (State.Chain.global_state nv.parameters.chain_state) - chain_id >>= function - | Ok chain_state -> - State.update_testchain block ~testchain_state:chain_state >>= fun () -> - return chain_state - | Error _ -> (* TODO proper error matching (Not_found ?) or use `get_opt` ? *) - State.Block.context forking_block >>= fun context -> - let try_init_test_chain cont = - Block_validation.init_test_chain - context (State.Block.header forking_block) >>= function - | Ok genesis_header -> - State.fork_testchain - block chain_id genesis genesis_header protocol expiration >>=? fun chain_state -> - Chain.head chain_state >>= fun new_genesis_block -> - Lwt_watcher.notify nv.parameters.global_valid_block_input new_genesis_block ; - Lwt_watcher.notify nv.valid_block_input new_genesis_block ; - return chain_state - | Error [ Block_validator_errors.Missing_test_protocol missing_protocol ] -> - Block_validator.fetch_and_compile_protocol - nv.parameters.block_validator - missing_protocol >>=? fun _ -> - cont () - | Error _ as errs -> Lwt.return errs - in - try_init_test_chain @@ fun () -> - try_init_test_chain @@ fun () -> - failwith "Could not retrieve test protocol" - end >>=? fun chain_state -> + else if + activated || locally_expired + || not (State.Chain.allow_forked_chain nv.parameters.chain_state) + then return_unit + else + State.Chain.get + (State.Chain.global_state nv.parameters.chain_state) + chain_id + >>= (function + | Ok chain_state -> + State.update_testchain block ~testchain_state:chain_state + >>= fun () -> return chain_state + | Error _ -> + (* TODO proper error matching (Not_found ?) or use `get_opt` ? 
*) + State.Block.context forking_block + >>= fun context -> + let try_init_test_chain cont = + Block_validation.init_test_chain + context + (State.Block.header forking_block) + >>= function + | Ok genesis_header -> + State.fork_testchain + block + chain_id + genesis + genesis_header + protocol + expiration + >>=? fun chain_state -> + Chain.head chain_state + >>= fun new_genesis_block -> + Lwt_watcher.notify + nv.parameters.global_valid_block_input + new_genesis_block ; + Lwt_watcher.notify nv.valid_block_input new_genesis_block ; + return chain_state + | Error + [ Block_validator_errors.Missing_test_protocol + missing_protocol ] -> + Block_validator.fetch_and_compile_protocol + nv.parameters.block_validator + missing_protocol + >>=? fun _ -> cont () + | Error _ as errs -> + Lwt.return errs + in + try_init_test_chain + @@ fun () -> + try_init_test_chain + @@ fun () -> failwith "Could not retrieve test protocol") + >>=? fun chain_state -> (* [spawn_child] is a callback to [create_node]. Thus, it takes care of global initialization boilerplate (e.g. notifying [global_chains_input], adding the chain to the correct tables, ...) *) @@ -267,362 +300,416 @@ let may_switch_test_chain w active_chains spawn_child block = nv.parameters.block_validator nv.parameters.global_valid_block_input nv.parameters.global_chains_input - nv.parameters.db chain_state - nv.parameters.limits (* TODO: different limits main/test ? *) >>=? fun child -> + nv.parameters.db + chain_state + nv.parameters.limits + (* TODO: different limits main/test ? *) + >>=? fun child -> nv.child <- Some child ; return_unit - end in - begin - State.Block.test_chain block >>= function - | Not_running, _ -> shutdown_child nv active_chains >>= return - | (Forking _ | Running _), None -> return_unit (* only for snapshots *) - | (Forking { protocol ; expiration ; _ } - | Running { protocol ; expiration ; _ }), Some forking_block -> - create_child block protocol expiration forking_block - end >>= function - | Ok () -> Lwt.return_unit + State.Block.test_chain block + >>= (function + | (Not_running, _) -> + shutdown_child nv active_chains >>= return + | ((Forking _ | Running _), None) -> + return_unit (* only for snapshots *) + | ( ( Forking {protocol; expiration; _} + | Running {protocol; expiration; _} ), + Some forking_block ) -> + create_child block protocol expiration forking_block) + >>= function + | Ok () -> + Lwt.return_unit | Error err -> Worker.record_event w (Could_not_switch_testchain err) ; Lwt.return_unit let broadcast_head w ~previous block = let nv = Worker.state w in - if not nv.bootstrapped then - Lwt.return_unit - else begin - begin - State.Block.predecessor block >>= function - | None -> Lwt.return_true - | Some predecessor -> - Lwt.return (State.Block.equal predecessor previous) - end >>= fun successor -> - if successor then begin - Distributed_db.Advertise.current_head - nv.parameters.chain_db block ; - Lwt.return_unit - end else begin - Distributed_db.Advertise.current_branch nv.parameters.chain_db - end - end + if not nv.bootstrapped then Lwt.return_unit + else + State.Block.predecessor block + >>= (function + | None -> + Lwt.return_true + | Some predecessor -> + Lwt.return (State.Block.equal predecessor previous)) + >>= fun successor -> + if successor then ( + Distributed_db.Advertise.current_head nv.parameters.chain_db block ; + Lwt.return_unit ) + else Distributed_db.Advertise.current_branch nv.parameters.chain_db let safe_get_protocol hash = match Registered_protocol.get hash with | None -> (* FIXME. 
*) (* This should not happen: it should be handled in the validator. *) - failwith "chain_validator: missing protocol '%a' for the current block." - Protocol_hash.pp_short hash + failwith + "chain_validator: missing protocol '%a' for the current block." + Protocol_hash.pp_short + hash | Some protocol -> return protocol -let on_request (type a) w - start_testchain active_chains spawn_child (req : a Request.t) : a tzresult Lwt.t = - let Request.Validated block = req in +let on_request (type a) w start_testchain active_chains spawn_child + (req : a Request.t) : a tzresult Lwt.t = + let (Request.Validated block) = req in let nv = Worker.state w in - Chain.head nv.parameters.chain_state >>= fun head -> + Chain.head nv.parameters.chain_state + >>= fun head -> let head_header = State.Block.header head and head_hash = State.Block.hash head and block_header = State.Block.header block and block_hash = State.Block.hash block in - begin - match nv.prevalidator with - | None -> - Lwt.return head_header.shell.fitness - | Some pv -> - Prevalidator.fitness pv - end >>= fun context_fitness -> + ( match nv.prevalidator with + | None -> + Lwt.return head_header.shell.fitness + | Some pv -> + Prevalidator.fitness pv ) + >>= fun context_fitness -> let head_fitness = head_header.shell.fitness in let new_fitness = block_header.shell.fitness in let accepted_head = if Fitness.(context_fitness = head_fitness) then Fitness.(new_fitness > head_fitness) - else - Fitness.(new_fitness >= context_fitness) in - if not accepted_head then - return Event.Ignored_head - else begin - Chain.set_head nv.parameters.chain_state block >>=? fun previous -> - may_update_checkpoint nv.parameters.chain_state block >>=? fun () -> - broadcast_head w ~previous block >>= fun () -> - begin match nv.prevalidator with - | Some old_prevalidator -> - State.Block.protocol_hash block >>= fun new_protocol -> - let old_protocol = Prevalidator.protocol_hash old_prevalidator in - begin - if not (Protocol_hash.equal old_protocol new_protocol) then begin - safe_get_protocol new_protocol >>=? fun (module Proto) -> - let (limits, chain_db) = Prevalidator.parameters old_prevalidator in - (* TODO inject in the new prevalidator the operation + else Fitness.(new_fitness >= context_fitness) + in + if not accepted_head then return Event.Ignored_head + else + Chain.set_head nv.parameters.chain_state block + >>=? fun previous -> + may_update_checkpoint nv.parameters.chain_state block + >>=? fun () -> + broadcast_head w ~previous block + >>= fun () -> + ( match nv.prevalidator with + | Some old_prevalidator -> + State.Block.protocol_hash block + >>= fun new_protocol -> + let old_protocol = Prevalidator.protocol_hash old_prevalidator in + ( if not (Protocol_hash.equal old_protocol new_protocol) then ( + safe_get_protocol new_protocol + >>=? fun (module Proto) -> + let (limits, chain_db) = Prevalidator.parameters old_prevalidator in + (* TODO inject in the new prevalidator the operation from the previous one. *) - Prevalidator.create limits (module Proto) chain_db >>= function - | Error errs -> - Log.lwt_log_error "@[Failed to reinstantiate prevalidator:@ %a@]" - pp_print_error errs >>= fun () -> - nv.prevalidator <- None ; - Prevalidator.shutdown old_prevalidator >>= fun () -> - return_unit - | Ok prevalidator -> - nv.prevalidator <- Some prevalidator ; - Prevalidator.shutdown old_prevalidator >>= fun () -> - return_unit - end else begin - Prevalidator.flush old_prevalidator block_hash >>=? fun () -> - return_unit - end - end >>=? 
fun () -> - return_unit - | None -> return_unit - end >>=? fun () -> - (if start_testchain then - may_switch_test_chain w active_chains spawn_child block - else - Lwt.return_unit) >>= fun () -> + Prevalidator.create limits (module Proto) chain_db + >>= function + | Error errs -> + Log.lwt_log_error + "@[Failed to reinstantiate prevalidator:@ %a@]" + pp_print_error + errs + >>= fun () -> + nv.prevalidator <- None ; + Prevalidator.shutdown old_prevalidator >>= fun () -> return_unit + | Ok prevalidator -> + nv.prevalidator <- Some prevalidator ; + Prevalidator.shutdown old_prevalidator >>= fun () -> return_unit + ) + else + Prevalidator.flush old_prevalidator block_hash + >>=? fun () -> return_unit ) + >>=? fun () -> return_unit + | None -> + return_unit ) + >>=? fun () -> + ( if start_testchain then + may_switch_test_chain w active_chains spawn_child block + else Lwt.return_unit ) + >>= fun () -> Lwt_watcher.notify nv.new_head_input block ; if Block_hash.equal head_hash block_header.shell.predecessor then return Event.Head_incrememt - else - return Event.Branch_switch - end + else return Event.Branch_switch -let on_completion (type a) w (req : a Request.t) (update : a) request_status = - let Request.Validated block = req in +let on_completion (type a) w (req : a Request.t) (update : a) request_status = + let (Request.Validated block) = req in let fitness = State.Block.fitness block in let request = State.Block.hash block in - Worker.record_event w (Processed_block { request ; request_status ; update ; fitness }) ; + Worker.record_event + w + (Processed_block {request; request_status; update; fitness}) ; Lwt.return_unit let on_close w = let nv = Worker.state w in - Distributed_db.deactivate nv.parameters.chain_db >>= fun () -> + Distributed_db.deactivate nv.parameters.chain_db + >>= fun () -> let pvs = P2p_peer.InitializationTable.fold_promises (fun _ pv acc -> - (pv >>= function - | Error _ -> Lwt.return_unit - | Ok pv -> Peer_validator.shutdown pv) :: acc) - nv.active_peers [] in + ( pv + >>= function + | Error _ -> Lwt.return_unit | Ok pv -> Peer_validator.shutdown pv ) + :: acc) + nv.active_peers + [] + in Lwt.join - (begin match nv.prevalidator with - | Some prevalidator -> Prevalidator.shutdown prevalidator - | None -> Lwt.return_unit - end :: - Lwt_utils.may ~f:(fun (_, shutdown) -> shutdown ()) nv.child :: - pvs) >>= fun () -> - Lwt.return_unit + ( ( match nv.prevalidator with + | Some prevalidator -> + Prevalidator.shutdown prevalidator + | None -> + Lwt.return_unit ) + :: Lwt_utils.may ~f:(fun (_, shutdown) -> shutdown ()) nv.child + :: pvs ) + >>= fun () -> Lwt.return_unit let on_launch start_prevalidator w _ parameters = - begin if start_prevalidator then - State.read_chain_data parameters.chain_state - (fun _ { State.current_head ; _ } -> Lwt.return current_head) >>= fun head -> - State.Block.protocol_hash head >>= fun head_hash -> - safe_get_protocol head_hash >>= function - | Ok (module Proto) -> begin - Prevalidator.create - parameters.prevalidator_limits - (module Proto) - parameters.chain_db >>= function - | Error err -> - Log.lwt_log_error "@[Failed to instantiate prevalidator:@ %a@]" - pp_print_error err >>= fun () -> - return_none - | Ok prevalidator -> - return_some prevalidator - end - | Error err -> - Log.lwt_log_error "@[Failed to instantiate prevalidator:@ %a@]" - pp_print_error err >>= fun () -> - return_none - else return_none end >>=? 
fun prevalidator -> + ( if start_prevalidator then + State.read_chain_data + parameters.chain_state + (fun _ {State.current_head; _} -> Lwt.return current_head) + >>= fun head -> + State.Block.protocol_hash head + >>= fun head_hash -> + safe_get_protocol head_hash + >>= function + | Ok (module Proto) -> ( + Prevalidator.create + parameters.prevalidator_limits + (module Proto) + parameters.chain_db + >>= function + | Error err -> + Log.lwt_log_error + "@[Failed to instantiate prevalidator:@ %a@]" + pp_print_error + err + >>= fun () -> return_none + | Ok prevalidator -> + return_some prevalidator ) + | Error err -> + Log.lwt_log_error + "@[Failed to instantiate prevalidator:@ %a@]" + pp_print_error + err + >>= fun () -> return_none + else return_none ) + >>=? fun prevalidator -> let valid_block_input = Lwt_watcher.create_input () in let new_head_input = Lwt_watcher.create_input () in - let bootstrapped_waiter, bootstrapped_wakener = Lwt.wait () in + let (bootstrapped_waiter, bootstrapped_wakener) = Lwt.wait () in let nv = - { parameters ; - valid_block_input ; - new_head_input ; - bootstrapped_wakener ; - bootstrapped_waiter ; - bootstrapped = (parameters.limits.bootstrap_threshold <= 0) ; - active_peers = - P2p_peer.InitializationTable.create 50 ; (* TODO use `2 * max_connection` *) - bootstrapped_peers = - P2p_peer.Table.create 50 ; (* TODO use `2 * max_connection` *) - child = None ; - prevalidator } in + { parameters; + valid_block_input; + new_head_input; + bootstrapped_wakener; + bootstrapped_waiter; + bootstrapped = parameters.limits.bootstrap_threshold <= 0; + active_peers = P2p_peer.InitializationTable.create 50; + (* TODO use `2 * max_connection` *) + bootstrapped_peers = P2p_peer.Table.create 50; + (* TODO use `2 * max_connection` *) + child = None; + prevalidator } + in if nv.bootstrapped then Lwt.wakeup_later bootstrapped_wakener () ; - Distributed_db.set_callback parameters.chain_db { - notify_branch = begin fun peer_id locator -> - Lwt.async begin fun () -> - with_activated_peer_validator w peer_id @@ fun pv -> - Peer_validator.notify_branch pv locator ; - return_unit - end - end ; - notify_head = begin fun peer_id block ops -> - Lwt.async begin fun () -> - with_activated_peer_validator w peer_id (fun pv -> - Peer_validator.notify_head pv block ; - return_unit) >>=? fun () -> - (* TODO notify prevalidator only if head is known ??? *) - match nv.prevalidator with - | Some prevalidator -> - Prevalidator.notify_operations prevalidator peer_id ops >>= fun () -> - return_unit - | None -> return_unit - end; - end ; - disconnection = begin fun peer_id -> - Lwt.async begin fun () -> - let nv = Worker.state w in - match P2p_peer.InitializationTable.find_opt nv.active_peers peer_id with - | None -> return_unit - | Some pv -> - pv >>=? fun pv -> - Peer_validator.shutdown pv >>= fun () -> - return_unit - end - end ; - } ; + Distributed_db.set_callback + parameters.chain_db + { notify_branch = + (fun peer_id locator -> + Lwt.async (fun () -> + with_activated_peer_validator w peer_id + @@ fun pv -> + Peer_validator.notify_branch pv locator ; + return_unit)); + notify_head = + (fun peer_id block ops -> + Lwt.async (fun () -> + with_activated_peer_validator w peer_id (fun pv -> + Peer_validator.notify_head pv block ; + return_unit) + >>=? fun () -> + (* TODO notify prevalidator only if head is known ??? 
*) + match nv.prevalidator with + | Some prevalidator -> + Prevalidator.notify_operations prevalidator peer_id ops + >>= fun () -> return_unit + | None -> + return_unit)); + disconnection = + (fun peer_id -> + Lwt.async (fun () -> + let nv = Worker.state w in + match + P2p_peer.InitializationTable.find_opt nv.active_peers peer_id + with + | None -> + return_unit + | Some pv -> + pv + >>=? fun pv -> + Peer_validator.shutdown pv >>= fun () -> return_unit)) } ; return nv -let rec create - ?max_child_ttl ~start_prevalidator ~start_testchain ~active_chains ?parent - peer_validator_limits prevalidator_limits block_validator - global_valid_block_input - global_chains_input - db chain_state limits = +let rec create ?max_child_ttl ~start_prevalidator ~start_testchain + ~active_chains ?parent peer_validator_limits prevalidator_limits + block_validator global_valid_block_input global_chains_input db chain_state + limits = let spawn_child ~parent pvl pl bl gvbi gci db n l = - create ~start_prevalidator ~start_testchain ~active_chains ~parent pvl pl bl gvbi gci db n l >>=? fun w -> - return (Worker.state w, (fun () -> Worker.shutdown w)) in + create + ~start_prevalidator + ~start_testchain + ~active_chains + ~parent + pvl + pl + bl + gvbi + gci + db + n + l + >>=? fun w -> return (Worker.state w, fun () -> Worker.shutdown w) + in let module Handlers = struct type self = t + let on_launch = on_launch start_prevalidator + let on_request w = on_request w start_testchain active_chains spawn_child + let on_close = on_close + let on_error _ _ _ errs = Lwt.return_error errs + let on_completion = on_completion + let on_no_request _ = return_unit end in let parameters = - { max_child_ttl ; - parent ; - peer_validator_limits ; - prevalidator_limits ; - block_validator ; - global_valid_block_input ; - global_chains_input ; - db ; - chain_db = Distributed_db.activate db chain_state ; - chain_state ; - limits } in - Chain.init_head chain_state >>=? fun () -> - Worker.launch table + { max_child_ttl; + parent; + peer_validator_limits; + prevalidator_limits; + block_validator; + global_valid_block_input; + global_chains_input; + db; + chain_db = Distributed_db.activate db chain_state; + chain_state; + limits } + in + Chain.init_head chain_state + >>=? fun () -> + Worker.launch + table prevalidator_limits.worker_limits (State.Chain.id chain_state) parameters - (module Handlers) >>=? fun w -> + (module Handlers) + >>=? 
fun w -> Chain_id.Table.add active_chains (State.Chain.id chain_state) w ; Lwt_watcher.notify global_chains_input (State.Chain.id chain_state, true) ; return w (** Current block computation *) -let create - ?max_child_ttl - ~start_prevalidator - ~start_testchain - ~active_chains - peer_validator_limits prevalidator_limits - block_validator - global_valid_block_input - global_chains_input - global_db state limits = +let create ?max_child_ttl ~start_prevalidator ~start_testchain ~active_chains + peer_validator_limits prevalidator_limits block_validator + global_valid_block_input global_chains_input global_db state limits = (* hide the optional ?parent *) create ?max_child_ttl ~start_prevalidator ~start_testchain ~active_chains - peer_validator_limits prevalidator_limits + peer_validator_limits + prevalidator_limits block_validator global_valid_block_input global_chains_input - global_db state limits + global_db + state + limits let chain_id w = - let { parameters = { chain_state ; _ } ; _ } = Worker.state w in + let {parameters = {chain_state; _}; _} = Worker.state w in State.Chain.id chain_state let chain_state w = - let { parameters = { chain_state ; _ } ; _ } = Worker.state w in + let {parameters = {chain_state; _}; _} = Worker.state w in chain_state let prevalidator w = - let { prevalidator ; _ } = Worker.state w in + let {prevalidator; _} = Worker.state w in prevalidator let chain_db w = - let { parameters = { chain_db ; _ } ; _ } = Worker.state w in + let {parameters = {chain_db; _}; _} = Worker.state w in chain_db let child w = match (Worker.state w).child with - | None -> None - | Some ({ parameters = { chain_state ;_ } ; _ }, _) -> - try Some (List.assoc (State.Chain.id chain_state) (Worker.list table)) - with Not_found -> None + | None -> + None + | Some ({parameters = {chain_state; _}; _}, _) -> ( + try Some (List.assoc (State.Chain.id chain_state) (Worker.list table)) + with Not_found -> None ) let assert_fitness_increases ?(force = false) w distant_header = let pv = Worker.state w in let chain_state = Distributed_db.chain_state pv.parameters.chain_db in - Chain.head chain_state >>= fun local_header -> + Chain.head chain_state + >>= fun local_header -> fail_when - (not force && - Fitness.compare - distant_header.Block_header.shell.fitness - (State.Block.fitness local_header) <= 0) + ( (not force) + && Fitness.compare + distant_header.Block_header.shell.fitness + (State.Block.fitness local_header) + <= 0 ) (failure "Fitness too low") -let assert_checkpoint w (header: Block_header.t) = +let assert_checkpoint w (header : Block_header.t) = let pv = Worker.state w in let chain_state = Distributed_db.chain_state pv.parameters.chain_db in - State.Chain.acceptable_block chain_state header >>= fun acceptable -> - fail_unless acceptable + State.Chain.acceptable_block chain_state header + >>= fun acceptable -> + fail_unless + acceptable (Validation_errors.Checkpoint_error (Block_header.hash header, None)) let validate_block w ?force hash block operations = let nv = Worker.state w in assert (Block_hash.equal hash (Block_header.hash block)) ; - assert_fitness_increases ?force w block >>=? fun () -> - assert_checkpoint w block >>=? fun () -> + assert_fitness_increases ?force w block + >>=? fun () -> + assert_checkpoint w block + >>=? 
fun () -> Block_validator.validate ~canceler:(Worker.canceler w) ~notify_new_block:(notify_new_block w) nv.parameters.block_validator nv.parameters.chain_db - hash block operations + hash + block + operations let bootstrapped w = - let { bootstrapped_waiter ; _ } = Worker.state w in + let {bootstrapped_waiter; _} = Worker.state w in Lwt.protected bootstrapped_waiter let valid_block_watcher w = - let { valid_block_input ; _ } = Worker.state w in + let {valid_block_input; _} = Worker.state w in Lwt_watcher.create_stream valid_block_input let new_head_watcher w = - let { new_head_input ; _ } = Worker.state w in + let {new_head_input; _} = Worker.state w in Lwt_watcher.create_stream new_head_input let status = Worker.status + let information = Worker.information let running_workers () = Worker.list table let pending_requests t = Worker.Queue.pending_requests t -let pending_requests_length t = Worker.Queue.pending_requests_length t +let pending_requests_length t = Worker.Queue.pending_requests_length t let current_request t = Worker.current_request t diff --git a/src/lib_shell/chain_validator.mli b/src/lib_shell/chain_validator.mli index d2edc1fc19d286a0f4d5c7940c3225429ec0d4c7..d35e8ef41edc6fca48f8063fe52d3769334cddc5 100644 --- a/src/lib_shell/chain_validator.mli +++ b/src/lib_shell/chain_validator.mli @@ -26,16 +26,13 @@ type t -type limits = { - bootstrap_threshold: int ; - worker_limits: Worker_types.limits -} +type limits = {bootstrap_threshold : int; worker_limits : Worker_types.limits} -val create: +val create : ?max_child_ttl:int -> start_prevalidator:bool -> start_testchain:bool -> - active_chains: t Chain_id.Table.t -> + active_chains:t Chain_id.Table.t -> Peer_validator.limits -> Prevalidator.limits -> Block_validator.t -> @@ -46,32 +43,50 @@ val create: limits -> t tzresult Lwt.t -val bootstrapped: t -> unit Lwt.t +val bootstrapped : t -> unit Lwt.t -val chain_id: t -> Chain_id.t -val chain_state: t -> State.Chain.t -val prevalidator: t -> Prevalidator.t option -val chain_db: t -> Distributed_db.chain_db -val child: t -> t option +val chain_id : t -> Chain_id.t -val validate_block: +val chain_state : t -> State.Chain.t + +val prevalidator : t -> Prevalidator.t option + +val chain_db : t -> Distributed_db.chain_db + +val child : t -> t option + +val validate_block : t -> ?force:bool -> - Block_hash.t -> Block_header.t -> Operation.t list list -> + Block_hash.t -> + Block_header.t -> + Operation.t list list -> State.Block.t option tzresult Lwt.t -val shutdown: t -> unit Lwt.t +val shutdown : t -> unit Lwt.t + +val valid_block_watcher : t -> State.Block.t Lwt_stream.t * Lwt_watcher.stopper + +val new_head_watcher : t -> State.Block.t Lwt_stream.t * Lwt_watcher.stopper -val valid_block_watcher: t -> State.Block.t Lwt_stream.t * Lwt_watcher.stopper -val new_head_watcher: t -> State.Block.t Lwt_stream.t * Lwt_watcher.stopper +val running_workers : unit -> (Chain_id.t * t) list -val running_workers: unit -> (Chain_id.t * t) list -val status: t -> Worker_types.worker_status -val information: t -> Worker_types.worker_information +val status : t -> Worker_types.worker_status + +val information : t -> Worker_types.worker_information + +val pending_requests : + t -> (Time.System.t * Chain_validator_worker_state.Request.view) list -val pending_requests : t -> (Time.System.t * Chain_validator_worker_state.Request.view) list val pending_requests_length : t -> int -val current_request : t -> (Time.System.t * Time.System.t * Chain_validator_worker_state.Request.view) option -val last_events : t -> 
(Internal_event.level * Chain_validator_worker_state.Event.t list) list -val ddb_information: t -> Chain_validator_worker_state.Distributed_db_state.view +val current_request : + t -> + (Time.System.t * Time.System.t * Chain_validator_worker_state.Request.view) + option + +val last_events : + t -> (Internal_event.level * Chain_validator_worker_state.Event.t list) list + +val ddb_information : + t -> Chain_validator_worker_state.Distributed_db_state.view diff --git a/src/lib_shell/distributed_db.ml b/src/lib_shell/distributed_db.ml index e295141e76b1f84034a10d66d5a9ae7282825983..6e3fb118a50e92aab675226dfa89d51e22b27091 100644 --- a/src/lib_shell/distributed_db.ml +++ b/src/lib_shell/distributed_db.ml @@ -27,464 +27,530 @@ module Message = Distributed_db_message type p2p = (Message.t, Peer_metadata.t, Connection_metadata.t) P2p.net -type connection = (Message.t, Peer_metadata.t, Connection_metadata.t) P2p.connection + +type connection = + (Message.t, Peer_metadata.t, Connection_metadata.t) P2p.connection type 'a request_param = { - p2p : (Message.t, Peer_metadata.t, Connection_metadata.t) P2p.t ; - data: 'a ; - active: unit -> P2p_peer.Set.t ; - send: P2p_peer.Id.t -> Message.t -> unit ; + p2p : (Message.t, Peer_metadata.t, Connection_metadata.t) P2p.t; + data : 'a; + active : unit -> P2p_peer.Set.t; + send : P2p_peer.Id.t -> Message.t -> unit } -module Make_raw - (Hash : sig - type t - val name : string - val encoding : t Data_encoding.t - val pp : Format.formatter -> t -> unit - - module Logging : sig - val tag : t Tag.def - end - end) - (Disk_table : - Distributed_db_functors.DISK_TABLE with type key := Hash.t) - (Memory_table : - Distributed_db_functors.MEMORY_TABLE with type key := Hash.t) - (Request_message : sig - type param - val max_length : int - val initial_delay : Time.System.Span.t - val forge : param -> Hash.t list -> Message.t - end) - (Precheck : Distributed_db_functors.PRECHECK - with type key := Hash.t - and type value := Disk_table.value) = struct +module Make_raw (Hash : sig + type t + + val name : string + + val encoding : t Data_encoding.t + val pp : Format.formatter -> t -> unit + + module Logging : sig + val tag : t Tag.def + end +end) +(Disk_table : Distributed_db_functors.DISK_TABLE with type key := Hash.t) +(Memory_table : Distributed_db_functors.MEMORY_TABLE with type key := Hash.t) +(Request_message : sig + type param + + val max_length : int + + val initial_delay : Time.System.Span.t + + val forge : param -> Hash.t list -> Message.t +end) +(Precheck : Distributed_db_functors.PRECHECK + with type key := Hash.t + and type value := Disk_table.value) = +struct module Request = struct type param = Request_message.param request_param - let active { active ; _ } = active () - let initial_delay = Request_message.initial_delay + + let active {active; _} = active () + + let initial_delay = Request_message.initial_delay let rec send state gid keys = - let first_keys, keys = List.split_n Request_message.max_length keys in - let msg = (Request_message.forge state.data first_keys) in + let (first_keys, keys) = List.split_n Request_message.max_length keys in + let msg = Request_message.forge state.data first_keys in state.send gid msg ; let open Peer_metadata in - let (req : requests_kind) = match msg with - | Get_current_branch _ -> Branch - | Get_current_head _ -> Head - | Get_block_headers _ -> Block_header - | Get_operations _ -> Operations - | Get_protocols _ -> Protocols - | Get_operation_hashes_for_blocks _ -> Operation_hashes_for_block - | Get_operations_for_blocks _ 
-> Operations_for_block - | _ -> Other in + let (req : requests_kind) = + match msg with + | Get_current_branch _ -> + Branch + | Get_current_head _ -> + Head + | Get_block_headers _ -> + Block_header + | Get_operations _ -> + Operations + | Get_protocols _ -> + Protocols + | Get_operation_hashes_for_blocks _ -> + Operation_hashes_for_block + | Get_operations_for_blocks _ -> + Operations_for_block + | _ -> + Other + in let meta = P2p.get_peer_metadata state.p2p gid in Peer_metadata.incr meta @@ Scheduled_request req ; if keys <> [] then send state gid keys end module Scheduler = - Distributed_db_functors.Make_request_scheduler - (Hash) (Memory_table) (Request) + Distributed_db_functors.Make_request_scheduler (Hash) (Memory_table) + (Request) module Table = - Distributed_db_functors.Make_table - (Hash) (Disk_table) (Memory_table) (Scheduler) (Precheck) + Distributed_db_functors.Make_table (Hash) (Disk_table) (Memory_table) + (Scheduler) + (Precheck) - type t = { - scheduler: Scheduler.t ; - table: Table.t ; - } + type t = {scheduler : Scheduler.t; table : Table.t} - let state_of_t { scheduler ; table } = + let state_of_t {scheduler; table} = let table_length = Table.memory_table_length table in let scheduler_length = Scheduler.memory_table_length scheduler in - { Chain_validator_worker_state.Distributed_db_state. - table_length ; scheduler_length } + { Chain_validator_worker_state.Distributed_db_state.table_length; + scheduler_length } let create ?global_input request_param param = let scheduler = Scheduler.create request_param in let table = Table.create ?global_input scheduler param in - { scheduler ; table } - - let shutdown { scheduler ; _ } = - Scheduler.shutdown scheduler + {scheduler; table} + let shutdown {scheduler; _} = Scheduler.shutdown scheduler end module Fake_operation_storage = struct type store = State.Chain.t + type value = Operation.t + let known _ _ = Lwt.return_false + let read _ _ = Lwt.return (Error_monad.error_exn Not_found) + let read_opt _ _ = Lwt.return_none end module Raw_operation = - Make_raw - (Operation_hash) - (Fake_operation_storage) - (Operation_hash.Table) + Make_raw (Operation_hash) (Fake_operation_storage) (Operation_hash.Table) (struct type param = unit + let max_length = 10 + let initial_delay = Time.System.Span.of_seconds_exn 0.5 + let forge () keys = Message.Get_operations keys end) (struct type param = unit + type notified_value = Operation.t + let precheck _ _ v = Some v end) module Block_header_storage = struct type store = State.Chain.t + type value = Block_header.t + let known = State.Block.known_valid + let read chain_state h = - State.Block.read chain_state h >>=? fun b -> - return (State.Block.header b) + State.Block.read chain_state h >>=? 
fun b -> return (State.Block.header b) + let read_opt chain_state h = - State.Block.read_opt chain_state h >>= fun b -> - Lwt.return (Option.map ~f:State.Block.header b) + State.Block.read_opt chain_state h + >>= fun b -> Lwt.return (Option.map ~f:State.Block.header b) end module Raw_block_header = - Make_raw - (Block_hash) - (Block_header_storage) - (Block_hash.Table) + Make_raw (Block_hash) (Block_header_storage) (Block_hash.Table) (struct type param = unit + let max_length = 10 + let initial_delay = Time.System.Span.of_seconds_exn 0.5 + let forge () keys = Message.Get_block_headers keys end) (struct type param = unit + type notified_value = Block_header.t + let precheck _ _ v = Some v end) module Operation_hashes_storage = struct type store = State.Chain.t + type value = Operation_hash.t list + let known chain_state (h, _) = State.Block.known_valid chain_state h + let read chain_state (h, i) = - State.Block.read chain_state h >>=? fun b -> - State.Block.operation_hashes b i >>= fun (ops, _) -> - return ops + State.Block.read chain_state h + >>=? fun b -> + State.Block.operation_hashes b i >>= fun (ops, _) -> return ops + let read_opt chain_state (h, i) = - State.Block.read_opt chain_state h >>= function - | None -> Lwt.return_none + State.Block.read_opt chain_state h + >>= function + | None -> + Lwt.return_none | Some b -> - State.Block.operation_hashes b i >>= fun (ops, _) -> - Lwt.return_some ops + State.Block.operation_hashes b i + >>= fun (ops, _) -> Lwt.return_some ops end -module Operations_table = - Hashtbl.Make(struct - type t = Block_hash.t * int - let hash = Hashtbl.hash - let equal (b1, i1) (b2, i2) = - Block_hash.equal b1 b2 && i1 = i2 - end) +module Operations_table = Hashtbl.Make (struct + type t = Block_hash.t * int -module Raw_operation_hashes = struct + let hash = Hashtbl.hash - include - Make_raw - (struct - type t = Block_hash.t * int - let name = "operation_hashes" - let pp ppf (h, n) = Format.fprintf ppf "%a:%d" Block_hash.pp h n - let encoding = - let open Data_encoding in - obj2 (req "block" Block_hash.encoding) (req "index" uint16) - module Logging = struct - let tag = Tag.def ~doc:"Operation hashes" "operation_hashes" pp - end - end) - (Operation_hashes_storage) - (Operations_table) - (struct - type param = unit - let max_length = 10 - let initial_delay = Time.System.Span.of_seconds_exn 1. 
- let forge () keys = - Message.Get_operation_hashes_for_blocks keys - end) - (struct - type param = Operation_list_list_hash.t - type notified_value = - Operation_hash.t list * Operation_list_list_hash.path - let precheck (_block, expected_ofs) expected_hash (ops, path) = - let received_hash, received_ofs = - Operation_list_list_hash.check_path path - (Operation_list_hash.compute ops) in - if - received_ofs = expected_ofs && - Operation_list_list_hash.compare expected_hash received_hash = 0 - then - Some ops - else - None - end) + let equal (b1, i1) (b2, i2) = Block_hash.equal b1 b2 && i1 = i2 +end) - let clear_all table hash n = - List.iter (fun i -> Table.clear_or_cancel table (hash, i)) (0 -- (n-1)) +module Raw_operation_hashes = struct + include Make_raw + (struct + type t = Block_hash.t * int + + let name = "operation_hashes" + + let pp ppf (h, n) = Format.fprintf ppf "%a:%d" Block_hash.pp h n + + let encoding = + let open Data_encoding in + obj2 (req "block" Block_hash.encoding) (req "index" uint16) + + module Logging = struct + let tag = Tag.def ~doc:"Operation hashes" "operation_hashes" pp + end + end) + (Operation_hashes_storage) + (Operations_table) + (struct + type param = unit + + let max_length = 10 + + let initial_delay = Time.System.Span.of_seconds_exn 1. + + let forge () keys = Message.Get_operation_hashes_for_blocks keys + end) + (struct + type param = Operation_list_list_hash.t + + type notified_value = + Operation_hash.t list * Operation_list_list_hash.path + + let precheck (_block, expected_ofs) expected_hash (ops, path) = + let (received_hash, received_ofs) = + Operation_list_list_hash.check_path + path + (Operation_list_hash.compute ops) + in + if + received_ofs = expected_ofs + && Operation_list_list_hash.compare + expected_hash + received_hash + = 0 + then Some ops + else None + end) + let clear_all table hash n = + List.iter (fun i -> Table.clear_or_cancel table (hash, i)) (0 -- (n - 1)) end module Operations_storage = struct type store = State.Chain.t + type value = Operation.t list + let known chain_state (h, _) = State.Block.known_valid chain_state h + let read chain_state (h, i) = - State.Block.read chain_state h >>=? fun b -> - State.Block.operations b i >>= fun (ops, _) -> - return ops + State.Block.read chain_state h + >>=? fun b -> State.Block.operations b i >>= fun (ops, _) -> return ops + let read_opt chain_state (h, i) = - State.Block.read_opt chain_state h >>= function - | None -> Lwt.return_none + State.Block.read_opt chain_state h + >>= function + | None -> + Lwt.return_none | Some b -> - State.Block.operations b i >>= fun (ops, _) -> - Lwt.return_some ops + State.Block.operations b i >>= fun (ops, _) -> Lwt.return_some ops end module Raw_operations = struct - include - Make_raw - (struct - type t = Block_hash.t * int - let name = "operations" - let pp ppf (h, n) = Format.fprintf ppf "%a:%d" Block_hash.pp h n - let encoding = - let open Data_encoding in - obj2 (req "block" Block_hash.encoding) (req "index" uint16) - module Logging = struct - let tag = Tag.def ~doc:"Operations" "operations" pp - end - end) - (Operations_storage) - (Operations_table) - (struct - type param = unit - let max_length = 10 - let initial_delay = Time.System.Span.of_seconds_exn 1. 
- let forge () keys = - Message.Get_operations_for_blocks keys - end) - (struct - type param = Operation_list_list_hash.t - type notified_value = Operation.t list * Operation_list_list_hash.path - let precheck (_block, expected_ofs) expected_hash (ops, path) = - let received_hash, received_ofs = - Operation_list_list_hash.check_path path - (Operation_list_hash.compute - (List.map Operation.hash ops)) in - if - received_ofs = expected_ofs && - Operation_list_list_hash.compare expected_hash received_hash = 0 - then - Some ops - else - None - end) + include Make_raw + (struct + type t = Block_hash.t * int + + let name = "operations" + + let pp ppf (h, n) = Format.fprintf ppf "%a:%d" Block_hash.pp h n + + let encoding = + let open Data_encoding in + obj2 (req "block" Block_hash.encoding) (req "index" uint16) + + module Logging = struct + let tag = Tag.def ~doc:"Operations" "operations" pp + end + end) + (Operations_storage) + (Operations_table) + (struct + type param = unit + + let max_length = 10 + + let initial_delay = Time.System.Span.of_seconds_exn 1. + + let forge () keys = Message.Get_operations_for_blocks keys + end) + (struct + type param = Operation_list_list_hash.t + + type notified_value = + Operation.t list * Operation_list_list_hash.path + + let precheck (_block, expected_ofs) expected_hash (ops, path) = + let (received_hash, received_ofs) = + Operation_list_list_hash.check_path + path + (Operation_list_hash.compute (List.map Operation.hash ops)) + in + if + received_ofs = expected_ofs + && Operation_list_list_hash.compare + expected_hash + received_hash + = 0 + then Some ops + else None + end) let clear_all table hash n = - List.iter (fun i -> Table.clear_or_cancel table (hash, i)) (0 -- (n-1)) - + List.iter (fun i -> Table.clear_or_cancel table (hash, i)) (0 -- (n - 1)) end module Protocol_storage = struct type store = State.t + type value = Protocol.t + let known = State.Protocol.known + let read = State.Protocol.read + let read_opt = State.Protocol.read_opt end module Raw_protocol = - Make_raw - (Protocol_hash) - (Protocol_storage) - (Protocol_hash.Table) + Make_raw (Protocol_hash) (Protocol_storage) (Protocol_hash.Table) (struct type param = unit + let initial_delay = Time.System.Span.of_seconds_exn 10. 
+ let max_length = 10 + let forge () keys = Message.Get_protocols keys end) (struct type param = unit + type notified_value = Protocol.t + let precheck _ _ v = Some v end) type callback = { - notify_branch: - P2p_peer.Id.t -> Block_locator.t -> unit ; - notify_head: - P2p_peer.Id.t -> Block_header.t -> Mempool.t -> unit ; - disconnection: P2p_peer.Id.t -> unit ; + notify_branch : P2p_peer.Id.t -> Block_locator.t -> unit; + notify_head : P2p_peer.Id.t -> Block_header.t -> Mempool.t -> unit; + disconnection : P2p_peer.Id.t -> unit } type db = { - p2p: p2p ; - p2p_readers: p2p_reader P2p_peer.Table.t ; - disk: State.t ; - active_chains: chain_db Chain_id.Table.t ; - protocol_db: Raw_protocol.t ; - block_input: (Block_hash.t * Block_header.t) Lwt_watcher.input ; - operation_input: (Operation_hash.t * Operation.t) Lwt_watcher.input ; + p2p : p2p; + p2p_readers : p2p_reader P2p_peer.Table.t; + disk : State.t; + active_chains : chain_db Chain_id.Table.t; + protocol_db : Raw_protocol.t; + block_input : (Block_hash.t * Block_header.t) Lwt_watcher.input; + operation_input : (Operation_hash.t * Operation.t) Lwt_watcher.input } and chain_db = { - chain_state: State.Chain.t ; - global_db: db ; - operation_db: Raw_operation.t ; - block_header_db: Raw_block_header.t ; - operation_hashes_db: Raw_operation_hashes.t ; - operations_db: Raw_operations.t ; - mutable callback: callback ; - active_peers: P2p_peer.Set.t ref ; - active_connections: p2p_reader P2p_peer.Table.t ; + chain_state : State.Chain.t; + global_db : db; + operation_db : Raw_operation.t; + block_header_db : Raw_block_header.t; + operation_hashes_db : Raw_operation_hashes.t; + operations_db : Raw_operations.t; + mutable callback : callback; + active_peers : P2p_peer.Set.t ref; + active_connections : p2p_reader P2p_peer.Table.t } and p2p_reader = { - gid: P2p_peer.Id.t ; - conn: connection ; - peer_active_chains: chain_db Chain_id.Table.t ; - canceler: Lwt_canceler.t ; - mutable worker: unit Lwt.t ; + gid : P2p_peer.Id.t; + conn : connection; + peer_active_chains : chain_db Chain_id.Table.t; + canceler : Lwt_canceler.t; + mutable worker : unit Lwt.t } -let noop_callback = { - notify_branch = begin fun _gid _locator -> () end ; - notify_head = begin fun _gid _block _ops -> () end ; - disconnection = begin fun _gid -> () end ; -} +let noop_callback = + { notify_branch = (fun _gid _locator -> ()); + notify_head = (fun _gid _block _ops -> ()); + disconnection = (fun _gid -> ()) } type t = db -let state { disk ; _ } = disk -let chain_state { chain_state ; _ } = chain_state -let db { global_db ; _ } = global_db - -let information ({ global_db = { p2p_readers ; - active_chains ; _ } ; - operation_db ; - operations_db ; - block_header_db ; - operation_hashes_db ; - active_connections ; - active_peers ; _ - } : chain_db) = - { Chain_validator_worker_state.Distributed_db_state. 
- p2p_readers_length = P2p_peer.Table.length p2p_readers ; - active_chains_length = Chain_id.Table.length active_chains ; - operation_db = Raw_operation.state_of_t operation_db ; - operations_db = Raw_operations.state_of_t operations_db ; - block_header_db = Raw_block_header.state_of_t block_header_db ; - operations_hashed_db = Raw_operation_hashes.state_of_t operation_hashes_db ; - active_connections_length = P2p_peer.Table.length active_connections ; - active_peers_length = P2p_peer.Set.cardinal !active_peers ; - } - +let state {disk; _} = disk + +let chain_state {chain_state; _} = chain_state + +let db {global_db; _} = global_db + +let information + ({ global_db = {p2p_readers; active_chains; _}; + operation_db; + operations_db; + block_header_db; + operation_hashes_db; + active_connections; + active_peers; + _ } : + chain_db) = + { Chain_validator_worker_state.Distributed_db_state.p2p_readers_length = + P2p_peer.Table.length p2p_readers; + active_chains_length = Chain_id.Table.length active_chains; + operation_db = Raw_operation.state_of_t operation_db; + operations_db = Raw_operations.state_of_t operations_db; + block_header_db = Raw_block_header.state_of_t block_header_db; + operations_hashed_db = Raw_operation_hashes.state_of_t operation_hashes_db; + active_connections_length = P2p_peer.Table.length active_connections; + active_peers_length = P2p_peer.Set.cardinal !active_peers } let my_peer_id chain_db = P2p.peer_id chain_db.global_db.p2p let get_peer_metadata chain_db = P2p.get_peer_metadata chain_db.global_db.p2p -let read_block_header { disk ; _ } h = - State.read_block disk h >>= function +let read_block_header {disk; _} h = + State.read_block disk h + >>= function | Some b -> Lwt.return_some (State.Block.chain_id b, State.Block.header b) | None -> Lwt.return_none -let find_pending_block_header { peer_active_chains ; _ } h = +let find_pending_block_header {peer_active_chains; _} h = Chain_id.Table.fold (fun _chain_id chain_db acc -> - match acc with - | Some _ -> acc - | None when Raw_block_header.Table.pending - chain_db.block_header_db.table h -> - Some chain_db - | None -> None) + match acc with + | Some _ -> + acc + | None + when Raw_block_header.Table.pending chain_db.block_header_db.table h -> + Some chain_db + | None -> + None) peer_active_chains None -let find_pending_operations { peer_active_chains ; _ } h i = +let find_pending_operations {peer_active_chains; _} h i = Chain_id.Table.fold (fun _chain_id chain_db acc -> - match acc with - | Some _ -> acc - | None when Raw_operations.Table.pending - chain_db.operations_db.table (h, i) -> - Some chain_db - | None -> None) + match acc with + | Some _ -> + acc + | None + when Raw_operations.Table.pending chain_db.operations_db.table (h, i) + -> + Some chain_db + | None -> + None) peer_active_chains None -let find_pending_operation_hashes { peer_active_chains ; _ } h i = +let find_pending_operation_hashes {peer_active_chains; _} h i = Chain_id.Table.fold (fun _chain_id chain_db acc -> - match acc with - | Some _ -> acc - | None when Raw_operation_hashes.Table.pending - chain_db.operation_hashes_db.table (h, i) -> - Some chain_db - | None -> None) + match acc with + | Some _ -> + acc + | None + when Raw_operation_hashes.Table.pending + chain_db.operation_hashes_db.table + (h, i) -> + Some chain_db + | None -> + None) peer_active_chains None -let find_pending_operation { peer_active_chains ; _ } h = +let find_pending_operation {peer_active_chains; _} h = Chain_id.Table.fold (fun _chain_id chain_db acc -> - match acc with - | 
Some _ -> acc - | None when Raw_operation.Table.pending - chain_db.operation_db.table h -> - Some chain_db - | None -> None) + match acc with + | Some _ -> + acc + | None when Raw_operation.Table.pending chain_db.operation_db.table h -> + Some chain_db + | None -> + None) peer_active_chains None -let read_operation { active_chains ; _ } h = +let read_operation {active_chains; _} h = Chain_id.Table.fold (fun chain_id chain_db acc -> - acc >>= function - | Some _ -> acc - | None -> - Raw_operation.Table.read_opt - chain_db.operation_db.table h >>= function - | None -> Lwt.return_none - | Some bh -> Lwt.return_some (chain_id, bh)) + acc + >>= function + | Some _ -> + acc + | None -> ( + Raw_operation.Table.read_opt chain_db.operation_db.table h + >>= function + | None -> Lwt.return_none | Some bh -> Lwt.return_some (chain_id, bh) + )) active_chains Lwt.return_none module P2p_reader = struct - let may_activate global_db state chain_id f = match Chain_id.Table.find_opt state.peer_active_chains chain_id with | Some chain_db -> f chain_db - | None -> - match Chain_id.Table.find_opt global_db.active_chains chain_id with - | Some chain_db -> - chain_db.active_peers := - P2p_peer.Set.add state.gid !(chain_db.active_peers) ; - P2p_peer.Table.add chain_db.active_connections - state.gid state ; - Chain_id.Table.add state.peer_active_chains chain_id chain_db ; - f chain_db - | None -> - let meta = P2p.get_peer_metadata global_db.p2p state.gid in - Peer_metadata.incr meta Unactivated_chain ; - Lwt.return_unit + | None -> ( + match Chain_id.Table.find_opt global_db.active_chains chain_id with + | Some chain_db -> + chain_db.active_peers := + P2p_peer.Set.add state.gid !(chain_db.active_peers) ; + P2p_peer.Table.add chain_db.active_connections state.gid state ; + Chain_id.Table.add state.peer_active_chains chain_id chain_db ; + f chain_db + | None -> + let meta = P2p.get_peer_metadata global_db.p2p state.gid in + Peer_metadata.incr meta Unactivated_chain ; + Lwt.return_unit ) let deactivate state chain_db = chain_db.callback.disconnection state.gid ; @@ -510,144 +576,155 @@ module P2p_reader = struct f chain_db module Handle_msg_Logging = - Internal_event.Legacy_logging.Make_semantic - (struct let name = "node.distributed_db.p2p_reader" end) - + Internal_event.Legacy_logging.Make_semantic (struct + let name = "node.distributed_db.p2p_reader" + end) let soon () = let now = Systime_os.now () in match Ptime.add_span now (Ptime.Span.of_int_s 15) with - | Some s -> s - | None -> invalid_arg "Distributed_db.handle_msg: end of time" + | Some s -> + s + | None -> + invalid_arg "Distributed_db.handle_msg: end of time" let handle_msg global_db state msg = - let open Message in let open Handle_msg_Logging in let meta = P2p.get_peer_metadata global_db.p2p state.gid in - - lwt_debug Tag.DSL.(fun f -> - f "Read message from %a: %a" - -% t event "read_message" - -% a P2p_peer.Id.Logging.tag state.gid - -% a Message.Logging.tag msg) >>= fun () -> - + lwt_debug + Tag.DSL.( + fun f -> + f "Read message from %a: %a" + -% t event "read_message" + -% a P2p_peer.Id.Logging.tag state.gid + -% a Message.Logging.tag msg) + >>= fun () -> match msg with | Get_current_branch chain_id -> - Peer_metadata.incr meta @@ Received_request Branch; - may_handle_global global_db chain_id @@ fun chain_db -> + Peer_metadata.incr meta @@ Received_request Branch ; + may_handle_global global_db chain_id + @@ fun chain_db -> if not (Chain_id.Table.mem state.peer_active_chains chain_id) then - Peer_metadata.update_requests meta Branch @@ - 
P2p.try_send global_db.p2p state.conn @@ - Get_current_branch chain_id ; - let seed = { - Block_locator.receiver_id=state.gid; - sender_id=(my_peer_id chain_db) } in - (Chain.locator chain_db.chain_state seed) >>= fun locator -> - Peer_metadata.update_responses meta Branch @@ - P2p.try_send global_db.p2p state.conn @@ - Current_branch (chain_id, locator) ; + Peer_metadata.update_requests meta Branch + @@ P2p.try_send global_db.p2p state.conn + @@ Get_current_branch chain_id ; + let seed = + { Block_locator.receiver_id = state.gid; + sender_id = my_peer_id chain_db } + in + Chain.locator chain_db.chain_state seed + >>= fun locator -> + Peer_metadata.update_responses meta Branch + @@ P2p.try_send global_db.p2p state.conn + @@ Current_branch (chain_id, locator) ; Lwt.return_unit - | Current_branch (chain_id, locator) -> - may_activate global_db state chain_id @@ fun chain_db -> - let head, hist = (locator :> Block_header.t * Block_hash.t list) in + may_activate global_db state chain_id + @@ fun chain_db -> + let (head, hist) = (locator :> Block_header.t * Block_hash.t list) in Lwt_list.exists_p (State.Block.known_invalid chain_db.chain_state) - (Block_header.hash head :: hist) >>= fun known_invalid -> - if known_invalid then begin - P2p.disconnect global_db.p2p state.conn >>= fun () -> + (Block_header.hash head :: hist) + >>= fun known_invalid -> + if known_invalid then ( + P2p.disconnect global_db.p2p state.conn + >>= fun () -> P2p.greylist_peer global_db.p2p state.gid ; - Lwt.return_unit - end else - if Time.System.(soon () < of_protocol_exn head.shell.timestamp) then begin + Lwt.return_unit ) + else if Time.System.(soon () < of_protocol_exn head.shell.timestamp) + then ( Peer_metadata.incr meta Future_block ; - lwt_log_notice Tag.DSL.(fun f -> - f "Received future block %a from peer %a." - -% t event "received_future_block" - -% a Block_hash.Logging.tag (Block_header.hash head) - -% a P2p_peer.Id.Logging.tag state.gid) >>= fun () -> - Lwt.return_unit - end else begin + lwt_log_notice + Tag.DSL.( + fun f -> + f "Received future block %a from peer %a." + -% t event "received_future_block" + -% a Block_hash.Logging.tag (Block_header.hash head) + -% a P2p_peer.Id.Logging.tag state.gid) + >>= fun () -> Lwt.return_unit ) + else ( chain_db.callback.notify_branch state.gid locator ; (* TODO discriminate between received advertisements and responses? 
*) Peer_metadata.incr meta @@ Received_advertisement Branch ; - Lwt.return_unit - end - + Lwt.return_unit ) | Deactivate chain_id -> - may_handle global_db state chain_id @@ fun chain_db -> + may_handle global_db state chain_id + @@ fun chain_db -> deactivate state chain_db ; Chain_id.Table.remove state.peer_active_chains chain_id ; Lwt.return_unit - | Get_current_head chain_id -> - may_handle global_db state chain_id @@ fun chain_db -> + may_handle global_db state chain_id + @@ fun chain_db -> Peer_metadata.incr meta @@ Received_request Head ; - let { Connection_metadata.disable_mempool ; _ } = - P2p.connection_remote_metadata chain_db.global_db.p2p state.conn in - begin - if disable_mempool then - Chain.head chain_db.chain_state >>= fun head -> - Lwt.return (State.Block.header head, Mempool.empty) - else - State.Current_mempool.get chain_db.chain_state - end >>= fun (head, mempool) -> + let {Connection_metadata.disable_mempool; _} = + P2p.connection_remote_metadata chain_db.global_db.p2p state.conn + in + ( if disable_mempool then + Chain.head chain_db.chain_state + >>= fun head -> Lwt.return (State.Block.header head, Mempool.empty) + else State.Current_mempool.get chain_db.chain_state ) + >>= fun (head, mempool) -> (* TODO bound the sent mempool size *) - Peer_metadata.update_responses meta Head @@ - P2p.try_send global_db.p2p state.conn @@ - Current_head (chain_id, head, mempool) ; + Peer_metadata.update_responses meta Head + @@ P2p.try_send global_db.p2p state.conn + @@ Current_head (chain_id, head, mempool) ; Lwt.return_unit - | Current_head (chain_id, header, mempool) -> - may_handle global_db state chain_id @@ fun chain_db -> + may_handle global_db state chain_id + @@ fun chain_db -> let head = Block_header.hash header in - State.Block.known_invalid chain_db.chain_state head >>= fun known_invalid -> - let { Connection_metadata.disable_mempool ; _ } = - P2p.connection_local_metadata chain_db.global_db.p2p state.conn in + State.Block.known_invalid chain_db.chain_state head + >>= fun known_invalid -> + let {Connection_metadata.disable_mempool; _} = + P2p.connection_local_metadata chain_db.global_db.p2p state.conn + in let known_invalid = - known_invalid || - (disable_mempool && mempool <> Mempool.empty) + known_invalid || (disable_mempool && mempool <> Mempool.empty) (* A non-empty mempool was received while mempool is desactivated, so the message is ignored. This should probably warrant a reduction of the sender's score. *) in - if known_invalid then begin - P2p.disconnect global_db.p2p state.conn >>= fun () -> + if known_invalid then ( + P2p.disconnect global_db.p2p state.conn + >>= fun () -> P2p.greylist_peer global_db.p2p state.gid ; - Lwt.return_unit - end else if Time.System.(soon () < of_protocol_exn header.shell.timestamp) then begin + Lwt.return_unit ) + else if Time.System.(soon () < of_protocol_exn header.shell.timestamp) + then ( Peer_metadata.incr meta Future_block ; - lwt_log_notice Tag.DSL.(fun f -> - f "Received future block %a from peer %a." - -% t event "received_future_block" - -% a Block_hash.Logging.tag head - -% a P2p_peer.Id.Logging.tag state.gid) >>= fun () -> - Lwt.return_unit - end else begin + lwt_log_notice + Tag.DSL.( + fun f -> + f "Received future block %a from peer %a." + -% t event "received_future_block" + -% a Block_hash.Logging.tag head + -% a P2p_peer.Id.Logging.tag state.gid) + >>= fun () -> Lwt.return_unit ) + else ( chain_db.callback.notify_head state.gid header mempool ; (* TODO discriminate between received advertisements and responses? 
*) Peer_metadata.incr meta @@ Received_advertisement Head ; - Lwt.return_unit - end - + Lwt.return_unit ) | Get_block_headers hashes -> Peer_metadata.incr meta @@ Received_request Block_header ; Lwt_list.iter_p (fun hash -> - read_block_header global_db hash >>= function - | None -> - Peer_metadata.incr meta @@ Unadvertised Block ; - Lwt.return_unit - | Some (_chain_id, header) -> - Peer_metadata.update_responses meta Block_header @@ - P2p.try_send global_db.p2p state.conn @@ - Block_header header ; - Lwt.return_unit) + read_block_header global_db hash + >>= function + | None -> + Peer_metadata.incr meta @@ Unadvertised Block ; + Lwt.return_unit + | Some (_chain_id, header) -> + Peer_metadata.update_responses meta Block_header + @@ P2p.try_send global_db.p2p state.conn + @@ Block_header header ; + Lwt.return_unit) hashes - | Block_header block -> begin + | Block_header block -> ( let hash = Block_header.hash block in match find_pending_block_header state hash with | None -> @@ -655,27 +732,29 @@ module P2p_reader = struct Lwt.return_unit | Some chain_db -> Raw_block_header.Table.notify - chain_db.block_header_db.table state.gid hash block >>= fun () -> + chain_db.block_header_db.table + state.gid + hash + block + >>= fun () -> Peer_metadata.incr meta @@ Received_response Block_header ; - Lwt.return_unit - end - + Lwt.return_unit ) | Get_operations hashes -> Peer_metadata.incr meta @@ Received_request Operations ; Lwt_list.iter_p (fun hash -> - read_operation global_db hash >>= function - | None -> - Peer_metadata.incr meta @@ Unadvertised Operations ; - Lwt.return_unit - | Some (_chain_id, op) -> - Peer_metadata.update_responses meta Operations @@ - P2p.try_send global_db.p2p state.conn @@ - Operation op ; - Lwt.return_unit) + read_operation global_db hash + >>= function + | None -> + Peer_metadata.incr meta @@ Unadvertised Operations ; + Lwt.return_unit + | Some (_chain_id, op) -> + Peer_metadata.update_responses meta Operations + @@ P2p.try_send global_db.p2p state.conn + @@ Operation op ; + Lwt.return_unit) hashes - - | Operation operation -> begin + | Operation operation -> ( let hash = Operation.hash operation in match find_pending_operation state hash with | None -> @@ -683,102 +762,107 @@ module P2p_reader = struct Lwt.return_unit | Some chain_db -> Raw_operation.Table.notify - chain_db.operation_db.table state.gid hash operation >>= fun () -> + chain_db.operation_db.table + state.gid + hash + operation + >>= fun () -> Peer_metadata.incr meta @@ Received_response Operations ; - Lwt.return_unit - end - + Lwt.return_unit ) | Get_protocols hashes -> Peer_metadata.incr meta @@ Received_request Protocols ; Lwt_list.iter_p (fun hash -> - State.Protocol.read_opt global_db.disk hash >>= function - | None -> - Peer_metadata.incr meta @@ Unadvertised Protocol ; - Lwt.return_unit - | Some p -> - Peer_metadata.update_responses meta Protocols @@ - P2p.try_send global_db.p2p state.conn @@ - Protocol p ; - Lwt.return_unit) + State.Protocol.read_opt global_db.disk hash + >>= function + | None -> + Peer_metadata.incr meta @@ Unadvertised Protocol ; + Lwt.return_unit + | Some p -> + Peer_metadata.update_responses meta Protocols + @@ P2p.try_send global_db.p2p state.conn + @@ Protocol p ; + Lwt.return_unit) hashes - | Protocol protocol -> let hash = Protocol.hash protocol in Raw_protocol.Table.notify - global_db.protocol_db.table state.gid hash protocol >>= fun () -> + global_db.protocol_db.table + state.gid + hash + protocol + >>= fun () -> Peer_metadata.incr meta @@ Received_response Protocols ; 
Lwt.return_unit - | Get_operation_hashes_for_blocks blocks -> - Peer_metadata.incr meta @@ - Received_request Operation_hashes_for_block ; + Peer_metadata.incr meta @@ Received_request Operation_hashes_for_block ; Lwt_list.iter_p (fun (hash, ofs) -> - State.read_block global_db.disk hash >>= function - | None -> Lwt.return_unit - | Some block -> - State.Block.operation_hashes - block ofs >>= fun (hashes, path) -> - Peer_metadata.update_responses meta - Operation_hashes_for_block @@ - P2p.try_send global_db.p2p state.conn @@ - Operation_hashes_for_block (hash, ofs, hashes, path) ; - Lwt.return_unit) + State.read_block global_db.disk hash + >>= function + | None -> + Lwt.return_unit + | Some block -> + State.Block.operation_hashes block ofs + >>= fun (hashes, path) -> + Peer_metadata.update_responses meta Operation_hashes_for_block + @@ P2p.try_send global_db.p2p state.conn + @@ Operation_hashes_for_block (hash, ofs, hashes, path) ; + Lwt.return_unit) blocks - - | Operation_hashes_for_block (block, ofs, ops, path) -> begin - match find_pending_operation_hashes state block ofs with - | None -> - Peer_metadata.incr meta Unexpected_response ; - Lwt.return_unit - | Some chain_db -> - Raw_operation_hashes.Table.notify - chain_db.operation_hashes_db.table state.gid - (block, ofs) (ops, path) >>= fun () -> - Peer_metadata.incr meta @@ - Received_response Operation_hashes_for_block ; - Lwt.return_unit - end - + | Operation_hashes_for_block (block, ofs, ops, path) -> ( + match find_pending_operation_hashes state block ofs with + | None -> + Peer_metadata.incr meta Unexpected_response ; + Lwt.return_unit + | Some chain_db -> + Raw_operation_hashes.Table.notify + chain_db.operation_hashes_db.table + state.gid + (block, ofs) + (ops, path) + >>= fun () -> + Peer_metadata.incr meta + @@ Received_response Operation_hashes_for_block ; + Lwt.return_unit ) | Get_operations_for_blocks blocks -> - Peer_metadata.incr meta @@ - Received_request Operations_for_block ; + Peer_metadata.incr meta @@ Received_request Operations_for_block ; Lwt_list.iter_p (fun (hash, ofs) -> - State.read_block global_db.disk hash >>= function - | None -> Lwt.return_unit - | Some block -> - State.Block.operations - block ofs >>= fun (ops, path) -> - Peer_metadata.update_responses meta - Operations_for_block @@ - P2p.try_send global_db.p2p state.conn @@ - Operations_for_block (hash, ofs, ops, path) ; - Lwt.return_unit) + State.read_block global_db.disk hash + >>= function + | None -> + Lwt.return_unit + | Some block -> + State.Block.operations block ofs + >>= fun (ops, path) -> + Peer_metadata.update_responses meta Operations_for_block + @@ P2p.try_send global_db.p2p state.conn + @@ Operations_for_block (hash, ofs, ops, path) ; + Lwt.return_unit) blocks - - | Operations_for_block (block, ofs, ops, path) -> begin - match find_pending_operations state block ofs with - | None -> - Peer_metadata.incr meta Unexpected_response ; - Lwt.return_unit - | Some chain_db -> - Raw_operations.Table.notify - chain_db.operations_db.table state.gid - (block, ofs) (ops, path) >>= fun () -> - Peer_metadata.incr meta @@ - Received_response Operations_for_block ; - Lwt.return_unit - end + | Operations_for_block (block, ofs, ops, path) -> ( + match find_pending_operations state block ofs with + | None -> + Peer_metadata.incr meta Unexpected_response ; + Lwt.return_unit + | Some chain_db -> + Raw_operations.Table.notify + chain_db.operations_db.table + state.gid + (block, ofs) + (ops, path) + >>= fun () -> + Peer_metadata.incr meta @@ Received_response 
Operations_for_block ; + Lwt.return_unit ) let rec worker_loop global_db state = - protect ~canceler:state.canceler begin fun () -> - P2p.recv global_db.p2p state.conn - end >>= function + protect ~canceler:state.canceler (fun () -> + P2p.recv global_db.p2p state.conn) + >>= function | Ok msg -> - handle_msg global_db state msg >>= fun () -> - worker_loop global_db state + handle_msg global_db state msg + >>= fun () -> worker_loop global_db state | Error _ -> Chain_id.Table.iter (fun _ -> deactivate state) @@ -788,169 +872,183 @@ module P2p_reader = struct let run db gid conn = let canceler = Lwt_canceler.create () in - let state = { - conn ; gid ; canceler ; - peer_active_chains = Chain_id.Table.create 17 ; - worker = Lwt.return_unit ; - } in - Chain_id.Table.iter (fun chain_id _chain_db -> - Lwt.async begin fun () -> - let meta = P2p.get_peer_metadata db.p2p gid in - Peer_metadata.incr meta (Sent_request Branch) ; - P2p.send db.p2p conn (Get_current_branch chain_id) - end) + let state = + { conn; + gid; + canceler; + peer_active_chains = Chain_id.Table.create 17; + worker = Lwt.return_unit } + in + Chain_id.Table.iter + (fun chain_id _chain_db -> + Lwt.async (fun () -> + let meta = P2p.get_peer_metadata db.p2p gid in + Peer_metadata.incr meta (Sent_request Branch) ; + P2p.send db.p2p conn (Get_current_branch chain_id))) db.active_chains ; state.worker <- Lwt_utils.worker - (Format.asprintf "db_network_reader.%a" - P2p_peer.Id.pp_short gid) + (Format.asprintf "db_network_reader.%a" P2p_peer.Id.pp_short gid) ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop db state) ~cancel:(fun () -> Lwt_canceler.cancel canceler) ; P2p_peer.Table.add db.p2p_readers gid state - let shutdown s = - Lwt_canceler.cancel s.canceler >>= fun () -> - s.worker - + let shutdown s = Lwt_canceler.cancel s.canceler >>= fun () -> s.worker end let active_peer_ids p2p () = List.fold_left (fun acc conn -> - let { P2p_connection.Info.peer_id ; _ } = P2p.connection_info p2p conn in - P2p_peer.Set.add peer_id acc) + let {P2p_connection.Info.peer_id; _} = P2p.connection_info p2p conn in + P2p_peer.Set.add peer_id acc) P2p_peer.Set.empty (P2p.connections p2p) let raw_try_send p2p peer_id msg = match P2p.find_connection p2p peer_id with - | None -> () - | Some conn -> ignore (P2p.try_send p2p conn msg : bool) - + | None -> + () + | Some conn -> + ignore (P2p.try_send p2p conn msg : bool) let create disk p2p = let global_request = - { p2p ; - data = () ; - active = active_peer_ids p2p ; - send = raw_try_send p2p ; - } in + {p2p; data = (); active = active_peer_ids p2p; send = raw_try_send p2p} + in let protocol_db = Raw_protocol.create global_request disk in let active_chains = Chain_id.Table.create 17 in let p2p_readers = P2p_peer.Table.create 17 in let block_input = Lwt_watcher.create_input () in let operation_input = Lwt_watcher.create_input () in let db = - { p2p ; p2p_readers ; disk ; - active_chains ; protocol_db ; - block_input ; operation_input ; - } in + { p2p; + p2p_readers; + disk; + active_chains; + protocol_db; + block_input; + operation_input } + in db -let activate ({ p2p ; active_chains ; _ } as global_db) chain_state = +let activate ({p2p; active_chains; _} as global_db) chain_state = P2p.on_new_connection p2p (P2p_reader.run global_db) ; P2p.iter_connections p2p (P2p_reader.run global_db) ; - P2p.activate p2p; + P2p.activate p2p ; let chain_id = State.Chain.id chain_state in match Chain_id.Table.find_opt active_chains chain_id with | None -> let active_peers = ref P2p_peer.Set.empty 
in let p2p_request = - { p2p ; - data = () ; - active = (fun () -> !active_peers) ; - send = raw_try_send p2p ; - } in + { p2p; + data = (); + active = (fun () -> !active_peers); + send = raw_try_send p2p } + in let operation_db = Raw_operation.create - ~global_input:global_db.operation_input p2p_request chain_state in + ~global_input:global_db.operation_input + p2p_request + chain_state + in let block_header_db = Raw_block_header.create - ~global_input:global_db.block_input p2p_request chain_state in + ~global_input:global_db.block_input + p2p_request + chain_state + in let operation_hashes_db = - Raw_operation_hashes.create p2p_request chain_state in - let operations_db = - Raw_operations.create p2p_request chain_state in - let chain = { - global_db ; operation_db ; block_header_db ; - operation_hashes_db ; operations_db ; - chain_state ; callback = noop_callback ; active_peers ; - active_connections = P2p_peer.Table.create 53 ; - } in + Raw_operation_hashes.create p2p_request chain_state + in + let operations_db = Raw_operations.create p2p_request chain_state in + let chain = + { global_db; + operation_db; + block_header_db; + operation_hashes_db; + operations_db; + chain_state; + callback = noop_callback; + active_peers; + active_connections = P2p_peer.Table.create 53 } + in P2p.iter_connections p2p (fun _peer_id conn -> - Lwt.async begin fun () -> - P2p.send p2p conn (Get_current_branch chain_id) - end) ; + Lwt.async (fun () -> P2p.send p2p conn (Get_current_branch chain_id))) ; Chain_id.Table.add active_chains chain_id chain ; chain | Some chain -> chain -let set_callback chain_db callback = - chain_db.callback <- callback +let set_callback chain_db callback = chain_db.callback <- callback let deactivate chain_db = - let { active_chains ; p2p ; _ } = chain_db.global_db in + let {active_chains; p2p; _} = chain_db.global_db in let chain_id = State.Chain.id chain_db.chain_state in Chain_id.Table.remove active_chains chain_id ; P2p_peer.Table.iter (fun _peer_id reader -> - P2p_reader.deactivate reader chain_db ; - Lwt.async begin fun () -> - P2p.send p2p reader.conn (Deactivate chain_id) - end) + P2p_reader.deactivate reader chain_db ; + Lwt.async (fun () -> P2p.send p2p reader.conn (Deactivate chain_id))) chain_db.active_connections ; - Raw_operation.shutdown chain_db.operation_db >>= fun () -> - Raw_block_header.shutdown chain_db.block_header_db >>= fun () -> - Lwt.return_unit >>= fun () -> - Lwt.return_unit + Raw_operation.shutdown chain_db.operation_db + >>= fun () -> + Raw_block_header.shutdown chain_db.block_header_db + >>= fun () -> Lwt.return_unit >>= fun () -> Lwt.return_unit -let get_chain { active_chains ; _ } chain_id = +let get_chain {active_chains; _} chain_id = Chain_id.Table.find_opt active_chains chain_id -let greylist { global_db = { p2p ; _ } ; _ } peer_id = +let greylist {global_db = {p2p; _}; _} peer_id = Lwt.return (P2p.greylist_peer p2p peer_id) -let disconnect { global_db = { p2p ; _ } ; _ } peer_id = +let disconnect {global_db = {p2p; _}; _} peer_id = match P2p.find_connection p2p peer_id with - | None -> Lwt.return_unit - | Some conn -> P2p.disconnect p2p conn + | None -> + Lwt.return_unit + | Some conn -> + P2p.disconnect p2p conn -let shutdown { p2p_readers ; active_chains ; _ } = +let shutdown {p2p_readers; active_chains; _} = P2p_peer.Table.fold - (fun _peer_id reader acc -> - P2p_reader.shutdown reader >>= fun () -> acc) + (fun _peer_id reader acc -> P2p_reader.shutdown reader >>= fun () -> acc) p2p_readers - Lwt.return_unit >>= fun () -> + Lwt.return_unit 
+ >>= fun () -> Chain_id.Table.fold (fun _ chain_db acc -> - Raw_operation.shutdown chain_db.operation_db >>= fun () -> - Raw_block_header.shutdown chain_db.block_header_db >>= fun () -> - acc) + Raw_operation.shutdown chain_db.operation_db + >>= fun () -> + Raw_block_header.shutdown chain_db.block_header_db >>= fun () -> acc) active_chains - Lwt.return_unit >>= fun () -> - Lwt.return_unit + Lwt.return_unit + >>= fun () -> Lwt.return_unit let clear_block chain_db hash n = Raw_operations.clear_all chain_db.operations_db.table hash n ; Raw_operation_hashes.clear_all chain_db.operation_hashes_db.table hash n ; Raw_block_header.Table.clear_or_cancel chain_db.block_header_db.table hash -let commit_block chain_db hash - header header_data operations operations_data result - ~forking_testchain = +let commit_block chain_db hash header header_data operations operations_data + result ~forking_testchain = assert (Block_hash.equal hash (Block_header.hash header)) ; assert (List.length operations = header.shell.validation_passes) ; - State.Block.store chain_db.chain_state - header header_data operations operations_data result - ~forking_testchain >>=? fun res -> + State.Block.store + chain_db.chain_state + header + header_data + operations + operations_data + result + ~forking_testchain + >>=? fun res -> clear_block chain_db hash header.shell.validation_passes ; return res let commit_invalid_block chain_db hash header errors = assert (Block_hash.equal hash (Block_header.hash header)) ; - State.Block.store_invalid chain_db.chain_state header errors >>=? fun res -> + State.Block.store_invalid chain_db.chain_state header errors + >>=? fun res -> clear_block chain_db hash header.shell.validation_passes ; return res @@ -959,193 +1057,247 @@ let inject_operation chain_db h op = Raw_operation.Table.inject chain_db.operation_db.table h op let commit_protocol db h p = - State.Protocol.store db.disk p >>= fun res -> + State.Protocol.store db.disk p + >>= fun res -> Raw_protocol.Table.clear_or_cancel db.protocol_db.table h ; return (res <> None) -let watch_block_header { block_input ; _ } = - Lwt_watcher.create_stream block_input -let watch_operation { operation_input ; _ } = +let watch_block_header {block_input; _} = Lwt_watcher.create_stream block_input + +let watch_operation {operation_input; _} = Lwt_watcher.create_stream operation_input module Raw = struct let encoding = P2p_message.encoding Message.cfg.encoding + let chain_name = Message.cfg.chain_name + let distributed_db_versions = Message.cfg.distributed_db_versions end module Make - (Table : Distributed_db_functors.DISTRIBUTED_DB) - (Kind : sig - type t - val proj: t -> Table.t - end) = struct + (Table : Distributed_db_functors.DISTRIBUTED_DB) (Kind : sig + type t + + val proj : t -> Table.t + end) = +struct type key = Table.key + type value = Table.value + let known t k = Table.known (Kind.proj t) k + type error += Missing_data = Table.Missing_data + type error += Canceled = Table.Canceled + type error += Timeout = Table.Timeout + let read t k = Table.read (Kind.proj t) k + let read_opt t k = Table.read_opt (Kind.proj t) k + let prefetch t ?peer ?timeout k p = Table.prefetch (Kind.proj t) ?peer ?timeout k p - let fetch t ?peer ?timeout k p = - Table.fetch (Kind.proj t) ?peer ?timeout k p + + let fetch t ?peer ?timeout k p = Table.fetch (Kind.proj t) ?peer ?timeout k p + let clear_or_cancel t k = Table.clear_or_cancel (Kind.proj t) k + let inject t k v = Table.inject (Kind.proj t) k v + let pending t k = Table.pending (Kind.proj t) k + let watch t = 
Table.watch (Kind.proj t) + let resolve_pending t k v = Table.resolve_pending (Kind.proj t) k v end module Block_header = struct type t = Block_header.t - include (Make (Raw_block_header.Table) (struct - type t = chain_db - let proj chain = chain.block_header_db.table - end) : Distributed_db_functors.DISTRIBUTED_DB with type t := chain_db - and type key := Block_hash.t - and type value := Block_header.t - and type param := unit) + + include ( + Make + (Raw_block_header.Table) + (struct + type t = chain_db + + let proj chain = chain.block_header_db.table + end) : + Distributed_db_functors.DISTRIBUTED_DB + with type t := chain_db + and type key := Block_hash.t + and type value := Block_header.t + and type param := unit ) end module Operation_hashes = - Make (Raw_operation_hashes.Table) (struct - type t = chain_db - let proj chain = chain.operation_hashes_db.table - end) + Make + (Raw_operation_hashes.Table) + (struct + type t = chain_db + + let proj chain = chain.operation_hashes_db.table + end) module Operations = - Make (Raw_operations.Table) (struct - type t = chain_db - let proj chain = chain.operations_db.table - end) + Make + (Raw_operations.Table) + (struct + type t = chain_db + + let proj chain = chain.operations_db.table + end) module Operation = struct include Operation - include (Make (Raw_operation.Table) (struct - type t = chain_db - let proj chain = chain.operation_db.table - end) : Distributed_db_functors.DISTRIBUTED_DB with type t := chain_db - and type key := Operation_hash.t - and type value := Operation.t - and type param := unit) + + include ( + Make + (Raw_operation.Table) + (struct + type t = chain_db + + let proj chain = chain.operation_db.table + end) : + Distributed_db_functors.DISTRIBUTED_DB + with type t := chain_db + and type key := Operation_hash.t + and type value := Operation.t + and type param := unit ) end module Protocol = struct type t = Protocol.t - include (Make (Raw_protocol.Table) (struct - type t = db - let proj db = db.protocol_db.table - end) : Distributed_db_functors.DISTRIBUTED_DB with type t := db - and type key := Protocol_hash.t - and type value := Protocol.t - and type param := unit) -end + include ( + Make + (Raw_protocol.Table) + (struct + type t = db + + let proj db = db.protocol_db.table + end) : + Distributed_db_functors.DISTRIBUTED_DB + with type t := db + and type key := Protocol_hash.t + and type value := Protocol.t + and type param := unit ) +end let broadcast chain_db msg = P2p_peer.Table.iter (fun _peer_id state -> - ignore (P2p.try_send chain_db.global_db.p2p state.conn msg)) + ignore (P2p.try_send chain_db.global_db.p2p state.conn msg)) chain_db.active_connections let try_send chain_db peer_id msg = match P2p_peer.Table.find_opt chain_db.active_connections peer_id with - | None -> () + | None -> + () | Some conn -> ignore (P2p.try_send chain_db.global_db.p2p conn.conn msg : bool) let send chain_db ?peer msg = match peer with - | Some peer -> try_send chain_db peer msg - | None -> broadcast chain_db msg + | Some peer -> + try_send chain_db peer msg + | None -> + broadcast chain_db msg module Request = struct - let current_head chain_db ?peer () = let chain_id = State.Chain.id chain_db.chain_state in - begin match peer with - |Some peer -> - let meta = P2p.get_peer_metadata chain_db.global_db.p2p peer in - Peer_metadata.incr meta (Sent_request Head) - |None -> () - end ; + ( match peer with + | Some peer -> + let meta = P2p.get_peer_metadata chain_db.global_db.p2p peer in + Peer_metadata.incr meta (Sent_request Head) + | None -> + () 
) ; send chain_db ?peer @@ Get_current_head chain_id let current_branch chain_db ?peer () = let chain_id = State.Chain.id chain_db.chain_state in - begin match peer with - |Some peer -> - let meta = P2p.get_peer_metadata chain_db.global_db.p2p peer in - Peer_metadata.incr meta (Sent_request Head) - |None -> () - end ; + ( match peer with + | Some peer -> + let meta = P2p.get_peer_metadata chain_db.global_db.p2p peer in + Peer_metadata.incr meta (Sent_request Head) + | None -> + () ) ; send chain_db ?peer @@ Get_current_branch chain_id - end module Advertise = struct - let current_head chain_db ?peer ?(mempool = Mempool.empty) head = let chain_id = State.Chain.id chain_db.chain_state in assert (Chain_id.equal chain_id (State.Block.chain_id head)) ; - begin match peer with - | Some peer -> - let meta = P2p.get_peer_metadata chain_db.global_db.p2p peer in - Peer_metadata.incr meta (Sent_advertisement Head) - | None -> () - end ; + ( match peer with + | Some peer -> + let meta = P2p.get_peer_metadata chain_db.global_db.p2p peer in + Peer_metadata.incr meta (Sent_advertisement Head) + | None -> + () ) ; let msg_mempool = - Message.Current_head (chain_id, State.Block.header head, mempool) in - if mempool = Mempool.empty then - send chain_db ?peer msg_mempool + Message.Current_head (chain_id, State.Block.header head, mempool) + in + if mempool = Mempool.empty then send chain_db ?peer msg_mempool else let msg_disable_mempool = - Message.Current_head (chain_id, State.Block.header head, Mempool.empty) in + Message.Current_head (chain_id, State.Block.header head, Mempool.empty) + in let send_mempool state = - let { Connection_metadata.disable_mempool ; _ } = - P2p.connection_remote_metadata chain_db.global_db.p2p state.conn in - let msg = if disable_mempool then msg_disable_mempool else msg_mempool in + let {Connection_metadata.disable_mempool; _} = + P2p.connection_remote_metadata chain_db.global_db.p2p state.conn + in + let msg = + if disable_mempool then msg_disable_mempool else msg_mempool + in ignore @@ P2p.try_send chain_db.global_db.p2p state.conn msg in match peer with | Some receiver_id -> - let state = P2p_peer.Table.find chain_db.active_connections receiver_id in + let state = + P2p_peer.Table.find chain_db.active_connections receiver_id + in send_mempool state | None -> - List.iter (fun (_receiver_id, state) -> send_mempool state) - (P2p_peer.Table.fold (fun k v acc -> (k,v)::acc) chain_db.active_connections []) + List.iter + (fun (_receiver_id, state) -> send_mempool state) + (P2p_peer.Table.fold + (fun k v acc -> (k, v) :: acc) + chain_db.active_connections + []) let current_branch ?peer chain_db = let chain_id = State.Chain.id chain_db.chain_state in let chain_state = chain_state chain_db in let sender_id = my_peer_id chain_db in - begin match peer with - | Some peer -> - let meta = P2p.get_peer_metadata chain_db.global_db.p2p peer in - Peer_metadata.incr meta (Sent_advertisement Branch) - | None -> () - end ; - + ( match peer with + | Some peer -> + let meta = P2p.get_peer_metadata chain_db.global_db.p2p peer in + Peer_metadata.incr meta (Sent_advertisement Branch) + | None -> + () ) ; match peer with | Some receiver_id -> - let seed = { - Block_locator.receiver_id=receiver_id; sender_id } in - (Chain.locator chain_state seed) >>= fun locator -> + let seed = {Block_locator.receiver_id; sender_id} in + Chain.locator chain_state seed + >>= fun locator -> let msg = Message.Current_branch (chain_id, locator) in - try_send chain_db receiver_id msg; + try_send chain_db receiver_id msg ; 
Lwt.return_unit | None -> Lwt_list.iter_p - (fun (receiver_id,state) -> - let seed = { - Block_locator.receiver_id=receiver_id; sender_id } in - (Chain.locator chain_state seed) >>= fun locator -> - let msg = Message.Current_branch (chain_id, locator) in - ignore (P2p.try_send chain_db.global_db.p2p state.conn msg); - Lwt.return_unit - ) (P2p_peer.Table.fold (fun k v acc -> (k,v)::acc) chain_db.active_connections []) - + (fun (receiver_id, state) -> + let seed = {Block_locator.receiver_id; sender_id} in + Chain.locator chain_state seed + >>= fun locator -> + let msg = Message.Current_branch (chain_id, locator) in + ignore (P2p.try_send chain_db.global_db.p2p state.conn msg) ; + Lwt.return_unit) + (P2p_peer.Table.fold + (fun k v acc -> (k, v) :: acc) + chain_db.active_connections + []) end diff --git a/src/lib_shell/distributed_db.mli b/src/lib_shell/distributed_db.mli index 2c3fb91b5e18ab1c581c6560b108a9f03130c9a3..1988b267d63bfaf70f1549ef4512138fbc14b4f6 100644 --- a/src/lib_shell/distributed_db.mli +++ b/src/lib_shell/distributed_db.mli @@ -29,15 +29,18 @@ open Distributed_db_functors type t + type db = t module Message = Distributed_db_message type p2p = (Message.t, Peer_metadata.t, Connection_metadata.t) P2p.net -val create: State.t -> p2p -> t -val state: db -> State.t -val shutdown: t -> unit Lwt.t +val create : State.t -> p2p -> t + +val state : db -> State.t + +val shutdown : t -> unit Lwt.t (** {1 Network database} *) @@ -48,71 +51,71 @@ type chain_db (** Activate a given chain. The node will notify its neighbours that it now handles the given chain and that it expects notification for new head or new operations. *) -val activate: t -> State.Chain.t -> chain_db +val activate : t -> State.Chain.t -> chain_db (** Look for the database of an active chain. *) -val get_chain: t -> Chain_id.t -> chain_db option +val get_chain : t -> Chain_id.t -> chain_db option (** Deactivate a given chain. The node will notify its neighbours that it does not care anymore about this chain. *) -val deactivate: chain_db -> unit Lwt.t +val deactivate : chain_db -> unit Lwt.t type callback = { - notify_branch: P2p_peer.Id.t -> Block_locator.t -> unit ; - notify_head: P2p_peer.Id.t -> Block_header.t -> Mempool.t -> unit ; - disconnection: P2p_peer.Id.t -> unit ; + notify_branch : P2p_peer.Id.t -> Block_locator.t -> unit; + notify_head : P2p_peer.Id.t -> Block_header.t -> Mempool.t -> unit; + disconnection : P2p_peer.Id.t -> unit } (** Register all the possible callback from the distributed DB to the validator. *) -val set_callback: chain_db -> callback -> unit +val set_callback : chain_db -> callback -> unit (** Kick a given peer. *) -val disconnect: chain_db -> P2p_peer.Id.t -> unit Lwt.t +val disconnect : chain_db -> P2p_peer.Id.t -> unit Lwt.t (** Greylist a given peer. *) -val greylist: chain_db -> P2p_peer.Id.t -> unit Lwt.t +val greylist : chain_db -> P2p_peer.Id.t -> unit Lwt.t (** Various accessors. 
*) -val chain_state: chain_db -> State.Chain.t -val db: chain_db -> db +val chain_state : chain_db -> State.Chain.t + +val db : chain_db -> db -val information : chain_db -> Chain_validator_worker_state.Distributed_db_state.view +val information : + chain_db -> Chain_validator_worker_state.Distributed_db_state.view (** Return the peer id of the node *) -val my_peer_id: chain_db -> P2p_peer.Id.t +val my_peer_id : chain_db -> P2p_peer.Id.t -val get_peer_metadata: chain_db -> P2p_peer.Id.t -> Peer_metadata.t +val get_peer_metadata : chain_db -> P2p_peer.Id.t -> Peer_metadata.t (** {1 Sending messages} *) module Request : sig - (** Send to a given peer, or to all known active peers for the chain, a friendly request "Hey, what's your current branch ?". The expected answer is a `Block_locator.t.`. *) - val current_branch: chain_db -> ?peer:P2p_peer.Id.t -> unit -> unit + val current_branch : chain_db -> ?peer:P2p_peer.Id.t -> unit -> unit (** Send to a given peer, or to all known active peers for the given chain, a friendly request "Hey, what's your current branch ?". The expected answer is a `Block_locator.t.`. *) - val current_head: chain_db -> ?peer:P2p_peer.Id.t -> unit -> unit - + val current_head : chain_db -> ?peer:P2p_peer.Id.t -> unit -> unit end module Advertise : sig - (** Notify a given peer, or all known active peers for the chain, of a new head and possibly of new operations. *) - val current_head: - chain_db -> ?peer:P2p_peer.Id.t -> - ?mempool:Mempool.t -> State.Block.t -> unit + val current_head : + chain_db -> + ?peer:P2p_peer.Id.t -> + ?mempool:Mempool.t -> + State.Block.t -> + unit (** Notify a given peer, or all known active peers for the chain, of a new head and its sparse history. *) - val current_branch: - ?peer:P2p_peer.Id.t -> chain_db -> unit Lwt.t - + val current_branch : ?peer:P2p_peer.Id.t -> chain_db -> unit Lwt.t end (** {2 Block index} *) @@ -120,69 +123,80 @@ end (** Index of block headers. *) module Block_header : sig type t = Block_header.t (* avoid shadowing. *) - include DISTRIBUTED_DB with type t := chain_db - and type key := Block_hash.t - and type value := Block_header.t - and type param := unit + + include + DISTRIBUTED_DB + with type t := chain_db + and type key := Block_hash.t + and type value := Block_header.t + and type param := unit end (** Lookup for block header in any active chains *) -val read_block_header: +val read_block_header : db -> Block_hash.t -> (Chain_id.t * Block_header.t) option Lwt.t (** Index of all the operations of a given block (per validation pass). *) module Operations : - DISTRIBUTED_DB with type t := chain_db - and type key = Block_hash.t * int - and type value = Operation.t list - and type param := Operation_list_list_hash.t + DISTRIBUTED_DB + with type t := chain_db + and type key = Block_hash.t * int + and type value = Operation.t list + and type param := Operation_list_list_hash.t (** Index of all the hashes of operations of a given block (per validation pass). *) module Operation_hashes : - DISTRIBUTED_DB with type t := chain_db - and type key = Block_hash.t * int - and type value = Operation_hash.t list - and type param := Operation_list_list_hash.t + DISTRIBUTED_DB + with type t := chain_db + and type key = Block_hash.t * int + and type value = Operation_hash.t list + and type param := Operation_list_list_hash.t (** Store on disk all the data associated to a valid block. 
*) -val commit_block: +val commit_block : chain_db -> Block_hash.t -> - Block_header.t -> MBytes.t -> - Operation.t list list -> MBytes.t list list -> + Block_header.t -> + MBytes.t -> + Operation.t list list -> + MBytes.t list list -> State.Block.validation_store -> - forking_testchain: bool -> + forking_testchain:bool -> State.Block.t option tzresult Lwt.t (** Store on disk all the data associated to an invalid block. *) -val commit_invalid_block: +val commit_invalid_block : chain_db -> - Block_hash.t -> Block_header.t -> Error_monad.error list -> + Block_hash.t -> + Block_header.t -> + Error_monad.error list -> bool tzresult Lwt.t (** Monitor all the fetched block headers (for all activate chains). *) -val watch_block_header: +val watch_block_header : t -> (Block_hash.t * Block_header.t) Lwt_stream.t * Lwt_watcher.stopper - (** {2 Operations index} *) (** Index of operations (for the mempool). *) module Operation : sig type t = Operation.t (* avoid shadowing. *) - include DISTRIBUTED_DB with type t := chain_db - and type key := Operation_hash.t - and type value := Operation.t - and type param := unit + + include + DISTRIBUTED_DB + with type t := chain_db + and type key := Operation_hash.t + and type value := Operation.t + and type param := unit end (** Inject a new operation in the local index (memory only). *) -val inject_operation: +val inject_operation : chain_db -> Operation_hash.t -> Operation.t -> bool Lwt.t (** Monitor all the fetched operations (for all activate chains). *) -val watch_operation: +val watch_operation : t -> (Operation_hash.t * Operation.t) Lwt_stream.t * Lwt_watcher.stopper (** {2 Protocol index} *) @@ -190,20 +204,25 @@ val watch_operation: (** Index of protocol sources. *) module Protocol : sig type t = Protocol.t (* avoid shadowing. *) - include DISTRIBUTED_DB with type t := db - and type key := Protocol_hash.t - and type value := Protocol.t - and type param := unit + + include + DISTRIBUTED_DB + with type t := db + and type key := Protocol_hash.t + and type value := Protocol.t + and type param := unit end (** Store on disk protocol sources. 
*) -val commit_protocol: +val commit_protocol : db -> Protocol_hash.t -> Protocol.t -> bool tzresult Lwt.t (**/**) module Raw : sig - val encoding: Message.t P2p_message.t Data_encoding.t - val chain_name: Distributed_db_version.name - val distributed_db_versions: Distributed_db_version.t list + val encoding : Message.t P2p_message.t Data_encoding.t + + val chain_name : Distributed_db_version.name + + val distributed_db_versions : Distributed_db_version.t list end diff --git a/src/lib_shell/distributed_db_functors.ml b/src/lib_shell/distributed_db_functors.ml index 157121f2f791868e15a087a60e1e6f778ce3b532..c47062a720415b61eb54beac12e0e1a23e522840 100644 --- a/src/lib_shell/distributed_db_functors.ml +++ b/src/lib_shell/distributed_db_functors.ml @@ -24,362 +24,429 @@ (*****************************************************************************) module type DISTRIBUTED_DB = sig - type t + type key + type value + type param - val known: t -> key -> bool Lwt.t + val known : t -> key -> bool Lwt.t type error += Missing_data of key + type error += Canceled of key + type error += Timeout of key - val read: t -> key -> value tzresult Lwt.t - val read_opt: t -> key -> value option Lwt.t + val read : t -> key -> value tzresult Lwt.t + + val read_opt : t -> key -> value option Lwt.t - val prefetch: + val prefetch : t -> ?peer:P2p_peer.Id.t -> ?timeout:Time.System.Span.t -> - key -> param -> unit + key -> + param -> + unit - val fetch: + val fetch : t -> ?peer:P2p_peer.Id.t -> ?timeout:Time.System.Span.t -> - key -> param -> value tzresult Lwt.t + key -> + param -> + value tzresult Lwt.t - val clear_or_cancel: t -> key -> unit - val resolve_pending: t -> key -> value -> unit - val inject: t -> key -> value -> bool Lwt.t - val watch: t -> (key * value) Lwt_stream.t * Lwt_watcher.stopper + val clear_or_cancel : t -> key -> unit - val pending: t -> key -> bool + val resolve_pending : t -> key -> value -> unit + val inject : t -> key -> value -> bool Lwt.t + + val watch : t -> (key * value) Lwt_stream.t * Lwt_watcher.stopper + + val pending : t -> key -> bool end module type DISK_TABLE = sig type store + type key + type value - val known: store -> key -> bool Lwt.t - val read: store -> key -> value tzresult Lwt.t - val read_opt: store -> key -> value option Lwt.t + + val known : store -> key -> bool Lwt.t + + val read : store -> key -> value tzresult Lwt.t + + val read_opt : store -> key -> value option Lwt.t end module type MEMORY_TABLE = sig type 'a t + type key - val create: int -> 'a t - val find: 'a t -> key -> 'a - val find_opt: 'a t -> key -> 'a option - val add: 'a t -> key -> 'a -> unit - val replace: 'a t -> key -> 'a -> unit - val remove: 'a t -> key -> unit - val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + + val create : int -> 'a t + + val find : 'a t -> key -> 'a + + val find_opt : 'a t -> key -> 'a option + + val add : 'a t -> key -> 'a -> unit + + val replace : 'a t -> key -> 'a -> unit + + val remove : 'a t -> key -> unit + + val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + val length : 'a t -> int end module type SCHEDULER_EVENTS = sig type t + type key - val request: t -> P2p_peer.Id.t option -> key -> unit - val notify: t -> P2p_peer.Id.t -> key -> unit - val notify_cancelation: t -> key -> unit - val notify_unrequested: t -> P2p_peer.Id.t -> key -> unit - val notify_duplicate: t -> P2p_peer.Id.t -> key -> unit - val notify_invalid: t -> P2p_peer.Id.t -> key -> unit + + val request : t -> P2p_peer.Id.t option -> key -> unit + + val notify : t -> P2p_peer.Id.t -> key 
-> unit + + val notify_cancelation : t -> key -> unit + + val notify_unrequested : t -> P2p_peer.Id.t -> key -> unit + + val notify_duplicate : t -> P2p_peer.Id.t -> key -> unit + + val notify_invalid : t -> P2p_peer.Id.t -> key -> unit + val memory_table_length : t -> int end module type PRECHECK = sig type key + type param + type notified_value + type value - val precheck: key -> param -> notified_value -> value option + + val precheck : key -> param -> notified_value -> value option end -module Make_table - (Hash : sig - type t - val name : string - val encoding : t Data_encoding.t - val pp : Format.formatter -> t -> unit - end) - (Disk_table : DISK_TABLE with type key := Hash.t) - (Memory_table : MEMORY_TABLE with type key := Hash.t) - (Scheduler : SCHEDULER_EVENTS with type key := Hash.t) - (Precheck : PRECHECK with type key := Hash.t - and type value := Disk_table.value) : sig - - include DISTRIBUTED_DB with type key = Hash.t - and type value = Disk_table.value - and type param = Precheck.param - val create: +module Make_table (Hash : sig + type t + + val name : string + + val encoding : t Data_encoding.t + + val pp : Format.formatter -> t -> unit +end) +(Disk_table : DISK_TABLE with type key := Hash.t) +(Memory_table : MEMORY_TABLE with type key := Hash.t) +(Scheduler : SCHEDULER_EVENTS with type key := Hash.t) +(Precheck : PRECHECK with type key := Hash.t and type value := Disk_table.value) : sig + include + DISTRIBUTED_DB + with type key = Hash.t + and type value = Disk_table.value + and type param = Precheck.param + + val create : ?global_input:(key * value) Lwt_watcher.input -> - Scheduler.t -> Disk_table.store -> t - val notify: t -> P2p_peer.Id.t -> key -> Precheck.notified_value -> unit Lwt.t - val memory_table_length : t -> int + Scheduler.t -> + Disk_table.store -> + t -end = struct + val notify : + t -> P2p_peer.Id.t -> key -> Precheck.notified_value -> unit Lwt.t + val memory_table_length : t -> int +end = struct type key = Hash.t + type value = Disk_table.value + type param = Precheck.param type t = { - scheduler: Scheduler.t ; - disk: Disk_table.store ; - memory: status Memory_table.t ; - global_input: (key * value) Lwt_watcher.input option ; - input: (key * value) Lwt_watcher.input ; + scheduler : Scheduler.t; + disk : Disk_table.store; + memory : status Memory_table.t; + global_input : (key * value) Lwt_watcher.input option; + input : (key * value) Lwt_watcher.input } and status = - | Pending of { waiter : value tzresult Lwt.t ; - wakener : value tzresult Lwt.u ; - mutable waiters : int ; - param : param } + | Pending of + { waiter : value tzresult Lwt.t; + wakener : value tzresult Lwt.u; + mutable waiters : int; + param : param } | Found of value let known s k = match Memory_table.find_opt s.memory k with - | None -> Disk_table.known s.disk k - | Some (Pending _) -> Lwt.return_false - | Some (Found _) -> Lwt.return_true + | None -> + Disk_table.known s.disk k + | Some (Pending _) -> + Lwt.return_false + | Some (Found _) -> + Lwt.return_true let read_opt s k = match Memory_table.find_opt s.memory k with - | None -> Disk_table.read_opt s.disk k - | Some (Found v) -> Lwt.return_some v - | Some (Pending _) -> Lwt.return_none + | None -> + Disk_table.read_opt s.disk k + | Some (Found v) -> + Lwt.return_some v + | Some (Pending _) -> + Lwt.return_none type error += Missing_data of key + type error += Canceled of key + type error += Timeout of key let () = (* Missing data key *) register_error_kind `Permanent - ~id: ("distributed_db." 
^ Hash.name ^ ".missing") - ~title: ("Missing " ^ Hash.name) - ~description: ("Some " ^ Hash.name ^ " is missing from the distributed db") - ~pp: (fun ppf key -> - Format.fprintf ppf "Missing %s %a" Hash.name Hash.pp key) + ~id:("distributed_db." ^ Hash.name ^ ".missing") + ~title:("Missing " ^ Hash.name) + ~description:("Some " ^ Hash.name ^ " is missing from the distributed db") + ~pp:(fun ppf key -> + Format.fprintf ppf "Missing %s %a" Hash.name Hash.pp key) (Data_encoding.obj1 (Data_encoding.req "key" Hash.encoding)) (function Missing_data key -> Some key | _ -> None) (fun key -> Missing_data key) ; (* Canceled key *) register_error_kind `Permanent - ~title: ("Canceled fetch of a " ^ Hash.name) - ~description: ("The fetch of a " ^ Hash.name ^ " has been canceled") - ~id: ("distributed_db." ^ Hash.name ^ ".fetch_canceled") - ~pp: (fun ppf key -> - Format.fprintf ppf "Fetch of %s %a canceled" Hash.name Hash.pp key) + ~title:("Canceled fetch of a " ^ Hash.name) + ~description:("The fetch of a " ^ Hash.name ^ " has been canceled") + ~id:("distributed_db." ^ Hash.name ^ ".fetch_canceled") + ~pp:(fun ppf key -> + Format.fprintf ppf "Fetch of %s %a canceled" Hash.name Hash.pp key) Data_encoding.(obj1 (req "key" Hash.encoding)) - (function (Canceled key) -> Some key | _ -> None) + (function Canceled key -> Some key | _ -> None) (fun key -> Canceled key) ; (* Timeout key *) register_error_kind `Permanent - ~title: ("Timed out fetch of a " ^ Hash.name) - ~description: ("The fetch of a " ^ Hash.name ^ " has timed out") - ~id: ("distributed_db." ^ Hash.name ^ ".fetch_timeout") - ~pp: (fun ppf key -> - Format.fprintf ppf "Fetch of %s %a timed out" Hash.name Hash.pp key) + ~title:("Timed out fetch of a " ^ Hash.name) + ~description:("The fetch of a " ^ Hash.name ^ " has timed out") + ~id:("distributed_db." 
^ Hash.name ^ ".fetch_timeout") + ~pp:(fun ppf key -> + Format.fprintf ppf "Fetch of %s %a timed out" Hash.name Hash.pp key) Data_encoding.(obj1 (req "key" Hash.encoding)) - (function (Timeout key) -> Some key | _ -> None) + (function Timeout key -> Some key | _ -> None) (fun key -> Timeout key) let read s k = match Memory_table.find_opt s.memory k with | None -> - trace (Missing_data k) @@ - Disk_table.read s.disk k - | Some (Found v) -> return v - | Some (Pending _) -> fail (Missing_data k) + trace (Missing_data k) @@ Disk_table.read s.disk k + | Some (Found v) -> + return v + | Some (Pending _) -> + fail (Missing_data k) let wrap s k ?timeout t = let t = Lwt.protected t in - Lwt.on_cancel t begin fun () -> - match Memory_table.find_opt s.memory k with - | None -> () - | Some (Found _) -> () - | Some (Pending data) -> - data.waiters <- data.waiters - 1 ; - if data.waiters = 0 then begin - Memory_table.remove s.memory k ; - Scheduler.notify_cancelation s.scheduler k ; - end - end ; + Lwt.on_cancel t (fun () -> + match Memory_table.find_opt s.memory k with + | None -> + () + | Some (Found _) -> + () + | Some (Pending data) -> + data.waiters <- data.waiters - 1 ; + if data.waiters = 0 then ( + Memory_table.remove s.memory k ; + Scheduler.notify_cancelation s.scheduler k )) ; match timeout with - | None -> t + | None -> + t | Some delay -> let timeout = Systime_os.sleep delay >>= fun () -> fail (Timeout k) in - Lwt.pick [ t ; timeout ] + Lwt.pick [t; timeout] let fetch s ?peer ?timeout k param = match Memory_table.find_opt s.memory k with - | None -> begin - Disk_table.read_opt s.disk k >>= function - | Some v -> return v - | None -> - match Memory_table.find_opt s.memory k with - | None -> begin - let waiter, wakener = Lwt.wait () in - Memory_table.add s.memory k - (Pending { waiter ; wakener ; waiters = 1 ; param }) ; - Scheduler.request s.scheduler peer k ; - wrap s k ?timeout waiter - end - | Some (Pending data) -> - Scheduler.request s.scheduler peer k ; - data.waiters <- data.waiters + 1 ; - wrap s k ?timeout data.waiter - | Some (Found v) -> return v - end + | None -> ( + Disk_table.read_opt s.disk k + >>= function + | Some v -> + return v + | None -> ( + match Memory_table.find_opt s.memory k with + | None -> + let (waiter, wakener) = Lwt.wait () in + Memory_table.add + s.memory + k + (Pending {waiter; wakener; waiters = 1; param}) ; + Scheduler.request s.scheduler peer k ; + wrap s k ?timeout waiter + | Some (Pending data) -> + Scheduler.request s.scheduler peer k ; + data.waiters <- data.waiters + 1 ; + wrap s k ?timeout data.waiter + | Some (Found v) -> + return v ) ) | Some (Pending data) -> Scheduler.request s.scheduler peer k ; data.waiters <- data.waiters + 1 ; wrap s k ?timeout data.waiter - | Some (Found v) -> return v + | Some (Found v) -> + return v let prefetch s ?peer ?timeout k param = try ignore (fetch s ?peer ?timeout k param) with _ -> () let notify s p k v = match Memory_table.find_opt s.memory k with - | None -> begin - Disk_table.known s.disk k >>= function + | None -> ( + Disk_table.known s.disk k + >>= function | true -> Scheduler.notify_duplicate s.scheduler p k ; Lwt.return_unit | false -> Scheduler.notify_unrequested s.scheduler p k ; - Lwt.return_unit - end - | Some (Pending { wakener = w ; param ; _ }) -> begin - match Precheck.precheck k param v with - | None -> - Scheduler.notify_invalid s.scheduler p k ; - Lwt.return_unit - | Some v -> - Scheduler.notify s.scheduler p k ; - Memory_table.replace s.memory k (Found v) ; - Lwt.wakeup_later w (Ok v) ; - 
Option.iter s.global_input - ~f:(fun input -> Lwt_watcher.notify input (k, v)) ; - Lwt_watcher.notify s.input (k, v) ; - Lwt.return_unit - end + Lwt.return_unit ) + | Some (Pending {wakener = w; param; _}) -> ( + match Precheck.precheck k param v with + | None -> + Scheduler.notify_invalid s.scheduler p k ; + Lwt.return_unit + | Some v -> + Scheduler.notify s.scheduler p k ; + Memory_table.replace s.memory k (Found v) ; + Lwt.wakeup_later w (Ok v) ; + Option.iter s.global_input ~f:(fun input -> + Lwt_watcher.notify input (k, v)) ; + Lwt_watcher.notify s.input (k, v) ; + Lwt.return_unit ) | Some (Found _) -> Scheduler.notify_duplicate s.scheduler p k ; Lwt.return_unit let inject s k v = match Memory_table.find_opt s.memory k with - | None -> begin - Disk_table.known s.disk k >>= function + | None -> ( + Disk_table.known s.disk k + >>= function | true -> Lwt.return_false | false -> Memory_table.add s.memory k (Found v) ; - Lwt.return_true - end - | Some (Pending _) - | Some (Found _) -> + Lwt.return_true ) + | Some (Pending _) | Some (Found _) -> Lwt.return_false let resolve_pending s k v = match Memory_table.find_opt s.memory k with - | Some (Pending { wakener ; _ }) -> + | Some (Pending {wakener; _}) -> Scheduler.notify_cancelation s.scheduler k ; Memory_table.replace s.memory k (Found v) ; Lwt.wakeup_later wakener (Ok v) ; - Option.iter s.global_input - ~f:(fun input -> Lwt_watcher.notify input (k, v)) ; - Lwt_watcher.notify s.input (k, v) ; - | _ -> () + Option.iter s.global_input ~f:(fun input -> + Lwt_watcher.notify input (k, v)) ; + Lwt_watcher.notify s.input (k, v) + | _ -> + () let clear_or_cancel s k = match Memory_table.find_opt s.memory k with - | None -> () - | Some (Pending { wakener = w ; _ }) -> + | None -> + () + | Some (Pending {wakener = w; _}) -> Scheduler.notify_cancelation s.scheduler k ; Memory_table.remove s.memory k ; Lwt.wakeup_later w (Error [Canceled k]) - | Some (Found _) -> Memory_table.remove s.memory k + | Some (Found _) -> + Memory_table.remove s.memory k let watch s = Lwt_watcher.create_stream s.input let create ?global_input scheduler disk = let memory = Memory_table.create 17 in let input = Lwt_watcher.create_input () in - { scheduler ; disk ; memory ; input ; global_input } + {scheduler; disk; memory; input; global_input} let pending s k = match Memory_table.find_opt s.memory k with - | None -> false - | Some (Found _) -> false - | Some (Pending _) -> true + | None -> + false + | Some (Found _) -> + false + | Some (Pending _) -> + true let memory_table_length s = Memory_table.length s.memory - end module type REQUEST = sig type key + type param + val initial_delay : Time.System.Span.t + val active : param -> P2p_peer.Set.t + val send : param -> P2p_peer.Id.t -> key list -> unit end -module Make_request_scheduler - (Hash : sig - type t - val name : string +module Make_request_scheduler (Hash : sig + type t - module Logging : sig - val tag : t Tag.def - end - end) - (Table : MEMORY_TABLE with type key := Hash.t) - (Request : REQUEST with type key := Hash.t) : sig + val name : string + module Logging : sig + val tag : t Tag.def + end +end) +(Table : MEMORY_TABLE with type key := Hash.t) +(Request : REQUEST with type key := Hash.t) : sig type t - val create: Request.param -> t - val shutdown: t -> unit Lwt.t + + val create : Request.param -> t + + val shutdown : t -> unit Lwt.t + include SCHEDULER_EVENTS with type t := t and type key := Hash.t - val memory_table_length : t -> int + val memory_table_length : t -> int end = struct - - include 
Internal_event.Legacy_logging.Make_semantic - (struct let name = "node.distributed_db.scheduler." ^ Hash.name end) + include Internal_event.Legacy_logging.Make_semantic (struct + let name = "node.distributed_db.scheduler." ^ Hash.name + end) type key = Hash.t type t = { - param: Request.param ; - pending: status Table.t ; - - queue: event Lwt_pipe.t ; - mutable events: event list Lwt.t ; - - canceler: Lwt_canceler.t ; - mutable worker: unit Lwt.t ; + param : Request.param; + pending : status Table.t; + queue : event Lwt_pipe.t; + mutable events : event list Lwt.t; + canceler : Lwt_canceler.t; + mutable worker : unit Lwt.t } and status = { - peers: P2p_peer.Set.t ; - next_request: Time.System.t ; - delay: Time.System.Span.t ; + peers : P2p_peer.Set.t; + next_request : Time.System.t; + delay : Time.System.Span.t } and event = @@ -390,144 +457,178 @@ end = struct | Notify_duplicate of P2p_peer.Id.t * key | Notify_unrequested of P2p_peer.Id.t * key - let request t p k = - assert (Lwt_pipe.push_now t.queue (Request (p, k))) + let request t p k = assert (Lwt_pipe.push_now t.queue (Request (p, k))) + let notify t p k = - debug Tag.DSL.(fun f -> - f "push received %a from %a" - -% t event "push_received" - -% a Hash.Logging.tag k - -% a P2p_peer.Id.Logging.tag p); + debug + Tag.DSL.( + fun f -> + f "push received %a from %a" + -% t event "push_received" -% a Hash.Logging.tag k + -% a P2p_peer.Id.Logging.tag p) ; assert (Lwt_pipe.push_now t.queue (Notify (p, k))) + let notify_cancelation t k = - debug Tag.DSL.(fun f -> - f "push cancelation %a" - -% t event "push_cancelation" - -% a Hash.Logging.tag k); + debug + Tag.DSL.( + fun f -> + f "push cancelation %a" -% t event "push_cancelation" + -% a Hash.Logging.tag k) ; assert (Lwt_pipe.push_now t.queue (Notify_cancelation k)) + let notify_invalid t p k = - debug Tag.DSL.(fun f -> - f "push received invalid %a from %a" - -% t event "push_received_invalid" - -% a Hash.Logging.tag k - -% a P2p_peer.Id.Logging.tag p); + debug + Tag.DSL.( + fun f -> + f "push received invalid %a from %a" + -% t event "push_received_invalid" + -% a Hash.Logging.tag k + -% a P2p_peer.Id.Logging.tag p) ; assert (Lwt_pipe.push_now t.queue (Notify_invalid (p, k))) + let notify_duplicate t p k = - debug Tag.DSL.(fun f -> - f "push received duplicate %a from %a" - -% t event "push_received_duplicate" - -% a Hash.Logging.tag k - -% a P2p_peer.Id.Logging.tag p); + debug + Tag.DSL.( + fun f -> + f "push received duplicate %a from %a" + -% t event "push_received_duplicate" + -% a Hash.Logging.tag k + -% a P2p_peer.Id.Logging.tag p) ; assert (Lwt_pipe.push_now t.queue (Notify_duplicate (p, k))) + let notify_unrequested t p k = - debug Tag.DSL.(fun f -> - f "push received unrequested %a from %a" - -% t event "push_received_unrequested" - -% a Hash.Logging.tag k - -% a P2p_peer.Id.Logging.tag p); + debug + Tag.DSL.( + fun f -> + f "push received unrequested %a from %a" + -% t event "push_received_unrequested" + -% a Hash.Logging.tag k + -% a P2p_peer.Id.Logging.tag p) ; assert (Lwt_pipe.push_now t.queue (Notify_unrequested (p, k))) let compute_timeout state = let next = Table.fold - (fun _ { next_request ; _ } acc -> - match acc with - | None -> Some next_request - | Some x -> Some (Time.System.min x next_request)) - state.pending None in + (fun _ {next_request; _} acc -> + match acc with + | None -> + Some next_request + | Some x -> + Some (Time.System.min x next_request)) + state.pending + None + in match next with - | None -> fst @@ Lwt.task () + | None -> + fst @@ Lwt.task () | 
Some next -> let now = Systime_os.now () in let delay = Ptime.diff next now in - if Ptime.Span.compare delay Ptime.Span.zero <= 0 then - Lwt.return_unit - else - Systime_os.sleep delay - + if Ptime.Span.compare delay Ptime.Span.zero <= 0 then Lwt.return_unit + else Systime_os.sleep delay let process_event state now = function - | Request (peer, key) -> begin - lwt_debug Tag.DSL.(fun f -> - f "registering request %a from %a" - -% t event "registering_request" - -% a Hash.Logging.tag key - -% a P2p_peer.Id.Logging.tag_opt peer) >>= fun () -> + | Request (peer, key) -> ( + lwt_debug + Tag.DSL.( + fun f -> + f "registering request %a from %a" + -% t event "registering_request" + -% a Hash.Logging.tag key + -% a P2p_peer.Id.Logging.tag_opt peer) + >>= fun () -> try let data = Table.find state.pending key in let peers = match peer with - | None -> data.peers - | Some peer -> P2p_peer.Set.add peer data.peers in + | None -> + data.peers + | Some peer -> + P2p_peer.Set.add peer data.peers + in let next_request = Option.unopt ~default:Ptime.max - (Ptime.add_span now Request.initial_delay) in - Table.replace state.pending key { - delay = Request.initial_delay ; - next_request ; - peers ; - } ; - lwt_debug Tag.DSL.(fun f -> - f "registering request %a from %a -> replaced" - -% t event "registering_request_replaced" - -% a Hash.Logging.tag key - -% a P2p_peer.Id.Logging.tag_opt peer) >>= fun () -> - Lwt.return_unit + (Ptime.add_span now Request.initial_delay) + in + Table.replace + state.pending + key + {delay = Request.initial_delay; next_request; peers} ; + lwt_debug + Tag.DSL.( + fun f -> + f "registering request %a from %a -> replaced" + -% t event "registering_request_replaced" + -% a Hash.Logging.tag key + -% a P2p_peer.Id.Logging.tag_opt peer) + >>= fun () -> Lwt.return_unit with Not_found -> let peers = match peer with - | None -> P2p_peer.Set.empty - | Some peer -> P2p_peer.Set.singleton peer in - Table.add state.pending key { - peers ; - next_request = now ; - delay = Request.initial_delay ; - } ; - lwt_debug Tag.DSL.(fun f -> - f "registering request %a from %a -> added" - -% t event "registering_request_added" - -% a Hash.Logging.tag key - -% a P2p_peer.Id.Logging.tag_opt peer) >>= fun () -> - Lwt.return_unit - end + | None -> + P2p_peer.Set.empty + | Some peer -> + P2p_peer.Set.singleton peer + in + Table.add + state.pending + key + {peers; next_request = now; delay = Request.initial_delay} ; + lwt_debug + Tag.DSL.( + fun f -> + f "registering request %a from %a -> added" + -% t event "registering_request_added" + -% a Hash.Logging.tag key + -% a P2p_peer.Id.Logging.tag_opt peer) + >>= fun () -> Lwt.return_unit ) | Notify (peer, key) -> Table.remove state.pending key ; - lwt_debug Tag.DSL.(fun f -> - f "received %a from %a" - -% t event "received" - -% a Hash.Logging.tag key - -% a P2p_peer.Id.Logging.tag peer) >>= fun () -> - Lwt.return_unit + lwt_debug + Tag.DSL.( + fun f -> + f "received %a from %a" -% t event "received" + -% a Hash.Logging.tag key + -% a P2p_peer.Id.Logging.tag peer) + >>= fun () -> Lwt.return_unit | Notify_cancelation key -> Table.remove state.pending key ; - lwt_debug Tag.DSL.(fun f -> - f "canceled %a" - -% t event "canceled" - -% a Hash.Logging.tag key) >>= fun () -> - Lwt.return_unit + lwt_debug + Tag.DSL.( + fun f -> + f "canceled %a" -% t event "canceled" -% a Hash.Logging.tag key) + >>= fun () -> Lwt.return_unit | Notify_invalid (peer, key) -> - lwt_debug Tag.DSL.(fun f -> - f "received invalid %a from %a" - -% t event "received_invalid" - -% a Hash.Logging.tag 
key - -% a P2p_peer.Id.Logging.tag peer) >>= fun () -> + lwt_debug + Tag.DSL.( + fun f -> + f "received invalid %a from %a" + -% t event "received_invalid" -% a Hash.Logging.tag key + -% a P2p_peer.Id.Logging.tag peer) + >>= fun () -> (* TODO *) Lwt.return_unit | Notify_unrequested (peer, key) -> - lwt_debug Tag.DSL.(fun f -> - f "received unrequested %a from %a" - -% t event "received_unrequested" - -% a Hash.Logging.tag key - -% a P2p_peer.Id.Logging.tag peer) >>= fun () -> + lwt_debug + Tag.DSL.( + fun f -> + f "received unrequested %a from %a" + -% t event "received_unrequested" + -% a Hash.Logging.tag key + -% a P2p_peer.Id.Logging.tag peer) + >>= fun () -> (* TODO *) Lwt.return_unit | Notify_duplicate (peer, key) -> - lwt_debug Tag.DSL.(fun f -> - f "received duplicate %a from %a" - -% t event "received_duplicate" - -% a Hash.Logging.tag key - -% a P2p_peer.Id.Logging.tag peer) >>= fun () -> + lwt_debug + Tag.DSL.( + fun f -> + f "received duplicate %a from %a" + -% t event "received_duplicate" + -% a Hash.Logging.tag key + -% a P2p_peer.Id.Logging.tag peer) + >>= fun () -> (* TODO *) Lwt.return_unit @@ -535,92 +636,102 @@ end = struct let shutdown = Lwt_canceler.cancelation state.canceler in let rec loop state = let timeout = compute_timeout state in - Lwt.choose - [ (state.events >|= fun _ -> ()) ; timeout ; shutdown ] >>= fun () -> + Lwt.choose [(state.events >|= fun _ -> ()); timeout; shutdown] + >>= fun () -> if Lwt.state shutdown <> Lwt.Sleep then - lwt_debug Tag.DSL.(fun f -> - f "terminating" -% t event "terminating") >>= fun () -> - Lwt.return_unit - else if Lwt.state state.events <> Lwt.Sleep then + lwt_debug Tag.DSL.(fun f -> f "terminating" -% t event "terminating") + >>= fun () -> Lwt.return_unit + else if Lwt.state state.events <> Lwt.Sleep then ( let now = Systime_os.now () in - state.events >>= fun events -> + state.events + >>= fun events -> state.events <- Lwt_pipe.pop_all state.queue ; - Lwt_list.iter_s (process_event state now) events >>= fun () -> - loop state + Lwt_list.iter_s (process_event state now) events + >>= fun () -> loop state ) else - lwt_debug Tag.DSL.(fun f -> - f "timeout" -% t event "timeout") >>= fun () -> + lwt_debug Tag.DSL.(fun f -> f "timeout" -% t event "timeout") + >>= fun () -> let now = Systime_os.now () in let active_peers = Request.active state.param in let requests = Table.fold - (fun key { peers ; next_request ; delay } acc -> - let later = - Option.unopt - ~default:Ptime.max - (Ptime.add_span now (Time.System.Span.of_seconds_exn 0.2)) in - if Ptime.is_later next_request ~than:later then - acc - else - let remaining_peers = - P2p_peer.Set.inter peers active_peers in - if P2p_peer.Set.is_empty remaining_peers && - not (P2p_peer.Set.is_empty peers) then - ( Table.remove state.pending key ; acc ) - else - let requested_peer = - P2p_peer.Id.Set.random_elt - (if P2p_peer.Set.is_empty remaining_peers - then active_peers - else remaining_peers) in - let next_request = - Option.unopt - ~default:Ptime.max - (Ptime.add_span now delay) in - let next = { peers = remaining_peers ; - next_request ; - delay = Time.System.Span.multiply_exn 1.5 delay } in - Table.replace state.pending key next ; - let requests = - try key :: P2p_peer.Map.find requested_peer acc - with Not_found -> [key] in - P2p_peer.Map.add requested_peer requests acc) - state.pending P2p_peer.Map.empty in + (fun key {peers; next_request; delay} acc -> + let later = + Option.unopt + ~default:Ptime.max + (Ptime.add_span now (Time.System.Span.of_seconds_exn 0.2)) + in + if 
Ptime.is_later next_request ~than:later then acc + else + let remaining_peers = P2p_peer.Set.inter peers active_peers in + if + P2p_peer.Set.is_empty remaining_peers + && not (P2p_peer.Set.is_empty peers) + then ( + Table.remove state.pending key ; + acc ) + else + let requested_peer = + P2p_peer.Id.Set.random_elt + ( if P2p_peer.Set.is_empty remaining_peers then + active_peers + else remaining_peers ) + in + let next_request = + Option.unopt ~default:Ptime.max (Ptime.add_span now delay) + in + let next = + { peers = remaining_peers; + next_request; + delay = Time.System.Span.multiply_exn 1.5 delay } + in + Table.replace state.pending key next ; + let requests = + try key :: P2p_peer.Map.find requested_peer acc + with Not_found -> [key] + in + P2p_peer.Map.add requested_peer requests acc) + state.pending + P2p_peer.Map.empty + in P2p_peer.Map.iter (Request.send state.param) requests ; - P2p_peer.Map.fold begin fun peer request acc -> - acc >>= fun () -> - Lwt_list.iter_s (fun key -> - lwt_debug Tag.DSL.(fun f -> - f "requested %a from %a" - -% t event "requested" - -% a Hash.Logging.tag key - -% a P2p_peer.Id.Logging.tag peer)) - request - end requests Lwt.return_unit >>= fun () -> - loop state + P2p_peer.Map.fold + (fun peer request acc -> + acc + >>= fun () -> + Lwt_list.iter_s + (fun key -> + lwt_debug + Tag.DSL.( + fun f -> + f "requested %a from %a" -% t event "requested" + -% a Hash.Logging.tag key + -% a P2p_peer.Id.Logging.tag peer)) + request) + requests + Lwt.return_unit + >>= fun () -> loop state in loop state let create param = - let state = { - param ; - queue = Lwt_pipe.create () ; - pending = Table.create 17 ; - events = Lwt.return_nil ; - canceler = Lwt_canceler.create () ; - worker = Lwt.return_unit ; - } in + let state = + { param; + queue = Lwt_pipe.create (); + pending = Table.create 17; + events = Lwt.return_nil; + canceler = Lwt_canceler.create (); + worker = Lwt.return_unit } + in state.worker <- - Lwt_utils.worker "db_request_scheduler" + Lwt_utils.worker + "db_request_scheduler" ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop state) ~cancel:(fun () -> Lwt_canceler.cancel state.canceler) ; state - let shutdown s = - Lwt_canceler.cancel s.canceler >>= fun () -> - s.worker + let shutdown s = Lwt_canceler.cancel s.canceler >>= fun () -> s.worker let memory_table_length s = Table.length s.pending - end diff --git a/src/lib_shell/distributed_db_functors.mli b/src/lib_shell/distributed_db_functors.mli index 8b8264d6a98aac8b228989db53896e0d0368c926..e54dcb5a66826c1d115948a87c376c540af887d5 100644 --- a/src/lib_shell/distributed_db_functors.mli +++ b/src/lib_shell/distributed_db_functors.mli @@ -36,7 +36,6 @@ *) module type DISTRIBUTED_DB = sig - type t (** The index key *) @@ -50,27 +49,31 @@ module type DISTRIBUTED_DB = sig type param (** Is the value known locally? *) - val known: t -> key -> bool Lwt.t + val known : t -> key -> bool Lwt.t type error += Missing_data of key + type error += Canceled of key + type error += Timeout of key (** Return the value if it is known locally, otherwise fail with the error [Missing_data]. *) - val read: t -> key -> value tzresult Lwt.t + val read : t -> key -> value tzresult Lwt.t (** Return the value if it is known locally, otherwise fail with the value [None]. *) - val read_opt: t -> key -> value option Lwt.t + val read_opt : t -> key -> value option Lwt.t (** Same as `fetch` but the call is non-blocking: the data will be stored in the local index when received. 
*) - val prefetch: + val prefetch : t -> ?peer:P2p_peer.Id.t -> ?timeout:Time.System.Span.t -> - key -> param -> unit + key -> + param -> + unit (** Return the value if it is known locally, or block until the data is received from the network. By default, the data will be @@ -86,125 +89,166 @@ module type DISTRIBUTED_DB = sig is called multiple time with the same key but with disctinct peers, the internal scheduler randomly chooses the requested peer (at each retry). *) - val fetch: + val fetch : t -> ?peer:P2p_peer.Id.t -> ?timeout:Time.System.Span.t -> - key -> param -> value tzresult Lwt.t + key -> + param -> + value tzresult Lwt.t (** Remove the data from the local index or cancel all pending request. Any pending [fetch] promises are resolved with the error [Canceled]. *) - val clear_or_cancel: t -> key -> unit + val clear_or_cancel : t -> key -> unit (** [resolve_pending t pids k v] resolves pending request (if any) in the local index for key k with [Found v]. It notifies the scheduler using 'notify_cancelation' for this key and wakes up the the waiter on this request. *) - val resolve_pending: t -> key -> value -> unit + val resolve_pending : t -> key -> value -> unit - val inject: t -> key -> value -> bool Lwt.t + val inject : t -> key -> value -> bool Lwt.t (** Monitor all the fetched data. A given data will appear only once. *) - val watch: t -> (key * value) Lwt_stream.t * Lwt_watcher.stopper - - val pending: t -> key -> bool + val watch : t -> (key * value) Lwt_stream.t * Lwt_watcher.stopper + val pending : t -> key -> bool end module type DISK_TABLE = sig type store + type key + type value - val known: store -> key -> bool Lwt.t - val read: store -> key -> value tzresult Lwt.t - val read_opt: store -> key -> value option Lwt.t + + val known : store -> key -> bool Lwt.t + + val read : store -> key -> value tzresult Lwt.t + + val read_opt : store -> key -> value option Lwt.t end module type MEMORY_TABLE = sig (* A subtype of Hashtbl.S *) type 'a t + type key - val create: int -> 'a t - val find: 'a t -> key -> 'a - val find_opt: 'a t -> key -> 'a option - val add: 'a t -> key -> 'a -> unit - val replace: 'a t -> key -> 'a -> unit - val remove: 'a t -> key -> unit - val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + + val create : int -> 'a t + + val find : 'a t -> key -> 'a + + val find_opt : 'a t -> key -> 'a option + + val add : 'a t -> key -> 'a -> unit + + val replace : 'a t -> key -> 'a -> unit + + val remove : 'a t -> key -> unit + + val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + val length : 'a t -> int end module type SCHEDULER_EVENTS = sig type t + type key - val request: t -> P2p_peer.Id.t option -> key -> unit - val notify: t -> P2p_peer.Id.t -> key -> unit - val notify_cancelation: t -> key -> unit - val notify_unrequested: t -> P2p_peer.Id.t -> key -> unit - val notify_duplicate: t -> P2p_peer.Id.t -> key -> unit - val notify_invalid: t -> P2p_peer.Id.t -> key -> unit + + val request : t -> P2p_peer.Id.t option -> key -> unit + + val notify : t -> P2p_peer.Id.t -> key -> unit + + val notify_cancelation : t -> key -> unit + + val notify_unrequested : t -> P2p_peer.Id.t -> key -> unit + + val notify_duplicate : t -> P2p_peer.Id.t -> key -> unit + + val notify_invalid : t -> P2p_peer.Id.t -> key -> unit + val memory_table_length : t -> int end module type PRECHECK = sig type key + type param + type notified_value + type value - val precheck: key -> param -> notified_value -> value option + + val precheck : key -> param -> notified_value -> value option 
end -module Make_table - (Hash : sig - type t - val name : string - val encoding : t Data_encoding.t - val pp : Format.formatter -> t -> unit - end) - (Disk_table : DISK_TABLE with type key := Hash.t) - (Memory_table : MEMORY_TABLE with type key := Hash.t) - (Scheduler : SCHEDULER_EVENTS with type key := Hash.t) - (Precheck : PRECHECK with type key := Hash.t - and type value := Disk_table.value) : sig - - include DISTRIBUTED_DB with type key = Hash.t - and type value = Disk_table.value - and type param = Precheck.param - val create: +module Make_table (Hash : sig + type t + + val name : string + + val encoding : t Data_encoding.t + + val pp : Format.formatter -> t -> unit +end) +(Disk_table : DISK_TABLE with type key := Hash.t) +(Memory_table : MEMORY_TABLE with type key := Hash.t) +(Scheduler : SCHEDULER_EVENTS with type key := Hash.t) +(Precheck : PRECHECK with type key := Hash.t and type value := Disk_table.value) : sig + include + DISTRIBUTED_DB + with type key = Hash.t + and type value = Disk_table.value + and type param = Precheck.param + + val create : ?global_input:(key * value) Lwt_watcher.input -> - Scheduler.t -> Disk_table.store -> t - val notify: t -> P2p_peer.Id.t -> key -> Precheck.notified_value -> unit Lwt.t - val memory_table_length : t -> int + Scheduler.t -> + Disk_table.store -> + t + + val notify : + t -> P2p_peer.Id.t -> key -> Precheck.notified_value -> unit Lwt.t + val memory_table_length : t -> int end module type REQUEST = sig type key + type param + val initial_delay : Time.System.Span.t + val active : param -> P2p_peer.Set.t + val send : param -> P2p_peer.Id.t -> key list -> unit end -module Make_request_scheduler - (Hash : sig - type t - val name : string - val encoding : t Data_encoding.t - val pp : Format.formatter -> t -> unit +module Make_request_scheduler (Hash : sig + type t + + val name : string + + val encoding : t Data_encoding.t - module Logging : sig - val tag : t Tag.def - end - end) - (Table : MEMORY_TABLE with type key := Hash.t) - (Request : REQUEST with type key := Hash.t) : sig + val pp : Format.formatter -> t -> unit + module Logging : sig + val tag : t Tag.def + end +end) +(Table : MEMORY_TABLE with type key := Hash.t) +(Request : REQUEST with type key := Hash.t) : sig type t - val create: Request.param -> t - val shutdown: t -> unit Lwt.t - include SCHEDULER_EVENTS with type t := t and type key := Hash.t + val create : Request.param -> t + + val shutdown : t -> unit Lwt.t + + include SCHEDULER_EVENTS with type t := t and type key := Hash.t end diff --git a/src/lib_shell/distributed_db_message.ml b/src/lib_shell/distributed_db_message.ml index e8dcd24e63b885ea4a55b75e2046185b99accdb9..61795e3998fef3fe1980c5cae06b922d80bfcae3 100644 --- a/src/lib_shell/distributed_db_message.ml +++ b/src/lib_shell/distributed_db_message.ml @@ -25,15 +25,20 @@ (*****************************************************************************) module Bounded_encoding = struct - open Data_encoding - let block_header_max_size = ref (Some (8 * 1024 * 1024)) (* FIXME: arbitrary *) + let block_header_max_size = ref (Some (8 * 1024 * 1024)) + + (* FIXME: arbitrary *) + let block_header_cache = ref (Block_header.bounded_encoding ?max_size:!block_header_max_size ()) + let block_locator_cache = - ref (Block_locator.bounded_encoding - ?max_header_size:!block_header_max_size ()) + ref + (Block_locator.bounded_encoding + ?max_header_size:!block_header_max_size + ()) let update_block_header_encoding () = block_header_cache := @@ -44,28 +49,39 @@ module Bounded_encoding = struct 
let set_block_header_max_size max = block_header_max_size := max ; update_block_header_encoding () + let block_header = delayed (fun () -> !block_header_cache) + let block_locator = delayed (fun () -> !block_locator_cache) (* FIXME: all constants below are arbitrary high bounds until we have the mechanism to update them properly *) let operation_max_size = ref (Some (128 * 1024)) (* FIXME: arbitrary *) + let operation_list_max_size = ref (Some (1024 * 1024)) (* FIXME: arbitrary *) + let operation_list_max_length = ref None (* FIXME: arbitrary *) + let operation_max_pass = ref (Some 8) (* FIXME: arbitrary *) let operation_cache = ref (Operation.bounded_encoding ?max_size:!operation_max_size ()) + let operation_list_cache = - ref (Operation.bounded_list_encoding - ?max_length:!operation_list_max_length - ?max_size:!operation_list_max_size - ?max_operation_size:!operation_max_size - ?max_pass:!operation_max_pass ()) + ref + (Operation.bounded_list_encoding + ?max_length:!operation_list_max_length + ?max_size:!operation_list_max_size + ?max_operation_size:!operation_max_size + ?max_pass:!operation_max_pass + ()) + let operation_hash_list_cache = - ref (Operation.bounded_hash_list_encoding - ?max_length:!operation_list_max_length - ?max_pass:!operation_max_pass ()) + ref + (Operation.bounded_hash_list_encoding + ?max_length:!operation_list_max_length + ?max_pass:!operation_max_pass + ()) let update_operation_list_encoding () = operation_list_cache := @@ -75,12 +91,14 @@ module Bounded_encoding = struct ?max_operation_size:!operation_max_size ?max_pass:!operation_max_pass () + let update_operation_hash_list_encoding () = operation_list_cache := Operation.bounded_list_encoding ?max_length:!operation_list_max_length ?max_pass:!operation_max_pass () + let update_operation_encoding () = operation_cache := Operation.bounded_encoding ?max_size:!operation_max_size () @@ -89,227 +107,225 @@ module Bounded_encoding = struct operation_max_size := max ; update_operation_encoding () ; update_operation_list_encoding () + let set_operation_list_max_size max = operation_list_max_size := max ; update_operation_list_encoding () + let set_operation_list_max_length max = operation_list_max_length := max ; update_operation_list_encoding () ; update_operation_hash_list_encoding () + let set_operation_max_pass max = operation_max_pass := max ; update_operation_list_encoding () ; update_operation_hash_list_encoding () let operation = delayed (fun () -> !operation_cache) + let operation_list = delayed (fun () -> !operation_list_cache) + let operation_hash_list = delayed (fun () -> !operation_hash_list_cache) let protocol_max_size = ref (Some (2 * 1024 * 1024)) (* FIXME: arbitrary *) + let protocol_cache = ref (Protocol.bounded_encoding ?max_size:!protocol_max_size ()) - let set_protocol_max_size max = - protocol_max_size := max + + let set_protocol_max_size max = protocol_max_size := max + let protocol = delayed (fun () -> !protocol_cache) let mempool_max_operations = ref None + let mempool_cache = ref (Mempool.bounded_encoding ?max_operations:!mempool_max_operations ()) - let set_mempool_max_operations max = - mempool_max_operations := max - let mempool = delayed (fun () -> !mempool_cache) + let set_mempool_max_operations max = mempool_max_operations := max + + let mempool = delayed (fun () -> !mempool_cache) end type t = - | Get_current_branch of Chain_id.t | Current_branch of Chain_id.t * Block_locator.t | Deactivate of Chain_id.t - | Get_current_head of Chain_id.t | Current_head of Chain_id.t * Block_header.t * 
Mempool.t - | Get_block_headers of Block_hash.t list | Block_header of Block_header.t - | Get_operations of Operation_hash.t list | Operation of Operation.t - | Get_protocols of Protocol_hash.t list | Protocol of Protocol.t - | Get_operation_hashes_for_blocks of (Block_hash.t * int) list | Operation_hashes_for_block of - Block_hash.t * int * - Operation_hash.t list * Operation_list_list_hash.path - + Block_hash.t + * int + * Operation_hash.t list + * Operation_list_list_hash.path | Get_operations_for_blocks of (Block_hash.t * int) list | Operations_for_block of - Block_hash.t * int * - Operation.t list * Operation_list_list_hash.path + Block_hash.t * int * Operation.t list * Operation_list_list_hash.path let encoding = let open Data_encoding in let case ?max_length ~tag ~title encoding unwrap wrap = - P2p_message.Encoding { tag ; title ; encoding ; wrap ; unwrap ; max_length } in - [ - case ~tag:0x10 + P2p_message.Encoding {tag; title; encoding; wrap; unwrap; max_length} + in + [ case + ~tag:0x10 ~title:"Get_current_branch" - (obj1 - (req "get_current_branch" Chain_id.encoding)) - (function - | Get_current_branch chain_id -> Some chain_id - | _ -> None) - (fun chain_id -> Get_current_branch chain_id) ; - - case ~tag:0x11 + (obj1 (req "get_current_branch" Chain_id.encoding)) + (function Get_current_branch chain_id -> Some chain_id | _ -> None) + (fun chain_id -> Get_current_branch chain_id); + case + ~tag:0x11 ~title:"Current_branch" (obj2 (req "chain_id" Chain_id.encoding) (req "current_branch" Bounded_encoding.block_locator)) (function - | Current_branch (chain_id, locator) -> Some (chain_id, locator) - | _ -> None) - (fun (chain_id, locator) -> Current_branch (chain_id, locator)) ; - - case ~tag:0x12 + | Current_branch (chain_id, locator) -> + Some (chain_id, locator) + | _ -> + None) + (fun (chain_id, locator) -> Current_branch (chain_id, locator)); + case + ~tag:0x12 ~title:"Deactivate" - (obj1 - (req "deactivate" Chain_id.encoding)) - (function - | Deactivate chain_id -> Some chain_id - | _ -> None) - (fun chain_id -> Deactivate chain_id) ; - - case ~tag:0x13 + (obj1 (req "deactivate" Chain_id.encoding)) + (function Deactivate chain_id -> Some chain_id | _ -> None) + (fun chain_id -> Deactivate chain_id); + case + ~tag:0x13 ~title:"Get_current_head" - (obj1 - (req "get_current_head" Chain_id.encoding)) - (function - | Get_current_head chain_id -> Some chain_id - | _ -> None) - (fun chain_id -> Get_current_head chain_id) ; - - case ~tag:0x14 + (obj1 (req "get_current_head" Chain_id.encoding)) + (function Get_current_head chain_id -> Some chain_id | _ -> None) + (fun chain_id -> Get_current_head chain_id); + case + ~tag:0x14 ~title:"Current_head" (obj3 (req "chain_id" Chain_id.encoding) - (req "current_block_header" (dynamic_size Bounded_encoding.block_header)) + (req + "current_block_header" + (dynamic_size Bounded_encoding.block_header)) (req "current_mempool" Bounded_encoding.mempool)) (function - | Current_head (chain_id, bh, mempool) -> Some (chain_id, bh, mempool) - | _ -> None) - (fun (chain_id, bh, mempool) -> Current_head (chain_id, bh, mempool)) ; - - case ~tag:0x20 + | Current_head (chain_id, bh, mempool) -> + Some (chain_id, bh, mempool) + | _ -> + None) + (fun (chain_id, bh, mempool) -> Current_head (chain_id, bh, mempool)); + case + ~tag:0x20 ~title:"Get_block_headers" (obj1 (req "get_block_headers" (list ~max_length:10 Block_hash.encoding))) - (function - | Get_block_headers bhs -> Some bhs - | _ -> None) - (fun bhs -> Get_block_headers bhs) ; - - case ~tag:0x21 + 
(function Get_block_headers bhs -> Some bhs | _ -> None) + (fun bhs -> Get_block_headers bhs); + case + ~tag:0x21 ~title:"Block_header" (obj1 (req "block_header" Bounded_encoding.block_header)) - (function - | Block_header bh -> Some bh - | _ -> None) - (fun bh -> Block_header bh) ; - - case ~tag:0x30 + (function Block_header bh -> Some bh | _ -> None) + (fun bh -> Block_header bh); + case + ~tag:0x30 ~title:"Get_operations" - (obj1 (req "get_operations" (list ~max_length:10 Operation_hash.encoding))) - (function - | Get_operations bhs -> Some bhs - | _ -> None) - (fun bhs -> Get_operations bhs) ; - - case ~tag:0x31 + (obj1 + (req "get_operations" (list ~max_length:10 Operation_hash.encoding))) + (function Get_operations bhs -> Some bhs | _ -> None) + (fun bhs -> Get_operations bhs); + case + ~tag:0x31 ~title:"Operation" (obj1 (req "operation" Bounded_encoding.operation)) (function Operation o -> Some o | _ -> None) (fun o -> Operation o); - - case ~tag:0x40 + case + ~tag:0x40 ~title:"Get_protocols" - (obj1 - (req "get_protocols" (list ~max_length:10 Protocol_hash.encoding))) - (function - | Get_protocols protos -> Some protos - | _ -> None) + (obj1 (req "get_protocols" (list ~max_length:10 Protocol_hash.encoding))) + (function Get_protocols protos -> Some protos | _ -> None) (fun protos -> Get_protocols protos); - - case ~tag:0x41 + case + ~tag:0x41 ~title:"Protocol" (obj1 (req "protocol" Bounded_encoding.protocol)) - (function Protocol proto -> Some proto | _ -> None) + (function Protocol proto -> Some proto | _ -> None) (fun proto -> Protocol proto); - - case ~tag:0x50 + case + ~tag:0x50 ~title:"Get_operation_hashes_for_blocks" - (obj1 (req "get_operation_hashes_for_blocks" - (list ~max_length:10 (tup2 Block_hash.encoding int8)))) + (obj1 + (req + "get_operation_hashes_for_blocks" + (list ~max_length:10 (tup2 Block_hash.encoding int8)))) (function - | Get_operation_hashes_for_blocks keys -> Some keys - | _ -> None) + | Get_operation_hashes_for_blocks keys -> Some keys | _ -> None) (fun keys -> Get_operation_hashes_for_blocks keys); - - case ~tag:0x51 + case + ~tag:0x51 ~title:"Operation_hashes_for_blocks" (merge_objs (obj1 - (req "operation_hashes_for_block" + (req + "operation_hashes_for_block" (obj2 (req "hash" Block_hash.encoding) (req "validation_pass" int8)))) Bounded_encoding.operation_hash_list) - (function Operation_hashes_for_block (block, ofs, ops, path) -> - Some ((block, ofs), (path, ops)) | _ -> None) + (function + | Operation_hashes_for_block (block, ofs, ops, path) -> + Some ((block, ofs), (path, ops)) + | _ -> + None) (fun ((block, ofs), (path, ops)) -> - Operation_hashes_for_block (block, ofs, ops, path)) ; - - case ~tag:0x60 + Operation_hashes_for_block (block, ofs, ops, path)); + case + ~tag:0x60 ~title:"Get_operations_for_blocks" - (obj1 (req "get_operations_for_blocks" - (list ~max_length:10 - (obj2 - (req "hash" Block_hash.encoding) - (req "validation_pass" int8))))) - (function - | Get_operations_for_blocks keys -> Some keys - | _ -> None) + (obj1 + (req + "get_operations_for_blocks" + (list + ~max_length:10 + (obj2 + (req "hash" Block_hash.encoding) + (req "validation_pass" int8))))) + (function Get_operations_for_blocks keys -> Some keys | _ -> None) (fun keys -> Get_operations_for_blocks keys); - - case ~tag:0x61 + case + ~tag:0x61 ~title:"Operations_for_blocks" (merge_objs (obj1 - (req "operations_for_block" + (req + "operations_for_block" (obj2 (req "hash" Block_hash.encoding) (req "validation_pass" int8)))) Bounded_encoding.operation_list) - (function 
Operations_for_block (block, ofs, ops, path) -> - Some ((block, ofs), (path, ops)) | _ -> None) + (function + | Operations_for_block (block, ofs, ops, path) -> + Some ((block, ofs), (path, ops)) + | _ -> + None) (fun ((block, ofs), (path, ops)) -> - Operations_for_block (block, ofs, ops, path)) ; - - ] + Operations_for_block (block, ofs, ops, path)) ] -let cfg : _ P2p.message_config = { - encoding ; - chain_name = Distributed_db_version.chain_name ; - distributed_db_versions = [ - Distributed_db_version.zero ; - ] ; -} +let cfg : _ P2p.message_config = + { encoding; + chain_name = Distributed_db_version.chain_name; + distributed_db_versions = [Distributed_db_version.zero] } let raw_encoding = P2p_message.encoding encoding let pp_json ppf msg = - Data_encoding.Json.pp ppf + Data_encoding.Json.pp + ppf (Data_encoding.Json.construct raw_encoding (Message msg)) module Logging = struct diff --git a/src/lib_shell/distributed_db_message.mli b/src/lib_shell/distributed_db_message.mli index a0aa686735147b755b6f4b44ebdad7bb2c586511..78559a9574ff94d1c80b35b152ad1a6222baab54 100644 --- a/src/lib_shell/distributed_db_message.mli +++ b/src/lib_shell/distributed_db_message.mli @@ -26,45 +26,45 @@ (** Tezos Shell - Network message for the gossip P2P protocol. *) type t = - | Get_current_branch of Chain_id.t | Current_branch of Chain_id.t * Block_locator.t | Deactivate of Chain_id.t - | Get_current_head of Chain_id.t | Current_head of Chain_id.t * Block_header.t * Mempool.t - | Get_block_headers of Block_hash.t list | Block_header of Block_header.t - | Get_operations of Operation_hash.t list | Operation of Operation.t - | Get_protocols of Protocol_hash.t list | Protocol of Protocol.t - | Get_operation_hashes_for_blocks of (Block_hash.t * int) list | Operation_hashes_for_block of - Block_hash.t * int * - Operation_hash.t list * Operation_list_list_hash.path - + Block_hash.t + * int + * Operation_hash.t list + * Operation_list_list_hash.path | Get_operations_for_blocks of (Block_hash.t * int) list | Operations_for_block of - Block_hash.t * int * - Operation.t list * Operation_list_list_hash.path + Block_hash.t * int * Operation.t list * Operation_list_list_hash.path val cfg : t P2p.message_config val pp_json : Format.formatter -> t -> unit module Bounded_encoding : sig - val set_block_header_max_size: int option -> unit - val set_operation_max_size: int option -> unit - val set_operation_list_max_size: int option -> unit - val set_operation_list_max_length: int option -> unit - val set_operation_max_pass: int option -> unit - val set_protocol_max_size: int option -> unit - val set_mempool_max_operations: int option -> unit + val set_block_header_max_size : int option -> unit + + val set_operation_max_size : int option -> unit + + val set_operation_list_max_size : int option -> unit + + val set_operation_list_max_length : int option -> unit + + val set_operation_max_pass : int option -> unit + + val set_protocol_max_size : int option -> unit + + val set_mempool_max_operations : int option -> unit end module Logging : sig diff --git a/src/lib_shell/injection_directory.ml b/src/lib_shell/injection_directory.ml index d609735800be53581a75c6bb0b3bb6cec0bfdf73..ae626416450a765edac23630d186354d95f5d807 100644 --- a/src/lib_shell/injection_directory.ml +++ b/src/lib_shell/injection_directory.ml @@ -26,75 +26,71 @@ let read_chain_id validator chain = let distributed_db = Validator.distributed_db validator in let state = Distributed_db.state distributed_db in - begin - match chain with - | None -> Lwt.return_none - | 
Some chain -> - Chain_directory.get_chain_id state chain >>= Lwt.return_some - end + match chain with + | None -> + Lwt.return_none + | Some chain -> + Chain_directory.get_chain_id state chain >>= Lwt.return_some let inject_block validator ?force ?chain bytes operations = - read_chain_id validator chain >>= fun chain_id -> - Validator.validate_block - validator ?force ?chain_id bytes operations >>=? fun (hash, block) -> - return (hash, (block >>=? fun _ -> return_unit)) + read_chain_id validator chain + >>= fun chain_id -> + Validator.validate_block validator ?force ?chain_id bytes operations + >>=? fun (hash, block) -> return (hash, block >>=? fun _ -> return_unit) let inject_operation validator ?chain bytes = - read_chain_id validator chain >>= fun chain_id -> + read_chain_id validator chain + >>= fun chain_id -> let t = match Data_encoding.Binary.of_bytes Operation.encoding bytes with - | None -> failwith "Can't parse the operation" + | None -> + failwith "Can't parse the operation" | Some op -> - Validator.inject_operation validator ?chain_id op in + Validator.inject_operation validator ?chain_id op + in let hash = Operation_hash.hash_bytes [bytes] in Lwt.return (hash, t) let inject_protocol state ?force:_ proto = let proto_bytes = - Data_encoding.Binary.to_bytes_exn Protocol.encoding proto in + Data_encoding.Binary.to_bytes_exn Protocol.encoding proto + in let hash = Protocol_hash.hash_bytes [proto_bytes] in let validation = - Updater.compile hash proto >>= function + Updater.compile hash proto + >>= function | false -> - failwith - "Compilation failed (%a)" - Protocol_hash.pp_short hash - | true -> - State.Protocol.store state proto >>= function + failwith "Compilation failed (%a)" Protocol_hash.pp_short hash + | true -> ( + State.Protocol.store state proto + >>= function | None -> failwith "Previously registered protocol (%a)" - Protocol_hash.pp_short hash - | Some _ -> return_unit + Protocol_hash.pp_short + hash + | Some _ -> + return_unit ) in Lwt.return (hash, validation) let build_rpc_directory validator = - let distributed_db = Validator.distributed_db validator in let state = Distributed_db.state distributed_db in - let dir : unit RPC_directory.t ref = ref RPC_directory.empty in let register0 s f = - dir := RPC_directory.register !dir s (fun () p q -> f p q) in - - register0 Injection_services.S.block begin fun q (raw, operations) -> - inject_block validator - ?chain:q#chain ~force:q#force raw operations >>=? fun (hash, wait) -> - (if q#async then return_unit else wait) >>=? fun () -> - return hash - end ; - - register0 Injection_services.S.operation begin fun q contents -> - inject_operation validator ?chain:q#chain contents >>= fun (hash, wait) -> - (if q#async then return_unit else wait) >>=? fun () -> - return hash - end ; - - register0 Injection_services.S.protocol begin fun q protocol -> - inject_protocol state ~force:q#force protocol >>= fun (hash, wait) -> - (if q#async then return_unit else wait) >>=? fun () -> - return hash - end ; - + dir := RPC_directory.register !dir s (fun () p q -> f p q) + in + register0 Injection_services.S.block (fun q (raw, operations) -> + inject_block validator ?chain:q#chain ~force:q#force raw operations + >>=? fun (hash, wait) -> + (if q#async then return_unit else wait) >>=? fun () -> return hash) ; + register0 Injection_services.S.operation (fun q contents -> + inject_operation validator ?chain:q#chain contents + >>= fun (hash, wait) -> + (if q#async then return_unit else wait) >>=? 
fun () -> return hash) ; + register0 Injection_services.S.protocol (fun q protocol -> + inject_protocol state ~force:q#force protocol + >>= fun (hash, wait) -> + (if q#async then return_unit else wait) >>=? fun () -> return hash) ; !dir diff --git a/src/lib_shell/injection_directory.mli b/src/lib_shell/injection_directory.mli index 149f27ec5cccd348f20f903ee0673218e7a1c9da..dd48f01f411766d2c54f96c10319352804edad9c 100644 --- a/src/lib_shell/injection_directory.mli +++ b/src/lib_shell/injection_directory.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val build_rpc_directory: Validator.t -> unit RPC_directory.t +val build_rpc_directory : Validator.t -> unit RPC_directory.t diff --git a/src/lib_shell/mempool_peer_worker.ml b/src/lib_shell/mempool_peer_worker.ml index f25d30dad324bdb882bb2f366abcf48359def352..b6fd182a60eeeb575e174f920e5d79031e4950ca 100644 --- a/src/lib_shell/mempool_peer_worker.ml +++ b/src/lib_shell/mempool_peer_worker.ml @@ -28,32 +28,30 @@ * compartimentatilsation. *) type limits = { - max_promises_per_request : int ; - worker_limits : Worker_types.limits ; + max_promises_per_request : int; + worker_limits : Worker_types.limits } module type T = sig - module Mempool_worker: Mempool_worker.T + module Mempool_worker : Mempool_worker.T type t + type input = Operation_hash.t list - val create: limits -> P2p_peer.Id.t -> Mempool_worker.t -> t tzresult Lwt.t - val shutdown: t -> input Lwt.t + val create : limits -> P2p_peer.Id.t -> Mempool_worker.t -> t tzresult Lwt.t - val validate: t -> input -> unit tzresult Lwt.t + val shutdown : t -> input Lwt.t + val validate : t -> input -> unit tzresult Lwt.t end - module type STATIC = sig val max_pending_requests : int end -module Make (Static: STATIC) (Mempool_worker: Mempool_worker.T) - : T with module Mempool_worker = Mempool_worker -= struct - +module Make (Static : STATIC) (Mempool_worker : Mempool_worker.T) : + T with module Mempool_worker = Mempool_worker = struct (* 0. Prelude: set up base modules and types *) (* See interface file for info if needed. 
*) @@ -61,71 +59,82 @@ module Make (Static: STATIC) (Mempool_worker: Mempool_worker.T) module Mempool_worker = Mempool_worker type input = Operation_hash.t list + type result = | Cannot_download of error list | Cannot_parse of error list | Cannot_validate of error list | Mempool_result of Mempool_worker.result + type output = result Operation_hash.Map.t let pp_input ppf input = - Format.fprintf ppf + Format.fprintf + ppf "@[<v 0>%a@]" (Format.pp_print_list Operation_hash.pp) input + let result_encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Cannot download" (obj1 (req "download_errors" (list Error_monad.error_encoding))) (function Cannot_download errs -> Some errs | _ -> None) - (fun errs -> Cannot_download errs) ; - case (Tag 1) + (fun errs -> Cannot_download errs); + case + (Tag 1) ~title:"Cannot parse" (obj1 (req "parse_errors" (list Error_monad.error_encoding))) (function Cannot_parse errs -> Some errs | _ -> None) - (fun errs -> Cannot_parse errs) ; - case (Tag 2) + (fun errs -> Cannot_parse errs); + case + (Tag 2) ~title:"Cannot validate" (obj1 (req "validation_errors" (list Error_monad.error_encoding))) (function Cannot_validate errs -> Some errs | _ -> None) - (fun errs -> Cannot_validate errs) ; - case (Tag 3) + (fun errs -> Cannot_validate errs); + case + (Tag 3) ~title:"Validation result" (obj1 (req "validation_result" Mempool_worker.result_encoding)) (function Mempool_result result -> Some result | _ -> None) (fun result -> Mempool_result result) ] - module Log = - Internal_event.Legacy_logging.Make (struct - let name = "node.mempool.peer_worker" - end) - + module Log = Internal_event.Legacy_logging.Make (struct + let name = "node.mempool.peer_worker" + end) (* 1. Core: the carefully scheduled work performed by the worker *) module Work : sig - val process_batch: Mempool_worker.t -> int -> input -> output Lwt.t + val process_batch : Mempool_worker.t -> int -> input -> output Lwt.t end = struct type t = { - pool: unit Lwt_pool.t; - received: Operation_hash.t Queue.t; - downloading: (Operation_hash.t * Operation.t tzresult Lwt.t) Queue.t; - applying: (Mempool_worker.operation * Mempool_worker.result tzresult Lwt.t) Queue.t; - mutable results: result Operation_hash.Map.t + pool : unit Lwt_pool.t; + received : Operation_hash.t Queue.t; + downloading : (Operation_hash.t * Operation.t tzresult Lwt.t) Queue.t; + applying : + (Mempool_worker.operation * Mempool_worker.result tzresult Lwt.t) + Queue.t; + mutable results : result Operation_hash.Map.t } (* Primitives *) let is_empty t = - Queue.is_empty t.received && - Queue.is_empty t.downloading && - Queue.is_empty t.applying + Queue.is_empty t.received + && Queue.is_empty t.downloading + && Queue.is_empty t.applying - let has_resolved t = match Lwt.state t with - | Lwt.Return _ | Lwt.Fail _ -> true - | Lwt.Sleep -> false + let has_resolved t = + match Lwt.state t with + | Lwt.Return _ | Lwt.Fail _ -> + true + | Lwt.Sleep -> + false let head_is_resolved q = (not (Queue.is_empty q)) && has_resolved (snd (Queue.peek q)) @@ -133,42 +142,35 @@ module Make (Static: STATIC) (Mempool_worker: Mempool_worker.T) let select t = (* A `select`-like function to wait on any of the pipeline's buffers' * heads to resolve *) - assert (not (Queue.is_empty t.downloading && Queue.is_empty t.applying)); + assert (not (Queue.is_empty t.downloading && Queue.is_empty t.applying)) ; let first_task_or_never q = - if Queue.is_empty q then - Lwt_utils.never_ending () - else - snd (Queue.peek q) >>= fun _ -> Lwt.return_unit + 
if Queue.is_empty q then Lwt_utils.never_ending () + else snd (Queue.peek q) >>= fun _ -> Lwt.return_unit in - Lwt.choose ( - (first_task_or_never t.downloading) :: - (first_task_or_never t.applying) :: - [] - ) + Lwt.choose + [first_task_or_never t.downloading; first_task_or_never t.applying] let record_result pipeline op_hash result = - pipeline.results <- Operation_hash.Map.add op_hash result pipeline.results + pipeline.results <- + Operation_hash.Map.add op_hash result pipeline.results let q_of_list l = let q = Queue.create () in - List.iter (fun x -> Queue.add x q) l; + List.iter (fun x -> Queue.add x q) l ; q let create pool_size op_hashes = - { - pool = Lwt_pool.create pool_size Lwt.return; + { pool = Lwt_pool.create pool_size Lwt.return; received = q_of_list op_hashes; downloading = Queue.create (); applying = Queue.create (); - results = Operation_hash.Map.empty; - } + results = Operation_hash.Map.empty } let cancel pipeline = let cancel_snd (_, p) = Lwt.cancel p in - Queue.iter cancel_snd pipeline.downloading; + Queue.iter cancel_snd pipeline.downloading ; Queue.iter cancel_snd pipeline.applying - (* Exported interactions *) let step mempool_worker pipeline = @@ -176,96 +178,95 @@ module Make (Static: STATIC) (Mempool_worker: Mempool_worker.T) (* op_hash: Opertation_hash.t * op: Operation.t * mop: Mempool_worker.operation *) - - if head_is_resolved pipeline.applying then begin + if head_is_resolved pipeline.applying then ( let (op, p) = Queue.pop pipeline.applying in - p >>= function + p + >>= function | Error errs -> - record_result pipeline op.hash (Cannot_validate errs); + record_result pipeline op.hash (Cannot_validate errs) ; Lwt.return_unit | Ok mempool_result -> - record_result pipeline op.hash (Mempool_result mempool_result); - Lwt.return_unit - end - - else if head_is_resolved pipeline.downloading then begin + record_result pipeline op.hash (Mempool_result mempool_result) ; + Lwt.return_unit ) + else if head_is_resolved pipeline.downloading then let (op_hash, p) = Queue.pop pipeline.downloading in - p >>= function + p + >>= function | Error errs -> - record_result pipeline op_hash (Cannot_download errs); + record_result pipeline op_hash (Cannot_download errs) ; Lwt.return_unit - | Ok op -> - match Mempool_worker.parse op with - | Error errs -> - record_result pipeline op_hash (Cannot_parse errs); - Lwt.return_unit - | Ok mop -> - let p = - Lwt_pool.use pipeline.pool (fun () -> - Mempool_worker.validate mempool_worker mop) in - Queue.push (mop, p) pipeline.applying; - Lwt.return_unit - end - - else if (not (Queue.is_empty pipeline.received)) then begin + | Ok op -> ( + match Mempool_worker.parse op with + | Error errs -> + record_result pipeline op_hash (Cannot_parse errs) ; + Lwt.return_unit + | Ok mop -> + let p = + Lwt_pool.use pipeline.pool (fun () -> + Mempool_worker.validate mempool_worker mop) + in + Queue.push (mop, p) pipeline.applying ; + Lwt.return_unit ) + else if not (Queue.is_empty pipeline.received) then ( let op_hash = Queue.pop pipeline.received in (* TODO[?] should we specify the current peer for fetching? 
*) let chain_db = Mempool_worker.chain_db mempool_worker in let p = Lwt_pool.use pipeline.pool (fun () -> - Distributed_db.Operation.fetch chain_db op_hash ()) in - Queue.push (op_hash, p) pipeline.downloading; - Lwt.return_unit - end - + Distributed_db.Operation.fetch chain_db op_hash ()) + in + Queue.push (op_hash, p) pipeline.downloading ; + Lwt.return_unit ) else (* There are some pending operations, we need to wait on them *) - select pipeline >>= fun () -> - Lwt.return_unit + select pipeline >>= fun () -> Lwt.return_unit let process_batch mempool_worker pool_size input = let pipeline = create pool_size input in let rec loop () = - if is_empty pipeline then - Lwt.return pipeline.results - else - step mempool_worker pipeline >>= fun () -> - loop () + if is_empty pipeline then Lwt.return pipeline.results + else step mempool_worker pipeline >>= fun () -> loop () in let work = loop () in - Lwt.on_cancel work (fun () -> cancel pipeline); + Lwt.on_cancel work (fun () -> cancel pipeline) ; work - end - (* 2. Boilerplate: the set up for the worker architecture *) module Name = struct type t = P2p_peer.Id.t + let encoding = P2p_peer.Id.encoding + let base = let proto_hash = - let _: string = Format.flush_str_formatter () in - Format.fprintf Format.str_formatter "%a" Protocol_hash.pp Proto.hash; - Format.flush_str_formatter () in - [ "node"; "mempool"; "peer_worker"; proto_hash ] + let (_ : string) = Format.flush_str_formatter () in + Format.fprintf Format.str_formatter "%a" Protocol_hash.pp Proto.hash ; + Format.flush_str_formatter () + in + ["node"; "mempool"; "peer_worker"; proto_hash] + let pp = P2p_peer.Id.pp end module Request = struct type 'a t = Batch : input -> output t + type view = input - let view - : type a. a t -> view - = fun (Batch os) -> os + + let view : type a. 
a t -> view = fun (Batch os) -> os + let encoding = let open Data_encoding in list Operation_hash.encoding + let pp ppf = function - |[] -> Format.fprintf ppf "@[<v 2>Request:@, Empty List of Operations@]" - |os -> - Format.fprintf ppf + | [] -> + Format.fprintf ppf "@[<v 2>Request:@, Empty List of Operations@]" + | os -> + Format.fprintf + ppf "@[<v 2>Request:@,%a@]" (Format.pp_print_list Operation_hash.pp) os @@ -280,83 +281,100 @@ module Make (Static: STATIC) (Mempool_worker: Mempool_worker.T) let level req = let open Internal_event in match req with - | Start _ -> Info - | End_ok _ -> Info - | End_error _ -> Error + | Start _ -> + Info + | End_ok _ -> + Info + | End_error _ -> + Error let encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Start" (obj1 (req "input" (list Operation_hash.encoding))) (function Start input -> Some input | _ -> None) - (fun input -> Start input) ; - case (Tag 1) + (fun input -> Start input); + case + (Tag 1) ~title:"End_ok" (obj3 (req "request" Request.encoding) (req "status" Worker_types.request_status_encoding) (req "output" (Operation_hash.Map.encoding result_encoding))) - (function End_ok (view, status, result) -> Some (view, status, result) | _ -> None) - (fun (view, status, result) -> End_ok (view, status, result)) ; - case (Tag 2) + (function + | End_ok (view, status, result) -> + Some (view, status, result) + | _ -> + None) + (fun (view, status, result) -> End_ok (view, status, result)); + case + (Tag 2) ~title:"End_error" (obj3 (req "failed_request" Request.encoding) (req "status" Worker_types.request_status_encoding) (req "error" RPC_error.encoding)) - (function End_error (view, status, errs) -> Some (view, status, errs) | _ -> None) + (function + | End_error (view, status, errs) -> + Some (view, status, errs) + | _ -> + None) (fun (view, status, errs) -> End_error (view, status, errs)) ] let pp ppf = function | Start input -> - Format.fprintf ppf - "@[<v 0>Starting: %a@]" - pp_input - input + Format.fprintf ppf "@[<v 0>Starting: %a@]" pp_input input | End_ok (view, _, _) -> - Format.fprintf ppf - "@[<v 0>Finished: %a@]" - Request.pp view + Format.fprintf ppf "@[<v 0>Finished: %a@]" Request.pp view | End_error (view, _, errs) -> - Format.fprintf ppf + Format.fprintf + ppf "@[<v 0>Errors: %a, Operations: %a@]" - (Format.pp_print_list Error_monad.pp) errs - Request.pp view + (Format.pp_print_list Error_monad.pp) + errs + Request.pp + view end module Types = struct type parameters = Mempool_worker.t * int - type state = { mempool_worker: Mempool_worker.t ; pool_size: int } + + type state = {mempool_worker : Mempool_worker.t; pool_size : int} + type view = unit + let view _ _ = () + let encoding = Data_encoding.unit + let pp _ _ = () end module Worker = Worker.Make (Name) (Event) (Request) (Types) + type t = Worker.bounded Worker.queue Worker.t + let table = let open Worker in - create_table (Bounded { size = Static.max_pending_requests }) - + create_table (Bounded {size = Static.max_pending_requests}) (* 3. Workers' work: setting workers' callbacks to perform core work *) module Handlers = struct - type self = t let on_launch _ _ (mempool_worker, pool_size) = - return Types.{ mempool_worker; pool_size } + return Types.{mempool_worker; pool_size} - let on_request : type a. 
self -> a Request.t -> a tzresult Lwt.t - = fun t (Request.Batch os) -> - let st = Worker.state t in - Worker.record_event t (Event.Start os) ; - Work.process_batch st.mempool_worker st.pool_size os >>= fun r -> - return r + let on_request : type a. self -> a Request.t -> a tzresult Lwt.t = + fun t (Request.Batch os) -> + let st = Worker.state t in + Worker.record_event t (Event.Start os) ; + Work.process_batch st.mempool_worker st.pool_size os + >>= fun r -> return r let on_no_request _ = return_unit @@ -366,23 +384,22 @@ module Make (Static: STATIC) (Mempool_worker: Mempool_worker.T) Worker.record_event t (Event.End_error (view, st, errs)) ; Lwt.return_error errs - let on_completion - : type a. self -> a Request.t -> a -> Worker_types.request_status -> unit Lwt.t - = fun t req output st -> - match req with - | Request.Batch _ -> - Worker.record_event t (Event.End_ok (Request.view req, st, output)) ; - Lwt.return_unit - + let on_completion : + type a. + self -> a Request.t -> a -> Worker_types.request_status -> unit Lwt.t = + fun t req output st -> + match req with + | Request.Batch _ -> + Worker.record_event t (Event.End_ok (Request.view req, st, output)) ; + Lwt.return_unit end - (* 4. Public interface: exporting a thin wrapper around workers and work. *) (* See interface file for documentation *) let validate t os = Worker.Queue.push_request_and_wait t (Request.Batch os) - >>=? fun (_: output) -> return_unit + >>=? fun (_ : output) -> return_unit let create limits peer_id mempool_worker = Worker.launch @@ -397,10 +414,10 @@ module Make (Static: STATIC) (Mempool_worker: Mempool_worker.T) let recycled = List.fold_left (fun recycled (_, input) -> - List.fold_left - (fun recycled op_h -> Operation_hash.Set.add op_h recycled) - recycled - input) + List.fold_left + (fun recycled op_h -> Operation_hash.Set.add op_h recycled) + recycled + input) recycled (Worker.Queue.pending_requests w) in @@ -411,10 +428,9 @@ module Make (Static: STATIC) (Mempool_worker: Mempool_worker.T) (fun recycled op_h -> Operation_hash.Set.add op_h recycled) recycled input - | None -> recycled + | None -> + recycled in let input = Operation_hash.Set.elements recycled in - Worker.shutdown w >>= fun () -> - Lwt.return input - + Worker.shutdown w >>= fun () -> Lwt.return input end diff --git a/src/lib_shell/mempool_peer_worker.mli b/src/lib_shell/mempool_peer_worker.mli index 7eb63cdb5ee5014edf950c7fd63713ed6c61f152..c6170463f050affbb645d0851215bc3cdcbd976f 100644 --- a/src/lib_shell/mempool_peer_worker.mli +++ b/src/lib_shell/mempool_peer_worker.mli @@ -27,12 +27,12 @@ (** Distributing validation work between different workers, one for each peer. *) type limits = { - max_promises_per_request : int ; - worker_limits : Worker_types.limits ; + max_promises_per_request : int; + worker_limits : Worker_types.limits } module type T = sig - module Mempool_worker: Mempool_worker.T + module Mempool_worker : Mempool_worker.T (** The type of a peer worker. Each peer worker should be used for treating all the operations from a given peer. *) @@ -49,25 +49,23 @@ module type T = sig to be used for validating batches of operations sent by the peer [peer_id]. The validation of each operations is delegated to the associated [mempool_worker]. *) - val create: limits -> P2p_peer.Id.t -> Mempool_worker.t -> t tzresult Lwt.t + val create : limits -> P2p_peer.Id.t -> Mempool_worker.t -> t tzresult Lwt.t (** [shutdown t] closes the peer worker [t]. 
It returns a list of operation hashes that can be recycled when a new worker is created for the same peer. *) - val shutdown: t -> input Lwt.t + val shutdown : t -> input Lwt.t (** [validate worker input] validates the batch of operations [input]. The work is performed by [worker] and the underlying validation of each operation is performed by the [mempool_worker] that was used to [create] [worker]. *) - val validate: t -> input -> unit tzresult Lwt.t - + val validate : t -> input -> unit tzresult Lwt.t end - module type STATIC = sig val max_pending_requests : int end -module Make (Static: STATIC) (Mempool_worker: Mempool_worker.T) - : T with module Mempool_worker = Mempool_worker +module Make (Static : STATIC) (Mempool_worker : Mempool_worker.T) : + T with module Mempool_worker = Mempool_worker diff --git a/src/lib_shell/mempool_worker.ml b/src/lib_shell/mempool_worker.ml index 0f06652edde19791325cab4cfcf62412cb868d81..b68287caec68bbb5d34c4ccaccdeefe890f8fa8e 100644 --- a/src/lib_shell/mempool_worker.ml +++ b/src/lib_shell/mempool_worker.ml @@ -23,20 +23,17 @@ (* *) (*****************************************************************************) -type limits = { - worker_limits : Worker_types.limits ; -} +type limits = {worker_limits : Worker_types.limits} module type T = sig - - module Proto: Registered_protocol.T + module Proto : Registered_protocol.T type t type operation = private { - hash: Operation_hash.t ; - raw: Operation.t ; - protocol_data: Proto.operation_data ; + hash : Operation_hash.t; + raw : Operation.t; + protocol_data : Proto.operation_data } type result = @@ -46,10 +43,12 @@ module type T = sig | Refused of error list | Duplicate | Not_in_branch + val result_encoding : result Data_encoding.t (** Creates/tear-down a new mempool validator context. 
*) val create : limits -> Distributed_db.chain_db -> t tzresult Lwt.t + val shutdown : t -> unit Lwt.t (** parse a new operation and add it to the mempool context *) @@ -61,26 +60,23 @@ module type T = sig val chain_db : t -> Distributed_db.chain_db val rpc_directory : t RPC_directory.t - end module type STATIC = sig - val max_size_parsed_cache: int + val max_size_parsed_cache : int end -module Make(Static: STATIC)(Proto: Registered_protocol.T) - : T with module Proto = Proto -= struct - +module Make (Static : STATIC) (Proto : Registered_protocol.T) : + T with module Proto = Proto = struct module Proto = Proto (* used for rpc *) - module Proto_services = Block_services.Make(Proto)(Proto) + module Proto_services = Block_services.Make (Proto) (Proto) type operation = { - hash: Operation_hash.t ; - raw: Operation.t ; - protocol_data: Proto.operation_data ; + hash : Operation_hash.t; + raw : Operation.t; + protocol_data : Proto.operation_data } type result = @@ -94,76 +90,88 @@ module Make(Static: STATIC)(Proto: Registered_protocol.T) let result_encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Applied" (obj1 (req "receipt" Proto.operation_receipt_encoding)) (function Applied receipt -> Some receipt | _ -> None) - (fun receipt -> Applied receipt) ; - case (Tag 1) + (fun receipt -> Applied receipt); + case + (Tag 1) ~title:"Branch Delayed" (obj1 (req "error" (list Error_monad.error_encoding))) (function Branch_delayed error -> Some error | _ -> None) - (fun error -> Branch_delayed error) ; - case (Tag 2) + (fun error -> Branch_delayed error); + case + (Tag 2) ~title:"Branch Refused" (obj1 (req "error" (list Error_monad.error_encoding))) (function Branch_refused error -> Some error | _ -> None) - (fun error -> Branch_refused error) ; - case (Tag 3) + (fun error -> Branch_refused error); + case + (Tag 3) ~title:"Refused" (obj1 (req "error" (list Error_monad.error_encoding))) (function Refused error -> Some error | _ -> None) - (fun error -> Refused error) ; - case (Tag 4) + (fun error -> Refused error); + case + (Tag 4) ~title:"Duplicate" empty (function Duplicate -> Some () | _ -> None) - (fun () -> Duplicate) ; - case (Tag 5) + (fun () -> Duplicate); + case + (Tag 5) ~title:"Not_in_branch" empty (function Not_in_branch -> Some () | _ -> None) - (fun () -> Not_in_branch) ; - ] + (fun () -> Not_in_branch) ] let pp_result ppf = function - | Applied _ -> Format.pp_print_string ppf "applied" - | Branch_delayed _ -> Format.pp_print_string ppf "branch delayed" - | Branch_refused _ -> Format.pp_print_string ppf "branch refused" - | Refused _ -> Format.pp_print_string ppf "refused" - | Duplicate -> Format.pp_print_string ppf "duplicate" - | Not_in_branch -> Format.pp_print_string ppf "not in branch" + | Applied _ -> + Format.pp_print_string ppf "applied" + | Branch_delayed _ -> + Format.pp_print_string ppf "branch delayed" + | Branch_refused _ -> + Format.pp_print_string ppf "branch refused" + | Refused _ -> + Format.pp_print_string ppf "refused" + | Duplicate -> + Format.pp_print_string ppf "duplicate" + | Not_in_branch -> + Format.pp_print_string ppf "not in branch" let operation_encoding = let open Data_encoding in conv - (fun { hash ; raw ; protocol_data } -> - ( hash, raw, protocol_data )) - (fun ( hash, raw, protocol_data ) -> { hash ; raw ; protocol_data }) + (fun {hash; raw; protocol_data} -> (hash, raw, protocol_data)) + (fun (hash, raw, protocol_data) -> {hash; raw; protocol_data}) (obj3 (req "hash" Operation_hash.encoding) (req "raw" Operation.encoding) - 
(req "protocol_data" Proto.operation_data_encoding) - ) + (req "protocol_data" Proto.operation_data_encoding)) - module Log = Internal_event.Legacy_logging.Make(struct - let name = "node.mempool_validator" - end) + module Log = Internal_event.Legacy_logging.Make (struct + let name = "node.mempool_validator" + end) module Name = struct type t = Chain_id.t + let encoding = Chain_id.encoding + let base = let proto_hash = - let _: string = Format.flush_str_formatter () in - Format.fprintf Format.str_formatter "%a" Protocol_hash.pp Proto.hash; - Format.flush_str_formatter () in - [ "node"; "mempool"; "worker"; proto_hash ] + let (_ : string) = Format.flush_str_formatter () in + Format.fprintf Format.str_formatter "%a" Protocol_hash.pp Proto.hash ; + Format.flush_str_formatter () + in + ["node"; "mempool"; "worker"; proto_hash] + let pp = Chain_id.pp_short end module Request = struct - type 'a t = Validate : operation -> result t [@@ocaml.unboxed] type view = View : _ t -> view @@ -177,77 +185,98 @@ module Make(Static: STATIC)(Proto: Registered_protocol.T) (fun op -> View (Validate op)) operation_encoding - let pp ppf (View (Validate { hash ; _ })) = + let pp ppf (View (Validate {hash; _})) = Format.fprintf ppf "Validating new operation %a" Operation_hash.pp hash end module Event = struct type t = - | Request of (Request.view * Worker_types.request_status * error list option) + | Request of + (Request.view * Worker_types.request_status * error list option) | Debug of string let level req = match req with - | Debug _ -> Internal_event.Debug - | Request _ -> Internal_event.Info + | Debug _ -> + Internal_event.Debug + | Request _ -> + Internal_event.Info let encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Debug" (obj1 (req "message" string)) (function Debug msg -> Some msg | _ -> None) - (fun msg -> Debug msg) ; - case (Tag 1) + (fun msg -> Debug msg); + case + (Tag 1) ~title:"Request" (obj2 (req "request" Request.encoding) (req "status" Worker_types.request_status_encoding)) (function Request (req, t, None) -> Some (req, t) | _ -> None) - (fun (req, t) -> Request (req, t, None)) ; - case (Tag 2) + (fun (req, t) -> Request (req, t, None)); + case + (Tag 2) ~title:"Failed request" (obj3 (req "error" RPC_error.encoding) (req "failed_request" Request.encoding) (req "status" Worker_types.request_status_encoding)) - (function Request (req, t, Some errs) -> Some (errs, req, t) | _ -> None) + (function + | Request (req, t, Some errs) -> Some (errs, req, t) | _ -> None) (fun (errs, req, t) -> Request (req, t, Some errs)) ] let pp ppf = function - | Debug msg -> Format.fprintf ppf "%s" msg - | Request (view, { pushed ; treated ; completed }, None) -> - Format.fprintf ppf + | Debug msg -> + Format.fprintf ppf "%s" msg + | Request (view, {pushed; treated; completed}, None) -> + Format.fprintf + ppf "@[<v 0>%a@,Pushed: %a, Treated: %a, Completed: %a@]" - Request.pp view - Time.System.pp_hum pushed Time.System.pp_hum treated Time.System.pp_hum completed - | Request (view, { pushed ; treated ; completed }, Some errors) -> - Format.fprintf ppf + Request.pp + view + Time.System.pp_hum + pushed + Time.System.pp_hum + treated + Time.System.pp_hum + completed + | Request (view, {pushed; treated; completed}, Some errors) -> + Format.fprintf + ppf "@[<v 0>%a@,Pushed: %a, Treated: %a, Failed: %a@,Errors: %a@]" - Request.pp view - Time.System.pp_hum pushed Time.System.pp_hum treated Time.System.pp_hum completed - (Format.pp_print_list Error_monad.pp) errors + Request.pp + view + 
Time.System.pp_hum + pushed + Time.System.pp_hum + treated + Time.System.pp_hum + completed + (Format.pp_print_list Error_monad.pp) + errors end (* parsed operations' cache. used for memoization *) module ParsedCache = struct - type t = { - table: operation tzresult Operation_hash.Table.t ; - ring: Operation_hash.t Ring.t ; + table : operation tzresult Operation_hash.Table.t; + ring : Operation_hash.t Ring.t } - let create () : t = { - table = Operation_hash.Table.create Static.max_size_parsed_cache ; - ring = Ring.create Static.max_size_parsed_cache ; - } + let create () : t = + { table = Operation_hash.Table.create Static.max_size_parsed_cache; + ring = Ring.create Static.max_size_parsed_cache } let add t raw_op parsed_op = let hash = Operation.hash raw_op in Option.iter ~f:(Operation_hash.Table.remove t.table) - (Ring.add_and_return_erased t.ring hash); + (Ring.add_and_return_erased t.ring hash) ; Operation_hash.Table.replace t.table hash parsed_op let find_opt t raw_op = @@ -258,31 +287,22 @@ module Make(Static: STATIC)(Proto: Registered_protocol.T) (* NOTE: hashes are not removed from the ring. As a result, the cache size * bound can be lowered. This is a non-issue because it's only a cache. *) Operation_hash.Table.remove t.table hash - end (* validated operations' cache. used for memoization *) module ValidatedCache = struct - type t = (result * Operation.t) Operation_hash.Table.t let encoding = let open Data_encoding in - Operation_hash.Table.encoding ( - tup2 - result_encoding - Operation.encoding - ) + Operation_hash.Table.encoding (tup2 result_encoding Operation.encoding) let pp break ppf table = let open Format in Operation_hash.Table.iter (fun h (r, _) -> - fprintf ppf "Operation %a: %a" - Operation_hash.pp_short h - pp_result r; - break ppf - ) + fprintf ppf "Operation %a: %a" Operation_hash.pp_short h pp_result r ; + break ppf) table let create () = Operation_hash.Table.create 1000 @@ -290,152 +310,144 @@ module Make(Static: STATIC)(Proto: Registered_protocol.T) let add t parsed_op result = Operation_hash.Table.replace t parsed_op.hash result - let find_opt t parsed_op = - Operation_hash.Table.find_opt t parsed_op.hash + let find_opt t parsed_op = Operation_hash.Table.find_opt t parsed_op.hash - let iter f t = - Operation_hash.Table.iter f t + let iter f t = Operation_hash.Table.iter f t let to_mempool t = - let empty = { - Proto_services.Mempool.applied = [] ; - refused = Operation_hash.Map.empty ; - branch_refused = Operation_hash.Map.empty ; - branch_delayed = Operation_hash.Map.empty ; - unprocessed = Operation_hash.Map.empty ; - } in + let empty = + { Proto_services.Mempool.applied = []; + refused = Operation_hash.Map.empty; + branch_refused = Operation_hash.Map.empty; + branch_delayed = Operation_hash.Map.empty; + unprocessed = Operation_hash.Map.empty } + in let map_op op = let protocol_data = Data_encoding.Binary.of_bytes_exn Proto.operation_data_encoding - op.Operation.proto in - { Proto.shell = op.shell ; protocol_data } in + op.Operation.proto + in + {Proto.shell = op.shell; protocol_data} + in Operation_hash.Table.fold - (fun hash (result,raw_op) acc -> - let proto_op = map_op raw_op in - match result with - | Applied _ -> { - acc with - Proto_services.Mempool.applied = - (hash, proto_op)::acc.Proto_services.Mempool.applied - } - | Branch_refused err -> { - acc with - Proto_services.Mempool.branch_refused = - Operation_hash.Map.add - hash - (proto_op,err) - acc.Proto_services.Mempool.branch_refused - } - | Branch_delayed err -> { - acc with - 
Proto_services.Mempool.branch_delayed = - Operation_hash.Map.add - hash - (proto_op,err) - acc.Proto_services.Mempool.branch_delayed - } - | Refused err -> { - acc with - Proto_services.Mempool.refused = - Operation_hash.Map.add - hash - (proto_op,err) - acc.Proto_services.Mempool.refused - } - | _ -> acc - ) t empty + (fun hash (result, raw_op) acc -> + let proto_op = map_op raw_op in + match result with + | Applied _ -> + { acc with + Proto_services.Mempool.applied = + (hash, proto_op) :: acc.Proto_services.Mempool.applied } + | Branch_refused err -> + { acc with + Proto_services.Mempool.branch_refused = + Operation_hash.Map.add + hash + (proto_op, err) + acc.Proto_services.Mempool.branch_refused } + | Branch_delayed err -> + { acc with + Proto_services.Mempool.branch_delayed = + Operation_hash.Map.add + hash + (proto_op, err) + acc.Proto_services.Mempool.branch_delayed } + | Refused err -> + { acc with + Proto_services.Mempool.refused = + Operation_hash.Map.add + hash + (proto_op, err) + acc.Proto_services.Mempool.refused } + | _ -> + acc) + t + empty let clear t = Operation_hash.Table.clear t - end module Types = struct - type parameters = { - limits : limits ; - chain_db : Distributed_db.chain_db ; - validation_state : Proto.validation_state ; + limits : limits; + chain_db : Distributed_db.chain_db; + validation_state : Proto.validation_state } (* internal worker state *) - type state = - { - (* state of the validator. this is updated at each apply_operation *) - mutable validation_state : Proto.validation_state ; - - cache : ValidatedCache.t ; - - (* live blocks and operations, initialized at worker launch *) - live_blocks : Block_hash.Set.t ; - live_operations : Operation_hash.Set.t ; - - operation_stream: ( - result * - Operation.shell_header * - Proto.operation_data - ) Lwt_watcher.input; - - parameters : parameters ; - } + type state = { + (* state of the validator. 
this is updated at each apply_operation *) + mutable validation_state : Proto.validation_state; + cache : ValidatedCache.t; + (* live blocks and operations, initialized at worker launch *) + live_blocks : Block_hash.Set.t; + live_operations : Operation_hash.Set.t; + operation_stream : + (result * Operation.shell_header * Proto.operation_data) + Lwt_watcher.input; + parameters : parameters + } - type view = { cache : ValidatedCache.t } + type view = {cache : ValidatedCache.t} - let view (state : state) _ : view = { cache = state.cache } + let view (state : state) _ : view = {cache = state.cache} let encoding = let open Data_encoding in conv - (fun { cache } -> cache) - (fun cache -> { cache }) + (fun {cache} -> cache) + (fun cache -> {cache}) ValidatedCache.encoding - let pp ppf { cache } = + let pp ppf {cache} = ValidatedCache.pp (fun ppf -> - Format.pp_print_string ppf ";"; - Format.pp_print_space ppf ()) + Format.pp_print_string ppf ";" ; + Format.pp_print_space ppf ()) ppf cache - end module Worker = Worker.Make (Name) (Event) (Request) (Types) - open Types type t = Worker.infinite Worker.queue Worker.t let parsed_cache = ParsedCache.create () - let shutdown w = - Worker.shutdown w + let shutdown w = Worker.shutdown w (*** prevalidation ****) open Validation_errors let create ?protocol_data ~predecessor ~timestamp () = let { Block_header.shell = - { fitness = predecessor_fitness ; - timestamp = predecessor_timestamp ; - level = predecessor_level ; _ } ; _ } = - State.Block.header predecessor in - State.Block.context predecessor >>= fun predecessor_context -> + { fitness = predecessor_fitness; + timestamp = predecessor_timestamp; + level = predecessor_level; + _ }; + _ } = + State.Block.header predecessor + in + State.Block.context predecessor + >>= fun predecessor_context -> let predecessor_hash = State.Block.hash predecessor in - begin - match protocol_data with - | None -> return_none + ( match protocol_data with + | None -> + return_none + | Some protocol_data -> ( + match + Data_encoding.Binary.of_bytes + Proto.block_header_data_encoding + protocol_data + with + | None -> + failwith "Invalid block header" | Some protocol_data -> - match - Data_encoding.Binary.of_bytes - Proto.block_header_data_encoding - protocol_data - with - | None -> failwith "Invalid block header" - | Some protocol_data -> return_some protocol_data - end >>=? fun protocol_data -> + return_some protocol_data ) ) + >>=? 
fun protocol_data -> Proto.begin_construction - ~chain_id: (State.Block.chain_id predecessor) + ~chain_id:(State.Block.chain_id predecessor) ~predecessor_context ~predecessor_timestamp ~predecessor_fitness @@ -448,19 +460,25 @@ module Make(Static: STATIC)(Proto: Registered_protocol.T) let apply_operation state op = if Operation_hash.Set.mem op.hash state.live_operations then Lwt.return (None, Duplicate) - else if not (Block_hash.Set.mem op.raw.Operation.shell.branch state.live_blocks) then - Lwt.return (None,Not_in_branch) + else if + not (Block_hash.Set.mem op.raw.Operation.shell.branch state.live_blocks) + then Lwt.return (None, Not_in_branch) else - Proto.apply_operation state.validation_state - { shell = op.raw.shell ; protocol_data = op.protocol_data } >|= function + Proto.apply_operation + state.validation_state + {shell = op.raw.shell; protocol_data = op.protocol_data} + >|= function | Ok (validation_state, receipt) -> (Some validation_state, Applied receipt) - | Error errors -> - (None, - match classify_errors errors with - | `Branch -> Branch_refused errors - | `Permanent -> Refused errors - | `Temporary -> Branch_delayed errors) + | Error errors -> ( + ( None, + match classify_errors errors with + | `Branch -> + Branch_refused errors + | `Permanent -> + Refused errors + | `Temporary -> + Branch_delayed errors ) ) (*** end prevalidation ***) @@ -468,80 +486,84 @@ module Make(Static: STATIC)(Proto: Registered_protocol.T) let hash = Operation.hash raw_op in let size = Data_encoding.Binary.length Operation.encoding raw_op in if size > Proto.max_operation_data_length then - error (Oversized_operation - { size ; max = Proto.max_operation_data_length }) + error (Oversized_operation {size; max = Proto.max_operation_data_length}) else - match Data_encoding.Binary.of_bytes - Proto.operation_data_encoding - raw_op.Operation.proto with - | None -> error Parse_error + match + Data_encoding.Binary.of_bytes + Proto.operation_data_encoding + raw_op.Operation.proto + with + | None -> + error Parse_error | Some protocol_data -> - ok { hash ; raw = raw_op ; protocol_data } + ok {hash; raw = raw_op; protocol_data} (* this function update the internal state of the worker *) let validate_helper w parsed_op = let state = Worker.state w in - apply_operation state parsed_op >>= fun (validation_state, result) -> - begin - match validation_state with - | Some validation_state -> state.validation_state <- validation_state - | None -> () - end ; + apply_operation state parsed_op + >>= fun (validation_state, result) -> + ( match validation_state with + | Some validation_state -> + state.validation_state <- validation_state + | None -> + () ) ; Lwt.return result - let notify_helper w result { Operation.shell ; proto } = + let notify_helper w result {Operation.shell; proto} = let state = Worker.state w in (* this function is called by on_validate where we take care of the error *) let protocol_data = - Data_encoding.Binary.of_bytes_exn - Proto.operation_data_encoding - proto in + Data_encoding.Binary.of_bytes_exn Proto.operation_data_encoding proto + in Lwt_watcher.notify state.operation_stream (result, shell, protocol_data) (* memoization is done only at on_* level *) let on_validate w parsed_op = let state = Worker.state w in match ValidatedCache.find_opt state.cache parsed_op with - | None | Some ((Branch_delayed _),_) -> - validate_helper w parsed_op >>= fun result -> - ValidatedCache.add state.cache parsed_op (result, parsed_op.raw); + | None | Some (Branch_delayed _, _) -> + validate_helper w parsed_op + 
>>= fun result -> + ValidatedCache.add state.cache parsed_op (result, parsed_op.raw) ; (* operations are notified only the first time *) notify_helper w result parsed_op.raw ; Lwt.return result - | Some (result,_) -> Lwt.return result + | Some (result, _) -> + Lwt.return result (* worker's handlers *) - let on_request : - type r. t -> r Request.t -> r tzresult Lwt.t = fun w request -> + let on_request : type r. t -> r Request.t -> r tzresult Lwt.t = + fun w request -> match request with - | Request.Validate parsed_op -> on_validate w parsed_op >>= return + | Request.Validate parsed_op -> + on_validate w parsed_op >>= return - let on_launch (_ : t) (_ : Name.t) ( { chain_db ; validation_state ; _ } as parameters ) = + let on_launch (_ : t) (_ : Name.t) + ({chain_db; validation_state; _} as parameters) = let chain_state = Distributed_db.chain_state chain_db in - Chain.data chain_state >>= fun { - current_mempool = _mempool ; - live_blocks ; live_operations ; _ } -> + Chain.data chain_state + >>= fun {current_mempool = _mempool; live_blocks; live_operations; _} -> (* remove all operations that are already included *) - Operation_hash.Set.iter (fun hash -> - ParsedCache.rem parsed_cache hash - ) live_operations; - return { - validation_state ; - cache = ValidatedCache.create () ; - live_blocks ; + Operation_hash.Set.iter + (fun hash -> ParsedCache.rem parsed_cache hash) live_operations ; - operation_stream = Lwt_watcher.create_input (); - parameters - } + return + { validation_state; + cache = ValidatedCache.create (); + live_blocks; + live_operations; + operation_stream = Lwt_watcher.create_input (); + parameters } let on_close w = let state = Worker.state w in - Lwt_watcher.shutdown_input state.operation_stream; - ValidatedCache.iter (fun hash _ -> - Distributed_db.Operation.clear_or_cancel - state.parameters.chain_db hash) + Lwt_watcher.shutdown_input state.operation_stream ; + ValidatedCache.iter + (fun hash _ -> + Distributed_db.Operation.clear_or_cancel state.parameters.chain_db hash) state.cache ; - ValidatedCache.clear state.cache; + ValidatedCache.clear state.cache ; Lwt.return_unit let on_error w r st errs = @@ -554,26 +576,34 @@ module Make(Static: STATIC)(Proto: Registered_protocol.T) let table = Worker.create_table Queue - let create limits chain_db = + let create limits chain_db = let chain_state = Distributed_db.chain_state chain_db in let chain_id = State.Chain.id chain_state in let module Handlers = struct type self = t + let on_launch = on_launch + let on_close = on_close + let on_error = on_error + let on_completion = on_completion + let on_no_request _ = return_unit + let on_request = on_request end in - Chain.data chain_state >>= fun { current_head = predecessor ; _ } -> + Chain.data chain_state + >>= fun {current_head = predecessor; _} -> let timestamp = Time.System.to_protocol (Systime_os.now ()) in - create ~predecessor ~timestamp () >>=? fun validation_state -> + create ~predecessor ~timestamp () + >>=? 
fun validation_state -> Worker.launch table limits.worker_limits chain_id - { limits ; chain_db ; validation_state } + {limits; chain_db; validation_state} (module Handlers) (* Exporting functions *) @@ -583,13 +613,13 @@ module Make(Static: STATIC)(Proto: Registered_protocol.T) (* atomic parse + memoization *) let parse raw_op = - begin match ParsedCache.find_opt parsed_cache raw_op with - | None -> - let parsed_op = parse_helper raw_op in - ParsedCache.add parsed_cache raw_op parsed_op; - parsed_op - | Some parsed_op -> parsed_op - end + match ParsedCache.find_opt parsed_cache raw_op with + | None -> + let parsed_op = parse_helper raw_op in + ParsedCache.add parsed_cache raw_op parsed_op ; + parsed_op + | Some parsed_op -> + parsed_op let chain_db t = let state = Worker.state t in @@ -600,38 +630,43 @@ module Make(Static: STATIC)(Proto: Registered_protocol.T) RPC_directory.empty (Proto_services.S.Mempool.pending_operations RPC_path.open_root) (fun w () () -> - let state = Worker.state w in - RPC_answer.return (ValidatedCache.to_mempool state.cache) - ) + let state = Worker.state w in + RPC_answer.return (ValidatedCache.to_mempool state.cache)) let monitor_rpc_directory : t RPC_directory.t = RPC_directory.gen_register RPC_directory.empty (Proto_services.S.Mempool.monitor_operations RPC_path.open_root) (fun w params () -> - let state = Worker.state w in - let filter_result = function - | Applied _ -> params#applied - | Refused _ -> params#branch_refused - | Branch_refused _ -> params#refused - | Branch_delayed _ -> params#branch_delayed - | _ -> false in - - let op_stream, stopper = Lwt_watcher.create_stream state.operation_stream in - let shutdown () = Lwt_watcher.shutdown stopper in - let next () = - Lwt_stream.get op_stream >>= function - | Some (kind, shell, protocol_data) when filter_result kind -> - Lwt.return_some [ { Proto.shell ; protocol_data } ] - | _ -> Lwt.return_none in - RPC_answer.return_stream { next ; shutdown } - ) + let state = Worker.state w in + let filter_result = function + | Applied _ -> + params#applied + | Refused _ -> + params#branch_refused + | Branch_refused _ -> + params#refused + | Branch_delayed _ -> + params#branch_delayed + | _ -> + false + in + let (op_stream, stopper) = + Lwt_watcher.create_stream state.operation_stream + in + let shutdown () = Lwt_watcher.shutdown stopper in + let next () = + Lwt_stream.get op_stream + >>= function + | Some (kind, shell, protocol_data) when filter_result kind -> + Lwt.return_some [{Proto.shell; protocol_data}] + | _ -> + Lwt.return_none + in + RPC_answer.return_stream {next; shutdown}) (* /mempool/<chain_id>/pending /mempool/<chain_id>/monitor *) let rpc_directory = - RPC_directory.merge - pending_rpc_directory - monitor_rpc_directory - + RPC_directory.merge pending_rpc_directory monitor_rpc_directory end diff --git a/src/lib_shell/mempool_worker.mli b/src/lib_shell/mempool_worker.mli index 61fc1e96b7ab870c2c1e4e156c3192cbba522e9b..0d92f5035589850cedae9d72369ab28dc6420ec7 100644 --- a/src/lib_shell/mempool_worker.mli +++ b/src/lib_shell/mempool_worker.mli @@ -24,20 +24,17 @@ (* *) (*****************************************************************************) -type limits = { - worker_limits : Worker_types.limits ; -} +type limits = {worker_limits : Worker_types.limits} module type T = sig - - module Proto: Registered_protocol.T + module Proto : Registered_protocol.T type t type operation = private { - hash: Operation_hash.t ; - raw: Operation.t ; - protocol_data: Proto.operation_data ; + hash : Operation_hash.t; + raw 
: Operation.t; + protocol_data : Proto.operation_data } type result = @@ -47,10 +44,12 @@ module type T = sig | Refused of error list | Duplicate | Not_in_branch + val result_encoding : result Data_encoding.t (** Creates/tear-down a new mempool validator context. *) val create : limits -> Distributed_db.chain_db -> t tzresult Lwt.t + val shutdown : t -> unit Lwt.t (** parse a new operation *) @@ -62,11 +61,11 @@ module type T = sig val chain_db : t -> Distributed_db.chain_db val rpc_directory : t RPC_directory.t - end module type STATIC = sig - val max_size_parsed_cache: int + val max_size_parsed_cache : int end -module Make (Static : STATIC) (Proto : Registered_protocol.T) : T with module Proto = Proto +module Make (Static : STATIC) (Proto : Registered_protocol.T) : + T with module Proto = Proto diff --git a/src/lib_shell/monitor_directory.ml b/src/lib_shell/monitor_directory.ml index dbf63e1384b3f2d61b1c52cfc5f2b75940a83035..5cb56bc723f60311797656e7e35956c88fa495fa 100644 --- a/src/lib_shell/monitor_directory.ml +++ b/src/lib_shell/monitor_directory.ml @@ -25,167 +25,204 @@ (*****************************************************************************) let build_rpc_directory validator mainchain_validator = - let distributed_db = Validator.distributed_db validator in let state = Distributed_db.state distributed_db in - let dir : unit RPC_directory.t ref = ref RPC_directory.empty in let gen_register0 s f = - dir := RPC_directory.gen_register !dir s (fun () p q -> f p q) in + dir := RPC_directory.gen_register !dir s (fun () p q -> f p q) + in let gen_register1 s f = - dir := RPC_directory.gen_register !dir s (fun ((), a) p q -> f a p q) in - - gen_register0 Monitor_services.S.bootstrapped begin fun () () -> - let block_stream, stopper = - Chain_validator.new_head_watcher mainchain_validator in - let first_run = ref true in - let next () = - if !first_run then begin - first_run := false ; - let chain_state = Chain_validator.chain_state mainchain_validator in - Chain.head chain_state >>= fun head -> - let head_hash = State.Block.hash head in - let head_header = State.Block.header head in - Lwt.return_some (head_hash, head_header.shell.timestamp) - end else begin - Lwt.pick [ - ( Lwt_stream.get block_stream >|= - Option.map ~f:(fun b -> - (State.Block.hash b, (State.Block.header b).shell.timestamp)) ) ; - (Chain_validator.bootstrapped mainchain_validator >|= fun () -> None) ; - ] - end in - let shutdown () = Lwt_watcher.shutdown stopper in - RPC_answer.return_stream { next ; shutdown } - end ; - - gen_register0 Monitor_services.S.valid_blocks begin fun q () -> - let block_stream, stopper = State.watcher state in - let shutdown () = Lwt_watcher.shutdown stopper in - let in_chains block = - match q#chains with - | [] -> Lwt.return_true - | chains -> - let chain_id = State.Block.chain_id block in - Lwt_list.filter_map_p (Chain_directory.get_chain_id_opt state) chains >>= fun chains -> - Lwt.return (List.exists (Chain_id.equal chain_id) chains) in - let in_protocols block = - match q#protocols with - | [] -> Lwt.return_true - | protocols -> - State.Block.predecessor block >>= function - | None -> Lwt.return_false (* won't happen *) - | Some pred -> - State.Block.context pred >>= fun context -> - Context.get_protocol context >>= fun protocol -> - Lwt.return (List.exists (Protocol_hash.equal protocol) protocols) in - let in_next_protocols block = - match q#next_protocols with - | [] -> Lwt.return_true - | protocols -> - State.Block.context block >>= fun context -> - Context.get_protocol context >>= 
fun next_protocol -> - Lwt.return (List.exists (Protocol_hash.equal next_protocol) protocols) in - let stream = - Lwt_stream.filter_map_s - (fun block -> - in_chains block >>= fun in_chains -> - in_next_protocols block >>= fun in_next_protocols -> - in_protocols block >>= fun in_protocols -> - if in_chains && in_protocols && in_next_protocols then - Lwt.return_some - ((State.Block.chain_id block, State.Block.hash block), - State.Block.header block) - else - Lwt.return_none) - block_stream in - let next () = Lwt_stream.get stream in - RPC_answer.return_stream { next ; shutdown } - end ; - - gen_register1 Monitor_services.S.heads begin fun chain q () -> - (* TODO: when `chain = `Test`, should we reset then stream when - the `testnet` change, or dias we currently do ?? *) - Chain_directory.get_chain state chain >>= fun chain -> - match Validator.get validator (State.Chain.id chain) with - | Error _ -> Lwt.fail Not_found - | Ok chain_validator -> - let block_stream, stopper = Chain_validator.new_head_watcher chain_validator in - Chain.head chain >>= fun head -> - let shutdown () = Lwt_watcher.shutdown stopper in - let in_next_protocols block = - match q#next_protocols with - | [] -> Lwt.return_true - | protocols -> - State.Block.context block >>= fun context -> - Context.get_protocol context >>= fun next_protocol -> - Lwt.return (List.exists (Protocol_hash.equal next_protocol) protocols) in - let stream = - Lwt_stream.filter_map_s - (fun block -> - in_next_protocols block >>= fun in_next_protocols -> - if in_next_protocols then - Lwt.return_some (State.Block.hash block, State.Block.header block) - else - Lwt.return_none) - block_stream in - in_next_protocols head >>= fun first_block_is_among_next_protocols -> - let first_call = - (* Skip the first block if this is false *) - ref first_block_is_among_next_protocols in - let next () = - if !first_call then begin - first_call := false ; Lwt.return_some (State.Block.hash head, State.Block.header head) - end else - Lwt_stream.get stream in - RPC_answer.return_stream { next ; shutdown } - end ; - - gen_register0 Monitor_services.S.protocols begin fun () () -> - let stream, stopper = State.Protocol.watcher state in - let shutdown () = Lwt_watcher.shutdown stopper in - let next () = Lwt_stream.get stream in - RPC_answer.return_stream { next ; shutdown } - end ; - - gen_register0 Monitor_services.S.commit_hash begin fun () () -> - RPC_answer.return Tezos_base.Current_git_info.commit_hash end ; - - gen_register0 Monitor_services.S.active_chains begin fun () () -> - let stream, stopper = Validator.chains_watcher validator in - let shutdown () = Lwt_watcher.shutdown stopper in - let first_call = - (* Only notify the newly created chains if this is false *) - ref true - in - let next () = - let convert (chain_id, b) = - if not b then - Lwt.return (Monitor_services.Stopping chain_id) - else if Chain_id.equal (State.Chain.main state) chain_id then - Lwt.return (Monitor_services.Active_main chain_id) + dir := RPC_directory.gen_register !dir s (fun ((), a) p q -> f a p q) + in + gen_register0 Monitor_services.S.bootstrapped (fun () () -> + let (block_stream, stopper) = + Chain_validator.new_head_watcher mainchain_validator + in + let first_run = ref true in + let next () = + if !first_run then ( + first_run := false ; + let chain_state = Chain_validator.chain_state mainchain_validator in + Chain.head chain_state + >>= fun head -> + let head_hash = State.Block.hash head in + let head_header = State.Block.header head in + Lwt.return_some (head_hash, 
head_header.shell.timestamp) ) else - State.Chain.get_exn state chain_id >>= fun chain_state -> - let { State.Chain.protocol ; _ } = State.Chain.genesis chain_state in - let expiration_date = Option.unopt_exn - (Invalid_argument - (Format.asprintf "Monitor.active_chains: no expiration date for the chain %a" - Chain_id.pp chain_id)) - (State.Chain.expiration chain_state) + Lwt.pick + [ Lwt_stream.get block_stream + >|= Option.map ~f:(fun b -> + ( State.Block.hash b, + (State.Block.header b).shell.timestamp )); + ( Chain_validator.bootstrapped mainchain_validator + >|= fun () -> None ) ] + in + let shutdown () = Lwt_watcher.shutdown stopper in + RPC_answer.return_stream {next; shutdown}) ; + gen_register0 Monitor_services.S.valid_blocks (fun q () -> + let (block_stream, stopper) = State.watcher state in + let shutdown () = Lwt_watcher.shutdown stopper in + let in_chains block = + match q#chains with + | [] -> + Lwt.return_true + | chains -> + let chain_id = State.Block.chain_id block in + Lwt_list.filter_map_p + (Chain_directory.get_chain_id_opt state) + chains + >>= fun chains -> + Lwt.return (List.exists (Chain_id.equal chain_id) chains) + in + let in_protocols block = + match q#protocols with + | [] -> + Lwt.return_true + | protocols -> ( + State.Block.predecessor block + >>= function + | None -> + Lwt.return_false (* won't happen *) + | Some pred -> + State.Block.context pred + >>= fun context -> + Context.get_protocol context + >>= fun protocol -> + Lwt.return + (List.exists (Protocol_hash.equal protocol) protocols) ) + in + let in_next_protocols block = + match q#next_protocols with + | [] -> + Lwt.return_true + | protocols -> + State.Block.context block + >>= fun context -> + Context.get_protocol context + >>= fun next_protocol -> + Lwt.return + (List.exists (Protocol_hash.equal next_protocol) protocols) + in + let stream = + Lwt_stream.filter_map_s + (fun block -> + in_chains block + >>= fun in_chains -> + in_next_protocols block + >>= fun in_next_protocols -> + in_protocols block + >>= fun in_protocols -> + if in_chains && in_protocols && in_next_protocols then + Lwt.return_some + ( (State.Block.chain_id block, State.Block.hash block), + State.Block.header block ) + else Lwt.return_none) + block_stream + in + let next () = Lwt_stream.get stream in + RPC_answer.return_stream {next; shutdown}) ; + gen_register1 Monitor_services.S.heads (fun chain q () -> + (* TODO: when `chain = `Test`, should we reset then stream when + the `testnet` change, or dias we currently do ?? 
*) + Chain_directory.get_chain state chain + >>= fun chain -> + match Validator.get validator (State.Chain.id chain) with + | Error _ -> + Lwt.fail Not_found + | Ok chain_validator -> + let (block_stream, stopper) = + Chain_validator.new_head_watcher chain_validator + in + Chain.head chain + >>= fun head -> + let shutdown () = Lwt_watcher.shutdown stopper in + let in_next_protocols block = + match q#next_protocols with + | [] -> + Lwt.return_true + | protocols -> + State.Block.context block + >>= fun context -> + Context.get_protocol context + >>= fun next_protocol -> + Lwt.return + (List.exists (Protocol_hash.equal next_protocol) protocols) + in + let stream = + Lwt_stream.filter_map_s + (fun block -> + in_next_protocols block + >>= fun in_next_protocols -> + if in_next_protocols then + Lwt.return_some + (State.Block.hash block, State.Block.header block) + else Lwt.return_none) + block_stream in - Lwt.return - (Monitor_services.Active_test { chain = chain_id ; protocol ; expiration_date }) + in_next_protocols head + >>= fun first_block_is_among_next_protocols -> + let first_call = + (* Skip the first block if this is false *) + ref first_block_is_among_next_protocols + in + let next () = + if !first_call then ( + first_call := false ; + Lwt.return_some (State.Block.hash head, State.Block.header head) + ) + else Lwt_stream.get stream + in + RPC_answer.return_stream {next; shutdown}) ; + gen_register0 Monitor_services.S.protocols (fun () () -> + let (stream, stopper) = State.Protocol.watcher state in + let shutdown () = Lwt_watcher.shutdown stopper in + let next () = Lwt_stream.get stream in + RPC_answer.return_stream {next; shutdown}) ; + gen_register0 Monitor_services.S.commit_hash (fun () () -> + RPC_answer.return Tezos_base.Current_git_info.commit_hash) ; + gen_register0 Monitor_services.S.active_chains (fun () () -> + let (stream, stopper) = Validator.chains_watcher validator in + let shutdown () = Lwt_watcher.shutdown stopper in + let first_call = + (* Only notify the newly created chains if this is false *) + ref true in - if !first_call then begin - first_call := false ; - Lwt_list.map_p (fun c -> convert (c, true)) (Validator.get_active_chains validator) >>= fun l -> - Lwt.return_some l - end else - Lwt_stream.get stream >>= function - | None -> Lwt.return_none - | Some c -> convert c >>= fun status -> Lwt.return_some [ status ] - in - RPC_answer.return_stream { next ; shutdown } - end ; - + let next () = + let convert (chain_id, b) = + if not b then Lwt.return (Monitor_services.Stopping chain_id) + else if Chain_id.equal (State.Chain.main state) chain_id then + Lwt.return (Monitor_services.Active_main chain_id) + else + State.Chain.get_exn state chain_id + >>= fun chain_state -> + let {State.Chain.protocol; _} = State.Chain.genesis chain_state in + let expiration_date = + Option.unopt_exn + (Invalid_argument + (Format.asprintf + "Monitor.active_chains: no expiration date for the \ + chain %a" + Chain_id.pp + chain_id)) + (State.Chain.expiration chain_state) + in + Lwt.return + (Monitor_services.Active_test + {chain = chain_id; protocol; expiration_date}) + in + if !first_call then ( + first_call := false ; + Lwt_list.map_p + (fun c -> convert (c, true)) + (Validator.get_active_chains validator) + >>= fun l -> Lwt.return_some l ) + else + Lwt_stream.get stream + >>= function + | None -> + Lwt.return_none + | Some c -> + convert c >>= fun status -> Lwt.return_some [status] + in + RPC_answer.return_stream {next; shutdown}) ; !dir diff --git a/src/lib_shell/monitor_directory.mli 
b/src/lib_shell/monitor_directory.mli index bf904f5f9aede4258390ebd892a17a4974a28d76..0fcfa8b567442d44a76b0bf14ee79869940cf9ed 100644 --- a/src/lib_shell/monitor_directory.mli +++ b/src/lib_shell/monitor_directory.mli @@ -23,5 +23,5 @@ (* *) (*****************************************************************************) -val build_rpc_directory: +val build_rpc_directory : Validator.t -> Chain_validator.t -> unit RPC_directory.t diff --git a/src/lib_shell/node.ml b/src/lib_shell/node.ml index 36be3533b3871a5c0d1f077a7924f81ab1701fa0..2b10958b68adb9c7cbc4da2de990bc58d60a1889 100644 --- a/src/lib_shell/node.ml +++ b/src/lib_shell/node.ml @@ -31,256 +31,280 @@ open Tezos_base module Initialization_event = struct type t = { - time_stamp : float ; - status : [ `P2p_layer_disabled | `Bootstrapping | `P2p_maintain_started ] ; + time_stamp : float; + status : [`P2p_layer_disabled | `Bootstrapping | `P2p_maintain_started] } - let status_names = [ - "p2p_layer_disabled", `P2p_layer_disabled ; - "bootstrapping", `Bootstrapping ; - "p2p_maintain_started", `P2p_maintain_started ; - ] + + let status_names = + [ ("p2p_layer_disabled", `P2p_layer_disabled); + ("bootstrapping", `Bootstrapping); + ("p2p_maintain_started", `P2p_maintain_started) ] + module Definition = struct let name = "shell-node" + type nonrec t = t + let encoding = let open Data_encoding in let v0_encoding = conv - (function { time_stamp ; status } -> time_stamp, status) - (fun (time_stamp, status) -> { time_stamp ; status } ) + (function {time_stamp; status} -> (time_stamp, status)) + (fun (time_stamp, status) -> {time_stamp; status}) (obj2 (req "time-stamp" float) - (req "status" - (string_enum status_names))) in + (req "status" (string_enum status_names))) + in With_version.(encoding ~name (first_version v0_encoding)) - let pp ppf { status ; _ } = - Format.fprintf ppf "%s initialization: %s" - name (List.find (fun (_, s) -> s = status) status_names |> fst) + + let pp ppf {status; _} = + Format.fprintf + ppf + "%s initialization: %s" + name + (List.find (fun (_, s) -> s = status) status_names |> fst) + let doc = "Status of the initialization of the P2P layer." 
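(* Editor's note -- not part of the patch: a minimal sketch of how an event
   defined through a [Definition] module like the one above is emitted once
   [Internal_event.Make] has been applied (the [Event] module produced a few
   lines further down in this hunk). The status value and the bare error
   handling here are illustrative assumptions only; the wrapper actually used
   by this file is [lwt_emit] below, which also reports emission failures.

     Event.emit (fun () ->
         {time_stamp = Unix.gettimeofday (); status = `Bootstrapping})
     >>= function
     | Ok () -> Lwt.return_unit
     | Error _ -> Lwt.return_unit
*)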
+ let level _ = Internal_event.Notice end - module Event = Internal_event.Make(Definition) + + module Event = Internal_event.Make (Definition) + let lwt_emit status = let time_stamp = Unix.gettimeofday () in - Event.emit (fun () -> { time_stamp ; status }) >>= function - | Ok () -> Lwt.return_unit + Event.emit (fun () -> {time_stamp; status}) + >>= function + | Ok () -> + Lwt.return_unit | Error el -> - Format.kasprintf Lwt.fail_with "Initialization_event.emit: %a" - pp_print_error el + Format.kasprintf + Lwt.fail_with + "Initialization_event.emit: %a" + pp_print_error + el end - type t = { - state: State.t ; - distributed_db: Distributed_db.t ; - validator: Validator.t ; - mainchain_validator: Chain_validator.t ; - p2p: Distributed_db.p2p ; (* For P2P RPCs *) - shutdown: unit -> unit Lwt.t ; + state : State.t; + distributed_db : Distributed_db.t; + validator : Validator.t; + mainchain_validator : Chain_validator.t; + p2p : Distributed_db.p2p; + (* For P2P RPCs *) + shutdown : unit -> unit Lwt.t } +let peer_metadata_cfg : _ P2p.peer_meta_config = + { peer_meta_encoding = Peer_metadata.encoding; + peer_meta_initial = Peer_metadata.empty; + score = Peer_metadata.score } -let peer_metadata_cfg : _ P2p.peer_meta_config = { - peer_meta_encoding = Peer_metadata.encoding ; - peer_meta_initial = Peer_metadata.empty ; - score = Peer_metadata.score ; -} - -let connection_metadata_cfg cfg : _ P2p.conn_meta_config = { - conn_meta_encoding = Connection_metadata.encoding ; - private_node = (fun { private_node ; _ } -> private_node) ; - conn_meta_value = fun _ -> cfg; -} +let connection_metadata_cfg cfg : _ P2p.conn_meta_config = + { conn_meta_encoding = Connection_metadata.encoding; + private_node = (fun {private_node; _} -> private_node); + conn_meta_value = (fun _ -> cfg) } let init_connection_metadata opt = let open Connection_metadata in match opt with | None -> - { disable_mempool = false ; - private_node = false } + {disable_mempool = false; private_node = false} | Some c -> - { disable_mempool = c.P2p.disable_mempool ; + { disable_mempool = c.P2p.disable_mempool; private_node = c.P2p.private_mode } let init_p2p ?(sandboxed = false) p2p_params = match p2p_params with | None -> let c_meta = init_connection_metadata None in - Initialization_event.lwt_emit `P2p_layer_disabled >>= fun () -> - return (P2p.faked_network Distributed_db_message.cfg peer_metadata_cfg c_meta) + Initialization_event.lwt_emit `P2p_layer_disabled + >>= fun () -> + return + (P2p.faked_network Distributed_db_message.cfg peer_metadata_cfg c_meta) | Some (config, limits) -> let c_meta = init_connection_metadata (Some config) in let conn_metadata_cfg = connection_metadata_cfg c_meta in - Initialization_event.lwt_emit `Bootstrapping >>= fun () -> + Initialization_event.lwt_emit `Bootstrapping + >>= fun () -> let message_cfg = if sandboxed then { Distributed_db_message.cfg with chain_name = Distributed_db_version.sandboxed_chain_name } - else - Distributed_db_message.cfg in + else Distributed_db_message.cfg + in P2p.create - ~config ~limits + ~config + ~limits peer_metadata_cfg conn_metadata_cfg - message_cfg >>=? fun p2p -> + message_cfg + >>=? 
fun p2p -> Lwt.async (fun () -> P2p.maintain p2p) ; - Initialization_event.lwt_emit `P2p_maintain_started >>= fun () -> - return p2p + Initialization_event.lwt_emit `P2p_maintain_started + >>= fun () -> return p2p type config = { - genesis: State.Chain.genesis ; - store_root: string ; - context_root: string ; - patch_context: (Context.t -> Context.t Lwt.t) option ; - p2p: (P2p.config * P2p.limits) option ; - test_chain_max_tll: int option ; - checkpoint: Block_header.t option ; + genesis : State.Chain.genesis; + store_root : string; + context_root : string; + patch_context : (Context.t -> Context.t Lwt.t) option; + p2p : (P2p.config * P2p.limits) option; + test_chain_max_tll : int option; + checkpoint : Block_header.t option } and peer_validator_limits = Peer_validator.limits = { - new_head_request_timeout: Time.System.Span.t ; - block_header_timeout: Time.System.Span.t ; - block_operations_timeout: Time.System.Span.t ; - protocol_timeout: Time.System.Span.t ; - worker_limits: Worker_types.limits + new_head_request_timeout : Time.System.Span.t; + block_header_timeout : Time.System.Span.t; + block_operations_timeout : Time.System.Span.t; + protocol_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } and prevalidator_limits = Prevalidator.limits = { - max_refused_operations: int ; - operation_timeout: Time.System.Span.t ; - worker_limits : Worker_types.limits ; + max_refused_operations : int; + operation_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } and block_validator_limits = Block_validator.limits = { - protocol_timeout: Time.System.Span.t ; - worker_limits : Worker_types.limits ; + protocol_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } and chain_validator_limits = Chain_validator.limits = { - bootstrap_threshold: int ; - worker_limits : Worker_types.limits ; + bootstrap_threshold : int; + worker_limits : Worker_types.limits } -let default_block_validator_limits = { - protocol_timeout = Time.System.Span.of_seconds_exn 120. ; - worker_limits = { - backlog_size = 1000 ; - backlog_level = Internal_event.Debug ; +let default_block_validator_limits = + { protocol_timeout = Time.System.Span.of_seconds_exn 120.; + worker_limits = {backlog_size = 1000; backlog_level = Internal_event.Debug} } -} -let default_prevalidator_limits = { - operation_timeout = Time.System.Span.of_seconds_exn 10. ; - max_refused_operations = 1000 ; - worker_limits = { - backlog_size = 1000 ; - backlog_level = Internal_event.Info ; + +let default_prevalidator_limits = + { operation_timeout = Time.System.Span.of_seconds_exn 10.; + max_refused_operations = 1000; + worker_limits = {backlog_size = 1000; backlog_level = Internal_event.Info} } -} -let default_peer_validator_limits = { - block_header_timeout = Time.System.Span.of_seconds_exn 60. ; - block_operations_timeout = Time.System.Span.of_seconds_exn 60. ; - protocol_timeout = Time.System.Span.of_seconds_exn 120. ; - new_head_request_timeout = Time.System.Span.of_seconds_exn 90. 
; - worker_limits = { - backlog_size = 1000 ; - backlog_level = Internal_event.Info ; + +let default_peer_validator_limits = + { block_header_timeout = Time.System.Span.of_seconds_exn 60.; + block_operations_timeout = Time.System.Span.of_seconds_exn 60.; + protocol_timeout = Time.System.Span.of_seconds_exn 120.; + new_head_request_timeout = Time.System.Span.of_seconds_exn 90.; + worker_limits = {backlog_size = 1000; backlog_level = Internal_event.Info} } -} -let default_chain_validator_limits = { - bootstrap_threshold = 4 ; - worker_limits = { - backlog_size = 1000 ; - backlog_level = Internal_event.Info ; + +let default_chain_validator_limits = + { bootstrap_threshold = 4; + worker_limits = {backlog_size = 1000; backlog_level = Internal_event.Info} } -} let may_update_checkpoint chain_state checkpoint history_mode = match checkpoint with | None -> return_unit - | Some checkpoint -> - State.best_known_head_for_checkpoint - chain_state checkpoint >>= fun new_head -> - Chain.set_head chain_state new_head >>= fun _old_head -> - begin match history_mode with - | History_mode.Archive -> - State.Chain.set_checkpoint chain_state checkpoint >>= fun () -> - return_unit - | Full -> - State.Chain.set_checkpoint_then_purge_full chain_state checkpoint - | Rolling -> - State.Chain.set_checkpoint_then_purge_rolling chain_state checkpoint - end - -module Local_logging = - Internal_event.Legacy_logging.Make_semantic - (struct let name = "node.worker" end) + | Some checkpoint -> ( + State.best_known_head_for_checkpoint chain_state checkpoint + >>= fun new_head -> + Chain.set_head chain_state new_head + >>= fun _old_head -> + match history_mode with + | History_mode.Archive -> + State.Chain.set_checkpoint chain_state checkpoint + >>= fun () -> return_unit + | Full -> + State.Chain.set_checkpoint_then_purge_full chain_state checkpoint + | Rolling -> + State.Chain.set_checkpoint_then_purge_rolling chain_state checkpoint + ) + +module Local_logging = Internal_event.Legacy_logging.Make_semantic (struct + let name = "node.worker" +end) let store_known_protocols state = let open Local_logging in let embedded_protocols = Registered_protocol.list_embedded () in Lwt_list.iter_s (fun protocol_hash -> - State.Protocol.known state protocol_hash >>= function - | true -> - lwt_log_info Tag.DSL.(fun f -> - f "protocol %a is already in store: nothing to do" - -% a Protocol_hash.Logging.tag protocol_hash - -% t event "embedded_protocol_already_stored") - | false -> - match Registered_protocol.get_embedded_sources protocol_hash with - | None -> - lwt_log_info Tag.DSL.(fun f -> - f "protocol %a won't be stored: missing source files" - -% a Protocol_hash.Logging.tag protocol_hash - -% t event "embedded_protocol_missing_sources" - ) - | Some protocol -> - let hash = Protocol.hash protocol in - if not (Protocol_hash.equal hash protocol_hash) then - lwt_log_info Tag.DSL.(fun f -> - f "protocol %a won't be stored: wrong hash" - -% a Protocol_hash.Logging.tag protocol_hash - -% t event "embedded_protocol_inconsistent_hash") - else - State.Protocol.store state protocol >>= function - | Some hash' -> - assert (hash = hash') ; - lwt_log_info Tag.DSL.(fun f -> - f "protocol %a successfully stored" - -% a Protocol_hash.Logging.tag protocol_hash - -% t event "embedded_protocol_stored") - | None -> - lwt_log_info Tag.DSL.(fun f -> - f "protocol %a is already in store: nothing to do" - -% a Protocol_hash.Logging.tag protocol_hash - -% t event "embedded_protocol_already_stored") - ) embedded_protocols - -let create - ?(sandboxed = false) - { 
genesis ; store_root ; context_root ; - patch_context ; p2p = p2p_params ; - test_chain_max_tll = max_child_ttl ; - checkpoint } - peer_validator_limits - block_validator_limits - prevalidator_limits - chain_validator_limits - history_mode - = + State.Protocol.known state protocol_hash + >>= function + | true -> + lwt_log_info + Tag.DSL.( + fun f -> + f "protocol %a is already in store: nothing to do" + -% a Protocol_hash.Logging.tag protocol_hash + -% t event "embedded_protocol_already_stored") + | false -> ( + match Registered_protocol.get_embedded_sources protocol_hash with + | None -> + lwt_log_info + Tag.DSL.( + fun f -> + f "protocol %a won't be stored: missing source files" + -% a Protocol_hash.Logging.tag protocol_hash + -% t event "embedded_protocol_missing_sources") + | Some protocol -> ( + let hash = Protocol.hash protocol in + if not (Protocol_hash.equal hash protocol_hash) then + lwt_log_info + Tag.DSL.( + fun f -> + f "protocol %a won't be stored: wrong hash" + -% a Protocol_hash.Logging.tag protocol_hash + -% t event "embedded_protocol_inconsistent_hash") + else + State.Protocol.store state protocol + >>= function + | Some hash' -> + assert (hash = hash') ; + lwt_log_info + Tag.DSL.( + fun f -> + f "protocol %a successfully stored" + -% a Protocol_hash.Logging.tag protocol_hash + -% t event "embedded_protocol_stored") + | None -> + lwt_log_info + Tag.DSL.( + fun f -> + f "protocol %a is already in store: nothing to do" + -% a Protocol_hash.Logging.tag protocol_hash + -% t event "embedded_protocol_already_stored") ) )) + embedded_protocols + +let create ?(sandboxed = false) + { genesis; + store_root; + context_root; + patch_context; + p2p = p2p_params; + test_chain_max_tll = max_child_ttl; + checkpoint } peer_validator_limits block_validator_limits + prevalidator_limits chain_validator_limits history_mode = let (start_prevalidator, start_testchain) = match p2p_params with - | Some (config, _limits) -> not config.P2p.disable_mempool, not config.P2p.disable_testchain - | None -> true, true in - init_p2p ~sandboxed p2p_params >>=? fun p2p -> - State.init - ~store_root ~context_root ?history_mode ?patch_context - genesis >>=? fun (state, mainchain_state, context_index, history_mode) -> - may_update_checkpoint mainchain_state checkpoint history_mode >>=? fun () -> + | Some (config, _limits) -> + (not config.P2p.disable_mempool, not config.P2p.disable_testchain) + | None -> + (true, true) + in + init_p2p ~sandboxed p2p_params + >>=? fun p2p -> + State.init ~store_root ~context_root ?history_mode ?patch_context genesis + >>=? fun (state, mainchain_state, context_index, history_mode) -> + may_update_checkpoint mainchain_state checkpoint history_mode + >>=? fun () -> let distributed_db = Distributed_db.create state p2p in - store_known_protocols state >>= fun () -> - Validator.create state distributed_db + store_known_protocols state + >>= fun () -> + Validator.create + state + distributed_db peer_validator_limits block_validator_limits (Block_validator.Internal context_index) @@ -289,24 +313,21 @@ let create ~start_testchain >>=? fun validator -> (* TODO : Check that the testchain is correctly activated after a node restart *) - Validator.activate validator - ?max_child_ttl ~start_prevalidator - mainchain_state >>=? fun mainchain_validator -> + Validator.activate + validator + ?max_child_ttl + ~start_prevalidator + mainchain_state + >>=? 
fun mainchain_validator -> let shutdown () = - P2p.shutdown p2p >>= fun () -> - Distributed_db.shutdown distributed_db >>= fun () -> - Validator.shutdown validator >>= fun () -> - State.close state >>= fun () -> - Lwt.return_unit + P2p.shutdown p2p + >>= fun () -> + Distributed_db.shutdown distributed_db + >>= fun () -> + Validator.shutdown validator + >>= fun () -> State.close state >>= fun () -> Lwt.return_unit in - return { - state ; - distributed_db ; - validator ; - mainchain_validator ; - p2p ; - shutdown ; - } + return {state; distributed_db; validator; mainchain_validator; p2p; shutdown} let shutdown node = node.shutdown () @@ -314,22 +335,23 @@ let build_rpc_directory node = let dir : unit RPC_directory.t ref = ref RPC_directory.empty in let merge d = dir := RPC_directory.merge !dir d in let register0 s f = - dir := RPC_directory.register !dir s (fun () p q -> f p q) in - - merge (Protocol_directory.build_rpc_directory - (Block_validator.running_worker ()) node.state) ; - merge (Monitor_directory.build_rpc_directory - node.validator node.mainchain_validator) ; + dir := RPC_directory.register !dir s (fun () p q -> f p q) + in + merge + (Protocol_directory.build_rpc_directory + (Block_validator.running_worker ()) + node.state) ; + merge + (Monitor_directory.build_rpc_directory + node.validator + node.mainchain_validator) ; merge (Injection_directory.build_rpc_directory node.validator) ; merge (Chain_directory.build_rpc_directory node.validator) ; merge (P2p_directory.build_rpc_directory node.p2p) ; merge (Worker_directory.build_rpc_directory node.state) ; - merge (Stat_directory.rpc_directory ()) ; - - register0 RPC_service.error_service begin fun () () -> - return (Data_encoding.Json.schema Error_monad.error_encoding) - end ; - + register0 RPC_service.error_service (fun () () -> + return (Data_encoding.Json.schema Error_monad.error_encoding)) ; RPC_directory.register_describe_directory_service - !dir RPC_service.description_service + !dir + RPC_service.description_service diff --git a/src/lib_shell/node.mli b/src/lib_shell/node.mli index 1d0bc776d369a2b3174c336cd124bf93fd5fc390..eec5349ba3a40ddbfaf5bc4ac0a55821326c68b2 100644 --- a/src/lib_shell/node.mli +++ b/src/lib_shell/node.mli @@ -28,42 +28,48 @@ type t type config = { - genesis: State.Chain.genesis ; - store_root: string ; - context_root: string ; - patch_context: (Context.t -> Context.t Lwt.t) option ; - p2p: (P2p.config * P2p.limits) option ; - test_chain_max_tll: int option ; - checkpoint: Block_header.t option ; + genesis : State.Chain.genesis; + store_root : string; + context_root : string; + patch_context : (Context.t -> Context.t Lwt.t) option; + p2p : (P2p.config * P2p.limits) option; + test_chain_max_tll : int option; + checkpoint : Block_header.t option } and peer_validator_limits = { - new_head_request_timeout: Time.System.Span.t ; - block_header_timeout: Time.System.Span.t ; - block_operations_timeout: Time.System.Span.t ; - protocol_timeout: Time.System.Span.t ; - worker_limits: Worker_types.limits + new_head_request_timeout : Time.System.Span.t; + block_header_timeout : Time.System.Span.t; + block_operations_timeout : Time.System.Span.t; + protocol_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } + and prevalidator_limits = { - max_refused_operations: int ; - operation_timeout: Time.System.Span.t ; - worker_limits : Worker_types.limits ; + max_refused_operations : int; + operation_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } + and block_validator_limits = { - 
protocol_timeout: Time.System.Span.t ; - worker_limits : Worker_types.limits ; + protocol_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } + and chain_validator_limits = { - bootstrap_threshold: int ; - worker_limits : Worker_types.limits ; + bootstrap_threshold : int; + worker_limits : Worker_types.limits } -val default_peer_validator_limits: peer_validator_limits -val default_prevalidator_limits: prevalidator_limits -val default_block_validator_limits: block_validator_limits -val default_chain_validator_limits: chain_validator_limits +val default_peer_validator_limits : peer_validator_limits + +val default_prevalidator_limits : prevalidator_limits + +val default_block_validator_limits : block_validator_limits + +val default_chain_validator_limits : chain_validator_limits -val create: +val create : ?sandboxed:bool -> config -> peer_validator_limits -> @@ -73,6 +79,6 @@ val create: History_mode.t option -> t tzresult Lwt.t -val shutdown: t -> unit Lwt.t +val shutdown : t -> unit Lwt.t -val build_rpc_directory: t -> unit RPC_directory.t +val build_rpc_directory : t -> unit RPC_directory.t diff --git a/src/lib_shell/p2p_directory.ml b/src/lib_shell/p2p_directory.ml index 7d8894f8659f1469fcb699df81e9877ac80e0d98..3df5cb61cc879c45f8bd7ff99f19be12b5375bf0 100644 --- a/src/lib_shell/p2p_directory.ml +++ b/src/lib_shell/p2p_directory.ml @@ -26,384 +26,418 @@ let info_of_point_info i = let open P2p_point.Info in let open P2p_point.State in - let state = match P2p_point_state.get i with - | Requested _ -> Requested - | Accepted { current_peer_id ; _ } -> Accepted current_peer_id - | Running { current_peer_id ; _ } -> Running current_peer_id - | Disconnected -> Disconnected in - P2p_point_state.Info.{ - trusted = trusted i ; - state ; - greylisted_until = greylisted_until i ; - last_failed_connection = last_failed_connection i ; - last_rejected_connection = last_rejected_connection i ; - last_established_connection = last_established_connection i ; - last_disconnection = last_disconnection i ; - last_seen = last_seen i ; - last_miss = last_miss i ; - } + let state = + match P2p_point_state.get i with + | Requested _ -> + Requested + | Accepted {current_peer_id; _} -> + Accepted current_peer_id + | Running {current_peer_id; _} -> + Running current_peer_id + | Disconnected -> + Disconnected + in + P2p_point_state.Info. 
+ { trusted = trusted i; + state; + greylisted_until = greylisted_until i; + last_failed_connection = last_failed_connection i; + last_rejected_connection = last_rejected_connection i; + last_established_connection = last_established_connection i; + last_disconnection = last_disconnection i; + last_seen = last_seen i; + last_miss = last_miss i } let info_of_peer_info pool i = let open P2p_peer.Info in let open P2p_peer.State in - let state, id_point = match P2p_peer_state.get i with - | Accepted { current_point ; _ } -> Accepted, Some current_point - | Running { current_point ; _ } -> Running, Some current_point - | Disconnected -> Disconnected, None in + let (state, id_point) = + match P2p_peer_state.get i with + | Accepted {current_point; _} -> + (Accepted, Some current_point) + | Running {current_point; _} -> + (Running, Some current_point) + | Disconnected -> + (Disconnected, None) + in let peer_id = P2p_peer_state.Info.peer_id i in let score = P2p_pool.Peers.get_score pool peer_id in let conn_opt = P2p_pool.Connection.find_by_peer_id pool peer_id in let stat = match conn_opt with - | None -> P2p_stat.empty - | Some conn -> P2p_pool.Connection.stat conn in + | None -> + P2p_stat.empty + | Some conn -> + P2p_pool.Connection.stat conn + in let meta_opt = match conn_opt with - | None -> None - | Some conn -> Some (P2p_pool.Connection.remote_metadata conn) in - P2p_peer_state.Info.{ - score ; - trusted = trusted i ; - conn_metadata = meta_opt ; - peer_metadata = peer_metadata i; - state ; - id_point ; - stat ; - last_failed_connection = last_failed_connection i ; - last_rejected_connection = last_rejected_connection i ; - last_established_connection = last_established_connection i ; - last_disconnection = last_disconnection i ; - last_seen = last_seen i ; - last_miss = last_miss i ; - } + | None -> + None + | Some conn -> + Some (P2p_pool.Connection.remote_metadata conn) + in + P2p_peer_state.Info. + { score; + trusted = trusted i; + conn_metadata = meta_opt; + peer_metadata = peer_metadata i; + state; + id_point; + stat; + last_failed_connection = last_failed_connection i; + last_rejected_connection = last_rejected_connection i; + last_established_connection = last_established_connection i; + last_disconnection = last_disconnection i; + last_seen = last_seen i; + last_miss = last_miss i } let build_rpc_directory net = - let dir = RPC_directory.empty in - (* Network : Global *) - let dir = - RPC_directory.register0 dir P2p_services.S.version begin fun () () -> - return (P2p.announced_version net) - end in - + RPC_directory.register0 dir P2p_services.S.version (fun () () -> + return (P2p.announced_version net)) + in let dir = (* DEPRECATED: use [version] instead. *) - RPC_directory.register0 dir P2p_services.S.versions begin fun () () -> - return [P2p.announced_version net] - end in - - let dir = - RPC_directory.register0 dir P2p_services.S.self begin fun () () -> - match P2p.pool net with - | None -> failwith "The P2P layer is disabled." - | Some pool -> return (P2p_pool.config pool).identity.peer_id - end in - + RPC_directory.register0 dir P2p_services.S.versions (fun () () -> + return [P2p.announced_version net]) + in let dir = - RPC_directory.register0 dir P2p_services.S.stat begin fun () () -> - match P2p.pool net with - | None -> return P2p_stat.empty - | Some pool -> return (P2p_pool.pool_stat pool) - end in - + RPC_directory.register0 dir P2p_services.S.self (fun () () -> + match P2p.pool net with + | None -> + failwith "The P2P layer is disabled." 
+ | Some pool -> + return (P2p_pool.config pool).identity.peer_id) + in let dir = - RPC_directory.gen_register0 dir P2p_services.S.events begin fun () () -> - let stream, stopper = + RPC_directory.register0 dir P2p_services.S.stat (fun () () -> match P2p.pool net with - | None -> Lwt_watcher.create_fake_stream () - | Some pool -> P2p_pool.watch pool in - let shutdown () = Lwt_watcher.shutdown stopper in - let next () = Lwt_stream.get stream in - RPC_answer.return_stream { next ; shutdown } - end in - + | None -> + return P2p_stat.empty + | Some pool -> + return (P2p_pool.pool_stat pool)) + in let dir = - RPC_directory.register1 dir P2p_services.S.connect begin fun point q () -> - match P2p.pool net with - | None -> failwith "The P2P layer is disabled." - | Some pool -> - P2p_pool.connect ~timeout:q#timeout pool point >>=? fun _conn -> - return_unit - end in - + RPC_directory.gen_register0 dir P2p_services.S.events (fun () () -> + let (stream, stopper) = + match P2p.pool net with + | None -> + Lwt_watcher.create_fake_stream () + | Some pool -> + P2p_pool.watch pool + in + let shutdown () = Lwt_watcher.shutdown stopper in + let next () = Lwt_stream.get stream in + RPC_answer.return_stream {next; shutdown}) + in + let dir = + RPC_directory.register1 dir P2p_services.S.connect (fun point q () -> + match P2p.pool net with + | None -> + failwith "The P2P layer is disabled." + | Some pool -> + P2p_pool.connect ~timeout:q#timeout pool point + >>=? fun _conn -> return_unit) + in (* Network : Connection *) - let dir = - RPC_directory.opt_register1 dir P2p_services.Connections.S.info - begin fun peer_id () () -> - return @@ - Option.apply (P2p.pool net) ~f: begin fun pool -> - Option.map ~f:P2p_pool.Connection.info - (P2p_pool.Connection.find_by_peer_id pool peer_id) - end - end in - + RPC_directory.opt_register1 + dir + P2p_services.Connections.S.info + (fun peer_id () () -> + return + @@ Option.apply (P2p.pool net) ~f:(fun pool -> + Option.map + ~f:P2p_pool.Connection.info + (P2p_pool.Connection.find_by_peer_id pool peer_id))) + in let dir = - RPC_directory.lwt_register1 dir P2p_services.Connections.S.kick - begin fun peer_id q () -> + RPC_directory.lwt_register1 + dir + P2p_services.Connections.S.kick + (fun peer_id q () -> match P2p.pool net with - | None -> Lwt.return_unit - | Some pool -> - match P2p_pool.Connection.find_by_peer_id pool peer_id with - | None -> Lwt.return_unit - | Some conn -> P2p_pool.disconnect ~wait:q#wait conn - end in - + | None -> + Lwt.return_unit + | Some pool -> ( + match P2p_pool.Connection.find_by_peer_id pool peer_id with + | None -> + Lwt.return_unit + | Some conn -> + P2p_pool.disconnect ~wait:q#wait conn )) + in let dir = - RPC_directory.register0 dir P2p_services.Connections.S.list - begin fun () () -> + RPC_directory.register0 dir P2p_services.Connections.S.list (fun () () -> match P2p.pool net with - | None -> return_nil + | None -> + return_nil | Some pool -> - return @@ - P2p_pool.Connection.fold - pool ~init:[] - ~f:begin fun _peer_id c acc -> - P2p_pool.Connection.info c :: acc - end - end in - + return + @@ P2p_pool.Connection.fold pool ~init:[] ~f:(fun _peer_id c acc -> + P2p_pool.Connection.info c :: acc)) + in (* Network : Peer_id *) - let dir = - RPC_directory.register0 dir P2p_services.Peers.S.list - begin fun q () -> + RPC_directory.register0 dir P2p_services.Peers.S.list (fun q () -> match P2p.pool net with - | None -> return_nil + | None -> + return_nil | Some pool -> - return @@ - P2p_pool.Peers.fold_known pool - ~init:[] - ~f:begin fun peer_id i 
a -> - let info = info_of_peer_info pool i in - match q#filters with - | [] -> (peer_id, info) :: a - | filters when P2p_peer.State.filter filters info.state -> - (peer_id, info) :: a - | _ -> a - end - end in - + return + @@ P2p_pool.Peers.fold_known pool ~init:[] ~f:(fun peer_id i a -> + let info = info_of_peer_info pool i in + match q#filters with + | [] -> + (peer_id, info) :: a + | filters when P2p_peer.State.filter filters info.state -> + (peer_id, info) :: a + | _ -> + a)) + in let dir = - RPC_directory.opt_register1 dir P2p_services.Peers.S.info - begin fun peer_id () () -> + RPC_directory.opt_register1 + dir + P2p_services.Peers.S.info + (fun peer_id () () -> match P2p.pool net with - | None -> return_none + | None -> + return_none | Some pool -> - return @@ - Option.map ~f:(info_of_peer_info pool) - (P2p_pool.Peers.info pool peer_id) - end in - + return + @@ Option.map + ~f:(info_of_peer_info pool) + (P2p_pool.Peers.info pool peer_id)) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Peers.S.events - begin fun peer_id q () -> + RPC_directory.gen_register1 + dir + P2p_services.Peers.S.events + (fun peer_id q () -> match P2p.pool net with - | None -> RPC_answer.not_found - | Some pool -> - match P2p_pool.Peers.info pool peer_id with - | None -> RPC_answer.return [] - | Some gi -> - let rev = false and max = max_int in - let evts = - P2p_peer_state.Info.fold gi ~init:[] - ~f:(fun a e -> e :: a) in - let evts = (if rev then List.rev_sub else List.sub) evts max in - if not q#monitor then - RPC_answer.return evts - else - let stream, stopper = P2p_peer_state.Info.watch gi in - let shutdown () = Lwt_watcher.shutdown stopper in - let first_request = ref true in - let next () = - if not !first_request then begin - Lwt_stream.get stream >|= Option.map ~f:(fun i -> [i]) - end else begin - first_request := false ; - Lwt.return_some evts - end in - RPC_answer.return_stream { next ; shutdown } - end in - + | None -> + RPC_answer.not_found + | Some pool -> ( + match P2p_pool.Peers.info pool peer_id with + | None -> + RPC_answer.return [] + | Some gi -> + let rev = false and max = max_int in + let evts = + P2p_peer_state.Info.fold gi ~init:[] ~f:(fun a e -> e :: a) + in + let evts = (if rev then List.rev_sub else List.sub) evts max in + if not q#monitor then RPC_answer.return evts + else + let (stream, stopper) = P2p_peer_state.Info.watch gi in + let shutdown () = Lwt_watcher.shutdown stopper in + let first_request = ref true in + let next () = + if not !first_request then + Lwt_stream.get stream >|= Option.map ~f:(fun i -> [i]) + else ( + first_request := false ; + Lwt.return_some evts ) + in + RPC_answer.return_stream {next; shutdown} )) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Peers.S.ban - begin fun peer_id () () -> + RPC_directory.gen_register1 + dir + P2p_services.Peers.S.ban + (fun peer_id () () -> match P2p.pool net with - | None -> RPC_answer.not_found + | None -> + RPC_answer.not_found | Some pool -> P2p_pool.Peers.untrust pool peer_id ; P2p_pool.Peers.ban pool peer_id ; - RPC_answer.return_unit - end in - + RPC_answer.return_unit) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Peers.S.unban - begin fun peer_id () () -> + RPC_directory.gen_register1 + dir + P2p_services.Peers.S.unban + (fun peer_id () () -> match P2p.pool net with - | None -> RPC_answer.not_found + | None -> + RPC_answer.not_found | Some pool -> P2p_pool.Peers.unban pool peer_id ; - RPC_answer.return_unit - end in - + RPC_answer.return_unit) + in let dir = - 
RPC_directory.gen_register1 dir P2p_services.Peers.S.trust - begin fun peer_id () () -> + RPC_directory.gen_register1 + dir + P2p_services.Peers.S.trust + (fun peer_id () () -> match P2p.pool net with - | None -> RPC_answer.not_found + | None -> + RPC_answer.not_found | Some pool -> P2p_pool.Peers.trust pool peer_id ; - RPC_answer.return_unit - end in - + RPC_answer.return_unit) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Peers.S.untrust - begin fun peer_id () () -> + RPC_directory.gen_register1 + dir + P2p_services.Peers.S.untrust + (fun peer_id () () -> match P2p.pool net with - | None -> RPC_answer.not_found + | None -> + RPC_answer.not_found | Some pool -> P2p_pool.Peers.untrust pool peer_id ; - RPC_answer.return_unit - end in - + RPC_answer.return_unit) + in let dir = - RPC_directory.register1 dir P2p_services.Peers.S.banned - begin fun peer_id () () -> + RPC_directory.register1 + dir + P2p_services.Peers.S.banned + (fun peer_id () () -> match P2p.pool net with - | None -> return_false - | Some pool when (P2p_pool.Peers.get_trusted pool peer_id) -> + | None -> + return_false + | Some pool when P2p_pool.Peers.get_trusted pool peer_id -> return_false | Some pool -> - return (P2p_pool.Peers.banned pool peer_id) - end in - + return (P2p_pool.Peers.banned pool peer_id)) + in (* Network : Point *) - let dir = - RPC_directory.register0 dir P2p_services.Points.S.list - begin fun q () -> + RPC_directory.register0 dir P2p_services.Points.S.list (fun q () -> match P2p.pool net with - | None -> return_nil + | None -> + return_nil | Some pool -> - return @@ - P2p_pool.Points.fold_known - pool ~init:[] - ~f:begin fun point i a -> - let info = info_of_point_info i in - match q#filters with - | [] -> (point, info) :: a - | filters when P2p_point.State.filter filters info.state -> - (point, info) :: a - | _ -> a - end - end in - + return + @@ P2p_pool.Points.fold_known pool ~init:[] ~f:(fun point i a -> + let info = info_of_point_info i in + match q#filters with + | [] -> + (point, info) :: a + | filters when P2p_point.State.filter filters info.state -> + (point, info) :: a + | _ -> + a)) + in let dir = - RPC_directory.opt_register1 dir P2p_services.Points.S.info - begin fun point () () -> + RPC_directory.opt_register1 + dir + P2p_services.Points.S.info + (fun point () () -> match P2p.pool net with - | None -> return_none + | None -> + return_none | Some pool -> - return @@ - Option.map - (P2p_pool.Points.info pool point) - ~f:info_of_point_info - end in - + return + @@ Option.map + (P2p_pool.Points.info pool point) + ~f:info_of_point_info) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Points.S.events - begin fun point_id q () -> + RPC_directory.gen_register1 + dir + P2p_services.Points.S.events + (fun point_id q () -> match P2p.pool net with - | None -> RPC_answer.not_found - | Some pool -> - match P2p_pool.Points.info pool point_id with - | None -> RPC_answer.return [] - | Some gi -> - let rev = false and max = max_int in - let evts = - P2p_point_state.Info.fold gi ~init:[] - ~f:(fun a e -> e :: a) in - let evts = (if rev then List.rev_sub else List.sub) evts max in - if not q#monitor then - RPC_answer.return evts - else - let stream, stopper = P2p_point_state.Info.watch gi in - let shutdown () = Lwt_watcher.shutdown stopper in - let first_request = ref true in - let next () = - if not !first_request then begin - Lwt_stream.get stream >|= Option.map ~f:(fun i -> [i]) - end else begin - first_request := false ; - Lwt.return_some evts - end in - 
RPC_answer.return_stream { next ; shutdown } - end in - + | None -> + RPC_answer.not_found + | Some pool -> ( + match P2p_pool.Points.info pool point_id with + | None -> + RPC_answer.return [] + | Some gi -> + let rev = false and max = max_int in + let evts = + P2p_point_state.Info.fold gi ~init:[] ~f:(fun a e -> e :: a) + in + let evts = (if rev then List.rev_sub else List.sub) evts max in + if not q#monitor then RPC_answer.return evts + else + let (stream, stopper) = P2p_point_state.Info.watch gi in + let shutdown () = Lwt_watcher.shutdown stopper in + let first_request = ref true in + let next () = + if not !first_request then + Lwt_stream.get stream >|= Option.map ~f:(fun i -> [i]) + else ( + first_request := false ; + Lwt.return_some evts ) + in + RPC_answer.return_stream {next; shutdown} )) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Points.S.ban - begin fun point () () -> + RPC_directory.gen_register1 + dir + P2p_services.Points.S.ban + (fun point () () -> match P2p.pool net with - | None -> RPC_answer.not_found + | None -> + RPC_answer.not_found | Some pool -> - P2p_pool.Points.untrust pool point; - P2p_pool.Points.ban pool point; - RPC_answer.return_unit - end in - + P2p_pool.Points.untrust pool point ; + P2p_pool.Points.ban pool point ; + RPC_answer.return_unit) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Points.S.unban - begin fun point () () -> + RPC_directory.gen_register1 + dir + P2p_services.Points.S.unban + (fun point () () -> match P2p.pool net with - | None -> RPC_answer.not_found + | None -> + RPC_answer.not_found | Some pool -> - P2p_pool.Points.unban pool point; - RPC_answer.return_unit - end in - + P2p_pool.Points.unban pool point ; + RPC_answer.return_unit) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Points.S.trust - begin fun point () () -> + RPC_directory.gen_register1 + dir + P2p_services.Points.S.trust + (fun point () () -> match P2p.pool net with - | None -> RPC_answer.not_found + | None -> + RPC_answer.not_found | Some pool -> P2p_pool.Points.trust pool point ; - RPC_answer.return_unit - end in - + RPC_answer.return_unit) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Points.S.untrust - begin fun point () () -> + RPC_directory.gen_register1 + dir + P2p_services.Points.S.untrust + (fun point () () -> match P2p.pool net with - | None -> RPC_answer.not_found + | None -> + RPC_answer.not_found | Some pool -> P2p_pool.Points.untrust pool point ; - RPC_answer.return_unit - end in - + RPC_answer.return_unit) + in let dir = - RPC_directory.gen_register1 dir P2p_services.Points.S.banned - begin fun point () () -> + RPC_directory.gen_register1 + dir + P2p_services.Points.S.banned + (fun point () () -> match P2p.pool net with - | None -> RPC_answer.not_found - | Some pool when (P2p_pool.Points.get_trusted pool point) -> + | None -> + RPC_answer.not_found + | Some pool when P2p_pool.Points.get_trusted pool point -> RPC_answer.return false | Some pool -> - RPC_answer.return (P2p_pool.Points.banned pool point) - end in - + RPC_answer.return (P2p_pool.Points.banned pool point)) + in (* Network : Greylist *) - let dir = - RPC_directory.register dir P2p_services.ACL.S.clear - begin fun () () () -> + RPC_directory.register dir P2p_services.ACL.S.clear (fun () () () -> match P2p.pool net with - | None -> return_unit - | Some pool -> - P2p_pool.acl_clear pool ; + | None -> return_unit - end in - + | Some pool -> + P2p_pool.acl_clear pool ; return_unit) + in dir diff --git a/src/lib_shell/p2p_directory.mli 
b/src/lib_shell/p2p_directory.mli index f592f431b8a5076d2f0a507c0115529264e02946..12ea5485ffab515866f8334e3fdf48e3f92dcfb6 100644 --- a/src/lib_shell/p2p_directory.mli +++ b/src/lib_shell/p2p_directory.mli @@ -24,4 +24,4 @@ (*****************************************************************************) val build_rpc_directory : - (_, Peer_metadata.t , Connection_metadata.t) P2p.t -> unit RPC_directory.t \ No newline at end of file + (_, Peer_metadata.t, Connection_metadata.t) P2p.t -> unit RPC_directory.t diff --git a/src/lib_shell/peer_validator.ml b/src/lib_shell/peer_validator.ml index ce47fd7737243262db1d15d7ecd6dd008e6c425e..161569a2de14339eec0e82049fb05af002c9304d 100644 --- a/src/lib_shell/peer_validator.ml +++ b/src/lib_shell/peer_validator.ml @@ -30,12 +30,19 @@ open Peer_validator_worker_state module Name = struct type t = Chain_id.t * P2p_peer.Id.t - let encoding = - Data_encoding.tup2 Chain_id.encoding P2p_peer.Id.encoding - let base = [ "validator.peer" ] + + let encoding = Data_encoding.tup2 Chain_id.encoding P2p_peer.Id.encoding + + let base = ["validator.peer"] + let pp ppf (chain, peer) = - Format.fprintf ppf "%a:%a" - Chain_id.pp_short chain P2p_peer.Id.pp_short peer + Format.fprintf + ppf + "%a:%a" + Chain_id.pp_short + chain + P2p_peer.Id.pp_short + peer end module Request = struct @@ -43,9 +50,12 @@ module Request = struct type _ t = | New_head : Block_hash.t * Block_header.t -> unit t - | New_branch : Block_hash.t * Block_locator.t * Block_locator.seed -> unit t + | New_branch : + Block_hash.t * Block_locator.t * Block_locator.seed + -> unit t - let view (type a) (req : a t) : view = match req with + let view (type a) (req : a t) : view = + match req with | New_head (hash, _) -> New_head hash | New_branch (hash, locator, seed) -> @@ -55,286 +65,371 @@ module Request = struct end type limits = { - new_head_request_timeout: Time.System.Span.t ; - block_header_timeout: Time.System.Span.t ; - block_operations_timeout: Time.System.Span.t ; - protocol_timeout: Time.System.Span.t ; - worker_limits: Worker_types.limits + new_head_request_timeout : Time.System.Span.t; + block_header_timeout : Time.System.Span.t; + block_operations_timeout : Time.System.Span.t; + protocol_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } module Types = struct include Worker_state type parameters = { - chain_db: Distributed_db.chain_db ; - block_validator: Block_validator.t ; + chain_db : Distributed_db.chain_db; + block_validator : Block_validator.t; (* callback to chain_validator *) - notify_new_block: State.Block.t -> unit ; - notify_bootstrapped: unit -> unit ; - notify_termination: unit -> unit ; - limits: limits; + notify_new_block : State.Block.t -> unit; + notify_bootstrapped : unit -> unit; + notify_termination : unit -> unit; + limits : limits } type state = { - peer_id: P2p_peer.Id.t ; - parameters : parameters ; - mutable bootstrapped: bool ; - mutable pipeline : Bootstrap_pipeline.t option ; - mutable last_validated_head: Block_header.t ; - mutable last_advertised_head: Block_header.t ; + peer_id : P2p_peer.Id.t; + parameters : parameters; + mutable bootstrapped : bool; + mutable pipeline : Bootstrap_pipeline.t option; + mutable last_validated_head : Block_header.t; + mutable last_advertised_head : Block_header.t } let pipeline_length = function - | None -> Bootstrap_pipeline.length_zero - | Some p -> Bootstrap_pipeline.length p + | None -> + Bootstrap_pipeline.length_zero + | Some p -> + Bootstrap_pipeline.length p let view (state : state) _ : view = - let { 
bootstrapped ; pipeline ; - last_validated_head ; last_advertised_head ; _ } = state in - { bootstrapped ; pipeline_length = pipeline_length pipeline ; - last_validated_head = Block_header.hash last_validated_head ; + let {bootstrapped; pipeline; last_validated_head; last_advertised_head; _} + = + state + in + { bootstrapped; + pipeline_length = pipeline_length pipeline; + last_validated_head = Block_header.hash last_validated_head; last_advertised_head = Block_header.hash last_advertised_head } - end module Worker = Worker.Make (Name) (Event) (Request) (Types) - open Types type t = Worker.dropbox Worker.t -let debug w = - Format.kasprintf (fun msg -> Worker.record_event w (Debug msg)) +let debug w = Format.kasprintf (fun msg -> Worker.record_event w (Debug msg)) let set_bootstrapped pv = - if not pv.bootstrapped then begin + if not pv.bootstrapped then ( pv.bootstrapped <- true ; - pv.parameters.notify_bootstrapped () ; - end + pv.parameters.notify_bootstrapped () ) let bootstrap_new_branch w _head unknown_prefix = let pv = Worker.state w in let sender_id = Distributed_db.my_peer_id pv.parameters.chain_db in (* sender and receiver are inverted here because they are from the point of view of the node sending the locator *) - let seed = {Block_locator.sender_id=pv.peer_id; receiver_id = sender_id } in + let seed = {Block_locator.sender_id = pv.peer_id; receiver_id = sender_id} in let len = Block_locator.estimated_length seed unknown_prefix in - debug w + debug + w "validating new branch from peer %a (approx. %d blocks)" - P2p_peer.Id.pp_short pv.peer_id len ; + P2p_peer.Id.pp_short + pv.peer_id + len ; let pipeline = Bootstrap_pipeline.create ~notify_new_block:pv.parameters.notify_new_block ~block_header_timeout:pv.parameters.limits.block_header_timeout ~block_operations_timeout:pv.parameters.limits.block_operations_timeout pv.parameters.block_validator - pv.peer_id pv.parameters.chain_db unknown_prefix in + pv.peer_id + pv.parameters.chain_db + unknown_prefix + in pv.pipeline <- Some pipeline ; - Worker.protect w - ~on_error:begin fun error -> + Worker.protect + w + ~on_error:(fun error -> (* if the peer_validator is killed, let's cancel the pipeline *) pv.pipeline <- None ; - Bootstrap_pipeline.cancel pipeline >>= fun () -> - Lwt.return_error error - end - begin fun () -> - Bootstrap_pipeline.wait pipeline - end >>=? fun () -> + Bootstrap_pipeline.cancel pipeline >>= fun () -> Lwt.return_error error) + (fun () -> Bootstrap_pipeline.wait pipeline) + >>=? fun () -> pv.pipeline <- None ; set_bootstrapped pv ; - debug w + debug + w "done validating new branch from peer %a." - P2p_peer.Id.pp_short pv.peer_id ; + P2p_peer.Id.pp_short + pv.peer_id ; return_unit let validate_new_head w hash (header : Block_header.t) = let pv = Worker.state w in - debug w + debug + w "fetching operations for new head %a from peer %a" - Block_hash.pp_short hash - P2p_peer.Id.pp_short pv.peer_id ; + Block_hash.pp_short + hash + P2p_peer.Id.pp_short + pv.peer_id ; map_p (fun i -> - Worker.protect w begin fun () -> - Distributed_db.Operations.fetch - ~timeout:pv.parameters.limits.block_operations_timeout - pv.parameters.chain_db ~peer:pv.peer_id - (hash, i) header.shell.operations_hash - end) - (0 -- (header.shell.validation_passes - 1)) >>=? 
fun operations -> - debug w + Worker.protect w (fun () -> + Distributed_db.Operations.fetch + ~timeout:pv.parameters.limits.block_operations_timeout + pv.parameters.chain_db + ~peer:pv.peer_id + (hash, i) + header.shell.operations_hash)) + (0 -- (header.shell.validation_passes - 1)) + >>=? fun operations -> + debug + w "requesting validation for new head %a from peer %a" - Block_hash.pp_short hash - P2p_peer.Id.pp_short pv.peer_id ; + Block_hash.pp_short + hash + P2p_peer.Id.pp_short + pv.peer_id ; Block_validator.validate ~notify_new_block:pv.parameters.notify_new_block - pv.parameters.block_validator pv.parameters.chain_db - hash header operations >>=? fun _block -> - debug w + pv.parameters.block_validator + pv.parameters.chain_db + hash + header + operations + >>=? fun _block -> + debug + w "end of validation for new head %a from peer %a" - Block_hash.pp_short hash - P2p_peer.Id.pp_short pv.peer_id ; + Block_hash.pp_short + hash + P2p_peer.Id.pp_short + pv.peer_id ; set_bootstrapped pv ; - let meta = Distributed_db.get_peer_metadata pv.parameters.chain_db pv.peer_id in - Peer_metadata.incr meta Valid_blocks; + let meta = + Distributed_db.get_peer_metadata pv.parameters.chain_db pv.peer_id + in + Peer_metadata.incr meta Valid_blocks ; return_unit let only_if_fitness_increases w distant_header cont = let pv = Worker.state w in let chain_state = Distributed_db.chain_state pv.parameters.chain_db in - Chain.head chain_state >>= fun local_header -> - if Fitness.compare + Chain.head chain_state + >>= fun local_header -> + if + Fitness.compare distant_header.Block_header.shell.fitness - (State.Block.fitness local_header) <= 0 then begin + (State.Block.fitness local_header) + <= 0 + then ( set_bootstrapped pv ; - debug w + debug + w "ignoring head %a with non increasing fitness from peer: %a." - Block_hash.pp_short (Block_header.hash distant_header) - P2p_peer.Id.pp_short pv.peer_id ; + Block_hash.pp_short + (Block_header.hash distant_header) + P2p_peer.Id.pp_short + pv.peer_id ; (* Don't download a branch that cannot beat the current head. 
*) - let meta = Distributed_db.get_peer_metadata pv.parameters.chain_db pv.peer_id in - Peer_metadata.incr meta Old_heads; - return_unit - end else cont () - -let assert_acceptable_head w hash (header: Block_header.t) = + let meta = + Distributed_db.get_peer_metadata pv.parameters.chain_db pv.peer_id + in + Peer_metadata.incr meta Old_heads ; + return_unit ) + else cont () + +let assert_acceptable_head w hash (header : Block_header.t) = let pv = Worker.state w in let chain_state = Distributed_db.chain_state pv.parameters.chain_db in - State.Chain.acceptable_block chain_state header >>= fun acceptable -> - fail_unless acceptable + State.Chain.acceptable_block chain_state header + >>= fun acceptable -> + fail_unless + acceptable (Validation_errors.Checkpoint_error (hash, Some pv.peer_id)) let may_validate_new_head w hash (header : Block_header.t) = let pv = Worker.state w in let chain_state = Distributed_db.chain_state pv.parameters.chain_db in - State.Block.known_valid chain_state hash >>= fun valid_block -> - State.Block.known_invalid chain_state hash >>= fun invalid_block -> - State.Block.known_valid chain_state - header.shell.predecessor >>= fun valid_predecessor -> - State.Block.known_invalid chain_state - header.shell.predecessor >>= fun invalid_predecessor -> - if valid_block then begin - debug w + State.Block.known_valid chain_state hash + >>= fun valid_block -> + State.Block.known_invalid chain_state hash + >>= fun invalid_block -> + State.Block.known_valid chain_state header.shell.predecessor + >>= fun valid_predecessor -> + State.Block.known_invalid chain_state header.shell.predecessor + >>= fun invalid_predecessor -> + if valid_block then ( + debug + w "ignoring previously validated block %a from peer %a" - Block_hash.pp_short hash - P2p_peer.Id.pp_short pv.peer_id ; + Block_hash.pp_short + hash + P2p_peer.Id.pp_short + pv.peer_id ; set_bootstrapped pv ; pv.last_validated_head <- header ; - return_unit - end else if invalid_block then begin - debug w + return_unit ) + else if invalid_block then ( + debug + w "ignoring known invalid block %a from peer %a" - Block_hash.pp_short hash - P2p_peer.Id.pp_short pv.peer_id ; - fail Validation_errors.Known_invalid - end else if invalid_predecessor then begin - debug w + Block_hash.pp_short + hash + P2p_peer.Id.pp_short + pv.peer_id ; + fail Validation_errors.Known_invalid ) + else if invalid_predecessor then ( + debug + w "ignoring known invalid block %a from peer %a" - Block_hash.pp_short hash - P2p_peer.Id.pp_short pv.peer_id ; - Distributed_db.commit_invalid_block pv.parameters.chain_db - hash header [Validation_errors.Known_invalid] >>=? fun _ -> - fail Validation_errors.Known_invalid - end else if not valid_predecessor then begin - debug w + Block_hash.pp_short + hash + P2p_peer.Id.pp_short + pv.peer_id ; + Distributed_db.commit_invalid_block + pv.parameters.chain_db + hash + header + [Validation_errors.Known_invalid] + >>=? fun _ -> fail Validation_errors.Known_invalid ) + else if not valid_predecessor then ( + debug + w "missing predecessor for new head %a from peer %a" - Block_hash.pp_short hash - P2p_peer.Id.pp_short pv.peer_id ; + Block_hash.pp_short + hash + P2p_peer.Id.pp_short + pv.peer_id ; Distributed_db.Request.current_branch - pv.parameters.chain_db ~peer:pv.peer_id () ; - return_unit - end else begin - only_if_fitness_increases w header @@ fun () -> - assert_acceptable_head w hash header >>=? 
fun () -> - validate_new_head w hash header - end + pv.parameters.chain_db + ~peer:pv.peer_id + () ; + return_unit ) + else + only_if_fitness_increases w header + @@ fun () -> + assert_acceptable_head w hash header + >>=? fun () -> validate_new_head w hash header let may_validate_new_branch w distant_hash locator = let pv = Worker.state w in - let distant_header, _ = (locator : Block_locator.t :> Block_header.t * _) in - only_if_fitness_increases w distant_header @@ fun () -> - assert_acceptable_head w - (Block_header.hash distant_header) distant_header >>=? fun () -> + let (distant_header, _) = + (locator : Block_locator.t :> Block_header.t * _) + in + only_if_fitness_increases w distant_header + @@ fun () -> + assert_acceptable_head w (Block_header.hash distant_header) distant_header + >>=? fun () -> let chain_state = Distributed_db.chain_state pv.parameters.chain_db in - State.Block.known_ancestor chain_state locator >>= function + State.Block.known_ancestor chain_state locator + >>= function | None -> - debug w + debug + w "ignoring branch %a without common ancestor from peer: %a." - Block_hash.pp_short distant_hash - P2p_peer.Id.pp_short pv.peer_id ; + Block_hash.pp_short + distant_hash + P2p_peer.Id.pp_short + pv.peer_id ; fail Validation_errors.Unknown_ancestor | Some unknown_prefix -> bootstrap_new_branch w distant_header unknown_prefix let on_no_request w = let pv = Worker.state w in - debug w "no new head from peer %a for %g seconds." - P2p_peer.Id.pp_short pv.peer_id + debug + w + "no new head from peer %a for %g seconds." + P2p_peer.Id.pp_short + pv.peer_id (Ptime.Span.to_float_s pv.parameters.limits.new_head_request_timeout) ; - Distributed_db.Request.current_head pv.parameters.chain_db ~peer:pv.peer_id () ; + Distributed_db.Request.current_head + pv.parameters.chain_db + ~peer:pv.peer_id + () ; return_unit let on_request (type a) w (req : a Request.t) : a tzresult Lwt.t = let pv = Worker.state w in match req with | Request.New_head (hash, header) -> - debug w + debug + w "processing new head %a from peer %a." - Block_hash.pp_short hash - P2p_peer.Id.pp_short pv.peer_id ; + Block_hash.pp_short + hash + P2p_peer.Id.pp_short + pv.peer_id ; may_validate_new_head w hash header | Request.New_branch (hash, locator, _seed) -> (* TODO penalize empty locator... ?? *) - debug w "processing new branch %a from peer %a." - Block_hash.pp_short hash - P2p_peer.Id.pp_short pv.peer_id ; + debug + w + "processing new branch %a from peer %a." + Block_hash.pp_short + hash + P2p_peer.Id.pp_short + pv.peer_id ; may_validate_new_branch w hash locator let on_completion w r _ st = - Worker.record_event w (Event.Request (Request.view r, st, None )) ; + Worker.record_event w (Event.Request (Request.view r, st, None)) ; Lwt.return_unit let on_error w r st errs = let pv = Worker.state w in match errs with - ((( Validation_errors.Unknown_ancestor - | Validation_errors.Invalid_locator _ - | Block_validator_errors.Invalid_block _ ) :: _) as errors ) -> - Distributed_db.greylist pv.parameters.chain_db pv.peer_id >>= fun () -> - debug w + | ( Validation_errors.Unknown_ancestor + | Validation_errors.Invalid_locator _ + | Block_validator_errors.Invalid_block _ ) + :: _ as errors -> + Distributed_db.greylist pv.parameters.chain_db pv.peer_id + >>= fun () -> + debug + w "Terminating the validation worker for peer %a (kickban)." 
- P2p_peer.Id.pp_short pv.peer_id ; + P2p_peer.Id.pp_short + pv.peer_id ; debug w "%a" Error_monad.pp_print_error errors ; Worker.trigger_shutdown w ; Worker.record_event w (Event.Request (r, st, Some errs)) ; Lwt.return_error errs - | [Block_validator_errors.System_error _ ] as errs -> + | [Block_validator_errors.System_error _] as errs -> Worker.record_event w (Event.Request (r, st, Some errs)) ; return_unit - | [Block_validator_errors.Unavailable_protocol { protocol ; _ } ] -> begin + | [Block_validator_errors.Unavailable_protocol {protocol; _}] -> ( Block_validator.fetch_and_compile_protocol pv.parameters.block_validator ~peer:pv.peer_id ~timeout:pv.parameters.limits.protocol_timeout - protocol >>= function + protocol + >>= function | Ok _ -> Distributed_db.Request.current_head - pv.parameters.chain_db ~peer:pv.peer_id () ; + pv.parameters.chain_db + ~peer:pv.peer_id + () ; return_unit | Error _ -> (* TODO: punish *) - debug w - "Terminating the validation worker for peer %a \ - (missing protocol %a)." - P2p_peer.Id.pp_short pv.peer_id - Protocol_hash.pp_short protocol ; + debug + w + "Terminating the validation worker for peer %a (missing protocol \ + %a)." + P2p_peer.Id.pp_short + pv.peer_id + Protocol_hash.pp_short + protocol ; Worker.record_event w (Event.Request (r, st, Some errs)) ; - Lwt.return_error errs - end - | [ Validation_errors.Too_short_locator _ ] -> - debug w + Lwt.return_error errs ) + | [Validation_errors.Too_short_locator _] -> + debug + w "Terminating the validation worker for peer %a (kick)." - P2p_peer.Id.pp_short pv.peer_id ; + P2p_peer.Id.pp_short + pv.peer_id ; Worker.trigger_shutdown w ; Worker.record_event w (Event.Request (r, st, Some errs)) ; return_unit @@ -344,25 +439,27 @@ let on_error w r st errs = let on_close w = let pv = Worker.state w in - Distributed_db.disconnect pv.parameters.chain_db pv.peer_id >>= fun () -> + Distributed_db.disconnect pv.parameters.chain_db pv.peer_id + >>= fun () -> pv.parameters.notify_termination () ; Lwt.return_unit let on_launch _ name parameters = let chain_state = Distributed_db.chain_state parameters.chain_db in - State.Block.read_opt chain_state - (State.Chain.genesis chain_state).block >|= Option.unopt_assert ~loc:__POS__ >>= fun genesis -> - let rec pv = { - peer_id = snd name ; - parameters = { parameters with notify_new_block } ; - bootstrapped = false ; - pipeline = None ; - last_validated_head = State.Block.header genesis ; - last_advertised_head = State.Block.header genesis ; - } + State.Block.read_opt chain_state (State.Chain.genesis chain_state).block + >|= Option.unopt_assert ~loc:__POS__ + >>= fun genesis -> + let rec pv = + { peer_id = snd name; + parameters = {parameters with notify_new_block}; + bootstrapped = false; + pipeline = None; + last_validated_head = State.Block.header genesis; + last_advertised_head = State.Block.header genesis } and notify_new_block block = pv.last_validated_head <- State.Block.header block ; - parameters.notify_new_block block in + parameters.notify_new_block block + in return pv let table = @@ -370,10 +467,10 @@ let table = let pv = Worker.state w in match neu with | Request.New_branch (_, locator, _) -> - let header, _ = (locator : Block_locator.t :> _ * _) in + let (header, _) = (locator : Block_locator.t :> _ * _) in pv.last_advertised_head <- header ; Some (Worker.Any_request neu) - | Request.New_head (_, header) -> + | Request.New_head (_, header) -> ( pv.last_advertised_head <- header ; (* TODO penalize decreasing fitness *) match old with @@ -382,52 +479,60 @@ let 
table = | Some (Worker.Any_request (Request.New_head _)) -> Some (Any_request neu) | None -> - Some (Any_request neu) in - Worker.create_table (Dropbox { merge }) + Some (Any_request neu) ) + in + Worker.create_table (Dropbox {merge}) -let create - ?(notify_new_block = fun _ -> ()) - ?(notify_bootstrapped = fun () -> ()) - ?(notify_termination = fun _ -> ()) +let create ?(notify_new_block = fun _ -> ()) + ?(notify_bootstrapped = fun () -> ()) ?(notify_termination = fun _ -> ()) limits block_validator chain_db peer_id = let name = (State.Chain.id (Distributed_db.chain_state chain_db), peer_id) in - let parameters = { - chain_db ; - notify_termination ; - block_validator ; - notify_new_block ; - notify_bootstrapped ; - limits ; - } in + let parameters = + { chain_db; + notify_termination; + block_validator; + notify_new_block; + notify_bootstrapped; + limits } + in let module Handlers = struct type self = t + let on_launch = on_launch + let on_request = on_request + let on_close = on_close + let on_error = on_error + let on_completion = on_completion + let on_no_request = on_no_request end in - Worker.launch table ~timeout: limits.new_head_request_timeout limits.worker_limits - name parameters + Worker.launch + table + ~timeout:limits.new_head_request_timeout + limits.worker_limits + name + parameters (module Handlers) let notify_branch w locator = - let header, _ = (locator : Block_locator.t :> _ * _) in + let (header, _) = (locator : Block_locator.t :> _ * _) in let hash = Block_header.hash header in let pv = Worker.state w in let sender_id = Distributed_db.my_peer_id pv.parameters.chain_db in (* sender and receiver are inverted here because they are from the point of view of the node sending the locator *) - let seed = {Block_locator.sender_id=pv.peer_id; receiver_id=sender_id } in + let seed = {Block_locator.sender_id = pv.peer_id; receiver_id = sender_id} in Worker.Dropbox.put_request w (New_branch (hash, locator, seed)) let notify_head w header = let hash = Block_header.hash header in Worker.Dropbox.put_request w (New_head (hash, header)) -let shutdown w = - Worker.shutdown w +let shutdown w = Worker.shutdown w let peer_id w = let pv = Worker.state w in @@ -442,6 +547,7 @@ let current_head w = pv.last_validated_head let status = Worker.status + let information = Worker.information let running_workers () = Worker.list table diff --git a/src/lib_shell/peer_validator.mli b/src/lib_shell/peer_validator.mli index 540566e93d18a75ca006276fd69b571f04d4780e..46cb950361feef5a6cbedb9ec95986640e1fffc8 100644 --- a/src/lib_shell/peer_validator.mli +++ b/src/lib_shell/peer_validator.mli @@ -27,34 +27,48 @@ type t type limits = { - new_head_request_timeout: Time.System.Span.t ; - block_header_timeout: Time.System.Span.t ; - block_operations_timeout: Time.System.Span.t ; - protocol_timeout: Time.System.Span.t ; - worker_limits: Worker_types.limits + new_head_request_timeout : Time.System.Span.t; + block_header_timeout : Time.System.Span.t; + block_operations_timeout : Time.System.Span.t; + protocol_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } -val peer_id: t -> P2p_peer.Id.t -val bootstrapped: t -> bool -val current_head: t -> Block_header.t +val peer_id : t -> P2p_peer.Id.t -val create: - ?notify_new_block: (State.Block.t -> unit) -> - ?notify_bootstrapped: (unit -> unit) -> - ?notify_termination: (unit -> unit) -> +val bootstrapped : t -> bool + +val current_head : t -> Block_header.t + +val create : + ?notify_new_block:(State.Block.t -> unit) -> + ?notify_bootstrapped:(unit 
-> unit) -> + ?notify_termination:(unit -> unit) -> limits -> Block_validator.t -> - Distributed_db.chain_db -> P2p_peer.Id.t -> t tzresult Lwt.t -val shutdown: t -> unit Lwt.t + Distributed_db.chain_db -> + P2p_peer.Id.t -> + t tzresult Lwt.t + +val shutdown : t -> unit Lwt.t + +val notify_branch : t -> Block_locator.t -> unit + +val notify_head : t -> Block_header.t -> unit + +val running_workers : unit -> ((Chain_id.t * P2p_peer.Id.t) * t) list + +val status : t -> Worker_types.worker_status -val notify_branch: t -> Block_locator.t -> unit -val notify_head: t -> Block_header.t -> unit +val information : t -> Worker_types.worker_information -val running_workers: unit -> ((Chain_id.t * P2p_peer.Id.t) * t) list -val status: t -> Worker_types.worker_status -val information: t -> Worker_types.worker_information +val current_request : + t -> + (Time.System.t * Time.System.t * Peer_validator_worker_state.Request.view) + option -val current_request : t -> (Time.System.t * Time.System.t * Peer_validator_worker_state.Request.view) option -val last_events : t -> (Internal_event.level * Peer_validator_worker_state.Event.t list) list +val last_events : + t -> (Internal_event.level * Peer_validator_worker_state.Event.t list) list -val pipeline_length : t -> Peer_validator_worker_state.Worker_state.pipeline_length +val pipeline_length : + t -> Peer_validator_worker_state.Worker_state.pipeline_length diff --git a/src/lib_shell/pipeline.ml b/src/lib_shell/pipeline.ml index ee630f90d5cf2ec4af89f7a470882cdf30615965..75c9146f4c7f34e5859d2da9cd7c7f9abb2a1c23 100644 --- a/src/lib_shell/pipeline.ml +++ b/src/lib_shell/pipeline.ml @@ -25,202 +25,205 @@ open Lwt.Infix -type limiter = { - mutable current : int ; - max : int ; -} +type limiter = {mutable current : int; max : int} + let with_limiter limiter f v = - if limiter.current <= limiter.max then begin + if limiter.current <= limiter.max then ( limiter.current <- limiter.current + 1 ; let p = f v in Lwt.on_termination p (fun () -> limiter.current <- limiter.current - 1) ; - Some p - end else - None -let limiter = function - | None -> { current = 0 ; max = max_int ; } - | Some max -> { current = 0 ; max ; } + Some p ) + else None +let limiter = function + | None -> + {current = 0; max = max_int} + | Some max -> + {current = 0; max} type ('a, 'b) step = | Async_p of ('a -> 'b Lwt.t) | Async_s of ('a -> 'b Lwt.t) | Sync of ('a -> 'b) + let sync f = Sync f + let async_p f = Async_p f + let async_s f = Async_s f let all_ok = Sync (fun v -> Ok v) + let map_in_err m = function | Sync f -> - Sync (function - | Ok _ as ok -> f ok - | Error e -> f (Error (m e))) + Sync (function Ok _ as ok -> f ok | Error e -> f (Error (m e))) | Async_s f -> - Async_s (function - | Ok _ as ok -> f ok - | Error e -> f (Error (m e))) + Async_s (function Ok _ as ok -> f ok | Error e -> f (Error (m e))) | Async_p f -> - Async_p (function - | Ok _ as ok -> f ok - | Error e -> f (Error (m e))) + Async_p (function Ok _ as ok -> f ok | Error e -> f (Error (m e))) + let map_out_err m = function | Sync f -> - Sync (fun x -> match f x with - | Ok _ as ok -> ok - | Error e -> Error (m e)) + Sync + (fun x -> match f x with Ok _ as ok -> ok | Error e -> Error (m e)) | Async_s f -> - Async_s (fun x -> f x >>= function - | Ok _ as ok -> Lwt.return ok - | Error e -> Lwt.return_error (m e)) + Async_s + (fun x -> + f x + >>= function + | Ok _ as ok -> Lwt.return ok | Error e -> Lwt.return_error (m e)) | Async_p f -> - Async_p (fun x -> f x >>= function - | Ok _ as ok -> Lwt.return ok - | Error e -> 
Lwt.return_error (m e)) + Async_p + (fun x -> + f x + >>= function + | Ok _ as ok -> Lwt.return ok | Error e -> Lwt.return_error (m e)) + let with_err = function - | Async_s f -> Async_s (function | Ok v -> f v | Error e -> Lwt.return_error e) - | Async_p f -> Async_p (function | Ok v -> f v | Error e -> Lwt.return_error e) - | Sync f -> Sync (function | Ok v -> f v | Error e -> Error e) + | Async_s f -> + Async_s (function Ok v -> f v | Error e -> Lwt.return_error e) + | Async_p f -> + Async_p (function Ok v -> f v | Error e -> Lwt.return_error e) + | Sync f -> + Sync (function Ok v -> f v | Error e -> Error e) -let recover f = - Sync (function | Ok v -> v | Error e -> f e) +let recover f = Sync (function Ok v -> v | Error e -> f e) let with_key = function | Async_s f -> Async_s (fun (key, a) -> f a >>= fun b -> Lwt.return (key, b)) | Async_p f -> Async_p (fun (key, a) -> f a >>= fun b -> Lwt.return (key, b)) - | Sync f -> Sync (fun (key, a) -> let b = f a in (key, b)) + | Sync f -> + Sync + (fun (key, a) -> + let b = f a in + (key, b)) + let init_key = Sync (fun x -> (x, x)) type ('i, 'o) pipe = | Nil : ('x, 'x) pipe | Cons : ('a, 'input) step * ('input, 'output) pipe -> ('a, 'output) pipe + let nil : ('x, 'x) pipe = Nil -let cons - : ('a, 'b) step -> ('b, 'c) pipe -> ('a, 'c) pipe - = fun step pipe -> Cons (step, pipe) +let cons : ('a, 'b) step -> ('b, 'c) pipe -> ('a, 'c) pipe = + fun step pipe -> Cons (step, pipe) (* Instantiated values: values with buffers attached *) type ('i, 'o) istep = - | ISync of { f : ('i, exn) result -> ('o, exn) result } - | IAsync_s of { - qin : ('i, exn) result Queue.t ; - f : 'i -> ('o, exn) result Lwt.t ; - vout : ('o, exn) result Lwt.t option ref ; - } - | IAsync_p of { - qin : ('i, exn) result Queue.t ; - f : 'i -> ('o, exn) result Lwt.t ; - qout : ('o, exn) result Lwt.t Queue.t ; - } + | ISync of {f : ('i, exn) result -> ('o, exn) result} + | IAsync_s of + { qin : ('i, exn) result Queue.t; + f : 'i -> ('o, exn) result Lwt.t; + vout : ('o, exn) result Lwt.t option ref } + | IAsync_p of + { qin : ('i, exn) result Queue.t; + f : 'i -> ('o, exn) result Lwt.t; + qout : ('o, exn) result Lwt.t Queue.t } type (_, _) ipipe = - | ICons : { step : ('a, 'b) istep ; pipe : ('b, 'c) ipipe } -> ('a, 'c) ipipe - | IEnd : { q : ('i, exn) result Queue.t } -> ('i, 'i) ipipe - -let sync_wrap f = - function - | Ok v -> (try Ok (f v) with exc -> Error exc) - | Error _ as e -> e -let wrap f = - fun v -> - Lwt.catch - (fun () -> f v >>= Lwt.return_ok) - Lwt.return_error - -let rec instantiate_pipe - : type i o. (i, o) pipe -> (i, o) ipipe - = function - | Nil -> IEnd { q = Queue.create () } - | Cons (Sync step, pipe) -> - let step = ISync { f = sync_wrap step } in - let pipe = instantiate_pipe pipe in - ICons { step ; pipe } - | Cons (Async_s step, pipe) -> - let step = IAsync_s { qin = Queue.create () ; f = wrap step ; vout = ref None } in - let pipe = instantiate_pipe pipe in - ICons { step ; pipe } - | Cons (Async_p step, pipe) -> - let step = IAsync_p { qin = Queue.create () ; f = wrap step ; qout = Queue.create () } in - let pipe = instantiate_pipe pipe in - ICons { step ; pipe } - -let cancel_istep - : ('i, 'o) istep -> unit - = function - | ISync _ -> () - | IAsync_s { vout = { contents = None } ; _ } -> () - | IAsync_s { vout = { contents = Some p } ; _ } -> Lwt.cancel p - | IAsync_p { qout ; _ } -> Queue.iter Lwt.cancel qout - -let rec cancel_ipipe - : type i o . 
(i, o) ipipe -> unit - = function - | ICons { step ; pipe } -> - cancel_istep step ; - cancel_ipipe pipe - | IEnd _ -> () - - - -let wait_for p = (p >>= fun _ -> Lwt.return_unit) + | ICons : {step : ('a, 'b) istep; pipe : ('b, 'c) ipipe} -> ('a, 'c) ipipe + | IEnd : {q : ('i, exn) result Queue.t} -> ('i, 'i) ipipe + +let sync_wrap f = function + | Ok v -> ( + try Ok (f v) with exc -> Error exc ) + | Error _ as e -> + e + +let wrap f v = Lwt.catch (fun () -> f v >>= Lwt.return_ok) Lwt.return_error + +let rec instantiate_pipe : type i o. (i, o) pipe -> (i, o) ipipe = function + | Nil -> + IEnd {q = Queue.create ()} + | Cons (Sync step, pipe) -> + let step = ISync {f = sync_wrap step} in + let pipe = instantiate_pipe pipe in + ICons {step; pipe} + | Cons (Async_s step, pipe) -> + let step = + IAsync_s {qin = Queue.create (); f = wrap step; vout = ref None} + in + let pipe = instantiate_pipe pipe in + ICons {step; pipe} + | Cons (Async_p step, pipe) -> + let step = + IAsync_p {qin = Queue.create (); f = wrap step; qout = Queue.create ()} + in + let pipe = instantiate_pipe pipe in + ICons {step; pipe} + +let cancel_istep : ('i, 'o) istep -> unit = function + | ISync _ -> + () + | IAsync_s {vout = {contents = None}; _} -> + () + | IAsync_s {vout = {contents = Some p}; _} -> + Lwt.cancel p + | IAsync_p {qout; _} -> + Queue.iter Lwt.cancel qout + +let rec cancel_ipipe : type i o. (i, o) ipipe -> unit = function + | ICons {step; pipe} -> + cancel_istep step ; cancel_ipipe pipe + | IEnd _ -> + () + +let wait_for p = p >>= fun _ -> Lwt.return_unit let rec progress_async_s limiter waiters qin f vout data_out = match !vout with - | None -> begin - if Queue.is_empty qin then - (data_out, waiters) + | None -> ( + if Queue.is_empty qin then (data_out, waiters) else match Queue.peek qin with | Error _ as e -> progress_async_s limiter waiters qin f vout (e :: data_out) - | Ok v -> - match with_limiter limiter f v with - | Some p -> - ignore (Queue.pop qin) ; - vout := Some p ; - (data_out, wait_for p :: waiters) - | None -> - (data_out, waiters) - end - | Some p -> begin - match Lwt.state p with - | Lwt.Sleep -> - (data_out, wait_for p :: waiters) - | Lwt.Return v -> - vout := None ; - progress_async_s limiter waiters qin f vout (v :: data_out) - | Lwt.Fail exc -> - vout := None ; - progress_async_s limiter waiters qin f vout (Error exc :: data_out) - end + | Ok v -> ( + match with_limiter limiter f v with + | Some p -> + ignore (Queue.pop qin) ; + vout := Some p ; + (data_out, wait_for p :: waiters) + | None -> + (data_out, waiters) ) ) + | Some p -> ( + match Lwt.state p with + | Lwt.Sleep -> + (data_out, wait_for p :: waiters) + | Lwt.Return v -> + vout := None ; + progress_async_s limiter waiters qin f vout (v :: data_out) + | Lwt.Fail exc -> + vout := None ; + progress_async_s limiter waiters qin f vout (Error exc :: data_out) ) let rec make_promises limiter qin f qout = - if Queue.is_empty qin then - () + if Queue.is_empty qin then () else match Queue.peek qin with | Error _ as e -> ignore (Queue.pop qin) ; Queue.push (Lwt.return e) qout ; make_promises limiter qin f qout - | Ok v -> - match with_limiter limiter f v with - | Some p -> - ignore (Queue.pop qin) ; - Queue.push p qout ; - make_promises limiter qin f qout - | None -> - () + | Ok v -> ( + match with_limiter limiter f v with + | Some p -> + ignore (Queue.pop qin) ; + Queue.push p qout ; + make_promises limiter qin f qout + | None -> + () ) + let rec get_resolved_top waiters qout resolved = - if Queue.is_empty qout then - (List.rev resolved, 
waiters) + if Queue.is_empty qout then (List.rev resolved, waiters) else let p = Queue.peek qout in match Lwt.state p with - | Lwt.Sleep -> (List.rev resolved, wait_for p :: waiters) + | Lwt.Sleep -> + (List.rev resolved, wait_for p :: waiters) | Lwt.Return v -> ignore (Queue.pop qout) ; get_resolved_top waiters qout (v :: resolved) @@ -232,83 +235,84 @@ let progress_async_p limiter waiters qin f qout = make_promises limiter qin f qout ; get_resolved_top waiters qout [] +let rec progress : + type i o. + limiter -> + (i, exn) result list -> + unit Lwt.t list -> + (i, o) ipipe -> + unit Lwt.t option = + fun limiter data waiters pipe -> + match pipe with + | ICons {step = ISync {f}; pipe} -> + progress limiter (List.map f data) waiters pipe + | ICons {step = IAsync_s {qin; f; vout}; pipe} -> + List.iter (fun v -> Queue.push v qin) data ; + let (data_out, waiters) = + progress_async_s limiter waiters qin f vout [] + in + progress limiter data_out waiters pipe + | ICons {step = IAsync_p {qin; f; qout}; pipe} -> + List.iter (fun v -> Queue.push v qin) data ; + let (data_out, waiters) = progress_async_p limiter waiters qin f qout in + progress limiter data_out waiters pipe + | IEnd {q} -> ( + List.iter (fun v -> Queue.push v q) data ; + match waiters with [] -> None | _ :: _ -> Some (Lwt.join waiters) ) -let rec progress - : type i o . limiter -> (i, exn) result list -> unit Lwt.t list -> (i, o) ipipe -> unit Lwt.t option - = fun limiter data waiters pipe -> - match pipe with - | ICons { step = ISync { f } ; pipe } -> - progress limiter (List.map f data) waiters pipe - | ICons { step = IAsync_s { qin ; f ; vout } ; pipe } -> - List.iter (fun v -> Queue.push v qin) data ; - let (data_out, waiters) = progress_async_s limiter waiters qin f vout [] in - progress limiter data_out waiters pipe - | ICons { step = IAsync_p { qin ; f ; qout } ; pipe } -> - List.iter (fun v -> Queue.push v qin) data ; - let (data_out, waiters) = progress_async_p limiter waiters qin f qout in - progress limiter data_out waiters pipe - | IEnd { q } -> - List.iter (fun v -> Queue.push v q) data ; - match waiters with - | [] -> None - | _ :: _ -> Some (Lwt.join waiters) - - -let rec get_result - : type i o . (i, o) ipipe -> (o, exn) result list - = function - | ICons { step = ISync _ ; pipe } -> - get_result pipe - | ICons { step = IAsync_s { vout ; _ } ; pipe } -> - assert (!vout = None) ; - get_result pipe - | ICons { step = IAsync_p { qout ; _ } ; pipe } -> - assert (Queue.is_empty qout) ; - get_result pipe - | IEnd { q } -> - let rec mk_list acc = - if Queue.is_empty q then - List.rev acc - else - mk_list (Queue.pop q :: acc) - in - mk_list [] - -let instantiate - : ('input, 'output) pipe -> ('input, 'output) ipipe - = fun pipe -> - instantiate_pipe pipe - -let run - : limiter -> ('input, 'output) ipipe -> 'input list -> ('output, exn) result list Lwt.t - = fun limiter ipipe input -> - let rec loop pipe = - match progress limiter [] [] pipe with - | Some wait -> - wait >>= fun () -> - loop pipe - | None -> - Lwt.return (get_result ipipe) - in - match progress limiter (List.map (fun v -> Ok v) input) [] ipipe with +let rec get_result : type i o. 
(i, o) ipipe -> (o, exn) result list = function + | ICons {step = ISync _; pipe} -> + get_result pipe + | ICons {step = IAsync_s {vout; _}; pipe} -> + assert (!vout = None) ; + get_result pipe + | ICons {step = IAsync_p {qout; _}; pipe} -> + assert (Queue.is_empty qout) ; + get_result pipe + | IEnd {q} -> + let rec mk_list acc = + if Queue.is_empty q then List.rev acc else mk_list (Queue.pop q :: acc) + in + mk_list [] + +let instantiate : ('input, 'output) pipe -> ('input, 'output) ipipe = + fun pipe -> instantiate_pipe pipe + +let run : + limiter -> + ('input, 'output) ipipe -> + 'input list -> + ('output, exn) result list Lwt.t = + fun limiter ipipe input -> + let rec loop pipe = + match progress limiter [] [] pipe with + | Some wait -> + wait >>= fun () -> loop pipe | None -> Lwt.return (get_result ipipe) - | Some wait -> - wait >>= fun () -> - loop ipipe + in + match progress limiter (List.map (fun v -> Ok v) input) [] ipipe with + | None -> + Lwt.return (get_result ipipe) + | Some wait -> + wait >>= fun () -> loop ipipe let rec separate vs errs excs = function - | [] -> (List.rev vs, List.rev errs, List.rev excs) - | (Ok (Ok v)) :: res -> separate (v :: vs) errs excs res - | (Ok (Error err)) :: res -> separate vs (err :: errs) excs res - | (Error exc) :: res -> separate vs errs (exc :: excs) res + | [] -> + (List.rev vs, List.rev errs, List.rev excs) + | Ok (Ok v) :: res -> + separate (v :: vs) errs excs res + | Ok (Error err) :: res -> + separate vs (err :: errs) excs res + | Error exc :: res -> + separate vs errs (exc :: excs) res + let partition_by_error res = separate [] [] [] res let index_by_key vs empty add = List.fold_left - (fun (m, excs) -> function - | Ok (k, v) -> (add k v m, excs) - | Error exc -> (m, exc :: excs)) + (fun (m, excs) -> function Ok (k, v) -> (add k v m, excs) | Error exc -> + (m, exc :: excs)) (empty, []) vs diff --git a/src/lib_shell/pipeline.mli b/src/lib_shell/pipeline.mli index 727c4c1ab42a3e0568a0627c5dd54647ee1c0164..e0dad61ada3b30b8b47fe144b91cfcaa02049d5f 100644 --- a/src/lib_shell/pipeline.mli +++ b/src/lib_shell/pipeline.mli @@ -29,43 +29,45 @@ (** Steps are the building blocks of pipeline. A step is essentially a function from a given type to another. *) type ('a, 'b) step -val sync: ('a -> 'b) -> ('a, 'b) step -val async_s: ('a -> 'b Lwt.t) -> ('a, 'b) step -val async_p: ('a -> 'b Lwt.t) -> ('a, 'b) step + +val sync : ('a -> 'b) -> ('a, 'b) step + +val async_s : ('a -> 'b Lwt.t) -> ('a, 'b) step + +val async_p : ('a -> 'b Lwt.t) -> ('a, 'b) step (** Error management *) -val all_ok: - ('a, ('a, 'b) result) step -val map_in_err: +val all_ok : ('a, ('a, 'b) result) step + +val map_in_err : ('erra -> 'errb) -> (('a, 'errb) result, 'b) step -> (('a, 'erra) result, 'b) step -val map_out_err: + +val map_out_err : ('erra -> 'errb) -> ('a, ('b, 'erra) result) step -> ('a, ('b, 'errb) result) step -val with_err: - ('a, ('b, 'err) result) step -> - (('a, 'err) result, ('b, 'err) result) step -val recover: - ('err -> 'a) -> - (('a, 'err) result, 'a) step +val with_err : + ('a, ('b, 'err) result) step -> (('a, 'err) result, ('b, 'err) result) step + +val recover : ('err -> 'a) -> (('a, 'err) result, 'a) step (** Carrying ID through a pipeline *) -val with_key: - ('a, 'b) step -> - (('key * 'a), ('key * 'b)) step -val init_key: ('a, ('a * 'a)) step +val with_key : ('a, 'b) step -> ('key * 'a, 'key * 'b) step +val init_key : ('a, 'a * 'a) step (** Pipelines are essentially lists of steps. 
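(* Editor's illustrative sketch, not part of the patch: assembling a pipe with
   the combinators declared in this interface and post-processing its output.
   The concrete steps ([parse] and [double]) are invented for the example, and
   [Pipeline] is assumed to be the module this interface describes. *)
let _pipeline_example () =
  let open Pipeline in
  (* string -> (int, string) result: failures are ordinary values *)
  let parse =
    sync (fun s ->
        match int_of_string_opt s with
        | Some i -> Ok i
        | None -> Error ("not an int: " ^ s))
  in
  (* int -> (int, string) result, run asynchronously; [with_err] threads the
     in-band errors produced by [parse] around it *)
  let double = async_s (fun i -> Lwt.return (Ok (i * 2))) in
  (* recommended shape: [cons f @@ cons g @@ nil] *)
  let pipe = cons parse @@ cons (with_err double) @@ nil in
  run ~pool:8 pipe ["1"; "2"; "three"]
  >|= fun results ->
  (* successful values, in-band errors (strings here) and raised exceptions *)
  partition_by_error results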
*) + (* Recommended use: [cons f @@ cons g @@ nil] *) type ('i, 'o) pipe -val nil: ('x, 'x) pipe -val cons: ('a, 'b) step -> ('b, 'c) pipe -> ('a, 'c) pipe +val nil : ('x, 'x) pipe + +val cons : ('a, 'b) step -> ('b, 'c) pipe -> ('a, 'c) pipe (** Core funcitonality: [run ?pool pipe input] runs all the elements of [input] through the steps of @@ -80,15 +82,15 @@ val cons: ('a, 'b) step -> ('b, 'c) pipe -> ('a, 'c) pipe high-level promise of [s] for [x] will be created before the high-level promise of [s] for [y]. *) -val run: ?pool:int -> ('i, 'o) pipe -> 'i list -> ('o, exn) result list Lwt.t +val run : ?pool:int -> ('i, 'o) pipe -> 'i list -> ('o, exn) result list Lwt.t (** Post-processing: useful to deal with pipeline built around error management or id marking combinators. *) -val partition_by_error: - (('o, 'err) result, exn) result list -> - ('o list * 'err list * exn list) -val index_by_key: - (('key * 'o), exn) result list -> +val partition_by_error : + (('o, 'err) result, exn) result list -> 'o list * 'err list * exn list + +val index_by_key : + ('key * 'o, exn) result list -> 'index -> ('key -> 'o -> 'index -> 'index) -> 'index * exn list diff --git a/src/lib_shell/prevalidation.ml b/src/lib_shell/prevalidation.ml index 2b3977a79026e32c3ac07e31163d1b48f160d443..902cad94dbdc73a2819fded4e478a4f0d4fd0a32 100644 --- a/src/lib_shell/prevalidation.ml +++ b/src/lib_shell/prevalidation.ml @@ -26,28 +26,29 @@ open Validation_errors module type T = sig - - module Proto: Registered_protocol.T + module Proto : Registered_protocol.T type t type operation = private { - hash: Operation_hash.t ; - raw: Operation.t ; - protocol_data: Proto.operation_data ; + hash : Operation_hash.t; + raw : Operation.t; + protocol_data : Proto.operation_data } - val compare: operation -> operation -> int - val parse: Operation.t -> operation tzresult + val compare : operation -> operation -> int + + val parse : Operation.t -> operation tzresult (** Creates a new prevalidation context w.r.t. the protocol associate to the predecessor block . 
When ?protocol_data is passed to this function, it will be used to create the new block *) val create : - ?protocol_data: MBytes.t -> - predecessor: State.Block.t -> - timestamp: Time.Protocol.t -> - unit -> t tzresult Lwt.t + ?protocol_data:MBytes.t -> + predecessor:State.Block.t -> + timestamp:Time.Protocol.t -> + unit -> + t tzresult Lwt.t type result = | Applied of t * Proto.operation_receipt @@ -57,36 +58,35 @@ module type T = sig | Duplicate | Outdated - val apply_operation: t -> operation -> result Lwt.t + val apply_operation : t -> operation -> result Lwt.t type status = { - applied_operations : (operation * Proto.operation_receipt) list ; - block_result : Tezos_protocol_environment_shell.validation_result ; - block_metadata : Proto.block_header_metadata ; + applied_operations : (operation * Proto.operation_receipt) list; + block_result : Tezos_protocol_environment_shell.validation_result; + block_metadata : Proto.block_header_metadata } - val status: t -> status tzresult Lwt.t + val status : t -> status tzresult Lwt.t - val pp_result: Format.formatter -> result -> unit + val pp_result : Format.formatter -> result -> unit end -module Make(Proto : Registered_protocol.T) : T with module Proto = Proto = struct - +module Make (Proto : Registered_protocol.T) : T with module Proto = Proto = +struct module Proto = Proto type operation = { - hash: Operation_hash.t ; - raw: Operation.t ; - protocol_data: Proto.operation_data ; + hash : Operation_hash.t; + raw : Operation.t; + protocol_data : Proto.operation_data } - - type t = - { state : Proto.validation_state ; - applied : (operation * Proto.operation_receipt) list ; - live_blocks : Block_hash.Set.t ; - live_operations : Operation_hash.Set.t ; - } + type t = { + state : Proto.validation_state; + applied : (operation * Proto.operation_receipt) list; + live_blocks : Block_hash.Set.t; + live_operations : Operation_hash.Set.t + } type result = | Applied of t * Proto.operation_receipt @@ -100,222 +100,242 @@ module Make(Proto : Registered_protocol.T) : T with module Proto = Proto = struc let hash = Operation.hash raw in let size = Data_encoding.Binary.length Operation.encoding raw in if size > Proto.max_operation_data_length then - error - (Oversized_operation - { size ; max = Proto.max_operation_data_length }) + error (Oversized_operation {size; max = Proto.max_operation_data_length}) else - match Data_encoding.Binary.of_bytes - Proto.operation_data_encoding - raw.Operation.proto with - | None -> error Parse_error + match + Data_encoding.Binary.of_bytes + Proto.operation_data_encoding + raw.Operation.proto + with + | None -> + error Parse_error | Some protocol_data -> - ok { hash ; raw ; protocol_data } + ok {hash; raw; protocol_data} let compare op1 op2 = Proto.compare_operations - { shell = op1.raw.shell ; protocol_data = op1.protocol_data } - { shell = op2.raw.shell ; protocol_data = op2.protocol_data } + {shell = op1.raw.shell; protocol_data = op1.protocol_data} + {shell = op2.raw.shell; protocol_data = op2.protocol_data} let create ?protocol_data ~predecessor ~timestamp () = (* The prevalidation module receives input from the system byt handles protocol values. It translates timestamps here. 
*) let { Block_header.shell = - { fitness = predecessor_fitness ; - timestamp = predecessor_timestamp ; - level = predecessor_level ; _ } ; _ } = - State.Block.header predecessor in - State.Block.context predecessor >>= fun predecessor_context -> + { fitness = predecessor_fitness; + timestamp = predecessor_timestamp; + level = predecessor_level; + _ }; + _ } = + State.Block.header predecessor + in + State.Block.context predecessor + >>= fun predecessor_context -> let predecessor_header = State.Block.header predecessor in let predecessor_hash = State.Block.hash predecessor in - State.Block.max_operations_ttl predecessor >>=? fun max_op_ttl -> - Chain_traversal.live_blocks - predecessor - max_op_ttl + State.Block.max_operations_ttl predecessor + >>=? fun max_op_ttl -> + Chain_traversal.live_blocks predecessor max_op_ttl >>=? fun (live_blocks, live_operations) -> Block_validation.update_testchain_status - predecessor_context predecessor_header - timestamp >>=? fun predecessor_context -> - begin - match protocol_data with - | None -> return_none + predecessor_context + predecessor_header + timestamp + >>=? fun predecessor_context -> + ( match protocol_data with + | None -> + return_none + | Some protocol_data -> ( + match + Data_encoding.Binary.of_bytes + Proto.block_header_data_encoding + protocol_data + with + | None -> + failwith "Invalid block header" | Some protocol_data -> - match - Data_encoding.Binary.of_bytes - Proto.block_header_data_encoding - protocol_data - with - | None -> failwith "Invalid block header" - | Some protocol_data -> return_some protocol_data - end >>=? fun protocol_data -> + return_some protocol_data ) ) + >>=? fun protocol_data -> Proto.begin_construction - ~chain_id: (State.Block.chain_id predecessor) + ~chain_id:(State.Block.chain_id predecessor) ~predecessor_context ~predecessor_timestamp ~predecessor_fitness ~predecessor_level - ~predecessor: predecessor_hash + ~predecessor:predecessor_hash ~timestamp ?protocol_data () >>=? 
fun state -> (* FIXME arbitrary value, to be customisable *) - return { - state ; - applied = [] ; - live_blocks ; - live_operations ; - } + return {state; applied = []; live_blocks; live_operations} let apply_operation pv op = if Operation_hash.Set.mem op.hash pv.live_operations then Lwt.return Outdated else - Proto.apply_operation pv.state - { shell = op.raw.shell ; protocol_data = op.protocol_data } >|= function + Proto.apply_operation + pv.state + {shell = op.raw.shell; protocol_data = op.protocol_data} + >|= function | Ok (state, receipt) -> let pv = - { state ; - applied = (op, receipt) :: pv.applied ; - live_blocks = pv.live_blocks ; - live_operations = Operation_hash.Set.add op.hash pv.live_operations ; - } in + { state; + applied = (op, receipt) :: pv.applied; + live_blocks = pv.live_blocks; + live_operations = + Operation_hash.Set.add op.hash pv.live_operations } + in Applied (pv, receipt) - | Error errors -> - match classify_errors errors with - | `Branch -> Branch_refused errors - | `Permanent -> Refused errors - | `Temporary -> Branch_delayed errors + | Error errors -> ( + match classify_errors errors with + | `Branch -> + Branch_refused errors + | `Permanent -> + Refused errors + | `Temporary -> + Branch_delayed errors ) type status = { - applied_operations : (operation * Proto.operation_receipt) list ; - block_result : Tezos_protocol_environment_shell.validation_result ; - block_metadata : Proto.block_header_metadata ; + applied_operations : (operation * Proto.operation_receipt) list; + block_result : Tezos_protocol_environment_shell.validation_result; + block_metadata : Proto.block_header_metadata } let status pv = - Proto.finalize_block pv.state >>=? fun (block_result, block_metadata) -> - return { - block_metadata ; - block_result ; - applied_operations = pv.applied ; - } + Proto.finalize_block pv.state + >>=? fun (block_result, block_metadata) -> + return {block_metadata; block_result; applied_operations = pv.applied} let pp_result ppf = let open Format in function - | Applied _ -> pp_print_string ppf "applied" - | Branch_delayed err -> fprintf ppf "branch delayed (%a)" pp_print_error err - | Branch_refused err -> fprintf ppf "branch refused (%a)" pp_print_error err - | Refused err -> fprintf ppf "refused (%a)" pp_print_error err - | Duplicate -> pp_print_string ppf "duplicate" - | Outdated -> pp_print_string ppf "outdated" - + | Applied _ -> + pp_print_string ppf "applied" + | Branch_delayed err -> + fprintf ppf "branch delayed (%a)" pp_print_error err + | Branch_refused err -> + fprintf ppf "branch refused (%a)" pp_print_error err + | Refused err -> + fprintf ppf "refused (%a)" pp_print_error err + | Duplicate -> + pp_print_string ppf "duplicate" + | Outdated -> + pp_print_string ppf "outdated" end let preapply ~predecessor ~timestamp ~protocol_data operations = - State.Block.context predecessor >>= fun predecessor_context -> - Context.get_protocol predecessor_context >>= fun protocol -> - begin - match Registered_protocol.get protocol with - | None -> - (* FIXME. *) - (* This should not happen: it should be handled in the validator. *) - failwith "Prevalidation: missing protocol '%a' for the current block." - Protocol_hash.pp_short protocol - | Some protocol -> - return protocol - end >>=? fun (module Proto) -> - let module Prevalidation = Make(Proto) in + State.Block.context predecessor + >>= fun predecessor_context -> + Context.get_protocol predecessor_context + >>= fun protocol -> + ( match Registered_protocol.get protocol with + | None -> + (* FIXME. 
*) + (* This should not happen: it should be handled in the validator. *) + failwith + "Prevalidation: missing protocol '%a' for the current block." + Protocol_hash.pp_short + protocol + | Some protocol -> + return protocol ) + >>=? fun (module Proto) -> + let module Prevalidation = Make (Proto) in let apply_operation_with_preapply_result preapp t op = let open Preapply_result in - Prevalidation.apply_operation t op >>= function + Prevalidation.apply_operation t op + >>= function | Applied (t, _) -> let applied = (op.hash, op.raw) :: preapp.applied in - Lwt.return ({ preapp with applied }, t) + Lwt.return ({preapp with applied}, t) | Branch_delayed errors -> let branch_delayed = - Operation_hash.Map.add - op.hash - (op.raw, errors) - preapp.branch_delayed in - Lwt.return ({ preapp with branch_delayed }, t) + Operation_hash.Map.add op.hash (op.raw, errors) preapp.branch_delayed + in + Lwt.return ({preapp with branch_delayed}, t) | Branch_refused errors -> let branch_refused = - Operation_hash.Map.add - op.hash - (op.raw, errors) - preapp.branch_refused in - Lwt.return ({ preapp with branch_refused }, t) + Operation_hash.Map.add op.hash (op.raw, errors) preapp.branch_refused + in + Lwt.return ({preapp with branch_refused}, t) | Refused errors -> let refused = - Operation_hash.Map.add - op.hash - (op.raw, errors) - preapp.refused in - Lwt.return ({ preapp with refused }, t) - | Duplicate | Outdated -> Lwt.return (preapp, t) in - Prevalidation.create - ~protocol_data ~predecessor ~timestamp () >>=? fun validation_state -> + Operation_hash.Map.add op.hash (op.raw, errors) preapp.refused + in + Lwt.return ({preapp with refused}, t) + | Duplicate | Outdated -> + Lwt.return (preapp, t) + in + Prevalidation.create ~protocol_data ~predecessor ~timestamp () + >>=? 
fun validation_state -> Lwt_list.fold_left_s (fun (acc_validation_result, acc_validation_state) operations -> - Lwt_list.fold_left_s - (fun (acc_validation_result, acc_validation_state) op -> - match Prevalidation.parse op with - | Error _ -> - (* FIXME *) - Lwt.return (acc_validation_result, acc_validation_state) - | Ok op -> - apply_operation_with_preapply_result - acc_validation_result acc_validation_state op) - (Preapply_result.empty, acc_validation_state) - operations - >>= fun (new_validation_result, new_validation_state) -> - (* Applied operations are reverted ; revert to the initial ordering *) - let new_validation_result = - { new_validation_result with applied = List.rev new_validation_result.applied } in - Lwt.return (acc_validation_result @ [new_validation_result], new_validation_state) - ) ([], validation_state) operations + Lwt_list.fold_left_s + (fun (acc_validation_result, acc_validation_state) op -> + match Prevalidation.parse op with + | Error _ -> + (* FIXME *) + Lwt.return (acc_validation_result, acc_validation_state) + | Ok op -> + apply_operation_with_preapply_result + acc_validation_result + acc_validation_state + op) + (Preapply_result.empty, acc_validation_state) + operations + >>= fun (new_validation_result, new_validation_state) -> + (* Applied operations are reverted ; revert to the initial ordering *) + let new_validation_result = + { new_validation_result with + applied = List.rev new_validation_result.applied } + in + Lwt.return + (acc_validation_result @ [new_validation_result], new_validation_state)) + ([], validation_state) + operations >>= fun (validation_result_list, validation_state) -> let operations_hash = Operation_list_list_hash.compute - (List.map (fun r -> - Operation_list_hash.compute - (List.map fst r.Preapply_result.applied) - ) validation_result_list) + (List.map + (fun r -> + Operation_list_hash.compute (List.map fst r.Preapply_result.applied)) + validation_result_list) in - Prevalidation.status validation_state >>=? fun { block_result ; _ } -> + Prevalidation.status validation_state + >>=? fun {block_result; _} -> let pred_shell_header = State.Block.shell_header predecessor in let level = Int32.succ pred_shell_header.level in - Block_validation.may_patch_protocol - ~level block_result >>=? fun { fitness ; context ; message ; _ } -> - State.Block.protocol_hash predecessor >>= fun pred_protocol -> - Context.get_protocol context >>= fun protocol -> + Block_validation.may_patch_protocol ~level block_result + >>=? fun {fitness; context; message; _} -> + State.Block.protocol_hash predecessor + >>= fun pred_protocol -> + Context.get_protocol context + >>= fun protocol -> let proto_level = if Protocol_hash.equal protocol pred_protocol then pred_shell_header.proto_level - else - ((pred_shell_header.proto_level + 1) mod 256) in - let shell_header : Block_header.shell_header = { - level ; - proto_level ; - predecessor = State.Block.hash predecessor ; - timestamp ; - validation_passes = List.length validation_result_list ; - operations_hash ; - fitness ; - context = Context_hash.zero ; (* place holder *) - } in - begin - if Protocol_hash.equal protocol pred_protocol then - return (context, message) - else - match Registered_protocol.get protocol with - | None -> - fail (Block_validator_errors.Unavailable_protocol - { block = State.Block.hash predecessor ; protocol }) - | Some (module NewProto) -> - NewProto.init context shell_header >>=? fun { context ; message ; _ } -> - return (context, message) - end >>=? 
fun (context, message) -> - Context.hash ?message ~time:timestamp context >>= fun context -> - return ({ shell_header with context }, validation_result_list) + else (pred_shell_header.proto_level + 1) mod 256 + in + let shell_header : Block_header.shell_header = + { level; + proto_level; + predecessor = State.Block.hash predecessor; + timestamp; + validation_passes = List.length validation_result_list; + operations_hash; + fitness; + context = Context_hash.zero (* place holder *) } + in + ( if Protocol_hash.equal protocol pred_protocol then return (context, message) + else + match Registered_protocol.get protocol with + | None -> + fail + (Block_validator_errors.Unavailable_protocol + {block = State.Block.hash predecessor; protocol}) + | Some (module NewProto) -> + NewProto.init context shell_header + >>=? fun {context; message; _} -> return (context, message) ) + >>=? fun (context, message) -> + Context.hash ?message ~time:timestamp context + >>= fun context -> + return ({shell_header with context}, validation_result_list) diff --git a/src/lib_shell/prevalidation.mli b/src/lib_shell/prevalidation.mli index 2c3ea8a8d1426f0ae31183547b7b160e91e6ab56..f87f4baf9251fd81c1ae01fbe40ae908ecffcdea 100644 --- a/src/lib_shell/prevalidation.mli +++ b/src/lib_shell/prevalidation.mli @@ -29,28 +29,29 @@ prevalidation_state. *) module type T = sig - - module Proto: Registered_protocol.T + module Proto : Registered_protocol.T type t type operation = private { - hash: Operation_hash.t ; - raw: Operation.t ; - protocol_data: Proto.operation_data ; + hash : Operation_hash.t; + raw : Operation.t; + protocol_data : Proto.operation_data } - val compare: operation -> operation -> int - val parse: Operation.t -> operation tzresult + val compare : operation -> operation -> int + + val parse : Operation.t -> operation tzresult (** Creates a new prevalidation context w.r.t. the protocol associate to the predecessor block . When ?protocol_data is passed to this function, it will be used to create the new block *) val create : - ?protocol_data: MBytes.t -> - predecessor: State.Block.t -> - timestamp: Time.Protocol.t -> - unit -> t tzresult Lwt.t + ?protocol_data:MBytes.t -> + predecessor:State.Block.t -> + timestamp:Time.Protocol.t -> + unit -> + t tzresult Lwt.t type result = | Applied of t * Proto.operation_receipt @@ -60,20 +61,20 @@ module type T = sig | Duplicate | Outdated - val apply_operation: t -> operation -> result Lwt.t + val apply_operation : t -> operation -> result Lwt.t type status = { - applied_operations : (operation * Proto.operation_receipt) list ; - block_result : Tezos_protocol_environment_shell.validation_result ; - block_metadata : Proto.block_header_metadata ; + applied_operations : (operation * Proto.operation_receipt) list; + block_result : Tezos_protocol_environment_shell.validation_result; + block_metadata : Proto.block_header_metadata } - val status: t -> status tzresult Lwt.t + val status : t -> status tzresult Lwt.t - val pp_result: Format.formatter -> result -> unit + val pp_result : Format.formatter -> result -> unit end -module Make(Proto : Registered_protocol.T) : T with module Proto = Proto +module Make (Proto : Registered_protocol.T) : T with module Proto = Proto (** Pre-apply creates a new block and returns it. 
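(* Editor's illustrative sketch, not part of the patch: how the [Make] functor
   above is typically driven.  [Example_usage], [prevalidate], [raw_ops],
   [predecessor] and [timestamp] are invented names; only values and
   constructors declared in this interface are used. *)
module Example_usage (Proto : Registered_protocol.T) = struct
  module P = Prevalidation.Make (Proto)

  (* parse raw operations, apply those that parse, and keep the receipts *)
  let prevalidate ~predecessor ~timestamp raw_ops =
    P.create ~predecessor ~timestamp ()
    >>=? fun state ->
    Lwt_list.fold_left_s
      (fun (state, applied) raw ->
        match P.parse raw with
        | Error _ ->
            Lwt.return (state, applied)
        | Ok op -> (
            P.apply_operation state op
            >>= function
            | P.Applied (state, receipt) ->
                Lwt.return (state, (op, receipt) :: applied)
            | _ ->
                Lwt.return (state, applied) ))
      (state, [])
      raw_ops
    >>= fun (state, applied) -> return (state, List.rev applied)
end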
*) val preapply : diff --git a/src/lib_shell/prevalidator.ml b/src/lib_shell/prevalidator.ml index 00aaf6213cb929f709f058fc0bd6c9e0af716848..63ef33a198d63890dc0c327815d5dd8112295857 100644 --- a/src/lib_shell/prevalidator.ml +++ b/src/lib_shell/prevalidator.ml @@ -27,56 +27,65 @@ open Prevalidator_worker_state type limits = { - max_refused_operations : int ; - operation_timeout : Time.System.Span.t ; - worker_limits : Worker_types.limits ; + max_refused_operations : int; + operation_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } -type name_t = (Chain_id.t * Protocol_hash.t) +type name_t = Chain_id.t * Protocol_hash.t module type T = sig + module Proto : Registered_protocol.T + + val name : name_t + + val parameters : limits * Distributed_db.chain_db + + module Prevalidation : Prevalidation.T with module Proto = Proto - module Proto: Registered_protocol.T - val name: name_t - val parameters: limits * Distributed_db.chain_db - module Prevalidation: Prevalidation.T with module Proto = Proto type types_state = { - chain_db : Distributed_db.chain_db ; - limits : limits ; - mutable predecessor : State.Block.t ; - mutable timestamp : Time.System.t ; - mutable live_blocks : Block_hash.Set.t ; - mutable live_operations : Operation_hash.Set.t ; - refused : Operation_hash.t Ring.t ; - mutable refusals : (Operation.t * error list) Operation_hash.Map.t ; - branch_refused : Operation_hash.t Ring.t ; + chain_db : Distributed_db.chain_db; + limits : limits; + mutable predecessor : State.Block.t; + mutable timestamp : Time.System.t; + mutable live_blocks : Block_hash.Set.t; + mutable live_operations : Operation_hash.Set.t; + refused : Operation_hash.t Ring.t; + mutable refusals : (Operation.t * error list) Operation_hash.Map.t; + branch_refused : Operation_hash.t Ring.t; mutable branch_refusals : (Operation.t * error list) Operation_hash.Map.t; - branch_delayed : Operation_hash.t Ring.t ; + branch_delayed : Operation_hash.t Ring.t; mutable branch_delays : (Operation.t * error list) Operation_hash.Map.t; - mutable fetching : Operation_hash.Set.t ; - mutable pending : Operation.t Operation_hash.Map.t ; - mutable mempool : Mempool.t ; - mutable in_mempool : Operation_hash.Set.t ; + mutable fetching : Operation_hash.Set.t; + mutable pending : Operation.t Operation_hash.Map.t; + mutable mempool : Mempool.t; + mutable in_mempool : Operation_hash.Set.t; mutable applied : (Operation_hash.t * Operation.t) list; - mutable applied_count : int ; - mutable validation_state : Prevalidation.t tzresult ; + mutable applied_count : int; + mutable validation_state : Prevalidation.t tzresult; mutable operation_stream : - ([ `Applied | `Refused | `Branch_refused | `Branch_delayed ] * - Operation.shell_header * - Proto.operation_data - ) Lwt_watcher.input; - mutable advertisement : [ `Pending of Mempool.t | `None ] ; - mutable rpc_directory : types_state RPC_directory.t lazy_t ; + ( [`Applied | `Refused | `Branch_refused | `Branch_delayed] + * Operation.shell_header + * Proto.operation_data ) + Lwt_watcher.input; + mutable advertisement : [`Pending of Mempool.t | `None]; + mutable rpc_directory : types_state RPC_directory.t lazy_t } - module Name: Worker.NAME with type t = name_t - module Types: Worker.TYPES with type state = types_state - module Worker: Worker.T - with type Event.t = Event.t - and type 'a Request.t = 'a Request.t - and type Request.view = Request.view - and type Types.state = types_state + + module Name : Worker.NAME with type t = name_t + + module Types : Worker.TYPES with type state = 
types_state + + module Worker : + Worker.T + with type Event.t = Event.t + and type 'a Request.t = 'a Request.t + and type Request.view = Request.view + and type Types.state = types_state + type worker = Worker.infinite Worker.queue Worker.t - val list_pendings: + + val list_pendings : Distributed_db.chain_db -> from_block:State.Block.t -> to_block:State.Block.t -> @@ -84,74 +93,85 @@ module type T = sig Operation.t Operation_hash.Map.t -> Operation.t Operation_hash.Map.t Lwt.t - val validation_result: types_state -> error Preapply_result.t + val validation_result : types_state -> error Preapply_result.t + + val fitness : unit -> Fitness.t Lwt.t - val fitness: unit -> Fitness.t Lwt.t - val initialization_errors: unit tzresult Lwt.t - val worker: worker Lazy.t + val initialization_errors : unit tzresult Lwt.t + val worker : worker Lazy.t end module type ARG = sig - val limits: limits - val chain_db: Distributed_db.chain_db - val chain_id: Chain_id.t + val limits : limits + + val chain_db : Distributed_db.chain_db + + val chain_id : Chain_id.t end type t = (module T) -module Make(Proto: Registered_protocol.T)(Arg: ARG): T = struct +module Make (Proto : Registered_protocol.T) (Arg : ARG) : T = struct module Proto = Proto + let name = (Arg.chain_id, Proto.hash) + let parameters = (Arg.limits, Arg.chain_db) - module Prevalidation = Prevalidation.Make(Proto) + + module Prevalidation = Prevalidation.Make (Proto) + type types_state = { - chain_db : Distributed_db.chain_db ; - limits : limits ; - mutable predecessor : State.Block.t ; - mutable timestamp : Time.System.t ; - mutable live_blocks : Block_hash.Set.t ; (* just a cache *) - mutable live_operations : Operation_hash.Set.t ; (* just a cache *) - refused : Operation_hash.t Ring.t ; - mutable refusals : (Operation.t * error list) Operation_hash.Map.t ; - branch_refused : Operation_hash.t Ring.t ; + chain_db : Distributed_db.chain_db; + limits : limits; + mutable predecessor : State.Block.t; + mutable timestamp : Time.System.t; + mutable live_blocks : Block_hash.Set.t; + (* just a cache *) + mutable live_operations : Operation_hash.Set.t; + (* just a cache *) + refused : Operation_hash.t Ring.t; + mutable refusals : (Operation.t * error list) Operation_hash.Map.t; + branch_refused : Operation_hash.t Ring.t; mutable branch_refusals : (Operation.t * error list) Operation_hash.Map.t; - branch_delayed : Operation_hash.t Ring.t ; + branch_delayed : Operation_hash.t Ring.t; mutable branch_delays : (Operation.t * error list) Operation_hash.Map.t; - mutable fetching : Operation_hash.Set.t ; - mutable pending : Operation.t Operation_hash.Map.t ; - mutable mempool : Mempool.t ; - mutable in_mempool : Operation_hash.Set.t ; + mutable fetching : Operation_hash.Set.t; + mutable pending : Operation.t Operation_hash.Map.t; + mutable mempool : Mempool.t; + mutable in_mempool : Operation_hash.Set.t; mutable applied : (Operation_hash.t * Operation.t) list; - mutable applied_count : int ; - mutable validation_state : Prevalidation.t tzresult ; + mutable applied_count : int; + mutable validation_state : Prevalidation.t tzresult; mutable operation_stream : - ([ `Applied | `Refused | `Branch_refused | `Branch_delayed ] * - Operation.shell_header * - Proto.operation_data - ) Lwt_watcher.input; - mutable advertisement : [ `Pending of Mempool.t | `None ] ; - mutable rpc_directory : types_state RPC_directory.t lazy_t ; + ( [`Applied | `Refused | `Branch_refused | `Branch_delayed] + * Operation.shell_header + * Proto.operation_data ) + Lwt_watcher.input; + mutable 
advertisement : [`Pending of Mempool.t | `None]; + mutable rpc_directory : types_state RPC_directory.t lazy_t } module Name = struct type t = name_t - let encoding = - Data_encoding.tup2 - Chain_id.encoding - Protocol_hash.encoding + + let encoding = Data_encoding.tup2 Chain_id.encoding Protocol_hash.encoding + let chain_id_string = - let _: string = Format.flush_str_formatter () in - Chain_id.pp_short Format.str_formatter Arg.chain_id; + let (_ : string) = Format.flush_str_formatter () in + Chain_id.pp_short Format.str_formatter Arg.chain_id ; Format.flush_str_formatter () + let proto_hash_string = - let _: string = Format.flush_str_formatter () in - Protocol_hash.pp_short Format.str_formatter Proto.hash; + let (_ : string) = Format.flush_str_formatter () in + Protocol_hash.pp_short Format.str_formatter Proto.hash ; Format.flush_str_formatter () - let base = [ "prevalidator" ; chain_id_string ; proto_hash_string ] + + let base = ["prevalidator"; chain_id_string; proto_hash_string] + let pp fmt (chain_id, proto_hash) = - Chain_id.pp_short fmt chain_id; - Format.pp_print_string fmt "."; + Chain_id.pp_short fmt chain_id ; + Format.pp_print_string fmt "." ; Protocol_hash.pp_short fmt proto_hash end @@ -163,6 +183,7 @@ module Make(Proto: Registered_protocol.T)(Arg: ARG): T = struct - pv.prevalidation_result.refused = Ø, refused ops are in pv.refused - the 'applied' operations in pv.validation_result are in reverse order. *) type state = types_state + type parameters = limits * Distributed_db.chain_db include Worker_state @@ -171,83 +192,91 @@ module Make(Proto: Registered_protocol.T)(Arg: ARG): T = struct let domain map = Operation_hash.Map.fold (fun elt _ acc -> Operation_hash.Set.add elt acc) - map Operation_hash.Set.empty in - { head = State.Block.hash state.predecessor ; - timestamp = state.timestamp ; - fetching = state.fetching ; - pending = domain state.pending ; - applied = - List.rev - (List.map (fun (h, _) -> h) - state.applied) ; + map + Operation_hash.Set.empty + in + { head = State.Block.hash state.predecessor; + timestamp = state.timestamp; + fetching = state.fetching; + pending = domain state.pending; + applied = List.rev (List.map (fun (h, _) -> h) state.applied); delayed = Operation_hash.Set.union (domain state.branch_delays) (domain state.branch_refusals) } - end - module Worker: Worker.T - with type Name.t = Name.t - and type Event.t = Event.t - and type 'a Request.t = 'a Request.t - and type Request.view = Request.view - and type Types.state = Types.state - and type Types.parameters = Types.parameters - = Worker.Make (Name) (Prevalidator_worker_state.Event) - (Prevalidator_worker_state.Request) (Types) + module Worker : + Worker.T + with type Name.t = Name.t + and type Event.t = Event.t + and type 'a Request.t = 'a Request.t + and type Request.view = Request.view + and type Types.state = Types.state + and type Types.parameters = Types.parameters = + Worker.Make (Name) (Prevalidator_worker_state.Event) + (Prevalidator_worker_state.Request) + (Types) (** Centralised operation stream for the RPCs *) - let notify_operation { operation_stream ; _ } result { Operation.shell ; proto } = + let notify_operation {operation_stream; _} result {Operation.shell; proto} = let protocol_data = - Data_encoding.Binary.of_bytes_exn - Proto.operation_data_encoding - proto in + Data_encoding.Binary.of_bytes_exn Proto.operation_data_encoding proto + in Lwt_watcher.notify operation_stream (result, shell, protocol_data) open Types type worker = Worker.infinite Worker.queue Worker.t - let debug w = 
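(* Editor's illustrative sketch, not part of the patch: the producer/consumer
   shape behind [operation_stream].  [notify_operation] above is the producer
   side; the [monitor_operations] RPC below gives each client its own stream.
   Only Lwt_watcher functions already used in this file appear here. *)
let _watcher_shape () =
  let input = Lwt_watcher.create_input () in
  (* each client attaches a stream to the shared input *)
  let (stream, stopper) = Lwt_watcher.create_stream input in
  (* the producer notifies every attached stream *)
  Lwt_watcher.notify input `Applied ;
  Lwt_stream.get stream
  >>= fun _event ->
  (* detaching the client *)
  Lwt_watcher.shutdown stopper ; Lwt.return_unit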
- Format.kasprintf (fun msg -> Worker.record_event w (Debug msg)) + let debug w = Format.kasprintf (fun msg -> Worker.record_event w (Debug msg)) let list_pendings chain_db ~from_block ~to_block ~live_blocks old_mempool = let rec pop_blocks ancestor block mempool = let hash = State.Block.hash block in - if Block_hash.equal hash ancestor then - Lwt.return mempool + if Block_hash.equal hash ancestor then Lwt.return mempool else - State.Block.all_operations block >>= fun operations -> + State.Block.all_operations block + >>= fun operations -> Lwt_list.fold_left_s (Lwt_list.fold_left_s (fun mempool op -> let h = Operation.hash op in - Distributed_db.inject_operation chain_db h op >>= fun (_ : bool) -> + Distributed_db.inject_operation chain_db h op + >>= fun (_ : bool) -> Lwt.return (Operation_hash.Map.add h op mempool))) - mempool operations >>= fun mempool -> - State.Block.predecessor block >>= function - | None -> assert false - | Some predecessor -> pop_blocks ancestor predecessor mempool + mempool + operations + >>= fun mempool -> + State.Block.predecessor block + >>= function + | None -> + assert false + | Some predecessor -> + pop_blocks ancestor predecessor mempool in let push_block mempool block = - State.Block.all_operation_hashes block >|= fun operations -> + State.Block.all_operation_hashes block + >|= fun operations -> List.iter (List.iter (Distributed_db.Operation.clear_or_cancel chain_db)) operations ; List.fold_left (List.fold_left (fun mempool h -> Operation_hash.Map.remove h mempool)) - mempool operations + mempool + operations in - Chain_traversal.new_blocks ~from_block ~to_block >>= fun (ancestor, path) -> - pop_blocks - (State.Block.hash ancestor) - from_block old_mempool >>= fun mempool -> - Lwt_list.fold_left_s push_block mempool path >>= fun new_mempool -> - let new_mempool, outdated = + Chain_traversal.new_blocks ~from_block ~to_block + >>= fun (ancestor, path) -> + pop_blocks (State.Block.hash ancestor) from_block old_mempool + >>= fun mempool -> + Lwt_list.fold_left_s push_block mempool path + >>= fun new_mempool -> + let (new_mempool, outdated) = Operation_hash.Map.partition (fun _oph op -> - Block_hash.Set.mem op.Operation.shell.branch live_blocks) - new_mempool in + Block_hash.Set.mem op.Operation.shell.branch live_blocks) + new_mempool + in Operation_hash.Map.iter (fun oph _op -> Distributed_db.Operation.clear_or_cancel chain_db oph) outdated ; @@ -261,318 +290,422 @@ module Make(Proto: Registered_protocol.T)(Arg: ARG): T = struct || Operation_hash.Set.mem oph pv.in_mempool let validation_result (state : types_state) = - { Preapply_result.applied = List.rev state.applied ; - branch_delayed = state.branch_delays ; - branch_refused = state.branch_refusals ; + { Preapply_result.applied = List.rev state.applied; + branch_delayed = state.branch_delays; + branch_refused = state.branch_refusals; refused = Operation_hash.Map.empty } let advertise (w : worker) pv mempool = match pv.advertisement with - | `Pending { Mempool.known_valid ; pending } -> + | `Pending {Mempool.known_valid; pending} -> pv.advertisement <- `Pending - { known_valid = known_valid @ mempool.Mempool.known_valid ; + { known_valid = known_valid @ mempool.Mempool.known_valid; pending = Operation_hash.Set.union pending mempool.pending } | `None -> pv.advertisement <- `Pending mempool ; Lwt.async (fun () -> - Lwt_unix.sleep 0.01 >>= fun () -> + Lwt_unix.sleep 0.01 + >>= fun () -> Worker.Queue.push_request_now w Advertise ; Lwt.return_unit) - let is_endorsement ( op : Prevalidation.operation ) = - 
Proto.acceptable_passes { - shell = op.raw.shell ; - protocol_data = op.protocol_data } = [0] + let is_endorsement (op : Prevalidation.operation) = + Proto.acceptable_passes + {shell = op.raw.shell; protocol_data = op.protocol_data} + = [0] let is_endorsement_raw op = match Prevalidation.parse op with - |Ok op -> is_endorsement op - |Error _ -> false + | Ok op -> + is_endorsement op + | Error _ -> + false let handle_unprocessed w pv = - begin match pv.validation_state with - | Error err -> - Operation_hash.Map.iter - (fun h op -> - Option.iter (Ring.add_and_return_erased pv.branch_delayed h) - ~f:(fun e -> - pv.branch_delays <- Operation_hash.Map.remove e pv.branch_delays ; - pv.in_mempool <- Operation_hash.Set.remove e pv.in_mempool) ; - pv.in_mempool <- - Operation_hash.Set.add h pv.in_mempool ; - pv.branch_delays <- - Operation_hash.Map.add h (op, err) pv.branch_delays) - pv.pending ; - pv.pending <- - Operation_hash.Map.empty ; + ( match pv.validation_state with + | Error err -> + Operation_hash.Map.iter + (fun h op -> + Option.iter + (Ring.add_and_return_erased pv.branch_delayed h) + ~f:(fun e -> + pv.branch_delays <- + Operation_hash.Map.remove e pv.branch_delays ; + pv.in_mempool <- Operation_hash.Set.remove e pv.in_mempool) ; + pv.in_mempool <- Operation_hash.Set.add h pv.in_mempool ; + pv.branch_delays <- + Operation_hash.Map.add h (op, err) pv.branch_delays) + pv.pending ; + pv.pending <- Operation_hash.Map.empty ; + Lwt.return_unit + | Ok state -> ( + match Operation_hash.Map.cardinal pv.pending with + | 0 -> Lwt.return_unit - | Ok state -> - match Operation_hash.Map.cardinal pv.pending with - | 0 -> Lwt.return_unit - | n -> - debug w "processing %d operations" n ; - let operations = List.map snd (Operation_hash.Map.bindings pv.pending) in - Lwt_list.fold_left_s (fun (acc_validation_state, acc_mempool) op -> - let refused hash raw errors = - notify_operation pv `Refused raw ; - let new_mempool = Mempool.{ acc_mempool with pending = Operation_hash.Set.add hash acc_mempool.pending } in - Option.iter (Ring.add_and_return_erased pv.refused hash) - ~f:(fun e -> pv.refusals <- Operation_hash.Map.remove e pv.refusals) ; - pv.refusals <- Operation_hash.Map.add hash (raw, errors) pv.refusals ; - Distributed_db.Operation.clear_or_cancel pv.chain_db hash ; - Lwt.return (acc_validation_state, new_mempool) in - match Prevalidation.parse op with - | Error errors -> - refused (Operation.hash op) op errors - | Ok op -> - Prevalidation.apply_operation state op >>= function - | Applied (new_acc_validation_state, _) -> - if pv.applied_count <= 2000 - (* this test is a quick fix while we wait for the new mempool *) - || is_endorsement op then begin - notify_operation pv `Applied op.raw ; - let new_mempool = Mempool.{ acc_mempool with known_valid = op.hash :: acc_mempool.known_valid } in - pv.applied <- (op.hash, op.raw) :: pv.applied ; - pv.in_mempool <- Operation_hash.Set.add op.hash pv.in_mempool ; - Lwt.return (new_acc_validation_state, new_mempool) - end else - Lwt.return (acc_validation_state, acc_mempool) - | Branch_delayed errors -> - notify_operation pv `Branch_delayed op.raw ; - let new_mempool = Mempool.{ acc_mempool with pending = Operation_hash.Set.add op.hash acc_mempool.pending } in - Option.iter (Ring.add_and_return_erased pv.branch_delayed op.hash) - ~f:(fun e -> - pv.branch_delays <- Operation_hash.Map.remove e pv.branch_delays ; - pv.in_mempool <- Operation_hash.Set.remove e pv.in_mempool) ; - pv.in_mempool <- Operation_hash.Set.add op.hash pv.in_mempool ; - pv.branch_delays <- 
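(* Editor's illustrative sketch, not part of the patch: the bounded-cache idiom
   used just above for [refused], [branch_refused] and [branch_delayed].
   [Ring.add_and_return_erased] returns the element it evicted, if any, so the
   companion map can be pruned and both structures stay bounded by
   [max_refused_operations].  A [ref] stands in for the mutable record fields. *)
let _record_bounded ring map_ref oph payload =
  Option.iter
    (Ring.add_and_return_erased ring oph)
    ~f:(fun evicted -> map_ref := Operation_hash.Map.remove evicted !map_ref) ;
  map_ref := Operation_hash.Map.add oph payload !map_ref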
Operation_hash.Map.add op.hash (op.raw, errors) pv.branch_delays ; - Lwt.return (acc_validation_state, new_mempool) - | Branch_refused errors -> - notify_operation pv `Branch_refused op.raw ; - let new_mempool = Mempool.{ acc_mempool with pending = Operation_hash.Set.add op.hash acc_mempool.pending } in - Option.iter (Ring.add_and_return_erased pv.branch_refused op.hash) - ~f:(fun e -> - pv.branch_refusals <- Operation_hash.Map.remove e pv.branch_refusals ; - pv.in_mempool <- Operation_hash.Set.remove e pv.in_mempool) ; - pv.in_mempool <- Operation_hash.Set.add op.hash pv.in_mempool ; - pv.branch_refusals <- Operation_hash.Map.add op.hash (op.raw, errors) pv.branch_refusals ; - Lwt.return (acc_validation_state, new_mempool) - | Refused errors -> - refused op.hash op.raw errors - | Duplicate | Outdated -> Lwt.return (acc_validation_state, acc_mempool)) - (state, Mempool.empty) - operations >>= fun (state, advertised_mempool) -> - pv.validation_state <- Ok state ; - pv.pending <- Operation_hash.Map.empty ; - advertise w pv - { advertised_mempool with known_valid = List.rev advertised_mempool.known_valid } ; - Lwt.return_unit - end >>= fun () -> + | n -> + debug w "processing %d operations" n ; + let operations = + List.map snd (Operation_hash.Map.bindings pv.pending) + in + Lwt_list.fold_left_s + (fun (acc_validation_state, acc_mempool) op -> + let refused hash raw errors = + notify_operation pv `Refused raw ; + let new_mempool = + Mempool. + { acc_mempool with + pending = Operation_hash.Set.add hash acc_mempool.pending + } + in + Option.iter + (Ring.add_and_return_erased pv.refused hash) + ~f:(fun e -> + pv.refusals <- Operation_hash.Map.remove e pv.refusals) ; + pv.refusals <- + Operation_hash.Map.add hash (raw, errors) pv.refusals ; + Distributed_db.Operation.clear_or_cancel pv.chain_db hash ; + Lwt.return (acc_validation_state, new_mempool) + in + match Prevalidation.parse op with + | Error errors -> + refused (Operation.hash op) op errors + | Ok op -> ( + Prevalidation.apply_operation state op + >>= function + | Applied (new_acc_validation_state, _) -> + if + pv.applied_count <= 2000 + (* this test is a quick fix while we wait for the new mempool *) + || is_endorsement op + then ( + notify_operation pv `Applied op.raw ; + let new_mempool = + Mempool. + { acc_mempool with + known_valid = op.hash :: acc_mempool.known_valid + } + in + pv.applied <- (op.hash, op.raw) :: pv.applied ; + pv.in_mempool <- + Operation_hash.Set.add op.hash pv.in_mempool ; + Lwt.return (new_acc_validation_state, new_mempool) ) + else Lwt.return (acc_validation_state, acc_mempool) + | Branch_delayed errors -> + notify_operation pv `Branch_delayed op.raw ; + let new_mempool = + Mempool. + { acc_mempool with + pending = + Operation_hash.Set.add + op.hash + acc_mempool.pending } + in + Option.iter + (Ring.add_and_return_erased pv.branch_delayed op.hash) + ~f:(fun e -> + pv.branch_delays <- + Operation_hash.Map.remove e pv.branch_delays ; + pv.in_mempool <- + Operation_hash.Set.remove e pv.in_mempool) ; + pv.in_mempool <- + Operation_hash.Set.add op.hash pv.in_mempool ; + pv.branch_delays <- + Operation_hash.Map.add + op.hash + (op.raw, errors) + pv.branch_delays ; + Lwt.return (acc_validation_state, new_mempool) + | Branch_refused errors -> + notify_operation pv `Branch_refused op.raw ; + let new_mempool = + Mempool. 
+ { acc_mempool with + pending = + Operation_hash.Set.add + op.hash + acc_mempool.pending } + in + Option.iter + (Ring.add_and_return_erased pv.branch_refused op.hash) + ~f:(fun e -> + pv.branch_refusals <- + Operation_hash.Map.remove e pv.branch_refusals ; + pv.in_mempool <- + Operation_hash.Set.remove e pv.in_mempool) ; + pv.in_mempool <- + Operation_hash.Set.add op.hash pv.in_mempool ; + pv.branch_refusals <- + Operation_hash.Map.add + op.hash + (op.raw, errors) + pv.branch_refusals ; + Lwt.return (acc_validation_state, new_mempool) + | Refused errors -> + refused op.hash op.raw errors + | Duplicate | Outdated -> + Lwt.return (acc_validation_state, acc_mempool) )) + (state, Mempool.empty) + operations + >>= fun (state, advertised_mempool) -> + pv.validation_state <- Ok state ; + pv.pending <- Operation_hash.Map.empty ; + advertise + w + pv + { advertised_mempool with + known_valid = List.rev advertised_mempool.known_valid } ; + Lwt.return_unit ) ) + >>= fun () -> pv.mempool <- - { Mempool.known_valid = - List.rev_map fst pv.applied ; + { Mempool.known_valid = List.rev_map fst pv.applied; pending = Operation_hash.Map.fold - (fun k (op,_) s -> - if is_endorsement_raw op then - Operation_hash.Set.add k s - else s) - pv.branch_delays @@ - Operation_hash.Map.fold - (fun k (op,_) s -> - if is_endorsement_raw op then - Operation_hash.Set.add k s - else s) - pv.branch_refusals @@ - Operation_hash.Set.empty - } ; - State.Current_mempool.set (Distributed_db.chain_state pv.chain_db) - ~head:(State.Block.hash pv.predecessor) pv.mempool >>= fun () -> - Lwt.return_unit + (fun k (op, _) s -> + if is_endorsement_raw op then Operation_hash.Set.add k s else s) + pv.branch_delays + @@ Operation_hash.Map.fold + (fun k (op, _) s -> + if is_endorsement_raw op then Operation_hash.Set.add k s + else s) + pv.branch_refusals + @@ Operation_hash.Set.empty } ; + State.Current_mempool.set + (Distributed_db.chain_state pv.chain_db) + ~head:(State.Block.hash pv.predecessor) + pv.mempool + >>= fun () -> Lwt.return_unit let fetch_operation w pv ?peer oph = - debug w - "fetching operation %a" - Operation_hash.pp_short oph ; + debug w "fetching operation %a" Operation_hash.pp_short oph ; Distributed_db.Operation.fetch ~timeout:pv.limits.operation_timeout - pv.chain_db ?peer oph () >>= function + pv.chain_db + ?peer + oph + () + >>= function | Ok op -> Worker.Queue.push_request_now w (Arrived (oph, op)) ; Lwt.return_unit - | Error [ Distributed_db.Operation.Canceled _ ] -> - debug w + | Error [Distributed_db.Operation.Canceled _] -> + debug + w "operation %a included before being prevalidated" - Operation_hash.pp_short oph ; + Operation_hash.pp_short + oph ; Lwt.return_unit - | Error _ -> (* should not happen *) + | Error _ -> + (* should not happen *) Lwt.return_unit - let rpc_directory = lazy ( - let dir : state RPC_directory.t ref = ref RPC_directory.empty in - - let module Proto_services = Block_services.Make(Proto)(Proto) in - - dir := RPC_directory.register !dir - (Proto_services.S.Mempool.pending_operations RPC_path.open_root) - (fun pv () () -> - let map_op op = - let protocol_data = - Data_encoding.Binary.of_bytes_exn - Proto.operation_data_encoding - op.Operation.proto in - { Proto.shell = op.shell ; protocol_data } in - let map_op_error (op, error) = (map_op op, error) in - return { - Proto_services.Mempool.applied = - List.map - (fun (hash, op) -> (hash, map_op op)) - (List.rev pv.applied) ; - refused = - Operation_hash.Map.map map_op_error pv.refusals ; - branch_refused = - Operation_hash.Map.map map_op_error 
pv.branch_refusals ; - branch_delayed = - Operation_hash.Map.map map_op_error pv.branch_delays ; - unprocessed = - Operation_hash.Map.map map_op pv.pending ; - }) ; - - dir := RPC_directory.register !dir - (Proto_services.S.Mempool.request_operations RPC_path.open_root) - (fun pv () () -> - Distributed_db.Request.current_head pv.chain_db () ; - return_unit - ) ; - - dir := RPC_directory.gen_register !dir - (Proto_services.S.Mempool.monitor_operations RPC_path.open_root) - begin fun { applied ; refusals = refused ; branch_refusals = branch_refused ; branch_delays = branch_delayed ; operation_stream ; _ } params () -> - let op_stream, stopper = Lwt_watcher.create_stream operation_stream in - (* Convert ops *) - let map_op op = - let protocol_data = - Data_encoding.Binary.of_bytes_exn - Proto.operation_data_encoding - op.Operation.proto in - Proto.{ shell = op.shell ; protocol_data } in - let fold_op _k (op, _error) acc = map_op op :: acc in - (* First call : retrieve the current set of op from the mempool *) - let applied = if params#applied then List.map map_op (List.map snd applied) else [] in - let refused = if params#refused then - Operation_hash.Map.fold fold_op refused [] else [] in - let branch_refused = if params#branch_refused then - Operation_hash.Map.fold fold_op branch_refused [] else [] in - let branch_delayed = if params#branch_delayed then - Operation_hash.Map.fold fold_op branch_delayed [] else [] in - let current_mempool = List.concat [ applied ; refused ; branch_refused ; branch_delayed ] in - let current_mempool = ref (Some current_mempool) in - let filter_result = function - | `Applied -> params#applied - | `Refused -> params#refused - | `Branch_refused -> params#branch_refused - | `Branch_delayed -> params#branch_delayed - in - let rec next () = - match !current_mempool with - | Some mempool -> begin - current_mempool := None ; - Lwt.return_some mempool - end - | None -> begin - Lwt_stream.get op_stream >>= function - | Some (kind, shell, protocol_data) when filter_result kind -> - (* NOTE: Should the protocol change, a new Prevalidation - * context would be created. Thus, we use the same Proto. 
*) - let bytes = Data_encoding.Binary.to_bytes_exn - Proto.operation_data_encoding - protocol_data in - let protocol_data = Data_encoding.Binary.of_bytes_exn - Proto.operation_data_encoding - bytes in - Lwt.return_some [ { Proto.shell ; protocol_data } ] - | Some _ -> next () - | None -> Lwt.return_none - end - in - let shutdown () = Lwt_watcher.shutdown stopper in - RPC_answer.return_stream { next ; shutdown } - end ; - - !dir - ) + let rpc_directory = + lazy + (let dir : state RPC_directory.t ref = ref RPC_directory.empty in + let module Proto_services = Block_services.Make (Proto) (Proto) in + dir := + RPC_directory.register + !dir + (Proto_services.S.Mempool.pending_operations RPC_path.open_root) + (fun pv () () -> + let map_op op = + let protocol_data = + Data_encoding.Binary.of_bytes_exn + Proto.operation_data_encoding + op.Operation.proto + in + {Proto.shell = op.shell; protocol_data} + in + let map_op_error (op, error) = (map_op op, error) in + return + { Proto_services.Mempool.applied = + List.map + (fun (hash, op) -> (hash, map_op op)) + (List.rev pv.applied); + refused = Operation_hash.Map.map map_op_error pv.refusals; + branch_refused = + Operation_hash.Map.map map_op_error pv.branch_refusals; + branch_delayed = + Operation_hash.Map.map map_op_error pv.branch_delays; + unprocessed = Operation_hash.Map.map map_op pv.pending }) ; + dir := + RPC_directory.register + !dir + (Proto_services.S.Mempool.request_operations RPC_path.open_root) + (fun pv () () -> + Distributed_db.Request.current_head pv.chain_db () ; + return_unit) ; + dir := + RPC_directory.gen_register + !dir + (Proto_services.S.Mempool.monitor_operations RPC_path.open_root) + (fun { applied; + refusals = refused; + branch_refusals = branch_refused; + branch_delays = branch_delayed; + operation_stream; + _ } + params + () + -> + let (op_stream, stopper) = + Lwt_watcher.create_stream operation_stream + in + (* Convert ops *) + let map_op op = + let protocol_data = + Data_encoding.Binary.of_bytes_exn + Proto.operation_data_encoding + op.Operation.proto + in + Proto.{shell = op.shell; protocol_data} + in + let fold_op _k (op, _error) acc = map_op op :: acc in + (* First call : retrieve the current set of op from the mempool *) + let applied = + if params#applied then List.map map_op (List.map snd applied) + else [] + in + let refused = + if params#refused then + Operation_hash.Map.fold fold_op refused [] + else [] + in + let branch_refused = + if params#branch_refused then + Operation_hash.Map.fold fold_op branch_refused [] + else [] + in + let branch_delayed = + if params#branch_delayed then + Operation_hash.Map.fold fold_op branch_delayed [] + else [] + in + let current_mempool = + List.concat [applied; refused; branch_refused; branch_delayed] + in + let current_mempool = ref (Some current_mempool) in + let filter_result = function + | `Applied -> + params#applied + | `Refused -> + params#refused + | `Branch_refused -> + params#branch_refused + | `Branch_delayed -> + params#branch_delayed + in + let rec next () = + match !current_mempool with + | Some mempool -> + current_mempool := None ; + Lwt.return_some mempool + | None -> ( + Lwt_stream.get op_stream + >>= function + | Some (kind, shell, protocol_data) when filter_result kind + -> + (* NOTE: Should the protocol change, a new Prevalidation + * context would be created. Thus, we use the same Proto. 
*) + let bytes = + Data_encoding.Binary.to_bytes_exn + Proto.operation_data_encoding + protocol_data + in + let protocol_data = + Data_encoding.Binary.of_bytes_exn + Proto.operation_data_encoding + bytes + in + Lwt.return_some [{Proto.shell; protocol_data}] + | Some _ -> + next () + | None -> + Lwt.return_none ) + in + let shutdown () = Lwt_watcher.shutdown stopper in + RPC_answer.return_stream {next; shutdown}) ; + !dir) module Handlers = struct - type self = worker let on_operation_arrived (pv : state) oph op = pv.fetching <- Operation_hash.Set.remove oph pv.fetching ; - if not (Block_hash.Set.mem op.Operation.shell.branch pv.live_blocks) then begin + if not (Block_hash.Set.mem op.Operation.shell.branch pv.live_blocks) then Distributed_db.Operation.clear_or_cancel pv.chain_db oph (* TODO: put in a specific delayed map ? *) - end else if not (already_handled pv oph) (* prevent double inclusion on flush *) then begin - pv.pending <- Operation_hash.Map.add oph op pv.pending - end + else if + not (already_handled pv oph) (* prevent double inclusion on flush *) + then pv.pending <- Operation_hash.Map.add oph op pv.pending let on_inject pv op = let oph = Operation.hash op in - if already_handled pv oph then - return_unit (* FIXME : is this an error ? *) + if already_handled pv oph then return_unit + (* FIXME : is this an error ? *) else - Lwt.return pv.validation_state >>=? fun validation_state -> - Lwt.return (Prevalidation.parse op) >>=? fun parsed_op -> - Prevalidation.apply_operation validation_state parsed_op >>= function + Lwt.return pv.validation_state + >>=? fun validation_state -> + Lwt.return (Prevalidation.parse op) + >>=? fun parsed_op -> + Prevalidation.apply_operation validation_state parsed_op + >>= function | Applied (_, _result) -> - Distributed_db.inject_operation pv.chain_db oph op >>= fun (_ : bool) -> + Distributed_db.inject_operation pv.chain_db oph op + >>= fun (_ : bool) -> pv.pending <- Operation_hash.Map.add parsed_op.hash op pv.pending ; return_unit | res -> - failwith "Error while applying operation %a:@ %a" - Operation_hash.pp oph - Prevalidation.pp_result res + failwith + "Error while applying operation %a:@ %a" + Operation_hash.pp + oph + Prevalidation.pp_result + res let on_notify w pv peer mempool = let all_ophs = List.fold_left (fun s oph -> Operation_hash.Set.add oph s) - mempool.Mempool.pending mempool.known_valid in + mempool.Mempool.pending + mempool.known_valid + in let to_fetch = Operation_hash.Set.filter (fun oph -> not (already_handled pv oph)) - all_ophs in - pv.fetching <- - Operation_hash.Set.union - to_fetch - pv.fetching ; + all_ophs + in + pv.fetching <- Operation_hash.Set.union to_fetch pv.fetching ; Operation_hash.Set.iter (fun oph -> Lwt.ignore_result (fetch_operation w pv ~peer oph)) to_fetch let on_flush w pv predecessor = - Lwt_watcher.shutdown_input pv.operation_stream; - (State.Block.max_operations_ttl predecessor) >>=? fun max_op_ttl -> - Chain_traversal.live_blocks - predecessor - max_op_ttl >>=? fun (new_live_blocks, - new_live_operations) -> + Lwt_watcher.shutdown_input pv.operation_stream ; + State.Block.max_operations_ttl predecessor + >>=? fun max_op_ttl -> + Chain_traversal.live_blocks predecessor max_op_ttl + >>=? 
fun (new_live_blocks, new_live_operations) -> list_pendings pv.chain_db - ~from_block:pv.predecessor ~to_block:predecessor + ~from_block:pv.predecessor + ~to_block:predecessor ~live_blocks:new_live_blocks (Preapply_result.operations (validation_result pv)) >>= fun pending -> let timestamp_system = Tezos_stdlib_unix.Systime_os.now () in let timestamp = Time.System.to_protocol timestamp_system in - Prevalidation.create ~predecessor ~timestamp () >>= fun validation_state -> - debug w "%d operations were not washed by the flush" + Prevalidation.create ~predecessor ~timestamp () + >>= fun validation_state -> + debug + w + "%d operations were not washed by the flush" (Operation_hash.Map.cardinal pending) ; pv.predecessor <- predecessor ; pv.live_blocks <- new_live_blocks ; pv.live_operations <- new_live_operations ; pv.timestamp <- timestamp_system ; - pv.mempool <- { known_valid = [] ; pending = Operation_hash.Set.empty }; + pv.mempool <- {known_valid = []; pending = Operation_hash.Set.empty} ; pv.pending <- pending ; pv.in_mempool <- Operation_hash.Set.empty ; Ring.clear pv.branch_delayed ; @@ -587,37 +720,36 @@ module Make(Proto: Registered_protocol.T)(Arg: ARG): T = struct let on_advertise pv = match pv.advertisement with - | `None -> () (* should not happen *) + | `None -> + () (* should not happen *) | `Pending mempool -> pv.advertisement <- `None ; - Distributed_db.Advertise.current_head pv.chain_db ~mempool pv.predecessor - - let on_request - : type r. worker -> r Request.t -> r tzresult Lwt.t - = fun w request -> - let pv = Worker.state w in - begin match request with - | Request.Flush hash -> - on_advertise pv ; - (* TODO: rebase the advertisement instead *) - let chain_state = Distributed_db.chain_state pv.chain_db in - State.Block.read chain_state hash >>=? fun block -> - on_flush w pv block >>=? fun () -> - return (() : r) - | Request.Notify (peer, mempool) -> - on_notify w pv peer mempool ; - return_unit - | Request.Inject op -> - on_inject pv op - | Request.Arrived (oph, op) -> - on_operation_arrived pv oph op ; - return_unit - | Request.Advertise -> - on_advertise pv ; - return_unit - end >>=? fun r -> - handle_unprocessed w pv >>= fun () -> - return r + Distributed_db.Advertise.current_head + pv.chain_db + ~mempool + pv.predecessor + + let on_request : type r. worker -> r Request.t -> r tzresult Lwt.t = + fun w request -> + let pv = Worker.state w in + ( match request with + | Request.Flush hash -> + on_advertise pv ; + (* TODO: rebase the advertisement instead *) + let chain_state = Distributed_db.chain_state pv.chain_db in + State.Block.read chain_state hash + >>=? fun block -> on_flush w pv block >>=? fun () -> return (() : r) + | Request.Notify (peer, mempool) -> + on_notify w pv peer mempool ; + return_unit + | Request.Inject op -> + on_inject pv op + | Request.Arrived (oph, op) -> + on_operation_arrived pv oph op ; + return_unit + | Request.Advertise -> + on_advertise pv ; return_unit ) + >>=? 
fun r -> handle_unprocessed w pv >>= fun () -> return r let on_close w = let pv = Worker.state w in @@ -628,37 +760,46 @@ module Make(Proto: Registered_protocol.T)(Arg: ARG): T = struct let on_launch w _ (limits, chain_db) = let chain_state = Distributed_db.chain_state chain_db in - Chain.data chain_state >>= fun - { current_head = predecessor ; current_mempool = mempool ; - live_blocks ; live_operations ; _ } -> + Chain.data chain_state + >>= fun { current_head = predecessor; + current_mempool = mempool; + live_blocks; + live_operations; + _ } -> let timestamp_system = Tezos_stdlib_unix.Systime_os.now () in let timestamp = Time.System.to_protocol timestamp_system in - Prevalidation.create ~predecessor ~timestamp () >>= fun validation_state -> + Prevalidation.create ~predecessor ~timestamp () + >>= fun validation_state -> let fetching = List.fold_left (fun s h -> Operation_hash.Set.add h s) - Operation_hash.Set.empty mempool.known_valid in + Operation_hash.Set.empty + mempool.known_valid + in let pv = - { limits ; chain_db ; - predecessor ; timestamp = timestamp_system ; - live_blocks ; live_operations ; - mempool = { known_valid = [] ; pending = Operation_hash.Set.empty }; - refused = Ring.create limits.max_refused_operations ; - refusals = Operation_hash.Map.empty ; - fetching ; - pending = Operation_hash.Map.empty ; - in_mempool = Operation_hash.Set.empty ; - applied = [] ; - applied_count = 0 ; - branch_refused = Ring.create limits.max_refused_operations ; - branch_refusals = Operation_hash.Map.empty ; - branch_delayed = Ring.create limits.max_refused_operations ; - branch_delays = Operation_hash.Map.empty ; - validation_state ; - operation_stream = Lwt_watcher.create_input () ; - advertisement = `None ; - rpc_directory = rpc_directory ; - } in + { limits; + chain_db; + predecessor; + timestamp = timestamp_system; + live_blocks; + live_operations; + mempool = {known_valid = []; pending = Operation_hash.Set.empty}; + refused = Ring.create limits.max_refused_operations; + refusals = Operation_hash.Map.empty; + fetching; + pending = Operation_hash.Map.empty; + in_mempool = Operation_hash.Set.empty; + applied = []; + applied_count = 0; + branch_refused = Ring.create limits.max_refused_operations; + branch_refusals = Operation_hash.Map.empty; + branch_delayed = Ring.create limits.max_refused_operations; + branch_delays = Operation_hash.Map.empty; + validation_state; + operation_stream = Lwt_watcher.create_input (); + advertisement = `None; + rpc_directory } + in List.iter (fun oph -> Lwt.ignore_result (fetch_operation w pv oph)) mempool.known_valid ; @@ -667,15 +808,16 @@ module Make(Proto: Registered_protocol.T)(Arg: ARG): T = struct let on_error w r st errs = Worker.record_event w (Event.Request (r, st, Some errs)) ; match r with - | Request.(View (Inject _)) -> return_unit - | _ -> Lwt.return_error errs + | Request.(View (Inject _)) -> + return_unit + | _ -> + Lwt.return_error errs let on_completion w r _ st = Worker.record_event w (Event.Request (Request.view r, st, None)) ; Lwt.return_unit let on_no_request _ = return_unit - end let table = Worker.create_table Queue @@ -685,151 +827,155 @@ module Make(Proto: Registered_protocol.T)(Arg: ARG): T = struct * Whilst this is somewhat abusing the intended purpose of worker, it is part * of a transition plan to a one-worker-per-peer architecture.
*) let worker_promise = - Worker.launch table Arg.limits.worker_limits + Worker.launch + table + Arg.limits.worker_limits name (Arg.limits, Arg.chain_db) (module Handlers) - let initialization_errors = - worker_promise >>=? fun _ -> return_unit + let initialization_errors = worker_promise >>=? fun _ -> return_unit - let worker = lazy begin - match Lwt.state worker_promise with - | Lwt.Return (Ok worker) -> worker - | Lwt.Return (Error _) | Lwt.Fail _ | Lwt.Sleep -> assert false - end + let worker = + lazy + ( match Lwt.state worker_promise with + | Lwt.Return (Ok worker) -> + worker + | Lwt.Return (Error _) | Lwt.Fail _ | Lwt.Sleep -> + assert false ) let fitness () = let w = Lazy.force worker in let pv = Worker.state w in - begin - Lwt.return pv.validation_state >>=? fun state -> - Prevalidation.status state >>=? fun status -> - return status.block_result.fitness - end >>= function - | Ok fitness -> Lwt.return fitness + Lwt.return pv.validation_state + >>=? (fun state -> + Prevalidation.status state + >>=? fun status -> return status.block_result.fitness) + >>= function + | Ok fitness -> + Lwt.return fitness | Error _ -> Lwt.return (State.Block.fitness pv.predecessor) - end -module ChainProto_registry = - Registry.Make(struct - type v = t - type t = (Chain_id.t * Protocol_hash.t) - let compare (c1, p1) (c2, p2) = - let pc = Protocol_hash.compare p1 p2 in - if pc = 0 then - Chain_id.compare c1 c2 - else - pc - end) +module ChainProto_registry = Registry.Make (struct + type v = t + + type t = Chain_id.t * Protocol_hash.t + let compare (c1, p1) (c2, p2) = + let pc = Protocol_hash.compare p1 p2 in + if pc = 0 then Chain_id.compare c1 c2 else pc +end) -let create limits (module Proto: Registered_protocol.T) chain_db = +let create limits (module Proto : Registered_protocol.T) chain_db = let chain_state = Distributed_db.chain_state chain_db in let chain_id = State.Chain.id chain_state in match ChainProto_registry.query (chain_id, Proto.hash) with | None -> let module Prevalidator = - Make(Proto)(struct - let limits = limits - let chain_db = chain_db - let chain_id = chain_id - end) in + Make + (Proto) + (struct + let limits = limits + + let chain_db = chain_db + + let chain_id = chain_id + end) + in (* Checking initialization errors before giving a reference to dangerous * `worker` value to caller. *) - Prevalidator.initialization_errors >>=? fun () -> - ChainProto_registry.register Prevalidator.name (module Prevalidator: T); - return (module Prevalidator: T) + Prevalidator.initialization_errors + >>=?
fun () -> + ChainProto_registry.register Prevalidator.name (module Prevalidator : T) ; + return (module Prevalidator : T) | Some p -> return p -let shutdown (t:t) = - let module Prevalidator: T = (val t) in +let shutdown (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in - ChainProto_registry.remove Prevalidator.name; + ChainProto_registry.remove Prevalidator.name ; Prevalidator.Worker.shutdown w -let flush (t:t) head = - let module Prevalidator: T = (val t) in +let flush (t : t) head = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in Prevalidator.Worker.Queue.push_request_and_wait w (Request.Flush head) -let notify_operations (t:t) peer mempool = - let module Prevalidator: T = (val t) in +let notify_operations (t : t) peer mempool = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in Prevalidator.Worker.Queue.push_request w (Request.Notify (peer, mempool)) -let operations (t:t) = - let module Prevalidator: T = (val t) in +let operations (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in let pv = Prevalidator.Worker.state w in - ({ (Prevalidator.validation_result pv) with applied = List.rev pv.applied }, - pv.pending) + ( {(Prevalidator.validation_result pv) with applied = List.rev pv.applied}, + pv.pending ) -let pending (t:t) = - let module Prevalidator: T = (val t) in +let pending (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in let pv = Prevalidator.Worker.state w in let ops = Preapply_result.operations (Prevalidator.validation_result pv) in Lwt.return ops -let timestamp (t:t) = - let module Prevalidator: T = (val t) in +let timestamp (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in let pv = Prevalidator.Worker.state w in pv.timestamp -let fitness (t:t) = - let module Prevalidator: T = (val t) in +let fitness (t : t) = + let module Prevalidator : T = (val t) in Prevalidator.fitness () -let inject_operation (t:t) op = - let module Prevalidator: T = (val t) in +let inject_operation (t : t) op = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in Prevalidator.Worker.Queue.push_request_and_wait w (Inject op) -let status (t:t) = - let module Prevalidator: T = (val t) in +let status (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in Prevalidator.Worker.status w let running_workers () = - ChainProto_registry.fold - (fun (id, proto) t acc -> (id, proto, t) :: acc) - [] + ChainProto_registry.fold (fun (id, proto) t acc -> (id, proto, t) :: acc) [] -let pending_requests (t:t) = - let module Prevalidator: T = (val t) in +let pending_requests (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in Prevalidator.Worker.Queue.pending_requests w -let current_request (t:t) = - let module Prevalidator: T = (val t) in +let current_request (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in Prevalidator.Worker.current_request w -let last_events (t:t) = - let module Prevalidator: T = (val t) in +let last_events (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in Prevalidator.Worker.last_events w -let protocol_hash (t:t) = - let module Prevalidator: T = (val t) in +let protocol_hash (t : t) = + let module Prevalidator : T = (val t) in Prevalidator.Proto.hash -let 
parameters (t:t) = - let module Prevalidator: T = (val t) in +let parameters (t : t) = + let module Prevalidator : T = (val t) in Prevalidator.parameters -let information (t:t) = - let module Prevalidator: T = (val t) in +let information (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in Prevalidator.Worker.information w -let pipeline_length (t:t) = - let module Prevalidator: T = (val t) in +let pipeline_length (t : t) = + let module Prevalidator : T = (val t) in let w = Lazy.force Prevalidator.worker in Prevalidator.Worker.Queue.pending_requests_length w @@ -838,29 +984,33 @@ let empty_rpc_directory : unit RPC_directory.t = RPC_directory.empty (Block_services.Empty.S.Mempool.pending_operations RPC_path.open_root) (fun _pv () () -> - return { - Block_services.Empty.Mempool.applied = [] ; - refused = Operation_hash.Map.empty ; - branch_refused = Operation_hash.Map.empty ; - branch_delayed = Operation_hash.Map.empty ; - unprocessed = Operation_hash.Map.empty ; - }) - - -let rpc_directory : t option RPC_directory.t = + return + { Block_services.Empty.Mempool.applied = []; + refused = Operation_hash.Map.empty; + branch_refused = Operation_hash.Map.empty; + branch_delayed = Operation_hash.Map.empty; + unprocessed = Operation_hash.Map.empty }) + +let rpc_directory : t option RPC_directory.t = RPC_directory.register_dynamic_directory RPC_directory.empty (Block_services.mempool_path RPC_path.open_root) (function | None -> - Lwt.return (RPC_directory.map (fun _ -> Lwt.return_unit) empty_rpc_directory) - | Some t -> - let module Prevalidator: T = (val t: T) in - Prevalidator.initialization_errors >>= function + Lwt.return + (RPC_directory.map (fun _ -> Lwt.return_unit) empty_rpc_directory) + | Some t -> ( + let module Prevalidator : T = (val t : T) in + Prevalidator.initialization_errors + >>= function | Error _ -> - Lwt.return (RPC_directory.map (fun _ -> Lwt.return_unit) empty_rpc_directory) + Lwt.return + (RPC_directory.map + (fun _ -> Lwt.return_unit) + empty_rpc_directory) | Ok () -> let w = Lazy.force Prevalidator.worker in let pv = Prevalidator.Worker.state w in let pv_rpc_dir = Lazy.force pv.rpc_directory in - Lwt.return (RPC_directory.map (fun _ -> Lwt.return pv) pv_rpc_dir)) + Lwt.return + (RPC_directory.map (fun _ -> Lwt.return pv) pv_rpc_dir) )) diff --git a/src/lib_shell/prevalidator.mli b/src/lib_shell/prevalidator.mli index 2155d9138b2b0776822f053c5947c46f5b7ec91f..a141ec8a7dd1d485b4f36e5a9ad8f3565c0533ad 100644 --- a/src/lib_shell/prevalidator.mli +++ b/src/lib_shell/prevalidator.mli @@ -45,68 +45,76 @@ *) - - (** An (abstract) prevalidator context. Separate prevalidator contexts should be * used for separate chains (e.g., mainchain vs testchain). *) type t type limits = { - max_refused_operations : int ; - operation_timeout : Time.System.Span.t ; - worker_limits : Worker_types.limits ; + max_refused_operations : int; + operation_timeout : Time.System.Span.t; + worker_limits : Worker_types.limits } (** Creates/tear-down a new prevalidator context. *) -val create: +val create : limits -> (module Registered_protocol.T) -> Distributed_db.chain_db -> t tzresult Lwt.t -val shutdown: t -> unit Lwt.t + +val shutdown : t -> unit Lwt.t (** Notify the prevalidator that the identified peer has sent a bunch of * operations relevant to the specified context. 
*) -val notify_operations: t -> P2p_peer.Id.t -> Mempool.t -> unit Lwt.t +val notify_operations : t -> P2p_peer.Id.t -> Mempool.t -> unit Lwt.t (** Notify the prevalidator worker of a new injected operation. *) -val inject_operation: t -> Operation.t -> unit tzresult Lwt.t +val inject_operation : t -> Operation.t -> unit tzresult Lwt.t (** Notify the prevalidator that a new head has been selected. *) -val flush: t -> Block_hash.t -> unit tzresult Lwt.t +val flush : t -> Block_hash.t -> unit tzresult Lwt.t (** Returns the timestamp of the prevalidator worker, that is the timestamp of the last reset of the prevalidation context *) -val timestamp: t -> Time.System.t +val timestamp : t -> Time.System.t (** Returns the fitness of the current prevalidation context *) -val fitness: t -> Fitness.t Lwt.t +val fitness : t -> Fitness.t Lwt.t (** Returns the list of valid operations known to this prevalidation worker *) -val operations: t -> (error Preapply_result.t * Operation.t Operation_hash.Map.t) +val operations : + t -> error Preapply_result.t * Operation.t Operation_hash.Map.t (** Returns the list of pending operations known to this prevalidation worker *) -val pending: t -> Operation.t Operation_hash.Map.t Lwt.t +val pending : t -> Operation.t Operation_hash.Map.t Lwt.t (** Returns the list of prevalidation contexts running and their associated chain *) -val running_workers: unit -> (Chain_id.t * Protocol_hash.t * t) list +val running_workers : unit -> (Chain_id.t * Protocol_hash.t * t) list (** Two functions that are useful for managing the prevalidator's transition * from one protocol to the next. *) (** Returns the hash of the protocol the prevalidator was instantiated with *) -val protocol_hash: t -> Protocol_hash.t +val protocol_hash : t -> Protocol_hash.t (** Returns the parameters the prevalidator was created with. *) -val parameters: t -> limits * Distributed_db.chain_db +val parameters : t -> limits * Distributed_db.chain_db (** Worker status and events *) (* None indicates that there are no workers for the current protocol.
*) -val status: t -> Worker_types.worker_status -val pending_requests : t -> (Time.System.t * Prevalidator_worker_state.Request.view) list -val current_request : t -> (Time.System.t * Time.System.t * Prevalidator_worker_state.Request.view) option -val last_events : t -> (Internal_event.level * Prevalidator_worker_state.Event.t list) list +val status : t -> Worker_types.worker_status + +val pending_requests : + t -> (Time.System.t * Prevalidator_worker_state.Request.view) list + +val current_request : + t -> + (Time.System.t * Time.System.t * Prevalidator_worker_state.Request.view) + option + +val last_events : + t -> (Internal_event.level * Prevalidator_worker_state.Event.t list) list val information : t -> Worker_types.worker_information diff --git a/src/lib_shell/protocol_directory.ml b/src/lib_shell/protocol_directory.ml index 8e8e2efd466addecedce95d45b75633751bc1868..d2e079bec80669cee498d9f823a3ee13e8f108e0 100644 --- a/src/lib_shell/protocol_directory.ml +++ b/src/lib_shell/protocol_directory.ml @@ -24,31 +24,30 @@ (*****************************************************************************) let build_rpc_directory block_validator state = - let dir : unit RPC_directory.t ref = ref RPC_directory.empty in let gen_register0 s f = - dir := RPC_directory.gen_register !dir s (fun () p q -> f p q) in + dir := RPC_directory.gen_register !dir s (fun () p q -> f p q) + in let register1 s f = - dir := RPC_directory.register !dir s (fun ((), a) p q -> f a p q) in - - gen_register0 Protocol_services.S.list begin fun () () -> - State.Protocol.list state >>= fun set -> - let protocols = - List.fold_left - (fun acc x -> Protocol_hash.Set.add x acc) - set (Registered_protocol.list_embedded ()) in - RPC_answer.return (Protocol_hash.Set.elements protocols) - end ; - - register1 Protocol_services.S.contents begin fun hash () () -> - match Registered_protocol.get_embedded_sources hash with - | Some p -> return p - | None -> State.Protocol.read state hash - end ; - - register1 Protocol_services.S.fetch begin fun hash () () -> - Block_validator.fetch_and_compile_protocol block_validator hash >>=? fun _proto -> - return_unit - end ; - + dir := RPC_directory.register !dir s (fun ((), a) p q -> f a p q) + in + gen_register0 Protocol_services.S.list (fun () () -> + State.Protocol.list state + >>= fun set -> + let protocols = + List.fold_left + (fun acc x -> Protocol_hash.Set.add x acc) + set + (Registered_protocol.list_embedded ()) + in + RPC_answer.return (Protocol_hash.Set.elements protocols)) ; + register1 Protocol_services.S.contents (fun hash () () -> + match Registered_protocol.get_embedded_sources hash with + | Some p -> + return p + | None -> + State.Protocol.read state hash) ; + register1 Protocol_services.S.fetch (fun hash () () -> + Block_validator.fetch_and_compile_protocol block_validator hash + >>=? 
fun _proto -> return_unit) ; !dir diff --git a/src/lib_shell/protocol_directory.mli b/src/lib_shell/protocol_directory.mli index 539354bfe148af5ac721dc8abbddcc15555722d7..751ef6f43372ecbff224de3f4ec1e60d234b2614 100644 --- a/src/lib_shell/protocol_directory.mli +++ b/src/lib_shell/protocol_directory.mli @@ -23,5 +23,4 @@ (* *) (*****************************************************************************) -val build_rpc_directory: - Block_validator.t -> State.t -> unit RPC_directory.t +val build_rpc_directory : Block_validator.t -> State.t -> unit RPC_directory.t diff --git a/src/lib_shell/protocol_validator.ml b/src/lib_shell/protocol_validator.ml index 708f77e20d563fb7d17accd11c26bb7e75c94746..6d6d5739b46cc0b316f1c4100096a43d0944f1f0 100644 --- a/src/lib_shell/protocol_validator.ml +++ b/src/lib_shell/protocol_validator.ml @@ -25,162 +25,183 @@ open Validation_errors -include Internal_event.Legacy_logging.Make_semantic(struct let name = "node.validator.block" end) +include Internal_event.Legacy_logging.Make_semantic (struct + let name = "node.validator.block" +end) type 'a request = - | Request_validation: { - hash: Protocol_hash.t ; - protocol: Protocol.t ; - } -> Registered_protocol.t tzresult request + | Request_validation : + { hash : Protocol_hash.t; + protocol : Protocol.t } + -> Registered_protocol.t tzresult request -type message = Message: 'a request * 'a Lwt.u option -> message +type message = Message : 'a request * 'a Lwt.u option -> message type t = { - db: Distributed_db.t ; - mutable worker: unit Lwt.t ; - messages: message Lwt_pipe.t ; - canceler: Lwt_canceler.t ; + db : Distributed_db.t; + mutable worker : unit Lwt.t; + messages : message Lwt_pipe.t; + canceler : Lwt_canceler.t } (** Block validation *) let rec worker_loop bv = - begin - protect ~canceler:bv.canceler begin fun () -> - Lwt_pipe.pop bv.messages >>= return - end >>=? function Message (request, wakener) -> - match request with - | Request_validation { hash ; protocol } -> - Updater.compile hash protocol >>= fun valid -> - begin - if valid then - Distributed_db.commit_protocol bv.db hash protocol - else - (* no need to tag 'invalid' protocol on disk, + protect ~canceler:bv.canceler (fun () -> Lwt_pipe.pop bv.messages >>= return) + >>=? (function + | Message (request, wakener) -> ( + match request with + | Request_validation {hash; protocol} -> ( + Updater.compile hash protocol + >>= fun valid -> + ( if valid then + Distributed_db.commit_protocol bv.db hash protocol + else + (* no need to tag 'invalid' protocol on disk, the economic protocol prevents us from being spammed with protocol validation. *) - return_true - end >>=? fun _ -> - match wakener with - | None -> - return_unit - | Some wakener -> - if valid then - match Registered_protocol.get hash with - | Some protocol -> - Lwt.wakeup_later wakener (Ok protocol) - | None -> - Lwt.wakeup_later wakener - (Error - [Invalid_protocol { hash ; - error = Dynlinking_failed }]) - else - Lwt.wakeup_later wakener - (Error - [Invalid_protocol { hash ; - error = Compilation_failed }]) ; - return_unit - end >>= function + return_true ) + >>=? 
fun _ -> + match wakener with + | None -> + return_unit + | Some wakener -> + if valid then + match Registered_protocol.get hash with + | Some protocol -> + Lwt.wakeup_later wakener (Ok protocol) + | None -> + Lwt.wakeup_later + wakener + (Error + [ Invalid_protocol + {hash; error = Dynlinking_failed} ]) + else + Lwt.wakeup_later + wakener + (Error + [Invalid_protocol {hash; error = Compilation_failed}]) ; + return_unit ) )) + >>= function | Ok () -> worker_loop bv - | Error [Canceled | Exn Lwt_pipe.Closed] -> - lwt_log_notice Tag.DSL.(fun f -> - f "terminating" -% t event "terminating") >>= fun () -> - Lwt.return_unit + | Error [(Canceled | Exn Lwt_pipe.Closed)] -> + lwt_log_notice + Tag.DSL.(fun f -> f "terminating" -% t event "terminating") + >>= fun () -> Lwt.return_unit | Error err -> - lwt_log_error Tag.DSL.(fun f -> - f "@[Unexpected error (worker):@ %a@]" - -% t event "unexpected_error" - -% a errs_tag err) >>= fun () -> - Lwt_canceler.cancel bv.canceler >>= fun () -> - Lwt.return_unit + lwt_log_error + Tag.DSL.( + fun f -> + f "@[Unexpected error (worker):@ %a@]" + -% t event "unexpected_error" -% a errs_tag err) + >>= fun () -> + Lwt_canceler.cancel bv.canceler >>= fun () -> Lwt.return_unit let create db = let canceler = Lwt_canceler.create () in let messages = Lwt_pipe.create () in - let bv = { - canceler ; messages ; db ; - worker = Lwt.return_unit } in - Lwt_canceler.on_cancel bv.canceler begin fun () -> - Lwt_pipe.close bv.messages ; - Lwt.return_unit - end ; + let bv = {canceler; messages; db; worker = Lwt.return_unit} in + Lwt_canceler.on_cancel bv.canceler (fun () -> + Lwt_pipe.close bv.messages ; Lwt.return_unit) ; bv.worker <- - Lwt_utils.worker "block_validator" + Lwt_utils.worker + "block_validator" ~on_event:Internal_event.Lwt_worker_event.on_event ~run:(fun () -> worker_loop bv) ~cancel:(fun () -> Lwt_canceler.cancel bv.canceler) ; bv -let shutdown { canceler ; worker ; _ } = - Lwt_canceler.cancel canceler >>= fun () -> - worker +let shutdown {canceler; worker; _} = + Lwt_canceler.cancel canceler >>= fun () -> worker -let validate { messages ; _ } hash protocol = +let validate {messages; _} hash protocol = match Registered_protocol.get hash with | Some protocol -> - lwt_debug Tag.DSL.(fun f -> - f "previously validated protocol %a (before pipe)" - -% t event "previously_validated_protocol" - -% a Protocol_hash.Logging.tag hash) >>= fun () -> - return protocol + lwt_debug + Tag.DSL.( + fun f -> + f "previously validated protocol %a (before pipe)" + -% t event "previously_validated_protocol" + -% a Protocol_hash.Logging.tag hash) + >>= fun () -> return protocol | None -> - let res, wakener = Lwt.task () in - lwt_debug Tag.DSL.(fun f -> - f "pushing validation request for protocol %a" - -% t event "pushing_validation_request" - -% a Protocol_hash.Logging.tag hash) >>= fun () -> - Lwt_pipe.push messages - (Message (Request_validation { hash ; protocol }, - Some wakener)) >>= fun () -> - res + let (res, wakener) = Lwt.task () in + lwt_debug + Tag.DSL.( + fun f -> + f "pushing validation request for protocol %a" + -% t event "pushing_validation_request" + -% a Protocol_hash.Logging.tag hash) + >>= fun () -> + Lwt_pipe.push + messages + (Message (Request_validation {hash; protocol}, Some wakener)) + >>= fun () -> res let fetch_and_compile_protocol pv ?peer ?timeout hash = match Registered_protocol.get hash with - | Some proto -> return proto - | None -> - begin - Distributed_db.Protocol.read_opt pv.db hash >>= function - | Some protocol -> return protocol - | None -> - 
lwt_log_notice Tag.DSL.(fun f -> - f "Fetching protocol %a%a" - -% t event "fetching_protocol" - -% a Protocol_hash.Logging.tag hash - -% a P2p_peer.Id.Logging.tag_source peer) >>= fun () -> - Distributed_db.Protocol.fetch pv.db ?peer ?timeout hash () - end >>=? fun protocol -> - validate pv hash protocol >>=? fun proto -> + | Some proto -> return proto + | None -> + Distributed_db.Protocol.read_opt pv.db hash + >>= (function + | Some protocol -> + return protocol + | None -> + lwt_log_notice + Tag.DSL.( + fun f -> + f "Fetching protocol %a%a" + -% t event "fetching_protocol" + -% a Protocol_hash.Logging.tag hash + -% a P2p_peer.Id.Logging.tag_source peer) + >>= fun () -> + Distributed_db.Protocol.fetch pv.db ?peer ?timeout hash ()) + >>=? fun protocol -> + validate pv hash protocol >>=? fun proto -> return proto -let fetch_and_compile_protocols pv ?peer ?timeout (block: State.Block.t) = +let fetch_and_compile_protocols pv ?peer ?timeout (block : State.Block.t) = let protocol_level = State.Block.protocol_level block in let chain_state = State.Block.chain_state block in - State.Block.context block >>= fun context -> + State.Block.context block + >>= fun context -> let protocol = - Context.get_protocol context >>= fun protocol_hash -> - fetch_and_compile_protocol pv ?peer ?timeout protocol_hash >>=? fun _p -> + Context.get_protocol context + >>= fun protocol_hash -> + fetch_and_compile_protocol pv ?peer ?timeout protocol_hash + >>=? fun _p -> let chain_id = State.Chain.id chain_state in State.Chain.update_level_indexed_protocol_store - chain_state chain_id protocol_level protocol_hash (State.Block.header block) - >>= fun () -> - return_unit + chain_state + chain_id + protocol_level + protocol_hash + (State.Block.header block) + >>= fun () -> return_unit and test_protocol = - Context.get_test_chain context >>= function - | Not_running -> return_unit - | Forking { protocol ; _ } - | Running { protocol ; _ } -> - fetch_and_compile_protocol pv ?peer ?timeout protocol >>=? fun _ -> - begin State.Chain.test chain_state >>= function - | None -> Lwt.return_unit - | Some chain_id -> - State.Chain.update_level_indexed_protocol_store - chain_state chain_id protocol_level protocol (State.Block.header block) - end >>= fun () -> - return_unit in - protocol >>=? fun () -> - test_protocol >>=? fun () -> - return_unit + Context.get_test_chain context + >>= function + | Not_running -> + return_unit + | Forking {protocol; _} | Running {protocol; _} -> + fetch_and_compile_protocol pv ?peer ?timeout protocol + >>=? fun _ -> + State.Chain.test chain_state + >>= (function + | None -> + Lwt.return_unit + | Some chain_id -> + State.Chain.update_level_indexed_protocol_store + chain_state + chain_id + protocol_level + protocol + (State.Block.header block)) + >>= fun () -> return_unit + in + protocol >>=? fun () -> test_protocol >>=? 
fun () -> return_unit let prefetch_and_compile_protocols pv ?peer ?timeout block = - try ignore (fetch_and_compile_protocols pv ?peer ?timeout block) with _ -> () + try ignore (fetch_and_compile_protocols pv ?peer ?timeout block) + with _ -> () diff --git a/src/lib_shell/protocol_validator.mli b/src/lib_shell/protocol_validator.mli index 8a9a20049c5c1ded5319a2e0c6bc738079a0ef5f..73fb0699aeef06c24b6fa10b121a76295d075e9e 100644 --- a/src/lib_shell/protocol_validator.mli +++ b/src/lib_shell/protocol_validator.mli @@ -25,30 +25,26 @@ type t -val create: Distributed_db.t -> t +val create : Distributed_db.t -> t -val validate: - t -> - Protocol_hash.t -> Protocol.t -> - Registered_protocol.t tzresult Lwt.t +val validate : + t -> Protocol_hash.t -> Protocol.t -> Registered_protocol.t tzresult Lwt.t -val shutdown: t -> unit Lwt.t +val shutdown : t -> unit Lwt.t -val fetch_and_compile_protocol: +val fetch_and_compile_protocol : t -> ?peer:P2p_peer.Id.t -> ?timeout:Ptime.Span.t -> - Protocol_hash.t -> Registered_protocol.t tzresult Lwt.t - -val fetch_and_compile_protocols: - t -> - ?peer:P2p_peer.Id.t -> - ?timeout:Ptime.Span.t -> - State.Block.t -> unit tzresult Lwt.t + Protocol_hash.t -> + Registered_protocol.t tzresult Lwt.t -val prefetch_and_compile_protocols: +val fetch_and_compile_protocols : t -> ?peer:P2p_peer.Id.t -> ?timeout:Ptime.Span.t -> - State.Block.t -> unit + State.Block.t -> + unit tzresult Lwt.t +val prefetch_and_compile_protocols : + t -> ?peer:P2p_peer.Id.t -> ?timeout:Ptime.Span.t -> State.Block.t -> unit diff --git a/src/lib_shell/snapshots.ml b/src/lib_shell/snapshots.ml index 5145b6f25721d24b30ac80b0b7d92980a41136fe..60a0d212c48244adde03bb60145d98919b0e60f6 100644 --- a/src/lib_shell/snapshots.ml +++ b/src/lib_shell/snapshots.ml @@ -24,279 +24,340 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make_semantic(struct - let name = "shell.snapshots" - end) +include Internal_event.Legacy_logging.Make_semantic (struct + let name = "shell.snapshots" +end) + +let ( // ) = Filename.concat -let (//) = Filename.concat let context_dir data_dir = data_dir // "context" + let store_dir data_dir = data_dir // "store" type error += Wrong_snapshot_export of History_mode.t * History_mode.t -type error += Wrong_block_export of - Block_hash.t * [ `Pruned | `Too_few_predecessors | `Cannot_be_found ] + +type error += + | Wrong_block_export of + Block_hash.t * [`Pruned | `Too_few_predecessors | `Cannot_be_found] + type error += Inconsistent_imported_block of Block_hash.t * Block_hash.t + type error += Snapshot_import_failure of string + type error += Wrong_protocol_hash of Protocol_hash.t -type error += Inconsistent_operation_hashes of - (Operation_list_list_hash.t * Operation_list_list_hash.t) -let () = begin - let open Data_encoding in +type error += + | Inconsistent_operation_hashes of + (Operation_list_list_hash.t * Operation_list_list_hash.t) +let () = + let open Data_encoding in register_error_kind `Permanent ~id:"WrongSnapshotExport" ~title:"Wrong snapshot export" - ~description:"Snapshot exports is not compatible with the current configuration." - ~pp:begin fun ppf (src,dst) -> - Format.fprintf ppf + ~description: + "Snapshot exports is not compatible with the current configuration." + ~pp:(fun ppf (src, dst) -> + Format.fprintf + ppf "Cannot export a %a snapshot from a %a node." 
- History_mode.pp dst History_mode.pp src - end - (obj2 - (req "src" History_mode.encoding) - (req "dst" History_mode.encoding)) - (function Wrong_snapshot_export (src,dst) -> Some (src, dst) | _ -> None) + History_mode.pp + dst + History_mode.pp + src) + (obj2 (req "src" History_mode.encoding) (req "dst" History_mode.encoding)) + (function + | Wrong_snapshot_export (src, dst) -> Some (src, dst) | _ -> None) (fun (src, dst) -> Wrong_snapshot_export (src, dst)) ; - let pp_wrong_block_export_error ppf kind = let str = match kind with - | `Pruned -> "is pruned" - | `Too_few_predecessors -> "has not enough predecessors" - | `Cannot_be_found -> "cannot be found" in - Format.fprintf ppf "%s" str in + | `Pruned -> + "is pruned" + | `Too_few_predecessors -> + "has not enough predecessors" + | `Cannot_be_found -> + "cannot be found" + in + Format.fprintf ppf "%s" str + in let error_kind_encoding = string_enum - [ "pruned", `Pruned ; - "too_few_predecessors", `Too_few_predecessors ; - "cannot_be_found", `Cannot_be_found ] in + [ ("pruned", `Pruned); + ("too_few_predecessors", `Too_few_predecessors); + ("cannot_be_found", `Cannot_be_found) ] + in register_error_kind `Permanent ~id:"WrongBlockExport" ~title:"Wrong block export" ~description:"The block to export in the snapshot is not valid." - ~pp:(fun ppf (bh,kind) -> - Format.fprintf ppf - "Fails to export snapshot as the block with block hash %a %a." - Block_hash.pp bh pp_wrong_block_export_error kind) + ~pp:(fun ppf (bh, kind) -> + Format.fprintf + ppf + "Fails to export snapshot as the block with block hash %a %a." + Block_hash.pp + bh + pp_wrong_block_export_error + kind) (obj2 (req "block_hash" Block_hash.encoding) (req "kind" error_kind_encoding)) (function Wrong_block_export (bh, kind) -> Some (bh, kind) | _ -> None) (fun (bh, kind) -> Wrong_block_export (bh, kind)) ; - register_error_kind `Permanent ~id:"InconsistentImportedBlock" ~title:"Inconsistent imported block" ~description:"The imported block is not the expected one." - ~pp:begin fun ppf (got,exp) -> - Format.fprintf ppf + ~pp:(fun ppf (got, exp) -> + Format.fprintf + ppf "The block contained in the file is %a instead of %a." - Block_hash.pp got Block_hash.pp exp - end + Block_hash.pp + got + Block_hash.pp + exp) (obj2 (req "block_hash" Block_hash.encoding) (req "block_hash_expected" Block_hash.encoding)) - (function Inconsistent_imported_block (got, exp) -> Some (got, exp) | _ -> None) + (function + | Inconsistent_imported_block (got, exp) -> Some (got, exp) | _ -> None) (fun (got, exp) -> Inconsistent_imported_block (got, exp)) ; - register_error_kind `Permanent ~id:"SnapshotImportFailure" ~title:"Snapshot import failure" ~description:"The imported snapshot is malformed." - ~pp:begin fun ppf msg -> - Format.fprintf ppf - "The data contained in the snapshot is not valid. The import mechanism \ - failed to validate the file: %s." - msg - end + ~pp:(fun ppf msg -> + Format.fprintf + ppf + "The data contained in the snapshot is not valid. The import \ + mechanism failed to validate the file: %s." + msg) (obj1 (req "message" string)) (function Snapshot_import_failure str -> Some str | _ -> None) (fun str -> Snapshot_import_failure str) ; - register_error_kind `Permanent ~id:"WrongProtocolHash" ~title:"Wrong protocol hash" ~description:"Wrong protocol hash" ~pp:(fun ppf p -> - Format.fprintf ppf - "Wrong protocol hash (%a) found in snapshot. Snapshot is corrupted." - Protocol_hash.pp p) + Format.fprintf + ppf + "Wrong protocol hash (%a) found in snapshot. Snapshot is corrupted." 
+ Protocol_hash.pp + p) (obj1 (req "protocol_hash" Protocol_hash.encoding)) (function Wrong_protocol_hash p -> Some p | _ -> None) (fun p -> Wrong_protocol_hash p) ; - register_error_kind `Permanent ~id:"InconsistentOperationHashes" ~title:"Inconsistent operation hashes" ~description:"The operations given do not match their hashes." ~pp:(fun ppf (oph, oph') -> - Format.fprintf ppf - "Inconsistent operation hashes. Expected: %a, got: %a." - Operation_list_list_hash.pp oph Operation_list_list_hash.pp oph') + Format.fprintf + ppf + "Inconsistent operation hashes. Expected: %a, got: %a." + Operation_list_list_hash.pp + oph + Operation_list_list_hash.pp + oph') (obj2 (req "expected_operation_hashes" Operation_list_list_hash.encoding) (req "received_operation_hashes" Operation_list_list_hash.encoding)) (function - | Inconsistent_operation_hashes (oph, oph') -> Some (oph, oph') - | _ -> None) - (fun (oph, oph') -> Inconsistent_operation_hashes (oph, oph')) ; -end - -let compute_export_limit - block_store chain_data_store - block_header export_rolling = + | Inconsistent_operation_hashes (oph, oph') -> + Some (oph, oph') + | _ -> + None) + (fun (oph, oph') -> Inconsistent_operation_hashes (oph, oph')) + +let compute_export_limit block_store chain_data_store block_header + export_rolling = let block_hash = Block_header.hash block_header in - Store.Block.Contents.read_opt - (block_store, block_hash) >>= begin function - | Some contents -> return contents - | None -> fail (Wrong_block_export (block_hash, `Pruned)) - end >>=? fun { max_operations_ttl ; _ } -> + Store.Block.Contents.read_opt (block_store, block_hash) + >>= (function + | Some contents -> + return contents + | None -> + fail (Wrong_block_export (block_hash, `Pruned))) + >>=? fun {max_operations_ttl; _} -> if not export_rolling then - Store.Chain_data.Caboose.read chain_data_store >>=? fun (caboose_level, _) -> - return (max 1l caboose_level) + Store.Chain_data.Caboose.read chain_data_store + >>=? fun (caboose_level, _) -> return (max 1l caboose_level) else - let limit = Int32.(sub - block_header.Block_header.shell.level - (of_int max_operations_ttl)) in + let limit = + Int32.( + sub block_header.Block_header.shell.level (of_int max_operations_ttl)) + in (* fails when the limit exceeds the genesis or the genesis is included in the export limit *) fail_when (limit <= 0l) - (Wrong_block_export (block_hash, `Too_few_predecessors)) >>=? fun () -> - return limit + (Wrong_block_export (block_hash, `Too_few_predecessors)) + >>=? fun () -> return limit (** When called with a block, returns its predecessor if it exists and its protocol_data if the block is a transition block (i.e. protocol level changing block) or when there is no more predecessor. *) let pruned_block_iterator index block_store limit header = if header.Block_header.shell.level <= limit then - Context.get_protocol_data_from_header index header >>= fun protocol_data -> - return (None, Some protocol_data) + Context.get_protocol_data_from_header index header + >>= fun protocol_data -> return (None, Some protocol_data) else let pred_hash = header.Block_header.shell.predecessor in - State.Block.Header.read (block_store, pred_hash) >>=? 
fun pred_header -> - Store.Block.Operations.bindings (block_store, pred_hash) >>= fun pred_operations -> - Store.Block.Operation_hashes.bindings (block_store, pred_hash) >>= fun pred_operation_hashes -> - let pruned_block = { - Context.Pruned_block.block_header = pred_header ; - operations = pred_operations ; - operation_hashes = pred_operation_hashes ; - } in + State.Block.Header.read (block_store, pred_hash) + >>=? fun pred_header -> + Store.Block.Operations.bindings (block_store, pred_hash) + >>= fun pred_operations -> + Store.Block.Operation_hashes.bindings (block_store, pred_hash) + >>= fun pred_operation_hashes -> + let pruned_block = + { Context.Pruned_block.block_header = pred_header; + operations = pred_operations; + operation_hashes = pred_operation_hashes } + in let header_proto_level = header.Block_header.shell.proto_level in let pred_header_proto_level = pred_header.Block_header.shell.proto_level in if header_proto_level <> pred_header_proto_level then - Context.get_protocol_data_from_header index header >>= fun proto_data -> - return (Some pruned_block, Some proto_data) - else - return (Some pruned_block, None) + Context.get_protocol_data_from_header index header + >>= fun proto_data -> return (Some pruned_block, Some proto_data) + else return (Some pruned_block, None) -let filename_tag = - Tag.def "filename" Format.pp_print_string +let filename_tag = Tag.def "filename" Format.pp_print_string -let block_level_tag = - Tag.def "block_level" Format.pp_print_int +let block_level_tag = Tag.def "block_level" Format.pp_print_int -let export ?(export_rolling=false) ~context_index ~store ~genesis filename block = +let export ?(export_rolling = false) ~context_index ~store ~genesis filename + block = let chain_id = Chain_id.of_block_hash genesis in let chain_store = Store.Chain.get store chain_id in let chain_data_store = Store.Chain_data.get chain_store in let block_store = Store.Block.get chain_store in - begin Store.Configuration.History_mode.read_opt store >>= function - | Some (Archive | Full) | None -> return_unit - | Some (Rolling as history_mode) -> - if export_rolling then return_unit else - fail (Wrong_snapshot_export (history_mode, History_mode.Full)) - end >>=? fun () -> - begin match block with - | Some block_hash -> Lwt.return (Block_hash.of_b58check block_hash) - | None -> - Store.Chain_data.Checkpoint.read_opt (chain_data_store) >|= - Option.unopt_assert ~loc:__POS__ >>= fun last_checkpoint -> - if last_checkpoint.shell.level = 0l then - fail (Wrong_block_export (genesis, `Too_few_predecessors)) - else - let last_checkpoint_hash = Block_header.hash last_checkpoint in - lwt_log_notice Tag.DSL.(fun f -> - f "There is no block hash specified with the `--block` option. Using %a (last checkpoint)" - -%a Block_hash.Logging.tag last_checkpoint_hash - ) >>= fun () -> - return last_checkpoint_hash - end >>=? 
fun checkpoint_block_hash -> - begin State.Block.Header.read_opt - (block_store, checkpoint_block_hash) >>= function - | None -> - fail (Wrong_block_export (checkpoint_block_hash, `Cannot_be_found)) - | Some block_header -> - let export_mode = if export_rolling then History_mode.Rolling else Full in - lwt_log_notice Tag.DSL.(fun f -> - f "Exporting a snapshot in mode %a, targeting block hash %a at level %a" - -%a History_mode.tag export_mode - -%a Block_hash.Logging.tag checkpoint_block_hash - -%a block_level_tag (Int32.to_int block_header.shell.level) - ) >>= fun () -> - (* Get block precessor's block header *) - Store.Block.Predecessors.read - (block_store, checkpoint_block_hash) 0 >>=? fun pred_block_hash -> - State.Block.Header.read - (block_store, pred_block_hash) >>=? fun pred_block_header -> - (* Get operation list *) - let validations_passes = block_header.shell.validation_passes in - map_s - (fun i -> Store.Block.Operations.read (block_store, checkpoint_block_hash) i) - (0 -- (validations_passes - 1)) >>=? fun operations -> - compute_export_limit - block_store chain_data_store block_header export_rolling >>=? fun export_limit -> - let iterator = pruned_block_iterator context_index block_store export_limit in - let block_data = { Context.Block_data.block_header ; operations } in - return (pred_block_header, block_data, export_mode, iterator) - end >>=? fun data_to_dump -> - lwt_log_notice (fun f -> f "Now loading data") >>= fun () -> - Context.dump_contexts context_index data_to_dump ~filename >>=? fun () -> - lwt_log_notice Tag.DSL.(fun f -> - f "@[Successful export: %a@]" - -% a filename_tag filename - ) >>= fun () -> - return_unit + Store.Configuration.History_mode.read_opt store + >>= (function + | Some (Archive | Full) | None -> + return_unit + | Some (Rolling as history_mode) -> + if export_rolling then return_unit + else fail (Wrong_snapshot_export (history_mode, History_mode.Full))) + >>=? fun () -> + ( match block with + | Some block_hash -> + Lwt.return (Block_hash.of_b58check block_hash) + | None -> + Store.Chain_data.Checkpoint.read_opt chain_data_store + >|= Option.unopt_assert ~loc:__POS__ + >>= fun last_checkpoint -> + if last_checkpoint.shell.level = 0l then + fail (Wrong_block_export (genesis, `Too_few_predecessors)) + else + let last_checkpoint_hash = Block_header.hash last_checkpoint in + lwt_log_notice + Tag.DSL.( + fun f -> + f + "There is no block hash specified with the `--block` option. \ + Using %a (last checkpoint)" + -% a Block_hash.Logging.tag last_checkpoint_hash) + >>= fun () -> return last_checkpoint_hash ) + >>=? fun checkpoint_block_hash -> + State.Block.Header.read_opt (block_store, checkpoint_block_hash) + >>= (function + | None -> + fail (Wrong_block_export (checkpoint_block_hash, `Cannot_be_found)) + | Some block_header -> + let export_mode = + if export_rolling then History_mode.Rolling else Full + in + lwt_log_notice + Tag.DSL.( + fun f -> + f + "Exporting a snapshot in mode %a, targeting block hash %a \ + at level %a" + -% a History_mode.tag export_mode + -% a Block_hash.Logging.tag checkpoint_block_hash + -% a block_level_tag (Int32.to_int block_header.shell.level)) + >>= fun () -> + (* Get block precessor's block header *) + Store.Block.Predecessors.read + (block_store, checkpoint_block_hash) + 0 + >>=? fun pred_block_hash -> + State.Block.Header.read (block_store, pred_block_hash) + >>=? 
fun pred_block_header -> + (* Get operation list *) + let validations_passes = block_header.shell.validation_passes in + map_s + (fun i -> + Store.Block.Operations.read + (block_store, checkpoint_block_hash) + i) + (0 -- (validations_passes - 1)) + >>=? fun operations -> + compute_export_limit + block_store + chain_data_store + block_header + export_rolling + >>=? fun export_limit -> + let iterator = + pruned_block_iterator context_index block_store export_limit + in + let block_data = {Context.Block_data.block_header; operations} in + return (pred_block_header, block_data, export_mode, iterator)) + >>=? fun data_to_dump -> + lwt_log_notice (fun f -> f "Now loading data") + >>= fun () -> + Context.dump_contexts context_index data_to_dump ~filename + >>=? fun () -> + lwt_log_notice + Tag.DSL.(fun f -> f "@[Successful export: %a@]" -% a filename_tag filename) + >>= fun () -> return_unit let check_operations_consistency block_header operations operation_hashes = (* Compute operations hashes and compare *) List.iter2 (fun (_, op) (_, oph) -> - let expected_op_hash = List.map Operation.hash op in - List.iter2 (fun expected found -> - assert (Operation_hash.equal expected found) (* paul:here *) - ) expected_op_hash oph ; - ) - operations operation_hashes ; + let expected_op_hash = List.map Operation.hash op in + List.iter2 + (fun expected found -> + assert (Operation_hash.equal expected found) (* paul:here *)) + expected_op_hash + oph) + operations + operation_hashes ; (* Check header hashes based on merkel tree *) - let hashes = List.map (fun (_,opl) -> - List.map Operation.hash opl) - (List.rev operations) in + let hashes = + List.map + (fun (_, opl) -> List.map Operation.hash opl) + (List.rev operations) + in let computed_hash = Operation_list_list_hash.compute - (List.map Operation_list_hash.compute hashes) in - let are_oph_equal = Operation_list_list_hash.equal + (List.map Operation_list_hash.compute hashes) + in + let are_oph_equal = + Operation_list_list_hash.equal computed_hash - block_header.Block_header.shell.operations_hash in - fail_unless are_oph_equal + block_header.Block_header.shell.operations_hash + in + fail_unless + are_oph_equal (Inconsistent_operation_hashes (computed_hash, block_header.Block_header.shell.operations_hash)) let compute_predecessors ~genesis_hash oldest_level block_hashes i = let rec step s d acc = - if oldest_level = 1l && i - d = -1 then - List.rev ((s, genesis_hash) :: acc) - else if i - d < 0 then - List.rev acc - else - step (s + 1) (d * 2) ((s, block_hashes.(i - d)) :: acc) in + if oldest_level = 1l && i - d = -1 then List.rev ((s, genesis_hash) :: acc) + else if i - d < 0 then List.rev acc + else step (s + 1) (d * 2) ((s, block_hashes.(i - d)) :: acc) + in step 0 1 [] let check_context_hash_consistency block_validation_result block_header = @@ -309,87 +370,98 @@ let check_context_hash_consistency block_validation_result block_header = let set_history_mode store history_mode = match history_mode with | History_mode.Full | History_mode.Rolling -> - lwt_log_notice Tag.DSL.(fun f -> - f "Setting history-mode to %a" - -%a History_mode.tag history_mode - ) >>= fun () -> - Store.Configuration.History_mode.store store history_mode >>= fun () -> return_unit + lwt_log_notice + Tag.DSL.( + fun f -> + f "Setting history-mode to %a" -% a History_mode.tag history_mode) + >>= fun () -> + Store.Configuration.History_mode.store store history_mode + >>= fun () -> return_unit | History_mode.Archive -> fail (Snapshot_import_failure "cannot import an archive context") -let 
store_new_head - chain_state chain_data - ~genesis block_header - operations block_validation_result = - let { Tezos_validation.Block_validation. - validation_result ; - block_metadata ; - ops_metadata ; - forking_testchain ; - context_hash } = block_validation_result in - let validation_store = { - State.Block.context_hash ; - message = validation_result.message ; - max_operations_ttl = validation_result.max_operations_ttl ; - last_allowed_fork_level = validation_result.last_allowed_fork_level ; - } in +let store_new_head chain_state chain_data ~genesis block_header operations + block_validation_result = + let { Tezos_validation.Block_validation.validation_result; + block_metadata; + ops_metadata; + forking_testchain; + context_hash } = + block_validation_result + in + let validation_store = + { State.Block.context_hash; + message = validation_result.message; + max_operations_ttl = validation_result.max_operations_ttl; + last_allowed_fork_level = validation_result.last_allowed_fork_level } + in State.Block.store chain_state - block_header block_metadata - operations ops_metadata + block_header + block_metadata + operations + ops_metadata ~forking_testchain - validation_store >>=? fun new_head -> - begin match new_head with - | None -> - (* Should not happen as the data-dir must be empty *) - fail (Snapshot_import_failure "a chain head is already present in the store") - | Some new_head -> - (* New head is set*) - Store.Chain_data.Known_heads.remove chain_data genesis >>= fun () -> - Store.Chain_data.Known_heads.store chain_data (State.Block.hash new_head) >>= fun () -> - Store.Chain_data.Current_head.store chain_data (State.Block.hash new_head) >>= fun () -> - return_unit end + validation_store + >>=? fun new_head -> + match new_head with + | None -> + (* Should not happen as the data-dir must be empty *) + fail + (Snapshot_import_failure "a chain head is already present in the store") + | Some new_head -> + (* New head is set*) + Store.Chain_data.Known_heads.remove chain_data genesis + >>= fun () -> + Store.Chain_data.Known_heads.store chain_data (State.Block.hash new_head) + >>= fun () -> + Store.Chain_data.Current_head.store + chain_data + (State.Block.hash new_head) + >>= fun () -> return_unit let update_checkpoint chain_state checkpoint_header = let block_hash = Block_header.hash checkpoint_header in (* Imported block is set as the current checkpoint/save_point … *) - let new_checkpoint = (checkpoint_header.Block_header.shell.level, block_hash) in - State.Chain.set_checkpoint chain_state checkpoint_header >>= fun () -> - Lwt.return new_checkpoint + let new_checkpoint = + (checkpoint_header.Block_header.shell.level, block_hash) + in + State.Chain.set_checkpoint chain_state checkpoint_header + >>= fun () -> Lwt.return new_checkpoint let update_savepoint chain_state new_savepoint = - State.update_chain_data chain_state begin fun store data -> - let new_data = { data with save_point = new_savepoint } in - Store.Chain_data.Save_point.store store new_savepoint >>= fun () -> - Lwt.return (Some new_data, ()) - end + State.update_chain_data chain_state (fun store data -> + let new_data = {data with save_point = new_savepoint} in + Store.Chain_data.Save_point.store store new_savepoint + >>= fun () -> Lwt.return (Some new_data, ())) let update_caboose chain_data ~genesis block_header oldest_header max_op_ttl = let oldest_level = oldest_header.Block_header.shell.level in - let caboose_level = - if oldest_level = 1l then 0l else oldest_level in + let caboose_level = if oldest_level = 1l then 0l 
else oldest_level in let caboose_hash = - if oldest_level = 1l then genesis else Block_header.hash oldest_header in + if oldest_level = 1l then genesis else Block_header.hash oldest_header + in let minimal_caboose_level = - Int32.(sub - block_header.Block_header.shell.level - (of_int max_op_ttl)) in + Int32.(sub block_header.Block_header.shell.level (of_int max_op_ttl)) + in fail_unless Compare.Int32.(caboose_level <= minimal_caboose_level) (Snapshot_import_failure - (Format.sprintf "caboose level (%ld) is not valid" caboose_level)) >>=? fun () -> - Store.Chain_data.Caboose.store chain_data (caboose_level, caboose_hash) >>= fun () -> - return_unit + (Format.sprintf "caboose level (%ld) is not valid" caboose_level)) + >>=? fun () -> + Store.Chain_data.Caboose.store chain_data (caboose_level, caboose_hash) + >>= fun () -> return_unit -let import_protocol_data index store block_hash_arr level_oldest_block (level, protocol_data) = +let import_protocol_data index store block_hash_arr level_oldest_block + (level, protocol_data) = (* Retrieve the original context hash of the block. *) let delta = Int32.(to_int (sub level level_oldest_block)) in let pruned_block_hash = block_hash_arr.(delta) in let block_store = Store.Block.get store in - begin State.Block.Header.read_opt (block_store, pruned_block_hash) >>= function - | None -> assert false - | Some block_header -> Lwt.return block_header - end >>= fun block_header -> + State.Block.Header.read_opt (block_store, pruned_block_hash) + >>= (function + | None -> assert false | Some block_header -> Lwt.return block_header) + >>= fun block_header -> let expected_context_hash = block_header.Block_header.shell.context in (* Retrieve the input info. *) let info = protocol_data.Context.Protocol_data.info in @@ -407,206 +479,264 @@ let import_protocol_data index store block_hash_arr level_oldest_block (level, p ~expected_context_hash ~test_chain ~protocol_hash - ~index >>= function + ~index + >>= function | true -> let protocol_level = block_header.shell.proto_level in let block_level = block_header.shell.level in - Store.Chain.Protocol_info.store store protocol_level (protocol_hash, block_level) >>= fun () -> - return_unit - | false -> fail (Wrong_protocol_hash protocol_hash) - -let import_protocol_data_list index store block_hash_arr level_oldest_block protocol_data = + Store.Chain.Protocol_info.store + store + protocol_level + (protocol_hash, block_level) + >>= fun () -> return_unit + | false -> + fail (Wrong_protocol_hash protocol_hash) + +let import_protocol_data_list index store block_hash_arr level_oldest_block + protocol_data = let rec aux = function - | [] -> return_unit + | [] -> + return_unit | (level, protocol_data) :: xs -> import_protocol_data - index store - block_hash_arr level_oldest_block - (level, protocol_data) >>=? fun () -> - aux xs in + index + store + block_hash_arr + level_oldest_block + (level, protocol_data) + >>=? 
fun () -> aux xs + in aux protocol_data -let verify_predecessors header_opt pred_hash = match header_opt with - | None -> return_unit +let verify_predecessors header_opt pred_hash = + match header_opt with + | None -> + return_unit | Some header -> - fail_unless (header.Block_header.shell.level >= 2l && - Block_hash.equal header.shell.predecessor pred_hash) + fail_unless + ( header.Block_header.shell.level >= 2l + && Block_hash.equal header.shell.predecessor pred_hash ) (Snapshot_import_failure "inconsistent predecessors") let verify_oldest_header oldest_header genesis_hash = let oldest_level = oldest_header.Block_header.shell.level in - fail_unless (oldest_level >= 1l || - (Compare.Int32.(oldest_level = 1l) && - Block_hash.equal oldest_header.Block_header.shell.predecessor genesis_hash)) + fail_unless + ( oldest_level >= 1l + || Compare.Int32.(oldest_level = 1l) + && Block_hash.equal + oldest_header.Block_header.shell.predecessor + genesis_hash ) (Snapshot_import_failure "inconsistent oldest level") -let block_validation - succ_header_opt header_hash - { Context.Pruned_block.block_header ; operations ; operation_hashes } = - verify_predecessors succ_header_opt header_hash >>=? fun () -> - check_operations_consistency block_header operations operation_hashes >>=? fun () -> - return_unit +let block_validation succ_header_opt header_hash + {Context.Pruned_block.block_header; operations; operation_hashes} = + verify_predecessors succ_header_opt header_hash + >>=? fun () -> + check_operations_consistency block_header operations operation_hashes + >>=? fun () -> return_unit let import ~data_dir ~dir_cleaner ~patch_context ~genesis filename block = - lwt_log_notice Tag.DSL.(fun f -> - f "Importing data from snapshot file %a" -%a filename_tag filename - ) >>= fun () -> - begin match block with - | None -> - lwt_log_notice (fun f -> - f "You may consider using the --block <block_hash> \ - argument to verify that the block imported is the one you expect" - ) - | Some _ -> Lwt.return_unit - end >>= fun () -> + lwt_log_notice + Tag.DSL.( + fun f -> + f "Importing data from snapshot file %a" -% a filename_tag filename) + >>= fun () -> + ( match block with + | None -> + lwt_log_notice (fun f -> + f + "You may consider using the --block <block_hash> argument to \ + verify that the block imported is the one you expect") + | Some _ -> + Lwt.return_unit ) + >>= fun () -> lwt_log_notice (fun f -> - f "Retrieving and validating data. This can take a while, please bear with us" - ) >>= fun () -> + f + "Retrieving and validating data. This can take a while, please bear \ + with us") + >>= fun () -> let context_root = context_dir data_dir in let store_root = store_dir data_dir in let chain_id = Chain_id.of_block_hash genesis.State.Chain.block in (* FIXME: use config value ? *) State.init - ~context_root ~store_root genesis - ~patch_context:(patch_context None) >>=? fun (state, chain_state, context_index, _history_mode) -> - Store.init store_root >>=? fun store -> + ~context_root + ~store_root + genesis + ~patch_context:(patch_context None) + >>=? fun (state, chain_state, context_index, _history_mode) -> + Store.init store_root + >>=? 
fun store -> let chain_store = Store.Chain.get store chain_id in let chain_data = Store.Chain_data.get chain_store in let block_store = Store.Block.get chain_store in let open Context in Lwt.try_bind (fun () -> - let k_store_pruned_blocks data = - Store.with_atomic_rw store begin fun () -> - Error_monad.iter_s begin fun (pruned_header_hash, pruned_block) -> - Store.Block.Pruned_contents.store - (block_store, pruned_header_hash) - { header = pruned_block.Context.Pruned_block.block_header } >>= fun () -> - Lwt_list.iter_s - (fun (i, v) -> Store.Block.Operations.store (block_store, pruned_header_hash) i v) - pruned_block.operations >>= fun () -> - Lwt_list.iter_s - (fun (i, v) -> Store.Block.Operation_hashes.store (block_store, pruned_header_hash) i v) - pruned_block.operation_hashes >>= fun () -> - return_unit - end data - end - in - (* Restore context and fetch data *) - restore_contexts - context_index ~filename k_store_pruned_blocks block_validation >>=? - fun (predecessor_block_header, meta, history_mode, oldest_header_opt, - rev_block_hashes, protocol_data) -> - let oldest_header = Option.unopt_assert ~loc:__POS__ oldest_header_opt in - let block_hashes_arr = Array.of_list rev_block_hashes in - - let write_predecessors_table to_write = - Store.with_atomic_rw store (fun () -> - Lwt_list.iter_s (fun (current_hash, predecessors_list) -> - Lwt_list.iter_s (fun (l, h) -> - Store.Block.Predecessors.store (block_store, current_hash) l h - ) predecessors_list >>= fun () -> - match predecessors_list with - | (0, pred_hash) :: _ -> - Store.Chain_data.In_main_branch.store (chain_data, pred_hash) current_hash - | [] -> Lwt.return_unit - | _ :: _ -> assert false ) - to_write) in - - Lwt_list.fold_left_s (fun (cpt, to_write) current_hash -> - Tezos_stdlib.Utils.display_progress - ~refresh_rate:(cpt, 1_000) - "Computing predecessors table %dK elements%!" - (cpt / 1_000); - begin if (cpt + 1) mod 5_000 = 0 then - write_predecessors_table to_write >>= fun () -> - Lwt.return_nil - else - Lwt.return to_write - end >>= fun to_write -> - let predecessors_list = - compute_predecessors - ~genesis_hash:genesis.block oldest_header.shell.level block_hashes_arr cpt in - Lwt.return (cpt + 1, (current_hash, predecessors_list) :: to_write) - ) (0, []) rev_block_hashes >>= fun (_, to_write) -> - write_predecessors_table to_write >>= fun () -> - Tezos_stdlib.Utils.display_progress_end () ; - - (* Process data imported from snapshot *) - let { Block_data.block_header ; operations } = meta in - let block_hash = Block_header.hash block_header in - (* Checks that the block hash imported by the snapshot is the expected one *) - begin - match block with - | Some str -> - let bh = Block_hash.of_b58check_exn str in - fail_unless - (Block_hash.equal bh block_hash) - (Inconsistent_imported_block (bh, block_hash)) - | None -> - return_unit - end >>=? fun () -> - - lwt_log_notice Tag.DSL.(fun f -> - f "Setting current head to block %a" - -% a Block_hash.Logging.tag (Block_header.hash block_header) - ) >>= fun () -> - let pred_context_hash = predecessor_block_header.shell.context in - - checkout_exn context_index pred_context_hash >>= fun predecessor_context -> - - (* ... we can now call apply ... *) - Tezos_validation.Block_validation.apply - chain_id - ~max_operations_ttl:(Int32.to_int predecessor_block_header.shell.level) - ~predecessor_block_header:predecessor_block_header - ~predecessor_context - ~block_header - operations >>=? 
fun block_validation_result -> - - check_context_hash_consistency - block_validation_result - block_header >>=? fun () -> - - verify_oldest_header oldest_header genesis.block >>=? fun () -> - - (* ... we set the history mode regarding the snapshot version hint ... *) - set_history_mode store history_mode >>=? fun () -> - - (* ... and we import protocol data...*) - import_protocol_data_list - context_index chain_store block_hashes_arr - oldest_header.Block_header.shell.level protocol_data >>=? fun () -> - - (* Everything is ok. We can store the new head *) - store_new_head - chain_state - chain_data - ~genesis:genesis.block - block_header - operations - block_validation_result >>=? fun () -> - - (* Update history mode flags *) - update_checkpoint chain_state block_header >>= fun new_checkpoint -> - update_savepoint chain_state new_checkpoint >>= fun () -> - update_caboose - chain_data - ~genesis:genesis.block block_header oldest_header - block_validation_result.validation_result.max_operations_ttl >>=? fun () -> - Store.close store ; - State.close state >>= fun () -> - return_unit) + let k_store_pruned_blocks data = + Store.with_atomic_rw store (fun () -> + Error_monad.iter_s + (fun (pruned_header_hash, pruned_block) -> + Store.Block.Pruned_contents.store + (block_store, pruned_header_hash) + {header = pruned_block.Context.Pruned_block.block_header} + >>= fun () -> + Lwt_list.iter_s + (fun (i, v) -> + Store.Block.Operations.store + (block_store, pruned_header_hash) + i + v) + pruned_block.operations + >>= fun () -> + Lwt_list.iter_s + (fun (i, v) -> + Store.Block.Operation_hashes.store + (block_store, pruned_header_hash) + i + v) + pruned_block.operation_hashes + >>= fun () -> return_unit) + data) + in + (* Restore context and fetch data *) + restore_contexts + context_index + ~filename + k_store_pruned_blocks + block_validation + >>=? fun ( predecessor_block_header, + meta, + history_mode, + oldest_header_opt, + rev_block_hashes, + protocol_data ) -> + let oldest_header = Option.unopt_assert ~loc:__POS__ oldest_header_opt in + let block_hashes_arr = Array.of_list rev_block_hashes in + let write_predecessors_table to_write = + Store.with_atomic_rw store (fun () -> + Lwt_list.iter_s + (fun (current_hash, predecessors_list) -> + Lwt_list.iter_s + (fun (l, h) -> + Store.Block.Predecessors.store + (block_store, current_hash) + l + h) + predecessors_list + >>= fun () -> + match predecessors_list with + | (0, pred_hash) :: _ -> + Store.Chain_data.In_main_branch.store + (chain_data, pred_hash) + current_hash + | [] -> + Lwt.return_unit + | _ :: _ -> + assert false) + to_write) + in + Lwt_list.fold_left_s + (fun (cpt, to_write) current_hash -> + Tezos_stdlib.Utils.display_progress + ~refresh_rate:(cpt, 1_000) + "Computing predecessors table %dK elements%!" 
+ (cpt / 1_000) ; + ( if (cpt + 1) mod 5_000 = 0 then + write_predecessors_table to_write >>= fun () -> Lwt.return_nil + else Lwt.return to_write ) + >>= fun to_write -> + let predecessors_list = + compute_predecessors + ~genesis_hash:genesis.block + oldest_header.shell.level + block_hashes_arr + cpt + in + Lwt.return (cpt + 1, (current_hash, predecessors_list) :: to_write)) + (0, []) + rev_block_hashes + >>= fun (_, to_write) -> + write_predecessors_table to_write + >>= fun () -> + Tezos_stdlib.Utils.display_progress_end () ; + (* Process data imported from snapshot *) + let {Block_data.block_header; operations} = meta in + let block_hash = Block_header.hash block_header in + (* Checks that the block hash imported by the snapshot is the expected one *) + ( match block with + | Some str -> + let bh = Block_hash.of_b58check_exn str in + fail_unless + (Block_hash.equal bh block_hash) + (Inconsistent_imported_block (bh, block_hash)) + | None -> + return_unit ) + >>=? fun () -> + lwt_log_notice + Tag.DSL.( + fun f -> + f "Setting current head to block %a" + -% a Block_hash.Logging.tag (Block_header.hash block_header)) + >>= fun () -> + let pred_context_hash = predecessor_block_header.shell.context in + checkout_exn context_index pred_context_hash + >>= fun predecessor_context -> + (* ... we can now call apply ... *) + Tezos_validation.Block_validation.apply + chain_id + ~max_operations_ttl:(Int32.to_int predecessor_block_header.shell.level) + ~predecessor_block_header + ~predecessor_context + ~block_header + operations + >>=? fun block_validation_result -> + check_context_hash_consistency block_validation_result block_header + >>=? fun () -> + verify_oldest_header oldest_header genesis.block + >>=? fun () -> + (* ... we set the history mode regarding the snapshot version hint ... *) + set_history_mode store history_mode + >>=? fun () -> + (* ... and we import protocol data...*) + import_protocol_data_list + context_index + chain_store + block_hashes_arr + oldest_header.Block_header.shell.level + protocol_data + >>=? fun () -> + (* Everything is ok. We can store the new head *) + store_new_head + chain_state + chain_data + ~genesis:genesis.block + block_header + operations + block_validation_result + >>=? fun () -> + (* Update history mode flags *) + update_checkpoint chain_state block_header + >>= fun new_checkpoint -> + update_savepoint chain_state new_checkpoint + >>= fun () -> + update_caboose + chain_data + ~genesis:genesis.block + block_header + oldest_header + block_validation_result.validation_result.max_operations_ttl + >>=? 
fun () -> + Store.close store ; + State.close state >>= fun () -> return_unit) (function | Ok () -> - lwt_log_notice Tag.DSL.(fun f -> - f "@[Successful import from file %a@]" - -% a filename_tag filename - ) >>= fun () -> - return_unit + lwt_log_notice + Tag.DSL.( + fun f -> + f "@[Successful import from file %a@]" + -% a filename_tag filename) + >>= fun () -> return_unit | Error errors -> - dir_cleaner data_dir >>= fun () -> - Lwt.return (Error errors)) - (fun exn -> - dir_cleaner data_dir >>= fun () -> - Lwt.fail exn) + dir_cleaner data_dir >>= fun () -> Lwt.return (Error errors)) + (fun exn -> dir_cleaner data_dir >>= fun () -> Lwt.fail exn) diff --git a/src/lib_shell/snapshots.mli b/src/lib_shell/snapshots.mli index c858c54043a978ffc2c1c8c2bb2924a5ddc15973..e873def698853ee9fe934bd1cbcf21991e8bb539 100644 --- a/src/lib_shell/snapshots.mli +++ b/src/lib_shell/snapshots.mli @@ -24,7 +24,7 @@ (* *) (*****************************************************************************) -val export: +val export : ?export_rolling:bool -> context_index:Context.index -> store:Store.t -> @@ -33,7 +33,7 @@ val export: string option -> unit tzresult Lwt.t -val import: +val import : data_dir:string -> dir_cleaner:(string -> unit Lwt.t) -> patch_context:('a option -> Context.t -> Context.t Lwt.t) -> diff --git a/src/lib_shell/stat_directory.ml b/src/lib_shell/stat_directory.ml index 1d84624c37227ace699b649243848e48692aea00..eb9ae02e9a07614b417a509ad06aca327280ef18 100644 --- a/src/lib_shell/stat_directory.ml +++ b/src/lib_shell/stat_directory.ml @@ -24,11 +24,10 @@ let rpc_directory () = let dir = RPC_directory.empty in - RPC_directory.gen_register dir Stat_services.S.gc begin fun () () () -> - RPC_answer.return @@ Gc.stat () end |> fun dir -> - - RPC_directory.gen_register dir Stat_services.S.memory begin fun () () () -> - Sys_info.memory_stats () >>= function - | Ok stats -> - RPC_answer.return stats - | Error err -> RPC_answer.fail [err] end + RPC_directory.gen_register dir Stat_services.S.gc (fun () () () -> + RPC_answer.return @@ Gc.stat ()) + |> fun dir -> + RPC_directory.gen_register dir Stat_services.S.memory (fun () () () -> + Sys_info.memory_stats () + >>= function + | Ok stats -> RPC_answer.return stats | Error err -> RPC_answer.fail [err]) diff --git a/src/lib_shell/state.ml b/src/lib_shell/state.ml index c0af4168c7dd0b1f8b57abc47bd3dabbd1d44670..102041899dfca6bf34b96bd272fcc3178592805f 100644 --- a/src/lib_shell/state.ml +++ b/src/lib_shell/state.ml @@ -29,111 +29,109 @@ open State_logging open Validation_errors module Shared = struct - type 'a t = { - data: 'a ; - lock: Lwt_mutex.t ; - } - let create data = { data ; lock = Lwt_mutex.create () } - let use { data ; lock } f = - Lwt_mutex.with_lock lock (fun () -> f data) + type 'a t = {data : 'a; lock : Lwt_mutex.t} + + let create data = {data; lock = Lwt_mutex.create ()} + + let use {data; lock} f = Lwt_mutex.with_lock lock (fun () -> f data) end type genesis = { - time: Time.Protocol.t ; - block: Block_hash.t ; - protocol: Protocol_hash.t ; + time : Time.Protocol.t; + block : Block_hash.t; + protocol : Protocol_hash.t } type global_state = { - global_data: global_data Shared.t ; - protocol_store: Store.Protocol.store Shared.t ; - main_chain: Chain_id.t ; - protocol_watcher: Protocol_hash.t Lwt_watcher.input ; - block_watcher: block Lwt_watcher.input ; + global_data : global_data Shared.t; + protocol_store : Store.Protocol.store Shared.t; + main_chain : Chain_id.t; + protocol_watcher : Protocol_hash.t Lwt_watcher.input; + block_watcher : block 
Lwt_watcher.input } and global_data = { - chains: chain_state Chain_id.Table.t ; - global_store: Store.t ; - context_index: Context.index ; + chains : chain_state Chain_id.Table.t; + global_store : Store.t; + context_index : Context.index } and chain_state = { (* never take the lock on 'block_store' when holding the lock on 'chain_data'. *) - global_state: global_state ; - chain_id: Chain_id.t ; - genesis: genesis ; - faked_genesis_hash: Block_hash.t ; - expiration: Time.Protocol.t option ; - allow_forked_chain: bool ; - block_store: Store.Block.store Shared.t ; - context_index: Context.index Shared.t ; - block_watcher: block Lwt_watcher.input ; - chain_data: chain_data_state Shared.t ; - block_rpc_directories: - block RPC_directory.t Protocol_hash.Map.t Protocol_hash.Table.t ; - header_rpc_directories: - (chain_state * Block_hash.t * Block_header.t) - RPC_directory.t Protocol_hash.Map.t Protocol_hash.Table.t ; + global_state : global_state; + chain_id : Chain_id.t; + genesis : genesis; + faked_genesis_hash : Block_hash.t; + expiration : Time.Protocol.t option; + allow_forked_chain : bool; + block_store : Store.Block.store Shared.t; + context_index : Context.index Shared.t; + block_watcher : block Lwt_watcher.input; + chain_data : chain_data_state Shared.t; + block_rpc_directories : + block RPC_directory.t Protocol_hash.Map.t Protocol_hash.Table.t; + header_rpc_directories : + (chain_state * Block_hash.t * Block_header.t) RPC_directory.t + Protocol_hash.Map.t + Protocol_hash.Table.t } and chain_data_state = { - mutable data: chain_data ; - mutable checkpoint: Block_header.t ; - chain_data_store: Store.Chain_data.store ; + mutable data : chain_data; + mutable checkpoint : Block_header.t; + chain_data_store : Store.Chain_data.store } and chain_data = { - current_head: block ; - current_mempool: Mempool.t ; - live_blocks: Block_hash.Set.t ; - live_operations: Operation_hash.Set.t ; - test_chain: Chain_id.t option ; - save_point: Int32.t * Block_hash.t ; - caboose: Int32.t * Block_hash.t ; + current_head : block; + current_mempool : Mempool.t; + live_blocks : Block_hash.Set.t; + live_operations : Operation_hash.Set.t; + test_chain : Chain_id.t option; + save_point : Int32.t * Block_hash.t; + caboose : Int32.t * Block_hash.t } and block = { - chain_state: chain_state ; - hash: Block_hash.t ; - header: Block_header.t ; + chain_state : chain_state; + hash : Block_hash.t; + header : Block_header.t } (* Errors *) type error += Block_not_found of Block_hash.t + type error += Block_contents_not_found of Block_hash.t -let () = begin - register_error_kind `Permanent +let () = + register_error_kind + `Permanent ~id:"state.block.not_found" ~title:"Block_not_found" ~description:"Block not found" ~pp:(fun ppf block_hash -> - Format.fprintf ppf - "@[Cannot find block %a]" - Block_hash.pp block_hash) - Data_encoding.(obj1 (req "block_not_found" @@ Block_hash.encoding ) ) - (function - | Block_not_found block_hash -> Some block_hash - | _ -> None) + Format.fprintf ppf "@[Cannot find block %a]" Block_hash.pp block_hash) + Data_encoding.(obj1 (req "block_not_found" @@ Block_hash.encoding)) + (function Block_not_found block_hash -> Some block_hash | _ -> None) (fun block_hash -> Block_not_found block_hash) ; - - register_error_kind `Permanent + register_error_kind + `Permanent ~id:"state.block.contents_not_found" ~title:"Block_contents_not_found" ~description:"Block not found" ~pp:(fun ppf block_hash -> - Format.fprintf ppf - "@[Cannot find block contents %a]" - Block_hash.pp block_hash) - Data_encoding.(obj1 
(req "block_contents_not_found" @@ Block_hash.encoding ) ) + Format.fprintf + ppf + "@[Cannot find block contents %a]" + Block_hash.pp + block_hash) + Data_encoding.( + obj1 (req "block_contents_not_found" @@ Block_hash.encoding)) (function - | Block_contents_not_found block_hash -> Some block_hash - | _ -> None) - (fun block_hash -> Block_contents_not_found block_hash) ; -end + | Block_contents_not_found block_hash -> Some block_hash | _ -> None) + (fun block_hash -> Block_contents_not_found block_hash) (* Abstract view over block header storage. This module aims to abstract over block header's [read], [read_opt] and [known] @@ -141,44 +139,45 @@ end *) module Header = struct - let read (store, hash) = - Store.Block.Contents.read (store, hash) >>= function - | Ok { header ; _ } -> return header - | Error _ -> - Store.Block.Pruned_contents.read (store, hash) >>=? fun { header } -> + Store.Block.Contents.read (store, hash) + >>= function + | Ok {header; _} -> return header + | Error _ -> + Store.Block.Pruned_contents.read (store, hash) + >>=? fun {header} -> return header let read_opt (store, hash) = - read (store, hash) >>= function - | Ok header -> Lwt.return_some header - | Error _ -> Lwt.return_none + read (store, hash) + >>= function + | Ok header -> Lwt.return_some header | Error _ -> Lwt.return_none let known (store, hash) = - Store.Block.Pruned_contents.known (store, hash) >>= function - | true -> Lwt.return_true - | false -> Store.Block.Contents.known (store, hash) + Store.Block.Pruned_contents.known (store, hash) + >>= function + | true -> + Lwt.return_true + | false -> + Store.Block.Contents.known (store, hash) end -let read_chain_data { chain_data ; _ } f = - Shared.use chain_data begin fun state -> - f state.chain_data_store state.data - end - -let update_chain_data { chain_id ; context_index ; chain_data ; _ } f = - Shared.use chain_data begin fun state -> - f state.chain_data_store state.data >>= fun (data, res) -> - Lwt_utils.may data - ~f:begin fun data -> - state.data <- data ; - Shared.use context_index begin fun context_index -> - Context.set_head context_index chain_id - data.current_head.header.shell.context - end >>= fun () -> - Lwt.return_unit - end >>= fun () -> - Lwt.return res - end +let read_chain_data {chain_data; _} f = + Shared.use chain_data (fun state -> f state.chain_data_store state.data) + +let update_chain_data {chain_id; context_index; chain_data; _} f = + Shared.use chain_data (fun state -> + f state.chain_data_store state.data + >>= fun (data, res) -> + Lwt_utils.may data ~f:(fun data -> + state.data <- data ; + Shared.use context_index (fun context_index -> + Context.set_head + context_index + chain_id + data.current_head.header.shell.context) + >>= fun () -> Lwt.return_unit) + >>= fun () -> Lwt.return res) (** The number of predecessors stored per block. 
This value chosen to compute efficiently block locators that @@ -202,24 +201,27 @@ let stored_predecessors_size = 12 p(n,3) = n-8 = p(n-4,2) p(n,4) = n-16 = p(n-8,3) *) -let store_predecessors (store: Store.Block.store) (b: Block_hash.t) : unit Lwt.t = +let store_predecessors (store : Store.Block.store) (b : Block_hash.t) : + unit Lwt.t = let rec loop pred dist = - if dist = stored_predecessors_size then - Lwt.return_unit + if dist = stored_predecessors_size then Lwt.return_unit else - Store.Block.Predecessors.read_opt (store, pred) (dist - 1) >>= function - | None -> Lwt.return_unit (* we reached the last known block *) + Store.Block.Predecessors.read_opt (store, pred) (dist - 1) + >>= function + | None -> + Lwt.return_unit (* we reached the last known block *) | Some p -> - Store.Block.Predecessors.store (store, b) dist p >>= fun () -> - loop p (dist + 1) in + Store.Block.Predecessors.store (store, b) dist p + >>= fun () -> loop p (dist + 1) + in (* the first predecessor is fetched from the header *) - Header.read_opt (store, b) >|= Option.unopt_assert ~loc:__POS__ >>= fun header -> + Header.read_opt (store, b) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun header -> let pred = header.shell.predecessor in - if Block_hash.equal b pred then - Lwt.return_unit (* genesis *) + if Block_hash.equal b pred then Lwt.return_unit (* genesis *) else - Store.Block.Predecessors.store (store, b) 0 pred >>= fun () -> - loop pred 1 + Store.Block.Predecessors.store (store, b) 0 pred >>= fun () -> loop pred 1 (** [predecessor_n_raw s b d] returns the hash of the block at distance [d] from [b]. @@ -233,77 +235,79 @@ let predecessor_n_raw store block_hash distance = (* helper functions *) (* computes power of 2 w/o floats *) let power_of_2 n = - if n < 0 then invalid_arg "negative argument" else + if n < 0 then invalid_arg "negative argument" + else let rec loop cnt res = - if cnt<1 then res - else loop (cnt-1) (res*2) + if cnt < 1 then res else loop (cnt - 1) (res * 2) in loop n 1 in (* computes the closest power of two smaller than a given a number and the rest w/o floats *) let closest_power_two_and_rest n = - if n < 0 then invalid_arg "negative argument" else + if n < 0 then invalid_arg "negative argument" + else let rec loop cnt n rest = - if n<=1 - then (cnt,rest) - else loop (cnt+1) (n/2) (rest + (power_of_2 cnt) * (n mod 2)) + if n <= 1 then (cnt, rest) + else loop (cnt + 1) (n / 2) (rest + (power_of_2 cnt * (n mod 2))) in loop 0 n 0 in - (* actual predecessor function *) if distance < 0 then invalid_arg ("State.predecessor: distance < 0 " ^ string_of_int distance) - else if distance = 0 then - Lwt.return_some block_hash + else if distance = 0 then Lwt.return_some block_hash else let rec loop block_hash distance = - if distance = 1 - then Store.Block.Predecessors.read_opt (store, block_hash) 0 + if distance = 1 then + Store.Block.Predecessors.read_opt (store, block_hash) 0 else - let (power,rest) = closest_power_two_and_rest distance in - let (power,rest) = - if power < stored_predecessors_size then (power,rest) + let (power, rest) = closest_power_two_and_rest distance in + let (power, rest) = + if power < stored_predecessors_size then (power, rest) else - let power = stored_predecessors_size-1 in - let rest = distance - (power_of_2 power) in - (power,rest) + let power = stored_predecessors_size - 1 in + let rest = distance - power_of_2 power in + (power, rest) in - Store.Block.Predecessors.read_opt (store, block_hash) power >>= function - | None -> Lwt.return_none (* reached genesis *) + 
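(* Illustrative sketch only, not part of this patch: the decomposition
   performed by [predecessor_n_raw] above, extracted as plain arithmetic.
   Each lookup jumps by the largest stored power of two not exceeding the
   remaining distance, capped at 2^(stored_predecessors_size - 1) = 2048,
   so reaching the predecessor [d] levels back needs O(log d) reads (plus
   a linear tail of capped jumps for very large [d]). [jumps] is a
   hypothetical helper used only for this illustration. *)
let jumps distance =
  let stored_predecessors_size = 12 in
  let max_jump = 1 lsl (stored_predecessors_size - 1) in
  let rec aux d acc =
    if d <= 0 then List.rev acc
    else if d = 1 then List.rev (1 :: acc)
    else
      let rec largest p =
        if (2 * p) > d || (2 * p) > max_jump then p else largest (2 * p)
      in
      let jump = largest 1 in
      aux (d - jump) (jump :: acc)
  in
  aux distance []

(* For instance, [jumps 300] = [256; 32; 8; 4]; the jump sizes always sum
   back to the requested distance. *)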
Store.Block.Predecessors.read_opt (store, block_hash) power + >>= function + | None -> + Lwt.return_none (* reached genesis *) | Some pred -> - if rest = 0 - then Lwt.return_some pred (* landed on the requested predecessor *) - else loop pred rest (* need to jump further back *) + if rest = 0 then Lwt.return_some pred + (* landed on the requested predecessor *) + else loop pred rest + (* need to jump further back *) in loop block_hash distance let predecessor_n ?(below_save_point = false) block_store block_hash distance = - predecessor_n_raw block_store block_hash distance >>= function - | None -> Lwt.return_none - | Some predecessor -> - begin if below_save_point then - Header.known (block_store, predecessor) - else - Store.Block.Contents.known (block_store, predecessor) - end >>= function - | false -> Lwt.return_none - | true -> Lwt.return_some predecessor + predecessor_n_raw block_store block_hash distance + >>= function + | None -> + Lwt.return_none + | Some predecessor -> ( + ( if below_save_point then Header.known (block_store, predecessor) + else Store.Block.Contents.known (block_store, predecessor) ) + >>= function + | false -> Lwt.return_none | true -> Lwt.return_some predecessor ) let compute_locator_from_hash chain_state ?(size = 200) head_hash seed = - Shared.use chain_state.chain_data begin fun state -> - Lwt.return state.data.caboose - end >>= fun (_lvl, caboose) -> - Shared.use chain_state.block_store begin fun block_store -> - Header.read_opt (block_store, head_hash) >|= - Option.unopt_assert ~loc:__POS__ >>= fun header -> - Block_locator.compute - ~get_predecessor:(predecessor_n ~below_save_point:true block_store) - ~caboose - ~size - head_hash header seed - end + Shared.use chain_state.chain_data (fun state -> + Lwt.return state.data.caboose) + >>= fun (_lvl, caboose) -> + Shared.use chain_state.block_store (fun block_store -> + Header.read_opt (block_store, head_hash) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun header -> + Block_locator.compute + ~get_predecessor:(predecessor_n ~below_save_point:true block_store) + ~caboose + ~size + head_hash + header + seed) let compute_locator chain ?size head seed = compute_locator_from_hash chain ?size head.hash seed @@ -311,27 +315,28 @@ let compute_locator chain ?size head seed = type t = global_state module Locked_block = struct - let store_genesis store genesis context = - let shell : Block_header.shell_header = { - level = 0l ; - proto_level = 0 ; - predecessor = genesis.block ; (* genesis' predecessor is genesis *) - timestamp = genesis.time ; - fitness = [] ; - validation_passes = 0 ; - operations_hash = Operation_list_list_hash.empty ; - context ; - } in - let header : Block_header.t = { shell ; protocol_data = MBytes.create 0 } in - Store.Block.Contents.store (store, genesis.block) - { header ; - Store.Block.message = Some "Genesis" ; - max_operations_ttl = 0 ; context ; - metadata = MBytes.create 0 ; - last_allowed_fork_level = 0l ; - } >>= fun () -> - Lwt.return header + let shell : Block_header.shell_header = + { level = 0l; + proto_level = 0; + predecessor = genesis.block; + (* genesis' predecessor is genesis *) + timestamp = genesis.time; + fitness = []; + validation_passes = 0; + operations_hash = Operation_list_list_hash.empty; + context } + in + let header : Block_header.t = {shell; protocol_data = MBytes.create 0} in + Store.Block.Contents.store + (store, genesis.block) + { header; + Store.Block.message = Some "Genesis"; + max_operations_ttl = 0; + context; + metadata = MBytes.create 0; + 
last_allowed_fork_level = 0l } + >>= fun () -> Lwt.return header (* Will that block is compatible with the current checkpoint. *) let acceptable chain_data (header : Block_header.t) = @@ -341,48 +346,49 @@ module Locked_block = struct Lwt.return_true else if checkpoint_level = header.shell.level then Lwt.return (Block_header.equal header chain_data.checkpoint) - else (* header.shell.level < level *) + else + (* header.shell.level < level *) (* valid only if the current head is lower than the checkpoint. *) - let head_level = - chain_data.data.current_head.header.shell.level in + let head_level = chain_data.data.current_head.header.shell.level in Lwt.return (head_level < checkpoint_level) (* Is a block still valid for a given checkpoint ? *) - let is_valid_for_checkpoint - block_store hash (header : Block_header.t) (checkpoint : Block_header.t) = + let is_valid_for_checkpoint block_store hash (header : Block_header.t) + (checkpoint : Block_header.t) = if Compare.Int32.(header.shell.level < checkpoint.shell.level) then Lwt.return_true else - predecessor_n block_store hash - (Int32.to_int @@ - Int32.sub header.shell.level checkpoint.shell.level) >|= - Option.unopt_assert ~loc:__POS__ >>= fun predecessor -> + predecessor_n + block_store + hash + (Int32.to_int @@ Int32.sub header.shell.level checkpoint.shell.level) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun predecessor -> if Block_hash.equal predecessor (Block_header.hash checkpoint) then Lwt.return_true - else - Lwt.return_false - + else Lwt.return_false end (* Find the branches that are still valid with a given checkpoint, i.e. heads with lower level, or branches that goes through the checkpoint. *) let locked_valid_heads_for_checkpoint block_store data checkpoint = - Store.Chain_data.Known_heads.read_all - data.chain_data_store >>= fun heads -> + Store.Chain_data.Known_heads.read_all data.chain_data_store + >>= fun heads -> Block_hash.Set.fold (fun head acc -> - let valid_header = - Header.read_opt - (block_store, head) >|= Option.unopt_assert ~loc:__POS__ >>= fun header -> - Locked_block.is_valid_for_checkpoint - block_store head header checkpoint >>= fun valid -> - Lwt.return (valid, header) in - acc >>= fun (valid_heads, invalid_heads) -> - valid_header >>= fun (valid, header) -> - if valid then - Lwt.return ((head, header) :: valid_heads, invalid_heads) - else - Lwt.return (valid_heads, (head, header) :: invalid_heads)) + let valid_header = + Header.read_opt (block_store, head) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun header -> + Locked_block.is_valid_for_checkpoint block_store head header checkpoint + >>= fun valid -> Lwt.return (valid, header) + in + acc + >>= fun (valid_heads, invalid_heads) -> + valid_header + >>= fun (valid, header) -> + if valid then Lwt.return ((head, header) :: valid_heads, invalid_heads) + else Lwt.return (valid_heads, (head, header) :: invalid_heads)) heads (Lwt.return ([], [])) @@ -391,211 +397,236 @@ let locked_valid_heads_for_checkpoint block_store data checkpoint = let tag_invalid_heads block_store chain_store heads level = let rec tag_invalid_head (hash, header) = if header.Block_header.shell.level <= level then - Store.Chain_data.Known_heads.store chain_store hash >>= fun () -> - Lwt.return_some (hash, header) + Store.Chain_data.Known_heads.store chain_store hash + >>= fun () -> Lwt.return_some (hash, header) else - let errors = [ Validation_errors.Checkpoint_error (hash, None) ] in - Store.Block.Invalid_block.store block_store hash - { level = header.shell.level ; errors } >>= fun () -> - 
Store.Block.Contents.remove (block_store, hash) >>= fun () -> - Store.Block.Operation_hashes.remove_all (block_store, hash) >>= fun () -> - Store.Block.Operations_metadata.remove_all (block_store, hash) >>= fun () -> - Store.Block.Operations.remove_all (block_store, hash) >>= fun () -> - Store.Block.Predecessors.remove_all (block_store, hash) >>= fun () -> - Header.read_opt - (block_store, header.shell.predecessor) >>= function + let errors = [Validation_errors.Checkpoint_error (hash, None)] in + Store.Block.Invalid_block.store + block_store + hash + {level = header.shell.level; errors} + >>= fun () -> + Store.Block.Contents.remove (block_store, hash) + >>= fun () -> + Store.Block.Operation_hashes.remove_all (block_store, hash) + >>= fun () -> + Store.Block.Operations_metadata.remove_all (block_store, hash) + >>= fun () -> + Store.Block.Operations.remove_all (block_store, hash) + >>= fun () -> + Store.Block.Predecessors.remove_all (block_store, hash) + >>= fun () -> + Header.read_opt (block_store, header.shell.predecessor) + >>= function | None -> Lwt.return_none | Some header -> - tag_invalid_head (Block_header.hash header, header) in + tag_invalid_head (Block_header.hash header, header) + in Lwt_list.iter_p (fun (hash, _header) -> - Store.Chain_data.Known_heads.remove chain_store hash) - heads >>= fun () -> - Lwt_list.filter_map_s tag_invalid_head heads + Store.Chain_data.Known_heads.remove chain_store hash) + heads + >>= fun () -> Lwt_list.filter_map_s tag_invalid_head heads let prune_block store block_hash = let st = (store, block_hash) in - Store.Block.Contents.remove st >>= fun () -> - Store.Block.Invalid_block.remove store block_hash >>= fun () -> - Store.Block.Operations_metadata.remove_all st + Store.Block.Contents.remove st + >>= fun () -> + Store.Block.Invalid_block.remove store block_hash + >>= fun () -> Store.Block.Operations_metadata.remove_all st let store_header_and_prune_block store block_hash = let st = (store, block_hash) in - Store.Block.Contents.read_opt st >>= begin function - | Some { header ; _ } -> - Store.Block.Pruned_contents.store st { header } - | None -> - Store.Block.Pruned_contents.known st >>= function - | true -> Lwt.return_unit - | false -> - State_logging.lwt_log_error Tag.DSL.(fun f -> - f "@[cannot find pruned contents of block %a@]" - -% t event "missing_pruned_contents" - -% a Block_hash.Logging.tag block_hash) - end >>= fun () -> - prune_block store block_hash + Store.Block.Contents.read_opt st + >>= (function + | Some {header; _} -> + Store.Block.Pruned_contents.store st {header} + | None -> ( + Store.Block.Pruned_contents.known st + >>= function + | true -> + Lwt.return_unit + | false -> + State_logging.lwt_log_error + Tag.DSL.( + fun f -> + f "@[cannot find pruned contents of block %a@]" + -% t event "missing_pruned_contents" + -% a Block_hash.Logging.tag block_hash) )) + >>= fun () -> prune_block store block_hash let delete_block store block_hash = - prune_block store block_hash >>= fun () -> + prune_block store block_hash + >>= fun () -> let st = (store, block_hash) in - Store.Block.Pruned_contents.remove st >>= fun () -> - Store.Block.Operations.remove_all st >>= fun () -> - Store.Block.Operation_hashes.remove_all st >>= fun () -> - Store.Block.Predecessors.remove_all st - + Store.Block.Pruned_contents.remove st + >>= fun () -> + Store.Block.Operations.remove_all st + >>= fun () -> + Store.Block.Operation_hashes.remove_all st + >>= fun () -> Store.Block.Predecessors.remove_all st (* Remove all blocks that are not in the chain. 
*) let cut_alternate_heads block_store chain_store heads = let rec cut_alternate_head hash header = - Store.Chain_data.In_main_branch.known (chain_store, hash) >>= fun in_chain -> - if in_chain then - Lwt.return_unit + Store.Chain_data.In_main_branch.known (chain_store, hash) + >>= fun in_chain -> + if in_chain then Lwt.return_unit else - Header.read_opt - (block_store, header.Block_header.shell.predecessor) >>= function + Header.read_opt (block_store, header.Block_header.shell.predecessor) + >>= function | None -> - delete_block block_store hash >>= fun () -> - Lwt.return_unit + delete_block block_store hash >>= fun () -> Lwt.return_unit | Some header -> - delete_block block_store hash >>= fun () -> - cut_alternate_head (Block_header.hash header) header in + delete_block block_store hash + >>= fun () -> cut_alternate_head (Block_header.hash header) header + in Lwt_list.iter_p (fun (hash, header) -> - Store.Chain_data.Known_heads.remove chain_store hash >>= fun () -> - cut_alternate_head hash header) + Store.Chain_data.Known_heads.remove chain_store hash + >>= fun () -> cut_alternate_head hash header) heads module Chain = struct - type nonrec genesis = genesis = { - time: Time.Protocol.t ; - block: Block_hash.t ; - protocol: Protocol_hash.t ; + time : Time.Protocol.t; + block : Block_hash.t; + protocol : Protocol_hash.t } + let genesis_encoding = let open Data_encoding in conv - (fun { time ; block ; protocol } -> (time, block, protocol)) - (fun (time, block, protocol) -> { time ; block ; protocol }) + (fun {time; block; protocol} -> (time, block, protocol)) + (fun (time, block, protocol) -> {time; block; protocol}) (obj3 (req "timestamp" Time.Protocol.encoding) (req "block" Block_hash.encoding) (req "protocol" Protocol_hash.encoding)) type t = chain_state + type chain_state = t - let main { main_chain ; _ } = main_chain + let main {main_chain; _} = main_chain + let test chain_state = - read_chain_data chain_state begin fun _ chain_data -> - Lwt.return chain_data.test_chain - end + read_chain_data chain_state (fun _ chain_data -> + Lwt.return chain_data.test_chain) let get_level_indexed_protocol chain_state header = let chain_id = chain_state.chain_id in let protocol_level = header.Block_header.shell.proto_level in let global_state = chain_state.global_state in - Shared.use global_state.global_data begin fun global_data -> - let global_store = global_data.global_store in - let chain_store = Store.Chain.get global_store chain_id in - Store.Chain.Protocol_info.read_opt chain_store protocol_level >>= function - | None -> Pervasives.failwith "State.Chain.get_level_index_protocol" - | Some (p,_) -> Lwt.return p - end - - let update_level_indexed_protocol_store chain_state chain_id protocol_level protocol_hash block_header = + Shared.use global_state.global_data (fun global_data -> + let global_store = global_data.global_store in + let chain_store = Store.Chain.get global_store chain_id in + Store.Chain.Protocol_info.read_opt chain_store protocol_level + >>= function + | None -> + Pervasives.failwith "State.Chain.get_level_index_protocol" + | Some (p, _) -> + Lwt.return p) + + let update_level_indexed_protocol_store chain_state chain_id protocol_level + protocol_hash block_header = let global_state = chain_state.global_state in - Shared.use chain_state.block_store begin fun block_store -> - Header.read_opt (block_store, block_header.Block_header.shell.predecessor) >>= function - | None -> Lwt.return_none (* should not happen *) - | Some header -> Lwt.return_some header - end >>= function - | 
None -> Lwt.return_unit + Shared.use chain_state.block_store (fun block_store -> + Header.read_opt + (block_store, block_header.Block_header.shell.predecessor) + >>= function + | None -> + Lwt.return_none (* should not happen *) + | Some header -> + Lwt.return_some header) + >>= function + | None -> + Lwt.return_unit | Some pred_header -> if pred_header.shell.proto_level <> block_header.shell.proto_level then - Shared.use global_state.global_data begin fun global_data -> - let global_store = global_data.global_store in - let chain_store = Store.Chain.get global_store chain_id in - Store.Chain.Protocol_info.store chain_store protocol_level (protocol_hash, block_header.shell.level) - end + Shared.use global_state.global_data (fun global_data -> + let global_store = global_data.global_store in + let chain_store = Store.Chain.get global_store chain_id in + Store.Chain.Protocol_info.store + chain_store + protocol_level + (protocol_hash, block_header.shell.level)) else Lwt.return_unit - let allocate - ~genesis - ~faked_genesis_hash - ~save_point - ~caboose - ~expiration - ~allow_forked_chain - ~current_head - ~checkpoint - ~chain_id - global_state context_index chain_data_store block_store = - Header.read_opt - (block_store, current_head) >|= Option.unopt_assert ~loc:__POS__ >>= fun current_block_head -> - let rec chain_data = { - data = { - save_point ; - caboose ; - current_head = { - chain_state ; - hash = current_head ; - header = current_block_head ; - } ; - current_mempool = Mempool.empty ; - live_blocks = Block_hash.Set.singleton genesis.block ; - live_operations = Operation_hash.Set.empty ; - test_chain = None ; - } ; - checkpoint ; - chain_data_store ; - } - and chain_state = { - global_state ; - chain_id ; - chain_data = { Shared.data = chain_data ; lock = Lwt_mutex.create () } ; - genesis ; faked_genesis_hash ; - expiration ; - allow_forked_chain ; - block_store = Shared.create block_store ; - context_index = Shared.create context_index ; - block_watcher = Lwt_watcher.create_input () ; - block_rpc_directories = Protocol_hash.Table.create 7 ; - header_rpc_directories = Protocol_hash.Table.create 7 ; - } in + let allocate ~genesis ~faked_genesis_hash ~save_point ~caboose ~expiration + ~allow_forked_chain ~current_head ~checkpoint ~chain_id global_state + context_index chain_data_store block_store = + Header.read_opt (block_store, current_head) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun current_block_head -> + let rec chain_data = + { data = + { save_point; + caboose; + current_head = + {chain_state; hash = current_head; header = current_block_head}; + current_mempool = Mempool.empty; + live_blocks = Block_hash.Set.singleton genesis.block; + live_operations = Operation_hash.Set.empty; + test_chain = None }; + checkpoint; + chain_data_store } + and chain_state = + { global_state; + chain_id; + chain_data = {Shared.data = chain_data; lock = Lwt_mutex.create ()}; + genesis; + faked_genesis_hash; + expiration; + allow_forked_chain; + block_store = Shared.create block_store; + context_index = Shared.create context_index; + block_watcher = Lwt_watcher.create_input (); + block_rpc_directories = Protocol_hash.Table.create 7; + header_rpc_directories = Protocol_hash.Table.create 7 } + in Lwt.return chain_state - let locked_create - global_state data ?expiration ?(allow_forked_chain = false) + let locked_create global_state data ?expiration ?(allow_forked_chain = false) chain_id genesis (genesis_header : Block_header.t) = let chain_store = Store.Chain.get data.global_store chain_id in let 
block_store = Store.Block.get chain_store and chain_data_store = Store.Chain_data.get chain_store in - let save_point = genesis_header.shell.level, genesis.block in - let caboose = genesis_header.shell.level, genesis.block in + let save_point = (genesis_header.shell.level, genesis.block) in + let caboose = (genesis_header.shell.level, genesis.block) in let proto_level = genesis_header.shell.proto_level in - Store.Chain.Genesis_hash.store chain_store genesis.block >>= fun () -> - Store.Chain.Genesis_time.store chain_store genesis.time >>= fun () -> - Store.Chain.Genesis_protocol.store chain_store genesis.protocol >>= fun () -> - Store.Chain_data.Current_head.store chain_data_store genesis.block >>= fun () -> - Store.Chain_data.Known_heads.store chain_data_store genesis.block >>= fun () -> - Store.Chain_data.Checkpoint.store chain_data_store genesis_header >>= fun () -> - Store.Chain_data.Save_point.store chain_data_store save_point >>= fun () -> - Store.Chain_data.Caboose.store chain_data_store caboose >>= fun () -> - Store.Chain.Protocol_info.store chain_store proto_level - (genesis.protocol, genesis_header.shell.level) >>= fun () -> - begin - match expiration with - | None -> Lwt.return_unit - | Some time -> Store.Chain.Expiration.store chain_store time - end >>= fun () -> - begin - if allow_forked_chain then - Store.Chain.Allow_forked_chain.store data.global_store chain_id - else + Store.Chain.Genesis_hash.store chain_store genesis.block + >>= fun () -> + Store.Chain.Genesis_time.store chain_store genesis.time + >>= fun () -> + Store.Chain.Genesis_protocol.store chain_store genesis.protocol + >>= fun () -> + Store.Chain_data.Current_head.store chain_data_store genesis.block + >>= fun () -> + Store.Chain_data.Known_heads.store chain_data_store genesis.block + >>= fun () -> + Store.Chain_data.Checkpoint.store chain_data_store genesis_header + >>= fun () -> + Store.Chain_data.Save_point.store chain_data_store save_point + >>= fun () -> + Store.Chain_data.Caboose.store chain_data_store caboose + >>= fun () -> + Store.Chain.Protocol_info.store + chain_store + proto_level + (genesis.protocol, genesis_header.shell.level) + >>= fun () -> + ( match expiration with + | None -> Lwt.return_unit - end >>= fun () -> + | Some time -> + Store.Chain.Expiration.store chain_store time ) + >>= fun () -> + ( if allow_forked_chain then + Store.Chain.Allow_forked_chain.store data.global_store chain_id + else Lwt.return_unit ) + >>= fun () -> allocate ~genesis ~faked_genesis_hash:(Block_header.hash genesis_header) @@ -609,50 +640,66 @@ module Chain = struct global_state data.context_index chain_data_store - block_store >>= fun chain -> + block_store + >>= fun chain -> Chain_id.Table.add data.chains chain_id chain ; Lwt.return chain - let create state ?allow_forked_chain genesis chain_id = - Shared.use state.global_data begin fun data -> - let chain_store = Store.Chain.get data.global_store chain_id in - let block_store = Store.Block.get chain_store in - if Chain_id.Table.mem data.chains chain_id then - Pervasives.failwith "State.Chain.create" - else - Context.commit_genesis - data.context_index - ~chain_id - ~time:genesis.time - ~protocol:genesis.protocol >>= fun commit -> - Locked_block.store_genesis - block_store genesis commit >>= fun genesis_header -> - locked_create - state data ?allow_forked_chain - chain_id genesis genesis_header >>= fun chain -> - (* in case this is a forked chain creation, + let create state ?allow_forked_chain genesis chain_id = + Shared.use state.global_data (fun data -> + let 
chain_store = Store.Chain.get data.global_store chain_id in + let block_store = Store.Block.get chain_store in + if Chain_id.Table.mem data.chains chain_id then + Pervasives.failwith "State.Chain.create" + else + Context.commit_genesis + data.context_index + ~chain_id + ~time:genesis.time + ~protocol:genesis.protocol + >>= fun commit -> + Locked_block.store_genesis block_store genesis commit + >>= fun genesis_header -> + locked_create + state + data + ?allow_forked_chain + chain_id + genesis + genesis_header + >>= fun chain -> + (* in case this is a forked chain creation, delete its header from the temporary table*) - Store.Forking_block_hash.remove data.global_store - (Context.compute_testchain_chain_id genesis.block) >>= fun () -> - Lwt.return chain - end + Store.Forking_block_hash.remove + data.global_store + (Context.compute_testchain_chain_id genesis.block) + >>= fun () -> Lwt.return chain) let locked_read global_state data chain_id = let chain_store = Store.Chain.get data.global_store chain_id in let block_store = Store.Block.get chain_store and chain_data_store = Store.Chain_data.get chain_store in - Store.Chain.Genesis_hash.read chain_store >>=? fun genesis_hash -> - Store.Chain.Genesis_time.read chain_store >>=? fun time -> - Store.Chain.Genesis_protocol.read chain_store >>=? fun protocol -> - Store.Chain.Expiration.read_opt chain_store >>= fun expiration -> - Store.Chain.Allow_forked_chain.known - data.global_store chain_id >>= fun allow_forked_chain -> - Header.read (block_store, genesis_hash) >>=? fun genesis_header -> - let genesis = { time ; protocol ; block = genesis_hash } in - Store.Chain_data.Current_head.read chain_data_store >>=? fun current_head -> - Store.Chain_data.Checkpoint.read chain_data_store >>=? fun checkpoint -> - Store.Chain_data.Save_point.read chain_data_store >>=? fun save_point -> - Store.Chain_data.Caboose.read chain_data_store >>=? fun caboose -> + Store.Chain.Genesis_hash.read chain_store + >>=? fun genesis_hash -> + Store.Chain.Genesis_time.read chain_store + >>=? fun time -> + Store.Chain.Genesis_protocol.read chain_store + >>=? fun protocol -> + Store.Chain.Expiration.read_opt chain_store + >>= fun expiration -> + Store.Chain.Allow_forked_chain.known data.global_store chain_id + >>= fun allow_forked_chain -> + Header.read (block_store, genesis_hash) + >>=? fun genesis_header -> + let genesis = {time; protocol; block = genesis_hash} in + Store.Chain_data.Current_head.read chain_data_store + >>=? fun current_head -> + Store.Chain_data.Checkpoint.read chain_data_store + >>=? fun checkpoint -> + Store.Chain_data.Save_point.read chain_data_store + >>=? fun save_point -> + Store.Chain_data.Caboose.read chain_data_store + >>=? fun caboose -> try allocate ~genesis @@ -667,514 +714,548 @@ module Chain = struct global_state data.context_index chain_data_store - block_store >>= return - with Not_found -> - fail Bad_data_dir + block_store + >>= return + with Not_found -> fail Bad_data_dir let locked_read_all global_state data = - Store.Chain.list data.global_store >>= fun ids -> + Store.Chain.list data.global_store + >>= fun ids -> iter_p (fun id -> - locked_read global_state data id >>=? fun chain -> - Chain_id.Table.add data.chains id chain ; - return_unit) + locked_read global_state data id + >>=? 
fun chain -> + Chain_id.Table.add data.chains id chain ; + return_unit) ids let read_all state = - Shared.use state.global_data begin fun data -> - locked_read_all state data - end + Shared.use state.global_data (fun data -> locked_read_all state data) let get_exn state id = - Shared.use state.global_data begin fun data -> - Lwt.return (Chain_id.Table.find data.chains id) - end + Shared.use state.global_data (fun data -> + Lwt.return (Chain_id.Table.find data.chains id)) let get state id = Lwt.catch (fun () -> get_exn state id >>= return) - (function - | Not_found -> fail (Unknown_chain id) - | exn -> Lwt.fail exn) + (function Not_found -> fail (Unknown_chain id) | exn -> Lwt.fail exn) let all state = - Shared.use state.global_data begin fun { chains ; _ } -> - Lwt.return @@ - Chain_id.Table.fold (fun _ chain acc -> chain :: acc) chains [] - end - - let id { chain_id ; _ } = chain_id - let genesis { genesis ; _ } = genesis - let faked_genesis_hash { faked_genesis_hash ; _ } = faked_genesis_hash - let expiration { expiration ; _ } = expiration - let allow_forked_chain { allow_forked_chain ; _ } = allow_forked_chain - let global_state { global_state ; _ } = global_state + Shared.use state.global_data (fun {chains; _} -> + Lwt.return + @@ Chain_id.Table.fold (fun _ chain acc -> chain :: acc) chains []) + + let id {chain_id; _} = chain_id + + let genesis {genesis; _} = genesis + + let faked_genesis_hash {faked_genesis_hash; _} = faked_genesis_hash + + let expiration {expiration; _} = expiration + + let allow_forked_chain {allow_forked_chain; _} = allow_forked_chain + + let global_state {global_state; _} = global_state + let checkpoint chain_state = - Shared.use chain_state.chain_data begin fun { checkpoint ; _ } -> - Lwt.return checkpoint - end + Shared.use chain_state.chain_data (fun {checkpoint; _} -> + Lwt.return checkpoint) + let save_point chain_state = - Shared.use chain_state.chain_data begin fun state -> - Lwt.return state.data.save_point - end + Shared.use chain_state.chain_data (fun state -> + Lwt.return state.data.save_point) + let caboose chain_state = - Shared.use chain_state.chain_data begin fun state -> - Lwt.return state.data.caboose - end - - let purge_loop_full - ?(chunk_size = 4000) - global_store store - ~genesis_hash block_hash - caboose_level = + Shared.use chain_state.chain_data (fun state -> + Lwt.return state.data.caboose) + + let purge_loop_full ?(chunk_size = 4000) global_store store ~genesis_hash + block_hash caboose_level = let do_prune blocks = - Store.with_atomic_rw global_store @@ fun () -> - Lwt_list.iter_s (store_header_and_prune_block store) blocks in + Store.with_atomic_rw global_store + @@ fun () -> Lwt_list.iter_s (store_header_and_prune_block store) blocks + in let rec loop block_hash (n_blocks, blocks) = - begin if n_blocks >= chunk_size then - do_prune blocks >>= fun () -> - Lwt.return (0, []) - else - Lwt.return (n_blocks, blocks) - end >>= fun (n_blocks, blocks) -> - Header.read_opt (store, block_hash) >|= - Option.unopt_assert ~loc:__POS__ >>= fun header -> - if Block_hash.equal block_hash genesis_hash then - do_prune blocks + ( if n_blocks >= chunk_size then + do_prune blocks >>= fun () -> Lwt.return (0, []) + else Lwt.return (n_blocks, blocks) ) + >>= fun (n_blocks, blocks) -> + Header.read_opt (store, block_hash) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun header -> + if Block_hash.equal block_hash genesis_hash then do_prune blocks else if header.shell.level = caboose_level then do_prune (block_hash :: blocks) - else - loop 
header.shell.predecessor (n_blocks + 1, block_hash :: blocks) in - Header.read_opt (store, block_hash) >|= - Option.unopt_assert ~loc:__POS__ >>= fun header -> - loop header.shell.predecessor (0, []) + else loop header.shell.predecessor (n_blocks + 1, block_hash :: blocks) + in + Header.read_opt (store, block_hash) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun header -> loop header.shell.predecessor (0, []) let purge_full chain_state (lvl, hash) = - Shared.use chain_state.global_state.global_data begin fun global_data -> - Shared.use chain_state.block_store begin fun store -> - update_chain_data chain_state begin fun _ data -> - purge_loop_full - global_data.global_store store - ~genesis_hash:chain_state.genesis.block hash - (fst data.save_point) >>= fun () -> - let new_data = { data with save_point = (lvl, hash) ; } in - Lwt.return (Some new_data, ()) - end >>= fun () -> - Shared.use chain_state.chain_data begin fun data -> - Store.Chain_data.Save_point.store data.chain_data_store (lvl, hash) >>= fun () -> - return_unit - end - end - end + Shared.use chain_state.global_state.global_data (fun global_data -> + Shared.use chain_state.block_store (fun store -> + update_chain_data chain_state (fun _ data -> + purge_loop_full + global_data.global_store + store + ~genesis_hash:chain_state.genesis.block + hash + (fst data.save_point) + >>= fun () -> + let new_data = {data with save_point = (lvl, hash)} in + Lwt.return (Some new_data, ())) + >>= fun () -> + Shared.use chain_state.chain_data (fun data -> + Store.Chain_data.Save_point.store + data.chain_data_store + (lvl, hash) + >>= fun () -> return_unit))) let purge_loop_rolling global_store store ~genesis_hash block_hash limit = let do_delete blocks = - Store.with_atomic_rw global_store @@ fun () -> - Lwt_list.iter_s (delete_block store) blocks in + Store.with_atomic_rw global_store + @@ fun () -> Lwt_list.iter_s (delete_block store) blocks + in let rec prune_loop block_hash limit = - if Block_hash.equal genesis_hash block_hash then - Lwt.return block_hash + if Block_hash.equal genesis_hash block_hash then Lwt.return block_hash else if limit = 1 then - Header.read_opt (store, block_hash) >>= function - | None -> assert false (* Should not happen. *) + Header.read_opt (store, block_hash) + >>= function + | None -> + assert false (* Should not happen. *) | Some header -> - begin - store_header_and_prune_block store block_hash >>= fun () -> - delete_loop header.shell.predecessor (0, []) >>= fun () -> - Lwt.return block_hash end + store_header_and_prune_block store block_hash + >>= fun () -> + delete_loop header.shell.predecessor (0, []) + >>= fun () -> Lwt.return block_hash else - Header.read_opt (store, block_hash) >>= function - | None -> assert false (* Should not happen. *) + Header.read_opt (store, block_hash) + >>= function + | None -> + assert false (* Should not happen. 
*) | Some header -> - store_header_and_prune_block store block_hash >>= fun () -> - prune_loop header.shell.predecessor (limit - 1) + store_header_and_prune_block store block_hash + >>= fun () -> prune_loop header.shell.predecessor (limit - 1) and delete_loop block_hash (n_blocks, blocks) = - begin if n_blocks >= 4000 then - do_delete blocks >>= fun () -> - Lwt.return (0, []) - else Lwt.return (n_blocks, blocks) - end >>= fun (n_blocks, blocks) -> - Header.read_opt (store, block_hash) >>= function - | None -> do_delete blocks + ( if n_blocks >= 4000 then + do_delete blocks >>= fun () -> Lwt.return (0, []) + else Lwt.return (n_blocks, blocks) ) + >>= fun (n_blocks, blocks) -> + Header.read_opt (store, block_hash) + >>= function + | None -> + do_delete blocks | Some header -> - if Block_hash.equal genesis_hash block_hash then - do_delete blocks + if Block_hash.equal genesis_hash block_hash then do_delete blocks else - delete_loop header.shell.predecessor + delete_loop + header.shell.predecessor (n_blocks + 1, block_hash :: blocks) in - Header.read_opt (store, block_hash) >|= - Option.unopt_assert ~loc:__POS__ >>= fun header -> + Header.read_opt (store, block_hash) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun header -> if limit = 0 then - delete_loop header.shell.predecessor (0, []) >>= fun () -> - Lwt.return block_hash - else - prune_loop header.shell.predecessor limit + delete_loop header.shell.predecessor (0, []) + >>= fun () -> Lwt.return block_hash + else prune_loop header.shell.predecessor limit let purge_rolling chain_state ((lvl, hash) as checkpoint) = - Shared.use chain_state.global_state.global_data begin fun global_data -> - Shared.use chain_state.block_store begin fun store -> - begin Store.Block.Contents.read_opt (store, hash) >>= function - | None -> fail (Block_contents_not_found hash) - | Some contents -> return contents end >>=? fun contents -> - let max_op_ttl = contents.max_operations_ttl in - let limit = max_op_ttl in - purge_loop_rolling ~genesis_hash:chain_state.genesis.block - global_data.global_store store hash limit >>= fun caboose_hash -> - let caboose_level = Int32.sub lvl (Int32.of_int max_op_ttl) in - let caboose = (caboose_level, caboose_hash) in - update_chain_data chain_state begin fun _ data -> - let new_data = { data with save_point = checkpoint ; caboose } in - Lwt.return (Some new_data, ()) - end >>= fun () -> - Shared.use chain_state.chain_data begin fun data -> - Store.Chain_data.Save_point.store data.chain_data_store checkpoint >>= fun () -> - Store.Chain_data.Caboose.store data.chain_data_store caboose >>= fun () -> - return_unit - end - - end - end + Shared.use chain_state.global_state.global_data (fun global_data -> + Shared.use chain_state.block_store (fun store -> + Store.Block.Contents.read_opt (store, hash) + >>= (function + | None -> + fail (Block_contents_not_found hash) + | Some contents -> + return contents) + >>=? 
fun contents -> + let max_op_ttl = contents.max_operations_ttl in + let limit = max_op_ttl in + purge_loop_rolling + ~genesis_hash:chain_state.genesis.block + global_data.global_store + store + hash + limit + >>= fun caboose_hash -> + let caboose_level = Int32.sub lvl (Int32.of_int max_op_ttl) in + let caboose = (caboose_level, caboose_hash) in + update_chain_data chain_state (fun _ data -> + let new_data = {data with save_point = checkpoint; caboose} in + Lwt.return (Some new_data, ())) + >>= fun () -> + Shared.use chain_state.chain_data (fun data -> + Store.Chain_data.Save_point.store + data.chain_data_store + checkpoint + >>= fun () -> + Store.Chain_data.Caboose.store data.chain_data_store caboose + >>= fun () -> return_unit))) let set_checkpoint chain_state checkpoint = - Shared.use chain_state.block_store begin fun store -> - Shared.use chain_state.chain_data begin fun data -> - let head_header = - data.data.current_head.header in - let head_hash = data.data.current_head.hash in - Locked_block.is_valid_for_checkpoint - store head_hash head_header checkpoint >>= fun valid -> - assert valid ; - (* Remove outdated invalid blocks. *) - Store.Block.Invalid_block.iter store ~f: begin fun hash iblock -> - if iblock.level <= checkpoint.shell.level then - Store.Block.Invalid_block.remove store hash - else - Lwt.return_unit - end >>= fun () -> - (* Remove outdated heads and tag invalid branches. *) - begin - locked_valid_heads_for_checkpoint - store data checkpoint >>= fun (valid_heads, invalid_heads) -> - tag_invalid_heads store data.chain_data_store - invalid_heads checkpoint.shell.level >>= fun outdated_invalid_heads -> - if head_header.shell.level < checkpoint.shell.level then - Lwt.return_unit - else - let outdated_valid_heads = - List.filter - (fun (hash, { Block_header.shell ; _ } ) -> - shell.level <= checkpoint.shell.level && - not (Block_hash.equal hash head_hash)) - valid_heads in - cut_alternate_heads store data.chain_data_store - outdated_valid_heads >>= fun () -> - cut_alternate_heads store data.chain_data_store - outdated_invalid_heads - end >>= fun () -> - (* Store the new checkpoint. *) - Store.Chain_data.Checkpoint.store - data.chain_data_store checkpoint >>= fun () -> - data.checkpoint <- checkpoint ; - (* TODO 'git fsck' in the context. *) - Lwt.return_unit - end - end + Shared.use chain_state.block_store (fun store -> + Shared.use chain_state.chain_data (fun data -> + let head_header = data.data.current_head.header in + let head_hash = data.data.current_head.hash in + Locked_block.is_valid_for_checkpoint + store + head_hash + head_header + checkpoint + >>= fun valid -> + assert valid ; + (* Remove outdated invalid blocks. *) + Store.Block.Invalid_block.iter store ~f:(fun hash iblock -> + if iblock.level <= checkpoint.shell.level then + Store.Block.Invalid_block.remove store hash + else Lwt.return_unit) + >>= fun () -> + (* Remove outdated heads and tag invalid branches. 
*) + locked_valid_heads_for_checkpoint store data checkpoint + >>= (fun (valid_heads, invalid_heads) -> + tag_invalid_heads + store + data.chain_data_store + invalid_heads + checkpoint.shell.level + >>= fun outdated_invalid_heads -> + if head_header.shell.level < checkpoint.shell.level then + Lwt.return_unit + else + let outdated_valid_heads = + List.filter + (fun (hash, {Block_header.shell; _}) -> + shell.level <= checkpoint.shell.level + && not (Block_hash.equal hash head_hash)) + valid_heads + in + cut_alternate_heads + store + data.chain_data_store + outdated_valid_heads + >>= fun () -> + cut_alternate_heads + store + data.chain_data_store + outdated_invalid_heads) + >>= fun () -> + (* Store the new checkpoint. *) + Store.Chain_data.Checkpoint.store data.chain_data_store checkpoint + >>= fun () -> + data.checkpoint <- checkpoint ; + (* TODO 'git fsck' in the context. *) + Lwt.return_unit)) let set_checkpoint_then_purge_full chain_state checkpoint = - set_checkpoint chain_state checkpoint >>= fun () -> + set_checkpoint chain_state checkpoint + >>= fun () -> let lvl = checkpoint.shell.level in let hash = Block_header.hash checkpoint in purge_full chain_state (lvl, hash) let set_checkpoint_then_purge_rolling chain_state checkpoint = - set_checkpoint chain_state checkpoint >>= fun () -> + set_checkpoint chain_state checkpoint + >>= fun () -> let lvl = checkpoint.shell.level in let hash = Block_header.hash checkpoint in purge_rolling chain_state (lvl, hash) let acceptable_block chain_state (header : Block_header.t) = - Shared.use chain_state.chain_data begin fun chain_data -> - Locked_block.acceptable chain_data header - end + Shared.use chain_state.chain_data (fun chain_data -> + Locked_block.acceptable chain_data header) let destroy state chain = - lwt_debug Tag.DSL.(fun f -> - f "destroy %a" - -% t event "destroy" - -% a chain_id (id chain)) >>= fun () -> - Shared.use state.global_data begin fun { global_store ; chains ; _ } -> - Chain_id.Table.remove chains (id chain) ; - Store.Chain.destroy global_store (id chain) - end + lwt_debug + Tag.DSL.( + fun f -> f "destroy %a" -% t event "destroy" -% a chain_id (id chain)) + >>= fun () -> + Shared.use state.global_data (fun {global_store; chains; _} -> + Chain_id.Table.remove chains (id chain) ; + Store.Chain.destroy global_store (id chain)) let store chain_state = - Shared.use chain_state.global_state.global_data - begin fun global_data -> - Lwt.return global_data.global_store - end - + Shared.use chain_state.global_state.global_data (fun global_data -> + Lwt.return global_data.global_store) end module Block = struct - type t = block = { - chain_state: Chain.t ; - hash: Block_hash.t ; - header: Block_header.t ; + chain_state : Chain.t; + hash : Block_hash.t; + header : Block_header.t } + type block = t type validation_store = { - context_hash: Context_hash.t; - message: string option; - max_operations_ttl: int; - last_allowed_fork_level: Int32.t; + context_hash : Context_hash.t; + message : string option; + max_operations_ttl : int; + last_allowed_fork_level : Int32.t } - module Header = Header let compare b1 b2 = Block_hash.compare b1.hash b2.hash + let equal b1 b2 = Block_hash.equal b1.hash b2.hash - let hash { hash ; _} = hash - let header { header ; _ } = header + let hash {hash; _} = hash + + let header {header; _} = header let read_contents block = - Shared.use block.chain_state.block_store begin fun store -> - Store.Block.Contents.read_opt (store, block.hash) >>= function - | None -> fail (Block_contents_not_found block.hash) - | Some 
contents -> return contents - end + Shared.use block.chain_state.block_store (fun store -> + Store.Block.Contents.read_opt (store, block.hash) + >>= function + | None -> + fail (Block_contents_not_found block.hash) + | Some contents -> + return contents) let header_of_hash chain_state hash = - Shared.use chain_state.block_store begin fun store -> - Header.read_opt (store, hash) - end + Shared.use chain_state.block_store (fun store -> + Header.read_opt (store, hash)) + + let metadata b = read_contents b >>=? fun {metadata; _} -> return metadata - let metadata b = - read_contents b >>=? fun { metadata ; _ } -> return metadata + let chain_state {chain_state; _} = chain_state + + let chain_id {chain_state = {chain_id; _}; _} = chain_id + + let shell_header {header = {shell; _}; _} = shell - let chain_state { chain_state ; _ } = chain_state - let chain_id { chain_state = { chain_id ; _ } ; _ } = chain_id - let shell_header { header = { shell ; _ } ; _ } = shell let timestamp b = (shell_header b).timestamp + let fitness b = (shell_header b).fitness + let level b = (shell_header b).level + let validation_passes b = (shell_header b).validation_passes - let message b = - read_contents b >>=? fun { message ; _ } -> return message + let message b = read_contents b >>=? fun {message; _} -> return message let max_operations_ttl b = - read_contents b >>=? fun { max_operations_ttl ; _ } -> return max_operations_ttl + read_contents b + >>=? fun {max_operations_ttl; _} -> return max_operations_ttl let last_allowed_fork_level b = - read_contents b >>=? fun { last_allowed_fork_level ; _ } -> return last_allowed_fork_level + read_contents b + >>=? fun {last_allowed_fork_level; _} -> return last_allowed_fork_level let is_genesis b = Block_hash.equal b.hash b.chain_state.genesis.block let known_valid chain_state hash = - Shared.use chain_state.block_store begin fun store -> - Header.known (store, hash) - end + Shared.use chain_state.block_store (fun store -> + Header.known (store, hash)) + let known_invalid chain_state hash = - Shared.use chain_state.block_store begin fun store -> - Store.Block.Invalid_block.known store hash - end + Shared.use chain_state.block_store (fun store -> + Store.Block.Invalid_block.known store hash) + let read_invalid chain_state hash = - Shared.use chain_state.block_store begin fun store -> - Store.Block.Invalid_block.read_opt store hash - end + Shared.use chain_state.block_store (fun store -> + Store.Block.Invalid_block.read_opt store hash) + let list_invalid chain_state = - Shared.use chain_state.block_store begin fun store -> - Store.Block.Invalid_block.fold store ~init:[] - ~f:(fun hash { level ; errors } acc -> - Lwt.return ((hash, level, errors) :: acc)) - end + Shared.use chain_state.block_store (fun store -> + Store.Block.Invalid_block.fold + store + ~init:[] + ~f:(fun hash {level; errors} acc -> + Lwt.return ((hash, level, errors) :: acc))) + let unmark_invalid chain_state block = - Shared.use chain_state.block_store begin fun store -> - Store.Block.Invalid_block.known store block >>= fun mem -> - if mem then - Store.Block.Invalid_block.remove store block >>= return - else - fail (Block_not_invalid block) - end + Shared.use chain_state.block_store (fun store -> + Store.Block.Invalid_block.known store block + >>= fun mem -> + if mem then Store.Block.Invalid_block.remove store block >>= return + else fail (Block_not_invalid block)) let is_valid_for_checkpoint block checkpoint = let chain_state = block.chain_state in - Shared.use chain_state.block_store begin fun store -> - 
Locked_block.is_valid_for_checkpoint - store block.hash block.header checkpoint - end + Shared.use chain_state.block_store (fun store -> + Locked_block.is_valid_for_checkpoint + store + block.hash + block.header + checkpoint) let read_predecessor chain_state ~pred ?(below_save_point = false) hash = - Shared.use chain_state.block_store begin fun store -> - predecessor_n ~below_save_point store hash pred >>= fun hash_opt -> - let new_hash_opt = - match hash_opt with - | Some _ as hash_opt -> hash_opt + Shared.use chain_state.block_store (fun store -> + predecessor_n ~below_save_point store hash pred + >>= fun hash_opt -> + let new_hash_opt = + match hash_opt with + | Some _ as hash_opt -> + hash_opt + | None -> + if Block_hash.equal hash chain_state.genesis.block then + Some chain_state.genesis.block + else None + in + match new_hash_opt with | None -> - if Block_hash.equal hash chain_state.genesis.block then - Some chain_state.genesis.block - else - None - in - match new_hash_opt with - | None -> Lwt.fail Not_found - | Some hash -> - Header.read_opt (store, hash) >>= fun header -> - begin match header with + Lwt.fail Not_found + | Some hash -> ( + Header.read_opt (store, hash) + >>= fun header -> + match header with | Some header -> - Lwt.return_some { chain_state ; hash ; header } + Lwt.return_some {chain_state; hash; header} | None -> - Lwt.return_none - end - end + Lwt.return_none )) let read chain_state hash = - - Shared.use chain_state.block_store begin fun store -> - Header.read (store, hash) >>=? fun header -> - return { chain_state ; hash ; header } - end + Shared.use chain_state.block_store (fun store -> + Header.read (store, hash) + >>=? fun header -> return {chain_state; hash; header}) let read_opt chain_state hash = - read chain_state hash >>= function - | Error _ -> Lwt.return_none - | Ok v -> Lwt.return_some v + read chain_state hash + >>= function Error _ -> Lwt.return_none | Ok v -> Lwt.return_some v - let predecessor { chain_state ; header ; hash ; _ } = - if Block_hash.equal hash header.shell.predecessor then - Lwt.return_none (* we are at genesis *) - else - read_opt chain_state header.shell.predecessor + let predecessor {chain_state; header; hash; _} = + if Block_hash.equal hash header.shell.predecessor then Lwt.return_none + (* we are at genesis *) + else read_opt chain_state header.shell.predecessor let predecessor_n b n = - Shared.use b.chain_state.block_store begin fun block_store -> - predecessor_n block_store b.hash n - end - - let store - ?(dont_enforce_context_hash = false) - chain_state block_header block_header_metadata - operations operations_metadata - { context_hash ; message ; max_operations_ttl ; last_allowed_fork_level } - ~forking_testchain - = + Shared.use b.chain_state.block_store (fun block_store -> + predecessor_n block_store b.hash n) + + let store ?(dont_enforce_context_hash = false) chain_state block_header + block_header_metadata operations operations_metadata + {context_hash; message; max_operations_ttl; last_allowed_fork_level} + ~forking_testchain = let bytes = Block_header.to_bytes block_header in let hash = Block_header.hash_raw bytes in fail_unless (block_header.shell.validation_passes = List.length operations) - (failure "State.Block.store: invalid operations length") >>=? fun () -> + (failure "State.Block.store: invalid operations length") + >>=? fun () -> fail_unless (block_header.shell.validation_passes = List.length operations_metadata) - (failure "State.Block.store: invalid operations_data length") >>=? 
fun () -> + (failure "State.Block.store: invalid operations_data length") + >>=? fun () -> fail_unless (List.for_all2 (fun l1 l2 -> List.length l1 = List.length l2) - operations operations_metadata) - (failure "State.Block.store: inconsistent operations and operations_data") >>=? fun () -> + operations + operations_metadata) + (failure "State.Block.store: inconsistent operations and operations_data") + >>=? fun () -> (* let's the validator check the consistency... of fitness, level, ... *) - Shared.use chain_state.block_store begin fun store -> - Store.Block.Invalid_block.known store hash >>= fun known_invalid -> - fail_when known_invalid (failure "Known invalid") >>=? fun () -> - Store.Block.Contents.known (store, hash) >>= fun known -> - if known then - return_none - else begin - (* safety check: never ever commit a block that is not compatible + Shared.use chain_state.block_store (fun store -> + Store.Block.Invalid_block.known store hash + >>= fun known_invalid -> + fail_when known_invalid (failure "Known invalid") + >>=? fun () -> + Store.Block.Contents.known (store, hash) + >>= fun known -> + if known then return_none + else + (* safety check: never ever commit a block that is not compatible with the current checkpoint. *) - begin - let predecessor = block_header.shell.predecessor in - Header.known - (store, predecessor) >>= fun valid_predecessor -> - if not valid_predecessor then - Lwt.return_false - else - Shared.use chain_state.chain_data begin fun chain_data -> - Locked_block.acceptable chain_data block_header - end - end >>= fun acceptable_block -> - fail_unless - acceptable_block - (Checkpoint_error (hash, None)) >>=? fun () -> - let commit = context_hash in - Context.exists chain_state.context_index.data commit - >>= fun exists -> - fail_unless exists - (failure "State.Block.store: context hash not found in context") - >>=? fun _ -> - fail_unless - (dont_enforce_context_hash - || Context_hash.equal block_header.shell.context commit) - (Inconsistent_hash (commit, block_header.shell.context)) >>=? fun () -> - let header = - if dont_enforce_context_hash then - { block_header - with shell = { block_header.shell with context = commit } } - else - block_header - in - let contents = { - header ; - Store.Block.message ; - max_operations_ttl ; - last_allowed_fork_level ; - context = commit ; - metadata = block_header_metadata ; - } in - Store.Block.Contents.store (store, hash) contents >>= fun () -> - Lwt_list.iteri_p (fun i ops -> - Store.Block.Operation_hashes.store - (store,hash) i (List.map Operation.hash ops)) - operations >>= fun () -> - Lwt_list.iteri_p - (fun i ops -> - Store.Block.Operations.store (store, hash) i ops) - operations >>= fun () -> - Lwt_list.iteri_p - (fun i ops -> - Store.Block.Operations_metadata.store (store, hash) i ops) - operations_metadata >>= fun () -> - (* Store predecessors *) - store_predecessors store hash >>= fun () -> - (* Update the chain state. 
*) - Shared.use chain_state.chain_data begin fun chain_data -> - let store = chain_data.chain_data_store in - let predecessor = block_header.shell.predecessor in - Store.Chain_data.Known_heads.remove store predecessor >>= fun () -> - Store.Chain_data.Known_heads.store store hash - end >>= fun () -> - begin if forking_testchain then - Shared.use chain_state.global_state.global_data begin fun global_data -> - let genesis = Context.compute_testchain_genesis hash in - Store.Forking_block_hash.store global_data.global_store - (Context.compute_testchain_chain_id genesis) hash end - else - Lwt.return_unit end >>= fun () -> - let block = { chain_state ; hash ; header } in - Lwt_watcher.notify chain_state.block_watcher block ; - Lwt_watcher.notify chain_state.global_state.block_watcher block ; - return_some block - end - end + (let predecessor = block_header.shell.predecessor in + Header.known (store, predecessor) + >>= fun valid_predecessor -> + if not valid_predecessor then Lwt.return_false + else + Shared.use chain_state.chain_data (fun chain_data -> + Locked_block.acceptable chain_data block_header)) + >>= fun acceptable_block -> + fail_unless acceptable_block (Checkpoint_error (hash, None)) + >>=? fun () -> + let commit = context_hash in + Context.exists chain_state.context_index.data commit + >>= fun exists -> + fail_unless + exists + (failure "State.Block.store: context hash not found in context") + >>=? fun _ -> + fail_unless + ( dont_enforce_context_hash + || Context_hash.equal block_header.shell.context commit ) + (Inconsistent_hash (commit, block_header.shell.context)) + >>=? fun () -> + let header = + if dont_enforce_context_hash then + { block_header with + shell = {block_header.shell with context = commit} } + else block_header + in + let contents = + { header; + Store.Block.message; + max_operations_ttl; + last_allowed_fork_level; + context = commit; + metadata = block_header_metadata } + in + Store.Block.Contents.store (store, hash) contents + >>= fun () -> + Lwt_list.iteri_p + (fun i ops -> + Store.Block.Operation_hashes.store + (store, hash) + i + (List.map Operation.hash ops)) + operations + >>= fun () -> + Lwt_list.iteri_p + (fun i ops -> Store.Block.Operations.store (store, hash) i ops) + operations + >>= fun () -> + Lwt_list.iteri_p + (fun i ops -> + Store.Block.Operations_metadata.store (store, hash) i ops) + operations_metadata + >>= fun () -> + (* Store predecessors *) + store_predecessors store hash + >>= fun () -> + (* Update the chain state. 
*) + Shared.use chain_state.chain_data (fun chain_data -> + let store = chain_data.chain_data_store in + let predecessor = block_header.shell.predecessor in + Store.Chain_data.Known_heads.remove store predecessor + >>= fun () -> Store.Chain_data.Known_heads.store store hash) + >>= fun () -> + ( if forking_testchain then + Shared.use chain_state.global_state.global_data (fun global_data -> + let genesis = Context.compute_testchain_genesis hash in + Store.Forking_block_hash.store + global_data.global_store + (Context.compute_testchain_chain_id genesis) + hash) + else Lwt.return_unit ) + >>= fun () -> + let block = {chain_state; hash; header} in + Lwt_watcher.notify chain_state.block_watcher block ; + Lwt_watcher.notify chain_state.global_state.block_watcher block ; + return_some block) let store_invalid chain_state block_header errors = let bytes = Block_header.to_bytes block_header in let hash = Block_header.hash_raw bytes in - Shared.use chain_state.block_store begin fun store -> - Header.known (store, hash) >>= fun known_valid -> - fail_when known_valid (failure "Known valid") >>=? fun () -> - Store.Block.Invalid_block.known store hash >>= fun known_invalid -> - if known_invalid then - return_false - else - Store.Block.Invalid_block.store store hash - { level = block_header.shell.level ; errors } >>= fun () -> - return_true - end + Shared.use chain_state.block_store (fun store -> + Header.known (store, hash) + >>= fun known_valid -> + fail_when known_valid (failure "Known valid") + >>=? fun () -> + Store.Block.Invalid_block.known store hash + >>= fun known_invalid -> + if known_invalid then return_false + else + Store.Block.Invalid_block.store + store + hash + {level = block_header.shell.level; errors} + >>= fun () -> return_true) let watcher (state : chain_state) = Lwt_watcher.create_stream state.block_watcher @@ -1183,478 +1264,517 @@ module Block = struct let list_hashes = List.map Operation_list_hash.compute hashes in Operation_list_list_hash.compute_path list_hashes - let operation_hashes { chain_state ; hash ; header } i = + let operation_hashes {chain_state; hash; header} i = if i < 0 || header.shell.validation_passes <= i then invalid_arg "State.Block.operations" ; - Shared.use chain_state.block_store begin fun store -> - Lwt_list.map_p - (fun n -> - Store.Block.Operation_hashes.read_opt (store, hash) n >|= - Option.unopt_assert ~loc:__POS__ - ) - (0 -- (header.shell.validation_passes - 1)) >>= fun hashes -> - let path = compute_operation_path hashes in - Lwt.return (List.nth hashes i , path i) - end - - let all_operation_hashes { chain_state ; hash ; header ; _ } = - Shared.use chain_state.block_store begin fun store -> - Lwt_list.map_p - (fun i -> Store.Block.Operation_hashes.read_opt (store, hash) i >|= Option.unopt_assert ~loc:__POS__) - (0 -- (header.shell.validation_passes - 1)) - end - - let operations { chain_state ; hash ; header ; _ } i = + Shared.use chain_state.block_store (fun store -> + Lwt_list.map_p + (fun n -> + Store.Block.Operation_hashes.read_opt (store, hash) n + >|= Option.unopt_assert ~loc:__POS__) + (0 -- (header.shell.validation_passes - 1)) + >>= fun hashes -> + let path = compute_operation_path hashes in + Lwt.return (List.nth hashes i, path i)) + + let all_operation_hashes {chain_state; hash; header; _} = + Shared.use chain_state.block_store (fun store -> + Lwt_list.map_p + (fun i -> + Store.Block.Operation_hashes.read_opt (store, hash) i + >|= Option.unopt_assert ~loc:__POS__) + (0 -- (header.shell.validation_passes - 1))) + + let operations 
{chain_state; hash; header; _} i = if i < 0 || header.shell.validation_passes <= i then invalid_arg "State.Block.operations" ; - Shared.use chain_state.block_store begin fun store -> - Lwt_list.map_p - (fun n -> - Store.Block.Operation_hashes.read_opt (store, hash) n >|= - Option.unopt_assert ~loc:__POS__) - (0 -- (header.shell.validation_passes - 1)) >>= fun hashes -> - let path = compute_operation_path hashes in - Store.Block.Operations.read_opt (store, hash) i >|= Option.unopt_assert ~loc:__POS__ >>= fun ops -> - Lwt.return (ops, path i) - end - - let operations_metadata { chain_state ; hash ; header ; _ } i = + Shared.use chain_state.block_store (fun store -> + Lwt_list.map_p + (fun n -> + Store.Block.Operation_hashes.read_opt (store, hash) n + >|= Option.unopt_assert ~loc:__POS__) + (0 -- (header.shell.validation_passes - 1)) + >>= fun hashes -> + let path = compute_operation_path hashes in + Store.Block.Operations.read_opt (store, hash) i + >|= Option.unopt_assert ~loc:__POS__ + >>= fun ops -> Lwt.return (ops, path i)) + + let operations_metadata {chain_state; hash; header; _} i = if i < 0 || header.shell.validation_passes <= i then invalid_arg "State.Block.operations_metadata" ; - Shared.use chain_state.block_store begin fun store -> - Store.Block.Operations_metadata.read_opt (store, hash) i >|= Option.unopt_assert ~loc:__POS__ - end - - let all_operations { chain_state ; hash ; header ; _ } = - Shared.use chain_state.block_store begin fun store -> - Lwt_list.map_p - (fun i -> Store.Block.Operations.read_opt (store, hash) i >|= Option.unopt_assert ~loc:__POS__) - (0 -- (header.shell.validation_passes - 1)) - end - - let all_operations_metadata { chain_state ; hash ; header ; _ } = - Shared.use chain_state.block_store begin fun store -> - Lwt_list.map_p - (fun i -> Store.Block.Operations_metadata.read_opt (store, hash) i >|= Option.unopt_assert ~loc:__POS__) - (0 -- (header.shell.validation_passes - 1)) - end - - let context { chain_state ; hash ; _ } = - Shared.use chain_state.block_store begin fun block_store -> - Store.Block.Contents.read_opt (block_store, hash) - end >|= Option.unopt_assert ~loc:__POS__ >>= fun { context = commit ; _ } -> - Shared.use chain_state.context_index begin fun context_index -> - Context.checkout_exn context_index commit - end + Shared.use chain_state.block_store (fun store -> + Store.Block.Operations_metadata.read_opt (store, hash) i + >|= Option.unopt_assert ~loc:__POS__) + + let all_operations {chain_state; hash; header; _} = + Shared.use chain_state.block_store (fun store -> + Lwt_list.map_p + (fun i -> + Store.Block.Operations.read_opt (store, hash) i + >|= Option.unopt_assert ~loc:__POS__) + (0 -- (header.shell.validation_passes - 1))) + + let all_operations_metadata {chain_state; hash; header; _} = + Shared.use chain_state.block_store (fun store -> + Lwt_list.map_p + (fun i -> + Store.Block.Operations_metadata.read_opt (store, hash) i + >|= Option.unopt_assert ~loc:__POS__) + (0 -- (header.shell.validation_passes - 1))) + + let context {chain_state; hash; _} = + Shared.use chain_state.block_store (fun block_store -> + Store.Block.Contents.read_opt (block_store, hash)) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun {context = commit; _} -> + Shared.use chain_state.context_index (fun context_index -> + Context.checkout_exn context_index commit) let protocol_hash block = - context block >>= fun context -> - Context.get_protocol context + context block >>= fun context -> Context.get_protocol context - let protocol_level block = - 
block.header.shell.proto_level + let protocol_level block = block.header.shell.proto_level let test_chain block = - context block >>= fun context -> - Context.get_test_chain context >>= fun status -> + context block + >>= fun context -> + Context.get_test_chain context + >>= fun status -> let lookup_testchain genesis = let chain_id = Context.compute_testchain_chain_id genesis in (* otherwise, look in the temporary table *) - Shared.use block.chain_state.global_state.global_data begin fun global_data -> - Store.Forking_block_hash.read_opt global_data.global_store chain_id - end >>= function + Shared.use block.chain_state.global_state.global_data (fun global_data -> + Store.Forking_block_hash.read_opt global_data.global_store chain_id) + >>= function | Some forking_block_hash -> - read_opt block.chain_state forking_block_hash >>= fun forking_block -> - Lwt.return (status, forking_block) + read_opt block.chain_state forking_block_hash + >>= fun forking_block -> Lwt.return (status, forking_block) | None -> Lwt.return (status, None) in match status with - | Running { genesis ; _ } -> lookup_testchain genesis - | Forking _ -> Lwt.return (status, Some block) - | Not_running -> Lwt.return (status, None) + | Running {genesis; _} -> + lookup_testchain genesis + | Forking _ -> + Lwt.return (status, Some block) + | Not_running -> + Lwt.return (status, None) let known chain_state hash = - Shared.use chain_state.block_store begin fun store -> - Header.known (store, hash) >>= fun known -> - if known then - Lwt.return_true - else - Store.Block.Invalid_block.known store hash - end + Shared.use chain_state.block_store (fun store -> + Header.known (store, hash) + >>= fun known -> + if known then Lwt.return_true + else Store.Block.Invalid_block.known store hash) let block_validity chain_state block : Block_locator.validity Lwt.t = - known chain_state block >>= function + known chain_state block + >>= function | false -> if Block_hash.equal block (Chain.faked_genesis_hash chain_state) then Lwt.return Block_locator.Known_valid - else - Lwt.return Block_locator.Unknown - | true -> - known_invalid chain_state block >>= function + else Lwt.return Block_locator.Unknown + | true -> ( + known_invalid chain_state block + >>= function | true -> Lwt.return Block_locator.Known_invalid | false -> - Lwt.return Block_locator.Known_valid + Lwt.return Block_locator.Known_valid ) let known_ancestor chain_state locator = - Shared.use chain_state.global_state.global_data begin fun { global_store ; _ } -> - begin - Store.Configuration.History_mode.read_opt global_store >|= - Option.unopt_assert ~loc:__POS__ - end - end >>= fun history_mode -> - Block_locator.unknown_prefix - ~is_known:(block_validity chain_state) locator >>= function - | (Known_valid, prefix_locator) -> Lwt.return_some prefix_locator - | (Known_invalid, _) -> Lwt.return_none - | (Unknown, _) -> - begin match history_mode with - | Archive -> Lwt.return_none - | Rolling | Full -> Lwt.return_some locator - end + Shared.use chain_state.global_state.global_data (fun {global_store; _} -> + Store.Configuration.History_mode.read_opt global_store + >|= Option.unopt_assert ~loc:__POS__) + >>= fun history_mode -> + Block_locator.unknown_prefix ~is_known:(block_validity chain_state) locator + >>= function + | (Known_valid, prefix_locator) -> + Lwt.return_some prefix_locator + | (Known_invalid, _) -> + Lwt.return_none + | (Unknown, _) -> ( + match history_mode with + | Archive -> + Lwt.return_none + | Rolling | Full -> + Lwt.return_some locator ) (* Hypothesis : genesis' 
predecessor is itself. *) - let get_rpc_directory ({ chain_state ; _ } as block) = - read_opt chain_state block.header.shell.predecessor >>= function - | None -> Lwt.return_none (* assert false *) - | Some pred when equal pred block -> Lwt.return_none (* genesis *) - | Some pred -> - Chain.save_point chain_state >>= fun (save_point_level, _) -> - begin - if Compare.Int32.(level pred < save_point_level) then - Chain.get_level_indexed_protocol chain_state pred.header - else protocol_hash pred - end >>= fun protocol -> + let get_rpc_directory ({chain_state; _} as block) = + read_opt chain_state block.header.shell.predecessor + >>= function + | None -> + Lwt.return_none (* assert false *) + | Some pred when equal pred block -> + Lwt.return_none (* genesis *) + | Some pred -> ( + Chain.save_point chain_state + >>= fun (save_point_level, _) -> + ( if Compare.Int32.(level pred < save_point_level) then + Chain.get_level_indexed_protocol chain_state pred.header + else protocol_hash pred ) + >>= fun protocol -> match Protocol_hash.Table.find_opt - chain_state.block_rpc_directories protocol + chain_state.block_rpc_directories + protocol with - | None -> Lwt.return_none + | None -> + Lwt.return_none | Some map -> - protocol_hash block >>= fun next_protocol -> - Lwt.return (Protocol_hash.Map.find_opt next_protocol map) - - let set_rpc_directory ({ chain_state ; _ } as block) dir = - read_opt chain_state block.header.shell.predecessor >|= - Option.unopt_assert ~loc:__POS__ >>= fun pred -> - protocol_hash block >>= fun next_protocol -> - Chain.save_point chain_state >>= fun (save_point_level, _) -> - begin - if Compare.Int32.(level pred < save_point_level) then - Chain.get_level_indexed_protocol chain_state (header pred) - else protocol_hash pred - end >>= fun protocol -> + protocol_hash block + >>= fun next_protocol -> + Lwt.return (Protocol_hash.Map.find_opt next_protocol map) ) + + let set_rpc_directory ({chain_state; _} as block) dir = + read_opt chain_state block.header.shell.predecessor + >|= Option.unopt_assert ~loc:__POS__ + >>= fun pred -> + protocol_hash block + >>= fun next_protocol -> + Chain.save_point chain_state + >>= fun (save_point_level, _) -> + ( if Compare.Int32.(level pred < save_point_level) then + Chain.get_level_indexed_protocol chain_state (header pred) + else protocol_hash pred ) + >>= fun protocol -> let map = - Option.unopt ~default:Protocol_hash.Map.empty - (Protocol_hash.Table.find_opt chain_state.block_rpc_directories protocol) + Option.unopt + ~default:Protocol_hash.Map.empty + (Protocol_hash.Table.find_opt + chain_state.block_rpc_directories + protocol) in Protocol_hash.Table.replace - chain_state.block_rpc_directories protocol + chain_state.block_rpc_directories + protocol (Protocol_hash.Map.add next_protocol dir map) ; Lwt.return_unit let get_header_rpc_directory chain_state header = - Shared.use chain_state.block_store begin fun block_store -> - Header.read_opt - (block_store, header.Block_header.shell.predecessor) >>= function - | None -> Lwt.return_none (* caboose *) - | Some pred when Block_header.equal pred header -> Lwt.return_none (* genesis *) - | Some pred -> - Chain.get_level_indexed_protocol chain_state header >>= fun protocol -> - match - Protocol_hash.Table.find_opt - chain_state.header_rpc_directories protocol - with - | None -> Lwt.return_none - | Some map -> - Chain.get_level_indexed_protocol chain_state pred >>= fun next_protocol -> - Lwt.return (Protocol_hash.Map.find_opt next_protocol map) - end + Shared.use chain_state.block_store (fun block_store -> 
+ Header.read_opt (block_store, header.Block_header.shell.predecessor) + >>= function + | None -> + Lwt.return_none (* caboose *) + | Some pred when Block_header.equal pred header -> + Lwt.return_none (* genesis *) + | Some pred -> ( + Chain.get_level_indexed_protocol chain_state header + >>= fun protocol -> + match + Protocol_hash.Table.find_opt + chain_state.header_rpc_directories + protocol + with + | None -> + Lwt.return_none + | Some map -> + Chain.get_level_indexed_protocol chain_state pred + >>= fun next_protocol -> + Lwt.return (Protocol_hash.Map.find_opt next_protocol map) )) let set_header_rpc_directory chain_state header dir = - Shared.use chain_state.block_store begin fun block_store -> - Header.read_opt - (block_store, header.Block_header.shell.predecessor) >>= function - | None -> assert false - | Some pred -> - Chain.get_level_indexed_protocol chain_state header >>= fun next_protocol -> - Chain.get_level_indexed_protocol chain_state pred >>= fun protocol -> - let map = - Option.unopt ~default:Protocol_hash.Map.empty - (Protocol_hash.Table.find_opt chain_state.header_rpc_directories protocol) - in - Protocol_hash.Table.replace - chain_state.header_rpc_directories protocol - (Protocol_hash.Map.add next_protocol dir map) ; - Lwt.return_unit - end + Shared.use chain_state.block_store (fun block_store -> + Header.read_opt (block_store, header.Block_header.shell.predecessor) + >>= function + | None -> + assert false + | Some pred -> + Chain.get_level_indexed_protocol chain_state header + >>= fun next_protocol -> + Chain.get_level_indexed_protocol chain_state pred + >>= fun protocol -> + let map = + Option.unopt + ~default:Protocol_hash.Map.empty + (Protocol_hash.Table.find_opt + chain_state.header_rpc_directories + protocol) + in + Protocol_hash.Table.replace + chain_state.header_rpc_directories + protocol + (Protocol_hash.Map.add next_protocol dir map) ; + Lwt.return_unit) end let watcher (state : global_state) = Lwt_watcher.create_stream state.block_watcher -let read_block { global_data ; _ } hash = - Shared.use global_data begin fun { chains ; _ } -> - Chain_id.Table.fold - (fun _chain_id chain_state acc -> - acc >>= function - | Some _ -> acc - | None -> - Block.read_opt chain_state hash >>= function - | None -> acc - | Some block -> Lwt.return_some block) - chains - Lwt.return_none - end +let read_block {global_data; _} hash = + Shared.use global_data (fun {chains; _} -> + Chain_id.Table.fold + (fun _chain_id chain_state acc -> + acc + >>= function + | Some _ -> + acc + | None -> ( + Block.read_opt chain_state hash + >>= function None -> acc | Some block -> Lwt.return_some block )) + chains + Lwt.return_none) let read_block_exn t hash = - read_block t hash >>= function - | None -> Lwt.fail Not_found - | Some b -> Lwt.return b + read_block t hash + >>= function None -> Lwt.fail Not_found | Some b -> Lwt.return b let update_testchain block ~testchain_state = - update_chain_data block.chain_state begin fun _ chain_data -> - Lwt.return (Some { chain_data with test_chain = Some testchain_state.chain_id }, ()) - end >>= fun () -> - Lwt.return_unit - -let fork_testchain block chain_id genesis_hash genesis_header protocol expiration = - Shared.use block.chain_state.global_state.global_data begin fun data -> - let chain_store = Store.Chain.get data.global_store chain_id in - let block_store = Store.Block.get chain_store in - Store.Block.Contents.store (block_store, genesis_hash) - { header = genesis_header ; - Store.Block.message = Some "Genesis" ; - max_operations_ttl = 0 ; context = 
genesis_header.shell.context ; - metadata = MBytes.create 0 ; - last_allowed_fork_level = 0l ; - } >>= fun () -> - let genesis = - { block = genesis_hash ; - time = genesis_header.shell.timestamp ; - protocol } in - Chain.locked_create block.chain_state.global_state data - chain_id ~expiration genesis genesis_header >>= fun testchain_state -> - Store.Chain.Protocol_info.store - chain_store genesis_header.shell.proto_level (protocol, genesis_header.shell.level) >>= fun () -> - update_testchain block ~testchain_state >>= fun () -> - return testchain_state - end + update_chain_data block.chain_state (fun _ chain_data -> + Lwt.return + (Some {chain_data with test_chain = Some testchain_state.chain_id}, ())) + >>= fun () -> Lwt.return_unit + +let fork_testchain block chain_id genesis_hash genesis_header protocol + expiration = + Shared.use block.chain_state.global_state.global_data (fun data -> + let chain_store = Store.Chain.get data.global_store chain_id in + let block_store = Store.Block.get chain_store in + Store.Block.Contents.store + (block_store, genesis_hash) + { header = genesis_header; + Store.Block.message = Some "Genesis"; + max_operations_ttl = 0; + context = genesis_header.shell.context; + metadata = MBytes.create 0; + last_allowed_fork_level = 0l } + >>= fun () -> + let genesis = + {block = genesis_hash; time = genesis_header.shell.timestamp; protocol} + in + Chain.locked_create + block.chain_state.global_state + data + chain_id + ~expiration + genesis + genesis_header + >>= fun testchain_state -> + Store.Chain.Protocol_info.store + chain_store + genesis_header.shell.proto_level + (protocol, genesis_header.shell.level) + >>= fun () -> + update_testchain block ~testchain_state + >>= fun () -> return testchain_state) let best_known_head_for_checkpoint chain_state checkpoint = - Shared.use chain_state.block_store begin fun store -> - Shared.use chain_state.chain_data begin fun data -> - let head_hash = data.data.current_head.hash in - let head_header = data.data.current_head.header in - Locked_block.is_valid_for_checkpoint - store head_hash head_header checkpoint >>= fun valid -> - if valid then - Lwt.return data.data.current_head - else - let find_valid_predecessor hash = - Header.read_opt - (store, hash) >|= Option.unopt_assert ~loc:__POS__ >>= fun header -> - if Compare.Int32.(header.shell.level < checkpoint.shell.level) then - Lwt.return { hash ; chain_state ; header } + Shared.use chain_state.block_store (fun store -> + Shared.use chain_state.chain_data (fun data -> + let head_hash = data.data.current_head.hash in + let head_header = data.data.current_head.header in + Locked_block.is_valid_for_checkpoint + store + head_hash + head_header + checkpoint + >>= fun valid -> + if valid then Lwt.return data.data.current_head else - predecessor_n store hash - (1 + (Int32.to_int @@ - Int32.sub header.shell.level checkpoint.shell.level)) >|= Option.unopt_assert ~loc:__POS__ >>= fun pred -> - Header.read_opt - (store, pred) >|= Option.unopt_assert ~loc:__POS__ >>= fun pred_header -> - Lwt.return { hash = pred ; chain_state ; header = pred_header } in - Store.Chain_data.Known_heads.read_all - data.chain_data_store >>= fun heads -> - Header.read_opt - (store, chain_state.genesis.block) >|= Option.unopt_assert ~loc:__POS__ >>= fun genesis_header -> - let genesis = - { hash = chain_state.genesis.block ; - chain_state ; header = genesis_header } in - Block_hash.Set.fold - (fun head best -> - let valid_predecessor = find_valid_predecessor head in - best >>= fun best -> - valid_predecessor >>= 
fun pred -> - if Fitness.(pred.header.shell.fitness > - best.header.shell.fitness) then - Lwt.return pred - else - Lwt.return best) - heads - (Lwt.return genesis) - end - end + let find_valid_predecessor hash = + Header.read_opt (store, hash) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun header -> + if Compare.Int32.(header.shell.level < checkpoint.shell.level) + then Lwt.return {hash; chain_state; header} + else + predecessor_n + store + hash + ( 1 + + ( Int32.to_int + @@ Int32.sub header.shell.level checkpoint.shell.level ) ) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun pred -> + Header.read_opt (store, pred) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun pred_header -> + Lwt.return {hash = pred; chain_state; header = pred_header} + in + Store.Chain_data.Known_heads.read_all data.chain_data_store + >>= fun heads -> + Header.read_opt (store, chain_state.genesis.block) + >|= Option.unopt_assert ~loc:__POS__ + >>= fun genesis_header -> + let genesis = + { hash = chain_state.genesis.block; + chain_state; + header = genesis_header } + in + Block_hash.Set.fold + (fun head best -> + let valid_predecessor = find_valid_predecessor head in + best + >>= fun best -> + valid_predecessor + >>= fun pred -> + if + Fitness.( + pred.header.shell.fitness > best.header.shell.fitness) + then Lwt.return pred + else Lwt.return best) + heads + (Lwt.return genesis))) module Protocol = struct - include Protocol let known global_state hash = - Shared.use global_state.protocol_store begin fun store -> - Store.Protocol.Contents.known store hash - end + Shared.use global_state.protocol_store (fun store -> + Store.Protocol.Contents.known store hash) let read global_state hash = - Shared.use global_state.protocol_store begin fun store -> - Store.Protocol.Contents.read store hash - end + Shared.use global_state.protocol_store (fun store -> + Store.Protocol.Contents.read store hash) + let read_opt global_state hash = - Shared.use global_state.protocol_store begin fun store -> - Store.Protocol.Contents.read_opt store hash - end + Shared.use global_state.protocol_store (fun store -> + Store.Protocol.Contents.read_opt store hash) let read_raw global_state hash = - Shared.use global_state.protocol_store begin fun store -> - Store.Protocol.RawContents.read (store, hash) - end + Shared.use global_state.protocol_store (fun store -> + Store.Protocol.RawContents.read (store, hash)) + let read_raw_opt global_state hash = - Shared.use global_state.protocol_store begin fun store -> - Store.Protocol.RawContents.read_opt (store, hash) - end + Shared.use global_state.protocol_store (fun store -> + Store.Protocol.RawContents.read_opt (store, hash)) let store global_state p = let bytes = Protocol.to_bytes p in let hash = Protocol.hash_raw bytes in - Shared.use global_state.protocol_store begin fun store -> - Store.Protocol.Contents.known store hash >>= fun known -> - if known then - Lwt.return_none - else - Store.Protocol.RawContents.store (store, hash) bytes >>= fun () -> - Lwt_watcher.notify global_state.protocol_watcher hash ; - Lwt.return_some hash - end + Shared.use global_state.protocol_store (fun store -> + Store.Protocol.Contents.known store hash + >>= fun known -> + if known then Lwt.return_none + else + Store.Protocol.RawContents.store (store, hash) bytes + >>= fun () -> + Lwt_watcher.notify global_state.protocol_watcher hash ; + Lwt.return_some hash) let remove global_state hash = - Shared.use global_state.protocol_store begin fun store -> - Store.Protocol.Contents.known store hash >>= fun known -> - if known then - 
Lwt.return_false - else - Store.Protocol.Contents.remove store hash >>= fun () -> - Lwt.return_true - end + Shared.use global_state.protocol_store (fun store -> + Store.Protocol.Contents.known store hash + >>= fun known -> + if known then Lwt.return_false + else + Store.Protocol.Contents.remove store hash + >>= fun () -> Lwt.return_true) let list global_state = - Shared.use global_state.protocol_store begin fun store -> - Store.Protocol.Contents.fold_keys store - ~init:Protocol_hash.Set.empty - ~f:(fun x acc -> Lwt.return (Protocol_hash.Set.add x acc)) - end + Shared.use global_state.protocol_store (fun store -> + Store.Protocol.Contents.fold_keys + store + ~init:Protocol_hash.Set.empty + ~f:(fun x acc -> Lwt.return (Protocol_hash.Set.add x acc))) let watcher (state : global_state) = Lwt_watcher.create_stream state.protocol_watcher - end module Current_mempool = struct - let set chain_state ~head mempool = - update_chain_data chain_state begin fun _chain_data_store data -> - if Block_hash.equal head (Block.hash data.current_head) then - Lwt.return (Some { data with current_mempool = mempool }, ()) - else - Lwt.return (None, ()) - end + update_chain_data chain_state (fun _chain_data_store data -> + if Block_hash.equal head (Block.hash data.current_head) then + Lwt.return (Some {data with current_mempool = mempool}, ()) + else Lwt.return (None, ())) let get chain_state = - read_chain_data chain_state begin fun _chain_data_store data -> - Lwt.return (Block.header data.current_head, data.current_mempool) - end - + read_chain_data chain_state (fun _chain_data_store data -> + Lwt.return (Block.header data.current_head, data.current_mempool)) end let may_create_chain state chain_id genesis = - Chain.get state chain_id >>= function - | Ok chain -> Lwt.return chain + Chain.get state chain_id + >>= function + | Ok chain -> + Lwt.return chain | Error _ -> - Chain.create - ~allow_forked_chain:true - state genesis chain_id - -let read - global_store - context_index - main_chain = - let global_data = { - chains = Chain_id.Table.create 17 ; - global_store ; - context_index ; - } in - let state = { - global_data = Shared.create global_data ; - protocol_store = Shared.create @@ Store.Protocol.get global_store ; - main_chain ; - protocol_watcher = Lwt_watcher.create_input () ; - block_watcher = Lwt_watcher.create_input () ; - } in - Chain.read_all state >>=? fun () -> - return state - -type error += Incorrect_history_mode_switch of - { previous_mode: History_mode.t ; next_mode: History_mode.t } + Chain.create ~allow_forked_chain:true state genesis chain_id + +let read global_store context_index main_chain = + let global_data = + {chains = Chain_id.Table.create 17; global_store; context_index} + in + let state = + { global_data = Shared.create global_data; + protocol_store = Shared.create @@ Store.Protocol.get global_store; + main_chain; + protocol_watcher = Lwt_watcher.create_input (); + block_watcher = Lwt_watcher.create_input () } + in + Chain.read_all state >>=? fun () -> return state + +type error += + | Incorrect_history_mode_switch of + { previous_mode : History_mode.t; + next_mode : History_mode.t } let () = - register_error_kind `Permanent + register_error_kind + `Permanent ~id:"node_config_file.incorrect_history_mode_switch" ~title:"Incorrect history mode switch" ~description:"Incorrect history mode switch." 
~pp:(fun ppf (prev, next) -> - Format.fprintf ppf - "@[cannot switch from history mode %a mode to %a mode@]" - History_mode.pp prev History_mode.pp next - ) + Format.fprintf + ppf + "@[cannot switch from history mode %a mode to %a mode@]" + History_mode.pp + prev + History_mode.pp + next) (Data_encoding.obj2 (Data_encoding.req "previous_mode" History_mode.encoding) (Data_encoding.req "next_mode" History_mode.encoding)) (function - | Incorrect_history_mode_switch x -> Some (x.previous_mode, x.next_mode) - | _ -> None) + | Incorrect_history_mode_switch x -> + Some (x.previous_mode, x.next_mode) + | _ -> + None) (fun (previous_mode, next_mode) -> - Incorrect_history_mode_switch { previous_mode ; next_mode }) - -let init - ?patch_context - ?(store_mapsize=40_960_000_000L) - ?(context_mapsize=409_600_000_000L) - ~store_root - ~context_root - ?history_mode - genesis = - Store.init ~mapsize:store_mapsize store_root >>=? fun global_store -> - Context.init - ~mapsize:context_mapsize ?patch_context - context_root >>= fun context_index -> + Incorrect_history_mode_switch {previous_mode; next_mode}) + +let init ?patch_context ?(store_mapsize = 40_960_000_000L) + ?(context_mapsize = 409_600_000_000L) ~store_root ~context_root + ?history_mode genesis = + Store.init ~mapsize:store_mapsize store_root + >>=? fun global_store -> + Context.init ~mapsize:context_mapsize ?patch_context context_root + >>= fun context_index -> let chain_id = Chain_id.of_block_hash genesis.Chain.block in - read global_store context_index chain_id >>=? fun state -> - may_create_chain state chain_id genesis >>= fun main_chain_state -> - Store.Configuration.History_mode.read_opt global_store >>= begin function - | None -> - let mode = Option.unopt ~default:History_mode.Full history_mode in - Store.Configuration.History_mode.store global_store mode >>= fun () -> - return mode - | Some previous_history_mode -> - match history_mode with - | None -> return previous_history_mode - | Some history_mode -> - if history_mode <> previous_history_mode then - fail (Incorrect_history_mode_switch - { previous_mode = previous_history_mode ; - next_mode = history_mode }) - else - return history_mode - end >>=? fun history_mode -> + read global_store context_index chain_id + >>=? fun state -> + may_create_chain state chain_id genesis + >>= fun main_chain_state -> + Store.Configuration.History_mode.read_opt global_store + >>= (function + | None -> + let mode = Option.unopt ~default:History_mode.Full history_mode in + Store.Configuration.History_mode.store global_store mode + >>= fun () -> return mode + | Some previous_history_mode -> ( + match history_mode with + | None -> + return previous_history_mode + | Some history_mode -> + if history_mode <> previous_history_mode then + fail + (Incorrect_history_mode_switch + { previous_mode = previous_history_mode; + next_mode = history_mode }) + else return history_mode )) + >>=? 
fun history_mode -> return (state, main_chain_state, context_index, history_mode) -let history_mode { global_data ; _ } = - Shared.use global_data begin fun { global_store ; _ } -> - Store.Configuration.History_mode.read_opt global_store >|= - Option.unopt_assert ~loc:__POS__ - end +let history_mode {global_data; _} = + Shared.use global_data (fun {global_store; _} -> + Store.Configuration.History_mode.read_opt global_store + >|= Option.unopt_assert ~loc:__POS__) -let close { global_data ; _ } = - Shared.use global_data begin fun { global_store ; _ } -> - Store.close global_store ; - Lwt.return_unit - end +let close {global_data; _} = + Shared.use global_data (fun {global_store; _} -> + Store.close global_store ; Lwt.return_unit) diff --git a/src/lib_shell/state.mli b/src/lib_shell/state.mli index 79abcd40741cc6d96b2c5de5d82c87972219aae0..3e63ae302fab4f2ea15764acfa597b24a6248f9d 100644 --- a/src/lib_shell/state.mli +++ b/src/lib_shell/state.mli @@ -33,6 +33,7 @@ - the pool of pending operations of a chain. *) type t + type global_state = t (** {2 Network} *) @@ -40,22 +41,23 @@ type global_state = t (** Data specific to a given chain (e.g the main chain or the current test chain). *) module Chain : sig - type t + type chain_state = t (** The chain starts from a genesis block associated to a seed protocol *) type genesis = { - time: Time.Protocol.t ; - block: Block_hash.t ; - protocol: Protocol_hash.t ; + time : Time.Protocol.t; + block : Block_hash.t; + protocol : Protocol_hash.t } - val genesis_encoding: genesis Data_encoding.t + + val genesis_encoding : genesis Data_encoding.t (** Initialize a chain for a given [genesis]. By default, the chain does accept forking test chain. When [~allow_forked_chain:true] is provided, test chain are allowed. *) - val create: + val create : global_state -> ?allow_forked_chain:bool -> genesis -> @@ -63,38 +65,44 @@ module Chain : sig chain_state Lwt.t (** Look up for a chain by the hash of its genesis block. *) - val get: global_state -> Chain_id.t -> chain_state tzresult Lwt.t - val get_exn: global_state -> Chain_id.t -> chain_state Lwt.t + val get : global_state -> Chain_id.t -> chain_state tzresult Lwt.t + + val get_exn : global_state -> Chain_id.t -> chain_state Lwt.t - val main: global_state -> Chain_id.t - val test: chain_state -> Chain_id.t option Lwt.t + val main : global_state -> Chain_id.t + + val test : chain_state -> Chain_id.t option Lwt.t (** Returns all the known chains. *) - val all: global_state -> chain_state list Lwt.t + val all : global_state -> chain_state list Lwt.t (** Destroy a chain: this completly removes from the local storage all the data associated to the chain (this includes blocks and operations). *) - val destroy: global_state -> chain_state -> unit Lwt.t + val destroy : global_state -> chain_state -> unit Lwt.t (** Various accessors. *) - val id: chain_state -> Chain_id.t - val genesis: chain_state -> genesis - val global_state: chain_state -> global_state + val id : chain_state -> Chain_id.t + + val genesis : chain_state -> genesis + + val global_state : chain_state -> global_state (** Hash of the faked block header of the genesis block. *) - val faked_genesis_hash: chain_state -> Block_hash.t + val faked_genesis_hash : chain_state -> Block_hash.t (** Return the expiration timestamp of a test chain. 
*) - val expiration: chain_state -> Time.Protocol.t option - val allow_forked_chain: chain_state -> bool + val expiration : chain_state -> Time.Protocol.t option + + val allow_forked_chain : chain_state -> bool + + val checkpoint : chain_state -> Block_header.t Lwt.t - val checkpoint: chain_state -> Block_header.t Lwt.t + val save_point : chain_state -> (Int32.t * Block_hash.t) Lwt.t - val save_point: chain_state -> (Int32.t * Block_hash.t) Lwt.t - val caboose: chain_state -> (Int32.t * Block_hash.t) Lwt.t + val caboose : chain_state -> (Int32.t * Block_hash.t) Lwt.t - val store: chain_state -> Store.t Lwt.t + val store : chain_state -> Store.t Lwt.t (** Update the current checkpoint. The current head should be consistent (i.e. it should either have a lower level or pass @@ -102,48 +110,53 @@ module Chain : sig invalid alternate heads are removed from the disk, either completely (when `level <= checkpoint`) or still tagged as invalid (when `level > checkpoint`). *) - val set_checkpoint: - chain_state -> Block_header.t -> unit Lwt.t + val set_checkpoint : chain_state -> Block_header.t -> unit Lwt.t (** Apply [set_checkpoint] then [purge_full] (see {!History_mode.t}). *) - val set_checkpoint_then_purge_full: chain_state -> Block_header.t -> - unit tzresult Lwt.t + val set_checkpoint_then_purge_full : + chain_state -> Block_header.t -> unit tzresult Lwt.t (** Apply [set_checkpoint] then [purge_rolling] (see {!History_mode.t}). *) - val set_checkpoint_then_purge_rolling: chain_state -> Block_header.t -> - unit tzresult Lwt.t + val set_checkpoint_then_purge_rolling : + chain_state -> Block_header.t -> unit tzresult Lwt.t (** Check that a block is compatible with the current checkpoint. This function assumes that the predecessor is known valid. *) - val acceptable_block: chain_state -> Block_header.t -> bool Lwt.t + val acceptable_block : chain_state -> Block_header.t -> bool Lwt.t (** Get the level indexed chain protocol store for the given header. *) - val get_level_indexed_protocol: chain_state -> Block_header.t -> Protocol_hash.t Lwt.t + val get_level_indexed_protocol : + chain_state -> Block_header.t -> Protocol_hash.t Lwt.t (** Update the level indexed chain protocol store so that the block can easily access its corresponding protocol hash from the protocol level in its header. Also stores the transition block level. *) - val update_level_indexed_protocol_store: chain_state -> Chain_id.t -> int -> - Protocol_hash.t -> Block_header.t -> unit Lwt.t - + val update_level_indexed_protocol_store : + chain_state -> + Chain_id.t -> + int -> + Protocol_hash.t -> + Block_header.t -> + unit Lwt.t end (** {2 Block database} *) type error += Block_not_found of Block_hash.t + type error += Block_contents_not_found of Block_hash.t module Block : sig - type t + type block = t type validation_store = { - context_hash: Context_hash.t ; - message: string option ; - max_operations_ttl: int ; - last_allowed_fork_level: Int32.t ; + context_hash : Context_hash.t; + message : string option; + max_operations_ttl : int; + last_allowed_fork_level : Int32.t } (** Abstract view over block header storage. 
@@ -152,20 +165,29 @@ module Block : sig module Header : sig val read : Store.Block.store * Block_hash.t -> Block_header.t tzresult Lwt.t + val read_opt : Store.Block.store * Block_hash.t -> Block_header.t option Lwt.t + val known : Store.Block.store * Block_hash.t -> bool Lwt.t end - val known: Chain.t -> Block_hash.t -> bool Lwt.t - val known_valid: Chain.t -> Block_hash.t -> bool Lwt.t - val known_invalid: Chain.t -> Block_hash.t -> bool Lwt.t - val read_invalid: Chain.t -> Block_hash.t -> Store.Block.invalid_block option Lwt.t - val list_invalid: Chain.t -> (Block_hash.t * int32 * error list) list Lwt.t - val unmark_invalid: Chain.t -> Block_hash.t -> unit tzresult Lwt.t + val known : Chain.t -> Block_hash.t -> bool Lwt.t + + val known_valid : Chain.t -> Block_hash.t -> bool Lwt.t - val read: Chain.t -> Block_hash.t -> t tzresult Lwt.t - val read_opt: Chain.t -> Block_hash.t -> t option Lwt.t + val known_invalid : Chain.t -> Block_hash.t -> bool Lwt.t + + val read_invalid : + Chain.t -> Block_hash.t -> Store.Block.invalid_block option Lwt.t + + val list_invalid : Chain.t -> (Block_hash.t * int32 * error list) list Lwt.t + + val unmark_invalid : Chain.t -> Block_hash.t -> unit tzresult Lwt.t + + val read : Chain.t -> Block_hash.t -> t tzresult Lwt.t + + val read_opt : Chain.t -> Block_hash.t -> t option Lwt.t (** Will return the full block if the block has never been cleaned (all blocks for nodes whose history-mode is set to archive), only @@ -173,68 +195,90 @@ module Block : sig rolling history-mode) or even `Pruned` for blocks below the rock bottom, only for nodes in rolling history-mode. Will fail with `Not_found` if the given hash is unknown. *) - val read_predecessor: Chain.t -> pred:int -> ?below_save_point:bool -> Block_hash.t -> t option Lwt.t + val read_predecessor : + Chain.t -> + pred:int -> + ?below_save_point:bool -> + Block_hash.t -> + t option Lwt.t - val store: + val store : ?dont_enforce_context_hash:bool -> Chain.t -> - Block_header.t -> MBytes.t -> - Operation.t list list -> MBytes.t list list -> + Block_header.t -> + MBytes.t -> + Operation.t list list -> + MBytes.t list list -> validation_store -> - forking_testchain: bool -> + forking_testchain:bool -> block option tzresult Lwt.t - val store_invalid: - Chain.t -> - Block_header.t -> - error list -> - bool tzresult Lwt.t - - val compare: t -> t -> int - val equal: t -> t -> bool - - val hash: t -> Block_hash.t - val header: t -> Block_header.t - val header_of_hash: Chain.t -> Block_hash.t -> Block_header.t option Lwt.t - val shell_header: t -> Block_header.shell_header - val timestamp: t -> Time.Protocol.t - val fitness: t -> Fitness.t - val validation_passes: t -> int - val chain_id: t -> Chain_id.t - val chain_state: t -> Chain.t - val level: t -> Int32.t - val message: t -> string option tzresult Lwt.t - val max_operations_ttl: t -> int tzresult Lwt.t - val metadata: t -> MBytes.t tzresult Lwt.t - val last_allowed_fork_level: t -> Int32.t tzresult Lwt.t - - val is_genesis: t -> bool - val predecessor: t -> t option Lwt.t - val predecessor_n: t -> int -> Block_hash.t option Lwt.t - - val is_valid_for_checkpoint: t -> Block_header.t -> bool Lwt.t - - val context: t -> Context.t Lwt.t - val protocol_hash: t -> Protocol_hash.t Lwt.t - val test_chain: t -> (Test_chain_status.t * t option) Lwt.t - - val protocol_level: t -> int - - - val operation_hashes: - t -> int -> - (Operation_hash.t list * Operation_list_list_hash.path) Lwt.t - val all_operation_hashes: t -> Operation_hash.t list list Lwt.t - - val operations: + 
val store_invalid : + Chain.t -> Block_header.t -> error list -> bool tzresult Lwt.t + + val compare : t -> t -> int + + val equal : t -> t -> bool + + val hash : t -> Block_hash.t + + val header : t -> Block_header.t + + val header_of_hash : Chain.t -> Block_hash.t -> Block_header.t option Lwt.t + + val shell_header : t -> Block_header.shell_header + + val timestamp : t -> Time.Protocol.t + + val fitness : t -> Fitness.t + + val validation_passes : t -> int + + val chain_id : t -> Chain_id.t + + val chain_state : t -> Chain.t + + val level : t -> Int32.t + + val message : t -> string option tzresult Lwt.t + + val max_operations_ttl : t -> int tzresult Lwt.t + + val metadata : t -> MBytes.t tzresult Lwt.t + + val last_allowed_fork_level : t -> Int32.t tzresult Lwt.t + + val is_genesis : t -> bool + + val predecessor : t -> t option Lwt.t + + val predecessor_n : t -> int -> Block_hash.t option Lwt.t + + val is_valid_for_checkpoint : t -> Block_header.t -> bool Lwt.t + + val context : t -> Context.t Lwt.t + + val protocol_hash : t -> Protocol_hash.t Lwt.t + + val test_chain : t -> (Test_chain_status.t * t option) Lwt.t + + val protocol_level : t -> int + + val operation_hashes : + t -> int -> (Operation_hash.t list * Operation_list_list_hash.path) Lwt.t + + val all_operation_hashes : t -> Operation_hash.t list list Lwt.t + + val operations : t -> int -> (Operation.t list * Operation_list_list_hash.path) Lwt.t - val all_operations: t -> Operation.t list list Lwt.t - val operations_metadata: - t -> int -> MBytes.t list Lwt.t - val all_operations_metadata: t -> MBytes.t list list Lwt.t + val all_operations : t -> Operation.t list list Lwt.t + + val operations_metadata : t -> int -> MBytes.t list Lwt.t - val watcher: Chain.t -> block Lwt_stream.t * Lwt_watcher.stopper + val all_operations_metadata : t -> MBytes.t list list Lwt.t + + val watcher : Chain.t -> block Lwt_stream.t * Lwt_watcher.stopper (** [known_ancestor chain_state locator] computes the unknown prefix in the [locator] according to [chain_state]. @@ -248,61 +292,67 @@ module Block : sig we find an invalid block or no valid block in the [locator]. - [None] when the node runs in full or rolling mode and we find an invalid block in the [locator]. 
*) - val known_ancestor: - Chain.t -> Block_locator.t -> - Block_locator.t option Lwt.t + val known_ancestor : + Chain.t -> Block_locator.t -> Block_locator.t option Lwt.t + + val get_rpc_directory : t -> t RPC_directory.t option Lwt.t - val get_rpc_directory: t -> t RPC_directory.t option Lwt.t - val set_rpc_directory: t -> t RPC_directory.t -> unit Lwt.t + val set_rpc_directory : t -> t RPC_directory.t -> unit Lwt.t - val get_header_rpc_directory: Chain.t -> Block_header.t -> + val get_header_rpc_directory : + Chain.t -> + Block_header.t -> (Chain.t * Block_hash.t * Block_header.t) RPC_directory.t option Lwt.t - val set_header_rpc_directory: Chain.t -> Block_header.t -> - (Chain.t * Block_hash.t * Block_header.t) RPC_directory.t -> unit Lwt.t + val set_header_rpc_directory : + Chain.t -> + Block_header.t -> + (Chain.t * Block_hash.t * Block_header.t) RPC_directory.t -> + unit Lwt.t end -val read_block: - global_state -> Block_hash.t -> Block.t option Lwt.t +val read_block : global_state -> Block_hash.t -> Block.t option Lwt.t -val read_block_exn: - global_state -> Block_hash.t -> Block.t Lwt.t +val read_block_exn : global_state -> Block_hash.t -> Block.t Lwt.t -val watcher: t -> Block.t Lwt_stream.t * Lwt_watcher.stopper +val watcher : t -> Block.t Lwt_stream.t * Lwt_watcher.stopper (** Computes the block with the best fitness amongst the known blocks which are compatible with the given checkpoint. *) -val best_known_head_for_checkpoint: - Chain.t -> Block_header.t -> Block.t Lwt.t - -val compute_locator: Chain.t -> ?size:int -> Block.t -> Block_locator.seed -> Block_locator.t Lwt.t +val best_known_head_for_checkpoint : Chain.t -> Block_header.t -> Block.t Lwt.t -val update_testchain: +val compute_locator : + Chain.t -> + ?size:int -> Block.t -> - testchain_state: Chain.t -> - unit Lwt.t + Block_locator.seed -> + Block_locator.t Lwt.t + +val update_testchain : Block.t -> testchain_state:Chain.t -> unit Lwt.t -val fork_testchain: +val fork_testchain : Block.t -> - Chain_id.t -> Block_hash.t -> Block_header.t -> - Protocol_hash.t -> Time.Protocol.t -> Chain.t tzresult Lwt.t + Chain_id.t -> + Block_hash.t -> + Block_header.t -> + Protocol_hash.t -> + Time.Protocol.t -> + Chain.t tzresult Lwt.t type chain_data = { - current_head: Block.t ; - current_mempool: Mempool.t ; - live_blocks: Block_hash.Set.t ; - live_operations: Operation_hash.Set.t ; - test_chain: Chain_id.t option ; - save_point: Int32.t * Block_hash.t ; - caboose: Int32.t * Block_hash.t ; + current_head : Block.t; + current_mempool : Mempool.t; + live_blocks : Block_hash.Set.t; + live_operations : Operation_hash.Set.t; + test_chain : Chain_id.t option; + save_point : Int32.t * Block_hash.t; + caboose : Int32.t * Block_hash.t } -val read_chain_data: - Chain.t -> - (Store.Chain_data.store -> chain_data -> 'a Lwt.t) -> - 'a Lwt.t +val read_chain_data : + Chain.t -> (Store.Chain_data.store -> chain_data -> 'a Lwt.t) -> 'a Lwt.t -val update_chain_data: +val update_chain_data : Chain.t -> (Store.Chain_data.store -> chain_data -> (chain_data option * 'a) Lwt.t) -> 'a Lwt.t @@ -310,50 +360,53 @@ val update_chain_data: (** {2 Protocol database} *) module Protocol : sig - - include (module type of (struct include Protocol end)) + include module type of struct + include Protocol + end (** Is a value stored in the local database ? *) - val known: global_state -> Protocol_hash.t -> bool Lwt.t + val known : global_state -> Protocol_hash.t -> bool Lwt.t (** Read a value in the local database. 
*) - val read: global_state -> Protocol_hash.t -> Protocol.t tzresult Lwt.t - val read_opt: global_state -> Protocol_hash.t -> Protocol.t option Lwt.t + val read : global_state -> Protocol_hash.t -> Protocol.t tzresult Lwt.t + + val read_opt : global_state -> Protocol_hash.t -> Protocol.t option Lwt.t (** Read a value in the local database (without parsing). *) - val read_raw: global_state -> Protocol_hash.t -> MBytes.t tzresult Lwt.t - val read_raw_opt: global_state -> Protocol_hash.t -> MBytes.t option Lwt.t + val read_raw : global_state -> Protocol_hash.t -> MBytes.t tzresult Lwt.t - val store: global_state -> Protocol.t -> Protocol_hash.t option Lwt.t + val read_raw_opt : global_state -> Protocol_hash.t -> MBytes.t option Lwt.t - (** Remove a value from the local database. *) - val remove: global_state -> Protocol_hash.t -> bool Lwt.t + val store : global_state -> Protocol.t -> Protocol_hash.t option Lwt.t - val list: global_state -> Protocol_hash.Set.t Lwt.t + (** Remove a value from the local database. *) + val remove : global_state -> Protocol_hash.t -> bool Lwt.t - val watcher: global_state -> Protocol_hash.t Lwt_stream.t * Lwt_watcher.stopper + val list : global_state -> Protocol_hash.Set.t Lwt.t + val watcher : + global_state -> Protocol_hash.t Lwt_stream.t * Lwt_watcher.stopper end module Current_mempool : sig - - val get: Chain.t -> (Block_header.t * Mempool.t) Lwt.t (** The current mempool. *) + val get : Chain.t -> (Block_header.t * Mempool.t) Lwt.t - val set: Chain.t -> head:Block_hash.t -> Mempool.t -> unit Lwt.t (** Set the current mempool. It is ignored if the current head is not the provided one. *) - + val set : Chain.t -> head:Block_hash.t -> Mempool.t -> unit Lwt.t end -type error += Incorrect_history_mode_switch of - { previous_mode: History_mode.t ; next_mode: History_mode.t } +type error += + | Incorrect_history_mode_switch of + { previous_mode : History_mode.t; + next_mode : History_mode.t } -val history_mode: global_state -> History_mode.t Lwt.t +val history_mode : global_state -> History_mode.t Lwt.t (** Read the internal state of the node and initialize the databases. 
*) -val init: +val init : ?patch_context:(Context.t -> Context.t Lwt.t) -> ?store_mapsize:int64 -> ?context_mapsize:int64 -> @@ -363,5 +416,4 @@ val init: Chain.genesis -> (global_state * Chain.t * Context.index * History_mode.t) tzresult Lwt.t -val close: - global_state -> unit Lwt.t +val close : global_state -> unit Lwt.t diff --git a/src/lib_shell/store.ml b/src/lib_shell/store.ml index f6f8a81c9c84232a14c40975e139cf6b62461575..13d167c248e6454f00976f18d35b16c6c8a9f82f 100644 --- a/src/lib_shell/store.ml +++ b/src/lib_shell/store.ml @@ -24,6 +24,7 @@ (*****************************************************************************) type t = Raw_store.t + type global_store = t (************************************************************************** @@ -31,12 +32,13 @@ type global_store = t **************************************************************************) module Configuration = struct - - module History_mode = Store_helpers.Make_single_store + module History_mode = + Store_helpers.Make_single_store (Raw_store) - (struct let name = ["history_mode"] end) - (Store_helpers.Make_value(History_mode)) - + (struct + let name = ["history_mode"] + end) + (Store_helpers.Make_value (History_mode)) end (************************************************************************** @@ -44,69 +46,90 @@ end **************************************************************************) module Chain = struct - type store = global_store * Chain_id.t + let get s id = (s, id) module Indexed_store = Store_helpers.Make_indexed_substore - (Store_helpers.Make_substore(Raw_store)(struct let name = ["chain"] end)) - (Chain_id) + (Store_helpers.Make_substore + (Raw_store) + (struct + let name = ["chain"] + end)) + (Chain_id) let destroy = Indexed_store.remove_all + let list t = - Indexed_store.fold_indexes t ~init:[] - ~f:(fun h acc -> Lwt.return (h :: acc)) + Indexed_store.fold_indexes t ~init:[] ~f:(fun h acc -> + Lwt.return (h :: acc)) module Genesis_hash = Store_helpers.Make_single_store (Indexed_store.Store) - (struct let name = ["genesis" ; "hash"] end) - (Store_helpers.Make_value(Block_hash)) + (struct + let name = ["genesis"; "hash"] + end) + (Store_helpers.Make_value (Block_hash)) module Genesis_time = Store_helpers.Make_single_store (Indexed_store.Store) - (struct let name = ["genesis" ; "time"] end) - (Store_helpers.Make_value(Time.Protocol)) + (struct + let name = ["genesis"; "time"] + end) + (Store_helpers.Make_value (Time.Protocol)) module Genesis_protocol = Store_helpers.Make_single_store (Indexed_store.Store) - (struct let name = ["genesis" ; "protocol"] end) - (Store_helpers.Make_value(Protocol_hash)) + (struct + let name = ["genesis"; "protocol"] + end) + (Store_helpers.Make_value (Protocol_hash)) module Genesis_test_protocol = Store_helpers.Make_single_store (Indexed_store.Store) - (struct let name = ["genesis" ; "test_protocol"] end) - (Store_helpers.Make_value(Protocol_hash)) + (struct + let name = ["genesis"; "test_protocol"] + end) + (Store_helpers.Make_value (Protocol_hash)) module Expiration = Store_helpers.Make_single_store (Indexed_store.Store) - (struct let name = ["expiration"] end) - (Store_helpers.Make_value(Time.Protocol)) + (struct + let name = ["expiration"] + end) + (Store_helpers.Make_value (Time.Protocol)) - module Allow_forked_chain = - Indexed_store.Make_set (struct let name = ["allow_forked_chain"] end) + module Allow_forked_chain = Indexed_store.Make_set (struct + let name = ["allow_forked_chain"] + end) module Protocol_index = Store_helpers.Make_indexed_substore 
(Store_helpers.Make_substore (Indexed_store.Store) - (struct let name = ["protocol"] end)) - (Store_helpers.Integer_index) + (struct + let name = ["protocol"] + end)) + (Store_helpers.Integer_index) module Protocol_info = Protocol_index.Make_map - (struct let name = ["info"] end) - (Store_helpers.Make_value(struct - type t = Protocol_hash.t * Int32.t - let encoding = - let open Data_encoding in - tup2 Protocol_hash.encoding int32 - end)) + (struct + let name = ["info"] + end) + (Store_helpers.Make_value (struct + type t = Protocol_hash.t * Int32.t + + let encoding = + let open Data_encoding in + tup2 Protocol_hash.encoding int32 + end)) end (************************************************************************** @@ -117,256 +140,303 @@ module Forking_block_hash = Store_helpers.Make_map (Store_helpers.Make_substore (Raw_store) - (struct let name = ["forking_block_hash"] end)) - (Chain_id) - (Store_helpers.Make_value(Block_hash)) + (struct + let name = ["forking_block_hash"] + end)) + (Chain_id) + (Store_helpers.Make_value (Block_hash)) (************************************************************************** * Block_header store under "chain/<id>/blocks/" **************************************************************************) module Block = struct - type store = Chain.store + let get x = x module Indexed_store = Store_helpers.Make_indexed_substore (Store_helpers.Make_substore (Chain.Indexed_store.Store) - (struct let name = ["blocks"] end)) - (Block_hash) + (struct + let name = ["blocks"] + end)) + (Block_hash) type contents = { - header : Block_header.t ; - message: string option ; - max_operations_ttl: int ; - last_allowed_fork_level: Int32.t ; - context: Context_hash.t ; - metadata: MBytes.t ; + header : Block_header.t; + message : string option; + max_operations_ttl : int; + last_allowed_fork_level : Int32.t; + context : Context_hash.t; + metadata : MBytes.t } module Contents = Store_helpers.Make_single_store (Indexed_store.Store) - (struct let name = ["contents"] end) - (Store_helpers.Make_value(struct - type t = contents - let encoding = - let open Data_encoding in - conv - (fun { header ; message ; max_operations_ttl ; - last_allowed_fork_level ; - context ; metadata } -> - (message, max_operations_ttl, last_allowed_fork_level, - context, metadata, header )) - (fun (message, max_operations_ttl, last_allowed_fork_level, - context, metadata, header ) -> - { header ; message ; max_operations_ttl ; - last_allowed_fork_level ; - context ; metadata }) - (obj6 - (opt "message" string) - (req "max_operations_ttl" uint16) - (req "last_allowed_fork_level" int32) - (req "context" Context_hash.encoding) - (req "metadata" bytes) - (req "header" Block_header.encoding)) - end)) - - type pruned_contents = { - header : Block_header.t ; - } + (struct + let name = ["contents"] + end) + (Store_helpers.Make_value (struct + type t = contents + + let encoding = + let open Data_encoding in + conv + (fun { header; + message; + max_operations_ttl; + last_allowed_fork_level; + context; + metadata } -> + ( message, + max_operations_ttl, + last_allowed_fork_level, + context, + metadata, + header )) + (fun ( message, + max_operations_ttl, + last_allowed_fork_level, + context, + metadata, + header ) -> + { header; + message; + max_operations_ttl; + last_allowed_fork_level; + context; + metadata }) + (obj6 + (opt "message" string) + (req "max_operations_ttl" uint16) + (req "last_allowed_fork_level" int32) + (req "context" Context_hash.encoding) + (req "metadata" bytes) + (req "header" 
Block_header.encoding)) + end)) + + type pruned_contents = {header : Block_header.t} module Pruned_contents = Store_helpers.Make_single_store (Indexed_store.Store) - (struct let name = ["pruned_contents"] end) - (Store_helpers.Make_value(struct - type t = pruned_contents - let encoding = - let open Data_encoding in - conv - (fun { header } -> header) - (fun header -> { header }) - (obj1 (req "header" Block_header.encoding)) - end)) + (struct + let name = ["pruned_contents"] + end) + (Store_helpers.Make_value (struct + type t = pruned_contents + + let encoding = + let open Data_encoding in + conv + (fun {header} -> header) + (fun header -> {header}) + (obj1 (req "header" Block_header.encoding)) + end)) module Operations_index = Store_helpers.Make_indexed_substore (Store_helpers.Make_substore (Indexed_store.Store) - (struct let name = ["operations"] end)) - (Store_helpers.Integer_index) + (struct + let name = ["operations"] + end)) + (Store_helpers.Integer_index) module Operation_hashes = Operations_index.Make_map - (struct let name = ["hashes"] end) - (Store_helpers.Make_value(struct - type t = Operation_hash.t list - let encoding = Data_encoding.list Operation_hash.encoding - end)) + (struct + let name = ["hashes"] + end) + (Store_helpers.Make_value (struct + type t = Operation_hash.t list + + let encoding = Data_encoding.list Operation_hash.encoding + end)) module Operations = Operations_index.Make_map - (struct let name = ["contents"] end) - (Store_helpers.Make_value(struct - type t = Operation.t list - let encoding = Data_encoding.(list (dynamic_size Operation.encoding)) - end)) + (struct + let name = ["contents"] + end) + (Store_helpers.Make_value (struct + type t = Operation.t list + + let encoding = Data_encoding.(list (dynamic_size Operation.encoding)) + end)) module Operations_metadata = Operations_index.Make_map - (struct let name = ["metadata"] end) - (Store_helpers.Make_value(struct - type t = MBytes.t list - let encoding = Data_encoding.(list bytes) - end)) + (struct + let name = ["metadata"] + end) + (Store_helpers.Make_value (struct + type t = MBytes.t list - type invalid_block = { - level: int32 ; - errors: Error_monad.error list ; - } + let encoding = Data_encoding.(list bytes) + end)) + + type invalid_block = {level : int32; errors : Error_monad.error list} module Invalid_block = Store_helpers.Make_map (Store_helpers.Make_substore (Chain.Indexed_store.Store) - (struct let name = ["invalid_blocks"] end)) - (Block_hash) - (Store_helpers.Make_value(struct - type t = invalid_block - let encoding = - let open Data_encoding in - conv - (fun { level ; errors } -> (level, errors)) - (fun (level, errors) -> { level ; errors }) - (tup2 int32 (list Error_monad.error_encoding)) - end)) + (struct + let name = ["invalid_blocks"] + end)) + (Block_hash) + (Store_helpers.Make_value (struct + type t = invalid_block + + let encoding = + let open Data_encoding in + conv + (fun {level; errors} -> (level, errors)) + (fun (level, errors) -> {level; errors}) + (tup2 int32 (list Error_monad.error_encoding)) + end)) let register s = - Base58.register_resolver Block_hash.b58check_encoding begin fun str -> - let pstr = Block_hash.prefix_path str in - Chain.Indexed_store.fold_indexes s ~init:[] - ~f:begin fun chain acc -> - Indexed_store.resolve_index (s, chain) pstr >>= fun l -> - Lwt.return (List.rev_append l acc) - end - end + Base58.register_resolver Block_hash.b58check_encoding (fun str -> + let pstr = Block_hash.prefix_path str in + Chain.Indexed_store.fold_indexes s ~init:[] ~f:(fun chain acc 
-> + Indexed_store.resolve_index (s, chain) pstr + >>= fun l -> Lwt.return (List.rev_append l acc))) module Predecessors = Store_helpers.Make_map (Store_helpers.Make_substore (Indexed_store.Store) - (struct let name = ["predecessors"] end)) - (Store_helpers.Integer_index) - (Store_helpers.Make_value(Block_hash)) - + (struct + let name = ["predecessors"] + end)) + (Store_helpers.Integer_index) + (Store_helpers.Make_value (Block_hash)) end - (************************************************************************** * Blockchain data **************************************************************************) module Chain_data = struct - type store = Chain.store + let get s = s module Known_heads = Store_helpers.Make_buffered_set (Store_helpers.Make_substore (Chain.Indexed_store.Store) - (struct let name = ["known_heads"] end)) - (Block_hash) + (struct + let name = ["known_heads"] + end)) + (Block_hash) (Block_hash.Set) module Current_head = Store_helpers.Make_single_store (Chain.Indexed_store.Store) - (struct let name = ["current_head"] end) - (Store_helpers.Make_value(Block_hash)) + (struct + let name = ["current_head"] + end) + (Store_helpers.Make_value (Block_hash)) module In_main_branch = Store_helpers.Make_single_store (Block.Indexed_store.Store) - (struct let name = ["in_chain"] end) - (Store_helpers.Make_value(Block_hash)) (* successor *) + (struct + let name = ["in_chain"] + end) + (Store_helpers.Make_value (Block_hash)) + + (* successor *) module Checkpoint = Store_helpers.Make_single_store (Chain.Indexed_store.Store) - (struct let name = ["checkpoint"] end) - (Store_helpers.Make_value(Block_header)) + (struct + let name = ["checkpoint"] + end) + (Store_helpers.Make_value (Block_header)) module Save_point = Store_helpers.Make_single_store (Chain.Indexed_store.Store) - (struct let name = ["save_point"] end) - (Store_helpers.Make_value(struct - type t = Int32.t * Block_hash.t - let encoding = - let open Data_encoding in - tup2 int32 Block_hash.encoding - end)) + (struct + let name = ["save_point"] + end) + (Store_helpers.Make_value (struct + type t = Int32.t * Block_hash.t + + let encoding = + let open Data_encoding in + tup2 int32 Block_hash.encoding + end)) module Caboose = Store_helpers.Make_single_store (Chain.Indexed_store.Store) - (struct let name = ["caboose"] end) - (Store_helpers.Make_value(struct - type t = Int32.t * Block_hash.t - let encoding = - let open Data_encoding in - tup2 int32 Block_hash.encoding - end)) - + (struct + let name = ["caboose"] + end) + (Store_helpers.Make_value (struct + type t = Int32.t * Block_hash.t + + let encoding = + let open Data_encoding in + tup2 int32 Block_hash.encoding + end)) end - (************************************************************************** * Protocol store under "protocols/" **************************************************************************) module Protocol = struct - type store = global_store + let get x = x module Indexed_store = Store_helpers.Make_indexed_substore (Store_helpers.Make_substore (Raw_store) - (struct let name = ["protocols"] end)) - (Protocol_hash) + (struct + let name = ["protocols"] + end)) + (Protocol_hash) module Contents = Indexed_store.Make_map - (struct let name = ["contents"] end) - (Store_helpers.Make_value(Protocol)) + (struct + let name = ["contents"] + end) + (Store_helpers.Make_value (Protocol)) module RawContents = Store_helpers.Make_single_store (Indexed_store.Store) - (struct let name = ["contents"] end) + (struct + let name = ["contents"] + end) (Store_helpers.Raw_value) let 
register s = - Base58.register_resolver Protocol_hash.b58check_encoding begin fun str -> - let pstr = Protocol_hash.prefix_path str in - Indexed_store.resolve_index s pstr - end - + Base58.register_resolver Protocol_hash.b58check_encoding (fun str -> + let pstr = Protocol_hash.prefix_path str in + Indexed_store.resolve_index s pstr) end let init ?readonly ?mapsize dir = - Raw_store.init ?readonly ?mapsize dir >>=? fun s -> - Block.register s ; - Protocol.register s ; - return s + Raw_store.init ?readonly ?mapsize dir + >>=? fun s -> Block.register s ; Protocol.register s ; return s let close = Raw_store.close let open_with_atomic_rw = Raw_store.open_with_atomic_rw + let with_atomic_rw = Raw_store.with_atomic_rw diff --git a/src/lib_shell/store.mli b/src/lib_shell/store.mli index 91ef353c571120e8f09443676b1863e7f9234014..2060a020c4c32d32cb288f1fb422367f41f2715d 100644 --- a/src/lib_shell/store.mli +++ b/src/lib_shell/store.mli @@ -26,128 +26,117 @@ open Store_sigs type t + type global_store = t (** [init ~mapsize path] returns an initialized store at [path] of maximum capacity [mapsize] bytes. *) -val init: ?readonly:bool -> ?mapsize:int64 -> string -> t tzresult Lwt.t -val close: t -> unit +val init : ?readonly:bool -> ?mapsize:int64 -> string -> t tzresult Lwt.t + +val close : t -> unit -val open_with_atomic_rw: - ?mapsize:int64 -> string -> +val open_with_atomic_rw : + ?mapsize:int64 -> + string -> (t -> 'a Error_monad.tzresult Lwt.t) -> 'a tzresult Lwt.t -val with_atomic_rw: - t -> - (unit -> 'a Lwt.t) -> - 'a Lwt.t +val with_atomic_rw : t -> (unit -> 'a Lwt.t) -> 'a Lwt.t (** {2 Configuration} **********************************************************) module Configuration : sig - - module History_mode : SINGLE_STORE - with type t := global_store - and type value := History_mode.t + module History_mode : + SINGLE_STORE with type t := global_store and type value := History_mode.t end (** {2 Chain store} **********************************************************) module Chain : sig + val list : global_store -> Chain_id.t list Lwt.t - val list: global_store -> Chain_id.t list Lwt.t - val destroy: global_store -> Chain_id.t -> unit Lwt.t + val destroy : global_store -> Chain_id.t -> unit Lwt.t type store - val get: global_store -> Chain_id.t -> store - module Genesis_hash : SINGLE_STORE - with type t := store - and type value := Block_hash.t + val get : global_store -> Chain_id.t -> store - module Genesis_time : SINGLE_STORE - with type t := store - and type value := Time.Protocol.t + module Genesis_hash : + SINGLE_STORE with type t := store and type value := Block_hash.t - module Genesis_protocol : SINGLE_STORE - with type t := store - and type value := Protocol_hash.t + module Genesis_time : + SINGLE_STORE with type t := store and type value := Time.Protocol.t - module Genesis_test_protocol : SINGLE_STORE - with type t := store - and type value := Protocol_hash.t + module Genesis_protocol : + SINGLE_STORE with type t := store and type value := Protocol_hash.t - module Expiration : SINGLE_STORE - with type t := store - and type value := Time.Protocol.t + module Genesis_test_protocol : + SINGLE_STORE with type t := store and type value := Protocol_hash.t - module Allow_forked_chain : SET_STORE - with type t := t - and type elt := Chain_id.t + module Expiration : + SINGLE_STORE with type t := store and type value := Time.Protocol.t - module Protocol_info : MAP_STORE - with type t = store - and type key = int - and type value = Protocol_hash.t * Int32.t + module Allow_forked_chain : + 
SET_STORE with type t := t and type elt := Chain_id.t + module Protocol_info : + MAP_STORE + with type t = store + and type key = int + and type value = Protocol_hash.t * Int32.t end - (** {2 Mutable chain data} *) module Chain_data : sig - type store - val get: Chain.store -> store - module Current_head : SINGLE_STORE - with type t := store - and type value := Block_hash.t + val get : Chain.store -> store - module Known_heads : BUFFERED_SET_STORE - with type t := store - and type elt := Block_hash.t - and module Set := Block_hash.Set + module Current_head : + SINGLE_STORE with type t := store and type value := Block_hash.t - module In_main_branch : SINGLE_STORE - with type t = store * Block_hash.t - and type value := Block_hash.t (* successor *) + module Known_heads : + BUFFERED_SET_STORE + with type t := store + and type elt := Block_hash.t + and module Set := Block_hash.Set - module Checkpoint : SINGLE_STORE - with type t := store - and type value := Block_header.t + module In_main_branch : + SINGLE_STORE + with type t = store * Block_hash.t + and type value := Block_hash.t - module Save_point : SINGLE_STORE - with type t := store - and type value := Int32.t * Block_hash.t + (* successor *) - module Caboose : SINGLE_STORE - with type t := store - and type value := Int32.t * Block_hash.t + module Checkpoint : + SINGLE_STORE with type t := store and type value := Block_header.t -end + module Save_point : + SINGLE_STORE with type t := store and type value := Int32.t * Block_hash.t + module Caboose : + SINGLE_STORE with type t := store and type value := Int32.t * Block_hash.t +end (** {2 Block header store} *) module Block : sig - type store - val get: Chain.store -> store + + val get : Chain.store -> store type contents = { - header: Block_header.t ; - message: string option ; - max_operations_ttl: int ; - last_allowed_fork_level: Int32.t ; - context: Context_hash.t ; - metadata: MBytes.t ; + header : Block_header.t; + message : string option; + max_operations_ttl : int; + last_allowed_fork_level : Int32.t; + context : Context_hash.t; + metadata : MBytes.t } - module Contents : SINGLE_STORE - with type t = store * Block_hash.t - and type value := contents + module Contents : + SINGLE_STORE with type t = store * Block_hash.t and type value := contents (** Block header storage used for pruned blocks. Blocks that are not pruned have their header @@ -156,73 +145,74 @@ module Block : sig the {!State.Block.Header} module. 
*) - type pruned_contents = { - header: Block_header.t ; - } + type pruned_contents = {header : Block_header.t} - module Pruned_contents : SINGLE_STORE - with type t = store * Block_hash.t - and type value := pruned_contents + module Pruned_contents : + SINGLE_STORE + with type t = store * Block_hash.t + and type value := pruned_contents - module Operation_hashes : MAP_STORE - with type t = store * Block_hash.t - and type key = int - and type value = Operation_hash.t list + module Operation_hashes : + MAP_STORE + with type t = store * Block_hash.t + and type key = int + and type value = Operation_hash.t list - module Operations : MAP_STORE - with type t = store * Block_hash.t - and type key = int - and type value = Operation.t list + module Operations : + MAP_STORE + with type t = store * Block_hash.t + and type key = int + and type value = Operation.t list - module Operations_metadata : MAP_STORE - with type t = store * Block_hash.t - and type key = int - and type value = MBytes.t list + module Operations_metadata : + MAP_STORE + with type t = store * Block_hash.t + and type key = int + and type value = MBytes.t list - type invalid_block = { - level: int32 ; - errors: Error_monad.error list ; - } + type invalid_block = {level : int32; errors : Error_monad.error list} - module Invalid_block : MAP_STORE - with type t = store - and type key = Block_hash.t - and type value = invalid_block + module Invalid_block : + MAP_STORE + with type t = store + and type key = Block_hash.t + and type value = invalid_block (** Block predecessors under [/blocks/<block_id>/predecessors/<distance>/<block_id>]. Used to compute block predecessors in [lib_node_shell/state.ml]. *) - module Predecessors : MAP_STORE - with type t = store * Block_hash.t - and type key = int - and type value = Block_hash.t - + module Predecessors : + MAP_STORE + with type t = store * Block_hash.t + and type key = int + and type value = Block_hash.t end - (** {2 Protocol store} *) module Protocol : sig - type store - val get: global_store -> store - module Contents : MAP_STORE - with type t := store - and type key := Protocol_hash.t - and type value := Protocol.t + val get : global_store -> store - module RawContents : SINGLE_STORE - with type t = store * Protocol_hash.t - and type value := MBytes.t + module Contents : + MAP_STORE + with type t := store + and type key := Protocol_hash.t + and type value := Protocol.t + module RawContents : + SINGLE_STORE + with type t = store * Protocol_hash.t + and type value := MBytes.t end (** {2 Temporary test chain forking block store} *) -module Forking_block_hash : MAP_STORE - with type t = global_store - and type key := Chain_id.t - and type value := Block_hash.t +module Forking_block_hash : + MAP_STORE + with type t = global_store + and type key := Chain_id.t + and type value := Block_hash.t diff --git a/src/lib_shell/test/assert.ml b/src/lib_shell/test/assert.ml index 738048003ba851964b40dee84cd5f32c65852dfd..71498bdef0965439ec98845e38a190339a1c4ca3 100644 --- a/src/lib_shell/test/assert.ml +++ b/src/lib_shell/test/assert.ml @@ -24,103 +24,135 @@ (*****************************************************************************) let fail expected given msg = - Format.kasprintf Pervasives.failwith - "@[%s@ expected: %s@ got: %s@]" msg expected given + Format.kasprintf + Pervasives.failwith + "@[%s@ expected: %s@ got: %s@]" + msg + expected + given -let fail_msg ?(expected="") ?(given="") fmt = +let fail_msg ?(expected = "") ?(given = "") fmt = Format.kasprintf (fail expected given) fmt let 
default_printer _ = "" -let equal ?(eq=(=)) ?(prn=default_printer) ?(msg="") x y = +let equal ?(eq = ( = )) ?(prn = default_printer) ?(msg = "") x y = if not (eq x y) then fail (prn x) (prn y) msg let equal_operation ?msg op1 op2 = let eq op1 op2 = - match op1, op2 with - | None, None -> true - | Some op1, Some op2 -> + match (op1, op2) with + | (None, None) -> + true + | (Some op1, Some op2) -> Operation.equal op1 op2 - | _ -> false in + | _ -> + false + in let prn = function - | None -> "none" - | Some op -> Operation_hash.to_b58check (Operation.hash op) in + | None -> + "none" + | Some op -> + Operation_hash.to_b58check (Operation.hash op) + in equal ?msg ~prn ~eq op1 op2 let equal_block ?msg st1 st2 = let eq st1 st2 = - match st1, st2 with - | None, None -> true - | Some st1, Some st2 -> Block_header.equal st1 st2 - | _ -> false in + match (st1, st2) with + | (None, None) -> + true + | (Some st1, Some st2) -> + Block_header.equal st1 st2 + | _ -> + false + in let prn = function - | None -> "none" - | Some st -> Block_hash.to_b58check (Block_header.hash st) in + | None -> + "none" + | Some st -> + Block_hash.to_b58check (Block_header.hash st) + in equal ?msg ~prn ~eq st1 st2 -let make_equal_list eq prn ?(msg="") x y = +let make_equal_list eq prn ?(msg = "") x y = let rec iter i x y = - match x, y with - | hd_x :: tl_x, hd_y :: tl_y -> - if eq hd_x hd_y then - iter (succ i) tl_x tl_y + match (x, y) with + | (hd_x :: tl_x, hd_y :: tl_y) -> + if eq hd_x hd_y then iter (succ i) tl_x tl_y else - fail_msg ~expected:(prn hd_x) ~given:(prn hd_y) - "%s (at index %d)" msg i - | _ :: _, [] | [], _ :: _ -> - fail_msg ~expected:"" ~given:"" - "%s (lists of different sizes %d %d)" msg - (List.length x) (List.length y) - | [], [] -> - () in + fail_msg + ~expected:(prn hd_x) + ~given:(prn hd_y) + "%s (at index %d)" + msg + i + | (_ :: _, []) | ([], _ :: _) -> + fail_msg + ~expected:"" + ~given:"" + "%s (lists of different sizes %d %d)" + msg + (List.length x) + (List.length y) + | ([], []) -> + () + in iter 0 x y let equal_string_list ?msg l1 l2 = - make_equal_list ?msg (=) (fun x -> x) l1 l2 + make_equal_list ?msg ( = ) (fun x -> x) l1 l2 let equal_string_list_list ?msg l1 l2 = let pr_persist l = let res = - String.concat ";" (List.map (fun s -> Printf.sprintf "%S" s) l) in - Printf.sprintf "[%s]" res in - make_equal_list ?msg (=) pr_persist l1 l2 + String.concat ";" (List.map (fun s -> Printf.sprintf "%S" s) l) + in + Printf.sprintf "[%s]" res + in + make_equal_list ?msg ( = ) pr_persist l1 l2 let equal_block_set ?msg set1 set2 = let b1 = Block_hash.Set.elements set1 and b2 = Block_hash.Set.elements set2 in - make_equal_list ?msg + make_equal_list + ?msg (fun h1 h2 -> Block_hash.equal h1 h2) Block_hash.to_string - b1 b2 + b1 + b2 let equal_block_map ?msg ~eq map1 map2 = let b1 = Block_hash.Map.bindings map1 and b2 = Block_hash.Map.bindings map2 in - make_equal_list ?msg + make_equal_list + ?msg (fun (h1, b1) (h2, b2) -> Block_hash.equal h1 h2 && eq b1 b2) (fun (h1, _) -> Block_hash.to_string h1) - b1 b2 + b1 + b2 let equal_block_hash_list ?msg l1 l2 = let pr_block_hash = Block_hash.to_short_b58check in make_equal_list ?msg Block_hash.equal pr_block_hash l1 l2 -let is_false ?(msg="") x = - if x then fail "false" "true" msg +let is_false ?(msg = "") x = if x then fail "false" "true" msg -let is_true ?(msg="") x = - if not x then fail "true" "false" msg +let is_true ?(msg = "") x = if not x then fail "true" "false" msg let equal_checkpoint ?msg cp1 cp2 = let eq cp1 cp2 = - match cp1, cp2 with - | None, 
None -> true - | Some (x, bh1), Some (y, bh2) -> - Int32.equal x y && - (Block_hash.equal bh1 bh2) - | _ -> false in + match (cp1, cp2) with + | (None, None) -> + true + | (Some (x, bh1), Some (y, bh2)) -> + Int32.equal x y && Block_hash.equal bh1 bh2 + | _ -> + false + in let prn = function - | None -> "none" + | None -> + "none" | Some (_x, bh) -> (*let s = Printf.sprintf "%s" x in*) Block_hash.to_b58check bh diff --git a/src/lib_shell/test/test.ml b/src/lib_shell/test/test.ml index 6e4a99652678f007e8ed66b8186e9e5c8344acf5..e5bca0fc8b63dd8cdf772fdc75e461d36238adb1 100644 --- a/src/lib_shell/test/test.ml +++ b/src/lib_shell/test/test.ml @@ -25,11 +25,11 @@ (*****************************************************************************) let () = - Alcotest.run "tezos-state" [ - "store", Test_store.tests ; - "state", Test_state.tests ; - "store checkpoint", Test_store_checkpoint.tests ; - "state checkpoint", Test_state_checkpoint.tests ; - "pipeline-order", Test_pipeline.Order.tests ; - "pipeline-many-passes", Test_pipeline.Many_passes.tests ; - ] + Alcotest.run + "tezos-state" + [ ("store", Test_store.tests); + ("state", Test_state.tests); + ("store checkpoint", Test_store_checkpoint.tests); + ("state checkpoint", Test_state_checkpoint.tests); + ("pipeline-order", Test_pipeline.Order.tests); + ("pipeline-many-passes", Test_pipeline.Many_passes.tests) ] diff --git a/src/lib_shell/test/test_locator.ml b/src/lib_shell/test/test_locator.ml index 6c505ccdfa7b53f806a60b3633a5bca149abbd70..773944068dd29e7bf304c1e4c031432a4aa37f3f 100644 --- a/src/lib_shell/test/test_locator.ml +++ b/src/lib_shell/test/test_locator.ml @@ -23,7 +23,7 @@ (* *) (*****************************************************************************) -let (//) = Filename.concat +let ( // ) = Filename.concat (** Basic blocks *) @@ -38,11 +38,9 @@ let genesis_protocol = let genesis_time = Time.Protocol.of_seconds 0L let state_genesis_block = - { - State.Chain.time = genesis_time ; - State.Chain.block= genesis_hash ; - State.Chain.protocol = genesis_protocol - } + { State.Chain.time = genesis_time; + State.Chain.block = genesis_hash; + State.Chain.protocol = genesis_protocol } let chain_id = Chain_id.of_block_hash genesis_hash @@ -54,205 +52,220 @@ let incr_timestamp timestamp = let incr_fitness fitness = let new_fitness = match fitness with - | [ fitness ] -> + | [fitness] -> Pervasives.( Data_encoding.Binary.of_bytes Data_encoding.int64 fitness - |> Option.unopt ~default:0L - |> Int64.succ - |> Data_encoding.Binary.to_bytes_exn Data_encoding.int64 - ) - | _ -> Data_encoding.Binary.to_bytes_exn Data_encoding.int64 1L + |> Option.unopt ~default:0L |> Int64.succ + |> Data_encoding.Binary.to_bytes_exn Data_encoding.int64) + | _ -> + Data_encoding.Binary.to_bytes_exn Data_encoding.int64 1L in - [ new_fitness ] - + [new_fitness] (* returns a new state with a single block, genesis *) let init_chain base_dir : State.Chain.t Lwt.t = let store_root = base_dir // "store" in let context_root = base_dir // "context" in State.init - ~store_root ~context_root ~history_mode:Archive - state_genesis_block >>= function - | Error _ -> Pervasives.failwith "read err" + ~store_root + ~context_root + ~history_mode:Archive + state_genesis_block + >>= function + | Error _ -> + Pervasives.failwith "read err" | Ok (_state, chain, _index, _history_mode) -> Lwt.return chain - -let block_header - ?(context = Context_hash.zero) - (pred : State.Block.t) : Block_header.t = +let block_header ?(context = Context_hash.zero) (pred : State.Block.t) : + Block_header.t 
= let pred_header = State.Block.shell_header pred in let timestamp = incr_timestamp pred_header.timestamp in let fitness = incr_fitness pred_header.fitness in - { - Block_header.shell = - { - level = Int32.add Int32.one (State.Block.level pred); + { Block_header.shell = + { level = Int32.add Int32.one (State.Block.level pred); proto_level = 0; predecessor = State.Block.hash pred; - timestamp = timestamp; + timestamp; validation_passes = 0; operations_hash = Operation_list_list_hash.empty; - fitness = fitness ; - context ; - } ; - Block_header.protocol_data = MBytes.of_string "" ; - } + fitness; + context }; + Block_header.protocol_data = MBytes.of_string "" } let zero = MBytes.create 0 (* adds n blocks on top of an initialized chain *) -let make_empty_chain (chain:State.Chain.t) n : Block_hash.t Lwt.t = - State.Block.read_opt chain genesis_hash >|= Option.unopt_assert ~loc:__POS__ >>= fun genesis -> - State.Block.context genesis >>= fun empty_context -> +let make_empty_chain (chain : State.Chain.t) n : Block_hash.t Lwt.t = + State.Block.read_opt chain genesis_hash + >|= Option.unopt_assert ~loc:__POS__ + >>= fun genesis -> + State.Block.context genesis + >>= fun empty_context -> let header = State.Block.header genesis in let timestamp = State.Block.timestamp genesis in Context.hash ~time:timestamp empty_context >>= fun empty_context_hash -> - Context.commit - ~time:header.shell.timestamp empty_context >>= fun context -> - let header = { header with shell = { header.shell with context } } in - let empty_result = { - State.Block. - context_hash = empty_context_hash ; - message = None ; - max_operations_ttl = 0 ; - last_allowed_fork_level = 0l ; - } in + Context.commit ~time:header.shell.timestamp empty_context + >>= fun context -> + let header = {header with shell = {header.shell with context}} in + let empty_result = + { State.Block.context_hash = empty_context_hash; + message = None; + max_operations_ttl = 0; + last_allowed_fork_level = 0l } + in let rec loop lvl pred = - if lvl >= n then - return pred + if lvl >= n then return pred else let header = { header with - shell = { header.shell with predecessor = pred ; - level = Int32.of_int lvl } } in - State.Block.store chain header zero [] [] empty_result ~forking_testchain:false >>=? fun _ -> - loop (lvl+1) (Block_header.hash header) + shell = + {header.shell with predecessor = pred; level = Int32.of_int lvl} } + in + State.Block.store + chain + header + zero + [] + [] + empty_result + ~forking_testchain:false + >>=? fun _ -> loop (lvl + 1) (Block_header.hash header) in - loop 1 genesis_hash >>= function - | Ok b -> Lwt.return b + loop 1 genesis_hash + >>= function + | Ok b -> + Lwt.return b | Error err -> Error_monad.pp_print_error Format.err_formatter err ; assert false - - - (* helper functions ------------------------------------- *) (* wall clock time of a unit function *) -let time1 (f: unit -> 'a) : 'a * float = +let time1 (f : unit -> 'a) : 'a * float = let t = Unix.gettimeofday () in let res = f () in let wall_clock = Unix.gettimeofday () -. t in - (res,wall_clock) + (res, wall_clock) (* returns result from first run and average time of [runs] runs *) -let time ?(runs=1) f = - if runs < 1 then invalid_arg "time negative arg" else +let time ?(runs = 1) f = + if runs < 1 then invalid_arg "time negative arg" + else let rec loop cnt sum = - if cnt = (runs) - then sum + if cnt = runs then sum else - let (_,t) = time1 f in - loop (cnt+1) (sum+.t) + let (_, t) = time1 f in + loop (cnt + 1) (sum +. 
t) in - let (res,t) = time1 f in + let (res, t) = time1 f in let sum = loop 1 t in - (res, sum /. (float runs)) + (res, sum /. float runs) let rec repeat f n = - if n<0 then invalid_arg "repeat: negative arg" else - if n=0 then () - else let _ = f () in repeat f (n-1) + if n < 0 then invalid_arg "repeat: negative arg" + else if n = 0 then () + else + let _ = f () in + repeat f (n - 1) (* ----------------------------------------------------- *) let print_block b = - Printf.printf "%6i %s\n" + Printf.printf + "%6i %s\n" (Int32.to_int (State.Block.level b)) (Block_hash.to_b58check (State.Block.hash b)) let print_block_h chain bh = - State.Block.read_opt chain bh >|= Option.unopt_assert ~loc:__POS__ >|= fun b -> - print_block b - - + State.Block.read_opt chain bh + >|= Option.unopt_assert ~loc:__POS__ + >|= fun b -> print_block b (* returns the predecessor at distance one, reading the header *) -let linear_predecessor chain (bh: Block_hash.t) : Block_hash.t option Lwt.t = - State.Block.read_opt chain bh >|= Option.unopt_assert ~loc:__POS__ >>= fun b -> - State.Block.predecessor b >|= function - | None -> None - | Some pred -> Some (State.Block.hash pred) +let linear_predecessor chain (bh : Block_hash.t) : Block_hash.t option Lwt.t = + State.Block.read_opt chain bh + >|= Option.unopt_assert ~loc:__POS__ + >>= fun b -> + State.Block.predecessor b + >|= function None -> None | Some pred -> Some (State.Block.hash pred) let print_chain chain bh = let rec loop bh cnt = let _ = print_block_h chain bh in - linear_predecessor chain bh >>= function - | Some pred -> loop pred (cnt+1) - | None -> Lwt.return_unit + linear_predecessor chain bh + >>= function Some pred -> loop pred (cnt + 1) | None -> Lwt.return_unit in loop bh 0 - (* returns the predecessors at ditance n, traversing all n intermediate blocks *) -let linear_predecessor_n (chain:State.Chain.t) (bh:Block_hash.t) (distance:int) - : Block_hash.t option Lwt.t = +let linear_predecessor_n (chain : State.Chain.t) (bh : Block_hash.t) + (distance : int) : Block_hash.t option Lwt.t = (* let _ = Printf.printf "LP: %4i " distance; print_block_h chain bh in *) - if distance < 1 then invalid_arg "distance<1" else + if distance < 1 then invalid_arg "distance<1" + else let rec loop bh distance = - if distance = 0 - then Lwt.return_some bh (* reached distance *) + if distance = 0 then Lwt.return_some bh (* reached distance *) else - linear_predecessor chain bh >>= function - | None -> Lwt.return_none - | Some pred -> - loop pred (distance-1) + linear_predecessor chain bh + >>= function + | None -> Lwt.return_none | Some pred -> loop pred (distance - 1) in loop bh distance - - (* Tests that the linear predecessor defined above and the exponential predecessor implemented in State.predecessor_n return the same block and it is the block at the distance requested *) -let test_pred (base_dir:string) : unit tzresult Lwt.t = +let test_pred (base_dir : string) : unit tzresult Lwt.t = let size_chain = 1000 in - init_chain base_dir >>= fun chain -> - make_empty_chain chain size_chain >>= fun head -> - + init_chain base_dir + >>= fun chain -> + make_empty_chain chain size_chain + >>= fun head -> let test_once distance = - linear_predecessor_n chain head distance >>= fun lin_res -> - State.Block.read_opt chain head >|= Option.unopt_assert ~loc:__POS__ >>= fun head_block -> - State.Block.predecessor_n head_block distance >>= fun exp_res -> - match lin_res,exp_res with - | None, None -> + linear_predecessor_n chain head distance + >>= fun lin_res -> + State.Block.read_opt 
chain head + >|= Option.unopt_assert ~loc:__POS__ + >>= fun head_block -> + State.Block.predecessor_n head_block distance + >>= fun exp_res -> + match (lin_res, exp_res) with + | (None, None) -> Lwt.return_unit - | None,Some _ | Some _,None -> + | (None, Some _) | (Some _, None) -> Assert.fail_msg "mismatch between exponential and linear predecessor_n" - | Some lin_res, Some exp_res -> + | (Some lin_res, Some exp_res) -> (* check that the two results are the same *) - (assert (lin_res = exp_res)); - State.Block.read_opt chain lin_res >|= Option.unopt_assert ~loc:__POS__ >>= fun pred -> + assert (lin_res = exp_res) ; + State.Block.read_opt chain lin_res + >|= Option.unopt_assert ~loc:__POS__ + >>= fun pred -> let level_pred = Int32.to_int (State.Block.level pred) in - State.Block.read_opt chain head >|= Option.unopt_assert ~loc:__POS__ >>= fun head -> + State.Block.read_opt chain head + >|= Option.unopt_assert ~loc:__POS__ + >>= fun head -> let level_start = Int32.to_int (State.Block.level head) in (* check distance using the level *) - assert (level_start - distance = level_pred); + assert (level_start - distance = level_pred) ; Lwt.return_unit in let _ = Random.self_init () in - let range = size_chain+(size_chain/10) in + let range = size_chain + (size_chain / 10) in let repeats = 100 in return (repeat (fun () -> test_once (1 + Random.int range)) repeats) let seed = - let receiver_id = P2p_peer.Id.of_string_exn (String.make P2p_peer.Id.size 'r') in - let sender_id = P2p_peer.Id.of_string_exn (String.make P2p_peer.Id.size 's') in - {Block_locator.receiver_id=receiver_id ; sender_id } + let receiver_id = + P2p_peer.Id.of_string_exn (String.make P2p_peer.Id.size 'r') + in + let sender_id = + P2p_peer.Id.of_string_exn (String.make P2p_peer.Id.size 's') + in + {Block_locator.receiver_id; sender_id} (* compute locator using the linear predecessor *) let compute_linear_locator chain_state ~size block = @@ -260,21 +273,22 @@ let compute_linear_locator chain_state ~size block = let header = State.Block.header block in Block_locator.compute ~get_predecessor:(linear_predecessor_n chain_state) - block_hash header ~size seed - + block_hash + header + ~size + seed (* given the size of a chain, returns the size required for a locator to reach genesis *) let compute_size_locator size_chain = let repeats = 10. in - int_of_float ((log ((float size_chain) /. repeats)) /. (log 2.) -. 1.) * 10 + int_of_float ((log (float size_chain /. repeats) /. log 2.) -. 1.) * 10 (* given the size of a locator, returns the size of the chain that it can cover back to genesis *) let compute_size_chain size_locator = let repeats = 10. in - int_of_float (repeats *. (2. ** (float (size_locator + 1)))) - + int_of_float (repeats *. (2. ** float (size_locator + 1))) (* test if the linear and exponential locator are the same and outputs their timing. 
@@ -298,77 +312,66 @@ let test_locator base_dir = (* size after which locator always reaches genesis *) let locator_limit = compute_size_locator size_chain in let _ = Printf.printf "#locator_limit %i\n" locator_limit in - - init_chain base_dir >>= fun chain -> - time1 (fun () -> - make_empty_chain chain size_chain) |> - fun (res, t_chain) -> - let _ = Printf.printf + init_chain base_dir + >>= fun chain -> + time1 (fun () -> make_empty_chain chain size_chain) + |> fun (res, t_chain) -> + let _ = + Printf.printf "#size_chain %i built in %f sec\n# size exp lins\n" - size_chain t_chain in - res >>= fun head -> - + size_chain + t_chain + in + res + >>= fun head -> let check_locator size : unit tzresult Lwt.t = - State.read_chain_data chain begin fun _ data -> - Lwt.return (data.caboose, data.save_point) - end >>= fun ((_, caboose), _save_point) -> - State.Block.read chain head >>=? begin fun block -> - time ~runs:runs (fun () -> - State.compute_locator chain ~size block seed) |> - fun (l_exp,t_exp) -> - time ~runs:runs (fun () -> - compute_linear_locator chain - ~caboose ~size block) - |> fun (l_lin,t_lin) -> - l_exp >>= fun l_exp -> - l_lin >>= fun l_lin -> - let _, l_exp = (l_exp : Block_locator.t :> _ * _) in - let _, l_lin = (l_lin : Block_locator.t :> _ * _) in - let _ = Printf.printf "%10i %f %f\n" size t_exp t_lin in - List.iter2 - (fun hn ho -> - if not (Block_hash.equal hn ho) - then - Assert.fail_msg "Invalid locator %i" size) - l_exp l_lin; - return_unit - end + State.read_chain_data chain (fun _ data -> + Lwt.return (data.caboose, data.save_point)) + >>= fun ((_, caboose), _save_point) -> + State.Block.read chain head + >>=? fun block -> + time ~runs (fun () -> State.compute_locator chain ~size block seed) + |> fun (l_exp, t_exp) -> + time ~runs (fun () -> compute_linear_locator chain ~caboose ~size block) + |> fun (l_lin, t_lin) -> + l_exp + >>= fun l_exp -> + l_lin + >>= fun l_lin -> + let (_, l_exp) = (l_exp : Block_locator.t :> _ * _) in + let (_, l_lin) = (l_lin : Block_locator.t :> _ * _) in + let _ = Printf.printf "%10i %f %f\n" size t_exp t_lin in + List.iter2 + (fun hn ho -> + if not (Block_hash.equal hn ho) then + Assert.fail_msg "Invalid locator %i" size) + l_exp + l_lin ; + return_unit in let stop = locator_limit + 20 in let rec loop size = - if size < stop then ( - check_locator size >>=? fun _ -> - loop (size+5) - ) + if size < stop then check_locator size >>=? 
fun _ -> loop (size + 5) else return_unit in loop 1 let wrap n f = - Alcotest_lwt.test_case n `Quick begin fun _ () -> - Lwt_utils_unix.with_tempdir "tezos_test_" begin fun dir -> - f dir >>= function - | Ok () -> Lwt.return_unit - | Error error -> - Format.kasprintf Pervasives.failwith "%a" pp_print_error error - end - end + Alcotest_lwt.test_case n `Quick (fun _ () -> + Lwt_utils_unix.with_tempdir "tezos_test_" (fun dir -> + f dir + >>= function + | Ok () -> + Lwt.return_unit + | Error error -> + Format.kasprintf Pervasives.failwith "%a" pp_print_error error)) -let tests = - [ wrap "test pred" test_pred ] +let tests = [wrap "test pred" test_pred] -let bench = [ wrap "test locator" test_locator ] +let bench = [wrap "test locator" test_locator] let tests = - try - if Sys.argv.(1) = "--no-bench" then - tests - else - tests @ bench + try if Sys.argv.(1) = "--no-bench" then tests else tests @ bench with _ -> tests @ bench - -let () = - Alcotest.run ~argv:[|""|] "tezos-shell" [ - "locator", tests - ] +let () = Alcotest.run ~argv:[|""|] "tezos-shell" [("locator", tests)] diff --git a/src/lib_shell/test/test_pipeline.ml b/src/lib_shell/test/test_pipeline.ml index b04e8322d7627b1a5d7e2abbf139147eeb03c903..a9c01db398c159553d585ebade7165a454588c8c 100644 --- a/src/lib_shell/test/test_pipeline.ml +++ b/src/lib_shell/test/test_pipeline.ml @@ -25,122 +25,112 @@ open Pipeline open Lwt.Infix + let fail pp v exp = let open Format in let pp_print_result pv pe fmt = function - | Ok v -> Format.fprintf fmt "Ok %a" pv v - | Error e -> Format.fprintf fmt "Error %a" pe e + | Ok v -> + Format.fprintf fmt "Ok %a" pv v + | Error e -> + Format.fprintf fmt "Error %a" pe e in let pp_print_exc fmt exc = - Format.fprintf fmt "%s" (Printexc.to_string exc) in - kasprintf Pervasives.failwith "Got [%a], expected [%a]" - (pp_print_list ~pp_sep:(fun fmt () -> pp_print_char fmt ';') (pp_print_result pp pp_print_exc)) v - (pp_print_list ~pp_sep:(fun fmt () -> pp_print_char fmt ';') (pp_print_result pp pp_print_exc)) exp -let expect pp v vv = - if v = vv then - Lwt.return_unit - else - fail pp v vv + Format.fprintf fmt "%s" (Printexc.to_string exc) + in + kasprintf + Pervasives.failwith + "Got [%a], expected [%a]" + (pp_print_list + ~pp_sep:(fun fmt () -> pp_print_char fmt ';') + (pp_print_result pp pp_print_exc)) + v + (pp_print_list + ~pp_sep:(fun fmt () -> pp_print_char fmt ';') + (pp_print_result pp pp_print_exc)) + exp -let wrap (s, f) = - Alcotest_lwt.test_case s `Quick (fun _ () -> f ()) +let expect pp v vv = if v = vv then Lwt.return_unit else fail pp v vv +let wrap (s, f) = Alcotest_lwt.test_case s `Quick (fun _ () -> f ()) (* Some helper functions *) let rec stall n = - if n <= 0 then - Lwt.return_unit - else - Lwt_main.yield () >>= fun () -> - stall (n - 1) - -let req () = - stall (Random.int 5) >>= fun () -> - Lwt.return (Random.int 128) + if n <= 0 then Lwt.return_unit + else Lwt_main.yield () >>= fun () -> stall (n - 1) +let req () = stall (Random.int 5) >>= fun () -> Lwt.return (Random.int 128) module Order = struct - let tests = - List.map wrap - [ - ("s-two-values", fun () -> - run - (cons (async_s Lwt.return) nil) - [1 ; 5] - >>= expect Format.pp_print_int [Ok 1 ; Ok 5] - ) ; - ("p-two-values", fun () -> - run - (cons (async_p Lwt.return) nil) - [1 ; 5] - >>= expect Format.pp_print_int [Ok 1 ; Ok 5] - ) ; - ("s-many-values", fun () -> - run - (cons (async_s Lwt.return) nil) - [1 ; 2 ; 1 ; 2 ; 5 ; 6 ; 7 ; 5] - >>= expect Format.pp_print_int [Ok 1 ; Ok 2 ; Ok 1 ; Ok 2 ; Ok 5 ; Ok 6 ; Ok 7 ; Ok 5] - ) ; - 
("p-many-values", fun () -> - run - (cons (async_p Lwt.return) nil) - [1 ; 2 ; 1 ; 2 ; 5 ; 6 ; 7 ; 5] - >>= expect Format.pp_print_int [Ok 1 ; Ok 2 ; Ok 1 ; Ok 2 ; Ok 5 ; Ok 6 ; Ok 7 ; Ok 5] - ) ; - ("s-stalls", fun () -> + List.map + wrap + [ ( "s-two-values", + fun () -> + run (cons (async_s Lwt.return) nil) [1; 5] + >>= expect Format.pp_print_int [Ok 1; Ok 5] ); + ( "p-two-values", + fun () -> + run (cons (async_p Lwt.return) nil) [1; 5] + >>= expect Format.pp_print_int [Ok 1; Ok 5] ); + ( "s-many-values", + fun () -> + run (cons (async_s Lwt.return) nil) [1; 2; 1; 2; 5; 6; 7; 5] + >>= expect + Format.pp_print_int + [Ok 1; Ok 2; Ok 1; Ok 2; Ok 5; Ok 6; Ok 7; Ok 5] ); + ( "p-many-values", + fun () -> + run (cons (async_p Lwt.return) nil) [1; 2; 1; 2; 5; 6; 7; 5] + >>= expect + Format.pp_print_int + [Ok 1; Ok 2; Ok 1; Ok 2; Ok 5; Ok 6; Ok 7; Ok 5] ); + ( "s-stalls", + fun () -> run (cons (async_s (fun i -> stall i >>= fun () -> Lwt.return i)) nil) - [1 ; 2 ; 1 ; 2 ; 5 ; 6 ; 7 ; 5] - >>= expect Format.pp_print_int [Ok 1 ; Ok 2 ; Ok 1 ; Ok 2 ; Ok 5 ; Ok 6 ; Ok 7 ; Ok 5] - ) ; - ("p-stalls", fun () -> + [1; 2; 1; 2; 5; 6; 7; 5] + >>= expect + Format.pp_print_int + [Ok 1; Ok 2; Ok 1; Ok 2; Ok 5; Ok 6; Ok 7; Ok 5] ); + ( "p-stalls", + fun () -> run (cons (async_p (fun i -> stall i >>= fun () -> Lwt.return i)) nil) - [1 ; 2 ; 1 ; 2 ; 5 ; 6 ; 7 ; 5] - >>= expect Format.pp_print_int [Ok 1 ; Ok 2 ; Ok 1 ; Ok 2 ; Ok 5 ; Ok 6 ; Ok 7 ; Ok 5] - ) ; - ] - + [1; 2; 1; 2; 5; 6; 7; 5] + >>= expect + Format.pp_print_int + [Ok 1; Ok 2; Ok 1; Ok 2; Ok 5; Ok 6; Ok 7; Ok 5] ) ] end module Many_passes = struct + let f1 (n, tok) = stall n >>= fun () -> Lwt.return (n * 256, tok) + + let f2 (n, tok) = Lwt.return (n, tok) - let f1 (n, tok) = - stall n >>= fun () -> - Lwt.return (n * 256, tok) - let f2 (n, tok) = - Lwt.return (n, tok) let f3 (i, tok) = - req () >>= fun j -> - req () >>= fun k -> - Lwt.return (i + j, i + k, tok) - let f4 (i, j, tok) = - Lwt.return (i + j + 2, tok) + req () >>= fun j -> req () >>= fun k -> Lwt.return (i + j, i + k, tok) + + let f4 (i, j, tok) = Lwt.return (i + j + 2, tok) + let f5 (i, j, tok) = - if i > j then - req () >>= fun (_: int) -> - Lwt.return tok - else - Lwt.return tok + if i > j then req () >>= fun (_ : int) -> Lwt.return tok + else Lwt.return tok let pipe = - let (@) = cons in - async_p f1 @ async_s f2 @ - async_s f3 @ async_p f4 @ - async_p f1 @ async_s f2 @ - async_s f3 @ async_p f4 @ - async_s f3 @ async_p f5 @ nil + let ( @ ) = cons in + async_p f1 @ async_s f2 @ async_s f3 @ async_p f4 @ async_p f1 @ async_s f2 + @ async_s f3 @ async_p f4 @ async_s f3 @ async_p f5 @ nil let tests = - List.map wrap - [ - ("many-passes", fun () -> - run pipe - [ (1, "a"); (2, "b"); (1, "c"); (3, "d"); (1, "e") ] >>= function - | [ Ok "a"; Ok "b"; Ok "c"; Ok "d"; Ok "e"; ] -> Lwt.return_unit - | _ -> Format.kasprintf Pervasives.failwith "non identical output" - ) ; + List.map + wrap + [ ( "many-passes", + fun () -> + run pipe [(1, "a"); (2, "b"); (1, "c"); (3, "d"); (1, "e")] + >>= function + | [Ok "a"; Ok "b"; Ok "c"; Ok "d"; Ok "e"] -> + Lwt.return_unit + | _ -> + Format.kasprintf Pervasives.failwith "non identical output" ) ] - end diff --git a/src/lib_shell/test/test_state.ml b/src/lib_shell/test/test_state.ml index dc496637d4b0041bdaeb812694c4e500b0659838..cc1725cb61551d3da13d80929d19b0da0c9760e9 100644 --- a/src/lib_shell/test/test_state.ml +++ b/src/lib_shell/test/test_state.ml @@ -23,7 +23,7 @@ (* *) (*****************************************************************************) 
-let (//) = Filename.concat +let ( // ) = Filename.concat (** Basic blocks *) @@ -39,128 +39,133 @@ let genesis_time = Time.Protocol.of_seconds 0L module Proto = (val Registered_protocol.get_exn genesis_protocol) -let genesis : State.Chain.genesis = { - time = genesis_time ; - block = genesis_block ; - protocol = genesis_protocol ; -} +let genesis : State.Chain.genesis = + {time = genesis_time; block = genesis_block; protocol = genesis_protocol} let chain_id = Chain_id.of_block_hash genesis_block let incr_fitness fitness = let new_fitness = match fitness with - | [ fitness ] -> + | [fitness] -> Pervasives.( Data_encoding.Binary.of_bytes Data_encoding.int64 fitness - |> Option.unopt ~default:0L - |> Int64.succ - |> Data_encoding.Binary.to_bytes_exn Data_encoding.int64 - ) - | _ -> Data_encoding.Binary.to_bytes_exn Data_encoding.int64 1L + |> Option.unopt ~default:0L |> Int64.succ + |> Data_encoding.Binary.to_bytes_exn Data_encoding.int64) + | _ -> + Data_encoding.Binary.to_bytes_exn Data_encoding.int64 1L in - [ new_fitness ] + [new_fitness] let incr_timestamp timestamp = Time.Protocol.add timestamp (Int64.add 1L (Random.int64 10L)) let operation op = - let op : Operation.t = { - shell = { branch = genesis_block } ; - proto = MBytes.of_string op ; - } in - Operation.hash op, - op, - Data_encoding.Binary.to_bytes Operation.encoding op - + let op : Operation.t = + {shell = {branch = genesis_block}; proto = MBytes.of_string op} + in + (Operation.hash op, op, Data_encoding.Binary.to_bytes Operation.encoding op) -let block _state ?(context = Context_hash.zero) ?(operations = []) (pred: State.Block.t) name - : Block_header.t = +let block _state ?(context = Context_hash.zero) ?(operations = []) + (pred : State.Block.t) name : Block_header.t = let operations_hash = - Operation_list_list_hash.compute - [Operation_list_hash.compute operations] in + Operation_list_list_hash.compute [Operation_list_hash.compute operations] + in let pred_header = State.Block.shell_header pred in let fitness = incr_fitness pred_header.fitness in let timestamp = incr_timestamp pred_header.timestamp in - { shell = { level = Int32.succ pred_header.level ; - proto_level = pred_header.proto_level ; - predecessor = State.Block.hash pred ; - validation_passes = 1 ; - timestamp ; operations_hash ; fitness ; - context } ; - protocol_data = MBytes.of_string name ; - } - -let parsed_block ({ shell ; protocol_data } : Block_header.t) = + { shell = + { level = Int32.succ pred_header.level; + proto_level = pred_header.proto_level; + predecessor = State.Block.hash pred; + validation_passes = 1; + timestamp; + operations_hash; + fitness; + context }; + protocol_data = MBytes.of_string name } + +let parsed_block ({shell; protocol_data} : Block_header.t) = let protocol_data = Data_encoding.Binary.of_bytes_exn Proto.block_header_data_encoding - protocol_data in - ({ shell ; protocol_data } : Proto.block_header) + protocol_data + in + ({shell; protocol_data} : Proto.block_header) let zero = MBytes.create 0 let build_valid_chain state vtbl pred names = Lwt_list.fold_left_s (fun pred name -> - State.Block.context pred >>= fun predecessor_context -> - let rec attempt context = - begin - let oph, op, _bytes = operation name in - let block = block ?context state ~operations:[oph] pred name in - let hash = Block_header.hash block in - let pred_header = State.Block.header pred in - begin - Proto.begin_application - ~chain_id: Chain_id.zero - ~predecessor_context - ~predecessor_timestamp: pred_header.shell.timestamp - ~predecessor_fitness: 
pred_header.shell.fitness - (parsed_block block) >>=? fun vstate -> - (* no operations *) - Proto.finalize_block vstate - end >>=? fun (validation_result, _metadata) -> - Context.commit - ~time:block.shell.timestamp validation_result.context >>= fun context_hash -> - State.Block.store state - block zero [[op]] [[zero]] - ({context_hash; + State.Block.context pred + >>= fun predecessor_context -> + let rec attempt context = + (let (oph, op, _bytes) = operation name in + let block = block ?context state ~operations:[oph] pred name in + let hash = Block_header.hash block in + let pred_header = State.Block.header pred in + Proto.begin_application + ~chain_id:Chain_id.zero + ~predecessor_context + ~predecessor_timestamp:pred_header.shell.timestamp + ~predecessor_fitness:pred_header.shell.fitness + (parsed_block block) + >>=? (fun vstate -> + (* no operations *) + Proto.finalize_block vstate) + >>=? fun (validation_result, _metadata) -> + Context.commit ~time:block.shell.timestamp validation_result.context + >>= fun context_hash -> + State.Block.store + state + block + zero + [[op]] + [[zero]] + ( { context_hash; message = validation_result.message; max_operations_ttl = 1; - last_allowed_fork_level = validation_result.last_allowed_fork_level} : - State.Block.validation_store) - ~forking_testchain:false >>=? fun _vblock -> - State.Block.read state hash >>=? fun vblock -> - Hashtbl.add vtbl name vblock ; - return vblock - end >>= function - | Ok v -> Lwt.return v - | Error [ Validation_errors.Inconsistent_hash (got, _) ] -> - (* Kind of a hack, but at least it tests idempotence to some extent. *) - attempt (Some got) - | Error err -> - Error_monad.pp_print_error Format.err_formatter err ; - assert false in - attempt None) + last_allowed_fork_level = + validation_result.last_allowed_fork_level } + : State.Block.validation_store ) + ~forking_testchain:false + >>=? fun _vblock -> + State.Block.read state hash + >>=? fun vblock -> + Hashtbl.add vtbl name vblock ; + return vblock) + >>= function + | Ok v -> + Lwt.return v + | Error [Validation_errors.Inconsistent_hash (got, _)] -> + (* Kind of a hack, but at least it tests idempotence to some extent. 
*) + attempt (Some got) + | Error err -> + Error_monad.pp_print_error Format.err_formatter err ; + assert false + in + attempt None) pred - names >>= fun _ -> - Lwt.return_unit + names + >>= fun _ -> Lwt.return_unit let build_example_tree chain = let vtbl = Hashtbl.create 23 in - Chain.genesis chain >>= fun genesis -> + Chain.genesis chain + >>= fun genesis -> Hashtbl.add vtbl "Genesis" genesis ; - let c = [ "A1" ; "A2" ; "A3" ; "A4" ; "A5" ; "A6" ; "A7" ; "A8" ] in - build_valid_chain chain vtbl genesis c >>= fun () -> + let c = ["A1"; "A2"; "A3"; "A4"; "A5"; "A6"; "A7"; "A8"] in + build_valid_chain chain vtbl genesis c + >>= fun () -> let a3 = Hashtbl.find vtbl "A3" in - let c = [ "B1" ; "B2" ; "B3" ; "B4" ; "B5" ; "B6" ; "B7" ; "B8" ] in - build_valid_chain chain vtbl a3 c >>= fun () -> - Lwt.return vtbl + let c = ["B1"; "B2"; "B3"; "B4"; "B5"; "B6"; "B7"; "B8"] in + build_valid_chain chain vtbl a3 c >>= fun () -> Lwt.return vtbl type state = { - vblock: (string, State.Block.t) Hashtbl.t ; - state: State.t ; - chain: State.Chain.t ; + vblock : (string, State.Block.t) Hashtbl.t; + state : State.t; + chain : State.Chain.t } let vblock s = Hashtbl.find s.vblock @@ -168,57 +173,57 @@ let vblock s = Hashtbl.find s.vblock exception Found of string let vblocks s = - Hashtbl.fold (fun k v acc -> (k,v) :: acc) s.vblock [] + Hashtbl.fold (fun k v acc -> (k, v) :: acc) s.vblock [] |> List.sort Pervasives.compare let wrap_state_init f base_dir = - begin - let store_root = base_dir // "store" in - let context_root = base_dir // "context" in - State.init - ~store_mapsize:4_096_000_000L - ~context_mapsize:4_096_000_000L - ~store_root - ~context_root - genesis >>=? fun (state, chain, _index, _history_mode) -> - build_example_tree chain >>= fun vblock -> - f { state ; chain ; vblock } >>=? fun () -> - return_unit - end - -let test_init (_ : state) = - return_unit - - + let store_root = base_dir // "store" in + let context_root = base_dir // "context" in + State.init + ~store_mapsize:4_096_000_000L + ~context_mapsize:4_096_000_000L + ~store_root + ~context_root + genesis + >>=? fun (state, chain, _index, _history_mode) -> + build_example_tree chain + >>= fun vblock -> f {state; chain; vblock} >>=? fun () -> return_unit + +let test_init (_ : state) = return_unit (****************************************************************************) (** State.Block.read *) -let test_read_block (s: state) = - Lwt_list.iter_s (fun (name, vblock) -> +let test_read_block (s : state) = + Lwt_list.iter_s + (fun (name, vblock) -> let hash = State.Block.hash vblock in - State.Block.read s.chain hash >>= function + State.Block.read s.chain hash + >>= function | Error _ -> Assert.fail_msg "Error while reading valid block %s" name | Ok _vblock' -> (* FIXME COMPARE read operations ??? *) - Lwt.return_unit - ) (vblocks s) >>= fun () -> - return_unit - + Lwt.return_unit) + (vblocks s) + >>= fun () -> return_unit (****************************************************************************) (** Chain.set_checkpoint_then_purge_full *) let test_set_checkpoint_then_purge_full (s : state) = - State.Chain.checkpoint s.chain >>= fun checkpoint -> + State.Chain.checkpoint s.chain + >>= fun checkpoint -> let checkpoint_lvl = checkpoint.shell.level in let checkpoint_hash = Block_header.hash checkpoint in (* At the beginning the checkpoint is the genesis. *) - State.Block.read (s.chain) genesis_block >>=? 
fun read_genesis -> - let read_genesis_hash = Block_header.hash (State.Block.header read_genesis) in + State.Block.read s.chain genesis_block + >>=? fun read_genesis -> + let read_genesis_hash = + Block_header.hash (State.Block.header read_genesis) + in assert (Block_hash.equal checkpoint_hash read_genesis_hash) ; assert (checkpoint_lvl = Int32.zero) ; let a1 = vblock s "A1" in @@ -233,48 +238,56 @@ let test_set_checkpoint_then_purge_full (s : state) = assert (Int32.compare checkpoint_lvl la1 = -1) ; assert (Int32.compare checkpoint_lvl lb1 = -1) ; assert (Int32.compare checkpoint_lvl lb2 = -1) ; - State.Chain.store s.chain >>= fun chain_store -> - let chain_store = Store.Chain.get chain_store (State.Chain.id s.chain) in + State.Chain.store s.chain + >>= fun chain_store -> + let chain_store = Store.Chain.get chain_store (State.Chain.id s.chain) in let block_store = Store.Block.get chain_store in (* Let us set a new checkpoint "B1" whose level is greater than the genesis. *) State.Chain.set_checkpoint_then_purge_full s.chain (State.Block.header b2) - >>=? fun () -> (* Assert b2 does still exist and is the new checkpoint. *) - begin State.Block.known s.chain hb2 >|= fun b -> assert b end - >>= fun () -> - begin State.Chain.checkpoint s.chain >|= begin fun b -> - assert (Block_hash.equal (Block_header.hash b) hb2); - assert (Int32.equal b.shell.level lb2); - end - end - >>= fun () -> (* Assert b1 has been pruned.. *) - begin Store.Block.Contents.known (block_store, hb1) >|= fun b -> assert (not b) end - >>= fun () -> (* pruned, so we can still access its header. *) - begin - State.Block.read_opt s.chain hb1 >|= function - | Some _header -> assert true - | None -> assert false - end - >>= fun () -> (* Assert a1 has also been pruned .. *) - begin Store.Block.Contents.known (block_store, ha1) >|= fun b -> assert (not b) end - >>= fun () -> (* and we can also access its header. *) - begin - State.Block.read_opt s.chain ha1 >|= function - | Some _header -> assert true - | None -> assert false - end - >>= fun () -> (* and is accesible in Store.Block.Header *) - begin Store.Block.Pruned_contents.known (block_store, ha1) >|= fun b -> assert b end + >>=? fun () -> + (* Assert b2 does still exist and is the new checkpoint. *) + State.Block.known s.chain hb2 + >|= (fun b -> assert b) + >>= fun () -> + State.Chain.checkpoint s.chain + >|= (fun b -> + assert (Block_hash.equal (Block_header.hash b) hb2) ; + assert (Int32.equal b.shell.level lb2)) + >>= fun () -> + (* Assert b1 has been pruned.. *) + Store.Block.Contents.known (block_store, hb1) + >|= (fun b -> assert (not b)) + >>= fun () -> + (* pruned, so we can still access its header. *) + State.Block.read_opt s.chain hb1 + >|= (function Some _header -> assert true | None -> assert false) + >>= fun () -> + (* Assert a1 has also been pruned .. *) + Store.Block.Contents.known (block_store, ha1) + >|= (fun b -> assert (not b)) + >>= fun () -> + (* and we can also access its header. 
*) + State.Block.read_opt s.chain ha1 + >|= (function Some _header -> assert true | None -> assert false) + >>= fun () -> + (* and is accesible in Store.Block.Header *) + Store.Block.Pruned_contents.known (block_store, ha1) + >|= (fun b -> assert b) >>= fun () -> return_unit (** Chain.set_checkpoint_then_purge_rolling *) let test_set_checkpoint_then_purge_rolling (s : state) = - State.Chain.checkpoint s.chain >>= fun checkpoint -> + State.Chain.checkpoint s.chain + >>= fun checkpoint -> let checkpoint_lvl = checkpoint.shell.level in let checkpoint_hash = Block_header.hash checkpoint in (* At the beginning the checkpoint is the genesis. *) - State.Block.read (s.chain) genesis_block >>=? fun read_genesis -> - let read_genesis_hash = Block_header.hash (State.Block.header read_genesis) in + State.Block.read s.chain genesis_block + >>=? fun read_genesis -> + let read_genesis_hash = + Block_header.hash (State.Block.header read_genesis) + in assert (Block_hash.equal checkpoint_hash read_genesis_hash) ; assert (checkpoint_lvl = Int32.zero) ; let a1 = vblock s "A1" in @@ -289,7 +302,8 @@ let test_set_checkpoint_then_purge_rolling (s : state) = assert (Int32.compare checkpoint_lvl la1 = -1) ; assert (Int32.compare checkpoint_lvl lb1 = -1) ; assert (Int32.compare checkpoint_lvl lb2 = -1) ; - State.Block.max_operations_ttl b2 >>=? fun max_op_ttl -> + State.Block.max_operations_ttl b2 + >>=? fun max_op_ttl -> assert (max_op_ttl > 0) ; let ilb1 = Int32.to_int lb1 in let ilb2 = Int32.to_int lb2 in @@ -297,63 +311,74 @@ let test_set_checkpoint_then_purge_rolling (s : state) = assert (ilb2 - ilb1 <= min max_op_ttl ilb2) ; (* Assert a1 is in the to-delete range. *) let ila1 = Int32.to_int la1 in - assert (ilb2 - ila1 > min max_op_ttl ilb2); + assert (ilb2 - ila1 > min max_op_ttl ilb2) ; (* Assert b1 is not yet in Store.Block.Header since not pruned *) - State.Chain.store s.chain >>= fun chain_store -> - let chain_store = Store.Chain.get chain_store (State.Chain.id s.chain) in + State.Chain.store s.chain + >>= fun chain_store -> + let chain_store = Store.Chain.get chain_store (State.Chain.id s.chain) in let block_store = Store.Block.get chain_store in - begin Store.Block.Pruned_contents.known (block_store, hb1) >|= fun b -> assert (not b) end + Store.Block.Pruned_contents.known (block_store, hb1) + >|= (fun b -> assert (not b)) >>= fun () -> (* But accessible with State.Block.Header *) - begin State.Block.known s.chain hb1 >|= fun b -> assert b end + State.Block.known s.chain hb1 + >|= (fun b -> assert b) (* And Store.Block.Contents *) >>= fun () -> - begin Store.Block.Contents.known (block_store, hb1) >|= fun b -> assert b end + Store.Block.Contents.known (block_store, hb1) + >|= (fun b -> assert b) (* Let us set a new checkpoint "B1" whose level is greater than the genesis. *) >>= fun () -> State.Chain.set_checkpoint_then_purge_rolling s.chain (State.Block.header b2) - >>=? fun () -> (* Assert b2 does still exist and is the new checkpoint. *) - begin State.Block.known s.chain hb2 >|= fun b -> assert b end - >>= fun () -> - begin State.Chain.checkpoint s.chain >|= begin fun b -> - assert (Block_hash.equal (Block_header.hash b) hb2); - assert (Int32.equal b.shell.level lb2); - end - end - >>= fun () -> (* Assert b1 has been pruned.. *) - begin Store.Block.Contents.known (block_store, hb1) >|= fun b -> assert (not b) end - >>= fun () -> (* pruned, so we can still access its header. *) - begin - State.Block.read_opt s.chain hb1 >|= function - | Some _block -> assert true - | None -> assert false - end + >>=? 
fun () -> + (* Assert b2 does still exist and is the new checkpoint. *) + State.Block.known s.chain hb2 + >|= (fun b -> assert b) + >>= fun () -> + State.Chain.checkpoint s.chain + >|= (fun b -> + assert (Block_hash.equal (Block_header.hash b) hb2) ; + assert (Int32.equal b.shell.level lb2)) + >>= fun () -> + (* Assert b1 has been pruned.. *) + Store.Block.Contents.known (block_store, hb1) + >|= (fun b -> assert (not b)) + >>= fun () -> + (* pruned, so we can still access its header. *) + State.Block.read_opt s.chain hb1 + >|= (function Some _block -> assert true | None -> assert false) >>= fun () -> (* Assert b1 is now in Store.Block.Header since it has been pruned *) - begin Store.Block.Pruned_contents.known (block_store, hb1) >|= fun b -> assert b end + Store.Block.Pruned_contents.known (block_store, hb1) + >|= (fun b -> assert b) >>= fun () -> (* And also accessible with State.Block.Header *) - begin State.Block.Header.known (block_store, hb1) >|= fun b -> assert b end + State.Block.Header.known (block_store, hb1) + >|= (fun b -> assert b) (* But not in Store.Block.Contents *) >>= fun () -> - begin Store.Block.Contents.known (block_store, hb1) >|= fun b -> assert (not b) end - >>= fun () -> (* Assert a1 has been deleted.. *) - begin State.Block.known s.chain ha1 >|= fun b -> assert (not b) end - >>= fun () -> (* deleted, so we can not access its header anymore. *) - begin - State.Block.read_opt s.chain ha1 >|= function - | Some _header -> assert false - | None -> assert true - end + Store.Block.Contents.known (block_store, hb1) + >|= (fun b -> assert (not b)) + >>= fun () -> + (* Assert a1 has been deleted.. *) + State.Block.known s.chain ha1 + >|= (fun b -> assert (not b)) + >>= fun () -> + (* deleted, so we can not access its header anymore. *) + State.Block.read_opt s.chain ha1 + >|= (function Some _header -> assert false | None -> assert true) >>= fun () -> (* Assert b1 is now in Store.Block.Header since it has been pruned *) - begin Store.Block.Pruned_contents.known (block_store, ha1) >|= fun b -> assert (not b) end + Store.Block.Pruned_contents.known (block_store, ha1) + >|= (fun b -> assert (not b)) >>= fun () -> (* And not in State.Block.Header *) - begin State.Block.Header.known (block_store, ha1) >|= fun b -> assert (not b) end + State.Block.Header.known (block_store, ha1) + >|= (fun b -> assert (not b)) (* Neither in Store.Block.Contents *) >>= fun () -> - begin Store.Block.Contents.known (block_store, hb1) >|= fun b -> assert (not b) end + Store.Block.Contents.known (block_store, hb1) + >|= (fun b -> assert (not b)) (* *) >>= fun () -> return_unit @@ -361,29 +386,37 @@ let test_set_checkpoint_then_purge_rolling (s : state) = (** Chain_traversal.path *) -let rec compare_path p1 p2 = match p1, p2 with - | [], [] -> true - | h1 :: p1, h2 :: p2 -> Block_hash.equal h1 h2 && compare_path p1 p2 - | _ -> false +let rec compare_path p1 p2 = + match (p1, p2) with + | ([], []) -> + true + | (h1 :: p1, h2 :: p2) -> + Block_hash.equal h1 h2 && compare_path p1 p2 + | _ -> + false -let test_path (s: state) = +let test_path (s : state) = let check_path h1 h2 p2 = - Chain_traversal.path (vblock s h1) (vblock s h2) >>= function + Chain_traversal.path (vblock s h1) (vblock s h2) + >>= function | None -> - Assert.fail_msg "cannot compute path %s -> %s" h1 h2 ; - | Some (p: State.Block.t list) -> + Assert.fail_msg "cannot compute path %s -> %s" h1 h2 + | Some (p : State.Block.t list) -> let p = List.map State.Block.hash p in let p2 = List.map (fun b -> State.Block.hash (vblock s b)) p2 in if 
not (compare_path p p2) then Assert.fail_msg "bad path %s -> %s" h1 h2 ; - Lwt.return_unit in - check_path "Genesis" "Genesis" [] >>= fun () -> - check_path "A1" "A1" [] >>= fun () -> - check_path "A2" "A6" ["A3"; "A4"; "A5"; "A6"] >>= fun () -> - check_path "B2" "B6" ["B3"; "B4"; "B5"; "B6"] >>= fun () -> - check_path "A1" "B3" ["A2"; "A3"; "B1"; "B2"; "B3"] >>= fun () -> - return_unit - + Lwt.return_unit + in + check_path "Genesis" "Genesis" [] + >>= fun () -> + check_path "A1" "A1" [] + >>= fun () -> + check_path "A2" "A6" ["A3"; "A4"; "A5"; "A6"] + >>= fun () -> + check_path "B2" "B6" ["B3"; "B4"; "B5"; "B6"] + >>= fun () -> + check_path "A1" "B3" ["A2"; "A3"; "B1"; "B2"; "B3"] >>= fun () -> return_unit (****************************************************************************) @@ -391,59 +424,82 @@ let test_path (s: state) = let test_ancestor s = let check_ancestor h1 h2 expected = - Chain_traversal.common_ancestor - (vblock s h1) (vblock s h2) >>= fun a -> - if not (Block_hash.equal (State.Block.hash a) (State.Block.hash expected)) then - Assert.fail_msg "bad ancestor %s %s" h1 h2 ; - Lwt.return_unit in - check_ancestor "Genesis" "Genesis" (vblock s "Genesis") >>= fun () -> - check_ancestor "Genesis" "A3" (vblock s "Genesis") >>= fun () -> - check_ancestor "A3" "Genesis" (vblock s "Genesis") >>= fun () -> - check_ancestor "A1" "A1" (vblock s "A1") >>= fun () -> - check_ancestor "A1" "A3" (vblock s "A1") >>= fun () -> - check_ancestor "A3" "A1" (vblock s "A1") >>= fun () -> - check_ancestor "A6" "B6" (vblock s "A3") >>= fun () -> - check_ancestor "B6" "A6" (vblock s "A3") >>= fun () -> - check_ancestor "A4" "B1" (vblock s "A3") >>= fun () -> - check_ancestor "B1" "A4" (vblock s "A3") >>= fun () -> - check_ancestor "A3" "B1" (vblock s "A3") >>= fun () -> - check_ancestor "B1" "A3" (vblock s "A3") >>= fun () -> - check_ancestor "A2" "B1" (vblock s "A2") >>= fun () -> - check_ancestor "B1" "A2" (vblock s "A2") >>= fun () -> - return_unit - + Chain_traversal.common_ancestor (vblock s h1) (vblock s h2) + >>= fun a -> + if not (Block_hash.equal (State.Block.hash a) (State.Block.hash expected)) + then Assert.fail_msg "bad ancestor %s %s" h1 h2 ; + Lwt.return_unit + in + check_ancestor "Genesis" "Genesis" (vblock s "Genesis") + >>= fun () -> + check_ancestor "Genesis" "A3" (vblock s "Genesis") + >>= fun () -> + check_ancestor "A3" "Genesis" (vblock s "Genesis") + >>= fun () -> + check_ancestor "A1" "A1" (vblock s "A1") + >>= fun () -> + check_ancestor "A1" "A3" (vblock s "A1") + >>= fun () -> + check_ancestor "A3" "A1" (vblock s "A1") + >>= fun () -> + check_ancestor "A6" "B6" (vblock s "A3") + >>= fun () -> + check_ancestor "B6" "A6" (vblock s "A3") + >>= fun () -> + check_ancestor "A4" "B1" (vblock s "A3") + >>= fun () -> + check_ancestor "B1" "A4" (vblock s "A3") + >>= fun () -> + check_ancestor "A3" "B1" (vblock s "A3") + >>= fun () -> + check_ancestor "B1" "A3" (vblock s "A3") + >>= fun () -> + check_ancestor "A2" "B1" (vblock s "A2") + >>= fun () -> + check_ancestor "B1" "A2" (vblock s "A2") >>= fun () -> return_unit (****************************************************************************) let seed = - let receiver_id = P2p_peer.Id.of_string_exn (String.make P2p_peer.Id.size 'r') in - let sender_id = P2p_peer.Id.of_string_exn (String.make P2p_peer.Id.size 's') in - {Block_locator.receiver_id=receiver_id ; sender_id } + let receiver_id = + P2p_peer.Id.of_string_exn (String.make P2p_peer.Id.size 'r') + in + let sender_id = + P2p_peer.Id.of_string_exn (String.make P2p_peer.Id.size 
's') + in + {Block_locator.receiver_id; sender_id} (** Chain_traversal.block_locator *) let test_locator s = let check_locator length h1 expected = - State.compute_locator s.chain - ~size:length (vblock s h1) seed >>= fun l -> - let _, l = (l : Block_locator.t :> _ * _) in + State.compute_locator s.chain ~size:length (vblock s h1) seed + >>= fun l -> + let (_, l) = (l : Block_locator.t :> _ * _) in if List.length l <> List.length expected then Assert.fail_msg "Invalid locator length %s (found: %d, expected: %d)" - h1 (List.length l) (List.length expected) ; + h1 + (List.length l) + (List.length expected) ; List.iter2 (fun h h2 -> - if not (Block_hash.equal h (State.Block.hash @@ vblock s h2)) then - Assert.fail_msg "Invalid locator %s (expected: %s)" h1 h2) - l expected ; - Lwt.return_unit in - check_locator 6 "A8" ["A7";"A6";"A5";"A4";"A3";"A2"] >>= fun () -> - check_locator 8 "B8" ["B7";"B6";"B5";"B4";"B3";"B2";"B1";"A3"] >>= fun () -> - check_locator 4 "B8" ["B7";"B6";"B5";"B4"] >>= fun () -> - check_locator 0 "A5" [] >>= fun () -> - check_locator 100 "A5" ["A4";"A3";"A2";"A1";"Genesis"] >>= fun () -> - return_unit - + if not (Block_hash.equal h (State.Block.hash @@ vblock s h2)) then + Assert.fail_msg "Invalid locator %s (expected: %s)" h1 h2) + l + expected ; + Lwt.return_unit + in + check_locator 6 "A8" ["A7"; "A6"; "A5"; "A4"; "A3"; "A2"] + >>= fun () -> + check_locator 8 "B8" ["B7"; "B6"; "B5"; "B4"; "B3"; "B2"; "B1"; "A3"] + >>= fun () -> + check_locator 4 "B8" ["B7"; "B6"; "B5"; "B4"] + >>= fun () -> + check_locator 0 "A5" [] + >>= fun () -> + check_locator 100 "A5" ["A4"; "A3"; "A2"; "A1"; "Genesis"] + >>= fun () -> return_unit (****************************************************************************) @@ -453,88 +509,132 @@ let compare s name heads l = if List.length heads <> List.length l then Assert.fail_msg "unexpected known_heads size (%s: %d %d)" - name (List.length heads) (List.length l) ; + name + (List.length heads) + (List.length l) ; List.iter (fun bname -> - let hash = State.Block.hash (vblock s bname) in - if not (List.exists (fun b -> Block_hash.equal hash (State.Block.hash b)) heads) then - Assert.fail_msg "missing block in known_heads (%s: %s)" name bname) + let hash = State.Block.hash (vblock s bname) in + if + not + (List.exists + (fun b -> Block_hash.equal hash (State.Block.hash b)) + heads) + then Assert.fail_msg "missing block in known_heads (%s: %s)" name bname) l let test_known_heads s = - Chain.known_heads s.chain >>= fun heads -> - compare s "initial" heads ["A8";"B8"] ; + Chain.known_heads s.chain + >>= fun heads -> + compare s "initial" heads ["A8"; "B8"] ; return_unit - (****************************************************************************) (** Chain.head/set_head *) let test_head s = - Chain.head s.chain >>= fun head -> + Chain.head s.chain + >>= fun head -> if not (Block_hash.equal (State.Block.hash head) genesis_block) then Assert.fail_msg "unexpected head" ; - Chain.set_head s.chain (vblock s "A6") >>= fun _ -> - Chain.head s.chain >>= fun head -> - if not (Block_hash.equal (State.Block.hash head) (State.Block.hash @@ vblock s "A6")) then - Assert.fail_msg "unexpected head" ; + Chain.set_head s.chain (vblock s "A6") + >>= fun _ -> + Chain.head s.chain + >>= fun head -> + if + not + (Block_hash.equal + (State.Block.hash head) + (State.Block.hash @@ vblock s "A6")) + then Assert.fail_msg "unexpected head" ; return_unit - (****************************************************************************) (** Chain.mem *) let test_mem s = - let 
mem s x = - Chain.mem s.chain (State.Block.hash @@ vblock s x) in + let mem s x = Chain.mem s.chain (State.Block.hash @@ vblock s x) in let test_mem s x = - mem s x >>= function - | true -> Lwt.return_unit - | false -> Assert.fail_msg "mem %s" x in + mem s x + >>= function + | true -> Lwt.return_unit | false -> Assert.fail_msg "mem %s" x + in let test_not_mem s x = - mem s x >>= function - | false -> Lwt.return_unit - | true -> Assert.fail_msg "not (mem %s)" x in - test_not_mem s "A3" >>= fun () -> - test_not_mem s "A6" >>= fun () -> - test_not_mem s "A8" >>= fun () -> - test_not_mem s "B1" >>= fun () -> - test_not_mem s "B6" >>= fun () -> - test_not_mem s "B8" >>= fun () -> - Chain.set_head s.chain (vblock s "A8") >>= fun _ -> - test_mem s "A3" >>= fun () -> - test_mem s "A6" >>= fun () -> - test_mem s "A8" >>= fun () -> - test_not_mem s "B1" >>= fun () -> - test_not_mem s "B6" >>= fun () -> - test_not_mem s "B8" >>= fun () -> - Chain.set_head s.chain (vblock s "A6") >>= fun _ -> - test_mem s "A3" >>= fun () -> - test_mem s "A6" >>= fun () -> - test_not_mem s "A8" >>= fun () -> - test_not_mem s "B1" >>= fun () -> - test_not_mem s "B6" >>= fun () -> - test_not_mem s "B8" >>= fun () -> - Chain.set_head s.chain (vblock s "B6") >>= fun _ -> - test_mem s "A3" >>= fun () -> - test_not_mem s "A4" >>= fun () -> - test_not_mem s "A6" >>= fun () -> - test_not_mem s "A8" >>= fun () -> - test_mem s "B1" >>= fun () -> - test_mem s "B6" >>= fun () -> - test_not_mem s "B8" >>= fun () -> - Chain.set_head s.chain (vblock s "B8") >>= fun _ -> - test_mem s "A3" >>= fun () -> - test_not_mem s "A4" >>= fun () -> - test_not_mem s "A6" >>= fun () -> - test_not_mem s "A8" >>= fun () -> - test_mem s "B1" >>= fun () -> - test_mem s "B6" >>= fun () -> - test_mem s "B8" >>= fun () -> - return_unit - + mem s x + >>= function + | false -> Lwt.return_unit | true -> Assert.fail_msg "not (mem %s)" x + in + test_not_mem s "A3" + >>= fun () -> + test_not_mem s "A6" + >>= fun () -> + test_not_mem s "A8" + >>= fun () -> + test_not_mem s "B1" + >>= fun () -> + test_not_mem s "B6" + >>= fun () -> + test_not_mem s "B8" + >>= fun () -> + Chain.set_head s.chain (vblock s "A8") + >>= fun _ -> + test_mem s "A3" + >>= fun () -> + test_mem s "A6" + >>= fun () -> + test_mem s "A8" + >>= fun () -> + test_not_mem s "B1" + >>= fun () -> + test_not_mem s "B6" + >>= fun () -> + test_not_mem s "B8" + >>= fun () -> + Chain.set_head s.chain (vblock s "A6") + >>= fun _ -> + test_mem s "A3" + >>= fun () -> + test_mem s "A6" + >>= fun () -> + test_not_mem s "A8" + >>= fun () -> + test_not_mem s "B1" + >>= fun () -> + test_not_mem s "B6" + >>= fun () -> + test_not_mem s "B8" + >>= fun () -> + Chain.set_head s.chain (vblock s "B6") + >>= fun _ -> + test_mem s "A3" + >>= fun () -> + test_not_mem s "A4" + >>= fun () -> + test_not_mem s "A6" + >>= fun () -> + test_not_mem s "A8" + >>= fun () -> + test_mem s "B1" + >>= fun () -> + test_mem s "B6" + >>= fun () -> + test_not_mem s "B8" + >>= fun () -> + Chain.set_head s.chain (vblock s "B8") + >>= fun _ -> + test_mem s "A3" + >>= fun () -> + test_not_mem s "A4" + >>= fun () -> + test_not_mem s "A6" + >>= fun () -> + test_not_mem s "A8" + >>= fun () -> + test_mem s "B1" + >>= fun () -> + test_mem s "B6" >>= fun () -> test_mem s "B8" >>= fun () -> return_unit (****************************************************************************) @@ -542,53 +642,74 @@ let test_mem s = let test_new_blocks s = let test s head h expected_ancestor expected = - let to_block = vblock s head - and from_block = vblock s h 
in - Chain_traversal.new_blocks ~from_block ~to_block >>= fun (ancestor, blocks) -> - if not (Block_hash.equal (State.Block.hash ancestor) (State.Block.hash @@ vblock s expected_ancestor)) then - Assert.fail_msg "Invalid ancestor %s -> %s (expected: %s)" head h expected_ancestor ; + let to_block = vblock s head and from_block = vblock s h in + Chain_traversal.new_blocks ~from_block ~to_block + >>= fun (ancestor, blocks) -> + if + not + (Block_hash.equal + (State.Block.hash ancestor) + (State.Block.hash @@ vblock s expected_ancestor)) + then + Assert.fail_msg + "Invalid ancestor %s -> %s (expected: %s)" + head + h + expected_ancestor ; if List.length blocks <> List.length expected then Assert.fail_msg "Invalid locator length %s (found: %d, expected: %d)" - h (List.length blocks) (List.length expected) ; + h + (List.length blocks) + (List.length expected) ; List.iter2 (fun h1 h2 -> - if not (Block_hash.equal (State.Block.hash h1) (State.Block.hash @@ vblock s h2)) then - Assert.fail_msg "Invalid new blocks %s -> %s (expected: %s)" head h h2) - blocks expected ; + if + not + (Block_hash.equal + (State.Block.hash h1) + (State.Block.hash @@ vblock s h2)) + then + Assert.fail_msg + "Invalid new blocks %s -> %s (expected: %s)" + head + h + h2) + blocks + expected ; Lwt.return_unit in - test s "A6" "A6" "A6" [] >>= fun () -> - test s "A8" "A6" "A6" ["A7";"A8"] >>= fun () -> - test s "A8" "B7" "A3" ["A4";"A5";"A6";"A7";"A8"] >>= fun () -> - return_unit - + test s "A6" "A6" "A6" [] + >>= fun () -> + test s "A8" "A6" "A6" ["A7"; "A8"] + >>= fun () -> + test s "A8" "B7" "A3" ["A4"; "A5"; "A6"; "A7"; "A8"] + >>= fun () -> return_unit (****************************************************************************) - -let tests : (string * (state -> unit tzresult Lwt.t)) list = [ - "init", test_init ; - "read_block", test_read_block ; - "path", test_path ; - "ancestor", test_ancestor ; - "locator", test_locator ; - "known_heads", test_known_heads ; - "head", test_head ; - "mem", test_mem ; - "new_blocks", test_new_blocks ; - "set_checkpoint_then_purge_rolling", test_set_checkpoint_then_purge_rolling ; - "set_checkpoint_then_purge_full", test_set_checkpoint_then_purge_full ; -] +let tests : (string * (state -> unit tzresult Lwt.t)) list = + [ ("init", test_init); + ("read_block", test_read_block); + ("path", test_path); + ("ancestor", test_ancestor); + ("locator", test_locator); + ("known_heads", test_known_heads); + ("head", test_head); + ("mem", test_mem); + ("new_blocks", test_new_blocks); + ( "set_checkpoint_then_purge_rolling", + test_set_checkpoint_then_purge_rolling ); + ("set_checkpoint_then_purge_full", test_set_checkpoint_then_purge_full) ] let wrap (n, f) = - Alcotest_lwt.test_case n `Quick begin fun _ () -> - Lwt_utils_unix.with_tempdir "tezos_test_" begin fun dir -> - wrap_state_init f dir >>= function - | Ok () -> Lwt.return_unit - | Error error -> - Format.kasprintf Pervasives.failwith "%a" pp_print_error error - end - end + Alcotest_lwt.test_case n `Quick (fun _ () -> + Lwt_utils_unix.with_tempdir "tezos_test_" (fun dir -> + wrap_state_init f dir + >>= function + | Ok () -> + Lwt.return_unit + | Error error -> + Format.kasprintf Pervasives.failwith "%a" pp_print_error error)) let tests = List.map wrap tests diff --git a/src/lib_shell/test/test_state_checkpoint.ml b/src/lib_shell/test/test_state_checkpoint.ml index 46f2e533ca193846627478206bccdee6156179bc..ce9e4540f4a01cc18b93427153636a7cd762d2cf 100644 --- a/src/lib_shell/test/test_state_checkpoint.ml +++ 
b/src/lib_shell/test/test_state_checkpoint.ml @@ -23,7 +23,7 @@ (* *) (*****************************************************************************) -let (//) = Filename.concat +let ( // ) = Filename.concat (**************************************************************************) (** Basic blocks *) @@ -36,120 +36,127 @@ let genesis_protocol = Protocol_hash.of_b58check_exn "ProtoDemoDemoDemoDemoDemoDemoDemoDemoDemoDemoD3c8k9" -let genesis_time = - Time.Protocol.of_seconds 0L +let genesis_time = Time.Protocol.of_seconds 0L module Proto = (val Registered_protocol.get_exn genesis_protocol) -let genesis : State.Chain.genesis = { - time = genesis_time ; - block = genesis_block ; - protocol = genesis_protocol ; -} +let genesis : State.Chain.genesis = + {time = genesis_time; block = genesis_block; protocol = genesis_protocol} let operation op = - let op : Operation.t = { - shell = { branch = genesis_block } ; - proto = MBytes.of_string op ; - } in - Operation.hash op, - op, - Data_encoding.Binary.to_bytes Operation.encoding op + let op : Operation.t = + {shell = {branch = genesis_block}; proto = MBytes.of_string op} + in + (Operation.hash op, op, Data_encoding.Binary.to_bytes Operation.encoding op) let incr_fitness fitness = let new_fitness = match fitness with - | [ fitness ] -> + | [fitness] -> Pervasives.( Data_encoding.Binary.of_bytes Data_encoding.int64 fitness - |> Option.unopt ~default:0L - |> Int64.succ - |> Data_encoding.Binary.to_bytes_exn Data_encoding.int64 - ) - | _ -> Data_encoding.Binary.to_bytes_exn Data_encoding.int64 1L + |> Option.unopt ~default:0L |> Int64.succ + |> Data_encoding.Binary.to_bytes_exn Data_encoding.int64) + | _ -> + Data_encoding.Binary.to_bytes_exn Data_encoding.int64 1L in - [ new_fitness ] + [new_fitness] let incr_timestamp timestamp = Time.Protocol.add timestamp (Int64.add 1L (Random.int64 10L)) -let block _state ?(context = Context_hash.zero) ?(operations = []) (pred: State.Block.t) name - : Block_header.t = +let block _state ?(context = Context_hash.zero) ?(operations = []) + (pred : State.Block.t) name : Block_header.t = let operations_hash = - Operation_list_list_hash.compute - [Operation_list_hash.compute operations] in + Operation_list_list_hash.compute [Operation_list_hash.compute operations] + in let pred_header = State.Block.shell_header pred in let fitness = incr_fitness pred_header.fitness in let timestamp = incr_timestamp pred_header.timestamp in - { shell = { level = Int32.succ pred_header.level ; - proto_level = pred_header.proto_level ; - predecessor = State.Block.hash pred ; - validation_passes = 1 ; - timestamp ; operations_hash ; fitness ; - context } ; - protocol_data = MBytes.of_string name ; - } - -let parsed_block ({ shell ; protocol_data } : Block_header.t) = + { shell = + { level = Int32.succ pred_header.level; + proto_level = pred_header.proto_level; + predecessor = State.Block.hash pred; + validation_passes = 1; + timestamp; + operations_hash; + fitness; + context }; + protocol_data = MBytes.of_string name } + +let parsed_block ({shell; protocol_data} : Block_header.t) = let protocol_data = Data_encoding.Binary.of_bytes_exn Proto.block_header_data_encoding - protocol_data in - ({ shell ; protocol_data } : Proto.block_header) + protocol_data + in + ({shell; protocol_data} : Proto.block_header) let zero = MBytes.create 0 let build_valid_chain state vtbl pred names = Lwt_list.fold_left_s (fun pred name -> - State.Block.context pred >>= fun predecessor_context -> - let rec attempt context = - begin - let oph, op, _bytes = operation 
name in - let block = block ?context state ~operations:[oph] pred name in - let hash = Block_header.hash block in - let pred_header = State.Block.header pred in - begin - Proto.begin_application - ~chain_id: Chain_id.zero - ~predecessor_context - ~predecessor_timestamp: pred_header.shell.timestamp - ~predecessor_fitness: pred_header.shell.fitness - (parsed_block block) >>=? fun vstate -> - (* no operations *) - Proto.finalize_block vstate - end >>=? fun (result, _metadata) -> - Context.commit - ~time:(Time.System.to_protocol (Systime_os.now ())) - ?message:result.message - result.context >>= fun context_hash -> - let validation_store = - { State.Block.context_hash ; message = result.message ; - max_operations_ttl = result.max_operations_ttl ; - last_allowed_fork_level = result.last_allowed_fork_level - } in - State.Block.store state - block zero [[op]] [[zero]] validation_store ~forking_testchain:false >>=? fun _vblock -> - State.Block.read state hash >>=? fun vblock -> - Hashtbl.add vtbl name vblock ; - return vblock - end >>= function - | Ok v -> Lwt.return v - | Error [ Validation_errors.Inconsistent_hash (got, _) ] -> - (* Kind of a hack, but at least it tests idempotence to some extent. *) - attempt (Some got) - | Error err -> - Error_monad.pp_print_error Format.err_formatter err ; - assert false in - attempt None) + State.Block.context pred + >>= fun predecessor_context -> + let rec attempt context = + (let (oph, op, _bytes) = operation name in + let block = block ?context state ~operations:[oph] pred name in + let hash = Block_header.hash block in + let pred_header = State.Block.header pred in + Proto.begin_application + ~chain_id:Chain_id.zero + ~predecessor_context + ~predecessor_timestamp:pred_header.shell.timestamp + ~predecessor_fitness:pred_header.shell.fitness + (parsed_block block) + >>=? (fun vstate -> + (* no operations *) + Proto.finalize_block vstate) + >>=? fun (result, _metadata) -> + Context.commit + ~time:(Time.System.to_protocol (Systime_os.now ())) + ?message:result.message + result.context + >>= fun context_hash -> + let validation_store = + { State.Block.context_hash; + message = result.message; + max_operations_ttl = result.max_operations_ttl; + last_allowed_fork_level = result.last_allowed_fork_level } + in + State.Block.store + state + block + zero + [[op]] + [[zero]] + validation_store + ~forking_testchain:false + >>=? fun _vblock -> + State.Block.read state hash + >>=? fun vblock -> + Hashtbl.add vtbl name vblock ; + return vblock) + >>= function + | Ok v -> + Lwt.return v + | Error [Validation_errors.Inconsistent_hash (got, _)] -> + (* Kind of a hack, but at least it tests idempotence to some extent. 
*) + attempt (Some got) + | Error err -> + Error_monad.pp_print_error Format.err_formatter err ; + assert false + in + attempt None) pred - names >>= fun _ -> - Lwt.return_unit + names + >>= fun _ -> Lwt.return_unit type state = { - vblock: (string, State.Block.t) Hashtbl.t ; - state: State.t ; - chain: State.Chain.t ; + vblock : (string, State.Block.t) Hashtbl.t; + state : State.t; + chain : State.Chain.t } let vblock s = Hashtbl.find s.vblock @@ -157,7 +164,7 @@ let vblock s = Hashtbl.find s.vblock exception Found of string let vblocks s = - Hashtbl.fold (fun k v acc -> (k,v) :: acc) s.vblock [] + Hashtbl.fold (fun k v acc -> (k, v) :: acc) s.vblock [] |> List.sort Pervasives.compare (*******************************************************) @@ -170,29 +177,28 @@ let vblocks s = let build_example_tree chain = let vtbl = Hashtbl.create 23 in - Chain.genesis chain >>= fun genesis -> + Chain.genesis chain + >>= fun genesis -> Hashtbl.add vtbl "Genesis" genesis ; - let c = [ "A1" ; "A2" ; "A3" ; "A4" ; "A5" ] in - build_valid_chain chain vtbl genesis c >>= fun () -> + let c = ["A1"; "A2"; "A3"; "A4"; "A5"] in + build_valid_chain chain vtbl genesis c + >>= fun () -> let a2 = Hashtbl.find vtbl "A2" in - let c = [ "B1" ; "B2" ; "B3" ; "B4" ; "B5" ] in - build_valid_chain chain vtbl a2 c >>= fun () -> - Lwt.return vtbl + let c = ["B1"; "B2"; "B3"; "B4"; "B5"] in + build_valid_chain chain vtbl a2 c >>= fun () -> Lwt.return vtbl let wrap_state_init f base_dir = - begin - let store_root = base_dir // "store" in - let context_root = base_dir // "context" in - State.init - ~store_mapsize:4_096_000_000L - ~context_mapsize:4_096_000_000L - ~store_root - ~context_root - genesis >>=? fun (state, chain, _index, _history_mode) -> - build_example_tree chain >>= fun vblock -> - f { state ; chain ; vblock } >>=? fun () -> - return_unit - end + let store_root = base_dir // "store" in + let context_root = base_dir // "context" in + State.init + ~store_mapsize:4_096_000_000L + ~context_mapsize:4_096_000_000L + ~store_root + ~context_root + genesis + >>=? fun (state, chain, _index, _history_mode) -> + build_example_tree chain + >>= fun vblock -> f {state; chain; vblock} >>=? 
fun () -> return_unit (*******************************************************) @@ -210,17 +216,19 @@ does not prevent a future good block from correctly being reached let test_basic_checkpoint s = let block = vblock s "A1" in let header = State.Block.header block in - State.Chain.set_checkpoint s.chain header >>= fun () -> - State.Chain.checkpoint s.chain >>= fun checkpoint_header -> + State.Chain.set_checkpoint s.chain header + >>= fun () -> + State.Chain.checkpoint s.chain + >>= fun checkpoint_header -> let c_level = checkpoint_header.shell.level in let c_block = Block_header.hash checkpoint_header in - if not (Block_hash.equal c_block (State.Block.hash block)) && - Int32.equal c_level (State.Block.level block) - then - Assert.fail_msg "unexpected checkpoint" + if + (not (Block_hash.equal c_block (State.Block.hash block))) + && Int32.equal c_level (State.Block.level block) + then Assert.fail_msg "unexpected checkpoint" else return_unit - (* +(* - cp: checkpoint Genesis - A1 - A2 (cp) - A3 - A4 - A5 @@ -236,17 +244,19 @@ let test_acceptable_block s = let header = State.Block.header block in (* let level = State.Block.level block in * let block_hash = State.Block.hash block in *) - State.Chain.set_checkpoint s.chain header >>= fun () -> + State.Chain.set_checkpoint s.chain header + >>= fun () -> (* it is accepted only if the current head is lower than the checkpoint *) let block_1 = vblock s "A1" in - Chain.set_head s.chain block_1 >>=? fun head -> + Chain.set_head s.chain block_1 + >>=? fun head -> let header = State.Block.header head in - State.Chain.acceptable_block s.chain header >>= fun is_accepted_block -> - if is_accepted_block - then return_unit + State.Chain.acceptable_block s.chain header + >>= fun is_accepted_block -> + if is_accepted_block then return_unit else Assert.fail_msg "unacceptable block" - (* +(* Genesis - A1 - A2 (cp) - A3 - A4 - A5 \ B1 - B2 - B3 - B4 - B5 @@ -260,16 +270,17 @@ let test_is_valid_checkpoint s = let header = State.Block.header block in (* let block_hash = State.Block.hash block in * let level = State.Block.level block in *) - State.Chain.set_checkpoint s.chain header >>= fun () -> - State.Chain.checkpoint s.chain >>= fun checkpoint_header -> + State.Chain.set_checkpoint s.chain header + >>= fun () -> + State.Chain.checkpoint s.chain + >>= fun checkpoint_header -> (* "b3" is valid because: a1 - a2 (checkpoint) - b1 - b2 - b3 it is not valid when the checkpoint change to a pick different than a2. 
*) - State.Block.is_valid_for_checkpoint (vblock s "B3") checkpoint_header >>= fun is_valid -> - if is_valid - then return_unit - else Assert.fail_msg "invalid checkpoint" + State.Block.is_valid_for_checkpoint (vblock s "B3") checkpoint_header + >>= fun is_valid -> + if is_valid then return_unit else Assert.fail_msg "invalid checkpoint" (* return a block with the best fitness amongst the known blocks which are compatible with the given checkpoint *) @@ -277,9 +288,12 @@ let test_is_valid_checkpoint s = let test_best_know_head_for_checkpoint s = let block = vblock s "A2" in let checkpoint = State.Block.header block in - State.Chain.set_checkpoint s.chain checkpoint >>= fun () -> - Chain.set_head s.chain (vblock s "B3") >>= fun _head -> - State.best_known_head_for_checkpoint s.chain checkpoint >>= fun _block -> + State.Chain.set_checkpoint s.chain checkpoint + >>= fun () -> + Chain.set_head s.chain (vblock s "B3") + >>= fun _head -> + State.best_known_head_for_checkpoint s.chain checkpoint + >>= fun _block -> (* the block returns with the best fitness is B3 at level 5 *) return_unit @@ -296,8 +310,10 @@ let test_future_checkpoint s = let block_hash = State.Block.hash block in let level = State.Block.level block in let header = State.Block.header block in - State.Chain.set_checkpoint s.chain header >>= fun () -> - State.Chain.checkpoint s.chain >>= fun checkpoint_header -> + State.Chain.set_checkpoint s.chain header + >>= fun () -> + State.Chain.checkpoint s.chain + >>= fun checkpoint_header -> let c_level = checkpoint_header.shell.level in let c_block = Block_header.hash checkpoint_header in if Int32.equal c_level level && not (Block_hash.equal c_block block_hash) @@ -324,18 +340,18 @@ let test_future_checkpoint_bad_good_block s = let block_hash = State.Block.hash block in let level = State.Block.level block in let header = State.Block.header block in - State.Chain.set_checkpoint s.chain header >>= fun () -> - State.Chain.checkpoint s.chain >>= fun checkpoint_header -> + State.Chain.set_checkpoint s.chain header + >>= fun () -> + State.Chain.checkpoint s.chain + >>= fun checkpoint_header -> let c_level = checkpoint_header.shell.level in let c_block = Block_header.hash checkpoint_header in if Int32.equal c_level level && not (Block_hash.equal c_block block_hash) then Assert.fail_msg "unexpected checkpoint" else - State.Block.is_valid_for_checkpoint - (vblock s "B2") checkpoint_header >>= fun is_valid -> - if is_valid - then return_unit - else Assert.fail_msg "invalid checkpoint" + State.Block.is_valid_for_checkpoint (vblock s "B2") checkpoint_header + >>= fun is_valid -> + if is_valid then return_unit else Assert.fail_msg "invalid checkpoint" (* check if the checkpoint can be reached @@ -346,53 +362,60 @@ let test_future_checkpoint_bad_good_block s = *) let test_reach_checkpoint s = - let mem s x = - Chain.mem s.chain (State.Block.hash @@ vblock s x) - in - let test_mem s x = mem s x >>= function - | true -> Lwt.return_unit - | false -> Assert.fail_msg "mem %s" x + let mem s x = Chain.mem s.chain (State.Block.hash @@ vblock s x) in + let test_mem s x = + mem s x + >>= function + | true -> Lwt.return_unit | false -> Assert.fail_msg "mem %s" x in let test_not_mem s x = - mem s x >>= function - | false -> Lwt.return_unit - | true -> Assert.fail_msg "not (mem %s)" x in + mem s x + >>= function + | false -> Lwt.return_unit | true -> Assert.fail_msg "not (mem %s)" x + in let block = vblock s "A1" in let block_hash = State.Block.hash block in let header = State.Block.header block in - 
State.Chain.set_checkpoint s.chain header >>= fun () -> - State.Chain.checkpoint s.chain >>= fun checkpoint_header -> - let time_now = (Time.System.to_protocol (Systime_os.now ())) in - if Time.Protocol.compare (Time.Protocol.add time_now 15L) header.shell.timestamp >= 0 + State.Chain.set_checkpoint s.chain header + >>= fun () -> + State.Chain.checkpoint s.chain + >>= fun checkpoint_header -> + let time_now = Time.System.to_protocol (Systime_os.now ()) in + if + Time.Protocol.compare + (Time.Protocol.add time_now 15L) + header.shell.timestamp + >= 0 then let checkpoint_hash = Block_header.hash checkpoint_header in - if Int32.equal header.shell.level checkpoint_header.shell.level && - not (Block_hash.equal checkpoint_hash block_hash) + if + Int32.equal header.shell.level checkpoint_header.shell.level + && not (Block_hash.equal checkpoint_hash block_hash) then Assert.fail_msg "checkpoint error" else - Chain.set_head s.chain (vblock s "A2") >>= fun _ -> - Chain.head s.chain >>= fun head -> + Chain.set_head s.chain (vblock s "A2") + >>= fun _ -> + Chain.head s.chain + >>= fun head -> let checkpoint_reached = (State.Block.header head).shell.level >= checkpoint_header.shell.level in - if checkpoint_reached - then + if checkpoint_reached then (* if reached the checkpoint, every block before the checkpoint must be the part of the chain *) - if header.shell.level <= checkpoint_header.shell.level - then - test_mem s "Genesis" >>= fun () -> - test_mem s "A1" >>= fun () -> - test_mem s "A2" >>= fun () -> - test_not_mem s "A3" >>= fun () -> - test_not_mem s "B1" >>= fun () -> - return_unit + if header.shell.level <= checkpoint_header.shell.level then + test_mem s "Genesis" + >>= fun () -> + test_mem s "A1" + >>= fun () -> + test_mem s "A2" + >>= fun () -> + test_not_mem s "A3" + >>= fun () -> test_not_mem s "B1" >>= fun () -> return_unit else Assert.fail_msg "checkpoint error" - else - Assert.fail_msg "checkpoint error" + else Assert.fail_msg "checkpoint error" else Assert.fail_msg "fail future block header" - (* Chain.Validator function may_update_checkpoint @@ -408,36 +431,44 @@ let test_reach_checkpoint s = *) let may_update_checkpoint chain_state new_head = - State.Chain.checkpoint chain_state >>= fun checkpoint_header -> + State.Chain.checkpoint chain_state + >>= fun checkpoint_header -> (* FIXME: the new level is always return 0l even if the new_head is A4 at level 4l Or TODO: set a level where allow to have a fork *) let old_level = checkpoint_header.shell.level in - State.Block.last_allowed_fork_level new_head >>=? fun new_level -> - if new_level <= old_level then - return_unit + State.Block.last_allowed_fork_level new_head + >>=? 
fun new_level -> + if new_level <= old_level then return_unit else let head_level = State.Block.level new_head in - State.Block.predecessor_n new_head - (Int32.to_int (Int32.sub head_level new_level)) >>= function - | None -> return @@ Assert.fail_msg "Unexpected None in predecessor query" - | Some hash -> - State.Block.read_opt chain_state hash >>= function - | None -> assert false + State.Block.predecessor_n + new_head + (Int32.to_int (Int32.sub head_level new_level)) + >>= function + | None -> + return @@ Assert.fail_msg "Unexpected None in predecessor query" + | Some hash -> ( + State.Block.read_opt chain_state hash + >>= function + | None -> + assert false | Some b -> - State.Chain.set_checkpoint chain_state (State.Block.header b) >>= fun () -> - return_unit + State.Chain.set_checkpoint chain_state (State.Block.header b) + >>= fun () -> return_unit ) let test_may_update_checkpoint s = let block = vblock s "A3" in let checkpoint = State.Block.header block in - State.Chain.set_checkpoint s.chain checkpoint >>= fun () -> - State.Chain.checkpoint s.chain >>= fun _ -> - Chain.set_head s.chain (vblock s "A4") >>= fun _ -> - Chain.head s.chain >>= fun head -> - may_update_checkpoint s.chain head >>=? fun () -> - return () + State.Chain.set_checkpoint s.chain checkpoint + >>= fun () -> + State.Chain.checkpoint s.chain + >>= fun _ -> + Chain.set_head s.chain (vblock s "A4") + >>= fun _ -> + Chain.head s.chain + >>= fun head -> may_update_checkpoint s.chain head >>=? fun () -> return () (* Check function may_update_checkpoint in Node.ml @@ -456,45 +487,45 @@ let note_may_update_checkpoint chain_state checkpoint = | None -> Lwt.return_unit | Some checkpoint -> - State.best_known_head_for_checkpoint - chain_state checkpoint >>= fun new_head -> - Chain.set_head chain_state new_head >>= fun _ -> - State.Chain.set_checkpoint chain_state checkpoint + State.best_known_head_for_checkpoint chain_state checkpoint + >>= fun new_head -> + Chain.set_head chain_state new_head + >>= fun _ -> State.Chain.set_checkpoint chain_state checkpoint let test_note_may_update_checkpoint s = (* set checkpoint at (2l, A2) *) let block = vblock s "A2" in let header = State.Block.header block in - State.Chain.set_checkpoint s.chain header >>= fun () -> + State.Chain.set_checkpoint s.chain header + >>= fun () -> (* set new checkpoint at (3l, A3) *) let block = vblock s "A3" in let checkpoint = State.Block.header block in - note_may_update_checkpoint s.chain (Some checkpoint) >>= fun () -> - return_unit + note_may_update_checkpoint s.chain (Some checkpoint) + >>= fun () -> return_unit (**********************************************************) -let tests: (string * (state -> unit tzresult Lwt.t)) list = [ - "basic checkpoint", test_basic_checkpoint; - "is valid checkpoint", test_is_valid_checkpoint; - "acceptable block", test_acceptable_block ; - "best know head", test_best_know_head_for_checkpoint; - "future checkpoint", test_future_checkpoint; - "future checkpoint bad/good block", test_future_checkpoint_bad_good_block; - "test_reach_checkpoint", test_reach_checkpoint; - "update checkpoint", test_may_update_checkpoint; - "update checkpoint in node", test_note_may_update_checkpoint; -] +let tests : (string * (state -> unit tzresult Lwt.t)) list = + [ ("basic checkpoint", test_basic_checkpoint); + ("is valid checkpoint", test_is_valid_checkpoint); + ("acceptable block", test_acceptable_block); + ("best know head", test_best_know_head_for_checkpoint); + ("future checkpoint", test_future_checkpoint); + ("future checkpoint 
bad/good block", test_future_checkpoint_bad_good_block); + ("test_reach_checkpoint", test_reach_checkpoint); + ("update checkpoint", test_may_update_checkpoint); + ("update checkpoint in node", test_note_may_update_checkpoint) ] let wrap (n, f) = - Alcotest_lwt.test_case n `Quick begin fun _ () -> - Lwt_utils_unix.with_tempdir "tezos_test_" begin fun dir -> - wrap_state_init f dir >>= function - | Ok () -> Lwt.return_unit - | Error error -> - Format.eprintf "WWW %a@." pp_print_error error ; - Lwt.fail Alcotest.Test_error - end - end + Alcotest_lwt.test_case n `Quick (fun _ () -> + Lwt_utils_unix.with_tempdir "tezos_test_" (fun dir -> + wrap_state_init f dir + >>= function + | Ok () -> + Lwt.return_unit + | Error error -> + Format.eprintf "WWW %a@." pp_print_error error ; + Lwt.fail Alcotest.Test_error)) let tests = List.map wrap tests diff --git a/src/lib_shell/test/test_store.ml b/src/lib_shell/test/test_store.ml index 796cd4dce49f82c69ebbb3432b7f31b9c7d502ca..15749944e45feb3a0a458a820aefddb0aca415f7 100644 --- a/src/lib_shell/test/test_store.ml +++ b/src/lib_shell/test/test_store.ml @@ -25,9 +25,11 @@ open Store -let (>>=) = Lwt.bind -let (>|=) = Lwt.(>|=) -let (//) = Filename.concat +let ( >>= ) = Lwt.bind + +let ( >|= ) = Lwt.( >|= ) + +let ( // ) = Filename.concat (** Basic blocks *) @@ -46,30 +48,36 @@ let genesis_time = Time.Protocol.of_seconds 0L let mapsize = 4_096_000_000L (* ~4 GiB *) let wrap_store_init f _ () = - Lwt_utils_unix.with_tempdir "tezos_test_" begin fun base_dir -> - let root = base_dir // "store" in - Store.init ~mapsize root >>= function - | Ok store -> - Lwt.finalize - (fun () -> f store) - (fun () -> Store.close store ; Lwt.return_unit) - | Error err -> - Format.kasprintf Pervasives.failwith - "@[Cannot initialize store:@ %a@]" pp_print_error err - end + Lwt_utils_unix.with_tempdir "tezos_test_" (fun base_dir -> + let root = base_dir // "store" in + Store.init ~mapsize root + >>= function + | Ok store -> + Lwt.finalize + (fun () -> f store) + (fun () -> Store.close store ; Lwt.return_unit) + | Error err -> + Format.kasprintf + Pervasives.failwith + "@[Cannot initialize store:@ %a@]" + pp_print_error + err) let wrap_raw_store_init f _ () = - Lwt_utils_unix.with_tempdir "tezos_test_" begin fun base_dir -> - let root = base_dir // "store" in - Raw_store.init ~mapsize root >>= function - | Ok store -> - Lwt.finalize - (fun () -> f store) - (fun () -> Raw_store.close store ; Lwt.return_unit) - | Error err -> - Format.kasprintf Pervasives.failwith - "@[Cannot initialize store:@ %a@]" pp_print_error err - end + Lwt_utils_unix.with_tempdir "tezos_test_" (fun base_dir -> + let root = base_dir // "store" in + Raw_store.init ~mapsize root + >>= function + | Ok store -> + Lwt.finalize + (fun () -> f store) + (fun () -> Raw_store.close store ; Lwt.return_unit) + | Error err -> + Format.kasprintf + Pervasives.failwith + "@[Cannot initialize store:@ %a@]" + pp_print_error + err) let test_init _ = Lwt.return_unit @@ -77,174 +85,236 @@ let chain_id = Chain_id.of_block_hash genesis_block (** Operation store *) -let make proto : Operation.t = - { shell = { branch = genesis_block } ; proto } +let make proto : Operation.t = {shell = {branch = genesis_block}; proto} let op1 = make (MBytes.of_string "Capadoce") + let oph1 = Operation.hash op1 + let op2 = make (MBytes.of_string "Kivu") -let oph2 = Operation.hash op2 +let oph2 = Operation.hash op2 (** Block store *) let lolblock ?(operations = []) header = let operations_hash = - Operation_list_list_hash.compute - 
[Operation_list_hash.compute operations] in + Operation_list_list_hash.compute [Operation_list_hash.compute operations] + in let block_header = { Block_header.shell = - { timestamp = Time.Protocol.of_seconds (Random.int64 1500L) ; - level = 0l ; (* dummy *) - proto_level = 0 ; (* dummy *) - validation_passes = Random.int 32 ; - predecessor = genesis_block ; operations_hash ; - fitness = [MBytes.of_string @@ string_of_int @@ String.length header; - MBytes.of_string @@ string_of_int @@ 12] ; - context = Context_hash.zero } ; - protocol_data = MBytes.of_string header ; } in + { timestamp = Time.Protocol.of_seconds (Random.int64 1500L); + level = 0l; + (* dummy *) + proto_level = 0; + (* dummy *) + validation_passes = Random.int 32; + predecessor = genesis_block; + operations_hash; + fitness = + [ MBytes.of_string @@ string_of_int @@ String.length header; + MBytes.of_string @@ string_of_int @@ 12 ]; + context = Context_hash.zero }; + protocol_data = MBytes.of_string header } + in let block_contents = - { header = block_header ; - Store.Block.metadata = MBytes.create 0 ; - max_operations_ttl = 0 ; - message = None ; - context = Context_hash.zero ; - last_allowed_fork_level = 0l ; - } in block_header, block_contents - -let (b1_header,b1_contents) as b1 = lolblock "Blop !" + { header = block_header; + Store.Block.metadata = MBytes.create 0; + max_operations_ttl = 0; + message = None; + context = Context_hash.zero; + last_allowed_fork_level = 0l } + in + (block_header, block_contents) + +let ((b1_header, b1_contents) as b1) = lolblock "Blop !" + let bh1 = Block_header.hash b1_header -let (b2_header,b2_contents) as b2 = lolblock "Tacatlopo" + +let ((b2_header, b2_contents) as b2) = lolblock "Tacatlopo" + let bh2 = Block_header.hash b2_header -let (b3_header,b3_contents) as b3 = lolblock ~operations:[oph1;oph2] "Persil" + +let ((b3_header, b3_contents) as b3) = + lolblock ~operations:[oph1; oph2] "Persil" + let bh3 = Block_header.hash b3_header + let bh3' = let raw = Bytes.of_string @@ Block_hash.to_string bh3 in Bytes.set raw 31 '\000' ; Bytes.set raw 30 '\000' ; Block_hash.of_string_exn @@ Bytes.to_string raw -let equal - (b1_header,b1_contents : Block_header.t * Store.Block.contents) - (b2_header,b2_contents : Block_header.t * Store.Block.contents) = - Block_header.equal b1_header b2_header && - b1_contents.message = b2_contents.message +let equal ((b1_header, b1_contents) : Block_header.t * Store.Block.contents) + ((b2_header, b2_contents) : Block_header.t * Store.Block.contents) = + Block_header.equal b1_header b2_header + && b1_contents.message = b2_contents.message let check_block s h b = - Store.Block.Contents.read (s, h) >>= function - | Ok bc' -> - begin - Store.Block.Pruned_contents.read (s, h) >>= function - | Ok { header } when equal b (header, bc') -> - Lwt.return_unit - | Ok _ -> - Format.eprintf - "Error while reading block %a\n%!" - Block_hash.pp_short h ; - exit 1 - | Error err -> - Format.eprintf "@[Error while reading block header %a:@ %a\n@]" - Block_hash.pp_short h - pp_print_error err ; - exit 1 - end + Store.Block.Contents.read (s, h) + >>= function + | Ok bc' -> ( + Store.Block.Pruned_contents.read (s, h) + >>= function + | Ok {header} when equal b (header, bc') -> + Lwt.return_unit + | Ok _ -> + Format.eprintf + "Error while reading block %a\n%!" 
+ Block_hash.pp_short + h ; + exit 1 + | Error err -> + Format.eprintf + "@[Error while reading block header %a:@ %a\n@]" + Block_hash.pp_short + h + pp_print_error + err ; + exit 1 ) | Error err -> - Format.eprintf "@[Error while reading block %a:@ %a\n@]" - Block_hash.pp_short h - pp_print_error err ; + Format.eprintf + "@[Error while reading block %a:@ %a\n@]" + Block_hash.pp_short + h + pp_print_error + err ; exit 1 let test_block s = let s = Store.Chain.get s chain_id in let s = Store.Block.get s in - Block.Contents.store (s, bh1) b1_contents >>= fun () -> - Block.Contents.store (s, bh2) b2_contents >>= fun () -> - Block.Contents.store (s, bh3) b3_contents >>= fun () -> - Block.Pruned_contents.store (s, bh1) { header = b1_header } >>= fun () -> - Block.Pruned_contents.store (s, bh2) { header = b2_header } >>= fun () -> - Block.Pruned_contents.store (s, bh3) { header = b3_header } >>= fun () -> - check_block s bh1 b1 >>= fun () -> - check_block s bh2 b2 >>= fun () -> - check_block s bh3 b3 + Block.Contents.store (s, bh1) b1_contents + >>= fun () -> + Block.Contents.store (s, bh2) b2_contents + >>= fun () -> + Block.Contents.store (s, bh3) b3_contents + >>= fun () -> + Block.Pruned_contents.store (s, bh1) {header = b1_header} + >>= fun () -> + Block.Pruned_contents.store (s, bh2) {header = b2_header} + >>= fun () -> + Block.Pruned_contents.store (s, bh3) {header = b3_header} + >>= fun () -> + check_block s bh1 b1 + >>= fun () -> check_block s bh2 b2 >>= fun () -> check_block s bh3 b3 let test_expand s = let s = Store.Chain.get s chain_id in let s = Store.Block.get s in - Block.Contents.store (s, bh1) b1_contents >>= fun () -> - Block.Contents.store (s, bh2) b2_contents >>= fun () -> - Block.Contents.store (s, bh3) b3_contents >>= fun () -> - Block.Contents.store (s, bh3') b3_contents >>= fun () -> - Block.Pruned_contents.store (s, bh1) { header = b1_header } >>= fun () -> - Block.Pruned_contents.store (s, bh2) { header = b2_header } >>= fun () -> - Block.Pruned_contents.store (s, bh3) { header = b3_header } >>= fun () -> - Block.Pruned_contents.store (s, bh3') { header = b3_header } >>= fun () -> - Base58.complete (Block_hash.to_short_b58check bh1) >>= fun res -> + Block.Contents.store (s, bh1) b1_contents + >>= fun () -> + Block.Contents.store (s, bh2) b2_contents + >>= fun () -> + Block.Contents.store (s, bh3) b3_contents + >>= fun () -> + Block.Contents.store (s, bh3') b3_contents + >>= fun () -> + Block.Pruned_contents.store (s, bh1) {header = b1_header} + >>= fun () -> + Block.Pruned_contents.store (s, bh2) {header = b2_header} + >>= fun () -> + Block.Pruned_contents.store (s, bh3) {header = b3_header} + >>= fun () -> + Block.Pruned_contents.store (s, bh3') {header = b3_header} + >>= fun () -> + Base58.complete (Block_hash.to_short_b58check bh1) + >>= fun res -> Assert.equal_string_list ~msg:__LOC__ res [Block_hash.to_b58check bh1] ; - Base58.complete (Block_hash.to_short_b58check bh2) >>= fun res -> + Base58.complete (Block_hash.to_short_b58check bh2) + >>= fun res -> Assert.equal_string_list ~msg:__LOC__ res [Block_hash.to_b58check bh2] ; - Base58.complete (Block_hash.to_short_b58check bh3) >>= fun res -> - Assert.equal_string_list ~msg:__LOC__ + Base58.complete (Block_hash.to_short_b58check bh3) + >>= fun res -> + Assert.equal_string_list + ~msg:__LOC__ (List.sort String.compare res) - [Block_hash.to_b58check bh3' ; Block_hash.to_b58check bh3] ; + [Block_hash.to_b58check bh3'; Block_hash.to_b58check bh3] ; Lwt.return_unit - (** Generic store *) -let check (type t) - (module 
Store: Store_sigs.STORE with type t = t) (s: Store.t) k d = - Store.read_opt s k >|= function - | Some d' when MBytes.equal d d' -> () +let check (type t) (module Store : Store_sigs.STORE with type t = t) + (s : Store.t) k d = + Store.read_opt s k + >|= function + | Some d' when MBytes.equal d d' -> + () | Some d' -> - Assert.fail_msg ~expected:(MBytes.to_string d) ~given:(MBytes.to_string d') + Assert.fail_msg + ~expected:(MBytes.to_string d) + ~given:(MBytes.to_string d') "Error while reading key %d %S\n%!" - Cstruct.(compare (of_bigarray d) (of_bigarray d')) (String.concat Filename.dir_sep k) + Cstruct.(compare (of_bigarray d) (of_bigarray d')) + (String.concat Filename.dir_sep k) | None -> - Assert.fail_msg ~expected:(MBytes.to_string d) ~given:"" - "Error while reading key %S\n%!" (String.concat Filename.dir_sep k) + Assert.fail_msg + ~expected:(MBytes.to_string d) + ~given:"" + "Error while reading key %S\n%!" + (String.concat Filename.dir_sep k) -let check_none (type t) - (module Store: Store_sigs.STORE with type t = t) (s: Store.t) k = - Store.read_opt s k >|= function - | None -> () +let check_none (type t) (module Store : Store_sigs.STORE with type t = t) + (s : Store.t) k = + Store.read_opt s k + >|= function + | None -> + () | Some _ -> Assert.fail_msg "Error while reading non-existent key %S\n%!" (String.concat Filename.dir_sep k) -let test_generic (type t) - (module Store: Store_sigs.STORE with type t = t) (s: Store.t) = - Store.store s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () -> - Store.store s ["day";"next"] (MBytes.of_string "Jeudi") >>= fun () -> - Store.store s ["day";"truc";"chose"] (MBytes.of_string "Vendredi") >>= fun () -> - check (module Store) s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () -> - check (module Store) s ["day";"next"] (MBytes.of_string "Jeudi") >>= fun () -> - check_none (module Store) s ["day"] - -let list (type t) - (module Store: Store_sigs.STORE with type t = t) (s: Store.t) k = +let test_generic (type t) (module Store : Store_sigs.STORE with type t = t) + (s : Store.t) = + Store.store s ["day"; "current"] (MBytes.of_string "Mercredi") + >>= fun () -> + Store.store s ["day"; "next"] (MBytes.of_string "Jeudi") + >>= fun () -> + Store.store s ["day"; "truc"; "chose"] (MBytes.of_string "Vendredi") + >>= fun () -> + check (module Store) s ["day"; "current"] (MBytes.of_string "Mercredi") + >>= fun () -> + check (module Store) s ["day"; "next"] (MBytes.of_string "Jeudi") + >>= fun () -> check_none (module Store) s ["day"] + +let list (type t) (module Store : Store_sigs.STORE with type t = t) + (s : Store.t) k = Store.keys s k let test_generic_list (type t) - (module Store: Store_sigs.STORE with type t = t) (s: Store.t) = - Store.store s ["a"; "b"] (MBytes.of_string "Novembre") >>= fun () -> - Store.store s ["a"; "c"] (MBytes.of_string "Juin") >>= fun () -> - Store.store s ["a"; "d"; "e"] (MBytes.of_string "Septembre") >>= fun () -> - Store.store s ["f";] (MBytes.of_string "Avril") >>= fun () -> - Store.store s ["g"; "h"] (MBytes.of_string "Avril") >>= fun () -> - list (module Store) s [] >>= fun l -> - Assert.equal_string_list_list ~msg:__LOC__ - [["a";"b"];["a";"c"];["a";"d";"e"];["f"];["g";"h"]] + (module Store : Store_sigs.STORE with type t = t) (s : Store.t) = + Store.store s ["a"; "b"] (MBytes.of_string "Novembre") + >>= fun () -> + Store.store s ["a"; "c"] (MBytes.of_string "Juin") + >>= fun () -> + Store.store s ["a"; "d"; "e"] (MBytes.of_string "Septembre") + >>= fun () -> + Store.store s ["f"] (MBytes.of_string 
"Avril") + >>= fun () -> + Store.store s ["g"; "h"] (MBytes.of_string "Avril") + >>= fun () -> + list (module Store) s [] + >>= fun l -> + Assert.equal_string_list_list + ~msg:__LOC__ + [["a"; "b"]; ["a"; "c"]; ["a"; "d"; "e"]; ["f"]; ["g"; "h"]] (List.sort compare l) ; - list (module Store) s ["a"] >>= fun l -> + list (module Store) s ["a"] + >>= fun l -> Assert.equal_string_list_list - ~msg:__LOC__ [["a";"b"]; ["a";"c"]; ["a";"d";"e"]] + ~msg:__LOC__ + [["a"; "b"]; ["a"; "c"]; ["a"; "d"; "e"]] (List.sort compare l) ; - list (module Store) s ["f"] >>= fun l -> + list (module Store) s ["f"] + >>= fun l -> Assert.equal_string_list_list ~msg:__LOC__ [] l ; - list (module Store) s ["g"] >>= fun l -> - Assert.equal_string_list_list ~msg:__LOC__ [["g";"h"]] (List.sort compare l) ; - list (module Store) s ["i"] >>= fun l -> + list (module Store) s ["g"] + >>= fun l -> + Assert.equal_string_list_list ~msg:__LOC__ [["g"; "h"]] (List.sort compare l) ; + list (module Store) s ["i"] + >>= fun l -> Assert.equal_string_list_list ~msg:__LOC__ [] l ; Lwt.return_unit @@ -252,222 +322,281 @@ let test_generic_list (type t) open Store_helpers -let test_hashset (type t) - (module Store: Store_sigs.STORE with type t = t) (s: Store.t) = +let test_hashset (type t) (module Store : Store_sigs.STORE with type t = t) + (s : Store.t) = let module BlockSet = Block_hash.Set in let module StoreSet = Make_buffered_set - (Make_substore(Store)(struct let name = ["test_set"] end)) - (Block_hash) - (BlockSet) in + (Make_substore + (Store) + (struct + let name = ["test_set"] + end)) + (Block_hash) + (BlockSet) + in let bhset = BlockSet.(add bh2 (add bh1 empty)) in - StoreSet.store_all s bhset >>= fun () -> - StoreSet.read_all s >>= fun bhset' -> + StoreSet.store_all s bhset + >>= fun () -> + StoreSet.read_all s + >>= fun bhset' -> Assert.equal_block_set ~msg:__LOC__ bhset bhset' ; let bhset2 = BlockSet.(bhset |> add bh3 |> remove bh1) in - StoreSet.store_all s bhset2 >>= fun () -> - StoreSet.read_all s >>= fun bhset2' -> + StoreSet.store_all s bhset2 + >>= fun () -> + StoreSet.read_all s + >>= fun bhset2' -> Assert.equal_block_set ~msg:__LOC__ bhset2 bhset2' ; - StoreSet.fold s ~init:BlockSet.empty - ~f:(fun bh acc -> Lwt.return (BlockSet.add bh acc)) >>= fun bhset2'' -> + StoreSet.fold s ~init:BlockSet.empty ~f:(fun bh acc -> + Lwt.return (BlockSet.add bh acc)) + >>= fun bhset2'' -> Assert.equal_block_set ~msg:__LOC__ bhset2 bhset2'' ; - Store.store s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () -> - StoreSet.remove_all s >>= fun () -> - StoreSet.read_all s >>= fun empty -> + Store.store s ["day"; "current"] (MBytes.of_string "Mercredi") + >>= fun () -> + StoreSet.remove_all s + >>= fun () -> + StoreSet.read_all s + >>= fun empty -> Assert.equal_block_set ~msg:__LOC__ BlockSet.empty empty ; - check (module Store) s ["day";"current"] (MBytes.of_string "Mercredi") >>= fun () -> - Lwt.return_unit - + check (module Store) s ["day"; "current"] (MBytes.of_string "Mercredi") + >>= fun () -> Lwt.return_unit (** HashMap *) -let test_hashmap (type t) - (module Store: Store_sigs.STORE with type t = t) (s: Store.t) = +let test_hashmap (type t) (module Store : Store_sigs.STORE with type t = t) + (s : Store.t) = let module BlockMap = Block_hash.Map in let module StoreMap = Make_buffered_map - (Make_substore(Store)(struct let name = ["test_map"] end)) - (Block_hash) - (Make_value(struct - type t = int * char - let encoding = - Data_encoding.(tup2 int31 (conv int_of_char char_of_int int8)) - end)) - (BlockMap) in - let eq = 
(=) in + (Make_substore + (Store) + (struct + let name = ["test_map"] + end)) + (Block_hash) + (Make_value (struct + type t = int * char + + let encoding = + Data_encoding.(tup2 int31 (conv int_of_char char_of_int int8)) + end)) + (BlockMap) + in + let eq = ( = ) in let map = BlockMap.(empty |> add bh1 (1, 'a') |> add bh2 (2, 'b')) in - StoreMap.store_all s map >>= fun () -> - StoreMap.read_all s >>= fun map' -> + StoreMap.store_all s map + >>= fun () -> + StoreMap.read_all s + >>= fun map' -> Assert.equal_block_map ~msg:__LOC__ ~eq map map' ; let map2 = map |> BlockMap.add bh3 (3, 'c') |> BlockMap.remove bh1 in - StoreMap.store_all s map2 >>= fun () -> - StoreMap.read_all s >>= fun map2' -> + StoreMap.store_all s map2 + >>= fun () -> + StoreMap.read_all s + >>= fun map2' -> Assert.equal_block_map ~msg:__LOC__ ~eq map2 map2' ; Lwt.return_unit (** Functors *) -let test_single (type t) - (module Store: Store_sigs.STORE with type t = t) (s: Store.t) = +let test_single (type t) (module Store : Store_sigs.STORE with type t = t) + (s : Store.t) = let module Single = Make_single_store (Store) - (struct let name = ["plop"] end) - (Make_value(struct - type t = int * string - let encoding = Data_encoding.(tup2 int31 string) - end)) in - Single.known s >>= fun known -> + (struct + let name = ["plop"] + end) + (Make_value (struct + type t = int * string + + let encoding = Data_encoding.(tup2 int31 string) + end)) + in + Single.known s + >>= fun known -> Assert.is_false ~msg:__LOC__ known ; - Single.read_opt s >>= fun v' -> + Single.read_opt s + >>= fun v' -> Assert.equal ~msg:__LOC__ None v' ; let v = (3, "Non!") in - Single.store s v >>= fun () -> - Single.known s >>= fun known -> + Single.store s v + >>= fun () -> + Single.known s + >>= fun known -> Assert.is_true ~msg:__LOC__ known ; - Single.read_opt s >>= fun v' -> + Single.read_opt s + >>= fun v' -> Assert.equal ~msg:__LOC__ (Some v) v' ; - Single.remove s >>= fun () -> - Single.known s >>= fun known -> + Single.remove s + >>= fun () -> + Single.known s + >>= fun known -> Assert.is_false ~msg:__LOC__ known ; - Single.read_opt s >>= fun v' -> + Single.read_opt s + >>= fun v' -> Assert.equal ~msg:__LOC__ None v' ; Lwt.return_unit module Sub = - Make_substore(Raw_store)(struct let name = ["plop";"plip"] end) + Make_substore + (Raw_store) + (struct + let name = ["plop"; "plip"] + end) module SubBlocks = Make_indexed_substore - (Make_substore(Raw_store)(struct let name = ["blocks"] end)) - (Block_hash) + (Make_substore + (Raw_store) + (struct + let name = ["blocks"] + end)) + (Block_hash) module SubBlocksSet = SubBlocks.Make_buffered_set - (struct let name = ["test_set"] end) + (struct + let name = ["test_set"] + end) (Block_hash.Set) module SubBlocksMap = SubBlocks.Make_buffered_map - (struct let name = ["test_map"] end) - (Make_value(struct - type t = int * string - let encoding = Data_encoding.(tup2 int31 string) - end)) + (struct + let name = ["test_map"] + end) + (Make_value (struct + type t = int * string + + let encoding = Data_encoding.(tup2 int31 string) + end)) (Block_hash.Map) let test_subblock s = - SubBlocksSet.known s bh1 >>= fun known -> + SubBlocksSet.known s bh1 + >>= fun known -> Assert.is_false ~msg:__LOC__ known ; - SubBlocksSet.store s bh1 >>= fun () -> - SubBlocksSet.store s bh2 >>= fun () -> - SubBlocksSet.known s bh2 >>= fun known -> + SubBlocksSet.store s bh1 + >>= fun () -> + SubBlocksSet.store s bh2 + >>= fun () -> + SubBlocksSet.known s bh2 + >>= fun known -> Assert.is_true ~msg:__LOC__ known ; - SubBlocksSet.read_all s 
>>= fun set -> + SubBlocksSet.read_all s + >>= fun set -> let set' = Block_hash.Set.(empty |> add bh1 |> add bh2) in Assert.equal_block_set ~msg:__LOC__ set set' ; - SubBlocksSet.remove s bh2 >>= fun () -> + SubBlocksSet.remove s bh2 + >>= fun () -> let set = Block_hash.Set.(empty |> add bh3' |> add bh3) in - SubBlocksSet.store_all s set >>= fun () -> - SubBlocksSet.elements s >>= fun elts -> - Assert.equal_block_hash_list ~msg:__LOC__ + SubBlocksSet.store_all s set + >>= fun () -> + SubBlocksSet.elements s + >>= fun elts -> + Assert.equal_block_hash_list + ~msg:__LOC__ (List.sort Block_hash.compare elts) - (List.sort Block_hash.compare [bh3 ; bh3']) ; - SubBlocksSet.store s bh2 >>= fun () -> - SubBlocksSet.remove s bh3 >>= fun () -> - SubBlocksSet.elements s >>= fun elts -> - Assert.equal_block_hash_list ~msg:__LOC__ + (List.sort Block_hash.compare [bh3; bh3']) ; + SubBlocksSet.store s bh2 + >>= fun () -> + SubBlocksSet.remove s bh3 + >>= fun () -> + SubBlocksSet.elements s + >>= fun elts -> + Assert.equal_block_hash_list + ~msg:__LOC__ (List.sort Block_hash.compare elts) - (List.sort Block_hash.compare [bh2 ; bh3']) ; - SubBlocksMap.known s bh1 >>= fun known -> + (List.sort Block_hash.compare [bh2; bh3']) ; + SubBlocksMap.known s bh1 + >>= fun known -> Assert.is_false ~msg:__LOC__ known ; - let v1 = (3, "Non!") - and v2 = (12, "Beurk.") in - SubBlocksMap.store s bh1 v1 >>= fun () -> - SubBlocksMap.store s bh2 v2 >>= fun () -> - SubBlocksMap.known s bh1 >>= fun known -> - SubBlocksMap.read_opt s bh1 >>= fun v1' -> + let v1 = (3, "Non!") and v2 = (12, "Beurk.") in + SubBlocksMap.store s bh1 v1 + >>= fun () -> + SubBlocksMap.store s bh2 v2 + >>= fun () -> + SubBlocksMap.known s bh1 + >>= fun known -> + SubBlocksMap.read_opt s bh1 + >>= fun v1' -> Assert.equal ~msg:__LOC__ (Some v1) v1' ; Assert.is_true ~msg:__LOC__ known ; let map = Block_hash.Map.(empty |> add bh1 v1 |> add bh2 v2) in - SubBlocksMap.read_all s >>= fun map' -> - Assert.equal_block_map ~eq:(=) ~msg:__LOC__ map map' ; - - SubBlocksSet.remove_all s >>= fun () -> - SubBlocksSet.elements s >>= fun elts -> + SubBlocksMap.read_all s + >>= fun map' -> + Assert.equal_block_map ~eq:( = ) ~msg:__LOC__ map map' ; + SubBlocksSet.remove_all s + >>= fun () -> + SubBlocksSet.elements s + >>= fun elts -> Assert.equal_block_hash_list ~msg:__LOC__ elts [] ; - - SubBlocksMap.read_all s >>= fun map' -> - Assert.equal_block_map ~eq:(=) ~msg:__LOC__ map map' ; - - SubBlocksSet.store s bh3 >>= fun () -> - - SubBlocks.indexes s >>= fun keys -> - Assert.equal_block_hash_list ~msg:__LOC__ + SubBlocksMap.read_all s + >>= fun map' -> + Assert.equal_block_map ~eq:( = ) ~msg:__LOC__ map map' ; + SubBlocksSet.store s bh3 + >>= fun () -> + SubBlocks.indexes s + >>= fun keys -> + Assert.equal_block_hash_list + ~msg:__LOC__ (List.sort Block_hash.compare keys) - (List.sort Block_hash.compare [bh1;bh2;bh3]) ; - + (List.sort Block_hash.compare [bh1; bh2; bh3]) ; Lwt.return_unit module SubSubBlocks = Make_indexed_substore - (Make_substore(SubBlocks.Store)(struct let name = ["sub_blocks"] end)) - (Block_hash) + (Make_substore + (SubBlocks.Store) + (struct + let name = ["sub_blocks"] + end)) + (Block_hash) (** *) -let tests_raw : (string * (Raw_store.t -> unit Lwt.t)) list = [ - - "init", test_init ; - - "generic", test_generic (module Raw_store) ; - "generic_substore", test_generic (module Sub) ; - "generic_indexedstore", - (fun s -> test_generic (module SubBlocks.Store) (s, bh1)) ; - "generic_indexedsubstore", - (fun s -> test_generic (module SubSubBlocks.Store) 
((s, bh1), bh2)) ; - - "single", test_single (module Raw_store) ; - "single_substore", test_single (module Sub) ; - "single_indexedstore", - (fun s -> test_single (module SubBlocks.Store) (s, bh1)) ; - "single_indexedsubstore", - (fun s -> test_single (module SubSubBlocks.Store) ((s, bh1), bh2)) ; - - "generic_list", test_generic_list (module Raw_store); - "generic_substore_list", test_generic_list (module Sub); - "generic_indexedstore_list", - (fun s -> test_generic_list (module SubBlocks.Store) (s, bh1)); - "generic_indexedsubstore_list", - (fun s -> test_generic_list (module SubSubBlocks.Store) ((s, bh1), bh2)) ; - - "hashset", test_hashset (module Raw_store) ; - "hashset_substore", test_hashset (module Sub) ; - "hashset_indexedstore", - (fun s -> test_hashset (module SubBlocks.Store) (s, bh1)); - "hashset_indexedsubstore", - (fun s -> test_hashset (module SubSubBlocks.Store) ((s, bh1), bh2)) ; - - "hashmap", test_hashmap (module Raw_store) ; - "hashmap_substore", test_hashmap (module Sub) ; - "hashmap_indexedstore", - (fun s -> test_hashmap (module SubBlocks.Store) (s, bh1)); - "hashmap_indexedsubstore", - (fun s -> test_hashmap (module SubSubBlocks.Store) ((s, bh1), bh2)) ; - - "subblock", test_subblock ; - -] - -let tests : (string * (Store.t -> unit Lwt.t)) list = [ - "expand", test_expand ; - "block", test_block ; -] +let tests_raw : (string * (Raw_store.t -> unit Lwt.t)) list = + [ ("init", test_init); + ("generic", test_generic (module Raw_store)); + ("generic_substore", test_generic (module Sub)); + ( "generic_indexedstore", + fun s -> test_generic (module SubBlocks.Store) (s, bh1) ); + ( "generic_indexedsubstore", + fun s -> test_generic (module SubSubBlocks.Store) ((s, bh1), bh2) ); + ("single", test_single (module Raw_store)); + ("single_substore", test_single (module Sub)); + ( "single_indexedstore", + fun s -> test_single (module SubBlocks.Store) (s, bh1) ); + ( "single_indexedsubstore", + fun s -> test_single (module SubSubBlocks.Store) ((s, bh1), bh2) ); + ("generic_list", test_generic_list (module Raw_store)); + ("generic_substore_list", test_generic_list (module Sub)); + ( "generic_indexedstore_list", + fun s -> test_generic_list (module SubBlocks.Store) (s, bh1) ); + ( "generic_indexedsubstore_list", + fun s -> test_generic_list (module SubSubBlocks.Store) ((s, bh1), bh2) ); + ("hashset", test_hashset (module Raw_store)); + ("hashset_substore", test_hashset (module Sub)); + ( "hashset_indexedstore", + fun s -> test_hashset (module SubBlocks.Store) (s, bh1) ); + ( "hashset_indexedsubstore", + fun s -> test_hashset (module SubSubBlocks.Store) ((s, bh1), bh2) ); + ("hashmap", test_hashmap (module Raw_store)); + ("hashmap_substore", test_hashmap (module Sub)); + ( "hashmap_indexedstore", + fun s -> test_hashmap (module SubBlocks.Store) (s, bh1) ); + ( "hashmap_indexedsubstore", + fun s -> test_hashmap (module SubSubBlocks.Store) ((s, bh1), bh2) ); + ("subblock", test_subblock) ] + +let tests : (string * (Store.t -> unit Lwt.t)) list = + [("expand", test_expand); ("block", test_block)] let tests = List.map (fun (s, f) -> Alcotest_lwt.test_case s `Quick (wrap_raw_store_init f)) - tests_raw @ - List.map - (fun (s, f) -> Alcotest_lwt.test_case s `Quick (wrap_store_init f)) - tests + tests_raw + @ List.map + (fun (s, f) -> Alcotest_lwt.test_case s `Quick (wrap_store_init f)) + tests diff --git a/src/lib_shell/test/test_store_checkpoint.ml b/src/lib_shell/test/test_store_checkpoint.ml index 31761a63ac5a77509ad986a46f3c18fb212ef67e..51c67c1ce9f1768f3b8a57b201dc9043eb189cf5 100644 
--- a/src/lib_shell/test/test_store_checkpoint.ml +++ b/src/lib_shell/test/test_store_checkpoint.ml @@ -25,20 +25,23 @@ let mapsize = 4_096_000_000L (* ~4 GiB *) -let (//) = Filename.concat +let ( // ) = Filename.concat let wrap_raw_store_init f _ () = - Lwt_utils_unix.with_tempdir "tezos_test_" begin fun base_dir -> - let root = base_dir // "store" in - Raw_store.init ~mapsize root >>= function - | Ok store -> - Lwt.finalize - (fun () -> f store) - (fun () -> Raw_store.close store ; Lwt.return_unit) - | Error err -> - Format.kasprintf Pervasives.failwith - "@[Cannot initialize store:@ %a@]" pp_print_error err - end + Lwt_utils_unix.with_tempdir "tezos_test_" (fun base_dir -> + let root = base_dir // "store" in + Raw_store.init ~mapsize root + >>= function + | Ok store -> + Lwt.finalize + (fun () -> f store) + (fun () -> Raw_store.close store ; Lwt.return_unit) + | Error err -> + Format.kasprintf + Pervasives.failwith + "@[Cannot initialize store:@ %a@]" + pp_print_error + err) (**************************************************************************) (** Basic blocks *) @@ -52,76 +55,90 @@ let genesis_block = let lolblock ?(operations = []) header = let operations_hash = - Operation_list_list_hash.compute - [Operation_list_hash.compute operations] in + Operation_list_list_hash.compute [Operation_list_hash.compute operations] + in let block_header = { Block_header.shell = - { timestamp = Time.Protocol.of_seconds (Random.int64 1500L) ; - level = 0l ; (* dummy *) - proto_level = 0 ; (* dummy *) - validation_passes = Random.int 32 ; - predecessor = genesis_block ; operations_hash ; - fitness = [MBytes.of_string @@ string_of_int @@ String.length header; - MBytes.of_string @@ string_of_int @@ 12] ; - context = Context_hash.zero } ; - protocol_data = MBytes.of_string header ; } in + { timestamp = Time.Protocol.of_seconds (Random.int64 1500L); + level = 0l; + (* dummy *) + proto_level = 0; + (* dummy *) + validation_passes = Random.int 32; + predecessor = genesis_block; + operations_hash; + fitness = + [ MBytes.of_string @@ string_of_int @@ String.length header; + MBytes.of_string @@ string_of_int @@ 12 ]; + context = Context_hash.zero }; + protocol_data = MBytes.of_string header } + in let block_contents = - { header = block_header ; - Store.Block.metadata = MBytes.create 0 ; - max_operations_ttl = 0 ; - message = None ; - context = Context_hash.zero ; - last_allowed_fork_level = 0l ; - } in block_header, block_contents + { header = block_header; + Store.Block.metadata = MBytes.create 0; + max_operations_ttl = 0; + message = None; + context = Context_hash.zero; + last_allowed_fork_level = 0l } + in + (block_header, block_contents) +let (block_header, _) = lolblock "A1" -let (block_header,_) = lolblock "A1" let block_hash = Block_header.hash block_header (****************************************************) open Store_helpers -let test_single (type t) - (module Store:Store_sigs.STORE with type t = t) (s: Store.t) = +let test_single (type t) (module Store : Store_sigs.STORE with type t = t) + (s : Store.t) = let module Single = Make_single_store (Store) - (struct let name = ["checkpoint"] end) + (struct + let name = ["checkpoint"] + end) (Store_helpers.Make_value (struct - type t = Int32.t * Block_hash.t - let encoding = Data_encoding.(tup2 int32 Block_hash.encoding) - end - )) + type t = Int32.t * Block_hash.t + + let encoding = Data_encoding.(tup2 int32 Block_hash.encoding) + end)) in (* is there any checkpoint in store *) - Single.known s >>= fun is_known -> - Assert.is_false ~msg:__LOC__ 
is_known; - Single.read_opt s >>= fun checkpoint' -> - Assert.equal_checkpoint ~msg:__LOC__ None checkpoint'; + Single.known s + >>= fun is_known -> + Assert.is_false ~msg:__LOC__ is_known ; + Single.read_opt s + >>= fun checkpoint' -> + Assert.equal_checkpoint ~msg:__LOC__ None checkpoint' ; (* store new checkpoint: (1, A1) *) - let checkpoint = (1l, block_hash) in - Single.store s checkpoint >>= fun () -> - Single.known s >>= fun is_known -> - Assert.is_true ~msg:__LOC__ is_known; - Single.read_opt s >>= fun checkpoint' -> - Assert.equal_checkpoint ~msg:__LOC__ (Some checkpoint) checkpoint'; + let checkpoint = (1l, block_hash) in + Single.store s checkpoint + >>= fun () -> + Single.known s + >>= fun is_known -> + Assert.is_true ~msg:__LOC__ is_known ; + Single.read_opt s + >>= fun checkpoint' -> + Assert.equal_checkpoint ~msg:__LOC__ (Some checkpoint) checkpoint' ; (* remove the checkpoint just store *) - Single.remove s >>= fun () -> - Single.known s >>= fun is_known -> - Assert.is_false ~msg:__LOC__ is_known; - Single.read_opt s >>= fun checkpoint' -> - Assert.equal_checkpoint ~msg:__LOC__ None checkpoint'; + Single.remove s + >>= fun () -> + Single.known s + >>= fun is_known -> + Assert.is_false ~msg:__LOC__ is_known ; + Single.read_opt s + >>= fun checkpoint' -> + Assert.equal_checkpoint ~msg:__LOC__ None checkpoint' ; Lwt.return_unit (**************************************************************************) let tests_raw : (string * (Raw_store.t -> unit Lwt.t)) list = - [ - "single", test_single (module Raw_store) - - ] + [("single", test_single (module Raw_store))] let tests = - List.map (fun (s, f) -> Alcotest_lwt.test_case s `Quick (wrap_raw_store_init f)) + List.map + (fun (s, f) -> Alcotest_lwt.test_case s `Quick (wrap_raw_store_init f)) tests_raw diff --git a/src/lib_shell/validator.ml b/src/lib_shell/validator.ml index 27285a0456671683837d1b1e5287d84b5e1f5135..06f974fb22541ecff287cf6df84e4a879cbdf02a 100644 --- a/src/lib_shell/validator.ml +++ b/src/lib_shell/validator.ml @@ -24,155 +24,174 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make_semantic(struct let name = "node.validator" end) +include Internal_event.Legacy_logging.Make_semantic (struct + let name = "node.validator" +end) type t = { - - state: State.t ; - db: Distributed_db.t ; - block_validator: Block_validator.t ; - chain_validator_limits: Chain_validator.limits ; - peer_validator_limits: Peer_validator.limits ; - block_validator_limits: Block_validator.limits ; - prevalidator_limits: Prevalidator.limits ; - start_testchain: bool ; - - valid_block_input: State.Block.t Lwt_watcher.input ; - - chains_input: (Chain_id.t * bool) Lwt_watcher.input ; - active_chains: Chain_validator.t Chain_id.Table.t ; - + state : State.t; + db : Distributed_db.t; + block_validator : Block_validator.t; + chain_validator_limits : Chain_validator.limits; + peer_validator_limits : Peer_validator.limits; + block_validator_limits : Block_validator.limits; + prevalidator_limits : Prevalidator.limits; + start_testchain : bool; + valid_block_input : State.Block.t Lwt_watcher.input; + chains_input : (Chain_id.t * bool) Lwt_watcher.input; + active_chains : Chain_validator.t Chain_id.Table.t } -let create state db - peer_validator_limits +let create state db peer_validator_limits block_validator_limits + block_validator_kind prevalidator_limits chain_validator_limits + ~start_testchain = + Block_validator.create block_validator_limits + db block_validator_kind - 
prevalidator_limits - chain_validator_limits ~start_testchain - = - Block_validator.create block_validator_limits db block_validator_kind ~start_testchain >>=? fun block_validator -> + >>=? fun block_validator -> let valid_block_input = Lwt_watcher.create_input () in let chains_input = Lwt_watcher.create_input () in return - { state ; db ; - start_testchain ; - block_validator ; - block_validator_limits ; prevalidator_limits ; - peer_validator_limits ; chain_validator_limits ; - valid_block_input ; - chains_input ; + { state; + db; + start_testchain; + block_validator; + block_validator_limits; + prevalidator_limits; + peer_validator_limits; + chain_validator_limits; + valid_block_input; + chains_input; active_chains = Chain_id.Table.create 7 } -let activate v ?max_child_ttl - ~start_prevalidator - chain_state = +let activate v ?max_child_ttl ~start_prevalidator chain_state = let chain_id = State.Chain.id chain_state in - lwt_log_notice Tag.DSL.(fun f -> - f "activate chain %a" - -% t event "active_chain" - -% a State_logging.chain_id chain_id) >>= fun () -> + lwt_log_notice + Tag.DSL.( + fun f -> + f "activate chain %a" -% t event "active_chain" + -% a State_logging.chain_id chain_id) + >>= fun () -> match Chain_id.Table.find_opt v.active_chains chain_id with - | Some chain -> return chain + | Some chain -> + return chain | None -> Chain_validator.create ?max_child_ttl ~start_prevalidator ~start_testchain:v.start_testchain ~active_chains:v.active_chains - v.peer_validator_limits v.prevalidator_limits + v.peer_validator_limits + v.prevalidator_limits v.block_validator v.valid_block_input v.chains_input - v.db chain_state + v.db + chain_state v.chain_validator_limits -let get_exn { active_chains ; _ } chain_id = +let get_exn {active_chains; _} chain_id = Chain_id.Table.find active_chains chain_id -let get { active_chains ; _ } chain_id = +let get {active_chains; _} chain_id = match Chain_id.Table.find_opt active_chains chain_id with - |Some nv -> Ok nv - |None -> error (Validation_errors.Inactive_chain chain_id) + | Some nv -> + Ok nv + | None -> + error (Validation_errors.Inactive_chain chain_id) -let get_active_chains { active_chains ; _ } = +let get_active_chains {active_chains; _} = let l = Chain_id.Table.fold (fun c _ acc -> c :: acc) active_chains [] in List.rev l let validate_block v ?(force = false) ?chain_id bytes operations = let hash = Block_hash.hash_bytes [bytes] in match Block_header.of_bytes bytes with - | None -> failwith "Cannot parse block header." + | None -> + failwith "Cannot parse block header." | Some block -> - begin - match chain_id with - | None -> begin - Distributed_db.read_block_header - v.db block.shell.predecessor >>= function - | None -> - failwith "Unknown predecessor (%a), cannot inject the block." - Block_hash.pp_short block.shell.predecessor - | Some (chain_id, _bh) -> Lwt.return (get v chain_id) - end - | Some chain_id -> - Lwt.return (get v chain_id) >>=? fun nv -> - if force then - return nv - else - Distributed_db.Block_header.known - (Chain_validator.chain_db nv) - block.shell.predecessor >>= function - | true -> - return nv - | false -> - failwith "Unknown predecessor (%a), cannot inject the block." - Block_hash.pp_short block.shell.predecessor - end >>=? fun nv -> + ( match chain_id with + | None -> ( + Distributed_db.read_block_header v.db block.shell.predecessor + >>= function + | None -> + failwith + "Unknown predecessor (%a), cannot inject the block." 
+ Block_hash.pp_short + block.shell.predecessor + | Some (chain_id, _bh) -> + Lwt.return (get v chain_id) ) + | Some chain_id -> ( + Lwt.return (get v chain_id) + >>=? fun nv -> + if force then return nv + else + Distributed_db.Block_header.known + (Chain_validator.chain_db nv) + block.shell.predecessor + >>= function + | true -> + return nv + | false -> + failwith + "Unknown predecessor (%a), cannot inject the block." + Block_hash.pp_short + block.shell.predecessor ) ) + >>=? fun nv -> let validation = - Chain_validator.validate_block nv ~force hash block operations in + Chain_validator.validate_block nv ~force hash block operations + in return (hash, validation) -let shutdown { active_chains ; block_validator ; _ } = +let shutdown {active_chains; block_validator; _} = let jobs = - Block_validator.shutdown block_validator :: - Chain_id.Table.fold - (fun _ nv acc -> Chain_validator.shutdown nv :: acc) - active_chains [] in - Lwt.join jobs >>= fun () -> - Lwt.return_unit - -let watcher { valid_block_input ; _ } = + Block_validator.shutdown block_validator + :: Chain_id.Table.fold + (fun _ nv acc -> Chain_validator.shutdown nv :: acc) + active_chains + [] + in + Lwt.join jobs >>= fun () -> Lwt.return_unit + +let watcher {valid_block_input; _} = Lwt_watcher.create_stream valid_block_input -let chains_watcher { chains_input ; _ } = - Lwt_watcher.create_stream chains_input +let chains_watcher {chains_input; _} = Lwt_watcher.create_stream chains_input let inject_operation v ?chain_id op = - begin - match chain_id with - | None -> begin - Distributed_db.read_block_header - v.db op.Operation.shell.branch >>= function - | None -> - failwith "Unknown branch (%a), cannot inject the operation." - Block_hash.pp_short op.shell.branch - | Some (chain_id, _bh) -> Lwt.return (get v chain_id) - end - | Some chain_id -> - Lwt.return (get v chain_id) >>=? fun nv -> - Distributed_db.Block_header.known - (Chain_validator.chain_db nv) - op.shell.branch >>= function - | true -> - return nv - | false -> - failwith "Unknown branch (%a), cannot inject the operation." - Block_hash.pp_short op.shell.branch - end >>=? fun nv -> + ( match chain_id with + | None -> ( + Distributed_db.read_block_header v.db op.Operation.shell.branch + >>= function + | None -> + failwith + "Unknown branch (%a), cannot inject the operation." + Block_hash.pp_short + op.shell.branch + | Some (chain_id, _bh) -> + Lwt.return (get v chain_id) ) + | Some chain_id -> ( + Lwt.return (get v chain_id) + >>=? fun nv -> + Distributed_db.Block_header.known + (Chain_validator.chain_db nv) + op.shell.branch + >>= function + | true -> + return nv + | false -> + failwith + "Unknown branch (%a), cannot inject the operation." + Block_hash.pp_short + op.shell.branch ) ) + >>=? fun nv -> let pv_opt = Chain_validator.prevalidator nv in match pv_opt with - | Some pv -> Prevalidator.inject_operation pv op - | None -> failwith "Prevalidator is not running, cannot inject the operation." + | Some pv -> + Prevalidator.inject_operation pv op + | None -> + failwith "Prevalidator is not running, cannot inject the operation." 
-let distributed_db { db ; _ } = db +let distributed_db {db; _} = db diff --git a/src/lib_shell/validator.mli b/src/lib_shell/validator.mli index 56709b57645565ed6541f2b4cab398e3578b2336..53f0a177a43cef0b331eaa4a43412fb0fbd0a73a 100644 --- a/src/lib_shell/validator.mli +++ b/src/lib_shell/validator.mli @@ -28,7 +28,7 @@ type t -val create: +val create : State.t -> Distributed_db.t -> Peer_validator.limits -> @@ -38,34 +38,39 @@ val create: Chain_validator.limits -> start_testchain:bool -> t tzresult Lwt.t -val shutdown: t -> unit Lwt.t + +val shutdown : t -> unit Lwt.t (** Start the validation scheduler of a given chain. *) -val activate: +val activate : t -> ?max_child_ttl:int -> start_prevalidator:bool -> - State.Chain.t -> Chain_validator.t tzresult Lwt.t + State.Chain.t -> + Chain_validator.t tzresult Lwt.t + +val get : t -> Chain_id.t -> Chain_validator.t tzresult + +val get_exn : t -> Chain_id.t -> Chain_validator.t -val get: t -> Chain_id.t -> Chain_validator.t tzresult -val get_exn: t -> Chain_id.t -> Chain_validator.t -val get_active_chains: t -> Chain_id.t list +val get_active_chains : t -> Chain_id.t list (** Force the validation of a block. *) -val validate_block: +val validate_block : t -> ?force:bool -> ?chain_id:Chain_id.t -> - MBytes.t -> Operation.t list list -> + MBytes.t -> + Operation.t list list -> (Block_hash.t * State.Block.t option tzresult Lwt.t) tzresult Lwt.t (** Monitor all the valid block (for all activate chains). *) -val watcher: t -> State.Block.t Lwt_stream.t * Lwt_watcher.stopper -val chains_watcher: t -> (Chain_id.t * bool) Lwt_stream.t * Lwt_watcher.stopper +val watcher : t -> State.Block.t Lwt_stream.t * Lwt_watcher.stopper -val inject_operation: - t -> - ?chain_id:Chain_id.t -> - Operation.t -> unit tzresult Lwt.t +val chains_watcher : + t -> (Chain_id.t * bool) Lwt_stream.t * Lwt_watcher.stopper + +val inject_operation : + t -> ?chain_id:Chain_id.t -> Operation.t -> unit tzresult Lwt.t -val distributed_db: t -> Distributed_db.t +val distributed_db : t -> Distributed_db.t diff --git a/src/lib_shell/worker.ml b/src/lib_shell/worker.ml index 92fd6774a7781ee2417e358d7053ea8e28e1c423..0d0d6badbb3a37cdff79e75c114429d5aced0d2e 100644 --- a/src/lib_shell/worker.ml +++ b/src/lib_shell/worker.ml @@ -26,8 +26,11 @@ module type NAME = sig val base : string list + type t + val encoding : t Data_encoding.t + val pp : Format.formatter -> t -> unit end @@ -35,62 +38,69 @@ module type EVENT = sig type t val level : t -> Internal_event.level + val encoding : t Data_encoding.t + val pp : Format.formatter -> t -> unit end module type REQUEST = sig type 'a t + type view val view : 'a t -> view + val encoding : view Data_encoding.t + val pp : Format.formatter -> view -> unit end module type TYPES = sig type state + type parameters + type view val view : state -> parameters -> view + val encoding : view Data_encoding.t + val pp : Format.formatter -> view -> unit end (** An error returned when trying to communicate with a worker that has been closed.*) -type worker_name = {base: string; name:string} +type worker_name = {base : string; name : string} + type Error_monad.error += Closed of worker_name let () = - register_error_kind `Permanent - ~id:("worker.closed") - ~title:("Worker closed") + register_error_kind + `Permanent + ~id:"worker.closed" + ~title:"Worker closed" ~description: - ("An operation on a worker could not complete \ - before it was shut down.") - ~pp: (fun ppf w -> - Format.fprintf ppf - "Worker %s[%s] has been shut down." 
- w.base w.name) + "An operation on a worker could not complete before it was shut down." + ~pp:(fun ppf w -> + Format.fprintf ppf "Worker %s[%s] has been shut down." w.base w.name) Data_encoding.( conv - (fun { base ; name } -> (base,name)) - (fun (name,base) -> { base ; name }) - (obj1 - (req "worker" (tup2 string string)) - ) - ) + (fun {base; name} -> (base, name)) + (fun (name, base) -> {base; name}) + (obj1 (req "worker" (tup2 string string)))) (function Closed w -> Some w | _ -> None) (fun w -> Closed w) module type T = sig + module Name : NAME + + module Event : EVENT + + module Request : REQUEST - module Name: NAME - module Event: EVENT - module Request: REQUEST - module Types: TYPES + module Types : TYPES (** A handle to a specific worker, parameterized by the type of internal message buffer. *) @@ -100,18 +110,26 @@ module type T = sig type 'kind table (** Internal buffer kinds used as parameters to {!t}. *) - type 'a queue and bounded and infinite + type 'a queue + + and bounded + + and infinite + type dropbox (** Supported kinds of internal buffers. *) type _ buffer_kind = | Queue : infinite queue buffer_kind - | Bounded : { size : int } -> bounded queue buffer_kind + | Bounded : {size : int} -> bounded queue buffer_kind | Dropbox : - { merge : (dropbox t -> - any_request -> - any_request option -> - any_request option) } -> dropbox buffer_kind + { merge : + dropbox t -> + any_request -> + any_request option -> + any_request option } + -> dropbox buffer_kind + and any_request = Any_request : _ Request.t -> any_request (** Create a table of workers. *) @@ -119,7 +137,6 @@ module type T = sig (** The callback handlers specific to each worker instance. *) module type HANDLERS = sig - (** Placeholder replaced with {!t} with the right parameters provided by the type of buffer chosen at {!launch}.*) type self @@ -131,17 +148,14 @@ module type T = sig self -> Name.t -> Types.parameters -> Types.state tzresult Lwt.t (** The main request processor, i.e. the body of the event loop. *) - val on_request : - self -> 'a Request.t -> 'a tzresult Lwt.t + val on_request : self -> 'a Request.t -> 'a tzresult Lwt.t (** Called when no request has been made before the timeout, if the parameter has been passed to {!launch}. *) - val on_no_request : - self -> unit tzresult Lwt.t + val on_no_request : self -> unit tzresult Lwt.t (** A function called when terminating a worker. *) - val on_close : - self -> unit Lwt.t + val on_close : self -> unit Lwt.t (** A function called at the end of the worker loop in case of an abnormal error. This function can handle the error by @@ -159,61 +173,68 @@ module type T = sig (** A function called at the end of the worker loop in case of a successful treatment of the current request. *) val on_completion : - self -> - 'a Request.t -> 'a -> - Worker_types.request_status -> - unit Lwt.t - + self -> 'a Request.t -> 'a -> Worker_types.request_status -> unit Lwt.t end (** Creates a new worker instance. Parameter [queue_size] not passed means unlimited queue. *) val launch : - 'kind table -> ?timeout:Time.System.Span.t -> - Worker_types.limits -> Name.t -> Types.parameters -> + 'kind table -> + ?timeout:Time.System.Span.t -> + Worker_types.limits -> + Name.t -> + Types.parameters -> (module HANDLERS with type self = 'kind t) -> 'kind t tzresult Lwt.t (** Triggers a worker termination and waits for its completion. Cannot be called from within the handlers. 
*) - val shutdown : - _ t -> unit Lwt.t + val shutdown : _ t -> unit Lwt.t module type BOX = sig type t + val put_request : t -> 'a Request.t -> unit + val put_request_and_wait : t -> 'a Request.t -> 'a tzresult Lwt.t end + module type QUEUE = sig type 'a t + val push_request_and_wait : 'q t -> 'a Request.t -> 'a tzresult Lwt.t + val push_request : 'q t -> 'a Request.t -> unit Lwt.t + val pending_requests : 'a t -> (Time.System.t * Request.view) list + val pending_requests_length : 'a t -> int end + module type BOUNDED_QUEUE = sig type t + val try_push_request_now : t -> 'a Request.t -> bool end module Dropbox : sig include BOX with type t := dropbox t end + module Queue : sig include QUEUE with type 'a t := 'a queue t + include BOUNDED_QUEUE with type t := bounded queue t (** Adds a message to the queue immediately. *) - val push_request_now : - infinite queue t -> 'a Request.t -> unit + val push_request_now : infinite queue t -> 'a Request.t -> unit end - (** Detects cancelation from within the request handler to stop asynchronous operations. *) val protect : _ t -> - ?on_error: (error list -> 'b tzresult Lwt.t) -> + ?on_error:(error list -> 'b tzresult Lwt.t) -> (unit -> 'b tzresult Lwt.t) -> 'b tzresult Lwt.t @@ -245,7 +266,8 @@ module type T = sig (** Get the request being treated by a worker. Gives the time the request was pushed, and the time its treatment started. *) - val current_request : _ t -> (Time.System.t * Time.System.t * Request.view) option + val current_request : + _ t -> (Time.System.t * Time.System.t * Request.view) option val information : _ t -> Worker_types.worker_information @@ -260,12 +282,8 @@ module type T = sig val find_opt : 'a table -> Name.t -> 'a t option end -module Make - (Name : NAME) - (Event : EVENT) - (Request : REQUEST) - (Types : TYPES) = struct - +module Make (Name : NAME) (Event : EVENT) (Request : REQUEST) (Types : TYPES) = +struct module Name = Name module Event = Event module Request = Request @@ -273,52 +291,65 @@ module Make let base_name = String.concat "." 
Name.base - type message = Message: 'a Request.t * 'a tzresult Lwt.u option -> message + type message = Message : 'a Request.t * 'a tzresult Lwt.u option -> message + + type 'a queue + + and bounded + + and infinite - type 'a queue and bounded and infinite type dropbox type _ buffer_kind = | Queue : infinite queue buffer_kind - | Bounded : { size : int } -> bounded queue buffer_kind + | Bounded : {size : int} -> bounded queue buffer_kind | Dropbox : - { merge : (dropbox t -> - any_request -> - any_request option -> - any_request option) } - -> dropbox buffer_kind + { merge : + dropbox t -> + any_request -> + any_request option -> + any_request option } + -> dropbox buffer_kind + and any_request = Any_request : _ Request.t -> any_request and _ buffer = - | Queue_buffer : (Time.System.t * message) Lwt_pipe.t -> infinite queue buffer - | Bounded_buffer : (Time.System.t * message) Lwt_pipe.t -> bounded queue buffer - | Dropbox_buffer : (Time.System.t * message) Lwt_dropbox.t -> dropbox buffer + | Queue_buffer : + (Time.System.t * message) Lwt_pipe.t + -> infinite queue buffer + | Bounded_buffer : + (Time.System.t * message) Lwt_pipe.t + -> bounded queue buffer + | Dropbox_buffer : + (Time.System.t * message) Lwt_dropbox.t + -> dropbox buffer and 'kind t = { - limits : Worker_types.limits ; - timeout : Time.System.Span.t option ; - parameters : Types.parameters ; - mutable (* only for init *) worker : unit Lwt.t ; - mutable (* only for init *) state : Types.state option ; - buffer : 'kind buffer ; - event_log : (Internal_event.level * Event.t Ring.t) list ; - logger : (module Internal_event.Legacy_logging.LOG) ; - canceler : Lwt_canceler.t ; - name : Name.t ; - id : int ; - mutable status : Worker_types.worker_status ; - mutable current_request : (Time.System.t * Time.System.t * Request.view) option ; - table : 'kind table ; + limits : Worker_types.limits; + timeout : Time.System.Span.t option; + parameters : Types.parameters; + mutable (* only for init *) worker : unit Lwt.t; + mutable (* only for init *) state : Types.state option; + buffer : 'kind buffer; + event_log : (Internal_event.level * Event.t Ring.t) list; + logger : (module Internal_event.Legacy_logging.LOG); + canceler : Lwt_canceler.t; + name : Name.t; + id : int; + mutable status : Worker_types.worker_status; + mutable current_request : + (Time.System.t * Time.System.t * Request.view) option; + table : 'kind table } + and 'kind table = { - buffer_kind : 'kind buffer_kind ; - mutable last_id : int ; - instances : (Name.t, 'kind t) Hashtbl.t ; + buffer_kind : 'kind buffer_kind; + mutable last_id : int; + instances : (Name.t, 'kind t) Hashtbl.t } - let queue_item ?u r = - Systime_os.now (), - Message (r, u) + let queue_item ?u r = (Systime_os.now (), Message (r, u)) let drop_request w merge message_box request = try @@ -330,66 +361,75 @@ module Make Lwt.ignore_result (Lwt_dropbox.take message_box) ; merge w (Any_request request) (Some (Any_request old)) with - | None -> () + | None -> + () | Some (Any_request neu) -> Lwt_dropbox.put message_box (Systime_os.now (), Message (neu, None)) with Lwt_dropbox.Closed -> () let push_request_and_wait w message_queue request = - let t, u = Lwt.wait () in + let (t, u) = Lwt.wait () in Lwt.catch (fun () -> - Lwt_pipe.push message_queue (queue_item ~u request) >>= fun () -> - t) + Lwt_pipe.push message_queue (queue_item ~u request) >>= fun () -> t) (function | Lwt_pipe.Closed -> let name = Format.asprintf "%a" Name.pp w.name in - fail (Closed {base=base_name; name}) - | exn -> fail (Exn exn)) + 
fail (Closed {base = base_name; name}) + | exn -> + fail (Exn exn)) let drop_request_and_wait w message_box request = - let t, u = Lwt.wait () in + let (t, u) = Lwt.wait () in Lwt.catch (fun () -> - Lwt_dropbox.put message_box (queue_item ~u request); - t) + Lwt_dropbox.put message_box (queue_item ~u request) ; + t) (function | Lwt_pipe.Closed -> let name = Format.asprintf "%a" Name.pp w.name in - fail (Closed {base=base_name; name}) - | exn -> fail (Exn exn)) + fail (Closed {base = base_name; name}) + | exn -> + fail (Exn exn)) module type BOX = sig type t + val put_request : t -> 'a Request.t -> unit + val put_request_and_wait : t -> 'a Request.t -> 'a tzresult Lwt.t end + module type QUEUE = sig type 'a t + val push_request_and_wait : 'q t -> 'a Request.t -> 'a tzresult Lwt.t + val push_request : 'q t -> 'a Request.t -> unit Lwt.t + val pending_requests : 'a t -> (Time.System.t * Request.view) list + val pending_requests_length : 'a t -> int end + module type BOUNDED_QUEUE = sig type t + val try_push_request_now : t -> 'a Request.t -> bool end - module Dropbox = struct + module Dropbox = struct let put_request (w : dropbox t) request = - let Dropbox { merge } = w.table.buffer_kind in - let Dropbox_buffer message_box = w.buffer in + let (Dropbox {merge}) = w.table.buffer_kind in + let (Dropbox_buffer message_box) = w.buffer in drop_request w merge message_box request let put_request_and_wait (w : dropbox t) request = - let Dropbox_buffer message_box = w.buffer in + let (Dropbox_buffer message_box) = w.buffer in drop_request_and_wait w message_box request - end module Queue = struct - let push_request (type a) (w : a queue t) request = match w.buffer with | Queue_buffer message_queue -> @@ -398,49 +438,66 @@ module Make Lwt_pipe.push message_queue (queue_item request) let push_request_now (w : infinite queue t) request = - let Queue_buffer message_queue = w.buffer in + let (Queue_buffer message_queue) = w.buffer in Lwt_pipe.push_now_exn message_queue (queue_item request) let try_push_request_now (w : bounded queue t) request = - let Bounded_buffer message_queue = w.buffer in + let (Bounded_buffer message_queue) = w.buffer in Lwt_pipe.push_now message_queue (queue_item request) let push_request_and_wait (type a) (w : a queue t) request = - let message_queue = match w.buffer with - | Queue_buffer message_queue -> message_queue - | Bounded_buffer message_queue -> message_queue in + let message_queue = + match w.buffer with + | Queue_buffer message_queue -> + message_queue + | Bounded_buffer message_queue -> + message_queue + in push_request_and_wait w message_queue request let pending_requests (type a) (w : a queue t) = - let message_queue = match w.buffer with - | Queue_buffer message_queue -> message_queue - | Bounded_buffer message_queue -> message_queue in + let message_queue = + match w.buffer with + | Queue_buffer message_queue -> + message_queue + | Bounded_buffer message_queue -> + message_queue + in List.map - (function (t, Message (req, _)) -> t, Request.view req) + (function (t, Message (req, _)) -> (t, Request.view req)) (Lwt_pipe.peek_all message_queue) let pending_requests_length (type a) (w : a queue t) = - let pipe_length (type a) (q : a buffer ) = match q with - | Queue_buffer queue -> Lwt_pipe.length queue - | Bounded_buffer queue -> Lwt_pipe.length queue - | Dropbox_buffer _ -> 1 - in pipe_length w.buffer - + let pipe_length (type a) (q : a buffer) = + match q with + | Queue_buffer queue -> + Lwt_pipe.length queue + | Bounded_buffer queue -> + Lwt_pipe.length queue + | 
Dropbox_buffer _ -> + 1 + in + pipe_length w.buffer end let close (type a) (w : a t) = let wakeup = function - | _, Message (_, Some u) -> + | (_, Message (_, Some u)) -> let name = Format.asprintf "%a" Name.pp w.name in - Lwt.wakeup_later u (Error [ Closed {base=base_name; name} ]) - | _, Message (_, None) -> () in + Lwt.wakeup_later u (Error [Closed {base = base_name; name}]) + | (_, Message (_, None)) -> + () + in let close_queue message_queue = let messages = Lwt_pipe.pop_all_now message_queue in List.iter wakeup messages ; - Lwt_pipe.close message_queue in + Lwt_pipe.close message_queue + in match w.buffer with - | Queue_buffer message_queue -> close_queue message_queue - | Bounded_buffer message_queue -> close_queue message_queue + | Queue_buffer message_queue -> + close_queue message_queue + | Bounded_buffer message_queue -> + close_queue message_queue | Dropbox_buffer message_box -> Option.iter ~f:wakeup (Lwt_dropbox.peek message_box) ; Lwt_dropbox.close message_box @@ -449,267 +506,315 @@ module Make let pop_queue message_queue = match w.timeout with | None -> - Lwt_pipe.pop message_queue >>= fun m -> - return_some m + Lwt_pipe.pop message_queue >>= fun m -> return_some m | Some timeout -> - Lwt_pipe.pop_with_timeout - (Systime_os.sleep timeout) message_queue >>= fun m -> - return m in + Lwt_pipe.pop_with_timeout (Systime_os.sleep timeout) message_queue + >>= fun m -> return m + in match w.buffer with - | Queue_buffer message_queue -> pop_queue message_queue - | Bounded_buffer message_queue -> pop_queue message_queue - | Dropbox_buffer message_box -> - match w.timeout with - | None -> - Lwt_dropbox.take message_box >>= fun m -> - return_some m - | Some timeout -> - Lwt_dropbox.take_with_timeout - (Systime_os.sleep timeout) message_box >>= fun m -> - return m + | Queue_buffer message_queue -> + pop_queue message_queue + | Bounded_buffer message_queue -> + pop_queue message_queue + | Dropbox_buffer message_box -> ( + match w.timeout with + | None -> + Lwt_dropbox.take message_box >>= fun m -> return_some m + | Some timeout -> + Lwt_dropbox.take_with_timeout (Systime_os.sleep timeout) message_box + >>= fun m -> return m ) - let trigger_shutdown w = - Lwt.ignore_result (Lwt_canceler.cancel w.canceler) + let trigger_shutdown w = Lwt.ignore_result (Lwt_canceler.cancel w.canceler) - let canceler { canceler ; _ } = canceler + let canceler {canceler; _} = canceler let log_event w evt = let (module Logger) = w.logger in let level = Event.level evt in let log = match level with - | Debug -> Logger.lwt_debug - | Info -> Logger.lwt_log_info - | Notice -> Logger.lwt_log_notice - | Warning -> Logger.lwt_warn - | Error -> Logger.lwt_log_error - | Fatal -> Logger.lwt_fatal_error in - log "@[<v 0>%a@]" Event.pp evt >>= fun () -> + | Debug -> + Logger.lwt_debug + | Info -> + Logger.lwt_log_info + | Notice -> + Logger.lwt_log_notice + | Warning -> + Logger.lwt_warn + | Error -> + Logger.lwt_log_error + | Fatal -> + Logger.lwt_fatal_error + in + log "@[<v 0>%a@]" Event.pp evt + >>= fun () -> if level >= w.limits.backlog_level then Ring.add (List.assoc level w.event_log) evt ; Lwt.return_unit - let record_event w evt = - Lwt.ignore_result (log_event w evt) + let record_event w evt = Lwt.ignore_result (log_event w evt) module type HANDLERS = sig type self + val on_launch : self -> Name.t -> Types.parameters -> Types.state tzresult Lwt.t - val on_request : - self -> 'a Request.t -> 'a tzresult Lwt.t - val on_no_request : - self -> unit tzresult Lwt.t - val on_close : - self -> unit Lwt.t + + val 
on_request : self -> 'a Request.t -> 'a tzresult Lwt.t + + val on_no_request : self -> unit tzresult Lwt.t + + val on_close : self -> unit Lwt.t + val on_error : - self -> Request.view -> Worker_types.request_status -> error list -> unit tzresult Lwt.t + self -> + Request.view -> + Worker_types.request_status -> + error list -> + unit tzresult Lwt.t + val on_completion : self -> 'a Request.t -> 'a -> Worker_types.request_status -> unit Lwt.t end let create_table buffer_kind = - { buffer_kind ; - last_id = 0 ; - instances = Hashtbl.create 10 ; } + {buffer_kind; last_id = 0; instances = Hashtbl.create 10} let worker_loop (type kind) handlers (w : kind t) = let (module Handlers : HANDLERS with type self = kind t) = handlers in let (module Logger) = w.logger in let do_close errs = - let t0 = match w.status with - | Running t0 -> t0 - | Launching _ | Closing _ | Closed _ -> assert false in + let t0 = + match w.status with + | Running t0 -> + t0 + | Launching _ | Closing _ | Closed _ -> + assert false + in w.status <- Closing (t0, Systime_os.now ()) ; close w ; - Lwt_canceler.cancel w.canceler >>= fun () -> + Lwt_canceler.cancel w.canceler + >>= fun () -> w.status <- Closed (t0, Systime_os.now (), errs) ; Hashtbl.remove w.table.instances w.name ; - Handlers.on_close w >>= fun () -> + Handlers.on_close w + >>= fun () -> w.state <- None ; Lwt.ignore_result ( List.iter (fun (_, ring) -> Ring.clear ring) w.event_log ; - Lwt.return_unit) ; - Lwt.return_unit in + Lwt.return_unit ) ; + Lwt.return_unit + in let rec loop () = - begin - protect ~canceler:w.canceler begin fun () -> - pop w - end >>=? function - | None -> Handlers.on_no_request w - | Some (pushed, Message (request, u)) -> - let current_request = Request.view request in - let treated = Systime_os.now () in - w.current_request <- Some (pushed, treated, current_request) ; - Logger.debug "@[<v 2>Request:@,%a@]" - Request.pp current_request ; - match u with - | None -> - Handlers.on_request w request >>=? fun res -> - let completed = Systime_os.now () in - w.current_request <- None ; - Handlers.on_completion w - request res Worker_types.{ pushed ; treated ; completed } >>= fun () -> - return_unit - | Some u -> - Handlers.on_request w request >>= fun res -> - Lwt.wakeup_later u res ; - Lwt.return res >>=? fun res -> - let completed = Systime_os.now () in - w.current_request <- None ; - Handlers.on_completion w - request res Worker_types.{ pushed ; treated ; completed } >>= fun () -> - return_unit - end >>= function + protect ~canceler:w.canceler (fun () -> pop w) + >>=? (function + | None -> + Handlers.on_no_request w + | Some (pushed, Message (request, u)) -> ( + let current_request = Request.view request in + let treated = Systime_os.now () in + w.current_request <- Some (pushed, treated, current_request) ; + Logger.debug + "@[<v 2>Request:@,%a@]" + Request.pp + current_request ; + match u with + | None -> + Handlers.on_request w request + >>=? fun res -> + let completed = Systime_os.now () in + w.current_request <- None ; + Handlers.on_completion + w + request + res + Worker_types.{pushed; treated; completed} + >>= fun () -> return_unit + | Some u -> + Handlers.on_request w request + >>= fun res -> + Lwt.wakeup_later u res ; + Lwt.return res + >>=? 
fun res -> + let completed = Systime_os.now () in + w.current_request <- None ; + Handlers.on_completion + w + request + res + Worker_types.{pushed; treated; completed} + >>= fun () -> return_unit )) + >>= function | Ok () -> loop () - | Error [Canceled | Exn Lwt.Canceled | Exn Lwt_pipe.Closed | Exn Lwt_dropbox.Closed ] -> - Logger.lwt_log_notice - "@[Worker terminated [%a] @]" - Name.pp w.name >>= fun () -> - do_close None - | Error errs -> - begin match w.current_request with - | Some (pushed, treated, request) -> - let completed = Systime_os.now () in - w.current_request <- None ; - Handlers.on_error w - request Worker_types.{ pushed ; treated ; completed } errs - | None -> assert false - end >>= function + | Error + [ ( Canceled + | Exn Lwt.Canceled + | Exn Lwt_pipe.Closed + | Exn Lwt_dropbox.Closed ) ] -> + Logger.lwt_log_notice "@[Worker terminated [%a] @]" Name.pp w.name + >>= fun () -> do_close None + | Error errs -> ( + ( match w.current_request with + | Some (pushed, treated, request) -> + let completed = Systime_os.now () in + w.current_request <- None ; + Handlers.on_error + w + request + Worker_types.{pushed; treated; completed} + errs + | None -> + assert false ) + >>= function | Ok () -> loop () | Error ([Timeout] as errs) -> Logger.lwt_log_notice "@[Worker terminated with timeout [%a] @]" - Name.pp w.name >>= fun () -> - do_close (Some errs) + Name.pp + w.name + >>= fun () -> do_close (Some errs) | Error errs -> Logger.lwt_log_error "@[<v 0>Worker crashed [%a]:@,%a@]" - Name.pp w.name - (Format.pp_print_list Error_monad.pp) errs >>= fun () -> - do_close (Some errs) in + Name.pp + w.name + (Format.pp_print_list Error_monad.pp) + errs + >>= fun () -> do_close (Some errs) ) + in loop () - let launch - : type kind. - kind table -> ?timeout:Time.System.Span.t -> - Worker_types.limits -> Name.t -> Types.parameters -> + let launch : + type kind. 
+ kind table -> + ?timeout:Time.System.Span.t -> + Worker_types.limits -> + Name.t -> + Types.parameters -> (module HANDLERS with type self = kind t) -> - kind t tzresult Lwt.t - = fun table ?timeout limits name parameters (module Handlers) -> - let name_s = - Format.asprintf "%a" Name.pp name in - let full_name = - if name_s = "" then base_name else Format.asprintf "%s_%s" base_name name_s in - let id = - table.last_id <- table.last_id + 1 ; - table.last_id in - let id_name = - if name_s = "" then base_name else Format.asprintf "%s_%d" base_name id in - if Hashtbl.mem table.instances name then - invalid_arg (Format.asprintf "Worker.launch: duplicate worker %s" full_name) ; - let canceler = Lwt_canceler.create () in - let buffer : kind buffer = - match table.buffer_kind with - | Queue -> - Queue_buffer (Lwt_pipe.create ()) - | Bounded { size } -> - Bounded_buffer (Lwt_pipe.create ~size:(size, (fun _ -> 1)) ()) - | Dropbox _ -> - Dropbox_buffer (Lwt_dropbox.create ()) in - let event_log = - let levels = - Internal_event.[ - Debug ; Info ; Notice ; Warning ; Error ; Fatal - ] in - List.map (fun l -> l, Ring.create limits.backlog_size) levels in - let module Logger = - Internal_event.Legacy_logging.Make(struct - let name = id_name - end) in - let w = { limits ; parameters ; name ; canceler ; - table ; buffer ; logger = (module Logger) ; - state = None ; id ; - worker = Lwt.return_unit ; - event_log ; timeout ; - current_request = None ; - status = Launching (Systime_os.now ())} in - Hashtbl.add table.instances name w ; - begin - if id_name = base_name then - Logger.lwt_log_notice "Worker started" - else - Logger.lwt_log_notice "Worker started for %s" name_s - end >>= fun () -> - Handlers.on_launch w name parameters >>=? fun state -> - w.status <- Running (Systime_os.now ()) ; - w.state <- Some state ; - w.worker <- - Lwt_utils.worker - full_name - ~on_event:Internal_event.Lwt_worker_event.on_event - ~run:(fun () -> worker_loop (module Handlers) w) - ~cancel:(fun () -> Lwt_canceler.cancel w.canceler) ; - return w + kind t tzresult Lwt.t = + fun table ?timeout limits name parameters (module Handlers) -> + let name_s = Format.asprintf "%a" Name.pp name in + let full_name = + if name_s = "" then base_name + else Format.asprintf "%s_%s" base_name name_s + in + let id = + table.last_id <- table.last_id + 1 ; + table.last_id + in + let id_name = + if name_s = "" then base_name else Format.asprintf "%s_%d" base_name id + in + if Hashtbl.mem table.instances name then + invalid_arg + (Format.asprintf "Worker.launch: duplicate worker %s" full_name) ; + let canceler = Lwt_canceler.create () in + let buffer : kind buffer = + match table.buffer_kind with + | Queue -> + Queue_buffer (Lwt_pipe.create ()) + | Bounded {size} -> + Bounded_buffer (Lwt_pipe.create ~size:(size, fun _ -> 1) ()) + | Dropbox _ -> + Dropbox_buffer (Lwt_dropbox.create ()) + in + let event_log = + let levels = + Internal_event.[Debug; Info; Notice; Warning; Error; Fatal] + in + List.map (fun l -> (l, Ring.create limits.backlog_size)) levels + in + let module Logger = Internal_event.Legacy_logging.Make (struct + let name = id_name + end) in + let w = + { limits; + parameters; + name; + canceler; + table; + buffer; + logger = (module Logger); + state = None; + id; + worker = Lwt.return_unit; + event_log; + timeout; + current_request = None; + status = Launching (Systime_os.now ()) } + in + Hashtbl.add table.instances name w ; + ( if id_name = base_name then Logger.lwt_log_notice "Worker started" + else Logger.lwt_log_notice "Worker 
started for %s" name_s ) + >>= fun () -> + Handlers.on_launch w name parameters + >>=? fun state -> + w.status <- Running (Systime_os.now ()) ; + w.state <- Some state ; + w.worker <- + Lwt_utils.worker + full_name + ~on_event:Internal_event.Lwt_worker_event.on_event + ~run:(fun () -> worker_loop (module Handlers) w) + ~cancel:(fun () -> Lwt_canceler.cancel w.canceler) ; + return w let shutdown w = let (module Logger) = w.logger in - Logger.lwt_debug "Triggering shutdown" >>= fun () -> - Lwt_canceler.cancel w.canceler >>= fun () -> - w.worker + Logger.lwt_debug "Triggering shutdown" + >>= fun () -> Lwt_canceler.cancel w.canceler >>= fun () -> w.worker let state w = - match w.state, w.status with - | None, Launching _ -> + match (w.state, w.status) with + | (None, Launching _) -> invalid_arg (Format.asprintf - "Worker.state (%s[%a]): \ - state called before worker was initialized" - base_name Name.pp w.name) - | None, (Closing _ | Closed _) -> + "Worker.state (%s[%a]): state called before worker was initialized" + base_name + Name.pp + w.name) + | (None, (Closing _ | Closed _)) -> invalid_arg (Format.asprintf - "Worker.state (%s[%a]): \ - state called after worker was terminated" - base_name Name.pp w.name) - | None, _ -> assert false - | Some state, _ -> state + "Worker.state (%s[%a]): state called after worker was terminated" + base_name + Name.pp + w.name) + | (None, _) -> + assert false + | (Some state, _) -> + state - let pending_requests q = - Queue.pending_requests q + let pending_requests q = Queue.pending_requests q let last_events w = - List.map - (fun (level, ring) -> (level, Ring.elements ring)) - w.event_log + List.map (fun (level, ring) -> (level, Ring.elements ring)) w.event_log - let status { status ; _ } = status + let status {status; _} = status - let current_request { current_request ; _ } = current_request + let current_request {current_request; _} = current_request let information (type a) (w : a t) = - { Worker_types.instances_number = Hashtbl.length w.table.instances ; - wstatus = w.status ; - queue_length = match w.buffer with - | Queue_buffer pipe -> Lwt_pipe.length pipe - | Bounded_buffer pipe -> Lwt_pipe.length pipe - | Dropbox_buffer _ -> 1 - } - - let view w = - Types.view (state w) w.parameters + { Worker_types.instances_number = Hashtbl.length w.table.instances; + wstatus = w.status; + queue_length = + ( match w.buffer with + | Queue_buffer pipe -> + Lwt_pipe.length pipe + | Bounded_buffer pipe -> + Lwt_pipe.length pipe + | Dropbox_buffer _ -> + 1 ) } - let list { instances ; _ } = - Hashtbl.fold - (fun n w acc -> (n, w) :: acc) - instances [] + let view w = Types.view (state w) w.parameters - let find_opt { instances ; _ } = - Hashtbl.find_opt instances + let list {instances; _} = + Hashtbl.fold (fun n w acc -> (n, w) :: acc) instances [] - let protect { canceler ; _ } ?on_error f = - protect ?on_error ~canceler f + let find_opt {instances; _} = Hashtbl.find_opt instances + let protect {canceler; _} ?on_error f = protect ?on_error ~canceler f end diff --git a/src/lib_shell/worker.mli b/src/lib_shell/worker.mli index 326d7c92eb8e1f969e2cf8e6283d289050e05fe4..ee90cfec60d0ab059c75db5d3db940dc1f277258 100644 --- a/src/lib_shell/worker.mli +++ b/src/lib_shell/worker.mli @@ -31,7 +31,6 @@ (** The name of the group of workers corresponding to an instanciation of {!Make}, as well as the name of each worker in that group. 
*) module type NAME = sig - (** The name/path of the worker group *) val base : string list @@ -43,14 +42,12 @@ module type NAME = sig (** Pretty printer for displaying the worker name *) val pp : Format.formatter -> t -> unit - end (** Events that are used for logging and introspection. Events are pretty printed immediately in the log, and stored in the worker's event backlog for introspection. *) module type EVENT = sig - (** The type of an event. *) type t @@ -65,12 +62,10 @@ module type EVENT = sig (** Pretty printer, also used for logging *) val pp : Format.formatter -> t -> unit - end (** The type of messages that are fed to the worker's event loop. *) module type REQUEST = sig - (** The type of events. It is possible to wait for an event to be processed from outside the worker using {!push_request_and_wait}. In this case, the @@ -91,12 +86,10 @@ module type REQUEST = sig (** Pretty printer, also used for logging by {!Request_event}. *) val pp : Format.formatter -> view -> unit - end (** The (imperative) state of the event loop. *) module type TYPES = sig - (** The internal state that is passed to the event handlers. *) type state @@ -114,14 +107,14 @@ module type TYPES = sig (** Pretty printer for introspection. *) val pp : Format.formatter -> view -> unit - end (** {2 Worker group maker} *) (** An error returned when trying to communicate with a worker that has been closed. *) -type worker_name = {base: string; name:string} +type worker_name = {base : string; name : string} + type Error_monad.error += Closed of worker_name (** Functor to build a group of workers. @@ -129,11 +122,13 @@ type Error_monad.error += Closed of worker_name but the actual parameters and event handlers can be tweaked for each individual worker. *) module type T = sig + module Name : NAME - module Name: NAME - module Event: EVENT - module Request: REQUEST - module Types: TYPES + module Event : EVENT + + module Request : REQUEST + + module Types : TYPES (** A handle to a specific worker, parameterized by the type of internal message buffer. *) @@ -143,20 +138,26 @@ module type T = sig type 'kind table (** Internal buffer kinds used as parameters to {!t}. *) - type 'a queue and bounded and infinite - type dropbox + type 'a queue + + and bounded + and infinite + + type dropbox (** Supported kinds of internal buffers. *) type _ buffer_kind = | Queue : infinite queue buffer_kind - | Bounded : { size : int } -> bounded queue buffer_kind + | Bounded : {size : int} -> bounded queue buffer_kind | Dropbox : - { merge : (dropbox t -> - any_request -> - any_request option -> - any_request option) } - -> dropbox buffer_kind + { merge : + dropbox t -> + any_request -> + any_request option -> + any_request option } + -> dropbox buffer_kind + and any_request = Any_request : _ Request.t -> any_request (** Create a table of workers. *) @@ -164,7 +165,6 @@ module type T = sig (** The callback handlers specific to each worker instance. *) module type HANDLERS = sig - (** Placeholder replaced with {!t} with the right parameters provided by the type of buffer chosen at {!launch}.*) type self @@ -176,17 +176,14 @@ module type T = sig self -> Name.t -> Types.parameters -> Types.state tzresult Lwt.t (** The main request processor, i.e. the body of the event loop. *) - val on_request : - self -> 'a Request.t -> 'a tzresult Lwt.t + val on_request : self -> 'a Request.t -> 'a tzresult Lwt.t (** Called when no request has been made before the timeout, if the parameter has been passed to {!launch}. 
*) - val on_no_request : - self -> unit tzresult Lwt.t + val on_no_request : self -> unit tzresult Lwt.t (** A function called when terminating a worker. *) - val on_close : - self -> unit Lwt.t + val on_close : self -> unit Lwt.t (** A function called at the end of the worker loop in case of an abnormal error. This function can handle the error by @@ -204,66 +201,73 @@ module type T = sig (** A function called at the end of the worker loop in case of a successful treatment of the current request. *) val on_completion : - self -> - 'a Request.t -> 'a -> - Worker_types.request_status -> - unit Lwt.t - + self -> 'a Request.t -> 'a -> Worker_types.request_status -> unit Lwt.t end (** Creates a new worker instance. Parameter [queue_size] not passed means unlimited queue. *) val launch : - 'kind table -> ?timeout:Time.System.Span.t -> - Worker_types.limits -> Name.t -> Types.parameters -> + 'kind table -> + ?timeout:Time.System.Span.t -> + Worker_types.limits -> + Name.t -> + Types.parameters -> (module HANDLERS with type self = 'kind t) -> 'kind t tzresult Lwt.t (** Triggers a worker termination and waits for its completion. Cannot be called from within the handlers. *) - val shutdown : - _ t -> unit Lwt.t + val shutdown : _ t -> unit Lwt.t (** The following interface are common elements of multiple modules below. They are used to minimize repetition. *) module type BOX = sig (** With [BOX]es, you can put a request right at the front *) type t + val put_request : t -> 'a Request.t -> unit + val put_request_and_wait : t -> 'a Request.t -> 'a tzresult Lwt.t end + module type QUEUE = sig (** With [QUEUE]s, you can push requests in the queue *) type 'a t + val push_request_and_wait : 'q t -> 'a Request.t -> 'a tzresult Lwt.t + val push_request : 'q t -> 'a Request.t -> unit Lwt.t + val pending_requests : 'a t -> (Time.System.t * Request.view) list + val pending_requests_length : 'a t -> int end + module type BOUNDED_QUEUE = sig (** With [BOUNDED_QUEUE]s, you can push requests in the queue tentatively *) type t + val try_push_request_now : t -> 'a Request.t -> bool end module Dropbox : sig include BOX with type t := dropbox t end + module Queue : sig include QUEUE with type 'a t := 'a queue t + include BOUNDED_QUEUE with type t := bounded queue t (** Adds a message to the queue immediately. *) - val push_request_now : - infinite queue t -> 'a Request.t -> unit + val push_request_now : infinite queue t -> 'a Request.t -> unit end - (** Detects cancelation from within the request handler to stop asynchronous operations. *) val protect : _ t -> - ?on_error: (error list -> 'b tzresult Lwt.t) -> + ?on_error:(error list -> 'b tzresult Lwt.t) -> (unit -> 'b tzresult Lwt.t) -> 'b tzresult Lwt.t @@ -295,7 +299,8 @@ module type T = sig (** Get the request being treated by a worker. Gives the time the request was pushed, and the time its treatment started. *) - val current_request : _ t -> (Time.System.t * Time.System.t * Request.view) option + val current_request : + _ t -> (Time.System.t * Time.System.t * Request.view) option val information : _ t -> Worker_types.worker_information @@ -308,12 +313,11 @@ module type T = sig (** [find_opt table n] is [Some worker] if the [worker] is in the [table] and has name [n]. 
*) val find_opt : 'a table -> Name.t -> 'a t option - end - -module Make (Name : NAME) (Event : EVENT) (Request : REQUEST) (Types : TYPES) - : T with module Name = Name - and module Event = Event - and module Request = Request - and module Types = Types +module Make (Name : NAME) (Event : EVENT) (Request : REQUEST) (Types : TYPES) : + T + with module Name = Name + and module Event = Event + and module Request = Request + and module Types = Types diff --git a/src/lib_shell/worker_directory.ml b/src/lib_shell/worker_directory.ml index b1f80a703ff60fb7c8e7bdf7800b1197586894cb..25287e8dc357ea2bb8e2b105857fa904d5379720 100644 --- a/src/lib_shell/worker_directory.ml +++ b/src/lib_shell/worker_directory.ml @@ -25,114 +25,101 @@ (*****************************************************************************) let build_rpc_directory state = - let dir : unit RPC_directory.t ref = ref RPC_directory.empty in let register0 s f = - dir := RPC_directory.register !dir s (fun () p q -> f p q) in + dir := RPC_directory.register !dir s (fun () p q -> f p q) + in let register1 s f = - dir := RPC_directory.register !dir s (fun ((), a) p q -> f a p q) in + dir := RPC_directory.register !dir s (fun ((), a) p q -> f a p q) + in let register2 s f = - dir := RPC_directory.register !dir s (fun (((), a), b) p q -> f a b p q) in - + dir := RPC_directory.register !dir s (fun (((), a), b) p q -> f a b p q) + in (* Workers : Prevalidators *) - - register0 Worker_services.Prevalidators.S.list begin fun () () -> - let workers = Prevalidator.running_workers () in - let statuses = - List.map - (fun (chain_id, _, t) -> (chain_id, - Prevalidator.status t, - Prevalidator.information t, - Prevalidator.pipeline_length t)) - workers in - return statuses - end ; - - register1 Worker_services.Prevalidators.S.state begin fun chain () () -> - Chain_directory.get_chain_id state chain >>= fun chain_id -> - let workers = Prevalidator.running_workers () in - let (_, _, t) = - (* NOTE: it is technically possible to use the Prevalidator interface to - * register multiple Prevalidator for a single chain (using distinct - * protocols). However, this is never done. *) - List.find (fun (c, _, _) -> Chain_id.equal c chain_id) workers in - let status = Prevalidator.status t in - let pending_requests = Prevalidator.pending_requests t in - let backlog = Prevalidator.last_events t in - let current_request = Prevalidator.current_request t in - return - { Worker_types. - status ; - pending_requests ; - backlog ; - current_request } - end ; - + register0 Worker_services.Prevalidators.S.list (fun () () -> + let workers = Prevalidator.running_workers () in + let statuses = + List.map + (fun (chain_id, _, t) -> + ( chain_id, + Prevalidator.status t, + Prevalidator.information t, + Prevalidator.pipeline_length t )) + workers + in + return statuses) ; + register1 Worker_services.Prevalidators.S.state (fun chain () () -> + Chain_directory.get_chain_id state chain + >>= fun chain_id -> + let workers = Prevalidator.running_workers () in + let (_, _, t) = + (* NOTE: it is technically possible to use the Prevalidator interface to + * register multiple Prevalidator for a single chain (using distinct + * protocols). However, this is never done. 
*) + List.find (fun (c, _, _) -> Chain_id.equal c chain_id) workers + in + let status = Prevalidator.status t in + let pending_requests = Prevalidator.pending_requests t in + let backlog = Prevalidator.last_events t in + let current_request = Prevalidator.current_request t in + return {Worker_types.status; pending_requests; backlog; current_request}) ; (* Workers : Block_validator *) - - register0 Worker_services.Block_validator.S.state begin fun () () -> - let w = Block_validator.running_worker () in - return - { Worker_types.status = Block_validator.status w ; - pending_requests = Block_validator.pending_requests w ; - backlog = Block_validator.last_events w ; - current_request = Block_validator.current_request w } - end ; - + register0 Worker_services.Block_validator.S.state (fun () () -> + let w = Block_validator.running_worker () in + return + { Worker_types.status = Block_validator.status w; + pending_requests = Block_validator.pending_requests w; + backlog = Block_validator.last_events w; + current_request = Block_validator.current_request w }) ; (* Workers : Peer validators *) - - register1 Worker_services.Peer_validators.S.list begin fun chain () () -> - Chain_directory.get_chain_id state chain >>= fun chain_id -> - return - (List.filter_map - (fun ((id, peer_id), w) -> - if Chain_id.equal id chain_id then - Some (peer_id, - Peer_validator.status w, - Peer_validator.information w, - Peer_validator.pipeline_length w) - else None) - (Peer_validator.running_workers ())) - end ; - - register2 Worker_services.Peer_validators.S.state begin fun chain peer_id () () -> - Chain_directory.get_chain_id state chain >>= fun chain_id -> - let w = List.assoc (chain_id, peer_id) - (Peer_validator.running_workers ()) in - return - { Worker_types.status = Peer_validator.status w ; - pending_requests = [] ; - backlog = Peer_validator.last_events w ; - current_request = Peer_validator.current_request w } - end ; - + register1 Worker_services.Peer_validators.S.list (fun chain () () -> + Chain_directory.get_chain_id state chain + >>= fun chain_id -> + return + (List.filter_map + (fun ((id, peer_id), w) -> + if Chain_id.equal id chain_id then + Some + ( peer_id, + Peer_validator.status w, + Peer_validator.information w, + Peer_validator.pipeline_length w ) + else None) + (Peer_validator.running_workers ()))) ; + register2 Worker_services.Peer_validators.S.state (fun chain peer_id () () -> + Chain_directory.get_chain_id state chain + >>= fun chain_id -> + let w = + List.assoc (chain_id, peer_id) (Peer_validator.running_workers ()) + in + return + { Worker_types.status = Peer_validator.status w; + pending_requests = []; + backlog = Peer_validator.last_events w; + current_request = Peer_validator.current_request w }) ; (* Workers : Net validators *) - - register0 Worker_services.Chain_validators.S.list begin fun () () -> - return - (List.map - (fun (id, w) -> - (id, - Chain_validator.status w, - Chain_validator.information w, - Chain_validator.pending_requests_length w)) - (Chain_validator.running_workers ())) - end ; - - register1 Worker_services.Chain_validators.S.state begin fun chain () () -> - Chain_directory.get_chain_id state chain >>= fun chain_id -> - let w = List.assoc chain_id (Chain_validator.running_workers ()) in - return - { Worker_types.status = Chain_validator.status w ; - pending_requests = Chain_validator.pending_requests w ; - backlog = Chain_validator.last_events w ; - current_request = Chain_validator.current_request w } - end ; - + register0 Worker_services.Chain_validators.S.list 
(fun () () -> + return + (List.map + (fun (id, w) -> + ( id, + Chain_validator.status w, + Chain_validator.information w, + Chain_validator.pending_requests_length w )) + (Chain_validator.running_workers ()))) ; + register1 Worker_services.Chain_validators.S.state (fun chain () () -> + Chain_directory.get_chain_id state chain + >>= fun chain_id -> + let w = List.assoc chain_id (Chain_validator.running_workers ()) in + return + { Worker_types.status = Chain_validator.status w; + pending_requests = Chain_validator.pending_requests w; + backlog = Chain_validator.last_events w; + current_request = Chain_validator.current_request w }) ; (* DistributedDB *) - register1 Worker_services.Chain_validators.S.ddb_state begin fun chain () () -> - Chain_directory.get_chain_id state chain >>= fun chain_id -> - let w = List.assoc chain_id (Chain_validator.running_workers ()) in - return (Chain_validator.ddb_information w) end ; - + register1 Worker_services.Chain_validators.S.ddb_state (fun chain () () -> + Chain_directory.get_chain_id state chain + >>= fun chain_id -> + let w = List.assoc chain_id (Chain_validator.running_workers ()) in + return (Chain_validator.ddb_information w)) ; !dir diff --git a/src/lib_shell/worker_directory.mli b/src/lib_shell/worker_directory.mli index 8bb139e7e1d45d939df22025b1b8238e8582fa59..2ed0b23f35e9d9adbe3ab1e32cfe45e62ef4cb47 100644 --- a/src/lib_shell/worker_directory.mli +++ b/src/lib_shell/worker_directory.mli @@ -23,4 +23,4 @@ (* *) (*****************************************************************************) -val build_rpc_directory: State.t -> unit RPC_directory.t +val build_rpc_directory : State.t -> unit RPC_directory.t diff --git a/src/lib_shell_services/.ocamlformat b/src/lib_shell_services/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_shell_services/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_shell_services/block_services.ml b/src/lib_shell_services/block_services.ml index 303b04ca513cf9041a8fe33152fdb97dead8b19c..eb344788ad6544677cd498432e6c9c9bddb582f9 100644 --- a/src/lib_shell_services/block_services.ml +++ b/src/lib_shell_services/block_services.ml @@ -25,185 +25,213 @@ open Data_encoding -type chain = [ - | `Main - | `Test - | `Hash of Chain_id.t -] +type chain = [`Main | `Test | `Hash of Chain_id.t] let parse_chain s = try match s with - | "main" -> Ok `Main - | "test" -> Ok `Test - | h -> Ok (`Hash (Chain_id.of_b58check_exn h)) + | "main" -> + Ok `Main + | "test" -> + Ok `Test + | h -> + Ok (`Hash (Chain_id.of_b58check_exn h)) with _ -> Error "Cannot parse chain identifier." let chain_to_string = function - | `Main -> "main" - | `Test -> "test" - | `Hash h -> Chain_id.to_b58check h + | `Main -> + "main" + | `Test -> + "test" + | `Hash h -> + Chain_id.to_b58check h let chain_arg = let name = "chain_id" in let descr = "A chain identifier. This is either a chain hash in Base58Check notation \ - or a one the predefined aliases: 'main', 'test'." in + or a one the predefined aliases: 'main', 'test'." 
+ in let construct = chain_to_string in let destruct = parse_chain in RPC_arg.make ~name ~descr ~construct ~destruct () -type block = [ - | `Genesis +type block = + [ `Genesis | `Head of int - | `Alias of [ `Caboose | `Checkpoint | `Save_point ] * int + | `Alias of [`Caboose | `Checkpoint | `Save_point] * int | `Hash of Block_hash.t * int - | `Level of Int32.t -] + | `Level of Int32.t ] let parse_block s = - let delims = ['~';'-';'+'] in + let delims = ['~'; '-'; '+'] in let count_delims s = List.map (fun d -> - (String.fold_left (fun i c -> if c = d then i+1 else i) 0 s), d) - delims in + (String.fold_left (fun i c -> if c = d then i + 1 else i) 0 s, d)) + delims + in let split_on_delim counts = - begin - match List.fold_left (fun i (v,_) -> i+v) 0 counts with - | 0 -> ([s], ' ') - | 1 -> let delim = List.assoc 1 counts in - (String.split delim s, delim) - | _ -> raise Exit - end in + match List.fold_left (fun i (v, _) -> i + v) 0 counts with + | 0 -> + ([s], ' ') + | 1 -> + let delim = List.assoc 1 counts in + (String.split delim s, delim) + | _ -> + raise Exit + in try match split_on_delim (count_delims s) with - | (["genesis"], _) -> Ok `Genesis - | (["genesis"; n], '+') -> Ok (`Level (Int32.of_string n)) - | (["head"], _) -> Ok (`Head 0) + | (["genesis"], _) -> + Ok `Genesis + | (["genesis"; n], '+') -> + Ok (`Level (Int32.of_string n)) + | (["head"], _) -> + Ok (`Head 0) | (["head"; n], '~') | (["head"; n], '-') -> Ok (`Head (int_of_string n)) | (["checkpoint"], _) -> Ok (`Alias (`Checkpoint, 0)) - | (["checkpoint" ; n], '~') | (["checkpoint" ; n], '-') -> + | (["checkpoint"; n], '~') | (["checkpoint"; n], '-') -> Ok (`Alias (`Checkpoint, int_of_string n)) - | (["checkpoint" ; n], '+') -> Ok (`Alias (`Checkpoint, - int_of_string n)) + | (["checkpoint"; n], '+') -> + Ok (`Alias (`Checkpoint, -int_of_string n)) | (["save_point"], _) -> Ok (`Alias (`Save_point, 0)) - | (["save_point" ; n], '~') | (["save_point" ; n], '-') -> + | (["save_point"; n], '~') | (["save_point"; n], '-') -> Ok (`Alias (`Save_point, int_of_string n)) - | (["save_point" ; n], '+') -> Ok (`Alias (`Save_point, - int_of_string n)) + | (["save_point"; n], '+') -> + Ok (`Alias (`Save_point, -int_of_string n)) | (["caboose"], _) -> Ok (`Alias (`Caboose, 0)) - | (["caboose" ; n], '~') | (["caboose" ; n], '-') -> + | (["caboose"; n], '~') | (["caboose"; n], '-') -> Ok (`Alias (`Caboose, int_of_string n)) - | (["caboose" ; n], '+') -> Ok (`Alias (`Caboose, - int_of_string n)) - | ([hol], _) -> - begin - match Block_hash.of_b58check_opt hol with - Some h -> Ok (`Hash (h , 0)) - | None -> - let l = Int32.of_string s in - if Compare.Int32.(l < 0l) then - raise Exit - else if Compare.Int32.(l = 0l) then - Ok `Genesis - else - Ok (`Level (Int32.of_string s)) - end - | ([h ; n], '~') | ([h ; n], '-') -> + | (["caboose"; n], '+') -> + Ok (`Alias (`Caboose, -int_of_string n)) + | ([hol], _) -> ( + match Block_hash.of_b58check_opt hol with + | Some h -> + Ok (`Hash (h, 0)) + | None -> + let l = Int32.of_string s in + if Compare.Int32.(l < 0l) then raise Exit + else if Compare.Int32.(l = 0l) then Ok `Genesis + else Ok (`Level (Int32.of_string s)) ) + | ([h; n], '~') | ([h; n], '-') -> Ok (`Hash (Block_hash.of_b58check_exn h, int_of_string n)) - | ([h ; n], '+') -> Ok (`Hash (Block_hash.of_b58check_exn h, - int_of_string n)) - | _ -> raise Exit + | ([h; n], '+') -> + Ok (`Hash (Block_hash.of_b58check_exn h, -int_of_string n)) + | _ -> + raise Exit with _ -> Error "Cannot parse block identifier." 
let alias_to_string = function - | `Checkpoint -> "checkpoint" - | `Save_point -> "save_point" - | `Caboose -> "caboose" + | `Checkpoint -> + "checkpoint" + | `Save_point -> + "save_point" + | `Caboose -> + "caboose" + let to_string = function - | `Genesis -> "genesis" - | `Alias (a, 0) -> alias_to_string a - | `Alias (a, n) when n < 0 -> Printf.sprintf "%s+%d" (alias_to_string a) (-n) - | `Alias (a, n) -> Printf.sprintf "%s~%d" (alias_to_string a) n - | `Head 0 -> "head" - | `Head n when n < 0 -> Printf.sprintf "head+%d" (-n) - | `Head n -> Printf.sprintf "head~%d" n - | `Hash (h, 0) -> Block_hash.to_b58check h - | `Hash (h, n) when n < 0 -> Printf.sprintf "%s+%d" (Block_hash.to_b58check h) (-n) - | `Hash (h, n) -> Printf.sprintf "%s~%d" (Block_hash.to_b58check h) n - | `Level i -> Printf.sprintf "%d" (Int32.to_int i) + | `Genesis -> + "genesis" + | `Alias (a, 0) -> + alias_to_string a + | `Alias (a, n) when n < 0 -> + Printf.sprintf "%s+%d" (alias_to_string a) (-n) + | `Alias (a, n) -> + Printf.sprintf "%s~%d" (alias_to_string a) n + | `Head 0 -> + "head" + | `Head n when n < 0 -> + Printf.sprintf "head+%d" (-n) + | `Head n -> + Printf.sprintf "head~%d" n + | `Hash (h, 0) -> + Block_hash.to_b58check h + | `Hash (h, n) when n < 0 -> + Printf.sprintf "%s+%d" (Block_hash.to_b58check h) (-n) + | `Hash (h, n) -> + Printf.sprintf "%s~%d" (Block_hash.to_b58check h) n + | `Level i -> + Printf.sprintf "%d" (Int32.to_int i) let blocks_arg = let name = "block_id" in let descr = "A block identifier. This is either a block hash in Base58Check notation, \ - one the predefined aliases: 'genesis', 'head' \ - or a block level (index in the chain). \ - One might also use 'head~N' or '<hash>~N' where N is an integer to \ - denote the Nth predecessor of the designated block.\ - Also, '<hash>+N' denotes the Nth successor of a block." in + one the predefined aliases: 'genesis', 'head' or a block level (index in \ + the chain). One might also use 'head~N' or '<hash>~N' where N is an \ + integer to denote the Nth predecessor of the designated block.Also, \ + '<hash>+N' denotes the Nth successor of a block." + in let construct = to_string in let destruct = parse_block in RPC_arg.make ~name ~descr ~construct ~destruct () type chain_prefix = unit * chain + type prefix = chain_prefix * block + let chain_path = RPC_path.(root / "chains" /: chain_arg) + let mempool_path p = RPC_path.(p / "mempool") + let live_blocks_path p = RPC_path.(p / "live_blocks") + let dir_path : (chain_prefix, chain_prefix) RPC_path.t = RPC_path.(open_root / "blocks") + let path = RPC_path.(dir_path /: blocks_arg) -type operation_list_quota = { - max_size: int ; - max_op: int option ; -} +type operation_list_quota = {max_size : int; max_op : int option} let operation_list_quota_encoding = conv - (fun { max_size ; max_op } -> (max_size, max_op)) - (fun (max_size, max_op) -> { max_size ; max_op }) - (obj2 - (req "max_size" int31) - (opt "max_op" int31)) + (fun {max_size; max_op} -> (max_size, max_op)) + (fun (max_size, max_op) -> {max_size; max_op}) + (obj2 (req "max_size" int31) (opt "max_op" int31)) -type raw_context = - | Key of MBytes.t - | Dir of (string * raw_context) list - | Cut +type raw_context = Key of MBytes.t | Dir of (string * raw_context) list | Cut let rec pp_raw_context ppf = function - | Cut -> Format.fprintf ppf "..." - | Key v -> Hex.pp ppf (MBytes.to_hex v) + | Cut -> + Format.fprintf ppf "..." 
+ | Key v -> + Hex.pp ppf (MBytes.to_hex v) | Dir l -> - Format.fprintf ppf "{@[<v 1>@,%a@]@,}" - (Format.pp_print_list - ~pp_sep:Format.pp_print_cut - (fun ppf (s, t) -> Format.fprintf ppf "%s : %a" s pp_raw_context t)) + Format.fprintf + ppf + "{@[<v 1>@,%a@]@,}" + (Format.pp_print_list ~pp_sep:Format.pp_print_cut (fun ppf (s, t) -> + Format.fprintf ppf "%s : %a" s pp_raw_context t)) l let raw_context_encoding = - mu "raw_context" - (fun encoding -> - union [ - case (Tag 0) bytes - ~title:"Key" - (function Key k -> Some k | _ -> None) - (fun k -> Key k) ; - case (Tag 1) (assoc encoding) - ~title:"Dir" - (function Dir k -> Some k | _ -> None) - (fun k -> Dir k) ; - case (Tag 2) null - ~title:"Cut" - (function Cut -> Some () | _ -> None) - (fun () -> Cut) ; - ]) - -type error += - | Invalid_depth_arg of int + mu "raw_context" (fun encoding -> + union + [ case + (Tag 0) + bytes + ~title:"Key" + (function Key k -> Some k | _ -> None) + (fun k -> Key k); + case + (Tag 1) + (assoc encoding) + ~title:"Dir" + (function Dir k -> Some k | _ -> None) + (fun k -> Dir k); + case + (Tag 2) + null + ~title:"Cut" + (function Cut -> Some () | _ -> None) + (fun () -> Cut) ]) + +type error += Invalid_depth_arg of int let () = register_error_kind @@ -212,158 +240,180 @@ let () = ~title:"Invalid depth argument" ~description:"The raw context extraction depth argument must be positive." ~pp:(fun ppf depth -> - Format.fprintf ppf "Extraction depth %d is invalid" depth) + Format.fprintf ppf "Extraction depth %d is invalid" depth) Data_encoding.(obj1 (req "depth" int31)) (function Invalid_depth_arg depth -> Some depth | _ -> None) (fun depth -> Invalid_depth_arg depth) module type PROTO = sig - val hash: Protocol_hash.t + val hash : Protocol_hash.t + type block_header_data - val block_header_data_encoding: block_header_data Data_encoding.t + + val block_header_data_encoding : block_header_data Data_encoding.t + type block_header_metadata - val block_header_metadata_encoding: - block_header_metadata Data_encoding.t + + val block_header_metadata_encoding : block_header_metadata Data_encoding.t + type operation_data + type operation_receipt + type operation = { - shell: Operation.shell_header ; - protocol_data: operation_data ; + shell : Operation.shell_header; + protocol_data : operation_data } - val operation_data_encoding: operation_data Data_encoding.t - val operation_receipt_encoding: operation_receipt Data_encoding.t - val operation_data_and_receipt_encoding: + val operation_data_encoding : operation_data Data_encoding.t + + val operation_receipt_encoding : operation_receipt Data_encoding.t + + val operation_data_and_receipt_encoding : (operation_data * operation_receipt) Data_encoding.t end - type protocols = { - current_protocol: Protocol_hash.t ; - next_protocol: Protocol_hash.t ; + current_protocol : Protocol_hash.t; + next_protocol : Protocol_hash.t } let raw_protocol_encoding = conv - (fun { current_protocol ; next_protocol } -> - ((current_protocol, next_protocol), ())) + (fun {current_protocol; next_protocol} -> + ((current_protocol, next_protocol), ())) (fun ((current_protocol, next_protocol), ()) -> - { current_protocol ; next_protocol }) + {current_protocol; next_protocol}) (merge_objs (obj2 (req "protocol" Protocol_hash.encoding) (req "next_protocol" Protocol_hash.encoding)) unit) -module Make(Proto : PROTO)(Next_proto : PROTO) = struct - +module Make (Proto : PROTO) (Next_proto : PROTO) = struct let protocol_hash = Protocol_hash.to_b58check Proto.hash + let next_protocol_hash = 
Protocol_hash.to_b58check Next_proto.hash type raw_block_header = { - shell: Block_header.shell_header ; - protocol_data: Proto.block_header_data ; + shell : Block_header.shell_header; + protocol_data : Proto.block_header_data } let raw_block_header_encoding = - def "raw_block_header" @@ - conv - (fun { shell ; protocol_data } -> (shell, protocol_data)) - (fun (shell, protocol_data) -> { shell ; protocol_data } ) - (merge_objs - Block_header.shell_header_encoding - Proto.block_header_data_encoding) + def "raw_block_header" + @@ conv + (fun {shell; protocol_data} -> (shell, protocol_data)) + (fun (shell, protocol_data) -> {shell; protocol_data}) + (merge_objs + Block_header.shell_header_encoding + Proto.block_header_data_encoding) type block_header = { - chain_id: Chain_id.t ; - hash: Block_hash.t ; - shell: Block_header.shell_header ; - protocol_data: Proto.block_header_data ; + chain_id : Chain_id.t; + hash : Block_hash.t; + shell : Block_header.shell_header; + protocol_data : Proto.block_header_data } let block_header_encoding = - def "block_header" @@ - conv - (fun { chain_id ; hash ; shell ; protocol_data } -> - (((), chain_id, hash), { shell ; protocol_data })) - (fun (((), chain_id, hash), { shell ; protocol_data }) -> - { chain_id ; hash ; shell ; protocol_data } ) - (merge_objs - (obj3 - (req "protocol" (constant protocol_hash)) - (req "chain_id" Chain_id.encoding) - (req "hash" Block_hash.encoding)) - raw_block_header_encoding) + def "block_header" + @@ conv + (fun {chain_id; hash; shell; protocol_data} -> + (((), chain_id, hash), {shell; protocol_data})) + (fun (((), chain_id, hash), {shell; protocol_data}) -> + {chain_id; hash; shell; protocol_data}) + (merge_objs + (obj3 + (req "protocol" (constant protocol_hash)) + (req "chain_id" Chain_id.encoding) + (req "hash" Block_hash.encoding)) + raw_block_header_encoding) type block_metadata = { - protocol_data: Proto.block_header_metadata ; - test_chain_status: Test_chain_status.t ; + protocol_data : Proto.block_header_metadata; + test_chain_status : Test_chain_status.t; (* for the next block: *) - max_operations_ttl: int ; - max_operation_data_length: int ; - max_block_header_length: int ; - operation_list_quota: operation_list_quota list ; + max_operations_ttl : int; + max_operation_data_length : int; + max_block_header_length : int; + operation_list_quota : operation_list_quota list } let block_metadata_encoding = - def "block_header_metadata" @@ - conv - (fun { protocol_data ; test_chain_status ; max_operations_ttl ; - max_operation_data_length ; max_block_header_length ; - operation_list_quota } -> - (((), (), test_chain_status, - max_operations_ttl, max_operation_data_length, - max_block_header_length, operation_list_quota), - protocol_data)) - (fun (((), (), test_chain_status, - max_operations_ttl, max_operation_data_length, - max_block_header_length, operation_list_quota), - protocol_data) -> - { protocol_data ; test_chain_status ; max_operations_ttl ; - max_operation_data_length ; max_block_header_length ; - operation_list_quota }) - (merge_objs - (obj7 - (req "protocol" (constant protocol_hash)) - (req "next_protocol" (constant next_protocol_hash)) - (req "test_chain_status" Test_chain_status.encoding) - (req "max_operations_ttl" int31) - (req "max_operation_data_length" int31) - (req "max_block_header_length" int31) - (req "max_operation_list_length" - (dynamic_size (list operation_list_quota_encoding)))) - Proto.block_header_metadata_encoding) + def "block_header_metadata" + @@ conv + (fun { protocol_data; + 
test_chain_status; + max_operations_ttl; + max_operation_data_length; + max_block_header_length; + operation_list_quota } -> + ( ( (), + (), + test_chain_status, + max_operations_ttl, + max_operation_data_length, + max_block_header_length, + operation_list_quota ), + protocol_data )) + (fun ( ( (), + (), + test_chain_status, + max_operations_ttl, + max_operation_data_length, + max_block_header_length, + operation_list_quota ), + protocol_data ) -> + { protocol_data; + test_chain_status; + max_operations_ttl; + max_operation_data_length; + max_block_header_length; + operation_list_quota }) + (merge_objs + (obj7 + (req "protocol" (constant protocol_hash)) + (req "next_protocol" (constant next_protocol_hash)) + (req "test_chain_status" Test_chain_status.encoding) + (req "max_operations_ttl" int31) + (req "max_operation_data_length" int31) + (req "max_block_header_length" int31) + (req + "max_operation_list_length" + (dynamic_size (list operation_list_quota_encoding)))) + Proto.block_header_metadata_encoding) let next_operation_encoding = let open Data_encoding in - def "next_operation" @@ - conv - (fun Next_proto.{ shell ; protocol_data } -> - ((), (shell, protocol_data))) - (fun ((), (shell, protocol_data)) -> - { shell ; protocol_data } ) - (merge_objs - (obj1 (req "protocol" (constant next_protocol_hash))) + def "next_operation" + @@ conv + (fun Next_proto.{shell; protocol_data} -> + ((), (shell, protocol_data))) + (fun ((), (shell, protocol_data)) -> {shell; protocol_data}) (merge_objs - (dynamic_size Operation.shell_header_encoding) - (dynamic_size Next_proto.operation_data_encoding))) + (obj1 (req "protocol" (constant next_protocol_hash))) + (merge_objs + (dynamic_size Operation.shell_header_encoding) + (dynamic_size Next_proto.operation_data_encoding))) type operation = { - chain_id: Chain_id.t ; - hash: Operation_hash.t ; - shell: Operation.shell_header ; - protocol_data: Proto.operation_data ; - receipt: Proto.operation_receipt ; + chain_id : Chain_id.t; + hash : Operation_hash.t; + shell : Operation.shell_header; + protocol_data : Proto.operation_data; + receipt : Proto.operation_receipt } let operation_encoding = - def "operation" @@ + def "operation" + @@ let open Data_encoding in conv - (fun { chain_id ; hash ; shell ; protocol_data ; receipt } -> - (((), chain_id, hash), (shell, (protocol_data, receipt)))) - (fun (((), chain_id, hash), (shell, (protocol_data, receipt))) -> - { chain_id ; hash ; shell ; protocol_data ; receipt }) + (fun {chain_id; hash; shell; protocol_data; receipt} -> + (((), chain_id, hash), (shell, (protocol_data, receipt)))) + (fun (((), chain_id, hash), (shell, (protocol_data, receipt))) -> + {chain_id; hash; shell; protocol_data; receipt}) (merge_objs (obj3 (req "protocol" (constant protocol_hash)) @@ -374,85 +424,83 @@ module Make(Proto : PROTO)(Next_proto : PROTO) = struct (dynamic_size Proto.operation_data_and_receipt_encoding))) type block_info = { - chain_id: Chain_id.t ; - hash: Block_hash.t ; - header: raw_block_header ; - metadata: block_metadata ; - operations: operation list list ; + chain_id : Chain_id.t; + hash : Block_hash.t; + header : raw_block_header; + metadata : block_metadata; + operations : operation list list } let block_info_encoding = conv - (fun { chain_id ; hash ; header ; metadata ; operations } -> - ((), chain_id, hash, header, metadata, operations)) + (fun {chain_id; hash; header; metadata; operations} -> + ((), chain_id, hash, header, metadata, operations)) (fun ((), chain_id, hash, header, metadata, operations) -> - { chain_id 
; hash ; header ; metadata ; operations }) + {chain_id; hash; header; metadata; operations}) (obj6 (req "protocol" (constant protocol_hash)) (req "chain_id" Chain_id.encoding) (req "hash" Block_hash.encoding) (req "header" (dynamic_size raw_block_header_encoding)) (req "metadata" (dynamic_size block_metadata_encoding)) - (req "operations" - (list (dynamic_size (list operation_encoding))))) + (req "operations" (list (dynamic_size (list operation_encoding))))) module S = struct - let path : prefix RPC_path.context = RPC_path.open_root let hash = RPC_service.get_service ~description:"The block's hash, its unique identifier." - ~query: RPC_query.empty - ~output: Block_hash.encoding + ~query:RPC_query.empty + ~output:Block_hash.encoding RPC_path.(path / "hash") let header = RPC_service.get_service ~description:"The whole block header." - ~query: RPC_query.empty - ~output: block_header_encoding + ~query:RPC_query.empty + ~output:block_header_encoding RPC_path.(path / "header") let raw_header = RPC_service.get_service ~description:"The whole block header (unparsed)." - ~query: RPC_query.empty - ~output: bytes + ~query:RPC_query.empty + ~output:bytes RPC_path.(path / "header" / "raw") let metadata = RPC_service.get_service ~description:"All the metadata associated to the block." - ~query: RPC_query.empty - ~output: block_metadata_encoding + ~query:RPC_query.empty + ~output:block_metadata_encoding RPC_path.(path / "metadata") let protocols = RPC_service.get_service ~description:"Current and next protocol." - ~query: RPC_query.empty - ~output: raw_protocol_encoding + ~query:RPC_query.empty + ~output:raw_protocol_encoding RPC_path.(path / "protocols") module Header = struct - let path = RPC_path.(path / "header") let shell_header = RPC_service.get_service ~description:"The shell-specific fragment of the block header." - ~query: RPC_query.empty - ~output: Block_header.shell_header_encoding + ~query:RPC_query.empty + ~output:Block_header.shell_header_encoding RPC_path.(path / "shell") let protocol_data = RPC_service.get_service ~description:"The version-specific fragment of the block header." - ~query: RPC_query.empty + ~query:RPC_query.empty ~output: (conv - (fun h -> ((), h)) (fun ((), h) -> h) + (fun h -> ((), h)) + (fun ((), h) -> h) (merge_objs (obj1 (req "protocol" (constant protocol_hash))) Proto.block_header_data_encoding)) @@ -460,143 +508,146 @@ module Make(Proto : PROTO)(Next_proto : PROTO) = struct let raw_protocol_data = RPC_service.get_service - ~description:"The version-specific fragment of the block header (unparsed)." - ~query: RPC_query.empty - ~output: bytes + ~description: + "The version-specific fragment of the block header (unparsed)." + ~query:RPC_query.empty + ~output:bytes RPC_path.(path / "protocol_data" / "raw") - end module Operations = struct - let path = RPC_path.(path / "operations") let operations = RPC_service.get_service ~description:"All the operations included in the block." - ~query: RPC_query.empty - ~output: (list (dynamic_size (list operation_encoding))) + ~query:RPC_query.empty + ~output:(list (dynamic_size (list operation_encoding))) path let list_arg = let name = "list_offset" in - let descr = - "Index `n` of the requested validation pass." in + let descr = "Index `n` of the requested validation pass." 
in let construct = string_of_int in let destruct s = try Ok (int_of_string s) - with _ -> Error (Format.sprintf "Invalid list offset (%s)" s) in + with _ -> Error (Format.sprintf "Invalid list offset (%s)" s) + in RPC_arg.make ~name ~descr ~construct ~destruct () let offset_arg = let name = "operation_offset" in let descr = - "Index `m` of the requested operation in its validation pass." in + "Index `m` of the requested operation in its validation pass." + in let construct = string_of_int in let destruct s = try Ok (int_of_string s) - with _ -> Error (Format.sprintf "Invalid operation offset (%s)" s) in + with _ -> Error (Format.sprintf "Invalid operation offset (%s)" s) + in RPC_arg.make ~name ~descr ~construct ~destruct () let operations_in_pass = RPC_service.get_service ~description: - "All the operations included in `n-th` validation pass of the block." - ~query: RPC_query.empty - ~output: (list operation_encoding) + "All the operations included in `n-th` validation pass of the \ + block." + ~query:RPC_query.empty + ~output:(list operation_encoding) RPC_path.(path /: list_arg) let operation = RPC_service.get_service ~description: "The `m-th` operation in the `n-th` validation pass of the block." - ~query: RPC_query.empty - ~output: operation_encoding + ~query:RPC_query.empty + ~output:operation_encoding RPC_path.(path /: list_arg /: offset_arg) - end module Operation_hashes = struct - let path = RPC_path.(path / "operation_hashes") let operation_hashes = RPC_service.get_service - ~description:"The hashes of all the operations included in the block." - ~query: RPC_query.empty - ~output: (list (list Operation_hash.encoding)) + ~description: + "The hashes of all the operations included in the block." + ~query:RPC_query.empty + ~output:(list (list Operation_hash.encoding)) path let operation_hashes_in_pass = RPC_service.get_service ~description: - "All the operations included in `n-th` validation pass of the block." - ~query: RPC_query.empty - ~output: (list Operation_hash.encoding) + "All the operations included in `n-th` validation pass of the \ + block." + ~query:RPC_query.empty + ~output:(list Operation_hash.encoding) RPC_path.(path /: Operations.list_arg) let operation_hash = RPC_service.get_service ~description: - "The hash of then `m-th` operation in the `n-th` validation pass of the block." - ~query: RPC_query.empty - ~output: Operation_hash.encoding + "The hash of then `m-th` operation in the `n-th` validation pass \ + of the block." 
+ ~query:RPC_query.empty + ~output:Operation_hash.encoding RPC_path.(path /: Operations.list_arg /: Operations.offset_arg) end module Helpers = struct - let path = RPC_path.(path / "helpers") module Forge = struct - let block_header = RPC_service.post_service - ~description: "Forge a block header" - ~query: RPC_query.empty - ~input: Block_header.encoding - ~output: (obj1 (req "block" bytes)) + ~description:"Forge a block header" + ~query:RPC_query.empty + ~input:Block_header.encoding + ~output:(obj1 (req "block" bytes)) RPC_path.(path / "forge_block_header") - end module Preapply = struct - let path = RPC_path.(path / "preapply") let block_result_encoding = obj2 (req "shell_header" Block_header.shell_header_encoding) - (req "operations" + (req + "operations" (list (Preapply_result.encoding RPC_error.encoding))) type block_param = { - protocol_data: Next_proto.block_header_data ; - operations: Next_proto.operation list list ; + protocol_data : Next_proto.block_header_data; + operations : Next_proto.operation list list } let block_param_encoding = - (conv - (fun { protocol_data ; operations } -> - (protocol_data, operations)) - (fun (protocol_data, operations) -> - { protocol_data ; operations }) - (obj2 - (req "protocol_data" - (conv - (fun h -> ((), h)) (fun ((), h) -> h) - (merge_objs - (obj1 (req "protocol" (constant next_protocol_hash))) - (dynamic_size Next_proto.block_header_data_encoding)))) - (req "operations" - (list (dynamic_size (list next_operation_encoding)))))) + conv + (fun {protocol_data; operations} -> (protocol_data, operations)) + (fun (protocol_data, operations) -> {protocol_data; operations}) + (obj2 + (req + "protocol_data" + (conv + (fun h -> ((), h)) + (fun ((), h) -> h) + (merge_objs + (obj1 (req "protocol" (constant next_protocol_hash))) + (dynamic_size Next_proto.block_header_data_encoding)))) + (req + "operations" + (list (dynamic_size (list next_operation_encoding))))) let block_query = let open RPC_query in - query (fun sort timestamp -> object - method sort_operations = sort - method timestamp = timestamp - end) + query (fun sort timestamp -> + object + method sort_operations = sort + + method timestamp = timestamp + end) |+ flag "sort" (fun t -> t#sort_operations) |+ opt_field "timestamp" Time.Protocol.rpc_arg (fun t -> t#timestamp) |> seal @@ -604,184 +655,189 @@ module Make(Proto : PROTO)(Next_proto : PROTO) = struct let block = RPC_service.post_service ~description: - "Simulate the validation of a block that would contain \ - the given operations and return the resulting fitness \ - and context hash." - ~query: block_query - ~input: block_param_encoding - ~output: block_result_encoding + "Simulate the validation of a block that would contain the \ + given operations and return the resulting fitness and context \ + hash." + ~query:block_query + ~input:block_param_encoding + ~output:block_result_encoding RPC_path.(path / "block") let operations = RPC_service.post_service - ~description: - "Simulate the validation of an operation." - ~query: RPC_query.empty - ~input: (list next_operation_encoding) - ~output: (list (dynamic_size Next_proto.operation_data_and_receipt_encoding)) + ~description:"Simulate the validation of an operation." 
+ ~query:RPC_query.empty + ~input:(list next_operation_encoding) + ~output: + (list + (dynamic_size Next_proto.operation_data_and_receipt_encoding)) RPC_path.(path / "operations") - end let complete = let prefix_arg = - let destruct s = Ok s - and construct s = s in - RPC_arg.make ~name:"prefix" ~destruct ~construct () in + let destruct s = Ok s and construct s = s in + RPC_arg.make ~name:"prefix" ~destruct ~construct () + in RPC_service.get_service - ~description: "Try to complete a prefix of a Base58Check-encoded data. \ - This RPC is actually able to complete hashes of \ - block, operations, public_keys and contracts." - ~query: RPC_query.empty - ~output: (list string) - RPC_path.(path / "complete" /: prefix_arg ) - + ~description: + "Try to complete a prefix of a Base58Check-encoded data. This RPC \ + is actually able to complete hashes of block, operations, \ + public_keys and contracts." + ~query:RPC_query.empty + ~output:(list string) + RPC_path.(path / "complete" /: prefix_arg) end module Context = struct - let path = RPC_path.(path / "context" / "raw" / "bytes") let context_path_arg : string RPC_arg.t = let name = "context_path" in let descr = "A path inside the context" in - let construct = fun s -> s in - let destruct = fun s -> Ok s in + let construct s = s in + let destruct s = Ok s in RPC_arg.make ~name ~descr ~construct ~destruct () - let raw_context_query : < depth: int option > RPC_query.t = + let raw_context_query : < depth : int option > RPC_query.t = let open RPC_query in - query (fun depth -> object - method depth = depth - end) + query (fun depth -> + object + method depth = depth + end) |+ opt_field "depth" RPC_arg.int (fun t -> t#depth) |> seal let read = RPC_service.get_service ~description:"Returns the raw context." - ~query: raw_context_query - ~output: raw_context_encoding + ~query:raw_context_query + ~output:raw_context_encoding RPC_path.(path /:* context_path_arg) - end let info = RPC_service.get_service ~description:"All the information about a block." 
- ~query: RPC_query.empty - ~output: block_info_encoding + ~query:RPC_query.empty + ~output:block_info_encoding path module Mempool = struct - type t = { - applied: (Operation_hash.t * Next_proto.operation) list ; - refused: (Next_proto.operation * error list) Operation_hash.Map.t ; - branch_refused: (Next_proto.operation * error list) Operation_hash.Map.t ; - branch_delayed: (Next_proto.operation * error list) Operation_hash.Map.t ; - unprocessed: Next_proto.operation Operation_hash.Map.t ; + applied : (Operation_hash.t * Next_proto.operation) list; + refused : (Next_proto.operation * error list) Operation_hash.Map.t; + branch_refused : + (Next_proto.operation * error list) Operation_hash.Map.t; + branch_delayed : + (Next_proto.operation * error list) Operation_hash.Map.t; + unprocessed : Next_proto.operation Operation_hash.Map.t } let encoding = conv - (fun - { applied ; - refused ; branch_refused ; branch_delayed ; - unprocessed } -> + (fun {applied; refused; branch_refused; branch_delayed; unprocessed} -> (applied, refused, branch_refused, branch_delayed, unprocessed)) - (fun - (applied, refused, branch_refused, branch_delayed, unprocessed) -> - { applied ; - refused ; branch_refused ; branch_delayed ; - unprocessed }) + (fun (applied, refused, branch_refused, branch_delayed, unprocessed) -> + {applied; refused; branch_refused; branch_delayed; unprocessed}) (obj5 - (req "applied" + (req + "applied" (list (conv (fun (hash, (op : Next_proto.operation)) -> - ((hash, op.shell), (op.protocol_data))) - (fun ((hash, shell), (protocol_data)) -> - (hash, { shell ; protocol_data })) + ((hash, op.shell), op.protocol_data)) + (fun ((hash, shell), protocol_data) -> + (hash, {shell; protocol_data})) (merge_objs (merge_objs (obj1 (req "hash" Operation_hash.encoding)) (dynamic_size Operation.shell_header_encoding)) - (dynamic_size Next_proto.operation_data_encoding) - )))) - (req "refused" + (dynamic_size Next_proto.operation_data_encoding))))) + (req + "refused" (Operation_hash.Map.encoding (merge_objs (dynamic_size next_operation_encoding) (obj1 (req "error" RPC_error.encoding))))) - (req "branch_refused" + (req + "branch_refused" (Operation_hash.Map.encoding (merge_objs (dynamic_size next_operation_encoding) (obj1 (req "error" RPC_error.encoding))))) - (req "branch_delayed" + (req + "branch_delayed" (Operation_hash.Map.encoding (merge_objs (dynamic_size next_operation_encoding) (obj1 (req "error" RPC_error.encoding))))) - (req "unprocessed" + (req + "unprocessed" (Operation_hash.Map.encoding (dynamic_size next_operation_encoding)))) let pending_operations path = (* TODO: branch_delayed/... *) RPC_service.get_service - ~description: "List the prevalidated operations." - ~query: RPC_query.empty - ~output: encoding + ~description:"List the prevalidated operations." 
+ ~query:RPC_query.empty + ~output:encoding RPC_path.(path / "pending_operations") let mempool_query = let open RPC_query in - query (fun applied refused - branch_refused branch_delayed -> object - method applied = applied - method refused = refused - method branch_refused = branch_refused - method branch_delayed = branch_delayed - end) - |+ flag ~descr:"Include applied operations (set by default)" - "applied" (fun t -> t#applied) - |+ flag ~descr:"Include refused operations" - "refused" (fun t -> t#refused) - |+ flag ~descr:"Include branch refused operations" - "branch_refused" (fun t -> t#branch_refused) - |+ flag ~descr:"Include branch delayed operations (set by default)" - "branch_delayed" (fun t -> t#branch_delayed) + query (fun applied refused branch_refused branch_delayed -> + object + method applied = applied + + method refused = refused + + method branch_refused = branch_refused + + method branch_delayed = branch_delayed + end) + |+ flag + ~descr:"Include applied operations (set by default)" + "applied" + (fun t -> t#applied) + |+ flag ~descr:"Include refused operations" "refused" (fun t -> + t#refused) + |+ flag + ~descr:"Include branch refused operations" + "branch_refused" + (fun t -> t#branch_refused) + |+ flag + ~descr:"Include branch delayed operations (set by default)" + "branch_delayed" + (fun t -> t#branch_delayed) |> seal let monitor_operations path = RPC_service.get_service ~description:"Monitor the mempool operations." - ~query: mempool_query - ~output: (list next_operation_encoding) + ~query:mempool_query + ~output:(list next_operation_encoding) RPC_path.(path / "monitor_operations") let request_operations path = RPC_service.post_service ~description:"Request the operations of your peers." - ~input: Data_encoding.empty - ~query: RPC_query.empty - ~output: Data_encoding.empty + ~input:Data_encoding.empty + ~query:RPC_query.empty + ~output:Data_encoding.empty RPC_path.(path / "request_operations") - end let live_blocks = RPC_service.get_service - ~description:"List the ancestors of the given block which, if \ - referred to as the branch in an operation \ - header, are recent enough for that operation to \ - be included in the current block." - ~query: RPC_query.empty - ~output: Block_hash.Set.encoding + ~description: + "List the ancestors of the given block which, if referred to as the \ + branch in an operation header, are recent enough for that \ + operation to be included in the current block." 
+ ~query:RPC_query.empty + ~output:Block_hash.Set.encoding RPC_path.(live_blocks_path open_root) - end let path = RPC_path.prefix chain_path path @@ -801,181 +857,167 @@ module Make(Proto : PROTO)(Next_proto : PROTO) = struct let hash ctxt = let f = make_call0 S.hash ctxt in fun ?(chain = `Main) ?(block = `Head 0) () -> - match block with - | `Hash (h, 0) -> return h - | _ -> f chain block () () + match block with `Hash (h, 0) -> return h | _ -> f chain block () () let header ctxt = let f = make_call0 S.header ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () let raw_header ctxt = let f = make_call0 S.raw_header ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () let metadata ctxt = let f = make_call0 S.metadata ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () let protocols ctxt = let f = make_call0 S.protocols ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () module Header = struct - module S = S.Header let shell_header ctxt = let f = make_call0 S.shell_header ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () + let protocol_data ctxt = let f = make_call0 S.protocol_data ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () + let raw_protocol_data ctxt = let f = make_call0 S.raw_protocol_data ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () - + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () end module Operations = struct - module S = S.Operations let operations ctxt = let f = make_call0 S.operations ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () let operations_in_pass ctxt = let f = make_call1 S.operations_in_pass ctxt in - fun ?(chain = `Main) ?(block = `Head 0) n -> - f chain block n () () + fun ?(chain = `Main) ?(block = `Head 0) n -> f chain block n () () let operation ctxt = let f = make_call2 S.operation ctxt in - fun ?(chain = `Main) ?(block = `Head 0) n m -> - f chain block n m () () - + fun ?(chain = `Main) ?(block = `Head 0) n m -> f chain block n m () () end module Operation_hashes = struct - module S = S.Operation_hashes let operation_hashes ctxt = let f = make_call0 S.operation_hashes ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () let operation_hashes_in_pass ctxt = let f = make_call1 S.operation_hashes_in_pass ctxt in - fun ?(chain = `Main) ?(block = `Head 0) n -> - f chain block n () () + fun ?(chain = `Main) ?(block = `Head 0) n -> f chain block n () () let operation_hash ctxt = let f = make_call2 S.operation_hash ctxt in - fun ?(chain = `Main) ?(block = `Head 0) n m -> - f chain block n m () () - + fun ?(chain = `Main) ?(block = `Head 0) n m -> f chain block n m () () end module Context = struct - module S = S.Context let read ctxt = let f = make_call1 S.read ctxt in fun ?(chain = `Main) ?(block = `Head 0) ?depth path -> - f chain block path - (object method depth = depth 
end) () - + f + chain + block + path + (object + method depth = depth + end) + () end module Helpers = struct - module S = S.Helpers module Forge = struct - module S = S.Forge let block_header ctxt = let f = make_call0 S.block_header ctxt in - fun - ?(chain = `Main) ?(block = `Head 0) - header -> + fun ?(chain = `Main) ?(block = `Head 0) header -> f chain block () header - end module Preapply = struct - module S = S.Preapply let block ctxt = let f = make_call0 S.block ctxt in - fun - ?(chain = `Main) ?(block = `Head 0) - ?(sort = false) ?timestamp ~protocol_data operations -> - f chain block - (object method sort_operations = sort method timestamp = timestamp end) - { protocol_data ; operations } + fun ?(chain = `Main) + ?(block = `Head 0) + ?(sort = false) + ?timestamp + ~protocol_data + operations -> + f + chain + block + (object + method sort_operations = sort + + method timestamp = timestamp + end) + {protocol_data; operations} let operations ctxt = let f = make_call0 S.operations ctxt in fun ?(chain = `Main) ?(block = `Head 0) operations -> f chain block () operations - end let complete ctxt = let f = make_call1 S.complete ctxt in - fun ?(chain = `Main) ?(block = `Head 0) s -> - f chain block s () () - + fun ?(chain = `Main) ?(block = `Head 0) s -> f chain block s () () end let info ctxt = let f = make_call0 S.info ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () module Mempool = struct - type t = S.Mempool.t = { - applied: (Operation_hash.t * Next_proto.operation) list ; - refused: (Next_proto.operation * error list) Operation_hash.Map.t ; - branch_refused: (Next_proto.operation * error list) Operation_hash.Map.t ; - branch_delayed: (Next_proto.operation * error list) Operation_hash.Map.t ; - unprocessed: Next_proto.operation Operation_hash.Map.t ; + applied : (Operation_hash.t * Next_proto.operation) list; + refused : (Next_proto.operation * error list) Operation_hash.Map.t; + branch_refused : + (Next_proto.operation * error list) Operation_hash.Map.t; + branch_delayed : + (Next_proto.operation * error list) Operation_hash.Map.t; + unprocessed : Next_proto.operation Operation_hash.Map.t } let pending_operations ctxt ?(chain = `Main) () = let s = S.Mempool.pending_operations (mempool_path chain_path) in RPC_context.make_call1 s ctxt chain () () - let monitor_operations ctxt - ?(chain = `Main) - ?(applied = true) - ?(branch_delayed = true) - ?(branch_refused = false) - ?(refused=false) + let monitor_operations ctxt ?(chain = `Main) ?(applied = true) + ?(branch_delayed = true) ?(branch_refused = false) ?(refused = false) () = let s = S.Mempool.monitor_operations (mempool_path chain_path) in - RPC_context.make_streamed_call s ctxt + RPC_context.make_streamed_call + s + ctxt ((), chain) (object - method applied = applied - method refused = refused - method branch_refused = branch_refused - method branch_delayed = branch_delayed + method applied = applied + + method refused = refused + + method branch_refused = branch_refused + + method branch_delayed = branch_delayed end) () @@ -986,25 +1028,33 @@ module Make(Proto : PROTO)(Next_proto : PROTO) = struct let live_blocks ctxt = let f = make_call0 S.live_blocks ctxt in - fun ?(chain = `Main) ?(block = `Head 0) () -> - f chain block () () - + fun ?(chain = `Main) ?(block = `Head 0) () -> f chain block () () end module Fake_protocol = struct let hash = Protocol_hash.zero + type block_header_data = unit + let block_header_data_encoding = 
Data_encoding.empty + type block_header_metadata = unit + let block_header_metadata_encoding = Data_encoding.empty + type operation_data = unit + type operation_receipt = unit + type operation = { - shell: Operation.shell_header ; - protocol_data: operation_data ; + shell : Operation.shell_header; + protocol_data : operation_data } + let operation_data_encoding = Data_encoding.empty + let operation_receipt_encoding = Data_encoding.empty + let operation_data_and_receipt_encoding = Data_encoding.conv (fun ((), ()) -> ()) @@ -1012,17 +1062,18 @@ module Fake_protocol = struct Data_encoding.empty end -module Empty = Make(Fake_protocol)(Fake_protocol) +module Empty = Make (Fake_protocol) (Fake_protocol) let () = - Printexc.register_printer - (function - | (Json_schema.Cannot_parse _ + Printexc.register_printer (function + | ( Json_schema.Cannot_parse _ | Json_schema.Dangling_reference _ | Json_schema.Bad_reference _ | Json_schema.Unexpected _ | Json_schema.Duplicate_definition _ ) as exn -> - Some (Format.asprintf "%a" (fun ppf -> Json_schema.print_error ppf) exn) - | _ -> None) + Some + (Format.asprintf "%a" (fun ppf -> Json_schema.print_error ppf) exn) + | _ -> + None) let protocols = Empty.protocols diff --git a/src/lib_shell_services/block_services.mli b/src/lib_shell_services/block_services.mli index fa7bb54a5736dc6ce26eb276038268cfa6896054..4fce8495334407949879f2f7c501d095cb9cf0b4 100644 --- a/src/lib_shell_services/block_services.mli +++ b/src/lib_shell_services/block_services.mli @@ -23,435 +23,496 @@ (* *) (*****************************************************************************) -type chain = [ - | `Main - | `Test - | `Hash of Chain_id.t -] +type chain = [`Main | `Test | `Hash of Chain_id.t] type chain_prefix = unit * chain -val chain_path: (unit, chain_prefix) RPC_path.t -val parse_chain: string -> (chain, string) result -val chain_to_string: chain -> string +val chain_path : (unit, chain_prefix) RPC_path.t -val chain_arg: chain RPC_arg.t +val parse_chain : string -> (chain, string) result -type block = [ - | `Genesis +val chain_to_string : chain -> string + +val chain_arg : chain RPC_arg.t + +type block = + [ `Genesis | `Head of int - | `Alias of [ `Caboose | `Checkpoint | `Save_point ] * int + | `Alias of [`Caboose | `Checkpoint | `Save_point] * int | `Hash of Block_hash.t * int - | `Level of Int32.t -] -val parse_block: string -> (block, string) result -val to_string: block -> string + | `Level of Int32.t ] + +val parse_block : string -> (block, string) result + +val to_string : block -> string type prefix = (unit * chain) * block -val dir_path: (chain_prefix, chain_prefix) RPC_path.t -val path: (chain_prefix, chain_prefix * block) RPC_path.t + +val dir_path : (chain_prefix, chain_prefix) RPC_path.t + +val path : (chain_prefix, chain_prefix * block) RPC_path.t + val mempool_path : ('a, 'b) RPC_path.t -> ('a, 'b) RPC_path.t + val live_blocks_path : ('a, 'b) RPC_path.t -> ('a, 'b) RPC_path.t -type operation_list_quota = { - max_size: int ; - max_op: int option ; -} +type operation_list_quota = {max_size : int; max_op : int option} -type raw_context = - | Key of MBytes.t - | Dir of (string * raw_context) list - | Cut +type raw_context = Key of MBytes.t | Dir of (string * raw_context) list | Cut -val pp_raw_context: Format.formatter -> raw_context -> unit +val pp_raw_context : Format.formatter -> raw_context -> unit -type error += - | Invalid_depth_arg of int +type error += Invalid_depth_arg of int module type PROTO = sig - val hash: Protocol_hash.t + val hash : Protocol_hash.t + 
type block_header_data - val block_header_data_encoding: block_header_data Data_encoding.t + + val block_header_data_encoding : block_header_data Data_encoding.t + type block_header_metadata - val block_header_metadata_encoding: - block_header_metadata Data_encoding.t + + val block_header_metadata_encoding : block_header_metadata Data_encoding.t + type operation_data + type operation_receipt + type operation = { - shell: Operation.shell_header ; - protocol_data: operation_data ; + shell : Operation.shell_header; + protocol_data : operation_data } - val operation_data_encoding: operation_data Data_encoding.t - val operation_receipt_encoding: operation_receipt Data_encoding.t - val operation_data_and_receipt_encoding: + val operation_data_encoding : operation_data Data_encoding.t + + val operation_receipt_encoding : operation_receipt Data_encoding.t + + val operation_data_and_receipt_encoding : (operation_data * operation_receipt) Data_encoding.t end type protocols = { - current_protocol: Protocol_hash.t ; - next_protocol: Protocol_hash.t ; + current_protocol : Protocol_hash.t; + next_protocol : Protocol_hash.t } -val protocols: - #RPC_context.simple -> ?chain:chain -> ?block:block -> - unit -> protocols tzresult Lwt.t - -module Make(Proto : PROTO)(Next_proto : PROTO) : sig +val protocols : + #RPC_context.simple -> + ?chain:chain -> + ?block:block -> + unit -> + protocols tzresult Lwt.t - val path: (unit, chain_prefix * block) RPC_path.t +module Make (Proto : PROTO) (Next_proto : PROTO) : sig + val path : (unit, chain_prefix * block) RPC_path.t type raw_block_header = { - shell: Block_header.shell_header ; - protocol_data: Proto.block_header_data ; + shell : Block_header.shell_header; + protocol_data : Proto.block_header_data } type block_header = { - chain_id: Chain_id.t ; - hash: Block_hash.t ; - shell: Block_header.shell_header ; - protocol_data: Proto.block_header_data ; + chain_id : Chain_id.t; + hash : Block_hash.t; + shell : Block_header.shell_header; + protocol_data : Proto.block_header_data } type block_metadata = { - protocol_data: Proto.block_header_metadata ; - test_chain_status: Test_chain_status.t ; - max_operations_ttl: int ; - max_operation_data_length: int ; - max_block_header_length: int ; - operation_list_quota: operation_list_quota list ; + protocol_data : Proto.block_header_metadata; + test_chain_status : Test_chain_status.t; + max_operations_ttl : int; + max_operation_data_length : int; + max_block_header_length : int; + operation_list_quota : operation_list_quota list } type operation = { - chain_id: Chain_id.t ; - hash: Operation_hash.t ; - shell: Operation.shell_header ; - protocol_data: Proto.operation_data ; - receipt: Proto.operation_receipt ; + chain_id : Chain_id.t; + hash : Operation_hash.t; + shell : Operation.shell_header; + protocol_data : Proto.operation_data; + receipt : Proto.operation_receipt } type block_info = { - chain_id: Chain_id.t ; - hash: Block_hash.t ; - header: raw_block_header ; - metadata: block_metadata ; - operations: operation list list ; + chain_id : Chain_id.t; + hash : Block_hash.t; + header : raw_block_header; + metadata : block_metadata; + operations : operation list list } open RPC_context - val info: - #simple -> ?chain:chain -> ?block:block -> - unit -> block_info tzresult Lwt.t + val info : + #simple -> + ?chain:chain -> + ?block:block -> + unit -> + block_info tzresult Lwt.t - val hash: - #simple -> ?chain:chain -> ?block:block -> - unit -> Block_hash.t tzresult Lwt.t + val hash : + #simple -> + ?chain:chain -> + ?block:block -> 
+ unit -> + Block_hash.t tzresult Lwt.t - val raw_header: - #simple -> ?chain:chain -> ?block:block -> - unit -> MBytes.t tzresult Lwt.t + val raw_header : + #simple -> ?chain:chain -> ?block:block -> unit -> MBytes.t tzresult Lwt.t - val header: - #simple -> ?chain:chain -> ?block:block -> - unit -> block_header tzresult Lwt.t + val header : + #simple -> + ?chain:chain -> + ?block:block -> + unit -> + block_header tzresult Lwt.t - val metadata: - #simple -> ?chain:chain -> ?block:block -> - unit -> block_metadata tzresult Lwt.t + val metadata : + #simple -> + ?chain:chain -> + ?block:block -> + unit -> + block_metadata tzresult Lwt.t module Header : sig + val shell_header : + #simple -> + ?chain:chain -> + ?block:block -> + unit -> + Block_header.shell_header tzresult Lwt.t - val shell_header: - #simple -> ?chain:chain -> ?block:block -> - unit -> Block_header.shell_header tzresult Lwt.t - val protocol_data: - #simple -> ?chain:chain -> ?block:block -> - unit -> Proto.block_header_data tzresult Lwt.t - val raw_protocol_data: - #simple -> ?chain:chain -> ?block:block -> - unit -> MBytes.t tzresult Lwt.t + val protocol_data : + #simple -> + ?chain:chain -> + ?block:block -> + unit -> + Proto.block_header_data tzresult Lwt.t + val raw_protocol_data : + #simple -> + ?chain:chain -> + ?block:block -> + unit -> + MBytes.t tzresult Lwt.t end module Operations : sig + val operations : + #simple -> + ?chain:chain -> + ?block:block -> + unit -> + operation list list tzresult Lwt.t - val operations: - #simple -> ?chain:chain -> ?block:block -> - unit -> operation list list tzresult Lwt.t - val operations_in_pass: - #simple -> ?chain:chain -> ?block:block -> - int -> operation list tzresult Lwt.t - val operation: - #simple -> ?chain:chain -> ?block:block -> - int -> int -> operation tzresult Lwt.t + val operations_in_pass : + #simple -> + ?chain:chain -> + ?block:block -> + int -> + operation list tzresult Lwt.t + val operation : + #simple -> + ?chain:chain -> + ?block:block -> + int -> + int -> + operation tzresult Lwt.t end module Operation_hashes : sig + val operation_hashes : + #simple -> + ?chain:chain -> + ?block:block -> + unit -> + Operation_hash.t list list tzresult Lwt.t - val operation_hashes: - #simple -> ?chain:chain -> ?block:block -> - unit -> Operation_hash.t list list tzresult Lwt.t - val operation_hashes_in_pass: - #simple -> ?chain:chain -> ?block:block -> - int -> Operation_hash.t list tzresult Lwt.t - val operation_hash: - #simple -> ?chain:chain -> ?block:block -> - int -> int -> Operation_hash.t tzresult Lwt.t + val operation_hashes_in_pass : + #simple -> + ?chain:chain -> + ?block:block -> + int -> + Operation_hash.t list tzresult Lwt.t + val operation_hash : + #simple -> + ?chain:chain -> + ?block:block -> + int -> + int -> + Operation_hash.t tzresult Lwt.t end module Context : sig - - val read: - #simple -> ?chain:chain -> ?block:block -> - ?depth: int -> - string list -> raw_context tzresult Lwt.t - + val read : + #simple -> + ?chain:chain -> + ?block:block -> + ?depth:int -> + string list -> + raw_context tzresult Lwt.t end module Helpers : sig - module Forge : sig - - val block_header: + val block_header : #RPC_context.simple -> ?chain:chain -> ?block:block -> Block_header.t -> MBytes.t tzresult Lwt.t - end module Preapply : sig - - val block: - #simple -> ?chain:chain -> ?block:block -> + val block : + #simple -> + ?chain:chain -> + ?block:block -> ?sort:bool -> ?timestamp:Time.Protocol.t -> protocol_data:Next_proto.block_header_data -> Next_proto.operation list list -> 
- (Block_header.shell_header * error Preapply_result.t list) tzresult Lwt.t + (Block_header.shell_header * error Preapply_result.t list) tzresult + Lwt.t - val operations: - #simple -> ?chain:chain -> ?block:block -> + val operations : + #simple -> + ?chain:chain -> + ?block:block -> Next_proto.operation list -> - (Next_proto.operation_data * Next_proto.operation_receipt) list tzresult Lwt.t - + (Next_proto.operation_data * Next_proto.operation_receipt) list + tzresult + Lwt.t end - val complete: - #simple -> ?chain:chain -> ?block:block -> - string -> string list tzresult Lwt.t - + val complete : + #simple -> + ?chain:chain -> + ?block:block -> + string -> + string list tzresult Lwt.t end module Mempool : sig - type t = { - applied: (Operation_hash.t * Next_proto.operation) list ; - refused: (Next_proto.operation * error list) Operation_hash.Map.t ; - branch_refused: (Next_proto.operation * error list) Operation_hash.Map.t ; - branch_delayed: (Next_proto.operation * error list) Operation_hash.Map.t ; - unprocessed: Next_proto.operation Operation_hash.Map.t ; + applied : (Operation_hash.t * Next_proto.operation) list; + refused : (Next_proto.operation * error list) Operation_hash.Map.t; + branch_refused : + (Next_proto.operation * error list) Operation_hash.Map.t; + branch_delayed : + (Next_proto.operation * error list) Operation_hash.Map.t; + unprocessed : Next_proto.operation Operation_hash.Map.t } - val pending_operations: - #simple -> - ?chain:chain -> - unit -> t tzresult Lwt.t + val pending_operations : + #simple -> ?chain:chain -> unit -> t tzresult Lwt.t - val monitor_operations: + val monitor_operations : #streamed -> ?chain:chain -> ?applied:bool -> ?branch_delayed:bool -> ?branch_refused:bool -> ?refused:bool -> - unit -> (Next_proto.operation list Lwt_stream.t * stopper) tzresult Lwt.t - - val request_operations: - #simple -> - ?chain:chain -> - unit -> unit tzresult Lwt.t + unit -> + (Next_proto.operation list Lwt_stream.t * stopper) tzresult Lwt.t + val request_operations : + #simple -> ?chain:chain -> unit -> unit tzresult Lwt.t end - val live_blocks: + val live_blocks : #simple -> ?chain:chain -> ?block:block -> - unit -> Block_hash.Set.t tzresult Lwt.t + unit -> + Block_hash.Set.t tzresult Lwt.t module S : sig + val hash : ([`GET], prefix, prefix, unit, unit, Block_hash.t) RPC_service.t - val hash: - ([ `GET ], prefix, - prefix, unit, unit, - Block_hash.t) RPC_service.t + val info : ([`GET], prefix, prefix, unit, unit, block_info) RPC_service.t - val info: - ([ `GET ], prefix, - prefix, unit, unit, - block_info) RPC_service.t + val header : + ([`GET], prefix, prefix, unit, unit, block_header) RPC_service.t - val header: - ([ `GET ], prefix, - prefix, unit, unit, - block_header) RPC_service.t + val raw_header : + ([`GET], prefix, prefix, unit, unit, MBytes.t) RPC_service.t - val raw_header: - ([ `GET ], prefix, - prefix, unit, unit, - MBytes.t) RPC_service.t + val metadata : + ([`GET], prefix, prefix, unit, unit, block_metadata) RPC_service.t - val metadata: - ([ `GET ], prefix, - prefix, unit, unit, - block_metadata) RPC_service.t - - val protocols: - ([ `GET ], prefix, - prefix, unit, unit, - protocols) RPC_service.t + val protocols : + ([`GET], prefix, prefix, unit, unit, protocols) RPC_service.t module Header : sig - - val shell_header: - ([ `GET ], prefix, - prefix, unit, unit, - Block_header.shell_header) RPC_service.t - - val protocol_data: - ([ `GET ], prefix, - prefix, unit, unit, - Proto.block_header_data) RPC_service.t - - val raw_protocol_data: - ([ `GET ], 
prefix, - prefix, unit, unit, - MBytes.t) RPC_service.t - + val shell_header : + ( [`GET], + prefix, + prefix, + unit, + unit, + Block_header.shell_header ) + RPC_service.t + + val protocol_data : + ( [`GET], + prefix, + prefix, + unit, + unit, + Proto.block_header_data ) + RPC_service.t + + val raw_protocol_data : + ([`GET], prefix, prefix, unit, unit, MBytes.t) RPC_service.t end module Operations : sig - - val operations: - ([ `GET ], prefix, - prefix, unit, unit, - operation list list) RPC_service.t - - val operations_in_pass: - ([ `GET ], prefix, - prefix * int, unit, unit, - operation list) RPC_service.t - - val operation: - ([ `GET ], prefix, - (prefix * int) * int, unit, unit, - operation) RPC_service.t - + val operations : + ([`GET], prefix, prefix, unit, unit, operation list list) RPC_service.t + + val operations_in_pass : + ( [`GET], + prefix, + prefix * int, + unit, + unit, + operation list ) + RPC_service.t + + val operation : + ( [`GET], + prefix, + (prefix * int) * int, + unit, + unit, + operation ) + RPC_service.t end module Operation_hashes : sig - - val operation_hashes: - ([ `GET ], prefix, - prefix, unit, unit, - Tezos_crypto.Operation_hash.t list list) RPC_service.t - - val operation_hashes_in_pass: - ([ `GET ], prefix, - prefix * int, unit, unit, - Tezos_crypto.Operation_hash.t list) RPC_service.t - - val operation_hash: - ([ `GET ], prefix, - (prefix * int) * int, unit, unit, - Tezos_crypto.Operation_hash.t) RPC_service.t - + val operation_hashes : + ( [`GET], + prefix, + prefix, + unit, + unit, + Tezos_crypto.Operation_hash.t list list ) + RPC_service.t + + val operation_hashes_in_pass : + ( [`GET], + prefix, + prefix * int, + unit, + unit, + Tezos_crypto.Operation_hash.t list ) + RPC_service.t + + val operation_hash : + ( [`GET], + prefix, + (prefix * int) * int, + unit, + unit, + Tezos_crypto.Operation_hash.t ) + RPC_service.t end module Context : sig - - val read: - ([ `GET ], prefix, - prefix * string list, < depth : int option >, unit, - raw_context) RPC_service.t - + val read : + ( [`GET], + prefix, + prefix * string list, + < depth : int option >, + unit, + raw_context ) + RPC_service.t end module Helpers : sig - module Forge : sig - - val block_header: - ([ `POST ], prefix, - prefix, unit, Block_header.t, MBytes.t) RPC_service.service - + val block_header : + ( [`POST], + prefix, + prefix, + unit, + Block_header.t, + MBytes.t ) + RPC_service.service end module Preapply : sig - type block_param = { - protocol_data: Next_proto.block_header_data ; - operations: Next_proto.operation list list ; + protocol_data : Next_proto.block_header_data; + operations : Next_proto.operation list list } - val block: - ([ `POST ], prefix, - prefix, < sort_operations : bool; - timestamp : Time.Protocol.t option >, block_param, - Block_header.shell_header * error Preapply_result.t list) RPC_service.t - - val operations: - ([ `POST ], prefix, - prefix, unit, Next_proto.operation list, - (Next_proto.operation_data * Next_proto.operation_receipt) list) RPC_service.t - + val block : + ( [`POST], + prefix, + prefix, + < sort_operations : bool ; timestamp : Time.Protocol.t option >, + block_param, + Block_header.shell_header * error Preapply_result.t list ) + RPC_service.t + + val operations : + ( [`POST], + prefix, + prefix, + unit, + Next_proto.operation list, + (Next_proto.operation_data * Next_proto.operation_receipt) list ) + RPC_service.t end - val complete: - ([ `GET ], prefix, - prefix * string, unit, unit, - string list) RPC_service.t - + val complete : + ( [`GET], + prefix, + 
prefix * string, + unit, + unit, + string list ) + RPC_service.t end module Mempool : sig + val encoding : Mempool.t Data_encoding.t - val encoding: Mempool.t Data_encoding.t - - val pending_operations: + val pending_operations : ('a, 'b) RPC_path.t -> - ([ `GET ], 'a, - 'b , unit, unit, - Mempool.t) RPC_service.t + ([`GET], 'a, 'b, unit, unit, Mempool.t) RPC_service.t - val monitor_operations: + val monitor_operations : ('a, 'b) RPC_path.t -> - ([ `GET ], 'a, 'b, - < applied : bool ; branch_delayed : bool ; - branch_refused : bool ; refused : bool ; >, - unit, - Next_proto.operation list) RPC_service.t + ( [`GET], + 'a, + 'b, + < applied : bool + ; branch_delayed : bool + ; branch_refused : bool + ; refused : bool >, + unit, + Next_proto.operation list ) + RPC_service.t val request_operations : ('a, 'b) RPC_path.t -> - ([ `POST ], 'a, - 'b , unit, unit, unit) RPC_service.t - + ([`POST], 'a, 'b, unit, unit, unit) RPC_service.t end - val live_blocks: - ([ `GET ], prefix, - prefix, unit, unit, - Block_hash.Set.t) RPC_service.t - + val live_blocks : + ([`GET], prefix, prefix, unit, unit, Block_hash.Set.t) RPC_service.t end - end module Fake_protocol : PROTO -module Empty : (module type of Make(Fake_protocol)(Fake_protocol)) + +module Empty : module type of Make (Fake_protocol) (Fake_protocol) diff --git a/src/lib_shell_services/block_validator_errors.ml b/src/lib_shell_services/block_validator_errors.ml index 81349c3723276b8be3ed174d7bf00a81cbe6a9e2..30a4b79c8c815b771e0078dbedbd647ccdfdc4aa 100644 --- a/src/lib_shell_services/block_validator_errors.ml +++ b/src/lib_shell_services/block_validator_errors.ml @@ -26,195 +26,204 @@ type block_error = | Cannot_parse_operation of Operation_hash.t - | Invalid_fitness of { expected: Fitness.t ; found: Fitness.t } + | Invalid_fitness of {expected : Fitness.t; found : Fitness.t} | Non_increasing_timestamp | Non_increasing_fitness - | Invalid_level of { expected: Int32.t ; found: Int32.t } - | Invalid_proto_level of { expected: int ; found: int } + | Invalid_level of {expected : Int32.t; found : Int32.t} + | Invalid_proto_level of {expected : int; found : int} | Replayed_operation of Operation_hash.t | Outdated_operation of - { operation: Operation_hash.t; - originating_block: Block_hash.t } + { operation : Operation_hash.t; + originating_block : Block_hash.t } | Expired_chain of - { chain_id: Chain_id.t ; - expiration: Time.Protocol.t ; - timestamp: Time.Protocol.t ; - } + { chain_id : Chain_id.t; + expiration : Time.Protocol.t; + timestamp : Time.Protocol.t } | Unexpected_number_of_validation_passes of int (* uint8 *) - | Too_many_operations of { pass: int; found: int; max: int } - | Oversized_operation of { operation: Operation_hash.t; - size: int; max: int } - | Unallowed_pass of { operation: Operation_hash.t ; - pass: int ; - allowed_pass: int list } + | Too_many_operations of {pass : int; found : int; max : int} + | Oversized_operation of {operation : Operation_hash.t; size : int; max : int} + | Unallowed_pass of + { operation : Operation_hash.t; + pass : int; + allowed_pass : int list } | Cannot_parse_block_header let errno : Unix.error Data_encoding.t = let open Data_encoding in - union [ - case - ~title:"unknown_unix_error" - (Tag 0) int8 - (function Unix.EUNKNOWNERR i -> Some i | _ -> None) - (fun i -> EUNKNOWNERR i) ; - case - ~title:"unix_error" - (Tag 1) - (string_enum - Unix.[ - "2big", E2BIG ; - "acces", EACCES ; - "again", EAGAIN ; - "badf", EBADF ; - "busy", EBUSY ; - "child", ECHILD ; - "deadlk", EDEADLK ; - "dom", EDOM ; - "exist", 
EEXIST ; - "fault", EFAULT ; - "fbig", EFBIG ; - "intr", EINTR ; - "inval", EINVAL ; - "io", EIO ; - "isdir", EISDIR ; - "mfile", EMFILE ; - "mlink", EMLINK ; - "nametoolong", ENAMETOOLONG ; - "nfile", ENFILE ; - "nodev", ENODEV ; - "noent", ENOENT ; - "noexec", ENOEXEC ; - "nolck", ENOLCK ; - "nomem", ENOMEM ; - "nospc", ENOSPC ; - "nosys", ENOSYS ; - "notdir", ENOTDIR ; - "notempty", ENOTEMPTY ; - "notty", ENOTTY ; - "nxio", ENXIO ; - "perm", EPERM ; - "pipe", EPIPE ; - "range", ERANGE ; - "rofs", EROFS ; - "spipe", ESPIPE ; - "srch", ESRCH ; - "xdev", EXDEV ; - "wouldblock", EWOULDBLOCK ; - "inprogress", EINPROGRESS ; - "already", EALREADY ; - "notsock", ENOTSOCK ; - "destaddrreq", EDESTADDRREQ ; - "msgsize", EMSGSIZE ; - "prototype", EPROTOTYPE ; - "noprotoopt", ENOPROTOOPT ; - "protonosupport", EPROTONOSUPPORT ; - "socktnosupport", ESOCKTNOSUPPORT ; - "opnotsupp", EOPNOTSUPP ; - "pfnosupport", EPFNOSUPPORT ; - "afnosupport", EAFNOSUPPORT ; - "addrinuse", EADDRINUSE ; - "addrnotavail", EADDRNOTAVAIL ; - "netdown", ENETDOWN ; - "netunreach", ENETUNREACH ; - "netreset", ENETRESET ; - "connaborted", ECONNABORTED ; - "connreset", ECONNRESET ; - "nobufs", ENOBUFS ; - "isconn", EISCONN ; - "notconn", ENOTCONN ; - "shutdown", ESHUTDOWN ; - "toomanyrefs", ETOOMANYREFS ; - "timedout", ETIMEDOUT ; - "connrefused", ECONNREFUSED ; - "hostdown", EHOSTDOWN ; - "hostunreach", EHOSTUNREACH ; - "loop", ELOOP ; - "overflow", EOVERFLOW ]) - (fun x -> Some x) - (fun x -> x) - ] + union + [ case + ~title:"unknown_unix_error" + (Tag 0) + int8 + (function Unix.EUNKNOWNERR i -> Some i | _ -> None) + (fun i -> EUNKNOWNERR i); + case + ~title:"unix_error" + (Tag 1) + (string_enum + Unix. + [ ("2big", E2BIG); + ("acces", EACCES); + ("again", EAGAIN); + ("badf", EBADF); + ("busy", EBUSY); + ("child", ECHILD); + ("deadlk", EDEADLK); + ("dom", EDOM); + ("exist", EEXIST); + ("fault", EFAULT); + ("fbig", EFBIG); + ("intr", EINTR); + ("inval", EINVAL); + ("io", EIO); + ("isdir", EISDIR); + ("mfile", EMFILE); + ("mlink", EMLINK); + ("nametoolong", ENAMETOOLONG); + ("nfile", ENFILE); + ("nodev", ENODEV); + ("noent", ENOENT); + ("noexec", ENOEXEC); + ("nolck", ENOLCK); + ("nomem", ENOMEM); + ("nospc", ENOSPC); + ("nosys", ENOSYS); + ("notdir", ENOTDIR); + ("notempty", ENOTEMPTY); + ("notty", ENOTTY); + ("nxio", ENXIO); + ("perm", EPERM); + ("pipe", EPIPE); + ("range", ERANGE); + ("rofs", EROFS); + ("spipe", ESPIPE); + ("srch", ESRCH); + ("xdev", EXDEV); + ("wouldblock", EWOULDBLOCK); + ("inprogress", EINPROGRESS); + ("already", EALREADY); + ("notsock", ENOTSOCK); + ("destaddrreq", EDESTADDRREQ); + ("msgsize", EMSGSIZE); + ("prototype", EPROTOTYPE); + ("noprotoopt", ENOPROTOOPT); + ("protonosupport", EPROTONOSUPPORT); + ("socktnosupport", ESOCKTNOSUPPORT); + ("opnotsupp", EOPNOTSUPP); + ("pfnosupport", EPFNOSUPPORT); + ("afnosupport", EAFNOSUPPORT); + ("addrinuse", EADDRINUSE); + ("addrnotavail", EADDRNOTAVAIL); + ("netdown", ENETDOWN); + ("netunreach", ENETUNREACH); + ("netreset", ENETRESET); + ("connaborted", ECONNABORTED); + ("connreset", ECONNRESET); + ("nobufs", ENOBUFS); + ("isconn", EISCONN); + ("notconn", ENOTCONN); + ("shutdown", ESHUTDOWN); + ("toomanyrefs", ETOOMANYREFS); + ("timedout", ETIMEDOUT); + ("connrefused", ECONNREFUSED); + ("hostdown", EHOSTDOWN); + ("hostunreach", EHOSTUNREACH); + ("loop", ELOOP); + ("overflow", EOVERFLOW) ]) + (fun x -> Some x) + (fun x -> x) ] let block_error_encoding = let open Data_encoding in union - [ - case (Tag 0) + [ case + (Tag 0) ~title:"Cannot_parse_operation" (obj2 (req 
"error" (constant "cannot_parse_operation")) (req "operation" Operation_hash.encoding)) - (function Cannot_parse_operation operation -> Some ((), operation) - | _ -> None) - (fun ((), operation) -> Cannot_parse_operation operation) ; - case (Tag 1) + (function + | Cannot_parse_operation operation -> + Some ((), operation) + | _ -> + None) + (fun ((), operation) -> Cannot_parse_operation operation); + case + (Tag 1) ~title:"Invalid_fitness" (obj3 (req "error" (constant "invalid_fitness")) (req "expected" Fitness.encoding) (req "found" Fitness.encoding)) (function - | Invalid_fitness { expected ; found } -> + | Invalid_fitness {expected; found} -> Some ((), expected, found) - | _ -> None) - (fun ((), expected, found) -> Invalid_fitness { expected ; found }) ; - case (Tag 2) + | _ -> + None) + (fun ((), expected, found) -> Invalid_fitness {expected; found}); + case + (Tag 2) ~title:"Non_increasing_timestamp" - (obj1 - (req "error" (constant "non_increasing_timestamp"))) - (function Non_increasing_timestamp -> Some () - | _ -> None) - (fun () -> Non_increasing_timestamp) ; - case (Tag 3) + (obj1 (req "error" (constant "non_increasing_timestamp"))) + (function Non_increasing_timestamp -> Some () | _ -> None) + (fun () -> Non_increasing_timestamp); + case + (Tag 3) ~title:"Non_increasing_fitness" - (obj1 - (req "error" (constant "non_increasing_fitness"))) - (function Non_increasing_fitness -> Some () - | _ -> None) - (fun () -> Non_increasing_fitness) ; - case (Tag 4) + (obj1 (req "error" (constant "non_increasing_fitness"))) + (function Non_increasing_fitness -> Some () | _ -> None) + (fun () -> Non_increasing_fitness); + case + (Tag 4) ~title:"Invalid_level" (obj3 (req "error" (constant "invalid_level")) (req "expected" int32) (req "found" int32)) (function - | Invalid_level { expected ; found } -> + | Invalid_level {expected; found} -> Some ((), expected, found) - | _ -> None) - (fun ((), expected, found) -> Invalid_level { expected ; found }) ; - case (Tag 5) + | _ -> + None) + (fun ((), expected, found) -> Invalid_level {expected; found}); + case + (Tag 5) ~title:"Invalid_proto_level" (obj3 (req "error" (constant "invalid_proto_level")) (req "expected" uint8) (req "found" uint8)) (function - | Invalid_proto_level { expected ; found } -> + | Invalid_proto_level {expected; found} -> Some ((), expected, found) - | _ -> None) - (fun ((), expected, found) -> - Invalid_proto_level { expected ; found }) ; - case (Tag 6) + | _ -> + None) + (fun ((), expected, found) -> Invalid_proto_level {expected; found}); + case + (Tag 6) ~title:"Replayed_operation" (obj2 (req "error" (constant "replayed_operation")) (req "operation" Operation_hash.encoding)) - (function Replayed_operation operation -> Some ((), operation) - | _ -> None) - (fun ((), operation) -> Replayed_operation operation) ; - case (Tag 7) + (function + | Replayed_operation operation -> Some ((), operation) | _ -> None) + (fun ((), operation) -> Replayed_operation operation); + case + (Tag 7) ~title:"Outdated_operation" (obj3 (req "error" (constant "outdated_operation")) (req "operation" Operation_hash.encoding) (req "originating_block" Block_hash.encoding)) (function - | Outdated_operation { operation ; originating_block } -> + | Outdated_operation {operation; originating_block} -> Some ((), operation, originating_block) - | _ -> None) + | _ -> + None) (fun ((), operation, originating_block) -> - Outdated_operation { operation ; originating_block }) ; - case (Tag 8) + Outdated_operation {operation; originating_block}); + case + (Tag 8) 
~title:"Expired_chain" (obj4 (req "error" (constant "expired_chain")) @@ -222,21 +231,26 @@ let block_error_encoding = (req "expiration" Time.Protocol.encoding) (req "timestamp" Time.Protocol.encoding)) (function - | Expired_chain { chain_id ; expiration ; timestamp } -> + | Expired_chain {chain_id; expiration; timestamp} -> Some ((), chain_id, expiration, timestamp) - | _ -> None) + | _ -> + None) (fun ((), chain_id, expiration, timestamp) -> - Expired_chain { chain_id ; expiration ; timestamp }) ; - case (Tag 9) + Expired_chain {chain_id; expiration; timestamp}); + case + (Tag 9) ~title:"Unexpected_number_of_validation_passes" (obj2 (req "error" (constant "unexpected_number_of_passes")) (req "found" uint8)) (function - | Unexpected_number_of_validation_passes n -> Some ((), n) - | _ -> None) - (fun ((), n) -> Unexpected_number_of_validation_passes n) ; - case (Tag 10) + | Unexpected_number_of_validation_passes n -> + Some ((), n) + | _ -> + None) + (fun ((), n) -> Unexpected_number_of_validation_passes n); + case + (Tag 10) ~title:"Too_many_operations" (obj4 (req "error" (constant "too_many_operations")) @@ -244,12 +258,13 @@ let block_error_encoding = (req "found" uint16) (req "max" uint16)) (function - | Too_many_operations { pass ; found ; max } -> + | Too_many_operations {pass; found; max} -> Some ((), pass, found, max) - | _ -> None) - (fun ((), pass, found, max) -> - Too_many_operations { pass ; found ; max }) ; - case (Tag 11) + | _ -> + None) + (fun ((), pass, found, max) -> Too_many_operations {pass; found; max}); + case + (Tag 11) ~title:"Oversized_operation" (obj4 (req "error" (constant "oversized_operation")) @@ -257,12 +272,14 @@ let block_error_encoding = (req "found" int31) (req "max" int31)) (function - | Oversized_operation { operation ; size ; max } -> + | Oversized_operation {operation; size; max} -> Some ((), operation, size, max) - | _ -> None) + | _ -> + None) (fun ((), operation, size, max) -> - Oversized_operation { operation ; size ; max }) ; - case (Tag 12) + Oversized_operation {operation; size; max}); + case + (Tag 12) ~title:"Unallowed_pass" (obj4 (req "error" (constant "invalid_pass")) @@ -270,103 +287,114 @@ let block_error_encoding = (req "pass" uint8) (req "allowed_pass" (list uint8))) (function - | Unallowed_pass { operation ; pass ; allowed_pass } -> + | Unallowed_pass {operation; pass; allowed_pass} -> Some ((), operation, pass, allowed_pass) - | _ -> None) + | _ -> + None) (fun ((), operation, pass, allowed_pass) -> - Unallowed_pass { operation ; pass ; allowed_pass }) ; - case (Tag 13) + Unallowed_pass {operation; pass; allowed_pass}); + case + (Tag 13) ~title:"Cannot_parse_block_header" - (obj1 - (req "error" (constant "cannot_parse_bock_header"))) - (function - | Cannot_parse_block_header -> - Some () - | _ -> None) - (fun () -> - Cannot_parse_block_header) ; - ] + (obj1 (req "error" (constant "cannot_parse_bock_header"))) + (function Cannot_parse_block_header -> Some () | _ -> None) + (fun () -> Cannot_parse_block_header) ] let pp_block_error ppf = function | Cannot_parse_operation oph -> - Format.fprintf ppf + Format.fprintf + ppf "Failed to parse the operation %a." 
- Operation_hash.pp_short oph - | Invalid_fitness { expected ; found } -> - Format.fprintf ppf - "@[<v 2>Invalid fitness:@ \ - \ expected %a@ \ - \ found %a@]" - Fitness.pp expected - Fitness.pp found + Operation_hash.pp_short + oph + | Invalid_fitness {expected; found} -> + Format.fprintf + ppf + "@[<v 2>Invalid fitness:@ expected %a@ found %a@]" + Fitness.pp + expected + Fitness.pp + found | Non_increasing_timestamp -> Format.fprintf ppf "Non increasing timestamp" | Non_increasing_fitness -> Format.fprintf ppf "Non increasing fitness" - | Invalid_level { expected ; found } -> - Format.fprintf ppf - "Invalid level:@ \ - \ expected %ld@ \ - \ found %ld" + | Invalid_level {expected; found} -> + Format.fprintf + ppf + "Invalid level:@ expected %ld@ found %ld" expected found - | Invalid_proto_level { expected ; found } -> - Format.fprintf ppf - "Invalid protocol level:@ \ - \ expected %d@ \ - \ found %d" + | Invalid_proto_level {expected; found} -> + Format.fprintf + ppf + "Invalid protocol level:@ expected %d@ found %d" expected found | Replayed_operation oph -> - Format.fprintf ppf + Format.fprintf + ppf "The operation %a was previously included in the chain." - Operation_hash.pp_short oph - | Outdated_operation { operation ; originating_block } -> - Format.fprintf ppf + Operation_hash.pp_short + oph + | Outdated_operation {operation; originating_block} -> + Format.fprintf + ppf "The operation %a is outdated (originated in block: %a)" - Operation_hash.pp_short operation - Block_hash.pp_short originating_block - | Expired_chain { chain_id ; expiration ; timestamp } -> - Format.fprintf ppf - "The block timestamp (%a) is later than \ - its chain expiration date: %a (chain: %a)." - Time.System.pp_hum (Time.System.of_protocol_exn timestamp) - Time.System.pp_hum (Time.System.of_protocol_exn expiration) - Chain_id.pp_short chain_id + Operation_hash.pp_short + operation + Block_hash.pp_short + originating_block + | Expired_chain {chain_id; expiration; timestamp} -> + Format.fprintf + ppf + "The block timestamp (%a) is later than its chain expiration date: %a \ + (chain: %a)." 
+ Time.System.pp_hum + (Time.System.of_protocol_exn timestamp) + Time.System.pp_hum + (Time.System.of_protocol_exn expiration) + Chain_id.pp_short + chain_id | Unexpected_number_of_validation_passes n -> - Format.fprintf ppf - "Invalid number of validation passes (found: %d)" - n - | Too_many_operations { pass ; found ; max } -> - Format.fprintf ppf + Format.fprintf ppf "Invalid number of validation passes (found: %d)" n + | Too_many_operations {pass; found; max} -> + Format.fprintf + ppf "Too many operations in validation pass %d (found: %d, max: %d)" - pass found max - | Oversized_operation { operation ; size ; max } -> - Format.fprintf ppf + pass + found + max + | Oversized_operation {operation; size; max} -> + Format.fprintf + ppf "Oversized operation %a (size: %d, max: %d)" - Operation_hash.pp_short operation size max - | Unallowed_pass { operation ; pass ; allowed_pass } -> - Format.fprintf ppf - "Operation %a included in validation pass %d, \ - \ while only the following passes are allowed: @[<h>%a@]" - Operation_hash.pp_short operation pass - Format.(pp_print_list pp_print_int) allowed_pass + Operation_hash.pp_short + operation + size + max + | Unallowed_pass {operation; pass; allowed_pass} -> + Format.fprintf + ppf + "Operation %a included in validation pass %d, while only the \ + following passes are allowed: @[<h>%a@]" + Operation_hash.pp_short + operation + pass + Format.(pp_print_list pp_print_int) + allowed_pass | Cannot_parse_block_header -> Format.fprintf ppf "Failed to parse the block header." type error += - | Invalid_block of - { block: Block_hash.t ; error: block_error } - | Unavailable_protocol of - { block: Block_hash.t ; protocol: Protocol_hash.t } + | Invalid_block of {block : Block_hash.t; error : block_error} + | Unavailable_protocol of {block : Block_hash.t; protocol : Protocol_hash.t} | Inconsistent_operations_hash of - { block: Block_hash.t ; - expected: Operation_list_list_hash.t ; - found: Operation_list_list_hash.t } + { block : Block_hash.t; + expected : Operation_list_list_hash.t; + found : Operation_list_list_hash.t } | Failed_to_checkout_context of Context_hash.t - | System_error of { errno: Unix.error ; - fn: string ; - msg: string } + | System_error of {errno : Unix.error; fn : string; msg : string} | Missing_test_protocol of Protocol_hash.t let () = @@ -375,108 +403,117 @@ let () = ~id:"validator.invalid_block" ~title:"Invalid block" ~description:"Invalid block." - ~pp:begin fun ppf (block, error) -> - Format.fprintf ppf + ~pp:(fun ppf (block, error) -> + Format.fprintf + ppf "@[<v 2>Invalid block %a@ %a@]" - Block_hash.pp_short block pp_block_error error - end - Data_encoding.(merge_objs - (obj1 (req "invalid_block" Block_hash.encoding)) - block_error_encoding) - (function Invalid_block { block ; error } -> - Some (block, error) | _ -> None) - (fun (block, error) -> - Invalid_block { block ; error }) ; + Block_hash.pp_short + block + pp_block_error + error) + Data_encoding.( + merge_objs + (obj1 (req "invalid_block" Block_hash.encoding)) + block_error_encoding) + (function + | Invalid_block {block; error} -> Some (block, error) | _ -> None) + (fun (block, error) -> Invalid_block {block; error}) ; Error_monad.register_error_kind `Temporary ~id:"validator.unavailable_protocol" ~title:"Missing protocol" ~description:"The protocol required for validating a block is missing." - ~pp:begin fun ppf (block, protocol) -> - Format.fprintf ppf + ~pp:(fun ppf (block, protocol) -> + Format.fprintf + ppf "Missing protocol (%a) when validating the block %a." 
- Protocol_hash.pp_short protocol - Block_hash.pp_short block - end + Protocol_hash.pp_short + protocol + Block_hash.pp_short + block) Data_encoding.( obj2 (req "block" Block_hash.encoding) (req "missing_protocol" Protocol_hash.encoding)) (function - | Unavailable_protocol { block ; protocol } -> + | Unavailable_protocol {block; protocol} -> Some (block, protocol) - | _ -> None) - (fun (block, protocol) -> Unavailable_protocol { block ; protocol }) ; + | _ -> + None) + (fun (block, protocol) -> Unavailable_protocol {block; protocol}) ; Error_monad.register_error_kind `Temporary ~id:"validator.inconsistent_operations_hash" ~title:"Invalid merkle tree" - ~description:"The provided list of operations is inconsistent with \ - the block header." - ~pp:begin fun ppf (block, expected, found) -> - Format.fprintf ppf - "@[<v 2>The provided list of operations for block %a \ - \ is inconsistent with the block header@ \ - \ expected: %a@ \ - \ found: %a@]" - Block_hash.pp_short block - Operation_list_list_hash.pp_short expected - Operation_list_list_hash.pp_short found - end + ~description: + "The provided list of operations is inconsistent with the block header." + ~pp:(fun ppf (block, expected, found) -> + Format.fprintf + ppf + "@[<v 2>The provided list of operations for block %a is inconsistent \ + with the block header@ expected: %a@ found: %a@]" + Block_hash.pp_short + block + Operation_list_list_hash.pp_short + expected + Operation_list_list_hash.pp_short + found) Data_encoding.( obj3 (req "block" Block_hash.encoding) (req "expected" Operation_list_list_hash.encoding) (req "found" Operation_list_list_hash.encoding)) (function - | Inconsistent_operations_hash { block ; expected ; found } -> + | Inconsistent_operations_hash {block; expected; found} -> Some (block, expected, found) - | _ -> None) + | _ -> + None) (fun (block, expected, found) -> - Inconsistent_operations_hash { block ; expected ; found }); + Inconsistent_operations_hash {block; expected; found}) ; Error_monad.register_error_kind `Permanent ~id:"Validator_process.failed_to_checkout_context" - ~title: "Fail during checkout context" - ~description: "The context checkout failed using a given hash" - ~pp:(fun ppf (hash:Context_hash.t) -> - Format.fprintf ppf - "@[Failed to checkout the context with hash %a@]" - Context_hash.pp_short hash) + ~title:"Fail during checkout context" + ~description:"The context checkout failed using a given hash" + ~pp:(fun ppf (hash : Context_hash.t) -> + Format.fprintf + ppf + "@[Failed to checkout the context with hash %a@]" + Context_hash.pp_short + hash) Data_encoding.(obj1 (req "hash" Context_hash.encoding)) - (function - | Failed_to_checkout_context h -> Some h - | _ -> None) + (function Failed_to_checkout_context h -> Some h | _ -> None) (fun h -> Failed_to_checkout_context h) ; Error_monad.register_error_kind `Temporary ~id:"Validator_process.system_error_while_validating" - ~title: "Failed to validate block because of a system error" - ~description: "The validator failed because of a system error" + ~title:"Failed to validate block because of a system error" + ~description:"The validator failed because of a system error" ~pp:(fun ppf (errno, fn, msg) -> - Format.fprintf ppf - "System error while validating a block (in function %s(%s)):@ %s" - fn msg (Unix.error_message errno)) - Data_encoding.(obj3 - (req "errno" errno) - (req "function" string) - (req "msg" string)) + Format.fprintf + ppf + "System error while validating a block (in function %s(%s)):@ %s" + fn + msg + (Unix.error_message errno)) + 
Data_encoding.( + obj3 (req "errno" errno) (req "function" string) (req "msg" string)) (function - | System_error { errno ; fn ; msg } -> Some (errno, fn, msg) - | _ -> None) - (fun (errno, fn, msg) -> System_error { errno ; fn ; msg }) ; + | System_error {errno; fn; msg} -> Some (errno, fn, msg) | _ -> None) + (fun (errno, fn, msg) -> System_error {errno; fn; msg}) ; Error_monad.register_error_kind `Temporary ~id:"validator.missing_test_protocol" ~title:"Missing test protocol" - ~description: - "Missing test protocol when forking the test chain" - ~pp: (fun ppf protocol -> - Format.fprintf ppf - "Missing test protocol %a when forking the test chain." - Protocol_hash.pp protocol) + ~description:"Missing test protocol when forking the test chain" + ~pp:(fun ppf protocol -> + Format.fprintf + ppf + "Missing test protocol %a when forking the test chain." + Protocol_hash.pp + protocol) Data_encoding.(obj1 (req "test_protocol" Protocol_hash.encoding)) (function Missing_test_protocol protocol -> Some protocol | _ -> None) (fun protocol -> Missing_test_protocol protocol) -let invalid_block block error = Invalid_block { block ; error } +let invalid_block block error = Invalid_block {block; error} diff --git a/src/lib_shell_services/block_validator_errors.mli b/src/lib_shell_services/block_validator_errors.mli index 7e87895d0447438ea2dd95a96f98625dca6916ba..96f2f9aba6cc329a8cc310eed359a91d3e338993 100644 --- a/src/lib_shell_services/block_validator_errors.mli +++ b/src/lib_shell_services/block_validator_errors.mli @@ -26,42 +26,37 @@ type block_error = | Cannot_parse_operation of Operation_hash.t - | Invalid_fitness of { expected: Fitness.t ; found: Fitness.t } + | Invalid_fitness of {expected : Fitness.t; found : Fitness.t} | Non_increasing_timestamp | Non_increasing_fitness - | Invalid_level of { expected: Int32.t ; found: Int32.t } - | Invalid_proto_level of { expected: int ; found: int } + | Invalid_level of {expected : Int32.t; found : Int32.t} + | Invalid_proto_level of {expected : int; found : int} | Replayed_operation of Operation_hash.t | Outdated_operation of - { operation: Operation_hash.t; - originating_block: Block_hash.t } + { operation : Operation_hash.t; + originating_block : Block_hash.t } | Expired_chain of - { chain_id: Chain_id.t ; - expiration: Time.Protocol.t ; - timestamp: Time.Protocol.t ; - } + { chain_id : Chain_id.t; + expiration : Time.Protocol.t; + timestamp : Time.Protocol.t } | Unexpected_number_of_validation_passes of int (* uint8 *) - | Too_many_operations of { pass: int; found: int; max: int } - | Oversized_operation of { operation: Operation_hash.t; - size: int; max: int } - | Unallowed_pass of { operation: Operation_hash.t ; - pass: int ; - allowed_pass: int list } + | Too_many_operations of {pass : int; found : int; max : int} + | Oversized_operation of {operation : Operation_hash.t; size : int; max : int} + | Unallowed_pass of + { operation : Operation_hash.t; + pass : int; + allowed_pass : int list } | Cannot_parse_block_header type error += - | Invalid_block of - { block: Block_hash.t ; error: block_error } - | Unavailable_protocol of - { block: Block_hash.t ; protocol: Protocol_hash.t } + | Invalid_block of {block : Block_hash.t; error : block_error} + | Unavailable_protocol of {block : Block_hash.t; protocol : Protocol_hash.t} | Inconsistent_operations_hash of - { block: Block_hash.t ; - expected: Operation_list_list_hash.t ; - found: Operation_list_list_hash.t } + { block : Block_hash.t; + expected : Operation_list_list_hash.t; + found : 
Operation_list_list_hash.t } | Failed_to_checkout_context of Context_hash.t - | System_error of { errno: Unix.error ; - fn: string ; - msg: string } + | System_error of {errno : Unix.error; fn : string; msg : string} | Missing_test_protocol of Protocol_hash.t val invalid_block : Block_hash.t -> block_error -> error diff --git a/src/lib_shell_services/block_validator_worker_state.ml b/src/lib_shell_services/block_validator_worker_state.ml index 3ec59dc5119289b8c1f56bc24dd6d26e97d67f7a..7f56796249619ef3d816700029517b36d9802b5b 100644 --- a/src/lib_shell_services/block_validator_worker_state.ml +++ b/src/lib_shell_services/block_validator_worker_state.ml @@ -25,84 +25,116 @@ module Request = struct type view = { - chain_id : Chain_id.t ; - block : Block_hash.t ; - peer : P2p_peer.Id.t option ; + chain_id : Chain_id.t; + block : Block_hash.t; + peer : P2p_peer.Id.t option } + let encoding = let open Data_encoding in conv - (fun { chain_id ; block ; peer } -> (block, chain_id, peer)) - (fun (block, chain_id, peer) -> { chain_id ; block ; peer }) + (fun {chain_id; block; peer} -> (block, chain_id, peer)) + (fun (block, chain_id, peer) -> {chain_id; block; peer}) (obj3 (req "block" Block_hash.encoding) (req "chain_id" Chain_id.encoding) (opt "peer" P2p_peer.Id.encoding)) - let pp ppf { chain_id ; block ; peer } = - Format.fprintf ppf "Validation of %a (chain: %a)" - Block_hash.pp block - Chain_id.pp_short chain_id ; + let pp ppf {chain_id; block; peer} = + Format.fprintf + ppf + "Validation of %a (chain: %a)" + Block_hash.pp + block + Chain_id.pp_short + chain_id ; match peer with - | None -> () + | None -> + () | Some peer -> - Format.fprintf ppf "from peer %a" - P2p_peer.Id.pp_short peer + Format.fprintf ppf "from peer %a" P2p_peer.Id.pp_short peer end module Event = struct type t = | Validation_success of Request.view * Worker_types.request_status - | Validation_failure of Request.view * Worker_types.request_status * error list + | Validation_failure of + Request.view * Worker_types.request_status * error list | Debug of string let level req = match req with - | Debug _ -> Internal_event.Debug - | Validation_success _ - | Validation_failure _ -> Internal_event.Notice + | Debug _ -> + Internal_event.Debug + | Validation_success _ | Validation_failure _ -> + Internal_event.Notice let encoding = let open Data_encoding in union - [ case (Tag 0) ~title:"Debug" + [ case + (Tag 0) + ~title:"Debug" (obj1 (req "message" string)) (function Debug msg -> Some msg | _ -> None) - (fun msg -> Debug msg) ; - case (Tag 1) ~title:"Validation_success" + (fun msg -> Debug msg); + case + (Tag 1) + ~title:"Validation_success" (obj2 (req "successful_validation" Request.encoding) (req "status" Worker_types.request_status_encoding)) (function Validation_success (r, s) -> Some (r, s) | _ -> None) - (fun (r, s) -> Validation_success (r, s)) ; - case (Tag 2) ~title:"Validation_failure" + (fun (r, s) -> Validation_success (r, s)); + case + (Tag 2) + ~title:"Validation_failure" (obj3 (req "failed_validation" Request.encoding) (req "status" Worker_types.request_status_encoding) (dft "errors" RPC_error.encoding [])) - (function Validation_failure (r, s, err) -> Some (r, s, err) | _ -> None) + (function + | Validation_failure (r, s, err) -> Some (r, s, err) | _ -> None) (fun (r, s, err) -> Validation_failure (r, s, err)) ] let pp ppf = function - | Debug msg -> Format.fprintf ppf "%s" msg - | Validation_success (req, { pushed ; treated ; completed }) -> - Format.fprintf ppf + | Debug msg -> + Format.fprintf ppf "%s" msg + | 
Validation_success (req, {pushed; treated; completed}) -> + Format.fprintf + ppf "@[<v 0>Block %a successfully validated@,\ Pushed: %a, Treated: %a, Completed: %a@]" - Block_hash.pp req.block - Time.System.pp_hum pushed Time.System.pp_hum treated Time.System.pp_hum completed - | Validation_failure (req, { pushed ; treated ; completed }, errs)-> - Format.fprintf ppf + Block_hash.pp + req.block + Time.System.pp_hum + pushed + Time.System.pp_hum + treated + Time.System.pp_hum + completed + | Validation_failure (req, {pushed; treated; completed}, errs) -> + Format.fprintf + ppf "@[<v 0>Validation of block %a failed@,\ Pushed: %a, Treated: %a, Failed: %a@,\ %a@]" - Block_hash.pp req.block - Time.System.pp_hum pushed Time.System.pp_hum treated Time.System.pp_hum completed - (Format.pp_print_list Error_monad.pp) errs + Block_hash.pp + req.block + Time.System.pp_hum + pushed + Time.System.pp_hum + treated + Time.System.pp_hum + completed + (Format.pp_print_list Error_monad.pp) + errs end module Worker_state = struct type view = unit + let encoding = Data_encoding.empty + let pp _ppf _view = () end diff --git a/src/lib_shell_services/block_validator_worker_state.mli b/src/lib_shell_services/block_validator_worker_state.mli index 98c6d33e3fbef8d1bec96516797a50d8825e26a1..f1526ad2e8b7373f5237d6e2eb1f1f3a878838c1 100644 --- a/src/lib_shell_services/block_validator_worker_state.mli +++ b/src/lib_shell_services/block_validator_worker_state.mli @@ -25,26 +25,34 @@ module Request : sig type view = { - chain_id : Chain_id.t ; - block : Block_hash.t ; - peer: P2p_peer.Id.t option ; + chain_id : Chain_id.t; + block : Block_hash.t; + peer : P2p_peer.Id.t option } + val encoding : view Data_encoding.encoding + val pp : Format.formatter -> view -> unit end module Event : sig type t = | Validation_success of Request.view * Worker_types.request_status - | Validation_failure of Request.view * Worker_types.request_status * error list + | Validation_failure of + Request.view * Worker_types.request_status * error list | Debug of string + val level : t -> Internal_event.level + val encoding : t Data_encoding.encoding + val pp : Format.formatter -> t -> unit end module Worker_state : sig type view = unit + val encoding : view Data_encoding.encoding + val pp : Format.formatter -> view -> unit end diff --git a/src/lib_shell_services/chain_services.ml b/src/lib_shell_services/chain_services.ml index 7feb0724c569b7931b4eba33e941888dc1f03052..f343e4ad386e0a5d9b972408cfb40c040d50aa50 100644 --- a/src/lib_shell_services/chain_services.ml +++ b/src/lib_shell_services/chain_services.ml @@ -25,84 +25,89 @@ open Data_encoding -type chain = [ - | `Main - | `Test - | `Hash of Chain_id.t -] +type chain = [`Main | `Test | `Hash of Chain_id.t] let chain_arg = Block_services.chain_arg + let to_string = Block_services.chain_to_string + let parse_chain = Block_services.parse_chain type invalid_block = { - hash: Block_hash.t ; - level: Int32.t ; - errors: error list ; + hash : Block_hash.t; + level : Int32.t; + errors : error list } type prefix = Block_services.chain_prefix + let path = Block_services.chain_path let checkpoint_encoding = - (obj4 - (req "block" Block_header.encoding) - (req "save_point" int32) - (req "caboose" int32) - (req "history_mode" History_mode.encoding)) + obj4 + (req "block" Block_header.encoding) + (req "save_point" int32) + (req "caboose" int32) + (req "history_mode" History_mode.encoding) let invalid_block_encoding = conv - (fun { hash ; level ; errors } -> (hash, level, errors)) - (fun (hash, level, errors) -> 
{ hash ; level ; errors }) + (fun {hash; level; errors} -> (hash, level, errors)) + (fun (hash, level, errors) -> {hash; level; errors}) (obj3 (req "block" Block_hash.encoding) (req "level" int32) (req "errors" RPC_error.encoding)) module S = struct - let path : prefix RPC_path.context = RPC_path.open_root let chain_id = RPC_service.get_service ~description:"The chain unique identifier." - ~query: RPC_query.empty - ~output: Chain_id.encoding + ~query:RPC_query.empty + ~output:Chain_id.encoding RPC_path.(path / "chain_id") let checkpoint = RPC_service.get_service ~description:"The current checkpoint for this chain." - ~query: RPC_query.empty - ~output: checkpoint_encoding + ~query:RPC_query.empty + ~output:checkpoint_encoding RPC_path.(path / "checkpoint") module Blocks = struct - let list_query = let open RPC_query in query (fun length heads min_date -> object method length = length + method heads = heads + method min_date = min_date end) - |+ opt_field "length" - ~descr: - "The requested number of predecessors to returns (per \ - requested head)." - RPC_arg.int (fun x -> x#length) - |+ multi_field "head" - ~descr: - "An empty argument requests blocks from the current heads. \ - A non empty list allow to request specific fragment \ - of the chain." - Block_hash.rpc_arg (fun x -> x#heads) - |+ opt_field "min_date" - ~descr: "When `min_date` is provided, heads with a \ - timestamp before `min_date` are filtered out" - Time.Protocol.rpc_arg (fun x -> x#min_date) + |+ opt_field + "length" + ~descr: + "The requested number of predecessors to returns (per requested \ + head)." + RPC_arg.int + (fun x -> x#length) + |+ multi_field + "head" + ~descr: + "An empty argument requests blocks from the current heads. A non \ + empty list allow to request specific fragment of the chain." + Block_hash.rpc_arg + (fun x -> x#heads) + |+ opt_field + "min_date" + ~descr: + "When `min_date` is provided, heads with a timestamp before \ + `min_date` are filtered out" + Time.Protocol.rpc_arg + (fun x -> x#min_date) |> seal let path = RPC_path.(path / "blocks") @@ -111,17 +116,16 @@ module S = struct let open Data_encoding in RPC_service.get_service ~description: - "Lists known heads of the blockchain sorted with decreasing fitness. \ - Optional arguments allows to returns the list of predecessors for \ - known heads or the list of predecessors for a given list of blocks." - ~query: list_query - ~output: (list (list Block_hash.encoding)) + "Lists known heads of the blockchain sorted with decreasing \ + fitness. Optional arguments allows to returns the list of \ + predecessors for known heads or the list of predecessors for a \ + given list of blocks." + ~query:list_query + ~output:(list (list Block_hash.encoding)) path - end module Invalid_blocks = struct - let path = RPC_path.(path / "invalid_blocks") let list = @@ -129,26 +133,24 @@ module S = struct ~description: "Lists blocks that have been declared invalid along with the errors \ that led to them being declared invalid." - ~query: RPC_query.empty - ~output: (list invalid_block_encoding) + ~query:RPC_query.empty + ~output:(list invalid_block_encoding) path let get = RPC_service.get_service - ~description: "The errors that appears during the block (in)validation." - ~query: RPC_query.empty - ~output: invalid_block_encoding + ~description:"The errors that appears during the block (in)validation." 
+ ~query:RPC_query.empty + ~output:invalid_block_encoding RPC_path.(path /: Block_hash.rpc_arg) let delete = RPC_service.delete_service - ~description: "Remove an invalid block for the tezos storage" - ~query: RPC_query.empty - ~output: Data_encoding.empty + ~description:"Remove an invalid block for the tezos storage" + ~query:RPC_query.empty + ~output:Data_encoding.empty RPC_path.(path /: Block_hash.rpc_arg) - end - end let make_call0 s ctxt chain q p = @@ -162,54 +164,48 @@ let make_call1 s ctxt chain a q p = let chain_id ctxt = let f = make_call0 S.chain_id ctxt in fun ?(chain = `Main) () -> - match chain with - | `Hash h -> return h - | _ -> f chain () () + match chain with `Hash h -> return h | _ -> f chain () () let checkpoint ctxt ?(chain = `Main) () = make_call0 S.checkpoint ctxt chain () () module Blocks = struct - let list ctxt = let f = make_call0 S.Blocks.list ctxt in fun ?(chain = `Main) ?(heads = []) ?length ?min_date () -> - f chain + f + chain (object - method heads = heads - method length = length - method min_date = min_date + method heads = heads + + method length = length + + method min_date = min_date end) () include Block_services.Empty type protocols = Block_services.protocols = { - current_protocol: Protocol_hash.t ; - next_protocol: Protocol_hash.t ; + current_protocol : Protocol_hash.t; + next_protocol : Protocol_hash.t } let protocols = Block_services.protocols - end module Mempool = Block_services.Empty.Mempool module Invalid_blocks = struct - let list ctxt = let f = make_call0 S.Invalid_blocks.list ctxt in - fun ?(chain = `Main) () -> - f chain () () + fun ?(chain = `Main) () -> f chain () () let get ctxt = let f = make_call1 S.Invalid_blocks.get ctxt in - fun ?(chain = `Main) block -> - f chain block () () + fun ?(chain = `Main) block -> f chain block () () let delete ctxt = let f = make_call1 S.Invalid_blocks.delete ctxt in - fun ?(chain = `Main) block -> - f chain block () () - + fun ?(chain = `Main) block -> f chain block () () end diff --git a/src/lib_shell_services/chain_services.mli b/src/lib_shell_services/chain_services.mli index 273a613db25edffe704723e7e95ec28a226255a9..47c0d61260d219a11eeec87fb88885bd50ca5ded 100644 --- a/src/lib_shell_services/chain_services.mli +++ b/src/lib_shell_services/chain_services.mli @@ -23,124 +23,118 @@ (* *) (*****************************************************************************) -type chain = [ - | `Main - | `Test - | `Hash of Chain_id.t -] +type chain = [`Main | `Test | `Hash of Chain_id.t] -val parse_chain: string -> (chain, string) result -val to_string: chain -> string +val parse_chain : string -> (chain, string) result -val chain_arg: chain RPC_arg.t +val to_string : chain -> string + +val chain_arg : chain RPC_arg.t type invalid_block = { - hash: Block_hash.t ; - level: Int32.t ; - errors: error list ; + hash : Block_hash.t; + level : Int32.t; + errors : error list } type prefix = unit * chain -val path: (unit, prefix) RPC_path.path + +val path : (unit, prefix) RPC_path.path open RPC_context -val chain_id: - #simple -> - ?chain:chain -> - unit -> Chain_id.t tzresult Lwt.t +val chain_id : #simple -> ?chain:chain -> unit -> Chain_id.t tzresult Lwt.t -val checkpoint: +val checkpoint : #simple -> ?chain:chain -> - unit -> (Block_header.t * int32 * int32 * History_mode.t) tzresult Lwt.t + unit -> + (Block_header.t * int32 * int32 * History_mode.t) tzresult Lwt.t module Mempool = Block_services.Empty.Mempool module Blocks : sig - - val list: + val list : #simple -> ?chain:chain -> ?heads:Block_hash.t list -> 
?length:int -> ?min_date:Time.Protocol.t -> - unit -> Block_hash.t list list tzresult Lwt.t + unit -> + Block_hash.t list list tzresult Lwt.t - include (module type of Block_services.Empty) + include module type of Block_services.Empty type protocols = { - current_protocol: Protocol_hash.t ; - next_protocol: Protocol_hash.t ; + current_protocol : Protocol_hash.t; + next_protocol : Protocol_hash.t } - val protocols: - #RPC_context.simple -> ?chain:chain -> ?block:Block_services.block -> - unit -> protocols tzresult Lwt.t - + val protocols : + #RPC_context.simple -> + ?chain:chain -> + ?block:Block_services.block -> + unit -> + protocols tzresult Lwt.t end module Invalid_blocks : sig + val list : + #simple -> ?chain:chain -> unit -> invalid_block list tzresult Lwt.t - val list: - #simple -> - ?chain:chain -> - unit -> invalid_block list tzresult Lwt.t - - val get: - #simple -> - ?chain:chain -> - Block_hash.t -> invalid_block tzresult Lwt.t - - val delete: - #simple -> - ?chain:chain -> - Block_hash.t -> unit tzresult Lwt.t + val get : + #simple -> ?chain:chain -> Block_hash.t -> invalid_block tzresult Lwt.t + val delete : #simple -> ?chain:chain -> Block_hash.t -> unit tzresult Lwt.t end module S : sig + val chain_id : ([`GET], prefix, prefix, unit, unit, Chain_id.t) RPC_service.t - val chain_id: - ([ `GET ], prefix, - prefix, unit, unit, - Chain_id.t) RPC_service.t - - val checkpoint: - ([ `GET ], prefix, - prefix, unit, unit, - Block_header.t * int32 * int32 * History_mode.t) RPC_service.t + val checkpoint : + ( [`GET], + prefix, + prefix, + unit, + unit, + Block_header.t * int32 * int32 * History_mode.t ) + RPC_service.t module Blocks : sig - - val path: (prefix, prefix) RPC_path.t - - val list: - ([ `GET ], prefix, - prefix, < heads : Block_hash.t list; - length : int option; - min_date : Time.Protocol.t option >, unit, - Block_hash.t list list) RPC_service.t - + val path : (prefix, prefix) RPC_path.t + + val list : + ( [`GET], + prefix, + prefix, + < heads : Block_hash.t list + ; length : int option + ; min_date : Time.Protocol.t option >, + unit, + Block_hash.t list list ) + RPC_service.t end module Invalid_blocks : sig - - val list: - ([ `GET ], prefix, - prefix, unit, unit, - invalid_block list) RPC_service.t - - val get: - ([ `GET ], prefix, - prefix * Block_hash.t, unit, unit, - invalid_block) RPC_service.t - - val delete: - ([ `DELETE ], prefix, - prefix * Block_hash.t, unit, unit, - unit) RPC_service.t - + val list : + ([`GET], prefix, prefix, unit, unit, invalid_block list) RPC_service.t + + val get : + ( [`GET], + prefix, + prefix * Block_hash.t, + unit, + unit, + invalid_block ) + RPC_service.t + + val delete : + ( [`DELETE], + prefix, + prefix * Block_hash.t, + unit, + unit, + unit ) + RPC_service.t end - end diff --git a/src/lib_shell_services/chain_validator_worker_state.ml b/src/lib_shell_services/chain_validator_worker_state.ml index b9e063d457866abc0d93f6d6dd62854fd4d125b1..4e5dc14ee6040768b648cffef11fa5bb434be18e 100644 --- a/src/lib_shell_services/chain_validator_worker_state.ml +++ b/src/lib_shell_services/chain_validator_worker_state.ml @@ -27,103 +27,120 @@ module Request = struct type view = Block_hash.t let encoding = Block_hash.encoding + let pp = Block_hash.pp end module Event = struct - type update = - | Ignored_head - | Branch_switch - | Head_incrememt + type update = Ignored_head | Branch_switch | Head_incrememt + type t = | Processed_block of - { request : Request.view ; - request_status : Worker_types.request_status ; - update : update ; + { request : 
Request.view; + request_status : Worker_types.request_status; + update : update; fitness : Fitness.t } | Could_not_switch_testchain of error list let level = function - | Processed_block req -> - begin match req.update with - | Ignored_head -> Internal_event.Info - | Branch_switch | Head_incrememt -> Internal_event.Notice - end - | Could_not_switch_testchain _ -> Internal_event.Error + | Processed_block req -> ( + match req.update with + | Ignored_head -> + Internal_event.Info + | Branch_switch | Head_incrememt -> + Internal_event.Notice ) + | Could_not_switch_testchain _ -> + Internal_event.Error let encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Processed_block" (obj4 (req "request" Request.encoding) (req "status" Worker_types.request_status_encoding) - (req "outcome" - (string_enum [ "ignored", Ignored_head ; - "branch", Branch_switch ; - "increment", Head_incrememt ])) + (req + "outcome" + (string_enum + [ ("ignored", Ignored_head); + ("branch", Branch_switch); + ("increment", Head_incrememt) ])) (req "fitness" Fitness.encoding)) (function - | Processed_block { request ; request_status ; update ; fitness } -> + | Processed_block {request; request_status; update; fitness} -> Some (request, request_status, update, fitness) - | _ -> None) + | _ -> + None) (fun (request, request_status, update, fitness) -> - Processed_block { request ; request_status ; update ; fitness }) ; - case (Tag 1) + Processed_block {request; request_status; update; fitness}); + case + (Tag 1) ~title:"Could_not_switch_testchain" RPC_error.encoding - (function - | Could_not_switch_testchain err -> Some err - | _ -> None) + (function Could_not_switch_testchain err -> Some err | _ -> None) (fun err -> Could_not_switch_testchain err) ] let pp ppf = function | Processed_block req -> Format.fprintf ppf "@[<v 0>" ; - begin match req.update with - | Ignored_head -> - Format.fprintf ppf - "Current head is better than %a (fitness %a), we do not switch@," - | Branch_switch -> - Format.fprintf ppf - "Update current head to %a (fitness %a), changing branch@," - | Head_incrememt -> - Format.fprintf ppf - "Update current head to %a (fitness %a), same branch@," - end - Request.pp req.request - Fitness.pp req.fitness ; - Format.fprintf ppf + ( match req.update with + | Ignored_head -> + Format.fprintf + ppf + "Current head is better than %a (fitness %a), we do not switch@," + | Branch_switch -> + Format.fprintf + ppf + "Update current head to %a (fitness %a), changing branch@," + | Head_incrememt -> + Format.fprintf + ppf + "Update current head to %a (fitness %a), same branch@," ) + Request.pp + req.request + Fitness.pp + req.fitness ; + Format.fprintf + ppf "Pushed: %a, Treated: %a, Completed: %a@]" - Time.System.pp_hum req.request_status.pushed - Time.System.pp_hum req.request_status.treated - Time.System.pp_hum req.request_status.completed + Time.System.pp_hum + req.request_status.pushed + Time.System.pp_hum + req.request_status.treated + Time.System.pp_hum + req.request_status.completed | Could_not_switch_testchain err -> - Format.fprintf ppf "@[<v 0>Error while switching test chain:@ %a@]" - (Format.pp_print_list Error_monad.pp) err - + Format.fprintf + ppf + "@[<v 0>Error while switching test chain:@ %a@]" + (Format.pp_print_list Error_monad.pp) + err end module Worker_state = struct - type view = - { active_peers : P2p_peer.Id.t list ; - bootstrapped_peers : P2p_peer.Id.t list ; - bootstrapped : bool } + type view = { + active_peers : P2p_peer.Id.t list; + bootstrapped_peers : 
P2p_peer.Id.t list; + bootstrapped : bool + } + let encoding = let open Data_encoding in conv - (fun { bootstrapped ; bootstrapped_peers ; active_peers } -> - (bootstrapped, bootstrapped_peers, active_peers)) + (fun {bootstrapped; bootstrapped_peers; active_peers} -> + (bootstrapped, bootstrapped_peers, active_peers)) (fun (bootstrapped, bootstrapped_peers, active_peers) -> - { bootstrapped ; bootstrapped_peers ; active_peers }) + {bootstrapped; bootstrapped_peers; active_peers}) (obj3 (req "bootstrapped" bool) (req "bootstrapped_peers" (list P2p_peer.Id.encoding)) (req "active_peers" (list P2p_peer.Id.encoding))) - let pp ppf { bootstrapped ; bootstrapped_peers ; active_peers } = - Format.fprintf ppf + let pp ppf {bootstrapped; bootstrapped_peers; active_peers} = + Format.fprintf + ppf "@[<v 0>Network is%s bootstrapped.@,\ @[<v 2>Active peers:%a@]@,\ @[<v 2>Bootstrapped peers:%a@]@]" @@ -135,97 +152,70 @@ module Worker_state = struct end module Distributed_db_state = struct - - type table_scheduler = { - table_length : int ; - scheduler_length : int ; - } + type table_scheduler = {table_length : int; scheduler_length : int} type view = { - p2p_readers_length: int ; - active_chains_length: int ; - - operation_db : table_scheduler ; - operations_db : table_scheduler ; - block_header_db : table_scheduler ; - operations_hashed_db : table_scheduler ; - - active_connections_length: int ; - active_peers_length: int ; + p2p_readers_length : int; + active_chains_length : int; + operation_db : table_scheduler; + operations_db : table_scheduler; + block_header_db : table_scheduler; + operations_hashed_db : table_scheduler; + active_connections_length : int; + active_peers_length : int } - - let table_scheduler_encoding = let open Data_encoding in conv - (fun {table_length ; scheduler_length } -> - (table_length, scheduler_length)) + (fun {table_length; scheduler_length} -> + (table_length, scheduler_length)) (fun (table_length, scheduler_length) -> - {table_length ; scheduler_length }) - (obj2 - (req "table_length" int31) - (req "scheduler_length" int31) - ) + {table_length; scheduler_length}) + (obj2 (req "table_length" int31) (req "scheduler_length" int31)) let encoding = let open Data_encoding in conv - (fun { - p2p_readers_length ; - active_chains_length ; - - operation_db ; - operations_db ; - block_header_db ; - operations_hashed_db ; - - active_connections_length ; - active_peers_length ; - } -> - ( p2p_readers_length , - active_chains_length , - operation_db , - operations_db , - block_header_db, - operations_hashed_db, - active_connections_length , - active_peers_length - )) - (fun - ( p2p_readers_length , - active_chains_length , - operation_db , - operations_db , + (fun { p2p_readers_length; + active_chains_length; + operation_db; + operations_db; + block_header_db; + operations_hashed_db; + active_connections_length; + active_peers_length } -> + ( p2p_readers_length, + active_chains_length, + operation_db, + operations_db, block_header_db, operations_hashed_db, - active_connections_length , - active_peers_length - ) -> - { - p2p_readers_length ; - active_chains_length ; - - operation_db ; - operations_db ; - block_header_db ; - operations_hashed_db ; - - active_connections_length ; - active_peers_length ; - } - ) + active_connections_length, + active_peers_length )) + (fun ( p2p_readers_length, + active_chains_length, + operation_db, + operations_db, + block_header_db, + operations_hashed_db, + active_connections_length, + active_peers_length ) -> + { p2p_readers_length; + 
active_chains_length; + operation_db; + operations_db; + block_header_db; + operations_hashed_db; + active_connections_length; + active_peers_length }) (obj8 (req "p2p_readers" int31) (req "active_chains" int31) - (req "operation_db" table_scheduler_encoding) (req "operations_db" table_scheduler_encoding) (req "block_header_db" table_scheduler_encoding) (req "operations_hashed_db" table_scheduler_encoding) - (req "active_connections" int31) - (req "active_peers" int31) - ) - + (req "active_peers" int31)) end diff --git a/src/lib_shell_services/chain_validator_worker_state.mli b/src/lib_shell_services/chain_validator_worker_state.mli index 0b7e145172e8b79e1d4a7ca3f89ff7f3945fe750..b980c1b49637de9caffe088f75297b3cb6a9bd0c 100644 --- a/src/lib_shell_services/chain_validator_worker_state.mli +++ b/src/lib_shell_services/chain_validator_worker_state.mli @@ -25,52 +25,54 @@ module Request : sig type view = Block_hash.t + val encoding : view Data_encoding.encoding + val pp : Format.formatter -> view -> unit end module Event : sig - type update = - | Ignored_head - | Branch_switch - | Head_incrememt + type update = Ignored_head | Branch_switch | Head_incrememt + type t = | Processed_block of - { request : Request.view ; - request_status : Worker_types.request_status ; - update : update ; + { request : Request.view; + request_status : Worker_types.request_status; + update : update; fitness : Fitness.t } | Could_not_switch_testchain of error list + val level : t -> Internal_event.level + val encoding : t Data_encoding.encoding + val pp : Format.formatter -> t -> unit end module Worker_state : sig - type view = - { active_peers : P2p_peer.Id.t list ; - bootstrapped_peers : P2p_peer.Id.t list ; - bootstrapped : bool } + type view = { + active_peers : P2p_peer.Id.t list; + bootstrapped_peers : P2p_peer.Id.t list; + bootstrapped : bool + } + val encoding : view Data_encoding.encoding + val pp : Format.formatter -> view -> unit end module Distributed_db_state : sig - - type table_scheduler = { table_length : int; - scheduler_length : int;} + type table_scheduler = {table_length : int; scheduler_length : int} type view = { - p2p_readers_length: int ; - active_chains_length: int ; - - operation_db : table_scheduler ; - operations_db : table_scheduler ; - block_header_db : table_scheduler ; - operations_hashed_db : table_scheduler ; - - active_connections_length: int ; - active_peers_length: int ; + p2p_readers_length : int; + active_chains_length : int; + operation_db : table_scheduler; + operations_db : table_scheduler; + block_header_db : table_scheduler; + operations_hashed_db : table_scheduler; + active_connections_length : int; + active_peers_length : int } val encoding : view Data_encoding.encoding diff --git a/src/lib_shell_services/connection_metadata.ml b/src/lib_shell_services/connection_metadata.ml index 7d910fc4d9ba1f57bd8e97dbdfadf4813dffbf9e..131ddc4d2dc3e7699e30e16976cbea41cdcb0227 100644 --- a/src/lib_shell_services/connection_metadata.ml +++ b/src/lib_shell_services/connection_metadata.ml @@ -23,20 +23,13 @@ (* *) (*****************************************************************************) -type t = { - disable_mempool : bool ; - private_node : bool ; -} +type t = {disable_mempool : bool; private_node : bool} let encoding = let open Data_encoding in (conv - (fun { disable_mempool ; private_node } -> - disable_mempool , private_node) - (fun (disable_mempool , private_node) -> - { disable_mempool ; private_node })) - (obj2 - (req "disable_mempool" bool) - (req "private_node" bool)) + (fun 
{disable_mempool; private_node} -> (disable_mempool, private_node)) + (fun (disable_mempool, private_node) -> {disable_mempool; private_node})) + (obj2 (req "disable_mempool" bool) (req "private_node" bool)) let pp _ppf _ = () diff --git a/src/lib_shell_services/connection_metadata.mli b/src/lib_shell_services/connection_metadata.mli index 60ec26b499020636d500299e2837da79cdf37c56..62b39d1c57f9f25f227b509a5aab4ff65a14af62 100644 --- a/src/lib_shell_services/connection_metadata.mli +++ b/src/lib_shell_services/connection_metadata.mli @@ -25,10 +25,8 @@ (** All the metadata associated to a running connection. *) -type t = { - disable_mempool : bool ; - private_node : bool ; -} +type t = {disable_mempool : bool; private_node : bool} -val encoding: t Data_encoding.t -val pp: Format.formatter -> t -> unit +val encoding : t Data_encoding.t + +val pp : Format.formatter -> t -> unit diff --git a/src/lib_shell_services/history_mode.ml b/src/lib_shell_services/history_mode.ml index ec40e9a1911098ecd7a96c75ff92692858cf6610..a17bff431e127479121656e780bea86b808285a2 100644 --- a/src/lib_shell_services/history_mode.ml +++ b/src/lib_shell_services/history_mode.ml @@ -25,21 +25,23 @@ type t = Archive | Full | Rolling -let encoding = Data_encoding.string_enum - [ ("archive", Archive) ; - ("full", Full) ; - ("rolling", Rolling) ; - ] +let encoding = + Data_encoding.string_enum + [("archive", Archive); ("full", Full); ("rolling", Rolling)] -let equal hm1 hm2 = match (hm1, hm2) with - | (Archive, Archive) - | (Full, Full) - | (Rolling, Rolling) -> true - | (Archive, _) | (Full, _) | (Rolling, _) -> false +let equal hm1 hm2 = + match (hm1, hm2) with + | (Archive, Archive) | (Full, Full) | (Rolling, Rolling) -> + true + | (Archive, _) | (Full, _) | (Rolling, _) -> + false let pp ppf = function - | Archive -> Format.fprintf ppf "archive" - | Full -> Format.fprintf ppf "full" - | Rolling -> Format.fprintf ppf "rolling" + | Archive -> + Format.fprintf ppf "archive" + | Full -> + Format.fprintf ppf "full" + | Rolling -> + Format.fprintf ppf "rolling" let tag = Tag.def "history_mode" pp diff --git a/src/lib_shell_services/injection_services.ml b/src/lib_shell_services/injection_services.ml index a5bef93eebb57cbee7de6e175c9adf4a03bc47b5..4792d0664730b78450be71b51d5f1c100e3b50f8 100644 --- a/src/lib_shell_services/injection_services.ml +++ b/src/lib_shell_services/injection_services.ml @@ -24,18 +24,20 @@ (*****************************************************************************) module S = struct - open Data_encoding let path = RPC_path.(root / "injection") let block_query = let open RPC_query in - query (fun async force chain -> object - method async = async - method force = force - method chain = chain - end) + query (fun async force chain -> + object + method async = async + + method force = force + + method chain = chain + end) |+ flag "async" (fun t -> t#async) |+ flag "force" (fun t -> t#force) |+ opt_field "chain" Chain_services.chain_arg (fun t -> t#chain) @@ -44,29 +46,31 @@ module S = struct let block_param = obj2 (req "data" bytes) - (req "operations" + (req + "operations" (list (dynamic_size (list (dynamic_size Operation.encoding))))) let block = RPC_service.post_service ~description: "Inject a block in the node and broadcast it. The `operations` \ - embedded in `blockHeader` might be pre-validated using a \ - contextual RPCs from the latest block \ - (e.g. '/blocks/head/context/preapply'). Returns the ID of the \ - block. 
By default, the RPC will wait for the block to be \ - validated before answering." - ~query: block_query - ~input: block_param - ~output: Block_hash.encoding + embedded in `blockHeader` might be pre-validated using a contextual \ + RPCs from the latest block (e.g. '/blocks/head/context/preapply'). \ + Returns the ID of the block. By default, the RPC will wait for the \ + block to be validated before answering." + ~query:block_query + ~input:block_param + ~output:Block_hash.encoding RPC_path.(path / "block") let operation_query = let open RPC_query in - query (fun async chain -> object - method async = async - method chain = chain - end) + query (fun async chain -> + object + method async = async + + method chain = chain + end) |+ flag "async" (fun t -> t#async) |+ opt_field "chain" Chain_services.chain_arg (fun t -> t#chain) |> seal @@ -74,65 +78,74 @@ module S = struct let operation = RPC_service.post_service ~description: - "Inject an operation in node and broadcast it. Returns the \ - ID of the operation. The `signedOperationContents` should be \ - constructed using a contextual RPCs from the latest block \ - and signed by the client. By default, the RPC will wait for \ - the operation to be (pre-)validated before answering. See \ - RPCs under /blocks/prevalidation for more details on the \ - prevalidation context." - ~query: operation_query - ~input: bytes - ~output: Operation_hash.encoding + "Inject an operation in node and broadcast it. Returns the ID of the \ + operation. The `signedOperationContents` should be constructed using \ + a contextual RPCs from the latest block and signed by the client. By \ + default, the RPC will wait for the operation to be (pre-)validated \ + before answering. See RPCs under /blocks/prevalidation for more \ + details on the prevalidation context." + ~query:operation_query + ~input:bytes + ~output:Operation_hash.encoding RPC_path.(path / "operation") let protocol_query = let open RPC_query in - query (fun async force -> object - method async = async - method force = force - end) + query (fun async force -> + object + method async = async + + method force = force + end) |+ flag "async" (fun t -> t#async) |+ flag "force" (fun t -> t#force) |> seal - let protocol = RPC_service.post_service - ~description: - "Inject a protocol in node. Returns the ID of the protocol." - ~query: protocol_query - ~input: Protocol.encoding - ~output: Protocol_hash.encoding + ~description:"Inject a protocol in node. Returns the ID of the protocol." 
+ ~query:protocol_query + ~input:Protocol.encoding + ~output:Protocol_hash.encoding RPC_path.(path / "protocol") - end open RPC_context -let block ctxt - ?(async = false) ?(force = false) ?chain - raw operations = - make_call S.block ctxt () +let block ctxt ?(async = false) ?(force = false) ?chain raw operations = + make_call + S.block + ctxt + () (object - method async = async - method force = force - method chain = chain + method async = async + + method force = force + + method chain = chain end) (raw, operations) let operation ctxt ?(async = false) ?chain operation = - make_call S.operation ctxt () + make_call + S.operation + ctxt + () (object - method async = async - method chain = chain + method async = async + + method chain = chain end) operation let protocol ctxt ?(async = false) ?(force = false) protocol = - make_call S.protocol ctxt () + make_call + S.protocol + ctxt + () (object - method async = async - method force = force + method async = async + + method force = force end) protocol diff --git a/src/lib_shell_services/injection_services.mli b/src/lib_shell_services/injection_services.mli index e81841c9003697381bdc81041d831f1c067990ec..e57e88d960025f75d96cbfb1be1153200c245a41 100644 --- a/src/lib_shell_services/injection_services.mli +++ b/src/lib_shell_services/injection_services.mli @@ -25,48 +25,59 @@ open RPC_context -val block: - #simple -> - ?async:bool -> ?force:bool -> ?chain:Chain_services.chain -> - MBytes.t -> Operation.t list list -> - Block_hash.t tzresult Lwt.t (** [block cctxt ?async ?force raw_block] tries to inject [raw_block] inside the node. If [?async] is [true], [raw_block] will be validated before the result is returned. If [?force] is true, the block will be injected even on non strictly increasing fitness. *) +val block : + #simple -> + ?async:bool -> + ?force:bool -> + ?chain:Chain_services.chain -> + MBytes.t -> + Operation.t list list -> + Block_hash.t tzresult Lwt.t -val operation: +val operation : #simple -> - ?async:bool -> ?chain:Chain_services.chain -> + ?async:bool -> + ?chain:Chain_services.chain -> MBytes.t -> Operation_hash.t tzresult Lwt.t -val protocol: +val protocol : #simple -> - ?async:bool -> ?force:bool -> + ?async:bool -> + ?force:bool -> Protocol.t -> Protocol_hash.t tzresult Lwt.t module S : sig + val block : + ( [`POST], + unit, + unit, + < async : bool ; force : bool ; chain : Chain_services.chain option >, + MBytes.t * Operation.t list list, + Block_hash.t ) + RPC_service.t - val block: - ([ `POST ], unit, - unit, < async: bool ; - force: bool ; - chain: Chain_services.chain option >, MBytes.t * Operation.t list list, - Block_hash.t) RPC_service.t - - val operation: - ([ `POST ], unit, - unit, < async : bool; - chain : Chain_services.chain option >, MBytes.t, - Operation_hash.t) RPC_service.t - - val protocol: - ([ `POST ], unit, - unit, < async : bool; - force : bool >, Protocol.t, - Protocol_hash.t) RPC_service.t + val operation : + ( [`POST], + unit, + unit, + < async : bool ; chain : Chain_services.chain option >, + MBytes.t, + Operation_hash.t ) + RPC_service.t + val protocol : + ( [`POST], + unit, + unit, + < async : bool ; force : bool >, + Protocol.t, + Protocol_hash.t ) + RPC_service.t end diff --git a/src/lib_shell_services/monitor_services.ml b/src/lib_shell_services/monitor_services.ml index a7e634a1f716879c48554aa01a16d0d55f208f53..87b24f01be0e74bde99a6b89853423cc2dd7dcdd 100644 --- a/src/lib_shell_services/monitor_services.ml +++ b/src/lib_shell_services/monitor_services.ml @@ -25,41 +25,44 @@ type chain_status = | 
Active_main of Chain_id.t - | Active_test of { chain : Chain_id.t ; - protocol : Protocol_hash.t ; - expiration_date : Time.Protocol.t } + | Active_test of + { chain : Chain_id.t; + protocol : Protocol_hash.t; + expiration_date : Time.Protocol.t } | Stopping of Chain_id.t let chain_status_encoding = let open Data_encoding in - union ~tag_size:`Uint8 - [ (case - (Tag 0) - ~title:"Main" - (obj1 (req "chain_id" Chain_id.encoding)) - (function Active_main chain_id -> Some chain_id | _ -> None) - (fun chain_id -> Active_main chain_id)) ; - (case - (Tag 1) - ~title:"Test" - (obj3 - (req "chain_id" Chain_id.encoding) - (req "test_protocol" Protocol_hash.encoding) - (req "expiration_date" Time.Protocol.encoding) - ) - (function | Active_test { chain ; protocol ; expiration_date } -> Some (chain, protocol, expiration_date) - | _ -> None) - (fun (chain, protocol, expiration_date) -> Active_test { chain ; protocol ; expiration_date })) ; - (case - (Tag 2) - ~title:"Stopping" - (obj1 (req "stopping" Chain_id.encoding)) - (function Stopping chain_id -> Some chain_id | _ -> None) - (fun chain_id -> Stopping chain_id)) - ] + union + ~tag_size:`Uint8 + [ case + (Tag 0) + ~title:"Main" + (obj1 (req "chain_id" Chain_id.encoding)) + (function Active_main chain_id -> Some chain_id | _ -> None) + (fun chain_id -> Active_main chain_id); + case + (Tag 1) + ~title:"Test" + (obj3 + (req "chain_id" Chain_id.encoding) + (req "test_protocol" Protocol_hash.encoding) + (req "expiration_date" Time.Protocol.encoding)) + (function + | Active_test {chain; protocol; expiration_date} -> + Some (chain, protocol, expiration_date) + | _ -> + None) + (fun (chain, protocol, expiration_date) -> + Active_test {chain; protocol; expiration_date}); + case + (Tag 2) + ~title:"Stopping" + (obj1 (req "stopping" Chain_id.encoding)) + (function Stopping chain_id -> Some chain_id | _ -> None) + (fun chain_id -> Stopping chain_id) ] module S = struct - open Data_encoding let path = RPC_path.(root / "monitor") @@ -67,115 +70,127 @@ module S = struct let bootstrapped = RPC_service.get_service ~description: - "Wait for the node to have synchronized its chain with a few \ - peers (configured by the node's administrator), streaming \ - head updates that happen during the bootstrapping process, \ - and closing the stream at the end. If the node was already \ - bootstrapped, returns the current head immediately." - ~query: RPC_query.empty - ~output: (obj2 - (req "block" Block_hash.encoding) - (req "timestamp" Time.Protocol.encoding)) + "Wait for the node to have synchronized its chain with a few peers \ + (configured by the node's administrator), streaming head updates \ + that happen during the bootstrapping process, and closing the stream \ + at the end. If the node was already bootstrapped, returns the \ + current head immediately." 
+ ~query:RPC_query.empty + ~output: + (obj2 + (req "block" Block_hash.encoding) + (req "timestamp" Time.Protocol.encoding)) RPC_path.(path / "bootstrapped") let valid_blocks_query = let open RPC_query in - query (fun protocols next_protocols chains -> object - method protocols = protocols - method next_protocols = next_protocols - method chains = chains - end) - |+ multi_field "protocol" - Protocol_hash.rpc_arg (fun t -> t#protocols) - |+ multi_field "next_protocol" - Protocol_hash.rpc_arg (fun t -> t#next_protocols) - |+ multi_field "chain" - Chain_services.chain_arg (fun t -> t#chains) + query (fun protocols next_protocols chains -> + object + method protocols = protocols + + method next_protocols = next_protocols + + method chains = chains + end) + |+ multi_field "protocol" Protocol_hash.rpc_arg (fun t -> t#protocols) + |+ multi_field "next_protocol" Protocol_hash.rpc_arg (fun t -> + t#next_protocols) + |+ multi_field "chain" Chain_services.chain_arg (fun t -> t#chains) |> seal let valid_blocks = RPC_service.get_service - ~description:"Monitor all blocks that are successfully validated \ - by the node, disregarding whether they were \ - selected as the new head or not." - ~query: valid_blocks_query - ~output: (merge_objs - (obj2 - (req "chain_id" Chain_id.encoding) - (req "hash" Block_hash.encoding)) - Block_header.encoding) + ~description: + "Monitor all blocks that are successfully validated by the node, \ + disregarding whether they were selected as the new head or not." + ~query:valid_blocks_query + ~output: + (merge_objs + (obj2 + (req "chain_id" Chain_id.encoding) + (req "hash" Block_hash.encoding)) + Block_header.encoding) RPC_path.(path / "valid_blocks") let heads_query = let open RPC_query in - query (fun next_protocols -> object - method next_protocols = next_protocols - end) - |+ multi_field "next_protocol" - Protocol_hash.rpc_arg (fun t -> t#next_protocols) + query (fun next_protocols -> + object + method next_protocols = next_protocols + end) + |+ multi_field "next_protocol" Protocol_hash.rpc_arg (fun t -> + t#next_protocols) |> seal let heads = RPC_service.get_service - ~description:"Monitor all blocks that are successfully validated \ - by the node and selected as the new head of the \ - given chain." - ~query: heads_query - ~output: (merge_objs - (obj1 - (req "hash" Block_hash.encoding)) - Block_header.encoding) + ~description: + "Monitor all blocks that are successfully validated by the node and \ + selected as the new head of the given chain." + ~query:heads_query + ~output: + (merge_objs + (obj1 (req "hash" Block_hash.encoding)) + Block_header.encoding) RPC_path.(path / "heads" /: Chain_services.chain_arg) let protocols = RPC_service.get_service - ~description:"Monitor all economic protocols that are retrieved \ - and successfully loaded and compiled by the node." - ~query: RPC_query.empty - ~output: Protocol_hash.encoding + ~description: + "Monitor all economic protocols that are retrieved and successfully \ + loaded and compiled by the node." + ~query:RPC_query.empty + ~output:Protocol_hash.encoding RPC_path.(path / "protocols") let commit_hash = RPC_service.get_service ~description:"Get information on the build of the node." - ~query: RPC_query.empty - ~output: string + ~query:RPC_query.empty + ~output:string RPC_path.(path / "commit_hash") let active_chains = RPC_service.get_service - ~description:"Monitor every chain creation and \ - destruction. 
Currently active chains will be given \ - as first elements" - ~query: RPC_query.empty - ~output: (Data_encoding.list chain_status_encoding) + ~description: + "Monitor every chain creation and destruction. Currently active \ + chains will be given as first elements" + ~query:RPC_query.empty + ~output:(Data_encoding.list chain_status_encoding) RPC_path.(path / "active_chains") - end open RPC_context -let bootstrapped ctxt = - make_streamed_call S.bootstrapped ctxt () () () +let bootstrapped ctxt = make_streamed_call S.bootstrapped ctxt () () () + +let valid_blocks ctxt ?(chains = [`Main]) ?(protocols = []) + ?(next_protocols = []) () = + make_streamed_call + S.valid_blocks + ctxt + () + (object + method chains = chains + + method protocols = protocols -let valid_blocks - ctxt ?(chains = [`Main]) ?(protocols = []) ?(next_protocols = []) () = - make_streamed_call S.valid_blocks ctxt () (object - method chains = chains - method protocols = protocols - method next_protocols = next_protocols - end) () + method next_protocols = next_protocols + end) + () let heads ctxt ?(next_protocols = []) chain = - make_streamed_call S.heads ctxt ((), chain) (object - method next_protocols = next_protocols - end) () + make_streamed_call + S.heads + ctxt + ((), chain) + (object + method next_protocols = next_protocols + end) + () -let protocols ctxt = - make_streamed_call S.protocols ctxt () () () +let protocols ctxt = make_streamed_call S.protocols ctxt () () () -let commit_hash ctxt = - make_call S.commit_hash ctxt () () () +let commit_hash ctxt = make_call S.commit_hash ctxt () () () -let active_chains ctxt = - make_streamed_call S.active_chains ctxt () () () +let active_chains ctxt = make_streamed_call S.active_chains ctxt () () () diff --git a/src/lib_shell_services/monitor_services.mli b/src/lib_shell_services/monitor_services.mli index a16bd39daa5071334d393978e6ec093c169df76d..e4aa403a5883ee225c9419a5ac131be82feeeb52 100644 --- a/src/lib_shell_services/monitor_services.mli +++ b/src/lib_shell_services/monitor_services.mli @@ -27,68 +27,75 @@ open RPC_context type chain_status = | Active_main of Chain_id.t - | Active_test of { chain : Chain_id.t ; - protocol : Protocol_hash.t ; - expiration_date : Time.Protocol.t } + | Active_test of + { chain : Chain_id.t; + protocol : Protocol_hash.t; + expiration_date : Time.Protocol.t } | Stopping of Chain_id.t -val bootstrapped: - #streamed -> ((Block_hash.t * Time.Protocol.t) Lwt_stream.t * stopper) tzresult Lwt.t +val bootstrapped : + #streamed -> + ((Block_hash.t * Time.Protocol.t) Lwt_stream.t * stopper) tzresult Lwt.t -val valid_blocks: +val valid_blocks : #streamed -> ?chains:Chain_services.chain list -> ?protocols:Protocol_hash.t list -> ?next_protocols:Protocol_hash.t list -> - unit -> (((Chain_id.t * Block_hash.t) * Block_header.t) Lwt_stream.t * stopper) tzresult Lwt.t + unit -> + (((Chain_id.t * Block_hash.t) * Block_header.t) Lwt_stream.t * stopper) + tzresult + Lwt.t -val heads: +val heads : #streamed -> ?next_protocols:Protocol_hash.t list -> Chain_services.chain -> ((Block_hash.t * Block_header.t) Lwt_stream.t * stopper) tzresult Lwt.t -val protocols: - #streamed -> - (Protocol_hash.t Lwt_stream.t * stopper) tzresult Lwt.t +val protocols : + #streamed -> (Protocol_hash.t Lwt_stream.t * stopper) tzresult Lwt.t -val commit_hash: #simple -> string tzresult Lwt.t +val commit_hash : #simple -> string tzresult Lwt.t -val active_chains: - #streamed -> - (chain_status list Lwt_stream.t * stopper) tzresult Lwt.t +val active_chains : + #streamed -> (chain_status 
list Lwt_stream.t * stopper) tzresult Lwt.t module S : sig - - val bootstrapped: - ([ `GET ], unit, - unit, unit, unit, - Block_hash.t * Time.Protocol.t) RPC_service.t - - val valid_blocks: - ([ `GET ], unit, - unit, < chains : Chain_services.chain list; - next_protocols : Protocol_hash.t list; - protocols : Protocol_hash.t list >, unit, - (Chain_id.t * Block_hash.t) * Block_header.t) RPC_service.t - - val heads: - ([ `GET ], unit, - unit * Chain_services.chain, - < next_protocols : Protocol_hash.t list >, unit, - Block_hash.t * Block_header.t) RPC_service.t - - val protocols: - ([ `GET ], unit, - unit, unit, unit, - Protocol_hash.t) RPC_service.t - - val commit_hash: - ([ `GET ], unit, unit, unit, unit, string) RPC_service.t - - val active_chains: - ([ `GET ], unit, - unit, unit, unit, - chain_status list) RPC_service.t - + val bootstrapped : + ( [`GET], + unit, + unit, + unit, + unit, + Block_hash.t * Time.Protocol.t ) + RPC_service.t + + val valid_blocks : + ( [`GET], + unit, + unit, + < chains : Chain_services.chain list + ; next_protocols : Protocol_hash.t list + ; protocols : Protocol_hash.t list >, + unit, + (Chain_id.t * Block_hash.t) * Block_header.t ) + RPC_service.t + + val heads : + ( [`GET], + unit, + unit * Chain_services.chain, + < next_protocols : Protocol_hash.t list >, + unit, + Block_hash.t * Block_header.t ) + RPC_service.t + + val protocols : + ([`GET], unit, unit, unit, unit, Protocol_hash.t) RPC_service.t + + val commit_hash : ([`GET], unit, unit, unit, unit, string) RPC_service.t + + val active_chains : + ([`GET], unit, unit, unit, unit, chain_status list) RPC_service.t end diff --git a/src/lib_shell_services/p2p_services.ml b/src/lib_shell_services/p2p_services.ml index 05add47e00e78a2c029fd6781a673f763a0ced42..6592b6c36d18b955f0662206c39305082739e65a 100644 --- a/src/lib_shell_services/p2p_services.ml +++ b/src/lib_shell_services/p2p_services.ml @@ -25,328 +25,393 @@ let wait_query = let open RPC_query in - query (fun wait -> object - method wait = wait - end) + query (fun wait -> + object + method wait = wait + end) |+ flag "wait" (fun t -> t#wait) |> seal let monitor_query = let open RPC_query in - query (fun monitor -> object - method monitor = monitor - end) + query (fun monitor -> + object + method monitor = monitor + end) |+ flag "monitor" (fun t -> t#monitor) |> seal let timeout_query = let open RPC_query in - query (fun timeout -> object - method timeout = timeout - end) + query (fun timeout -> + object + method timeout = timeout + end) |+ field - "timeout" - Time.System.Span.rpc_arg - (Time.System.Span.of_seconds_exn 10.) - (fun t -> t#timeout) + "timeout" + Time.System.Span.rpc_arg + (Time.System.Span.of_seconds_exn 10.) + (fun t -> t#timeout) |> seal module S = struct - let self = RPC_service.get_service ~description:"Return the node's peer id" - ~query: RPC_query.empty - ~output: P2p_peer.Id.encoding + ~query:RPC_query.empty + ~output:P2p_peer.Id.encoding RPC_path.(root / "network" / "self") let version = RPC_service.get_service ~description:"Supported network layer version." - ~query: RPC_query.empty - ~output: Network_version.encoding + ~query:RPC_query.empty + ~output:Network_version.encoding RPC_path.(root / "network" / "version") (* DEPRECATED: use [version] instead. *) let versions = RPC_service.get_service ~description:"DEPRECATED: use `version` instead." 
- ~query: RPC_query.empty - ~output: (Data_encoding.list Network_version.encoding) + ~query:RPC_query.empty + ~output:(Data_encoding.list Network_version.encoding) RPC_path.(root / "network" / "versions") let stat = RPC_service.get_service ~description:"Global network bandwidth statistics in B/s." - ~query: RPC_query.empty - ~output: P2p_stat.encoding + ~query:RPC_query.empty + ~output:P2p_stat.encoding RPC_path.(root / "network" / "stat") let events = RPC_service.get_service ~description:"Stream of all network events" - ~query: RPC_query.empty - ~output: P2p_connection.Pool_event.encoding + ~query:RPC_query.empty + ~output:P2p_connection.Pool_event.encoding RPC_path.(root / "network" / "log") let connect = RPC_service.put_service ~description:"Connect to a peer" - ~query: timeout_query - ~input: Data_encoding.empty - ~output: Data_encoding.empty + ~query:timeout_query + ~input:Data_encoding.empty + ~output:Data_encoding.empty RPC_path.(root / "network" / "points" /: P2p_point.Id.rpc_arg) - end open RPC_context + let self ctxt = make_call S.self ctxt () () () + let stat ctxt = make_call S.stat ctxt () () () + let version ctxt = make_call S.version ctxt () () () -let versions ctxt = make_call S.versions ctxt () () () (* DEPRECATED: use [version] instead. *) + +let versions ctxt = make_call S.versions ctxt () () () + +(* DEPRECATED: use [version] instead. *) + let events ctxt = make_streamed_call S.events ctxt () () () + let connect ctxt ~timeout peer_id = - make_call1 S.connect ctxt peer_id (object method timeout = timeout end) () + make_call1 + S.connect + ctxt + peer_id + (object + method timeout = timeout + end) + () module Connections = struct - type connection_info = Connection_metadata.t P2p_connection.Info.t let connection_info_encoding = P2p_connection.Info.encoding Connection_metadata.encoding module S = struct - let list = RPC_service.get_service ~description:"List the running P2P connection." - ~query: RPC_query.empty - ~output: (Data_encoding.list connection_info_encoding) + ~query:RPC_query.empty + ~output:(Data_encoding.list connection_info_encoding) RPC_path.(root / "network" / "connections") let info = RPC_service.get_service - ~query: RPC_query.empty - ~output: connection_info_encoding - ~description:"Details about the current P2P connection to the given peer." + ~query:RPC_query.empty + ~output:connection_info_encoding + ~description: + "Details about the current P2P connection to the given peer." RPC_path.(root / "network" / "connections" /: P2p_peer.Id.rpc_arg) let kick = RPC_service.delete_service - ~query: wait_query - ~output: Data_encoding.empty - ~description:"Forced close of the current P2P connection to the given peer." + ~query:wait_query + ~output:Data_encoding.empty + ~description: + "Forced close of the current P2P connection to the given peer." RPC_path.(root / "network" / "connections" /: P2p_peer.Id.rpc_arg) - end let list ctxt = make_call S.list ctxt () () () + let info ctxt peer_id = make_call1 S.info ctxt peer_id () () - let kick ctxt ?(wait = false) peer_id = - make_call1 S.kick ctxt peer_id (object method wait = wait end) () + let kick ctxt ?(wait = false) peer_id = + make_call1 + S.kick + ctxt + peer_id + (object + method wait = wait + end) + () end module Points = struct - module S = struct - let info = RPC_service.get_service - ~query: RPC_query.empty - ~output: P2p_point.Info.encoding - ~description: "Details about a given `IP:addr`." + ~query:RPC_query.empty + ~output:P2p_point.Info.encoding + ~description:"Details about a given `IP:addr`." 
RPC_path.(root / "network" / "points" /: P2p_point.Id.rpc_arg) let events = RPC_service.get_service - ~query: monitor_query - ~output: (Data_encoding.list - P2p_point.Pool_event.encoding) - ~description: "Monitor network events related to an `IP:addr`." + ~query:monitor_query + ~output:(Data_encoding.list P2p_point.Pool_event.encoding) + ~description:"Monitor network events related to an `IP:addr`." RPC_path.(root / "network" / "points" /: P2p_point.Id.rpc_arg / "log") let list = let filter_query = let open RPC_query in - query (fun filters -> object - method filters = filters - end) + query (fun filters -> + object + method filters = filters + end) |+ multi_field "filter" P2p_point.Filter.rpc_arg (fun t -> t#filters) - |> seal in + |> seal + in RPC_service.get_service - ~query: filter_query + ~query:filter_query ~output: - Data_encoding.(list (tup2 - P2p_point.Id.encoding - P2p_point.Info.encoding)) - ~description:"List the pool of known `IP:port` \ - used for establishing P2P connections." + Data_encoding.( + list (tup2 P2p_point.Id.encoding P2p_point.Info.encoding)) + ~description: + "List the pool of known `IP:port` used for establishing P2P \ + connections." RPC_path.(root / "network" / "points") let ban = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.empty - ~description:"Blacklist the given address and remove it from the \ - whitelist if present." - RPC_path.(root / "network" / "points" /: P2p_point.Id.rpc_arg / "ban" ) + ~query:RPC_query.empty + ~output:Data_encoding.empty + ~description: + "Blacklist the given address and remove it from the whitelist if \ + present." + RPC_path.(root / "network" / "points" /: P2p_point.Id.rpc_arg / "ban") let unban = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.empty + ~query:RPC_query.empty + ~output:Data_encoding.empty ~description:"Remove an address from the blacklist." - RPC_path.(root / "network" / "points" /: P2p_point.Id.rpc_arg / "unban" ) + RPC_path.( + root / "network" / "points" /: P2p_point.Id.rpc_arg / "unban") let trust = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.empty - ~description:"Trust a given address permanently and remove it \ - from the blacklist if present. Connections from \ - this address can still be closed on \ - authentication if the peer is greylisted." - RPC_path.(root / "network" / "points" /: P2p_point.Id.rpc_arg / "trust" ) + ~query:RPC_query.empty + ~output:Data_encoding.empty + ~description: + "Trust a given address permanently and remove it from the blacklist \ + if present. Connections from this address can still be closed on \ + authentication if the peer is greylisted." + RPC_path.( + root / "network" / "points" /: P2p_point.Id.rpc_arg / "trust") let untrust = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.empty + ~query:RPC_query.empty + ~output:Data_encoding.empty ~description:"Remove an address from the whitelist." - RPC_path.(root / "network" / "points" /: P2p_point.Id.rpc_arg / "untrust" ) + RPC_path.( + root / "network" / "points" /: P2p_point.Id.rpc_arg / "untrust") let banned = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.bool - ~description:"Check is a given address is blacklisted or \ - greylisted." - RPC_path.(root / "network" / "points" /: P2p_point.Id.rpc_arg / "banned" ) - + ~query:RPC_query.empty + ~output:Data_encoding.bool + ~description:"Check is a given address is blacklisted or greylisted." 
+ RPC_path.( + root / "network" / "points" /: P2p_point.Id.rpc_arg / "banned") end open RPC_context + let info ctxt peer_id = make_call1 S.info ctxt peer_id () () + let events ctxt point = - make_streamed_call S.events ctxt ((), point) - (object method monitor = true end) () - let list ?(filter = []) ctxt = make_call S.list ctxt () - (object method filters = filter end) () + make_streamed_call + S.events + ctxt + ((), point) + (object + method monitor = true + end) + () + + let list ?(filter = []) ctxt = + make_call + S.list + ctxt + () + (object + method filters = filter + end) + () + let ban ctxt peer_id = make_call1 S.ban ctxt peer_id () () + let unban ctxt peer_id = make_call1 S.unban ctxt peer_id () () + let trust ctxt peer_id = make_call1 S.trust ctxt peer_id () () + let untrust ctxt peer_id = make_call1 S.untrust ctxt peer_id () () - let banned ctxt peer_id = make_call1 S.banned ctxt peer_id () () + let banned ctxt peer_id = make_call1 S.banned ctxt peer_id () () end module Peers = struct - module S = struct - let info = RPC_service.get_service - ~query: RPC_query.empty - ~output: (P2p_peer.Info.encoding Peer_metadata.encoding - Connection_metadata.encoding) + ~query:RPC_query.empty + ~output: + (P2p_peer.Info.encoding + Peer_metadata.encoding + Connection_metadata.encoding) ~description:"Details about a given peer." RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg) let events = RPC_service.get_service - ~query: monitor_query - ~output: (Data_encoding.list - P2p_peer.Pool_event.encoding) + ~query:monitor_query + ~output:(Data_encoding.list P2p_peer.Pool_event.encoding) ~description:"Monitor network events related to a given peer." RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "log") let list = let filter = let open RPC_query in - query (fun filters -> object - method filters = filters - end) + query (fun filters -> + object + method filters = filters + end) |+ multi_field "filter" P2p_peer.Filter.rpc_arg (fun t -> t#filters) - |> seal in + |> seal + in RPC_service.get_service - ~query: filter + ~query:filter ~output: - Data_encoding.(list (tup2 - P2p_peer.Id.encoding - (P2p_peer.Info.encoding Peer_metadata.encoding - Connection_metadata.encoding))) + Data_encoding.( + list + (tup2 + P2p_peer.Id.encoding + (P2p_peer.Info.encoding + Peer_metadata.encoding + Connection_metadata.encoding))) ~description:"List the peers the node ever met." RPC_path.(root / "network" / "peers") let ban = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.empty - ~description:"Blacklist the given peer and remove it from the \ - whitelist if present." - RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "ban" ) + ~query:RPC_query.empty + ~output:Data_encoding.empty + ~description: + "Blacklist the given peer and remove it from the whitelist if \ + present." + RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "ban") let unban = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.empty + ~query:RPC_query.empty + ~output:Data_encoding.empty ~description:"Remove the given peer from the blacklist." - RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "unban" ) + RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "unban") let trust = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.empty - ~description:"Whitelist a given peer permanently and remove it \ - from the blacklist if present. The peer cannot \ - be blocked (but its host IP still can)." 
- RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "trust" ) + ~query:RPC_query.empty + ~output:Data_encoding.empty + ~description: + "Whitelist a given peer permanently and remove it from the \ + blacklist if present. The peer cannot be blocked (but its host IP \ + still can)." + RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "trust") let untrust = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.empty + ~query:RPC_query.empty + ~output:Data_encoding.empty ~description:"Remove a given peer from the whitelist." - RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "untrust" ) + RPC_path.( + root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "untrust") let banned = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.bool - ~description:"Check if a given peer is blacklisted or \ - greylisted." - RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "banned" ) - + ~query:RPC_query.empty + ~output:Data_encoding.bool + ~description:"Check if a given peer is blacklisted or greylisted." + RPC_path.(root / "network" / "peers" /: P2p_peer.Id.rpc_arg / "banned") end let info ctxt peer_id = make_call1 S.info ctxt peer_id () () + let events ctxt point = - make_streamed_call S.events ctxt ((), point) - (object method monitor = true end) () + make_streamed_call + S.events + ctxt + ((), point) + (object + method monitor = true + end) + () + let list ?(filter = []) ctxt = - make_call S.list ctxt () (object method filters = filter end) () + make_call + S.list + ctxt + () + (object + method filters = filter + end) + () + let ban ctxt point_id = make_call1 S.ban ctxt point_id () () + let unban ctxt point_id = make_call1 S.unban ctxt point_id () () + let trust ctxt point_id = make_call1 S.trust ctxt point_id () () + let untrust ctxt point_id = make_call1 S.untrust ctxt point_id () () - let banned ctxt point_id = make_call1 S.banned ctxt point_id () () + let banned ctxt point_id = make_call1 S.banned ctxt point_id () () end module ACL = struct - module S = struct - let clear = RPC_service.get_service - ~query: RPC_query.empty - ~output: Data_encoding.empty + ~query:RPC_query.empty + ~output:Data_encoding.empty ~description:"Clear all greylists tables." - RPC_path.(root / "network" / "greylist" / "clear" ) - + RPC_path.(root / "network" / "greylist" / "clear") end let clear ctxt = make_call S.clear ctxt () () - end diff --git a/src/lib_shell_services/p2p_services.mli b/src/lib_shell_services/p2p_services.mli index 1bbb71579e20f77f503a5ed409b56566e7ba8188..9997546186b401b2691c09669d2a87e6c7d4ba9d 100644 --- a/src/lib_shell_services/p2p_services.mli +++ b/src/lib_shell_services/p2p_services.mli @@ -25,240 +25,230 @@ open RPC_context -val self: #simple -> P2p_peer.Id.t tzresult Lwt.t +val self : #simple -> P2p_peer.Id.t tzresult Lwt.t -val stat: #simple -> P2p_stat.t tzresult Lwt.t +val stat : #simple -> P2p_stat.t tzresult Lwt.t -val version: #simple -> Network_version.t tzresult Lwt.t +val version : #simple -> Network_version.t tzresult Lwt.t (* DEPRECATED: use [version] instead. 
*) -val versions: #simple -> Network_version.t list tzresult Lwt.t +val versions : #simple -> Network_version.t list tzresult Lwt.t -val events: #streamed -> +val events : + #streamed -> (P2p_connection.Pool_event.t Lwt_stream.t * stopper) tzresult Lwt.t -val connect: #simple -> timeout:Ptime.Span.t -> P2p_point.Id.t -> unit tzresult Lwt.t +val connect : + #simple -> timeout:Ptime.Span.t -> P2p_point.Id.t -> unit tzresult Lwt.t module S : sig + val self : ([`GET], unit, unit, unit, unit, P2p_peer.Id.t) RPC_service.t - val self : - ([ `GET ], unit, - unit, unit, unit, - P2p_peer.Id.t) RPC_service.t - - val stat : - ([ `GET ], unit, - unit, unit, unit, - P2p_stat.t) RPC_service.t + val stat : ([`GET], unit, unit, unit, unit, P2p_stat.t) RPC_service.t val version : - ([ `GET ], unit, - unit, unit, unit, - Network_version.t) RPC_service.t + ([`GET], unit, unit, unit, unit, Network_version.t) RPC_service.t (* DEPRECATED: use [version] instead. *) val versions : - ([ `GET ], unit, - unit, unit, unit, - Network_version.t list) RPC_service.t + ([`GET], unit, unit, unit, unit, Network_version.t list) RPC_service.t val events : - ([ `GET ], unit, - unit, unit, unit, - P2p_connection.Pool_event.t) RPC_service.t + ([`GET], unit, unit, unit, unit, P2p_connection.Pool_event.t) RPC_service.t val connect : - ([ `PUT ], unit, - unit * P2p_point.Id.t, < timeout: Ptime.Span.t >, unit, - unit) RPC_service.t - + ( [`PUT], + unit, + unit * P2p_point.Id.t, + < timeout : Ptime.Span.t >, + unit, + unit ) + RPC_service.t end module Connections : sig - open RPC_context type connection_info = Connection_metadata.t P2p_connection.Info.t - val list: #simple -> connection_info list tzresult Lwt.t + val list : #simple -> connection_info list tzresult Lwt.t - val info: #simple -> P2p_peer.Id.t -> connection_info tzresult Lwt.t + val info : #simple -> P2p_peer.Id.t -> connection_info tzresult Lwt.t - val kick: #simple -> ?wait:bool -> P2p_peer.Id.t -> unit tzresult Lwt.t + val kick : #simple -> ?wait:bool -> P2p_peer.Id.t -> unit tzresult Lwt.t module S : sig - val list : - ([ `GET ], unit, - unit, unit, unit, - connection_info list) RPC_service.t + ([`GET], unit, unit, unit, unit, connection_info list) RPC_service.t val info : - ([ `GET ], unit, - unit * P2p_peer.Id.t, unit, unit, - connection_info) RPC_service.t + ( [`GET], + unit, + unit * P2p_peer.Id.t, + unit, + unit, + connection_info ) + RPC_service.t val kick : - ([ `DELETE ], unit, - unit * P2p_peer.Id.t, < wait: bool >, unit, - unit) RPC_service.t - + ( [`DELETE], + unit, + unit * P2p_peer.Id.t, + < wait : bool >, + unit, + unit ) + RPC_service.t end - end - module Points : sig + val list : + ?filter:P2p_point.Filter.t list -> + #simple -> + (P2p_point.Id.t * P2p_point.Info.t) list tzresult Lwt.t - val list: - ?filter:(P2p_point.Filter.t list) -> - #simple -> (P2p_point.Id.t * P2p_point.Info.t) list tzresult Lwt.t - - val info: #simple -> P2p_point.Id.t -> P2p_point.Info.t tzresult Lwt.t + val info : #simple -> P2p_point.Id.t -> P2p_point.Info.t tzresult Lwt.t - val events: + val events : #streamed -> P2p_point.Id.t -> (P2p_point.Pool_event.t list Lwt_stream.t * stopper) tzresult Lwt.t - val ban: #simple -> P2p_point.Id.t -> unit tzresult Lwt.t + val ban : #simple -> P2p_point.Id.t -> unit tzresult Lwt.t - val unban: #simple -> P2p_point.Id.t -> unit tzresult Lwt.t + val unban : #simple -> P2p_point.Id.t -> unit tzresult Lwt.t - val trust: #simple -> P2p_point.Id.t -> unit tzresult Lwt.t + val trust : #simple -> P2p_point.Id.t -> unit tzresult Lwt.t - val 
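The streamed endpoints hand back an `Lwt_stream.t` plus a `stopper` that shuts the monitoring call down. Below is a minimal consumption sketch, assuming the stopper is the usual `unit -> unit` callback from `RPC_context` and leaving the per-event processing abstract; `watch_connections` is an invented name.

(* Sketch: consume the first [n] connection-pool events, then stop the
   monitor.  [handle] is a placeholder for whatever the caller wants to
   do with each event; error-monad operators come from Error_monad. *)
open Error_monad

let watch_connections (cctxt : #RPC_context.streamed) ~n ~handle =
  P2p_services.events cctxt
  >>=? fun (stream, stop) ->
  let rec loop remaining =
    if remaining <= 0 then Lwt.return_unit
    else
      Lwt_stream.get stream
      >>= function
      | None -> Lwt.return_unit
      | Some event -> handle event >>= fun () -> loop (remaining - 1)
  in
  loop n >>= fun () -> stop () ; return_unit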
untrust: #simple -> P2p_point.Id.t -> unit tzresult Lwt.t + val untrust : #simple -> P2p_point.Id.t -> unit tzresult Lwt.t - val banned: #simple -> P2p_point.Id.t -> bool tzresult Lwt.t + val banned : #simple -> P2p_point.Id.t -> bool tzresult Lwt.t module S : sig - val list : - ([ `GET ], unit, - unit, < filters: P2p_point.Filter.t list >, unit, - (P2p_point.Id.t * P2p_point.Info.t) list) RPC_service.t + ( [`GET], + unit, + unit, + < filters : P2p_point.Filter.t list >, + unit, + (P2p_point.Id.t * P2p_point.Info.t) list ) + RPC_service.t val info : - ([ `GET ], unit, - unit * P2p_point.Id.t, unit, unit, - P2p_point.Info.t) RPC_service.t + ( [`GET], + unit, + unit * P2p_point.Id.t, + unit, + unit, + P2p_point.Info.t ) + RPC_service.t val events : - ([ `GET ], unit, - unit * P2p_point.Id.t, < monitor: bool>, unit, - P2p_point.Pool_event.t list) RPC_service.t + ( [`GET], + unit, + unit * P2p_point.Id.t, + < monitor : bool >, + unit, + P2p_point.Pool_event.t list ) + RPC_service.t val ban : - ([ `GET ], unit, - unit * P2p_point.Id.t, unit, unit, - unit) RPC_service.t + ([`GET], unit, unit * P2p_point.Id.t, unit, unit, unit) RPC_service.t val unban : - ([ `GET ], unit, - unit * P2p_point.Id.t, unit, unit, - unit) RPC_service.t + ([`GET], unit, unit * P2p_point.Id.t, unit, unit, unit) RPC_service.t val trust : - ([ `GET ], unit, - unit * P2p_point.Id.t, unit, unit, - unit) RPC_service.t + ([`GET], unit, unit * P2p_point.Id.t, unit, unit, unit) RPC_service.t val untrust : - ([ `GET ], unit, - unit * P2p_point.Id.t, unit, unit, - unit) RPC_service.t + ([`GET], unit, unit * P2p_point.Id.t, unit, unit, unit) RPC_service.t val banned : - ([ `GET ], unit, - unit * P2p_point.Id.t, unit, unit, - bool) RPC_service.t - + ([`GET], unit, unit * P2p_point.Id.t, unit, unit, bool) RPC_service.t end - end module Peers : sig - - val list: - ?filter:(P2p_peer.Filter.t list) -> + val list : + ?filter:P2p_peer.Filter.t list -> #simple -> - (P2p_peer.Id.t * (Peer_metadata.t, Connection_metadata.t) P2p_peer.Info.t) list tzresult Lwt.t + (P2p_peer.Id.t * (Peer_metadata.t, Connection_metadata.t) P2p_peer.Info.t) + list + tzresult + Lwt.t - val info: - #simple -> P2p_peer.Id.t -> + val info : + #simple -> + P2p_peer.Id.t -> (Peer_metadata.t, Connection_metadata.t) P2p_peer.Info.t tzresult Lwt.t - val events: - #streamed -> P2p_peer.Id.t -> + val events : + #streamed -> + P2p_peer.Id.t -> (P2p_peer.Pool_event.t list Lwt_stream.t * stopper) tzresult Lwt.t - val ban: #simple -> P2p_peer.Id.t -> unit tzresult Lwt.t + val ban : #simple -> P2p_peer.Id.t -> unit tzresult Lwt.t - val unban: #simple -> P2p_peer.Id.t -> unit tzresult Lwt.t + val unban : #simple -> P2p_peer.Id.t -> unit tzresult Lwt.t - val trust: #simple -> P2p_peer.Id.t -> unit tzresult Lwt.t + val trust : #simple -> P2p_peer.Id.t -> unit tzresult Lwt.t - val untrust: #simple -> P2p_peer.Id.t -> unit tzresult Lwt.t + val untrust : #simple -> P2p_peer.Id.t -> unit tzresult Lwt.t - val banned: #simple -> P2p_peer.Id.t -> bool tzresult Lwt.t + val banned : #simple -> P2p_peer.Id.t -> bool tzresult Lwt.t module S : sig - val list : - ([ `GET ], unit, - unit, < filters: P2p_peer.Filter.t list >, unit, - (P2p_peer.Id.t * (Peer_metadata.t, Connection_metadata.t) P2p_peer.Info.t) list) RPC_service.t + ( [`GET], + unit, + unit, + < filters : P2p_peer.Filter.t list >, + unit, + ( P2p_peer.Id.t + * (Peer_metadata.t, Connection_metadata.t) P2p_peer.Info.t ) + list ) + RPC_service.t val info : - ([ `GET ], unit, - unit * P2p_peer.Id.t, unit, unit, - (Peer_metadata.t, 
Connection_metadata.t) P2p_peer.Info.t) RPC_service.t + ( [`GET], + unit, + unit * P2p_peer.Id.t, + unit, + unit, + (Peer_metadata.t, Connection_metadata.t) P2p_peer.Info.t ) + RPC_service.t val events : - ([ `GET ], unit, - unit * P2p_peer.Id.t, < monitor: bool>, unit, - P2p_peer.Pool_event.t list) RPC_service.t + ( [`GET], + unit, + unit * P2p_peer.Id.t, + < monitor : bool >, + unit, + P2p_peer.Pool_event.t list ) + RPC_service.t val ban : - ([ `GET ], unit, - unit * P2p_peer.Id.t, unit, unit, - unit) RPC_service.t + ([`GET], unit, unit * P2p_peer.Id.t, unit, unit, unit) RPC_service.t val unban : - ([ `GET ], unit, - unit * P2p_peer.Id.t, unit, unit, - unit) RPC_service.t + ([`GET], unit, unit * P2p_peer.Id.t, unit, unit, unit) RPC_service.t val trust : - ([ `GET ], unit, - unit * P2p_peer.Id.t, unit, unit, - unit) RPC_service.t + ([`GET], unit, unit * P2p_peer.Id.t, unit, unit, unit) RPC_service.t val untrust : - ([ `GET ], unit, - unit * P2p_peer.Id.t, unit, unit, - unit) RPC_service.t + ([`GET], unit, unit * P2p_peer.Id.t, unit, unit, unit) RPC_service.t val banned : - ([ `GET ], unit, - unit * P2p_peer.Id.t, unit, unit, - bool) RPC_service.t - + ([`GET], unit, unit * P2p_peer.Id.t, unit, unit, bool) RPC_service.t end - end module ACL : sig - - val clear: #simple -> unit -> unit tzresult Lwt.t + val clear : #simple -> unit -> unit tzresult Lwt.t module S : sig - - val clear : - ([ `GET ], unit, - unit, unit, unit, - unit) RPC_service.t - + val clear : ([`GET], unit, unit, unit, unit, unit) RPC_service.t end - end diff --git a/src/lib_shell_services/peer_metadata.ml b/src/lib_shell_services/peer_metadata.ml index e6b524304601e0c5b73ec6f25cfa65bd8c810ed2..cc6f326276b958b8af3b5eb2a9197bce95f4ba7b 100644 --- a/src/lib_shell_services/peer_metadata.ml +++ b/src/lib_shell_services/peer_metadata.ml @@ -24,47 +24,62 @@ (*****************************************************************************) type counter = Z.t + let counter = Data_encoding.z -let ((+) : counter -> counter -> counter) = Z.add + +let (( + ) : counter -> counter -> counter) = Z.add let zero : counter = Z.zero + let one : counter = Z.one (* Distributed DB peer metadata *) -type messages = - { - mutable branch: counter ; - mutable head: counter ; - mutable block_header: counter ; - mutable operations: counter ; - mutable protocols: counter ; - mutable operation_hashes_for_block: counter ; - mutable operations_for_block: counter ; - mutable other: counter ; - } +type messages = { + mutable branch : counter; + mutable head : counter; + mutable block_header : counter; + mutable operations : counter; + mutable protocols : counter; + mutable operation_hashes_for_block : counter; + mutable operations_for_block : counter; + mutable other : counter +} let sent_requests_encoding = let open Data_encoding in (conv - (fun { branch ; head ; block_header ; - operations ; protocols ; - operation_hashes_for_block ; - operations_for_block ; - other ; } -> (branch, head, block_header, - operations, protocols, - operation_hashes_for_block, - operations_for_block, - other )) - (fun (branch, head, block_header, - operations, protocols, - operation_hashes_for_block, - operations_for_block, - other) -> { branch ; head ; block_header ; - operations ; protocols ; - operation_hashes_for_block ; - operations_for_block ; - other }) - ) + (fun { branch; + head; + block_header; + operations; + protocols; + operation_hashes_for_block; + operations_for_block; + other } -> + ( branch, + head, + block_header, + operations, + protocols, + 
operation_hashes_for_block, + operations_for_block, + other )) + (fun ( branch, + head, + block_header, + operations, + protocols, + operation_hashes_for_block, + operations_for_block, + other ) -> + { branch; + head; + block_header; + operations; + protocols; + operation_hashes_for_block; + operations_for_block; + other })) (obj8 (req "branch" counter) (req "head" counter) @@ -73,103 +88,110 @@ let sent_requests_encoding = (req "protocols" counter) (req "operation_hashes_for_block" counter) (req "operations_for_block" counter) - (req "other" counter) - ) + (req "other" counter)) type requests_kind = - | Branch | Head | Block_header | Operations - | Protocols | Operation_hashes_for_block - | Operations_for_block | Other - + | Branch + | Head + | Block_header + | Operations + | Protocols + | Operation_hashes_for_block + | Operations_for_block + | Other type requests = { - sent : messages ; - (** p2p sent messages of type requests *) - received : messages ; - (** p2p received messages of type requests *) - failed : messages ; - (** p2p messages of type requests that we failed to send *) - scheduled : messages ; - (** p2p messages ent via request scheduler *) + sent : messages; (** p2p sent messages of type requests *) + received : messages; (** p2p received messages of type requests *) + failed : messages; + (** p2p messages of type requests that we failed to send *) + scheduled : messages (** p2p messages ent via request scheduler *) } let requests_encoding = let open Data_encoding in (conv - (fun - { sent ; received ; - failed ; scheduled } -> (sent, received, - failed, scheduled)) - (fun (sent, received, - failed, scheduled) -> { sent ; received ; - failed ; scheduled }) - ) + (fun {sent; received; failed; scheduled} -> + (sent, received, failed, scheduled)) + (fun (sent, received, failed, scheduled) -> + {sent; received; failed; scheduled})) (obj4 (req "sent" sent_requests_encoding) (req "received" sent_requests_encoding) (req "failed" sent_requests_encoding) - (req "scheduled" sent_requests_encoding) - ) - + (req "scheduled" sent_requests_encoding)) (* Prevalidator peer metadata *) -type prevalidator_results = - { cannot_download : counter ; cannot_parse : counter ; - refused_by_prefilter : counter ; - refused_by_postfilter : counter ; - (* prevalidation results *) - applied : counter ; branch_delayed : counter ; - branch_refused : counter ; - refused : counter ; duplicate : counter ; outdated : counter } +type prevalidator_results = { + cannot_download : counter; + cannot_parse : counter; + refused_by_prefilter : counter; + refused_by_postfilter : counter; + (* prevalidation results *) + applied : counter; + branch_delayed : counter; + branch_refused : counter; + refused : counter; + duplicate : counter; + outdated : counter +} let prevalidator_results_encoding = let open Data_encoding in - (conv - (fun { cannot_download ; - cannot_parse ; - refused_by_prefilter ; - refused_by_postfilter ; - applied ; branch_delayed; - branch_refused ; - refused ; duplicate ; - outdated } -> (cannot_download, cannot_parse, - refused_by_prefilter, - refused_by_postfilter, - applied, branch_delayed, - branch_refused, - refused, duplicate, outdated)) - (fun (cannot_download, + conv + (fun { cannot_download; + cannot_parse; + refused_by_prefilter; + refused_by_postfilter; + applied; + branch_delayed; + branch_refused; + refused; + duplicate; + outdated } -> + ( cannot_download, + cannot_parse, + refused_by_prefilter, + refused_by_postfilter, + applied, + branch_delayed, + branch_refused, + refused, + 
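Every encoding in this file follows the same `Data_encoding` recipe: `conv` maps the record to a tuple and back, while `objN`/`req` name each field in the JSON and binary forms. A self-contained sketch of that pattern on a small invented record:

(* Standalone sketch of the conv/obj pattern used above. *)
type sample = { hits : Z.t ; misses : Z.t }

let sample_encoding : sample Data_encoding.t =
  let open Data_encoding in
  conv
    (fun {hits; misses} -> (hits, misses))
    (fun (hits, misses) -> {hits; misses})
    (obj2 (req "hits" z) (req "misses" z))

(* e.g. Data_encoding.Json.construct sample_encoding
     {hits = Z.one; misses = Z.zero}
   produces a JSON object with one named field per counter. *)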
duplicate, + outdated )) + (fun ( cannot_download, cannot_parse, refused_by_prefilter, refused_by_postfilter, - applied, branch_delayed, + applied, + branch_delayed, branch_refused, - refused, duplicate, - outdated) -> { cannot_download ; cannot_parse ; - refused_by_prefilter ; - refused_by_postfilter ; - applied ; branch_delayed; - branch_refused ; - refused ; duplicate ; outdated } - - ) - (obj10 - (req "cannot_download" counter) - (req "cannot_parse" counter) - (req "refused_by_prefilter" counter) - (req "refused_by_postfilter" counter) - (req "applied" counter) - (req "branch_delayed" counter) - (req "branch_refused" counter) - (req "refused" counter) - (req "duplicate" counter) - (req "outdated" counter) - ) - ) - - -type resource_kind = - | Block | Operations | Protocol + refused, + duplicate, + outdated ) -> + { cannot_download; + cannot_parse; + refused_by_prefilter; + refused_by_postfilter; + applied; + branch_delayed; + branch_refused; + refused; + duplicate; + outdated }) + (obj10 + (req "cannot_download" counter) + (req "cannot_parse" counter) + (req "refused_by_prefilter" counter) + (req "refused_by_postfilter" counter) + (req "applied" counter) + (req "branch_delayed" counter) + (req "branch_refused" counter) + (req "refused" counter) + (req "duplicate" counter) + (req "outdated" counter)) + +type resource_kind = Block | Operations | Protocol type advertisement = Head | Branch @@ -190,213 +212,183 @@ type metadata = | Received_advertisement of advertisement | Outdated_response (* TODO : unused *) (* Peer validator *) - | Valid_blocks | Old_heads + | Valid_blocks + | Old_heads (* Prevalidation *) - | Cannot_download | Cannot_parse + | Cannot_download + | Cannot_parse | Refused_by_prefilter | Refused_by_postfilter - | Applied | Branch_delayed + | Applied + | Branch_delayed | Branch_refused - | Refused | Duplicate | Outdated - - - + | Refused + | Duplicate + | Outdated type responses = { - mutable sent : messages ; - (** p2p sent messages of type responses *) - mutable failed : messages ; - (** p2p sent messages of type responses *) - mutable received : messages ; - (** p2p received responses *) - mutable unexpected : counter ; - (** p2p received responses that were unexpected *) - mutable outdated : counter ; - (** p2p received responses that are now outdated *) + mutable sent : messages; (** p2p sent messages of type responses *) + mutable failed : messages; (** p2p sent messages of type responses *) + mutable received : messages; (** p2p received responses *) + mutable unexpected : counter; + (** p2p received responses that were unexpected *) + mutable outdated : counter + (** p2p received responses that are now outdated *) } - let responses_encoding = let open Data_encoding in (conv - (fun - { sent ; failed ; received ; - unexpected ; outdated ; } -> (sent, failed, received, - unexpected, outdated)) - (fun - (sent, failed, received, - unexpected, outdated) -> { sent ; failed ; received ; - unexpected ; outdated }) - ) + (fun {sent; failed; received; unexpected; outdated} -> + (sent, failed, received, unexpected, outdated)) + (fun (sent, failed, received, unexpected, outdated) -> + {sent; failed; received; unexpected; outdated})) (obj5 (req "sent" sent_requests_encoding) (req "failed" sent_requests_encoding) (req "received" sent_requests_encoding) (req "unexpected" counter) - (req "outdated" counter) - ) - + (req "outdated" counter)) type unadvertised = { - mutable block : counter ; - (** requests for unadvertised block *) - mutable operations : counter ; - (** requests for 
unadvertised operations *) - mutable protocol : counter ; - (** requests for unadvertised protocol *) + mutable block : counter; (** requests for unadvertised block *) + mutable operations : counter; (** requests for unadvertised operations *) + mutable protocol : counter (** requests for unadvertised protocol *) } let unadvertised_encoding = let open Data_encoding in (conv - (fun - { block ; operations ; protocol ; } -> (block, operations, protocol)) - (fun - (block, operations, protocol) -> { block ; operations ; protocol ; }) - ) + (fun {block; operations; protocol} -> (block, operations, protocol)) + (fun (block, operations, protocol) -> {block; operations; protocol})) (obj3 (req "block" counter) (req "operations" counter) - (req "protocol" counter) - ) - + (req "protocol" counter)) -type advertisements_kind = { - mutable head : counter ; - mutable branch : counter ; -} +type advertisements_kind = {mutable head : counter; mutable branch : counter} let advertisements_kind_encoding = let open Data_encoding in (conv - (fun - { head ; branch ; } -> (head, branch)) - (fun - (head, branch) -> { head ; branch ; }) - ) - (obj2 - (req "head" counter) - (req "branch" counter) - ) + (fun {head; branch} -> (head, branch)) + (fun (head, branch) -> {head; branch})) + (obj2 (req "head" counter) (req "branch" counter)) type advertisements = { - mutable sent: advertisements_kind ; - mutable received: advertisements_kind ; + mutable sent : advertisements_kind; + mutable received : advertisements_kind } - let advertisements_encoding = let open Data_encoding in (conv - (fun - { sent ; received ; } -> (sent, received)) - (fun - (sent, received) -> { sent ; received ; }) - ) + (fun {sent; received} -> (sent, received)) + (fun (sent, received) -> {sent; received})) (obj2 (req "sent" advertisements_kind_encoding) - (req "received" advertisements_kind_encoding) - ) + (req "received" advertisements_kind_encoding)) type t = { - mutable responses : responses ; - (** responses sent/received *) - mutable requests : requests ; - (** requests sent/received *) - mutable valid_blocks : counter ; - (** new valid blocks advertized by a peer *) - mutable old_heads : counter ; - (** previously validated blocks from a peer *) - mutable prevalidator_results : prevalidator_results ; - (** prevalidator metadata *) - mutable unactivated_chains : counter ; - (** requests from unactivated chains *) - mutable inactive_chains : counter ; - (** advertise inactive chains *) - mutable future_blocks_advertised : counter ; - (** future blocks *) - mutable unadvertised : unadvertised ; - (** requests for unadvertised resources *) - mutable advertisements : advertisements ; - (** advertisements sent *) + mutable responses : responses; (** responses sent/received *) + mutable requests : requests; (** requests sent/received *) + mutable valid_blocks : counter; (** new valid blocks advertized by a peer *) + mutable old_heads : counter; (** previously validated blocks from a peer *) + mutable prevalidator_results : prevalidator_results; + (** prevalidator metadata *) + mutable unactivated_chains : counter; + (** requests from unactivated chains *) + mutable inactive_chains : counter; (** advertise inactive chains *) + mutable future_blocks_advertised : counter; (** future blocks *) + mutable unadvertised : unadvertised; + (** requests for unadvertised resources *) + mutable advertisements : advertisements (** advertisements sent *) } let empty () = let empty_request () = - { branch = zero ; head = zero ; block_header = zero ; - operations = 
zero ; protocols = zero ; - operation_hashes_for_block = zero ; - operations_for_block = zero ; - other = zero ; - } in - { - responses = { sent = empty_request () ; - failed = empty_request () ; - received = empty_request () ; - unexpected = zero ; - outdated = zero ; - } ; + { branch = zero; + head = zero; + block_header = zero; + operations = zero; + protocols = zero; + operation_hashes_for_block = zero; + operations_for_block = zero; + other = zero } + in + { responses = + { sent = empty_request (); + failed = empty_request (); + received = empty_request (); + unexpected = zero; + outdated = zero }; requests = - { sent = empty_request () ; - failed = empty_request () ; - scheduled = empty_request () ; - received = empty_request () ; - } ; - valid_blocks = zero ; - old_heads = zero ; + { sent = empty_request (); + failed = empty_request (); + scheduled = empty_request (); + received = empty_request () }; + valid_blocks = zero; + old_heads = zero; prevalidator_results = - { cannot_download = zero ; cannot_parse = zero ; - refused_by_prefilter = zero ; refused_by_postfilter = zero ; - applied = zero ; branch_delayed = zero ; branch_refused = zero ; - refused = zero ; duplicate = zero ; outdated = zero - } ; - unactivated_chains = zero ; - inactive_chains = zero ; - future_blocks_advertised = zero ; - unadvertised = {block = zero ; operations = zero ; protocol = zero } ; - advertisements = { sent = { head = zero ; branch = zero ; } ; - received = { head = zero ; branch = zero ; } } - } - + { cannot_download = zero; + cannot_parse = zero; + refused_by_prefilter = zero; + refused_by_postfilter = zero; + applied = zero; + branch_delayed = zero; + branch_refused = zero; + refused = zero; + duplicate = zero; + outdated = zero }; + unactivated_chains = zero; + inactive_chains = zero; + future_blocks_advertised = zero; + unadvertised = {block = zero; operations = zero; protocol = zero}; + advertisements = + { sent = {head = zero; branch = zero}; + received = {head = zero; branch = zero} } } let encoding = let open Data_encoding in (conv - (fun { responses ; requests ; - valid_blocks ; old_heads ; - prevalidator_results ; - unactivated_chains ; - inactive_chains ; - future_blocks_advertised ; - unadvertised ; + (fun { responses; + requests; + valid_blocks; + old_heads; + prevalidator_results; + unactivated_chains; + inactive_chains; + future_blocks_advertised; + unadvertised; advertisements } -> - ((responses, requests, - valid_blocks, old_heads, - prevalidator_results, - unactivated_chains, - inactive_chains, - future_blocks_advertised), - (unadvertised, - advertisements)) - ) - (fun ((responses, requests, - valid_blocks, old_heads, - prevalidator_results, - unactivated_chains, - inactive_chains, - future_blocks_advertised), - (unadvertised, - advertisements)) -> - { responses ; requests ; - valid_blocks ; old_heads ; - prevalidator_results ; - unactivated_chains ; - inactive_chains ; - future_blocks_advertised ; - unadvertised ; - advertisements ; } - ) - ) + ( ( responses, + requests, + valid_blocks, + old_heads, + prevalidator_results, + unactivated_chains, + inactive_chains, + future_blocks_advertised ), + (unadvertised, advertisements) )) + (fun ( ( responses, + requests, + valid_blocks, + old_heads, + prevalidator_results, + unactivated_chains, + inactive_chains, + future_blocks_advertised ), + (unadvertised, advertisements) ) -> + { responses; + requests; + valid_blocks; + old_heads; + prevalidator_results; + unactivated_chains; + inactive_chains; + future_blocks_advertised; + 
unadvertised; + advertisements })) (merge_objs (obj8 (req "responses" responses_encoding) @@ -406,22 +398,23 @@ let encoding = (req "prevalidator_results" prevalidator_results_encoding) (req "unactivated_chains" counter) (req "inactive_chains" counter) - (req "future_blocks_advertised" counter) - - ) + (req "future_blocks_advertised" counter)) (obj2 (req "unadvertised" unadvertised_encoding) - (req "advertisements" advertisements_encoding) - ) - ) + (req "advertisements" advertisements_encoding))) let incr_requests (msgs : messages) (req : requests_kind) = match req with - | Branch -> msgs.branch <- msgs.branch + one - | Head -> msgs.head <- msgs.head + one - | Block_header -> msgs.block_header <- msgs.block_header + one - | Operations -> msgs.operations <- msgs.operations + one - | Protocols -> msgs.protocols <- msgs.protocols + one + | Branch -> + msgs.branch <- msgs.branch + one + | Head -> + msgs.head <- msgs.head + one + | Block_header -> + msgs.block_header <- msgs.block_header + one + | Operations -> + msgs.operations <- msgs.operations + one + | Protocols -> + msgs.protocols <- msgs.protocols + one | Operation_hashes_for_block -> msgs.operation_hashes_for_block <- msgs.operation_hashes_for_block + one | Operations_for_block -> @@ -429,15 +422,15 @@ let incr_requests (msgs : messages) (req : requests_kind) = | Other -> msgs.other <- msgs.other + one +let incr_unadvertised {unadvertised = u; _} = function + | Block -> + u.block <- u.block + one + | Operations -> + u.operations <- u.operations + one + | Protocol -> + u.protocol <- u.protocol + one - -let incr_unadvertised { unadvertised = u ; _ } = function - | Block -> u.block <- u.block + one - | Operations -> u.operations <- u.operations + one - | Protocol -> u.protocol <- u.protocol + one - - -let incr ({responses = rsps ; requests = rqst ; _ } as m) metadata = +let incr ({responses = rsps; requests = rqst; _} as m) metadata = match metadata with (* requests *) | Received_request req -> @@ -458,20 +451,19 @@ let incr ({responses = rsps ; requests = rqst ; _ } as m) metadata = | Outdated_response -> rsps.outdated <- rsps.outdated + one (* Advertisements *) - | Sent_advertisement ad -> - begin match ad with - | Head -> - m.advertisements.sent.head <- m.advertisements.sent.head + one - | Branch -> - m.advertisements.sent.branch <- m.advertisements.sent.branch + one - end - | Received_advertisement ad -> - begin match ad with - | Head -> - m.advertisements.received.head <- m.advertisements.received.head + one - | Branch -> - m.advertisements.received.branch <- m.advertisements.received.branch + one - end + | Sent_advertisement ad -> ( + match ad with + | Head -> + m.advertisements.sent.head <- m.advertisements.sent.head + one + | Branch -> + m.advertisements.sent.branch <- m.advertisements.sent.branch + one ) + | Received_advertisement ad -> ( + match ad with + | Head -> + m.advertisements.received.head <- m.advertisements.received.head + one + | Branch -> + m.advertisements.received.branch <- + m.advertisements.received.branch + one ) (* Unexpected erroneous msg *) | Unactivated_chain -> m.unactivated_chains <- m.unactivated_chains + one @@ -479,7 +471,8 @@ let incr ({responses = rsps ; requests = rqst ; _ } as m) metadata = m.inactive_chains <- m.inactive_chains + one | Future_block -> m.future_blocks_advertised <- m.future_blocks_advertised + one - | Unadvertised u -> incr_unadvertised m u + | Unadvertised u -> + incr_unadvertised m u (* Peer validator *) | Valid_blocks -> m.valid_blocks <- m.valid_blocks + one @@ -490,14 +483,17 
@@ let incr ({responses = rsps ; requests = rqst ; _ } as m) metadata = m.prevalidator_results <- { m.prevalidator_results with cannot_download = m.prevalidator_results.cannot_download + one } - | Cannot_parse -> m.prevalidator_results <- + | Cannot_parse -> + m.prevalidator_results <- { m.prevalidator_results with cannot_parse = m.prevalidator_results.cannot_parse + one } - | Refused_by_prefilter -> m.prevalidator_results <- + | Refused_by_prefilter -> + m.prevalidator_results <- { m.prevalidator_results with refused_by_prefilter = m.prevalidator_results.refused_by_prefilter + one } - | Refused_by_postfilter -> m.prevalidator_results <- + | Refused_by_postfilter -> + m.prevalidator_results <- { m.prevalidator_results with refused_by_postfilter = m.prevalidator_results.refused_by_postfilter + one } @@ -526,16 +522,18 @@ let incr ({responses = rsps ; requests = rqst ; _ } as m) metadata = { m.prevalidator_results with outdated = m.prevalidator_results.outdated + one } - (* shortcuts to update sent/failed requests/responses *) -let update_requests { requests = { sent ; failed ; _ } ; _ } kind = function - | true -> incr_requests sent kind - | false -> incr_requests failed kind - -let update_responses { responses = { sent ; failed ; _ } ; _ } kind = function - | true -> incr_requests sent kind - | false -> incr_requests failed kind - +let update_requests {requests = {sent; failed; _}; _} kind = function + | true -> + incr_requests sent kind + | false -> + incr_requests failed kind + +let update_responses {responses = {sent; failed; _}; _} kind = function + | true -> + incr_requests sent kind + | false -> + incr_requests failed kind (* Scores computation *) (* TODO: @@ -550,7 +548,7 @@ let distributed_db_score _ = (* TODO *) 1.0 -let prevalidation_score { prevalidator_results = _ ; _ } = +let prevalidation_score {prevalidator_results = _; _} = (* TODO *) 1.0 diff --git a/src/lib_shell_services/peer_metadata.mli b/src/lib_shell_services/peer_metadata.mli index 36bc1f585fa95e96db2fb28ce3cb82b5722bb797..19f77047e75c9260b2a4f88e1c6ea517bf4eaa10 100644 --- a/src/lib_shell_services/peer_metadata.mli +++ b/src/lib_shell_services/peer_metadata.mli @@ -27,25 +27,29 @@ type t -val encoding: t Data_encoding.t -val empty : unit -> t - +val encoding : t Data_encoding.t +val empty : unit -> t (** the aggregate score function computed from the metadata collected for a peer *) val distributed_db_score : t -> float + val prevalidation_score : t -> float + val score : t -> float type requests_kind = - | Branch | Head | Block_header - | Operations | Protocols - | Operation_hashes_for_block | Operations_for_block + | Branch + | Head + | Block_header + | Operations + | Protocols + | Operation_hashes_for_block + | Operations_for_block | Other -type resource_kind = - | Block | Operations | Protocol +type resource_kind = Block | Operations | Protocol type advertisement = Head | Branch @@ -66,18 +70,23 @@ type metadata = | Received_advertisement of advertisement | Outdated_response (* TODO : unused *) (* Peer validator *) - | Valid_blocks | Old_heads + | Valid_blocks + | Old_heads (* Prevalidation *) - | Cannot_download | Cannot_parse + | Cannot_download + | Cannot_parse | Refused_by_prefilter | Refused_by_postfilter - | Applied | Branch_delayed + | Applied + | Branch_delayed | Branch_refused - | Refused | Duplicate | Outdated + | Refused + | Duplicate + | Outdated (** incr score counters . 
Used to compute the final score for a peer *) -val incr : t -> metadata -> unit -val update_requests : t -> requests_kind -> bool -> unit -val update_responses : t -> requests_kind -> bool -> unit +val incr : t -> metadata -> unit +val update_requests : t -> requests_kind -> bool -> unit +val update_responses : t -> requests_kind -> bool -> unit diff --git a/src/lib_shell_services/peer_validator_worker_state.ml b/src/lib_shell_services/peer_validator_worker_state.ml index 1ad21cfc0d4db7479848c8d5b20b8882e85941de..022cabc6b92b5e6fd85a4c64c735c857e83bb2e9 100644 --- a/src/lib_shell_services/peer_validator_worker_state.ml +++ b/src/lib_shell_services/peer_validator_worker_state.ml @@ -24,20 +24,22 @@ (*****************************************************************************) module Request = struct - type view = - | New_head of Block_hash.t - | New_branch of Block_hash.t * int + type view = New_head of Block_hash.t | New_branch of Block_hash.t * int let encoding = let open Data_encoding in union - [ case (Tag 0) ~title:"New_head" + [ case + (Tag 0) + ~title:"New_head" (obj2 (req "request" (constant "new_head")) (req "block" Block_hash.encoding)) (function New_head h -> Some ((), h) | _ -> None) - (fun ((), h) -> New_head h) ; - case (Tag 1) ~title:"New_branch" + (fun ((), h) -> New_head h); + case + (Tag 1) + ~title:"New_branch" (obj3 (req "request" (constant "new_branch")) (req "block" Block_hash.encoding) @@ -49,100 +51,131 @@ module Request = struct | New_head hash -> Format.fprintf ppf "New head %a" Block_hash.pp hash | New_branch (hash, len) -> - Format.fprintf ppf "New branch %a, locator length %d" - Block_hash.pp hash len + Format.fprintf + ppf + "New branch %a, locator length %d" + Block_hash.pp + hash + len end module Event = struct type t = - | Request of (Request.view * Worker_types.request_status * error list option) + | Request of + (Request.view * Worker_types.request_status * error list option) | Debug of string let level req = match req with - | Debug _ -> Internal_event.Debug - | Request _ -> Internal_event.Info + | Debug _ -> + Internal_event.Debug + | Request _ -> + Internal_event.Info let encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Debug" (obj1 (req "message" string)) (function Debug msg -> Some msg | _ -> None) - (fun msg -> Debug msg) ; - case (Tag 1) + (fun msg -> Debug msg); + case + (Tag 1) ~title:"Request" (obj2 (req "request" Request.encoding) (req "status" Worker_types.request_status_encoding)) (function Request (req, t, None) -> Some (req, t) | _ -> None) - (fun (req, t) -> Request (req, t, None)) ; - case (Tag 2) + (fun (req, t) -> Request (req, t, None)); + case + (Tag 2) ~title:"Failed request" (obj3 (req "error" RPC_error.encoding) (req "failed_request" Request.encoding) (req "status" Worker_types.request_status_encoding)) - (function Request (req, t, Some errs) -> Some (errs, req, t) | _ -> None) + (function + | Request (req, t, Some errs) -> Some (errs, req, t) | _ -> None) (fun (errs, req, t) -> Request (req, t, Some errs)) ] let pp ppf = function - | Debug msg -> Format.fprintf ppf "%s" msg - | Request (view, { pushed ; treated ; completed }, None) -> - Format.fprintf ppf - "@[<v 0>%a@,\ - Pushed: %a, Treated: %a, Completed: %a@]" - Request.pp view - Time.System.pp_hum pushed Time.System.pp_hum treated Time.System.pp_hum completed - | Request (view, { pushed ; treated ; completed }, Some errors) -> - Format.fprintf ppf - "@[<v 0>%a@,\ - Pushed: %a, Treated: %a, Failed: %a@,\ - %a@]" - Request.pp view - 
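Against the interface above, the metadata value is a mutable bag of counters that the shell bumps as events are observed and later folds into a score (the score functions are still TODO stubs in this file). A hedged usage sketch with arbitrarily chosen events; `demo_metadata` is an invented name.

(* Sketch: bump a few counters on a fresh metadata record and read the
   resulting score. *)
let demo_metadata () =
  let open Peer_metadata in
  let m = empty () in
  incr m Valid_blocks ;
  incr m Old_heads ;
  (* [true] records a successfully sent request, [false] a failed one. *)
  update_requests m Block_header true ;
  score m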
Time.System.pp_hum pushed Time.System.pp_hum treated Time.System.pp_hum completed - (Format.pp_print_list Error_monad.pp) errors + | Debug msg -> + Format.fprintf ppf "%s" msg + | Request (view, {pushed; treated; completed}, None) -> + Format.fprintf + ppf + "@[<v 0>%a@,Pushed: %a, Treated: %a, Completed: %a@]" + Request.pp + view + Time.System.pp_hum + pushed + Time.System.pp_hum + treated + Time.System.pp_hum + completed + | Request (view, {pushed; treated; completed}, Some errors) -> + Format.fprintf + ppf + "@[<v 0>%a@,Pushed: %a, Treated: %a, Failed: %a@,%a@]" + Request.pp + view + Time.System.pp_hum + pushed + Time.System.pp_hum + treated + Time.System.pp_hum + completed + (Format.pp_print_list Error_monad.pp) + errors end module Worker_state = struct - type pipeline_length = { fetched_header_length : int ; - fetched_block_length : int ; } + type pipeline_length = { + fetched_header_length : int; + fetched_block_length : int + } let pipeline_length_encoding = let open Data_encoding in conv (function - { fetched_header_length ; - fetched_block_length } -> - (fetched_header_length, - fetched_block_length)) - (function (fetched_header_length, - fetched_block_length) -> - {fetched_header_length ; - fetched_block_length }) - (obj2 - (req "fetched_headers" int31) - (req "fetched_blocks" int31) - ) + | {fetched_header_length; fetched_block_length} -> + (fetched_header_length, fetched_block_length)) + (function + | (fetched_header_length, fetched_block_length) -> + {fetched_header_length; fetched_block_length}) + (obj2 (req "fetched_headers" int31) (req "fetched_blocks" int31)) + type view = { + bootstrapped : bool; + pipeline_length : pipeline_length; + mutable last_validated_head : Block_hash.t; + mutable last_advertised_head : Block_hash.t + } - type view = - { bootstrapped : bool ; - pipeline_length : pipeline_length ; - mutable last_validated_head: Block_hash.t ; - mutable last_advertised_head: Block_hash.t } let encoding = let open Data_encoding in conv - (function { bootstrapped ; pipeline_length ; - last_validated_head ; last_advertised_head } -> - (bootstrapped, pipeline_length, - last_validated_head, last_advertised_head)) - (function (bootstrapped, pipeline_length, - last_validated_head, last_advertised_head) -> - { bootstrapped ; pipeline_length ; - last_validated_head ; last_advertised_head }) + (function + | { bootstrapped; + pipeline_length; + last_validated_head; + last_advertised_head } -> + ( bootstrapped, + pipeline_length, + last_validated_head, + last_advertised_head )) + (function + | ( bootstrapped, + pipeline_length, + last_validated_head, + last_advertised_head ) -> + { bootstrapped; + pipeline_length; + last_validated_head; + last_advertised_head }) (obj4 (req "bootstrapped" bool) (req "pipelines" pipeline_length_encoding) @@ -150,7 +183,8 @@ module Worker_state = struct (req "last_advertised_head" Block_hash.encoding)) let pp ppf state = - Format.fprintf ppf + Format.fprintf + ppf "@[<v 0>Bootstrapped: %s@,\ Pipeline_length: %d - %d @,\ Last validated head: %a@,\ @@ -158,7 +192,8 @@ module Worker_state = struct (if state.bootstrapped then "yes" else "no") state.pipeline_length.fetched_header_length state.pipeline_length.fetched_block_length - Block_hash.pp state.last_validated_head - Block_hash.pp state.last_advertised_head - + Block_hash.pp + state.last_validated_head + Block_hash.pp + state.last_advertised_head end diff --git a/src/lib_shell_services/peer_validator_worker_state.mli b/src/lib_shell_services/peer_validator_worker_state.mli index 
5c0a4065ca9cb79fbf5fce21d7ae48b49232edb8..0a8fd93bf8b14c181b7ec46493bf9f92cc3f9302 100644 --- a/src/lib_shell_services/peer_validator_worker_state.mli +++ b/src/lib_shell_services/peer_validator_worker_state.mli @@ -24,32 +24,42 @@ (*****************************************************************************) module Request : sig - type view = - | New_head of Block_hash.t - | New_branch of Block_hash.t * int + type view = New_head of Block_hash.t | New_branch of Block_hash.t * int + val encoding : view Data_encoding.encoding + val pp : Format.formatter -> view -> unit end module Event : sig type t = - | Request of (Request.view * Worker_types.request_status * error list option) + | Request of + (Request.view * Worker_types.request_status * error list option) | Debug of string + val level : t -> Internal_event.level + val encoding : t Data_encoding.encoding + val pp : Format.formatter -> t -> unit end module Worker_state : sig - type pipeline_length = { fetched_header_length : int ; - fetched_block_length : int ; } + type pipeline_length = { + fetched_header_length : int; + fetched_block_length : int + } val pipeline_length_encoding : pipeline_length Data_encoding.encoding - type view = - { bootstrapped : bool ; - pipeline_length : pipeline_length ; - mutable last_validated_head: Block_hash.t ; - mutable last_advertised_head: Block_hash.t } + + type view = { + bootstrapped : bool; + pipeline_length : pipeline_length; + mutable last_validated_head : Block_hash.t; + mutable last_advertised_head : Block_hash.t + } + val encoding : view Data_encoding.encoding + val pp : Format.formatter -> view -> unit end diff --git a/src/lib_shell_services/prevalidator_worker_state.ml b/src/lib_shell_services/prevalidator_worker_state.ml index 890c3522ff58a3c0b034abb200ec98f3983eb315..afdf8c7127ab1d69a252f264cdca1208aea8b11c 100644 --- a/src/lib_shell_services/prevalidator_worker_state.ml +++ b/src/lib_shell_services/prevalidator_worker_state.ml @@ -30,6 +30,7 @@ module Request = struct | Inject : Operation.t -> unit t | Arrived : Operation_hash.t * Operation.t -> unit t | Advertise : unit t + type view = View : _ t -> view let view req = View req @@ -37,143 +38,181 @@ module Request = struct let encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Flush" (obj2 (req "request" (constant "flush")) (req "block" Block_hash.encoding)) (function View (Flush hash) -> Some ((), hash) | _ -> None) - (fun ((), hash) -> View (Flush hash)) ; - case (Tag 1) + (fun ((), hash) -> View (Flush hash)); + case + (Tag 1) ~title:"Notify" (obj3 (req "request" (constant "notify")) (req "peer" P2p_peer.Id.encoding) (req "mempool" Mempool.encoding)) - (function View (Notify (peer, mempool)) -> Some ((), peer, mempool) | _ -> None) - (fun ((), peer, mempool) -> View (Notify (peer, mempool))) ; - case (Tag 2) + (function + | View (Notify (peer, mempool)) -> + Some ((), peer, mempool) + | _ -> + None) + (fun ((), peer, mempool) -> View (Notify (peer, mempool))); + case + (Tag 2) ~title:"Inject" (obj2 (req "request" (constant "inject")) (req "operation" Operation.encoding)) (function View (Inject op) -> Some ((), op) | _ -> None) - (fun ((), op) -> View (Inject op)) ; - case (Tag 3) + (fun ((), op) -> View (Inject op)); + case + (Tag 3) ~title:"Arrived" (obj3 (req "request" (constant "arrived")) (req "operation_hash" Operation_hash.encoding) (req "operation" Operation.encoding)) - (function View (Arrived (oph, op)) -> Some ((), oph, op) | _ -> None) - (fun ((), oph, op) -> View (Arrived (oph, op))) ; - 
case (Tag 4) + (function + | View (Arrived (oph, op)) -> Some ((), oph, op) | _ -> None) + (fun ((), oph, op) -> View (Arrived (oph, op))); + case + (Tag 4) ~title:"Advertise" (obj1 (req "request" (constant "advertise"))) (function View Advertise -> Some () | _ -> None) (fun () -> View Advertise) ] - let pp ppf (View r) = match r with + let pp ppf (View r) = + match r with | Flush hash -> - Format.fprintf ppf "switching to new head %a" - Block_hash.pp hash - | Notify (id, { Mempool.known_valid ; pending }) -> - Format.fprintf ppf "@[<v 2>notified by %a of operations" - P2p_peer.Id.pp id ; + Format.fprintf ppf "switching to new head %a" Block_hash.pp hash + | Notify (id, {Mempool.known_valid; pending}) -> + Format.fprintf + ppf + "@[<v 2>notified by %a of operations" + P2p_peer.Id.pp + id ; List.iter (fun oph -> - Format.fprintf ppf "@,%a (applied)" - Operation_hash.pp oph) + Format.fprintf ppf "@,%a (applied)" Operation_hash.pp oph) known_valid ; List.iter (fun oph -> - Format.fprintf ppf "@,%a (pending)" - Operation_hash.pp oph) + Format.fprintf ppf "@,%a (pending)" Operation_hash.pp oph) (Operation_hash.Set.elements pending) ; Format.fprintf ppf "@]" | Inject op -> - Format.fprintf ppf "injecting operation %a" - Operation_hash.pp (Operation.hash op) + Format.fprintf + ppf + "injecting operation %a" + Operation_hash.pp + (Operation.hash op) | Arrived (oph, _) -> - Format.fprintf ppf "operation %a arrived" - Operation_hash.pp oph + Format.fprintf ppf "operation %a arrived" Operation_hash.pp oph | Advertise -> Format.fprintf ppf "advertising pending operations" end module Event = struct type t = - | Request of (Request.view * Worker_types.request_status * error list option) + | Request of + (Request.view * Worker_types.request_status * error list option) | Debug of string let level req = let open Request in match req with - | Debug _ -> Internal_event.Debug - | Request (View (Flush _), _, _) -> Internal_event.Notice - | Request (View (Notify _), _, _) -> Internal_event.Debug - | Request (View (Inject _), _, _) -> Internal_event.Notice - | Request (View (Arrived _), _, _) -> Internal_event.Debug - | Request (View Advertise, _, _) -> Internal_event.Debug + | Debug _ -> + Internal_event.Debug + | Request (View (Flush _), _, _) -> + Internal_event.Notice + | Request (View (Notify _), _, _) -> + Internal_event.Debug + | Request (View (Inject _), _, _) -> + Internal_event.Notice + | Request (View (Arrived _), _, _) -> + Internal_event.Debug + | Request (View Advertise, _, _) -> + Internal_event.Debug let encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Debug" (obj1 (req "message" string)) (function Debug msg -> Some msg | _ -> None) - (fun msg -> Debug msg) ; - case (Tag 1) + (fun msg -> Debug msg); + case + (Tag 1) ~title:"Request" (obj2 (req "request" Request.encoding) (req "status" Worker_types.request_status_encoding)) (function Request (req, t, None) -> Some (req, t) | _ -> None) - (fun (req, t) -> Request (req, t, None)) ; - case (Tag 2) + (fun (req, t) -> Request (req, t, None)); + case + (Tag 2) ~title:"Failed request" (obj3 (req "error" RPC_error.encoding) (req "failed_request" Request.encoding) (req "status" Worker_types.request_status_encoding)) - (function Request (req, t, Some errs) -> Some (errs, req, t) | _ -> None) + (function + | Request (req, t, Some errs) -> Some (errs, req, t) | _ -> None) (fun (errs, req, t) -> Request (req, t, Some errs)) ] let pp ppf = function - | Debug msg -> Format.fprintf ppf "%s" msg - | Request (view, { pushed ; 
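Both worker-state modules serialise their sum types with the same `Data_encoding.union` idiom: one numbered `case` per constructor, discriminated by a `constant` tag field. A compact standalone sketch of that idiom on an invented `signal` type:

(* Standalone sketch of the union/case pattern used by the Request and
   Event encodings above. *)
type signal = Ping | Text of string

let signal_encoding : signal Data_encoding.t =
  let open Data_encoding in
  union
    [ case
        (Tag 0)
        ~title:"Ping"
        (obj1 (req "kind" (constant "ping")))
        (function Ping -> Some () | _ -> None)
        (fun () -> Ping);
      case
        (Tag 1)
        ~title:"Text"
        (obj2 (req "kind" (constant "text")) (req "message" string))
        (function Text s -> Some ((), s) | _ -> None)
        (fun ((), s) -> Text s) ]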
treated ; completed }, None) -> - Format.fprintf ppf - "@[<v 0>%a@,\ - Pushed: %a, Treated: %a, Completed: %a@]" - Request.pp view - Time.System.pp_hum pushed Time.System.pp_hum treated Time.System.pp_hum completed - | Request (view, { pushed ; treated ; completed }, Some errors) -> - Format.fprintf ppf - "@[<v 0>%a@,\ - Pushed: %a, Treated: %a, Failed: %a@,\ - %a@]" - Request.pp view - Time.System.pp_hum pushed Time.System.pp_hum treated Time.System.pp_hum completed - (Format.pp_print_list Error_monad.pp) errors + | Debug msg -> + Format.fprintf ppf "%s" msg + | Request (view, {pushed; treated; completed}, None) -> + Format.fprintf + ppf + "@[<v 0>%a@,Pushed: %a, Treated: %a, Completed: %a@]" + Request.pp + view + Time.System.pp_hum + pushed + Time.System.pp_hum + treated + Time.System.pp_hum + completed + | Request (view, {pushed; treated; completed}, Some errors) -> + Format.fprintf + ppf + "@[<v 0>%a@,Pushed: %a, Treated: %a, Failed: %a@,%a@]" + Request.pp + view + Time.System.pp_hum + pushed + Time.System.pp_hum + treated + Time.System.pp_hum + completed + (Format.pp_print_list Error_monad.pp) + errors end module Worker_state = struct - type view = - { head : Block_hash.t ; - timestamp : Time.System.t ; - fetching : Operation_hash.Set.t ; - pending : Operation_hash.Set.t ; - applied : Operation_hash.t list ; - delayed : Operation_hash.Set.t } + type view = { + head : Block_hash.t; + timestamp : Time.System.t; + fetching : Operation_hash.Set.t; + pending : Operation_hash.Set.t; + applied : Operation_hash.t list; + delayed : Operation_hash.Set.t + } let encoding = let open Data_encoding in conv - (fun { head ; timestamp ; fetching ; pending ; applied ; delayed } -> - (head, timestamp, fetching, pending, applied, delayed)) + (fun {head; timestamp; fetching; pending; applied; delayed} -> + (head, timestamp, fetching, pending, applied, delayed)) (fun (head, timestamp, fetching, pending, applied, delayed) -> - { head ; timestamp ; fetching ; pending ; applied ; delayed }) + {head; timestamp; fetching; pending; applied; delayed}) (obj6 (req "head" Block_hash.encoding) (req "timestamp" Time.System.encoding) @@ -183,14 +222,14 @@ module Worker_state = struct (req "delayed" Operation_hash.Set.encoding)) let pp ppf view = - Format.fprintf ppf - "@[<v 0>\ - Head: %a@,\ - Timestamp: %a@, - @[<v 2>Fetching: %a@]@, - @[<v 2>Pending: %a@]@, - @[<v 2>Applied: %a@]@, - @[<v 2>Delayed: %a@]@]" + Format.fprintf + ppf + "@[<v 0>Head: %a@,\ + Timestamp: %a@,\n\ + \ @[<v 2>Fetching: %a@]@,\n\ + \ @[<v 2>Pending: %a@]@,\n\ + \ @[<v 2>Applied: %a@]@,\n\ + \ @[<v 2>Delayed: %a@]@]" Block_hash.pp view.head Time.System.pp_hum @@ -198,9 +237,9 @@ module Worker_state = struct (Format.pp_print_list Operation_hash.pp) (Operation_hash.Set.elements view.fetching) (Format.pp_print_list Operation_hash.pp) - (Operation_hash.Set.elements view.pending) + (Operation_hash.Set.elements view.pending) (Format.pp_print_list Operation_hash.pp) view.applied (Format.pp_print_list Operation_hash.pp) - (Operation_hash.Set.elements view.delayed) + (Operation_hash.Set.elements view.delayed) end diff --git a/src/lib_shell_services/prevalidator_worker_state.mli b/src/lib_shell_services/prevalidator_worker_state.mli index 7af9a0bf7fed2e54faa5cada8987062f5cdc42fb..25d030af93e96e36e33b40afa80f4def0e3ac092 100644 --- a/src/lib_shell_services/prevalidator_worker_state.mli +++ b/src/lib_shell_services/prevalidator_worker_state.mli @@ -30,29 +30,40 @@ module Request : sig | Inject : Operation.t -> unit t | Arrived : Operation_hash.t * 
Operation.t -> unit t | Advertise : unit t + type view = View : _ t -> view + val view : 'a t -> view + val encoding : view Data_encoding.t + val pp : Format.formatter -> view -> unit end module Event : sig type t = - | Request of (Request.view * Worker_types.request_status * error list option) + | Request of + (Request.view * Worker_types.request_status * error list option) | Debug of string + val level : t -> Internal_event.level + val encoding : t Data_encoding.t + val pp : Format.formatter -> t -> unit end module Worker_state : sig - type view = - { head : Block_hash.t ; - timestamp : Time.System.t ; - fetching : Operation_hash.Set.t ; - pending : Operation_hash.Set.t ; - applied : Operation_hash.t list ; - delayed : Operation_hash.Set.t } + type view = { + head : Block_hash.t; + timestamp : Time.System.t; + fetching : Operation_hash.Set.t; + pending : Operation_hash.Set.t; + applied : Operation_hash.t list; + delayed : Operation_hash.Set.t + } + val encoding : view Data_encoding.t + val pp : Format.formatter -> view -> unit end diff --git a/src/lib_shell_services/protocol_services.ml b/src/lib_shell_services/protocol_services.ml index e79f4925ee45d3a1e20ef74d5ca566cec7f2d273..7a63dd4870092765a379ac2f7cc0a9c0ef1e4b65 100644 --- a/src/lib_shell_services/protocol_services.ml +++ b/src/lib_shell_services/protocol_services.ml @@ -26,36 +26,32 @@ open Data_encoding module S = struct - let protocols_arg = Protocol_hash.rpc_arg let contents = RPC_service.get_service - ~query: RPC_query.empty - ~output: Protocol.encoding + ~query:RPC_query.empty + ~output:Protocol.encoding RPC_path.(root / "protocols" /: protocols_arg) let list = RPC_service.get_service - ~query: RPC_query.empty - ~output: (list Protocol_hash.encoding) + ~query:RPC_query.empty + ~output:(list Protocol_hash.encoding) RPC_path.(root / "protocols") let fetch = RPC_service.get_service - ~description: "Fetch a protocol from the network." - ~query: RPC_query.empty - ~output: unit + ~description:"Fetch a protocol from the network." 
+ ~query:RPC_query.empty + ~output:unit RPC_path.(root / "fetch_protocol" /: protocols_arg) - end open RPC_context -let contents ctxt h = - make_call1 S.contents ctxt h () () -let list ctxt = - make_call S.list ctxt () () () +let contents ctxt h = make_call1 S.contents ctxt h () () + +let list ctxt = make_call S.list ctxt () () () -let fetch ctxt h = - make_call1 S.fetch ctxt h () () +let fetch ctxt h = make_call1 S.fetch ctxt h () () diff --git a/src/lib_shell_services/protocol_services.mli b/src/lib_shell_services/protocol_services.mli index 8ad6747e91cb806704babf3fbcbdbec2b6b7aad1..7c53a601863752b849fc9d905d7c6522103262ba 100644 --- a/src/lib_shell_services/protocol_services.mli +++ b/src/lib_shell_services/protocol_services.mli @@ -25,33 +25,25 @@ open RPC_context -val contents: - #simple -> Protocol_hash.t -> Protocol.t tzresult Lwt.t +val contents : #simple -> Protocol_hash.t -> Protocol.t tzresult Lwt.t -val list: - #simple -> - Protocol_hash.t list tzresult Lwt.t +val list : #simple -> Protocol_hash.t list tzresult Lwt.t -val fetch: - #simple -> - Protocol_hash.t -> - unit tzresult Lwt.t +val fetch : #simple -> Protocol_hash.t -> unit tzresult Lwt.t module S : sig - - val contents: - ([ `GET ], unit, - unit * Protocol_hash.t, unit, unit, - Protocol.t) RPC_service.t - - val list: - ([ `GET ], unit, - unit, unit, unit, - Protocol_hash.t list) RPC_service.t - - val fetch: - ([ `GET ], unit, - unit * Protocol_hash.t, unit, unit, - unit) RPC_service.t - + val contents : + ( [`GET], + unit, + unit * Protocol_hash.t, + unit, + unit, + Protocol.t ) + RPC_service.t + + val list : + ([`GET], unit, unit, unit, unit, Protocol_hash.t list) RPC_service.t + + val fetch : + ([`GET], unit, unit * Protocol_hash.t, unit, unit, unit) RPC_service.t end diff --git a/src/lib_shell_services/shell_services.ml b/src/lib_shell_services/shell_services.ml index b57a8dd2321b04927f9461649350f37ab00cad81..89fc8f5c15a7fdda5ade724336d8bbe3169c2c46 100644 --- a/src/lib_shell_services/shell_services.ml +++ b/src/lib_shell_services/shell_services.ml @@ -24,17 +24,15 @@ (*****************************************************************************) type chain = Chain_services.chain + type block = Block_services.block module Chain = Chain_services module Blocks = Chain.Blocks module Invalid_blocks = Chain.Invalid_blocks module Mempool = Chain.Mempool - module Protocol = Protocol_services - module Monitor = Monitor_services module Injection = Injection_services - module P2p = P2p_services module Worker = Worker_services diff --git a/src/lib_shell_services/shell_services.mli b/src/lib_shell_services/shell_services.mli index b57a8dd2321b04927f9461649350f37ab00cad81..89fc8f5c15a7fdda5ade724336d8bbe3169c2c46 100644 --- a/src/lib_shell_services/shell_services.mli +++ b/src/lib_shell_services/shell_services.mli @@ -24,17 +24,15 @@ (*****************************************************************************) type chain = Chain_services.chain + type block = Block_services.block module Chain = Chain_services module Blocks = Chain.Blocks module Invalid_blocks = Chain.Invalid_blocks module Mempool = Chain.Mempool - module Protocol = Protocol_services - module Monitor = Monitor_services module Injection = Injection_services - module P2p = P2p_services module Worker = Worker_services diff --git a/src/lib_shell_services/stat_services.ml b/src/lib_shell_services/stat_services.ml index aa18c05e545f111eb97249abfe4a2dfc301d618d..342579fc0dc936cd3ede8f206276e67b5d1b9a1a 100644 --- a/src/lib_shell_services/stat_services.ml +++ 
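The protocol services follow the same two-layer shape as the P2P ones: typed service values in `S` plus thin wrappers over `#RPC_context.simple`. As an illustrative sketch only (the helper name and control flow are assumptions, not part of this patch), a caller could fetch a protocol it does not yet know about like this:

(* Sketch: trigger a fetch only when the node does not already list the
   protocol.  [cctxt] is any #RPC_context.simple. *)
open Error_monad

let ensure_protocol (cctxt : #RPC_context.simple) (ph : Protocol_hash.t) :
    unit tzresult Lwt.t =
  Protocol_services.list cctxt
  >>=? fun known ->
  if List.exists (Protocol_hash.equal ph) known then return_unit
  else Protocol_services.fetch cctxt ph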
b/src/lib_shell_services/stat_services.ml @@ -25,30 +25,64 @@ open Gc let gc_stat_encoding = - let open Data_encoding in + let open Data_encoding in conv - (fun - { minor_words ; promoted_words ; major_words ; - minor_collections ; major_collections ; - heap_words ; heap_chunks ; live_words ; live_blocks ; - free_words ; free_blocks ; largest_free ; fragments ; - compactions ; top_heap_words ; stack_size ; } -> - ((minor_words, promoted_words, major_words, minor_collections, - major_collections), - ((heap_words, heap_chunks, live_words, live_blocks, free_words), - (free_blocks, largest_free, fragments, compactions, - top_heap_words, stack_size)))) - (fun - ((minor_words, promoted_words, major_words, minor_collections, - major_collections), - ((heap_words, heap_chunks, live_words, live_blocks, free_words), - (free_blocks, largest_free, fragments, compactions, - top_heap_words, stack_size))) -> - { minor_words ; promoted_words ; major_words ; - minor_collections ; major_collections ; - heap_words ; heap_chunks ; live_words ; live_blocks ; - free_words ; free_blocks ; largest_free ; fragments ; - compactions ; top_heap_words ; stack_size ; }) + (fun { minor_words; + promoted_words; + major_words; + minor_collections; + major_collections; + heap_words; + heap_chunks; + live_words; + live_blocks; + free_words; + free_blocks; + largest_free; + fragments; + compactions; + top_heap_words; + stack_size } -> + ( ( minor_words, + promoted_words, + major_words, + minor_collections, + major_collections ), + ( (heap_words, heap_chunks, live_words, live_blocks, free_words), + ( free_blocks, + largest_free, + fragments, + compactions, + top_heap_words, + stack_size ) ) )) + (fun ( ( minor_words, + promoted_words, + major_words, + minor_collections, + major_collections ), + ( (heap_words, heap_chunks, live_words, live_blocks, free_words), + ( free_blocks, + largest_free, + fragments, + compactions, + top_heap_words, + stack_size ) ) ) -> + { minor_words; + promoted_words; + major_words; + minor_collections; + major_collections; + heap_words; + heap_chunks; + live_words; + live_blocks; + free_words; + free_blocks; + largest_free; + fragments; + compactions; + top_heap_words; + stack_size }) (merge_objs (obj5 (req "minor_words" float) @@ -69,22 +103,20 @@ let gc_stat_encoding = (req "fragments" int31) (req "compactions" int31) (req "top_heap_words" int31) - (req "stack_size" int31))) - ) + (req "stack_size" int31)))) let proc_stat_encoding = let open Memory in - let open Data_encoding in + let open Data_encoding in union ~tag_size:`Uint8 - [ case (Tag 0) + [ case + (Tag 0) (conv - (fun { page_size ; size ; resident ; shared ; text ; - lib ; data ; dt ; } -> - (page_size , size, resident, shared, text, lib, data, dt)) - ( fun (page_size , size, resident, shared, text, lib, data, dt) -> - { page_size ; size ; resident ; shared ; text ; - lib ; data ; dt ; }) + (fun {page_size; size; resident; shared; text; lib; data; dt} -> + (page_size, size, resident, shared, text, lib, data, dt)) + (fun (page_size, size, resident, shared, text, lib, data, dt) -> + {page_size; size; resident; shared; text; lib; data; dt}) (obj8 (req "page_size" int31) (req "size" int64) @@ -96,43 +128,36 @@ let proc_stat_encoding = (req "dt" int64))) ~title:"Linux_proc_statm" (function Statm x -> Some x | _ -> None) - (function res -> Statm res) ; - case (Tag 1) + (function res -> Statm res); + case + (Tag 1) (conv - (fun { page_size ; mem ; resident } -> - (page_size , mem, resident)) - ( fun (page_size , mem, resident) -> - { page_size ; 
mem ; resident }) + (fun {page_size; mem; resident} -> (page_size, mem, resident)) + (fun (page_size, mem, resident) -> {page_size; mem; resident}) (obj3 (req "page_size" int31) (req "mem" float) (req "resident" int64))) ~title:"Darwin_ps" (function Ps x -> Some x | _ -> None) - (function res -> Ps res) - ] + (function res -> Ps res) ] module S = struct - let gc = RPC_service.get_service ~description:"Gets stats from the OCaml Garbage Collector" - ~query: RPC_query.empty + ~query:RPC_query.empty ~output:gc_stat_encoding RPC_path.(root / "stats" / "gc") let memory = RPC_service.get_service ~description:"Gets memory usage stats" - ~query: RPC_query.empty + ~query:RPC_query.empty ~output:proc_stat_encoding RPC_path.(root / "stats" / "memory") - - end -let gc ctxt = - RPC_context.make_call S.gc ctxt () () () +let gc ctxt = RPC_context.make_call S.gc ctxt () () () -let memory ctxt = - RPC_context.make_call S.memory ctxt () () () +let memory ctxt = RPC_context.make_call S.memory ctxt () () () diff --git a/src/lib_shell_services/stat_services.mli b/src/lib_shell_services/stat_services.mli index d6a3f1f7d483ac62761f77afb69e619f10cd437b..2790c271cbd787e994d3c959ccee6f959fd5e9a4 100644 --- a/src/lib_shell_services/stat_services.mli +++ b/src/lib_shell_services/stat_services.mli @@ -23,16 +23,12 @@ (*****************************************************************************) module S : sig - val gc: - ([ `GET ], unit, unit, unit, unit, Gc.stat) RPC_service.service - - val memory: - ([ `GET ], unit, unit, unit, unit, Memory.mem_stats) RPC_service.service + val gc : ([`GET], unit, unit, unit, unit, Gc.stat) RPC_service.service + val memory : + ([`GET], unit, unit, unit, unit, Memory.mem_stats) RPC_service.service end -val gc: - #RPC_context.simple -> Gc.stat Error_monad.tzresult Lwt.t +val gc : #RPC_context.simple -> Gc.stat Error_monad.tzresult Lwt.t -val memory: - #RPC_context.simple -> Memory.mem_stats Error_monad.tzresult Lwt.t +val memory : #RPC_context.simple -> Memory.mem_stats Error_monad.tzresult Lwt.t diff --git a/src/lib_shell_services/state_logging.ml b/src/lib_shell_services/state_logging.ml index 7c54c431a0cb846e3531a02df66e447fff46ebc2..d437c46b05d78067efc7d398633ca12ee2f5b35d 100644 --- a/src/lib_shell_services/state_logging.ml +++ b/src/lib_shell_services/state_logging.ml @@ -23,6 +23,8 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make_semantic(struct let name = "node.state" end) +include Internal_event.Legacy_logging.Make_semantic (struct + let name = "node.state" +end) let chain_id = Tag.def ~doc:"Chain ID" "chain_id" Chain_id.pp diff --git a/src/lib_shell_services/state_logging.mli b/src/lib_shell_services/state_logging.mli index a529af64c1b9f04f5c93e58f2004ca54c81128c1..f24197fcd6a6be28aceb6e8f2c3f8a96fb3f9a0d 100644 --- a/src/lib_shell_services/state_logging.mli +++ b/src/lib_shell_services/state_logging.mli @@ -25,4 +25,4 @@ include Internal_event.Legacy_logging.SEMLOG -val chain_id: Chain_id.t Tag.def +val chain_id : Chain_id.t Tag.def diff --git a/src/lib_shell_services/validation_errors.ml b/src/lib_shell_services/validation_errors.ml index 85b9b0c4927f4ff5d47099801b45fcef9cad4c73..9992e94c6ec453c36ac69146f3f152dcaa18ec00 100644 --- a/src/lib_shell_services/validation_errors.ml +++ b/src/lib_shell_services/validation_errors.ml @@ -26,11 +26,16 @@ (***************** Prevalidation errors ***********************************) type error += Parse_error + type error += Too_many_operations -type 
error += Oversized_operation of { size: int ; max: int } -type error += Future_block_header of { block: Block_hash.t ; - block_time : Time.Protocol.t ; - time : Time.System.t } + +type error += Oversized_operation of {size : int; max : int} + +type error += + | Future_block_header of + { block : Block_hash.t; + block_time : Time.Protocol.t; + time : Time.System.t } let () = (* Parse error *) @@ -38,9 +43,11 @@ let () = `Permanent ~id:"node.prevalidation.parse_error" ~title:"Parsing error in prevalidation" - ~description:"Raised when an operation has not been parsed correctly during prevalidation." + ~description: + "Raised when an operation has not been parsed correctly during \ + prevalidation." ~pp:(fun ppf () -> - Format.fprintf ppf "Operation parsing error in prevalidation.") + Format.fprintf ppf "Operation parsing error in prevalidation.") Data_encoding.empty (function Parse_error -> Some () | _ -> None) (fun () -> Parse_error) ; @@ -51,7 +58,7 @@ let () = ~title:"Too many pending operations in prevalidation" ~description:"The prevalidation context is full." ~pp:(fun ppf () -> - Format.fprintf ppf "Too many operations in prevalidation context.") + Format.fprintf ppf "Too many operations in prevalidation context.") Data_encoding.empty (function Too_many_operations -> Some () | _ -> None) (fun () -> Too_many_operations) ; @@ -62,13 +69,11 @@ let () = ~title:"Oversized operation" ~description:"The operation size is bigger than allowed." ~pp:(fun ppf (size, max) -> - Format.fprintf ppf "Oversized operation (size: %d, max: %d)" - size max) - Data_encoding.(obj2 - (req "size" int31) - (req "max_size" int31)) - (function Oversized_operation { size ; max } -> Some (size, max) | _ -> None) - (fun (size, max) -> Oversized_operation { size ; max }) ; + Format.fprintf ppf "Oversized operation (size: %d, max: %d)" size max) + Data_encoding.(obj2 (req "size" int31) (req "max_size" int31)) + (function + | Oversized_operation {size; max} -> Some (size, max) | _ -> None) + (fun (size, max) -> Oversized_operation {size; max}) ; (* Block from the future *) register_error_kind `Temporary @@ -76,21 +81,31 @@ let () = ~title:"Future block header" ~description:"The block was annotated with a time too far in the future." 
~pp:(fun ppf (block, block_time, time) -> - Format.fprintf ppf "Future block header (block: %a, block_time: %a, time: %a)" - Block_hash.pp block - Time.System.pp_hum (Time.System.of_protocol_exn block_time) - Time.System.pp_hum time) - Data_encoding.(obj3 - (req "block" Block_hash.encoding) - (req "block_time" Time.Protocol.encoding) - (req "time" Time.System.encoding)) - (function Future_block_header { block ; block_time ; time } -> Some (block, block_time, time) | _ -> None) - (fun (block, block_time, time) -> Future_block_header { block ; block_time ; time }) - + Format.fprintf + ppf + "Future block header (block: %a, block_time: %a, time: %a)" + Block_hash.pp + block + Time.System.pp_hum + (Time.System.of_protocol_exn block_time) + Time.System.pp_hum + time) + Data_encoding.( + obj3 + (req "block" Block_hash.encoding) + (req "block_time" Time.Protocol.encoding) + (req "time" Time.System.encoding)) + (function + | Future_block_header {block; block_time; time} -> + Some (block, block_time, time) + | _ -> + None) + (fun (block, block_time, time) -> + Future_block_header {block; block_time; time}) (************************* State errors ***********************************) -type error += Unknown_chain of Chain_id.t +type error += Unknown_chain of Chain_id.t type error += Bad_data_dir @@ -102,10 +117,9 @@ let () = `Permanent ~id:"node.state.unknown_chain" ~title:"Unknown chain" - ~description:"The chain identifier could not be found in \ - the chain identifiers table." - ~pp:(fun ppf id -> - Format.fprintf ppf "Unknown chain %a" Chain_id.pp id) + ~description: + "The chain identifier could not be found in the chain identifiers table." + ~pp:(fun ppf id -> Format.fprintf ppf "Unknown chain %a" Chain_id.pp id) Data_encoding.(obj1 (req "chain" Chain_id.encoding)) (function Unknown_chain x -> Some x | _ -> None) (fun x -> Unknown_chain x) ; @@ -113,11 +127,10 @@ let () = `Permanent ~id:"node.state.bad_data_dir" ~title:"Bad data directory" - ~description:"The data directory could not be read. \ - This could be because it was generated with an \ - old version of the tezos-node program. \ - Deleting and regenerating this directory \ - may fix the problem." + ~description: + "The data directory could not be read. This could be because it was \ + generated with an old version of the tezos-node program. Deleting and \ + regenerating this directory may fix the problem." ~pp:(fun ppf () -> Format.fprintf ppf "Bad data directory.") Data_encoding.empty (function Bad_data_dir -> Some () | _ -> None) @@ -129,8 +142,11 @@ let () = ~title:"Block not invalid" ~description:"The invalid block to be unmarked was not actually invalid." ~pp:(fun ppf block -> - Format.fprintf ppf "Block %a was expected to be invalid, but was not actually invalid." - Block_hash.pp block) + Format.fprintf + ppf + "Block %a was expected to be invalid, but was not actually invalid." + Block_hash.pp + block) Data_encoding.(obj1 (req "block" Block_hash.encoding)) (function Block_not_invalid block -> Some block | _ -> None) (fun block -> Block_not_invalid block) @@ -146,22 +162,27 @@ let () = ~id:"node.state.block.inconsistent_context_hash" ~title:"Inconsistent commit hash" ~description: - "When commiting the context of a block, the announced context \ - hash was not the one computed at commit time." 
- ~pp: (fun ppf (got, exp) -> - Format.fprintf ppf - "@[<v 2>Inconsistent hash:@ got: %a@ expected: %a" - Context_hash.pp got - Context_hash.pp exp) - Data_encoding.(obj2 - (req "wrong_context_hash" Context_hash.encoding) - (req "expected_context_hash" Context_hash.encoding)) + "When commiting the context of a block, the announced context hash was \ + not the one computed at commit time." + ~pp:(fun ppf (got, exp) -> + Format.fprintf + ppf + "@[<v 2>Inconsistent hash:@ got: %a@ expected: %a" + Context_hash.pp + got + Context_hash.pp + exp) + Data_encoding.( + obj2 + (req "wrong_context_hash" Context_hash.encoding) + (req "expected_context_hash" Context_hash.encoding)) (function Inconsistent_hash (got, exp) -> Some (got, exp) | _ -> None) (fun (got, exp) -> Inconsistent_hash (got, exp)) (******************* Bootstrap pipeline errors ****************************) type error += Invalid_locator of P2p_peer.Id.t * Block_locator.t + type error += Too_short_locator of P2p_peer.Id.t * Block_locator.t let () = @@ -171,15 +192,19 @@ let () = ~id:"node.bootstrap_pipeline.invalid_locator" ~title:"Invalid block locator" ~description:"Block locator is invalid." - ~pp: (fun ppf (id, locator) -> - Format.fprintf ppf - "Invalid block locator on peer %a:\n%a" - P2p_peer.Id.pp id - Block_locator.pp locator) - Data_encoding.(obj2 - (req "id" P2p_peer.Id.encoding) - (req "locator" Block_locator.encoding)) - (function | Invalid_locator (id, loc) -> Some (id, loc) | _ -> None) + ~pp:(fun ppf (id, locator) -> + Format.fprintf + ppf + "Invalid block locator on peer %a:\n%a" + P2p_peer.Id.pp + id + Block_locator.pp + locator) + Data_encoding.( + obj2 + (req "id" P2p_peer.Id.encoding) + (req "locator" Block_locator.encoding)) + (function Invalid_locator (id, loc) -> Some (id, loc) | _ -> None) (fun (id, loc) -> Invalid_locator (id, loc)) ; (* Too short locator *) register_error_kind @@ -187,45 +212,43 @@ let () = ~id:"node.bootstrap_pipeline.too_short_locator" ~title:"Too short locator" ~description:"Block locator is too short." 
- ~pp: (fun ppf (id, locator) -> - Format.fprintf ppf - "Too short locator on peer %a:\n%a" - P2p_peer.Id.pp id - Block_locator.pp locator) - Data_encoding.(obj2 - (req "id" P2p_peer.Id.encoding) - (req "locator" Block_locator.encoding)) - (function | Too_short_locator (id, loc) -> Some (id, loc) | _ -> None) + ~pp:(fun ppf (id, locator) -> + Format.fprintf + ppf + "Too short locator on peer %a:\n%a" + P2p_peer.Id.pp + id + Block_locator.pp + locator) + Data_encoding.( + obj2 + (req "id" P2p_peer.Id.encoding) + (req "locator" Block_locator.encoding)) + (function Too_short_locator (id, loc) -> Some (id, loc) | _ -> None) (fun (id, loc) -> Too_short_locator (id, loc)) - (******************* Protocol validator errors ****************************) -type protocol_error = - | Compilation_failed - | Dynlinking_failed +type protocol_error = Compilation_failed | Dynlinking_failed -type error += Invalid_protocol of { hash: Protocol_hash.t ; error: protocol_error } +type error += + | Invalid_protocol of {hash : Protocol_hash.t; error : protocol_error} let protocol_error_encoding = let open Data_encoding in union - [ - case (Tag 0) + [ case + (Tag 0) ~title:"Compilation failed" - (obj1 - (req "error" (constant "compilation_failed"))) - (function Compilation_failed -> Some () - | _ -> None) - (fun () -> Compilation_failed) ; - case (Tag 1) + (obj1 (req "error" (constant "compilation_failed"))) + (function Compilation_failed -> Some () | _ -> None) + (fun () -> Compilation_failed); + case + (Tag 1) ~title:"Dynlinking failed" - (obj1 - (req "error" (constant "dynlinking_failed"))) - (function Dynlinking_failed -> Some () - | _ -> None) - (fun () -> Dynlinking_failed) ; - ] + (obj1 (req "error" (constant "dynlinking_failed"))) + (function Dynlinking_failed -> Some () | _ -> None) + (fun () -> Dynlinking_failed) ] let pp_protocol_error ppf = function | Compilation_failed -> @@ -240,88 +263,97 @@ let () = ~id:"node.protocol_validator.invalid_protocol" ~title:"Invalid protocol" ~description:"Invalid protocol." 
- ~pp:begin fun ppf (protocol, error) -> - Format.fprintf ppf + ~pp:(fun ppf (protocol, error) -> + Format.fprintf + ppf "@[<v 2>Invalid protocol %a@ %a@]" - Protocol_hash.pp_short protocol pp_protocol_error error - end - Data_encoding.(merge_objs - (obj1 (req "invalid_protocol" Protocol_hash.encoding)) - protocol_error_encoding) - (function Invalid_protocol { hash ; error } -> - Some (hash, error) | _ -> None) - (fun (hash, error) -> - Invalid_protocol { hash ; error }) + Protocol_hash.pp_short + protocol + pp_protocol_error + error) + Data_encoding.( + merge_objs + (obj1 (req "invalid_protocol" Protocol_hash.encoding)) + protocol_error_encoding) + (function + | Invalid_protocol {hash; error} -> Some (hash, error) | _ -> None) + (fun (hash, error) -> Invalid_protocol {hash; error}) (********************* Peer validator errors ******************************) -type error += - | Unknown_ancestor - | Known_invalid +type error += Unknown_ancestor | Known_invalid let () = (* Unknown ancestor *) register_error_kind `Permanent - ~id: "node.peer_validator.unknown_ancestor" - ~title: "Unknown ancestor" - ~description: "Unknown ancestor block found in the peer's chain" - ~pp: (fun ppf () -> Format.fprintf ppf "Unknown ancestor") + ~id:"node.peer_validator.unknown_ancestor" + ~title:"Unknown ancestor" + ~description:"Unknown ancestor block found in the peer's chain" + ~pp:(fun ppf () -> Format.fprintf ppf "Unknown ancestor") Data_encoding.empty (function Unknown_ancestor -> Some () | _ -> None) (fun () -> Unknown_ancestor) ; (* Known invalid *) register_error_kind `Permanent - ~id: "node.peer_validator.known_invalid" - ~title: "Known invalid" - ~description: "Known invalid block found in the peer's chain" - ~pp: (fun ppf () -> Format.fprintf ppf "Known invalid") + ~id:"node.peer_validator.known_invalid" + ~title:"Known invalid" + ~description:"Known invalid block found in the peer's chain" + ~pp:(fun ppf () -> Format.fprintf ppf "Known invalid") Data_encoding.empty (function Known_invalid -> Some () | _ -> None) (fun () -> Known_invalid) (************************ Validator errors ********************************) -type error += Inactive_chain of Chain_id.t +type error += Inactive_chain of Chain_id.t + type error += Checkpoint_error of Block_hash.t * P2p_peer.Id.t option let () = (* Inactive network *) register_error_kind `Branch - ~id: "node.validator.inactive_chain" - ~title: "Inactive chain" - ~description: "Attempted validation of a block from an inactive chain." - ~pp: (fun ppf chain -> - Format.fprintf ppf - "Tried to validate a block from chain %a, \ - that is not currently considered active." - Chain_id.pp chain) + ~id:"node.validator.inactive_chain" + ~title:"Inactive chain" + ~description:"Attempted validation of a block from an inactive chain." + ~pp:(fun ppf chain -> + Format.fprintf + ppf + "Tried to validate a block from chain %a, that is not currently \ + considered active." + Chain_id.pp + chain) Data_encoding.(obj1 (req "inactive_chain" Chain_id.encoding)) (function Inactive_chain chain -> Some chain | _ -> None) (fun chain -> Inactive_chain chain) ; register_error_kind `Branch ~id:"node.validator.checkpoint_error" - ~title: "Block incompatible with the current checkpoint." - ~description: "The block belongs to a branch that is not compatible \ - with the current checkpoint." - ~pp: (fun ppf (block, peer) -> - match peer with - | None -> - Format.fprintf ppf - "The block %a is incompatible with the current checkpoint." 
- Block_hash.pp_short block - | Some peer -> - Format.fprintf ppf - "The peer %a send us a block which is a sibling \ - of the configured checkpoint (%a)." - P2p_peer.Id.pp peer - Block_hash.pp_short block) - Data_encoding.(obj2 - (req "block" Block_hash.encoding) - (opt "peer" P2p_peer.Id.encoding)) - (function Checkpoint_error (block, peer) -> Some (block, peer) | _ -> None) + ~title:"Block incompatible with the current checkpoint." + ~description: + "The block belongs to a branch that is not compatible with the current \ + checkpoint." + ~pp:(fun ppf (block, peer) -> + match peer with + | None -> + Format.fprintf + ppf + "The block %a is incompatible with the current checkpoint." + Block_hash.pp_short + block + | Some peer -> + Format.fprintf + ppf + "The peer %a send us a block which is a sibling of the configured \ + checkpoint (%a)." + P2p_peer.Id.pp + peer + Block_hash.pp_short + block) + Data_encoding.( + obj2 (req "block" Block_hash.encoding) (opt "peer" P2p_peer.Id.encoding)) + (function + | Checkpoint_error (block, peer) -> Some (block, peer) | _ -> None) (fun (block, peer) -> Checkpoint_error (block, peer)) - diff --git a/src/lib_shell_services/validation_errors.mli b/src/lib_shell_services/validation_errors.mli index f852864ccea251726c5f63f0df94e51e7429c2c5..62dc838e4232da59453adaf137498f912f9e78ac 100644 --- a/src/lib_shell_services/validation_errors.mli +++ b/src/lib_shell_services/validation_errors.mli @@ -26,16 +26,23 @@ (***************** Prevalidation errors ***********************************) type error += Parse_error + type error += Too_many_operations -type error += Oversized_operation of { size: int ; max: int } -type error += Future_block_header of { block: Block_hash.t ; - block_time : Time.Protocol.t ; - time : Time.System.t } + +type error += Oversized_operation of {size : int; max : int} + +type error += + | Future_block_header of + { block : Block_hash.t; + block_time : Time.Protocol.t; + time : Time.System.t } (************************* State errors ***********************************) -type error += Unknown_chain of Chain_id.t +type error += Unknown_chain of Chain_id.t + type error += Bad_data_dir + type error += Block_not_invalid of Block_hash.t (* Block database error *) @@ -45,23 +52,22 @@ type error += Inconsistent_hash of Context_hash.t * Context_hash.t (******************* Bootstrap pipeline errors ****************************) type error += Invalid_locator of P2p_peer.Id.t * Block_locator.t + type error += Too_short_locator of P2p_peer.Id.t * Block_locator.t (******************* Protocol validator errors ****************************) -type protocol_error = - | Compilation_failed - | Dynlinking_failed +type protocol_error = Compilation_failed | Dynlinking_failed -type error += Invalid_protocol of { hash: Protocol_hash.t ; error: protocol_error } +type error += + | Invalid_protocol of {hash : Protocol_hash.t; error : protocol_error} (********************* Peer validator errors ******************************) -type error += - | Unknown_ancestor - | Known_invalid +type error += Unknown_ancestor | Known_invalid (************************ Validator errors ********************************) -type error += Inactive_chain of Chain_id.t +type error += Inactive_chain of Chain_id.t + type error += Checkpoint_error of Block_hash.t * P2p_peer.Id.t option diff --git a/src/lib_shell_services/worker_services.ml b/src/lib_shell_services/worker_services.ml index 8216320a7ffa42d6f823950b741a048adf19ea8f..9903533d44b7432997e6e800cfd6e93cd41f6524 100644 --- 
a/src/lib_shell_services/worker_services.ml +++ b/src/lib_shell_services/worker_services.ml @@ -26,142 +26,156 @@ open Data_encoding module Prevalidators = struct - module S = struct - let list = RPC_service.get_service ~description:"Lists the Prevalidator workers and their status." - ~query: RPC_query.empty + ~query:RPC_query.empty ~output: (list (obj4 (req "chain_id" Chain_id.encoding) - (req "status" (Worker_types.worker_status_encoding RPC_error.encoding)) - (req "information" (Worker_types.worker_information_encoding RPC_error.encoding)) - (req "pipelines" int8) - )) + (req + "status" + (Worker_types.worker_status_encoding RPC_error.encoding)) + (req + "information" + (Worker_types.worker_information_encoding RPC_error.encoding)) + (req "pipelines" int8))) RPC_path.(root / "workers" / "prevalidators") let state = RPC_service.get_service ~description:"Introspect the state of prevalidator workers." - ~query: RPC_query.empty + ~query:RPC_query.empty ~output: (Worker_types.full_status_encoding Prevalidator_worker_state.Request.encoding Prevalidator_worker_state.Event.encoding RPC_error.encoding) - RPC_path.(root / "workers" / "prevalidators" /: Chain_services.chain_arg ) - + RPC_path.( + root / "workers" / "prevalidators" /: Chain_services.chain_arg) end open RPC_context + let list ctxt = make_call S.list ctxt () () () - let state ctxt h = make_call1 S.state ctxt h () () + let state ctxt h = make_call1 S.state ctxt h () () end module Block_validator = struct - module S = struct - let state = RPC_service.get_service ~description:"Introspect the state of the block_validator worker." - ~query: RPC_query.empty + ~query:RPC_query.empty ~output: (Worker_types.full_status_encoding Block_validator_worker_state.Request.encoding Block_validator_worker_state.Event.encoding RPC_error.encoding) RPC_path.(root / "workers" / "block_validator") - end open RPC_context - let state ctxt = make_call S.state ctxt () () () + let state ctxt = make_call S.state ctxt () () () end module Peer_validators = struct - module S = struct - let list = RPC_service.get_service ~description:"Lists the peer validator workers and their status." - ~query: RPC_query.empty + ~query:RPC_query.empty ~output: (list (obj4 (req "peer_id" P2p_peer.Id.encoding) - (req "status" (Worker_types.worker_status_encoding RPC_error.encoding)) - (req "information" (Worker_types.worker_information_encoding RPC_error.encoding)) - (req "pipelines" Peer_validator_worker_state.Worker_state.pipeline_length_encoding) - )) - RPC_path.(root / "workers" / "chain_validators" /: Chain_services.chain_arg / "peers_validators" ) + (req + "status" + (Worker_types.worker_status_encoding RPC_error.encoding)) + (req + "information" + (Worker_types.worker_information_encoding RPC_error.encoding)) + (req + "pipelines" + Peer_validator_worker_state.Worker_state + .pipeline_length_encoding))) + RPC_path.( + root / "workers" / "chain_validators" /: Chain_services.chain_arg + / "peers_validators") let state = RPC_service.get_service ~description:"Introspect the state of a peer validator worker." 
- ~query: RPC_query.empty + ~query:RPC_query.empty ~output: (Worker_types.full_status_encoding Peer_validator_worker_state.Request.encoding Peer_validator_worker_state.Event.encoding RPC_error.encoding) - RPC_path.(root / "workers" / "chain_validators" /: Chain_services.chain_arg / "peers_validators" /: P2p_peer.Id.rpc_arg) - + RPC_path.( + root / "workers" / "chain_validators" /: Chain_services.chain_arg + / "peers_validators" /: P2p_peer.Id.rpc_arg) end open RPC_context + let list ctxt n = make_call1 S.list ctxt n () () - let state ctxt n h = make_call2 S.state ctxt n h () () + let state ctxt n h = make_call2 S.state ctxt n h () () end module Chain_validators = struct - module S = struct - let list = RPC_service.get_service ~description:"Lists the chain validator workers and their status." - ~query: RPC_query.empty + ~query:RPC_query.empty ~output: (list (obj4 (req "chain_id" Chain_id.encoding) - (req "status" (Worker_types.worker_status_encoding RPC_error.encoding)) - (req "information" (Worker_types.worker_information_encoding RPC_error.encoding)) - (req "pipelines" int8) - )) + (req + "status" + (Worker_types.worker_status_encoding RPC_error.encoding)) + (req + "information" + (Worker_types.worker_information_encoding RPC_error.encoding)) + (req "pipelines" int8))) RPC_path.(root / "workers" / "chain_validators") let state = RPC_service.get_service ~description:"Introspect the state of a chain validator worker." - ~query: RPC_query.empty + ~query:RPC_query.empty ~output: (Worker_types.full_status_encoding Chain_validator_worker_state.Request.encoding Chain_validator_worker_state.Event.encoding RPC_error.encoding) - RPC_path.(root / "workers" / "chain_validators" /: Chain_services.chain_arg ) + RPC_path.( + root / "workers" / "chain_validators" /: Chain_services.chain_arg) let ddb_state = RPC_service.get_service - ~description:"Introspect the state of the DDB attached to a chain validator worker." - ~query: RPC_query.empty - ~output: Chain_validator_worker_state.Distributed_db_state.encoding - RPC_path.(root / "workers" / "chain_validators" /: Chain_services.chain_arg / "ddb") - + ~description: + "Introspect the state of the DDB attached to a chain validator \ + worker." 
+ ~query:RPC_query.empty + ~output:Chain_validator_worker_state.Distributed_db_state.encoding + RPC_path.( + root / "workers" / "chain_validators" /: Chain_services.chain_arg + / "ddb") end open RPC_context + let list ctxt = make_call S.list ctxt () () () + let state ctxt h = make_call1 S.state ctxt h () () - let ddb_state ctxt h = make_call1 S.ddb_state ctxt h () () + let ddb_state ctxt h = make_call1 S.ddb_state ctxt h () () end diff --git a/src/lib_shell_services/worker_services.mli b/src/lib_shell_services/worker_services.mli index 52f9de847e004a1dc9d642b02a4755ffde07e5d9..6acbac717ea58b717207212d3255afdcb7933e0e 100644 --- a/src/lib_shell_services/worker_services.mli +++ b/src/lib_shell_services/worker_services.mli @@ -26,129 +26,162 @@ open RPC_context module Prevalidators : sig - open Prevalidator_worker_state - val list: - #simple -> ( - Chain_id.t * - Worker_types.worker_status * - Worker_types.worker_information * - int) list tzresult Lwt.t - - val state: - #simple -> Chain_services.chain -> (Request.view, Event.t) Worker_types.full_status tzresult Lwt.t + val list : + #simple -> + ( Chain_id.t + * Worker_types.worker_status + * Worker_types.worker_information + * int ) + list + tzresult + Lwt.t + + val state : + #simple -> + Chain_services.chain -> + (Request.view, Event.t) Worker_types.full_status tzresult Lwt.t module S : sig - val list : - ([ `GET ], unit, - unit, unit, unit, - (Chain_id.t * - Worker_types.worker_status * - Worker_types.worker_information * - int) list) RPC_service.t + ( [`GET], + unit, + unit, + unit, + unit, + ( Chain_id.t + * Worker_types.worker_status + * Worker_types.worker_information + * int ) + list ) + RPC_service.t val state : - ([ `GET ], unit, - unit * Chain_services.chain, unit, unit, - (Request.view, Event.t) Worker_types.full_status) RPC_service.t - + ( [`GET], + unit, + unit * Chain_services.chain, + unit, + unit, + (Request.view, Event.t) Worker_types.full_status ) + RPC_service.t end - end module Block_validator : sig - open Block_validator_worker_state - val state: + val state : #simple -> (Request.view, Event.t) Worker_types.full_status tzresult Lwt.t module S : sig - val state : - ([ `GET ], unit, - unit, unit, unit, - (Request.view, Event.t) Worker_types.full_status) RPC_service.t - + ( [`GET], + unit, + unit, + unit, + unit, + (Request.view, Event.t) Worker_types.full_status ) + RPC_service.t end - end module Peer_validators : sig - open Peer_validator_worker_state - val list: - #simple -> Chain_services.chain -> - (P2p_peer.Id.t * - Worker_types.worker_status * - Worker_types.worker_information * - Peer_validator_worker_state.Worker_state.pipeline_length) list tzresult Lwt.t - - val state: + val list : #simple -> - Chain_services.chain -> P2p_peer.Id.t -> (Request.view, Event.t) Worker_types.full_status tzresult Lwt.t + Chain_services.chain -> + ( P2p_peer.Id.t + * Worker_types.worker_status + * Worker_types.worker_information + * Peer_validator_worker_state.Worker_state.pipeline_length ) + list + tzresult + Lwt.t + + val state : + #simple -> + Chain_services.chain -> + P2p_peer.Id.t -> + (Request.view, Event.t) Worker_types.full_status tzresult Lwt.t module S : sig - val list : - ([ `GET ], unit, - unit * Chain_services.chain, unit, unit, - (P2p_peer.Id.t * - Worker_types.worker_status * - Worker_types.worker_information * - Peer_validator_worker_state.Worker_state.pipeline_length) list) RPC_service.t + ( [`GET], + unit, + unit * Chain_services.chain, + unit, + unit, + ( P2p_peer.Id.t + * Worker_types.worker_status + * 
Worker_types.worker_information + * Peer_validator_worker_state.Worker_state.pipeline_length ) + list ) + RPC_service.t val state : - ([ `GET ], unit, - (unit * Chain_services.chain) * P2p_peer.Id.t, unit, unit, - (Request.view, Event.t) Worker_types.full_status) RPC_service.t - + ( [`GET], + unit, + (unit * Chain_services.chain) * P2p_peer.Id.t, + unit, + unit, + (Request.view, Event.t) Worker_types.full_status ) + RPC_service.t end - end module Chain_validators : sig - open Chain_validator_worker_state - val list: - #simple -> ( - Chain_id.t * - Worker_types.worker_status * - Worker_types.worker_information * - int) list tzresult Lwt.t - - val state: - #simple -> Chain_services.chain -> + val list : + #simple -> + ( Chain_id.t + * Worker_types.worker_status + * Worker_types.worker_information + * int ) + list + tzresult + Lwt.t + + val state : + #simple -> + Chain_services.chain -> (Request.view, Event.t) Worker_types.full_status tzresult Lwt.t - val ddb_state: + val ddb_state : #simple -> Chain_services.chain -> Distributed_db_state.view tzresult Lwt.t - module S : sig - val list : - ([ `GET ], unit, - unit, unit, unit, - (Chain_id.t * - Worker_types.worker_status * - Worker_types.worker_information * - int ) list) RPC_service.t + ( [`GET], + unit, + unit, + unit, + unit, + ( Chain_id.t + * Worker_types.worker_status + * Worker_types.worker_information + * int ) + list ) + RPC_service.t val state : - ([ `GET ], unit, - unit * Chain_services.chain, unit, unit, - (Request.view, Event.t) Worker_types.full_status) RPC_service.t + ( [`GET], + unit, + unit * Chain_services.chain, + unit, + unit, + (Request.view, Event.t) Worker_types.full_status ) + RPC_service.t val ddb_state : - ([ `GET ], unit, - unit * Chain_services.chain, unit, unit, - Distributed_db_state.view) RPC_service.t - + ( [`GET], + unit, + unit * Chain_services.chain, + unit, + unit, + Distributed_db_state.view ) + RPC_service.t end - end diff --git a/src/lib_shell_services/worker_types.ml b/src/lib_shell_services/worker_types.ml index 87f787d9b1650833a9efe3d8388cf6a849eeda71..86f6e53c528e518ca4a193b1d705629fa46a6fe6 100644 --- a/src/lib_shell_services/worker_types.ml +++ b/src/lib_shell_services/worker_types.ml @@ -23,9 +23,7 @@ (* *) (*****************************************************************************) -type limits = - { backlog_size : int ; - backlog_level : Internal_event.level ; } +type limits = {backlog_size : int; backlog_level : Internal_event.level} type worker_status = | Launching of Time.System.t @@ -36,88 +34,92 @@ type worker_status = let worker_status_encoding error_encoding = let open Data_encoding in union - [ case (Tag 0) + [ case + (Tag 0) ~title:"Launching" (obj2 (req "phase" (constant "launching")) (req "since" Time.System.encoding)) (function Launching t -> Some ((), t) | _ -> None) - (fun ((), t) -> Launching t) ; - case (Tag 1) + (fun ((), t) -> Launching t); + case + (Tag 1) ~title:"Running" (obj2 (req "phase" (constant "running")) (req "since" Time.System.encoding)) (function Running t -> Some ((), t) | _ -> None) - (fun ((), t) -> Running t) ; - case (Tag 2) + (fun ((), t) -> Running t); + case + (Tag 2) ~title:"Closing" (obj3 (req "phase" (constant "closing")) (req "birth" Time.System.encoding) (req "since" Time.System.encoding)) (function Closing (t0, t) -> Some ((), t0, t) | _ -> None) - (fun ((), t0, t) -> Closing (t0, t)) ; - case (Tag 3) + (fun ((), t0, t) -> Closing (t0, t)); + case + (Tag 3) ~title:"Closed" (obj3 (req "phase" (constant "closed")) (req "birth" Time.System.encoding) 
(req "since" Time.System.encoding)) (function Closed (t0, t, None) -> Some ((), t0, t) | _ -> None) - (fun ((), t0, t) -> Closed (t0, t, None)) ; - case (Tag 4) + (fun ((), t0, t) -> Closed (t0, t, None)); + case + (Tag 4) ~title:"Crashed" (obj4 (req "phase" (constant "crashed")) (req "birth" Time.System.encoding) (req "since" Time.System.encoding) (req "errors" error_encoding)) - (function Closed (t0, t, Some errs) -> Some ((), t0, t, errs) | _ -> None) - (fun ((), t0, t, errs) -> Closed (t0, t, Some errs )) ] + (function + | Closed (t0, t, Some errs) -> Some ((), t0, t, errs) | _ -> None) + (fun ((), t0, t, errs) -> Closed (t0, t, Some errs)) ] type worker_information = { - instances_number : int ; - wstatus : worker_status ; - queue_length : int ; + instances_number : int; + wstatus : worker_status; + queue_length : int } let worker_information_encoding error_encoding = Data_encoding.( conv - (fun { instances_number ; wstatus ; queue_length } -> - (instances_number, wstatus, queue_length)) + (fun {instances_number; wstatus; queue_length} -> + (instances_number, wstatus, queue_length)) (fun (instances_number, wstatus, queue_length) -> - { instances_number ; wstatus ; queue_length }) + {instances_number; wstatus; queue_length}) (obj3 (req "instances" int31) (req "status" (worker_status_encoding error_encoding)) - (req "queue_length" int31) - ) - ) + (req "queue_length" int31))) -type request_status = - { pushed : Time.System.t ; - treated : Time.System.t ; - completed : Time.System.t } +type request_status = { + pushed : Time.System.t; + treated : Time.System.t; + completed : Time.System.t +} let request_status_encoding = let open Data_encoding in conv - (fun { pushed ; treated ; completed } -> - (pushed, treated, completed)) - (fun (pushed, treated, completed) -> - { pushed ; treated ; completed }) + (fun {pushed; treated; completed} -> (pushed, treated, completed)) + (fun (pushed, treated, completed) -> {pushed; treated; completed}) (obj3 (req "pushed" Time.System.encoding) (req "treated" Time.System.encoding) (req "completed" Time.System.encoding)) -type ('req, 'evt) full_status = - { status : worker_status ; - pending_requests : (Time.System.t * 'req) list ; - backlog : (Internal_event.level * 'evt list) list ; - current_request : (Time.System.t * Time.System.t * 'req) option } +type ('req, 'evt) full_status = { + status : worker_status; + pending_requests : (Time.System.t * 'req) list; + backlog : (Internal_event.level * 'evt list) list; + current_request : (Time.System.t * Time.System.t * 'req) option +} let full_status_encoding req_encoding evt_encoding error_encoding = let open Data_encoding in @@ -125,22 +127,25 @@ let full_status_encoding req_encoding evt_encoding error_encoding = list (obj2 (req "pushed" Time.System.encoding) - (req "request" (dynamic_size req_encoding))) in + (req "request" (dynamic_size req_encoding))) + in let events_encoding = list (obj2 (req "level" Internal_event.Level.encoding) - (req "events" (dynamic_size (list (dynamic_size evt_encoding))))) in + (req "events" (dynamic_size (list (dynamic_size evt_encoding))))) + in let current_request_encoding = obj3 (req "pushed" Time.System.encoding) (req "treated" Time.System.encoding) - (req "request" req_encoding) in + (req "request" req_encoding) + in conv - (fun { status ; pending_requests ; backlog ; current_request } -> - (status, pending_requests, backlog, current_request)) + (fun {status; pending_requests; backlog; current_request} -> + (status, pending_requests, backlog, current_request)) (fun (status, 
pending_requests, backlog, current_request) -> - { status ; pending_requests ; backlog ; current_request }) + {status; pending_requests; backlog; current_request}) (obj4 (req "status" (worker_status_encoding error_encoding)) (req "pending_requests" requests_encoding) diff --git a/src/lib_shell_services/worker_types.mli b/src/lib_shell_services/worker_types.mli index 9db1b848238aa15c06b31f96dda186b71c759fe2..8d597f524a969baa00f132efbbea71ce1b277125 100644 --- a/src/lib_shell_services/worker_types.mli +++ b/src/lib_shell_services/worker_types.mli @@ -24,11 +24,12 @@ (*****************************************************************************) (** Some memory and time limits. *) -type limits = - { backlog_size : int - (** Number of event stored in the backlog for each debug level. *) ; - backlog_level : Internal_event.level - (** Stores events at least as important as this value. *) ; } +type limits = { + backlog_size : int; + (** Number of event stored in the backlog for each debug level. *) + backlog_level : Internal_event.level + (** Stores events at least as important as this value. *) +} (** The running status of an individual worker. *) type worker_status = @@ -38,32 +39,35 @@ type worker_status = | Closed of Time.System.t * Time.System.t * error list option (** Worker status serializer for RPCs. *) -val worker_status_encoding : error list Data_encoding.t -> worker_status Data_encoding.t +val worker_status_encoding : + error list Data_encoding.t -> worker_status Data_encoding.t type worker_information = { - instances_number : int ; - wstatus: worker_status ; - queue_length : int ; + instances_number : int; + wstatus : worker_status; + queue_length : int } val worker_information_encoding : error list Data_encoding.t -> worker_information Data_encoding.t (** The runnning status of an individual request. *) -type request_status = - { pushed : Time.System.t ; - treated : Time.System.t ; - completed : Time.System.t } +type request_status = { + pushed : Time.System.t; + treated : Time.System.t; + completed : Time.System.t +} (** Request status serializer for RPCs. *) val request_status_encoding : request_status Data_encoding.t (** The full status of an individual worker. *) -type ('req, 'evt) full_status = - { status : worker_status ; - pending_requests : (Time.System.t * 'req) list ; - backlog : (Internal_event.level * 'evt list) list ; - current_request : (Time.System.t * Time.System.t * 'req) option } +type ('req, 'evt) full_status = { + status : worker_status; + pending_requests : (Time.System.t * 'req) list; + backlog : (Internal_event.level * 'evt list) list; + current_request : (Time.System.t * Time.System.t * 'req) option +} (** Full worker status serializer for RPCs. 
*) val full_status_encoding : diff --git a/src/lib_signer_backends/.ocamlformat b/src/lib_signer_backends/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_signer_backends/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_signer_backends/encrypted.ml b/src/lib_signer_backends/encrypted.ml index 6cade24e63cdb8bd997e8a3d652bc75886c26fd0..2b4fdcd010376a494be1c1a8e25ad74cbb71701e 100644 --- a/src/lib_signer_backends/encrypted.ml +++ b/src/lib_signer_backends/encrypted.ml @@ -25,7 +25,9 @@ (*****************************************************************************) type Base58.data += Encrypted_ed25519 of MBytes.t + type Base58.data += Encrypted_secp256k1 of MBytes.t + type Base58.data += Encrypted_p256 of MBytes.t open Client_keys @@ -33,7 +35,6 @@ open Client_keys let scheme = "encrypted" module Raw = struct - (* https://tools.ietf.org/html/rfc2898#section-4.1 *) let salt_len = 8 @@ -56,75 +57,82 @@ module Raw = struct | Secp256k1 sk -> Data_encoding.Binary.to_bytes_exn Secp256k1.Secret_key.encoding sk | P256 sk -> - Data_encoding.Binary.to_bytes_exn P256.Secret_key.encoding sk in - MBytes.concat "" [ salt ; - Crypto_box.Secretbox.box key msg nonce ] + Data_encoding.Binary.to_bytes_exn P256.Secret_key.encoding sk + in + MBytes.concat "" [salt; Crypto_box.Secretbox.box key msg nonce] let decrypt algo ~password ~encrypted_sk = let salt = MBytes.sub encrypted_sk 0 salt_len in - let encrypted_sk = - MBytes.sub encrypted_sk salt_len encrypted_size in + let encrypted_sk = MBytes.sub encrypted_sk salt_len encrypted_size in let key = Crypto_box.Secretbox.unsafe_of_bytes (pbkdf ~salt ~password) in - match Crypto_box.Secretbox.box_open key encrypted_sk nonce, algo with - | None, _ -> return_none - | Some bytes, Signature.Ed25519 -> begin - match Data_encoding.Binary.of_bytes Ed25519.Secret_key.encoding bytes with - | Some sk -> return_some (Ed25519 sk : Signature.Secret_key.t) - | None -> failwith "Corrupted wallet, deciphered key is not a \ - valid Ed25519 secret key" - end - | Some bytes, Signature.Secp256k1 -> begin - match Data_encoding.Binary.of_bytes Secp256k1.Secret_key.encoding bytes with - | Some sk -> return_some (Secp256k1 sk : Signature.Secret_key.t) - | None -> failwith "Corrupted wallet, deciphered key is not a \ - valid Secp256k1 secret key" - end - | Some bytes, Signature.P256 -> begin - match Data_encoding.Binary.of_bytes P256.Secret_key.encoding bytes with - | Some sk -> return_some (P256 sk : Signature.Secret_key.t) - | None -> failwith "Corrupted wallet, deciphered key is not a \ - valid P256 secret key" - end + match (Crypto_box.Secretbox.box_open key encrypted_sk nonce, algo) with + | (None, _) -> + return_none + | (Some bytes, Signature.Ed25519) -> ( + match + Data_encoding.Binary.of_bytes Ed25519.Secret_key.encoding bytes + with + | Some sk -> + return_some (Ed25519 sk : Signature.Secret_key.t) + | None -> + failwith + "Corrupted wallet, deciphered key is not a valid Ed25519 secret key" + ) + | (Some bytes, Signature.Secp256k1) -> ( + match + Data_encoding.Binary.of_bytes Secp256k1.Secret_key.encoding bytes + with + | Some sk -> + return_some (Secp256k1 sk : Signature.Secret_key.t) + | None -> + 
failwith + "Corrupted wallet, deciphered key is not a valid Secp256k1 secret \ + key" ) + | (Some bytes, Signature.P256) -> ( + match Data_encoding.Binary.of_bytes P256.Secret_key.encoding bytes with + | Some sk -> + return_some (P256 sk : Signature.Secret_key.t) + | None -> + failwith + "Corrupted wallet, deciphered key is not a valid P256 secret key" ) end module Encodings = struct - let ed25519 = - let length = - Hacl.Sign.skbytes + Crypto_box.boxzerobytes + Raw.salt_len in + let length = Hacl.Sign.skbytes + Crypto_box.boxzerobytes + Raw.salt_len in Base58.register_encoding - ~prefix: Base58.Prefix.ed25519_encrypted_seed + ~prefix:Base58.Prefix.ed25519_encrypted_seed ~length - ~to_raw: (fun sk -> MBytes.to_string sk) - ~of_raw: (fun buf -> - if String.length buf <> length then None - else Some (MBytes.of_string buf)) - ~wrap: (fun sk -> Encrypted_ed25519 sk) + ~to_raw:(fun sk -> MBytes.to_string sk) + ~of_raw:(fun buf -> + if String.length buf <> length then None + else Some (MBytes.of_string buf)) + ~wrap:(fun sk -> Encrypted_ed25519 sk) let secp256k1 = let open Libsecp256k1.External in - let length = - Key.secret_bytes + Crypto_box.boxzerobytes + Raw.salt_len in + let length = Key.secret_bytes + Crypto_box.boxzerobytes + Raw.salt_len in Base58.register_encoding - ~prefix: Base58.Prefix.secp256k1_encrypted_secret_key + ~prefix:Base58.Prefix.secp256k1_encrypted_secret_key ~length - ~to_raw: (fun sk -> MBytes.to_string sk) - ~of_raw: (fun buf -> - if String.length buf <> length then None - else Some (MBytes.of_string buf)) - ~wrap: (fun sk -> Encrypted_secp256k1 sk) + ~to_raw:(fun sk -> MBytes.to_string sk) + ~of_raw:(fun buf -> + if String.length buf <> length then None + else Some (MBytes.of_string buf)) + ~wrap:(fun sk -> Encrypted_secp256k1 sk) let p256 = let length = - Uecc.(sk_size secp256r1) + Crypto_box.boxzerobytes + Raw.salt_len in + Uecc.(sk_size secp256r1) + Crypto_box.boxzerobytes + Raw.salt_len + in Base58.register_encoding - ~prefix: Base58.Prefix.p256_encrypted_secret_key + ~prefix:Base58.Prefix.p256_encrypted_secret_key ~length - ~to_raw: (fun sk -> MBytes.to_string sk) - ~of_raw: (fun buf -> - if String.length buf <> length then None - else Some (MBytes.of_string buf)) - ~wrap: (fun sk -> Encrypted_p256 sk) + ~to_raw:(fun sk -> MBytes.to_string sk) + ~of_raw:(fun buf -> + if String.length buf <> length then None + else Some (MBytes.of_string buf)) + ~wrap:(fun sk -> Encrypted_p256 sk) let () = Base58.check_encoded_prefix ed25519 "edesk" 88 ; @@ -138,18 +146,16 @@ let decrypted = Hashtbl.create 13 asking the user all the time *) let passwords = ref [] -let rec interactive_decrypt_loop - (cctxt : #Client_context.prompter) - ?name ~encrypted_sk algo = - begin match name with - | None -> - cctxt#prompt_password - "Enter password for encrypted key: " - | Some name -> - cctxt#prompt_password - "Enter password for encrypted key \"%s\": " name - end >>=? fun password -> - Raw.decrypt algo ~password ~encrypted_sk >>=? function +let rec interactive_decrypt_loop (cctxt : #Client_context.prompter) ?name + ~encrypted_sk algo = + ( match name with + | None -> + cctxt#prompt_password "Enter password for encrypted key: " + | Some name -> + cctxt#prompt_password "Enter password for encrypted key \"%s\": " name ) + >>=? fun password -> + Raw.decrypt algo ~password ~encrypted_sk + >>=? 
function | Some sk -> passwords := password :: !passwords ; return sk @@ -157,101 +163,117 @@ let rec interactive_decrypt_loop interactive_decrypt_loop cctxt ?name ~encrypted_sk algo (* add all passwords obtained by [ctxt#load_passwords] to the list of known passwords *) -let password_file_load ctxt = match ctxt#load_passwords with +let password_file_load ctxt = + match ctxt#load_passwords with | Some stream -> Lwt_stream.iter - (fun p -> - passwords := MBytes.of_string p :: !passwords) - stream >>= fun () -> + (fun p -> passwords := MBytes.of_string p :: !passwords) + stream + >>= fun () -> return_unit + | None -> return_unit - | None -> return_unit let rec noninteractive_decrypt_loop algo ~encrypted_sk = function - | [] -> return_none - | password :: passwords -> - Raw.decrypt algo ~password ~encrypted_sk >>=? function - | None -> noninteractive_decrypt_loop algo ~encrypted_sk passwords - | Some sk -> return_some sk + | [] -> + return_none + | password :: passwords -> ( + Raw.decrypt algo ~password ~encrypted_sk + >>=? function + | None -> + noninteractive_decrypt_loop algo ~encrypted_sk passwords + | Some sk -> + return_some sk ) let decrypt_payload cctxt ?name encrypted_sk = - begin match Base58.decode encrypted_sk with - | Some (Encrypted_ed25519 encrypted_sk) -> - return (Signature.Ed25519, encrypted_sk) - | Some (Encrypted_secp256k1 encrypted_sk) -> - return (Signature.Secp256k1, encrypted_sk) - | Some (Encrypted_p256 encrypted_sk) -> - return (Signature.P256, encrypted_sk) - | _ -> failwith "Not a Base58Check-encoded encrypted key" - end >>=? fun (algo, encrypted_sk) -> - noninteractive_decrypt_loop algo ~encrypted_sk !passwords >>=? function - | Some sk -> return sk - | None -> interactive_decrypt_loop cctxt ?name ~encrypted_sk algo + ( match Base58.decode encrypted_sk with + | Some (Encrypted_ed25519 encrypted_sk) -> + return (Signature.Ed25519, encrypted_sk) + | Some (Encrypted_secp256k1 encrypted_sk) -> + return (Signature.Secp256k1, encrypted_sk) + | Some (Encrypted_p256 encrypted_sk) -> + return (Signature.P256, encrypted_sk) + | _ -> + failwith "Not a Base58Check-encoded encrypted key" ) + >>=? fun (algo, encrypted_sk) -> + noninteractive_decrypt_loop algo ~encrypted_sk !passwords + >>=? function + | Some sk -> + return sk + | None -> + interactive_decrypt_loop cctxt ?name ~encrypted_sk algo let decrypt (cctxt : #Client_context.prompter) ?name sk_uri = let payload = Uri.path (sk_uri : sk_uri :> Uri.t) in - decrypt_payload cctxt ?name payload >>=? fun sk -> + decrypt_payload cctxt ?name payload + >>=? fun sk -> Hashtbl.replace decrypted sk_uri sk ; return sk let decrypt_all (cctxt : #Client_context.io_wallet) = - Secret_key.load cctxt >>=? fun sks -> - password_file_load cctxt >>=? fun () -> - iter_s begin fun (name, sk_uri) -> - if Uri.scheme (sk_uri : sk_uri :> Uri.t) <> Some scheme then - return_unit - else - decrypt cctxt ~name sk_uri >>=? fun _ -> - return_unit - end sks + Secret_key.load cctxt + >>=? fun sks -> + password_file_load cctxt + >>=? fun () -> + iter_s + (fun (name, sk_uri) -> + if Uri.scheme (sk_uri : sk_uri :> Uri.t) <> Some scheme then return_unit + else decrypt cctxt ~name sk_uri >>=? fun _ -> return_unit) + sks let decrypt_list (cctxt : #Client_context.io_wallet) keys = - Secret_key.load cctxt >>=? fun sks -> - password_file_load cctxt >>=? fun () -> - iter_s begin fun (name, sk_uri) -> - if Uri.scheme (sk_uri : sk_uri :> Uri.t) = Some scheme && - (keys = [] || List.mem name keys) then - decrypt cctxt ~name sk_uri >>=? 
fun _ -> - return_unit - else - return_unit - end sks + Secret_key.load cctxt + >>=? fun sks -> + password_file_load cctxt + >>=? fun () -> + iter_s + (fun (name, sk_uri) -> + if + Uri.scheme (sk_uri : sk_uri :> Uri.t) = Some scheme + && (keys = [] || List.mem name keys) + then decrypt cctxt ~name sk_uri >>=? fun _ -> return_unit + else return_unit) + sks let rec read_password (cctxt : #Client_context.io) = - cctxt#prompt_password - "Enter password to encrypt your key: " >>=? fun password -> - cctxt#prompt_password - "Confirm password: " >>=? fun confirm -> + cctxt#prompt_password "Enter password to encrypt your key: " + >>=? fun password -> + cctxt#prompt_password "Confirm password: " + >>=? fun confirm -> if not (MBytes.equal password confirm) then - cctxt#message "Passwords do not match." >>= fun () -> - read_password cctxt - else - return password + cctxt#message "Passwords do not match." >>= fun () -> read_password cctxt + else return password let encrypt cctxt sk = - read_password cctxt >>=? fun password -> + read_password cctxt + >>=? fun password -> let payload = Raw.encrypt ~password sk in - let encoding = match sk with - | Ed25519 _ -> Encodings.ed25519 - | Secp256k1 _ -> Encodings.secp256k1 - | P256 _ -> Encodings.p256 in + let encoding = + match sk with + | Ed25519 _ -> + Encodings.ed25519 + | Secp256k1 _ -> + Encodings.secp256k1 + | P256 _ -> + Encodings.p256 + in let path = Base58.simple_encode encoding payload in let sk_uri = Client_keys.make_sk_uri (Uri.make ~scheme ~path ()) in Hashtbl.replace decrypted sk_uri sk ; return sk_uri -module Make(C : sig val cctxt: Client_context.prompter end) = struct - +module Make (C : sig + val cctxt : Client_context.prompter +end) = +struct let scheme = "encrypted" - let title = - "Built-in signer using encrypted keys." + let title = "Built-in signer using encrypted keys." let description = "Valid secret key URIs are of the form\n\ \ - encrypted:<encrypted_key>\n\ - where <encrypted_key> is the encrypted (password protected \ - using Nacl's cryptobox and pbkdf) secret key, formatted in \ - unprefixed Base58.\n\ + where <encrypted_key> is the encrypted (password protected using Nacl's \ + cryptobox and pbkdf) secret key, formatted in unprefixed Base58.\n\ Valid public key URIs are of the form\n\ \ - encrypted:<public_key>\n\ where <public_key> is the public key in Base58." @@ -261,21 +283,21 @@ module Make(C : sig val cctxt: Client_context.prompter end) = struct let public_key_hash = Unencrypted.public_key_hash let neuterize sk_uri = - decrypt C.cctxt sk_uri >>=? fun sk -> + decrypt C.cctxt sk_uri + >>=? fun sk -> return (Unencrypted.make_pk (Signature.Secret_key.to_public_key sk)) let sign ?watermark sk_uri buf = - decrypt C.cctxt sk_uri >>=? fun sk -> - return (Signature.sign ?watermark sk buf) + decrypt C.cctxt sk_uri + >>=? fun sk -> return (Signature.sign ?watermark sk buf) let deterministic_nonce sk_uri buf = - decrypt C.cctxt sk_uri >>=? fun sk -> - return (Signature.deterministic_nonce sk buf) + decrypt C.cctxt sk_uri + >>=? fun sk -> return (Signature.deterministic_nonce sk buf) let deterministic_nonce_hash sk_uri buf = - decrypt C.cctxt sk_uri >>=? fun sk -> - return (Signature.deterministic_nonce_hash sk buf) + decrypt C.cctxt sk_uri + >>=? 
fun sk -> return (Signature.deterministic_nonce_hash sk buf) let supports_deterministic_nonces _ = return_true - end diff --git a/src/lib_signer_backends/encrypted.mli b/src/lib_signer_backends/encrypted.mli index 09096e71859c35f86cb19290ccad1681591dc64a..83ac8f0971d3839659af82d147343fee962f7c51 100644 --- a/src/lib_signer_backends/encrypted.mli +++ b/src/lib_signer_backends/encrypted.mli @@ -23,19 +23,22 @@ (* *) (*****************************************************************************) -module Make(C : sig val cctxt: Client_context.prompter end) : Client_keys.SIGNER +module Make (C : sig + val cctxt : Client_context.prompter +end) : Client_keys.SIGNER -val decrypt: +val decrypt : #Client_context.prompter -> ?name:string -> - Client_keys.sk_uri -> Signature.secret_key tzresult Lwt.t + Client_keys.sk_uri -> + Signature.secret_key tzresult Lwt.t -val decrypt_all: - #Client_context.io_wallet -> unit tzresult Lwt.t +val decrypt_all : #Client_context.io_wallet -> unit tzresult Lwt.t -val decrypt_list: +val decrypt_list : #Client_context.io_wallet -> string list -> unit tzresult Lwt.t -val encrypt: +val encrypt : #Client_context.io -> - Signature.secret_key -> Client_keys.sk_uri tzresult Lwt.t + Signature.secret_key -> + Client_keys.sk_uri tzresult Lwt.t diff --git a/src/lib_signer_backends/http.ml b/src/lib_signer_backends/http.ml index 51fb58fa18edb0ca994b9da833ca13bd6cdaf0fa..86f6d4aba98f99e060ddd12a2bb592069e991d71 100644 --- a/src/lib_signer_backends/http.ml +++ b/src/lib_signer_backends/http.ml @@ -23,4 +23,6 @@ (* *) (*****************************************************************************) -include Http_gen.Make(struct let scheme = "http" end) +include Http_gen.Make (struct + let scheme = "http" +end) diff --git a/src/lib_signer_backends/http.mli b/src/lib_signer_backends/http.mli index bd7ef10922daeff5d5a6c5ee8ca85afe4638b7a7..662629a7aff7673ebcb8f1ae05ca13d48bc899e4 100644 --- a/src/lib_signer_backends/http.mli +++ b/src/lib_signer_backends/http.mli @@ -23,10 +23,11 @@ (* *) (*****************************************************************************) -module Make(P : sig - val authenticate: Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t - val logger: RPC_client.logger - end) - : Client_keys.SIGNER +module Make (P : sig + val authenticate : + Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t -val make_base: string -> int -> Uri.t + val logger : RPC_client.logger +end) : Client_keys.SIGNER + +val make_base : string -> int -> Uri.t diff --git a/src/lib_signer_backends/http_gen.ml b/src/lib_signer_backends/http_gen.ml index e247f6f278889aa56e636e67372c3637204cc205..565fde33208c891ab90f4a6817812ec3887abf84 100644 --- a/src/lib_signer_backends/http_gen.ml +++ b/src/lib_signer_backends/http_gen.ml @@ -23,150 +23,203 @@ (* *) (*****************************************************************************) -module Make(N : sig val scheme : string end) = struct - +module Make (N : sig + val scheme : string +end) = +struct open Client_keys let scheme = N.scheme - module Make(P : sig - val authenticate: Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t - val logger: RPC_client.logger - end) = struct + module Make (P : sig + val authenticate : + Signature.Public_key_hash.t list -> + MBytes.t -> + Signature.t tzresult Lwt.t + val logger : RPC_client.logger + end) = + struct let scheme = scheme let title = - "Built-in tezos-signer using remote signer through hardcoded " ^ scheme ^ " requests." 
+ "Built-in tezos-signer using remote signer through hardcoded " ^ scheme + ^ " requests." let description = - "Valid locators are of this form:\n" - ^ " - " ^ scheme ^ "://host/tz1...\n" - ^ " - " ^ scheme ^ "://host:port/path/to/service/tz1...\n" - ^ "Environment variable TEZOS_SIGNER_HTTP_HEADERS can be specified \ - to add headers to the requests (only 'host' and custom 'x-...' headers are supported)." - - let headers = match Sys.getenv_opt "TEZOS_SIGNER_HTTP_HEADERS" with - | None -> None + "Valid locators are of this form:\n" ^ " - " ^ scheme + ^ "://host/tz1...\n" ^ " - " ^ scheme + ^ "://host:port/path/to/service/tz1...\n" + ^ "Environment variable TEZOS_SIGNER_HTTP_HEADERS can be specified to \ + add headers to the requests (only 'host' and custom 'x-...' headers \ + are supported)." + + let headers = + match Sys.getenv_opt "TEZOS_SIGNER_HTTP_HEADERS" with + | None -> + None | Some contents -> let lines = String.split_on_char '\n' contents in Some - (List.fold_left (fun acc line -> + (List.fold_left + (fun acc line -> match String.index_opt line ':' with | None -> Pervasives.failwith - "Http signer: invalid TEZOS_SIGNER_HTTP_HEADERS environment variable, missing colon" + "Http signer: invalid TEZOS_SIGNER_HTTP_HEADERS \ + environment variable, missing colon" | Some pos -> let header = String.trim (String.sub line 0 pos) in let header = String.lowercase_ascii header in - if header <> "host" - && (String.length header < 2 - || String.sub header 0 2 <> "x-") then + if + header <> "host" + && ( String.length header < 2 + || String.sub header 0 2 <> "x-" ) + then Pervasives.failwith - "Http signer: invalid TEZOS_SIGNER_HTTP_HEADERS environment variable, \ - only 'host' or 'x-' headers are supported" ; - let value = String.trim (String.sub line (pos + 1) (String.length line - pos - 1)) in - (header, value) :: acc) [] lines) + "Http signer: invalid TEZOS_SIGNER_HTTP_HEADERS \ + environment variable, only 'host' or 'x-' headers \ + are supported" ; + let value = + String.trim + (String.sub + line + (pos + 1) + (String.length line - pos - 1)) + in + (header, value) :: acc) + [] + lines) let parse uri = (* extract `tz1..` from the last component of the path *) assert (Uri.scheme uri = Some scheme) ; let path = Uri.path uri in - begin match String.rindex_opt path '/' with - | None -> - failwith "Invalid locator %a" Uri.pp_hum uri - | Some i -> - let pkh = - try String.sub path (i + 1) (String.length path - i - 1) - with _ -> "" in - let path = String.sub path 0 i in - return (Uri.with_path uri path, pkh) - end >>=? fun (base, pkh) -> - Lwt.return (Signature.Public_key_hash.of_b58check pkh) >>=? fun pkh -> - return (base, pkh) + ( match String.rindex_opt path '/' with + | None -> + failwith "Invalid locator %a" Uri.pp_hum uri + | Some i -> + let pkh = + try String.sub path (i + 1) (String.length path - i - 1) + with _ -> "" + in + let path = String.sub path 0 i in + return (Uri.with_path uri path, pkh) ) + >>=? fun (base, pkh) -> + Lwt.return (Signature.Public_key_hash.of_b58check pkh) + >>=? fun pkh -> return (base, pkh) let public_key ?interactive:_ uri = - parse (uri : pk_uri :> Uri.t) >>=? fun (base, pkh) -> + parse (uri : pk_uri :> Uri.t) + >>=? 
fun (base, pkh) -> RPC_client.call_service - ~logger: P.logger + ~logger:P.logger ?headers Media_type.all_media_types - ~base Signer_services.public_key ((), pkh) () () + ~base + Signer_services.public_key + ((), pkh) + () + () let neuterize uri = return (Client_keys.make_pk_uri (uri : sk_uri :> Uri.t)) let public_key_hash ?interactive uri = - public_key ?interactive uri >>=? fun pk -> - return (Signature.Public_key.hash pk, Some pk) + public_key ?interactive uri + >>=? fun pk -> return (Signature.Public_key.hash pk, Some pk) let get_signature base pkh msg = RPC_client.call_service - ~logger: P.logger + ~logger:P.logger ?headers Media_type.all_media_types - ~base Signer_services.authorized_keys () () () + ~base + Signer_services.authorized_keys + () + () + () >>=? function | Some authorized_keys -> P.authenticate authorized_keys - (Signer_messages.Sign.Request.to_sign ~pkh ~data:msg) >>=? fun signature -> - return_some signature - | None -> return_none + (Signer_messages.Sign.Request.to_sign ~pkh ~data:msg) + >>=? fun signature -> return_some signature + | None -> + return_none let sign ?watermark uri msg = - parse (uri : sk_uri :> Uri.t) >>=? fun (base, pkh) -> + parse (uri : sk_uri :> Uri.t) + >>=? fun (base, pkh) -> let msg = match watermark with - | None -> msg + | None -> + msg | Some watermark -> - MBytes.concat "" [ Signature.bytes_of_watermark watermark ; msg ] in - get_signature base pkh msg >>=? fun signature -> + MBytes.concat "" [Signature.bytes_of_watermark watermark; msg] + in + get_signature base pkh msg + >>=? fun signature -> RPC_client.call_service - ~logger: P.logger + ~logger:P.logger ?headers Media_type.all_media_types - ~base Signer_services.sign ((), pkh) + ~base + Signer_services.sign + ((), pkh) signature msg let deterministic_nonce uri msg = - parse (uri : sk_uri :> Uri.t) >>=? fun (base, pkh) -> - get_signature base pkh msg >>=? fun signature -> + parse (uri : sk_uri :> Uri.t) + >>=? fun (base, pkh) -> + get_signature base pkh msg + >>=? fun signature -> RPC_client.call_service - ~logger: P.logger + ~logger:P.logger ?headers Media_type.all_media_types - ~base Signer_services.deterministic_nonce ((), pkh) + ~base + Signer_services.deterministic_nonce + ((), pkh) signature msg let deterministic_nonce_hash uri msg = - parse (uri : sk_uri :> Uri.t) >>=? fun (base, pkh) -> - get_signature base pkh msg >>=? fun signature -> + parse (uri : sk_uri :> Uri.t) + >>=? fun (base, pkh) -> + get_signature base pkh msg + >>=? fun signature -> RPC_client.call_service - ~logger: P.logger + ~logger:P.logger ?headers Media_type.all_media_types - ~base Signer_services.deterministic_nonce_hash ((), pkh) + ~base + Signer_services.deterministic_nonce_hash + ((), pkh) signature msg let supports_deterministic_nonces uri = - parse (uri : sk_uri :> Uri.t) >>=? fun (base, pkh) -> + parse (uri : sk_uri :> Uri.t) + >>=? 
fun (base, pkh) -> RPC_client.call_service - ~logger: P.logger + ~logger:P.logger ?headers Media_type.all_media_types - ~base Signer_services.supports_deterministic_nonces ((), pkh) () () >>= function - | Ok ans -> return ans - | Error ((RPC_context.Not_found _) :: _) -> return_false - | Error _ as res -> Lwt.return res - - + ~base + Signer_services.supports_deterministic_nonces + ((), pkh) + () + () + >>= function + | Ok ans -> + return ans + | Error (RPC_context.Not_found _ :: _) -> + return_false + | Error _ as res -> + Lwt.return res end - let make_base host port = - Uri.make ~scheme ~host ~port () - + let make_base host port = Uri.make ~scheme ~host ~port () end diff --git a/src/lib_signer_backends/http_gen.mli b/src/lib_signer_backends/http_gen.mli index cb6253e4b7ac03eb9d009020b603e2a4d2bacfa2..446853be0535c5e0093b9858ad1ecd58a4b4fb28 100644 --- a/src/lib_signer_backends/http_gen.mli +++ b/src/lib_signer_backends/http_gen.mli @@ -23,14 +23,17 @@ (* *) (*****************************************************************************) -module Make(N : sig val scheme : string end) : sig +module Make (N : sig + val scheme : string +end) : sig + module Make (P : sig + val authenticate : + Signature.Public_key_hash.t list -> + MBytes.t -> + Signature.t tzresult Lwt.t - module Make(P : sig - val authenticate: Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t - val logger: RPC_client.logger - end) - : Client_keys.SIGNER - - val make_base: string -> int -> Uri.t + val logger : RPC_client.logger + end) : Client_keys.SIGNER + val make_base : string -> int -> Uri.t end diff --git a/src/lib_signer_backends/https.ml b/src/lib_signer_backends/https.ml index 0d27a06abfd04fd22b50726f791210c6e001d408..463b3951874b6f0edc1a3cf0b47c23394d92e2e5 100644 --- a/src/lib_signer_backends/https.ml +++ b/src/lib_signer_backends/https.ml @@ -23,4 +23,6 @@ (* *) (*****************************************************************************) -include Http_gen.Make(struct let scheme = "https" end) +include Http_gen.Make (struct + let scheme = "https" +end) diff --git a/src/lib_signer_backends/https.mli b/src/lib_signer_backends/https.mli index bd7ef10922daeff5d5a6c5ee8ca85afe4638b7a7..662629a7aff7673ebcb8f1ae05ca13d48bc899e4 100644 --- a/src/lib_signer_backends/https.mli +++ b/src/lib_signer_backends/https.mli @@ -23,10 +23,11 @@ (* *) (*****************************************************************************) -module Make(P : sig - val authenticate: Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t - val logger: RPC_client.logger - end) - : Client_keys.SIGNER +module Make (P : sig + val authenticate : + Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t -val make_base: string -> int -> Uri.t + val logger : RPC_client.logger +end) : Client_keys.SIGNER + +val make_base : string -> int -> Uri.t diff --git a/src/lib_signer_backends/ledger.ml b/src/lib_signer_backends/ledger.ml index ec09f9432f84909159492e715ea95937a8c5db4f..63d53b3030eabd00b2fe9135f196cdd5d4caf88f 100644 --- a/src/lib_signer_backends/ledger.ml +++ b/src/lib_signer_backends/ledger.ml @@ -25,53 +25,60 @@ open Client_keys -include Internal_event.Legacy_logging.Make(struct - let name = "client.signer.ledger" - end) +include Internal_event.Legacy_logging.Make (struct + let name = "client.signer.ledger" +end) module Bip32_path = struct let hard = Int32.logor 0x8000_0000l + let unhard = Int32.logand 0x7fff_ffffl + let is_hard n = Int32.logand 0x8000_0000l n <> 0l - let tezos_root = 
[hard 44l ; hard 1729l] + + let tezos_root = [hard 44l; hard 1729l] let node_of_string str = match Int32.of_string_opt str with - | Some node -> Some node - | None -> - match Int32.of_string_opt String.(sub str 0 ((length str) - 1)) with - | None -> None - | Some node -> Some (hard node) + | Some node -> + Some node + | None -> ( + match Int32.of_string_opt String.(sub str 0 (length str - 1)) with + | None -> + None + | Some node -> + Some (hard node) ) let node_of_string_exn str = match node_of_string str with | None -> invalid_arg (Printf.sprintf "node_of_string_exn: got %S" str) - | Some str -> str + | Some str -> + str let pp_node ppf node = match is_hard node with - | true -> Fmt.pf ppf "%ld'" (unhard node) - | false -> Fmt.pf ppf "%ld" node + | true -> + Fmt.pf ppf "%ld'" (unhard node) + | false -> + Fmt.pf ppf "%ld" node let string_of_node = Fmt.to_to_string pp_node let path_of_string_exn s = match String.split_on_char '/' s with - | [""] -> [] + | [""] -> + [] | nodes -> List.map node_of_string_exn nodes - let path_of_string s = - try Some (path_of_string_exn s) with _ -> None + let path_of_string s = try Some (path_of_string_exn s) with _ -> None - let pp_path = - Fmt.(list ~sep:(const char '/') pp_node) + let pp_path = Fmt.(list ~sep:(const char '/') pp_node) let string_of_path = Fmt.to_to_string pp_path end - type error += | LedgerError of Ledgerwallet.Transport.error | Ledger_deterministic_nonce_not_implemented @@ -80,17 +87,17 @@ let error_encoding = let open Data_encoding in conv (fun e -> Format.asprintf "%a" Ledgerwallet.Transport.pp_error e) - (fun _ ->invalid_arg "Ledger error is not deserializable") + (fun _ -> invalid_arg "Ledger error is not deserializable") (obj1 (req "ledger-error" string)) let () = register_error_kind `Permanent - ~id: "signer.ledger" - ~title: "Ledger error" - ~description: "Error when communication to a Ledger Nano S device" + ~id:"signer.ledger" + ~title:"Ledger error" + ~description:"Error when communication to a Ledger Nano S device" ~pp:(fun ppf e -> - Format.fprintf ppf "Ledger %a" Ledgerwallet.Transport.pp_error e) + Format.fprintf ppf "Ledger %a" Ledgerwallet.Transport.pp_error e) error_encoding (function LedgerError e -> Some e | _ -> None) (fun e -> LedgerError e) @@ -98,33 +105,36 @@ let () = let () = register_error_kind `Permanent - ~id: "signer.ledger.deterministic_nonce_not_implemented" - ~title: "Ledger deterministic_nonce(_hash) not implemented" - ~description: "The deterministic_nonce(_hash) functionality \ - is not implemented by the ledger" + ~id:"signer.ledger.deterministic_nonce_not_implemented" + ~title:"Ledger deterministic_nonce(_hash) not implemented" + ~description: + "The deterministic_nonce(_hash) functionality is not implemented by the \ + ledger" ~pp:(fun ppf () -> - Format.fprintf ppf "Asked the ledger to generate a deterministic nonce (hash), \ - but this functionality is not yet implemented") + Format.fprintf + ppf + "Asked the ledger to generate a deterministic nonce (hash), but this \ + functionality is not yet implemented") Data_encoding.unit - (function Ledger_deterministic_nonce_not_implemented -> Some () | _ -> None) + (function + | Ledger_deterministic_nonce_not_implemented -> Some () | _ -> None) (fun () -> Ledger_deterministic_nonce_not_implemented) (** Wrappers around Ledger APDUs. *) module Ledger_commands = struct - let wrap_ledger_cmd f = let buf = Buffer.create 100 in let pp = Format.make_formatter (fun s ofs lgth -> Buffer.add_substring buf s ofs lgth) - (fun () -> debug "%s%!" 
(Buffer.contents buf) ; Buffer.clear buf) in + (fun () -> + debug "%s%!" (Buffer.contents buf) ; + Buffer.clear buf) + in let res = f pp in - lwt_debug "%!" >>= fun () -> - match res with - | Error err -> - fail (LedgerError err) - | Ok v -> - return v + lwt_debug "%!" + >>= fun () -> + match res with Error err -> fail (LedgerError err) | Ok v -> return v let get_version ~device_info h = let buf = Buffer.create 100 in @@ -133,55 +143,65 @@ module Ledger_commands = struct debug "%s" (Buffer.contents buf) ; match version with | Error e -> - warn "WARNING:@ The device at [%s] is not a Tezos application@ \ - %a" + warn + "WARNING:@ The device at [%s] is not a Tezos application@ %a" device_info.Hidapi.path - Ledgerwallet.Transport.pp_error e ; + Ledgerwallet.Transport.pp_error + e ; return_none | Ok version -> - (if (version.major, version.minor) < (1, 4) - then - failwith - "Version %a of the ledger apps is not supported by this client" - Ledgerwallet_tezos.Version.pp version - else return_unit) + ( if (version.major, version.minor) < (1, 4) then + failwith + "Version %a of the ledger apps is not supported by this client" + Ledgerwallet_tezos.Version.pp + version + else return_unit ) >>=? fun () -> - wrap_ledger_cmd (fun pp -> - Ledgerwallet_tezos.get_git_commit ~pp h) >>=? fun git_commit -> - log_info "Found a %a application at [%s] (git-description: %S)" - Ledgerwallet_tezos.Version.pp version device_info.path git_commit ; + wrap_ledger_cmd (fun pp -> Ledgerwallet_tezos.get_git_commit ~pp h) + >>=? fun git_commit -> + log_info + "Found a %a application at [%s] (git-description: %S)" + Ledgerwallet_tezos.Version.pp + version + device_info.path + git_commit ; let cleaned_up = (* The ledger sends a NUL-terminated C-String: *) - if git_commit.[String.length git_commit - 1] = '\x00' - then String.sub git_commit 0 (String.length git_commit - 1) - else git_commit in + if git_commit.[String.length git_commit - 1] = '\x00' then + String.sub git_commit 0 (String.length git_commit - 1) + else git_commit + in return_some (version, cleaned_up) let secp256k1_ctx = Libsecp256k1.External.Context.create ~sign:false ~verify:false () - let public_key_returning_instruction which - ?(prompt=false) - hidapi curve path = + let public_key_returning_instruction which ?(prompt = false) hidapi curve + path = let path = Bip32_path.tezos_root @ path in - begin match which with - | `Get_public_key -> wrap_ledger_cmd begin fun pp -> - Ledgerwallet_tezos.get_public_key ~prompt ~pp hidapi curve path - end - | `Authorize_baking -> - wrap_ledger_cmd begin fun pp -> - Ledgerwallet_tezos.authorize_baking ~pp hidapi curve path - end - | `Setup (main_chain_id, main_hwm, test_hwm) -> - wrap_ledger_cmd begin fun pp -> - Ledgerwallet_tezos.setup_baking ~pp hidapi curve path - ~main_chain_id ~main_hwm ~test_hwm - end - end >>|? fun pk -> + ( match which with + | `Get_public_key -> + wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.get_public_key ~prompt ~pp hidapi curve path) + | `Authorize_baking -> + wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.authorize_baking ~pp hidapi curve path) + | `Setup (main_chain_id, main_hwm, test_hwm) -> + wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.setup_baking + ~pp + hidapi + curve + path + ~main_chain_id + ~main_hwm + ~test_hwm) ) + >>|? fun pk -> let pk = Cstruct.to_bigarray pk in match curve with | Ledgerwallet_tezos.Ed25519 -> - MBytes.set_int8 pk 0 0 ; (* hackish, but works. *) + MBytes.set_int8 pk 0 0 ; + (* hackish, but works. 
*) Data_encoding.Binary.of_bytes_exn Signature.Public_key.encoding pk | Secp256k1 -> let open Libsecp256k1.External in @@ -190,73 +210,82 @@ module Ledger_commands = struct MBytes.set_int8 buf 0 1 ; let _nb_written = Key.write secp256k1_ctx ~pos:1 buf pk in Data_encoding.Binary.of_bytes_exn Signature.Public_key.encoding buf - | Secp256r1 -> + | Secp256r1 -> ( let open Uecc in let pklen = compressed_size secp256r1 in let buf = MBytes.create (pklen + 1) in match pk_of_bytes secp256r1 pk with | None -> - Pervasives.failwith "Impossible to read P256 public key from Ledger" + Pervasives.failwith + "Impossible to read P256 public key from Ledger" | Some pk -> MBytes.set_int8 buf 0 2 ; - let _nb_written = write_key ~compress:true (MBytes.sub buf 1 pklen) pk in + let _nb_written = + write_key ~compress:true (MBytes.sub buf 1 pklen) pk + in Data_encoding.Binary.of_bytes_exn Signature.Public_key.encoding buf + ) let get_public_key = public_key_returning_instruction `Get_public_key let pkh_of_pk = Signature.Public_key.hash - let public_key - ?(interactive : Client_context.io_wallet option) hid curve path = - begin match interactive with - | Some cctxt -> - get_public_key ~prompt:false hid curve path >>=? fun pk -> - let pkh = pkh_of_pk pk in - cctxt#message - "Please validate@ (and write down)@ the public key hash\ - @ displayed@ on the Ledger,@ it should be equal@ to `%a`:" - Signature.Public_key_hash.pp pkh >>= fun () -> - get_public_key ~prompt:true hid curve path - | None -> - get_public_key ~prompt:false hid curve path - end + let public_key ?(interactive : Client_context.io_wallet option) hid curve + path = + match interactive with + | Some cctxt -> + get_public_key ~prompt:false hid curve path + >>=? fun pk -> + let pkh = pkh_of_pk pk in + cctxt#message + "Please validate@ (and write down)@ the public key hash@ displayed@ \ + on the Ledger,@ it should be equal@ to `%a`:" + Signature.Public_key_hash.pp + pkh + >>= fun () -> get_public_key ~prompt:true hid curve path + | None -> + get_public_key ~prompt:false hid curve path let public_key_hash ?interactive hid curve path = - public_key ?interactive hid curve path >>=? fun pk -> - return (pkh_of_pk pk, pk) + public_key ?interactive hid curve path + >>=? fun pk -> return (pkh_of_pk pk, pk) - let get_authorized_path hid version = + let get_authorized_path hid version = let open Ledgerwallet_tezos.Version in if version.major < 2 then wrap_ledger_cmd (fun pp -> Ledgerwallet_tezos.get_authorized_key ~pp hid) >>|? 
fun path -> `Legacy_path path else - wrap_ledger_cmd - (fun pp -> Ledgerwallet_tezos.get_authorized_path_and_curve ~pp hid) + wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.get_authorized_path_and_curve ~pp hid) >>= function - | Error (LedgerError - (AppError - {status = Ledgerwallet.Transport.Status.Referenced_data_not_found; _}) :: _) -> + | Error + (LedgerError + (AppError + { status = + Ledgerwallet.Transport.Status.Referenced_data_not_found; + _ }) + :: _) -> return `No_baking_authorized - | Error _ as e -> Lwt.return e - | Ok (path, curve) -> return (`Path_curve (path, curve)) + | Error _ as e -> + Lwt.return e + | Ok (path, curve) -> + return (`Path_curve (path, curve)) let sign ?watermark hid curve path base_msg = let msg = - Option.unopt_map watermark - ~default:base_msg ~f:(fun watermark -> - MBytes.concat "" - [Signature.bytes_of_watermark watermark ; base_msg]) in + Option.unopt_map watermark ~default:base_msg ~f:(fun watermark -> + MBytes.concat "" [Signature.bytes_of_watermark watermark; base_msg]) + in let path = Bip32_path.tezos_root @ path in - wrap_ledger_cmd begin fun pp -> - (* if msg_len > 1024 && (major, minor, patch) < (1, 1, 0) then - * Ledgerwallet_tezos.sign ~hash_on_ledger:false - * ~pp ledger curve path - * (Cstruct.of_bigarray (Blake2B.(to_bytes (hash_bytes [ msg ])))) - * else *) - Ledgerwallet_tezos.sign - ~pp hid curve path (Cstruct.of_bigarray msg) - end >>=? fun signature -> + wrap_ledger_cmd (fun pp -> + (* if msg_len > 1024 && (major, minor, patch) < (1, 1, 0) then + * Ledgerwallet_tezos.sign ~hash_on_ledger:false + * ~pp ledger curve path + * (Cstruct.of_bigarray (Blake2B.(to_bytes (hash_bytes [ msg ])))) + * else *) + Ledgerwallet_tezos.sign ~pp hid curve path (Cstruct.of_bigarray msg)) + >>=? fun signature -> match curve with | Ed25519 -> let signature = Cstruct.to_bigarray signature in @@ -293,25 +322,24 @@ module Ledger_id = struct type t = Animals of Ledger_names.t | Pkh of Signature.public_key_hash let animals_of_pkh pkh = - pkh |> Signature.Public_key_hash.to_string |> - Ledger_names.crouching_tiger + pkh |> Signature.Public_key_hash.to_string |> Ledger_names.crouching_tiger let curve = Ledgerwallet_tezos.Ed25519 let get hidapi = - Ledger_commands.get_public_key hidapi curve [] >>=? fun pk -> + Ledger_commands.get_public_key hidapi curve [] + >>=? fun pk -> let pkh = Signature.Public_key.hash pk in let animals = animals_of_pkh pkh in return (Animals animals) let pp ppf = function - | Animals a -> Ledger_names.pp ppf a - | Pkh pkh -> Signature.Public_key_hash.pp ppf pkh + | Animals a -> + Ledger_names.pp ppf a + | Pkh pkh -> + Signature.Public_key_hash.pp ppf pkh - let to_animals = - function - | Animals a -> a - | Pkh pkh -> animals_of_pkh pkh + let to_animals = function Animals a -> a | Pkh pkh -> animals_of_pkh pkh let equal a b = to_animals a = to_animals b end @@ -320,157 +348,175 @@ end [ledger + curve + derivation-path]. *) module Ledger_account = struct type t = { - ledger : Ledger_id.t ; - curve : Ledgerwallet_tezos.curve ; - path: int32 list + ledger : Ledger_id.t; + curve : Ledgerwallet_tezos.curve; + path : int32 list } end (** {!Leder_uri.t} represents a parsed ["ledger://..."] URI which may refer to a {!Ledger_id.t} or a full blown {!Ledger_account.t}. 
*) module Ledger_uri = struct - - type t = [ `Ledger of Ledger_id.t | `Ledger_account of Ledger_account.t ] + type t = [`Ledger of Ledger_id.t | `Ledger_account of Ledger_account.t] let int32_of_path_element_exn ?(allow_weak = false) x = let failf ppf = Printf.ksprintf Pervasives.failwith ppf in let len = String.length x in - match String.get x (len - 1) with - | exception _ -> failf "Empty path element" - | '\'' -> + match x.[len - 1] with + | exception _ -> + failf "Empty path element" + | '\'' -> ( let intpart = String.sub x 0 (len - 1) in - begin match Int32.of_string_opt intpart with - | Some i -> Bip32_path.hard i - | None -> failf "Path is not an integer: %S" intpart - end - | _ when allow_weak -> - begin match Int32.of_string_opt x with - | Some i -> i - | None -> failf "Path is not a non-hardened integer: %S" x - end + match Int32.of_string_opt intpart with + | Some i -> + Bip32_path.hard i + | None -> + failf "Path is not an integer: %S" intpart ) + | _ when allow_weak -> ( + match Int32.of_string_opt x with + | Some i -> + i + | None -> + failf "Path is not a non-hardened integer: %S" x ) | _ -> (* Future derivation schemes will support weak paths, not for now. *) failf "Non-hardened paths are not allowed (%S)" x let parse_animals animals = match String.split '-' animals with - | [c; t; h; d] -> Some { Ledger_names.c ; t ; h ; d } - | _ -> None + | [c; t; h; d] -> + Some {Ledger_names.c; t; h; d} + | _ -> + None let parse ?allow_weak uri : t tzresult Lwt.t = let host = Uri.host uri in - begin match Option.apply host - ~f:Signature.Public_key_hash.of_b58check_opt with - | Some pkh -> return (Ledger_id.Pkh pkh) - | None -> - (match Option.apply host ~f:parse_animals with - | Some animals -> return (Ledger_id.Animals animals) - | None -> - failwith "Cannot parse host of URI: %s" (Uri.to_string uri)) - end >>=? fun ledger -> + ( match Option.apply host ~f:Signature.Public_key_hash.of_b58check_opt with + | Some pkh -> + return (Ledger_id.Pkh pkh) + | None -> ( + match Option.apply host ~f:parse_animals with + | Some animals -> + return (Ledger_id.Animals animals) + | None -> + failwith "Cannot parse host of URI: %s" (Uri.to_string uri) ) ) + >>=? fun ledger -> let components = String.split '/' (Uri.path uri) in - begin match components with - | s :: tl -> - let curve, more_path = - match Ledgerwallet_tezos.curve_of_string s with - | Some curve -> curve, tl - | None -> Ledger_id.curve, s :: tl in - begin - try return (List.map - (int32_of_path_element_exn ?allow_weak) - more_path) - with Failure s -> - failwith "Failed to parse Curve/BIP32 path from %s (%s): %s" - (Uri.path uri) (Uri.to_string uri) s - end - >>=? fun bip32 -> - return (`Ledger_account Ledger_account.{ledger; curve; path = bip32}) - | [] -> - return (`Ledger ledger) - end + match components with + | s :: tl -> + let (curve, more_path) = + match Ledgerwallet_tezos.curve_of_string s with + | Some curve -> + (curve, tl) + | None -> + (Ledger_id.curve, s :: tl) + in + ( try + return (List.map (int32_of_path_element_exn ?allow_weak) more_path) + with Failure s -> + failwith + "Failed to parse Curve/BIP32 path from %s (%s): %s" + (Uri.path uri) + (Uri.to_string uri) + s ) + >>=? fun bip32 -> + return (`Ledger_account Ledger_account.{ledger; curve; path = bip32}) + | [] -> + return (`Ledger ledger) let ledger_uri_or_alias_param next = let name = "account-alias-or-ledger-uri" in let desc = - "An imported ledger alias or a ledger URI \ - (e.g. \"ledger://animal/curve/path\")." 
in + "An imported ledger alias or a ledger URI (e.g. \ + \"ledger://animal/curve/path\")." + in let open Clic in - param ~name ~desc - (parameter - (fun cctxt str -> - Public_key.find_opt cctxt str >>=? begin function - | Some ((x : pk_uri), _) -> return (x :> Uri.t) - | None -> - try return (Uri.of_string str) - with e -> - failwith "Error while parsing URI: %s" - (Printexc.to_string e) - end - >>=? fun uri -> - parse uri)) + param + ~name + ~desc + (parameter (fun cctxt str -> + Public_key.find_opt cctxt str + >>=? (function + | Some ((x : pk_uri), _) -> + return (x :> Uri.t) + | None -> ( + try return (Uri.of_string str) + with e -> + failwith + "Error while parsing URI: %s" + (Printexc.to_string e) )) + >>=? fun uri -> parse uri)) next - let pp: _ -> t -> unit = fun ppf -> Format.(function - | `Ledger lid -> fprintf ppf "ledger://%a" Ledger_id.pp lid - | `Ledger_account {Ledger_account.ledger ; curve ; path } -> - fprintf ppf "ledger://%a/%a/%a" - Ledger_id.pp ledger - Ledgerwallet_tezos.pp_curve curve - Bip32_path.pp_path path) + let pp : _ -> t -> unit = + fun ppf -> + Format.( + function + | `Ledger lid -> + fprintf ppf "ledger://%a" Ledger_id.pp lid + | `Ledger_account {Ledger_account.ledger; curve; path} -> + fprintf + ppf + "ledger://%a/%a/%a" + Ledger_id.pp + ledger + Ledgerwallet_tezos.pp_curve + curve + Bip32_path.pp_path + path) let if_matches (meta_uri : t) ledger_id cont = match meta_uri with | `Ledger l -> if Ledger_id.equal l ledger_id then cont () else return_none - | `Ledger_account { Ledger_account. ledger ; _ } -> + | `Ledger_account {Ledger_account.ledger; _} -> if Ledger_id.equal ledger ledger_id then cont () else return_none - let full_account (ledger_uri : t) = - begin match ledger_uri with - | `Ledger_account acc -> return acc - | `Ledger ledger_id -> - failwith - "Insufficient information: \ - you need to provide a curve & BIP32 path (%a)." - Ledger_id.pp ledger_id - end - + match ledger_uri with + | `Ledger_account acc -> + return acc + | `Ledger ledger_id -> + failwith + "Insufficient information: you need to provide a curve & BIP32 path \ + (%a)." + Ledger_id.pp + ledger_id end (** Filters allow early dismissal of HID devices/ledgers which searching for a ledger. *) module Filter = struct - type version_filter = - Ledgerwallet_tezos.Version.t * string -> bool - type t = [ - | `None - | `Hid_path of string - | `Version of string * version_filter - ] + type version_filter = Ledgerwallet_tezos.Version.t * string -> bool + + type t = [`None | `Hid_path of string | `Version of string * version_filter] let version_matches (t : t) version_commit = - match t with - | `Version (_, f) -> f version_commit - | _ -> true + match t with `Version (_, f) -> f version_commit | _ -> true let is_app : _ -> _ -> t = - fun msg app -> - `Version (msg, fun ({Ledgerwallet_tezos.Version.app_class ; _} ,_) -> - app = app_class) + fun msg app -> + `Version + ( msg, + fun ({Ledgerwallet_tezos.Version.app_class; _}, _) -> app = app_class + ) let is_baking = is_app "App = Baking" Ledgerwallet_tezos.Version.TezBake let pp ppf (f : t) = let open Format in match f with - | `None -> fprintf ppf "None" - | `Hid_path s -> fprintf ppf "HID-path: %s" s - | `Version (s, _) -> fprintf ppf "%s" s + | `None -> + fprintf ppf "None" + | `Hid_path s -> + fprintf ppf "HID-path: %s" s + | `Version (s, _) -> + fprintf ppf "%s" s end (* Those are always valid on Ledger Nano S with latest firmware. 
*) let vendor_id = 0x2c97 + let product_id = 0x0001 let use_ledger ?(filter : Filter.t = `None) f = @@ -486,86 +532,83 @@ let use_ledger ?(filter : Filter.t = `None) f = https://github.com/LedgerHQ/ledgerjs/commit/333ade0d55dc9c59bcc4b451cf7c976e78629681). *) if - (device_info.Hidapi.interface_number = 0) - || - (device_info.Hidapi.interface_number = -1 - && device_info.Hidapi.usage_page = 0xffa0) + device_info.Hidapi.interface_number = 0 + || device_info.Hidapi.interface_number = -1 + && device_info.Hidapi.usage_page = 0xffa0 then - begin match filter with - | `Hid_path hp when device_info.path <> hp -> return_none - | _ -> - begin match Hidapi.(open_path device_info.path) with - | None -> return_none - | Some h -> - Lwt.finalize - (fun () -> - Ledger_commands.get_version ~device_info h - >>=? function - | Some version_git when - (Filter.version_matches filter version_git) -> - Ledger_id.get h >>=? fun ledger_id -> - f h version_git device_info ledger_id - | None | Some _ -> return_none) - (fun () -> Hidapi.close h ; Lwt.return_unit) - end - end - else - return_none + match filter with + | `Hid_path hp when device_info.path <> hp -> + return_none + | _ -> ( + match Hidapi.(open_path device_info.path) with + | None -> + return_none + | Some h -> + Lwt.finalize + (fun () -> + Ledger_commands.get_version ~device_info h + >>=? function + | Some version_git + when Filter.version_matches filter version_git -> + Ledger_id.get h + >>=? fun ledger_id -> f h version_git device_info ledger_id + | None | Some _ -> + return_none) + (fun () -> Hidapi.close h ; Lwt.return_unit) ) + else return_none in - let rec go = - function - | [] -> return_none - | h :: t -> + let rec go = function + | [] -> + return_none + | h :: t -> ( process_device h f - >>=? function - | Some x -> return_some x - | None -> go t + >>=? function Some x -> return_some x | None -> go t ) in go ledgers let use_ledger_or_fail ~ledger_uri ?filter ?msg f = - use_ledger ?filter - (fun hidapi (version, git_commit) device_info ledger_id -> - Ledger_uri.if_matches ledger_uri ledger_id (fun () -> - f hidapi (version, git_commit) device_info ledger_id)) + use_ledger ?filter (fun hidapi (version, git_commit) device_info ledger_id -> + Ledger_uri.if_matches ledger_uri ledger_id (fun () -> + f hidapi (version, git_commit) device_info ledger_id)) >>=? function - | Some o -> return o + | Some o -> + return o | None -> - failwith "%sFound no ledger corresponding to %a%t." + failwith + "%sFound no ledger corresponding to %a%t." (Option.unopt_map ~default:"" ~f:(Printf.sprintf "%s: ") msg) - Ledger_uri.pp ledger_uri + Ledger_uri.pp + ledger_uri (fun ppf -> - match filter with - | Some f -> Format.fprintf ppf " with filter \"%a\"" Filter.pp f - | None -> ()) - + match filter with + | Some f -> + Format.fprintf ppf " with filter \"%a\"" Filter.pp f + | None -> + ()) (** A global {!Hashtbl.t} which allows us to avoid calling {!Signer_implementation.get_public_key} too often. 
*) module Global_cache : sig val record : - pk_uri -> - pk:Signature.public_key -> pkh:Signature.public_key_hash -> unit - val get : - pk_uri -> (Signature.public_key_hash * Signature.public_key) option + pk_uri -> pk:Signature.public_key -> pkh:Signature.public_key_hash -> unit + + val get : pk_uri -> (Signature.public_key_hash * Signature.public_key) option end = struct let _cache : - (pk_uri, Signature.Public_key_hash.t * Signature.Public_key.t) Hashtbl.t = + (pk_uri, Signature.Public_key_hash.t * Signature.Public_key.t) Hashtbl.t + = Hashtbl.create 13 - let record pk_uri ~pk ~pkh = - Hashtbl.replace _cache pk_uri (pkh, pk) + let record pk_uri ~pk ~pkh = Hashtbl.replace _cache pk_uri (pkh, pk) - let get pk_uri = - Hashtbl.find_opt _cache pk_uri + let get pk_uri = Hashtbl.find_opt _cache pk_uri end (** The implementation of the “signer-plugin.” *) module Signer_implementation : Client_keys.SIGNER = struct let scheme = "ledger" - let title = - "Built-in signer using a Ledger Nano S." + let title = "Built-in signer using a Ledger Nano S." let description = Printf.sprintf @@ -574,85 +617,100 @@ module Signer_implementation : Client_keys.SIGNER = struct where:\n\ \ - <animals> is the identifier of the ledger of the form \ 'crouching-tiger-hidden-dragon' and can be obtained with the command \ - `tezos-client list connected ledgers` (which also provides full examples).\n\ + `tezos-client list connected ledgers` (which also provides full \ + examples).\n\ - <curve> is the signing curve, e.g. `ed1551`\n\ - - <path> is a BIP32 path anchored at \ - m/%s. The ledger does not yet support non-hardened paths, so each \ - node of the path must be hardened." + - <path> is a BIP32 path anchored at m/%s. The ledger does not yet \ + support non-hardened paths, so each node of the path must be hardened." Bip32_path.(string_of_path tezos_root) let neuterize (sk : sk_uri) = return (make_pk_uri (sk :> Uri.t)) let pkh_of_pk = Signature.Public_key.hash - let public_key - ?(interactive : Client_context.io_wallet option) (pk_uri : pk_uri) = + let public_key ?(interactive : Client_context.io_wallet option) + (pk_uri : pk_uri) = match Global_cache.get pk_uri with - | Some (_, pk) -> return pk - | None -> - begin - Ledger_uri.parse (pk_uri :> Uri.t) >>=? fun ledger_uri -> - Ledger_uri.full_account ledger_uri >>=? fun { curve ; path ; _ } -> - use_ledger_or_fail ~ledger_uri - (fun hidapi (_version, _git_commit) _device_info _ledger_id -> - Ledger_commands.public_key ?interactive hidapi curve path - >>=? fun pk -> - let pkh = pkh_of_pk pk in - Global_cache.record pk_uri ~pkh ~pk ; - return_some pk) - end >>= function - | Error err -> failwith "%a" pp_print_error err - | Ok v -> return v + | Some (_, pk) -> + return pk + | None -> ( + Ledger_uri.parse (pk_uri :> Uri.t) + >>=? (fun ledger_uri -> + Ledger_uri.full_account ledger_uri + >>=? fun {curve; path; _} -> + use_ledger_or_fail + ~ledger_uri + (fun hidapi (_version, _git_commit) _device_info _ledger_id -> + Ledger_commands.public_key ?interactive hidapi curve path + >>=? fun pk -> + let pkh = pkh_of_pk pk in + Global_cache.record pk_uri ~pkh ~pk ; + return_some pk)) + >>= function + | Error err -> failwith "%a" pp_print_error err | Ok v -> return v ) let public_key_hash ?interactive pk_uri = match Global_cache.get pk_uri with - | Some (pkh, pk) -> return (pkh, Some pk) + | Some (pkh, pk) -> + return (pkh, Some pk) | None -> - public_key ?interactive pk_uri >>=? fun pk -> - return (pkh_of_pk pk, Some pk) + public_key ?interactive pk_uri + >>=? 
fun pk -> return (pkh_of_pk pk, Some pk) let sign ?watermark (sk_uri : sk_uri) msg = - Ledger_uri.parse (sk_uri :> Uri.t) >>=? fun ledger_uri -> - Ledger_uri.full_account ledger_uri >>=? fun { curve ; path ; _ } -> - use_ledger_or_fail ~ledger_uri + Ledger_uri.parse (sk_uri :> Uri.t) + >>=? fun ledger_uri -> + Ledger_uri.full_account ledger_uri + >>=? fun {curve; path; _} -> + use_ledger_or_fail + ~ledger_uri (fun hidapi (_version, _git_commit) _device_info _ledger_id -> - Ledger_commands.sign ?watermark hidapi curve path msg - >>=? fun bytes -> - return_some bytes) + Ledger_commands.sign ?watermark hidapi curve path msg + >>=? fun bytes -> return_some bytes) let deterministic_nonce _ _ = fail Ledger_deterministic_nonce_not_implemented - let deterministic_nonce_hash _ _ = fail Ledger_deterministic_nonce_not_implemented + + let deterministic_nonce_hash _ _ = + fail Ledger_deterministic_nonce_not_implemented + let supports_deterministic_nonces _ = return_false end (* The Ledger uses a special value 0x00000000 for the “any” chain-id: *) let pp_ledger_chain_id fmt s = match s with - | "\x00\x00\x00\x00" -> Format.fprintf fmt "'Unspecified'" - | other -> Format.fprintf fmt "%a" Chain_id.pp (Chain_id.of_string_exn other) + | "\x00\x00\x00\x00" -> + Format.fprintf fmt "'Unspecified'" + | other -> + Format.fprintf fmt "%a" Chain_id.pp (Chain_id.of_string_exn other) (** Commands for both ledger applications. *) -let generic_commands group = Clic.[ - command ~group - ~desc: "List supported Ledger Nano S devices connected." - no_options - (fixed [ "list" ; "connected" ; "ledgers" ]) - (fun () (cctxt : Client_context.full) -> - use_ledger - (fun _hidapi (version, git_commit) device_info ledger_id -> +let generic_commands group = + Clic. + [ command + ~group + ~desc:"List supported Ledger Nano S devices connected." + no_options + (fixed ["list"; "connected"; "ledgers"]) + (fun () (cctxt : Client_context.full) -> + use_ledger + (fun _hidapi (version, git_commit) device_info ledger_id -> let open Hidapi in - cctxt#message "%t" - Format.(fun ppf -> + cctxt#message + "%t" + Format.( + fun ppf -> let intro = asprintf - "Found a %a (git-description: %S) application \ - running on %s %s at [%s]." - Ledgerwallet_tezos.Version.pp version + "Found a %a (git-description: %S) application running \ + on %s %s at [%s]." 
+ Ledgerwallet_tezos.Version.pp + version git_commit - (device_info.manufacturer_string - |> Option.unopt ~default:"NO-MANUFACTURER") - (device_info.product_string - |> Option.unopt ~default:"NO-PRODUCT") + ( device_info.manufacturer_string + |> Option.unopt ~default:"NO-MANUFACTURER" ) + ( device_info.product_string + |> Option.unopt ~default:"NO-PRODUCT" ) device_info.path in pp_open_vbox ppf 0 ; @@ -663,314 +721,382 @@ let generic_commands group = Clic.[ pp_print_cut ppf () ; pp_print_cut ppf () ; pp_open_hovbox ppf 0 ; - pp_print_text ppf - "To use keys at BIP32 path m/44'/1729'/0'/0' \ - (default Tezos key path), use one of:" ; + pp_print_text + ppf + "To use keys at BIP32 path m/44'/1729'/0'/0' (default \ + Tezos key path), use one of:" ; pp_close_box ppf () ; pp_print_cut ppf () ; - List.iter (fun curve -> - fprintf ppf - " tezos-client import secret key \ - ledger_%s \"ledger://%a/%a/0'/0'\"" - (Sys.getenv_opt "USER" |> Option.unopt ~default:"user") - Ledger_id.pp ledger_id - Ledgerwallet_tezos.pp_curve curve ; + List.iter + (fun curve -> + fprintf + ppf + " tezos-client import secret key ledger_%s \ + \"ledger://%a/%a/0'/0'\"" + ( Sys.getenv_opt "USER" + |> Option.unopt ~default:"user" ) + Ledger_id.pp + ledger_id + Ledgerwallet_tezos.pp_curve + curve ; pp_print_cut ppf ()) - [ Ed25519 ; Secp256k1 ; Secp256r1 ] ; + [Ed25519; Secp256k1; Secp256r1] ; pp_close_box ppf () ; - pp_print_newline ppf () ) - >>= fun () -> - return_none) - >>=? fun _ -> - return_unit) ; - Clic.command ~group - ~desc: "Display version/public-key/address information for a Ledger URI" - (args1 (switch ~doc:"Test signing operation" ~long:"test-sign" ())) - (prefixes [ "show" ; "ledger" ] - @@ Ledger_uri.ledger_uri_or_alias_param - @@ stop) - (fun test_sign ledger_uri (cctxt : Client_context.full) -> - use_ledger_or_fail - ~ledger_uri - (fun hidapi (version, git_commit) device_info _ledger_id -> - cctxt#message "Found ledger corresponding to %a:" - Ledger_uri.pp ledger_uri + pp_print_newline ppf ()) + >>= fun () -> return_none) + >>=? fun _ -> return_unit); + Clic.command + ~group + ~desc:"Display version/public-key/address information for a Ledger URI" + (args1 (switch ~doc:"Test signing operation" ~long:"test-sign" ())) + ( prefixes ["show"; "ledger"] + @@ Ledger_uri.ledger_uri_or_alias_param @@ stop ) + (fun test_sign ledger_uri (cctxt : Client_context.full) -> + use_ledger_or_fail + ~ledger_uri + (fun hidapi (version, git_commit) device_info _ledger_id -> + cctxt#message + "Found ledger corresponding to %a:" + Ledger_uri.pp + ledger_uri >>= fun () -> - cctxt#message "* Manufacturer: %s" + cctxt#message + "* Manufacturer: %s" (Option.unopt device_info.manufacturer_string ~default:"NONE") >>= fun () -> - cctxt#message "* Product: %s" + cctxt#message + "* Product: %s" (Option.unopt device_info.product_string ~default:"NONE") >>= fun () -> - cctxt#message "* Application: %a (git-description: %S)" - Ledgerwallet_tezos.Version.pp version git_commit + cctxt#message + "* Application: %a (git-description: %S)" + Ledgerwallet_tezos.Version.pp + version + git_commit >>= fun () -> - begin match ledger_uri with - | `Ledger_account { curve ; path ; _ } -> - cctxt#message "* Curve: `%a`" - Ledgerwallet_tezos.pp_curve curve - >>= fun () -> - let full_path = Bip32_path.tezos_root @ path in - cctxt#message "* Path: `%s` [%s]" - (Bip32_path.string_of_path full_path) - (String.concat "; " - (List.map (Printf.sprintf "0x%lX") full_path)) - >>= fun () -> - Ledger_commands.public_key_hash hidapi curve path - >>=? 
fun (pkh, pk) -> - cctxt#message "* Public Key: %a" - Signature.Public_key.pp pk >>= fun () -> - cctxt#message "* Public Key Hash: %a@\n" - Signature.Public_key_hash.pp pkh >>= fun () -> - begin match test_sign, version.app_class with - | true, Tezos -> - let pkh_bytes = - Signature.Public_key_hash.to_bytes pkh in - (* Signing requires validation on the device. *) + ( match ledger_uri with + | `Ledger_account {curve; path; _} -> ( + cctxt#message + "* Curve: `%a`" + Ledgerwallet_tezos.pp_curve + curve + >>= fun () -> + let full_path = Bip32_path.tezos_root @ path in + cctxt#message + "* Path: `%s` [%s]" + (Bip32_path.string_of_path full_path) + (String.concat + "; " + (List.map (Printf.sprintf "0x%lX") full_path)) + >>= fun () -> + Ledger_commands.public_key_hash hidapi curve path + >>=? fun (pkh, pk) -> + cctxt#message "* Public Key: %a" Signature.Public_key.pp pk + >>= fun () -> + cctxt#message + "* Public Key Hash: %a@\n" + Signature.Public_key_hash.pp + pkh + >>= fun () -> + match (test_sign, version.app_class) with + | (true, Tezos) -> ( + let pkh_bytes = Signature.Public_key_hash.to_bytes pkh in + (* Signing requires validation on the device. *) + cctxt#message + "@[Attempting a signature@ (of `%a`),@ please@ \ + validate on@ the ledger.@]" + MBytes.pp_hex + pkh_bytes + >>= fun () -> + Ledger_commands.sign + ~watermark:Generic_operation + hidapi + curve + path + pkh_bytes + >>=? fun signature -> + match + Signature.check + ~watermark:Generic_operation + pk + signature + pkh_bytes + with + | false -> + failwith + "Fatal: Ledger cannot sign with %a" + Signature.Public_key_hash.pp + pkh + | true -> cctxt#message - "@[Attempting a signature@ (of `%a`),@ please@ \ - validate on@ the ledger.@]" - MBytes.pp_hex pkh_bytes - >>= fun () -> - Ledger_commands.sign - ~watermark:Generic_operation - hidapi curve path pkh_bytes >>=? fun signature -> - begin match - Signature.check ~watermark:Generic_operation - pk signature pkh_bytes - with - | false -> - failwith "Fatal: Ledger cannot sign with %a" - Signature.Public_key_hash.pp pkh - | true -> - cctxt#message "Tezos Wallet successfully signed:@ %a." - Signature.pp signature - >>= fun () -> - return_unit - end - | true, TezBake -> - failwith "Option --test-sign only works \ - for the Tezos Wallet app." - | false, _ -> - return_unit - end - | `Ledger _ when test_sign -> - failwith "Option --test-sign only works with a full \ - ledger URI/account (with curve/path)." - | `Ledger _ -> - cctxt#message "* This is just a ledger URI." - >>= fun () -> return_unit - end - >>=? fun () -> - return_some ())) ; - ] + "Tezos Wallet successfully signed:@ %a." + Signature.pp + signature + >>= fun () -> return_unit ) + | (true, TezBake) -> + failwith + "Option --test-sign only works for the Tezos Wallet \ + app." + | (false, _) -> + return_unit ) + | `Ledger _ when test_sign -> + failwith + "Option --test-sign only works with a full ledger \ + URI/account (with curve/path)." + | `Ledger _ -> + cctxt#message "* This is just a ledger URI." + >>= fun () -> return_unit ) + >>=? fun () -> return_some ())) ] (** Commands specific to the Baking app minus the high-water-mark ones which get a specific treatment in {!high_water_mark_commands}. 
*) -let baking_commands group = Clic.[ - Clic.command ~group - ~desc: "Query the path of the authorized key" - no_options - (prefixes [ "get" ; "ledger" ; "authorized" ; "path" ; "for" ] - @@ Ledger_uri.ledger_uri_or_alias_param - @@ stop) - (fun () ledger_uri (cctxt : Client_context.full) -> - use_ledger_or_fail ~ledger_uri ~filter:Filter.is_baking - (fun hidapi (version, _git_commit) _device_info _ledger_id -> +let baking_commands group = + Clic. + [ Clic.command + ~group + ~desc:"Query the path of the authorized key" + no_options + ( prefixes ["get"; "ledger"; "authorized"; "path"; "for"] + @@ Ledger_uri.ledger_uri_or_alias_param @@ stop ) + (fun () ledger_uri (cctxt : Client_context.full) -> + use_ledger_or_fail + ~ledger_uri + ~filter:Filter.is_baking + (fun hidapi (version, _git_commit) _device_info _ledger_id -> Ledger_commands.get_authorized_path hidapi version >>=? fun authorized -> - begin match authorized with - | `Legacy_path p -> - cctxt#message - "@[<v 0>Authorized baking path (Legacy < 2.x.y): %a@]" - Bip32_path.pp_path p >>= fun () -> - return_some () - | `No_baking_authorized -> - cctxt#message "No baking key authorized at all." - >>= fun () -> - return_some () - | `Path_curve (ledger_path, ledger_curve) -> - cctxt#message - "@[<v 0>Authorized baking path: %a@]" - Bip32_path.pp_path ledger_path >>= fun () -> - cctxt#message - "@[<v 0>Authorized baking curve: %a@]" - Ledgerwallet_tezos.pp_curve ledger_curve >>= fun () -> - begin match ledger_uri with - | `Ledger _ -> return_some () - | `Ledger_account { curve; path ; _ } - when curve = ledger_curve - && Bip32_path.tezos_root @ path = ledger_path -> - cctxt#message - "@[<v 0>Authorized baking URI: %a@]" - Ledger_uri.pp ledger_uri - >>= fun () -> - return_some () - | `Ledger_account { curve; path ; _ } -> - failwith - "Path and curve do not match the ones \ - specified in the command line: %a & %a" - Ledgerwallet_tezos.pp_curve curve - Bip32_path.pp_path (Bip32_path.tezos_root @ path) - end - end)) ; - Clic.command ~group - ~desc: "Authorize a Ledger to bake for a key (deprecated, \ - use `setup ledger ...` with recent versions of the Baking app)" - no_options - (prefixes [ "authorize" ; "ledger" ; "to" ; "bake" ; "for" ] - @@ Ledger_uri.ledger_uri_or_alias_param - @@ stop) - (fun () ledger_uri (cctxt : Client_context.full) -> - use_ledger_or_fail ~ledger_uri ~filter:Filter.is_baking - (fun hidapi (version, _git_commit) _device_info _ledger_id -> - begin match version with - | { Ledgerwallet_tezos.Version.app_class = Tezos ; _ } -> - failwith "This command (`authorize ledger ...`) only \ - works with the Tezos Baking app" - | { Ledgerwallet_tezos.Version.app_class = TezBake ; - major ; _ } when major >= 2 -> - failwith - "This command (`authorize ledger ...`) is@ \ - not compatible with@ this version of the Ledger@ \ - Baking app (%a >= 2.0.0),@ please use the command@ \ - `setup ledger to bake for ...`@ from now on." - Ledgerwallet_tezos.Version.pp version - | _ -> - cctxt#message - "This Ledger Baking app is outdated (%a)@ running@ \ - in backwards@ compatibility mode." - Ledgerwallet_tezos.Version.pp version - >>= fun () -> - return_unit - end + match authorized with + | `Legacy_path p -> + cctxt#message + "@[<v 0>Authorized baking path (Legacy < 2.x.y): %a@]" + Bip32_path.pp_path + p + >>= fun () -> return_some () + | `No_baking_authorized -> + cctxt#message "No baking key authorized at all." 
+ >>= fun () -> return_some () + | `Path_curve (ledger_path, ledger_curve) -> ( + cctxt#message + "@[<v 0>Authorized baking path: %a@]" + Bip32_path.pp_path + ledger_path + >>= fun () -> + cctxt#message + "@[<v 0>Authorized baking curve: %a@]" + Ledgerwallet_tezos.pp_curve + ledger_curve + >>= fun () -> + match ledger_uri with + | `Ledger _ -> + return_some () + | `Ledger_account {curve; path; _} + when curve = ledger_curve + && Bip32_path.tezos_root @ path = ledger_path -> + cctxt#message + "@[<v 0>Authorized baking URI: %a@]" + Ledger_uri.pp + ledger_uri + >>= fun () -> return_some () + | `Ledger_account {curve; path; _} -> + failwith + "Path and curve do not match the ones specified in \ + the command line: %a & %a" + Ledgerwallet_tezos.pp_curve + curve + Bip32_path.pp_path + (Bip32_path.tezos_root @ path) ))); + Clic.command + ~group + ~desc: + "Authorize a Ledger to bake for a key (deprecated, use `setup \ + ledger ...` with recent versions of the Baking app)" + no_options + ( prefixes ["authorize"; "ledger"; "to"; "bake"; "for"] + @@ Ledger_uri.ledger_uri_or_alias_param @@ stop ) + (fun () ledger_uri (cctxt : Client_context.full) -> + use_ledger_or_fail + ~ledger_uri + ~filter:Filter.is_baking + (fun hidapi (version, _git_commit) _device_info _ledger_id -> + ( match version with + | {Ledgerwallet_tezos.Version.app_class = Tezos; _} -> + failwith + "This command (`authorize ledger ...`) only works with \ + the Tezos Baking app" + | {Ledgerwallet_tezos.Version.app_class = TezBake; major; _} + when major >= 2 -> + failwith + "This command (`authorize ledger ...`) is@ not compatible \ + with@ this version of the Ledger@ Baking app (%a >= \ + 2.0.0),@ please use the command@ `setup ledger to bake \ + for ...`@ from now on." + Ledgerwallet_tezos.Version.pp + version + | _ -> + cctxt#message + "This Ledger Baking app is outdated (%a)@ running@ in \ + backwards@ compatibility mode." + Ledgerwallet_tezos.Version.pp + version + >>= fun () -> return_unit ) >>=? fun () -> Ledger_uri.full_account ledger_uri - >>=? fun { Ledger_account. curve ; path ; _ } -> + >>=? fun {Ledger_account.curve; path; _} -> Ledger_commands.public_key_returning_instruction - `Authorize_baking hidapi curve path + `Authorize_baking + hidapi + curve + path >>=? fun pk -> let pkh = Signature.Public_key.hash pk in cctxt#message "@[<v 0>Authorized baking for address: %a@,\ Corresponding full public key: %a@]" - Signature.Public_key_hash.pp pkh - Signature.Public_key.pp pk >>= fun () -> - return_some ())) ; - Clic.command ~group - ~desc: "Setup a Ledger to bake for a key" - (let hwm_arg kind = - let doc = - Printf.sprintf - "Use <HWM> as %s chain high watermark instead of asking the ledger." - kind in - let long = kind ^ "-hwm" in - default_arg ~doc ~long ~placeholder:"HWM" - ~default:"ASK-LEDGER" - (parameter - (fun _ -> function - | "ASK-LEDGER" -> return_none - | s -> - try return_some (Int32.of_string s) with _ -> - failwith "Parameter %S should be a 32-bits integer" s)) - in - args3 - (default_arg - ~doc:"Use <ID> as main chain-id instead of asking the node." 
- ~long:"main-chain-id" ~placeholder:"ID" - ~default:"ASK-NODE" - (parameter - (fun _ -> function - | "ASK-NODE" -> return `Ask_node - | s -> - try return (`Int32 (Int32.of_string s)) - with _ -> - (try return (`Chain_id (Chain_id.of_b58check_exn s)) - with _ -> - failwith "Parameter %S should be a 32-bits integer \ - or a Base58 chain-id" s)))) - (hwm_arg "main") (hwm_arg "test")) - (prefixes [ "setup" ; "ledger" ; "to" ; "bake" ; "for" ] - @@ Ledger_uri.ledger_uri_or_alias_param - @@ stop) - (fun (chain_id_opt, main_hwm_opt, test_hwm_opt) ledger_uri (cctxt : Client_context.full) -> - use_ledger_or_fail ~ledger_uri ~filter:Filter.is_baking - (fun hidapi (version, _git_commit) _device_info _ledger_id -> - begin - let open Ledgerwallet_tezos.Version in - match version with - | { app_class = Tezos ; _ } -> - failwith "This command (`setup ledger ...`) only \ - works with the Tezos Baking app" - | { app_class = TezBake ; - major ; _ } when major < 2 -> - failwith - "This command (`setup ledger ...`)@ is not@ compatible@ with \ - this version@ of the Ledger Baking app@ (%a < 2.0.0),@ \ - please upgrade@ your ledger@ or use the command@ \ - `authorize ledger to bake for ...`" - pp version - | _ -> return_unit - end + Signature.Public_key_hash.pp + pkh + Signature.Public_key.pp + pk + >>= fun () -> return_some ())); + Clic.command + ~group + ~desc:"Setup a Ledger to bake for a key" + (let hwm_arg kind = + let doc = + Printf.sprintf + "Use <HWM> as %s chain high watermark instead of asking the \ + ledger." + kind + in + let long = kind ^ "-hwm" in + default_arg + ~doc + ~long + ~placeholder:"HWM" + ~default:"ASK-LEDGER" + (parameter (fun _ -> + function + | "ASK-LEDGER" -> + return_none + | s -> ( + try return_some (Int32.of_string s) + with _ -> + failwith "Parameter %S should be a 32-bits integer" s ))) + in + args3 + (default_arg + ~doc:"Use <ID> as main chain-id instead of asking the node." + ~long:"main-chain-id" + ~placeholder:"ID" + ~default:"ASK-NODE" + (parameter (fun _ -> + function + | "ASK-NODE" -> + return `Ask_node + | s -> ( + try return (`Int32 (Int32.of_string s)) + with _ -> ( + try return (`Chain_id (Chain_id.of_b58check_exn s)) + with _ -> + failwith + "Parameter %S should be a 32-bits integer or a \ + Base58 chain-id" + s ) )))) + (hwm_arg "main") + (hwm_arg "test")) + ( prefixes ["setup"; "ledger"; "to"; "bake"; "for"] + @@ Ledger_uri.ledger_uri_or_alias_param @@ stop ) + (fun (chain_id_opt, main_hwm_opt, test_hwm_opt) + ledger_uri + (cctxt : Client_context.full) -> + use_ledger_or_fail + ~ledger_uri + ~filter:Filter.is_baking + (fun hidapi (version, _git_commit) _device_info _ledger_id -> + (let open Ledgerwallet_tezos.Version in + match version with + | {app_class = Tezos; _} -> + failwith + "This command (`setup ledger ...`) only works with the \ + Tezos Baking app" + | {app_class = TezBake; major; _} when major < 2 -> + failwith + "This command (`setup ledger ...`)@ is not@ compatible@ \ + with this version@ of the Ledger Baking app@ (%a < \ + 2.0.0),@ please upgrade@ your ledger@ or use the \ + command@ `authorize ledger to bake for ...`" + pp + version + | _ -> + return_unit) >>=? fun () -> Ledger_uri.full_account ledger_uri - >>=? fun { Ledger_account. curve ; path ; _ } -> + >>=? 
fun {Ledger_account.curve; path; _} -> let chain_id_of_int32 i32 = let open Int32 in let byte n = logand 0xFFl (shift_right i32 (n * 8)) - |> Int32.to_int |> char_of_int in + |> Int32.to_int |> char_of_int + in Chain_id.of_string_exn - (Stringext.of_array (Array.init 4 (fun i -> byte (3 - i)))) in - begin match chain_id_opt with - | `Ask_node -> - Chain_services.chain_id cctxt () - | `Int32 s -> return (chain_id_of_int32 s) - | `Chain_id chid -> return chid - end + (Stringext.of_array (Array.init 4 (fun i -> byte (3 - i)))) + in + ( match chain_id_opt with + | `Ask_node -> + Chain_services.chain_id cctxt () + | `Int32 s -> + return (chain_id_of_int32 s) + | `Chain_id chid -> + return chid ) >>=? fun main_chain_id -> - Ledger_commands.wrap_ledger_cmd begin fun pp -> - Ledgerwallet_tezos.get_all_high_watermarks ~pp hidapi - end - >>=? fun (`Main_hwm current_mh, `Test_hwm current_th, `Chain_id current_ci) -> + Ledger_commands.wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.get_all_high_watermarks ~pp hidapi) + >>=? fun ( `Main_hwm current_mh, + `Test_hwm current_th, + `Chain_id current_ci ) -> let main_hwm = Option.unopt main_hwm_opt ~default:current_mh in let test_hwm = Option.unopt test_hwm_opt ~default:current_th in - cctxt#message "Setting up the ledger:@.\ - * Main chain ID: %a -> %a@.\ - * Main chain High Watermark: %ld -> %ld@.\ - * Test chain High Watermark: %ld -> %ld" - pp_ledger_chain_id current_ci - Chain_id.pp main_chain_id - current_mh main_hwm - current_th test_hwm + cctxt#message + "Setting up the ledger:@.* Main chain ID: %a -> %a@.* Main \ + chain High Watermark: %ld -> %ld@.* Test chain High \ + Watermark: %ld -> %ld" + pp_ledger_chain_id + current_ci + Chain_id.pp + main_chain_id + current_mh + main_hwm + current_th + test_hwm >>= fun () -> Ledger_commands.public_key_returning_instruction (`Setup (Chain_id.to_string main_chain_id, main_hwm, test_hwm)) - hidapi curve path + hidapi + curve + path >>=? fun pk -> let pkh = Signature.Public_key.hash pk in cctxt#message "@[<v 0>Authorized baking for address: %a@,\ Corresponding full public key: %a@]" - Signature.Public_key_hash.pp pkh - Signature.Public_key.pp pk >>= fun () -> - return_some () - )) ; - Clic.command ~group - ~desc: "Deauthorize Ledger from baking" - no_options - (prefixes [ "deauthorize" ; "ledger" ; "baking" ; "for" ] - @@ Ledger_uri.ledger_uri_or_alias_param - @@ stop) - (fun () ledger_uri (_cctxt : Client_context.full) -> - use_ledger_or_fail ~ledger_uri ~filter:Filter.is_baking - (fun hidapi (_version, _git_commit) _device_info _ledger_id -> - Ledger_commands.wrap_ledger_cmd begin fun pp -> - Ledgerwallet_tezos.deauthorize_baking ~pp hidapi - end >>=? fun () -> - return_some () - )) ; - ] + Signature.Public_key_hash.pp + pkh + Signature.Public_key.pp + pk + >>= fun () -> return_some ())); + Clic.command + ~group + ~desc:"Deauthorize Ledger from baking" + no_options + ( prefixes ["deauthorize"; "ledger"; "baking"; "for"] + @@ Ledger_uri.ledger_uri_or_alias_param @@ stop ) + (fun () ledger_uri (_cctxt : Client_context.full) -> + use_ledger_or_fail + ~ledger_uri + ~filter:Filter.is_baking + (fun hidapi (_version, _git_commit) _device_info _ledger_id -> + Ledger_commands.wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.deauthorize_baking ~pp hidapi) + >>=? fun () -> return_some ())) ] (** Commands for high water mark of the Baking app. The [watermark_spelling] argument is used to make 2 sets of commands: with @@ -979,87 +1105,104 @@ let baking_commands group = Clic.[ water level). 
*) let high_water_mark_commands group watermark_spelling = let make_desc desc = - if List.length watermark_spelling = 1 - then desc ^ " (legacy/deprecated spelling)" - else desc in - Clic.[ - Clic.command ~group - ~desc:(make_desc "Get high water mark of a Ledger") - (args1 (switch ~doc:"Prevent the fallback to the (deprecated) Ledger \ - instructions (for 1.x.y versions of the Baking app)" - ~long:"no-legacy-instructions" ())) - (prefixes ([ "get" ; "ledger" ; "high" ] @ watermark_spelling @ [ "for" ]) - @@ Ledger_uri.ledger_uri_or_alias_param - @@ stop) - (fun no_legacy_apdu ledger_uri (cctxt : Client_context.full) -> - use_ledger_or_fail ~ledger_uri ~filter:Filter.is_baking - (fun hidapi (version, _git_commit) _device_info _ledger_id -> + if List.length watermark_spelling = 1 then + desc ^ " (legacy/deprecated spelling)" + else desc + in + Clic. + [ Clic.command + ~group + ~desc:(make_desc "Get high water mark of a Ledger") + (args1 + (switch + ~doc: + "Prevent the fallback to the (deprecated) Ledger instructions \ + (for 1.x.y versions of the Baking app)" + ~long:"no-legacy-instructions" + ())) + ( prefixes (["get"; "ledger"; "high"] @ watermark_spelling @ ["for"]) + @@ Ledger_uri.ledger_uri_or_alias_param @@ stop ) + (fun no_legacy_apdu ledger_uri (cctxt : Client_context.full) -> + use_ledger_or_fail + ~ledger_uri + ~filter:Filter.is_baking + (fun hidapi (version, _git_commit) _device_info _ledger_id -> match version.app_class with | Tezos -> - failwith "Fatal: this operation is only valid with the \ - Tezos Baking application" - | TezBake when not no_legacy_apdu && version.major < 2 -> - Ledger_commands.wrap_ledger_cmd begin fun pp -> - Ledgerwallet_tezos.get_high_watermark ~pp hidapi - end >>=? fun hwm -> - cctxt#message "The high water mark for@ %a@ is %ld." - Ledger_uri.pp ledger_uri hwm >>= fun () -> - return_some () + failwith + "Fatal: this operation is only valid with the Tezos \ + Baking application" + | TezBake when (not no_legacy_apdu) && version.major < 2 -> + Ledger_commands.wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.get_high_watermark ~pp hidapi) + >>=? fun hwm -> + cctxt#message + "The high water mark for@ %a@ is %ld." + Ledger_uri.pp + ledger_uri + hwm + >>= fun () -> return_some () | TezBake when no_legacy_apdu && version.major < 2 -> failwith "Cannot get the high water mark with@ \ `--no-legacy-instructions` and version %a" - Ledgerwallet_tezos.Version.pp version + Ledgerwallet_tezos.Version.pp + version | TezBake -> - Ledger_commands.wrap_ledger_cmd begin fun pp -> - Ledgerwallet_tezos.get_all_high_watermarks ~pp hidapi - end + Ledger_commands.wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.get_all_high_watermarks ~pp hidapi) >>=? fun (`Main_hwm mh, `Test_hwm th, `Chain_id ci) -> cctxt#message - "The high water mark values for@ %a@ are\ - @ %ld for the main-chain@ (%a)@ \ - and@ %ld for the test-chain." - Ledger_uri.pp ledger_uri mh pp_ledger_chain_id ci th - >>= fun () -> - return_some () ) ) ; - Clic.command ~group - ~desc:(make_desc "Set high water mark of a Ledger") - no_options - (prefixes ([ "set" ; "ledger" ; "high" ] @ watermark_spelling @ [ "for" ]) - @@ Ledger_uri.ledger_uri_or_alias_param - @@ (prefix "to") - @@ (param - ~name: "high watermark" - ~desc: "High watermark" + "The high water mark values for@ %a@ are@ %ld for the \ + main-chain@ (%a)@ and@ %ld for the test-chain." 
+ Ledger_uri.pp + ledger_uri + mh + pp_ledger_chain_id + ci + th + >>= fun () -> return_some ())); + Clic.command + ~group + ~desc:(make_desc "Set high water mark of a Ledger") + no_options + ( prefixes (["set"; "ledger"; "high"] @ watermark_spelling @ ["for"]) + @@ Ledger_uri.ledger_uri_or_alias_param @@ prefix "to" + @@ param + ~name:"high watermark" + ~desc:"High watermark" (parameter (fun _ctx s -> try return (Int32.of_string s) - with _ -> failwith "%s is not an int32 value" s))) - @@ stop) - (fun () ledger_uri hwm (cctxt : Client_context.full) -> - use_ledger_or_fail ~ledger_uri ~filter:Filter.is_baking - (fun hidapi (version, _git_commit) _device_info _ledger_id -> + with _ -> failwith "%s is not an int32 value" s)) + @@ stop ) + (fun () ledger_uri hwm (cctxt : Client_context.full) -> + use_ledger_or_fail + ~ledger_uri + ~filter:Filter.is_baking + (fun hidapi (version, _git_commit) _device_info _ledger_id -> match version.app_class with | Tezos -> failwith "Fatal: this operation is only valid with TezBake" | TezBake -> - Ledger_commands.wrap_ledger_cmd begin fun pp -> - Ledgerwallet_tezos.set_high_watermark ~pp hidapi hwm - end >>=? fun () -> - Ledger_commands.wrap_ledger_cmd begin fun pp -> - Ledgerwallet_tezos.get_high_watermark ~pp hidapi - end >>=? fun new_hwm -> + Ledger_commands.wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.set_high_watermark ~pp hidapi hwm) + >>=? fun () -> + Ledger_commands.wrap_ledger_cmd (fun pp -> + Ledgerwallet_tezos.get_high_watermark ~pp hidapi) + >>=? fun new_hwm -> cctxt#message "@[<v 0>%a has now high water mark: %ld@]" - Ledger_uri.pp ledger_uri new_hwm >>= fun () -> - return_some ())) ; - ] + Ledger_uri.pp + ledger_uri + new_hwm + >>= fun () -> return_some ())) ] let commands = let group = - { Clic.name = "ledger" ; - title = "Commands for managing the connected Ledger Nano S devices" } in + { Clic.name = "ledger"; + title = "Commands for managing the connected Ledger Nano S devices" } + in fun () -> - generic_commands group @ - baking_commands group @ - high_water_mark_commands group [ "water" ; "mark" ] @ - high_water_mark_commands group [ "watermark" ] + generic_commands group @ baking_commands group + @ high_water_mark_commands group ["water"; "mark"] + @ high_water_mark_commands group ["watermark"] diff --git a/src/lib_signer_backends/ledger.mli b/src/lib_signer_backends/ledger.mli index 1fa9533afef82cd14a04ed9c36a9ecf0f62568ad..b24bcb75b637d223e0dcdffde645365f0b426d18 100644 --- a/src/lib_signer_backends/ledger.mli +++ b/src/lib_signer_backends/ledger.mli @@ -25,13 +25,19 @@ module Bip32_path : sig val node_of_string : string -> int32 option + val node_of_string_exn : string -> int32 + val pp_node : int32 Fmt.t + val string_of_node : int32 -> string val path_of_string : string -> int32 list option + val path_of_string_exn : string -> int32 list + val pp_path : int32 list Fmt.t + val string_of_path : int32 list -> string end diff --git a/src/lib_signer_backends/ledger_names.ml b/src/lib_signer_backends/ledger_names.ml index dec54c4dbebeb1f721ff9dbfff7f16b5564b02e5..bc82f6122e5d7c20a937cdb1d4988c8cf74f6770 100644 --- a/src/lib_signer_backends/ledger_names.ml +++ b/src/lib_signer_backends/ledger_names.ml @@ -23,244 +23,1612 @@ (* *) (*****************************************************************************) -let adjectives = [| - "abandoned";"able";"absolute";"adorable";"adventurous";"academic";"acceptable"; - "acclaimed";"accomplished";"accurate";"aching";"acidic";"acrobatic";"active"; - 
"actual";"adept";"admirable";"admired";"adolescent";"adorable";"adored"; - "advanced";"afraid";"affectionate";"aged";"aggravating";"aggressive";"agile"; - "agitated";"agonizing";"agreeable";"ajar";"alarmed";"alarming";"alert"; - "alienated";"alive";"all";"altruistic";"amazing";"ambitious";"ample";"amused"; - "amusing";"anchored";"ancient";"angelic";"angry";"anguished";"animated"; - "annual";"another";"antique";"anxious";"any";"apprehensive";"appropriate"; - "apt";"arctic";"arid";"aromatic";"artistic";"ashamed";"assured";"astonishing"; - "athletic";"attached";"attentive";"attractive";"austere";"authentic"; - "authorized";"automatic";"avaricious";"average";"aware";"awesome";"awful"; - "awkward";"babyish";"bad";"back";"baggy";"bare";"barren";"basic";"beautiful"; - "belated";"beloved";"beneficial";"better";"best";"bewitched";"big"; - "biodegradable";"bitter";"black";"bland";"blank";"blaring";"bleak";"blind"; - "blissful";"blond";"blue";"blushing";"bogus";"boiling";"bold";"bony";"boring"; - "bossy";"both";"bouncy";"bountiful";"bowed";"brave";"breakable";"brief"; - "bright";"brilliant";"brisk";"broken";"bronze";"brown";"bruised";"bubbly"; - "bulky";"bumpy";"buoyant";"burdensome";"burly";"bustling";"busy";"buttery"; - "buzzing";"calculating";"calm";"candid";"canine";"capital";"carefree"; - "careful";"careless";"caring";"cautious";"cavernous";"celebrated";"charming"; - "cheap";"cheerful";"cheery";"chief";"chilly";"chubby";"circular";"classic"; - "clean";"clear";"clever";"close";"closed";"cloudy";"clueless";"clumsy"; - "cluttered";"coarse";"cold";"colorful";"colorless";"colossal";"comfortable"; - "common";"compassionate";"competent";"complete";"complex";"complicated"; - "composed";"concerned";"concrete";"confused";"conscious";"considerate"; - "constant";"content";"conventional";"cooked";"cool";"cooperative"; - "coordinated";"corny";"corrupt";"costly";"courageous";"courteous";"crafty"; - "crazy";"creamy";"creative";"creepy";"criminal";"crisp";"critical";"crooked"; - "crowded";"cruel";"crushing";"cuddly";"cultivated";"cultured";"cumbersome"; - "curly";"curvy";"cute";"cylindrical";"damaged";"damp";"dangerous";"dapper"; - "daring";"darling";"dark";"dazzling";"dead";"deadly";"deafening";"dear"; - "dearest";"decent";"decimal";"decisive";"deep";"defenseless";"defensive"; - "defiant";"deficient";"definite";"definitive";"delayed";"delectable"; - "delicious";"delightful";"delirious";"demanding";"dense";"dental";"dependable"; - "dependent";"descriptive";"deserted";"detailed";"determined";"devoted"; - "different";"difficult";"digital";"diligent";"dim";"dimpled";"dimwitted"; - "direct";"disastrous";"discrete";"disfigured";"disgusting";"disloyal";"dismal"; - "distant";"downright";"dreary";"dirty";"disguised";"dishonest";"dismal"; - "distant";"distinct";"distorted";"dizzy";"dopey";"doting";"double";"downright"; - "drab";"drafty";"dramatic";"dreary";"droopy";"dry";"dual";"dull";"dutiful"; - "eager";"earnest";"early";"easy";"ecstatic";"edible";"educated";"elaborate"; - "elastic";"elated";"elderly";"electric";"elegant";"elementary";"elliptical"; - "embarrassed";"embellished";"eminent";"emotional";"empty";"enchanted"; - "enchanting";"energetic";"enlightened";"enormous";"enraged";"entire";"envious"; - "equal";"equatorial";"essential";"esteemed";"ethical";"euphoric";"even"; - "evergreen";"everlasting";"every";"evil";"exalted";"excellent";"exemplary"; - "exhausted";"excitable";"excited";"exciting";"exotic";"expensive"; - "experienced";"expert";"extraneous";"extroverted";"fabulous";"failing";"faint"; - 
"fair";"faithful";"fake";"false";"familiar";"famous";"fancy";"fantastic";"far"; - "faraway";"fast";"fat";"fatal";"fatherly";"favorable";"favorite";"fearful"; - "fearless";"feisty";"feline";"female";"feminine";"few";"fickle";"filthy"; - "fine";"finished";"firm";"first";"firsthand";"fitting";"fixed";"flaky"; - "flamboyant";"flashy";"flat";"flawed";"flawless";"flickering";"flimsy"; - "flippant";"flowery";"fluffy";"fluid";"flustered";"focused";"fond";"foolhardy"; - "foolish";"forceful";"forked";"formal";"forsaken";"forthright";"fortunate"; - "fragrant";"frail";"frank";"frayed";"free";"french";"fresh";"frequent"; - "friendly";"frightened";"frightening";"frigid";"frilly";"frizzy";"frivolous"; - "front";"frosty";"frozen";"frugal";"fruitful";"full";"fumbling";"functional"; - "funny";"fussy";"fuzzy";"gargantuan";"gaseous";"general";"generous";"gentle"; - "genuine";"giant";"giddy";"gigantic";"gifted";"giving";"glamorous";"glaring"; - "glass";"gleaming";"gleeful";"glistening";"glittering";"gloomy";"glorious"; - "glossy";"glum";"golden";"good";"gorgeous";"graceful";"gracious";"grand"; - "grandiose";"granular";"grateful";"grave";"gray";"great";"greedy";"green"; - "gregarious";"grim";"grimy";"gripping";"grizzled";"gross";"grotesque"; - "grouchy";"grounded";"growing";"growling";"grown";"grubby";"gruesome";"grumpy"; - "guilty";"gullible";"gummy";"hairy";"half";"handmade";"handsome";"handy"; - "happy";"hard";"harmful";"harmless";"harmonious";"harsh";"hasty";"hateful"; - "haunting";"healthy";"heartfelt";"hearty";"heavenly";"heavy";"hefty";"helpful"; - "helpless";"hidden";"hideous";"high";"hilarious";"hoarse";"hollow";"homely"; - "honest";"honorable";"honored";"hopeful";"horrible";"hospitable";"hot";"huge"; - "humble";"humiliating";"humming";"humongous";"hungry";"hurtful";"husky";"icky"; - "icy";"ideal";"idealistic";"identical";"idle";"idiotic";"idolized";"ignorant"; - "ill";"illegal";"illiterate";"illustrious";"imaginary";"imaginative"; - "immaculate";"immaterial";"immediate";"immense";"impassioned";"impeccable"; - "impartial";"imperfect";"imperturbable";"impish";"impolite";"important"; - "impossible";"impractical";"impressionable";"impressive";"improbable";"impure"; - "inborn";"incomparable";"incompatible";"incomplete";"inconsequential"; - "incredible";"indelible";"inexperienced";"indolent";"infamous";"infantile"; - "infatuated";"inferior";"infinite";"informal";"innocent";"insecure"; - "insidious";"insignificant";"insistent";"instructive";"insubstantial"; - "intelligent";"intent";"intentional";"interesting";"internal";"international"; - "intrepid";"ironclad";"irresponsible";"irritating";"itchy";"jaded";"jagged"; - "jaunty";"jealous";"jittery";"joint";"jolly";"jovial";"joyful";"joyous"; - "jubilant";"judicious";"juicy";"jumbo";"junior";"jumpy";"juvenile"; - "kaleidoscopic";"keen";"key";"kind";"kindhearted";"kindly";"klutzy";"knobby"; - "knotty";"knowledgeable";"knowing";"known";"kooky";"lame";"lanky";"large"; - "last";"lasting";"late";"lavish";"lawful";"lazy";"leading";"lean";"leafy"; - "left";"legal";"legitimate";"light";"lighthearted";"likable";"likely"; - "limited";"limp";"limping";"linear";"lined";"liquid";"little";"live";"lively"; - "livid";"loathsome";"lone";"lonely";"long";"loose";"lopsided";"lost";"loud"; - "lovable";"lovely";"loving";"low";"loyal";"lucky";"lumbering";"luminous"; - "lumpy";"lustrous";"luxurious";"mad";"magnificent";"majestic";"major";"male"; - "mammoth";"married";"marvelous";"masculine";"massive";"mature";"meager"; - 
"mealy";"mean";"measly";"meaty";"medical";"mediocre";"medium";"meek";"mellow"; - "melodic";"memorable";"menacing";"merry";"messy";"metallic";"mild";"milky"; - "mindless";"miniature";"minor";"minty";"miserable";"miserly";"misguided"; - "misty";"mixed";"modern";"modest";"moist";"monstrous";"monthly";"monumental"; - "moral";"mortified";"motherly";"motionless";"mountainous";"muddy";"muffled"; - "multicolored";"mundane";"murky";"mushy";"musty";"muted";"mysterious";"naive"; - "narrow";"nasty";"natural";"naughty";"nautical";"near";"neat";"necessary"; - "needy";"negative";"neglected";"negligible";"neighboring";"nervous";"new"; - "nice";"nifty";"nimble";"nippy";"nocturnal";"noisy";"nonstop";"normal"; - "notable";"noted";"noteworthy";"novel";"noxious";"numb";"nutritious";"nutty"; - "obedient";"obese";"oblong";"oily";"oblong";"obvious";"occasional";"odd"; - "oddball";"offbeat";"offensive";"official";"old";"only";"open";"optimal"; - "optimistic";"opulent";"orange";"orderly";"organic";"ornate";"ornery"; - "ordinary";"original";"other";"our";"outlying";"outgoing";"outlandish"; - "outrageous";"outstanding";"oval";"overcooked";"overdue";"overjoyed"; - "overlooked";"palatable";"pale";"paltry";"parallel";"parched";"partial"; - "passionate";"past";"pastel";"peaceful";"peppery";"perfect";"perfumed"; - "periodic";"perky";"personal";"pertinent";"pesky";"pessimistic";"petty"; - "phony";"physical";"piercing";"pink";"pitiful";"plain";"plaintive";"plastic"; - "playful";"pleasant";"pleased";"pleasing";"plump";"plush";"polished";"polite"; - "political";"pointed";"pointless";"poised";"poor";"popular";"portly";"posh"; - "positive";"possible";"potable";"powerful";"powerless";"practical";"precious"; - "present";"prestigious";"pretty";"precious";"previous";"pricey";"prickly"; - "primary";"prime";"pristine";"private";"prize";"probable";"productive"; - "profitable";"profuse";"proper";"proud";"prudent";"punctual";"pungent";"puny"; - "pure";"purple";"pushy";"putrid";"puzzled";"puzzling";"quaint";"qualified"; - "quarrelsome";"quarterly";"queasy";"querulous";"questionable";"quick";"quiet"; - "quintessential";"quirky";"quixotic";"quizzical";"radiant";"ragged";"rapid"; - "rare";"rash";"raw";"recent";"reckless";"rectangular";"ready";"real"; - "realistic";"reasonable";"red";"reflecting";"regal";"regular";"reliable"; - "relieved";"remarkable";"remorseful";"remote";"repentant";"required"; - "respectful";"responsible";"repulsive";"revolving";"rewarding";"rich";"rigid"; - "right";"ringed";"ripe";"roasted";"robust";"rosy";"rotating";"rotten";"rough"; - "round";"rowdy";"royal";"rubbery";"rundown";"ruddy";"rude";"runny";"rural"; - "rusty";"sad";"safe";"salty";"same";"sandy";"sane";"sarcastic";"sardonic"; - "satisfied";"scaly";"scarce";"scared";"scary";"scented";"scholarly"; - "scientific";"scornful";"scratchy";"scrawny";"second";"secondary";"secret"; - "selfish";"sentimental";"separate";"serene";"serious";"serpentine";"several"; - "severe";"shabby";"shadowy";"shady";"shallow";"shameful";"shameless";"sharp"; - "shimmering";"shiny";"shocked";"shocking";"shoddy";"short";"showy";"shrill"; - "shy";"sick";"silent";"silky";"silly";"silver";"similar";"simple";"simplistic"; - "sinful";"single";"sizzling";"skeletal";"skinny";"sleepy";"slight";"slim"; - "slimy";"slippery";"slow";"slushy";"small";"smart";"smoggy";"smooth";"smug"; - "snappy";"snarling";"sneaky";"sniveling";"snoopy";"sociable";"soft";"soggy"; - "solid";"somber";"some";"spherical";"sophisticated";"sore";"sorrowful"; - "soulful";"soupy";"sour";"spanish";"sparkling";"sparse";"specific"; - 
"spectacular";"speedy";"spicy";"spiffy";"spirited";"spiteful";"splendid"; - "spotless";"spotted";"spry";"square";"squeaky";"squiggly";"stable";"staid"; - "stained";"stale";"standard";"starchy";"stark";"starry";"steep";"sticky"; - "stiff";"stimulating";"stingy";"stormy";"straight";"strange";"steel";"strict"; - "strident";"striking";"striped";"strong";"studious";"stunning";"stupendous"; - "stupid";"sturdy";"stylish";"subdued";"submissive";"substantial";"subtle"; - "suburban";"sudden";"sugary";"sunny";"super";"superb";"superficial";"superior"; - "supportive";"surprised";"suspicious";"svelte";"sweaty";"sweet";"sweltering"; - "swift";"sympathetic";"tall";"talkative";"tame";"tan";"tangible";"tart"; - "tasty";"tattered";"taut";"tedious";"teeming";"tempting";"tender";"tense"; - "tepid";"terrible";"terrific";"testy";"thankful";"that";"these";"thick";"thin"; - "third";"thirsty";"this";"thorough";"thorny";"those";"thoughtful";"threadbare"; - "thrifty";"thunderous";"tidy";"tight";"timely";"tinted";"tiny";"tired";"torn"; - "total";"tough";"traumatic";"treasured";"tremendous";"tragic";"trained"; - "tremendous";"triangular";"tricky";"trifling";"trim";"trivial";"troubled"; - "true";"trusting";"trustworthy";"trusty";"truthful";"tubby";"turbulent";"twin"; - "ugly";"ultimate";"unacceptable";"unaware";"uncomfortable";"uncommon"; - "unconscious";"understated";"unequaled";"uneven";"unfinished";"unfit"; - "unfolded";"unfortunate";"unhappy";"unhealthy";"uniform";"unimportant"; - "unique";"united";"unkempt";"unknown";"unlawful";"unlined";"unlucky"; - "unnatural";"unpleasant";"unrealistic";"unripe";"unruly";"unselfish"; - "unsightly";"unsteady";"unsung";"untidy";"untimely";"untried";"untrue"; - "unused";"unusual";"unwelcome";"unwieldy";"unwilling";"unwitting";"unwritten"; - "upbeat";"upright";"upset";"urban";"usable";"used";"useful";"useless"; - "utilized";"utter";"vacant";"vague";"vain";"valid";"valuable";"vapid"; - "variable";"vast";"velvety";"venerated";"vengeful";"verifiable";"vibrant"; - "vicious";"victorious";"vigilant";"vigorous";"villainous";"violet";"violent"; - "virtual";"virtuous";"visible";"vital";"vivacious";"vivid";"voluminous"; - "warlike";"warm";"warmhearted";"warped";"wary";"wasteful";"watchful"; - "waterlogged";"watery";"wavy";"wealthy";"weak";"weary";"webbed";"wee";"weekly"; - "weepy";"weighty";"weird";"welcome";"wet";"which";"whimsical";"whirlwind"; - "whispered";"white";"whole";"whopping";"wicked";"wide";"wiggly";"wild"; - "willing";"wilted";"winding";"windy";"winged";"wiry";"wise";"witty";"wobbly"; - "woeful";"wonderful";"wooden";"woozy";"wordy";"worldly";"worn";"worried"; - "worrisome";"worse";"worst";"worthless";"worthwhile";"worthy";"wrathful"; - "wretched";"writhing";"wrong";"wry";"yawning";"yearly";"yellow";"yellowish"; - "young";"youthful";"yummy";"zany";"zealous";"zesty"; -|] +let adjectives = + [| "abandoned"; + "able"; + "absolute"; + "adorable"; + "adventurous"; + "academic"; + "acceptable"; + "acclaimed"; + "accomplished"; + "accurate"; + "aching"; + "acidic"; + "acrobatic"; + "active"; + "actual"; + "adept"; + "admirable"; + "admired"; + "adolescent"; + "adorable"; + "adored"; + "advanced"; + "afraid"; + "affectionate"; + "aged"; + "aggravating"; + "aggressive"; + "agile"; + "agitated"; + "agonizing"; + "agreeable"; + "ajar"; + "alarmed"; + "alarming"; + "alert"; + "alienated"; + "alive"; + "all"; + "altruistic"; + "amazing"; + "ambitious"; + "ample"; + "amused"; + "amusing"; + "anchored"; + "ancient"; + "angelic"; + "angry"; + "anguished"; + "animated"; + "annual"; + "another"; + 
"antique"; + "anxious"; + "any"; + "apprehensive"; + "appropriate"; + "apt"; + "arctic"; + "arid"; + "aromatic"; + "artistic"; + "ashamed"; + "assured"; + "astonishing"; + "athletic"; + "attached"; + "attentive"; + "attractive"; + "austere"; + "authentic"; + "authorized"; + "automatic"; + "avaricious"; + "average"; + "aware"; + "awesome"; + "awful"; + "awkward"; + "babyish"; + "bad"; + "back"; + "baggy"; + "bare"; + "barren"; + "basic"; + "beautiful"; + "belated"; + "beloved"; + "beneficial"; + "better"; + "best"; + "bewitched"; + "big"; + "biodegradable"; + "bitter"; + "black"; + "bland"; + "blank"; + "blaring"; + "bleak"; + "blind"; + "blissful"; + "blond"; + "blue"; + "blushing"; + "bogus"; + "boiling"; + "bold"; + "bony"; + "boring"; + "bossy"; + "both"; + "bouncy"; + "bountiful"; + "bowed"; + "brave"; + "breakable"; + "brief"; + "bright"; + "brilliant"; + "brisk"; + "broken"; + "bronze"; + "brown"; + "bruised"; + "bubbly"; + "bulky"; + "bumpy"; + "buoyant"; + "burdensome"; + "burly"; + "bustling"; + "busy"; + "buttery"; + "buzzing"; + "calculating"; + "calm"; + "candid"; + "canine"; + "capital"; + "carefree"; + "careful"; + "careless"; + "caring"; + "cautious"; + "cavernous"; + "celebrated"; + "charming"; + "cheap"; + "cheerful"; + "cheery"; + "chief"; + "chilly"; + "chubby"; + "circular"; + "classic"; + "clean"; + "clear"; + "clever"; + "close"; + "closed"; + "cloudy"; + "clueless"; + "clumsy"; + "cluttered"; + "coarse"; + "cold"; + "colorful"; + "colorless"; + "colossal"; + "comfortable"; + "common"; + "compassionate"; + "competent"; + "complete"; + "complex"; + "complicated"; + "composed"; + "concerned"; + "concrete"; + "confused"; + "conscious"; + "considerate"; + "constant"; + "content"; + "conventional"; + "cooked"; + "cool"; + "cooperative"; + "coordinated"; + "corny"; + "corrupt"; + "costly"; + "courageous"; + "courteous"; + "crafty"; + "crazy"; + "creamy"; + "creative"; + "creepy"; + "criminal"; + "crisp"; + "critical"; + "crooked"; + "crowded"; + "cruel"; + "crushing"; + "cuddly"; + "cultivated"; + "cultured"; + "cumbersome"; + "curly"; + "curvy"; + "cute"; + "cylindrical"; + "damaged"; + "damp"; + "dangerous"; + "dapper"; + "daring"; + "darling"; + "dark"; + "dazzling"; + "dead"; + "deadly"; + "deafening"; + "dear"; + "dearest"; + "decent"; + "decimal"; + "decisive"; + "deep"; + "defenseless"; + "defensive"; + "defiant"; + "deficient"; + "definite"; + "definitive"; + "delayed"; + "delectable"; + "delicious"; + "delightful"; + "delirious"; + "demanding"; + "dense"; + "dental"; + "dependable"; + "dependent"; + "descriptive"; + "deserted"; + "detailed"; + "determined"; + "devoted"; + "different"; + "difficult"; + "digital"; + "diligent"; + "dim"; + "dimpled"; + "dimwitted"; + "direct"; + "disastrous"; + "discrete"; + "disfigured"; + "disgusting"; + "disloyal"; + "dismal"; + "distant"; + "downright"; + "dreary"; + "dirty"; + "disguised"; + "dishonest"; + "dismal"; + "distant"; + "distinct"; + "distorted"; + "dizzy"; + "dopey"; + "doting"; + "double"; + "downright"; + "drab"; + "drafty"; + "dramatic"; + "dreary"; + "droopy"; + "dry"; + "dual"; + "dull"; + "dutiful"; + "eager"; + "earnest"; + "early"; + "easy"; + "ecstatic"; + "edible"; + "educated"; + "elaborate"; + "elastic"; + "elated"; + "elderly"; + "electric"; + "elegant"; + "elementary"; + "elliptical"; + "embarrassed"; + "embellished"; + "eminent"; + "emotional"; + "empty"; + "enchanted"; + "enchanting"; + "energetic"; + "enlightened"; + "enormous"; + "enraged"; + "entire"; + "envious"; + "equal"; + "equatorial"; + 
"essential"; + "esteemed"; + "ethical"; + "euphoric"; + "even"; + "evergreen"; + "everlasting"; + "every"; + "evil"; + "exalted"; + "excellent"; + "exemplary"; + "exhausted"; + "excitable"; + "excited"; + "exciting"; + "exotic"; + "expensive"; + "experienced"; + "expert"; + "extraneous"; + "extroverted"; + "fabulous"; + "failing"; + "faint"; + "fair"; + "faithful"; + "fake"; + "false"; + "familiar"; + "famous"; + "fancy"; + "fantastic"; + "far"; + "faraway"; + "fast"; + "fat"; + "fatal"; + "fatherly"; + "favorable"; + "favorite"; + "fearful"; + "fearless"; + "feisty"; + "feline"; + "female"; + "feminine"; + "few"; + "fickle"; + "filthy"; + "fine"; + "finished"; + "firm"; + "first"; + "firsthand"; + "fitting"; + "fixed"; + "flaky"; + "flamboyant"; + "flashy"; + "flat"; + "flawed"; + "flawless"; + "flickering"; + "flimsy"; + "flippant"; + "flowery"; + "fluffy"; + "fluid"; + "flustered"; + "focused"; + "fond"; + "foolhardy"; + "foolish"; + "forceful"; + "forked"; + "formal"; + "forsaken"; + "forthright"; + "fortunate"; + "fragrant"; + "frail"; + "frank"; + "frayed"; + "free"; + "french"; + "fresh"; + "frequent"; + "friendly"; + "frightened"; + "frightening"; + "frigid"; + "frilly"; + "frizzy"; + "frivolous"; + "front"; + "frosty"; + "frozen"; + "frugal"; + "fruitful"; + "full"; + "fumbling"; + "functional"; + "funny"; + "fussy"; + "fuzzy"; + "gargantuan"; + "gaseous"; + "general"; + "generous"; + "gentle"; + "genuine"; + "giant"; + "giddy"; + "gigantic"; + "gifted"; + "giving"; + "glamorous"; + "glaring"; + "glass"; + "gleaming"; + "gleeful"; + "glistening"; + "glittering"; + "gloomy"; + "glorious"; + "glossy"; + "glum"; + "golden"; + "good"; + "gorgeous"; + "graceful"; + "gracious"; + "grand"; + "grandiose"; + "granular"; + "grateful"; + "grave"; + "gray"; + "great"; + "greedy"; + "green"; + "gregarious"; + "grim"; + "grimy"; + "gripping"; + "grizzled"; + "gross"; + "grotesque"; + "grouchy"; + "grounded"; + "growing"; + "growling"; + "grown"; + "grubby"; + "gruesome"; + "grumpy"; + "guilty"; + "gullible"; + "gummy"; + "hairy"; + "half"; + "handmade"; + "handsome"; + "handy"; + "happy"; + "hard"; + "harmful"; + "harmless"; + "harmonious"; + "harsh"; + "hasty"; + "hateful"; + "haunting"; + "healthy"; + "heartfelt"; + "hearty"; + "heavenly"; + "heavy"; + "hefty"; + "helpful"; + "helpless"; + "hidden"; + "hideous"; + "high"; + "hilarious"; + "hoarse"; + "hollow"; + "homely"; + "honest"; + "honorable"; + "honored"; + "hopeful"; + "horrible"; + "hospitable"; + "hot"; + "huge"; + "humble"; + "humiliating"; + "humming"; + "humongous"; + "hungry"; + "hurtful"; + "husky"; + "icky"; + "icy"; + "ideal"; + "idealistic"; + "identical"; + "idle"; + "idiotic"; + "idolized"; + "ignorant"; + "ill"; + "illegal"; + "illiterate"; + "illustrious"; + "imaginary"; + "imaginative"; + "immaculate"; + "immaterial"; + "immediate"; + "immense"; + "impassioned"; + "impeccable"; + "impartial"; + "imperfect"; + "imperturbable"; + "impish"; + "impolite"; + "important"; + "impossible"; + "impractical"; + "impressionable"; + "impressive"; + "improbable"; + "impure"; + "inborn"; + "incomparable"; + "incompatible"; + "incomplete"; + "inconsequential"; + "incredible"; + "indelible"; + "inexperienced"; + "indolent"; + "infamous"; + "infantile"; + "infatuated"; + "inferior"; + "infinite"; + "informal"; + "innocent"; + "insecure"; + "insidious"; + "insignificant"; + "insistent"; + "instructive"; + "insubstantial"; + "intelligent"; + "intent"; + "intentional"; + "interesting"; + "internal"; + "international"; + "intrepid"; + 
"ironclad"; + "irresponsible"; + "irritating"; + "itchy"; + "jaded"; + "jagged"; + "jaunty"; + "jealous"; + "jittery"; + "joint"; + "jolly"; + "jovial"; + "joyful"; + "joyous"; + "jubilant"; + "judicious"; + "juicy"; + "jumbo"; + "junior"; + "jumpy"; + "juvenile"; + "kaleidoscopic"; + "keen"; + "key"; + "kind"; + "kindhearted"; + "kindly"; + "klutzy"; + "knobby"; + "knotty"; + "knowledgeable"; + "knowing"; + "known"; + "kooky"; + "lame"; + "lanky"; + "large"; + "last"; + "lasting"; + "late"; + "lavish"; + "lawful"; + "lazy"; + "leading"; + "lean"; + "leafy"; + "left"; + "legal"; + "legitimate"; + "light"; + "lighthearted"; + "likable"; + "likely"; + "limited"; + "limp"; + "limping"; + "linear"; + "lined"; + "liquid"; + "little"; + "live"; + "lively"; + "livid"; + "loathsome"; + "lone"; + "lonely"; + "long"; + "loose"; + "lopsided"; + "lost"; + "loud"; + "lovable"; + "lovely"; + "loving"; + "low"; + "loyal"; + "lucky"; + "lumbering"; + "luminous"; + "lumpy"; + "lustrous"; + "luxurious"; + "mad"; + "magnificent"; + "majestic"; + "major"; + "male"; + "mammoth"; + "married"; + "marvelous"; + "masculine"; + "massive"; + "mature"; + "meager"; + "mealy"; + "mean"; + "measly"; + "meaty"; + "medical"; + "mediocre"; + "medium"; + "meek"; + "mellow"; + "melodic"; + "memorable"; + "menacing"; + "merry"; + "messy"; + "metallic"; + "mild"; + "milky"; + "mindless"; + "miniature"; + "minor"; + "minty"; + "miserable"; + "miserly"; + "misguided"; + "misty"; + "mixed"; + "modern"; + "modest"; + "moist"; + "monstrous"; + "monthly"; + "monumental"; + "moral"; + "mortified"; + "motherly"; + "motionless"; + "mountainous"; + "muddy"; + "muffled"; + "multicolored"; + "mundane"; + "murky"; + "mushy"; + "musty"; + "muted"; + "mysterious"; + "naive"; + "narrow"; + "nasty"; + "natural"; + "naughty"; + "nautical"; + "near"; + "neat"; + "necessary"; + "needy"; + "negative"; + "neglected"; + "negligible"; + "neighboring"; + "nervous"; + "new"; + "nice"; + "nifty"; + "nimble"; + "nippy"; + "nocturnal"; + "noisy"; + "nonstop"; + "normal"; + "notable"; + "noted"; + "noteworthy"; + "novel"; + "noxious"; + "numb"; + "nutritious"; + "nutty"; + "obedient"; + "obese"; + "oblong"; + "oily"; + "oblong"; + "obvious"; + "occasional"; + "odd"; + "oddball"; + "offbeat"; + "offensive"; + "official"; + "old"; + "only"; + "open"; + "optimal"; + "optimistic"; + "opulent"; + "orange"; + "orderly"; + "organic"; + "ornate"; + "ornery"; + "ordinary"; + "original"; + "other"; + "our"; + "outlying"; + "outgoing"; + "outlandish"; + "outrageous"; + "outstanding"; + "oval"; + "overcooked"; + "overdue"; + "overjoyed"; + "overlooked"; + "palatable"; + "pale"; + "paltry"; + "parallel"; + "parched"; + "partial"; + "passionate"; + "past"; + "pastel"; + "peaceful"; + "peppery"; + "perfect"; + "perfumed"; + "periodic"; + "perky"; + "personal"; + "pertinent"; + "pesky"; + "pessimistic"; + "petty"; + "phony"; + "physical"; + "piercing"; + "pink"; + "pitiful"; + "plain"; + "plaintive"; + "plastic"; + "playful"; + "pleasant"; + "pleased"; + "pleasing"; + "plump"; + "plush"; + "polished"; + "polite"; + "political"; + "pointed"; + "pointless"; + "poised"; + "poor"; + "popular"; + "portly"; + "posh"; + "positive"; + "possible"; + "potable"; + "powerful"; + "powerless"; + "practical"; + "precious"; + "present"; + "prestigious"; + "pretty"; + "precious"; + "previous"; + "pricey"; + "prickly"; + "primary"; + "prime"; + "pristine"; + "private"; + "prize"; + "probable"; + "productive"; + "profitable"; + "profuse"; + "proper"; + "proud"; + "prudent"; + "punctual"; + 
"pungent"; + "puny"; + "pure"; + "purple"; + "pushy"; + "putrid"; + "puzzled"; + "puzzling"; + "quaint"; + "qualified"; + "quarrelsome"; + "quarterly"; + "queasy"; + "querulous"; + "questionable"; + "quick"; + "quiet"; + "quintessential"; + "quirky"; + "quixotic"; + "quizzical"; + "radiant"; + "ragged"; + "rapid"; + "rare"; + "rash"; + "raw"; + "recent"; + "reckless"; + "rectangular"; + "ready"; + "real"; + "realistic"; + "reasonable"; + "red"; + "reflecting"; + "regal"; + "regular"; + "reliable"; + "relieved"; + "remarkable"; + "remorseful"; + "remote"; + "repentant"; + "required"; + "respectful"; + "responsible"; + "repulsive"; + "revolving"; + "rewarding"; + "rich"; + "rigid"; + "right"; + "ringed"; + "ripe"; + "roasted"; + "robust"; + "rosy"; + "rotating"; + "rotten"; + "rough"; + "round"; + "rowdy"; + "royal"; + "rubbery"; + "rundown"; + "ruddy"; + "rude"; + "runny"; + "rural"; + "rusty"; + "sad"; + "safe"; + "salty"; + "same"; + "sandy"; + "sane"; + "sarcastic"; + "sardonic"; + "satisfied"; + "scaly"; + "scarce"; + "scared"; + "scary"; + "scented"; + "scholarly"; + "scientific"; + "scornful"; + "scratchy"; + "scrawny"; + "second"; + "secondary"; + "secret"; + "selfish"; + "sentimental"; + "separate"; + "serene"; + "serious"; + "serpentine"; + "several"; + "severe"; + "shabby"; + "shadowy"; + "shady"; + "shallow"; + "shameful"; + "shameless"; + "sharp"; + "shimmering"; + "shiny"; + "shocked"; + "shocking"; + "shoddy"; + "short"; + "showy"; + "shrill"; + "shy"; + "sick"; + "silent"; + "silky"; + "silly"; + "silver"; + "similar"; + "simple"; + "simplistic"; + "sinful"; + "single"; + "sizzling"; + "skeletal"; + "skinny"; + "sleepy"; + "slight"; + "slim"; + "slimy"; + "slippery"; + "slow"; + "slushy"; + "small"; + "smart"; + "smoggy"; + "smooth"; + "smug"; + "snappy"; + "snarling"; + "sneaky"; + "sniveling"; + "snoopy"; + "sociable"; + "soft"; + "soggy"; + "solid"; + "somber"; + "some"; + "spherical"; + "sophisticated"; + "sore"; + "sorrowful"; + "soulful"; + "soupy"; + "sour"; + "spanish"; + "sparkling"; + "sparse"; + "specific"; + "spectacular"; + "speedy"; + "spicy"; + "spiffy"; + "spirited"; + "spiteful"; + "splendid"; + "spotless"; + "spotted"; + "spry"; + "square"; + "squeaky"; + "squiggly"; + "stable"; + "staid"; + "stained"; + "stale"; + "standard"; + "starchy"; + "stark"; + "starry"; + "steep"; + "sticky"; + "stiff"; + "stimulating"; + "stingy"; + "stormy"; + "straight"; + "strange"; + "steel"; + "strict"; + "strident"; + "striking"; + "striped"; + "strong"; + "studious"; + "stunning"; + "stupendous"; + "stupid"; + "sturdy"; + "stylish"; + "subdued"; + "submissive"; + "substantial"; + "subtle"; + "suburban"; + "sudden"; + "sugary"; + "sunny"; + "super"; + "superb"; + "superficial"; + "superior"; + "supportive"; + "surprised"; + "suspicious"; + "svelte"; + "sweaty"; + "sweet"; + "sweltering"; + "swift"; + "sympathetic"; + "tall"; + "talkative"; + "tame"; + "tan"; + "tangible"; + "tart"; + "tasty"; + "tattered"; + "taut"; + "tedious"; + "teeming"; + "tempting"; + "tender"; + "tense"; + "tepid"; + "terrible"; + "terrific"; + "testy"; + "thankful"; + "that"; + "these"; + "thick"; + "thin"; + "third"; + "thirsty"; + "this"; + "thorough"; + "thorny"; + "those"; + "thoughtful"; + "threadbare"; + "thrifty"; + "thunderous"; + "tidy"; + "tight"; + "timely"; + "tinted"; + "tiny"; + "tired"; + "torn"; + "total"; + "tough"; + "traumatic"; + "treasured"; + "tremendous"; + "tragic"; + "trained"; + "tremendous"; + "triangular"; + "tricky"; + "trifling"; + "trim"; + "trivial"; + "troubled"; + 
"true"; + "trusting"; + "trustworthy"; + "trusty"; + "truthful"; + "tubby"; + "turbulent"; + "twin"; + "ugly"; + "ultimate"; + "unacceptable"; + "unaware"; + "uncomfortable"; + "uncommon"; + "unconscious"; + "understated"; + "unequaled"; + "uneven"; + "unfinished"; + "unfit"; + "unfolded"; + "unfortunate"; + "unhappy"; + "unhealthy"; + "uniform"; + "unimportant"; + "unique"; + "united"; + "unkempt"; + "unknown"; + "unlawful"; + "unlined"; + "unlucky"; + "unnatural"; + "unpleasant"; + "unrealistic"; + "unripe"; + "unruly"; + "unselfish"; + "unsightly"; + "unsteady"; + "unsung"; + "untidy"; + "untimely"; + "untried"; + "untrue"; + "unused"; + "unusual"; + "unwelcome"; + "unwieldy"; + "unwilling"; + "unwitting"; + "unwritten"; + "upbeat"; + "upright"; + "upset"; + "urban"; + "usable"; + "used"; + "useful"; + "useless"; + "utilized"; + "utter"; + "vacant"; + "vague"; + "vain"; + "valid"; + "valuable"; + "vapid"; + "variable"; + "vast"; + "velvety"; + "venerated"; + "vengeful"; + "verifiable"; + "vibrant"; + "vicious"; + "victorious"; + "vigilant"; + "vigorous"; + "villainous"; + "violet"; + "violent"; + "virtual"; + "virtuous"; + "visible"; + "vital"; + "vivacious"; + "vivid"; + "voluminous"; + "warlike"; + "warm"; + "warmhearted"; + "warped"; + "wary"; + "wasteful"; + "watchful"; + "waterlogged"; + "watery"; + "wavy"; + "wealthy"; + "weak"; + "weary"; + "webbed"; + "wee"; + "weekly"; + "weepy"; + "weighty"; + "weird"; + "welcome"; + "wet"; + "which"; + "whimsical"; + "whirlwind"; + "whispered"; + "white"; + "whole"; + "whopping"; + "wicked"; + "wide"; + "wiggly"; + "wild"; + "willing"; + "wilted"; + "winding"; + "windy"; + "winged"; + "wiry"; + "wise"; + "witty"; + "wobbly"; + "woeful"; + "wonderful"; + "wooden"; + "woozy"; + "wordy"; + "worldly"; + "worn"; + "worried"; + "worrisome"; + "worse"; + "worst"; + "worthless"; + "worthwhile"; + "worthy"; + "wrathful"; + "wretched"; + "writhing"; + "wrong"; + "wry"; + "yawning"; + "yearly"; + "yellow"; + "yellowish"; + "young"; + "youthful"; + "yummy"; + "zany"; + "zealous"; + "zesty" |] -let animals = [| - "aardvark";"abyssinian";"affenpinscher";"akbash";"akita";"albatross"; - "alligator";"angelfish";"ant";"anteater";"antelope";"armadillo";"avocet"; - "axolotl";"baboon";"badger";"balinese";"bandicoot";"barb";"barnacle"; - "barracuda";"bat";"beagle";"bear";"beaver";"beetle";"binturong";"birman"; - "bison";"bloodhound";"bobcat";"bombay";"bongo";"bonobo";"booby";"budgerigar"; - "buffalo";"bulldog";"bullfrog";"burmese";"butterfly";"caiman";"camel"; - "capybara";"caracal";"cassowary";"cat";"caterpillar";"catfish";"centipede"; - "chameleon";"chamois";"cheetah";"chicken";"chihuahua";"chimpanzee"; - "chinchilla";"chinook";"chipmunk";"cichlid";"coati";"cockroach";"collie"; - "coral";"cougar";"cow";"coyote";"crab";"crane";"crocodile";"cuscus"; - "cuttlefish";"dachshund";"dalmatian";"deer";"dhole";"dingo";"discus";"dodo"; - "dog";"dolphin";"donkey";"dormouse";"dragonfly";"drever";"duck";"dugong"; - "dunker";"eagle";"earwig";"echidna";"elephant";"emu";"falcon";"fennec"; - "ferret";"fish";"flamingo";"flounder";"fly";"fossa";"fox";"frigatebird";"frog"; - "gar";"gecko";"gerbil";"gharial";"gibbon";"giraffe";"goat";"goose";"gopher"; - "gorilla";"grasshopper";"greyhound";"grouse";"guppy";"hamster";"hare"; - "harrier";"havanese";"hedgehog";"heron";"himalayan";"hippopotamus";"horse"; - "human";"hummingbird";"hyena";"ibis";"iguana";"impala";"indri";"insect"; - "jackal";"jaguar";"javanese";"jellyfish";"kakapo";"kangaroo";"kingfisher"; - 
"kiwi";"koala";"kudu";"labradoodle";"ladybird";"lemming";"lemur";"leopard"; - "liger";"lion";"lionfish";"lizard";"llama";"lobster";"lynx";"macaw";"magpie"; - "maltese";"manatee";"mandrill";"markhor";"mastiff";"mayfly";"meerkat"; - "millipede";"mole";"molly";"mongoose";"mongrel";"monkey";"moorhen";"moose"; - "moth";"mouse";"mule";"neanderthal";"newfoundland";"newt";"nightingale"; - "numbat";"ocelot";"octopus";"okapi";"olm";"opossum";"ostrich";"otter";"oyster"; - "pademelon";"panther";"parrot";"peacock";"pekingese";"pelican";"penguin"; - "persian";"pheasant";"pig";"pika";"pike";"piranha";"platypus";"pointer"; - "poodle";"porcupine";"possum";"prawn";"puffin";"pug";"puma";"quail";"quetzal"; - "quokka";"quoll";"rabbit";"raccoon";"ragdoll";"rat";"rattlesnake";"reindeer"; - "rhinoceros";"robin";"rottweiler";"salamander";"saola";"scorpion";"seahorse"; - "seal";"serval";"sheep";"shrimp";"siamese";"siberian";"skunk";"sloth";"snail"; - "snake";"snowshoe";"somali";"sparrow";"sponge";"squid";"squirrel";"starfish"; - "stingray";"stoat";"swan";"tang";"tapir";"tarsier";"termite";"tetra";"tiffany"; - "tiger";"tortoise";"toucan";"tropicbird";"tuatara";"turkey";"uakari";"uguisu"; - "umbrellabird";"vulture";"wallaby";"walrus";"warthog";"wasp";"weasel"; - "whippet";"wildebeest";"wolf";"wolverine";"wombat";"woodlouse";"woodpecker"; - "wrasse";"yak";"zebra";"zebu";"zonkey";"zorse"; -|] +let animals = + [| "aardvark"; + "abyssinian"; + "affenpinscher"; + "akbash"; + "akita"; + "albatross"; + "alligator"; + "angelfish"; + "ant"; + "anteater"; + "antelope"; + "armadillo"; + "avocet"; + "axolotl"; + "baboon"; + "badger"; + "balinese"; + "bandicoot"; + "barb"; + "barnacle"; + "barracuda"; + "bat"; + "beagle"; + "bear"; + "beaver"; + "beetle"; + "binturong"; + "birman"; + "bison"; + "bloodhound"; + "bobcat"; + "bombay"; + "bongo"; + "bonobo"; + "booby"; + "budgerigar"; + "buffalo"; + "bulldog"; + "bullfrog"; + "burmese"; + "butterfly"; + "caiman"; + "camel"; + "capybara"; + "caracal"; + "cassowary"; + "cat"; + "caterpillar"; + "catfish"; + "centipede"; + "chameleon"; + "chamois"; + "cheetah"; + "chicken"; + "chihuahua"; + "chimpanzee"; + "chinchilla"; + "chinook"; + "chipmunk"; + "cichlid"; + "coati"; + "cockroach"; + "collie"; + "coral"; + "cougar"; + "cow"; + "coyote"; + "crab"; + "crane"; + "crocodile"; + "cuscus"; + "cuttlefish"; + "dachshund"; + "dalmatian"; + "deer"; + "dhole"; + "dingo"; + "discus"; + "dodo"; + "dog"; + "dolphin"; + "donkey"; + "dormouse"; + "dragonfly"; + "drever"; + "duck"; + "dugong"; + "dunker"; + "eagle"; + "earwig"; + "echidna"; + "elephant"; + "emu"; + "falcon"; + "fennec"; + "ferret"; + "fish"; + "flamingo"; + "flounder"; + "fly"; + "fossa"; + "fox"; + "frigatebird"; + "frog"; + "gar"; + "gecko"; + "gerbil"; + "gharial"; + "gibbon"; + "giraffe"; + "goat"; + "goose"; + "gopher"; + "gorilla"; + "grasshopper"; + "greyhound"; + "grouse"; + "guppy"; + "hamster"; + "hare"; + "harrier"; + "havanese"; + "hedgehog"; + "heron"; + "himalayan"; + "hippopotamus"; + "horse"; + "human"; + "hummingbird"; + "hyena"; + "ibis"; + "iguana"; + "impala"; + "indri"; + "insect"; + "jackal"; + "jaguar"; + "javanese"; + "jellyfish"; + "kakapo"; + "kangaroo"; + "kingfisher"; + "kiwi"; + "koala"; + "kudu"; + "labradoodle"; + "ladybird"; + "lemming"; + "lemur"; + "leopard"; + "liger"; + "lion"; + "lionfish"; + "lizard"; + "llama"; + "lobster"; + "lynx"; + "macaw"; + "magpie"; + "maltese"; + "manatee"; + "mandrill"; + "markhor"; + "mastiff"; + "mayfly"; + "meerkat"; + "millipede"; + "mole"; + "molly"; + "mongoose"; 
+ "mongrel"; + "monkey"; + "moorhen"; + "moose"; + "moth"; + "mouse"; + "mule"; + "neanderthal"; + "newfoundland"; + "newt"; + "nightingale"; + "numbat"; + "ocelot"; + "octopus"; + "okapi"; + "olm"; + "opossum"; + "ostrich"; + "otter"; + "oyster"; + "pademelon"; + "panther"; + "parrot"; + "peacock"; + "pekingese"; + "pelican"; + "penguin"; + "persian"; + "pheasant"; + "pig"; + "pika"; + "pike"; + "piranha"; + "platypus"; + "pointer"; + "poodle"; + "porcupine"; + "possum"; + "prawn"; + "puffin"; + "pug"; + "puma"; + "quail"; + "quetzal"; + "quokka"; + "quoll"; + "rabbit"; + "raccoon"; + "ragdoll"; + "rat"; + "rattlesnake"; + "reindeer"; + "rhinoceros"; + "robin"; + "rottweiler"; + "salamander"; + "saola"; + "scorpion"; + "seahorse"; + "seal"; + "serval"; + "sheep"; + "shrimp"; + "siamese"; + "siberian"; + "skunk"; + "sloth"; + "snail"; + "snake"; + "snowshoe"; + "somali"; + "sparrow"; + "sponge"; + "squid"; + "squirrel"; + "starfish"; + "stingray"; + "stoat"; + "swan"; + "tang"; + "tapir"; + "tarsier"; + "termite"; + "tetra"; + "tiffany"; + "tiger"; + "tortoise"; + "toucan"; + "tropicbird"; + "tuatara"; + "turkey"; + "uakari"; + "uguisu"; + "umbrellabird"; + "vulture"; + "wallaby"; + "walrus"; + "warthog"; + "wasp"; + "weasel"; + "whippet"; + "wildebeest"; + "wolf"; + "wolverine"; + "wombat"; + "woodlouse"; + "woodpecker"; + "wrasse"; + "yak"; + "zebra"; + "zebu"; + "zonkey"; + "zorse" |] -let pick a z = - a.(Z.rem z (Array.length a |> Z.of_int) |> Z.to_int) +let pick a z = a.(Z.rem z (Array.length a |> Z.of_int) |> Z.to_int) -let hash a = - Blake2B.hash_string [a] |> Blake2B.to_string +let hash a = Blake2B.hash_string [a] |> Blake2B.to_string -type t = { - c : string ; - t : string ; - h : string ; - d : string ; -} +type t = {c : string; t : string; h : string; d : string} -let pp ppf { c ; t ; h ; d } = - Format.fprintf ppf "%s-%s-%s-%s" c t h d +let pp ppf {c; t; h; d} = Format.fprintf ppf "%s-%s-%s-%s" c t h d let crouching_tiger string = let c = pick adjectives (string |> hash |> Z.of_bits) in - let t = pick animals (string |> hash |> hash |> Z.of_bits) in + let t = pick animals (string |> hash |> hash |> Z.of_bits) in let h = pick adjectives (string |> hash |> hash |> hash |> Z.of_bits) in - let d = pick animals (string |> hash |> hash |> hash |> hash |> Z.of_bits) in - { c ; t ; h ; d } + let d = pick animals (string |> hash |> hash |> hash |> hash |> Z.of_bits) in + {c; t; h; d} diff --git a/src/lib_signer_backends/ledger_names.mli b/src/lib_signer_backends/ledger_names.mli index 9723d11dfb64d0da50ac7702a5e30cc1b80f5f5d..84cff4a470a63cb6af472896ac7d7b6c07349f6d 100644 --- a/src/lib_signer_backends/ledger_names.mli +++ b/src/lib_signer_backends/ledger_names.mli @@ -23,16 +23,11 @@ (* *) (*****************************************************************************) -type t = { - c : string ; - t : string ; - h : string ; - d : string ; -} +type t = {c : string; t : string; h : string; d : string} val pp : Format.formatter -> t -> unit -val crouching_tiger : string -> t (** [crouching_tiger str] is a sentence derived deterministically from [str] with the form adjective-animal-adjective-animal. E.g. 
crouching-tiger-hidden-dragon *) +val crouching_tiger : string -> t diff --git a/src/lib_signer_backends/remote.ml b/src/lib_signer_backends/remote.ml index 86a708e4b89a54c2a6ab474540e5422f00bf58f2..a81bbff3f5a20bf880c168b2030f502a0995c61a 100644 --- a/src/lib_signer_backends/remote.ml +++ b/src/lib_signer_backends/remote.ml @@ -27,65 +27,78 @@ open Client_keys let scheme = "remote" -module Make(S : sig - val default : Uri.t - val authenticate: Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t - val logger: RPC_client.logger - end) = struct +module Make (S : sig + val default : Uri.t + val authenticate : + Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t + + val logger : RPC_client.logger +end) = +struct let scheme = scheme - let title = - "Built-in tezos-signer using remote wallet." + let title = "Built-in tezos-signer using remote wallet." let description = "Valid locators are of the form\n\ \ - remote://tz1...\n\ The key will be queried to current remote signer, which can be \ - configured with the `--remote-signer` or `-R` options, \ - or by defining the following environment variables:\n\ + configured with the `--remote-signer` or `-R` options, or by defining \ + the following environment variables:\n\ \ - $TEZOS_SIGNER_UNIX_PATH,\n\ \ - $TEZOS_SIGNER_TCP_HOST and $TEZOS_SIGNER_TCP_PORT (default: 7732),\n\ \ - $TEZOS_SIGNER_HTTP_HOST and $TEZOS_SIGNER_HTTP_PORT (default: 6732),\n\ \ - $TEZOS_SIGNER_HTTPS_HOST and $TEZOS_SIGNER_HTTPS_PORT (default: 443)." - module Socket = Socket.Make(S) - module Http = Http.Make(S) - module Https = Https.Make(S) + module Socket = Socket.Make (S) + module Http = Http.Make (S) + module Https = Https.Make (S) let get_remote () = match Uri.scheme S.default with - | Some "unix" -> (module Socket.Unix : SIGNER) - | Some "tcp" -> (module Socket.Tcp : SIGNER) - | Some "http" -> (module Http : SIGNER) - | Some "https" -> (module Https : SIGNER) - | _ -> assert false + | Some "unix" -> + (module Socket.Unix : SIGNER) + | Some "tcp" -> + (module Socket.Tcp : SIGNER) + | Some "http" -> + (module Http : SIGNER) + | Some "https" -> + (module Https : SIGNER) + | _ -> + assert false module Remote = (val get_remote () : SIGNER) + let key = match Uri.scheme S.default with | Some "unix" -> - (fun uri -> - let key = Uri.path uri in - Uri.add_query_param' S.default ("pkh", key)) + fun uri -> + let key = Uri.path uri in + Uri.add_query_param' S.default ("pkh", key) | Some "tcp" -> - (fun uri -> - let key = Uri.path uri in - Uri.with_path S.default key) - | Some ("https" | "http") -> - (fun uri -> - let key = Uri.path uri in - match Uri.path S.default with - | "" -> Uri.with_path S.default key - | path -> Uri.with_path S.default (path ^ "/" ^ key)) - | _ -> assert false + fun uri -> + let key = Uri.path uri in + Uri.with_path S.default key + | Some ("https" | "http") -> ( + fun uri -> + let key = Uri.path uri in + match Uri.path S.default with + | "" -> + Uri.with_path S.default key + | path -> + Uri.with_path S.default (path ^ "/" ^ key) ) + | _ -> + assert false let public_key ?interactive pk_uri = - Remote.public_key ?interactive + Remote.public_key + ?interactive (Client_keys.make_pk_uri (key (pk_uri : pk_uri :> Uri.t))) let public_key_hash ?interactive pk_uri = - Remote.public_key_hash ?interactive + Remote.public_key_hash + ?interactive (Client_keys.make_pk_uri (key (pk_uri : pk_uri :> Uri.t))) let neuterize sk_uri = @@ -110,7 +123,6 @@ module Make(S : sig let supports_deterministic_nonces sk_uri = 
Remote.supports_deterministic_nonces (Client_keys.make_sk_uri (key (sk_uri : sk_uri :> Uri.t))) - end let make_sk sk = @@ -122,81 +134,96 @@ let make_pk pk = (Uri.make ~scheme ~path:(Signature.Public_key.to_b58check pk) ()) let read_base_uri_from_env () = - match Sys.getenv_opt "TEZOS_SIGNER_UNIX_PATH", - Sys.getenv_opt "TEZOS_SIGNER_TCP_HOST", - Sys.getenv_opt "TEZOS_SIGNER_HTTP_HOST", - Sys.getenv_opt "TEZOS_SIGNER_HTTPS_HOST" with - | None, None, None, None -> return_none - | Some path, None, None, None -> + match + ( Sys.getenv_opt "TEZOS_SIGNER_UNIX_PATH", + Sys.getenv_opt "TEZOS_SIGNER_TCP_HOST", + Sys.getenv_opt "TEZOS_SIGNER_HTTP_HOST", + Sys.getenv_opt "TEZOS_SIGNER_HTTPS_HOST" ) + with + | (None, None, None, None) -> + return_none + | (Some path, None, None, None) -> return_some (Socket.make_unix_base path) - | None, Some host, None, None -> begin - try - let port = - match Sys.getenv_opt "TEZOS_SIGNER_TCP_PORT" with - | None -> 7732 - | Some port -> int_of_string port in - return_some (Socket.make_tcp_base host port) - with Invalid_argument _ -> - failwith "Failed to parse TEZOS_SIGNER_TCP_PORT.@." - end - | None, None, Some host, None -> begin - try - let port = - match Sys.getenv_opt "TEZOS_SIGNER_HTTP_PORT" with - | None -> 6732 - | Some port -> int_of_string port in - return_some (Http.make_base host port) - with Invalid_argument _ -> - failwith "Failed to parse TEZOS_SIGNER_HTTP_PORT.@." - end - | None, None, None, Some host -> begin - try - let port = - match Sys.getenv_opt "TEZOS_SIGNER_HTTPS_PORT" with - | None -> 443 - | Some port -> int_of_string port in - return_some (Https.make_base host port) - with Invalid_argument _ -> - failwith "Failed to parse TEZOS_SIGNER_HTTPS_PORT.@." - end - | _, _, _, _ -> + | (None, Some host, None, None) -> ( + try + let port = + match Sys.getenv_opt "TEZOS_SIGNER_TCP_PORT" with + | None -> + 7732 + | Some port -> + int_of_string port + in + return_some (Socket.make_tcp_base host port) + with Invalid_argument _ -> + failwith "Failed to parse TEZOS_SIGNER_TCP_PORT.@." ) + | (None, None, Some host, None) -> ( + try + let port = + match Sys.getenv_opt "TEZOS_SIGNER_HTTP_PORT" with + | None -> + 6732 + | Some port -> + int_of_string port + in + return_some (Http.make_base host port) + with Invalid_argument _ -> + failwith "Failed to parse TEZOS_SIGNER_HTTP_PORT.@." ) + | (None, None, None, Some host) -> ( + try + let port = + match Sys.getenv_opt "TEZOS_SIGNER_HTTPS_PORT" with + | None -> + 443 + | Some port -> + int_of_string port + in + return_some (Https.make_base host port) + with Invalid_argument _ -> + failwith "Failed to parse TEZOS_SIGNER_HTTPS_PORT.@." ) + | (_, _, _, _) -> failwith "Only one the following environment variable must be defined: \ - TEZOS_SIGNER_UNIX_PATH, \ - TEZOS_SIGNER_TCP_HOST, \ - TEZOS_SIGNER_HTTP_HOST, \ - TEZOS_SIGNER_HTTPS_HOST@." + TEZOS_SIGNER_UNIX_PATH, TEZOS_SIGNER_TCP_HOST, \ + TEZOS_SIGNER_HTTP_HOST, TEZOS_SIGNER_HTTPS_HOST@." type error += Invalid_remote_signer of string let () = register_error_kind `Branch - ~id: "invalid_remote_signer" - ~title: "Unexpected URI fot remote signer" - ~description: "The provided remote signer is invalid." 
- ~pp: - (fun ppf s -> - Format.fprintf ppf - "@[<v 0>Value '%s' is not a valid URI for a remote signer.@,\ - Supported URIs for remote signers are of the form:@,\ - \ - unix:///path/to/socket/file@,\ - \ - tcp://host:port@,\ - \ - http://host[:port][/prefix]@,\ - \ - https://host[:port][/prefix]@]" s) + ~id:"invalid_remote_signer" + ~title:"Unexpected URI fot remote signer" + ~description:"The provided remote signer is invalid." + ~pp:(fun ppf s -> + Format.fprintf + ppf + "@[<v 0>Value '%s' is not a valid URI for a remote signer.@,\ + Supported URIs for remote signers are of the form:@,\ + \ - unix:///path/to/socket/file@,\ + \ - tcp://host:port@,\ + \ - http://host[:port][/prefix]@,\ + \ - https://host[:port][/prefix]@]" + s) Data_encoding.(obj1 (req "uri" string)) (function Invalid_remote_signer s -> Some s | _ -> None) (fun s -> Invalid_remote_signer s) let parse_base_uri s = - trace (Invalid_remote_signer s) @@ + trace (Invalid_remote_signer s) + @@ try let uri = Uri.of_string s in match Uri.scheme uri with - | Some "http" -> return uri - | Some "https" -> return uri - | Some "tcp" -> return uri - | Some "unix" -> return uri - | Some scheme -> failwith "Unknown scheme: %s" scheme - | None -> failwith "Unknown scheme: <empty>" + | Some "http" -> + return uri + | Some "https" -> + return uri + | Some "tcp" -> + return uri + | Some "unix" -> + return uri + | Some scheme -> + failwith "Unknown scheme: %s" scheme + | None -> + failwith "Unknown scheme: <empty>" with Invalid_argument msg -> failwith "Malformed URI: %s" msg diff --git a/src/lib_signer_backends/remote.mli b/src/lib_signer_backends/remote.mli index f29a009d718d05058cbaa6520a4eaa8823915cf8..8e57daf3f3f680b2cf667e1b5a9dc1c9dc0ac1a3 100644 --- a/src/lib_signer_backends/remote.mli +++ b/src/lib_signer_backends/remote.mli @@ -23,14 +23,19 @@ (* *) (*****************************************************************************) -module Make(S : sig - val default : Uri.t - val authenticate: Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t - val logger: RPC_client.logger - end) : Client_keys.SIGNER +module Make (S : sig + val default : Uri.t -val make_pk: Signature.public_key -> Client_keys.pk_uri -val make_sk: Signature.secret_key -> Client_keys.sk_uri + val authenticate : + Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t -val read_base_uri_from_env: unit -> Uri.t option tzresult Lwt.t -val parse_base_uri: string -> Uri.t tzresult Lwt.t + val logger : RPC_client.logger +end) : Client_keys.SIGNER + +val make_pk : Signature.public_key -> Client_keys.pk_uri + +val make_sk : Signature.secret_key -> Client_keys.sk_uri + +val read_base_uri_from_env : unit -> Uri.t option tzresult Lwt.t + +val parse_base_uri : string -> Uri.t tzresult Lwt.t diff --git a/src/lib_signer_backends/socket.ml b/src/lib_signer_backends/socket.ml index 6991880b49ae1dbc4e0df6f3e91bfd54f612abf6..02a1cac9880d297c76d153e9bbd4c1e90a493da3 100644 --- a/src/lib_signer_backends/socket.ml +++ b/src/lib_signer_backends/socket.ml @@ -27,12 +27,14 @@ open Client_keys open Signer_messages let tcp_scheme = "tcp" -let unix_scheme = "unix" -module Make(P : sig - val authenticate: Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t - end) = struct +let unix_scheme = "unix" +module Make (P : sig + val authenticate : + Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t +end) = +struct type request_type = | Sign_request | Deterministic_nonce_request @@ -40,192 +42,200 @@ module Make(P : 
sig let build_request pkh data signature = function | Sign_request -> - Request.Sign { Sign.Request.pkh ; data ; signature } + Request.Sign {Sign.Request.pkh; data; signature} | Deterministic_nonce_request -> Request.Deterministic_nonce - { Deterministic_nonce.Request.pkh ; data ; signature } + {Deterministic_nonce.Request.pkh; data; signature} | Deterministic_nonce_hash_request -> Request.Deterministic_nonce_hash - { Deterministic_nonce_hash.Request.pkh ; data ; signature } + {Deterministic_nonce_hash.Request.pkh; data; signature} let signer_operation path pkh msg request_type = - begin - Lwt_utils_unix.Socket.connect path >>=? fun conn -> - Lwt_utils_unix.Socket.send - conn Request.encoding Request.Authorized_keys >>=? fun () -> - Lwt_utils_unix.Socket.recv conn - (result_encoding Authorized_keys.Response.encoding) >>=? fun authorized_keys -> - Lwt.return authorized_keys >>=? fun authorized_keys -> - Lwt_unix.close conn >>= fun () -> - begin match authorized_keys with - | No_authentication -> return_none - | Authorized_keys authorized_keys -> - P.authenticate authorized_keys - (Sign.Request.to_sign ~pkh ~data:msg) >>=? fun signature -> - return_some signature - end - end >>=? fun signature -> - Lwt_utils_unix.Socket.connect path >>=? fun conn -> + Lwt_utils_unix.Socket.connect path + >>=? (fun conn -> + Lwt_utils_unix.Socket.send + conn + Request.encoding + Request.Authorized_keys + >>=? fun () -> + Lwt_utils_unix.Socket.recv + conn + (result_encoding Authorized_keys.Response.encoding) + >>=? fun authorized_keys -> + Lwt.return authorized_keys + >>=? fun authorized_keys -> + Lwt_unix.close conn + >>= fun () -> + match authorized_keys with + | No_authentication -> + return_none + | Authorized_keys authorized_keys -> + P.authenticate + authorized_keys + (Sign.Request.to_sign ~pkh ~data:msg) + >>=? fun signature -> return_some signature) + >>=? fun signature -> + Lwt_utils_unix.Socket.connect path + >>=? fun conn -> let req = build_request pkh msg signature request_type in - Lwt_utils_unix.Socket.send conn Request.encoding req >>=? fun () -> - return conn + Lwt_utils_unix.Socket.send conn Request.encoding req + >>=? fun () -> return conn let sign ?watermark path pkh msg = let msg = match watermark with - | None -> msg + | None -> + msg | Some watermark -> - MBytes.concat "" [ Signature.bytes_of_watermark watermark ; msg ] in - signer_operation path pkh msg Sign_request >>=? fun conn -> - Lwt_utils_unix.Socket.recv conn - (result_encoding Sign.Response.encoding) >>=? fun res -> - Lwt_unix.close conn >>= fun () -> - Lwt.return res + MBytes.concat "" [Signature.bytes_of_watermark watermark; msg] + in + signer_operation path pkh msg Sign_request + >>=? fun conn -> + Lwt_utils_unix.Socket.recv conn (result_encoding Sign.Response.encoding) + >>=? fun res -> Lwt_unix.close conn >>= fun () -> Lwt.return res let deterministic_nonce path pkh msg = - signer_operation path pkh msg Deterministic_nonce_request >>=? fun conn -> - Lwt_utils_unix.Socket.recv conn - (result_encoding Deterministic_nonce.Response.encoding) >>=? fun res -> - Lwt_unix.close conn >>= fun () -> - Lwt.return res + signer_operation path pkh msg Deterministic_nonce_request + >>=? fun conn -> + Lwt_utils_unix.Socket.recv + conn + (result_encoding Deterministic_nonce.Response.encoding) + >>=? fun res -> Lwt_unix.close conn >>= fun () -> Lwt.return res let deterministic_nonce_hash path pkh msg = - signer_operation path pkh msg Deterministic_nonce_hash_request >>=? 
fun conn -> - Lwt_utils_unix.Socket.recv conn - (result_encoding Deterministic_nonce_hash.Response.encoding) >>=? fun res -> - Lwt_unix.close conn >>= fun () -> - Lwt.return res + signer_operation path pkh msg Deterministic_nonce_hash_request + >>=? fun conn -> + Lwt_utils_unix.Socket.recv + conn + (result_encoding Deterministic_nonce_hash.Response.encoding) + >>=? fun res -> Lwt_unix.close conn >>= fun () -> Lwt.return res let supports_deterministic_nonces path pkh = - Lwt_utils_unix.Socket.connect path >>=? fun conn -> + Lwt_utils_unix.Socket.connect path + >>=? fun conn -> Lwt_utils_unix.Socket.send - conn Request.encoding (Request.Supports_deterministic_nonces pkh) >>=? fun () -> - Lwt_utils_unix.Socket.recv conn - (result_encoding Supports_deterministic_nonces.Response.encoding) >>=? fun res -> - Lwt_unix.close conn >>= fun () -> - Lwt.return res + conn + Request.encoding + (Request.Supports_deterministic_nonces pkh) + >>=? fun () -> + Lwt_utils_unix.Socket.recv + conn + (result_encoding Supports_deterministic_nonces.Response.encoding) + >>=? fun res -> Lwt_unix.close conn >>= fun () -> Lwt.return res let public_key path pkh = - Lwt_utils_unix.Socket.connect path >>=? fun conn -> - Lwt_utils_unix.Socket.send - conn Request.encoding (Request.Public_key pkh) >>=? fun () -> + Lwt_utils_unix.Socket.connect path + >>=? fun conn -> + Lwt_utils_unix.Socket.send conn Request.encoding (Request.Public_key pkh) + >>=? fun () -> let encoding = result_encoding Public_key.Response.encoding in - Lwt_utils_unix.Socket.recv conn encoding >>=? fun res -> - Lwt_unix.close conn >>= fun () -> - Lwt.return res + Lwt_utils_unix.Socket.recv conn encoding + >>=? fun res -> Lwt_unix.close conn >>= fun () -> Lwt.return res module Unix = struct - let scheme = unix_scheme let title = "Built-in tezos-signer using remote signer through hardcoded unix socket." let description = - "Valid locators are of the form\n\ - \ - unix:/path/to/socket?pkh=tz1..." + "Valid locators are of the form\n - unix:/path/to/socket?pkh=tz1..." let parse uri = assert (Uri.scheme uri = Some scheme) ; - trace (Invalid_uri uri) @@ + trace (Invalid_uri uri) + @@ match Uri.get_query_param uri "pkh" with - | None -> failwith "Missing the query parameter: 'pkh=tz1...'" + | None -> + failwith "Missing the query parameter: 'pkh=tz1...'" | Some key -> - Lwt.return (Signature.Public_key_hash.of_b58check key) >>=? fun key -> + Lwt.return (Signature.Public_key_hash.of_b58check key) + >>=? fun key -> return (Lwt_utils_unix.Socket.Unix (Uri.path uri), key) - let public_key ?interactive:(_) uri = - parse (uri : pk_uri :> Uri.t) >>=? fun (path, pkh) -> - public_key path pkh + let public_key ?interactive:_ uri = + parse (uri : pk_uri :> Uri.t) >>=? fun (path, pkh) -> public_key path pkh let neuterize uri = return (Client_keys.make_pk_uri (uri : sk_uri :> Uri.t)) - let public_key_hash ?interactive:(_) uri = - public_key uri >>=? fun pk -> - return (Signature.Public_key.hash pk, Some pk) + let public_key_hash ?interactive:_ uri = + public_key uri + >>=? fun pk -> return (Signature.Public_key.hash pk, Some pk) let sign ?watermark uri msg = - parse (uri : sk_uri :> Uri.t) >>=? fun (path, pkh) -> - sign ?watermark path pkh msg + parse (uri : sk_uri :> Uri.t) + >>=? fun (path, pkh) -> sign ?watermark path pkh msg let deterministic_nonce uri msg = - parse (uri : sk_uri :> Uri.t) >>=? fun (path, pkh) -> - deterministic_nonce path pkh msg + parse (uri : sk_uri :> Uri.t) + >>=? 
fun (path, pkh) -> deterministic_nonce path pkh msg let deterministic_nonce_hash uri msg = - parse (uri : sk_uri :> Uri.t) >>=? fun (path, pkh) -> - deterministic_nonce_hash path pkh msg + parse (uri : sk_uri :> Uri.t) + >>=? fun (path, pkh) -> deterministic_nonce_hash path pkh msg let supports_deterministic_nonces uri = - parse (uri : sk_uri :> Uri.t) >>=? fun (path, pkh) -> - supports_deterministic_nonces path pkh - + parse (uri : sk_uri :> Uri.t) + >>=? fun (path, pkh) -> supports_deterministic_nonces path pkh end module Tcp = struct - let scheme = tcp_scheme let title = "Built-in tezos-signer using remote signer through hardcoded tcp socket." let description = - "Valid locators are of the form\n\ - \ - tcp://host:port/tz1..." + "Valid locators are of the form\n - tcp://host:port/tz1..." let parse uri = assert (Uri.scheme uri = Some scheme) ; - trace (Invalid_uri uri) @@ - match Uri.host uri, Uri.port uri with - | None, _ -> + trace (Invalid_uri uri) + @@ + match (Uri.host uri, Uri.port uri) with + | (None, _) -> failwith "Missing host address" - | _, None -> + | (_, None) -> failwith "Missing host port" - | Some path, Some port -> + | (Some path, Some port) -> let pkh = Uri.path uri in - let pkh = - try String.(sub pkh 1 (length pkh - 1)) - with _ -> "" in - Lwt.return - (Signature.Public_key_hash.of_b58check pkh) >>=? fun pkh -> - return (Lwt_utils_unix.Socket.Tcp (path, string_of_int port, - [Lwt_unix.AI_SOCKTYPE SOCK_STREAM]), pkh) - - let public_key ?interactive:(_) uri = - parse (uri : pk_uri :> Uri.t) >>=? fun (path, pkh) -> - public_key path pkh + let pkh = try String.(sub pkh 1 (length pkh - 1)) with _ -> "" in + Lwt.return (Signature.Public_key_hash.of_b58check pkh) + >>=? fun pkh -> + return + ( Lwt_utils_unix.Socket.Tcp + (path, string_of_int port, [Lwt_unix.AI_SOCKTYPE SOCK_STREAM]), + pkh ) + + let public_key ?interactive:_ uri = + parse (uri : pk_uri :> Uri.t) >>=? fun (path, pkh) -> public_key path pkh let neuterize uri = return (Client_keys.make_pk_uri (uri : sk_uri :> Uri.t)) let public_key_hash ?interactive uri = - public_key ?interactive uri >>=? fun pk -> - return (Signature.Public_key.hash pk, Some pk) + public_key ?interactive uri + >>=? fun pk -> return (Signature.Public_key.hash pk, Some pk) let sign ?watermark uri msg = - parse (uri : sk_uri :> Uri.t) >>=? fun (path, pkh) -> - sign ?watermark path pkh msg + parse (uri : sk_uri :> Uri.t) + >>=? fun (path, pkh) -> sign ?watermark path pkh msg let deterministic_nonce uri msg = - parse (uri : sk_uri :> Uri.t) >>=? fun (path, pkh) -> - deterministic_nonce path pkh msg + parse (uri : sk_uri :> Uri.t) + >>=? fun (path, pkh) -> deterministic_nonce path pkh msg let deterministic_nonce_hash uri msg = - parse (uri : sk_uri :> Uri.t) >>=? fun (path, pkh) -> - deterministic_nonce_hash path pkh msg + parse (uri : sk_uri :> Uri.t) + >>=? fun (path, pkh) -> deterministic_nonce_hash path pkh msg let supports_deterministic_nonces uri = - parse (uri : sk_uri :> Uri.t) >>=? fun (path, pkh) -> - supports_deterministic_nonces path pkh - + parse (uri : sk_uri :> Uri.t) + >>=? 
fun (path, pkh) -> supports_deterministic_nonces path pkh end - end +let make_unix_base path = Uri.make ~scheme:unix_scheme ~path () -let make_unix_base path = - Uri.make ~scheme:unix_scheme ~path () - -let make_tcp_base host port = - Uri.make ~scheme:tcp_scheme ~host ~port () +let make_tcp_base host port = Uri.make ~scheme:tcp_scheme ~host ~port () diff --git a/src/lib_signer_backends/socket.mli b/src/lib_signer_backends/socket.mli index b42a769dd64ffaa5e79eb3e7e2f875466155b173..587c4d53545bde60bb4fc39e7bbec2f48c3810de 100644 --- a/src/lib_signer_backends/socket.mli +++ b/src/lib_signer_backends/socket.mli @@ -23,12 +23,15 @@ (* *) (*****************************************************************************) -module Make(P : sig - val authenticate: Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t - end) : sig +module Make (P : sig + val authenticate : + Signature.Public_key_hash.t list -> MBytes.t -> Signature.t tzresult Lwt.t +end) : sig module Unix : Client_keys.SIGNER + module Tcp : Client_keys.SIGNER end -val make_unix_base: string -> Uri.t -val make_tcp_base: string -> int -> Uri.t +val make_unix_base : string -> Uri.t + +val make_tcp_base : string -> int -> Uri.t diff --git a/src/lib_signer_backends/test/test_crouching.ml b/src/lib_signer_backends/test/test_crouching.ml index bf6756ecee820b93baa5121712d7e276bce69a64..9bbae3a0bea464794182db0b236d2a17bff79a2a 100644 --- a/src/lib_signer_backends/test/test_crouching.ml +++ b/src/lib_signer_backends/test/test_crouching.ml @@ -1,13 +1,8 @@ - let test_example () = let name = Ledger_names.crouching_tiger "12345" in - assert (name = { c = "calculating" ; t = "meerkat" ; h = "straight" ; d = "beetle" }) + assert ( + name = {c = "calculating"; t = "meerkat"; h = "straight"; d = "beetle"} ) -let tests = [ - Alcotest.test_case "print_example" `Quick test_example; -] +let tests = [Alcotest.test_case "print_example" `Quick test_example] -let () = - Alcotest.run "tezos-signed-backends" [ - "ledger-names", tests - ] +let () = Alcotest.run "tezos-signed-backends" [("ledger-names", tests)] diff --git a/src/lib_signer_backends/test/test_encrypted.ml b/src/lib_signer_backends/test/test_encrypted.ml index f04bd8e449c8294d7265525a4bb7250efc7c70b7..21e1d78aea2f5bb219a5986961be4d1ed11791e6 100644 --- a/src/lib_signer_backends/test/test_encrypted.ml +++ b/src/lib_signer_backends/test/test_encrypted.ml @@ -11,91 +11,119 @@ open Error_monad let loops = 10 -let passwords = List.map MBytes.of_string [ - "ahThie5H"; "aVah7eid"; "Hihohh1n"; "mui0Hoox"; "Piu7pual"; "paik6aiW"; - "caeS5me5"; "boh5dauL"; "zaiK1Oht"; "Oogh4hah"; "kiY5ohlo"; "booth0Ei"; - "xa2Aidao"; "aju6oXu4"; "gooruGh9"; "ahy4Daih"; "chosh0Wu"; "Cheij6za"; - "quee9ooL"; "Sohs9are"; "Pae3gay7"; "Naif5iel"; " eir6Aed1"; "aa6Aesai"; - ""; - ] +let passwords = + List.map + MBytes.of_string + [ "ahThie5H"; + "aVah7eid"; + "Hihohh1n"; + "mui0Hoox"; + "Piu7pual"; + "paik6aiW"; + "caeS5me5"; + "boh5dauL"; + "zaiK1Oht"; + "Oogh4hah"; + "kiY5ohlo"; + "booth0Ei"; + "xa2Aidao"; + "aju6oXu4"; + "gooruGh9"; + "ahy4Daih"; + "chosh0Wu"; + "Cheij6za"; + "quee9ooL"; + "Sohs9are"; + "Pae3gay7"; + "Naif5iel"; + " eir6Aed1"; + "aa6Aesai"; + "" ] let nb_passwds = List.length passwords -let fake_ctx () = object - val mutable i = 0; - val mutable distributed = false; - inherit Client_context.simple_printer (fun _ _ -> Lwt.return_unit) - method prompt : type a. (a, string tzresult) Client_context.lwt_format -> a = - Format.kasprintf (fun _ -> return "") - method prompt_password : type a. 
(a, MBytes.t tzresult) Client_context.lwt_format -> a = - Format.kasprintf begin fun _ -> (* return Bigstring.empty *) - match distributed with - | false -> - distributed <- true ; - return (List.nth passwords 0) - | true -> - i <- if i = nb_passwds - 1 then 0 else succ i ; - distributed <- false ; - return (List.nth passwords i) - end -end - -let make_sk_uris = - List.map begin fun path -> - Client_keys.make_sk_uri (Uri.make ~scheme:"encrypted" ~path ()) +let fake_ctx () = + object + val mutable i = 0 + + val mutable distributed = false + + inherit Client_context.simple_printer (fun _ _ -> Lwt.return_unit) + + method prompt : type a. (a, string tzresult) Client_context.lwt_format -> a + = + Format.kasprintf (fun _ -> return "") + + method prompt_password : type a. + (a, MBytes.t tzresult) Client_context.lwt_format -> a = + Format.kasprintf (fun _ -> + (* return Bigstring.empty *) + match distributed with + | false -> + distributed <- true ; + return (List.nth passwords 0) + | true -> + i <- (if i = nb_passwds - 1 then 0 else succ i) ; + distributed <- false ; + return (List.nth passwords i)) end -let ed25519_sks = [ - "edsk3kMNLNdzLPbbABASDLARft8JRZ3Wpwibn8SMAb4KmuWSMJmAFd"; - "edsk3Kqr8VHRx9kmR8Pj5qRGcmvQH34cForiMaMz1Ahhq5DkZp7FxJ"; - "edsk2mBu4w9sMGhryvvXK53dXgpcNdZWi8pJQ1QL2rAgRPrE5y12az" ] - -let ed25519_sks_encrypted = make_sk_uris [ - "edesk1oGXxunJ5FTGpQ6o1xdop8VGKdT36Fj7LwWF9HLjzEqaCC4V6tdRVN1jaeJTfCHS8bYf7U2YhMK2yW6jSUy" ; - "edesk1s4xEifbUdUkghHHimbNUuyQ4eidDVJdc8JtPRUon758hBqZNZsQxadUDFRSRiUdLoFqBG35HAiLKafyczw" ; - "edesk1zY5jEs4QXrF9tXxFq1mfW9PkatdRxCKQ2Q598y5LLz65nQj4eWxefYFp8YLerya1haRdGe5NWckHDb5ApM" ; - ] - -let secp256k1_sks = [ - "spsk24attf9uuQ7PUKFHxTm6E3TMqB6SPkFiMbXPBur7JNrvupW2xg"; - "spsk2H32XfWL7MkW58r76q6Yu5tJg77YGgVyjwq7EvLUHhn4JmAtEG"; - "spsk3KQ56REAUGc6Gn87xCRnWyPwR2Un667vegQVuU16ZcgNyLCooh" ] - -let secp256k1_sks_encrypted = make_sk_uris [ - "spesk2CXQHDbzrcNatRzmg83Dto6kX6BWwpP2zGs4Zks9LDsXzaX6mAYRj5ZrrdgyZQap4DS9YRRLNSpaVC2TSsk" ; - "spesk1upiFp23osWSUTgHcx8DCVpTrMr9xtdqVQkQDWj5sFG7vqcWLDaNv9AKKcF27Nb266YfuAGF2hEbcyAxHmK" ; - "spesk1w7d68hzTWJusk5Xn5oz8EgDXbotDW9BXb5ksFjr8Jd94Kxnu5yKAhgRszojhMUoJ1EEt5BtPpGpkgCjELq" ; - ] - -let p256_sks = [ - "p2sk2YQcwF5h7qgRztocEMrfizUwZaM41f4v7zWneiig2Y5AxajqYC"; - "p2sk2XiSoQC9tvejVBDJyvkbHUq2kvcQHdJJ2wM8rii228DkjKV2b5"; - "p2sk3ZsfsEaxDNn74orv91Ruu35fomzF373aT9ForA4fDo54c47o6H" ] - -let p256_sks_encrypted = make_sk_uris [ - "p2esk2JMFpR9yaSpgsaKQYLqFnv16t4gowJ4cgjj7D7iMfoaJz2vZuH7Tdi11MrX6FC2yhfs2nvy5VRxAvzH1STE" ; - "p2esk1nfobVL73mY5Y18W8Ltb3Vm6Nf5Th7trN3yA3ucyyP4AH93XfyRatkh9AxxaDtnju1EtArykjroEQHDT97k" ; - "p2esk2Ge1jrVak7NhxksimzaQjRCTLx5vxUZ4Akgq3spGQLx6N41h6aKXeEYDgxN5eztnPwD6QiCHCfVAKXLPNm8" ; - ] +let make_sk_uris = + List.map (fun path -> + Client_keys.make_sk_uri (Uri.make ~scheme:"encrypted" ~path ())) + +let ed25519_sks = + [ "edsk3kMNLNdzLPbbABASDLARft8JRZ3Wpwibn8SMAb4KmuWSMJmAFd"; + "edsk3Kqr8VHRx9kmR8Pj5qRGcmvQH34cForiMaMz1Ahhq5DkZp7FxJ"; + "edsk2mBu4w9sMGhryvvXK53dXgpcNdZWi8pJQ1QL2rAgRPrE5y12az" ] + +let ed25519_sks_encrypted = + make_sk_uris + [ "edesk1oGXxunJ5FTGpQ6o1xdop8VGKdT36Fj7LwWF9HLjzEqaCC4V6tdRVN1jaeJTfCHS8bYf7U2YhMK2yW6jSUy"; + "edesk1s4xEifbUdUkghHHimbNUuyQ4eidDVJdc8JtPRUon758hBqZNZsQxadUDFRSRiUdLoFqBG35HAiLKafyczw"; + "edesk1zY5jEs4QXrF9tXxFq1mfW9PkatdRxCKQ2Q598y5LLz65nQj4eWxefYFp8YLerya1haRdGe5NWckHDb5ApM" + ] + +let secp256k1_sks = + [ "spsk24attf9uuQ7PUKFHxTm6E3TMqB6SPkFiMbXPBur7JNrvupW2xg"; + "spsk2H32XfWL7MkW58r76q6Yu5tJg77YGgVyjwq7EvLUHhn4JmAtEG"; + 
"spsk3KQ56REAUGc6Gn87xCRnWyPwR2Un667vegQVuU16ZcgNyLCooh" ] + +let secp256k1_sks_encrypted = + make_sk_uris + [ "spesk2CXQHDbzrcNatRzmg83Dto6kX6BWwpP2zGs4Zks9LDsXzaX6mAYRj5ZrrdgyZQap4DS9YRRLNSpaVC2TSsk"; + "spesk1upiFp23osWSUTgHcx8DCVpTrMr9xtdqVQkQDWj5sFG7vqcWLDaNv9AKKcF27Nb266YfuAGF2hEbcyAxHmK"; + "spesk1w7d68hzTWJusk5Xn5oz8EgDXbotDW9BXb5ksFjr8Jd94Kxnu5yKAhgRszojhMUoJ1EEt5BtPpGpkgCjELq" + ] + +let p256_sks = + [ "p2sk2YQcwF5h7qgRztocEMrfizUwZaM41f4v7zWneiig2Y5AxajqYC"; + "p2sk2XiSoQC9tvejVBDJyvkbHUq2kvcQHdJJ2wM8rii228DkjKV2b5"; + "p2sk3ZsfsEaxDNn74orv91Ruu35fomzF373aT9ForA4fDo54c47o6H" ] + +let p256_sks_encrypted = + make_sk_uris + [ "p2esk2JMFpR9yaSpgsaKQYLqFnv16t4gowJ4cgjj7D7iMfoaJz2vZuH7Tdi11MrX6FC2yhfs2nvy5VRxAvzH1STE"; + "p2esk1nfobVL73mY5Y18W8Ltb3Vm6Nf5Th7trN3yA3ucyyP4AH93XfyRatkh9AxxaDtnju1EtArykjroEQHDT97k"; + "p2esk2Ge1jrVak7NhxksimzaQjRCTLx5vxUZ4Akgq3spGQLx6N41h6aKXeEYDgxN5eztnPwD6QiCHCfVAKXLPNm8" + ] let sk_testable = - Alcotest.testable - Signature.Secret_key.pp - Signature.Secret_key.equal + Alcotest.testable Signature.Secret_key.pp Signature.Secret_key.equal let test_vectors () = let open Encrypted in - iter_s begin fun (sks, encrypted_sks) -> - let ctx = fake_ctx () in - let sks = List.map Signature.Secret_key.of_b58check_exn sks in - map_s (decrypt ctx) encrypted_sks >>=? fun decs -> - assert (decs = sks) ; - return_unit - end [ - ed25519_sks, ed25519_sks_encrypted ; - secp256k1_sks, secp256k1_sks_encrypted ; - p256_sks, p256_sks_encrypted ; - ] + iter_s + (fun (sks, encrypted_sks) -> + let ctx = fake_ctx () in + let sks = List.map Signature.Secret_key.of_b58check_exn sks in + map_s (decrypt ctx) encrypted_sks + >>=? fun decs -> + assert (decs = sks) ; + return_unit) + [ (ed25519_sks, ed25519_sks_encrypted); + (secp256k1_sks, secp256k1_sks_encrypted); + (p256_sks, p256_sks_encrypted) ] let test_random algo = let open Encrypted in @@ -104,29 +132,28 @@ let test_random algo = let rec inner i = if i >= loops then return_unit else - let _, _, sk = Signature.generate_key ~algo () in - encrypt ctx sk >>=? fun sk_uri -> - decrypt decrypt_ctx sk_uri >>=? fun decrypted_sk -> + let (_, _, sk) = Signature.generate_key ~algo () in + encrypt ctx sk + >>=? fun sk_uri -> + decrypt decrypt_ctx sk_uri + >>=? 
fun decrypted_sk -> Alcotest.check sk_testable "test_encrypt: decrypt" sk decrypted_sk ; inner (succ i) - in inner 0 + in + inner 0 let test_random _switch () = - iter_s test_random Signature.[Ed25519 ; Secp256k1 ; P256] >>= function - | Ok _ -> Lwt.return_unit - | Error _ -> Lwt.fail_with "test_random" + iter_s test_random Signature.[Ed25519; Secp256k1; P256] + >>= function + | Ok _ -> Lwt.return_unit | Error _ -> Lwt.fail_with "test_random" let test_vectors _switch () = - test_vectors () >>= function - | Ok _ -> Lwt.return_unit - | Error _ -> Lwt.fail_with "test_vectors" - -let tests = [ - Alcotest_lwt.test_case "random_roundtrip" `Quick test_random ; - Alcotest_lwt.test_case "vectors_decrypt" `Quick test_vectors ; -] - -let () = - Alcotest.run "tezos-signer-backends" [ - "encrypted", tests - ] + test_vectors () + >>= function + | Ok _ -> Lwt.return_unit | Error _ -> Lwt.fail_with "test_vectors" + +let tests = + [ Alcotest_lwt.test_case "random_roundtrip" `Quick test_random; + Alcotest_lwt.test_case "vectors_decrypt" `Quick test_vectors ] + +let () = Alcotest.run "tezos-signer-backends" [("encrypted", tests)] diff --git a/src/lib_signer_backends/unencrypted.ml b/src/lib_signer_backends/unencrypted.ml index 52a2f3c98042292ca972b8b9c38c44cfd4c277b3..bd5d918869956a45036397131614dba58d147109 100644 --- a/src/lib_signer_backends/unencrypted.ml +++ b/src/lib_signer_backends/unencrypted.ml @@ -27,8 +27,7 @@ open Client_keys let scheme = "unencrypted" -let title = - "Built-in signer using raw unencrypted keys." +let title = "Built-in signer using raw unencrypted keys." let description = "Please DO NOT USE this signer outside of test environments.\n\ @@ -47,7 +46,7 @@ let make_sk sk = Client_keys.make_sk_uri (Uri.make ~scheme ~path:(Signature.Secret_key.to_b58check sk) ()) -let public_key ?interactive:(_) pk_uri = +let public_key ?interactive:_ pk_uri = Lwt.return (Signature.Public_key.of_b58check (Uri.path (pk_uri : pk_uri :> Uri.t))) @@ -56,23 +55,22 @@ let make_pk pk = (Uri.make ~scheme ~path:(Signature.Public_key.to_b58check pk) ()) let neuterize sk_uri = - secret_key sk_uri >>=? fun sk -> - return (make_pk (Signature.Secret_key.to_public_key sk)) + secret_key sk_uri + >>=? fun sk -> return (make_pk (Signature.Secret_key.to_public_key sk)) let public_key_hash ?interactive pk_uri = - public_key ?interactive pk_uri >>=? fun pk -> - return (Signature.Public_key.hash pk, Some pk) + public_key ?interactive pk_uri + >>=? fun pk -> return (Signature.Public_key.hash pk, Some pk) let sign ?watermark sk_uri buf = - secret_key sk_uri >>=? fun sk -> - return (Signature.sign ?watermark sk buf) + secret_key sk_uri >>=? fun sk -> return (Signature.sign ?watermark sk buf) let deterministic_nonce sk_uri buf = - secret_key sk_uri >>=? fun sk -> - return (Signature.deterministic_nonce sk buf) + secret_key sk_uri + >>=? fun sk -> return (Signature.deterministic_nonce sk buf) let deterministic_nonce_hash sk_uri buf = - secret_key sk_uri >>=? fun sk -> - return (Signature.deterministic_nonce_hash sk buf) + secret_key sk_uri + >>=? 
fun sk -> return (Signature.deterministic_nonce_hash sk buf) let supports_deterministic_nonces _ = return_true diff --git a/src/lib_signer_backends/unencrypted.mli b/src/lib_signer_backends/unencrypted.mli index f5a3dfca2e27609a952bcaff2d2669906fd2aced..d3b886791c1212ace429693e4d1327df341fa9dd 100644 --- a/src/lib_signer_backends/unencrypted.mli +++ b/src/lib_signer_backends/unencrypted.mli @@ -25,5 +25,6 @@ include Client_keys.SIGNER -val make_pk: Signature.public_key -> Client_keys.pk_uri -val make_sk: Signature.secret_key -> Client_keys.sk_uri +val make_pk : Signature.public_key -> Client_keys.pk_uri + +val make_sk : Signature.secret_key -> Client_keys.sk_uri diff --git a/src/lib_signer_services/.ocamlformat b/src/lib_signer_services/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_signer_services/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_signer_services/signer_messages.ml b/src/lib_signer_services/signer_messages.ml index 6b0c52898e154bacb98404631d7973b050028e3b..22b188cdc2f736587df0ca0b5c2e7b2144a77db2 100644 --- a/src/lib_signer_services/signer_messages.ml +++ b/src/lib_signer_services/signer_messages.ml @@ -25,145 +25,117 @@ module type Authenticated_request = sig type t = { - pkh: Signature.Public_key_hash.t ; - data: MBytes.t ; - signature: Signature.t option ; + pkh : Signature.Public_key_hash.t; + data : MBytes.t; + signature : Signature.t option } - val to_sign: - pkh: Signature.Public_key_hash.t -> - data: MBytes.t -> - MBytes.t + + val to_sign : pkh:Signature.Public_key_hash.t -> data:MBytes.t -> MBytes.t + val encoding : t Data_encoding.t end module type Tag = sig - val tag: int + val tag : int end -module Make_authenticated_request(T: Tag) : Authenticated_request = struct - +module Make_authenticated_request (T : Tag) : Authenticated_request = struct type t = { - pkh: Signature.Public_key_hash.t ; - data: MBytes.t ; - signature: Signature.t option ; + pkh : Signature.Public_key_hash.t; + data : MBytes.t; + signature : Signature.t option } let to_sign ~pkh ~data = let tag = MBytes.make 1 '0' in - MBytes.set_int8 tag 0 T.tag; - MBytes.concat "" - [ MBytes.of_string "\x04" ; + MBytes.set_int8 tag 0 T.tag ; + MBytes.concat + "" + [ MBytes.of_string "\x04"; tag; - Signature.Public_key_hash.to_bytes pkh ; + Signature.Public_key_hash.to_bytes pkh; data ] let encoding = let open Data_encoding in conv - (fun { pkh ; data ; signature } -> - (pkh, data, signature)) - (fun (pkh, data, signature) -> - { pkh ; data ; signature }) + (fun {pkh; data; signature} -> (pkh, data, signature)) + (fun (pkh, data, signature) -> {pkh; data; signature}) (obj3 (req "pkh" Signature.Public_key_hash.encoding) (req "data" bytes) (opt "signature" Signature.encoding)) - end module Sign = struct - - module Request = Make_authenticated_request (struct let tag = 1 end) + module Request = Make_authenticated_request (struct + let tag = 1 + end) module Response = struct - type t = Signature.t - let encoding = - Data_encoding.(obj1 (req "signature" Signature.encoding)) - + let encoding = Data_encoding.(obj1 (req "signature" Signature.encoding)) end - end module Deterministic_nonce = struct - - module Request = 
Make_authenticated_request (struct let tag = 2 end) + module Request = Make_authenticated_request (struct + let tag = 2 + end) module Response = struct - type t = MBytes.t - let encoding = - Data_encoding.(obj1 (req "deterministic_nonce" bytes)) - + let encoding = Data_encoding.(obj1 (req "deterministic_nonce" bytes)) end - end module Deterministic_nonce_hash = struct - - module Request = Make_authenticated_request (struct let tag = 3 end) + module Request = Make_authenticated_request (struct + let tag = 3 + end) module Response = struct - type t = MBytes.t - let encoding = - Data_encoding.(obj1 (req "deterministic_nonce_hash" bytes)) - + let encoding = Data_encoding.(obj1 (req "deterministic_nonce_hash" bytes)) end - end module Supports_deterministic_nonces = struct - module Request = struct - type t = Signature.Public_key_hash.t let encoding = Data_encoding.(obj1 (req "pkh" Signature.Public_key_hash.encoding)) - end module Response = struct - type t = bool let encoding = Data_encoding.(obj1 (req "bool" bool)) end - end - - module Public_key = struct - module Request = struct - type t = Signature.Public_key_hash.t let encoding = Data_encoding.(obj1 (req "pkh" Signature.Public_key_hash.encoding)) - end module Response = struct - type t = Signature.Public_key.t let encoding = Data_encoding.(obj1 (req "pubkey" Signature.Public_key.encoding)) - end - end module Authorized_keys = struct - module Response = struct - type t = | No_authentication | Authorized_keys of Signature.Public_key_hash.t list @@ -171,23 +143,22 @@ module Authorized_keys = struct let encoding = let open Data_encoding in union - [ case (Tag 0) - ~title: "No_authentication" + [ case + (Tag 0) + ~title:"No_authentication" (constant "no_authentication_required") (function No_authentication -> Some () | _ -> None) - (fun () -> No_authentication) ; - case (Tag 1) - ~title: "Authorized_keys" + (fun () -> No_authentication); + case + (Tag 1) + ~title:"Authorized_keys" (list Signature.Public_key_hash.encoding) (function Authorized_keys l -> Some l | _ -> None) (fun l -> Authorized_keys l) ] - end - end module Request = struct - type t = | Sign of Sign.Request.t | Public_key of Public_key.Request.t @@ -198,47 +169,53 @@ module Request = struct let encoding = let open Data_encoding in - union [ - case (Tag 0) - ~title:"Sign" - (merge_objs - (obj1 (req "kind" (constant "sign"))) - Sign.Request.encoding) - (function Sign req -> Some ((), req) | _ -> None) - (fun ((), req) -> Sign req) ; - case (Tag 1) - ~title:"Public_key" - (merge_objs - (obj1 (req "kind" (constant "public_key"))) - Public_key.Request.encoding) - (function Public_key req -> Some ((), req) | _ -> None) - (fun ((), req) -> Public_key req) ; - case (Tag 2) - ~title:"Authorized_keys" - (obj1 (req "kind" (constant "authorized_keys"))) - (function Authorized_keys -> Some () | _ -> None) - (fun () -> Authorized_keys) ; - case (Tag 3) - ~title:"Deterministic_nonce" - (merge_objs - (obj1 (req "kind" (constant "deterministic_nonce"))) - Deterministic_nonce.Request.encoding) - (function Deterministic_nonce req -> Some ((), req) | _ -> None) - (fun ((), req) -> Deterministic_nonce req) ; - case (Tag 4) - ~title:"Deterministic_nonce_hash" - (merge_objs - (obj1 (req "kind" (constant "deterministic_nonce_hash"))) - Deterministic_nonce_hash.Request.encoding) - (function Deterministic_nonce_hash req -> Some ((), req) | _ -> None) - (fun ((), req) -> Deterministic_nonce_hash req) ; - case (Tag 5) - ~title:"Supports_deterministic_nonces" - (merge_objs - (obj1 (req "kind" (constant 
"supports_deterministic_nonces"))) - Supports_deterministic_nonces.Request.encoding) - (function Supports_deterministic_nonces req -> Some ((), req) | _ -> None) - (fun ((), req) -> Supports_deterministic_nonces req) ; - ] - + union + [ case + (Tag 0) + ~title:"Sign" + (merge_objs + (obj1 (req "kind" (constant "sign"))) + Sign.Request.encoding) + (function Sign req -> Some ((), req) | _ -> None) + (fun ((), req) -> Sign req); + case + (Tag 1) + ~title:"Public_key" + (merge_objs + (obj1 (req "kind" (constant "public_key"))) + Public_key.Request.encoding) + (function Public_key req -> Some ((), req) | _ -> None) + (fun ((), req) -> Public_key req); + case + (Tag 2) + ~title:"Authorized_keys" + (obj1 (req "kind" (constant "authorized_keys"))) + (function Authorized_keys -> Some () | _ -> None) + (fun () -> Authorized_keys); + case + (Tag 3) + ~title:"Deterministic_nonce" + (merge_objs + (obj1 (req "kind" (constant "deterministic_nonce"))) + Deterministic_nonce.Request.encoding) + (function Deterministic_nonce req -> Some ((), req) | _ -> None) + (fun ((), req) -> Deterministic_nonce req); + case + (Tag 4) + ~title:"Deterministic_nonce_hash" + (merge_objs + (obj1 (req "kind" (constant "deterministic_nonce_hash"))) + Deterministic_nonce_hash.Request.encoding) + (function + | Deterministic_nonce_hash req -> Some ((), req) | _ -> None) + (fun ((), req) -> Deterministic_nonce_hash req); + case + (Tag 5) + ~title:"Supports_deterministic_nonces" + (merge_objs + (obj1 (req "kind" (constant "supports_deterministic_nonces"))) + Supports_deterministic_nonces.Request.encoding) + (function + | Supports_deterministic_nonces req -> Some ((), req) | _ -> None) + (fun ((), req) -> Supports_deterministic_nonces req) ] end diff --git a/src/lib_signer_services/signer_messages.mli b/src/lib_signer_services/signer_messages.mli index 2a0f0794c795a0cb78ecf3b25b53ee29104e8ade..9a7501f2c0c3543b2d10dd07f7754a6e0916ea2a 100644 --- a/src/lib_signer_services/signer_messages.mli +++ b/src/lib_signer_services/signer_messages.mli @@ -25,92 +25,85 @@ module type Authenticated_request = sig type t = { - pkh: Signature.Public_key_hash.t ; - data: MBytes.t ; - signature: Signature.t option ; + pkh : Signature.Public_key_hash.t; + data : MBytes.t; + signature : Signature.t option } - val to_sign: - pkh: Signature.Public_key_hash.t -> - data: MBytes.t -> - MBytes.t + + val to_sign : pkh:Signature.Public_key_hash.t -> data:MBytes.t -> MBytes.t + val encoding : t Data_encoding.t end module Sign : sig - module Request : Authenticated_request module Response : sig type t = Signature.t + val encoding : t Data_encoding.t end - end module Deterministic_nonce : sig - module Request : Authenticated_request module Response : sig type t = MBytes.t + val encoding : t Data_encoding.t end - end module Deterministic_nonce_hash : sig - module Request : Authenticated_request module Response : sig type t = MBytes.t + val encoding : t Data_encoding.t end - end module Supports_deterministic_nonces : sig - module Request : sig type t = Signature.Public_key_hash.t + val encoding : t Data_encoding.t end module Response : sig type t = bool + val encoding : t Data_encoding.t end - end module Public_key : sig - module Request : sig type t = Signature.Public_key_hash.t + val encoding : t Data_encoding.t end module Response : sig type t = Signature.Public_key.t + val encoding : t Data_encoding.t end - end module Authorized_keys : sig - module Response : sig type t = | No_authentication | Authorized_keys of Signature.Public_key_hash.t list + val encoding : t 
Data_encoding.t end - end - - module Request : sig - type t = | Sign of Sign.Request.t | Public_key of Public_key.Request.t @@ -118,6 +111,6 @@ module Request : sig | Deterministic_nonce of Deterministic_nonce.Request.t | Deterministic_nonce_hash of Deterministic_nonce_hash.Request.t | Supports_deterministic_nonces of Supports_deterministic_nonces.Request.t - val encoding : t Data_encoding.t + val encoding : t Data_encoding.t end diff --git a/src/lib_signer_services/signer_services.ml b/src/lib_signer_services/signer_services.ml index 694125b086429aa8bbafd0ea3b930aff5cea9d77..6f50826cdb614329eb0e9dab42f6858031f9f086 100644 --- a/src/lib_signer_services/signer_services.ml +++ b/src/lib_signer_services/signer_services.ml @@ -23,62 +23,73 @@ (* *) (*****************************************************************************) - let query = let open RPC_query in query (fun signature -> signature) |+ opt_field - ~descr: "Must be provided if the signer requires \ - authentication. In this case, it must be the signature \ - of the public key hash and message concatenated, by one \ - of the keys authorized by the signer." - "authentication" Signature.rpc_arg (fun signature -> signature) + ~descr: + "Must be provided if the signer requires authentication. In this \ + case, it must be the signature of the public key hash and message \ + concatenated, by one of the keys authorized by the signer." + "authentication" + Signature.rpc_arg + (fun signature -> signature) |> seal let sign = RPC_service.post_service - ~description: "Sign a piece of data with a given remote key" + ~description:"Sign a piece of data with a given remote key" ~query - ~input: Data_encoding.bytes - ~output: Data_encoding.(obj1 (req "signature" Signature.encoding)) + ~input:Data_encoding.bytes + ~output:Data_encoding.(obj1 (req "signature" Signature.encoding)) RPC_path.(root / "keys" /: Signature.Public_key_hash.rpc_arg) let deterministic_nonce = RPC_service.post_service - ~description: "Obtain some random data generated deterministically from some piece of data with a given remote key" + ~description: + "Obtain some random data generated deterministically from some piece of \ + data with a given remote key" ~query - ~input: Data_encoding.bytes - ~output: Data_encoding.(obj1 (req "deterministic_nonce" bytes)) + ~input:Data_encoding.bytes + ~output:Data_encoding.(obj1 (req "deterministic_nonce" bytes)) RPC_path.(root / "keys" /: Signature.Public_key_hash.rpc_arg) let deterministic_nonce_hash = RPC_service.post_service - ~description: "Obtain the hash of some random data generated deterministically from some piece of data with a given remote key" + ~description: + "Obtain the hash of some random data generated deterministically from \ + some piece of data with a given remote key" ~query - ~input: Data_encoding.bytes - ~output: Data_encoding.(obj1 (req "deterministic_nonce_hash" bytes)) + ~input:Data_encoding.bytes + ~output:Data_encoding.(obj1 (req "deterministic_nonce_hash" bytes)) RPC_path.(root / "keys" /: Signature.Public_key_hash.rpc_arg) let supports_deterministic_nonces = RPC_service.get_service - ~description: "Obtain whether the signing service suppports the determinstic nonces functionality" - ~query: RPC_query.empty - ~output: Data_encoding.(obj1 (req "supports_deterministic_nonces" bool)) + ~description: + "Obtain whether the signing service supports the deterministic nonces \ + functionality" + ~query:RPC_query.empty + ~output:Data_encoding.(obj1 (req "supports_deterministic_nonces" bool)) RPC_path.(root / "keys" /: 
Signature.Public_key_hash.rpc_arg) let public_key = RPC_service.get_service - ~description: "Retrieve the public key of a given remote key" - ~query: RPC_query.empty - ~output: Data_encoding.(obj1 (req "public_key" Signature.Public_key.encoding)) + ~description:"Retrieve the public key of a given remote key" + ~query:RPC_query.empty + ~output: + Data_encoding.(obj1 (req "public_key" Signature.Public_key.encoding)) RPC_path.(root / "keys" /: Signature.Public_key_hash.rpc_arg) let authorized_keys = RPC_service.get_service - ~description: "Retrieve the public keys that can be used to \ - authenticate signing commands.\n\ - If the empty object is returned, the signer has \ - been set to accept unsigned commands." - ~query: RPC_query.empty - ~output: Data_encoding.(obj1 (opt "authorized_keys" (list Signature.Public_key_hash.encoding))) + ~description: + "Retrieve the public keys that can be used to authenticate signing \ + commands.\n\ + If the empty object is returned, the signer has been set to accept \ + unsigned commands." + ~query:RPC_query.empty + ~output: + Data_encoding.( + obj1 (opt "authorized_keys" (list Signature.Public_key_hash.encoding))) RPC_path.(root / "authorized_keys") diff --git a/src/lib_signer_services/signer_services.mli b/src/lib_signer_services/signer_services.mli index 2785dec0eab7c0869c6c6710ffe5519bf7f3792f..3465efae69fa96d6621bacf4815fc04e04653125 100644 --- a/src/lib_signer_services/signer_services.mli +++ b/src/lib_signer_services/signer_services.mli @@ -24,25 +24,55 @@ (*****************************************************************************) val sign : - ([ `POST ], unit, unit * Signature.Public_key_hash.t, - Signature.t option, MBytes.t, Signature.t) RPC_service.t + ( [`POST], + unit, + unit * Signature.Public_key_hash.t, + Signature.t option, + MBytes.t, + Signature.t ) + RPC_service.t val deterministic_nonce : - ([ `POST ], unit, unit * Signature.Public_key_hash.t, - Signature.t option, MBytes.t, MBytes.t) RPC_service.t + ( [`POST], + unit, + unit * Signature.Public_key_hash.t, + Signature.t option, + MBytes.t, + MBytes.t ) + RPC_service.t val deterministic_nonce_hash : - ([ `POST ], unit, unit * Signature.Public_key_hash.t, - Signature.t option, MBytes.t, MBytes.t) RPC_service.t + ( [`POST], + unit, + unit * Signature.Public_key_hash.t, + Signature.t option, + MBytes.t, + MBytes.t ) + RPC_service.t val supports_deterministic_nonces : - ([ `GET ], unit, unit * Signature.Public_key_hash.t, - unit, unit, bool) RPC_service.t + ( [`GET], + unit, + unit * Signature.Public_key_hash.t, + unit, + unit, + bool ) + RPC_service.t val public_key : - ([ `GET ], unit, unit * Signature.Public_key_hash.t, - unit, unit, Signature.Public_key.t) RPC_service.t + ( [`GET], + unit, + unit * Signature.Public_key_hash.t, + unit, + unit, + Signature.Public_key.t ) + RPC_service.t val authorized_keys : - ([ `GET ], unit, unit, - unit, unit, Signature.Public_key_hash.t list option) RPC_service.t + ( [`GET], + unit, + unit, + unit, + unit, + Signature.Public_key_hash.t list option ) + RPC_service.t diff --git a/src/lib_stdlib/.ocamlformat b/src/lib_stdlib/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_stdlib/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always 
+break-string-literals=newlines-and-wrap diff --git a/src/lib_stdlib/binary_stream.ml b/src/lib_stdlib/binary_stream.ml index f175b21ffdb138a9bf9873b3344538ddfd92767a..747fabd3c3f2ebf03732e3ff3d621745d75f2bca 100644 --- a/src/lib_stdlib/binary_stream.ml +++ b/src/lib_stdlib/binary_stream.ml @@ -25,73 +25,69 @@ (* Facilities to decode streams of binary data *) -type buffer = { - buffer : MBytes.t ; - ofs : int ; - len : int ; -} +type buffer = {buffer : MBytes.t; ofs : int; len : int} type t = { - current : buffer ; + current : buffer; (* buffer queue (classical double list implementation) *) - pending : MBytes.t list ; - pending_rev : MBytes.t list ; + pending : MBytes.t list; + pending_rev : MBytes.t list; (* number unread bytes in 'current + pending + pending_rev' *) - unread : int ; + unread : int } -let is_empty { unread ; _ } = unread = 0 +let is_empty {unread; _} = unread = 0 let of_buffer current = - { current ; - pending = [] ; - pending_rev = [] ; - unread = current.len } + {current; pending = []; pending_rev = []; unread = current.len} let of_bytes buffer = let len = MBytes.length buffer in - of_buffer { buffer ; ofs = 0 ; len } + of_buffer {buffer; ofs = 0; len} let empty = of_bytes (MBytes.create 0) let push buffer stream = - { stream with pending_rev = buffer :: stream.pending_rev ; - unread = stream.unread + MBytes.length buffer } + { stream with + pending_rev = buffer :: stream.pending_rev; + unread = stream.unread + MBytes.length buffer } exception Need_more_data let split buffer len = assert (len <= buffer.len) ; - { buffer with len }, - { buffer with ofs = buffer.ofs + len ; len = buffer.len - len } + ( {buffer with len}, + {buffer with ofs = buffer.ofs + len; len = buffer.len - len} ) let read stream len = if len > stream.unread then raise Need_more_data ; if len <= stream.current.len then - let res, current = split stream.current len in - res, { stream with current ; unread = stream.unread - len } + let (res, current) = split stream.current len in + (res, {stream with current; unread = stream.unread - len}) else - let res = { buffer = MBytes.create len ; ofs = 0 ; len } in + let res = {buffer = MBytes.create len; ofs = 0; len} in MBytes.blit - stream.current.buffer stream.current.ofs - res.buffer 0 + stream.current.buffer + stream.current.ofs + res.buffer + 0 stream.current.len ; let rec loop ofs pending_rev = function - | [] -> loop ofs [] (List.rev pending_rev) + | [] -> + loop ofs [] (List.rev pending_rev) | buffer :: pending -> - let current = { buffer ; ofs = 0 ; len = MBytes.length buffer } in + let current = {buffer; ofs = 0; len = MBytes.length buffer} in let to_read = len - ofs in - if to_read <= current.len then begin + if to_read <= current.len then ( MBytes.blit current.buffer 0 res.buffer ofs to_read ; - res, - { current = { current with ofs = to_read ; - len = current.len - to_read } ; - pending ; - pending_rev ; - unread = stream.unread - len ; - } - end else begin + ( res, + { current = + {current with ofs = to_read; len = current.len - to_read}; + pending; + pending_rev; + unread = stream.unread - len } ) ) + else ( MBytes.blit current.buffer 0 res.buffer ofs current.len ; - loop (ofs + current.len) pending_rev pending - end in + loop (ofs + current.len) pending_rev pending ) + in loop stream.current.len stream.pending_rev stream.pending diff --git a/src/lib_stdlib/binary_stream.mli b/src/lib_stdlib/binary_stream.mli index f70d4dbe491bb9472b47e58717df7333c6d569bb..70fbf10f9e2b88fe1c221d12aa6a0cf3b3fb3f0b 100644 --- a/src/lib_stdlib/binary_stream.mli +++ 
b/src/lib_stdlib/binary_stream.mli @@ -25,16 +25,16 @@ type t -type buffer = { - buffer : MBytes.t ; - ofs : int ; - len : int ; -} +type buffer = {buffer : MBytes.t; ofs : int; len : int} exception Need_more_data -val is_empty: t -> bool -val empty: t -val of_buffer: buffer -> t -val read: t -> int -> buffer * t -val push: MBytes.t -> t -> t +val is_empty : t -> bool + +val empty : t + +val of_buffer : buffer -> t + +val read : t -> int -> buffer * t + +val push : MBytes.t -> t -> t diff --git a/src/lib_stdlib/compare.ml b/src/lib_stdlib/compare.ml index f34b04eaa235eec37df07bd96d5c5226d6a3360a..47607783aea98aa0524e355493797c7b1e0d4704 100644 --- a/src/lib_stdlib/compare.ml +++ b/src/lib_stdlib/compare.ml @@ -25,111 +25,195 @@ module type COMPARABLE = sig type t + val compare : t -> t -> int end module type S = sig type t - val (=) : t -> t -> bool - val (<>) : t -> t -> bool - val (<) : t -> t -> bool - val (<=) : t -> t -> bool - val (>=) : t -> t -> bool - val (>) : t -> t -> bool + + val ( = ) : t -> t -> bool + + val ( <> ) : t -> t -> bool + + val ( < ) : t -> t -> bool + + val ( <= ) : t -> t -> bool + + val ( >= ) : t -> t -> bool + + val ( > ) : t -> t -> bool + val compare : t -> t -> int + val equal : t -> t -> bool + val max : t -> t -> t + val min : t -> t -> t end module Make (P : COMPARABLE) = struct include P + let compare = compare - let (=) a b = compare a b = 0 - let (<>) a b = compare a b <> 0 - let (<) a b = compare a b < 0 - let (<=) a b = compare a b <= 0 - let (>=) a b = compare a b >= 0 - let (>) a b = compare a b > 0 - let equal = (=) + + let ( = ) a b = compare a b = 0 + + let ( <> ) a b = compare a b <> 0 + + let ( < ) a b = compare a b < 0 + + let ( <= ) a b = compare a b <= 0 + + let ( >= ) a b = compare a b >= 0 + + let ( > ) a b = compare a b > 0 + + let equal = ( = ) + let max x y = if x >= y then x else y + let min x y = if x <= y then x else y end module List (P : COMPARABLE) = struct type t = P.t list + let rec compare xs ys = - match xs, ys with - | [], [] -> 0 - | [], _ -> -1 - | _, [] -> 1 - | x :: xs, y :: ys -> + match (xs, ys) with + | ([], []) -> + 0 + | ([], _) -> + -1 + | (_, []) -> + 1 + | (x :: xs, y :: ys) -> let hd = P.compare x y in if hd <> 0 then hd else compare xs ys - let (=) xs ys = compare xs ys = 0 - let (<>) xs ys = compare xs ys <> 0 - let (<) xs ys = compare xs ys < 0 - let (<=) xs ys = compare xs ys <= 0 - let (>=) xs ys = compare xs ys >= 0 - let (>) xs ys = compare xs ys > 0 - let equal = (=) + + let ( = ) xs ys = compare xs ys = 0 + + let ( <> ) xs ys = compare xs ys <> 0 + + let ( < ) xs ys = compare xs ys < 0 + + let ( <= ) xs ys = compare xs ys <= 0 + + let ( >= ) xs ys = compare xs ys >= 0 + + let ( > ) xs ys = compare xs ys > 0 + + let equal = ( = ) + let max x y = if x >= y then x else y + let min x y = if x <= y then x else y end module Option (P : COMPARABLE) = struct type t = P.t option + let compare xs ys = - match xs, ys with - | None, None -> 0 - | None, _ -> -1 - | _, None -> 1 - | Some x, Some y -> P.compare x y - let (=) xs ys = compare xs ys = 0 - let (<>) xs ys = compare xs ys <> 0 - let (<) xs ys = compare xs ys < 0 - let (<=) xs ys = compare xs ys <= 0 - let (>=) xs ys = compare xs ys >= 0 - let (>) xs ys = compare xs ys > 0 - let equal = (=) + match (xs, ys) with + | (None, None) -> + 0 + | (None, _) -> + -1 + | (_, None) -> + 1 + | (Some x, Some y) -> + P.compare x y + + let ( = ) xs ys = compare xs ys = 0 + + let ( <> ) xs ys = compare xs ys <> 0 + + let ( < ) xs ys = compare xs ys < 0 + + let ( <= ) xs 
ys = compare xs ys <= 0 + + let ( >= ) xs ys = compare xs ys >= 0 + + let ( > ) xs ys = compare xs ys > 0 + + let equal = ( = ) + let max x y = if x >= y then x else y + let min x y = if x <= y then x else y end module Char = Make (Char) -module Bool = Make (struct type t = bool let compare = Pervasives.compare end) -module Int = Make (struct type t = int let compare = Pervasives.compare end) + +module Bool = Make (struct + type t = bool + + let compare = Pervasives.compare +end) + +module Int = Make (struct + type t = int + + let compare = Pervasives.compare +end) + module Int32 = Make (Int32) module Int64 = Make (Int64) -module MakeUnsigned (Int : S) (Z : sig val zero : Int.t end) = struct +module MakeUnsigned + (Int : S) (Z : sig + val zero : Int.t + end) = +struct type t = Int.t + let compare va vb = - Int.(if va >= Z.zero then if vb >= Z.zero then compare va vb else -1 - else if vb >= Z.zero then 1 else compare va vb) - let (=) = ((=) : t -> t -> bool) - let (<>) = ((<>) : t -> t -> bool) - let (<) a b = - Int.(if Z.zero <= a then - (a < b || b < Z.zero) - else - (b < Z.zero && a < b)) - let (<=) a b = - Int.(if Z.zero <= a then - (a <= b || b < Z.zero) - else - (b < Z.zero && a <= b)) - let (>=) a b = (<=) b a - let (>) a b = (<) b a - let equal = (=) + Int.( + if va >= Z.zero then if vb >= Z.zero then compare va vb else -1 + else if vb >= Z.zero then 1 + else compare va vb) + + let ( = ) = (( = ) : t -> t -> bool) + + let ( <> ) = (( <> ) : t -> t -> bool) + + let ( < ) a b = + Int.(if Z.zero <= a then a < b || b < Z.zero else b < Z.zero && a < b) + + let ( <= ) a b = + Int.(if Z.zero <= a then a <= b || b < Z.zero else b < Z.zero && a <= b) + + let ( >= ) a b = b <= a + + let ( > ) a b = b < a + + let equal = ( = ) + let max x y = if x >= y then x else y + let min x y = if x <= y then x else y end -module Uint32 = MakeUnsigned (Int32) (struct let zero = 0l end) -module Uint64 = MakeUnsigned (Int64) (struct let zero = 0L end) +module Uint32 = + MakeUnsigned + (Int32) + (struct + let zero = 0l + end) -module Float = Make (struct type t = float let compare = Pervasives.compare end) -module String = Make (String) +module Uint64 = + MakeUnsigned + (Int64) + (struct + let zero = 0L + end) +module Float = Make (struct + type t = float + + let compare = Pervasives.compare +end) + +module String = Make (String) module Z = Make (Z) diff --git a/src/lib_stdlib/compare.mli b/src/lib_stdlib/compare.mli index 8a67a866e75bd804ac560231b5ff5145d710f570..fac841f138e619129ce761c9227748a31f522079 100644 --- a/src/lib_stdlib/compare.mli +++ b/src/lib_stdlib/compare.mli @@ -25,35 +25,56 @@ module type COMPARABLE = sig type t + val compare : t -> t -> int end module type S = sig type t - val (=) : t -> t -> bool - val (<>) : t -> t -> bool - val (<) : t -> t -> bool - val (<=) : t -> t -> bool - val (>=) : t -> t -> bool - val (>) : t -> t -> bool + + val ( = ) : t -> t -> bool + + val ( <> ) : t -> t -> bool + + val ( < ) : t -> t -> bool + + val ( <= ) : t -> t -> bool + + val ( >= ) : t -> t -> bool + + val ( > ) : t -> t -> bool + val compare : t -> t -> int + val equal : t -> t -> bool + val max : t -> t -> t + val min : t -> t -> t end module Make (P : COMPARABLE) : S with type t := P.t module Char : S with type t = char + module Bool : S with type t = bool + module Int : S with type t = int + module Int32 : S with type t = int32 + module Uint32 : S with type t = int32 + module Int64 : S with type t = int64 + module Uint64 : S with type t = int64 + module Float : S with type t = float + module 
String : S with type t = string + module Z : S with type t = Z.t module List (P : COMPARABLE) : S with type t = P.t list + module Option (P : COMPARABLE) : S with type t = P.t option diff --git a/src/lib_stdlib/hashPtree.ml b/src/lib_stdlib/hashPtree.ml index 342617a66ebcb8e60555b8d93e94aeec5c5e3b6c..f8158d762e4aa5032ffbd10e108cc5bbe90b5f5b 100644 --- a/src/lib_stdlib/hashPtree.ml +++ b/src/lib_stdlib/hashPtree.ml @@ -25,30 +25,32 @@ module Ptree_sig = struct module type Value = sig - type t + val equal : t -> t -> bool - val hash : t -> int + val hash : t -> int end - type prefix_order = - | Equal - | Shorter - | Longer - | Different + type prefix_order = Equal | Shorter | Longer | Different module type Prefix = sig - type key (* bit sequence *) + type key (* bit sequence *) + type prefix (* prefix of a bit sequence *) - type mask (* integer length of a bit sequence *) + + type mask (* integer length of a bit sequence *) val equal_key : key -> key -> bool + val equal_mask : mask -> mask -> bool + val equal_prefix : prefix -> prefix -> bool val hash_key : key -> int + val hash_mask : mask -> int + val hash_prefix : prefix -> int val full_length_mask : mask @@ -56,24 +58,31 @@ module Ptree_sig = struct val strictly_shorter_mask : mask -> mask -> bool val key_prefix : key -> prefix + (* Full length prefix *) val prefix_key : prefix -> mask -> key + (* Some key matching the prefix with the given mask *) val match_prefix : key:key -> prefix:prefix -> mask:mask -> bool + (* Does the prefix of length [mask] of [key] equals to [prefix] *) val select_bit : prefix:prefix -> mask:mask -> bool + (* Get the bit of [prefix] at position [mask] assumes that [mask] is less than the length of prefix *) val common_mask : prefix -> prefix -> mask + (* The length of the common part of given prefixes *) val apply_mask : prefix -> mask -> prefix + (* Cut the prefix to the given length *) val compare_prefix : mask -> prefix -> mask -> prefix -> prefix_order + (* [compare_prefix m1 p1 m2 p2]: let p1' (resp p2') be the sub-prefix of length m1 of p1 (resp m2 of p2) The result is @@ -85,38 +94,50 @@ module Ptree_sig = struct end module type S = sig - type key + type prefix + type mask + type value type not_empty = TNot_empty - type empty = TEmpty + + type empty = TEmpty type _ t = private - | Leaf : { - mutable id: int; (* Mutable to get a good sharing semantics *) - mask : mask; - key : key; - value : value; - } -> not_empty t - | Node : { - mutable id : int; + | Leaf : + { mutable id : int; + (* Mutable to get a good sharing semantics *) mask : mask; - prefix : prefix; - true_ : not_empty t; - false_ : not_empty t; - } -> not_empty t + key : key; + value : value } + -> not_empty t + | Node : + { mutable id : int; + mask : mask; + prefix : prefix; + true_ : not_empty t; + false_ : not_empty t } + -> not_empty t | Empty : empty t val leaf : key:key -> mask:mask -> value -> not_empty t - val node : prefix:prefix -> mask:mask -> true_:not_empty t -> false_:not_empty t -> not_empty t + + val node : + prefix:prefix -> + mask:mask -> + true_:not_empty t -> + false_:not_empty t -> + not_empty t + val empty : empty t val equal : not_empty t -> not_empty t -> bool val fast_partial_equal : not_empty t -> not_empty t -> bool + (* if [fast_partial_equal x y] is true, then [equal x y] is true, but if fast_partial_equal returns false, nothing can be asserted. 
*) @@ -126,80 +147,103 @@ module Ptree_sig = struct end module Shared_tree : sig - - module Hash_consed_tree(P:Ptree_sig.Prefix)(V:Ptree_sig.Value) : Ptree_sig.S - with type value = V.t - and type key = P.key - and type prefix = P.prefix - and type mask = P.mask - - module Simple_tree(P:Ptree_sig.Prefix)(V:sig type t val equal : t -> t -> bool end) : Ptree_sig.S - with type value = V.t - and type key = P.key - and type prefix = P.prefix - and type mask = P.mask - + module Hash_consed_tree (P : Ptree_sig.Prefix) (V : Ptree_sig.Value) : + Ptree_sig.S + with type value = V.t + and type key = P.key + and type prefix = P.prefix + and type mask = P.mask + + module Simple_tree + (P : Ptree_sig.Prefix) (V : sig + type t + + val equal : t -> t -> bool + end) : + Ptree_sig.S + with type value = V.t + and type key = P.key + and type prefix = P.prefix + and type mask = P.mask end = struct open Ptree_sig -(* + (* type int2 = { mutable i1 : int; mutable i2 : int } let h2 = { i1 = 0; i2 = 0 } let hash2int x1 x2 = h2.i1 <- x1; h2.i2 <- x2; Hashtbl.hash h2 *) - type int3 = { mutable i1 : int; mutable i2 : int; mutable i3 : int } - let h3 = { i1 = 0; i2 = 0; i3 = 0 } + type int3 = {mutable i1 : int; mutable i2 : int; mutable i3 : int} + + let h3 = {i1 = 0; i2 = 0; i3 = 0} + let hash3int x1 x2 x3 = - h3.i1 <- x1; h3.i2 <- x2; h3.i3 <- x3; + h3.i1 <- x1 ; + h3.i2 <- x2 ; + h3.i3 <- x3 ; Hashtbl.hash h3 - type int4 = { mutable i1 : int; mutable i2 : int; mutable i3 : int; mutable i4 : int } - let h4 = { i1 = 0; i2 = 0; i3 = 0; i4 = 0 } - let hash4int x1 x2 x3 x4 = - h4.i1 <- x1; h4.i2 <- x2; h4.i3 <- x3; h4.i4 <- x4; - Hashtbl.hash h4 + type int4 = { + mutable i1 : int; + mutable i2 : int; + mutable i3 : int; + mutable i4 : int + } + let h4 = {i1 = 0; i2 = 0; i3 = 0; i4 = 0} - module Hash_consed_tree(P:Prefix)(V:Value) : S - with type value = V.t - and type key = P.key - and type prefix = P.prefix - and type mask = P.mask - = struct + let hash4int x1 x2 x3 x4 = + h4.i1 <- x1 ; + h4.i2 <- x2 ; + h4.i3 <- x3 ; + h4.i4 <- x4 ; + Hashtbl.hash h4 + module Hash_consed_tree (P : Prefix) (V : Value) : + S + with type value = V.t + and type key = P.key + and type prefix = P.prefix + and type mask = P.mask = struct type key = P.key + type mask = P.mask + type prefix = P.prefix + type value = V.t type not_empty = TNot_empty - type empty = TEmpty + + type empty = TEmpty type _ t = - | Leaf : { - mutable id: int; (* Mutable to get a good sharing semantics *) - mask : mask; - key : key; - value : value; - } -> not_empty t - | Node : { - mutable id : int; + | Leaf : + { mutable id : int; + (* Mutable to get a good sharing semantics *) mask : mask; - prefix : prefix; - true_ : not_empty t; - false_ : not_empty t; - } -> not_empty t + key : key; + value : value } + -> not_empty t + | Node : + { mutable id : int; + mask : mask; + prefix : prefix; + true_ : not_empty t; + false_ : not_empty t } + -> not_empty t | Empty : empty t let id : not_empty t -> int = function - | Leaf { id ; _ } -> id - | Node { id ; _ } -> id + | Leaf {id; _} -> + id + | Node {id; _} -> + id - let set_id (n : not_empty t) id = match n with - | Leaf r -> r.id <- id - | Node r -> r.id <- id + let set_id (n : not_empty t) id = + match n with Leaf r -> r.id <- id | Node r -> r.id <- id (*let mask : not_empty t -> mask = function | Leaf { mask ; _ } -> mask @@ -207,104 +251,115 @@ end = struct *) (* let prefix_table = WeakPrefixTbl.create 20 *) - module Tree : - Hashtbl.HashedType with type t = not_empty t - = struct - + module Tree : Hashtbl.HashedType with 
type t = not_empty t = struct type nonrec t = not_empty t - let equal (t1 : t) (t2 : t) = match t1, t2 with - | Leaf _, Node _ | Node _, Leaf _-> false - | Leaf { key = p1; value = v1; mask = m1 ; _ }, - Leaf { key = p2; value = v2; mask = m2 ; _ } -> + let equal (t1 : t) (t2 : t) = + match (t1, t2) with + | (Leaf _, Node _) | (Node _, Leaf _) -> + false + | ( Leaf {key = p1; value = v1; mask = m1; _}, + Leaf {key = p2; value = v2; mask = m2; _} ) -> P.equal_key p1 p2 && P.equal_mask m1 m2 && V.equal v1 v2 - | Node { prefix = p1; mask = m1; true_ = t1; false_ = f1 ; _ }, - Node { prefix = p2; mask = m2; true_ = t2; false_ = f2 ; _ } -> + | ( Node {prefix = p1; mask = m1; true_ = t1; false_ = f1; _}, + Node {prefix = p2; mask = m2; true_ = t2; false_ = f2; _} ) -> (* Assumes that only the head can be unshared: this means that structural equality implies physical one on children *) - P.equal_prefix p1 p2 && - P.equal_mask m1 m2 && t1 == t2 && f1 == f2 + P.equal_prefix p1 p2 && P.equal_mask m1 m2 && t1 == t2 && f1 == f2 let hash : t -> int = function - | Leaf { key; value; mask ; _ } -> + | Leaf {key; value; mask; _} -> hash3int (P.hash_key key) (V.hash value) (P.hash_mask mask) - | Node { mask; prefix; true_; false_ ; _ } -> + | Node {mask; prefix; true_; false_; _} -> hash4int - (P.hash_mask mask) (P.hash_prefix prefix) - (id true_) (id false_) - + (P.hash_mask mask) + (P.hash_prefix prefix) + (id true_) + (id false_) end - module WeakTreeTbl = Weak.Make(Tree) + module WeakTreeTbl = Weak.Make (Tree) (* Or move that to a state ? *) let weak_tree_tbl = WeakTreeTbl.create 10 let next = let r = ref 0 in - fun () -> incr r; !r + fun () -> incr r ; !r let leaf ~key ~mask value = - let l = Leaf { id = 0; key; value; mask } in + let l = Leaf {id = 0; key; value; mask} in match WeakTreeTbl.find_opt weak_tree_tbl l with | None -> - set_id l (next ()); - WeakTreeTbl.add weak_tree_tbl l; + set_id l (next ()) ; + WeakTreeTbl.add weak_tree_tbl l ; + l + | Some l -> l - | Some l -> l let node ~prefix ~mask ~true_ ~false_ = - let l = Node { id = 0; mask; prefix; true_; false_ } in + let l = Node {id = 0; mask; prefix; true_; false_} in match WeakTreeTbl.find_opt weak_tree_tbl l with | None -> - set_id l (next ()); - WeakTreeTbl.add weak_tree_tbl l; + set_id l (next ()) ; + WeakTreeTbl.add weak_tree_tbl l ; + l + | Some l -> l - | Some l -> l let empty = Empty - let equal (x:not_empty t) (y:not_empty t) = - x == y + let equal (x : not_empty t) (y : not_empty t) = x == y let fast_partial_equal = equal - - end [@@inline] - - module Simple_tree(P:Ptree_sig.Prefix)(V:sig type t val equal : t -> t -> bool end) : S - with type value = V.t - and type key = P.key - and type prefix = P.prefix - and type mask = P.mask - = struct - + end + [@@inline] + + module Simple_tree + (P : Ptree_sig.Prefix) (V : sig + type t + + val equal : t -> t -> bool + end) : + S + with type value = V.t + and type key = P.key + and type prefix = P.prefix + and type mask = P.mask = struct type key = P.key + type mask = P.mask + type prefix = P.prefix + type value = V.t type not_empty = TNot_empty - type empty = TEmpty + + type empty = TEmpty type _ t = - | Leaf : { - mutable id: int; (* Mutable to get a good sharing semantics *) + | Leaf : + { mutable id : int; + (* Mutable to get a good sharing semantics *) mask : mask; - key : key; - value : value; - } -> not_empty t - | Node : { - mutable id : int; - mask : mask; - prefix : prefix; - true_ : not_empty t; - false_ : not_empty t; - } -> not_empty t + key : key; + value : value } + -> 
not_empty t + | Node : + { mutable id : int; + mask : mask; + prefix : prefix; + true_ : not_empty t; + false_ : not_empty t } + -> not_empty t | Empty : empty t let id : not_empty t -> int = function - | Leaf { id ; _ } -> id - | Node { id ; _ } -> id + | Leaf {id; _} -> + id + | Node {id; _} -> + id (*let set_id (n : not_empty t) id = match n with | Leaf r -> r.id <- id @@ -314,47 +369,49 @@ end = struct | Leaf { mask ; _ } -> mask | Node { mask ; _ } -> mask *) - let leaf ~key ~mask value = - Leaf { id = 0; key; value; mask } + let leaf ~key ~mask value = Leaf {id = 0; key; value; mask} let node ~prefix ~mask ~true_ ~false_ = - Node { id = 0; mask; prefix; true_; false_ } + Node {id = 0; mask; prefix; true_; false_} let empty = Empty - let rec equal_not_empty (x:not_empty t) (y:not_empty t) = - x == y || - match x, y with - | Leaf l1, Leaf l2 -> - P.equal_key l1.key l2.key && - V.equal l1.value l2.value - | Node n1, Node n2 -> - P.equal_prefix n1.prefix n2.prefix && - P.equal_mask n1.mask n2.mask && - equal_not_empty n1.true_ n2.true_ && - equal_not_empty n1.false_ n2.false_ - | Node _, Leaf _ | Leaf _, Node _ -> false - - let equal : type a b. a t -> b t -> bool = fun x y -> - match x, y with - | Empty, Empty -> true - | Leaf _, Leaf _ -> + let rec equal_not_empty (x : not_empty t) (y : not_empty t) = + x == y + || + match (x, y) with + | (Leaf l1, Leaf l2) -> + P.equal_key l1.key l2.key && V.equal l1.value l2.value + | (Node n1, Node n2) -> + P.equal_prefix n1.prefix n2.prefix + && P.equal_mask n1.mask n2.mask + && equal_not_empty n1.true_ n2.true_ + && equal_not_empty n1.false_ n2.false_ + | (Node _, Leaf _) | (Leaf _, Node _) -> + false + + let equal : type a b. a t -> b t -> bool = + fun x y -> + match (x, y) with + | (Empty, Empty) -> + true + | (Leaf _, Leaf _) -> equal_not_empty x y - | Node _, Node _ -> + | (Node _, Node _) -> equal_not_empty x y - | _, _ -> + | (_, _) -> false - let fast_partial_equal (x:not_empty t) (y:not_empty t) = - x == y - - end [@@inline] - + let fast_partial_equal (x : not_empty t) (y : not_empty t) = x == y + end + [@@inline] end module type Value = sig type t + val equal : t -> t -> bool + val hash : t -> int end @@ -362,19 +419,29 @@ module type Bits = sig type t val lnot : t -> t - val (land) : t -> t -> t - val (lxor) : t -> t -> t - val (lor) : t -> t -> t - val (lsr) : t -> int -> t - val (lsl) : t -> int -> t + + val ( land ) : t -> t -> t + + val ( lxor ) : t -> t -> t + + val ( lor ) : t -> t -> t + + val ( lsr ) : t -> int -> t + + val ( lsl ) : t -> int -> t + val pred : t -> t val less_than : t -> t -> bool val highest_bit : t -> t + val equal : t -> t -> bool + val hash : t -> int + val zero : t + val one : t val size : int @@ -384,91 +451,98 @@ module type Size = sig val size : int end -module Bits(S:Size) = struct +module Bits (S : Size) = struct type t = Z.t + let size = S.size + let higher_bit = Z.shift_left Z.one size + let mask = Z.pred higher_bit let mark n = Z.logor higher_bit n + let unmark n = Z.logxor higher_bit n let one = mark Z.one + let zero = higher_bit + let hash = Z.hash + let equal = Z.equal + let less_than = Z.lt let highest_bit_unmarked n = - if Z.equal Z.zero n then - Z.zero - else - Z.(Z.one lsl (Pervasives.pred (numbits n))) + if Z.equal Z.zero n then Z.zero + else Z.(Z.one lsl Pervasives.pred (numbits n)) let highest_bit n = mark (highest_bit_unmarked (unmark n)) let lnot x = Z.logor (Z.lognot x) higher_bit - let (land) = Z.logand - let (lxor) a b = Z.logor (Z.logxor a b) higher_bit - let (lor) = Z.logor - let 
(lsr) a n = - Z.logor - (Z.shift_right_trunc (Z.logxor a higher_bit) n) - higher_bit - let (lsl) a n = - Z.logor - (Z.logand (Z.shift_left a n) mask) - higher_bit + + let ( land ) = Z.logand + + let ( lxor ) a b = Z.logor (Z.logxor a b) higher_bit + + let ( lor ) = Z.logor + + let ( lsr ) a n = + Z.logor (Z.shift_right_trunc (Z.logxor a higher_bit) n) higher_bit + + let ( lsl ) a n = Z.logor (Z.logand (Z.shift_left a n) mask) higher_bit let pred = Z.pred let of_z n = mark n + let to_z n = unmark n end -module BE_gen_prefix(Bits:Bits) : Ptree_sig.Prefix - with type key = Bits.t - and type prefix = Bits.t - and type mask = Bits.t -= struct +module BE_gen_prefix (Bits : Bits) : + Ptree_sig.Prefix + with type key = Bits.t + and type prefix = Bits.t + and type mask = Bits.t = struct type key = Bits.t + type mask = Bits.t (* Only a single bit set *) + type prefix = Bits.t let equal_key = Bits.equal + let equal_mask = Bits.equal + let equal_prefix = Bits.equal let hash_key x = Bits.hash x + let hash_mask x = Bits.hash x + let hash_prefix x = Bits.hash x open Bits let full_length_mask = Bits.one - let strictly_shorter_mask (m1:mask) m2 = - Bits.less_than m2 m1 + let strictly_shorter_mask (m1 : mask) m2 = Bits.less_than m2 m1 - let select_bit ~prefix ~mask = - not (Bits.equal (prefix land mask) Bits.zero) + let select_bit ~prefix ~mask = not (Bits.equal (prefix land mask) Bits.zero) - let apply_mask prefix mask = - prefix land (lnot (pred mask)) + let apply_mask prefix mask = prefix land lnot (pred mask) let match_prefix ~key ~prefix ~mask = equal_prefix (apply_mask key mask) prefix - let common_mask p0 p1 = - (Bits.highest_bit (* [@inlined] *)) (p0 lxor p1) + let common_mask p0 p1 = Bits.highest_bit (* [@inlined] *) (p0 lxor p1) let key_prefix x = x + let prefix_key p _m = p - let smaller_set_mask m1 m2 = - (lnot (pred m1)) - land - (lnot (pred m2)) + let smaller_set_mask m1 m2 = lnot (pred m1) land lnot (pred m2) let compare_prefix m1 p1 m2 p2 = let min_mask = smaller_set_mask m1 m2 in @@ -478,46 +552,51 @@ module BE_gen_prefix(Bits:Bits) : Ptree_sig.Prefix if m1 > m2 then Ptree_sig.Shorter else if m1 < m2 then Ptree_sig.Longer else Ptree_sig.Equal - else - Ptree_sig.Different + else Ptree_sig.Different end - -module LE_prefix : Ptree_sig.Prefix - with type key = int - and type prefix = int - and type mask = int -= struct +module LE_prefix : + Ptree_sig.Prefix + with type key = int + and type prefix = int + and type mask = int = struct type key = int + type mask = int (* Only a single bit set *) + type prefix = int - let equal_key = (==) - let equal_mask = (==) - let equal_prefix = (==) + let equal_key = ( == ) + + let equal_mask = ( == ) + + let equal_prefix = ( == ) let hash_key x = x + let hash_mask x = x + let hash_prefix x = x - let full_length_mask = (-1) lxor ((-1) lsr 1) + let full_length_mask = -1 lxor (-1 lsr 1) - let strictly_shorter_mask (m1:mask) m2 = - m1 < m2 + let strictly_shorter_mask (m1 : mask) m2 = m1 < m2 - let select_bit ~prefix ~mask = (prefix land mask) != 0 + let select_bit ~prefix ~mask = prefix land mask != 0 - let apply_mask prefix mask = prefix land (mask-1) - let match_prefix ~key ~prefix ~mask = - (apply_mask key mask) == prefix + let apply_mask prefix mask = prefix land (mask - 1) + + let match_prefix ~key ~prefix ~mask = apply_mask key mask == prefix + + let lowest_bit x = x land -x - let lowest_bit x = x land (-x) let common_mask p0 p1 = lowest_bit (p0 lxor p1) let key_prefix x = x + let prefix_key p _m = p - let smaller_set_mask m1 m2 = (m1-1) land (m2-1) + let 
smaller_set_mask m1 m2 = (m1 - 1) land (m2 - 1) let compare_prefix m1 p1 m2 p2 = let min_mask = smaller_set_mask m1 m2 in @@ -527,57 +606,61 @@ module LE_prefix : Ptree_sig.Prefix if m1 < m2 then Ptree_sig.Shorter else if m1 > m2 then Ptree_sig.Longer else Ptree_sig.Equal - else - Ptree_sig.Different + else Ptree_sig.Different end -module BE_prefix : Ptree_sig.Prefix - with type key = int - and type prefix = int - and type mask = int -= struct +module BE_prefix : + Ptree_sig.Prefix + with type key = int + and type prefix = int + and type mask = int = struct type key = int + type mask = int (* Only a single bit set *) + type prefix = int - let equal_key = (==) - let equal_mask = (==) - let equal_prefix = (==) + let equal_key = ( == ) + + let equal_mask = ( == ) + + let equal_prefix = ( == ) let hash_key x = x + let hash_mask x = x + let hash_prefix x = x let full_length_mask = 1 - let strictly_shorter_mask (m1:mask) m2 = - m1 > m2 + let strictly_shorter_mask (m1 : mask) m2 = m1 > m2 - let select_bit ~prefix ~mask = (prefix land mask) != 0 + let select_bit ~prefix ~mask = prefix land mask != 0 module Nativeint_infix = struct - let (lor) = Nativeint.logor + let ( lor ) = Nativeint.logor + (*let (lsl) = Nativeint.shift_left*) - let (lsr) = Nativeint.shift_right_logical + let ( lsr ) = Nativeint.shift_right_logical + (*let (asr) = Nativeint.shift_right*) - let (land) = Nativeint.logand - let (lnot) = Nativeint.lognot - let (lxor) = Nativeint.logxor - let (-) = Nativeint.sub + let ( land ) = Nativeint.logand + + let lnot = Nativeint.lognot + + let ( lxor ) = Nativeint.logxor + + let ( - ) = Nativeint.sub end let apply_mask prefix mask = let open Nativeint_infix in let prefix = Nativeint.of_int prefix in let mask = Nativeint.of_int mask in - Nativeint.to_int - ( - prefix land - (lnot (mask - 1n)) - ) + Nativeint.to_int (prefix land lnot (mask - 1n)) - let match_prefix ~key ~prefix ~mask = - (apply_mask key mask) == prefix + let match_prefix ~key ~prefix ~mask = apply_mask key mask == prefix let highest_bit x = Nativeint_infix.( @@ -586,14 +669,8 @@ module BE_prefix : Ptree_sig.Prefix let x = x lor (x lsr 4) in let x = x lor (x lsr 8) in let x = x lor (x lsr 16) in - let x = - if Sys.word_size > 32 then - x lor (x lsr 32) - else - x - in - Nativeint.to_int (x - (x lsr 1)) - ) + let x = if Sys.word_size > 32 then x lor (x lsr 32) else x in + Nativeint.to_int (x - (x lsr 1))) let common_mask p0 p1 = let open Nativeint_infix in @@ -602,13 +679,12 @@ module BE_prefix : Ptree_sig.Prefix highest_bit (p0 lxor p1) let key_prefix x = x + let prefix_key p _m = p let smaller_set_mask m1 m2 = let open Nativeint_infix in - (lnot (m1 - 1n)) - land - (lnot (m2 - 1n)) + lnot (m1 - 1n) land lnot (m2 - 1n) let compare_prefix m1 p1 m2 p2 = let open Nativeint_infix in @@ -623,19 +699,21 @@ module BE_prefix : Ptree_sig.Prefix if m1 > m2 then Ptree_sig.Shorter else if m1 < m2 then Ptree_sig.Longer else Ptree_sig.Equal - else - Ptree_sig.Different + else Ptree_sig.Different end -module Make(P:Ptree_sig.Prefix)(V:Value) = struct - - module T = Shared_tree.Hash_consed_tree(P)(V) +module Make (P : Ptree_sig.Prefix) (V : Value) = struct + module T = Shared_tree.Hash_consed_tree (P) (V) type t = E : 'a T.t -> t [@@ocaml.unboxed] + type key = T.key + type value = T.value + type mask = T.mask -(* + + (* let (=) = `Do_not_use_polymorphic_equality let (<=) = `Do_not_use_polymorphic_comparison let (>=) = `Do_not_use_polymorphic_comparison @@ -644,54 +722,60 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct let compare = 
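For context, the reflowed [BE_prefix.highest_bit] is the classic bit-smearing trick on [Nativeint]. The same computation on plain non-negative [int], with the initial shifts by 1 and 2 assumed from the standard formulation since this hunk only shows the later ones:

```ocaml
(* Smear the top set bit into every lower position, then subtract the
   lower bits away, leaving only the highest set bit. *)
let highest_bit x =
  let x = x lor (x lsr 1) in
  let x = x lor (x lsr 2) in
  let x = x lor (x lsr 4) in
  let x = x lor (x lsr 8) in
  let x = x lor (x lsr 16) in
  let x = if Sys.word_size > 32 then x lor (x lsr 32) else x in
  x - (x lsr 1)

let () = assert (highest_bit 0b1011010 = 0b1000000)
```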
`Do_not_use_polymorphic_comparison *) let equal (E t1) (E t2) = - match t1, t2 with - | T.Empty, T.Empty -> true - | T.Empty, T.Leaf _ -> false - | T.Empty, T.Node _ -> false - | T.Leaf _, T.Empty -> false - | T.Node _, T.Empty -> false - | T.Node _, T.Node _ -> T.equal t1 t2 - | T.Node _, T.Leaf _ -> T.equal t1 t2 - | T.Leaf _, T.Node _ -> T.equal t1 t2 - | T.Leaf _, T.Leaf _ -> T.equal t1 t2 - - let select_key_bit k m = - P.select_bit ~prefix:(P.key_prefix k) ~mask:m + match (t1, t2) with + | (T.Empty, T.Empty) -> + true + | (T.Empty, T.Leaf _) -> + false + | (T.Empty, T.Node _) -> + false + | (T.Leaf _, T.Empty) -> + false + | (T.Node _, T.Empty) -> + false + | (T.Node _, T.Node _) -> + T.equal t1 t2 + | (T.Node _, T.Leaf _) -> + T.equal t1 t2 + | (T.Leaf _, T.Node _) -> + T.equal t1 t2 + | (T.Leaf _, T.Leaf _) -> + T.equal t1 t2 + + let select_key_bit k m = P.select_bit ~prefix:(P.key_prefix k) ~mask:m let matching_key k1 k2 mask = let p1 = P.apply_mask (P.key_prefix k1) mask in let p2 = P.apply_mask (P.key_prefix k2) mask in P.equal_prefix p1 p2 - let rec mem : type k. key -> k T.t -> bool = fun k -> function + let rec mem : type k. key -> k T.t -> bool = + fun k -> function | T.Empty -> false - | T.Leaf { key; mask ; _} -> + | T.Leaf {key; mask; _} -> matching_key key k mask - | T.Node { prefix = _; mask; true_; false_ ; _ } -> - mem k - (if select_key_bit k mask then true_ else false_) + | T.Node {prefix = _; mask; true_; false_; _} -> + mem k (if select_key_bit k mask then true_ else false_) - let rec mem_exact : type k. key -> k T.t -> bool = fun k -> function + let rec mem_exact : type k. key -> k T.t -> bool = + fun k -> function | T.Empty -> false - | T.Leaf { key; mask ; _ } -> + | T.Leaf {key; mask; _} -> P.equal_key k key && P.equal_mask mask P.full_length_mask - | T.Node { prefix = _; mask; true_; false_ ; _ } -> - mem_exact k - (if select_key_bit k mask then true_ else false_) - - let rec find_ne k (t: T.not_empty T.t) = match t with - | T.Leaf { key; value; mask ; _ } -> - if matching_key key k mask then - Some value - else - None - | T.Node { prefix = _; mask; true_; false_ ; _ } -> - find_ne k - (if select_key_bit k mask then true_ else false_) + | T.Node {prefix = _; mask; true_; false_; _} -> + mem_exact k (if select_key_bit k mask then true_ else false_) - let find : type k. key -> k T.t -> value option = fun k -> function + let rec find_ne k (t : T.not_empty T.t) = + match t with + | T.Leaf {key; value; mask; _} -> + if matching_key key k mask then Some value else None + | T.Node {prefix = _; mask; true_; false_; _} -> + find_ne k (if select_key_bit k mask then true_ else false_) + + let find : type k. 
key -> k T.t -> value option = + fun k -> function | T.Empty -> None | T.Leaf _ as t -> @@ -699,75 +783,84 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct | T.Node _ as t -> find_ne k t - let singleton ~key ~value ~mask = - T.leaf ~key value ~mask + let singleton ~key ~value ~mask = T.leaf ~key value ~mask let join ~mask p0 t0 p1 t1 = (* assumes p0 <> p1 *) let c_mask = P.common_mask p0 p1 in let mask = if P.strictly_shorter_mask c_mask mask then c_mask else mask in let prefix = P.apply_mask p1 mask in - let true_, false_ = - if P.select_bit ~prefix:p0 ~mask then - t0, t1 - else - t1, t0 + let (true_, false_) = + if P.select_bit ~prefix:p0 ~mask then (t0, t1) else (t1, t0) in T.node ~prefix ~mask ~true_ ~false_ - let rebuild_ne_branch node prefix mask ~node_true ~node_false ~true_ ~false_ = - if T.fast_partial_equal node_true true_ && - T.fast_partial_equal node_false false_ then - node - else - T.node ~prefix ~mask ~true_ ~false_ + let rebuild_ne_branch node prefix mask ~node_true ~node_false ~true_ ~false_ + = + if + T.fast_partial_equal node_true true_ + && T.fast_partial_equal node_false false_ + then node + else T.node ~prefix ~mask ~true_ ~false_ - let rec add_ne combine ~key ~value ?(mask=P.full_length_mask) t = + let rec add_ne combine ~key ~value ?(mask = P.full_length_mask) t = match t with | T.Leaf leaf -> - if P.equal_key key leaf.key && P.equal_mask leaf.mask P.full_length_mask then - if value == leaf.value then - t - else - T.leaf ~key (combine value leaf.value) ~mask + if + P.equal_key key leaf.key && P.equal_mask leaf.mask P.full_length_mask + then + if value == leaf.value then t + else T.leaf ~key (combine value leaf.value) ~mask else if - P.strictly_shorter_mask leaf.mask mask && - P.match_prefix ~key ~prefix:(P.key_prefix leaf.key) ~mask:leaf.mask then - (* The previous leaf shadows the new one: no modification *) + P.strictly_shorter_mask leaf.mask mask + && P.match_prefix + ~key + ~prefix:(P.key_prefix leaf.key) + ~mask:leaf.mask + then (* The previous leaf shadows the new one: no modification *) t else if - P.strictly_shorter_mask mask leaf.mask && - P.match_prefix ~key:leaf.key ~prefix:(P.key_prefix key) ~mask then + P.strictly_shorter_mask mask leaf.mask + && P.match_prefix ~key:leaf.key ~prefix:(P.key_prefix key) ~mask + then (* The new leaf shadows the previous one: replace *) T.leaf ~key (combine value leaf.value) ~mask else - join ~mask - (P.key_prefix key) (T.leaf ~key value ~mask) - (P.key_prefix leaf.key) t + join + ~mask + (P.key_prefix key) + (T.leaf ~key value ~mask) + (P.key_prefix leaf.key) + t | T.Node node -> if P.match_prefix ~key ~prefix:node.prefix ~mask:node.mask then - let true_, false_ = + let (true_, false_) = if select_key_bit key node.mask then - add_ne combine ~key ~value ~mask node.true_, node.false_ - else - node.true_, add_ne combine ~key ~value ~mask node.false_ + (add_ne combine ~key ~value ~mask node.true_, node.false_) + else (node.true_, add_ne combine ~key ~value ~mask node.false_) in - rebuild_ne_branch t node.prefix node.mask - ~node_false:node.false_ ~node_true:node.true_ - ~true_ ~false_ + rebuild_ne_branch + t + node.prefix + node.mask + ~node_false:node.false_ + ~node_true:node.true_ + ~true_ + ~false_ else - join ~mask - (P.key_prefix key) (T.leaf ~key value ~mask) - node.prefix t - - let add : type k. 
- (value -> value -> value) -> key:key -> value:value -> - ?mask:P.mask -> k T.t -> - T.not_empty T.t = fun combine ~key ~value ?(mask=P.full_length_mask) -> - function + join ~mask (P.key_prefix key) (T.leaf ~key value ~mask) node.prefix t + + let add : + type k. + (value -> value -> value) -> + key:key -> + value:value -> + ?mask:P.mask -> + k T.t -> + T.not_empty T.t = + fun combine ~key ~value ?(mask = P.full_length_mask) -> function | T.Empty -> singleton ~key ~value ~mask - (* Should be merged by matcher *) | T.Leaf _ as t -> add_ne combine ~key ~value ~mask t @@ -776,45 +869,77 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct let empty = E T.empty - let rebuild_branch - node prefix mask ~node_true ~node_false - ~true_:(E true_) ~false_:(E false_) = - match true_, false_ with - | T.Empty, T.Empty -> + let rebuild_branch node prefix mask ~node_true ~node_false ~true_:(E true_) + ~false_:(E false_) = + match (true_, false_) with + | (T.Empty, T.Empty) -> empty - | T.Empty, t -> + | (T.Empty, t) -> E t - | t, T.Empty -> + | (t, T.Empty) -> E t - | T.Leaf _ as true_, (T.Leaf _ as false_) -> - E (rebuild_ne_branch node prefix mask ~node_true ~node_false ~true_ ~false_) - | T.Leaf _ as true_, (T.Node _ as false_) -> - E (rebuild_ne_branch node prefix mask ~node_true ~node_false ~true_ ~false_) - | T.Node _ as true_, (T.Leaf _ as false_) -> - E (rebuild_ne_branch node prefix mask ~node_true ~node_false ~true_ ~false_) - | T.Node _ as true_, (T.Node _ as false_) -> - E (rebuild_ne_branch node prefix mask ~node_true ~node_false ~true_ ~false_) + | ((T.Leaf _ as true_), (T.Leaf _ as false_)) -> + E + (rebuild_ne_branch + node + prefix + mask + ~node_true + ~node_false + ~true_ + ~false_) + | ((T.Leaf _ as true_), (T.Node _ as false_)) -> + E + (rebuild_ne_branch + node + prefix + mask + ~node_true + ~node_false + ~true_ + ~false_) + | ((T.Node _ as true_), (T.Leaf _ as false_)) -> + E + (rebuild_ne_branch + node + prefix + mask + ~node_true + ~node_false + ~true_ + ~false_) + | ((T.Node _ as true_), (T.Node _ as false_)) -> + E + (rebuild_ne_branch + node + prefix + mask + ~node_true + ~node_false + ~true_ + ~false_) let rec remove_ne : key -> T.not_empty T.t -> t = - fun key t -> - match t with - | T.Leaf leaf -> - if matching_key leaf.key key leaf.mask then - E T.empty - else - E t - | T.Node node -> - if P.match_prefix ~key ~prefix:node.prefix ~mask:node.mask then - let true_, false_ = - if select_key_bit key node.mask then - remove_ne key node.true_, E node.false_ - else - E node.true_, remove_ne key node.false_ - in - rebuild_branch t node.prefix node.mask - ~node_true:node.true_ ~node_false:node.false_ ~true_ ~false_ - else - E t + fun key t -> + match t with + | T.Leaf leaf -> + if matching_key leaf.key key leaf.mask then E T.empty else E t + | T.Node node -> + if P.match_prefix ~key ~prefix:node.prefix ~mask:node.mask then + let (true_, false_) = + if select_key_bit key node.mask then + (remove_ne key node.true_, E node.false_) + else (E node.true_, remove_ne key node.false_) + in + rebuild_branch + t + node.prefix + node.mask + ~node_true:node.true_ + ~node_false:node.false_ + ~true_ + ~false_ + else E t let remove key (E t) = match t with @@ -826,30 +951,32 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct remove_ne key t let rec remove_prefix_ne : key -> mask -> T.not_empty T.t -> t = - fun key mask t -> - match t with - | T.Leaf leaf -> - if matching_key key leaf.key mask then - E T.empty - else - E t - | T.Node node -> - match P.compare_prefix mask (P.key_prefix key) 
node.mask node.prefix with - | Different -> - E t - | Equal -> - E T.empty - | Shorter -> - E T.empty - | Longer -> - let true_, false_ = - if select_key_bit key node.mask then - remove_prefix_ne key mask node.true_, E node.false_ - else - E node.true_, remove_prefix_ne key mask node.false_ - in - rebuild_branch t node.prefix node.mask - ~node_true:node.true_ ~node_false:node.false_ ~true_ ~false_ + fun key mask t -> + match t with + | T.Leaf leaf -> + if matching_key key leaf.key mask then E T.empty else E t + | T.Node node -> ( + match P.compare_prefix mask (P.key_prefix key) node.mask node.prefix with + | Different -> + E t + | Equal -> + E T.empty + | Shorter -> + E T.empty + | Longer -> + let (true_, false_) = + if select_key_bit key node.mask then + (remove_prefix_ne key mask node.true_, E node.false_) + else (E node.true_, remove_prefix_ne key mask node.false_) + in + rebuild_branch + t + node.prefix + node.mask + ~node_true:node.true_ + ~node_false:node.false_ + ~true_ + ~false_ ) let remove_prefix key mask (E t) = match t with @@ -861,25 +988,29 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct remove_prefix_ne key mask t let rec remove_ne_exact : key -> T.not_empty T.t -> t = - fun key t -> - match t with - | T.Leaf leaf -> - if P.equal_key leaf.key key && P.equal_mask leaf.mask P.full_length_mask then - E T.empty - else - E t - | T.Node node -> - if P.match_prefix ~key ~prefix:node.prefix ~mask:node.mask then - let true_, false_ = - if select_key_bit key node.mask then - remove_ne_exact key node.true_, E node.false_ - else - E node.true_, remove_ne_exact key node.false_ - in - rebuild_branch t node.prefix node.mask - ~node_true:node.true_ ~node_false:node.false_ ~true_ ~false_ - else - E t + fun key t -> + match t with + | T.Leaf leaf -> + if + P.equal_key leaf.key key && P.equal_mask leaf.mask P.full_length_mask + then E T.empty + else E t + | T.Node node -> + if P.match_prefix ~key ~prefix:node.prefix ~mask:node.mask then + let (true_, false_) = + if select_key_bit key node.mask then + (remove_ne_exact key node.true_, E node.false_) + else (E node.true_, remove_ne_exact key node.false_) + in + rebuild_branch + t + node.prefix + node.mask + ~node_true:node.true_ + ~node_false:node.false_ + ~true_ + ~false_ + else E t let remove_exact key (E t) = match t with @@ -893,25 +1024,28 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct let rec replace_subtree_ne ~key ~id value t = match t with | T.Leaf leaf -> - if leaf.id == id then - T.leaf ~key:leaf.key ~mask:leaf.mask value - else - t + if leaf.id == id then T.leaf ~key:leaf.key ~mask:leaf.mask value else t | T.Node node -> if node.id == id then - T.leaf ~key:(P.prefix_key node.prefix node.mask) ~mask:node.mask value - else - if P.match_prefix ~key ~prefix:node.prefix ~mask:node.mask then - let true_, false_ = + T.leaf + ~key:(P.prefix_key node.prefix node.mask) + ~mask:node.mask + value + else if P.match_prefix ~key ~prefix:node.prefix ~mask:node.mask then + let (true_, false_) = if select_key_bit key node.mask then - replace_subtree_ne ~key ~id value node.true_, node.false_ - else - node.true_, replace_subtree_ne ~key ~id value node.false_ + (replace_subtree_ne ~key ~id value node.true_, node.false_) + else (node.true_, replace_subtree_ne ~key ~id value node.false_) in - rebuild_ne_branch t node.prefix node.mask - ~node_true:node.true_ ~node_false:node.false_ ~true_ ~false_ - else - t + rebuild_ne_branch + t + node.prefix + node.mask + ~node_true:node.true_ + ~node_false:node.false_ + ~true_ + ~false_ + else t let 
replace_subtree ~replaced:(E replaced) value t = let replace_subtree_aux ~key ~id value (E t) = @@ -931,17 +1065,19 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct | T.Node node -> replace_subtree_aux ~key:(P.prefix_key node.prefix node.mask) - ~id:node.id value t - + ~id:node.id + value + t - let rec fold_ne : (key -> mask -> value -> 'a -> 'a) -> T.not_empty T.t -> 'a -> 'a = - fun f t acc -> - match t with - | T.Leaf {key; mask; value; _} -> - f key mask value acc - | T.Node node -> - let acc = fold_ne f node.false_ acc in - fold_ne f node.true_ acc + let rec fold_ne : + (key -> mask -> value -> 'a -> 'a) -> T.not_empty T.t -> 'a -> 'a = + fun f t acc -> + match t with + | T.Leaf {key; mask; value; _} -> + f key mask value acc + | T.Node node -> + let acc = fold_ne f node.false_ acc in + fold_ne f node.true_ acc let fold f (E t) acc = match t with @@ -956,22 +1092,29 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct type t = T.not_empty T.t let hash = T.id + let equal t1 t2 = T.id t1 == T.id t2 end - module Map_cache = Ephemeron.K1.Make(T_id) + + module Map_cache = Ephemeron.K1.Make (T_id) module type Map_Reduce = sig type result + val default : result + val map : t -> key -> T.value -> result + val reduce : t -> result -> result -> result end - module Map_Reduce(M:Map_Reduce) = struct + + module Map_Reduce (M : Map_Reduce) = struct let cache : M.result Map_cache.t = Map_cache.create 10 let rec map_reduce_ne t = match Map_cache.find_opt cache t with - | Some v -> v + | Some v -> + v | None -> let v = match t with @@ -982,10 +1125,10 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct let v_false = map_reduce_ne node.false_ in M.reduce (E t) v_true v_false in - Map_cache.add cache t v; - v + Map_cache.add cache t v ; v - let run (E t) = match t with + let run (E t) = + match t with | T.Empty -> M.default | T.Leaf _ as t -> @@ -995,8 +1138,7 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct let rec filter_ne f t = let result = map_reduce_ne t in - if f result then - E t + if f result then E t else match t with | T.Leaf _ -> @@ -1004,73 +1146,92 @@ module Make(P:Ptree_sig.Prefix)(V:Value) = struct | T.Node node -> let true_ = filter_ne f node.true_ in let false_ = filter_ne f node.false_ in - rebuild_branch t node.prefix node.mask - ~node_true:node.true_ ~node_false:node.false_ ~true_ ~false_ - - let filter f (E t) = match t with + rebuild_branch + t + node.prefix + node.mask + ~node_true:node.true_ + ~node_false:node.false_ + ~true_ + ~false_ + + let filter f (E t) = + match t with | T.Empty -> empty | T.Leaf _ as t -> filter_ne f t | T.Node _ as t -> filter_ne f t - end (* Packing in the existential *) - let mem key (E t) = - mem key t - - let mem_exact key (E t) = - mem_exact key t + let mem key (E t) = mem key t - let find key (E t) = - find key t + let mem_exact key (E t) = mem_exact key t - let singleton ~key ~value ~mask = - E (singleton ~key ~value ~mask) + let find key (E t) = find key t - let add combine ~key ~value ?mask (E t) = - E (add combine ~key ~value ?mask t) + let singleton ~key ~value ~mask = E (singleton ~key ~value ~mask) -end [@@inline] + let add combine ~key ~value ?mask (E t) = E (add combine ~key ~value ?mask t) +end +[@@inline] module type S = sig type key + type value + type mask + type t val equal : t -> t -> bool val empty : t + val singleton : key:key -> value:value -> mask:mask -> t - val add : (value -> value -> value) -> key:key -> value:value -> - ?mask:mask -> t -> t + + val add : + (value -> value -> value) -> key:key -> value:value -> 
?mask:mask -> t -> t + val remove : key -> t -> t + val remove_exact : key -> t -> t + val remove_prefix : key -> mask -> t -> t + val mem : key -> t -> bool + val mem_exact : key -> t -> bool + val find : key -> t -> value option + val replace_subtree : replaced:t -> value -> t -> t + val fold : (key -> mask -> value -> 'a -> 'a) -> t -> 'a -> 'a module type Map_Reduce = sig type result + val default : result + val map : t -> key -> value -> result + val reduce : t -> result -> result -> result end - module Map_Reduce(M:Map_Reduce) : sig + + module Map_Reduce (M : Map_Reduce) : sig val run : t -> M.result + val filter : (M.result -> bool) -> t -> t end - end -module Make_LE(V:Value) = Make(LE_prefix)(V) -module Make_BE(V:Value) = Make(BE_prefix)(V) -module Make_BE_gen(V:Value)(B:Bits) = Make(BE_gen_prefix(B))(V) -module Make_BE_sized(V:Value)(S:Size) = Make(BE_gen_prefix(Bits(S)))(V) +module Make_LE (V : Value) = Make (LE_prefix) (V) +module Make_BE (V : Value) = Make (BE_prefix) (V) +module Make_BE_gen (V : Value) (B : Bits) = Make (BE_gen_prefix (B)) (V) +module Make_BE_sized (V : Value) (S : Size) = + Make (BE_gen_prefix (Bits (S))) (V) diff --git a/src/lib_stdlib/hashPtree.mli b/src/lib_stdlib/hashPtree.mli index 9ec085b88d581810ee03c4ef10919cdfdea90930..2404820be8ff091250fcbce16d51c9ea4ceea515 100644 --- a/src/lib_stdlib/hashPtree.mli +++ b/src/lib_stdlib/hashPtree.mli @@ -27,7 +27,9 @@ module type Value = sig type t + val equal : t -> t -> bool + val hash : t -> int end @@ -35,19 +37,29 @@ module type Bits = sig type t val lnot : t -> t - val (land) : t -> t -> t - val (lxor) : t -> t -> t - val (lor) : t -> t -> t - val (lsr) : t -> int -> t - val (lsl) : t -> int -> t + + val ( land ) : t -> t -> t + + val ( lxor ) : t -> t -> t + + val ( lor ) : t -> t -> t + + val ( lsr ) : t -> int -> t + + val ( lsl ) : t -> int -> t + val pred : t -> t val less_than : t -> t -> bool val highest_bit : t -> t + val equal : t -> t -> bool + val hash : t -> int + val zero : t + val one : t val size : int @@ -57,21 +69,27 @@ module type Size = sig val size : int end -module Bits(S:Size) : sig +module Bits (S : Size) : sig include Bits + val of_z : Z.t -> t + val to_z : t -> Z.t end module type S = sig type key + type value + type mask + type t val equal : t -> t -> bool val empty : t + val singleton : key:key -> value:value -> mask:mask -> t (** [add combine ~key ~value ?mask t] @@ -80,8 +98,8 @@ module type S = sig Assumes that forall x, [combine x x = x] *) - val add : (value -> value -> value) -> key:key -> value:value -> - ?mask:mask -> t -> t + val add : + (value -> value -> value) -> key:key -> value:value -> ?mask:mask -> t -> t (** [remove key t] Remove the entire subtree speficied by the mask associated with key in the tree. 
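As a usage reference for the interface being reformatted, here is a hypothetical sketch against [HashPtree], assuming the compilation unit is visible under that name; [String_value], [M], [Count] and the [keep_old] combiner are illustrative names only.

```ocaml
(* Illustrative only: a big-endian Patricia tree with [int] keys and
   [string] values, exercised through the signature above. *)
module String_value = struct
  type t = string

  let equal = String.equal

  let hash = Hashtbl.hash
end

module M = HashPtree.Make_BE (String_value)

(* Combiner used when a key is already bound; the interface assumes it is
   idempotent on equal values. *)
let keep_old _new_value old_value = old_value

let t =
  M.add keep_old ~key:2 ~value:"two"
    (M.add keep_old ~key:1 ~value:"one" M.empty)

let () =
  assert (M.mem 1 t) ;
  assert (M.find 3 t = None) ;
  assert (M.fold (fun _key _mask _v n -> n + 1) t 0 = 2)

(* The memoised map/reduce runs once per shared subtree. *)
module Count = M.Map_Reduce (struct
  type result = int

  let default = 0

  let map _t _key _value = 1

  let reduce _t left right = left + right
end)

let () = assert (Count.run t = 2)
```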
Otherwise remove only the key *) @@ -115,11 +133,15 @@ module type S = sig module type Map_Reduce = sig type result + val default : result + val map : t -> key -> value -> result + val reduce : t -> result -> result -> result end - module Map_Reduce(M:Map_Reduce) : sig + + module Map_Reduce (M : Map_Reduce) : sig (** run has a constant amortized complexity *) val run : t -> M.result @@ -136,10 +158,16 @@ module type S = sig *) val filter : (M.result -> bool) -> t -> t end - end -module Make_LE(V:Value) : S with type key = int and type value = V.t and type mask = int -module Make_BE(V:Value) : S with type key = int and type value = V.t and type mask = int -module Make_BE_gen(V:Value)(B:Bits) : S with type key = B.t and type value = V.t and type mask = B.t -module Make_BE_sized(V:Value)(S:Size) : S with type key = Bits(S).t and type value = V.t and type mask = Bits(S).t +module Make_LE (V : Value) : + S with type key = int and type value = V.t and type mask = int + +module Make_BE (V : Value) : + S with type key = int and type value = V.t and type mask = int + +module Make_BE_gen (V : Value) (B : Bits) : + S with type key = B.t and type value = V.t and type mask = B.t + +module Make_BE_sized (V : Value) (S : Size) : + S with type key = Bits(S).t and type value = V.t and type mask = Bits(S).t diff --git a/src/lib_stdlib/lwt_canceler.ml b/src/lib_stdlib/lwt_canceler.ml index 91416b368de1723a0b4421050d19885a3ef74125..dad083c66cedcd3321202de441d7c273ff62edc7 100644 --- a/src/lib_stdlib/lwt_canceler.ml +++ b/src/lib_stdlib/lwt_canceler.ml @@ -26,44 +26,38 @@ open Lwt.Infix type t = { - cancelation: unit Lwt_condition.t ; - cancelation_complete: unit Lwt_condition.t ; - mutable cancel_hook: unit -> unit Lwt.t ; - mutable canceling: bool ; - mutable canceled: bool ; + cancelation : unit Lwt_condition.t; + cancelation_complete : unit Lwt_condition.t; + mutable cancel_hook : unit -> unit Lwt.t; + mutable canceling : bool; + mutable canceled : bool } let create () = let cancelation = Lwt_condition.create () in let cancelation_complete = Lwt_condition.create () in - { cancelation ; cancelation_complete ; - cancel_hook = (fun () -> Lwt.return_unit) ; - canceling = false ; - canceled = false ; - } + { cancelation; + cancelation_complete; + cancel_hook = (fun () -> Lwt.return_unit); + canceling = false; + canceled = false } let cancel st = - if st.canceled then - Lwt.return_unit - else if st.canceling then - Lwt_condition.wait st.cancelation_complete - else begin + if st.canceled then Lwt.return_unit + else if st.canceling then Lwt_condition.wait st.cancelation_complete + else ( st.canceling <- true ; Lwt_condition.broadcast st.cancelation () ; - Lwt.finalize - st.cancel_hook - (fun () -> - st.canceled <- true ; - Lwt_condition.broadcast st.cancelation_complete () ; - Lwt.return_unit) - end + Lwt.finalize st.cancel_hook (fun () -> + st.canceled <- true ; + Lwt_condition.broadcast st.cancelation_complete () ; + Lwt.return_unit) ) let on_cancel st cb = let hook = st.cancel_hook in st.cancel_hook <- (fun () -> hook () >>= cb) let cancelation st = - if st.canceling then Lwt.return_unit - else Lwt_condition.wait st.cancelation + if st.canceling then Lwt.return_unit else Lwt_condition.wait st.cancelation let canceled st = st.canceling diff --git a/src/lib_stdlib/lwt_dropbox.ml b/src/lib_stdlib/lwt_dropbox.ml index f20c7a1f45937fdab18691a45f78e9b13e76ee4c..89623573e8e157dbf7ff2da8d7e191184ba7111d 100644 --- a/src/lib_stdlib/lwt_dropbox.ml +++ b/src/lib_stdlib/lwt_dropbox.ml @@ -27,55 +27,43 @@ open Lwt.Infix 
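For [Lwt_canceler], the reindented chain in [on_cancel] makes the ordering guarantee visible: hooks run once, in registration order, when [cancel] is first called. A hedged sketch, assuming [canceled] is exported as in the implementation shown:

```ocaml
(* Cancellation hooks run once, in registration order. *)
open Lwt.Infix

let demo () =
  let canceler = Lwt_canceler.create () in
  let log = ref [] in
  Lwt_canceler.on_cancel canceler (fun () ->
      log := "first" :: !log ;
      Lwt.return_unit) ;
  Lwt_canceler.on_cancel canceler (fun () ->
      log := "second" :: !log ;
      Lwt.return_unit) ;
  Lwt_canceler.cancel canceler
  >|= fun () ->
  assert (Lwt_canceler.canceled canceler) ;
  assert (List.rev !log = ["first"; "second"])

let () = Lwt_main.run (demo ())
```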
exception Closed -type 'a t = - { mutable data : 'a option ; - mutable closed : bool ; - mutable put_waiter : (unit Lwt.t * unit Lwt.u) option ; - } +type 'a t = { + mutable data : 'a option; + mutable closed : bool; + mutable put_waiter : (unit Lwt.t * unit Lwt.u) option +} -let create () = - { data = None ; - closed = false ; - put_waiter = None ; - } +let create () = {data = None; closed = false; put_waiter = None} let notify_put dropbox = match dropbox.put_waiter with - | None -> () + | None -> + () | Some (_waiter, wakener) -> dropbox.put_waiter <- None ; Lwt.wakeup_later wakener () let put dropbox elt = - if dropbox.closed then - raise Closed - else begin + if dropbox.closed then raise Closed + else ( dropbox.data <- Some elt ; - notify_put dropbox - end + notify_put dropbox ) let peek dropbox = dropbox.data let close dropbox = - if not dropbox.closed then begin + if not dropbox.closed then ( dropbox.closed <- true ; - notify_put dropbox ; - end + notify_put dropbox ) let wait_put ~timeout dropbox = match dropbox.put_waiter with | Some (waiter, _wakener) -> - Lwt.choose [ - timeout ; - Lwt.protected waiter - ] + Lwt.choose [timeout; Lwt.protected waiter] | None -> - let waiter, wakener = Lwt.wait () in + let (waiter, wakener) = Lwt.wait () in dropbox.put_waiter <- Some (waiter, wakener) ; - Lwt.choose [ - timeout ; - Lwt.protected waiter ; - ] + Lwt.choose [timeout; Lwt.protected waiter] let rec take dropbox = match dropbox.data with @@ -83,11 +71,10 @@ let rec take dropbox = dropbox.data <- None ; Lwt.return elt | None -> - if dropbox.closed then - Lwt.fail Closed + if dropbox.closed then Lwt.fail Closed else - wait_put ~timeout:(Lwt_utils.never_ending ()) dropbox >>= fun () -> - take dropbox + wait_put ~timeout:(Lwt_utils.never_ending ()) dropbox + >>= fun () -> take dropbox let rec take_with_timeout timeout dropbox = match dropbox.data with @@ -97,10 +84,8 @@ let rec take_with_timeout timeout dropbox = Lwt.return_some elt | None -> if Lwt.is_sleeping timeout then - if dropbox.closed then - Lwt.fail Closed + if dropbox.closed then Lwt.fail Closed else - wait_put ~timeout dropbox >>= fun () -> - take_with_timeout timeout dropbox - else - Lwt.return_none + wait_put ~timeout dropbox + >>= fun () -> take_with_timeout timeout dropbox + else Lwt.return_none diff --git a/src/lib_stdlib/lwt_dropbox.mli b/src/lib_stdlib/lwt_dropbox.mli index d11244055402e4c76b08190f20f624f65fe7a425..bd50520b42cd19380c64a08f806b65e469250f99 100644 --- a/src/lib_stdlib/lwt_dropbox.mli +++ b/src/lib_stdlib/lwt_dropbox.mli @@ -25,38 +25,37 @@ (** A 'dropbox' with a single element. *) -type 'a t (** Type of dropbox holding a value of type ['a] *) +type 'a t -val create: unit -> 'a t (** Create an empty dropbox. *) +val create : unit -> 'a t -val put: 'a t -> 'a -> unit (** Put an element inside the dropbox. If the dropbox was already containing an element, the old element is replaced by the new one. The function might return [Closed] if the dropbox has been closed with [close]. *) +val put : 'a t -> 'a -> unit -val take: 'a t -> 'a Lwt.t (** Wait until the dropbox contains an element, then returns the elements. The elements is removed from the dropbox. The function might return [Closed] if the dropbox is empty and closed. *) +val take : 'a t -> 'a Lwt.t -val take_with_timeout: unit Lwt.t -> 'a t -> 'a option Lwt.t (** Like [take] except that it returns [None] after [timeout seconds] if the dropbox is still empty. 
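For [Lwt_dropbox], a small round-trip showing the replace-on-put semantics that the reformatted code preserves:

```ocaml
(* The dropbox keeps at most one element; a second [put] overwrites the
   first, and [take] empties it. *)
open Lwt.Infix

let demo () =
  let box = Lwt_dropbox.create () in
  Lwt_dropbox.put box 1 ;
  Lwt_dropbox.put box 2 ;
  (* 2 replaced 1 *)
  Lwt_dropbox.take box
  >|= fun v ->
  assert (v = 2) ;
  assert (Lwt_dropbox.peek box = None) ;
  Lwt_dropbox.close box

let () = Lwt_main.run (demo ())
```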
*) +val take_with_timeout : unit Lwt.t -> 'a t -> 'a option Lwt.t -val peek: 'a t -> 'a option (** Read the current element of the dropbox without removing it. It immediatly returns [None] if the dropbox is empty. *) +val peek : 'a t -> 'a option -exception Closed (** The exception returned when trying to access a 'closed' dropbox. *) +exception Closed -val close: 'a t -> unit (** Close the dropox. It terminates all the waiting reader with the exception [Closed]. All further read or write will also immediatly fail with [Closed], except if the dropbox is not empty when [close] is called. In that can, a single (and last) [take] will succeed. *) - +val close : 'a t -> unit diff --git a/src/lib_stdlib/lwt_idle_waiter.ml b/src/lib_stdlib/lwt_idle_waiter.ml index bedc446e622f8278775e7ea54dde2918be54a4b4..d44fb5c1bf1a33ff641f589f7560ece15e5c1abc 100644 --- a/src/lib_stdlib/lwt_idle_waiter.ml +++ b/src/lib_stdlib/lwt_idle_waiter.ml @@ -25,31 +25,34 @@ open Lwt.Infix -type t = - { mutable pending_tasks : unit Lwt.u list ; - mutable pending_idle : (unit -> unit Lwt.t) list ; - mutable running_tasks : int ; - mutable running_idle : bool ; - mutable prevent_tasks : bool } +type t = { + mutable pending_tasks : unit Lwt.u list; + mutable pending_idle : (unit -> unit Lwt.t) list; + mutable running_tasks : int; + mutable running_idle : bool; + mutable prevent_tasks : bool +} let create () = - { pending_tasks = [] ; - pending_idle = [] ; - running_tasks = 0 ; - running_idle = false ; + { pending_tasks = []; + pending_idle = []; + running_tasks = 0; + running_idle = false; prevent_tasks = false } let rec may_run_idle_tasks w = if w.running_tasks = 0 && not w.running_idle then match w.pending_idle with - | [] -> () + | [] -> + () | pending_idle -> w.running_idle <- true ; w.prevent_tasks <- false ; w.pending_idle <- [] ; Lwt.async (fun () -> let pending_idle = List.rev pending_idle in - Lwt_list.iter_s (fun f -> f ()) pending_idle >>= fun () -> + Lwt_list.iter_s (fun f -> f ()) pending_idle + >>= fun () -> w.running_idle <- false ; let pending_tasks = List.rev w.pending_tasks in w.pending_tasks <- [] ; @@ -62,38 +65,35 @@ let wrap_error f = (fun () -> f () >>= fun r -> Lwt.return_ok r) (fun exn -> Lwt.return_error exn) -let unwrap_error = function - | Ok r -> Lwt.return r - | Error exn -> Lwt.fail exn +let unwrap_error = function Ok r -> Lwt.return r | Error exn -> Lwt.fail exn let wakeup_error u = function - | Ok r -> Lwt.wakeup u r - | Error exn -> Lwt.wakeup_exn u exn + | Ok r -> + Lwt.wakeup u r + | Error exn -> + Lwt.wakeup_exn u exn let rec task w f = - if w.running_idle || w.prevent_tasks then - let t, u = Lwt.task () in + if w.running_idle || w.prevent_tasks then ( + let (t, u) = Lwt.task () in w.pending_tasks <- u :: w.pending_tasks ; - t >>= fun () -> task w f - else begin + t >>= fun () -> task w f ) + else ( w.running_tasks <- w.running_tasks + 1 ; - wrap_error f >>= fun res -> + wrap_error f + >>= fun res -> w.running_tasks <- w.running_tasks - 1 ; may_run_idle_tasks w ; - unwrap_error res - end + unwrap_error res ) let when_idle w f = - let t, u = Lwt.task () in + let (t, u) = Lwt.task () in let canceled = ref false in Lwt.on_cancel t (fun () -> canceled := true) ; let f () = - if !canceled then - Lwt.return_unit - else - wrap_error f >>= fun res -> - wakeup_error u res ; - Lwt.return_unit in + if !canceled then Lwt.return_unit + else wrap_error f >>= fun res -> wakeup_error u res ; Lwt.return_unit + in w.pending_idle <- f :: w.pending_idle ; may_run_idle_tasks w ; t diff --git 
a/src/lib_stdlib/lwt_idle_waiter.mli b/src/lib_stdlib/lwt_idle_waiter.mli index c8e6fe48c3fcd39c3f84b9646f9914a503e248c8..2a96e08e92e8421c834996ca04b78097221e4d13 100644 --- a/src/lib_stdlib/lwt_idle_waiter.mli +++ b/src/lib_stdlib/lwt_idle_waiter.mli @@ -23,27 +23,27 @@ (* *) (*****************************************************************************) -type t (** A lightweight scheduler to run tasks concurrently as well as special callbacks that must be run in mutual exclusion with the tasks (and each other). *) +type t -val create : unit -> t (** Creates a new task / idle callback scheduler *) +val create : unit -> t -val task : t -> (unit -> 'a Lwt.t) -> 'a Lwt.t (** Schedule a task to be run as soon as no idle callbacks is running, or as soon as the next idle callback has been run if it was scheduled by {!force_idle}. *) +val task : t -> (unit -> 'a Lwt.t) -> 'a Lwt.t -val when_idle : t -> (unit -> 'a Lwt.t) -> 'a Lwt.t (** Runs a callback as soon as no task is running. Does not prevent new tasks from being scheduled, the calling code should ensure that some idle time will eventually come. Calling this function from inside the callback will result in a dead lock. *) +val when_idle : t -> (unit -> 'a Lwt.t) -> 'a Lwt.t -val force_idle : t -> (unit -> 'a Lwt.t) -> 'a Lwt.t (** Runs a callback as soon as possible. Lets all current tasks finish, but postpones all new tasks until the end of the callback. Calling this function from inside the callback will result in a dead lock. *) +val force_idle : t -> (unit -> 'a Lwt.t) -> 'a Lwt.t diff --git a/src/lib_stdlib/lwt_pipe.ml b/src/lib_stdlib/lwt_pipe.ml index 6b51645c62c83c6f942190ccc5730089abc4e9c5..03fe6538e42944896005f6ea05e653acb851e791 100644 --- a/src/lib_stdlib/lwt_pipe.ml +++ b/src/lib_stdlib/lwt_pipe.ml @@ -25,209 +25,178 @@ open Lwt.Infix -type 'a t = - { queue : (int * 'a) Queue.t ; - mutable current_size : int ; - max_size : int ; - compute_size : ('a -> int) ; - mutable closed : bool ; - mutable push_waiter : (unit Lwt.t * unit Lwt.u) option ; - mutable pop_waiter : (unit Lwt.t * unit Lwt.u) option ; - empty: unit Lwt_condition.t ; - } +type 'a t = { + queue : (int * 'a) Queue.t; + mutable current_size : int; + max_size : int; + compute_size : 'a -> int; + mutable closed : bool; + mutable push_waiter : (unit Lwt.t * unit Lwt.u) option; + mutable pop_waiter : (unit Lwt.t * unit Lwt.u) option; + empty : unit Lwt_condition.t +} let push_overhead = 4 * (Sys.word_size / 8) let create ?size () = - let max_size, compute_size = + let (max_size, compute_size) = match size with - | None -> max_int, (fun _ -> 0) - | Some (max_size, compute_size) -> max_size, compute_size in - { queue = Queue.create () ; - current_size = 0 ; - max_size ; - compute_size ; - closed = false ; - push_waiter = None ; - pop_waiter = None ; - empty = Lwt_condition.create () ; - } + | None -> + (max_int, fun _ -> 0) + | Some (max_size, compute_size) -> + (max_size, compute_size) + in + { queue = Queue.create (); + current_size = 0; + max_size; + compute_size; + closed = false; + push_waiter = None; + pop_waiter = None; + empty = Lwt_condition.create () } let notify_push q = match q.push_waiter with - | None -> () + | None -> + () | Some (_, w) -> q.push_waiter <- None ; Lwt.wakeup_later w () let notify_pop q = match q.pop_waiter with - | None -> () + | None -> + () | Some (_, w) -> q.pop_waiter <- None ; Lwt.wakeup_later w () let wait_push q = match q.push_waiter with - | Some (t, _) -> Lwt.protected t + | Some (t, _) -> + Lwt.protected t | None -> - let waiter, 
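For [Lwt_idle_waiter], an illustrative sketch of the task versus idle-callback contract documented above:

```ocaml
(* [task] calls run concurrently; a [when_idle] callback only runs once
   no task is in flight. *)
open Lwt.Infix

let demo () =
  let w = Lwt_idle_waiter.create () in
  let counter = ref 0 in
  let bump () =
    Lwt_idle_waiter.task w (fun () -> incr counter ; Lwt.return_unit)
  in
  Lwt.join [bump (); bump (); bump ()]
  >>= fun () ->
  Lwt_idle_waiter.when_idle w (fun () -> Lwt.return !counter)
  >|= fun total -> assert (total = 3)

let () = Lwt_main.run (demo ())
```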
wakener = Lwt.wait () in + let (waiter, wakener) = Lwt.wait () in q.push_waiter <- Some (waiter, wakener) ; Lwt.protected waiter let wait_pop q = match q.pop_waiter with - | Some (t, _) -> Lwt.protected t + | Some (t, _) -> + Lwt.protected t | None -> - let waiter, wakener = Lwt.wait () in + let (waiter, wakener) = Lwt.wait () in q.pop_waiter <- Some (waiter, wakener) ; Lwt.protected waiter -let length { queue ; _ } = Queue.length queue -let is_empty { queue ; _ } = Queue.is_empty queue +let length {queue; _} = Queue.length queue + +let is_empty {queue; _} = Queue.is_empty queue let rec empty q = - if is_empty q - then Lwt.return_unit - else (Lwt_condition.wait q.empty >>= fun () -> empty q) + if is_empty q then Lwt.return_unit + else Lwt_condition.wait q.empty >>= fun () -> empty q exception Closed -let rec push ({ closed ; queue ; current_size ; - max_size ; compute_size ; _ } as q) elt = +let rec push ({closed; queue; current_size; max_size; compute_size; _} as q) + elt = let elt_size = compute_size elt in - if closed then - Lwt.fail Closed - else if current_size + elt_size < max_size || Queue.is_empty queue then begin + if closed then Lwt.fail Closed + else if current_size + elt_size < max_size || Queue.is_empty queue then ( Queue.push (elt_size, elt) queue ; q.current_size <- current_size + elt_size ; notify_push q ; - Lwt.return_unit - end else - wait_pop q >>= fun () -> - push q elt - -let push_now ({ closed ; queue ; compute_size ; - current_size ; max_size ; _ - } as q) elt = + Lwt.return_unit ) + else wait_pop q >>= fun () -> push q elt + +let push_now ({closed; queue; compute_size; current_size; max_size; _} as q) + elt = if closed then raise Closed ; let elt_size = compute_size elt in (current_size + elt_size < max_size || Queue.is_empty queue) - && begin - Queue.push (elt_size, elt) queue ; + && + ( Queue.push (elt_size, elt) queue ; q.current_size <- current_size + elt_size ; notify_push q ; - true - end + true ) exception Full -let push_now_exn q elt = - if not (push_now q elt) then raise Full +let push_now_exn q elt = if not (push_now q elt) then raise Full -let safe_push_now q elt = - try push_now_exn q elt - with _ -> () +let safe_push_now q elt = try push_now_exn q elt with _ -> () -let rec pop ({ closed ; queue ; empty ; current_size ; _ } as q) = - if not (Queue.is_empty queue) then +let rec pop ({closed; queue; empty; current_size; _} as q) = + if not (Queue.is_empty queue) then ( let (elt_size, elt) = Queue.pop queue in notify_pop q ; q.current_size <- current_size - elt_size ; - (if Queue.length queue = 0 then Lwt_condition.signal empty ()); - Lwt.return elt - else if closed then - Lwt.fail Closed - else - wait_push q >>= fun () -> - pop q + if Queue.length queue = 0 then Lwt_condition.signal empty () ; + Lwt.return elt ) + else if closed then Lwt.fail Closed + else wait_push q >>= fun () -> pop q let rec pop_with_timeout timeout q = - if not (Queue.is_empty q.queue) then begin + if not (Queue.is_empty q.queue) then ( Lwt.cancel timeout ; - pop q >>= Lwt.return_some - end else if Lwt.is_sleeping timeout then - if q.closed then begin - Lwt.cancel timeout ; - Lwt.fail Closed - end else + pop q >>= Lwt.return_some ) + else if Lwt.is_sleeping timeout then + if q.closed then (Lwt.cancel timeout ; Lwt.fail Closed) + else let waiter = wait_push q in - Lwt.choose [ - timeout ; - Lwt.protected waiter ; - ] >>= fun () -> - pop_with_timeout timeout q - else - Lwt.return_none - -let rec peek ({ closed ; queue ; _ } as q) = + Lwt.choose [timeout; Lwt.protected waiter] + >>= fun 
() -> pop_with_timeout timeout q + else Lwt.return_none + +let rec peek ({closed; queue; _} as q) = if not (Queue.is_empty queue) then let (_elt_size, elt) = Queue.peek queue in Lwt.return elt - else if closed then - Lwt.fail Closed - else - wait_push q >>= fun () -> - peek q - -let peek_all { queue ; closed ; _ } = - if closed then - [] - else - List.rev (Queue.fold (fun acc (_, e) -> e :: acc) [] queue) + else if closed then Lwt.fail Closed + else wait_push q >>= fun () -> peek q + +let peek_all {queue; closed; _} = + if closed then [] + else List.rev (Queue.fold (fun acc (_, e) -> e :: acc) [] queue) exception Empty -let pop_now_exn ({ closed ; queue ; empty ; current_size ; _ } as q) = - if Queue.is_empty queue then - (if closed then raise Closed else raise Empty) ; +let pop_now_exn ({closed; queue; empty; current_size; _} as q) = + if Queue.is_empty queue then if closed then raise Closed else raise Empty ; let (elt_size, elt) = Queue.pop queue in - (if Queue.length queue = 0 then Lwt_condition.signal empty ()); + if Queue.length queue = 0 then Lwt_condition.signal empty () ; q.current_size <- current_size - elt_size ; notify_pop q ; elt let pop_now q = - match pop_now_exn q with - | exception Empty -> None - | elt -> Some elt + match pop_now_exn q with exception Empty -> None | elt -> Some elt let rec values_available q = if is_empty q then - if q.closed then - raise Closed - else - wait_push q >>= fun () -> - values_available q - else - Lwt.return_unit + if q.closed then raise Closed + else wait_push q >>= fun () -> values_available q + else Lwt.return_unit let rec pop_all_loop q acc = match pop_now_exn q with - | exception Empty -> List.rev acc - | e -> pop_all_loop q (e :: acc) + | exception Empty -> + List.rev acc + | e -> + pop_all_loop q (e :: acc) -let pop_all q = - pop q >>= fun e -> - Lwt.return (pop_all_loop q [e]) +let pop_all q = pop q >>= fun e -> Lwt.return (pop_all_loop q [e]) -let pop_all_now q = - pop_all_loop q [] +let pop_all_now q = pop_all_loop q [] let close q = - if not q.closed then begin + if not q.closed then ( q.closed <- true ; notify_push q ; - notify_pop q ; - end + notify_pop q ) let rec iter q ~f = - Lwt.catch begin fun () -> - pop q >>= fun elt -> - f elt >>= fun () -> - iter q ~f - end begin function - | Closed -> Lwt.return_unit - | exn -> Lwt.fail exn - end - + Lwt.catch + (fun () -> pop q >>= fun elt -> f elt >>= fun () -> iter q ~f) + (function Closed -> Lwt.return_unit | exn -> Lwt.fail exn) diff --git a/src/lib_stdlib/lwt_pipe.mli b/src/lib_stdlib/lwt_pipe.mli index b9feaaaded574581ff57bbe53f46a0b3a5f0477f..60cf4d0a1b59bc79c1789b9a2c054fc6b9204cf1 100644 --- a/src/lib_stdlib/lwt_pipe.mli +++ b/src/lib_stdlib/lwt_pipe.mli @@ -27,92 +27,91 @@ library. They are implemented with [Queue]s, limited in size, and use lwt primitives for concurrent access. *) -type 'a t (** Type of queues holding values of type ['a]. *) +type 'a t -val create : ?size:(int * ('a -> int)) -> unit -> 'a t (** [create ~size:(max_size, compute_size)] is an empty queue that can hold max [size] bytes of data, using [compute_size] to compute the size of a datum. If want to count allocated bytes precisely, you need to add [push_overhead] to the result of[compute_size]. When no [size] argument is provided, the queue is unbounded. *) +val create : ?size:int * ('a -> int) -> unit -> 'a t -val push : 'a t -> 'a -> unit Lwt.t (** [push q v] is a thread that blocks while [q] contains more than [size] elements, then adds [v] at the end of [q]. 
*) +val push : 'a t -> 'a -> unit Lwt.t -val pop : 'a t -> 'a Lwt.t (** [pop q] is a thread that blocks while [q] is empty, then removes and returns the first element in [q]. *) +val pop : 'a t -> 'a Lwt.t -val pop_with_timeout : unit Lwt.t -> 'a t -> 'a option Lwt.t (** [pop t q] is a thread that blocks while [q] is empty, then removes and returns the first element [v] in [q] and to return [Some v], unless no message could be popped in [t] seconds, in which case it returns [None]. As concurrent readers are allowed, [None] does not necessarily mean that no value has been pushed. *) +val pop_with_timeout : unit Lwt.t -> 'a t -> 'a option Lwt.t -val pop_all : 'a t -> 'a list Lwt.t (** [pop_all q] is a thread that blocks while [q] is empty, then removes and returns all the element in [q] (in the order they were inserted). *) +val pop_all : 'a t -> 'a list Lwt.t -val pop_all_now : 'a t -> 'a list (** [pop_all_now q] returns all the element in [q] (in the order they were inserted), or [[]] if [q] is empty. *) +val pop_all_now : 'a t -> 'a list -val peek : 'a t -> 'a Lwt.t (** [peek] is like [pop] except it does not removes the first element. *) +val peek : 'a t -> 'a Lwt.t -val peek_all : 'a t -> 'a list (** [peek_all q] returns the elements in the [q] (oldest first), or [[]] if empty. *) +val peek_all : 'a t -> 'a list -val values_available : 'a t -> unit Lwt.t (** [values_available] is like [peek] but it ignores the value returned. *) +val values_available : 'a t -> unit Lwt.t -val push_now : 'a t -> 'a -> bool (** [push_now q v] adds [v] at the ends of [q] immediately and returns [false] if [q] is currently full, [true] otherwise. *) +val push_now : 'a t -> 'a -> bool exception Full -val push_now_exn : 'a t -> 'a -> unit (** [push_now q v] adds [v] at the ends of [q] immediately or raise [Full] if [q] is currently full. *) +val push_now_exn : 'a t -> 'a -> unit -val safe_push_now : 'a t -> 'a -> unit (** [safe_push_now q v] may adds [v] at the ends of [q]. It does nothing if the queue is fulled or closed. *) +val safe_push_now : 'a t -> 'a -> unit -val pop_now : 'a t -> 'a option (** [pop_now q] maybe removes and returns the first element in [q] if [q] contains at least one element. *) +val pop_now : 'a t -> 'a option exception Empty -val pop_now_exn : 'a t -> 'a (** [pop_now_exn q] removes and returns the first element in [q] if [q] contains at least one element, or raise [Empty] otherwise. *) +val pop_now_exn : 'a t -> 'a -val length : 'a t -> int (** [length q] is the number of elements in [q]. *) +val length : 'a t -> int -val is_empty : 'a t -> bool (** [is_empty q] is [true] if [q] is empty, [false] otherwise. *) +val is_empty : 'a t -> bool -val empty : 'a t -> unit Lwt.t (** [empty q] returns when [q] becomes empty. *) +val empty : 'a t -> unit Lwt.t -val iter : 'a t -> f:('a -> unit Lwt.t) -> unit Lwt.t (** [iter q ~f] pops all elements of [q] and applies [f] on them. *) +val iter : 'a t -> f:('a -> unit Lwt.t) -> unit Lwt.t exception Closed -val close : 'a t -> unit (** [close q] the write end of [q]: * Future write attempts will fail with [Closed]. @@ -122,6 +121,7 @@ val close : 'a t -> unit Thus, after a pipe has been closed, reads never block. Close is idempotent. *) +val close : 'a t -> unit -val push_overhead: int (** The allocated size in bytes when pushing in the queue. 
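For [Lwt_pipe], a sketch exercising the size bound and FIFO behaviour of the reformatted queue; the 64-byte budget and the per-element cost of 16 are arbitrary values chosen for the example:

```ocaml
(* A pipe bounded to 64 "bytes", counting each element as 16.
   [pop] is FIFO; [pop_all_now] drains whatever is left. *)
open Lwt.Infix

let demo () =
  let q = Lwt_pipe.create ~size:(64, fun _ -> 16) () in
  Lwt_list.iter_s (fun i -> Lwt_pipe.push q i) [1; 2; 3]
  >>= fun () ->
  Lwt_pipe.pop q
  >|= fun first ->
  assert (first = 1) ;
  assert (Lwt_pipe.pop_all_now q = [2; 3]) ;
  Lwt_pipe.close q

let () = Lwt_main.run (demo ())
```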
*) +val push_overhead : int diff --git a/src/lib_stdlib/lwt_utils.ml b/src/lib_stdlib/lwt_utils.ml index d6f6f831bce82df1a26f7f7a3649fe8843636b75..de84698a755d36613d610d2958535c9915558abe 100644 --- a/src/lib_stdlib/lwt_utils.ml +++ b/src/lib_stdlib/lwt_utils.ml @@ -24,161 +24,154 @@ (*****************************************************************************) module LC = Lwt_condition - open Lwt.Infix -let may ~f = function - | None -> Lwt.return_unit - | Some x -> f x +let may ~f = function None -> Lwt.return_unit | Some x -> f x let never_ending () = fst (Lwt.wait ()) -type trigger = - | Absent - | Present - | Waiting of unit Lwt.t * unit Lwt.u +type trigger = Absent | Present | Waiting of unit Lwt.t * unit Lwt.u let trigger () : (unit -> unit) * (unit -> unit Lwt.t) = let state = ref Absent in let trigger () = match !state with - | Absent -> state := Present - | Present -> () + | Absent -> + state := Present + | Present -> + () | Waiting (_waiter, wakener) -> - state := Absent; + state := Absent ; Lwt.wakeup wakener () in let wait () = match !state with | Absent -> - let waiter, wakener = Lwt.wait () in + let (waiter, wakener) = Lwt.wait () in state := Waiting (waiter, wakener) ; waiter | Present -> - state := Absent; + state := Absent ; Lwt.return_unit - | Waiting (waiter, _wakener) -> + | Waiting (waiter, _wakener) -> waiter in - trigger, wait + (trigger, wait) (* A worker launcher, takes a cancel callback to call upon *) let worker name ~on_event ~run ~cancel = let stop = LC.create () in let fail e = - on_event name + on_event + name (`Failed (Printf.sprintf "Exception: %s" (Printexc.to_string e))) - >>= fun () -> - cancel () + >>= fun () -> cancel () in let waiter = LC.wait stop in - on_event name `Started >>= fun () -> - Lwt.async - (fun () -> - Lwt.catch run fail >>= fun () -> - LC.signal stop (); - Lwt.return_unit) ; - waiter >>= fun () -> - on_event name `Ended >>= fun () -> - Lwt.return_unit - + on_event name `Started + >>= fun () -> + Lwt.async (fun () -> + Lwt.catch run fail >>= fun () -> LC.signal stop () ; Lwt.return_unit) ; + waiter >>= fun () -> on_event name `Ended >>= fun () -> Lwt.return_unit let rec chop k l = - if k = 0 then l else begin - match l with - | _::t -> chop (k-1) t - | _ -> assert false - end + if k = 0 then l + else match l with _ :: t -> chop (k - 1) t | _ -> assert false + let stable_sort cmp l = let rec rev_merge l1 l2 accu = - match l1, l2 with - | [], l2 -> Lwt.return (List.rev_append l2 accu) - | l1, [] -> Lwt.return (List.rev_append l1 accu) - | h1::t1, h2::t2 -> - cmp h1 h2 >>= function - | x when x <= 0 -> rev_merge t1 l2 (h1::accu) - | _ -> rev_merge l1 t2 (h2::accu) + match (l1, l2) with + | ([], l2) -> + Lwt.return (List.rev_append l2 accu) + | (l1, []) -> + Lwt.return (List.rev_append l1 accu) + | (h1 :: t1, h2 :: t2) -> ( + cmp h1 h2 + >>= function + | x when x <= 0 -> + rev_merge t1 l2 (h1 :: accu) + | _ -> + rev_merge l1 t2 (h2 :: accu) ) in let rec rev_merge_rev l1 l2 accu = - match l1, l2 with - | [], l2 -> Lwt.return (List.rev_append l2 accu) - | l1, [] -> Lwt.return (List.rev_append l1 accu) - | h1::t1, h2::t2 -> - cmp h1 h2 >>= function - | x when x > 0 -> rev_merge_rev t1 l2 (h1::accu) - | _ -> rev_merge_rev l1 t2 (h2::accu) + match (l1, l2) with + | ([], l2) -> + Lwt.return (List.rev_append l2 accu) + | (l1, []) -> + Lwt.return (List.rev_append l1 accu) + | (h1 :: t1, h2 :: t2) -> ( + cmp h1 h2 + >>= function + | x when x > 0 -> + rev_merge_rev t1 l2 (h1 :: accu) + | _ -> + rev_merge_rev l1 t2 (h2 :: accu) ) in let rec sort n l 
= - match n, l with - | 2, x1 :: x2 :: _ -> begin - cmp x1 x2 >|= function - | x when x <= 0 -> [x1; x2] - | _ -> [x2; x1] - end - | 3, x1 :: x2 :: x3 :: _ -> begin - cmp x1 x2 >>= function - | x when x <= 0 -> begin - cmp x2 x3 >>= function - | x when x <= 0 -> Lwt.return [x1; x2; x3] - | _ -> cmp x1 x3 >|= function - | x when x <= 0 -> [x1; x3; x2] - | _ -> [x3; x1; x2] - end - | _ -> begin - cmp x1 x3 >>= function - | x when x <= 0 -> Lwt.return [x2; x1; x3] - | _ -> cmp x2 x3 >|= function - | x when x <= 0 -> [x2; x3; x1] - | _ -> [x3; x2; x1] - end - end - | n, l -> + match (n, l) with + | (2, x1 :: x2 :: _) -> ( + cmp x1 x2 >|= function x when x <= 0 -> [x1; x2] | _ -> [x2; x1] ) + | (3, x1 :: x2 :: x3 :: _) -> ( + cmp x1 x2 + >>= function + | x when x <= 0 -> ( + cmp x2 x3 + >>= function + | x when x <= 0 -> + Lwt.return [x1; x2; x3] + | _ -> ( + cmp x1 x3 + >|= function x when x <= 0 -> [x1; x3; x2] | _ -> [x3; x1; x2] + ) ) + | _ -> ( + cmp x1 x3 + >>= function + | x when x <= 0 -> + Lwt.return [x2; x1; x3] + | _ -> ( + cmp x2 x3 + >|= function x when x <= 0 -> [x2; x3; x1] | _ -> [x3; x2; x1] + ) ) ) + | (n, l) -> let n1 = n asr 1 in let n2 = n - n1 in let l2 = chop n1 l in - rev_sort n1 l >>= fun s1 -> - rev_sort n2 l2 >>= fun s2 -> - rev_merge_rev s1 s2 [] + rev_sort n1 l + >>= fun s1 -> rev_sort n2 l2 >>= fun s2 -> rev_merge_rev s1 s2 [] and rev_sort n l = - match n, l with - | 2, x1 :: x2 :: _ -> begin - cmp x1 x2 >|= function - | x when x > 0 -> [x1; x2] - | _ -> [x2; x1] - end - | 3, x1 :: x2 :: x3 :: _ -> begin - cmp x1 x2 >>= function - | x when x > 0 -> begin - cmp x2 x3 >>= function - | x when x > 0 -> Lwt.return [x1; x2; x3] - | _ -> - cmp x1 x3 >|= function - | x when x > 0 -> [x1; x3; x2] - | _ -> [x3; x1; x2] - end - | _ -> begin - cmp x1 x3 >>= function - | x when x > 0 -> Lwt.return [x2; x1; x3] - | _ -> - cmp x2 x3 >|= function - | x when x > 0 -> [x2; x3; x1] - | _ -> [x3; x2; x1] - end - end - | n, l -> + match (n, l) with + | (2, x1 :: x2 :: _) -> ( + cmp x1 x2 >|= function x when x > 0 -> [x1; x2] | _ -> [x2; x1] ) + | (3, x1 :: x2 :: x3 :: _) -> ( + cmp x1 x2 + >>= function + | x when x > 0 -> ( + cmp x2 x3 + >>= function + | x when x > 0 -> + Lwt.return [x1; x2; x3] + | _ -> ( + cmp x1 x3 + >|= function x when x > 0 -> [x1; x3; x2] | _ -> [x3; x1; x2] ) + ) + | _ -> ( + cmp x1 x3 + >>= function + | x when x > 0 -> + Lwt.return [x2; x1; x3] + | _ -> ( + cmp x2 x3 + >|= function x when x > 0 -> [x2; x3; x1] | _ -> [x3; x2; x1] ) + ) ) + | (n, l) -> let n1 = n asr 1 in let n2 = n - n1 in let l2 = chop n1 l in - sort n1 l >>= fun s1 -> - sort n2 l2 >>= fun s2 -> - rev_merge s1 s2 [] + sort n1 l >>= fun s1 -> sort n2 l2 >>= fun s2 -> rev_merge s1 s2 [] in let len = List.length l in if len < 2 then Lwt.return l else sort len l let sort = stable_sort -let unless cond f = - if cond then Lwt.return_unit else f () - - +let unless cond f = if cond then Lwt.return_unit else f () diff --git a/src/lib_stdlib/lwt_utils.mli b/src/lib_stdlib/lwt_utils.mli index 87933586a0003c2962be16e963ef570ccf541724..dbeb1e1ceb53d35d404ee213ee12b05a7b0d5e00 100644 --- a/src/lib_stdlib/lwt_utils.mli +++ b/src/lib_stdlib/lwt_utils.mli @@ -23,24 +23,22 @@ (* *) (*****************************************************************************) -val may: f:('a -> unit Lwt.t) -> 'a option -> unit Lwt.t +val may : f:('a -> unit Lwt.t) -> 'a option -> unit Lwt.t -val never_ending: unit -> 'a Lwt.t +val never_ending : unit -> 'a Lwt.t (** [worker name ~on_event ~run ~cancel] runs worker [run], and 
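The merge sort above threads an Lwt-valued comparison through [>>=]; a small sketch of calling the resulting [sort], with illustrative values:

  open Lwt.Infix

  let sorted : int list Lwt.t =
    Lwt_utils.sort (fun a b -> Lwt.return (compare a b)) [3; 1; 2]
    >|= fun l ->
    assert (l = [1; 2; 3]) ;
    l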
logs worker creation, ending or failure using [~on_event]. [cancel] is called if worker fails. *) -val worker: +val worker : string -> - on_event:(string -> - [ `Ended | `Failed of string | `Started ] -> unit Lwt.t) -> + on_event:(string -> [`Ended | `Failed of string | `Started] -> unit Lwt.t) -> run:(unit -> unit Lwt.t) -> cancel:(unit -> unit Lwt.t) -> unit Lwt.t -val trigger: unit -> (unit -> unit) * (unit -> unit Lwt.t) +val trigger : unit -> (unit -> unit) * (unit -> unit Lwt.t) -val sort: ('a -> 'a -> int Lwt.t) -> 'a list -> 'a list Lwt.t - -val unless: bool -> (unit -> unit Lwt.t) -> unit Lwt.t +val sort : ('a -> 'a -> int Lwt.t) -> 'a list -> 'a list Lwt.t +val unless : bool -> (unit -> unit Lwt.t) -> unit Lwt.t diff --git a/src/lib_stdlib/lwt_watcher.ml b/src/lib_stdlib/lwt_watcher.ml index ad594e92346b69e814e70c8c370f6248a9a68651..77b7534b260a4273a3aead6daa40f206f5e0d066 100644 --- a/src/lib_stdlib/lwt_watcher.ml +++ b/src/lib_stdlib/lwt_watcher.ml @@ -24,52 +24,47 @@ (*****************************************************************************) type 'a inner_stopper = { - id: int ; - push: ('a option -> unit) ; + id : int; + push : 'a option -> unit; mutable active : bool; - input : 'a input; + input : 'a input } -and 'a input = - { mutable watchers : 'a inner_stopper list; - mutable cpt : int; } +and 'a input = {mutable watchers : 'a inner_stopper list; mutable cpt : int} type stopper = unit -> unit -let create_input () = - { watchers = []; - cpt = 0 } +let create_input () = {watchers = []; cpt = 0} let shutdown_input input = - let { watchers ; _ } = input in - List.iter (fun w -> + let {watchers; _} = input in + List.iter + (fun w -> w.active <- false ; - w.push None - ) watchers ; + w.push None) + watchers ; input.cpt <- 0 ; input.watchers <- [] let create_fake_stream () = - let str, push = Lwt_stream.create () in - str, (fun () -> push None) + let (str, push) = Lwt_stream.create () in + (str, fun () -> push None) -let notify input info = - List.iter (fun w -> w.push (Some info)) input.watchers +let notify input info = List.iter (fun w -> w.push (Some info)) input.watchers let shutdown_output output = - if output.active then begin - output.active <- false; - output.push None; + if output.active then ( + output.active <- false ; + output.push None ; output.input.watchers <- - List.filter (fun w -> w.id <> output.id) output.input.watchers; - end + List.filter (fun w -> w.id <> output.id) output.input.watchers ) let create_stream input = - input.cpt <- input.cpt + 1; + input.cpt <- input.cpt + 1 ; let id = input.cpt in - let stream, push = Lwt_stream.create () in - let output = { id; push; input; active = true } in - input.watchers <- output :: input.watchers; - stream, (fun () -> shutdown_output output) + let (stream, push) = Lwt_stream.create () in + let output = {id; push; input; active = true} in + input.watchers <- output :: input.watchers ; + (stream, fun () -> shutdown_output output) let shutdown f = f () diff --git a/src/lib_stdlib/mBytes.ml b/src/lib_stdlib/mBytes.ml index 70a8a382a0d04903128a90498f9eae6d314e2485..3a9d31b976f70c42f1f3f957adc6924664c34352 100644 --- a/src/lib_stdlib/mBytes.ml +++ b/src/lib_stdlib/mBytes.ml @@ -24,47 +24,46 @@ (*****************************************************************************) include Bigstring - include EndianBigstring.BigEndian module LE = EndianBigstring.LittleEndian let make sz c = let buf = create sz in - fill buf c ; - buf + fill buf c ; buf -let to_hex t = - Hex.of_cstruct (Cstruct.of_bigarray t) +let to_hex t = 
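A sketch of the watcher API from lwt_watcher.ml a few hunks above, assuming its .mli exposes the same names ([input], [create_input], [create_stream], [notify], [shutdown]); the string payload is illustrative.

  let watcher_demo () =
    let input : string Lwt_watcher.input = Lwt_watcher.create_input () in
    let (stream, stopper) = Lwt_watcher.create_stream input in
    Lwt_watcher.notify input "hello" ;
    (* the notification is now queued on [stream] *)
    let next = Lwt_stream.get stream in
    Lwt_watcher.shutdown stopper ;
    next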
Hex.of_cstruct (Cstruct.of_bigarray t) -let of_hex hex = - Cstruct.to_bigarray (Hex.to_cstruct hex) +let of_hex hex = Cstruct.to_bigarray (Hex.to_cstruct hex) let pp_hex ppf s = - let `Hex hex = to_hex s in + let (`Hex hex) = to_hex s in Format.pp_print_string ppf hex -let cut ?(copy=false) sz bytes = +let cut ?(copy = false) sz bytes = let length = length bytes in - if length <= sz then - [bytes] (* if the result fits in the given sz *) + if length <= sz then [bytes] (* if the result fits in the given sz *) else let may_copy = if copy then Bigstring.copy else fun t -> t in - let nb_full = length / sz in (* nb of blocks of size sz *) - let sz_full = nb_full * sz in (* size of the full part *) - let acc = (* eventually init acc with a non-full block *) + let nb_full = length / sz in + (* nb of blocks of size sz *) + let sz_full = nb_full * sz in + (* size of the full part *) + let acc = + (* eventually init acc with a non-full block *) if sz_full = length then [] else [may_copy (sub bytes sz_full (length - sz_full))] in let rec split_full_blocks curr_upper_limit acc = let start = curr_upper_limit - sz in - assert (start >= 0); + assert (start >= 0) ; (* copy the block [ start, curr_upper_limit [ of size sz *) - let acc = (may_copy (sub bytes start sz)) :: acc in + let acc = may_copy (sub bytes start sz) :: acc in if start = 0 then acc else split_full_blocks start acc in split_full_blocks sz_full acc -include Compare.Make(struct - type nonrec t = t - let compare = Bigstring.compare - end) +include Compare.Make (struct + type nonrec t = t + + let compare = Bigstring.compare +end) diff --git a/src/lib_stdlib/mBytes.mli b/src/lib_stdlib/mBytes.mli index a96c7af134f2d87f27426d738c04cf423c8dbd06..956c79f6aceb6c33b861f026b2b507a730fd3e2c 100644 --- a/src/lib_stdlib/mBytes.mli +++ b/src/lib_stdlib/mBytes.mli @@ -29,14 +29,19 @@ Little-endian operations in the LE submodule. **) include module type of Bigstring + include Compare.S with type t := t include EndianBigstring.EndianBigstringSig + module LE : EndianBigstring.EndianBigstringSig val make : int -> char -> t + val of_hex : Hex.t -> t + val to_hex : t -> Hex.t + val pp_hex : Format.formatter -> t -> unit (** [cut ?copy size bytes] cut [bytes] the in a list of successive @@ -46,4 +51,4 @@ val pp_hex : Format.formatter -> t -> unit can be garbage-collected only when all the blocks are unreachable (because of the 'optimized' implementation of [sub] used internally. 
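A sketch of the [cut] function above, assuming [of_string] and [length] are available through the included Bigstring interface: cutting a 10-byte buffer into 4-byte blocks yields two full blocks and a trailing 2-byte block.

  let blocks = MBytes.cut 4 (MBytes.of_string "0123456789")

  let () = assert (List.map MBytes.length blocks = [4; 4; 2])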
*) -val cut: ?copy:bool -> int -> t -> t list +val cut : ?copy:bool -> int -> t -> t list diff --git a/src/lib_stdlib/memory.ml b/src/lib_stdlib/memory.ml index 3300d48a7987f1bde9f467797ea70dbdc4353979..40235f557c9875ee485d458086e6e0ecacac2d58 100644 --- a/src/lib_stdlib/memory.ml +++ b/src/lib_stdlib/memory.ml @@ -23,21 +23,16 @@ (*****************************************************************************) type proc_statm = { - page_size : int ; + page_size : int; size : int64; - resident : int64 ; - shared : int64 ; - text : int64 ; - lib : int64 ; - data : int64 ; + resident : int64; + shared : int64; + text : int64; + lib : int64; + data : int64; dt : int64 } -type ps_stats = { - page_size : int ; - mem : float ; - resident : int64 } +type ps_stats = {page_size : int; mem : float; resident : int64} -type mem_stats = - | Statm of proc_statm - | Ps of ps_stats +type mem_stats = Statm of proc_statm | Ps of ps_stats diff --git a/src/lib_stdlib/memory.mli b/src/lib_stdlib/memory.mli index 3300d48a7987f1bde9f467797ea70dbdc4353979..40235f557c9875ee485d458086e6e0ecacac2d58 100644 --- a/src/lib_stdlib/memory.mli +++ b/src/lib_stdlib/memory.mli @@ -23,21 +23,16 @@ (*****************************************************************************) type proc_statm = { - page_size : int ; + page_size : int; size : int64; - resident : int64 ; - shared : int64 ; - text : int64 ; - lib : int64 ; - data : int64 ; + resident : int64; + shared : int64; + text : int64; + lib : int64; + data : int64; dt : int64 } -type ps_stats = { - page_size : int ; - mem : float ; - resident : int64 } +type ps_stats = {page_size : int; mem : float; resident : int64} -type mem_stats = - | Statm of proc_statm - | Ps of ps_stats +type mem_stats = Statm of proc_statm | Ps of ps_stats diff --git a/src/lib_stdlib/option.ml b/src/lib_stdlib/option.ml index d28cfe4ed8702a84eb9bd72c30476c99b70101e9..9941fee131d4e1abda951c13a51f71e8f4f32ead 100644 --- a/src/lib_stdlib/option.ml +++ b/src/lib_stdlib/option.ml @@ -24,48 +24,42 @@ (* *) (*****************************************************************************) -let map ~f = function - | None -> None - | Some x -> Some (f x) +let map ~f = function None -> None | Some x -> Some (f x) -let apply ~f = function - | None -> None - | Some x -> f x +let apply ~f = function None -> None | Some x -> f x -let (>>=) x f = apply ~f x -let (>>|) x f = map ~f x +let ( >>= ) x f = apply ~f x -let iter ~f = function - | None -> () - | Some x -> f x +let ( >>| ) x f = map ~f x -let unopt ~default = function - | None -> default - | Some x -> x +let iter ~f = function None -> () | Some x -> f x -let unopt_map ~f ~default = function - | None -> default - | Some x -> f x +let unopt ~default = function None -> default | Some x -> x -let unopt_exn err = function - | Some x -> x - | _ -> raise err +let unopt_map ~f ~default = function None -> default | Some x -> f x + +let unopt_exn err = function Some x -> x | _ -> raise err let unopt_assert ~loc:(name, line, pos, _) = function - | Some v -> v - | None -> raise (Assert_failure (name, line, pos)) + | Some v -> + v + | None -> + raise (Assert_failure (name, line, pos)) -let first_some a b = match a, b with - | None, None -> None - | None, Some v -> Some v - | Some v, _ -> Some v +let first_some a b = + match (a, b) with + | (None, None) -> + None + | (None, Some v) -> + Some v + | (Some v, _) -> + Some v -let try_with f = - try Some (f ()) with _ -> None +let try_with f = try Some (f ()) with _ -> None let some x = Some x -let pp ?(default="") data_pp ppf 
opt = +let pp ?(default = "") data_pp ppf opt = unopt_map ~f:(fun i -> data_pp ppf i) ~default:(Format.pp_print_string ppf default) diff --git a/src/lib_stdlib/option.mli b/src/lib_stdlib/option.mli index 5947ca8cb62787a8a1df46ad7104848fdd894a89..7395914c8ea1e4a5841254d91fd49cd7802beac5 100644 --- a/src/lib_stdlib/option.mli +++ b/src/lib_stdlib/option.mli @@ -25,22 +25,23 @@ (*****************************************************************************) (** [Some (f x)] if input is [Some x], or [None] if it's [None] **) -val map: f:('a -> 'b) -> 'a option -> 'b option +val map : f:('a -> 'b) -> 'a option -> 'b option (** [(f x)] if input is [Some x], or [None] if it's [None] **) -val apply: f:('a -> 'b option) -> 'a option -> 'b option +val apply : f:('a -> 'b option) -> 'a option -> 'b option -val (>>=) : 'a option -> ('a -> 'b option) -> 'b option -val (>>|) : 'a option -> ('a -> 'b) -> 'b option +val ( >>= ) : 'a option -> ('a -> 'b option) -> 'b option + +val ( >>| ) : 'a option -> ('a -> 'b) -> 'b option (** Call [(f x)] if input is [Some x], noop if it's [None] **) -val iter: f:('a -> unit) -> 'a option -> unit +val iter : f:('a -> unit) -> 'a option -> unit (** [x] if input is [Some x], default if it's [None] **) -val unopt: default:'a -> 'a option -> 'a +val unopt : default:'a -> 'a option -> 'a (** [unopt_map f d x] is [f y] if [x] is [Some y], [d] if [x] is [None] **) -val unopt_map: f:('a -> 'b) -> default:'b -> 'a option -> 'b +val unopt_map : f:('a -> 'b) -> default:'b -> 'a option -> 'b (** [unopt_exn exn x] is [y] if [x] is [Some y], or raises [exn] if [x] is [None] *) val unopt_exn : exn -> 'a option -> 'a @@ -49,7 +50,7 @@ val unopt_exn : exn -> 'a option -> 'a val unopt_assert : loc:string * int * int * 'a -> 'b option -> 'b (** First input of form [Some x], or [None] if none **) -val first_some: 'a option -> 'a option -> 'a option +val first_some : 'a option -> 'a option -> 'a option (** [Some (f ())] if [f] does not raise, [None] otherwise *) val try_with : (unit -> 'a) -> 'a option @@ -59,4 +60,9 @@ val some : 'a -> 'a option (** [pp ~default data_pp ppf x] pretty-print value [x] using [data_pp] or [default] ([""] by default) string if there is no value. 
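Quick sketches of the option combinators defined above, with illustrative values:

  let three = Option.unopt ~default:0 (Some 3)

  let parsed =
    Option.(some "42" >>= fun s -> try_with (fun () -> int_of_string s))
  (* parsed = Some 42; a raising parse would give None *)

  let first = Option.first_some None (Some 1) (* Some 1 *)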
*) -val pp: ?default:string ->(Format.formatter -> 'a -> unit) -> Format.formatter -> 'a option -> unit +val pp : + ?default:string -> + (Format.formatter -> 'a -> unit) -> + Format.formatter -> + 'a option -> + unit diff --git a/src/lib_stdlib/registry.ml b/src/lib_stdlib/registry.ml index c76d20912d3a499422fc3ff3e9c76a66443ad403..d674adee0ecc860da9069f43ed702cf3450fc4e3 100644 --- a/src/lib_stdlib/registry.ml +++ b/src/lib_stdlib/registry.ml @@ -25,33 +25,49 @@ module type S = sig type k + type v - val register: k -> v -> unit - val alter: k -> (v -> v) -> unit - val remove: k -> unit - val query: k -> v option - val iter_p: (k -> v -> unit Lwt.t) -> unit Lwt.t - val fold: (k -> v -> 'a -> 'a) -> 'a -> 'a + + val register : k -> v -> unit + + val alter : k -> (v -> v) -> unit + + val remove : k -> unit + + val query : k -> v option + + val iter_p : (k -> v -> unit Lwt.t) -> unit Lwt.t + + val fold : (k -> v -> 'a -> 'a) -> 'a -> 'a end -module Make (M: sig type v include Map.OrderedType end) : S - with type k = M.t - and type v = M.v = -struct +module Make (M : sig + type v + + include Map.OrderedType +end) : S with type k = M.t and type v = M.v = struct + module Reg = Map.Make (M) - module Reg = Map.Make(M) type v = M.v + type k = Reg.key - let registry: v Reg.t ref = ref Reg.empty + + let registry : v Reg.t ref = ref Reg.empty + let register k v = registry := Reg.add k v !registry + let alter k f = match Reg.find_opt k !registry with - | None -> () - | Some v -> registry := Reg.add k (f v) !registry + | None -> + () + | Some v -> + registry := Reg.add k (f v) !registry + let remove k = registry := Reg.remove k !registry + let query k = Reg.find_opt k !registry - let iter_p f = Lwt.join (Reg.fold (fun k v acc -> (f k v) :: acc) !registry []) - let fold f a = Reg.fold f !registry a -end + let iter_p f = Lwt.join (Reg.fold (fun k v acc -> f k v :: acc) !registry []) + let fold f a = Reg.fold f !registry a +end diff --git a/src/lib_stdlib/registry.mli b/src/lib_stdlib/registry.mli index f8e0d89a1cc92e71e68a621149ae8d8872c64fd3..34a3dd04bba4442853d5fac5745ac50146b61a36 100644 --- a/src/lib_stdlib/registry.mli +++ b/src/lib_stdlib/registry.mli @@ -27,15 +27,24 @@ module type S = sig type k + type v - val register: k -> v -> unit - val alter: k -> (v -> v) -> unit - val remove: k -> unit - val query: k -> v option - val iter_p: (k -> v -> unit Lwt.t) -> unit Lwt.t - val fold: (k -> v -> 'a -> 'a) -> 'a -> 'a + + val register : k -> v -> unit + + val alter : k -> (v -> v) -> unit + + val remove : k -> unit + + val query : k -> v option + + val iter_p : (k -> v -> unit Lwt.t) -> unit Lwt.t + + val fold : (k -> v -> 'a -> 'a) -> 'a -> 'a end -module Make (M: sig type v include Map.OrderedType end) : S - with type k = M.t - and type v = M.v +module Make (M : sig + type v + + include Map.OrderedType +end) : S with type k = M.t and type v = M.v diff --git a/src/lib_stdlib/ring.ml b/src/lib_stdlib/ring.ml index 29d1e2835edf7e3d46bbfd9beda70e1a340c059e..763cb82b7cbbca3ba096d02e72683637610437f6 100644 --- a/src/lib_stdlib/ring.ml +++ b/src/lib_stdlib/ring.ml @@ -24,61 +24,57 @@ (*****************************************************************************) module Ring = struct - type 'a raw = - | Empty of int - | Inited of { - data : 'a array ; - mutable pos : int ; - } + type 'a raw = Empty of int | Inited of {data : 'a array; mutable pos : int} type 'a t = 'a raw ref let create size = - if size <= 0 then - invalid_arg "Ring.create: size must be positive" - else - ref (Empty size) + if size <= 0 then 
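The [Registry.Make] functor above keeps a mutable map keyed by any ordered type; a sketch instantiating it over string keys (the module and key names are illustrative):

  module Counters =
    Registry.Make (struct
      type v = int

      include String
    end)

  let () =
    Counters.register "requests" 0 ;
    Counters.alter "requests" succ ;
    assert (Counters.query "requests" = Some 1)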
invalid_arg "Ring.create: size must be positive" + else ref (Empty size) let add r v = match !r with | Empty size -> - r := Inited { data = Array.make size v ; pos = 0 } + r := Inited {data = Array.make size v; pos = 0} | Inited s -> s.pos <- - if s.pos = 2 * Array.length s.data - 1 then - Array.length s.data - else - s.pos + 1 ; + ( if s.pos = (2 * Array.length s.data) - 1 then Array.length s.data + else s.pos + 1 ) ; s.data.(s.pos mod Array.length s.data) <- v let add_and_return_erased r v = - let replaced = match !r with - | Empty _ -> None + let replaced = + match !r with + | Empty _ -> + None | Inited s -> if s.pos >= Array.length s.data - 1 then - Some (s.data.((s.pos + 1) mod Array.length s.data)) - else - None in + Some s.data.((s.pos + 1) mod Array.length s.data) + else None + in add r v ; replaced let clear r = match !r with - | Empty _ -> () - | Inited { data ; _ } -> + | Empty _ -> + () + | Inited {data; _} -> r := Empty (Array.length data) - let add_list r l = List.iter (add r) l let last r = match !r with - | Empty _ -> None - | Inited { data ; pos } -> Some data.(pos mod Array.length data) + | Empty _ -> + None + | Inited {data; pos} -> + Some data.(pos mod Array.length data) let fold r ~init ~f = match !r with - | Empty _ -> init - | Inited { data ; pos } -> + | Empty _ -> + init + | Inited {data; pos} -> let size = Array.length data in let acc = ref init in for i = 0 to min pos (size - 1) do @@ -86,15 +82,11 @@ module Ring = struct done ; !acc - let elements t = - fold t ~init:[] ~f:(fun acc elt -> elt :: acc) + let elements t = fold t ~init:[] ~f:(fun acc elt -> elt :: acc) exception Empty - let last_exn r = - match last r with - | None -> raise Empty - | Some d -> d + let last_exn r = match last r with None -> raise Empty | Some d -> d end include Ring @@ -102,45 +94,46 @@ include Ring (** Ring Buffer Table *) module type TABLE = sig type t + type v + val create : int -> t + val add : t -> v -> unit + val add_and_return_erased : t -> v -> v option + val mem : t -> v -> bool + val remove : t -> v -> unit + val clear : t -> unit + val elements : t -> v list end - (* fixed size set of Peers id. 
If the set exceed the maximal allowed capacity, the element that was added first is removed when a new one is added *) -module MakeTable (V: Hashtbl.HashedType) = struct - module Table = Hashtbl.Make(V) - - type raw = { - size : int ; - ring : V.t Ring.t ; - table : unit Table.t ; - } +module MakeTable (V : Hashtbl.HashedType) = struct + module Table = Hashtbl.Make (V) + + type raw = {size : int; ring : V.t Ring.t; table : unit Table.t} + type t = raw ref + type v = V.t - let create size = ref { - size; - ring = Ring.create size; - table = Table.create size } + let create size = + ref {size; ring = Ring.create size; table = Table.create size} - let add {contents = t } v = - Option.iter - (Ring.add_and_return_erased t.ring v) - ~f:(Table.remove t.table); + let add {contents = t} v = + Option.iter (Ring.add_and_return_erased t.ring v) ~f:(Table.remove t.table) ; Table.add t.table v () - let add_and_return_erased {contents = t } v = + + let add_and_return_erased {contents = t} v = match Ring.add_and_return_erased t.ring v with | None -> - Table.add t.table v () ; - None + Table.add t.table v () ; None | Some erased -> Table.remove t.table erased ; Table.add t.table v () ; @@ -148,16 +141,10 @@ module MakeTable (V: Hashtbl.HashedType) = struct let mem {contents = t} v = Table.mem t.table v - let remove {contents = t} v = - Table.remove t.table v + let remove {contents = t} v = Table.remove t.table v let clear ({contents = t} as tt) = - tt := { t with - ring = Ring.create t.size; - table = Table.create t.size - } - - let elements {contents = t} = - Table.fold (fun k _ acc -> k::acc) t.table [] + tt := {t with ring = Ring.create t.size; table = Table.create t.size} + let elements {contents = t} = Table.fold (fun k _ acc -> k :: acc) t.table [] end diff --git a/src/lib_stdlib/ring.mli b/src/lib_stdlib/ring.mli index 83e4f3b099c9cc372862a17a78fddcfbf22ccc75..df9dff46318020ccb6850ecc65d5a4907e01e1f8 100644 --- a/src/lib_stdlib/ring.mli +++ b/src/lib_stdlib/ring.mli @@ -63,6 +63,7 @@ val elements : 'a t -> 'a list (** Ring Buffer Table *) module type TABLE = sig type t + type v (** [create size] inizialize an empty ring *) @@ -71,6 +72,7 @@ module type TABLE = sig (** [add t v] add a value to the ring. If the ring already contains size elements, the first element is removed and [v] is added. *) val add : t -> v -> unit + val add_and_return_erased : t -> v -> v option (** [mem t v] check if v is in the ring. O(1) *) @@ -84,7 +86,6 @@ module type TABLE = sig (** [elements t] return the list of elements currently in the ring *) val elements : t -> v list - end -module MakeTable (V: Hashtbl.HashedType) : TABLE with type v = V.t +module MakeTable (V : Hashtbl.HashedType) : TABLE with type v = V.t diff --git a/src/lib_stdlib/tag.ml b/src/lib_stdlib/tag.ml index 3d0efacf28fd4352325c84782b07b60e76b9e7dd..41a3c0fa5c3ae4d42510b2780b1de46dac031710 100644 --- a/src/lib_stdlib/tag.ml +++ b/src/lib_stdlib/tag.ml @@ -27,8 +27,11 @@ type _ selector = .. 
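A sketch of the bounded ring reformatted in the hunks above, assuming [add_list] and [last] are exposed as in ring.ml: with capacity 2 only the last two additions survive, and once full [add_and_return_erased] reports the evicted element. The [MakeTable] instance below is likewise illustrative.

  let ring_demo () =
    let r = Ring.create 2 in
    Ring.add_list r [1; 2; 3] ;
    assert (Ring.last r = Some 3) ;
    (* 1 was already overwritten; adding 4 now evicts 2 *)
    assert (Ring.add_and_return_erased r 4 = Some 2)

  module Recent_peers = Ring.MakeTable (struct
    type t = string

    let hash = Hashtbl.hash

    let equal = String.equal
  end)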
module type DEF_ARG = sig val name : string + type t + val doc : string + val pp : Format.formatter -> t -> unit end @@ -36,148 +39,238 @@ module type DEF = sig include DEF_ARG type id - val id: id + + val id : id + type _ selector += Me : t selector val uid : int - end -module Def (X : DEF_ARG): DEF with type t = X.t = struct +module Def (X : DEF_ARG) : DEF with type t = X.t = struct include X type id = Id + let id = Id + type _ selector += Me : t selector let uid = Obj.(extension_id @@ extension_constructor @@ Me) - end type 'a def = (module DEF with type t = 'a) let def (type a) ?(doc = "undocumented") name pp = - (module Def(struct let name = name type t = a let doc = doc let pp = pp end): DEF with type t = a) - -type (_,_) eq = Refl : ('a,'a) eq - -let maybe_eq : type a b. a def -> b def -> (a,b) eq option = - fun s t -> - let module S = (val s) in - let module T = (val t) in - match S.Me with - | T.Me -> Some Refl - | _ -> None - -let selector_of : type a. a def -> a selector = fun d -> let module D = (val d) in D.Me -let name : type a. a def -> string = fun d -> let module D = (val d) in D.name -let doc : type a. a def -> string = fun d -> let module D = (val d) in D.doc -let printer : type a. a def -> Format.formatter -> a -> unit = fun d -> let module D = (val d) in D.pp + ( module Def (struct + let name = name + + type t = a + + let doc = doc + + let pp = pp + end) : DEF + with type t = a ) + +type (_, _) eq = Refl : ('a, 'a) eq + +let maybe_eq : type a b. a def -> b def -> (a, b) eq option = + fun s t -> + let module S = (val s) in + let module T = (val t) in + match S.Me with T.Me -> Some Refl | _ -> None + +let selector_of : type a. a def -> a selector = + fun d -> + let module D = (val d) in + D.Me + +let name : type a. a def -> string = + fun d -> + let module D = (val d) in + D.name + +let doc : type a. a def -> string = + fun d -> + let module D = (val d) in + D.doc + +let printer : type a. a def -> Format.formatter -> a -> unit = + fun d -> + let module D = (val d) in + D.pp + let pp_def ppf d = Format.fprintf ppf "tag:%s" (name d) module Key = struct type t = V : 'a def -> t + type s = S : 'a selector -> s + let compare (V k0) (V k1) = compare (S (selector_of k0)) (S (selector_of k1)) end -module TagSet = Map.Make(Key) +module TagSet = Map.Make (Key) type t = V : 'a def * 'a -> t + type binding = t + type set = binding TagSet.t let pp ppf (V (tag, v)) = Format.fprintf ppf "@[<1>(%a@ @[%a@])@]" pp_def tag (printer tag) v -let option_map f = function - | None -> None - | Some v -> Some (f v) +let option_map f = function None -> None | Some v -> Some (f v) -let option_bind f = function - | None -> None - | Some v -> f v +let option_bind f = function None -> None | Some v -> f v -let reveal2 : type a b. a def -> b def -> b -> a option = fun t u v -> - match maybe_eq t u with - | None -> None - | Some Refl -> Some v +let reveal2 : type a b. a def -> b def -> b -> a option = + fun t u v -> match maybe_eq t u with None -> None | Some Refl -> Some v -let reveal : 'a. 'a def -> binding -> 'a option = fun tag -> function - | V (another, v) -> reveal2 tag another v +let reveal : 'a. 'a def -> binding -> 'a option = + fun tag -> function V (another, v) -> reveal2 tag another v -let unveil : 'a. 'a def -> binding option -> 'a option = fun tag -> option_bind @@ reveal tag +let unveil : 'a. 'a def -> binding option -> 'a option = + fun tag -> option_bind @@ reveal tag let conceal : 'a. 'a def -> 'a -> binding = fun tag v -> V (tag, v) -let veil : 'a. 
'a def -> 'a option -> binding option = fun tag -> option_map @@ conceal tag +let veil : 'a. 'a def -> 'a option -> binding option = + fun tag -> option_map @@ conceal tag let empty = TagSet.empty + let is_empty = TagSet.is_empty + let mem tag = TagSet.mem (Key.V tag) + let add tag v = TagSet.add (Key.V tag) (V (tag, v)) -let update tag f = TagSet.update (Key.V tag) (fun b -> veil tag @@ f @@ unveil tag b) + +let update tag f = + TagSet.update (Key.V tag) (fun b -> veil tag @@ f @@ unveil tag b) + let singleton tag v = TagSet.singleton (Key.V tag) (V (tag, v)) + let remove tag = TagSet.remove (Key.V tag) + let rem = remove -type merger = { merger : 'a. 'a def -> 'a option -> 'a option -> 'a option } -let merge f = TagSet.merge @@ function - | Key.V tag -> fun a b -> veil tag @@ f.merger tag (unveil tag a) (unveil tag b) -type unioner = { unioner : 'a . 'a def -> 'a -> 'a -> 'a } -let union f = merge { merger = fun tag a b -> - match (a,b) with - | (Some aa, Some bb) -> Some (f.unioner tag aa bb) - | (Some _, None) -> a - | (None, _) -> b - } + +type merger = {merger : 'a. 'a def -> 'a option -> 'a option -> 'a option} + +let merge f = + TagSet.merge + @@ function + | Key.V tag -> + fun a b -> veil tag @@ f.merger tag (unveil tag a) (unveil tag b) + +type unioner = {unioner : 'a. 'a def -> 'a -> 'a -> 'a} + +let union f = + merge + { merger = + (fun tag a b -> + match (a, b) with + | (Some aa, Some bb) -> + Some (f.unioner tag aa bb) + | (Some _, None) -> + a + | (None, _) -> + b) } + (* no compare and equal, compare especially makes little sense *) let iter f = TagSet.iter (fun _ -> f) + let fold f = TagSet.fold (fun _ -> f) + let for_all p = TagSet.for_all (fun _ -> p) + let exists p = TagSet.exists (fun _ -> p) + let filter p = TagSet.filter (fun _ -> p) + let partition p = TagSet.partition (fun _ -> p) + let cardinal = TagSet.cardinal + let bindings s = List.map snd @@ TagSet.bindings s + let min_binding s = snd @@ TagSet.min_binding s + let min_binding_opt s = option_map snd @@ TagSet.min_binding_opt s + let max_binding s = snd @@ TagSet.max_binding s + let max_binding_opt s = option_map snd @@ TagSet.max_binding_opt s + let choose s = snd @@ TagSet.choose s + let choose_opt s = option_map snd @@ TagSet.choose_opt s -let split tag s = (fun (l,m,r) -> (l,unveil tag m,r)) @@ TagSet.split (Key.V tag) s + +let split tag s = + (fun (l, m, r) -> (l, unveil tag m, r)) @@ TagSet.split (Key.V tag) s + (* In order to match the usual interface for maps, `find` should be different from `find_opt` but `Logs` has `find_opt` called `find` so we favor that. *) let find tag s = option_bind (reveal tag) @@ TagSet.find_opt (Key.V tag) s + let find_opt tag s = option_bind (reveal tag) @@ TagSet.find_opt (Key.V tag) s + (* This would usually be called `find` but `Logs` has it with this name. We can't have it at both named because `Logs` has `find_opt` as `find`. 
*) -let get tag s = find_opt tag s |> function - | None -> invalid_arg (Format.asprintf "tag named %s not found in set" (name tag)) - | Some v -> v +let get tag s = + find_opt tag s + |> function + | None -> + invalid_arg (Format.asprintf "tag named %s not found in set" (name tag)) + | Some v -> + v + let find_first p s = snd @@ TagSet.find_first p s + let find_first_opt p s = option_map snd @@ TagSet.find_first_opt p s + let find_last p s = snd @@ TagSet.find_last p s + let find_last_opt p s = option_map snd @@ TagSet.find_last_opt p s + let map = TagSet.map + let mapi = TagSet.map -let pp_set ppf s = Format.( - fprintf ppf "@[<1>{"; - pp_print_list pp ppf (bindings s); + +let pp_set ppf s = + Format.( + fprintf ppf "@[<1>{" ; + pp_print_list pp ppf (bindings s) ; Format.fprintf ppf "}@]") module DSL = struct - type (_,_,_,_) arg = | A : ('x def * 'x) -> (('b -> 'x -> 'c) -> 'x -> 'd, 'b, 'c, 'd) arg - | S : ('x def * 'x) -> ('x -> 'd, 'b, 'c, 'd) arg - | T : ('x def * 'x) -> ('d, 'b, 'c, 'd) arg - let a tag v = A (tag,v) - let s tag v = S (tag,v) - let t tag v = T (tag,v) - - let pp_of_def (type a) tag = let module Tg = (val tag : DEF with type t = a) in Tg.pp - - let (-%): type a d. (?tags:set -> a) -> (a,Format.formatter,unit,d) arg -> (?tags:set -> d) = fun f -> function - | A (tag,v) -> (fun ?(tags=empty) -> f ~tags:(add tag v tags) (pp_of_def tag) v) [@warning "-16"] - | S (tag,v) -> (fun ?(tags=empty) -> f ~tags:(add tag v tags) v) [@warning "-16"] - | T (tag,v) -> (fun ?(tags=empty) -> f ~tags:(add tag v tags)) [@warning "-16"] + type (_, _, _, _) arg = + | A : ('x def * 'x) -> (('b -> 'x -> 'c) -> 'x -> 'd, 'b, 'c, 'd) arg + | S : ('x def * 'x) -> ('x -> 'd, 'b, 'c, 'd) arg + | T : ('x def * 'x) -> ('d, 'b, 'c, 'd) arg + + let a tag v = A (tag, v) + + let s tag v = S (tag, v) + + let t tag v = T (tag, v) + + let pp_of_def (type a) tag = + let module Tg = (val tag : DEF with type t = a) in + Tg.pp + + let ( -% ) : + type a d. + (?tags:set -> a) -> (a, Format.formatter, unit, d) arg -> ?tags:set -> d + = + fun f -> function + | A (tag, v) -> + fun [@warning "-16"] ?(tags = empty) -> + f ~tags:(add tag v tags) (pp_of_def tag) v + | S (tag, v) -> + fun [@warning "-16"] ?(tags = empty) -> f ~tags:(add tag v tags) v + | T (tag, v) -> + fun [@warning "-16"] ?(tags = empty) -> f ~tags:(add tag v tags) end diff --git a/src/lib_stdlib/tag.mli b/src/lib_stdlib/tag.mli index a35f0605f8e7d2e7b454945e4efecfaba793aecc..d5600667b4c45cbf020e2a90c142e2de442fcabb 100644 --- a/src/lib_stdlib/tag.mli +++ b/src/lib_stdlib/tag.mli @@ -44,8 +44,10 @@ type _ def val def : ?doc:string -> string -> (Format.formatter -> 'a -> unit) -> 'a def val name : 'a def -> string + val doc : 'a def -> string -val printer : 'a def -> (Format.formatter -> 'a -> unit) + +val printer : 'a def -> Format.formatter -> 'a -> unit (** Print the name of a tag definition. *) val pp_def : Format.formatter -> 'a def -> unit @@ -53,6 +55,7 @@ val pp_def : Format.formatter -> 'a def -> unit (** A binding consisting of a tag and value. If a `def` is a constructor of an extensible variant type, a `t` is a value of that type. 
*) type t = V : 'a def * 'a -> t + val pp : Format.formatter -> t -> unit module Key : sig @@ -68,40 +71,75 @@ end type set val empty : set + val is_empty : set -> bool + val mem : 'a def -> set -> bool + val add : 'a def -> 'a -> set -> set -val update : 'a def -> ('a option -> 'a option) -> (set -> set) + +val update : 'a def -> ('a option -> 'a option) -> set -> set + val singleton : 'a def -> 'a -> set + val remove : 'a def -> set -> set + val rem : 'a def -> set -> set -type merger = { merger : 'a. 'a def -> 'a option -> 'a option -> 'a option } + +type merger = {merger : 'a. 'a def -> 'a option -> 'a option -> 'a option} + val merge : merger -> set -> set -> set -type unioner = { unioner : 'a. 'a def -> 'a -> 'a -> 'a } + +type unioner = {unioner : 'a. 'a def -> 'a -> 'a -> 'a} + val union : unioner -> set -> set -> set + val iter : (t -> unit) -> set -> unit -val fold : (t -> 'b -> 'b) -> (set -> 'b -> 'b) -val for_all : (t -> bool) -> (set -> bool) -val exists : (t -> bool) -> (set -> bool) + +val fold : (t -> 'b -> 'b) -> set -> 'b -> 'b + +val for_all : (t -> bool) -> set -> bool + +val exists : (t -> bool) -> set -> bool + val filter : (t -> bool) -> set -> set -val partition : (t -> bool) -> set -> (set * set) + +val partition : (t -> bool) -> set -> set * set + val cardinal : set -> int + val min_binding : set -> t + val min_binding_opt : set -> t option + val max_binding : set -> t + val max_binding_opt : set -> t option + val choose : set -> t + val choose_opt : set -> t option + val split : 'a def -> set -> set * 'a option * set + val find_opt : 'a def -> set -> 'a option + val find : 'a def -> set -> 'a option + val get : 'a def -> set -> 'a + val find_first : (Key.t -> bool) -> set -> t + val find_first_opt : (Key.t -> bool) -> set -> t option + val find_last : (Key.t -> bool) -> set -> t + val find_last_opt : (Key.t -> bool) -> set -> t option + val map : (t -> t) -> set -> set + val mapi : (t -> t) -> set -> set + val pp_set : Format.formatter -> set -> unit (** DSL for logging messages. Opening this locally makes it easy to supply a number @@ -117,7 +155,7 @@ val pp_set : Format.formatter -> set -> unit -% a P2p_peer.Id.Logging.tag pipeline.peer_id) ]} *) module DSL : sig - type (_,_,_,_) arg + type (_, _, _, _) arg (** Use a semantic tag with a `%a` format, supplying the pretty printer from the tag. *) val a : 'v def -> 'v -> (('b -> 'v -> 'c) -> 'v -> 'd, 'b, 'c, 'd) arg @@ -129,5 +167,9 @@ module DSL : sig val t : 'v def -> 'v -> ('d, 'b, 'c, 'd) arg (** Perform the actual application of a tag to a format. 
*) - val (-%) : (?tags:set -> 'a) -> ('a,Format.formatter,unit,'d) arg -> (?tags:set -> 'd) + val ( -% ) : + (?tags:set -> 'a) -> + ('a, Format.formatter, unit, 'd) arg -> + ?tags:set -> + 'd end diff --git a/src/lib_stdlib/test/assert.ml b/src/lib_stdlib/test/assert.ml index 5930d3990bc8d713cc099ae87b7e4bc09ae29c8b..e176720f8f53cd4e3ca9580911379e29d20116e3 100644 --- a/src/lib_stdlib/test/assert.ml +++ b/src/lib_stdlib/test/assert.ml @@ -24,11 +24,11 @@ (*****************************************************************************) let fail expected given msg = - Format.kasprintf failwith - "@[%s@ expected: %s@ got: %s@]" msg expected given + Format.kasprintf failwith "@[%s@ expected: %s@ got: %s@]" msg expected given + let fail_msg fmt = Format.kasprintf (fail "" "") fmt let default_printer _ = "" -let equal ?(eq=(=)) ?(prn=default_printer) ?(msg="") x y = +let equal ?(eq = ( = )) ?(prn = default_printer) ?(msg = "") x y = if not (eq x y) then fail (prn x) (prn y) msg diff --git a/src/lib_stdlib/test/test_lwt_pipe.ml b/src/lib_stdlib/test/test_lwt_pipe.ml index f1ab67b07203967679e6a727dcbdad48879a337f..3b3a423e6fea4c9c8cbf7dc16f98f1a36b0e7b4b 100644 --- a/src/lib_stdlib/test/test_lwt_pipe.ml +++ b/src/lib_stdlib/test/test_lwt_pipe.ml @@ -30,20 +30,16 @@ let rec producer queue = function Format.eprintf "Done producing." ; Lwt.return_unit | n -> - Lwt_pipe.push queue () >>= fun () -> - producer queue (pred n) + Lwt_pipe.push queue () >>= fun () -> producer queue (pred n) let rec consumer queue = function | 0 -> Format.eprintf "Done consuming." ; Lwt.return_unit | n -> - Lwt_pipe.pop queue >>= fun _ -> - consumer queue (pred n) + Lwt_pipe.pop queue >>= fun _ -> consumer queue (pred n) -let rec gen acc f = function - | 0 -> acc - | n -> gen (f () :: acc) f (pred n) +let rec gen acc f = function 0 -> acc | n -> gen (f () :: acc) f (pred n) let run qsize nbp nbc p c = let q = Lwt_pipe.create ~size:(qsize, fun () -> qsize) () in @@ -57,20 +53,32 @@ let main () = let nb_consumers = ref 10 in let produced_per_producer = ref 10 in let consumed_per_consumer = ref 10 in - let spec = Arg.[ - "-qsize", Set_int qsize, "<int> Size of the pipe"; - "-nc", Set_int nb_consumers, "<int> Number of consumers"; - "-np", Set_int nb_producers, "<int> Number of producers"; - "-n", Set_int consumed_per_consumer, "<int> Number of consumed items per consumers"; - "-p", Set_int produced_per_producer, "<int> Number of produced items per producers"; - "-v", Unit (fun () -> Lwt_log_core.(add_rule "*" Info)), " Log up to info msgs"; - "-vv", Unit (fun () -> Lwt_log_core.(add_rule "*" Debug)), " Log up to debug msgs"; - ] + let spec = + Arg. 
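A sketch of defining and querying a semantic tag with the Tag module reformatted above; the tag name and value are illustrative.

  let worker_name : string Tag.def =
    Tag.def ~doc:"Name of the worker" "worker_name" Format.pp_print_string

  let tags = Tag.add worker_name "validator" Tag.empty

  let () = assert (Tag.find_opt worker_name tags = Some "validator")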
+ [ ("-qsize", Set_int qsize, "<int> Size of the pipe"); + ("-nc", Set_int nb_consumers, "<int> Number of consumers"); + ("-np", Set_int nb_producers, "<int> Number of producers"); + ( "-n", + Set_int consumed_per_consumer, + "<int> Number of consumed items per consumers" ); + ( "-p", + Set_int produced_per_producer, + "<int> Number of produced items per producers" ); + ( "-v", + Unit (fun () -> Lwt_log_core.(add_rule "*" Info)), + " Log up to info msgs" ); + ( "-vv", + Unit (fun () -> Lwt_log_core.(add_rule "*" Debug)), + " Log up to debug msgs" ) ] in let anon_fun _ = () in let usage_msg = "Usage: %s <num_peers>.\nArguments are:" in - Arg.parse spec anon_fun usage_msg; - run !qsize !nb_producers - !nb_consumers !produced_per_producer !consumed_per_consumer + Arg.parse spec anon_fun usage_msg ; + run + !qsize + !nb_producers + !nb_consumers + !produced_per_producer + !consumed_per_consumer let () = Lwt_main.run @@ main () diff --git a/src/lib_stdlib/test/test_tzList.ml b/src/lib_stdlib/test/test_tzList.ml index 56169cdab5b75125bdb21063ce802ddbb59b44a8..23e54dedfb3993a937bb59b58ad25b4ce1f607f3 100644 --- a/src/lib_stdlib/test/test_tzList.ml +++ b/src/lib_stdlib/test/test_tzList.ml @@ -24,47 +24,55 @@ (*****************************************************************************) let rec permut = function - | [] -> [[]] + | [] -> + [[]] | x :: xs -> let insert xs = let rec loop acc left right = match right with - | [] -> List.rev (x :: left) :: acc + | [] -> + List.rev (x :: left) :: acc | y :: ys -> - loop - ((List.rev_append left (x :: right)) :: acc) - (y :: left) ys in - loop [] [] xs in + loop (List.rev_append left (x :: right) :: acc) (y :: left) ys + in + loop [] [] xs + in List.concat (List.map insert (permut xs)) let test_take_n _ = - ListLabels.iter (permut [1;2;3;4;5;6;7;8;9]) ~f:begin fun xs -> - Assert.equal ~msg:__LOC__ (TzList.take_n ~compare 1 xs) [9] - end ; - ListLabels.iter (permut [1;2;3;4;5;6;7;8;9]) ~f:begin fun xs -> - Assert.equal ~msg:__LOC__ (TzList.take_n ~compare 3 xs) [7;8;9] - end ; + ListLabels.iter + (permut [1; 2; 3; 4; 5; 6; 7; 8; 9]) + ~f:(fun xs -> Assert.equal ~msg:__LOC__ (TzList.take_n ~compare 1 xs) [9]) ; + ListLabels.iter + (permut [1; 2; 3; 4; 5; 6; 7; 8; 9]) + ~f:(fun xs -> + Assert.equal ~msg:__LOC__ (TzList.take_n ~compare 3 xs) [7; 8; 9]) ; let inv_compare x y = compare y x in - ListLabels.iter (permut [1;2;3;4;5;6;7;8;9]) ~f:begin fun xs -> - Assert.equal ~msg:__LOC__ (TzList.take_n ~compare:inv_compare 3 xs) [3;2;1] - end ; + ListLabels.iter + (permut [1; 2; 3; 4; 5; 6; 7; 8; 9]) + ~f:(fun xs -> + Assert.equal + ~msg:__LOC__ + (TzList.take_n ~compare:inv_compare 3 xs) + [3; 2; 1]) ; (* less elements than the bound. *) - ListLabels.iter (permut [1;2;3;4;5;6;7;8;9]) ~f:begin fun xs -> - Assert.equal ~msg:__LOC__ (TzList.take_n ~compare 12 xs) [1;2;3;4;5;6;7;8;9] - end ; + ListLabels.iter + (permut [1; 2; 3; 4; 5; 6; 7; 8; 9]) + ~f:(fun xs -> + Assert.equal + ~msg:__LOC__ + (TzList.take_n ~compare 12 xs) + [1; 2; 3; 4; 5; 6; 7; 8; 9]) ; (* with duplicates. 
*) - ListLabels.iter (permut [1;2;3;3;4;5;5;5;6]) ~f:begin fun xs -> - Assert.equal ~msg:__LOC__ (TzList.take_n ~compare 3 xs) [5;5;6] - end ; - ListLabels.iter (permut [1;2;3;3;4;5;5;5;6]) ~f:begin fun xs -> - Assert.equal ~msg:__LOC__ (TzList.take_n ~compare 5 xs) [4;5;5;5;6] - end + ListLabels.iter + (permut [1; 2; 3; 3; 4; 5; 5; 5; 6]) + ~f:(fun xs -> + Assert.equal ~msg:__LOC__ (TzList.take_n ~compare 3 xs) [5; 5; 6]) ; + ListLabels.iter + (permut [1; 2; 3; 3; 4; 5; 5; 5; 6]) + ~f:(fun xs -> + Assert.equal ~msg:__LOC__ (TzList.take_n ~compare 5 xs) [4; 5; 5; 5; 6]) -let tests = [ - "take_n", `Quick, test_take_n ; -] +let tests = [("take_n", `Quick, test_take_n)] -let () = - Alcotest.run "stdlib" [ - "tzList", tests ; - ] +let () = Alcotest.run "stdlib" [("tzList", tests)] diff --git a/src/lib_stdlib/tzList.ml b/src/lib_stdlib/tzList.ml index 55d724ce421a01d19e7e2dec437ac61e6153b34d..149bb879715f70e6c6656418c5a764e3bbe79325 100644 --- a/src/lib_stdlib/tzList.ml +++ b/src/lib_stdlib/tzList.ml @@ -29,143 +29,168 @@ let filter_map f l = List.rev @@ List.fold_left (fun acc x -> may_cons acc (f x)) [] l let rev_sub l n = - if n < 0 then - invalid_arg "Utils.rev_sub: `n` must be non-negative."; + if n < 0 then invalid_arg "Utils.rev_sub: `n` must be non-negative." ; let rec append_rev_sub acc l = function - | 0 -> acc - | n -> - match l with - | [] -> acc - | hd :: tl -> append_rev_sub (hd :: acc) tl (n - 1) in + | 0 -> + acc + | n -> ( + match l with + | [] -> + acc + | hd :: tl -> + append_rev_sub (hd :: acc) tl (n - 1) ) + in append_rev_sub [] l n let sub l n = rev_sub l n |> List.rev -let hd_opt = function - | [] -> None - | h :: _ -> Some h +let hd_opt = function [] -> None | h :: _ -> Some h let rec last_exn = function - | [] -> raise Not_found - | [x] -> x - | _ :: xs -> last_exn xs - -let merge_filter2 - ?(finalize = List.rev) ?(compare = compare) - ?(f = Option.first_some) - l1 l2 = + | [] -> + raise Not_found + | [x] -> + x + | _ :: xs -> + last_exn xs + +let merge_filter2 ?(finalize = List.rev) ?(compare = compare) + ?(f = Option.first_some) l1 l2 = let sort = List.sort compare in let rec merge_aux acc = function - | [], [] -> finalize acc - | r1, [] -> finalize acc @ (filter_map (fun x1 -> f (Some x1) None) r1) - | [], r2 -> finalize acc @ (filter_map (fun x2 -> f None (Some x2)) r2) - | ((h1 :: t1) as r1), ((h2 :: t2) as r2) -> + | ([], []) -> + finalize acc + | (r1, []) -> + finalize acc @ filter_map (fun x1 -> f (Some x1) None) r1 + | ([], r2) -> + finalize acc @ filter_map (fun x2 -> f None (Some x2)) r2 + | ((h1 :: t1 as r1), (h2 :: t2 as r2)) -> if compare h1 h2 > 0 then merge_aux (may_cons acc (f None (Some h2))) (r1, t2) else if compare h1 h2 < 0 then merge_aux (may_cons acc (f (Some h1) None)) (t1, r2) - else (* m1 = m2 *) + else + (* m1 = m2 *) merge_aux (may_cons acc (f (Some h1) (Some h2))) (t1, t2) in merge_aux [] (sort l1, sort l2) let merge2 ?finalize ?compare ?(f = fun x1 _x1 -> x1) l1 l2 = - merge_filter2 ?finalize ?compare - ~f:(fun x1 x2 -> match x1, x2 with - | None, None -> assert false - | Some x1, None -> Some x1 - | None, Some x2 -> Some x2 - | Some x1, Some x2 -> Some (f x1 x2)) - l1 l2 + merge_filter2 + ?finalize + ?compare + ~f:(fun x1 x2 -> + match (x1, x2) with + | (None, None) -> + assert false + | (Some x1, None) -> + Some x1 + | (None, Some x2) -> + Some x2 + | (Some x1, Some x2) -> + Some (f x1 x2)) + l1 + l2 let rec remove nb = function - | [] -> [] - | l when nb <= 0 -> l - | _ :: tl -> remove (nb - 1) tl + | [] -> + [] + | l when nb <= 0 -> + l 
+ | _ :: tl -> + remove (nb - 1) tl let rec repeat n x = if n <= 0 then [] else x :: repeat (pred n) x let split_n n l = let rec loop acc n = function - | [] -> l, [] - | rem when n <= 0 -> List.rev acc, rem - | x :: xs -> loop (x :: acc) (pred n) xs in + | [] -> + (l, []) + | rem when n <= 0 -> + (List.rev acc, rem) + | x :: xs -> + loop (x :: acc) (pred n) xs + in loop [] n l let take_n_unsorted n l = fst (split_n n l) -module Bounded(E: Set.OrderedType) : sig - +module Bounded (E : Set.OrderedType) : sig type t - val create: int -> t - val insert: E.t -> t -> unit - val get: t -> E.t list -end = struct + val create : int -> t + val insert : E.t -> t -> unit + + val get : t -> E.t list +end = struct (* TODO one day replace the list by an heap array *) - type t = { - bound : int ; - mutable size : int ; - mutable data : E.t list ; - } + type t = {bound : int; mutable size : int; mutable data : E.t list} let create bound = if bound <= 0 then invalid_arg "Utils.Bounded(_).create" ; - { bound ; size = 0 ; data = [] } + {bound; size = 0; data = []} let rec push x = function - | [] -> [x] - | (y :: xs) as ys -> - if E.compare x y <= 0 - then x :: ys - else y :: push x xs + | [] -> + [x] + | y :: xs as ys -> + if E.compare x y <= 0 then x :: ys else y :: push x xs let insert x t = - if t.size < t.bound then begin + if t.size < t.bound then ( t.size <- t.size + 1 ; - t.data <- push x t.data - end else begin + t.data <- push x t.data ) + else match t.data with - | [] -> assert false + | [] -> + assert false | hd :: tl -> - if E.compare hd x < 0 then - t.data <- push x tl - end - - let get { data ; _ } = data + if E.compare hd x < 0 then t.data <- push x tl + let get {data; _} = data end let take_n_sorted (type a) compare n l = - let module B = Bounded(struct type t = a let compare = compare end) in + let module B = Bounded (struct + type t = a + + let compare = compare + end) in let t = B.create n in List.iter (fun x -> B.insert x t) l ; B.get t let take_n ?compare n l = match compare with - | None -> take_n_unsorted n l - | Some compare -> take_n_sorted compare n l + | None -> + take_n_unsorted n l + | Some compare -> + take_n_sorted compare n l let select n l = let rec loop n acc = function - | [] -> invalid_arg "Utils.select" - | x :: xs when n <= 0 -> x, List.rev_append acc xs - | x :: xs -> loop (pred n) (x :: acc) xs + | [] -> + invalid_arg "Utils.select" + | x :: xs when n <= 0 -> + (x, List.rev_append acc xs) + | x :: xs -> + loop (pred n) (x :: acc) xs in loop n [] l -let shift = function - | [] -> [] - | hd :: tl -> tl@[hd] +let shift = function [] -> [] | hd :: tl -> tl @ [hd] -let rec product a b = match a with - | [] -> [] - | hd :: tl -> (List.map (fun x -> (hd , x)) b) @ product tl b +let rec product a b = + match a with + | [] -> + [] + | hd :: tl -> + List.map (fun x -> (hd, x)) b @ product tl b let shuffle l = - l |> List.map (fun d -> (Random.bits () , d)) + l + |> List.map (fun d -> (Random.bits (), d)) |> List.sort (fun (x, _) (y, _) -> compare x y) |> List.map snd diff --git a/src/lib_stdlib/tzList.mli b/src/lib_stdlib/tzList.mli index 81eb44abd899f5375c4d3befd054cc0852bd28c5..33e66e14043e33d7838de18876c9e7caa1c30c37 100644 --- a/src/lib_stdlib/tzList.mli +++ b/src/lib_stdlib/tzList.mli @@ -24,10 +24,10 @@ (*****************************************************************************) (** [remove nb list] remove the first [nb] elements from the list [list]. 
*) -val remove: int -> 'a list -> 'a list +val remove : int -> 'a list -> 'a list (** [repeat n x] is a list of [n] [x]'s **) -val repeat: int -> 'a -> 'a list +val repeat : int -> 'a -> 'a list (** [shift (hd :: tl)] computes [tl @ [hd]] *) val shift : 'a list -> 'a list @@ -37,39 +37,41 @@ val product : 'a list -> 'b list -> ('a * 'b) list (** [take_n n l] returns the [n] first elements of [l]. When [compare] is provided, it returns the [n] greatest element of [l]. *) -val take_n: ?compare:('a -> 'a -> int) -> int -> 'a list -> 'a list +val take_n : ?compare:('a -> 'a -> int) -> int -> 'a list -> 'a list (** [split_n n l] is a pair of lists [(j, k)] where [j] contains the [n] first elements of [l] and [k] the remainder elements. If [l] has less than or exactly [n] elements, [j] is [l] and [k] is [[]]. *) -val split_n: int -> 'a list -> 'a list * 'a list +val split_n : int -> 'a list -> 'a list * 'a list (** Bounded sequence: keep only the [n] greatest elements. *) -module Bounded(E: Set.OrderedType) : sig +module Bounded (E : Set.OrderedType) : sig type t - val create: int -> t - val insert: E.t -> t -> unit - val get: t -> E.t list + + val create : int -> t + + val insert : E.t -> t -> unit + + val get : t -> E.t list end (** [select n l] is ([n]th element of [l], [l] without that element) **) -val select: int -> 'a list -> 'a * 'a list - +val select : int -> 'a list -> 'a * 'a list (** [filter_map f l] is [[y for x in l where (f x) = Some y]] **) -val filter_map: ('a -> 'b option) -> 'a list -> 'b list +val filter_map : ('a -> 'b option) -> 'a list -> 'b list (** [rev_sub l n] is [List.rev l] capped to max [n] elements *) val rev_sub : 'a list -> int -> 'a list (** [sub l n] is [l] capped to max [n] elements *) -val sub: 'a list -> int -> 'a list +val sub : 'a list -> int -> 'a list (** Like [List.hd], but [Some hd] or [None] if empty **) -val hd_opt: 'a list -> 'a option +val hd_opt : 'a list -> 'a option (** Last elt of list, or raise Not_found if empty **) -val last_exn: 'a list -> 'a +val last_exn : 'a list -> 'a (** [merge_filter2 ~compare ~f l1 l2] merges two lists ordered by [compare] and whose items can be merged with [f]. 
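Sketches of the list helpers above, with illustrative values: without [~compare], [take_n] is a plain prefix; with it, it keeps the [n] greatest elements in increasing order, exactly as the tests in test_tzList.ml exercise.

  let () =
    assert (TzList.take_n 2 [1; 2; 3; 4] = [1; 2]) ;
    assert (TzList.take_n ~compare 2 [3; 1; 4; 2] = [3; 4]) ;
    assert (TzList.split_n 2 [1; 2; 3; 4] = ([1; 2], [3; 4]))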
Item is discarded or kept whether @@ -78,7 +80,8 @@ val merge_filter2 : ?finalize:('a list -> 'a list) -> ?compare:('a -> 'a -> int) -> ?f:('a option -> 'a option -> 'a option) -> - 'a list -> 'a list -> + 'a list -> + 'a list -> 'a list (** [merge2 ~compare ~f l1 l2] merges two lists ordered by [compare] and @@ -87,7 +90,8 @@ val merge2 : ?finalize:('a list -> 'a list) -> ?compare:('a -> 'a -> int) -> ?f:('a -> 'a -> 'a) -> - 'a list -> 'a list -> + 'a list -> + 'a list -> 'a list (** [shuffle l] is a list that contains the same elements as [l] but in a random diff --git a/src/lib_stdlib/tzString.ml b/src/lib_stdlib/tzString.ml index f3d006bf21eea513a078c7de20182ebd95183fc5..82a9ed9d570738c6c66a4db987d5851d223da7e5 100644 --- a/src/lib_stdlib/tzString.ml +++ b/src/lib_stdlib/tzString.ml @@ -29,37 +29,24 @@ module Map = Map.Make (String) let split delim ?(dup = true) ?(limit = max_int) path = let l = String.length path in let rec do_slashes acc limit i = - if i >= l then - List.rev acc - else if String.get path i = delim then - if dup then - do_slashes acc limit (i + 1) - else - do_split acc limit (i + 1) - else - do_split acc limit i + if i >= l then List.rev acc + else if path.[i] = delim then + if dup then do_slashes acc limit (i + 1) else do_split acc limit (i + 1) + else do_split acc limit i and do_split acc limit i = if limit <= 0 then - if i = l then - List.rev acc - else - List.rev (String.sub path i (l - i) :: acc) - else - do_component acc (pred limit) i i + if i = l then List.rev acc + else List.rev (String.sub path i (l - i) :: acc) + else do_component acc (pred limit) i i and do_component acc limit i j = if j >= l then - if i = j then - List.rev acc - else - List.rev (String.sub path i (j - i) :: acc) - else if String.get path j = delim then + if i = j then List.rev acc + else List.rev (String.sub path i (j - i) :: acc) + else if path.[j] = delim then do_slashes (String.sub path i (j - i) :: acc) limit j - else - do_component acc limit i (j + 1) in - if limit > 0 then - do_slashes [] limit 0 - else - [ path ] + else do_component acc limit i (j + 1) + in + if limit > 0 then do_slashes [] limit 0 else [path] let split_path path = split '/' path @@ -71,23 +58,17 @@ let has_prefix ~prefix s = let remove_prefix ~prefix s = let x = String.length prefix in let n = String.length s in - if n >= x && String.sub s 0 x = prefix then - Some (String.sub s x (n - x)) - else - None + if n >= x && String.sub s 0 x = prefix then Some (String.sub s x (n - x)) + else None let common_prefix s1 s2 = let last = min (String.length s1) (String.length s2) in let rec loop i = - if last <= i then last - else if s1.[i] = s2.[i] then - loop (i+1) - else - i in + if last <= i then last else if s1.[i] = s2.[i] then loop (i + 1) else i + in loop 0 -let mem_char s c = - String.index_opt s c <> None +let mem_char s c = String.index_opt s c <> None let fold_left f init s = let acc = ref init in diff --git a/src/lib_stdlib/tzString.mli b/src/lib_stdlib/tzString.mli index b0971a9e842d423b2bc78c806b70b1cc524dc332..df184b66c3125ac38441f284be0230c722ca48ef 100644 --- a/src/lib_stdlib/tzString.mli +++ b/src/lib_stdlib/tzString.mli @@ -24,28 +24,29 @@ (*****************************************************************************) module Set : Set.S with type elt = string + module Map : Map.S with type key = string (** Splits a string on slashes, grouping multiple slashes, and ignoring slashes at the beginning and end of string. 
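Sketches of the string helpers above, with illustrative values; with [~dup:false], consecutive delimiters produce empty components instead of being grouped.

  let () =
    assert (TzString.split_path "/a//b/" = ["a"; "b"]) ;
    assert (TzString.split ',' ~dup:false "a,,b" = ["a"; ""; "b"]) ;
    assert (TzString.remove_prefix ~prefix:"foo." "foo.bar" = Some "bar") ;
    assert (TzString.common_prefix "block" "blob" = 3)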
*) -val split_path: string -> string list +val split_path : string -> string list (** Splits a string on a delimiter character, grouping multiple delimiters, and ignoring delimiters at the beginning and end of string, if [limit] is passed, stops after [limit] split(s). *) -val split: char -> ?dup:bool -> ?limit: int -> string -> string list +val split : char -> ?dup:bool -> ?limit:int -> string -> string list (** [true] if input has prefix **) -val has_prefix: prefix:string -> string -> bool +val has_prefix : prefix:string -> string -> bool (** Some (input with [prefix] removed), if string has [prefix], else [None] **) -val remove_prefix: prefix:string -> string -> string option +val remove_prefix : prefix:string -> string -> string option (** Length of common prefix of input strings *) -val common_prefix: string -> string -> int +val common_prefix : string -> string -> int (** Test whether a string contains a given character *) -val mem_char: string -> char -> bool +val mem_char : string -> char -> bool (** Functional iteration over the characters of a string from first to last *) val fold_left : ('a -> char -> 'a) -> 'a -> string -> 'a diff --git a/src/lib_stdlib/utils.ml b/src/lib_stdlib/utils.ml index c0fcb6450587bc101ff86597ce8526fbb219164e..0b0e677218accdbf12ebb1ed5b517996788a30ee 100644 --- a/src/lib_stdlib/utils.ml +++ b/src/lib_stdlib/utils.ml @@ -24,46 +24,48 @@ (*****************************************************************************) module Infix = struct + let ( << ) g f a = g (f a) - let (<<) g f = fun a -> g (f a) - - let (--) i j = - let rec loop acc j = - if j < i then acc else loop (j :: acc) (pred j) in + let ( -- ) i j = + let rec loop acc j = if j < i then acc else loop (j :: acc) (pred j) in loop [] j - end let nbsp = Re.(compile (str "\xC2\xA0")) + let display_paragraph ppf description = - Format.fprintf ppf "@[%a@]" - (Format.pp_print_list ~pp_sep:Format.pp_print_newline - (fun ppf line -> - Format.pp_print_list ~pp_sep:Format.pp_print_space - (fun ppf w -> - (* replace   by real spaces... *) - Format.fprintf ppf "%s@ " - (Re.replace ~all:true nbsp ~f:(fun _ -> " ") w)) - ppf - (TzString.split ' ' line))) + Format.fprintf + ppf + "@[%a@]" + (Format.pp_print_list ~pp_sep:Format.pp_print_newline (fun ppf line -> + Format.pp_print_list + ~pp_sep:Format.pp_print_space + (fun ppf w -> + (* replace   by real spaces... *) + Format.fprintf + ppf + "%s@ " + (Re.replace ~all:true nbsp ~f:(fun _ -> " ") w)) + ppf + (TzString.split ' ' line))) (TzString.split ~dup:false '\n' description) -let finalize f g = try let res = f () in g (); res with exn -> g (); raise exn +let finalize f g = + try + let res = f () in + g () ; res + with exn -> g () ; raise exn let hide_progress_line s = let len = String.length s in if len > 0 then Printf.eprintf "\r%*s\r" len "" -let display_progress ?(refresh_rate = 1,1) fmt = - let prnt = - fun s -> - if Unix.isatty Unix.stderr then - let index, rate = refresh_rate in - if index mod rate == 0 then - begin - hide_progress_line s; - Format.eprintf "%s%!" s - end +let display_progress ?(refresh_rate = (1, 1)) fmt = + let prnt s = + if Unix.isatty Unix.stderr then + let (index, rate) = refresh_rate in + if index mod rate == 0 then ( + hide_progress_line s ; Format.eprintf "%s%!" 
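Sketches of the helpers above: the right-to-left composition and integer range from [Infix], and [finalize] guaranteeing a cleanup action; the file-handling wrapper is illustrative only.

  open Utils.Infix

  let range = 1 -- 4 (* [1; 2; 3; 4] *)

  let answer = (string_of_int << succ) 41 (* "42" *)

  let with_in_channel path f =
    let ic = open_in path in
    Utils.finalize (fun () -> f ic) (fun () -> close_in ic)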
s ) in Format.kasprintf prnt fmt diff --git a/src/lib_stdlib/utils.mli b/src/lib_stdlib/utils.mli index 4c4bf6cbc63f26bd25699f26d2d30220e5238c9a..8e9f3ee8e0f03aba711176259974cae7130d43f4 100644 --- a/src/lib_stdlib/utils.mli +++ b/src/lib_stdlib/utils.mli @@ -24,24 +24,22 @@ (*****************************************************************************) module Infix : sig - (** Compose functions from right to left. *) - val (<<) : ('b -> 'c) -> ('a -> 'b) -> 'a -> 'c + val ( << ) : ('b -> 'c) -> ('a -> 'b) -> 'a -> 'c (** Sequence: [i--j] is the sequence [i;i+1;...;j-1;j] *) - val (--) : int -> int -> int list - + val ( -- ) : int -> int -> int list end (** Print a paragraph in a box **) -val display_paragraph: Format.formatter -> string -> unit +val display_paragraph : Format.formatter -> string -> unit (** [finalize f g ] ensures g() called after f(), even if exception raised **) -val finalize: (unit -> 'a) -> (unit -> unit) -> 'a +val finalize : (unit -> 'a) -> (unit -> unit) -> 'a (** Print string over the current line **) -val display_progress: ?refresh_rate: int * int -> - ('a, Format.formatter, unit, unit) format4 -> 'a +val display_progress : + ?refresh_rate:int * int -> ('a, Format.formatter, unit, unit) format4 -> 'a (** Finalizes progress display **) -val display_progress_end: unit -> unit +val display_progress_end : unit -> unit diff --git a/src/lib_stdlib/weakRingTable.ml b/src/lib_stdlib/weakRingTable.ml index 1dab7529316a154504cb4603929d6078707c2fc0..9a540f663d7f82724130a5eba8793d9c0f5f03c6 100644 --- a/src/lib_stdlib/weakRingTable.ml +++ b/src/lib_stdlib/weakRingTable.ml @@ -22,80 +22,86 @@ (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************) -module type S = -sig +module type S = sig type 'a t + type key - val create: int -> 'a t - val add: 'a t -> key -> 'a -> unit - val add_and_return_erased: 'a t -> key -> 'a -> key option - val iter: (key -> 'a -> unit) -> 'a t -> unit - val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b - val find_opt: 'a t -> key -> 'a option - val remove: 'a t -> key -> unit + + val create : int -> 'a t + + val add : 'a t -> key -> 'a -> unit + + val add_and_return_erased : 'a t -> key -> 'a -> key option + + val iter : (key -> 'a -> unit) -> 'a t -> unit + + val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b + + val find_opt : 'a t -> key -> 'a option + + val remove : 'a t -> key -> unit + val length : 'a t -> int end +module Make (M : Hashtbl.HashedType) = struct + module Table = Ephemeron.K1.Make (struct + type t = int + + let hash a = a + + let equal = ( = ) + end) + module Ring = Ring.MakeTable (struct + type t = int * M.t -module Make(M: Hashtbl.HashedType) = struct + let hash (i, _) = i + + let equal = ( = ) + end) - module Table = Ephemeron.K1.Make( - struct - type t = int - let hash a = a - let equal = (=) - end) - module Ring = Ring.MakeTable( - struct - type t = int * M.t - let hash (i,_) = i - let equal = (=) - end) type key = M.t - module Visit_tracking = Set.Make( - struct - type t = int - let compare = Pervasives.compare - end) + module Visit_tracking = Set.Make (struct + type t = int + + let compare = Pervasives.compare + end) - type 'a t = { - table : 'a Table.t ; - ring : Ring.t ; - } + type 'a t = {table : 'a Table.t; ring : Ring.t} - let create n = { table = Table.create n ; ring = Ring.create n } + let create n = {table = Table.create n; ring = Ring.create n} - let add { ring ; table } k v = + let add {ring; table} k v = let i = M.hash k in - Ring.add ring 
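(* Aside: a minimal sketch of the [Utils.Infix] operators and [finalize]
   declared above.  [g << f] composes right to left, [i -- j] is the inclusive
   integer range, and [finalize f g] runs [g] even when [f] raises. *)
let () =
  let open Utils.Infix in
  assert (1 -- 4 = [1; 2; 3; 4]) ;
  assert ((string_of_int << succ) 41 = "42") ;
  let cleaned = ref false in
  let result = Utils.finalize (fun () -> 6 * 7) (fun () -> cleaned := true) in
  assert (result = 42 && !cleaned)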
(i,k) ; + Ring.add ring (i, k) ; Table.replace table i v - let add_and_return_erased { ring ; table } k v = + let add_and_return_erased {ring; table} k v = let i = M.hash k in - let erased = - Option.map ~f:snd (Ring.add_and_return_erased ring (i, k)) in - Table.replace table i v ; - erased + let erased = Option.map ~f:snd (Ring.add_and_return_erased ring (i, k)) in + Table.replace table i v ; erased - let find_opt { table ; _ } k = + let find_opt {table; _} k = let i = M.hash k in Table.find_opt table i - let fold f { table ; ring } acc = + let fold f {table; ring} acc = let elts = Ring.elements ring in let (res, _) = List.fold_left (fun (acc, visited) (i, k) -> - if Visit_tracking.mem i visited then - (acc, visited) - else - match Table.find_opt table i with - | None -> (acc, visited) - | Some elt -> (f k elt acc, Visit_tracking.add i visited)) + if Visit_tracking.mem i visited then (acc, visited) + else + match Table.find_opt table i with + | None -> + (acc, visited) + | Some elt -> + (f k elt acc, Visit_tracking.add i visited)) (acc, Visit_tracking.empty) - elts in + elts + in res let iter f t = fold (fun k v () -> f k v) t () @@ -104,6 +110,5 @@ module Make(M: Hashtbl.HashedType) = struct let i = M.hash k in Table.remove t.table i - let length { table ; _ } = Table.length table - + let length {table; _} = Table.length table end diff --git a/src/lib_stdlib/weakRingTable.mli b/src/lib_stdlib/weakRingTable.mli index 8e1298a275ac0819166b789cfa815c8bcce529be..5368cb82e1c83369e2886bed214776242bd7f466 100644 --- a/src/lib_stdlib/weakRingTable.mli +++ b/src/lib_stdlib/weakRingTable.mli @@ -22,50 +22,48 @@ (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************) -module type S = -sig +module type S = sig type 'a t + type key - val create: int -> 'a t (** [create n] is a table with at most [n] elements except when it has more. *) + val create : int -> 'a t - val add: 'a t -> key -> 'a -> unit (** [add t k v] adds a mapping from key [k] to value [v] in the table. NOTE: when n values are bound to the same key, it may count as up to n elements. However, NOTE: when n values are bound to the same key, only the last binding can be found with [find_opt] or traversed with [fold]. *) + val add : 'a t -> key -> 'a -> unit - val add_and_return_erased: 'a t -> key -> 'a -> key option + val add_and_return_erased : 'a t -> key -> 'a -> key option - val iter: (key -> 'a -> unit) -> 'a t -> unit + val iter : (key -> 'a -> unit) -> 'a t -> unit - val fold: (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b (** [fold f t acc] folds the function [f] and value [acc] through the recently added elements of [t]. It never folds over more elements than the size bound of the table, even if the table temporarily holds more elements. *) + val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b - val find_opt: 'a t -> key -> 'a option (** [find_opt t k] is [Some v] if [k] is bound to [v] in [t] and [None] otherwise. A key [k] is bound to a value [v] in [t] if [add t k v] has been called and not too many other bindings have been added since then. *) + val find_opt : 'a t -> key -> 'a option - val remove: 'a t -> key -> unit (** [remove t k] removes the binding from [key] to the associated element in [t]. Note that you may still be able to find the element using [find_opt] for some time. *) + val remove : 'a t -> key -> unit - val length: 'a t -> int (** [length t] is the number of elements currently in [t], including those that may be garbage collected. 
*) - + val length : 'a t -> int end - -module Make (K: Hashtbl.HashedType): S with type key = K.t (** A bounded table which optimistically cheats on the bound and sometimes counts wrong. Specifically, the table retains a bounded number of elements. It will also retain more if given more than that, but it will always drop back to the bound if the garbage collector intervenes. *) +module Make (K : Hashtbl.HashedType) : S with type key = K.t diff --git a/src/lib_stdlib_unix/.ocamlformat b/src/lib_stdlib_unix/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_stdlib_unix/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_stdlib_unix/file_event_sink.ml b/src/lib_stdlib_unix/file_event_sink.ml index c72fc4201a04ade26ea6f06f41ed22c367000c5c..a479ff32cccbb168142a72f2b007930774e9caae 100644 --- a/src/lib_stdlib_unix/file_event_sink.ml +++ b/src/lib_stdlib_unix/file_event_sink.ml @@ -26,33 +26,43 @@ module Micro_seconds : sig (** Module with time-stamps with “at least micro-seconds” precision. *) type t = private float + val now : unit -> t - val of_float: float -> t + + val of_float : float -> t + val encoding : t Data_encoding.t + val date_string : t -> string * string end = struct (* Time.t is in seconds, we want more precision. *) type t = float + let now () = Unix.gettimeofday () + let of_float f = f + let encoding = let open Data_encoding in conv (* Cf. https://github.com/OCamlPro/ocplib-json-typed/issues/25 *) (fun f -> f *. 1_000_000. |> Int64.of_float) (fun i64 -> Int64.to_float i64 /. 1_000_000.) int64 + let date_string time_value = let open Unix in let open Printf in let tm = gmtime time_value in - (sprintf "%04d%02d%02d" (1900 + tm.tm_year) - (tm.tm_mon + 1) tm.tm_mday, - sprintf "%02d%02d%02d-%06d" tm.tm_hour tm.tm_min tm.tm_sec - ((time_value -. floor time_value) *. 1_000_000. |> int_of_float)) + ( sprintf "%04d%02d%02d" (1900 + tm.tm_year) (tm.tm_mon + 1) tm.tm_mday, + sprintf + "%02d%02d%02d-%06d" + tm.tm_hour + tm.tm_min + tm.tm_sec + ((time_value -. floor time_value) *. 1_000_000. 
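(* Aside: a sketch of instantiating [WeakRingTable.Make] over string keys, as
   the interface above allows.  Only the most recent binding is asserted:
   older bindings may be dropped once the ring bound is exceeded or the GC
   runs, which is the documented behaviour of this table. *)
module String_cache = WeakRingTable.Make (struct
  type t = string

  let hash = Hashtbl.hash

  let equal = String.equal
end)

let () =
  let t = String_cache.create 2 in
  String_cache.add t "a" 1 ;
  String_cache.add t "b" 2 ;
  String_cache.add t "c" 3 ;
  assert (String_cache.find_opt t "c" = Some 3) ;
  Format.printf "entries kept: %d@." (String_cache.length t)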
|> int_of_float) ) end module Event_filter = struct - type t = | True | False @@ -66,89 +76,127 @@ module Event_filter = struct let rec run ~section ~level ~name filter = let continue = run ~section ~level ~name in match filter with - | True -> true - | False -> false - | Or l -> List.exists continue l - | And l -> List.for_all continue l - | Name s -> String.equal s name - | Name_matches re -> Re.execp re name - | Level_in l -> List.mem level l - | Section_in l -> List.mem section l + | True -> + true + | False -> + false + | Or l -> + List.exists continue l + | And l -> + List.for_all continue l + | Name s -> + String.equal s name + | Name_matches re -> + Re.execp re name + | Level_in l -> + List.mem level l + | Section_in l -> + List.mem section l let rec pp fmt filter = let open Format in match filter with - | True -> pp_print_string fmt "true" - | False -> pp_print_string fmt "false" + | True -> + pp_print_string fmt "true" + | False -> + pp_print_string fmt "false" | Or l -> - fprintf fmt "(or@ @[<2>%a@]" - (pp_print_list ~pp_sep:(fun fmt () -> fprintf fmt "@ ") pp) l + fprintf + fmt + "(or@ @[<2>%a@]" + (pp_print_list ~pp_sep:(fun fmt () -> fprintf fmt "@ ") pp) + l | And l -> - fprintf fmt "(and@ @[<2>%a@]" - (pp_print_list ~pp_sep:(fun fmt () -> fprintf fmt "@ ") pp) l - | Name s -> fprintf fmt "(name-is@ %S)" s - | Name_matches re -> fprintf fmt "(name-matches@ %a)" Re.pp_re re + fprintf + fmt + "(and@ @[<2>%a@]" + (pp_print_list ~pp_sep:(fun fmt () -> fprintf fmt "@ ") pp) + l + | Name s -> + fprintf fmt "(name-is@ %S)" s + | Name_matches re -> + fprintf fmt "(name-matches@ %a)" Re.pp_re re | Level_in l -> - fprintf fmt "(level-in@ [%s])" + fprintf + fmt + "(level-in@ [%s])" (String.concat "," (List.map Internal_event.Level.to_string l)) | Section_in l -> - fprintf fmt "(section-in@ [%a])" - (pp_print_list ~pp_sep:(fun fmt () -> fprintf fmt ",@ ") - (fun fmt s -> fprintf fmt "(Some %s)" - (String.concat "," - (Internal_event.Section.to_string_list s)))) + fprintf + fmt + "(section-in@ [%a])" + (pp_print_list + ~pp_sep:(fun fmt () -> fprintf fmt ",@ ") + (fun fmt s -> + fprintf + fmt + "(Some %s)" + (String.concat "," (Internal_event.Section.to_string_list s)))) l - [@@warning "-32"] (* -> The "unused value" warning. *) + [@@warning "-32"] + + (* -> The "unused value" warning. *) let t = True - let f = False - [@@warning "-32"] (* -> The "unused value" warning. *) + + let f = False [@@warning "-32"] + + (* -> The "unused value" warning. *) + let any l = Or l - let all l = And l - [@@warning "-32"] (* -> The "unused value" warning. *) + + let all l = And l [@@warning "-32"] + + (* -> The "unused value" warning. 
*) + let name_is s = Name s + let name_matches s = Name_matches s + let name_matches_posix s = name_matches (Re.Posix.compile_pat s) + let level_in l = Level_in l + let section_in l = Section_in l let levels_in_order = - Internal_event.[ Debug ; Info ; Notice ; Warning ; Error ; Fatal] + Internal_event.[Debug; Info; Notice; Warning; Error; Fatal] let level_at_least lvl = List.fold_left (function - | None -> (function l when l = lvl -> Some [l] | _ -> None) - | Some s -> (fun l -> Some (l :: s))) + | None -> ( + function l when l = lvl -> Some [l] | _ -> None ) + | Some s -> + fun l -> Some (l :: s)) None levels_in_order |> Option.unopt_exn (Failure "level_at_least not found") |> level_in - end type t = { - path : string ; + path : string; (* Hopefully temporary hack to handle event which are emitted with the non-cooperative log functions in `Legacy_logging`: *) - lwt_bad_citizen_hack : (string * Data_encoding.json) list ref ; - event_filter: Event_filter.t ; + lwt_bad_citizen_hack : (string * Data_encoding.json) list ref; + event_filter : Event_filter.t } +type 'event wrapped = { + time_stamp : Micro_seconds.t; + section : Internal_event.Section.t; + event : 'event +} -type 'event wrapped = - { time_stamp : Micro_seconds.t ; - section : Internal_event.Section.t ; - event : 'event } - -let wrap time_stamp section event = { time_stamp ; section ; event } +let wrap time_stamp section event = {time_stamp; section; event} let wrapped_encoding event_encoding = let open Data_encoding in let v0 = conv - (fun { time_stamp ; section ; event } -> (time_stamp, section, event)) - (fun (time_stamp, section, event) -> { time_stamp ; section ; event }) + (fun {time_stamp; section; event} -> (time_stamp, section, event)) + (fun (time_stamp, section, event) -> {time_stamp; section; event}) (obj3 (req "time_stamp" Micro_seconds.encoding) (req "section" Internal_event.Section.encoding) @@ -157,23 +205,21 @@ let wrapped_encoding event_encoding = With_version.(encoding ~name:"file-event-sink-item" (first_version v0)) module Section_dir = struct - let of_section (section : Internal_event.Section.t) = String.concat "." 
(Internal_event.Section.to_string_list section) - let section_name = - function - | "no-section" -> Ok None - | other -> - (match String.remove_prefix ~prefix:"section-" other with - | None -> Error "wrong-dir-name" - | Some s -> Ok (Some s)) + let section_name = function + | "no-section" -> + Ok None + | other -> ( + match String.remove_prefix ~prefix:"section-" other with + | None -> + Error "wrong-dir-name" + | Some s -> + Ok (Some s) ) end - - module Sink_implementation : Internal_event.SINK with type t = t = struct - type nonrec t = t let uri_scheme = "unix-files" @@ -181,33 +227,40 @@ module Sink_implementation : Internal_event.SINK with type t = t = struct let configure uri = let event_filter = let name_res = - Uri.get_query_param' uri "name-matches" |> Option.unopt ~default:[] in - let names = Uri.get_query_param' uri "name" |> Option.unopt ~default:[] in + Uri.get_query_param' uri "name-matches" |> Option.unopt ~default:[] + in + let names = + Uri.get_query_param' uri "name" |> Option.unopt ~default:[] + in let levels = Option.( Uri.get_query_param uri "level-at-least" >>= Internal_event.Level.of_string >>= fun l -> (* some (fun all more -> all [Event_filter.level_at_least l ; more ]) *) - some [Event_filter.level_at_least l] - ) + some [Event_filter.level_at_least l]) |> Option.unopt ~default:[] in let sections = let somes = - Uri.get_query_param' uri "section" |> Option.unopt ~default:[] + Uri.get_query_param' uri "section" + |> Option.unopt ~default:[] |> List.map (fun s -> - (Internal_event.Section.make_sanitized - (String.split_on_char '.' s))) + Internal_event.Section.make_sanitized + (String.split_on_char '.' s)) in let none = match Uri.get_query_param uri "no-section" with - | Some "true" -> [Internal_event.Section.empty] - | _ -> [] + | Some "true" -> + [Internal_event.Section.empty] + | _ -> + [] in match somes @ none with - | [] -> [] - | more -> [Event_filter.section_in more] + | [] -> + [] + | more -> + [Event_filter.section_in more] in Event_filter.( match @@ -215,98 +268,107 @@ module Sink_implementation : Internal_event.SINK with type t = t = struct @ List.map name_matches_posix name_res @ List.map name_is names with - | [] -> t - | more -> any more - ) in + | [] -> + t + | more -> + any more) + in let t = - { path = Uri.path uri ; lwt_bad_citizen_hack = ref [] ; event_filter } in + {path = Uri.path uri; lwt_bad_citizen_hack = ref []; event_filter} + in return t - let output_json ~pp file_path event_json = - Lwt.catch (fun () -> + Lwt.catch + (fun () -> Lwt_utils_unix.create_dir ~perm:0o700 (Filename.dirname file_path) >>= fun () -> - Lwt_utils_unix.Json.write_file file_path - event_json + Lwt_utils_unix.Json.write_file file_path event_json >>= function - | Ok () -> return_unit + | Ok () -> + return_unit | Error el -> failwith "ERROR while Handling %a,@ cannot write JSON to %s:@ %a\n%!" - pp () file_path Error_monad.pp_print_error el - ) + pp + () + file_path + Error_monad.pp_print_error + el) (function | e -> - failwith "ERROR while Handling %a: %a\n%!" - pp () Error_monad.pp_exn e) - - let handle - (type a) { path ; lwt_bad_citizen_hack ; event_filter } - m ?(section = Internal_event.Section.empty) (v : unit -> a) = + failwith + "ERROR while Handling %a: %a\n%!" 
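(* Aside: a sketch of activating this sink through a URI, given the sink
   registration below.  The query parameters are the ones read by [configure]
   ("name", "name-matches", "level-at-least", "section", "no-section"); the
   path and filter values are illustrative only. *)
let activate_file_sink () =
  Internal_event.All_sinks.activate
    (Uri.of_string
       "unix-files:///tmp/tezos-events?level-at-least=error&section=node")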
+ pp + () + Error_monad.pp_exn + e) + + let handle (type a) {path; lwt_bad_citizen_hack; event_filter} m + ?(section = Internal_event.Section.empty) (v : unit -> a) = let module M = (val m : Internal_event.EVENT_DEFINITION with type t = a) in let now = Micro_seconds.now () in - let date, time = Micro_seconds.date_string now in + let (date, time) = Micro_seconds.date_string now in let forced = v () in let level = M.level forced in - match - Event_filter.run - ~section:section ~level ~name:M.name event_filter - with + match Event_filter.run ~section ~level ~name:M.name event_filter with | true -> let event_json = Data_encoding.Json.construct (wrapped_encoding M.encoding) - (wrap now section forced) in + (wrap now section forced) + in let tag = let hash = - Marshal.to_string event_json [] - |> Digest.string |> Digest.to_hex in - String.sub hash 0 8 in + Marshal.to_string event_json [] |> Digest.string |> Digest.to_hex + in + String.sub hash 0 8 + in let section_dir = Section_dir.of_section section in let dir_path = - List.fold_left Filename.concat path - [ section_dir; M.name ; date ; time ] in + List.fold_left Filename.concat path [section_dir; M.name; date; time] + in let file_path = - Filename.concat dir_path - (Printf.sprintf "%s_%s_%s.json" date time tag) in - lwt_bad_citizen_hack := (file_path, event_json) :: !lwt_bad_citizen_hack ; + Filename.concat + dir_path + (Printf.sprintf "%s_%s_%s.json" date time tag) + in + lwt_bad_citizen_hack := + (file_path, event_json) :: !lwt_bad_citizen_hack ; output_json file_path event_json ~pp:(fun fmt () -> M.pp fmt forced) >>=? fun () -> lwt_bad_citizen_hack := List.filter (fun (f, _) -> f <> file_path) !lwt_bad_citizen_hack ; return_unit - | false -> return_unit + | false -> + return_unit - let close { lwt_bad_citizen_hack ; _ } = + let close {lwt_bad_citizen_hack; _} = iter_s (fun (f, j) -> - output_json f j - ~pp:(fun fmt () -> Format.fprintf fmt "Destacking: %s" f)) + output_json f j ~pp:(fun fmt () -> + Format.fprintf fmt "Destacking: %s" f)) !lwt_bad_citizen_hack - >>=? fun () -> - return_unit + >>=? fun () -> return_unit end -let () = - Internal_event.All_sinks.register (module Sink_implementation) +let () = Internal_event.All_sinks.register (module Sink_implementation) open Sink_implementation module Query = struct - let with_file_kind dir p = protect (fun () -> - Lwt_unix.stat (Filename.concat dir p) >>= fun {Lwt_unix. st_kind ; _ } -> - return st_kind) + Lwt_unix.stat (Filename.concat dir p) + >>= fun {Lwt_unix.st_kind; _} -> return st_kind) >>=? function - | Unix.S_DIR -> return (`Directory p) - | Unix.S_REG -> return (`Regular_file p) - | Unix.S_CHR - | Unix.S_BLK - | Unix.S_LNK - | Unix.S_FIFO - | Unix.S_SOCK as k -> return (`Special (k, p)) + | Unix.S_DIR -> + return (`Directory p) + | Unix.S_REG -> + return (`Regular_file p) + | (Unix.S_CHR | Unix.S_BLK | Unix.S_LNK | Unix.S_FIFO | Unix.S_SOCK) as k + -> + return (`Special (k, p)) let fold_directory path ~init ~f = protect (fun () -> @@ -316,52 +378,56 @@ module Query = struct protect (fun () -> Lwt.catch (fun () -> - Lwt_unix.readdir dirhandle >>= fun d -> - with_file_kind path d - >>=? fun wk -> - return_some wk) + Lwt_unix.readdir dirhandle + >>= fun d -> with_file_kind path d >>=? 
fun wk -> return_some wk) (function | End_of_file -> - Lwt_unix.closedir dirhandle >>= fun () -> - return_none + Lwt_unix.closedir dirhandle >>= fun () -> return_none | (e : exn) -> - failwith "ERROR while folding %s: %s" - path (Printexc.to_string e))) + failwith + "ERROR while folding %s: %s" + path + (Printexc.to_string e))) >>=? fun opt -> - prev >>=? fun p -> - begin match opt with - | Some more -> iter (f p more) - | None -> prev - end + prev + >>=? fun p -> + match opt with Some more -> iter (f p more) | None -> prev in iter init - let (//) = Filename.concat + let ( // ) = Filename.concat module Time_constraint = struct - type op = [ `Lt | `Le | `Ge | `Gt ] - type t = [ - | `Date of op * float + type op = [`Lt | `Le | `Ge | `Gt] + + type t = + [ `Date of op * float | `Time of op * float | `And of t * t | `Or of t * t - | `All - ] + | `All ] let rec check_logic check_terminal (t : t) string = let continue = check_logic check_terminal in match t with - | `All -> true - | `And (a, b) -> continue a string && continue b string - | `Or (a, b) -> continue a string || continue b string - | `Date _ | `Time _ as term -> check_terminal term - - let op_with_string = - function - | `Lt -> (fun a b -> String.compare a b > 0) - | `Gt -> (fun a b -> String.compare a b < 0) - | `Le -> (fun a b -> String.compare a b >= 0) - | `Ge -> (fun a b -> String.compare a b <= 0) + | `All -> + true + | `And (a, b) -> + continue a string && continue b string + | `Or (a, b) -> + continue a string || continue b string + | (`Date _ | `Time _) as term -> + check_terminal term + + let op_with_string = function + | `Lt -> + fun a b -> String.compare a b > 0 + | `Gt -> + fun a b -> String.compare a b < 0 + | `Le -> + fun a b -> String.compare a b >= 0 + | `Ge -> + fun a b -> String.compare a b <= 0 let check_date (t : t) date_string = check_logic @@ -369,8 +435,10 @@ module Query = struct | `Date (op, f) -> let s = Micro_seconds.(date_string (of_float f) |> fst) in op_with_string op s date_string - | `Time _ -> true) - t date_string + | `Time _ -> + true) + t + date_string let check_time (t : t) string = check_logic @@ -378,171 +446,204 @@ module Query = struct | `Time (op, f) -> let s = Micro_seconds.(date_string (of_float f) |> snd) in op_with_string op s string - | `Date _ -> true) - t Micro_seconds.date_string + | `Date _ -> + true) + t + Micro_seconds.date_string end module Report = struct - type item = [ - | `Error of [ - | `Parsing_event of [ - | `Encoding of string * exn - | `Json of string * error list - ] - | `Cannot_recognize_section of string - ] - | `Warning of [ - | `Expecting_regular_file_at of string - | `Expecting_directory_at of string - | `Unknown_event_name_at of string * string - ] - ] + type item = + [ `Error of + [ `Parsing_event of + [`Encoding of string * exn | `Json of string * error list] + | `Cannot_recognize_section of string ] + | `Warning of + [ `Expecting_regular_file_at of string + | `Expecting_directory_at of string + | `Unknown_event_name_at of string * string ] ] let pp fmt (x : item) = let open Format in - let error fmt = - function - | `Parsing_event e -> - (match e with - | `Encoding (path, exn) -> - fprintf fmt "@[Parse error:@ wrong encoding for %S: %a@]" - path pp_exn exn - | `Json (path, el) -> - fprintf fmt "@[Parse error:@ wrong JSON for %S: %a@]" - path pp_print_error el) + let error fmt = function + | `Parsing_event e -> ( + match e with + | `Encoding (path, exn) -> + fprintf + fmt + "@[Parse error:@ wrong encoding for %S: %a@]" + path + pp_exn + exn + | `Json (path, el) 
-> + fprintf + fmt + "@[Parse error:@ wrong JSON for %S: %a@]" + path + pp_print_error + el ) | `Cannot_recognize_section sec -> - fprintf fmt + fprintf + fmt "@[Directory error:@ cannot recognize section directory@ %S@]" sec in - let warning fmt = - function - | `Expecting_regular_file_at path -> fprintf fmt "%S@ is not a regular file" path - | `Expecting_directory_at path -> fprintf fmt "%S@ is not a directory" path - | `Unknown_event_name_at (name, path) -> fprintf fmt "Unknown event name@ %S@ at@ %S" name path + let warning fmt = function + | `Expecting_regular_file_at path -> + fprintf fmt "%S@ is not a regular file" path + | `Expecting_directory_at path -> + fprintf fmt "%S@ is not a directory" path + | `Unknown_event_name_at (name, path) -> + fprintf fmt "Unknown event name@ %S@ at@ %S" name path in match x with - | `Error e -> fprintf fmt "@[Error:@ %a@]" error e - | `Warning e -> fprintf fmt "@[Warning:@ %a@]" warning e + | `Error e -> + fprintf fmt "@[Error:@ %a@]" error e + | `Warning e -> + fprintf fmt "@[Warning:@ %a@]" warning e let make_return m ((prev : item list), value) warning = - return ((m warning :: prev), value) + return (m warning :: prev, value) + let return_with_warning v e = make_return (fun e -> `Warning e) v e + let return_with_error v e = make_return (fun e -> `Error e) v e end - open Report + open Report let fold_event_kind_directory ~time_query path ~init ~f = - fold_directory path ~init:(return init) - ~f:(fun previous -> function - | `Directory "." | `Directory ".." -> return previous - | `Directory date when Time_constraint.check_date time_query date -> - fold_directory (path // date) - ~init:(return previous) - ~f:(fun previous -> function + fold_directory path ~init:(return init) ~f:(fun previous -> + function + | `Directory "." | `Directory ".." -> + return previous + | `Directory date when Time_constraint.check_date time_query date -> + fold_directory + (path // date) + ~init:(return previous) + ~f:(fun previous -> + function + | `Directory "." | `Directory ".." -> + return previous + | `Directory time when Time_constraint.check_time time_query time + -> + fold_directory + (path // date // time) + ~init:(return previous) + ~f:(fun previous -> function | `Directory "." | `Directory ".." -> return previous - | `Directory time when Time_constraint.check_time time_query time -> - fold_directory (path // date // time) - ~init:(return previous) - ~f:(fun previous -> function - | `Directory "." | `Directory ".." 
-> return previous - | `Regular_file file -> - f previous (path // date // time // file) - | `Directory p | `Special (_, p) -> - return_with_warning previous - (`Expecting_regular_file_at - (path // date // time // p)) - ) - | `Directory _ (* filtered out *) -> return previous - | `Regular_file p | `Special (_, p) -> - return_with_warning previous - (`Expecting_directory_at (path // date // p))) - | `Directory _ (* filtered out *) -> return previous - | `Regular_file p | `Special (_, p) -> - return_with_warning previous - (`Expecting_directory_at (path // p))) - - let handle_event_kind_directory (type a) ~time_query ~section_path ~init ~f ev = - let module Event = - (val ev : Internal_event.EVENT_DEFINITION with type t = a) in + | `Regular_file file -> + f previous (path // date // time // file) + | `Directory p | `Special (_, p) -> + return_with_warning + previous + (`Expecting_regular_file_at + (path // date // time // p))) + | `Directory _ (* filtered out *) -> + return previous + | `Regular_file p | `Special (_, p) -> + return_with_warning + previous + (`Expecting_directory_at (path // date // p))) + | `Directory _ (* filtered out *) -> + return previous + | `Regular_file p | `Special (_, p) -> + return_with_warning previous (`Expecting_directory_at (path // p))) + + let handle_event_kind_directory (type a) ~time_query ~section_path ~init ~f + ev = + let module Event = ( val ev : Internal_event.EVENT_DEFINITION + with type t = a ) + in let handle_event_file previous path = Lwt_utils_unix.Json.read_file path >>= function - | Ok json -> - begin try - let { time_stamp ; event ; _ } = - Data_encoding.Json.destruct - (wrapped_encoding Event.encoding) json in - f (snd previous) - ~time_stamp:(time_stamp :> float) - (Internal_event.Generic.Event - (Event.name, ev, event)) - >>=? fun user_return -> - return (fst previous, user_return) - with - e -> - return_with_error previous (`Parsing_event (`Encoding (path, e))) - end + | Ok json -> ( + try + let {time_stamp; event; _} = + Data_encoding.Json.destruct (wrapped_encoding Event.encoding) json + in + f + (snd previous) + ~time_stamp:(time_stamp :> float) + (Internal_event.Generic.Event (Event.name, ev, event)) + >>=? fun user_return -> return (fst previous, user_return) + with e -> + return_with_error previous (`Parsing_event (`Encoding (path, e))) ) | Error el -> return_with_error previous (`Parsing_event (`Json (path, el))) in - fold_event_kind_directory ~time_query - (section_path // Event.name) ~init + fold_event_kind_directory + ~time_query + (section_path // Event.name) + ~init ~f:(fun prev file -> handle_event_file prev file) - - let fold - ?on_unknown ?only_sections ?only_names ?(time_query = `All) uri ~init ~f = + let fold ?on_unknown ?only_sections ?only_names ?(time_query = `All) uri + ~init ~f = let name_matches = match only_names with - | None -> (fun _ -> true) - | Some l -> (fun name -> List.mem name l) in + | None -> + fun _ -> true + | Some l -> + fun name -> List.mem name l + in let section_matches = match only_sections with - | None -> (fun _ -> true) - | Some l -> (fun name -> List.mem name l) in + | None -> + fun _ -> true + | Some l -> + fun name -> List.mem name l + in configure uri - >>=? fun { path = sink_path ; _ } -> - fold_directory sink_path ~init:(return ([], init)) ~f:(fun previous -> function - | `Directory ("." 
| "..") -> return previous - | `Directory dir -> - begin match Section_dir.section_name dir with - | Ok sec when section_matches sec -> - fold_directory (sink_path // dir) - ~init:(return ([], init)) ~f:(fun previous -> function - | `Directory ("." | "..") -> return previous - | `Directory event_name when name_matches event_name -> - let open Internal_event in - begin match All_definitions.find ((=) event_name) with - | Some (Generic.Definition (_, ev)) -> - handle_event_kind_directory ~time_query ev - ~section_path:(sink_path // dir) - ~init:previous ~f - | None -> - begin match on_unknown with - | None -> - return_with_warning previous - (`Unknown_event_name_at - (event_name, sink_path // dir)) - | Some f -> - fold_event_kind_directory ~time_query - (sink_path // dir // event_name) - ~init:previous - ~f:(fun prev file -> - f file >>=? fun () -> - return prev) - end - end - | `Directory _ (* filtered out *) -> return previous - | `Regular_file p | `Special (_, p) -> - return_with_warning previous - (`Expecting_directory_at (sink_path // p))) - | Ok _ (* section does not match *) -> return previous - | Error _ -> - return_with_error previous (`Cannot_recognize_section dir) - end + >>=? fun {path = sink_path; _} -> + fold_directory + sink_path + ~init:(return ([], init)) + ~f:(fun previous -> function `Directory ("." | "..") -> return previous + | `Directory dir -> ( + match Section_dir.section_name dir with + | Ok sec when section_matches sec -> + fold_directory + (sink_path // dir) + ~init:(return ([], init)) + ~f:(fun previous -> function `Directory ("." | "..") -> + return previous + | `Directory event_name when name_matches event_name -> ( + let open Internal_event in + match All_definitions.find (( = ) event_name) with + | Some (Generic.Definition (_, ev)) -> + handle_event_kind_directory + ~time_query + ev + ~section_path:(sink_path // dir) + ~init:previous + ~f + | None -> ( + match on_unknown with + | None -> + return_with_warning + previous + (`Unknown_event_name_at + (event_name, sink_path // dir)) + | Some f -> + fold_event_kind_directory + ~time_query + (sink_path // dir // event_name) + ~init:previous + ~f:(fun prev file -> + f file >>=? fun () -> return prev) ) ) + | `Directory _ (* filtered out *) -> return previous + | `Regular_file p | `Special (_, p) -> + return_with_warning + previous + (`Expecting_directory_at (sink_path // p))) + | Ok _ (* section does not match *) -> + return previous + | Error _ -> + return_with_error previous (`Cannot_recognize_section dir) ) | `Regular_file p | `Special (_, p) -> - return_with_warning previous + return_with_warning + previous (`Expecting_directory_at (sink_path // p))) end diff --git a/src/lib_stdlib_unix/file_event_sink.mli b/src/lib_stdlib_unix/file_event_sink.mli index bfaae1a7fe24f3f02ca8b0e06c4b27e5b7bbbe46..f744e09aa2be47b829f912e03d04d400f3e23055 100644 --- a/src/lib_stdlib_unix/file_event_sink.mli +++ b/src/lib_stdlib_unix/file_event_sink.mli @@ -41,9 +41,9 @@ (** The module {!Query} provides a {!fold} function over the events stored by a given instantiation of the [SINK.t]. *) module Query : sig - module Time_constraint : sig - type op = [ `Lt | `Le | `Ge | `Gt ] + type op = [`Lt | `Le | `Ge | `Gt] + type t = [ `All | `And of t * t @@ -56,21 +56,17 @@ module Query : sig warnings that happened during the scan, those are defined in {!Report.item}. 
*) module Report : sig - type item = [ - | `Error of [ - | `Parsing_event of [ - | `Encoding of string * exn - | `Json of string * error list - ] - | `Cannot_recognize_section of string - ] - | `Warning of [ - | `Expecting_regular_file_at of string - | `Expecting_directory_at of string - | `Unknown_event_name_at of string * string - ] - ] - val pp: Format.formatter -> item -> unit + type item = + [ `Error of + [ `Parsing_event of + [`Encoding of string * exn | `Json of string * error list] + | `Cannot_recognize_section of string ] + | `Warning of + [ `Expecting_regular_file_at of string + | `Expecting_directory_at of string + | `Unknown_event_name_at of string * string ] ] + + val pp : Format.formatter -> item -> unit end (** Scan a folder for events. @@ -93,6 +89,9 @@ module Query : sig ?time_query:Time_constraint.t -> Uri.t -> init:'a -> - f:('a -> time_stamp:float -> Internal_event.Generic.event -> 'a tzresult Lwt.t) -> + f:('a -> + time_stamp:float -> + Internal_event.Generic.event -> + 'a tzresult Lwt.t) -> (Report.item list * 'a) tzresult Lwt.t end diff --git a/src/lib_stdlib_unix/internal_event_unix.ml b/src/lib_stdlib_unix/internal_event_unix.ml index aa5bcaf43cb5a780a871e66d55f2d25f648c7f64..4f5f75b8f41e276d6773394ab46a5386388f7772 100644 --- a/src/lib_stdlib_unix/internal_event_unix.ml +++ b/src/lib_stdlib_unix/internal_event_unix.ml @@ -24,32 +24,32 @@ (*****************************************************************************) module Configuration = struct - type t = { activate : Uri.t list } + type t = {activate : Uri.t list} let default = - { activate = [ - Uri.make ~scheme:Internal_event.Lwt_log_sink.uri_scheme () - ] } + {activate = [Uri.make ~scheme:Internal_event.Lwt_log_sink.uri_scheme ()]} let encoding = let open Data_encoding in conv - (fun { activate } -> List.map Uri.to_string activate) - (fun activate -> { activate = List.map Uri.of_string activate }) + (fun {activate} -> List.map Uri.to_string activate) + (fun activate -> {activate = List.map Uri.of_string activate}) (obj1 - (dft "activate" - ~description: "List of URIs to activate/configure sinks." - (list string) [])) + (dft + "activate" + ~description:"List of URIs to activate/configure sinks." + (list string) + [])) let of_file path = - Lwt_utils_unix.Json.read_file path >>=? fun json -> + Lwt_utils_unix.Json.read_file path + >>=? fun json -> protect (fun () -> return (Data_encoding.Json.destruct encoding json)) - let apply { activate } = + let apply {activate} = List.fold_left (fun prev uri -> - prev >>=? fun () -> - Internal_event.All_sinks.activate uri) + prev >>=? fun () -> Internal_event.All_sinks.activate uri) return_unit activate end @@ -59,47 +59,58 @@ let env_var_name = "TEZOS_EVENTS_CONFIG" let init ?lwt_log_sink ?(configuration = Configuration.default) () = Lwt_log_sink_unix.initialize ?cfg:lwt_log_sink () >>= fun () -> - begin - begin match Sys.(getenv_opt env_var_name) with - | None -> - return_unit - | Some s -> - let uris = - String.split ' ' s - |> List.map (String.split '\n') |> List.concat - |> List.map (String.split '\t') |> List.concat - |> List.filter ((<>) "") - |> List.map Uri.of_string in - List.fold_left - (fun prev uri -> - prev >>=? fun () -> - match Uri.scheme uri with - | None -> - Configuration.of_file (Uri.path uri) >>=? fun cfg -> - Configuration.apply cfg - | Some _ -> - Internal_event.All_sinks.activate uri) - return_unit - uris >>=? 
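(* Aside: a sketch of scanning a sink directory with [Query.fold] as specified
   above, assuming [File_event_sink] is in scope and the Error_monad
   operators ([>>=?], [return]) are open.  The URI must designate the same
   unix-files sink that wrote the events; the path is illustrative. *)
let count_recorded_events () =
  File_event_sink.Query.fold
    (Uri.of_string "unix-files:///tmp/tezos-events")
    ~init:0
    ~f:(fun count ~time_stamp:_ _event -> return (count + 1))
  >>=? fun (report, count) ->
  List.iter (Format.eprintf "%a@." File_event_sink.Query.Report.pp) report ;
  return count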
fun () -> - Internal_event.Debug_event.( - emit (make "Loaded URIs from environment" - ~attach:(`O [ "variable", `String env_var_name ; - "value", `String s ]))) - end >>=? fun () -> - Configuration.apply configuration - end + ( match Sys.(getenv_opt env_var_name) with + | None -> + return_unit + | Some s -> + let uris = + String.split ' ' s + |> List.map (String.split '\n') + |> List.concat + |> List.map (String.split '\t') + |> List.concat + |> List.filter (( <> ) "") + |> List.map Uri.of_string + in + List.fold_left + (fun prev uri -> + prev + >>=? fun () -> + match Uri.scheme uri with + | None -> + Configuration.of_file (Uri.path uri) + >>=? fun cfg -> Configuration.apply cfg + | Some _ -> + Internal_event.All_sinks.activate uri) + return_unit + uris + >>=? fun () -> + Internal_event.Debug_event.( + emit + (make + "Loaded URIs from environment" + ~attach: + (`O [("variable", `String env_var_name); ("value", `String s)]))) + ) + >>=? (fun () -> Configuration.apply configuration) >>= function - | Ok () -> Lwt.return_unit + | Ok () -> + Lwt.return_unit | Error el -> - Format.kasprintf Lwt.fail_with + Format.kasprintf + Lwt.fail_with "ERROR@ Initializing Internal_event_unix:@ %a\n%!" - Error_monad.pp_print_error el + Error_monad.pp_print_error + el let close () = Internal_event.All_sinks.close () >>= function - | Ok () -> Lwt.return_unit + | Ok () -> + Lwt.return_unit | Error el -> - Format.kasprintf Lwt.fail_with + Format.kasprintf + Lwt.fail_with "ERROR@ closing Internal_event_unix:@ %a\n%!" - Error_monad.pp_print_error el + Error_monad.pp_print_error + el diff --git a/src/lib_stdlib_unix/internal_event_unix.mli b/src/lib_stdlib_unix/internal_event_unix.mli index 4a124addf1f1f1f2d7d21fea81a7dc83a9dd6f4d..6a2eedbdcd93e87e069dc37f8238e517d76bee61 100644 --- a/src/lib_stdlib_unix/internal_event_unix.mli +++ b/src/lib_stdlib_unix/internal_event_unix.mli @@ -40,16 +40,11 @@ module Configuration : sig (** Parse a json file at [path] into a configuration. *) val of_file : string -> t tzresult Lwt.t - val apply : t -> unit tzresult Lwt.t (** Run {!Tezos_base.Internal_event.All_sinks.activate} for every URI in the configuration. *) + val apply : t -> unit tzresult Lwt.t end -val init : - ?lwt_log_sink:Lwt_log_sink_unix.cfg -> - ?configuration:Configuration.t -> - unit -> - unit Lwt.t (** Initialize the internal-event sinks by looking at the [?configuration] argument and then at the (whitespace separated) list of URIs in the ["TEZOS_EVENTS_CONFIG"] environment variable, if an URI @@ -61,6 +56,11 @@ val init : The function also initializes the {!Lwt_log_sink_unix} module (corresponding to the ["TEZOS_LOG"] environment variable). *) +val init : + ?lwt_log_sink:Lwt_log_sink_unix.cfg -> + ?configuration:Configuration.t -> + unit -> + unit Lwt.t -val close : unit -> unit Lwt.t (** Call [close] on all the sinks. 
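(* Aside: a sketch of the two configuration paths handled by [init] above.
   TEZOS_EVENTS_CONFIG holds whitespace-separated URIs; a URI without a
   scheme is read as a JSON file whose "activate" field lists sink URIs,
   e.g. {"activate": ["unix-files:///tmp/tezos-events"]}.  Values are
   illustrative; [>>=] is Lwt bind. *)
let run_with_events main =
  let open Lwt.Infix in
  Internal_event_unix.init ()
  >>= fun () ->
  main ()
  >>= fun result ->
  Internal_event_unix.close () >>= fun () -> Lwt.return result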
*) +val close : unit -> unit Lwt.t diff --git a/src/lib_stdlib_unix/lwt_lock_file.ml b/src/lib_stdlib_unix/lwt_lock_file.ml index bf785c121e436775604f459366a4c2cf716f6727..5b15c17a3665b49b04053c239414a379210dec2f 100644 --- a/src/lib_stdlib_unix/lwt_lock_file.ml +++ b/src/lib_stdlib_unix/lwt_lock_file.ml @@ -25,50 +25,46 @@ open Error_monad -let create_inner - lock_command - ?(close_on_exec=true) - ?(unlink_on_exit=false) fn = - protect begin fun () -> - Lwt_unix.openfile fn Unix.[O_CREAT ; O_WRONLY; O_TRUNC] 0o644 >>= fun fd -> - if close_on_exec then Lwt_unix.set_close_on_exec fd ; - Lwt_unix.lockf fd lock_command 0 >>= fun () -> - if unlink_on_exit then - Lwt_main.at_exit (fun () -> Lwt_unix.unlink fn) ; - let pid_str = string_of_int @@ Unix.getpid () in - Lwt_unix.write_string fd pid_str 0 (String.length pid_str) >>= fun _ -> - return_unit - end +let create_inner lock_command ?(close_on_exec = true) ?(unlink_on_exit = false) + fn = + protect (fun () -> + Lwt_unix.openfile fn Unix.[O_CREAT; O_WRONLY; O_TRUNC] 0o644 + >>= fun fd -> + if close_on_exec then Lwt_unix.set_close_on_exec fd ; + Lwt_unix.lockf fd lock_command 0 + >>= fun () -> + if unlink_on_exit then Lwt_main.at_exit (fun () -> Lwt_unix.unlink fn) ; + let pid_str = string_of_int @@ Unix.getpid () in + Lwt_unix.write_string fd pid_str 0 (String.length pid_str) + >>= fun _ -> return_unit) let create = create_inner Unix.F_TLOCK -let blocking_create - ?timeout - ?(close_on_exec=true) - ?(unlink_on_exit=false) fn = - let create () = - create_inner Unix.F_LOCK ~close_on_exec ~unlink_on_exit fn in +let blocking_create ?timeout ?(close_on_exec = true) ?(unlink_on_exit = false) + fn = + let create () = create_inner Unix.F_LOCK ~close_on_exec ~unlink_on_exit fn in match timeout with - | None -> create () - | Some duration -> with_timeout (Lwt_unix.sleep duration) (fun _ -> create ()) + | None -> + create () + | Some duration -> + with_timeout (Lwt_unix.sleep duration) (fun _ -> create ()) let is_locked fn = - if not @@ Sys.file_exists fn then return_false else - protect begin fun () -> - Lwt_unix.openfile fn [Unix.O_RDONLY] 0o644 >>= fun fd -> - Lwt.finalize (fun () -> - Lwt.try_bind - (fun () -> Lwt_unix.(lockf fd F_TEST 0)) - (fun () -> return_false) - (fun _ -> return_true)) - (fun () -> Lwt_unix.close fd) - end + if not @@ Sys.file_exists fn then return_false + else + protect (fun () -> + Lwt_unix.openfile fn [Unix.O_RDONLY] 0o644 + >>= fun fd -> + Lwt.finalize + (fun () -> + Lwt.try_bind + (fun () -> Lwt_unix.(lockf fd F_TEST 0)) + (fun () -> return_false) + (fun _ -> return_true)) + (fun () -> Lwt_unix.close fd)) let get_pid fn = let open Lwt_io in - protect begin fun () -> - with_file ~mode:Input fn begin fun ic -> - read ic >>= fun content -> - return (int_of_string content) - end - end + protect (fun () -> + with_file ~mode:Input fn (fun ic -> + read ic >>= fun content -> return (int_of_string content))) diff --git a/src/lib_stdlib_unix/lwt_lock_file.mli b/src/lib_stdlib_unix/lwt_lock_file.mli index 4e7692e49d8f15b658d1fc555feeef7be57e08ee..14be5ee26e08a521256bf36cc9af0c0fbfcc1c2d 100644 --- a/src/lib_stdlib_unix/lwt_lock_file.mli +++ b/src/lib_stdlib_unix/lwt_lock_file.mli @@ -26,15 +26,15 @@ open Error_monad val create : - ?close_on_exec:bool -> - ?unlink_on_exit:bool -> - string -> unit tzresult Lwt.t + ?close_on_exec:bool -> ?unlink_on_exit:bool -> string -> unit tzresult Lwt.t val blocking_create : ?timeout:float -> ?close_on_exec:bool -> ?unlink_on_exit:bool -> - string -> unit tzresult Lwt.t + string -> + unit tzresult 
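(* Aside: a sketch of taking a PID lock file with the functions above,
   assuming the Error_monad operators are in scope.  [blocking_create] waits
   for the lock (here at most 5 seconds) and [get_pid] reads back the PID that
   [create_inner] wrote; the path is illustrative. *)
let acquire_node_lock () =
  Lwt_lock_file.blocking_create
    ~timeout:5.
    ~unlink_on_exit:true
    "/tmp/node.lock"
  >>=? fun () ->
  Lwt_lock_file.get_pid "/tmp/node.lock"
  >>=? fun pid ->
  Format.eprintf "lock held by pid %d@." pid ;
  return_unit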
Lwt.t val is_locked : string -> bool tzresult Lwt.t + val get_pid : string -> int tzresult Lwt.t diff --git a/src/lib_stdlib_unix/lwt_log_sink_unix.ml b/src/lib_stdlib_unix/lwt_log_sink_unix.ml index 19ba5b09f357a12a19dbd8be3512cd9acae3642a..696ec7ba260b4d633f938da80fc7025900724bea 100644 --- a/src/lib_stdlib_unix/lwt_log_sink_unix.ml +++ b/src/lib_stdlib_unix/lwt_log_sink_unix.ml @@ -26,7 +26,6 @@ open Lwt.Infix module Output = struct - type t = | Null | Stdout @@ -35,61 +34,114 @@ module Output = struct | Syslog of Lwt_log.syslog_facility let to_string : t -> string = function - | Null -> "/dev/null" - | Stdout -> "stdout" - | Stderr -> "stderr" - | File fp -> fp - | Syslog `Auth -> "syslog:auth" - | Syslog `Authpriv -> "syslog:authpriv" - | Syslog `Cron -> "syslog:cron" - | Syslog `Daemon -> "syslog:daemon" - | Syslog `FTP -> "syslog:ftp" - | Syslog `Kernel -> "syslog:kernel" - | Syslog `Local0 -> "syslog:local0" - | Syslog `Local1 -> "syslog:local1" - | Syslog `Local2 -> "syslog:local2" - | Syslog `Local3 -> "syslog:local3" - | Syslog `Local4 -> "syslog:local4" - | Syslog `Local5 -> "syslog:local5" - | Syslog `Local6 -> "syslog:local6" - | Syslog `Local7 -> "syslog:local7" - | Syslog `LPR -> "syslog:lpr" - | Syslog `Mail -> "syslog:mail" - | Syslog `News -> "syslog:news" - | Syslog `Syslog -> "syslog:syslog" - | Syslog `User -> "syslog:user" - | Syslog `UUCP -> "syslog:uucp" - | Syslog `NTP -> "syslog:ntp" - | Syslog `Security -> "syslog:security" - | Syslog `Console -> "syslog:console" + | Null -> + "/dev/null" + | Stdout -> + "stdout" + | Stderr -> + "stderr" + | File fp -> + fp + | Syslog `Auth -> + "syslog:auth" + | Syslog `Authpriv -> + "syslog:authpriv" + | Syslog `Cron -> + "syslog:cron" + | Syslog `Daemon -> + "syslog:daemon" + | Syslog `FTP -> + "syslog:ftp" + | Syslog `Kernel -> + "syslog:kernel" + | Syslog `Local0 -> + "syslog:local0" + | Syslog `Local1 -> + "syslog:local1" + | Syslog `Local2 -> + "syslog:local2" + | Syslog `Local3 -> + "syslog:local3" + | Syslog `Local4 -> + "syslog:local4" + | Syslog `Local5 -> + "syslog:local5" + | Syslog `Local6 -> + "syslog:local6" + | Syslog `Local7 -> + "syslog:local7" + | Syslog `LPR -> + "syslog:lpr" + | Syslog `Mail -> + "syslog:mail" + | Syslog `News -> + "syslog:news" + | Syslog `Syslog -> + "syslog:syslog" + | Syslog `User -> + "syslog:user" + | Syslog `UUCP -> + "syslog:uucp" + | Syslog `NTP -> + "syslog:ntp" + | Syslog `Security -> + "syslog:security" + | Syslog `Console -> + "syslog:console" let of_string : string -> t = function - | "/dev/null" | "null" -> Null - | "stdout" -> Stdout - | "stderr" -> Stderr - | "syslog:auth" -> Syslog `Auth - | "syslog:authpriv" -> Syslog `Authpriv - | "syslog:cron" -> Syslog `Cron - | "syslog:daemon" -> Syslog `Daemon - | "syslog:ftp" -> Syslog `FTP - | "syslog:kernel" -> Syslog `Kernel - | "syslog:local0" -> Syslog `Local0 - | "syslog:local1" -> Syslog `Local1 - | "syslog:local2" -> Syslog `Local2 - | "syslog:local3" -> Syslog `Local3 - | "syslog:local4" -> Syslog `Local4 - | "syslog:local5" -> Syslog `Local5 - | "syslog:local6" -> Syslog `Local6 - | "syslog:local7" -> Syslog `Local7 - | "syslog:lpr" -> Syslog `LPR - | "syslog:mail" -> Syslog `Mail - | "syslog:news" -> Syslog `News - | "syslog:syslog" -> Syslog `Syslog - | "syslog:user" -> Syslog `User - | "syslog:uucp" -> Syslog `UUCP - | "syslog:ntp" -> Syslog `NTP - | "syslog:security" -> Syslog `Security - | "syslog:console" -> Syslog `Console + | "/dev/null" | "null" -> + Null + | "stdout" -> + Stdout + | "stderr" -> + Stderr + | 
"syslog:auth" -> + Syslog `Auth + | "syslog:authpriv" -> + Syslog `Authpriv + | "syslog:cron" -> + Syslog `Cron + | "syslog:daemon" -> + Syslog `Daemon + | "syslog:ftp" -> + Syslog `FTP + | "syslog:kernel" -> + Syslog `Kernel + | "syslog:local0" -> + Syslog `Local0 + | "syslog:local1" -> + Syslog `Local1 + | "syslog:local2" -> + Syslog `Local2 + | "syslog:local3" -> + Syslog `Local3 + | "syslog:local4" -> + Syslog `Local4 + | "syslog:local5" -> + Syslog `Local5 + | "syslog:local6" -> + Syslog `Local6 + | "syslog:local7" -> + Syslog `Local7 + | "syslog:lpr" -> + Syslog `LPR + | "syslog:mail" -> + Syslog `Mail + | "syslog:news" -> + Syslog `News + | "syslog:syslog" -> + Syslog `Syslog + | "syslog:user" -> + Syslog `User + | "syslog:uucp" -> + Syslog `UUCP + | "syslog:ntp" -> + Syslog `NTP + | "syslog:security" -> + Syslog `Security + | "syslog:console" -> + Syslog `Console (* | s when start_with "syslog:" FIXME error or warning. *) | fp -> (* TODO check absolute path *) @@ -100,113 +152,119 @@ module Output = struct conv to_string of_string string let of_string str = - try - Some (Data_encoding.Json.destruct encoding (`String str)) + try Some (Data_encoding.Json.destruct encoding (`String str)) with _ -> None let to_string output = match Data_encoding.Json.construct encoding output with - | `String res -> res - | #Data_encoding.json -> assert false + | `String res -> + res + | #Data_encoding.json -> + assert false - let pp fmt output = - Format.fprintf fmt "%s" (to_string output) + let pp fmt output = Format.fprintf fmt "%s" (to_string output) end let default_template = "$(date) - $(section): $(message)" type cfg = { - output : Output.t ; - default_level : Internal_event.level ; - rules : string option ; - template : Lwt_log_core.template ; + output : Output.t; + default_level : Internal_event.level; + rules : string option; + template : Lwt_log_core.template } -let create_cfg - ?(output = Output.Stderr) - ?(default_level = Internal_event.Notice) - ?rules ?(template = default_template) () = - { output ; default_level ; rules ; template } +let create_cfg ?(output = Output.Stderr) + ?(default_level = Internal_event.Notice) ?rules + ?(template = default_template) () = + {output; default_level; rules; template} let default_cfg = create_cfg () let cfg_encoding = let open Data_encoding in conv - (fun {output ; default_level ; rules ; template } -> - (output, default_level, rules, template)) + (fun {output; default_level; rules; template} -> + (output, default_level, rules, template)) (fun (output, default_level, rules, template) -> - { output ; default_level ; rules ; template }) + {output; default_level; rules; template}) (obj4 - (dft "output" - ~description: "Output for the logging function. Either 'stdout', \ - 'stderr' or the name of a log file ." - Output.encoding default_cfg.output) - (dft "level" - ~description: "Verbosity level: one of 'fatal', 'error', 'warn',\ - 'notice', 'info', 'debug'." - Internal_event.Level.encoding default_cfg.default_level) - (opt "rules" - ~description: "Fine-grained logging instructions. Same format as \ - described in `tezos-node run --help`, DEBUG section. \ - In the example below, sections 'p2p' and all sections \ - starting by 'client' will have their messages logged \ - up to the debug level, whereas the rest of log sections \ - will be logged up to the notice level." + (dft + "output" + ~description: + "Output for the logging function. Either 'stdout', 'stderr' or \ + the name of a log file ." 
+ Output.encoding + default_cfg.output) + (dft + "level" + ~description: + "Verbosity level: one of 'fatal', 'error', 'warn','notice', \ + 'info', 'debug'." + Internal_event.Level.encoding + default_cfg.default_level) + (opt + "rules" + ~description: + "Fine-grained logging instructions. Same format as described in \ + `tezos-node run --help`, DEBUG section. In the example below, \ + sections 'p2p' and all sections starting by 'client' will have \ + their messages logged up to the debug level, whereas the rest of \ + log sections will be logged up to the notice level." string) - (dft "template" - ~description: "Format for the log file, see \ - http://ocsigen.org/lwt/dev/api/Lwt_log_core#2_Logtemplates." - string default_cfg.template)) + (dft + "template" + ~description: + "Format for the log file, see \ + http://ocsigen.org/lwt/dev/api/Lwt_log_core#2_Logtemplates." + string + default_cfg.template)) let init ?(template = default_template) output = let open Output in - begin - match output with - | Stderr -> - Lwt.return @@ - Lwt_log.channel ~template ~close_mode:`Keep ~channel:Lwt_io.stderr () - | Stdout -> - Lwt.return @@ - Lwt_log.channel ~template ~close_mode:`Keep ~channel:Lwt_io.stdout () - | File file_name -> - Lwt_log.file ~file_name ~template () - | Null -> - Lwt.return @@ - Lwt_log.null - | Syslog facility -> - Lwt.return @@ - Lwt_log.syslog ~template ~facility () - end >>= fun logger -> + ( match output with + | Stderr -> + Lwt.return + @@ Lwt_log.channel ~template ~close_mode:`Keep ~channel:Lwt_io.stderr () + | Stdout -> + Lwt.return + @@ Lwt_log.channel ~template ~close_mode:`Keep ~channel:Lwt_io.stdout () + | File file_name -> + Lwt_log.file ~file_name ~template () + | Null -> + Lwt.return @@ Lwt_log.null + | Syslog facility -> + Lwt.return @@ Lwt_log.syslog ~template ~facility () ) + >>= fun logger -> Lwt_log.default := logger ; Lwt.return_unit let find_log_rules default = match Sys.(getenv_opt "TEZOS_LOG", getenv_opt "LWT_LOG") with - | Some rules, None -> "environment variable TEZOS_LOG", Some rules - | None, Some rules -> "environment variable LWT_LOG", Some rules - | None, None -> "configuration file", default - | Some rules, Some _ -> + | (Some rules, None) -> + ("environment variable TEZOS_LOG", Some rules) + | (None, Some rules) -> + ("environment variable LWT_LOG", Some rules) + | (None, None) -> + ("configuration file", default) + | (Some rules, Some _) -> Format.eprintf - "@[<v 2>@{<warning>@{<title>Warning@}@} \ - Both environment variables TEZOS_LOG and LWT_LOG \ - defined, using TEZOS_LOG.@]@\n@." ; - "environment varible TEZOS_LOG", Some rules + "@[<v 2>@{<warning>@{<title>Warning@}@} Both environment variables \ + TEZOS_LOG and LWT_LOG defined, using TEZOS_LOG.@]@\n\ + @." 
; + ("environment varible TEZOS_LOG", Some rules) let initialize ?(cfg = default_cfg) () = - Lwt_log_core.add_rule "*" - (Internal_event.Level.to_lwt_log cfg.default_level) ; - let origin, rules = find_log_rules cfg.rules in - begin match rules with - | None -> Lwt.return_unit - | Some rules -> - try - Lwt_log_core.load_rules rules ~fail_on_error:true ; - Lwt.return_unit - with _ -> - Printf.ksprintf Lwt.fail_with - "Incorrect log rules defined in %s" origin - end >>= fun () -> - init ~template:cfg.template cfg.output - - + Lwt_log_core.add_rule "*" (Internal_event.Level.to_lwt_log cfg.default_level) ; + let (origin, rules) = find_log_rules cfg.rules in + ( match rules with + | None -> + Lwt.return_unit + | Some rules -> ( + try + Lwt_log_core.load_rules rules ~fail_on_error:true ; + Lwt.return_unit + with _ -> + Printf.ksprintf Lwt.fail_with "Incorrect log rules defined in %s" origin + ) ) + >>= fun () -> init ~template:cfg.template cfg.output diff --git a/src/lib_stdlib_unix/lwt_log_sink_unix.mli b/src/lib_stdlib_unix/lwt_log_sink_unix.mli index fb66146e6282f93602a83bdf2bfc25d8f81ba412..2d4e26fdf679b7776ed7ab33ada9d079bd141f0d 100644 --- a/src/lib_stdlib_unix/lwt_log_sink_unix.mli +++ b/src/lib_stdlib_unix/lwt_log_sink_unix.mli @@ -32,16 +32,19 @@ module Output : sig | Syslog of Lwt_log.syslog_facility val encoding : t Data_encoding.t + val of_string : string -> t option + val to_string : t -> string + val pp : Format.formatter -> t -> unit end type cfg = { - output : Output.t ; - default_level : Internal_event.level ; - rules : string option ; - template : Lwt_log_core.template ; + output : Output.t; + default_level : Internal_event.level; + rules : string option; + template : Lwt_log_core.template } val default_cfg : cfg @@ -50,12 +53,14 @@ val create_cfg : ?output:Output.t -> ?default_level:Internal_event.level -> ?rules:string -> - ?template:Lwt_log_core.template -> unit -> cfg + ?template:Lwt_log_core.template -> + unit -> + cfg val cfg_encoding : cfg Data_encoding.t -val initialize: ?cfg:cfg -> unit -> unit Lwt.t (** Configure the event-logging sink defined in {!Internal_event.Lwt_log_sink} by merging the contents of [?cfg] (default: {!default_cfg}) and the value of the ["TEZOS_LOG"] environment variable. 
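(* Aside: a sketch of building a logging configuration with [create_cfg] and
   the [Output] type above.  The output value follows [Output.of_string]
   ("stdout", "stderr", "syslog:<facility>" or a file path); the rules string
   follows the TEZOS_LOG / Lwt_log_core.load_rules format and, like the file
   path, is only illustrative. *)
let setup_logging () =
  let cfg =
    Lwt_log_sink_unix.create_cfg
      ~output:(Lwt_log_sink_unix.Output.File "/var/log/tezos/node.log")
      ~default_level:Internal_event.Info
      ~rules:"p2p* -> debug; * -> notice"
      ()
  in
  Lwt_log_sink_unix.initialize ~cfg ()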
*) +val initialize : ?cfg:cfg -> unit -> unit Lwt.t diff --git a/src/lib_stdlib_unix/lwt_utils_unix.ml b/src/lib_stdlib_unix/lwt_utils_unix.ml index 42cd48126d0635192d953dfbf1f8cec662e71699..05b704b43bc78a760e9faacec3a47916c2444c17 100644 --- a/src/lib_stdlib_unix/lwt_utils_unix.ml +++ b/src/lib_stdlib_unix/lwt_utils_unix.ml @@ -34,97 +34,112 @@ let () = (function | Exn (Unix.Unix_error (err, fn, _)) -> Some ("Unix error in " ^ fn ^ ": " ^ Unix.error_message err) - | _ -> None) + | _ -> + None) (fun msg -> Exn (Failure msg)) let read_bytes ?(pos = 0) ?len fd buf = let len = match len with None -> Bytes.length buf - pos | Some l -> l in let rec inner pos len = - if len = 0 then - Lwt.return_unit + if len = 0 then Lwt.return_unit else - Lwt_unix.read fd buf pos len >>= function - | 0 -> Lwt.fail End_of_file (* other endpoint cleanly closed its connection *) - | nb_read -> inner (pos + nb_read) (len - nb_read) + Lwt_unix.read fd buf pos len + >>= function + | 0 -> + Lwt.fail End_of_file + (* other endpoint cleanly closed its connection *) + | nb_read -> + inner (pos + nb_read) (len - nb_read) in inner pos len let read_string ~len fd = let b = Bytes.create len in - read_bytes fd b >>= fun () -> - Lwt.return @@ Bytes.to_string b + read_bytes fd b >>= fun () -> Lwt.return @@ Bytes.to_string b -let read_mbytes ?(pos=0) ?len fd buf = +let read_mbytes ?(pos = 0) ?len fd buf = let len = match len with None -> MBytes.length buf - pos | Some l -> l in let rec inner pos len = - if len = 0 then - Lwt.return_unit + if len = 0 then Lwt.return_unit else - Lwt_bytes.read fd buf pos len >>= function - | 0 -> Lwt.fail End_of_file (* other endpoint cleanly closed its connection *) - | nb_read -> inner (pos + nb_read) (len - nb_read) + Lwt_bytes.read fd buf pos len + >>= function + | 0 -> + Lwt.fail End_of_file + (* other endpoint cleanly closed its connection *) + | nb_read -> + inner (pos + nb_read) (len - nb_read) in inner pos len -let write_mbytes ?(pos=0) ?len descr buf = +let write_mbytes ?(pos = 0) ?len descr buf = let len = match len with None -> MBytes.length buf - pos | Some l -> l in let rec inner pos len = - if len = 0 then - Lwt.return_unit + if len = 0 then Lwt.return_unit else - Lwt_bytes.write descr buf pos len >>= function - | 0 -> Lwt.fail End_of_file (* other endpoint cleanly closed its connection *) - | nb_written -> inner (pos + nb_written) (len - nb_written) in + Lwt_bytes.write descr buf pos len + >>= function + | 0 -> + Lwt.fail End_of_file + (* other endpoint cleanly closed its connection *) + | nb_written -> + inner (pos + nb_written) (len - nb_written) + in inner pos len -let write_bytes ?(pos=0) ?len descr buf = +let write_bytes ?(pos = 0) ?len descr buf = let len = match len with None -> Bytes.length buf - pos | Some l -> l in let rec inner pos len = - if len = 0 then - Lwt.return_unit + if len = 0 then Lwt.return_unit else - Lwt_unix.write descr buf pos len >>= function - | 0 -> Lwt.fail End_of_file (* other endpoint cleanly closed its connection *) - | nb_written -> inner (pos + nb_written) (len - nb_written) in + Lwt_unix.write descr buf pos len + >>= function + | 0 -> + Lwt.fail End_of_file + (* other endpoint cleanly closed its connection *) + | nb_written -> + inner (pos + nb_written) (len - nb_written) + in inner pos len -let write_string ?(pos=0) ?len descr buf = +let write_string ?(pos = 0) ?len descr buf = let len = match len with None -> String.length buf - pos | Some l -> l in let rec inner pos len = - if len = 0 then - Lwt.return_unit + if len = 0 then Lwt.return_unit 
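(* Aside: a sketch of the exact-length read/write helpers defined here.
   [read_string ~len] and [write_string] loop until [len] bytes have been
   transferred and fail with [End_of_file] if the peer closes first; [fd] is
   any connected [Lwt_unix.file_descr]. *)
let echo_n_bytes fd n =
  let open Lwt.Infix in
  Lwt_utils_unix.read_string ~len:n fd
  >>= fun msg -> Lwt_utils_unix.write_string fd msg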
else - Lwt_unix.write_string descr buf pos len >>= function - | 0 -> Lwt.fail End_of_file (* other endpoint cleanly closed its connection *) - | nb_written -> inner (pos + nb_written) (len - nb_written) in + Lwt_unix.write_string descr buf pos len + >>= function + | 0 -> + Lwt.fail End_of_file + (* other endpoint cleanly closed its connection *) + | nb_written -> + inner (pos + nb_written) (len - nb_written) + in inner pos len -let (>>=) = Lwt.bind +let ( >>= ) = Lwt.bind let remove_dir dir = let rec remove dir = let files = Lwt_unix.files_of_directory dir in Lwt_stream.iter_s (fun file -> - if file = "." || file = ".." then - Lwt.return_unit - else begin - let file = Filename.concat dir file in - if Sys.is_directory file - then remove file - else Lwt_unix.unlink file - end) - files >>= fun () -> - Lwt_unix.rmdir dir in - if Sys.file_exists dir && Sys.is_directory dir then - remove dir - else - Lwt.return_unit + if file = "." || file = ".." then Lwt.return_unit + else + let file = Filename.concat dir file in + if Sys.is_directory file then remove file else Lwt_unix.unlink file) + files + >>= fun () -> Lwt_unix.rmdir dir + in + if Sys.file_exists dir && Sys.is_directory dir then remove dir + else Lwt.return_unit let rec create_dir ?(perm = 0o755) dir = - Lwt_unix.file_exists dir >>= function + Lwt_unix.file_exists dir + >>= function | false -> - create_dir (Filename.dirname dir) >>= fun () -> + create_dir (Filename.dirname dir) + >>= fun () -> Lwt.catch (fun () -> Lwt_unix.mkdir dir perm) (function @@ -132,47 +147,47 @@ let rec create_dir ?(perm = 0o755) dir = (* This is the case where the directory has been created by another Lwt.t, after the call to Lwt_unix.file_exists. *) Lwt.return_unit - | e -> Lwt.fail e) - | true -> - Lwt_unix.stat dir >>= function - | { st_kind = S_DIR ; _ } -> Lwt.return_unit - | _ -> Pervasives.failwith "Not a directory" + | e -> + Lwt.fail e) + | true -> ( + Lwt_unix.stat dir + >>= function + | {st_kind = S_DIR; _} -> + Lwt.return_unit + | _ -> + Pervasives.failwith "Not a directory" ) let create_file ?(perm = 0o644) name content = - Lwt_unix.openfile name Unix.([O_TRUNC; O_CREAT; O_WRONLY]) perm >>= fun fd -> - Lwt_unix.write_string fd content 0 (String.length content) >>= fun _ -> - Lwt_unix.close fd - -let read_file fn = - Lwt_io.with_file fn ~mode:Input begin fun ch -> - Lwt_io.read ch - end - + Lwt_unix.openfile name Unix.[O_TRUNC; O_CREAT; O_WRONLY] perm + >>= fun fd -> + Lwt_unix.write_string fd content 0 (String.length content) + >>= fun _ -> Lwt_unix.close fd +let read_file fn = Lwt_io.with_file fn ~mode:Input (fun ch -> Lwt_io.read ch) let safe_close fd = - Lwt.catch - (fun () -> Lwt_unix.close fd) - (fun _ -> Lwt.return_unit) - - + Lwt.catch (fun () -> Lwt_unix.close fd) (fun _ -> Lwt.return_unit) let of_sockaddr = function - | Unix.ADDR_UNIX _ -> None - | Unix.ADDR_INET (addr, port) -> - match Ipaddr_unix.of_inet_addr addr with - | V4 addr -> Some (Ipaddr.v6_of_v4 addr, port) - | V6 addr -> Some (addr, port) + | Unix.ADDR_UNIX _ -> + None + | Unix.ADDR_INET (addr, port) -> ( + match Ipaddr_unix.of_inet_addr addr with + | V4 addr -> + Some (Ipaddr.v6_of_v4 addr, port) + | V6 addr -> + Some (addr, port) ) let getaddrinfo ~passive ~node ~service = let open Lwt_unix in - getaddrinfo node service - ( AI_SOCKTYPE SOCK_STREAM :: - (if passive then [AI_PASSIVE] else []) ) >>= fun addr -> + getaddrinfo + node + service + (AI_SOCKTYPE SOCK_STREAM :: (if passive then [AI_PASSIVE] else [])) + >>= fun addr -> let points = - TzList.filter_map - (fun { ai_addr ; _ 
} -> of_sockaddr ai_addr) - addr in + TzList.filter_map (fun {ai_addr; _} -> of_sockaddr ai_addr) addr + in Lwt.return points let getpass () = @@ -193,114 +208,121 @@ let getpass () = passwd module Json = struct - let to_root = function - | `O ctns -> `O ctns - | `A ctns -> `A ctns - | `Null -> `O [] - | oth -> `A [ oth ] + | `O ctns -> + `O ctns + | `A ctns -> + `A ctns + | `Null -> + `O [] + | oth -> + `A [oth] let write_file file json = let json = to_root json in - protect begin fun () -> - Lwt_io.with_file ~mode:Output file begin fun chan -> - let str = Data_encoding.Json.to_string ~minify:false json in - Lwt_io.write chan str >>= fun _ -> - return_unit - end - end + protect (fun () -> + Lwt_io.with_file ~mode:Output file (fun chan -> + let str = Data_encoding.Json.to_string ~minify:false json in + Lwt_io.write chan str >>= fun _ -> return_unit)) let read_file file = - protect begin fun () -> - Lwt_io.with_file ~mode:Input file begin fun chan -> - Lwt_io.read chan >>= fun str -> - return (Ezjsonm.from_string str :> Data_encoding.json) - end - end - + protect (fun () -> + Lwt_io.with_file ~mode:Input file (fun chan -> + Lwt_io.read chan + >>= fun str -> + return (Ezjsonm.from_string str :> Data_encoding.json))) end module Protocol = struct - let name = "TEZOS_PROTOCOL" open Protocol - let (//) = Filename.concat + let ( // ) = Filename.concat let to_file ~dir:dirname ?hash ?env_version modules = let config_file = Data_encoding.Json.construct Meta.encoding - { hash ; expected_env_version = env_version ; modules } in + {hash; expected_env_version = env_version; modules} + in Json.write_file (dirname // name) config_file let of_file ~dir:dirname = - Json.read_file (dirname // name) >>=? fun json -> - return (Data_encoding.Json.destruct Meta.encoding json) + Json.read_file (dirname // name) + >>=? fun json -> return (Data_encoding.Json.destruct Meta.encoding json) let find_component dirname module_name = let name_lowercase = String.uncapitalize_ascii module_name in - let implementation = dirname // name_lowercase ^ ".ml" in + let implementation = (dirname // name_lowercase) ^ ".ml" in let interface = implementation ^ "i" in - match Sys.file_exists implementation, Sys.file_exists interface with - | false, _ -> Pervasives.failwith @@ "Not such file: " ^ implementation - | true, false -> - read_file implementation >|= fun implementation -> - { name = module_name; interface = None; implementation } + match (Sys.file_exists implementation, Sys.file_exists interface) with + | (false, _) -> + Pervasives.failwith @@ "Not such file: " ^ implementation + | (true, false) -> + read_file implementation + >|= fun implementation -> + {name = module_name; interface = None; implementation} | _ -> - read_file interface >>= fun interface -> - read_file implementation >|= fun implementation -> - { name = module_name; interface = Some interface; implementation } + read_file interface + >>= fun interface -> + read_file implementation + >|= fun implementation -> + {name = module_name; interface = Some interface; implementation} let read_dir dir = - of_file ~dir >>=? fun meta -> - Lwt_list.map_p (find_component dir) meta.modules >>= fun components -> + of_file ~dir + >>=? 
fun meta -> + Lwt_list.map_p (find_component dir) meta.modules + >>= fun components -> let expected_env = - match meta.expected_env_version with - | None -> V1 - | Some v -> v in - return (meta.hash, { expected_env ; components }) + match meta.expected_env_version with None -> V1 | Some v -> v + in + return (meta.hash, {expected_env; components}) open Lwt.Infix let create_files dir units = - remove_dir dir >>= fun () -> - create_dir dir >>= fun () -> + remove_dir dir + >>= fun () -> + create_dir dir + >>= fun () -> Lwt_list.map_s - (fun { name ; interface ; implementation } -> - let name = String.lowercase_ascii name in - let ml = dir // (name ^ ".ml") in - let mli = dir // (name ^ ".mli") in - create_file ml implementation >>= fun () -> - match interface with - | None -> Lwt.return [ml] - | Some content -> - create_file mli content >>= fun () -> - Lwt.return [ mli ; ml ]) - units >>= fun files -> + (fun {name; interface; implementation} -> + let name = String.lowercase_ascii name in + let ml = dir // (name ^ ".ml") in + let mli = dir // (name ^ ".mli") in + create_file ml implementation + >>= fun () -> + match interface with + | None -> + Lwt.return [ml] + | Some content -> + create_file mli content >>= fun () -> Lwt.return [mli; ml]) + units + >>= fun files -> let files = List.concat files in Lwt.return files - let write_dir dir ?hash (p: t) = - create_files dir p.components >>= fun _files -> + let write_dir dir ?hash (p : t) = + create_files dir p.components + >>= fun _files -> to_file ~dir ?hash ~env_version:p.expected_env - (List.map (fun { name ; _ } -> String.capitalize_ascii name) p.components) - + (List.map (fun {name; _} -> String.capitalize_ascii name) p.components) end let with_tempdir name f = let base_dir = Filename.temp_file name "" in - Lwt_unix.unlink base_dir >>= fun () -> - Lwt_unix.mkdir base_dir 0o700 >>= fun () -> + Lwt_unix.unlink base_dir + >>= fun () -> + Lwt_unix.mkdir base_dir 0o700 + >>= fun () -> Lwt.finalize (fun () -> f base_dir) (fun () -> remove_dir base_dir) - module Socket = struct - type addr = | Unix of string | Tcp of string * string * Unix.getaddrinfo_option list @@ -308,80 +330,97 @@ module Socket = struct let handle_litteral_ipv6 host = (* To strip '[' and ']' when a litteral IPv6 is provided *) match Ipaddr.of_string host with - | Error (`Msg _) -> host - | Ok ipaddr -> Ipaddr.to_string ipaddr + | Error (`Msg _) -> + host + | Ok ipaddr -> + Ipaddr.to_string ipaddr - let connect ?(timeout=5.) = function + let connect ?(timeout = 5.) 
= function | Unix path -> let addr = Lwt_unix.ADDR_UNIX path in let sock = Lwt_unix.socket PF_UNIX SOCK_STREAM 0 in - Lwt_unix.connect sock addr >>= fun () -> - return sock - | Tcp (host, service, opts) -> + Lwt_unix.connect sock addr >>= fun () -> return sock + | Tcp (host, service, opts) -> ( let host = handle_litteral_ipv6 host in - Lwt_unix.getaddrinfo host service opts >>= function + Lwt_unix.getaddrinfo host service opts + >>= function | [] -> failwith "could not resolve host '%s'" host | addrs -> let rec try_connect acc = function | [] -> Lwt.return - (Error (failure "could not connect to '%s'" host :: List.rev acc)) - | { Unix.ai_family ; ai_socktype ; ai_protocol ; ai_addr ; _ } :: addrs -> - let sock = Lwt_unix.socket ai_family ai_socktype ai_protocol in - protect ~on_error:begin fun e -> - Lwt_unix.close sock >>= fun () -> - Lwt.return_error e - end begin fun () -> - with_timeout (Lwt_unix.sleep timeout) (fun _c -> - Lwt_unix.connect sock ai_addr >>= fun () -> - return sock) - end >>= function - | Ok sock -> return sock + (Error + ( failure "could not connect to '%s'" host + :: List.rev acc )) + | {Unix.ai_family; ai_socktype; ai_protocol; ai_addr; _} :: addrs + -> ( + let sock = + Lwt_unix.socket ai_family ai_socktype ai_protocol + in + protect + ~on_error:(fun e -> + Lwt_unix.close sock >>= fun () -> Lwt.return_error e) + (fun () -> + with_timeout (Lwt_unix.sleep timeout) (fun _c -> + Lwt_unix.connect sock ai_addr + >>= fun () -> return sock)) + >>= function + | Ok sock -> + return sock | Error e -> - try_connect (e @ acc) addrs in - try_connect [] addrs + try_connect (e @ acc) addrs ) + in + try_connect [] addrs ) let bind ?(backlog = 10) = function | Unix path -> let addr = Lwt_unix.ADDR_UNIX path in let sock = Lwt_unix.socket PF_UNIX SOCK_STREAM 0 in - Lwt_unix.bind sock addr >>= fun () -> + Lwt_unix.bind sock addr + >>= fun () -> Lwt_unix.listen sock backlog ; return [sock] - | Tcp (host, service, opts) -> + | Tcp (host, service, opts) -> ( Lwt_unix.getaddrinfo - (handle_litteral_ipv6 host) service (AI_PASSIVE :: opts) >>= function - | [] -> failwith "could not resolve host '%s'" host + (handle_litteral_ipv6 host) + service + (AI_PASSIVE :: opts) + >>= function + | [] -> + failwith "could not resolve host '%s'" host | addrs -> - let do_bind { Unix.ai_family ; ai_socktype ; ai_protocol ; ai_addr ; _ } = + let do_bind {Unix.ai_family; ai_socktype; ai_protocol; ai_addr; _} + = let sock = Lwt_unix.socket ai_family ai_socktype ai_protocol in Lwt_unix.setsockopt sock SO_REUSEADDR true ; - Lwt_unix.bind sock ai_addr >>= fun () -> + Lwt_unix.bind sock ai_addr + >>= fun () -> Lwt_unix.listen sock backlog ; - return sock in - map_s do_bind addrs + return sock + in + map_s do_bind addrs ) - type error += - | Encoding_error - | Decoding_error + type error += Encoding_error | Decoding_error let () = - register_error_kind `Permanent - ~id: "signer.encoding_error" - ~title: "Encoding_error" - ~description: "Error while encoding a remote signer message" - ~pp: (fun ppf () -> - Format.fprintf ppf "Could not encode a remote signer message") + register_error_kind + `Permanent + ~id:"signer.encoding_error" + ~title:"Encoding_error" + ~description:"Error while encoding a remote signer message" + ~pp:(fun ppf () -> + Format.fprintf ppf "Could not encode a remote signer message") Data_encoding.empty (function Encoding_error -> Some () | _ -> None) (fun () -> Encoding_error) ; - register_error_kind `Permanent - ~id: "signer.decoding_error" - ~title: "Decoding_error" - ~description: "Error while 
decoding a remote signer message" - ~pp: (fun ppf () -> - Format.fprintf ppf "Could not decode a remote signer message") + register_error_kind + `Permanent + ~id:"signer.decoding_error" + ~title:"Decoding_error" + ~description:"Error while decoding a remote signer message" + ~pp:(fun ppf () -> + Format.fprintf ppf "Could not decode a remote signer message") Data_encoding.empty (function Decoding_error -> Some () | _ -> None) (fun () -> Decoding_error) @@ -392,49 +431,51 @@ module Socket = struct let encoded_message_len = Data_encoding.Binary.length encoding message in fail_unless (encoded_message_len < 1 lsl (message_len_size * 8)) - Encoding_error >>=? fun () -> + Encoding_error + >>=? fun () -> (* len is the length of int16 plus the length of the message we want to send *) let len = message_len_size + encoded_message_len in let buf = MBytes.create len in - match Data_encoding.Binary.write - encoding message buf message_len_size encoded_message_len with + match + Data_encoding.Binary.write + encoding + message + buf + message_len_size + encoded_message_len + with | None -> fail Encoding_error | Some last -> - fail_unless (last = len) Encoding_error >>=? fun () -> + fail_unless (last = len) Encoding_error + >>=? fun () -> (* we set the beginning of the buf with the length of what is next *) MBytes.set_int16 buf 0 encoded_message_len ; - write_mbytes fd buf >>= fun () -> - return_unit + write_mbytes fd buf >>= fun () -> return_unit let recv fd encoding = let header_buf = MBytes.create message_len_size in - read_mbytes ~len:message_len_size fd header_buf >>= fun () -> + read_mbytes ~len:message_len_size fd header_buf + >>= fun () -> let len = MBytes.get_uint16 header_buf 0 in let buf = MBytes.create len in - read_mbytes ~len fd buf >>= fun () -> + read_mbytes ~len fd buf + >>= fun () -> match Data_encoding.Binary.read encoding buf 0 len with | None -> fail Decoding_error | Some (read_len, message) -> - if read_len <> len then - fail Decoding_error - else - return message - + if read_len <> len then fail Decoding_error else return message end - -let rec retry ?(log=(fun _ -> Lwt.return_unit)) ?(n=5) ?(sleep=1.) f = - f () >>= function - | Ok r -> Lwt.return_ok r - | (Error error) as x -> +let rec retry ?(log = fun _ -> Lwt.return_unit) ?(n = 5) ?(sleep = 1.) 
f = + f () + >>= function + | Ok r -> + Lwt.return_ok r + | Error error as x -> if n > 0 then - begin - log error >>= fun () -> - Lwt_unix.sleep sleep >>= fun () -> - retry ~log ~n:(n-1) ~sleep f - end - else - Lwt.return x - + log error + >>= fun () -> + Lwt_unix.sleep sleep >>= fun () -> retry ~log ~n:(n - 1) ~sleep f + else Lwt.return x diff --git a/src/lib_stdlib_unix/lwt_utils_unix.mli b/src/lib_stdlib_unix/lwt_utils_unix.mli index a9bedd06cfc5ffeab6ba08272624dc52836f372e..d8b726df20e37812e6ce1010254c2cb11f3e0b7d 100644 --- a/src/lib_stdlib_unix/lwt_utils_unix.mli +++ b/src/lib_stdlib_unix/lwt_utils_unix.mli @@ -25,33 +25,39 @@ open Error_monad -val read_string: len:int -> Lwt_unix.file_descr -> string Lwt.t +val read_string : len:int -> Lwt_unix.file_descr -> string Lwt.t -val read_bytes: +val read_bytes : ?pos:int -> ?len:int -> Lwt_unix.file_descr -> bytes -> unit Lwt.t -val read_mbytes: +val read_mbytes : ?pos:int -> ?len:int -> Lwt_unix.file_descr -> MBytes.t -> unit Lwt.t -val write_string: +val write_string : ?pos:int -> ?len:int -> Lwt_unix.file_descr -> string -> unit Lwt.t -val write_bytes: + +val write_bytes : ?pos:int -> ?len:int -> Lwt_unix.file_descr -> bytes -> unit Lwt.t -val write_mbytes: + +val write_mbytes : ?pos:int -> ?len:int -> Lwt_unix.file_descr -> MBytes.t -> unit Lwt.t -val remove_dir: string -> unit Lwt.t -val create_dir: ?perm:int -> string -> unit Lwt.t -val read_file: string -> string Lwt.t -val create_file: ?perm:int -> string -> string -> unit Lwt.t +val remove_dir : string -> unit Lwt.t + +val create_dir : ?perm:int -> string -> unit Lwt.t -val with_tempdir: string -> (string -> 'a Lwt.t) -> 'a Lwt.t +val read_file : string -> string Lwt.t -val safe_close: Lwt_unix.file_descr -> unit Lwt.t +val create_file : ?perm:int -> string -> string -> unit Lwt.t -val getaddrinfo: +val with_tempdir : string -> (string -> 'a Lwt.t) -> 'a Lwt.t + +val safe_close : Lwt_unix.file_descr -> unit Lwt.t + +val getaddrinfo : passive:bool -> - node:string -> service:string -> + node:string -> + service:string -> (Ipaddr.V6.t * int) list Lwt.t (** [getpass ()] reads a password from stdio while setting-up the @@ -59,31 +65,25 @@ val getaddrinfo: val getpass : unit -> string module Json : sig - (** Loads a JSON file in memory *) val read_file : string -> Data_encoding.json tzresult Lwt.t (** (Over)write a JSON file from in memory data *) val write_file : string -> Data_encoding.json -> unit tzresult Lwt.t - end module Protocol : sig + val read_dir : string -> (Protocol_hash.t option * Protocol.t) tzresult Lwt.t - val read_dir: string -> (Protocol_hash.t option * Protocol.t) tzresult Lwt.t - - val write_dir: string -> ?hash:Protocol_hash.t -> Protocol.t -> unit tzresult Lwt.t - + val write_dir : + string -> ?hash:Protocol_hash.t -> Protocol.t -> unit tzresult Lwt.t end module Socket : sig - type addr = | Unix of string | Tcp of string * string * Unix.getaddrinfo_option list - val connect: - ?timeout:float -> addr -> Lwt_unix.file_descr tzresult Lwt.t (** [connect ?timeout addr] tries connecting to [addr] and returns the resulting socket file descriptor on success. When using TCP, [Unix.getaddrinfo] is used to resolve the hostname and service @@ -93,24 +93,21 @@ module Socket : sig connection. If a connection is not obtained in less than [?timeout], the connection is canceled and and the next socket address (if it exists) is tried. 
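      A hypothetical call (host, service and timeout are illustrative,
      not defaults of this module):
      {[
        Lwt_utils_unix.Socket.connect
          ~timeout:2.
          (Lwt_utils_unix.Socket.Tcp ("localhost", "8732", []))
        >>=? fun fd -> (* ... use [fd] ... *) return fd
      ]}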
*) + val connect : ?timeout:float -> addr -> Lwt_unix.file_descr tzresult Lwt.t - val bind: - ?backlog:int -> addr -> Lwt_unix.file_descr list tzresult Lwt.t + val bind : ?backlog:int -> addr -> Lwt_unix.file_descr list tzresult Lwt.t - type error += - | Encoding_error - | Decoding_error + type error += Encoding_error | Decoding_error - val send: + val send : Lwt_unix.file_descr -> 'a Data_encoding.t -> 'a -> unit tzresult Lwt.t - val recv: - Lwt_unix.file_descr -> 'a Data_encoding.t -> 'a tzresult Lwt.t + val recv : Lwt_unix.file_descr -> 'a Data_encoding.t -> 'a tzresult Lwt.t end -val retry: +val retry : ?log:('error -> unit Lwt.t) -> ?n:int -> ?sleep:float -> - (unit -> ('a, 'error) result Lwt.t) -> ('a, 'error) result Lwt.t - + (unit -> ('a, 'error) result Lwt.t) -> + ('a, 'error) result Lwt.t diff --git a/src/lib_stdlib_unix/moving_average.ml b/src/lib_stdlib_unix/moving_average.ml index bc2ed752a319e2aa93e4ad53a86184ff13e0086e..f9e8812aca4ba3dc0a8f3f6765a121f943428931 100644 --- a/src/lib_stdlib_unix/moving_average.ml +++ b/src/lib_stdlib_unix/moving_average.ml @@ -25,18 +25,20 @@ open Lwt.Infix -module Inttbl = Hashtbl.Make(struct - type t = int - let equal (x: int) (y: int) = x = y - let hash = Hashtbl.hash - end) +module Inttbl = Hashtbl.Make (struct + type t = int + + let equal (x : int) (y : int) = x = y + + let hash = Hashtbl.hash +end) type t = { - id: int; - alpha: int ; - mutable total: int64 ; - mutable current: int ; - mutable average: int ; + id : int; + alpha : int; + mutable total : int64; + mutable current : int; + mutable average : int } let counters = Inttbl.create 51 @@ -44,21 +46,24 @@ let counters = Inttbl.create 51 let updated = Lwt_condition.create () let update_hook = ref [] + let on_update f = update_hook := f :: !update_hook let worker_loop () = let prev = ref @@ Mtime_clock.elapsed () in let rec inner sleep = - sleep >>= fun () -> + sleep + >>= fun () -> let sleep = Lwt_unix.sleep 1. in let now = Mtime_clock.elapsed () in - let elapsed = int_of_float (Mtime.Span.(to_ms now -. to_ms !prev)) in - prev := now; + let elapsed = int_of_float Mtime.Span.(to_ms now -. to_ms !prev) in + prev := now ; Inttbl.iter (fun _ c -> - c.average <- - (c.alpha * c.current) / elapsed + (1000 - c.alpha) * c.average / 1000; - c.current <- 0) + c.average <- + (c.alpha * c.current / elapsed) + + ((1000 - c.alpha) * c.average / 1000) ; + c.current <- 0) counters ; List.iter (fun f -> f ()) !update_hook ; Lwt_condition.broadcast updated () ; @@ -67,14 +72,13 @@ let worker_loop () = inner (Lwt_unix.sleep 1.) let worker = - lazy begin - Lwt.async begin fun () -> - Lwt_utils.worker "counter" - ~on_event:Internal_event.Lwt_worker_event.on_event - ~run:worker_loop - ~cancel:(fun _ -> Lwt.return_unit) - end - end + lazy + (Lwt.async (fun () -> + Lwt_utils.worker + "counter" + ~on_event:Internal_event.Lwt_worker_event.on_event + ~run:worker_loop + ~cancel:(fun _ -> Lwt.return_unit))) let create = let cpt = ref 0 in @@ -84,21 +88,15 @@ let create = incr cpt ; assert (0. < alpha && alpha <= 1.) ; let alpha = int_of_float (1000. *. 
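    (* [alpha] is kept in per-mille so that the 1-second worker can
       update [average] using integer arithmetic only *)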
alpha) in - let c = { id ; alpha ; total = 0L ; current = 0 ; average = init } in - Inttbl.add counters id c ; - c + let c = {id; alpha; total = 0L; current = 0; average = init} in + Inttbl.add counters id c ; c let add c x = c.total <- Int64.(add c.total (of_int x)) ; c.current <- c.current + x -let destroy c = - Inttbl.remove counters c.id +let destroy c = Inttbl.remove counters c.id -type stat = { - total: int64 ; - average: int ; -} +type stat = {total : int64; average : int} -let stat ({ total ; average ; _ } : t) : stat = - { total ; average } +let stat ({total; average; _} : t) : stat = {total; average} diff --git a/src/lib_stdlib_unix/moving_average.mli b/src/lib_stdlib_unix/moving_average.mli index c53b279436455086c068ec9abf5c97f9860a48ac..517559a68adf66252a11d72682d76732da0aaf2f 100644 --- a/src/lib_stdlib_unix/moving_average.mli +++ b/src/lib_stdlib_unix/moving_average.mli @@ -34,33 +34,30 @@ for the algorithm. *) -type t (** Type of one bandwidth counter. *) +type t -val create: init:int -> alpha:float -> t (** [create ~init ~alpha] is a counter with initial value [init] and factor [alpha]. *) +val create : init:int -> alpha:float -> t -val destroy: t -> unit (** [destroy t] removes counter [t] from the internal hash table. *) +val destroy : t -> unit -val add: t -> int -> unit (** [add t id] adds [t] in the internal hash table under identifies [id]. *) +val add : t -> int -> unit -val on_update: (unit -> unit) -> unit (** [of_update f] registers [f] to be called on each update of the internal worker (currently every 1s). *) +val on_update : (unit -> unit) -> unit -val updated: unit Lwt_condition.t (** [updated] is a condition variable that gets signaled on each update of the internal worker (currently every 1s). *) +val updated : unit Lwt_condition.t -type stat = { - total: int64 ; - average: int ; -} +type stat = {total : int64; average : int} -val stat: t -> stat (** [stat t] is a stat record reflecting the state of [t] at the time of the call. 
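    A small sketch (the module name [Moving_average] is assumed from the
    file name; the numbers are arbitrary):
    {[
      let c = Moving_average.create ~init:0 ~alpha:0.2 in
      Moving_average.add c 4096 ;
      let {Moving_average.total; average} = Moving_average.stat c in
      Format.printf "total=%Ld average=%d@." total average
    ]}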
*) +val stat : t -> stat diff --git a/src/lib_stdlib_unix/sys_info.ml b/src/lib_stdlib_unix/sys_info.ml index 83f5a1f9bfc8bfc2045d70dbca20ad402152d5e8..f6573ddf92acb5e4f549aaa49bca6151f3274ff3 100644 --- a/src/lib_stdlib_unix/sys_info.ml +++ b/src/lib_stdlib_unix/sys_info.ml @@ -32,125 +32,143 @@ let () = ~id:"unix.system_info" ~title:"Unix System_info failure" ~description:"Unix System_info failure" - ~pp:begin fun ppf s -> - Format.fprintf ppf - "@[<v 2>Unix system_info failure %s@]" - s - end + ~pp:(fun ppf s -> + Format.fprintf ppf "@[<v 2>Unix system_info failure %s@]" s) Data_encoding.(obj1 (req "failure" string)) (function Unix_system_info_failure s -> Some s | _ -> None) (fun s -> Unix_system_info_failure s) let error_info process error = Unix_system_info_failure - (Format.asprintf "Unix_system_info_failure (%s: %s)" - process error) + (Format.asprintf "Unix_system_info_failure (%s: %s)" process error) -type sysname = - | Linux - | Darwin - | Unknown of string +type sysname = Linux | Darwin | Unknown of string let uname = Lwt.catch - begin fun () -> - Lwt_process.with_process_in ~env:[| "LC_ALL=C" |] ("uname", [| "uname" |]) - (fun pc -> - Lwt_io.read_line pc#stdout ) >>= function - | "Linux" -> Lwt.return_ok Linux - | "Darwin" -> Lwt.return_ok Darwin - | os -> Lwt.return_ok ( Unknown os) end - begin function exn -> - Lwt.return_error - (error_info "uname" - (Printexc.to_string exn)) end + (fun () -> + Lwt_process.with_process_in + ~env:[|"LC_ALL=C"|] + ("uname", [|"uname"|]) + (fun pc -> Lwt_io.read_line pc#stdout) + >>= function + | "Linux" -> + Lwt.return_ok Linux + | "Darwin" -> + Lwt.return_ok Darwin + | os -> + Lwt.return_ok (Unknown os)) + (function + | exn -> Lwt.return_error (error_info "uname" (Printexc.to_string exn))) let page_size () = let get_conf_process = - uname >>= function - | Ok Linux -> Lwt.return_ok ("getconf", [| "getconf"; "PAGE_SIZE" |]) - | Ok Darwin -> Lwt.return_ok ("pagesize", [| "pagesize" |]) + uname + >>= function + | Ok Linux -> + Lwt.return_ok ("getconf", [|"getconf"; "PAGE_SIZE"|]) + | Ok Darwin -> + Lwt.return_ok ("pagesize", [|"pagesize"|]) | Ok (Unknown _) -> Lwt.return_error (error_info "pagesize" "Unknown unix system") - | Error Unix_system_info_failure e -> + | Error (Unix_system_info_failure e) -> Lwt.return_error (error_info "pagesize" e) - | Error e -> Lwt.return_error e + | Error e -> + Lwt.return_error e in - get_conf_process >>= function - | Error e -> Lwt.return_error e + get_conf_process + >>= function + | Error e -> + Lwt.return_error e | Ok process -> Lwt.catch - begin fun () -> - Lwt_process.with_process_in process ~env:[| "LC_ALL=C" |] - (fun pc -> Lwt_io.read_line pc#stdout >>= fun ps -> - Lwt.return_ok (int_of_string ps)) end - begin function exn -> - Lwt.return_error - (error_info "pagesize" - (Printexc.to_string exn)) end + (fun () -> + Lwt_process.with_process_in process ~env:[|"LC_ALL=C"|] (fun pc -> + Lwt_io.read_line pc#stdout + >>= fun ps -> Lwt.return_ok (int_of_string ps))) + (function + | exn -> + Lwt.return_error (error_info "pagesize" (Printexc.to_string exn))) let linux_statm pid = Lwt.catch - begin fun () -> - let fname = Format.asprintf ("/proc/%d/statm") pid in - Lwt_unix.file_exists fname >>= function - | true -> - begin Lwt_io.open_file ~mode:Input fname >>= fun ic -> - Lwt_io.read_line ic >>= fun line -> - match List.map Int64.of_string @@ String.split ' ' line with - | size::resident::shared::text::lib::data::dt::_ -> - begin page_size () >>= function - | Error e -> - Lwt.return_error e - | Ok page_size -> - 
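          (* Note: /proc/<pid>/statm exposes seven whitespace-separated
             counters, all measured in pages: size, resident, shared,
             text, lib, data and dt. *)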
Lwt.return_ok - (Statm { page_size ; size ; resident ; - shared ; text ; - lib ; data ; dt ; }) end - | _ -> Lwt.return_error - (error_info "procfs statm" - "Unexpected proc/<pid>/statm format") end + (fun () -> + let fname = Format.asprintf "/proc/%d/statm" pid in + Lwt_unix.file_exists fname + >>= function + | true -> ( + Lwt_io.open_file ~mode:Input fname + >>= fun ic -> + Lwt_io.read_line ic + >>= fun line -> + match List.map Int64.of_string @@ String.split ' ' line with + | size :: resident :: shared :: text :: lib :: data :: dt :: _ -> ( + page_size () + >>= function + | Error e -> + Lwt.return_error e + | Ok page_size -> + Lwt.return_ok + (Statm + {page_size; size; resident; shared; text; lib; data; dt}) + ) + | _ -> + Lwt.return_error + (error_info "procfs statm" "Unexpected proc/<pid>/statm format") + ) | false -> - Lwt.return_error (error_info - "procfs statm" - (Format.asprintf "%s not found" fname)) end - begin function exn -> - Lwt.return_error - (error_info "procfs statm" - (Printexc.to_string exn)) end + Lwt.return_error + (error_info "procfs statm" (Format.asprintf "%s not found" fname))) + (function + | exn -> + Lwt.return_error (error_info "procfs statm" (Printexc.to_string exn))) let darwin_ps pid = Lwt.catch - begin fun () -> - Lwt_process.with_process_in ~env:[| "LC_ALL=C" |] - ("ps", [| "ps" ; "-o" ; "pid,%mem,rss" ; "-p"; string_of_int pid |]) - (fun pc -> Lwt_io.read_line_opt pc#stdout >>= function - | None -> - Lwt.return_error (error_info "ps" "Unexpected ps answer (1st line)") - | Some _ -> (* first line is useless *) - Lwt_io.read_line_opt pc#stdout >>= function - | None -> - Lwt.return_error (error_info "ps" "Unexpected ps answer (2nd line)") - | Some ps_stats -> - match String.split ' ' ps_stats with - | _pid::mem::resident::_ -> - begin page_size () >>= function - | Error e -> Lwt.return_error e - | Ok page_size -> - Lwt.return_ok - (Ps { page_size ; - mem = float_of_string mem ; - resident = Int64.of_string resident }) end - | _ -> Lwt.return_error (error_info "ps" "Unexpected answer")) end - begin function exn -> - Lwt.return_error - (error_info "ps" - (Printexc.to_string exn)) end + (fun () -> + Lwt_process.with_process_in + ~env:[|"LC_ALL=C"|] + ("ps", [|"ps"; "-o"; "pid,%mem,rss"; "-p"; string_of_int pid|]) + (fun pc -> + Lwt_io.read_line_opt pc#stdout + >>= function + | None -> + Lwt.return_error + (error_info "ps" "Unexpected ps answer (1st line)") + | Some _ -> ( + (* first line is useless *) + Lwt_io.read_line_opt pc#stdout + >>= function + | None -> + Lwt.return_error + (error_info "ps" "Unexpected ps answer (2nd line)") + | Some ps_stats -> ( + match String.split ' ' ps_stats with + | _pid :: mem :: resident :: _ -> ( + page_size () + >>= function + | Error e -> + Lwt.return_error e + | Ok page_size -> + Lwt.return_ok + (Ps + { page_size; + mem = float_of_string mem; + resident = Int64.of_string resident }) ) + | _ -> + Lwt.return_error (error_info "ps" "Unexpected answer") ) ))) + (function + | exn -> Lwt.return_error (error_info "ps" (Printexc.to_string exn))) let memory_stats () = - let pid = Unix.getpid () in - uname >>= function - | Error e -> Lwt.return_error e - | Ok Linux -> linux_statm pid - | Ok Darwin -> darwin_ps pid - | _ -> Lwt.return_error (error_info "memory_stats" "Unknown unix system") + let pid = Unix.getpid () in + uname + >>= function + | Error e -> + Lwt.return_error e + | Ok Linux -> + linux_statm pid + | Ok Darwin -> + darwin_ps pid + | _ -> + Lwt.return_error (error_info "memory_stats" "Unknown unix system") diff --git 
a/src/lib_stdlib_unix/systime_os.ml b/src/lib_stdlib_unix/systime_os.ml index 5e1fff0c8c21ff79b47080c6b8678e0441212763..d5e7375ffefb412b9ccb9b486d139c97c06549ff 100644 --- a/src/lib_stdlib_unix/systime_os.ml +++ b/src/lib_stdlib_unix/systime_os.ml @@ -24,4 +24,5 @@ (*****************************************************************************) let now () = Ptime_clock.now () + let sleep s = Lwt_unix.sleep (Ptime.Span.to_float_s s) diff --git a/src/lib_stdlib_unix/systime_os.mli b/src/lib_stdlib_unix/systime_os.mli index a3b654ab017a8a3fbc2e236d685ffb7b6caaf416..e64e9b04339c5101920f80f7a5bd216d1ed7f6a4 100644 --- a/src/lib_stdlib_unix/systime_os.mli +++ b/src/lib_stdlib_unix/systime_os.mli @@ -23,9 +23,9 @@ (* *) (*****************************************************************************) -val now : unit -> Ptime.t (** The current time according to the system clock *) +val now : unit -> Ptime.t -val sleep : Ptime.Span.t -> unit Lwt.t (** [sleep t] is an Lwt promise that resolves after [t] time has elapsed. If [t] is negative, [sleep t] is already resolved. *) +val sleep : Ptime.Span.t -> unit Lwt.t diff --git a/src/lib_storage/.ocamlformat b/src/lib_storage/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_storage/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always +parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_storage/context.ml b/src/lib_storage/context.ml index cfdbc0ecd7c2f3821a62a05f4f97b15d11f390f3..267fda01bef6f40bb095821f6d10d6bcca187753 100644 --- a/src/lib_storage/context.ml +++ b/src/lib_storage/context.ml @@ -29,32 +29,40 @@ module IrminPath = Irmin.Path.String_list module MBytesContent = struct type t = MBytes.t + let t = Irmin.Type.(like cstruct) (fun x -> Cstruct.to_bigarray x) (fun x -> Cstruct.of_bigarray x) + let merge = Irmin.Merge.default Irmin.Type.(option t) + let pp ppf b = Format.pp_print_string ppf (MBytes.to_string b) + let of_string s = Ok (MBytes.of_string s) end module Metadata = struct type t = unit + let t = Irmin.Type.unit + let default = () + let merge = Irmin.Merge.default t end module IrminBlake2B : Irmin.Hash.S with type t = Context_hash.t = struct - type t = Context_hash.t let digest_size = Context_hash.size let to_raw t = Cstruct.of_bigarray (Context_hash.to_bytes t) + let of_raw t = match Context_hash.of_bytes_opt (Cstruct.to_bigarray t) with - | Some t -> t + | Some t -> + t | None -> let str = Cstruct.to_string t in Format.kasprintf invalid_arg "%s (%d)" str (String.length str) @@ -69,127 +77,126 @@ module IrminBlake2B : Irmin.Hash.S with type t = Context_hash.t = struct let of_string x = match Context_hash.of_b58check_exn x with - | exception (Invalid_argument s) -> Error (`Msg s) - | h -> Ok h + | exception Invalid_argument s -> + Error (`Msg s) + | h -> + Ok h - let has_kind = function - | `SHA1 -> true - | _ -> false + let has_kind = function `SHA1 -> true | _ -> false let to_raw_int c = Int64.to_int @@ MBytes.get_int64 (Context_hash.to_bytes c) 0 - end module GitStore = - Irmin_lmdb.Make - (Metadata) - (MBytesContent) - (Irmin.Path.String_list) + Irmin_lmdb.Make (Metadata) (MBytesContent) (Irmin.Path.String_list) (Irmin.Branch.String) (IrminBlake2B) type index = { - path: string ; - repo: GitStore.Repo.t ; - 
patch_context: context -> context Lwt.t ; + path : string; + repo : GitStore.Repo.t; + patch_context : context -> context Lwt.t } and context = { - index: index ; - parents: GitStore.Commit.t list ; - tree: GitStore.tree ; + index : index; + parents : GitStore.Commit.t list; + tree : GitStore.tree } + type t = context (*-- Version Access and Update -----------------------------------------------*) let current_protocol_key = ["protocol"] + let current_test_chain_key = ["test_chain"] + let current_data_key = ["data"] let exists index key = - GitStore.Commit.of_hash index.repo key >>= function - | None -> Lwt.return_false - | Some _ -> Lwt.return_true + GitStore.Commit.of_hash index.repo key + >>= function None -> Lwt.return_false | Some _ -> Lwt.return_true let checkout index key = - GitStore.Commit.of_hash index.repo key >>= function - | None -> Lwt.return_none + GitStore.Commit.of_hash index.repo key + >>= function + | None -> + Lwt.return_none | Some commit -> - GitStore.Commit.tree commit >>= fun tree -> - let ctxt = { index ; tree ; parents = [commit] } in + GitStore.Commit.tree commit + >>= fun tree -> + let ctxt = {index; tree; parents = [commit]} in Lwt.return_some ctxt let checkout_exn index key = - checkout index key >>= function - | None -> Lwt.fail Not_found - | Some p -> Lwt.return p + checkout index key + >>= function None -> Lwt.fail Not_found | Some p -> Lwt.return p let raw_commit ~time ?(message = "") context = let info = - Irmin.Info.v ~date:(Time.Protocol.to_seconds time) ~author:"Tezos" message in + Irmin.Info.v ~date:(Time.Protocol.to_seconds time) ~author:"Tezos" message + in GitStore.Commit.v - context.index.repo ~info ~parents:context.parents context.tree + context.index.repo + ~info + ~parents:context.parents + context.tree module P = GitStore.Private (* --- FIXME(samoht): I am so sorry --- *) module Hack = struct - module StepMap = struct module X = struct type t = GitStore.step + let t = GitStore.step_t + let compare = Irmin.Type.compare t end - include Map.Make(X) + + include Map.Make (X) end module Contents = struct - type key = P.Contents.key + type contents = P.Contents.value - type t = - | Key of key - | Contents of contents - | Both of key * contents + type t = Key of key | Contents of contents | Both of key * contents let t = let open Irmin.Type in - variant "Node.Contents" (fun key contents both -> function - | Key x -> key x - | Contents x -> contents x - | Both (x, y) -> both (x, y)) + variant "Node.Contents" (fun key contents both -> + function + | Key x -> key x | Contents x -> contents x | Both (x, y) -> both (x, y)) |~ case1 "Key" P.Contents.Key.t (fun x -> Key x) |~ case1 "Contents" P.Contents.Val.t (fun x -> Contents x) - |~ case1 "Both" (pair P.Contents.Key.t P.Contents.Val.t) - (fun (x, y) -> Both (x, y)) + |~ case1 "Both" (pair P.Contents.Key.t P.Contents.Val.t) (fun (x, y) -> + Both (x, y)) |> sealv let hash = function - | Key k | Both (k, _) -> k - | Contents c -> P.Contents.Key.digest P.Contents.Val.t c - + | Key k | Both (k, _) -> + k + | Contents c -> + P.Contents.Key.digest P.Contents.Val.t c end type key = P.Node.key - type value = [ `Node of node | `Contents of Contents.t * Metadata.t ] + type value = [`Node of node | `Contents of Contents.t * Metadata.t] and map = value StepMap.t - and node = - | Map of map - | Key of key - | Both of key * map + and node = Map of map | Key of key | Both of key * map let value t = let open Irmin.Type in - variant "Node.value" (fun node contents -> function - | `Node x -> node x - | `Contents x -> 
contents x) + variant "Node.value" (fun node contents -> + function `Node x -> node x | `Contents x -> contents x) |~ case1 "Node" t (fun x -> `Node x) |~ case1 "Contents" (pair Contents.t Metadata.t) (fun x -> `Contents x) |> sealv @@ -204,104 +211,109 @@ module Hack = struct let node map = let open Irmin.Type in - variant "Node.node" (fun map key both -> function - | Map x -> map x - | Key y -> key y - | Both (y,z) -> both (y, z)) + variant "Node.node" (fun map key both -> + function Map x -> map x | Key y -> key y | Both (y, z) -> both (y, z)) |~ case1 "Map" map (fun x -> Map x) |~ case1 "Key" P.Node.Key.t (fun x -> Key x) |~ case1 "Both" (pair P.Node.Key.t map) (fun (x, y) -> Both (x, y)) |> sealv - let node_t = Irmin.Type.mu (fun n -> - let value = value n in - node (map value) - ) + let node_t = + Irmin.Type.mu (fun n -> + let value = value n in + node (map value)) (* Mimick irmin-lmdb ordering *) module Sort_key = struct - exception Result of int - let compare (x, vx) (y, vy) = match vx, vy with - | `Contents _, `Contents _ -> String.compare x y - | _ -> + let compare (x, vx) (y, vy) = + match (vx, vy) with + | (`Contents _, `Contents _) -> + String.compare x y + | _ -> ( let lenx = String.length x in let leny = String.length y in let i = ref 0 in try while !i < lenx && !i < leny do match - Char.compare - (String.unsafe_get x !i) (String.unsafe_get y !i) + Char.compare (String.unsafe_get x !i) (String.unsafe_get y !i) with - | 0 -> incr i - | i -> raise (Result i) - done; + | 0 -> + incr i + | i -> + raise (Result i) + done ; let get len k v i = if i < len then String.unsafe_get k i - else if i = len then match v with - | `Node _ -> '/' - | `Contents _ -> '\000' + else if i = len then + match v with `Node _ -> '/' | `Contents _ -> '\000' else '\000' in match Char.compare (get lenx x vx !i) (get leny y vy !i) with - | 0 -> Char.compare (get lenx x vx (!i + 1)) (get leny y vy (!i + 1)) - | i -> i - with Result i -> - i - + | 0 -> + Char.compare (get lenx x vx (!i + 1)) (get leny y vy (!i + 1)) + | i -> + i + with Result i -> i ) end let sort_entries = List.fast_sort Sort_key.compare module Entry = struct - type kind = [ `Node | `Contents of Metadata.t ] - type entry = { kind : kind; name : string; node : IrminBlake2B.t; } + type kind = [`Node | `Contents of Metadata.t] + + type entry = {kind : kind; name : string; node : IrminBlake2B.t} let entry_t = let open Irmin.Type in - record "Tree.entry" - (fun kind name node -> - let kind = - match kind with - | None -> `Node - | Some m -> `Contents m in - { kind ; name ; node } ) + record "Tree.entry" (fun kind name node -> + let kind = match kind with None -> `Node | Some m -> `Contents m in + {kind; name; node}) |+ field "kind" (option Metadata.t) (function - | { kind = `Node ; _ } -> None - | { kind = `Contents m ; _ } -> Some m) - |+ field "name" string (fun { name ; _ } -> name) - |+ field "node" IrminBlake2B.t (fun { node ; _ } -> node) + | {kind = `Node; _} -> + None + | {kind = `Contents m; _} -> + Some m) + |+ field "name" string (fun {name; _} -> name) + |+ field "node" IrminBlake2B.t (fun {node; _} -> node) |> sealr - let of_entry e = e.name, match e.kind with - | `Node -> `Node e.node - | `Contents m -> `Contents (e.node, m) - - let to_entry (name, value) = match value with - | `Node node -> { name; kind = `Node; node } - | `Contents (node, m) -> { name; kind = `Contents m; node } + let of_entry e = + ( e.name, + match e.kind with + | `Node -> + `Node e.node + | `Contents m -> + `Contents (e.node, m) ) + + let to_entry (name, 
value) = + match value with + | `Node node -> + {name; kind = `Node; node} + | `Contents (node, m) -> + {name; kind = `Contents m; node} let t = Irmin.Type.like entry_t of_entry to_entry - end let rec export_map map = let alist = - StepMap.fold (fun step v acc -> - (step, hash_value v) :: acc - ) map [] + StepMap.fold (fun step v acc -> (step, hash_value v) :: acc) map [] in let l = sort_entries alist in P.Node.Val.v l and hash_value = function - | `Contents (c, m) -> `Contents (Contents.hash c, m) - | `Node n -> `Node (hash_node n) + | `Contents (c, m) -> + `Contents (Contents.hash c, m) + | `Node n -> + `Node (hash_node n) and hash_node = function - | Both (k, _) | Key k -> k + | Both (k, _) | Key k -> + k | Map m -> let v = export_map m in let entries = P.Node.Val.list v in @@ -309,26 +321,33 @@ module Hack = struct let v = Irmin.Type.encode_cstruct (Irmin.Type.list Entry.t) entries in IrminBlake2B.digest Irmin.Type.cstruct v - let cast: GitStore.node -> node = fun n -> + let cast : GitStore.node -> node = + fun n -> let buf = Irmin.Type.encode_cstruct GitStore.node_t n in match Irmin.Type.decode_cstruct node_t buf with - | Error (`Msg e) -> Fmt.failwith "invalid cast\n%s" e - | Ok x -> x - + | Error (`Msg e) -> + Fmt.failwith "invalid cast\n%s" e + | Ok x -> + x end -let tree_hash: GitStore.tree -> GitStore.Tree.hash = function - | `Contents (c, m) -> `Contents (P.Contents.Key.digest P.Contents.Val.t c, m) - | `Node n -> `Node (Hack.hash_node (Hack.cast n)) +let tree_hash : GitStore.tree -> GitStore.Tree.hash = function + | `Contents (c, m) -> + `Contents (P.Contents.Key.digest P.Contents.Val.t c, m) + | `Node n -> + `Node (Hack.hash_node (Hack.cast n)) let hash ~time ?(message = "") context = let info = Irmin.Info.v ~date:(Time.Protocol.to_seconds time) ~author:"Tezos" message in let parents = List.map (fun c -> GitStore.Commit.hash c) context.parents in - let node = match tree_hash context.tree with - | `Contents _ -> assert false - | `Node node -> node + let node = + match tree_hash context.tree with + | `Contents _ -> + assert false + | `Node node -> + node in let commit = P.Commit.Val.v ~parents ~node ~info in let x = P.Commit.Key.digest P.Commit.Val.t commit in @@ -336,7 +355,8 @@ let hash ~time ?(message = "") context = Lwt.return x let commit ~time ?message context = - raw_commit ~time ?message context >>= fun commit -> + raw_commit ~time ?message context + >>= fun commit -> let h = GitStore.Commit.hash commit in Lwt.return h @@ -345,104 +365,125 @@ let commit ~time ?message context = let data_key key = current_data_key @ key type key = string list + type value = MBytes.t let mem ctxt key = - GitStore.Tree.mem ctxt.tree (data_key key) >>= fun v -> - Lwt.return v + GitStore.Tree.mem ctxt.tree (data_key key) >>= fun v -> Lwt.return v let dir_mem ctxt key = - GitStore.Tree.mem_tree ctxt.tree (data_key key) >>= fun v -> - Lwt.return v + GitStore.Tree.mem_tree ctxt.tree (data_key key) >>= fun v -> Lwt.return v + +let raw_get ctxt key = GitStore.Tree.find ctxt.tree key -let raw_get ctxt key = - GitStore.Tree.find ctxt.tree key let get t key = raw_get t (data_key key) let raw_set ctxt key data = - GitStore.Tree.add ctxt.tree key data >>= fun tree -> - Lwt.return { ctxt with tree } + GitStore.Tree.add ctxt.tree key data + >>= fun tree -> Lwt.return {ctxt with tree} + let set t key data = raw_set t (data_key key) data let raw_del ctxt key = - GitStore.Tree.remove ctxt.tree key >>= fun tree -> - Lwt.return { ctxt with tree } + GitStore.Tree.remove ctxt.tree key >>= fun tree -> Lwt.return 
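  (* like [raw_set] above, this is functional: a fresh [context]
     wrapping the new tree is returned, [ctxt] itself is untouched *)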
{ctxt with tree} + let del t key = raw_del t (data_key key) let remove_rec ctxt key = - GitStore.Tree.remove ctxt.tree (data_key key) >>= fun tree -> - Lwt.return { ctxt with tree } + GitStore.Tree.remove ctxt.tree (data_key key) + >>= fun tree -> Lwt.return {ctxt with tree} let copy ctxt ~from ~to_ = - GitStore.Tree.find_tree ctxt.tree (data_key from) >>= function - | None -> Lwt.return_none + GitStore.Tree.find_tree ctxt.tree (data_key from) + >>= function + | None -> + Lwt.return_none | Some sub_tree -> - GitStore.Tree.add_tree ctxt.tree (data_key to_) sub_tree >>= fun tree -> - Lwt.return_some { ctxt with tree } + GitStore.Tree.add_tree ctxt.tree (data_key to_) sub_tree + >>= fun tree -> Lwt.return_some {ctxt with tree} let fold ctxt key ~init ~f = - GitStore.Tree.list ctxt.tree (data_key key) >>= fun keys -> + GitStore.Tree.list ctxt.tree (data_key key) + >>= fun keys -> Lwt_list.fold_left_s - begin fun acc (name, kind) -> + (fun acc (name, kind) -> let key = match kind with - | `Contents -> `Key (key @ [name]) - | `Node -> `Dir (key @ [name]) in - f key acc - end - init keys + | `Contents -> + `Key (key @ [name]) + | `Node -> + `Dir (key @ [name]) + in + f key acc) + init + keys (*-- Predefined Fields -------------------------------------------------------*) let get_protocol v = - raw_get v current_protocol_key >>= function - | None -> assert false - | Some data -> Lwt.return (Protocol_hash.of_bytes_exn data) + raw_get v current_protocol_key + >>= function + | None -> + assert false + | Some data -> + Lwt.return (Protocol_hash.of_bytes_exn data) + let set_protocol v key = raw_set v current_protocol_key (Protocol_hash.to_bytes key) let get_test_chain v = - raw_get v current_test_chain_key >>= function - | None -> Lwt.fail (Failure "Unexpected error (Context.get_test_chain)") - | Some data -> - match Data_encoding.Binary.of_bytes Test_chain_status.encoding data with - | None -> Lwt.fail (Failure "Unexpected error (Context.get_test_chain)") - | Some r -> Lwt.return r + raw_get v current_test_chain_key + >>= function + | None -> + Lwt.fail (Failure "Unexpected error (Context.get_test_chain)") + | Some data -> ( + match Data_encoding.Binary.of_bytes Test_chain_status.encoding data with + | None -> + Lwt.fail (Failure "Unexpected error (Context.get_test_chain)") + | Some r -> + Lwt.return r ) let set_test_chain v id = - raw_set v current_test_chain_key + raw_set + v + current_test_chain_key (Data_encoding.Binary.to_bytes_exn Test_chain_status.encoding id) -let del_test_chain v = raw_del v current_test_chain_key + +let del_test_chain v = raw_del v current_test_chain_key let fork_test_chain v ~protocol ~expiration = - set_test_chain v (Forking { protocol ; expiration }) + set_test_chain v (Forking {protocol; expiration}) (*-- Initialisation ----------------------------------------------------------*) let init ?patch_context ?mapsize ?readonly root = - GitStore.Repo.v - (Irmin_lmdb.config ?mapsize ?readonly root) >>= fun repo -> - Lwt.return { - path = root ; - repo ; - patch_context = - match patch_context with - | None -> (fun ctxt -> Lwt.return ctxt) - | Some patch_context -> patch_context - } + GitStore.Repo.v (Irmin_lmdb.config ?mapsize ?readonly root) + >>= fun repo -> + Lwt.return + { path = root; + repo; + patch_context = + ( match patch_context with + | None -> + fun ctxt -> Lwt.return ctxt + | Some patch_context -> + patch_context ) } let get_branch chain_id = Format.asprintf "%a" Chain_id.pp chain_id - let commit_genesis index ~chain_id ~time ~protocol = let tree = 
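  (* the genesis context starts from an empty tree: [patch_context], the
     protocol hash and the test-chain status are written before the
     "Genesis" commit below *)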
GitStore.Tree.empty in - let ctxt = { index ; tree ; parents = [] } in - index.patch_context ctxt >>= fun ctxt -> - set_protocol ctxt protocol >>= fun ctxt -> - set_test_chain ctxt Not_running >>= fun ctxt -> - raw_commit ~time ~message:"Genesis" ctxt >>= fun commit -> - GitStore.Branch.set index.repo (get_branch chain_id) commit >>= fun () -> - Lwt.return (GitStore.Commit.hash commit) + let ctxt = {index; tree; parents = []} in + index.patch_context ctxt + >>= fun ctxt -> + set_protocol ctxt protocol + >>= fun ctxt -> + set_test_chain ctxt Not_running + >>= fun ctxt -> + raw_commit ~time ~message:"Genesis" ctxt + >>= fun commit -> + GitStore.Branch.set index.repo (get_branch chain_id) commit + >>= fun () -> Lwt.return (GitStore.Commit.hash commit) let compute_testchain_chain_id genesis = let genesis_hash = Block_hash.hash_bytes [Block_hash.to_bytes genesis] in @@ -454,26 +495,28 @@ let compute_testchain_genesis forked_block = let commit_test_chain_genesis ctxt (forked_header : Block_header.t) = let message = - Format.asprintf "Forking testchain at level %ld." forked_header.shell.level in - raw_commit ~time:forked_header.shell.timestamp ~message ctxt >>= fun commit -> - let faked_shell_header : Block_header.shell_header = { - forked_header.shell with - proto_level = succ forked_header.shell.proto_level ; - predecessor = Block_hash.zero ; - validation_passes = 0 ; - operations_hash = Operation_list_list_hash.empty ; - context = GitStore.Commit.hash commit ; - } in + Format.asprintf "Forking testchain at level %ld." forked_header.shell.level + in + raw_commit ~time:forked_header.shell.timestamp ~message ctxt + >>= fun commit -> + let faked_shell_header : Block_header.shell_header = + { forked_header.shell with + proto_level = succ forked_header.shell.proto_level; + predecessor = Block_hash.zero; + validation_passes = 0; + operations_hash = Operation_list_list_hash.empty; + context = GitStore.Commit.hash commit } + in let forked_block = Block_header.hash forked_header in let genesis_hash = compute_testchain_genesis forked_block in let chain_id = compute_testchain_chain_id genesis_hash in let genesis_header : Block_header.t = - { shell = { faked_shell_header with predecessor = genesis_hash } ; - protocol_data = MBytes.create 0 } in + { shell = {faked_shell_header with predecessor = genesis_hash}; + protocol_data = MBytes.create 0 } + in let branch = get_branch chain_id in - GitStore.Branch.set ctxt.index.repo branch commit >>= fun () -> - Lwt.return genesis_header - + GitStore.Branch.set ctxt.index.repo branch commit + >>= fun () -> Lwt.return genesis_header let clear_test_chain index chain_id = (* TODO remove commits... ??? 
*) @@ -482,39 +525,45 @@ let clear_test_chain index chain_id = let set_head index chain_id commit = let branch = get_branch chain_id in - GitStore.Commit.of_hash index.repo commit >>= function - | None -> assert false + GitStore.Commit.of_hash index.repo commit + >>= function + | None -> + assert false | Some commit -> GitStore.Branch.set index.repo branch commit let set_master index commit = - GitStore.Commit.of_hash index.repo commit >>= function - | None -> assert false + GitStore.Commit.of_hash index.repo commit + >>= function + | None -> + assert false | Some commit -> GitStore.Branch.set index.repo GitStore.Branch.master commit (* Context dumping *) module Pruned_block = struct - type t = { - block_header : Block_header.t ; - operations : (int * Operation.t list ) list ; - operation_hashes : (int * Operation_hash.t list) list ; + block_header : Block_header.t; + operations : (int * Operation.t list) list; + operation_hashes : (int * Operation_hash.t list) list } let encoding = let open Data_encoding in conv - (fun { block_header ; operations ; operation_hashes} -> - (operations, operation_hashes, block_header)) + (fun {block_header; operations; operation_hashes} -> + (operations, operation_hashes, block_header)) (fun (operations, operation_hashes, block_header) -> - { block_header ; operations ; operation_hashes}) + {block_header; operations; operation_hashes}) (obj3 - (req "operations" (list (tup2 int31 (list (dynamic_size Operation.encoding))))) - (req "operation_hashes" (list (tup2 int31 (list (dynamic_size Operation_hash.encoding))))) - (req "block_header" Block_header.encoding) - ) + (req + "operations" + (list (tup2 int31 (list (dynamic_size Operation.encoding))))) + (req + "operation_hashes" + (list (tup2 int31 (list (dynamic_size Operation_hash.encoding))))) + (req "block_header" Block_header.encoding)) let to_bytes pruned_block = Data_encoding.Binary.to_bytes_exn encoding pruned_block @@ -522,78 +571,56 @@ module Pruned_block = struct let of_bytes pruned_block = Data_encoding.Binary.of_bytes encoding pruned_block - let header { block_header } = block_header - + let header {block_header} = block_header end module Block_data = struct + type t = {block_header : Block_header.t; operations : Operation.t list list} - type t = { - block_header : Block_header.t ; - operations : Operation.t list list ; - } - - let header { block_header } = block_header + let header {block_header} = block_header let encoding = let open Data_encoding in conv - (fun { block_header ; - operations} -> - (operations, - block_header)) - (fun (operations, - block_header) -> - { block_header ; - operations}) + (fun {block_header; operations} -> (operations, block_header)) + (fun (operations, block_header) -> {block_header; operations}) (obj2 (req "operations" (list (list (dynamic_size Operation.encoding)))) - (req "block_header" Block_header.encoding - )) - - let to_bytes = - Data_encoding.Binary.to_bytes_exn encoding + (req "block_header" Block_header.encoding)) - let of_bytes = - Data_encoding.Binary.of_bytes encoding + let to_bytes = Data_encoding.Binary.to_bytes_exn encoding + let of_bytes = Data_encoding.Binary.of_bytes encoding end module Protocol_data = struct - - type info = { - author : string ; - message : string ; - timestamp : Time.Protocol.t ; - } + type info = {author : string; message : string; timestamp : Time.Protocol.t} let info_encoding = let open Data_encoding in conv - (fun {author ; message ; timestamp} -> - (author, message, timestamp)) - (fun (author, message, timestamp) -> - {author ; 
message ; timestamp} ) + (fun {author; message; timestamp} -> (author, message, timestamp)) + (fun (author, message, timestamp) -> {author; message; timestamp}) (obj3 (req "author" string) (req "message" string) (req "timestamp" Time.Protocol.encoding)) type data = { - info : info ; - protocol_hash : Protocol_hash.t ; - test_chain_status : Test_chain_status.t ; - data_key : Context_hash.t ; - parents : Context_hash.t list ; + info : info; + protocol_hash : Protocol_hash.t; + test_chain_status : Test_chain_status.t; + data_key : Context_hash.t; + parents : Context_hash.t list } let data_encoding = let open Data_encoding in conv - (fun { info ; protocol_hash ; test_chain_status ; data_key ; parents } -> - (info, protocol_hash, test_chain_status, data_key, parents)) + (fun {info; protocol_hash; test_chain_status; data_key; parents} -> + (info, protocol_hash, test_chain_status, data_key, parents)) (fun (info, protocol_hash, test_chain_status, data_key, parents) -> - { info ; protocol_hash ; test_chain_status ; data_key ; parents }) + {info; protocol_hash; test_chain_status; data_key; parents}) (obj5 (req "info" info_encoding) (req "protocol_hash" Protocol_hash.encoding) @@ -601,155 +628,170 @@ module Protocol_data = struct (req "data_key" Context_hash.encoding) (req "parents" (list Context_hash.encoding))) - type t = (Int32.t * data) + type t = Int32.t * data let encoding = let open Data_encoding in - tup2 - int32 - data_encoding - - let to_bytes = - Data_encoding.Binary.to_bytes_exn encoding + tup2 int32 data_encoding - let of_bytes = - Data_encoding.Binary.of_bytes encoding + let to_bytes = Data_encoding.Binary.to_bytes_exn encoding + let of_bytes = Data_encoding.Binary.of_bytes encoding end module Dumpable_context = struct type nonrec index = index + type nonrec context = context + type tree = GitStore.tree + type hash = GitStore.Tree.hash + type step = string + type key = step list + type commit_info = Irmin.Info.t let hash_export = function - | `Contents ( h, () ) -> `Blob, Context_hash.to_bytes h - | `Node h -> `Node, Context_hash.to_bytes h + | `Contents (h, ()) -> + (`Blob, Context_hash.to_bytes h) + | `Node h -> + (`Node, Context_hash.to_bytes h) + let hash_import ty mb = - Context_hash.of_bytes mb >>? fun h -> - match ty with - | `Node -> ok @@ `Node h - | `Blob -> ok @@ `Contents ( h, () ) + Context_hash.of_bytes mb + >>? 
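    (* [>>?] is the result bind: stop here if the bytes are not a valid
       [Context_hash], otherwise tag the hash as a node or a blob *)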
fun h -> + match ty with `Node -> ok @@ `Node h | `Blob -> ok @@ `Contents (h, ()) + let hash_equal h1 h2 = - match h1, h2 with - | `Contents ( h1, () ), `Contents ( h2, () ) - | `Node h1, `Node h2 -> Context_hash.( h1 = h2 ) - | `Contents _, `Node _ | `Node _, `Contents _ -> false + match (h1, h2) with + | (`Contents (h1, ()), `Contents (h2, ())) | (`Node h1, `Node h2) -> + Context_hash.(h1 = h2) + | (`Contents _, `Node _) | (`Node _, `Contents _) -> + false let commit_info_encoding = let open Data_encoding in conv (fun irmin_info -> - let author = Irmin.Info.author irmin_info in - let message = Irmin.Info.message irmin_info in - let date = Irmin.Info.date irmin_info in - (author, message, date)) - (fun (author, message, date) -> - Irmin.Info.v ~author ~date message) - (obj3 - (req "author" string) - (req "message" string) - (req "date" int64)) + let author = Irmin.Info.author irmin_info in + let message = Irmin.Info.message irmin_info in + let date = Irmin.Info.date irmin_info in + (author, message, date)) + (fun (author, message, date) -> Irmin.Info.v ~author ~date message) + (obj3 (req "author" string) (req "message" string) (req "date" int64)) let blob_encoding = let open Data_encoding in - conv - (fun (`Blob h) -> h) - (fun h -> `Blob h) - (obj1 (req "blob" bytes)) + conv (fun (`Blob h) -> h) (fun h -> `Blob h) (obj1 (req "blob" bytes)) let node_encoding = let open Data_encoding in - conv - (fun (`Node h) -> h) - (fun h -> `Node h) - (obj1 (req "node" bytes)) + conv (fun (`Node h) -> h) (fun h -> `Node h) (obj1 (req "node" bytes)) let hash_encoding : hash Data_encoding.t = let open Data_encoding in - let kind_encoding = string_enum [("node", `Node) ; ("blob", `Blob) ] in + let kind_encoding = string_enum [("node", `Node); ("blob", `Blob)] in conv - begin fun hash -> hash_export hash end - begin function - | (`Node, h) -> `Node (Context_hash.of_bytes_exn h) - | (`Blob, h) -> `Contents (Context_hash.of_bytes_exn h, ()) - end + (fun hash -> hash_export hash) + (function + | (`Node, h) -> + `Node (Context_hash.of_bytes_exn h) + | (`Blob, h) -> + `Contents (Context_hash.of_bytes_exn h, ())) (obj2 (req "kind" kind_encoding) (req "value" bytes)) let context_parents ctxt = match ctxt with - | { parents = [commit]; _ } -> + | {parents = [commit]; _} -> (* XXX(samoht): fixed in irmin v2 *) let key = GitStore.Commit.hash commit in GitStore.Private.Commit.find - (GitStore.Private.Repo.commit_t ctxt.index.repo) key + (GitStore.Private.Repo.commit_t ctxt.index.repo) + key >|= fun v -> let commit = match v with None -> assert false | Some v -> v in let parents = GitStore.Private.Commit.Val.parents commit in List.sort Context_hash.compare parents - | _ -> assert false + | _ -> + assert false let context_info = function - | { parents = [c]; _ } -> GitStore.Commit.info c - | _ -> assert false - let context_info_export i = Irmin.Info.( date i, author i, message i ) - let context_info_import ( date, author, message) = Irmin.Info.v ~date ~author message + | {parents = [c]; _} -> + GitStore.Commit.info c + | _ -> + assert false + + let context_info_export i = Irmin.Info.(date i, author i, message i) + + let context_info_import (date, author, message) = + Irmin.Info.v ~date ~author message let get_context idx bh = checkout idx bh.Block_header.shell.context + let set_context ~info ~parents ctxt bh = let parents = List.sort Context_hash.compare parents in - GitStore.Tree.hash ctxt.index.repo ctxt.tree >>= function + GitStore.Tree.hash ctxt.index.repo ctxt.tree + >>= function | `Node node -> let v = 
GitStore.Private.Commit.Val.v ~info ~node ~parents in - GitStore.Private.Commit.add (GitStore.Private.Repo.commit_t ctxt.index.repo) v + GitStore.Private.Commit.add + (GitStore.Private.Repo.commit_t ctxt.index.repo) + v >>= fun ctxt_h -> - if Context_hash.equal bh.Block_header.shell.context ctxt_h - then Lwt.return_some bh + if Context_hash.equal bh.Block_header.shell.context ctxt_h then + Lwt.return_some bh else Lwt.return_none - | `Contents _ -> assert false + | `Contents _ -> + assert false let context_tree ctxt = ctxt.tree + let tree_hash ctxt = function - | `Node _ as node -> GitStore.Tree.hash ctxt.index.repo node - | contents -> Lwt.return (tree_hash contents) + | `Node _ as node -> + GitStore.Tree.hash ctxt.index.repo node + | contents -> + Lwt.return (tree_hash contents) + let sub_tree tree key = GitStore.Tree.find_tree tree key + let tree_list tree = GitStore.Tree.list tree [] + let tree_content tree = GitStore.Tree.find tree [] - let make_context index = { index ; tree = GitStore.Tree.empty ; parents = [] ; } - let update_context context tree = { context with tree ; } + let make_context index = {index; tree = GitStore.Tree.empty; parents = []} + + let update_context context tree = {context with tree} let add_hash index tree key hash = - GitStore.Tree.of_hash index.repo hash >>= function - | None -> Lwt.return_none + GitStore.Tree.of_hash index.repo hash + >>= function + | None -> + Lwt.return_none | Some sub_tree -> - GitStore.Tree.add_tree tree key sub_tree >>= - Lwt.return_some + GitStore.Tree.add_tree tree key sub_tree >>= Lwt.return_some let add_mbytes index bytes = let tree = GitStore.Tree.of_contents bytes in - GitStore.Tree.hash index.repo tree >|= fun _ -> - tree + GitStore.Tree.hash index.repo tree >|= fun _ -> tree let add_dir index l = let rec fold_list sub_tree = function - | [] -> Lwt.return_some sub_tree - | ( step, hash ) :: tl -> - begin - add_hash index sub_tree [step]hash >>= function - | None -> Lwt.return_none - | Some sub_tree -> fold_list sub_tree tl - end + | [] -> + Lwt.return_some sub_tree + | (step, hash) :: tl -> ( + add_hash index sub_tree [step] hash + >>= function + | None -> Lwt.return_none | Some sub_tree -> fold_list sub_tree tl ) in - fold_list GitStore.Tree.empty l >>= function - | None -> Lwt.return_none + fold_list GitStore.Tree.empty l + >>= function + | None -> + Lwt.return_none | Some tree -> - GitStore.Tree.hash index.repo tree >>= fun _ -> - Lwt.return_some tree + GitStore.Tree.hash index.repo tree >>= fun _ -> Lwt.return_some tree module Commit_hash = Context_hash module Block_header = Block_header @@ -761,24 +803,23 @@ end (* Protocol data *) let data_node_hash index context = - GitStore.Tree.get_tree context.tree current_data_key >>= fun dt -> - GitStore.Tree.hash index.repo dt >>= fun dt_hash -> + GitStore.Tree.get_tree context.tree current_data_key + >>= fun dt -> + GitStore.Tree.hash index.repo dt + >>= fun dt_hash -> match dt_hash with `Node x -> Lwt.return x | _ -> assert false let get_transition_block_headers pruned_blocks = - let rec aux hs x bs = match bs with + let rec aux hs x bs = + match bs with | [] -> x :: hs | b :: bs -> let xl = x.Pruned_block.block_header.shell.proto_level in let bl = b.Pruned_block.block_header.shell.proto_level in - if not (xl = bl) then - aux (x :: hs) b bs - else - aux hs b bs - in match pruned_blocks with - | [] -> assert false - | x :: xs -> aux [] x xs + if not (xl = bl) then aux (x :: hs) b bs else aux hs b bs + in + match pruned_blocks with [] -> assert false | x :: xs -> aux [] x xs let 
get_protocol_data_from_header index block_header = checkout_exn index block_header.Block_header.shell.context @@ -788,43 +829,39 @@ let get_protocol_data_from_header index block_header = let date = Irmin.Info.date irmin_info in let author = Irmin.Info.author irmin_info in let message = Irmin.Info.message irmin_info in - let info = { - Protocol_data.timestamp = Time.Protocol.of_seconds date ; - author ; - message ; - } in - Dumpable_context.context_parents context >>= fun parents -> - get_protocol context >>= fun protocol_hash -> - get_test_chain context >>= fun test_chain_status -> - data_node_hash index context >>= fun data_key -> - Lwt.return (level , { - Protocol_data.parents ; - protocol_hash ; - test_chain_status ; - data_key ; - info ; - }) + let info = + {Protocol_data.timestamp = Time.Protocol.of_seconds date; author; message} + in + Dumpable_context.context_parents context + >>= fun parents -> + get_protocol context + >>= fun protocol_hash -> + get_test_chain context + >>= fun test_chain_status -> + data_node_hash index context + >>= fun data_key -> + Lwt.return + ( level, + {Protocol_data.parents; protocol_hash; test_chain_status; data_key; info} + ) (* Mock some GitStore types, so we can build our own Merkle tree. *) module Mock : sig - val node : GitStore.Repo.t -> P.Node.key -> GitStore.node val commit : GitStore.repo -> Hack.key -> P.Commit.value -> GitStore.commit - end = struct - [@@@ocaml.warning "-37"] - type commit = { r: GitStore.Repo.t ; h: Context_hash.t; v: P.Commit.value } + type commit = {r : GitStore.Repo.t; h : Context_hash.t; v : P.Commit.value} type empty type u = | Map : empty -> u | Key : GitStore.Repo.t * P.Node.key -> u - | Both: empty * empty * empty -> u + | Both : empty * empty * empty -> u and node = {mutable v : u} @@ -834,30 +871,28 @@ end = struct (Obj.magic node : GitStore.node) let commit r h v = - let c : commit = {r ; h ; v} in + let c : commit = {r; h; v} in (Obj.magic c : GitStore.commit) - end -let validate_context_hash_consistency_and_commit - ~data_hash - ~expected_context_hash - ~timestamp - ~test_chain - ~protocol_hash - ~message - ~author - ~parents - ~index - = +let validate_context_hash_consistency_and_commit ~data_hash + ~expected_context_hash ~timestamp ~test_chain ~protocol_hash ~message + ~author ~parents ~index = let protocol_value = Protocol_hash.to_bytes protocol_hash in - let test_chain_value = Data_encoding.Binary.to_bytes_exn - Test_chain_status.encoding test_chain in + let test_chain_value = + Data_encoding.Binary.to_bytes_exn Test_chain_status.encoding test_chain + in let tree = GitStore.Tree.empty in - GitStore.Tree.add tree current_protocol_key protocol_value >>= fun tree -> - GitStore.Tree.add tree current_test_chain_key test_chain_value >>= fun tree -> - let info = Irmin.Info.v ~date:(Time.Protocol.to_seconds timestamp) ~author message in - let o_tree = Hack.cast (match tree with `Node n -> n | _ -> assert false) in + GitStore.Tree.add tree current_protocol_key protocol_value + >>= fun tree -> + GitStore.Tree.add tree current_test_chain_key test_chain_value + >>= fun tree -> + let info = + Irmin.Info.v ~date:(Time.Protocol.to_seconds timestamp) ~author message + in + let o_tree = + Hack.cast (match tree with `Node n -> n | _ -> assert false) + in let map = match o_tree with Map m -> m | _ -> assert false in let data_tree = Hack.Key data_hash in let new_map = Hack.Map (Hack.StepMap.add "data" (`Node data_tree) map) in @@ -865,65 +900,79 @@ let validate_context_hash_consistency_and_commit let commit = P.Commit.Val.v 
~parents ~node ~info in let computed_context_hash = P.Commit.Key.digest P.Commit.Val.t commit in if Context_hash.equal expected_context_hash computed_context_hash then - let mock_parents = List.map (fun h -> Mock.commit index.repo h commit) parents in - let ctxt = {index ; tree = GitStore.Tree.empty ; parents = mock_parents} in - set_test_chain ctxt test_chain >>= fun ctxt -> - set_protocol ctxt protocol_hash >>= fun ctxt -> + let mock_parents = + List.map (fun h -> Mock.commit index.repo h commit) parents + in + let ctxt = {index; tree = GitStore.Tree.empty; parents = mock_parents} in + set_test_chain ctxt test_chain + >>= fun ctxt -> + set_protocol ctxt protocol_hash + >>= fun ctxt -> let data_t = `Node (Mock.node index.repo data_hash) in - GitStore.Tree.add_tree ctxt.tree current_data_key data_t >>= fun new_tree -> + GitStore.Tree.add_tree ctxt.tree current_data_key data_t + >>= fun new_tree -> GitStore.Commit.v ctxt.index.repo ~info ~parents:ctxt.parents new_tree >>= fun commit -> let ctxt_h = GitStore.Commit.hash commit in Lwt.return (Context_hash.equal ctxt_h expected_context_hash) - else - Lwt.return_false + else Lwt.return_false (* Context dumper *) -module Context_dumper = Context_dump.Make(Dumpable_context) +module Context_dumper = Context_dump.Make (Dumpable_context) +include Context_dumper -include Context_dumper (* provides functions dump_contexts and restore_contexts *) +(* provides functions dump_contexts and restore_contexts *) type error += Cannot_create_file of string -let () = register_error_kind `Permanent + +let () = + register_error_kind + `Permanent ~id:"context_dump.write.cannot_open" ~title:"Cannot open file for context dump" ~description:"" ~pp:(fun ppf uerr -> - Format.fprintf ppf - "@[Error while opening file for context dumping: %s@]" - uerr) - Data_encoding.(obj1 (req "context_dump_cannot_open" string) ) - (function Cannot_create_file e -> Some e - | _ -> None) + Format.fprintf + ppf + "@[Error while opening file for context dumping: %s@]" + uerr) + Data_encoding.(obj1 (req "context_dump_cannot_open" string)) + (function Cannot_create_file e -> Some e | _ -> None) (fun e -> Cannot_create_file e) type error += Cannot_open_file of string -let () = register_error_kind `Permanent + +let () = + register_error_kind + `Permanent ~id:"context_dump.read.cannot_open" ~title:"Cannot open file for context restoring" ~description:"" ~pp:(fun ppf uerr -> - Format.fprintf ppf - "@[Error while opening file for context restoring: %s@]" - uerr) - Data_encoding.(obj1 (req "context_restore_cannot_open" string) ) - (function Cannot_open_file e -> Some e - | _ -> None) + Format.fprintf + ppf + "@[Error while opening file for context restoring: %s@]" + uerr) + Data_encoding.(obj1 (req "context_restore_cannot_open" string)) + (function Cannot_open_file e -> Some e | _ -> None) (fun e -> Cannot_open_file e) type error += Suspicious_file of int -let () = register_error_kind `Permanent + +let () = + register_error_kind + `Permanent ~id:"context_dump.read.suspicious" ~title:"Suspicious file: data after end" ~description:"" ~pp:(fun ppf uerr -> - Format.fprintf ppf - "@[Remaining bytes in file after context restoring: %d@]" - uerr) - Data_encoding.(obj1 (req "context_restore_suspicious" int31) ) - (function Suspicious_file e -> Some e - | _ -> None) + Format.fprintf + ppf + "@[Remaining bytes in file after context restoring: %d@]" + uerr) + Data_encoding.(obj1 (req "context_restore_suspicious" int31)) + (function Suspicious_file e -> Some e | _ -> None) (fun e -> Suspicious_file e) let 
dump_contexts idx datas ~filename = @@ -931,38 +980,38 @@ let dump_contexts idx datas ~filename = Lwt_unix.openfile filename Lwt_unix.[O_WRONLY; O_CREAT; O_TRUNC] 0o666 >>= return in - Lwt.catch file_init - (function - | Unix.Unix_error (e,_,_) -> fail @@ Cannot_create_file (Unix.error_message e) + Lwt.catch file_init (function + | Unix.Unix_error (e, _, _) -> + fail @@ Cannot_create_file (Unix.error_message e) | exc -> - let msg = Printf.sprintf "unknown error: %s" (Printexc.to_string exc) in + let msg = + Printf.sprintf "unknown error: %s" (Printexc.to_string exc) + in fail (Cannot_create_file msg)) - >>=? fun fd -> - dump_contexts_fd idx datas ~fd + >>=? fun fd -> dump_contexts_fd idx datas ~fd let restore_contexts idx ~filename k_store_pruned_block pipeline_validation = let file_init () = - Lwt_unix.openfile filename Lwt_unix.[O_RDONLY;] 0o600 - >>= return + Lwt_unix.openfile filename Lwt_unix.[O_RDONLY] 0o600 >>= return in - Lwt.catch file_init - (function - | Unix.Unix_error (e,_,_) -> fail @@ Cannot_open_file (Unix.error_message e) + Lwt.catch file_init (function + | Unix.Unix_error (e, _, _) -> + fail @@ Cannot_open_file (Unix.error_message e) | exc -> - let msg = Printf.sprintf "unknown error: %s" (Printexc.to_string exc) in + let msg = + Printf.sprintf "unknown error: %s" (Printexc.to_string exc) + in fail (Cannot_open_file msg)) >>=? fun fd -> Lwt.finalize (fun () -> - restore_contexts_fd idx ~fd k_store_pruned_block pipeline_validation - >>=? fun result -> - Lwt_unix.lseek fd 0 Lwt_unix.SEEK_CUR - >>= fun current -> - Lwt_unix.fstat fd - >>= fun stats -> - let total = stats.Lwt_unix.st_size in - if current = total - then return result - else fail @@ Suspicious_file (total - current) - ) + restore_contexts_fd idx ~fd k_store_pruned_block pipeline_validation + >>=? fun result -> + Lwt_unix.lseek fd 0 Lwt_unix.SEEK_CUR + >>= fun current -> + Lwt_unix.fstat fd + >>= fun stats -> + let total = stats.Lwt_unix.st_size in + if current = total then return result + else fail @@ Suspicious_file (total - current)) (fun () -> Lwt_unix.close fd) diff --git a/src/lib_storage/context.mli b/src/lib_storage/context.mli index c4ff1b737abc094b14b5b2a420e9281020a61188..70afc2e802a15aa255616379840a3d8104413a8f 100644 --- a/src/lib_storage/context.mli +++ b/src/lib_storage/context.mli @@ -30,138 +30,143 @@ type index (** A (key x value) store for a given block. *) type t + type context = t (** Open or initialize a versioned store at a given path. *) -val init: +val init : ?patch_context:(context -> context Lwt.t) -> ?mapsize:int64 -> ?readonly:bool -> string -> index Lwt.t -val compute_testchain_chain_id: - Block_hash.t -> Chain_id.t +val compute_testchain_chain_id : Block_hash.t -> Chain_id.t -val compute_testchain_genesis: - Block_hash.t -> Block_hash.t +val compute_testchain_genesis : Block_hash.t -> Block_hash.t -val commit_genesis: +val commit_genesis : index -> chain_id:Chain_id.t -> time:Time.Protocol.t -> protocol:Protocol_hash.t -> Context_hash.t Lwt.t -val commit_test_chain_genesis: - context -> - Block_header.t -> - Block_header.t Lwt.t +val commit_test_chain_genesis : + context -> Block_header.t -> Block_header.t Lwt.t (** {2 Generic interface} *) -type key = string list (** [key] indicates a path in a context. 
*) +type key = string list type value = MBytes.t -val mem: context -> key -> bool Lwt.t -val dir_mem: context -> key -> bool Lwt.t -val get: context -> key -> value option Lwt.t -val set: context -> key -> value -> t Lwt.t -val del: context -> key -> t Lwt.t -val remove_rec: context -> key -> t Lwt.t +val mem : context -> key -> bool Lwt.t + +val dir_mem : context -> key -> bool Lwt.t + +val get : context -> key -> value option Lwt.t + +val set : context -> key -> value -> t Lwt.t + +val del : context -> key -> t Lwt.t + +val remove_rec : context -> key -> t Lwt.t (** [copy] returns None if the [from] key is not bound *) -val copy: context -> from:key -> to_:key -> context option Lwt.t +val copy : context -> from:key -> to_:key -> context option Lwt.t (** [fold] iterates over elements under a path (not recursive). Iteration order is undeterministic. *) -val fold: - context -> key -> init:'a -> - f:([ `Key of key | `Dir of key ] -> 'a -> 'a Lwt.t) -> +val fold : + context -> + key -> + init:'a -> + f:([`Key of key | `Dir of key] -> 'a -> 'a Lwt.t) -> 'a Lwt.t (** {2 Accessing and Updating Versions} *) -val exists: index -> Context_hash.t -> bool Lwt.t -val checkout: index -> Context_hash.t -> context option Lwt.t -val checkout_exn: index -> Context_hash.t -> context Lwt.t -val hash: time:Time.Protocol.t -> - ?message:string -> t -> Context_hash.t Lwt.t -val commit: - time:Time.Protocol.t -> - ?message:string -> - context -> - Context_hash.t Lwt.t -val set_head: index -> Chain_id.t -> Context_hash.t -> unit Lwt.t -val set_master: index -> Context_hash.t -> unit Lwt.t +val exists : index -> Context_hash.t -> bool Lwt.t + +val checkout : index -> Context_hash.t -> context option Lwt.t + +val checkout_exn : index -> Context_hash.t -> context Lwt.t +val hash : time:Time.Protocol.t -> ?message:string -> t -> Context_hash.t Lwt.t + +val commit : + time:Time.Protocol.t -> ?message:string -> context -> Context_hash.t Lwt.t + +val set_head : index -> Chain_id.t -> Context_hash.t -> unit Lwt.t + +val set_master : index -> Context_hash.t -> unit Lwt.t (** {2 Predefined Fields} *) -val get_protocol: context -> Protocol_hash.t Lwt.t -val set_protocol: context -> Protocol_hash.t -> context Lwt.t +val get_protocol : context -> Protocol_hash.t Lwt.t + +val set_protocol : context -> Protocol_hash.t -> context Lwt.t -val get_test_chain: context -> Test_chain_status.t Lwt.t -val set_test_chain: context -> Test_chain_status.t -> context Lwt.t +val get_test_chain : context -> Test_chain_status.t Lwt.t -val del_test_chain: context -> context Lwt.t +val set_test_chain : context -> Test_chain_status.t -> context Lwt.t -val fork_test_chain: - context -> protocol:Protocol_hash.t -> expiration:Time.Protocol.t -> context Lwt.t -val clear_test_chain: index -> Chain_id.t -> unit Lwt.t +val del_test_chain : context -> context Lwt.t + +val fork_test_chain : + context -> + protocol:Protocol_hash.t -> + expiration:Time.Protocol.t -> + context Lwt.t + +val clear_test_chain : index -> Chain_id.t -> unit Lwt.t (** {2 Context dumping} ******************************************************) module Pruned_block : sig - type t = { - block_header : Block_header.t ; - operations : ( int * Operation.t list ) list ; - operation_hashes : (int * Operation_hash.t list) list ; + block_header : Block_header.t; + operations : (int * Operation.t list) list; + operation_hashes : (int * Operation_hash.t list) list } val encoding : t Data_encoding.t val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option end module Block_data : sig - 
- type t = { - block_header : Block_header.t ; - operations : Operation.t list list ; - } + type t = {block_header : Block_header.t; operations : Operation.t list list} val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option + val encoding : t Data_encoding.t end module Protocol_data : sig - type t = Int32.t * data - and info = { - author : string ; - message : string ; - timestamp : Time.Protocol.t ; - } + and info = {author : string; message : string; timestamp : Time.Protocol.t} and data = { - info : info ; - protocol_hash : Protocol_hash.t ; - test_chain_status : Test_chain_status.t ; - data_key : Context_hash.t ; - parents : Context_hash.t list ; + info : info; + protocol_hash : Protocol_hash.t; + test_chain_status : Test_chain_status.t; + data_key : Context_hash.t; + parents : Context_hash.t list } val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option - val encoding : t Data_encoding.t + val encoding : t Data_encoding.t end val get_protocol_data_from_header : @@ -169,18 +174,30 @@ val get_protocol_data_from_header : val dump_contexts : index -> - (Block_header.t * Block_data.t * History_mode.t * - (Block_header.t -> (Pruned_block.t option * Protocol_data.t option) tzresult Lwt.t)) -> + Block_header.t + * Block_data.t + * History_mode.t + * (Block_header.t -> + (Pruned_block.t option * Protocol_data.t option) tzresult Lwt.t) -> filename:string -> unit tzresult Lwt.t -val restore_contexts : index -> filename:string -> +val restore_contexts : + index -> + filename:string -> ((Block_hash.t * Pruned_block.t) list -> unit tzresult Lwt.t) -> (Block_header.t option -> - Block_hash.t -> Pruned_block.t -> unit tzresult Lwt.t) -> - (Block_header.t * Block_data.t * History_mode.t * - Block_header.t option * Block_hash.t list * - Protocol_data.t list) tzresult Lwt.t + Block_hash.t -> + Pruned_block.t -> + unit tzresult Lwt.t) -> + ( Block_header.t + * Block_data.t + * History_mode.t + * Block_header.t option + * Block_hash.t list + * Protocol_data.t list ) + tzresult + Lwt.t val validate_context_hash_consistency_and_commit : data_hash:Context_hash.t -> diff --git a/src/lib_storage/context_dump.ml b/src/lib_storage/context_dump.ml index 6bb42e4c27d8ff8a04f52fed08aab8adab9212ae..3a719ca8715ae9bc506dc2eaa8958ee5415a8a08 100644 --- a/src/lib_storage/context_dump.ml +++ b/src/lib_storage/context_dump.ml @@ -29,60 +29,88 @@ let current_version = "tezos-snapshot-1.0.0" (*****************************************************************************) module type Dump_interface = sig type index + type context + type tree + type hash + type step = string + type key = step list + type commit_info val commit_info_encoding : commit_info Data_encoding.t val hash_encoding : hash Data_encoding.t - val blob_encoding : [ `Blob of MBytes.t ] Data_encoding.t - val node_encoding : [ `Node of MBytes.t ] Data_encoding.t + + val blob_encoding : [`Blob of MBytes.t] Data_encoding.t + + val node_encoding : [`Node of MBytes.t] Data_encoding.t module Block_header : sig type t = Block_header.t + val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option + val equal : t -> t -> bool + val encoding : t Data_encoding.t end module Pruned_block : sig type t + val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option + val header : t -> Block_header.t + val encoding : t Data_encoding.t end module Block_data : sig type t + val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option + val header : t -> Block_header.t + val encoding : t Data_encoding.t end module Protocol_data : sig type t 
+ val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option + val encoding : t Data_encoding.t end module Commit_hash : sig type t + val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t tzresult + val encoding : t Data_encoding.t end (* hash manipulation *) - val hash_export : hash -> [ `Node | `Blob ] * MBytes.t - val hash_import : [ `Node | `Blob ] -> MBytes.t -> hash tzresult + val hash_export : hash -> [`Node | `Blob] * MBytes.t + + val hash_import : [`Node | `Blob] -> MBytes.t -> hash tzresult + val hash_equal : hash -> hash -> bool (* commit manipulation (for parents) *) @@ -90,192 +118,230 @@ module type Dump_interface = sig (* Commit info *) val context_info : context -> commit_info - val context_info_export : commit_info -> ( Int64.t * string * string ) - val context_info_import : ( Int64.t * string * string ) -> commit_info + + val context_info_export : commit_info -> Int64.t * string * string + + val context_info_import : Int64.t * string * string -> commit_info (* block header manipulation *) val get_context : index -> Block_header.t -> context option Lwt.t + val set_context : - info:commit_info -> parents:Commit_hash.t list -> context -> + info:commit_info -> + parents:Commit_hash.t list -> + context -> Block_header.t -> Block_header.t option Lwt.t (* for dumping *) val context_tree : context -> tree + val tree_hash : context -> tree -> hash Lwt.t + val sub_tree : tree -> key -> tree option Lwt.t - val tree_list : tree -> ( step * [`Contents|`Node] ) list Lwt.t + + val tree_list : tree -> (step * [`Contents | `Node]) list Lwt.t + val tree_content : tree -> MBytes.t option Lwt.t (* for restoring *) val make_context : index -> context + val update_context : context -> tree -> context + val add_hash : index -> tree -> key -> hash -> tree option Lwt.t + val add_mbytes : index -> MBytes.t -> tree Lwt.t - val add_dir : index -> ( step * hash ) list -> tree option Lwt.t + val add_dir : index -> (step * hash) list -> tree option Lwt.t end module type S = sig type index + type context + type block_header + type block_data + type pruned_block + type protocol_data val dump_contexts_fd : index -> - (block_header * block_data * History_mode.t * - (block_header -> (pruned_block option * protocol_data option) tzresult Lwt.t)) -> - fd:Lwt_unix.file_descr -> unit tzresult Lwt.t - - val restore_contexts_fd : index -> fd:Lwt_unix.file_descr -> + block_header + * block_data + * History_mode.t + * (block_header -> + (pruned_block option * protocol_data option) tzresult Lwt.t) -> + fd:Lwt_unix.file_descr -> + unit tzresult Lwt.t + + val restore_contexts_fd : + index -> + fd:Lwt_unix.file_descr -> ((Block_hash.t * pruned_block) list -> unit tzresult Lwt.t) -> (block_header option -> - Block_hash.t -> pruned_block -> unit tzresult Lwt.t) -> - (block_header * block_data * History_mode.t * - Block_header.t option * Block_hash.t list * protocol_data list) tzresult Lwt.t - + Block_hash.t -> + pruned_block -> + unit tzresult Lwt.t) -> + ( block_header + * block_data + * History_mode.t + * Block_header.t option + * Block_hash.t list + * protocol_data list ) + tzresult + Lwt.t end type error += System_write_error of string + type error += Bad_hash of string * MBytes.t * MBytes.t + type error += Context_not_found of MBytes.t + type error += System_read_error of string + type error += Inconsistent_snapshot_file + type error += Inconsistent_snapshot_data + type error += Missing_snapshot_data + type error += Invalid_snapshot_version of string * string + type error += Restore_context_failure -let 
() = begin +let () = let open Data_encoding in - - register_error_kind `Permanent + register_error_kind + `Permanent ~id:"Writing_error" ~title:"Writing error" ~description:"Cannot write in file for context dump" ~pp:(fun ppf s -> - Format.fprintf ppf - "Unable to write file for context dumping: %s" s - ) - (obj1 (req "context_dump_no_space" string) ) - (function System_write_error s -> Some s - | _ -> None) - (fun s -> System_write_error s); - - register_error_kind `Permanent + Format.fprintf ppf "Unable to write file for context dumping: %s" s) + (obj1 (req "context_dump_no_space" string)) + (function System_write_error s -> Some s | _ -> None) + (fun s -> System_write_error s) ; + register_error_kind + `Permanent ~id:"Bad_hash" ~title:"Bad hash" ~description:"Wrong hash given" - ~pp:(fun ppf ( ty, his, hshould ) -> - Format.fprintf ppf - "Wrong hash [%s] given: %s, should be %s" - ty (MBytes.to_string his) (MBytes.to_string hshould)) + ~pp:(fun ppf (ty, his, hshould) -> + Format.fprintf + ppf + "Wrong hash [%s] given: %s, should be %s" + ty + (MBytes.to_string his) + (MBytes.to_string hshould)) (obj3 - ( req "hash_ty" string ) - ( req "hash_is" bytes ) - ( req "hash_should" bytes ) ) - (function Bad_hash ( ty, his, hshould ) -> Some (ty, his, hshould ) - | _ -> None) - (fun (ty, his, hshould) -> Bad_hash (ty, his,hshould)); - - register_error_kind `Permanent + (req "hash_ty" string) + (req "hash_is" bytes) + (req "hash_should" bytes)) + (function + | Bad_hash (ty, his, hshould) -> Some (ty, his, hshould) | _ -> None) + (fun (ty, his, hshould) -> Bad_hash (ty, his, hshould)) ; + register_error_kind + `Permanent ~id:"Context_not_found" ~title:"Context not found" ~description:"Cannot find context corresponding to hash" ~pp:(fun ppf mb -> - Format.fprintf ppf - "No context with hash: %s" - (MBytes.to_string mb)) - (obj1 (req "context_not_found" bytes) ) - (function Context_not_found mb -> Some mb - | _ -> None) - (fun mb -> Context_not_found mb); - - register_error_kind `Permanent + Format.fprintf ppf "No context with hash: %s" (MBytes.to_string mb)) + (obj1 (req "context_not_found" bytes)) + (function Context_not_found mb -> Some mb | _ -> None) + (fun mb -> Context_not_found mb) ; + register_error_kind + `Permanent ~id:"System_read_error" ~title:"System read error" ~description:"Failed to read file" ~pp:(fun ppf uerr -> - Format.fprintf ppf - "Error while reading file for context dumping: %s" uerr) - (obj1 (req "system_read_error" string) ) - (function System_read_error e -> Some e - | _ -> None) - (fun e -> System_read_error e); - - register_error_kind `Permanent + Format.fprintf + ppf + "Error while reading file for context dumping: %s" + uerr) + (obj1 (req "system_read_error" string)) + (function System_read_error e -> Some e | _ -> None) + (fun e -> System_read_error e) ; + register_error_kind + `Permanent ~id:"Inconsistent_snapshot_file" ~title:"Inconsistent snapshot file" ~description:"Error while opening snapshot file" ~pp:(fun ppf () -> - Format.fprintf ppf - "Failed to read snapshot file. The provided file is inconsistent.") + Format.fprintf + ppf + "Failed to read snapshot file. 
The provided file is inconsistent.") empty (function Inconsistent_snapshot_file -> Some () | _ -> None) - (fun () -> Inconsistent_snapshot_file); - - register_error_kind `Permanent + (fun () -> Inconsistent_snapshot_file) ; + register_error_kind + `Permanent ~id:"Inconsistent_snapshot_data" ~title:"Inconsistent snapshot data" ~description:"The data provided by the snapshot is inconsistent" ~pp:(fun ppf () -> - Format.fprintf ppf - "The data provided by the snapshot file is inconsistent (context_hash does not correspond for block).") + Format.fprintf + ppf + "The data provided by the snapshot file is inconsistent (context_hash \ + does not correspond for block).") empty (function Inconsistent_snapshot_data -> Some () | _ -> None) - (fun () -> Inconsistent_snapshot_data); - - register_error_kind `Permanent + (fun () -> Inconsistent_snapshot_data) ; + register_error_kind + `Permanent ~id:"Missing_snapshot_data" ~title:"Missing data in imported snapshot" ~description:"Mandatory data missing while reaching end of snapshot file." ~pp:(fun ppf () -> - Format.fprintf ppf - "Mandatory data is missing is the provided snapshot file.") + Format.fprintf + ppf + "Mandatory data is missing is the provided snapshot file.") empty (function Missing_snapshot_data -> Some () | _ -> None) - (fun () -> Missing_snapshot_data); - - register_error_kind `Permanent + (fun () -> Missing_snapshot_data) ; + register_error_kind + `Permanent ~id:"Invalid_snapshot_version" ~title:"Invalid snapshot version" ~description:"The version of the snapshot to import is not valid" - ~pp:begin fun ppf (found, expected) -> - Format.fprintf ppf + ~pp:(fun ppf (found, expected) -> + Format.fprintf + ppf "The snapshot to import has version \"%s\" but \"%s\" was expected." - found expected end - (obj2 - (req "found" string) - (req "expected" string)) - (function Invalid_snapshot_version (found, expected) -> - Some (found, expected) | _ -> None) - (fun (found, expected) -> Invalid_snapshot_version (found, expected)); - - register_error_kind `Permanent + found + expected) + (obj2 (req "found" string) (req "expected" string)) + (function + | Invalid_snapshot_version (found, expected) -> + Some (found, expected) + | _ -> + None) + (fun (found, expected) -> Invalid_snapshot_version (found, expected)) ; + register_error_kind + `Permanent ~id:"Restore_context_failure" ~title:"Failed to restore context" ~description:"Internal error while restoring the context" ~pp:(fun ppf () -> - Format.fprintf ppf - "Internal error while restoring the context.") + Format.fprintf ppf "Internal error while restoring the context.") empty (function Restore_context_failure -> Some () | _ -> None) - (fun () -> Restore_context_failure); - -end - -module Make (I:Dump_interface) = struct + (fun () -> Restore_context_failure) +module Make (I : Dump_interface) = struct type command = - | Root of { - block_header: I.Block_header.t ; - info: I.commit_info ; - parents: I.Commit_hash.t list ; - block_data : I.Block_data.t ; - } + | Root of + { block_header : I.Block_header.t; + info : I.commit_info; + parents : I.Commit_hash.t list; + block_data : I.Block_data.t } | Node of (string * I.hash) list | Blob of MBytes.t | Proot of I.Pruned_block.t @@ -286,137 +352,146 @@ module Make (I:Dump_interface) = struct let blob_encoding = let open Data_encoding in - case ~title:"blob" (Tag (Char.code 'b')) + case + ~title:"blob" + (Tag (Char.code 'b')) bytes (function Blob bytes -> Some bytes | _ -> None) (function bytes -> Blob bytes) let node_encoding = let open Data_encoding in - case 
~title:"node" (Tag (Char.code 'd')) - (list (obj2 - (req "name" string) - (req "hash" I.hash_encoding) - )) + case + ~title:"node" + (Tag (Char.code 'd')) + (list (obj2 (req "name" string) (req "hash" I.hash_encoding))) (function Node x -> Some x | _ -> None) (function x -> Node x) let end_encoding = let open Data_encoding in - case ~title:"end" (Tag (Char.code 'e')) + case + ~title:"end" + (Tag (Char.code 'e')) empty (function End -> Some () | _ -> None) (fun () -> End) let loot_encoding = let open Data_encoding in - case ~title:"loot" (Tag (Char.code 'l')) + case + ~title:"loot" + (Tag (Char.code 'l')) I.Protocol_data.encoding - (function - | Loot protocol_data -> Some protocol_data - | _ -> None) - (fun protocol_data -> - Loot protocol_data) + (function Loot protocol_data -> Some protocol_data | _ -> None) + (fun protocol_data -> Loot protocol_data) let proot_encoding = let open Data_encoding in - case ~title:"proot" (Tag (Char.code 'p')) + case + ~title:"proot" + (Tag (Char.code 'p')) (obj1 (req "pruned_block" I.Pruned_block.encoding)) - (function - | Proot pruned_block -> - Some pruned_block - | _ -> None) - (fun pruned_block -> - Proot pruned_block) + (function Proot pruned_block -> Some pruned_block | _ -> None) + (fun pruned_block -> Proot pruned_block) let root_encoding = let open Data_encoding in - case ~title:"root" (Tag (Char.code 'r')) + case + ~title:"root" + (Tag (Char.code 'r')) (obj4 (req "block_header" (dynamic_size I.Block_header.encoding)) (req "info" I.commit_info_encoding) (req "parents" (list I.Commit_hash.encoding)) - (req "block_data" I.Block_data.encoding) - ) + (req "block_data" I.Block_data.encoding)) (function - | Root { block_header ; info ; parents ; block_data } -> + | Root {block_header; info; parents; block_data} -> Some (block_header, info, parents, block_data) - | _ -> None) + | _ -> + None) (fun (block_header, info, parents, block_data) -> - Root { block_header ; info ; parents ; block_data }) - - let command_encoding = Data_encoding.union ~tag_size:`Uint8 [ - blob_encoding ; - node_encoding ; - end_encoding ; - loot_encoding ; - proot_encoding ; - root_encoding ; - ] + Root {block_header; info; parents; block_data}) + + let command_encoding = + Data_encoding.union + ~tag_size:`Uint8 + [ blob_encoding; + node_encoding; + end_encoding; + loot_encoding; + proot_encoding; + root_encoding ] (* IO toolkit. *) let rec read_string rbuf ~len = - let fd, buf, ofs, total = !rbuf in - if Bytes.length buf - ofs < len then + let (fd, buf, ofs, total) = !rbuf in + if Bytes.length buf - ofs < len then ( let blen = Bytes.length buf - ofs in let neu = Bytes.create (blen + 1_000_000) in Bytes.blit buf ofs neu 0 blen ; - Lwt_unix.read fd neu blen 1_000_000 >>= fun bread -> + Lwt_unix.read fd neu blen 1_000_000 + >>= fun bread -> total := !total + bread ; - if bread = 0 then - fail Inconsistent_snapshot_file + if bread = 0 then fail Inconsistent_snapshot_file else - let neu = if bread <> 1_000_000 then Bytes.sub neu 0 (blen + bread) else neu in + let neu = + if bread <> 1_000_000 then Bytes.sub neu 0 (blen + bread) else neu + in rbuf := (fd, neu, 0, total) ; - read_string rbuf ~len + read_string rbuf ~len ) else let res = Bytes.sub_string buf ofs len in rbuf := (fd, buf, ofs + len, total) ; return res let read_mbytes rbuf b = - read_string rbuf ~len:(MBytes.length b) >>=? fun string -> + read_string rbuf ~len:(MBytes.length b) + >>=? 
fun string -> MBytes.blit_of_string string 0 b 0 (MBytes.length b) ; return () let set_int64 buf i = let b = Bytes.create 8 in - EndianBytes.BigEndian.set_int64 b 0 i; + EndianBytes.BigEndian.set_int64 b 0 i ; Buffer.add_bytes buf b let get_int64 rbuf = - read_string ~len:8 rbuf >>=? fun s -> - return @@ EndianString.BigEndian.get_int64 s 0 + read_string ~len:8 rbuf + >>=? fun s -> return @@ EndianString.BigEndian.get_int64 s 0 let set_mbytes buf b = set_int64 buf (Int64.of_int (MBytes.length b)) ; Buffer.add_bytes buf (MBytes.to_bytes b) let get_mbytes rbuf = - get_int64 rbuf >>|? Int64.to_int >>=? fun l -> + get_int64 rbuf >>|? Int64.to_int + >>=? fun l -> let b = MBytes.create l in - read_mbytes rbuf b >>=? fun () -> - return b + read_mbytes rbuf b >>=? fun () -> return b (* Getter and setters *) let get_command rbuf = - get_mbytes rbuf >>|? fun bytes -> - Data_encoding.Binary.of_bytes_exn command_encoding bytes + get_mbytes rbuf + >>|? fun bytes -> Data_encoding.Binary.of_bytes_exn command_encoding bytes let set_root buf block_header info parents block_data = - let root = Root { block_header ; info ; parents ; block_data ; } in + let root = Root {block_header; info; parents; block_data} in let bytes = Data_encoding.Binary.to_bytes_exn command_encoding root in set_mbytes buf bytes let set_node buf contents = let bytes = - Data_encoding.Binary.to_bytes_exn command_encoding (Node contents) in + Data_encoding.Binary.to_bytes_exn command_encoding (Node contents) + in set_mbytes buf bytes let set_blob buf data = - let bytes = Data_encoding.Binary.to_bytes_exn command_encoding (Blob data) in + let bytes = + Data_encoding.Binary.to_bytes_exn command_encoding (Blob data) + in set_mbytes buf bytes let set_proot buf pruned_block = @@ -437,30 +512,29 @@ module Make (I:Dump_interface) = struct (* TODO add more info (e.g. nb context item, nb blocks, etc.) *) type snapshot_metadata = { - version : string ; - mode : Tezos_shell_services.History_mode.t ; + version : string; + mode : Tezos_shell_services.History_mode.t } let snapshot_metadata_encoding = let open Data_encoding in conv - (fun { version ; mode } -> (version, mode)) - (fun (version, mode) -> { version ; mode }) + (fun {version; mode} -> (version, mode)) + (fun (version, mode) -> {version; mode}) (obj2 (req "version" string) (req "mode" Tezos_shell_services.History_mode.encoding)) let write_snapshot_metadata ~mode buf = - let version = { - version = current_version ; - mode = mode ; - } in + let version = {version = current_version; mode} in let bytes = - Data_encoding.(Binary.to_bytes_exn snapshot_metadata_encoding version) in + Data_encoding.(Binary.to_bytes_exn snapshot_metadata_encoding version) + in set_mbytes buf bytes let read_snapshot_metadata rbuf = - get_mbytes rbuf >>|? fun bytes -> + get_mbytes rbuf + >>|? 
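(* Commands in the snapshot stream are length-prefixed: [get_mbytes] reads
   one chunk, which is then decoded with [command_encoding]. *)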
fun bytes -> Data_encoding.(Binary.of_bytes_exn snapshot_metadata_encoding) bytes let check_version v = @@ -472,201 +546,227 @@ module Make (I:Dump_interface) = struct (* Dumping *) let buf = Buffer.create 1_000_000 in let written = ref 0 in - let flush () = let contents = Buffer.contents buf in Buffer.clear buf ; written := !written + String.length contents ; - Lwt_utils_unix.write_string fd contents in - + Lwt_utils_unix.write_string fd contents + in let maybe_flush () = - if Buffer.length buf > 1_000_000 then flush () else Lwt.return_unit in - + if Buffer.length buf > 1_000_000 then flush () else Lwt.return_unit + in (* Noting the visited hashes *) let visited_hash = Hashtbl.create 1000 in let visited h = Hashtbl.mem visited_hash h in let set_visit h = Hashtbl.add visited_hash h () in - (* Folding through a node *) let fold_tree_path ctxt tree = let cpt = ref 0 in let rec fold_tree_path ctxt tree = - I.tree_list tree >>= fun keys -> - let keys = List.sort (fun (a,_) (b,_) -> String.compare a b) keys in + I.tree_list tree + >>= fun keys -> + let keys = List.sort (fun (a, _) (b, _) -> String.compare a b) keys in Lwt_list.map_s - begin fun (name, kind) -> - I.sub_tree tree [name] >>= function - | None -> assert false + (fun (name, kind) -> + I.sub_tree tree [name] + >>= function + | None -> + assert false | Some sub_tree -> - I.tree_hash ctxt sub_tree >>= fun hash -> - begin - if visited hash then Lwt.return_unit - else - begin - Tezos_stdlib.Utils.display_progress - ~refresh_rate:(!cpt, 1_000) - "Context: %dK elements, %dMiB written%!" - (!cpt / 1_000) (!written / 1_048_576) ; - incr cpt ; - set_visit hash; (* There cannot be a cycle *) - match kind with - | `Node -> - fold_tree_path ctxt sub_tree - | `Contents -> - begin I.tree_content sub_tree >>= function - | None -> - assert false - | Some data -> - set_blob buf data ; - maybe_flush () - end - end - end >>= fun () -> - Lwt.return (name, hash) - end - keys >>= fun sub_keys -> - set_node buf sub_keys; - maybe_flush () + I.tree_hash ctxt sub_tree + >>= fun hash -> + ( if visited hash then Lwt.return_unit + else ( + Tezos_stdlib.Utils.display_progress + ~refresh_rate:(!cpt, 1_000) + "Context: %dK elements, %dMiB written%!" + (!cpt / 1_000) + (!written / 1_048_576) ; + incr cpt ; + set_visit hash ; + (* There cannot be a cycle *) + match kind with + | `Node -> + fold_tree_path ctxt sub_tree + | `Contents -> ( + I.tree_content sub_tree + >>= function + | None -> + assert false + | Some data -> + set_blob buf data ; maybe_flush () ) ) ) + >>= fun () -> Lwt.return (name, hash)) + keys + >>= fun sub_keys -> set_node buf sub_keys ; maybe_flush () in fold_tree_path ctxt tree in - Lwt.catch begin fun () -> - let bh, block_data, mode, pruned_iterator = data in - write_snapshot_metadata ~mode buf ; - I.get_context idx bh >>= function - | None -> - fail @@ Context_not_found (I.Block_header.to_bytes bh) - | Some ctxt -> - let tree = I.context_tree ctxt in - fold_tree_path ctxt tree >>= fun () -> - Tezos_stdlib.Utils.display_progress_end (); - I.context_parents ctxt >>= fun parents -> - set_root buf bh (I.context_info ctxt) parents block_data; - (* Dump pruned blocks *) - let dump_pruned cpt pruned = - Tezos_stdlib.Utils.display_progress - ~refresh_rate:(cpt, 1_000) - "History: %dK block, %dMiB written" - (cpt / 1_000) (!written / 1_048_576) ; - set_proot buf pruned; - maybe_flush () in - let rec aux cpt acc header = - pruned_iterator header >>=? 
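(* Walk the chain backwards through [pruned_iterator]: each pruned
   predecessor is written out with [dump_pruned], the protocol data that
   comes with it is accumulated, and the recursion stops once no
   predecessor is returned. *)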
function - | (None, None) -> return acc (* assert false *) - | (None, Some protocol_data) -> - return (protocol_data :: acc) - | (Some pred_pruned, Some protocol_data) -> - dump_pruned cpt pred_pruned >>= fun () -> - aux (succ cpt) (protocol_data :: acc) - (I.Pruned_block.header pred_pruned) - | (Some pred_pruned, None) -> - dump_pruned cpt pred_pruned >>= fun () -> - aux (succ cpt) acc - (I.Pruned_block.header pred_pruned) - in - let starting_block_header = I.Block_data.header block_data in - aux 0 [] starting_block_header >>=? fun protocol_datas -> - (* Dump protocol data *) - Lwt_list.iter_s (fun proto -> - set_loot buf proto; - maybe_flush () ; - ) protocol_datas >>= fun () -> - Tezos_stdlib.Utils.display_progress_end (); - return_unit >>=? fun () -> - set_end buf; - flush () >>= fun () -> - return_unit - end - begin function - | Unix.Unix_error (e,_,_) -> + Lwt.catch + (fun () -> + let (bh, block_data, mode, pruned_iterator) = data in + write_snapshot_metadata ~mode buf ; + I.get_context idx bh + >>= function + | None -> + fail @@ Context_not_found (I.Block_header.to_bytes bh) + | Some ctxt -> + let tree = I.context_tree ctxt in + fold_tree_path ctxt tree + >>= fun () -> + Tezos_stdlib.Utils.display_progress_end () ; + I.context_parents ctxt + >>= fun parents -> + set_root buf bh (I.context_info ctxt) parents block_data ; + (* Dump pruned blocks *) + let dump_pruned cpt pruned = + Tezos_stdlib.Utils.display_progress + ~refresh_rate:(cpt, 1_000) + "History: %dK block, %dMiB written" + (cpt / 1_000) + (!written / 1_048_576) ; + set_proot buf pruned ; + maybe_flush () + in + let rec aux cpt acc header = + pruned_iterator header + >>=? function + | (None, None) -> + return acc (* assert false *) + | (None, Some protocol_data) -> + return (protocol_data :: acc) + | (Some pred_pruned, Some protocol_data) -> + dump_pruned cpt pred_pruned + >>= fun () -> + aux + (succ cpt) + (protocol_data :: acc) + (I.Pruned_block.header pred_pruned) + | (Some pred_pruned, None) -> + dump_pruned cpt pred_pruned + >>= fun () -> + aux (succ cpt) acc (I.Pruned_block.header pred_pruned) + in + let starting_block_header = I.Block_data.header block_data in + aux 0 [] starting_block_header + >>=? fun protocol_datas -> + (* Dump protocol data *) + Lwt_list.iter_s + (fun proto -> set_loot buf proto ; maybe_flush ()) + protocol_datas + >>= fun () -> + Tezos_stdlib.Utils.display_progress_end () ; + return_unit + >>=? fun () -> + set_end buf ; + flush () >>= fun () -> return_unit) + (function + | Unix.Unix_error (e, _, _) -> fail @@ System_write_error (Unix.error_message e) - | err -> Lwt.fail err - end + | err -> + Lwt.fail err) (* Restoring *) let restore_contexts_fd index ~fd k_store_pruned_blocks block_validation = - let read = ref 0 in let rbuf = ref (fd, Bytes.empty, 0, read) in - (* Editing the repository *) - let add_blob blob = - I.add_mbytes index blob >>= fun tree -> - return tree - in - + let add_blob blob = I.add_mbytes index blob >>= fun tree -> return tree in let add_dir keys = - I.add_dir index keys >>= function - | None -> fail Restore_context_failure - | Some tree -> return tree + I.add_dir index keys + >>= function + | None -> fail Restore_context_failure | Some tree -> return tree in - let restore history_mode = let rec first_pass ctxt cpt = Tezos_stdlib.Utils.display_progress ~refresh_rate:(cpt, 1_000) "Context: %dK elements, %dMiB read" - (cpt / 1_000) (!read / 1_048_576) ; - get_command rbuf >>=? 
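(* First pass over the stream: [Blob] and [Node] commands rebuild the
   context tree, and the [Root] command commits the reconstructed context
   and checks it against the announced block header. *)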
function - | Root { block_header ; info ; parents ; block_data } -> - begin I.set_context ~info ~parents ctxt block_header >>= function - | None -> fail Inconsistent_snapshot_data - | Some block_header -> - return (block_header, block_data) - end + (cpt / 1_000) + (!read / 1_048_576) ; + get_command rbuf + >>=? function + | Root {block_header; info; parents; block_data} -> ( + I.set_context ~info ~parents ctxt block_header + >>= function + | None -> + fail Inconsistent_snapshot_data + | Some block_header -> + return (block_header, block_data) ) | Node contents -> - add_dir contents >>=? fun tree -> - first_pass (I.update_context ctxt tree) (cpt + 1) + add_dir contents + >>=? fun tree -> first_pass (I.update_context ctxt tree) (cpt + 1) | Blob data -> - add_blob data >>=? fun tree -> - first_pass (I.update_context ctxt tree) (cpt + 1) - | _ -> fail Inconsistent_snapshot_data in - - let rec second_pass pred_header (rev_block_hashes, protocol_datas) todo cpt = + add_blob data + >>=? fun tree -> first_pass (I.update_context ctxt tree) (cpt + 1) + | _ -> + fail Inconsistent_snapshot_data + in + let rec second_pass pred_header (rev_block_hashes, protocol_datas) todo + cpt = Tezos_stdlib.Utils.display_progress ~refresh_rate:(cpt, 1_000) "Store: %dK elements, %dMiB read" - (cpt / 1_000) (!read / 1_048_576) ; - get_command rbuf >>=? function + (cpt / 1_000) + (!read / 1_048_576) ; + get_command rbuf + >>=? function | Proot pruned_block -> let header = I.Pruned_block.header pruned_block in let hash = Block_header.hash header in - block_validation pred_header hash pruned_block >>=? fun () -> - begin if (cpt + 1) mod 5_000 = 0 then - k_store_pruned_blocks ((hash, pruned_block) :: todo) >>=? fun () -> - second_pass (Some header) - (hash :: rev_block_hashes, protocol_datas) [] (cpt + 1) - else - second_pass (Some header) - (hash :: rev_block_hashes, protocol_datas) ((hash, pruned_block) :: todo) (cpt + 1) - end + block_validation pred_header hash pruned_block + >>=? fun () -> + if (cpt + 1) mod 5_000 = 0 then + k_store_pruned_blocks ((hash, pruned_block) :: todo) + >>=? fun () -> + second_pass + (Some header) + (hash :: rev_block_hashes, protocol_datas) + [] + (cpt + 1) + else + second_pass + (Some header) + (hash :: rev_block_hashes, protocol_datas) + ((hash, pruned_block) :: todo) + (cpt + 1) | Loot protocol_data -> - k_store_pruned_blocks todo >>=? fun () -> - second_pass pred_header (rev_block_hashes, protocol_data :: protocol_datas) todo (cpt + 1) + k_store_pruned_blocks todo + >>=? fun () -> + second_pass + pred_header + (rev_block_hashes, protocol_data :: protocol_datas) + todo + (cpt + 1) | End -> return (pred_header, rev_block_hashes, List.rev protocol_datas) - | _ -> fail Inconsistent_snapshot_data in - first_pass (I.make_context index) 0 >>=? fun (block_header, block_data) -> + | _ -> + fail Inconsistent_snapshot_data + in + first_pass (I.make_context index) 0 + >>=? fun (block_header, block_data) -> Tezos_stdlib.Utils.display_progress_end () ; - second_pass None ([], []) [] 0 >>=? fun (oldest_header_opt, rev_block_hashes, protocol_datas) -> + second_pass None ([], []) [] 0 + >>=? fun (oldest_header_opt, rev_block_hashes, protocol_datas) -> Tezos_stdlib.Utils.display_progress_end () ; - return (block_header, - block_data, - history_mode, - oldest_header_opt, - rev_block_hashes, - protocol_datas) + return + ( block_header, + block_data, + history_mode, + oldest_header_opt, + rev_block_hashes, + protocol_datas ) in (* Check snapshot version *) - read_snapshot_metadata rbuf >>=? 
fun version -> - check_version version >>=? fun () -> - Lwt.catch begin fun () -> - restore version.mode - end - begin function - | Unix.Unix_error (e,_,_) -> + read_snapshot_metadata rbuf + >>=? fun version -> + check_version version + >>=? fun () -> + Lwt.catch + (fun () -> restore version.mode) + (function + | Unix.Unix_error (e, _, _) -> fail @@ System_read_error (Unix.error_message e) - | err -> Lwt.fail err - end + | err -> + Lwt.fail err) end diff --git a/src/lib_storage/context_dump.mli b/src/lib_storage/context_dump.mli index 511b2f6bcbb4c13d68e45b8b15f06babe1aa9a3b..0a95f38d25227b1c00f99878f3bd623e1078c28b 100644 --- a/src/lib_storage/context_dump.mli +++ b/src/lib_storage/context_dump.mli @@ -25,71 +25,107 @@ (*****************************************************************************) type error += System_write_error of string + type error += Bad_hash of string * MBytes.t * MBytes.t + type error += Context_not_found of MBytes.t + type error += System_read_error of string + type error += Inconsistent_snapshot_file + type error += Inconsistent_snapshot_data + type error += Missing_snapshot_data + type error += Invalid_snapshot_version of string * string + type error += Restore_context_failure module type Dump_interface = sig type index + type context + type tree + type hash + type step = string + type key = step list + type commit_info val commit_info_encoding : commit_info Data_encoding.t val hash_encoding : hash Data_encoding.t - val blob_encoding : [ `Blob of MBytes.t ] Data_encoding.t - val node_encoding : [ `Node of MBytes.t ] Data_encoding.t + + val blob_encoding : [`Blob of MBytes.t] Data_encoding.t + + val node_encoding : [`Node of MBytes.t] Data_encoding.t module Block_header : sig type t = Block_header.t + val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option + val equal : t -> t -> bool + val encoding : t Data_encoding.t end module Pruned_block : sig type t + val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option + val header : t -> Block_header.t + val encoding : t Data_encoding.t end module Block_data : sig type t + val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option + val header : t -> Block_header.t + val encoding : t Data_encoding.t end module Protocol_data : sig type t + val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t option + val encoding : t Data_encoding.t end module Commit_hash : sig type t + val to_bytes : t -> MBytes.t + val of_bytes : MBytes.t -> t tzresult + val encoding : t Data_encoding.t end (* hash manipulation *) - val hash_export : hash -> [ `Node | `Blob ] * MBytes.t - val hash_import : [ `Node | `Blob ] -> MBytes.t -> hash tzresult + val hash_export : hash -> [`Node | `Blob] * MBytes.t + + val hash_import : [`Node | `Blob] -> MBytes.t -> hash tzresult + val hash_equal : hash -> hash -> bool (* commit manipulation (for parents) *) @@ -97,58 +133,90 @@ module type Dump_interface = sig (* Commit info *) val context_info : context -> commit_info - val context_info_export : commit_info -> ( Int64.t * string * string ) - val context_info_import : ( Int64.t * string * string ) -> commit_info + + val context_info_export : commit_info -> Int64.t * string * string + + val context_info_import : Int64.t * string * string -> commit_info (* block header manipulation *) val get_context : index -> Block_header.t -> context option Lwt.t + val set_context : - info:commit_info -> parents:Commit_hash.t list -> context -> + info:commit_info -> + parents:Commit_hash.t list -> + context -> Block_header.t -> 
Block_header.t option Lwt.t (* for dumping *) val context_tree : context -> tree + val tree_hash : context -> tree -> hash Lwt.t + val sub_tree : tree -> key -> tree option Lwt.t - val tree_list : tree -> ( step * [`Contents|`Node] ) list Lwt.t + + val tree_list : tree -> (step * [`Contents | `Node]) list Lwt.t + val tree_content : tree -> MBytes.t option Lwt.t (* for restoring *) val make_context : index -> context + val update_context : context -> tree -> context + val add_hash : index -> tree -> key -> hash -> tree option Lwt.t + val add_mbytes : index -> MBytes.t -> tree Lwt.t - val add_dir : index -> ( step * hash ) list -> tree option Lwt.t + val add_dir : index -> (step * hash) list -> tree option Lwt.t end module type S = sig type index + type context + type block_header + type block_data + type pruned_block + type protocol_data val dump_contexts_fd : index -> - (block_header * block_data * History_mode.t * - (block_header -> (pruned_block option * protocol_data option) tzresult Lwt.t)) -> - fd:Lwt_unix.file_descr -> unit tzresult Lwt.t - - val restore_contexts_fd : index -> fd:Lwt_unix.file_descr -> + block_header + * block_data + * History_mode.t + * (block_header -> + (pruned_block option * protocol_data option) tzresult Lwt.t) -> + fd:Lwt_unix.file_descr -> + unit tzresult Lwt.t + + val restore_contexts_fd : + index -> + fd:Lwt_unix.file_descr -> ((Block_hash.t * pruned_block) list -> unit tzresult Lwt.t) -> (block_header option -> - Block_hash.t -> pruned_block -> unit tzresult Lwt.t) -> - (block_header * block_data * History_mode.t * - Block_header.t option * Block_hash.t list * protocol_data list) tzresult Lwt.t + Block_hash.t -> + pruned_block -> + unit tzresult Lwt.t) -> + ( block_header + * block_data + * History_mode.t + * Block_header.t option + * Block_hash.t list + * protocol_data list ) + tzresult + Lwt.t end -module Make (I:Dump_interface) : S - with type index := I.index - and type context := I.context - and type block_header := I.Block_header.t - and type block_data := I.Block_data.t - and type pruned_block := I.Pruned_block.t - and type protocol_data := I.Protocol_data.t +module Make (I : Dump_interface) : + S + with type index := I.index + and type context := I.context + and type block_header := I.Block_header.t + and type block_data := I.Block_data.t + and type pruned_block := I.Pruned_block.t + and type protocol_data := I.Protocol_data.t diff --git a/src/lib_storage/raw_store.ml b/src/lib_storage/raw_store.ml index 2b18af3b7c8d280c6975b328ff38d4f4c6186bbc..b86f368b8ac5459230aa329b268e76b306b353bb 100644 --- a/src/lib_storage/raw_store.ml +++ b/src/lib_storage/raw_store.ml @@ -26,11 +26,12 @@ open Rresult type t = { - dir : Lmdb.t ; - parent : (Lmdb.rw Lmdb.txn * Lmdb.db * Lmdb.rw Lmdb.cursor) Lwt.key ; + dir : Lmdb.t; + parent : (Lmdb.rw Lmdb.txn * Lmdb.db * Lmdb.rw Lmdb.cursor) Lwt.key } type key = string list + type value = MBytes.t type error += Unknown of string list @@ -42,157 +43,169 @@ let () = ~title:"Missing key in store" ~description:"Missing key in store" ~pp:(fun ppf keys -> - Format.fprintf ppf - "Missing key in store: %s" - (String.concat "/" keys)) + Format.fprintf ppf "Missing key in store: %s" (String.concat "/" keys)) Data_encoding.(obj1 (req "key" (list string))) (function Unknown keys -> Some keys | _ -> None) (fun keys -> Unknown keys) let concat = String.concat "/" + let split = String.split_on_char '/' -let lwt_fail_error err = - Lwt.fail_with (Lmdb.string_of_error err) +let lwt_fail_error err = Lwt.fail_with (Lmdb.string_of_error err) 
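(* Aside: a minimal sketch of how the store API reformatted in this file
   might be used, relying only on the signatures visible here
   ([init], [store], [read_opt], [close]). The path, key and value below are
   hypothetical, and [Lwt.Infix] is opened for the Lwt bind operator. *)
let _raw_store_example () =
  let open Lwt.Infix in
  init ~readonly:false "/tmp/raw_store_example"
  >>= function
  | Error _ ->
      (* opening the LMDB directory failed *)
      Lwt.return_unit
  | Ok t ->
      (* keys are '/'-joined string lists, values are [MBytes.t] *)
      store t ["chain"; "head"] (MBytes.of_string "example value")
      >>= fun () ->
      read_opt t ["chain"; "head"]
      >>= fun _value_opt -> close t ; Lwt.return_unit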
let of_result = function - | Ok res -> Lwt.return res - | Error err -> lwt_fail_error err + | Ok res -> + Lwt.return res + | Error err -> + lwt_fail_error err -let (>>=?) v f = - match v with - | Error err -> lwt_fail_error err - | Ok v -> f v +let ( >>=? ) v f = match v with Error err -> lwt_fail_error err | Ok v -> f v let init ?(readonly = false) ?mapsize path = if not (Sys.file_exists path) then Unix.mkdir path 0o755 ; let sync_flag = match Sys.getenv_opt "TEZOS_STORE_SYNC" with - | None -> [] - | Some s -> - match String.lowercase_ascii s with - | "nosync" -> [ Lmdb.NoSync ] - | "nometasync" -> [ Lmdb.NoMetaSync ] - | _ -> - Printf.eprintf "Unrecognized TEZOS_STORE_SYNC option : %s\n\ - allowed: nosync nometasync" s; - [] + | None -> + [] + | Some s -> ( + match String.lowercase_ascii s with + | "nosync" -> + [Lmdb.NoSync] + | "nometasync" -> + [Lmdb.NoMetaSync] + | _ -> + Printf.eprintf + "Unrecognized TEZOS_STORE_SYNC option : %s\n\ + allowed: nosync nometasync" + s ; + [] ) in - let readonly_flag = if readonly then [ Lmdb.RdOnly ] else [] in + let readonly_flag = if readonly then [Lmdb.RdOnly] else [] in let file_flags = if readonly then 0o444 else 0o644 in - match Lmdb.opendir ?mapsize ~flags:(sync_flag - @ readonly_flag - @ [NoTLS; NoMetaSync]) path file_flags with - | Ok dir -> return { dir ; parent = Lwt.new_key () } - | Error err -> failwith "%a" Lmdb.pp_error err + match + Lmdb.opendir + ?mapsize + ~flags:(sync_flag @ readonly_flag @ [NoTLS; NoMetaSync]) + path + file_flags + with + | Ok dir -> + return {dir; parent = Lwt.new_key ()} + | Error err -> + failwith "%a" Lmdb.pp_error err -let close { dir ; _ } = Lmdb.closedir dir +let close {dir; _} = Lmdb.closedir dir -let known { dir ; parent } key = - begin match Lwt.get parent with - | Some (txn, db, _cursor) -> Lmdb.mem txn db (concat key) - | None -> - Lmdb.with_ro_db dir ~f:begin fun txn db -> - Lmdb.mem txn db (concat key) - end - end |> of_result - -let read_opt { dir ; parent } key = - begin match Lwt.get parent with - | Some (txn, db, _cursor) -> Lmdb.get txn db (concat key) >>| MBytes.copy - | None -> - Lmdb.with_ro_db dir ~f:begin fun txn db -> - Lmdb.get txn db (concat key) >>| MBytes.copy - end - end |> function - | Ok v -> Lwt.return_some v - | Error KeyNotFound -> Lwt.return_none - | Error err -> lwt_fail_error err - -let read { dir ; parent } key = - begin match Lwt.get parent with - | Some (txn, db, _cursor) -> Lmdb.get txn db (concat key) >>| MBytes.copy - | None -> - Lmdb.with_ro_db dir ~f:begin fun txn db -> - Lmdb.get txn db (concat key) >>| MBytes.copy - end - end |> function - | Ok v -> return v - | Error _err -> fail (Unknown key) - -let store { dir ; parent } k v = - begin match Lwt.get parent with - | Some (txn, db, _cursor) -> Lmdb.put txn db (concat k) v - | None -> - Lmdb.with_rw_db dir ~f:begin fun txn db -> - Lmdb.put txn db (concat k) v - end - end |> of_result +let known {dir; parent} key = + ( match Lwt.get parent with + | Some (txn, db, _cursor) -> + Lmdb.mem txn db (concat key) + | None -> + Lmdb.with_ro_db dir ~f:(fun txn db -> Lmdb.mem txn db (concat key)) ) + |> of_result + +let read_opt {dir; parent} key = + ( match Lwt.get parent with + | Some (txn, db, _cursor) -> + Lmdb.get txn db (concat key) >>| MBytes.copy + | None -> + Lmdb.with_ro_db dir ~f:(fun txn db -> + Lmdb.get txn db (concat key) >>| MBytes.copy) ) + |> function + | Ok v -> + Lwt.return_some v + | Error KeyNotFound -> + Lwt.return_none + | Error err -> + lwt_fail_error err + +let read {dir; parent} key = + ( match Lwt.get 
parent with + | Some (txn, db, _cursor) -> + Lmdb.get txn db (concat key) >>| MBytes.copy + | None -> + Lmdb.with_ro_db dir ~f:(fun txn db -> + Lmdb.get txn db (concat key) >>| MBytes.copy) ) + |> function Ok v -> return v | Error _err -> fail (Unknown key) + +let store {dir; parent} k v = + ( match Lwt.get parent with + | Some (txn, db, _cursor) -> + Lmdb.put txn db (concat k) v + | None -> + Lmdb.with_rw_db dir ~f:(fun txn db -> Lmdb.put txn db (concat k) v) ) + |> of_result -let remove { dir ; parent } k = +let remove {dir; parent} k = let remove txn db = match Lmdb.del txn db (concat k) with - | Ok () -> Ok () - | Error KeyNotFound -> Ok () - | Error err -> Error err in - begin match Lwt.get parent with - | Some (txn, db, _cursor) -> remove txn db - | None -> Lmdb.with_rw_db dir ~f:remove - end |> of_result + | Ok () -> + Ok () + | Error KeyNotFound -> + Ok () + | Error err -> + Error err + in + ( match Lwt.get parent with + | Some (txn, db, _cursor) -> + remove txn db + | None -> + Lmdb.with_rw_db dir ~f:remove ) + |> of_result let is_prefix s s' = String.(length s <= length s' && compare s (sub s' 0 (length s)) = 0) -let known_dir { dir ; parent } k = +let known_dir {dir; parent} k = let k = concat k in let cursor_fun cursor = - Lmdb.cursor_at cursor k >>= fun () -> - Lmdb.cursor_get cursor >>| fun (first_k, _v) -> - (is_prefix k (MBytes.to_string first_k)) + Lmdb.cursor_at cursor k + >>= fun () -> + Lmdb.cursor_get cursor + >>| fun (first_k, _v) -> is_prefix k (MBytes.to_string first_k) in - begin match Lwt.get parent with - | Some (txn, db, _cursor) -> - Lmdb.with_cursor txn db ~f:cursor_fun - | None -> - Lmdb.with_ro_db dir ~f:begin fun txn db -> - Lmdb.with_cursor txn db ~f:cursor_fun - end - end |> of_result + ( match Lwt.get parent with + | Some (txn, db, _cursor) -> + Lmdb.with_cursor txn db ~f:cursor_fun + | None -> + Lmdb.with_ro_db dir ~f:(fun txn db -> + Lmdb.with_cursor txn db ~f:cursor_fun) ) + |> of_result -let remove_dir { dir ; parent } k = +let remove_dir {dir; parent} k = let k = concat k in let cursor_fun cursor = - Lmdb.cursor_at cursor k >>= fun () -> - Lmdb.cursor_iter cursor ~f:begin fun (kk, _v) -> - let kk_string = MBytes.to_string kk in - if is_prefix k kk_string then begin - Lmdb.cursor_del cursor - end - else Error KeyNotFound - end in - begin match Lwt.get parent with - | Some (txn, db, _cursor) -> - Lmdb.with_cursor txn db ~f:cursor_fun - | None -> - Lmdb.with_rw_db dir ~f:begin fun txn db -> - Lmdb.with_cursor txn db ~f:cursor_fun - end - end |> function - | Error KeyNotFound - | Ok () -> Lwt.return_unit - | Error err -> lwt_fail_error err + Lmdb.cursor_at cursor k + >>= fun () -> + Lmdb.cursor_iter cursor ~f:(fun (kk, _v) -> + let kk_string = MBytes.to_string kk in + if is_prefix k kk_string then Lmdb.cursor_del cursor + else Error KeyNotFound) + in + ( match Lwt.get parent with + | Some (txn, db, _cursor) -> + Lmdb.with_cursor txn db ~f:cursor_fun + | None -> + Lmdb.with_rw_db dir ~f:(fun txn db -> + Lmdb.with_cursor txn db ~f:cursor_fun) ) + |> function + | Error KeyNotFound | Ok () -> + Lwt.return_unit + | Error err -> + lwt_fail_error err let list_equal l1 l2 len = - if len < 0 || len > List.length l1 || len > List.length l2 - then invalid_arg "list_compare: invalid len" ; + if len < 0 || len > List.length l1 || len > List.length l2 then + invalid_arg "list_compare: invalid len" ; let rec inner l1 l2 len = - match len, l1, l2 with - | 0, _, _ -> true - | _, [], _ - | _, _, [] -> false - | _, h1 :: t1, h2 :: t2 -> - if h1 <> h2 then false - else 
inner t1 t2 (pred len) + match (len, l1, l2) with + | (0, _, _) -> + true + | (_, [], _) | (_, _, []) -> + false + | (_, h1 :: t1, h2 :: t2) -> + if h1 <> h2 then false else inner t1 t2 (pred len) in inner l1 l2 len @@ -205,110 +218,125 @@ let list_sub l pos len = if len < 0 || pos < 0 || pos + len > List.length l then invalid_arg "list_sub" ; let rec inner (acc, n) = function - | [] -> List.rev acc + | [] -> + List.rev acc | h :: t -> - if n = 0 then List.rev acc - else inner (h :: acc, pred n) t in + if n = 0 then List.rev acc else inner (h :: acc, pred n) t + in inner ([], len) l -let with_rw_cursor_lwt ?nosync ?nometasync ?flags ?name { dir ; parent } ~f = +let with_rw_cursor_lwt ?nosync ?nometasync ?flags ?name {dir; parent} ~f = let local_parent = match Lwt.get parent with - | None -> None - | Some (txn, _db, _cursor) -> Some txn in - Lmdb.create_rw_txn - ?nosync ?nometasync ?parent:local_parent dir >>=? fun txn -> - Lmdb.opendb ?flags ?name txn >>=? fun db -> - Lmdb.opencursor txn db >>=? fun cursor -> - Lwt.with_value parent (Some (txn, db, cursor)) begin fun () -> - Lwt.try_bind (fun () -> f cursor) - begin fun res -> - Lmdb.cursor_close cursor ; - Lmdb.commit_txn txn >>=? fun () -> - Lwt.return res - end - begin fun exn -> - Lmdb.cursor_close cursor ; - Lmdb.abort_txn txn ; - Lwt.fail exn - end - end + | None -> + None + | Some (txn, _db, _cursor) -> + Some txn + in + Lmdb.create_rw_txn ?nosync ?nometasync ?parent:local_parent dir + >>=? fun txn -> + Lmdb.opendb ?flags ?name txn + >>=? fun db -> + Lmdb.opencursor txn db + >>=? fun cursor -> + Lwt.with_value + parent + (Some (txn, db, cursor)) + (fun () -> + Lwt.try_bind + (fun () -> f cursor) + (fun res -> + Lmdb.cursor_close cursor ; + Lmdb.commit_txn txn >>=? fun () -> Lwt.return res) + (fun exn -> + Lmdb.cursor_close cursor ; Lmdb.abort_txn txn ; Lwt.fail exn)) let cursor_next_lwt cursor acc f = match Lmdb.cursor_next cursor with - | Error KeyNotFound -> acc - | Error err -> lwt_fail_error err - | Ok () -> Lwt.bind acc f + | Error KeyNotFound -> + acc + | Error err -> + lwt_fail_error err + | Ok () -> + Lwt.bind acc f let cursor_at_lwt cursor k acc f = match Lmdb.cursor_at cursor (concat k) with - | Error KeyNotFound -> acc - | Error err -> lwt_fail_error err - | Ok () -> Lwt.bind acc f + | Error KeyNotFound -> + acc + | Error err -> + lwt_fail_error err + | Ok () -> + Lwt.bind acc f (* assumption: store path segments have only characters different than the separator '/', which immediately precedes '0' *) let zero_char_str = String.make 1 (Char.chr (Char.code '/' + 1)) + let next_key_after_subdirs = function - | [] -> [ zero_char_str ] - | (_ :: _) as path -> - List.sub path (List.length path - 1) @ - [List.last_exn path ^ zero_char_str] + | [] -> + [zero_char_str] + | _ :: _ as path -> + List.sub path (List.length path - 1) + @ [List.last_exn path ^ zero_char_str] let fold t k ~init ~f = let base_len = List.length k in let rec inner ht cursor acc = - Lmdb.cursor_get cursor >>=? fun (kk, _v) -> + Lmdb.cursor_get cursor + >>=? 
fun (kk, _v) -> let kk = MBytes.to_string kk in let kk_split = split kk in match is_child ~child:kk_split ~parent:k with - | false -> Lwt.return acc + | false -> + Lwt.return acc | true -> let cur_len = List.length kk_split in - if cur_len = succ base_len then begin + if cur_len = succ base_len then cursor_next_lwt cursor (f (`Key kk_split) acc) (inner ht cursor) - end - else begin + else let dir = list_sub kk_split 0 (succ base_len) in if Hashtbl.mem ht dir then - cursor_at_lwt cursor (next_key_after_subdirs dir) - (Lwt.return acc) (inner ht cursor) - else begin + cursor_at_lwt + cursor + (next_key_after_subdirs dir) + (Lwt.return acc) + (inner ht cursor) + else ( Hashtbl.add ht dir () ; - cursor_next_lwt cursor (f (`Dir dir) acc) (inner ht cursor) - end - end in - with_rw_cursor_lwt t ~f:begin fun cursor -> - cursor_at_lwt cursor k - (Lwt.return init) - (fun acc -> - let ht = Hashtbl.create 31 in - inner ht cursor acc) - end + cursor_next_lwt cursor (f (`Dir dir) acc) (inner ht cursor) ) + in + with_rw_cursor_lwt t ~f:(fun cursor -> + cursor_at_lwt cursor k (Lwt.return init) (fun acc -> + let ht = Hashtbl.create 31 in + inner ht cursor acc)) let fold_keys t k ~init ~f = - with_rw_cursor_lwt t ~f:begin fun cursor -> - cursor_at_lwt cursor k - (Lwt.return init) - (let rec inner acc = - Lmdb.cursor_get cursor >>=? fun (kk, _v) -> - let kk = MBytes.to_string kk in - let kk_split = split kk in - match is_child ~child:kk_split ~parent:k with - | false -> Lwt.return acc - | true -> cursor_next_lwt cursor (f kk_split acc) inner - in inner) - end - -let keys t = - fold_keys t ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc)) + with_rw_cursor_lwt t ~f:(fun cursor -> + cursor_at_lwt + cursor + k + (Lwt.return init) + (let rec inner acc = + Lmdb.cursor_get cursor + >>=? fun (kk, _v) -> + let kk = MBytes.to_string kk in + let kk_split = split kk in + match is_child ~child:kk_split ~parent:k with + | false -> + Lwt.return acc + | true -> + cursor_next_lwt cursor (f kk_split acc) inner + in + inner)) + +let keys t = fold_keys t ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc)) let open_with_atomic_rw ?mapsize path f = let open Error_monad in - init ?mapsize path >>=? fun state -> - with_rw_cursor_lwt state ~f:(fun _c -> f state) >>=? fun res -> - close state ; - return res + init ?mapsize path + >>=? fun state -> + with_rw_cursor_lwt state ~f:(fun _c -> f state) + >>=? 
fun res -> close state ; return res -let with_atomic_rw state f = - with_rw_cursor_lwt state ~f:(fun _c -> f ()) +let with_atomic_rw state f = with_rw_cursor_lwt state ~f:(fun _c -> f ()) diff --git a/src/lib_storage/raw_store.mli b/src/lib_storage/raw_store.mli index 76222ac059908048e73ca6e3afebcf5dda483d74..f2c3ad19a14bf4d7e738d0a1397b8bd608f666ea 100644 --- a/src/lib_storage/raw_store.mli +++ b/src/lib_storage/raw_store.mli @@ -27,15 +27,14 @@ open Store_sigs include STORE -val init: ?readonly:bool -> ?mapsize:int64 -> string -> t tzresult Lwt.t -val close: t -> unit +val init : ?readonly:bool -> ?mapsize:int64 -> string -> t tzresult Lwt.t -val with_atomic_rw: - t -> - (unit -> 'a Lwt.t) -> - 'a Lwt.t +val close : t -> unit -val open_with_atomic_rw: - ?mapsize:int64 -> string -> +val with_atomic_rw : t -> (unit -> 'a Lwt.t) -> 'a Lwt.t + +val open_with_atomic_rw : + ?mapsize:int64 -> + string -> (t -> 'a Error_monad.tzresult Lwt.t) -> 'a tzresult Lwt.t diff --git a/src/lib_storage/store_helpers.ml b/src/lib_storage/store_helpers.ml index 2dcd66dafe387f0b34ef43724a07620c78f4fa20..533ba5f8307d0f14c9539a7b08d1ca9939150832 100644 --- a/src/lib_storage/store_helpers.ml +++ b/src/lib_storage/store_helpers.ml @@ -27,91 +27,126 @@ open Store_sigs module Make_value (V : ENCODED_VALUE) = struct type t = V.t + let of_bytes b = match Data_encoding.Binary.of_bytes V.encoding b with - | None -> generic_error "Cannot parse data" (* TODO personalize *) - | Some v -> ok v + | None -> + generic_error "Cannot parse data" (* TODO personalize *) + | Some v -> + ok v + let to_bytes v = try Data_encoding.Binary.to_bytes_exn V.encoding v with Data_encoding.Binary.Write_error error -> Store_logging.log_error "Exception while serializing value %a" - Data_encoding.Binary.pp_write_error error ; + Data_encoding.Binary.pp_write_error + error ; MBytes.create 0 end module Raw_value = struct type t = MBytes.t + let of_bytes b = ok b + let to_bytes b = b end module Make_single_store (S : STORE) (N : NAME) (V : VALUE) = struct type t = S.t + type value = V.t + let known t = S.known t N.name + let read t = S.read t N.name >>=? 
fun b -> Lwt.return (V.of_bytes b) - let read_opt t = - read t >|= function - | Error _ -> None - | Ok v -> Some v + + let read_opt t = read t >|= function Error _ -> None | Ok v -> Some v + let store t v = S.store t N.name (V.to_bytes v) + let remove t = S.remove t N.name end -let map_key f = function - |`Key k -> `Key (f k) - | `Dir k -> `Dir (f k) +let map_key f = function `Key k -> `Key (f k) | `Dir k -> `Dir (f k) -module Make_substore (S : STORE) (N : NAME) - : STORE with type t = S.t = struct +module Make_substore (S : STORE) (N : NAME) : STORE with type t = S.t = struct type t = S.t + type key = string list + type value = MBytes.t + let name_length = List.length N.name + let to_key k = N.name @ k + let of_key k = List.remove name_length k + let known t k = S.known t (to_key k) + let known_dir t k = S.known_dir t (to_key k) + let read t k = S.read t (to_key k) + let read_opt t k = S.read_opt t (to_key k) + let store t k v = S.store t (to_key k) v + let remove t k = S.remove t (to_key k) + let fold t k ~init ~f = - S.fold t (to_key k) ~init - ~f:(fun k acc -> f (map_key of_key k) acc) + S.fold t (to_key k) ~init ~f:(fun k acc -> f (map_key of_key k) acc) + let keys t k = S.keys t (to_key k) >|= fun keys -> List.map of_key keys + let fold_keys t k ~init ~f = S.fold_keys t (to_key k) ~init ~f:(fun k acc -> f (of_key k) acc) + let remove_dir t k = S.remove_dir t (to_key k) end module Make_indexed_substore (S : STORE) (I : INDEX) = struct - type t = S.t + type key = I.t module Store = struct type t = S.t * I.t + type key = string list + type value = MBytes.t + let to_key i k = assert (List.length (I.to_path i []) = I.path_length) ; I.to_path i k + let of_key k = List.remove I.path_length k - let known (t,i) k = S.known t (to_key i k) - let known_dir (t,i) k = S.known_dir t (to_key i k) - let read (t,i) k = S.read t (to_key i k) - let read_opt (t,i) k = S.read_opt t (to_key i k) - let store (t,i) k v = S.store t (to_key i k) v - let remove (t,i) k = S.remove t (to_key i k) - let fold (t,i) k ~init ~f = - S.fold t (to_key i k) ~init - ~f:(fun k acc -> f (map_key of_key k) acc) - let keys (t,i) k = S.keys t (to_key i k) >|= fun keys -> List.map of_key keys - let fold_keys (t,i) k ~init ~f = + + let known (t, i) k = S.known t (to_key i k) + + let known_dir (t, i) k = S.known_dir t (to_key i k) + + let read (t, i) k = S.read t (to_key i k) + + let read_opt (t, i) k = S.read_opt t (to_key i k) + + let store (t, i) k v = S.store t (to_key i k) v + + let remove (t, i) k = S.remove t (to_key i k) + + let fold (t, i) k ~init ~f = + S.fold t (to_key i k) ~init ~f:(fun k acc -> f (map_key of_key k) acc) + + let keys (t, i) k = + S.keys t (to_key i k) >|= fun keys -> List.map of_key keys + + let fold_keys (t, i) k ~init ~f = S.fold_keys t (to_key i k) ~init ~f:(fun k acc -> f (of_key k) acc) - let remove_dir (t,i) k = S.remove_dir t (to_key i k) + + let remove_dir (t, i) k = S.remove_dir t (to_key i k) end let remove_all t i = Store.remove_dir (t, i) [] @@ -120,266 +155,347 @@ module Make_indexed_substore (S : STORE) (I : INDEX) = struct let rec dig i path acc = if i <= 0 then match I.of_path path with - | None -> assert false - | Some path -> f path acc + | None -> + assert false + | Some path -> + f path acc else - S.fold t path ~init:acc ~f:begin fun k acc -> - match k with - | `Dir k -> dig (i-1) k acc - | `Key _ -> Lwt.return acc - end in + S.fold t path ~init:acc ~f:(fun k acc -> + match k with + | `Dir k -> + dig (i - 1) k acc + | `Key _ -> + Lwt.return acc) + in dig I.path_length [] init let 
indexes t = fold_indexes t ~init:[] ~f:(fun i acc -> Lwt.return (i :: acc)) let list t k = S.fold t k ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc)) + let resolve_index t prefix = let rec loop i prefix = function - | [] when i = I.path_length -> begin - match I.of_path prefix with - | None -> assert false - | Some path -> Lwt.return [path] - end + | [] when i = I.path_length -> ( + match I.of_path prefix with + | None -> + assert false + | Some path -> + Lwt.return [path] ) | [] -> - list t prefix >>= fun prefixes -> - Lwt_list.map_p (function - | `Key prefix | `Dir prefix -> loop (i+1) prefix []) prefixes + list t prefix + >>= fun prefixes -> + Lwt_list.map_p + (function `Key prefix | `Dir prefix -> loop (i + 1) prefix []) + prefixes >|= List.flatten | [d] when i = I.path_length - 1 -> - if (i >= I.path_length) then invalid_arg "IO.resolve" ; - list t prefix >>= fun prefixes -> - Lwt_list.map_p (function - | `Key prefix | `Dir prefix -> - match String.remove_prefix ~prefix:d (List.hd (List.rev prefix)) with - | None -> Lwt.return_nil - | Some _ -> loop (i+1) prefix []) + if i >= I.path_length then invalid_arg "IO.resolve" ; + list t prefix + >>= fun prefixes -> + Lwt_list.map_p + (function + | `Key prefix | `Dir prefix -> ( + match + String.remove_prefix ~prefix:d (List.hd (List.rev prefix)) + with + | None -> + Lwt.return_nil + | Some _ -> + loop (i + 1) prefix [] )) prefixes >|= List.flatten | "" :: ds -> - list t prefix >>= fun prefixes -> - Lwt_list.map_p (function - | `Key prefix | `Dir prefix -> loop (i+1) prefix ds) prefixes + list t prefix + >>= fun prefixes -> + Lwt_list.map_p + (function `Key prefix | `Dir prefix -> loop (i + 1) prefix ds) + prefixes >|= List.flatten - | d :: ds -> - if (i >= I.path_length) then invalid_arg "IO.resolve" ; - S.known_dir t (prefix @ [d]) >>= function - | true -> loop (i+1) (prefix @ [d]) ds - | false -> Lwt.return_nil in + | d :: ds -> ( + if i >= I.path_length then invalid_arg "IO.resolve" ; + S.known_dir t (prefix @ [d]) + >>= function + | true -> loop (i + 1) (prefix @ [d]) ds | false -> Lwt.return_nil ) + in loop 0 [] prefix module Make_set (N : NAME) = struct type t = S.t + type elt = I.t + let inited = MBytes.of_string "inited" + let known s i = Store.known (s, i) N.name + let store s i = Store.store (s, i) N.name inited + let remove s i = Store.remove (s, i) N.name - let remove_all s = - fold_indexes s ~init:() ~f:(fun i () -> remove s i) + + let remove_all s = fold_indexes s ~init:() ~f:(fun i () -> remove s i) + let fold s ~init ~f = - fold_indexes s ~init - ~f:(fun i acc -> - known s i >>= function - | true -> f i acc - | false -> Lwt.return acc) - let elements s = - fold s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc)) - let iter s ~f = - fold s ~init:() ~f:(fun p () -> f p) + fold_indexes s ~init ~f:(fun i acc -> + known s i >>= function true -> f i acc | false -> Lwt.return acc) + + let elements s = fold s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc)) + + let iter s ~f = fold s ~init:() ~f:(fun p () -> f p) end - module Make_buffered_set (N : NAME) (Set : Set.S with type elt = I.t) = struct + module Make_buffered_set (N : NAME) (Set : Set.S with type elt = I.t) = + struct include Make_set (N) module Set = Set + let read_all s = fold s ~init:Set.empty ~f:(fun i set -> Lwt.return (Set.add i set)) + let store_all s new_set = - read_all s >>= fun old_set -> - Lwt_list.iter_p (remove s) - Set.(elements (diff old_set new_set)) >>= fun () -> + read_all s + >>= fun old_set -> + Lwt_list.iter_p (remove s) Set.(elements (diff old_set 
new_set)) + >>= fun () -> Lwt_list.iter_p (store s) Set.(elements (diff new_set old_set)) end module Make_map (N : NAME) (V : VALUE) = struct type t = S.t + type key = I.t + type value = V.t - let known s i = Store.known (s,i) N.name + + let known s i = Store.known (s, i) N.name + let read s i = - Store.read (s,i) N.name >>=? fun b -> Lwt.return (V.of_bytes b) + Store.read (s, i) N.name >>=? fun b -> Lwt.return (V.of_bytes b) + let read_opt s i = - read s i >>= function - | Error _ -> Lwt.return_none - | Ok v -> Lwt.return_some v - let store s i v = Store.store (s,i) N.name (V.to_bytes v) - let remove s i = Store.remove (s,i) N.name + read s i + >>= function Error _ -> Lwt.return_none | Ok v -> Lwt.return_some v + + let store s i v = Store.store (s, i) N.name (V.to_bytes v) + + let remove s i = Store.remove (s, i) N.name + let remove_all s = fold_indexes s ~init:() ~f:(fun i () -> remove s i) + let fold s ~init ~f = - fold_indexes s ~init - ~f:(fun i acc -> - read_opt s i >>= function - | None -> Lwt.return acc - | Some v -> f i v acc) + fold_indexes s ~init ~f:(fun i acc -> + read_opt s i + >>= function None -> Lwt.return acc | Some v -> f i v acc) + let bindings s = - fold s ~init:[] ~f:(fun p v acc -> Lwt.return ((p,v) :: acc)) - let iter s ~f = - fold s ~init:() ~f:(fun p v () -> f p v) + fold s ~init:[] ~f:(fun p v acc -> Lwt.return ((p, v) :: acc)) + + let iter s ~f = fold s ~init:() ~f:(fun p v () -> f p v) + let fold_keys s ~init ~f = - fold_indexes s ~init - ~f:(fun i acc -> - known s i >>= function - | false -> Lwt.return acc - | true -> f i acc) - let keys s = - fold_keys s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc)) - let iter_keys s ~f = - fold_keys s ~init:() ~f:(fun p () -> f p) + fold_indexes s ~init ~f:(fun i acc -> + known s i >>= function false -> Lwt.return acc | true -> f i acc) + + let keys s = fold_keys s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc)) + + let iter_keys s ~f = fold_keys s ~init:() ~f:(fun p () -> f p) end module Make_buffered_map - (N : NAME) (V : VALUE) - (Map : Map.S with type key = I.t) = struct + (N : NAME) + (V : VALUE) + (Map : Map.S with type key = I.t) = + struct include Make_map (N) (V) module Map = Map + let read_all s = fold s ~init:Map.empty ~f:(fun i v set -> Lwt.return (Map.add i v set)) + let store_all s map = - remove_all s >>= fun () -> + remove_all s + >>= fun () -> Map.fold - (fun k v acc -> let res = store s k v in acc >>= fun () -> res) - map Lwt.return_unit + (fun k v acc -> + let res = store s k v in + acc >>= fun () -> res) + map + Lwt.return_unit end - end module Make_set (S : STORE) (I : INDEX) = struct type t = S.t + type elt = I.t + let inited = MBytes.of_string "inited" + let known s i = S.known s (I.to_path i []) + let store s i = S.store s (I.to_path i []) inited + let remove s i = S.remove s (I.to_path i []) + let remove_all s = S.remove_dir s [] let fold s ~init ~f = let rec dig i path acc = if i <= 1 then - S.fold s path ~init:acc ~f:begin fun k acc -> - match k with - | `Dir _ -> Lwt.return acc - | `Key file -> + S.fold s path ~init:acc ~f:(fun k acc -> + match k with + | `Dir _ -> + Lwt.return acc + | `Key file -> ( match I.of_path file with - | None -> assert false - | Some p -> f p acc - end + | None -> + assert false + | Some p -> + f p acc )) else - S.fold s path ~init:acc ~f:begin fun k acc -> - match k with - | `Dir k -> - dig (i-1) k acc - | `Key _ -> - Lwt.return acc - end in + S.fold s path ~init:acc ~f:(fun k acc -> + match k with + | `Dir k -> + dig (i - 1) k acc + | `Key _ -> + Lwt.return acc) + 
in dig I.path_length [] init - let elements s = - fold s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc)) - let iter s ~f = - fold s ~init:() ~f:(fun p () -> f p) + let elements s = fold s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc)) + + let iter s ~f = fold s ~init:() ~f:(fun p () -> f p) end module Make_buffered_set - (S : STORE) (I : INDEX) (Set : Set.S with type elt = I.t) = struct + (S : STORE) + (I : INDEX) + (Set : Set.S with type elt = I.t) = +struct include Make_set (S) (I) module Set = Set + let read_all s = fold s ~init:Set.empty ~f:(fun i set -> Lwt.return (Set.add i set)) + let store_all s new_set = - read_all s >>= fun old_set -> - Lwt_list.iter_p (remove s) Set.(elements (diff old_set new_set)) >>= fun () -> + read_all s + >>= fun old_set -> + Lwt_list.iter_p (remove s) Set.(elements (diff old_set new_set)) + >>= fun () -> Lwt_list.iter_p (store s) Set.(elements (diff new_set old_set)) end module Make_map (S : STORE) (I : INDEX) (V : VALUE) = struct type t = S.t + type key = I.t + type value = V.t + let known s i = S.known s (I.to_path i []) + let read s i = S.read s (I.to_path i []) >>=? fun b -> Lwt.return (V.of_bytes b) + let read_opt s i = - read s i >>= function - | Error _ -> Lwt.return_none - | Ok v -> Lwt.return_some v + read s i + >>= function Error _ -> Lwt.return_none | Ok v -> Lwt.return_some v + let store s i v = S.store s (I.to_path i []) (V.to_bytes v) + let remove s i = S.remove s (I.to_path i []) + let remove_all s = S.remove_dir s [] + let fold s ~init ~f = let rec dig i path acc = if i <= 1 then - S.fold s path ~init:acc ~f:begin fun k acc -> - match k with - | `Dir _ -> Lwt.return acc - | `Key file -> - S.read_opt s file >>= function - | None -> Lwt.return acc - | Some b -> + S.fold s path ~init:acc ~f:(fun k acc -> + match k with + | `Dir _ -> + Lwt.return acc + | `Key file -> ( + S.read_opt s file + >>= function + | None -> + Lwt.return acc + | Some b -> ( match V.of_bytes b with | Error _ -> (* Silently ignore unparsable data *) Lwt.return acc - | Ok v -> - match I.of_path file with - | None -> assert false - | Some path -> f path v acc - end + | Ok v -> ( + match I.of_path file with + | None -> + assert false + | Some path -> + f path v acc ) ) )) else - S.fold s path ~init:acc ~f:begin fun k acc -> - match k with - | `Dir k -> dig (i-1) k acc - | `Key _ -> Lwt.return acc - end in + S.fold s path ~init:acc ~f:(fun k acc -> + match k with + | `Dir k -> + dig (i - 1) k acc + | `Key _ -> + Lwt.return acc) + in dig I.path_length [] init let bindings s = - fold s ~init:[] ~f:(fun p v acc -> Lwt.return ((p,v) :: acc)) - let iter s ~f = - fold s ~init:() ~f:(fun p v () -> f p v) + fold s ~init:[] ~f:(fun p v acc -> Lwt.return ((p, v) :: acc)) + + let iter s ~f = fold s ~init:() ~f:(fun p v () -> f p v) + let fold_keys s ~init ~f = - S.fold s [] ~init - ~f:(fun p acc -> - match p with - | `Dir _ -> Lwt.return acc - | `Key p -> - match I.of_path p with - | None -> assert false - | Some path -> f path acc) - let keys s = - fold_keys s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc)) - let iter_keys s ~f = - fold_keys s ~init:() ~f:(fun p () -> f p) + S.fold s [] ~init ~f:(fun p acc -> + match p with + | `Dir _ -> + Lwt.return acc + | `Key p -> ( + match I.of_path p with + | None -> + assert false + | Some path -> + f path acc )) + + let keys s = fold_keys s ~init:[] ~f:(fun p acc -> Lwt.return (p :: acc)) + + let iter_keys s ~f = fold_keys s ~init:() ~f:(fun p () -> f p) end module Make_buffered_map - (S : STORE) (I : INDEX) (V : VALUE) - (Map : Map.S with 
type key = I.t) = struct + (S : STORE) + (I : INDEX) + (V : VALUE) + (Map : Map.S with type key = I.t) = +struct include Make_map (S) (I) (V) module Map = Map + let read_all s = fold s ~init:Map.empty ~f:(fun i v set -> Lwt.return (Map.add i v set)) + let store_all s map = - remove_all s >>= fun () -> + remove_all s + >>= fun () -> Map.fold - (fun k v acc -> let res = store s k v in acc >>= fun () -> res) - map Lwt.return_unit + (fun k v acc -> + let res = store s k v in + acc >>= fun () -> res) + map + Lwt.return_unit end module Integer_index = struct type t = int + let path_length = 1 + let to_path x l = string_of_int x :: l + let of_path = function - | [x] -> begin try Some (int_of_string x) with _ -> None end - | _ -> None + | [x] -> ( + try Some (int_of_string x) with _ -> None ) + | _ -> + None end diff --git a/src/lib_storage/store_helpers.mli b/src/lib_storage/store_helpers.mli index 127f8ebe1c1e4a7e16b7b37caa653891b8106f2b..166f2730b65d4c7016b519d8570f19a46f448ff8 100644 --- a/src/lib_storage/store_helpers.mli +++ b/src/lib_storage/store_helpers.mli @@ -29,37 +29,35 @@ module Make_value (V : ENCODED_VALUE) : VALUE with type t = V.t module Raw_value : VALUE with type t = MBytes.t -module Make_single_store (S : STORE) (N : NAME) (V : VALUE) - : SINGLE_STORE with type t = S.t - and type value = V.t +module Make_single_store (S : STORE) (N : NAME) (V : VALUE) : + SINGLE_STORE with type t = S.t and type value = V.t -module Make_substore (S : STORE) (N : NAME) - : STORE with type t = S.t +module Make_substore (S : STORE) (N : NAME) : STORE with type t = S.t -module Make_set (S : STORE) (I : INDEX) - : SET_STORE with type t = S.t and type elt = I.t +module Make_set (S : STORE) (I : INDEX) : + SET_STORE with type t = S.t and type elt = I.t module Make_buffered_set - (S : STORE) (I : INDEX) (Set : Set.S with type elt = I.t) - : BUFFERED_SET_STORE with type t = S.t - and type elt = I.t - and module Set = Set + (S : STORE) + (I : INDEX) + (Set : Set.S with type elt = I.t) : + BUFFERED_SET_STORE with type t = S.t and type elt = I.t and module Set = Set -module Make_map - (S : STORE) (I : INDEX) (V : VALUE) - : MAP_STORE with type t = S.t - and type key = I.t - and type value = V.t +module Make_map (S : STORE) (I : INDEX) (V : VALUE) : + MAP_STORE with type t = S.t and type key = I.t and type value = V.t module Make_buffered_map - (S : STORE) (I : INDEX) (V : VALUE) (Map : Map.S with type key = I.t) - : BUFFERED_MAP_STORE with type t = S.t - and type key = I.t - and type value = V.t - and module Map = Map - -module Make_indexed_substore (S : STORE) (I : INDEX) - : INDEXED_STORE with type t = S.t - and type key = I.t + (S : STORE) + (I : INDEX) + (V : VALUE) + (Map : Map.S with type key = I.t) : + BUFFERED_MAP_STORE + with type t = S.t + and type key = I.t + and type value = V.t + and module Map = Map + +module Make_indexed_substore (S : STORE) (I : INDEX) : + INDEXED_STORE with type t = S.t and type key = I.t module Integer_index : INDEX with type t = int diff --git a/src/lib_storage/store_logging.ml b/src/lib_storage/store_logging.ml index fc8d63dd304f8395250ad9985432625076f11301..23263f48c5499e027ebdefe305dc0e313e85be71 100644 --- a/src/lib_storage/store_logging.ml +++ b/src/lib_storage/store_logging.ml @@ -23,4 +23,6 @@ (* *) (*****************************************************************************) -include Internal_event.Legacy_logging.Make(struct let name = "db" end) +include Internal_event.Legacy_logging.Make (struct + let name = "db" +end) diff --git 
a/src/lib_storage/store_sigs.ml b/src/lib_storage/store_sigs.ml index 8d251f26cd0f377bc4f35e08c204bcd62a52ab7c..4e36c438a2612aa34ce17c592f0b0c5f2be142bf 100644 --- a/src/lib_storage/store_sigs.ml +++ b/src/lib_storage/store_sigs.ml @@ -29,134 +29,179 @@ end module type VALUE = sig type t - val of_bytes: MBytes.t -> t tzresult - val to_bytes: t -> MBytes.t + + val of_bytes : MBytes.t -> t tzresult + + val to_bytes : t -> MBytes.t end module type ENCODED_VALUE = sig type t - val encoding: t Data_encoding.t + + val encoding : t Data_encoding.t end module type INDEX = sig type t - val path_length: int - val to_path: t -> string list -> string list - val of_path: string list -> t option + + val path_length : int + + val to_path : t -> string list -> string list + + val of_path : string list -> t option end module type SINGLE_STORE = sig type t + type value - val known: t -> bool Lwt.t - val read: t -> value tzresult Lwt.t - val read_opt: t -> value option Lwt.t - val store: t -> value -> unit Lwt.t - val remove: t -> unit Lwt.t + + val known : t -> bool Lwt.t + + val read : t -> value tzresult Lwt.t + + val read_opt : t -> value option Lwt.t + + val store : t -> value -> unit Lwt.t + + val remove : t -> unit Lwt.t end module type STORE = sig - type t + type key = string list + type value = MBytes.t - val known: t -> key -> bool Lwt.t - val read: t -> key -> value tzresult Lwt.t - val read_opt: t -> key -> value option Lwt.t - val store: t -> key -> value -> unit Lwt.t - val remove: t -> key -> unit Lwt.t + val known : t -> key -> bool Lwt.t + + val read : t -> key -> value tzresult Lwt.t + + val read_opt : t -> key -> value option Lwt.t + + val store : t -> key -> value -> unit Lwt.t + + val remove : t -> key -> unit Lwt.t + + val known_dir : t -> key -> bool Lwt.t - val known_dir: t -> key -> bool Lwt.t - val remove_dir: t -> key -> unit Lwt.t + val remove_dir : t -> key -> unit Lwt.t - val fold: - t -> key -> init:'a -> - f:([ `Key of key | `Dir of key ] -> 'a -> 'a Lwt.t) -> + val fold : + t -> + key -> + init:'a -> + f:([`Key of key | `Dir of key] -> 'a -> 'a Lwt.t) -> 'a Lwt.t - val keys: t -> key -> key list Lwt.t - val fold_keys: t -> key -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t + val keys : t -> key -> key list Lwt.t + val fold_keys : t -> key -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t end module type SET_STORE = sig type t + type elt - val known: t -> elt -> bool Lwt.t - val store: t -> elt -> unit Lwt.t - val remove: t -> elt -> unit Lwt.t - val elements: t -> elt list Lwt.t - val remove_all: t -> unit Lwt.t - val iter: t -> f:(elt -> unit Lwt.t) -> unit Lwt.t - val fold: t -> init:'a -> f:(elt -> 'a -> 'a Lwt.t) -> 'a Lwt.t + + val known : t -> elt -> bool Lwt.t + + val store : t -> elt -> unit Lwt.t + + val remove : t -> elt -> unit Lwt.t + + val elements : t -> elt list Lwt.t + + val remove_all : t -> unit Lwt.t + + val iter : t -> f:(elt -> unit Lwt.t) -> unit Lwt.t + + val fold : t -> init:'a -> f:(elt -> 'a -> 'a Lwt.t) -> 'a Lwt.t end module type BUFFERED_SET_STORE = sig include SET_STORE + module Set : Set.S with type elt = elt - val read_all: t -> Set.t Lwt.t - val store_all: t -> Set.t -> unit Lwt.t + + val read_all : t -> Set.t Lwt.t + + val store_all : t -> Set.t -> unit Lwt.t end module type MAP_STORE = sig type t + type key + type value - val known: t -> key -> bool Lwt.t - val read: t -> key -> value tzresult Lwt.t - val read_opt: t -> key -> value option Lwt.t - val store: t -> key -> value -> unit Lwt.t - val remove: t -> key -> unit Lwt.t - val 
keys: t -> key list Lwt.t - val bindings: t -> (key * value) list Lwt.t - val remove_all: t -> unit Lwt.t - val iter: t -> f:(key -> value -> unit Lwt.t) -> unit Lwt.t - val iter_keys: t -> f:(key -> unit Lwt.t) -> unit Lwt.t - val fold: t -> init:'a -> f:(key -> value -> 'a -> 'a Lwt.t) -> 'a Lwt.t - val fold_keys: t -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t + + val known : t -> key -> bool Lwt.t + + val read : t -> key -> value tzresult Lwt.t + + val read_opt : t -> key -> value option Lwt.t + + val store : t -> key -> value -> unit Lwt.t + + val remove : t -> key -> unit Lwt.t + + val keys : t -> key list Lwt.t + + val bindings : t -> (key * value) list Lwt.t + + val remove_all : t -> unit Lwt.t + + val iter : t -> f:(key -> value -> unit Lwt.t) -> unit Lwt.t + + val iter_keys : t -> f:(key -> unit Lwt.t) -> unit Lwt.t + + val fold : t -> init:'a -> f:(key -> value -> 'a -> 'a Lwt.t) -> 'a Lwt.t + + val fold_keys : t -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t end module type BUFFERED_MAP_STORE = sig include MAP_STORE + module Map : Map.S with type key = key - val read_all: t -> value Map.t Lwt.t - val store_all: t -> value Map.t -> unit Lwt.t + + val read_all : t -> value Map.t Lwt.t + + val store_all : t -> value Map.t -> unit Lwt.t end module type INDEXED_STORE = sig - type t + type key module Store : STORE with type t = t * key - val remove_all: t -> key -> unit Lwt.t + val remove_all : t -> key -> unit Lwt.t - val fold_indexes: t -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t - val indexes: t -> key list Lwt.t + val fold_indexes : t -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t - val resolve_index: t -> string list -> key list Lwt.t + val indexes : t -> key list Lwt.t - module Make_set (N : NAME) - : SET_STORE with type t = t - and type elt = key + val resolve_index : t -> string list -> key list Lwt.t - module Make_buffered_set (N : NAME) (Set : Set.S with type elt = key) - : BUFFERED_SET_STORE with type t = t - and type elt = key - and module Set = Set + module Make_set (N : NAME) : SET_STORE with type t = t and type elt = key - module Make_map (N : NAME) (V : VALUE) - : MAP_STORE with type t = t - and type key = key - and type value = V.t + module Make_buffered_set (N : NAME) (Set : Set.S with type elt = key) : + BUFFERED_SET_STORE with type t = t and type elt = key and module Set = Set - module Make_buffered_map - (N : NAME) (V : VALUE) (Map : Map.S with type key = key) - : BUFFERED_MAP_STORE with type t = t - and type key = key - and type value = V.t - and module Map = Map + module Make_map (N : NAME) (V : VALUE) : + MAP_STORE with type t = t and type key = key and type value = V.t + module Make_buffered_map + (N : NAME) + (V : VALUE) + (Map : Map.S with type key = key) : + BUFFERED_MAP_STORE + with type t = t + and type key = key + and type value = V.t + and module Map = Map end diff --git a/src/lib_storage/test/assert.ml b/src/lib_storage/test/assert.ml index c3e86800bc8316bfe7658ac874a2d756ed7e82f4..9f8b458a41ce50cc8538a4f2f4589ac4fdb732fc 100644 --- a/src/lib_storage/test/assert.ml +++ b/src/lib_storage/test/assert.ml @@ -24,62 +24,68 @@ (*****************************************************************************) let fail expected given msg = - Format.kasprintf Pervasives.failwith - "@[%s@ expected: %s@ got: %s@]" msg expected given + Format.kasprintf + Pervasives.failwith + "@[%s@ expected: %s@ got: %s@]" + msg + expected + given + let fail_msg fmt = Format.kasprintf (fail "" "") fmt let default_printer _ = "" -let equal ?(eq=(=)) 
?(prn=default_printer) ?(msg="") x y = +let equal ?(eq = ( = )) ?(prn = default_printer) ?(msg = "") x y = if not (eq x y) then fail (prn x) (prn y) msg -let equal_string ?msg s1 s2 = - equal ?msg ~prn:(fun s -> s) s1 s2 +let equal_string ?msg s1 s2 = equal ?msg ~prn:(fun s -> s) s1 s2 let equal_string_option ?msg o1 o2 = - let prn = function - | None -> "None" - | Some s -> s in + let prn = function None -> "None" | Some s -> s in equal ?msg ~prn o1 o2 -let is_none ?(msg="") x = - if x <> None then fail "None" "Some _" msg +let is_none ?(msg = "") x = if x <> None then fail "None" "Some _" msg -let make_equal_list eq prn ?(msg="") x y = +let make_equal_list eq prn ?(msg = "") x y = let rec iter i x y = - match x, y with - | hd_x :: tl_x, hd_y :: tl_y -> - if eq hd_x hd_y then - iter (succ i) tl_x tl_y + match (x, y) with + | (hd_x :: tl_x, hd_y :: tl_y) -> + if eq hd_x hd_y then iter (succ i) tl_x tl_y else let fm = Printf.sprintf "%s (at index %d)" msg i in fail (prn hd_x) (prn hd_y) fm - | _ :: _, [] | [], _ :: _ -> + | (_ :: _, []) | ([], _ :: _) -> let fm = Printf.sprintf "%s (lists of different sizes)" msg in fail_msg "%s" fm - | [], [] -> - () in + | ([], []) -> + () + in iter 0 x y let equal_string_list ?msg l1 l2 = - make_equal_list ?msg (=) (fun x -> x) l1 l2 + make_equal_list ?msg ( = ) (fun x -> x) l1 l2 let equal_string_list_list ?msg l1 l2 = let pr_persist l = let res = - String.concat ";" (List.map (fun s -> Printf.sprintf "%S" s) l) in - Printf.sprintf "[%s]" res in - make_equal_list ?msg (=) pr_persist l1 l2 + String.concat ";" (List.map (fun s -> Printf.sprintf "%S" s) l) + in + Printf.sprintf "[%s]" res + in + make_equal_list ?msg ( = ) pr_persist l1 l2 let equal_key_dir_list ?msg l1 l2 = - make_equal_list ?msg (=) + make_equal_list + ?msg + ( = ) (function - | `Key k -> "Key " ^ String.concat "/" k - | `Dir k -> "Dir " ^ String.concat "/" k) - l1 l2 + | `Key k -> + "Key " ^ String.concat "/" k + | `Dir k -> + "Dir " ^ String.concat "/" k) + l1 + l2 let equal_context_hash_list ?msg l1 l2 = - let pr_persist hash = - Printf.sprintf "[%s]" @@ Context_hash.to_string hash - in - make_equal_list ?msg Context_hash.(=) pr_persist l1 l2 + let pr_persist hash = Printf.sprintf "[%s]" @@ Context_hash.to_string hash in + make_equal_list ?msg Context_hash.( = ) pr_persist l1 l2 diff --git a/src/lib_storage/test/test.ml b/src/lib_storage/test/test.ml index 21a383a58b3b0994fb07d67441a0177cf63cc929..d7645447b05e44fde2471949121ca5aa1590c843 100644 --- a/src/lib_storage/test/test.ml +++ b/src/lib_storage/test/test.ml @@ -24,7 +24,6 @@ (*****************************************************************************) let () = - Alcotest.run "tezos-storage" [ - "context", Test_context.tests ; - "raw_store", Test_raw_store.tests ; - ] + Alcotest.run + "tezos-storage" + [("context", Test_context.tests); ("raw_store", Test_raw_store.tests)] diff --git a/src/lib_storage/test/test_context.ml b/src/lib_storage/test/test_context.ml index f9a0cf525de4a5c6239a1c024bd874227cbb9568..6111b464f919ceb4f3dc714a264c26e9f68f3629 100644 --- a/src/lib_storage/test/test_context.ml +++ b/src/lib_storage/test/test_context.ml @@ -25,9 +25,11 @@ open Context -let (>>=) = Lwt.bind -let (>|=) = Lwt.(>|=) -let (//) = Filename.concat +let ( >>= ) = Lwt.bind + +let ( >|= ) = Lwt.( >|= ) + +let ( // ) = Filename.concat (** Basic blocks *) @@ -52,27 +54,30 @@ let block2 = (`Hex "2222222222222222222222222222222222222222222222222222222222222222") let create_block2 idx genesis_commit = - checkout idx genesis_commit >>= function 
+ checkout idx genesis_commit + >>= function | None -> Assert.fail_msg "checkout genesis_block" | Some ctxt -> - set ctxt ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt -> - set ctxt ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt -> - set ctxt ["version";] (MBytes.of_string "0.0") >>= fun ctxt -> - commit ctxt + set ctxt ["a"; "b"] (MBytes.of_string "Novembre") + >>= fun ctxt -> + set ctxt ["a"; "c"] (MBytes.of_string "Juin") + >>= fun ctxt -> + set ctxt ["version"] (MBytes.of_string "0.0") >>= fun ctxt -> commit ctxt let block3a = Block_hash.of_hex_exn (`Hex "3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a3a") let create_block3a idx block2_commit = - checkout idx block2_commit >>= function - | None -> + checkout idx block2_commit + >>= function + | None -> Assert.fail_msg "checkout block2" | Some ctxt -> - del ctxt ["a"; "b"] >>= fun ctxt -> - set ctxt ["a"; "d"] (MBytes.of_string "Mars") >>= fun ctxt -> - commit ctxt + del ctxt ["a"; "b"] + >>= fun ctxt -> + set ctxt ["a"; "d"] (MBytes.of_string "Mars") >>= fun ctxt -> commit ctxt let block3b = Block_hash.of_hex_exn @@ -83,212 +88,255 @@ let block3c = (`Hex "3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c3c") let create_block3b idx block2_commit = - checkout idx block2_commit >>= function + checkout idx block2_commit + >>= function | None -> Assert.fail_msg "checkout block3b" | Some ctxt -> - del ctxt ["a"; "c"] >>= fun ctxt -> - set ctxt ["a"; "d"] (MBytes.of_string "Février") >>= fun ctxt -> - commit ctxt + del ctxt ["a"; "c"] + >>= fun ctxt -> + set ctxt ["a"; "d"] (MBytes.of_string "Février") + >>= fun ctxt -> commit ctxt type t = { - idx: Context.index ; - genesis: Context_hash.t ; - block2: Context_hash.t ; - block3a: Context_hash.t ; - block3b: Context_hash.t ; + idx : Context.index; + genesis : Context_hash.t; + block2 : Context_hash.t; + block3a : Context_hash.t; + block3b : Context_hash.t } let wrap_context_init f _ () = - Lwt_utils_unix.with_tempdir "tezos_test_" begin fun base_dir -> - let root = base_dir // "context" in - Context.init ~mapsize:4_096_000L root >>= fun idx -> - Context.commit_genesis idx - ~chain_id - ~time:genesis_time - ~protocol:genesis_protocol >>= fun genesis -> - create_block2 idx genesis >>= fun block2 -> - create_block3a idx block2 >>= fun block3a -> - create_block3b idx block2 >>= fun block3b -> - f { idx; genesis; block2 ; block3a; block3b } >>= fun result -> - Lwt.return result - end + Lwt_utils_unix.with_tempdir "tezos_test_" (fun base_dir -> + let root = base_dir // "context" in + Context.init ~mapsize:4_096_000L root + >>= fun idx -> + Context.commit_genesis + idx + ~chain_id + ~time:genesis_time + ~protocol:genesis_protocol + >>= fun genesis -> + create_block2 idx genesis + >>= fun block2 -> + create_block3a idx block2 + >>= fun block3a -> + create_block3b idx block2 + >>= fun block3b -> + f {idx; genesis; block2; block3a; block3b} + >>= fun result -> Lwt.return result) (** Simple test *) -let c = function - | None -> None - | Some s -> Some (MBytes.to_string s) +let c = function None -> None | Some s -> Some (MBytes.to_string s) -let test_simple { idx ; block2 ; _ } = - checkout idx block2 >>= function +let test_simple {idx; block2; _} = + checkout idx block2 + >>= function | None -> Assert.fail_msg "checkout block2" | Some ctxt -> - get ctxt ["version"] >>= fun version -> + get ctxt ["version"] + >>= fun version -> Assert.equal_string_option ~msg:__LOC__ (c version) (Some "0.0") ; - get ctxt ["a";"b"] >>= fun novembre -> + get ctxt ["a"; "b"] + >>= fun 
novembre -> Assert.equal_string_option (Some "Novembre") (c novembre) ; - get ctxt ["a";"c"] >>= fun juin -> + get ctxt ["a"; "c"] + >>= fun juin -> Assert.equal_string_option ~msg:__LOC__ (Some "Juin") (c juin) ; Lwt.return_unit -let test_continuation { idx ; block3a ; _ } = - checkout idx block3a >>= function - | None -> +let test_continuation {idx; block3a; _} = + checkout idx block3a + >>= function + | None -> Assert.fail_msg "checkout block3a" | Some ctxt -> - get ctxt ["version"] >>= fun version -> + get ctxt ["version"] + >>= fun version -> Assert.equal_string_option ~msg:__LOC__ (Some "0.0") (c version) ; - get ctxt ["a";"b"] >>= fun novembre -> + get ctxt ["a"; "b"] + >>= fun novembre -> Assert.is_none ~msg:__LOC__ (c novembre) ; - get ctxt ["a";"c"] >>= fun juin -> + get ctxt ["a"; "c"] + >>= fun juin -> Assert.equal_string_option ~msg:__LOC__ (Some "Juin") (c juin) ; - get ctxt ["a";"d"] >>= fun mars -> - Assert.equal_string_option ~msg:__LOC__ (Some "Mars") (c mars) ; + get ctxt ["a"; "d"] + >>= fun mars -> + Assert.equal_string_option ~msg:__LOC__ (Some "Mars") (c mars) ; Lwt.return_unit -let test_fork { idx ; block3b ; _ } = - checkout idx block3b >>= function - | None -> +let test_fork {idx; block3b; _} = + checkout idx block3b + >>= function + | None -> Assert.fail_msg "checkout block3b" | Some ctxt -> - get ctxt ["version"] >>= fun version -> + get ctxt ["version"] + >>= fun version -> Assert.equal_string_option ~msg:__LOC__ (Some "0.0") (c version) ; - get ctxt ["a";"b"] >>= fun novembre -> + get ctxt ["a"; "b"] + >>= fun novembre -> Assert.equal_string_option ~msg:__LOC__ (Some "Novembre") (c novembre) ; - get ctxt ["a";"c"] >>= fun juin -> + get ctxt ["a"; "c"] + >>= fun juin -> Assert.is_none ~msg:__LOC__ (c juin) ; - get ctxt ["a";"d"] >>= fun mars -> + get ctxt ["a"; "d"] + >>= fun mars -> Assert.equal_string_option ~msg:__LOC__ (Some "Février") (c mars) ; Lwt.return_unit -let test_replay { idx ; genesis ; _ } = - checkout idx genesis >>= function - | None -> +let test_replay {idx; genesis; _} = + checkout idx genesis + >>= function + | None -> Assert.fail_msg "checkout genesis_block" | Some ctxt0 -> - set ctxt0 ["version"] (MBytes.of_string "0.0") >>= fun ctxt1 -> - set ctxt1 ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt2 -> - set ctxt2 ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt3 -> - set ctxt3 ["a"; "d"] (MBytes.of_string "July") >>= fun ctxt4a -> - set ctxt3 ["a"; "d"] (MBytes.of_string "Juillet") >>= fun ctxt4b -> - set ctxt4a ["a"; "b"] (MBytes.of_string "November") >>= fun ctxt5a -> - get ctxt4a ["a";"b"] >>= fun novembre -> + set ctxt0 ["version"] (MBytes.of_string "0.0") + >>= fun ctxt1 -> + set ctxt1 ["a"; "b"] (MBytes.of_string "Novembre") + >>= fun ctxt2 -> + set ctxt2 ["a"; "c"] (MBytes.of_string "Juin") + >>= fun ctxt3 -> + set ctxt3 ["a"; "d"] (MBytes.of_string "July") + >>= fun ctxt4a -> + set ctxt3 ["a"; "d"] (MBytes.of_string "Juillet") + >>= fun ctxt4b -> + set ctxt4a ["a"; "b"] (MBytes.of_string "November") + >>= fun ctxt5a -> + get ctxt4a ["a"; "b"] + >>= fun novembre -> Assert.equal_string_option ~msg:__LOC__ (Some "Novembre") (c novembre) ; - get ctxt5a ["a";"b"] >>= fun november -> + get ctxt5a ["a"; "b"] + >>= fun november -> Assert.equal_string_option ~msg:__LOC__ (Some "November") (c november) ; - get ctxt5a ["a";"d"] >>= fun july -> + get ctxt5a ["a"; "d"] + >>= fun july -> Assert.equal_string_option ~msg:__LOC__ (Some "July") (c july) ; - get ctxt4b ["a";"b"] >>= fun novembre -> + get ctxt4b ["a"; "b"] + >>= fun novembre -> 
Assert.equal_string_option ~msg:__LOC__ (Some "Novembre") (c novembre) ; - get ctxt4b ["a";"d"] >>= fun juillet -> + get ctxt4b ["a"; "d"] + >>= fun juillet -> Assert.equal_string_option ~msg:__LOC__ (Some "Juillet") (c juillet) ; Lwt.return_unit let fold_keys s k ~init ~f = let rec loop k acc = - fold s k ~init:acc - ~f:(fun file acc -> - match file with - | `Key k -> f k acc - | `Dir k -> loop k acc) in + fold s k ~init:acc ~f:(fun file acc -> + match file with `Key k -> f k acc | `Dir k -> loop k acc) + in loop k init + let keys t = fold_keys t ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc)) -let test_fold { idx ; genesis ; _ } = - checkout idx genesis >>= function +let test_fold {idx; genesis; _} = + checkout idx genesis + >>= function | None -> Assert.fail_msg "checkout genesis_block" | Some ctxt -> - set ctxt ["a"; "b"] (MBytes.of_string "Novembre") >>= fun ctxt -> - set ctxt ["a"; "c"] (MBytes.of_string "Juin") >>= fun ctxt -> - set ctxt ["a"; "d"; "e"] (MBytes.of_string "Septembre") >>= fun ctxt -> - set ctxt ["f";] (MBytes.of_string "Avril") >>= fun ctxt -> - set ctxt ["g"; "h"] (MBytes.of_string "Avril") >>= fun ctxt -> - keys ctxt [] >>= fun l -> - Assert.equal_string_list_list ~msg:__LOC__ - [["a";"b"]; - ["a";"c"]; - ["a";"d";"e"]; - ["f"]; - ["g";"h"]] (List.sort compare l) ; - keys ctxt ["a"] >>= fun l -> + set ctxt ["a"; "b"] (MBytes.of_string "Novembre") + >>= fun ctxt -> + set ctxt ["a"; "c"] (MBytes.of_string "Juin") + >>= fun ctxt -> + set ctxt ["a"; "d"; "e"] (MBytes.of_string "Septembre") + >>= fun ctxt -> + set ctxt ["f"] (MBytes.of_string "Avril") + >>= fun ctxt -> + set ctxt ["g"; "h"] (MBytes.of_string "Avril") + >>= fun ctxt -> + keys ctxt [] + >>= fun l -> Assert.equal_string_list_list - ~msg:__LOC__ [["a";"b"]; ["a";"c"]; ["a";"d";"e"]] + ~msg:__LOC__ + [["a"; "b"]; ["a"; "c"]; ["a"; "d"; "e"]; ["f"]; ["g"; "h"]] (List.sort compare l) ; - keys ctxt ["f"] >>= fun l -> + keys ctxt ["a"] + >>= fun l -> + Assert.equal_string_list_list + ~msg:__LOC__ + [["a"; "b"]; ["a"; "c"]; ["a"; "d"; "e"]] + (List.sort compare l) ; + keys ctxt ["f"] + >>= fun l -> Assert.equal_string_list_list ~msg:__LOC__ [] l ; - keys ctxt ["g"] >>= fun l -> - Assert.equal_string_list_list ~msg:__LOC__ [["g";"h"]] l ; - keys ctxt ["i"] >>= fun l -> + keys ctxt ["g"] + >>= fun l -> + Assert.equal_string_list_list ~msg:__LOC__ [["g"; "h"]] l ; + keys ctxt ["i"] + >>= fun l -> Assert.equal_string_list_list ~msg:__LOC__ [] l ; Lwt.return_unit -let test_dump { idx ; block3b; _ } = - Lwt_utils_unix.with_tempdir "tezos_test_" begin fun base_dir2 -> - let dumpfile = base_dir2 // "dump" in - let ctxt_hash = block3b in - let history_mode = Tezos_shell_services.History_mode.Full in - let empty_block_header context = - Block_header.{ - protocol_data = MBytes.empty; - shell = { - level = 0l; - proto_level = 0; - predecessor = Block_hash.zero; - timestamp = Time.Protocol.epoch; - validation_passes = 0; - operations_hash = Operation_list_list_hash.zero; - fitness = []; - context; - } } in - let _empty_pruned_block = ({ - block_header = empty_block_header Context_hash.zero ; - operations = [] ; - operation_hashes = [] ; - } : Context.Pruned_block.t) in - let empty = { - Context.Block_data.block_header = empty_block_header Context_hash.zero ; - operations = [[]] ; - } in - let bhs = - (fun context -> - empty_block_header context, - empty, - history_mode, - (fun _ -> return (None, None)) - ) ctxt_hash - in - Context.dump_contexts idx bhs ~filename:dumpfile >>=? 
fun () -> - let root = base_dir2 // "context" in - Context.init ?patch_context:None root >>= fun idx2 -> - Context.restore_contexts idx2 - ~filename:dumpfile (fun _ -> return_unit) - (fun _ _ _ -> return_unit) - >>=? fun imported -> - let (bh, _, _, _, _, _) = imported in - let expected_ctxt_hash = bh.Block_header.shell.context in - assert (Context_hash.equal ctxt_hash expected_ctxt_hash) ; - return () - end +let test_dump {idx; block3b; _} = + Lwt_utils_unix.with_tempdir "tezos_test_" (fun base_dir2 -> + let dumpfile = base_dir2 // "dump" in + let ctxt_hash = block3b in + let history_mode = Tezos_shell_services.History_mode.Full in + let empty_block_header context = + Block_header. + { protocol_data = MBytes.empty; + shell = + { level = 0l; + proto_level = 0; + predecessor = Block_hash.zero; + timestamp = Time.Protocol.epoch; + validation_passes = 0; + operations_hash = Operation_list_list_hash.zero; + fitness = []; + context } } + in + let _empty_pruned_block = + ( { block_header = empty_block_header Context_hash.zero; + operations = []; + operation_hashes = [] } + : Context.Pruned_block.t ) + in + let empty = + { Context.Block_data.block_header = empty_block_header Context_hash.zero; + operations = [[]] } + in + let bhs = + (fun context -> + ( empty_block_header context, + empty, + history_mode, + fun _ -> return (None, None) )) + ctxt_hash + in + Context.dump_contexts idx bhs ~filename:dumpfile + >>=? fun () -> + let root = base_dir2 // "context" in + Context.init ?patch_context:None root + >>= fun idx2 -> + Context.restore_contexts + idx2 + ~filename:dumpfile + (fun _ -> return_unit) + (fun _ _ _ -> return_unit) + >>=? fun imported -> + let (bh, _, _, _, _, _) = imported in + let expected_ctxt_hash = bh.Block_header.shell.context in + assert (Context_hash.equal ctxt_hash expected_ctxt_hash) ; + return ()) >>= function | Error err -> Error_monad.pp_print_error Format.err_formatter err ; assert false - | Ok () -> Lwt.return_unit + | Ok () -> + Lwt.return_unit (******************************************************************************) -let tests : (string * (t -> unit Lwt.t)) list = [ - "simple", test_simple ; - "continuation", test_continuation ; - "fork", test_fork ; - "replay", test_replay ; - "fold", test_fold ; - "dump", test_dump ; -] - +let tests : (string * (t -> unit Lwt.t)) list = + [ ("simple", test_simple); + ("continuation", test_continuation); + ("fork", test_fork); + ("replay", test_replay); + ("fold", test_fold); + ("dump", test_dump) ] let tests = List.map diff --git a/src/lib_storage/test/test_raw_store.ml b/src/lib_storage/test/test_raw_store.ml index 408cbdf6bae5dde1dbd3f4411cae53af01f23628..e907a97c8ab12ea043264b2b7f58f4002282aa52 100644 --- a/src/lib_storage/test/test_raw_store.ml +++ b/src/lib_storage/test/test_raw_store.ml @@ -25,62 +25,76 @@ open Raw_store -let (>>=) = Lwt.bind -let (>|=) = Lwt.(>|=) -let (//) = Filename.concat +let ( >>= ) = Lwt.bind + +let ( >|= ) = Lwt.( >|= ) + +let ( // ) = Filename.concat let wrap_store_init f _ () = - Lwt_utils_unix.with_tempdir "tezos_test_" begin fun base_dir -> - let root = base_dir // "store" in - init ~mapsize:4_096_000L root >>= function - | Error _ -> Assert.fail_msg "wrap_store_init" - | Ok store -> f store - end + Lwt_utils_unix.with_tempdir "tezos_test_" (fun base_dir -> + let root = base_dir // "store" in + init ~mapsize:4_096_000L root + >>= function + | Error _ -> Assert.fail_msg "wrap_store_init" | Ok store -> f store) -let entries s k = fold s k ~init:[] ~f:(fun e acc -> Lwt.return (e :: acc)) >|= 
List.rev +let entries s k = + fold s k ~init:[] ~f:(fun e acc -> Lwt.return (e :: acc)) >|= List.rev let test_fold st = - store st ["a"; "b"] (MBytes.of_string "Novembre") >>= fun _ -> - store st ["a"; "c"] (MBytes.of_string "Juin") >>= fun _ -> - store st ["a"; "d"; "e"] (MBytes.of_string "Septembre") >>= fun _ -> - store st ["f";] (MBytes.of_string "Avril") >>= fun _ -> + store st ["a"; "b"] (MBytes.of_string "Novembre") + >>= fun _ -> + store st ["a"; "c"] (MBytes.of_string "Juin") + >>= fun _ -> + store st ["a"; "d"; "e"] (MBytes.of_string "Septembre") + >>= fun _ -> + store st ["f"] (MBytes.of_string "Avril") + >>= fun _ -> (* The code of '.' is just below the one of '/' ! *) - store st ["g";".12";"a"] (MBytes.of_string "Mai") >>= fun _ -> - store st ["g";".12";"b"] (MBytes.of_string "Février") >>= fun _ -> - store st ["g";"123";"456"] (MBytes.of_string "Mars") >>= fun _ -> - store st ["g";"1230"] (MBytes.of_string "Janvier") >>= fun _ -> - - entries st [] >>= fun l -> + store st ["g"; ".12"; "a"] (MBytes.of_string "Mai") + >>= fun _ -> + store st ["g"; ".12"; "b"] (MBytes.of_string "Février") + >>= fun _ -> + store st ["g"; "123"; "456"] (MBytes.of_string "Mars") + >>= fun _ -> + store st ["g"; "1230"] (MBytes.of_string "Janvier") + >>= fun _ -> + entries st [] + >>= fun l -> Assert.equal_key_dir_list ~msg:__LOC__ [`Dir ["a"]; `Key ["f"]; `Dir ["g"]] l ; - - entries st ["0"] >>= fun l -> + entries st ["0"] + >>= fun l -> Assert.equal_key_dir_list ~msg:__LOC__ [] l ; - - entries st ["0"; "1"] >>= fun l -> + entries st ["0"; "1"] + >>= fun l -> Assert.equal_key_dir_list ~msg:__LOC__ [] l ; - - entries st ["a"] >>= fun l -> - Assert.equal_key_dir_list ~msg:__LOC__ [`Key ["a"; "b"]; `Key ["a"; "c"]; `Dir ["a"; "d"]] l ; - - entries st ["a"; "d"] >>= fun l -> + entries st ["a"] + >>= fun l -> + Assert.equal_key_dir_list + ~msg:__LOC__ + [`Key ["a"; "b"]; `Key ["a"; "c"]; `Dir ["a"; "d"]] + l ; + entries st ["a"; "d"] + >>= fun l -> Assert.equal_key_dir_list ~msg:__LOC__ [`Key ["a"; "d"; "e"]] l ; - - entries st ["f"] >>= fun l -> + entries st ["f"] + >>= fun l -> Assert.equal_key_dir_list ~msg:__LOC__ [] l ; - - entries st ["f"; "z"] >>= fun l -> + entries st ["f"; "z"] + >>= fun l -> Assert.equal_key_dir_list ~msg:__LOC__ [] l ; - - entries st ["g"] >>= fun l -> - Assert.equal_key_dir_list ~msg:__LOC__ [`Dir ["g";".12"]; `Dir ["g";"123"]; `Key ["g";"1230"]] l ; - - entries st ["g";"123"] >>= fun l -> - Assert.equal_key_dir_list ~msg:__LOC__ [`Key ["g";"123";"456"]] l ; - - entries st ["z"] >>= fun l -> + entries st ["g"] + >>= fun l -> + Assert.equal_key_dir_list + ~msg:__LOC__ + [`Dir ["g"; ".12"]; `Dir ["g"; "123"]; `Key ["g"; "1230"]] + l ; + entries st ["g"; "123"] + >>= fun l -> + Assert.equal_key_dir_list ~msg:__LOC__ [`Key ["g"; "123"; "456"]] l ; + entries st ["z"] + >>= fun l -> Assert.equal_key_dir_list ~msg:__LOC__ [] l ; - Lwt.return_unit -let tests = - [Alcotest_lwt.test_case "fold" `Quick (wrap_store_init test_fold)] +let tests = [Alcotest_lwt.test_case "fold" `Quick (wrap_store_init test_fold)] diff --git a/src/lib_validation/.ocamlformat b/src/lib_validation/.ocamlformat new file mode 100644 index 0000000000000000000000000000000000000000..9d2a5a5f36ace033597a4fade91b31f5d3e1ec47 --- /dev/null +++ b/src/lib_validation/.ocamlformat @@ -0,0 +1,11 @@ +wrap-fun-args=false +let-binding-spacing=compact +field-space=loose +break-separators=after-and-docked +sequence-style=separator +doc-comments=before +margin=80 +module-item-spacing=sparse +parens-tuple=always 
+parens-tuple-patterns=always +break-string-literals=newlines-and-wrap diff --git a/src/lib_validation/block_validation.ml b/src/lib_validation/block_validation.ml index c0a6f4ffd1a79fbdcda6952a4a4741342d71cdf8..df83c4a36477f9eb7bf8be47770be78544caf9f6 100644 --- a/src/lib_validation/block_validation.ml +++ b/src/lib_validation/block_validation.ml @@ -27,300 +27,322 @@ open Block_validator_errors type result = { - validation_result: Tezos_protocol_environment_shell.validation_result ; - block_metadata: MBytes.t ; - ops_metadata: MBytes.t list list ; - context_hash: Context_hash.t ; - forking_testchain : bool ; + validation_result : Tezos_protocol_environment_shell.validation_result; + block_metadata : MBytes.t; + ops_metadata : MBytes.t list list; + context_hash : Context_hash.t; + forking_testchain : bool } let update_testchain_status ctxt predecessor_header timestamp = - Context.get_test_chain ctxt >>= function - | Not_running -> return ctxt - | Running { expiration ; _ } -> + Context.get_test_chain ctxt + >>= function + | Not_running -> + return ctxt + | Running {expiration; _} -> if Time.Protocol.(expiration <= timestamp) then - Context.set_test_chain ctxt Not_running >>= fun ctxt -> - return ctxt - else - return ctxt - | Forking { protocol ; expiration } -> + Context.set_test_chain ctxt Not_running >>= fun ctxt -> return ctxt + else return ctxt + | Forking {protocol; expiration} -> let predecessor_hash = Block_header.hash predecessor_header in let genesis = Context.compute_testchain_genesis predecessor_hash in - let chain_id = Chain_id.of_block_hash genesis in (* legacy semantics *) - Context.set_test_chain ctxt - (Running { chain_id ; genesis ; - protocol ; expiration }) >>= fun ctxt -> - return ctxt + let chain_id = Chain_id.of_block_hash genesis in + (* legacy semantics *) + Context.set_test_chain + ctxt + (Running {chain_id; genesis; protocol; expiration}) + >>= fun ctxt -> return ctxt let is_testchain_forking ctxt = - Context.get_test_chain ctxt >>= function - | Not_running | Running _ -> Lwt.return_false - | Forking _ -> Lwt.return_true + Context.get_test_chain ctxt + >>= function + | Not_running | Running _ -> Lwt.return_false | Forking _ -> Lwt.return_true -let init_test_chain - ctxt forked_header = - Context.get_test_chain ctxt >>= function - | Not_running | Running _ -> assert false - | Forking { protocol ; _ } -> - begin match Registered_protocol.get protocol with - | Some proto -> return proto - | None -> fail (Missing_test_protocol protocol) - end >>=? fun (module Proto_test) -> - Proto_test.init ctxt forked_header.Block_header.shell >>=? fun { context = test_ctxt ; _ } -> - Context.set_test_chain test_ctxt Not_running >>= fun test_ctxt -> - Context.set_protocol test_ctxt protocol >>= fun test_ctxt -> - Context.commit_test_chain_genesis test_ctxt forked_header >>= fun genesis_header -> - return genesis_header +let init_test_chain ctxt forked_header = + Context.get_test_chain ctxt + >>= function + | Not_running | Running _ -> + assert false + | Forking {protocol; _} -> + ( match Registered_protocol.get protocol with + | Some proto -> + return proto + | None -> + fail (Missing_test_protocol protocol) ) + >>=? fun (module Proto_test) -> + Proto_test.init ctxt forked_header.Block_header.shell + >>=? 
fun {context = test_ctxt; _} -> + Context.set_test_chain test_ctxt Not_running + >>= fun test_ctxt -> + Context.set_protocol test_ctxt protocol + >>= fun test_ctxt -> + Context.commit_test_chain_genesis test_ctxt forked_header + >>= fun genesis_header -> return genesis_header -let may_patch_protocol - ~level +let may_patch_protocol ~level (validation_result : Tezos_protocol_environment_shell.validation_result) = match Block_header.get_forced_protocol_upgrade ~level with | None -> return validation_result | Some hash -> - Context.set_protocol validation_result.context hash >>= fun context -> - return { validation_result with context } - -module Make(Proto : Registered_protocol.T) = struct + Context.set_protocol validation_result.context hash + >>= fun context -> return {validation_result with context} - let check_block_header - ~(predecessor_block_header : Block_header.t) - hash (block_header: Block_header.t) = +module Make (Proto : Registered_protocol.T) = struct + let check_block_header ~(predecessor_block_header : Block_header.t) hash + (block_header : Block_header.t) = let validation_passes = List.length Proto.validation_passes in fail_unless - (Int32.succ predecessor_block_header.shell.level = block_header.shell.level) - (invalid_block hash @@ - Invalid_level { expected = Int32.succ predecessor_block_header.shell.level ; - found = block_header.shell.level }) >>=? fun () -> + ( Int32.succ predecessor_block_header.shell.level + = block_header.shell.level ) + ( invalid_block hash + @@ Invalid_level + { expected = Int32.succ predecessor_block_header.shell.level; + found = block_header.shell.level } ) + >>=? fun () -> fail_unless - Time.Protocol.(predecessor_block_header.shell.timestamp < block_header.shell.timestamp) - (invalid_block hash Non_increasing_timestamp) >>=? fun () -> + Time.Protocol.( + predecessor_block_header.shell.timestamp < block_header.shell.timestamp) + (invalid_block hash Non_increasing_timestamp) + >>=? fun () -> fail_unless - Fitness.(predecessor_block_header.shell.fitness < block_header.shell.fitness) - (invalid_block hash Non_increasing_fitness) >>=? fun () -> + Fitness.( + predecessor_block_header.shell.fitness < block_header.shell.fitness) + (invalid_block hash Non_increasing_fitness) + >>=? fun () -> fail_unless (block_header.shell.validation_passes = validation_passes) - (invalid_block hash - (Unexpected_number_of_validation_passes block_header.shell.validation_passes) - ) >>=? fun () -> - return_unit + (invalid_block + hash + (Unexpected_number_of_validation_passes + block_header.shell.validation_passes)) + >>=? fun () -> return_unit let parse_block_header block_hash (block_header : Block_header.t) = match Data_encoding.Binary.of_bytes Proto.block_header_data_encoding - block_header.protocol_data with + block_header.protocol_data + with | None -> fail (invalid_block block_hash Cannot_parse_block_header) | Some protocol_data -> - return ({ shell = block_header.shell ; protocol_data } : Proto.block_header) + return + ({shell = block_header.shell; protocol_data} : Proto.block_header) let check_operation_quota block_hash operations = let invalid_block = invalid_block block_hash in iteri2_p - begin fun i ops quota -> + (fun i ops quota -> fail_unless - (Option.unopt_map ~default:true + (Option.unopt_map + ~default:true ~f:(fun max -> List.length ops <= max) quota.Tezos_protocol_environment_shell.max_op) (let max = Option.unopt ~default:~-1 quota.max_op in invalid_block - (Too_many_operations - { pass = i + 1 ; found = List.length ops ; max })) >>=? 
fun () -> + (Too_many_operations {pass = i + 1; found = List.length ops; max})) + >>=? fun () -> iter_p - begin fun op -> + (fun op -> let size = Data_encoding.Binary.length Operation.encoding op in fail_unless (size <= Proto.max_operation_data_length) (invalid_block (Oversized_operation - { operation = Operation.hash op ; - size ; max = Proto.max_operation_data_length })) - end - ops >>=? fun () -> - return_unit - end - operations Proto.validation_passes + { operation = Operation.hash op; + size; + max = Proto.max_operation_data_length }))) + ops + >>=? fun () -> return_unit) + operations + Proto.validation_passes let parse_operations block_hash operations = let invalid_block = invalid_block block_hash in mapi_s - begin fun pass -> - map_s begin fun op -> - let op_hash = Operation.hash op in - match - Data_encoding.Binary.of_bytes - Proto.operation_data_encoding - op.Operation.proto with - | None -> - fail (invalid_block (Cannot_parse_operation op_hash)) - | Some protocol_data -> - let op = { Proto.shell = op.shell ; protocol_data } in - let allowed_pass = Proto.acceptable_passes op in - fail_unless (List.mem pass allowed_pass) - (invalid_block - (Unallowed_pass { operation = op_hash ; - pass ; allowed_pass } )) >>=? fun () -> - return op - end - end + (fun pass -> + map_s (fun op -> + let op_hash = Operation.hash op in + match + Data_encoding.Binary.of_bytes + Proto.operation_data_encoding + op.Operation.proto + with + | None -> + fail (invalid_block (Cannot_parse_operation op_hash)) + | Some protocol_data -> + let op = {Proto.shell = op.shell; protocol_data} in + let allowed_pass = Proto.acceptable_passes op in + fail_unless + (List.mem pass allowed_pass) + (invalid_block + (Unallowed_pass {operation = op_hash; pass; allowed_pass})) + >>=? fun () -> return op)) operations - let apply - chain_id - ~max_operations_ttl - ~(predecessor_block_header : Block_header.t) - ~predecessor_context - ~(block_header : Block_header.t) - operations = + let apply chain_id ~max_operations_ttl + ~(predecessor_block_header : Block_header.t) ~predecessor_context + ~(block_header : Block_header.t) operations = let block_hash = Block_header.hash block_header in let invalid_block = invalid_block block_hash in - check_block_header - ~predecessor_block_header - block_hash block_header >>=? fun () -> - parse_block_header block_hash block_header >>=? fun block_header -> - check_operation_quota block_hash operations >>=? fun () -> + check_block_header ~predecessor_block_header block_hash block_header + >>=? fun () -> + parse_block_header block_hash block_header + >>=? fun block_header -> + check_operation_quota block_hash operations + >>=? fun () -> update_testchain_status - predecessor_context predecessor_block_header - block_header.shell.timestamp >>=? fun context -> - parse_operations block_hash operations >>=? fun operations -> + predecessor_context + predecessor_block_header + block_header.shell.timestamp + >>=? fun context -> + parse_operations block_hash operations + >>=? fun operations -> (* TODO wrap 'proto_error' into 'block_error' *) Proto.begin_application ~chain_id ~predecessor_context:context ~predecessor_timestamp:predecessor_block_header.shell.timestamp ~predecessor_fitness:predecessor_block_header.shell.fitness - block_header >>=? fun state -> + block_header + >>=? fun state -> fold_left_s (fun (state, acc) ops -> - fold_left_s - (fun (state, acc) op -> - Proto.apply_operation state op >>=? fun (state, op_metadata) -> - return (state, op_metadata :: acc)) - (state, []) ops >>=? 
fun (state, ops_metadata) -> - return (state, List.rev ops_metadata :: acc)) - (state, []) operations >>=? fun (state, ops_metadata) -> + fold_left_s + (fun (state, acc) op -> + Proto.apply_operation state op + >>=? fun (state, op_metadata) -> return (state, op_metadata :: acc)) + (state, []) + ops + >>=? fun (state, ops_metadata) -> + return (state, List.rev ops_metadata :: acc)) + (state, []) + operations + >>=? fun (state, ops_metadata) -> let ops_metadata = List.rev ops_metadata in - Proto.finalize_block state >>=? fun (validation_result, block_data) -> + Proto.finalize_block state + >>=? fun (validation_result, block_data) -> (* reset_test_chain * validation_result.context * current_block_header * ~start_testchain >>=? fun forked_genesis_header -> *) - is_testchain_forking validation_result.context >>= fun forking_testchain -> - may_patch_protocol - ~level:block_header.shell.level validation_result >>=? fun validation_result -> - Context.get_protocol validation_result.context >>= fun new_protocol -> + is_testchain_forking validation_result.context + >>= fun forking_testchain -> + may_patch_protocol ~level:block_header.shell.level validation_result + >>=? fun validation_result -> + Context.get_protocol validation_result.context + >>= fun new_protocol -> let expected_proto_level = if Protocol_hash.equal new_protocol Proto.hash then predecessor_block_header.shell.proto_level - else - (predecessor_block_header.shell.proto_level + 1) mod 256 in - fail_when (block_header.shell.proto_level <> expected_proto_level) + else (predecessor_block_header.shell.proto_level + 1) mod 256 + in + fail_when + (block_header.shell.proto_level <> expected_proto_level) (invalid_block - (Invalid_proto_level { - found = block_header.shell.proto_level ; - expected = expected_proto_level ; - })) >>=? fun () -> + (Invalid_proto_level + { found = block_header.shell.proto_level; + expected = expected_proto_level })) + >>=? fun () -> fail_when Fitness.(validation_result.fitness <> block_header.shell.fitness) (invalid_block - (Invalid_fitness { - expected = block_header.shell.fitness ; - found = validation_result.fitness ; - })) >>=? fun () -> - begin - if Protocol_hash.equal new_protocol Proto.hash then - return validation_result - else - match Registered_protocol.get new_protocol with - | None -> - fail (Unavailable_protocol { block = block_hash ; - protocol = new_protocol }) - | Some (module NewProto) -> - NewProto.init validation_result.context block_header.shell - end >>=? fun validation_result -> + (Invalid_fitness + { expected = block_header.shell.fitness; + found = validation_result.fitness })) + >>=? fun () -> + ( if Protocol_hash.equal new_protocol Proto.hash then + return validation_result + else + match Registered_protocol.get new_protocol with + | None -> + fail + (Unavailable_protocol {block = block_hash; protocol = new_protocol}) + | Some (module NewProto) -> + NewProto.init validation_result.context block_header.shell ) + >>=? 
fun validation_result -> let max_operations_ttl = - max 0 - (min - ((max_operations_ttl)+1) - validation_result.max_operations_ttl) in - let validation_result = - { validation_result with max_operations_ttl } in + max 0 (min (max_operations_ttl + 1) validation_result.max_operations_ttl) + in + let validation_result = {validation_result with max_operations_ttl} in let block_metadata = Data_encoding.Binary.to_bytes_exn - Proto.block_header_metadata_encoding block_data in + Proto.block_header_metadata_encoding + block_data + in let ops_metadata = List.map (List.map - (Data_encoding.Binary.to_bytes_exn - Proto.operation_receipt_encoding)) - ops_metadata in + (Data_encoding.Binary.to_bytes_exn Proto.operation_receipt_encoding)) + ops_metadata + in Context.commit ~time:block_header.shell.timestamp ?message:validation_result.message - validation_result.context >>= fun context_hash -> - return ({ validation_result ; block_metadata ; - ops_metadata ; context_hash ; forking_testchain }) - + validation_result.context + >>= fun context_hash -> + return + { validation_result; + block_metadata; + ops_metadata; + context_hash; + forking_testchain } end let assert_no_duplicate_operations block_hash live_operations operations = fold_left_s - begin fold_left_s - begin fun live_operations op -> - let oph = Operation.hash op in - fail_when (Operation_hash.Set.mem oph live_operations) - (invalid_block block_hash @@ Replayed_operation oph) >>=? fun () -> - return (Operation_hash.Set.add oph live_operations) - end - end - live_operations operations >>=? fun _ -> - return_unit + (fold_left_s (fun live_operations op -> + let oph = Operation.hash op in + fail_when + (Operation_hash.Set.mem oph live_operations) + (invalid_block block_hash @@ Replayed_operation oph) + >>=? fun () -> return (Operation_hash.Set.add oph live_operations))) + live_operations + operations + >>=? fun _ -> return_unit let assert_operation_liveness block_hash live_blocks operations = iter_s - begin iter_s - begin fun op -> - fail_unless - (Block_hash.Set.mem op.Operation.shell.branch live_blocks) - (invalid_block block_hash @@ - Outdated_operation { operation = Operation.hash op ; - originating_block = op.shell.branch }) - end - end + (iter_s (fun op -> + fail_unless + (Block_hash.Set.mem op.Operation.shell.branch live_blocks) + ( invalid_block block_hash + @@ Outdated_operation + { operation = Operation.hash op; + originating_block = op.shell.branch } ))) operations let check_liveness ~live_blocks ~live_operations block_hash operations = - assert_no_duplicate_operations - block_hash live_operations operations >>=? fun () -> - assert_operation_liveness block_hash live_blocks operations >>=? fun () -> - return_unit + assert_no_duplicate_operations block_hash live_operations operations + >>=? fun () -> + assert_operation_liveness block_hash live_blocks operations + >>=? fun () -> return_unit -let apply - chain_id - ~max_operations_ttl - ~(predecessor_block_header : Block_header.t) - ~predecessor_context - ~(block_header : Block_header.t) - operations = +let apply chain_id ~max_operations_ttl + ~(predecessor_block_header : Block_header.t) ~predecessor_context + ~(block_header : Block_header.t) operations = let block_hash = Block_header.hash block_header in - Context.get_protocol predecessor_context >>= fun pred_protocol_hash -> - begin - match Registered_protocol.get pred_protocol_hash with - | None -> - fail (Unavailable_protocol { block = block_hash ; - protocol = pred_protocol_hash }) - | Some p -> return p - end >>=? 
fun (module Proto) -> - let module Block_validation = Make(Proto) in + Context.get_protocol predecessor_context + >>= fun pred_protocol_hash -> + ( match Registered_protocol.get pred_protocol_hash with + | None -> + fail + (Unavailable_protocol + {block = block_hash; protocol = pred_protocol_hash}) + | Some p -> + return p ) + >>=? fun (module Proto) -> + let module Block_validation = Make (Proto) in Block_validation.apply chain_id ~max_operations_ttl ~predecessor_block_header ~predecessor_context ~block_header - operations >>= function + operations + >>= function | Error (Exn (Unix.Unix_error (errno, fn, msg)) :: _) -> - fail (System_error { errno ; fn ; msg }) - | (Ok _ | Error _) as res -> Lwt.return res + fail (System_error {errno; fn; msg}) + | (Ok _ | Error _) as res -> + Lwt.return res diff --git a/src/lib_validation/block_validation.mli b/src/lib_validation/block_validation.mli index 5f065878a4cefc11020d305cdc36adcf0b4e9fac..185bdba32c5b3c2037a283231bef16f0cc992a74 100644 --- a/src/lib_validation/block_validation.mli +++ b/src/lib_validation/block_validation.mli @@ -24,24 +24,19 @@ (* *) (*****************************************************************************) -val may_patch_protocol: +val may_patch_protocol : level:Int32.t -> Tezos_protocol_environment_shell.validation_result -> Tezos_protocol_environment_shell.validation_result tzresult Lwt.t -val update_testchain_status: - Context.t -> - Block_header.t -> - Time.Protocol.t -> - Context.t tzresult Lwt.t +val update_testchain_status : + Context.t -> Block_header.t -> Time.Protocol.t -> Context.t tzresult Lwt.t (** [init_test_chain] must only be called on a forking block. *) -val init_test_chain: - Context.t -> - Block_header.t -> - Block_header.t tzresult Lwt.t +val init_test_chain : + Context.t -> Block_header.t -> Block_header.t tzresult Lwt.t -val check_liveness: +val check_liveness : live_blocks:Block_hash.Set.t -> live_operations:Operation_hash.Set.t -> Block_hash.t -> @@ -49,17 +44,18 @@ val check_liveness: unit tzresult Lwt.t type result = { - validation_result: Tezos_protocol_environment_shell.validation_result ; - block_metadata: MBytes.t ; - ops_metadata: MBytes.t list list ; - context_hash: Context_hash.t ; - forking_testchain: bool ; + validation_result : Tezos_protocol_environment_shell.validation_result; + block_metadata : MBytes.t; + ops_metadata : MBytes.t list list; + context_hash : Context_hash.t; + forking_testchain : bool } -val apply: +val apply : Chain_id.t -> max_operations_ttl:int -> predecessor_block_header:Block_header.t -> predecessor_context:Context.t -> block_header:Block_header.t -> - Operation.t list list -> result tzresult Lwt.t + Operation.t list list -> + result tzresult Lwt.t
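
As a reading aid for the reformatted block_validation.mli above, the sketch below shows how a caller might drive Block_validation.apply and consume the result record it returns. It is illustrative only: the function name validate_one_block, the binding names pred_header, pred_ctxt, header and ops, and the max_operations_ttl value of 60 are assumptions introduced here and do not appear anywhere in this patch.

(* Hypothetical caller of [Block_validation.apply]; all names and the
   literal 60 below are illustrative placeholders, not part of this patch. *)
let validate_one_block chain_id ~pred_header ~pred_ctxt ~header ~ops =
  Block_validation.apply
    chain_id
    ~max_operations_ttl:60
    ~predecessor_block_header:pred_header
    ~predecessor_context:pred_ctxt
    ~block_header:header
    ops
  >>=? fun {Block_validation.context_hash; forking_testchain; _} ->
  (* A caller would typically persist [context_hash] alongside the block and
     start a testchain validator when [forking_testchain] is true. *)
  return (context_hash, forking_testchain)

The labelled arguments mirror the signature in block_validation.mli exactly, so the sketch stays valid however the surrounding shell code names its own bindings.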