From 7e0e9ef777b9a67e9b04fda1f10a60d2e6688fff Mon Sep 17 00:00:00 2001 From: Ambre Austen Suhamy Date: Wed, 18 Dec 2024 20:30:50 +0100 Subject: [PATCH] WIP: directory targets with empty subdirs Signed-off-by: Ambre Austen Suhamy --- bin/exec.ml | 4 +- bin/ocaml/utop.ml | 2 +- boot/libs.ml | 2 +- src/dune_cache/local.ml | 92 ++- src/dune_cache/shared.ml | 24 +- src/dune_cache_storage/dune_cache_storage.ml | 21 +- src/dune_cache_storage/dune_cache_storage.mli | 2 + src/dune_engine/build_system.ml | 46 +- src/dune_engine/build_system.mli | 9 +- src/dune_engine/rule_cache.ml | 6 +- src/dune_engine/target_promotion.ml | 32 +- src/dune_rules/action_builder.ml | 2 +- src/dune_rules/expander.ml | 2 +- src/dune_targets/dune_targets.ml | 639 +++++++++++++----- src/dune_targets/dune_targets.mli | 67 +- src/fs/fs.ml | 15 +- .../directory-targets/subdirs-only.t | 23 + .../test-cases/dune-cache/empty-dir.t | 23 +- 18 files changed, 756 insertions(+), 255 deletions(-) create mode 100644 test/blackbox-tests/test-cases/directory-targets/subdirs-only.t diff --git a/bin/exec.ml b/bin/exec.ml index dec09ddf16a..f5dd8a4dff8 100644 --- a/bin/exec.ml +++ b/bin/exec.ml @@ -223,7 +223,7 @@ let get_path_and_build_if_necessary sctx ~no_rebuild ~dir ~prog = | Ok p -> build_prog ~no_rebuild ~prog p) | Relative_to_current_dir -> let path = Path.relative_to_source_in_build_or_external ~dir prog in - Build_system.file_exists path + Build_system.path_exists path >>= (function | true -> Memo.return (Some path) | false -> @@ -231,7 +231,7 @@ let get_path_and_build_if_necessary sctx ~no_rebuild ~dir ~prog = then Memo.return None else ( let path = Path.extend_basename path ~suffix:".exe" in - Build_system.file_exists path + Build_system.path_exists path >>| function | true -> Some path | false -> None)) diff --git a/bin/ocaml/utop.ml b/bin/ocaml/utop.ml index c812830414c..6bc59c9479e 100644 --- a/bin/ocaml/utop.ml +++ b/bin/ocaml/utop.ml @@ -33,7 +33,7 @@ let term = let utop_target = Filename.concat dir Utop.utop_exe in Path.build (Path.Build.relative (Context.build_dir context) utop_target) in - Build_system.file_exists utop_target + Build_system.path_exists utop_target >>= function | false -> User_error.raise diff --git a/boot/libs.ml b/boot/libs.ml index 6faacb217ec..b6aeed5a6d5 100644 --- a/boot/libs.ml +++ b/boot/libs.ml @@ -1,4 +1,4 @@ -let external_libraries = [ "unix"; "threads" ] +let external_libraries = [ "threads.posix" ] let local_libraries = [ ("otherlibs/ordering", Some "Ordering", false, None) diff --git a/src/dune_cache/local.ml b/src/dune_cache/local.ml index 4a38e900201..fe8c2aff82d 100644 --- a/src/dune_cache/local.ml +++ b/src/dune_cache/local.ml @@ -79,11 +79,20 @@ module Artifacts = struct (artifacts : Digest.t Targets.Produced.t) = let entries = - Targets.Produced.foldi artifacts ~init:[] ~f:(fun target file_digest entries -> - let entry : Metadata_entry.t = - { file_path = Path.Local.to_string target; file_digest } - in - entry :: entries) + Targets.Produced.foldi + artifacts + ~init:[] + ~f:(fun target ~is_file file_digest entries -> + (if Targets.Produced.debug_out + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[StoreMeta %S]" (Path.Local.to_string target) ++ Pp.space)); + let entry : Metadata_entry.t = + { file_path = Path.Local.to_string target; file_digest; is_file } + in + entry :: entries) |> List.rev in Metadata_file.store ~mode { metadata; entries } ~rule_digest @@ -103,12 +112,29 @@ module Artifacts = struct Result.try_with (fun () -> (* CR-someday 
rleshchinskiy: We recreate the directory structure here but it might be simpler to just use file digests instead of file names and no subdirectories. *) - Path.Local.Map.iteri targets.dirs ~f:(fun path _ -> - Path.mkdir_p (Path.append_local temp_dir path)); - Targets.Produced.iteri targets ~f:(fun path _ -> - let path_in_build_dir = Path.build (Path.Build.append_local targets.root path) in - let path_in_temp_dir = Path.append_local temp_dir path in - portable_hardlink_or_copy ~src:path_in_build_dir ~dst:path_in_temp_dir)) + (* The comment above seems outdated wrt. 'no subdirectories'... *) + Targets.Produced.iteri + targets + ~d:(fun dir _ -> + (if Targets.Produced.debug_out + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Store_dir %S]" (Path.Local.to_string dir) ++ Pp.space)); + Path.mkdir_p (Path.append_local temp_dir dir)) + ~f:(fun file _ -> + let path_in_build_dir = + Path.build (Path.Build.append_local targets.root file) + in + let path_in_temp_dir = Path.append_local temp_dir file in + (if Targets.Produced.debug_out + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Store_file: %S]" (Path.Local.to_string file) ++ Pp.space)); + portable_hardlink_or_copy ~src:path_in_build_dir ~dst:path_in_temp_dir)) ;; (* Step II of [store_skipping_metadata]. @@ -118,10 +144,22 @@ module Artifacts = struct : Digest.t Targets.Produced.t Or_exn.t Fiber.t = let open Fiber.O in + let fff path { Target.executable } = + let file = Path.append_local temp_dir path in + (if Targets.Produced.debug_out + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[CompDigests %S]" (Path.Local.to_string path) ++ Pp.space)); + compute_digest ~executable file + in + (* FIXME: nothing special here? *) Fiber.collect_errors (fun () -> - Targets.Produced.parallel_map targets ~f:(fun path { Target.executable } -> - let file = Path.append_local temp_dir path in - compute_digest ~executable file)) + Targets.Produced.parallel_map targets ~f:fff ~d:(fun path -> + function + | None -> Fiber.return None + | Some exe -> Fiber.map ~f:Option.some (fff path exe))) >>| Result.map_error ~f:(function | exn :: _ -> exn.Exn_with_backtrace.exn | [] -> assert false) @@ -132,9 +170,18 @@ module Artifacts = struct Targets.Produced.foldi artifacts ~init:Store_result.empty - ~f:(fun target digest results -> + ~f:(fun target ~is_file digest results -> + (* FIXME: really? 
*) + let _ignored = is_file in let path_in_temp_dir = Path.append_local temp_dir target in let path_in_cache = file_path ~file_digest:digest in + (if Targets.Produced.debug_out + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Store_to_cache %S]" (Path.Local.to_string target) + ++ Pp.space)); let store_using_hardlinks () = match Dune_cache_storage.Util.Optimistically.link @@ -281,10 +328,7 @@ module Artifacts = struct | Copy -> copy ~src ~dst); Unwind.push unwind (fun () -> Path.Build.unlink_no_err target) in - try - Path.Local.Map.iteri artifacts.dirs ~f:(fun dir _ -> mk_dir dir); - Targets.Produced.iteri artifacts ~f:mk_file - with + try Targets.Produced.iteri artifacts ~f:mk_file ~d:(fun dir _ -> mk_dir dir) with | exn -> Unwind.unwind unwind; reraise exn @@ -296,8 +340,14 @@ module Artifacts = struct let artifacts = Path.Local.Map.of_list_map_exn entries - ~f:(fun { Metadata_entry.file_path; file_digest } -> - Path.Local.of_string file_path, file_digest) + ~f:(fun { Metadata_entry.file_path; file_digest; is_file } -> + (if Targets.Produced.debug_out + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Restore: %S]" file_path ++ Pp.space)); + Path.Local.of_string file_path, (file_digest, is_file)) |> Targets.Produced.of_files target_dir in try diff --git a/src/dune_cache/shared.ml b/src/dune_cache/shared.ml index 21577b83efb..8cf56a5f3e8 100644 --- a/src/dune_cache/shared.ml +++ b/src/dune_cache/shared.ml @@ -121,17 +121,33 @@ struct ] in let update_cached_digests ~targets_and_digests = - Targets.Produced.iteri targets_and_digests ~f:(fun path digest -> - Cached_digest.set (Path.Build.append_local targets_and_digests.root path) digest) + Targets.Produced.iteri + targets_and_digests + ~f:(fun path digest -> + Cached_digest.set (Path.Build.append_local targets_and_digests.root path) digest) + ~d:(fun path -> + function + | None -> () + | Some digest -> + Cached_digest.set + (Path.Build.append_local targets_and_digests.root path) + digest) in match Targets.Produced.map_with_errors produced_targets ~all_errors:false ~f:(fun target () -> + (* All of this monad boilerplate seems unnecessary since we don't care about errors... *) match Local.Target.create target with | Some t -> Ok t | None -> Error ()) + ~d:(fun target _meta -> + (* FIXME: maybe this vvvv create is only for files not dirs. + Also why ignore if meta is set or not? *) + match Local.Target.create target with + | Some t -> Ok (Some t) + | None -> Error ()) with | Error _ -> Fiber.return None | Ok targets -> @@ -194,6 +210,10 @@ struct produced_targets ~all_errors:true ~f:(fun target () -> compute_digest target) + ~d:(fun _target meta -> + (* FIXME: don't write code tired, I forgot why we can make that assertion... 
*) + assert (Option.is_none meta); + Ok None (* Result.map ~f:Option.some (compute_digest target) *)) with | Ok result -> result | Error errors -> diff --git a/src/dune_cache_storage/dune_cache_storage.ml b/src/dune_cache_storage/dune_cache_storage.ml index bea1de2b1e8..889ee4288ce 100644 --- a/src/dune_cache_storage/dune_cache_storage.ml +++ b/src/dune_cache_storage/dune_cache_storage.ml @@ -217,21 +217,28 @@ module Artifacts = struct type t = { file_path : string ; file_digest : Digest.t + ; is_file : bool } let equal x y = - Digest.equal x.file_digest y.file_digest && String.equal x.file_path y.file_path + Digest.equal x.file_digest y.file_digest + && String.equal x.file_path y.file_path + && x.is_file = y.is_file ;; - let to_sexp { file_path; file_digest } = - Sexp.List [ Atom file_path; Atom (Digest.to_string file_digest) ] + let to_sexp { file_path; file_digest; is_file } = + Sexp.List + [ Atom file_path + ; Atom (Digest.to_string file_digest) + ; Atom (Bool.to_string is_file) + ] ;; let of_sexp = function - | Sexp.List [ Atom file_path; Atom file_digest ] -> - (match Digest.from_hex file_digest with - | Some file_digest -> Ok { file_path; file_digest } - | None -> + | Sexp.List [ Atom file_path; Atom file_digest; Atom is_file ] -> + (match Digest.from_hex file_digest, Bool.of_string is_file with + | Some file_digest, Some is_file -> Ok { file_path; file_digest; is_file } + | None, _ | _, None -> Error (Failure (sprintf diff --git a/src/dune_cache_storage/dune_cache_storage.mli b/src/dune_cache_storage/dune_cache_storage.mli index 85503886345..06be69ad9ef 100644 --- a/src/dune_cache_storage/dune_cache_storage.mli +++ b/src/dune_cache_storage/dune_cache_storage.mli @@ -70,6 +70,8 @@ module Artifacts : sig type t = { file_path : string (** Can have more than one component for directory targets *) ; file_digest : Digest.t + ; is_file : bool + (* We need to be able to recreate them just from the metadata so we need to know the type *) } end diff --git a/src/dune_engine/build_system.ml b/src/dune_engine/build_system.ml index 7cb8e92c139..a021286dc29 100644 --- a/src/dune_engine/build_system.ml +++ b/src/dune_engine/build_system.ml @@ -160,14 +160,12 @@ and Exported : sig type target_kind = | File_target - | Dir_target of { targets : Digest.t Targets.Produced.t } + | Dir_target of Digest.t Targets.Produced.t (* The below two definitions are useless, but if we remove them we get an "Undefined_recursive_module" exception. *) - val build_file_memo : (Path.t, Digest.t * target_kind) Memo.Table.t Lazy.t - [@@warning "-32"] - + val build_memo : (Path.t, Digest.t * target_kind) Memo.Table.t Lazy.t [@@warning "-32"] val build_alias_memo : (Alias.t, Dep.Fact.Files.t) Memo.Table.t [@@warning "-32"] val dep_on_alias_definition : Rules.Dir_rules.Alias_spec.item -> unit Action_builder.t end = struct @@ -206,6 +204,7 @@ end = struct (* Fact: alias [a] expands to the set of file-digest pairs [digests] *) Dep.Fact.alias a digests | File f -> + (* Not necessarily a file. Can also be a directory... *) let+ digest = build_file f in (* Fact: file [f] has digest [digest] *) Dep.Fact.file f digest @@ -820,22 +819,19 @@ end = struct type target_kind = | File_target | Dir_target of - { targets : - (* All targets of the rule which produced the directory target in question. *) - Digest.t Targets.Produced.t - } + (* All targets of the rule which produced the directory target in question. 
*) + Digest.t Targets.Produced.t let target_kind_equal a b = match a, b with | File_target, File_target -> true - | Dir_target { targets = a }, Dir_target { targets = b } -> - Targets.Produced.equal a b ~equal:Digest.equal + | Dir_target t1, Dir_target t2 -> Targets.Produced.equal t1 t2 ~equal:Digest.equal | File_target, Dir_target _ | Dir_target _, File_target -> false ;; (* A rule can have multiple targets but calls to [execute_rule] are memoized, so the rule will be executed only once. *) - let build_file_impl path = + let build_impl path = Load_rules.get_rule_or_source path >>= function | Source digest -> Memo.return (digest, File_target) @@ -856,9 +852,12 @@ end = struct rleshchinskiy: Is this digest ever used? [build_dir] discards it and do we (or should we) ever use [build_file] to build directories? Perhaps this could be split in two memo tables, one for files and one for directories. *) + (* ElectreAAS: Tentative answer to above comments: a lot of functions are called + [build_file] or [create_file] even though they also handle directories. + Also yes this digest is used by [Exported.build_dep] defined above. *) (match Cached_digest.build_file ~allow_dirs:true path with - | Ok digest -> digest, Dir_target { targets } - (* Must be a directory target *) + (* Must be a directory target. *) + | Ok digest -> digest, Dir_target targets | Error _ -> (* CR-someday amokhov: The most important reason we end up here is [No_such_file]. I think some of the outcomes above are impossible @@ -980,7 +979,7 @@ end = struct let eval_pred = Memo.exec eval_pred_memo - let build_file_memo = + let build_memo = lazy (let cutoff = match Dune_config.Config.(get cutoffs_that_reduce_concurrency_in_watch_mode) with @@ -992,15 +991,15 @@ end = struct ~store:(module Path.Table) ~input:(module Path) ?cutoff - build_file_impl) + build_impl) ;; - let build_file path = Memo.exec (Lazy.force build_file_memo) path >>| fst + let build_file path = Memo.exec (Lazy.force build_memo) path >>| fst let build_dir path = - let+ (_ : Digest.t), kind = Memo.exec (Lazy.force build_file_memo) path in + let+ (_ : Digest.t), kind = Memo.exec (Lazy.force build_memo) path in match kind with - | Dir_target { targets } -> targets + | Dir_target targets -> targets | File_target -> Code_error.raise "build_dir called on a file target" [ "path", Path.to_dyn path ] ;; @@ -1050,7 +1049,7 @@ include Exported when executing the very same [Action_builder.t] with [Action_builder.exec] -- the results of both [Action_builder.static_deps] and [Action_builder.exec] are cached. 
*) -let file_exists fn = +let path_exists fn = Load_rules.load_dir ~dir:(Path.parent_exn fn) >>= function | Source { filenames } | External { filenames } -> @@ -1060,7 +1059,7 @@ let file_exists fn = (Path.Build.Map.mem rules_here.by_file_targets (Path.as_in_build_dir_exn fn)) | Build_under_directory_target { directory_target_ancestor } -> let+ path_map = build_dir (Path.build directory_target_ancestor) in - Targets.Produced.mem path_map (Path.as_in_build_dir_exn fn) + Targets.Produced.mem_dir path_map (Path.as_in_build_dir_exn fn) ;; let files_of ~dir = @@ -1157,7 +1156,12 @@ let run_exn f = ;; let build_file p = - let+ (_ : Digest.t) = build_file p in + let+ _digest = build_file p in + () +;; + +let build_dir p = + let+ _targets = build_dir p in () ;; diff --git a/src/dune_engine/build_system.mli b/src/dune_engine/build_system.mli index f521a117fc1..6e7ed2bc625 100644 --- a/src/dune_engine/build_system.mli +++ b/src/dune_engine/build_system.mli @@ -2,9 +2,12 @@ open Import -(** Build a file. *) +(** Build a target, maybe a file maybe a directory. *) val build_file : Path.t -> unit Memo.t +(** Build a directory. *) +val build_dir : Path.t -> unit Memo.t + (** Build a file and read its contents with [f]. The execution of [f] is not memoized, so call sites should be careful to avoid duplicating [f]'s work. *) val with_file : Path.t -> f:(Path.t -> 'a) -> 'a Memo.t @@ -13,7 +16,7 @@ val with_file : Path.t -> f:(Path.t -> 'a) -> 'a Memo.t val read_file : Path.t -> string Memo.t (** Return [true] if a file exists or is buildable *) -val file_exists : Path.t -> bool Memo.t +val path_exists : Path.t -> bool Memo.t (** Build a set of dependencies and return learned facts about them. *) val build_deps : Dep.Set.t -> Dep.Facts.t Memo.t @@ -26,7 +29,7 @@ val record_deps : Dep.Set.t -> unit Action_builder.t This function does the minimum amount of work necessary to produce the result, and may do some building (e.g., if [glob] points inside a directory target). To force building - the files you need, use [build_file]. *) + the files you need, use [build]. *) val eval_pred : File_selector.t -> Filename_set.t Memo.t (** Same as [eval_pred] with [Predicate.true_] as predicate. *) diff --git a/src/dune_engine/rule_cache.ml b/src/dune_engine/rule_cache.ml index fec4ffec41a..576fbdd8906 100644 --- a/src/dune_engine/rule_cache.ml +++ b/src/dune_engine/rule_cache.ml @@ -138,9 +138,11 @@ module Workspace_local = struct match Targets.Produced.of_validated targets with | Error error -> Miss (Error_while_collecting_directory_targets error) | Ok targets -> + let f target _ = Cached_digest.build_file ~allow_dirs:true target in (match - Targets.Produced.map_with_errors targets ~all_errors:false ~f:(fun target () -> - Cached_digest.build_file ~allow_dirs:true target) + Targets.Produced.map_with_errors targets ~all_errors:false ~f ~d:(fun t e -> + (* The important thing might be that [Cached_digest.build_file] is just for files? *) + Result.map ~f:Option.some (f t e)) with | Ok produced_targets -> Dune_cache.Hit_or_miss.Hit produced_targets | Error _ -> Miss Targets_missing) diff --git a/src/dune_engine/target_promotion.ml b/src/dune_engine/target_promotion.ml index ebe09cbaa2a..3f86ec87fae 100644 --- a/src/dune_engine/target_promotion.ml +++ b/src/dune_engine/target_promotion.ml @@ -184,7 +184,16 @@ let promote ~(targets : _ Targets.Produced.t) ~(promote : Rule.Promote.t) ~promo in (* Here we know that the promotion directory exists but we may need to create additional subdirectories for [targets.dirs]. 
*) - Path.Local.Map.iteri targets.dirs ~f:(fun dir (_ : Digest.t Filename.Map.t) -> + Targets.Produced.iter_dirs targets ~f:(fun dir _ -> + (if Targets.Produced.debug_out + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf + "[Promote: %S]" + (Path.Build.to_string (Path.Build.append_local targets.root dir)) + ++ Pp.space)); create_directory_if_needed ~dir:(Path.Build.append_local targets.root dir)); let promote_until_clean = match promote.lifetime with @@ -209,7 +218,7 @@ let promote ~(targets : _ Targets.Produced.t) ~(promote : Rule.Promote.t) ~promo in (* There can be some files or directories left over from earlier builds, so we need to remove them from [targets.dirs]. *) - let remove_stale_files_and_subdirectories ~dir ~expected_filenames = + let remove_stale_files_and_subdirectories ~dir = (* CR-someday rleshchinskiy: This can probably be made more efficient by relocating root once. *) let build_dir = Path.Build.append_local targets.root dir in @@ -224,17 +233,16 @@ let promote ~(targets : _ Targets.Produced.t) ~(promote : Rule.Promote.t) ~promo | Error unix_error -> directory_target_error ~unix_error ~dst_dir [] | Ok dir_contents -> Fs_cache.Dir_contents.iter dir_contents ~f:(function - | filename, S_REG -> - if not (String.Map.mem expected_filenames filename) - then Path.unlink_no_err (Path.relative dst_dir filename) - | dirname, S_DIR -> - let src_dir = Path.Local.relative dir dirname in - if not (Path.Local.Map.mem targets.dirs src_dir) - then Path.rm_rf (Path.relative dst_dir dirname) + | file_name, S_REG -> + if not (Targets.Produced.mem targets (Path.Build.relative build_dir file_name)) + then Path.unlink_no_err (Path.relative dst_dir file_name) + | dir_name, S_DIR -> + if not + (Targets.Produced.mem_dir targets (Path.Build.relative build_dir dir_name)) + then Path.rm_rf (Path.relative dst_dir dir_name) | name, _kind -> Path.unlink_no_err (Path.relative dst_dir name)) in Fiber.sequential_iter_seq - (Path.Local.Map.to_seq targets.dirs) - ~f:(fun (dir, filenames) -> - remove_stale_files_and_subdirectories ~dir ~expected_filenames:filenames) + (Targets.Produced.all_dirs_seq targets) + ~f:(fun (dir, _contents) -> remove_stale_files_and_subdirectories ~dir) ;; diff --git a/src/dune_rules/action_builder.ml b/src/dune_rules/action_builder.ml index 870987b0d79..acada5a5941 100644 --- a/src/dune_rules/action_builder.ml +++ b/src/dune_rules/action_builder.ml @@ -97,7 +97,7 @@ let progn ts = ;; let if_file_exists p ~then_ ~else_ = - let* exists = of_memo (Build_system.file_exists p) in + let* exists = of_memo (Build_system.path_exists p) in if exists then then_ else else_ ;; diff --git a/src/dune_rules/expander.ml b/src/dune_rules/expander.ml index a6512635195..f1ea0859bba 100644 --- a/src/dune_rules/expander.ml +++ b/src/dune_rules/expander.ml @@ -668,7 +668,7 @@ let expand_pform_macro (Without (let open Memo.O in let path = relative ~source dir s in - let+ available = Build_system.file_exists path in + let+ available = Build_system.path_exists path in available |> string_of_bool |> string)) | Read -> expand_read_macro ~dir ~source s ~read:string | Read_lines -> diff --git a/src/dune_targets/dune_targets.ml b/src/dune_targets/dune_targets.ml index 4fd1cf81fbc..984f1751275 100644 --- a/src/dune_targets/dune_targets.ml +++ b/src/dune_targets/dune_targets.ml @@ -63,6 +63,18 @@ module Validated = struct ; dirs : Filename.Set.t } + let pp { root; files; dirs } = + let open Pp.O in + Pp.hovbox + (Pp.textf "Validated: root=%S, files=[" (Path.Build.to_string root) + ++ 
Pp.concat + ~sep:(Pp.text "; ") + (Filename.Set.to_list_map files ~f:(Pp.textf "%S")) + ++ Pp.text "], dirs=[" + ++ Pp.concat ~sep:(Pp.text "; ") (Filename.Set.to_list_map dirs ~f:(Pp.textf "%S")) + ++ Pp.char ']') + ;; + let iter { root; files; dirs } ~file ~dir = Filename.Set.iter files ~f:(fun fn -> file (Path.Build.relative root fn)); Filename.Set.iter dirs ~f:(fun dn -> dir (Path.Build.relative root dn)) @@ -162,12 +174,77 @@ module Produced = struct (* CR-someday amokhov: A hierarchical representation of the produced file trees may be better. It would allow for hierarchical traversals and reduce the number of internal invariants. *) + + (** All file names and directory names are relative to the root (['a t]). *) + type 'a dir_contents = + { metadata : 'a option + ; files : 'a Filename.Map.t (* mapping file name -> 'a *) + ; subdirs : + 'a dir_contents Filename.Map.t (* mapping directory name -> 'a dir_contents *) + } + + let is_empty_dir_conts { files; subdirs; metadata = _ } = + Filename.Map.is_empty files && Filename.Map.is_empty subdirs + ;; + + let rec pp_dir_conts ?(payload_printer = fun _ -> Pp.char '?') contents = + if is_empty_dir_conts contents + then Pp.text "" + else ( + let { files; subdirs; metadata } = contents in + let open Pp.O in + Pp.text "{ " + ++ Pp.hovbox + (Pp.text "Meta: " + ++ (match metadata with + | Some meta -> payload_printer meta + | None -> Pp.text "None") + ++ Pp.text "; Files: (" + ++ Pp.box + (Pp.concat + ~sep:(Pp.text ", ") + (Filename.Map.to_list_map files ~f:(fun name payload -> + Pp.textf "%S -> " name ++ payload_printer payload))) + ++ Pp.text ");" + ++ Pp.space + ++ Pp.hovbox + (Pp.text "Subdirs: (" + ++ Pp.concat + ~sep:(Pp.text ", ") + (Filename.Map.to_list_map subdirs ~f:(fun name sub -> + Pp.textf "%S -> " name ++ pp_dir_conts ~payload_printer sub)) + ++ Pp.text ")")) + ++ Pp.char '}') + ;; + type 'a t = { root : Path.Build.t - ; files : 'a Filename.Map.t - ; dirs : 'a Filename.Map.t Path.Local.Map.t + ; contents : 'a dir_contents } + let pp ?payload_printer { root; contents } = + let open Pp.O in + Pp.hovbox + (Pp.textf "root=%S, contents=" (Path.Build.to_string root) + ++ pp_dir_conts ?payload_printer contents) + ;; + + let equal + { root = root1; contents = contents1 } + { root = root2; contents = contents2 } + ~equal + = + let rec eq_aux + { files = files1; subdirs = dirs1; metadata = metadata1 } + { files = files2; subdirs = dirs2; metadata = metadata2 } + = + Filename.Map.equal files1 files2 ~equal + && Filename.Map.equal dirs1 dirs2 ~equal:eq_aux + && Option.equal equal metadata1 metadata2 + in + Path.Build.equal root1 root2 && eq_aux contents1 contents2 + ;; + module Error = struct type t = | Missing_dir of Path.Build.t @@ -215,183 +292,416 @@ module Produced = struct ;; end - let of_validated = - (* The call sites ensure that [dir = Path.Build.append_local validated.root local]. *) - let rec collect (dir : Path.Build.t) (local : Path.Local.t) - : (unit Filename.Map.t Path.Local.Map.t, Error.t) result - = - match Path.readdir_unsorted_with_kinds (Path.build dir) with - | Error (Unix.ENOENT, _, _) -> Error (Missing_dir dir) - | Error e -> Error (Unreadable_dir (dir, e)) - | Ok dir_contents -> - let open Result.O in - let+ filenames, dirs = - Result.List.fold_left - dir_contents - ~init:(Filename.Map.empty, Path.Local.Map.empty) - ~f:(fun (acc_filenames, acc_dirs) (filename, kind) -> - match (kind : File_kind.t) with - (* CR-someday rleshchinskiy: Make semantics of symlinks more consistent. 
*) - | S_LNK | S_REG -> - Ok (Filename.Map.add_exn acc_filenames filename (), acc_dirs) - | S_DIR -> - let+ dir = - collect - (Path.Build.relative dir filename) - (Path.Local.relative local filename) - in - acc_filenames, Path.Local.Map.union_exn acc_dirs dir - | _ -> Error (Unsupported_file (Path.Build.relative dir filename, kind))) - in - if not (Filename.Map.is_empty filenames) - then Path.Local.Map.add_exn dirs local filenames - else dirs + let empty = + { files = Filename.Map.empty; subdirs = Filename.Map.empty; metadata = None } + ;; + + let debug_create = false + let debug_consume = false + let debug_out = false + + let rec merge_contents c1 c2 = + let files = + Filename.Map.union c1.files c2.files ~f:(fun _ p1 p2 -> + assert (Poly.equal p1 p2); + Some p1) in - let directory root dirname = - let open Result.O in - let dir = Path.Build.relative root dirname in - let* files = collect dir (Path.Local.of_string dirname) in - if Path.Local.Map.is_empty files then Error (Empty_dir dir) else Ok files + let subdirs = + Filename.Map.union c1.subdirs c2.subdirs ~f:(fun _ s1 s2 -> + Some (merge_contents s1 s2)) in - fun (validated : Validated.t) -> - match - Filename.Set.to_list validated.dirs - |> Result.List.map ~f:(directory validated.root) - with - | Error _ as error -> error - | Ok dirs -> - let files = - (* CR-someday rleshchinskiy: Check if the files actually exist here. Currently, - we check this here for directory targets but for files, the check is done by - the cache. *) - Filename.Set.to_map validated.files ~f:(fun _ -> ()) - in - (* The [union_exn] below can't raise because each map in [dirs] contains - unique keys, which are paths rooted at the corresponding [dir]s. *) - let dirs = - List.fold_left dirs ~init:Path.Local.Map.empty ~f:Path.Local.Map.union_exn + let metadata = + match c1.metadata, c2.metadata with + | None, None -> None + | Some x, None | None, Some x -> Some x + | Some x, Some y -> + assert (Poly.equal x y); + Some x + in + { files; subdirs; metadata } + ;; + + let merge t1 t2 = + if not (Path.Build.equal t1.root t2.root) + then Code_error.raise "Can't merge two targets with different roots" []; + let contents = merge_contents t1.contents t2.contents in + { root = t1.root; contents } + ;; + + (** The call sites ensure that [dir = Path.Build.append_local validated.root local]. + No need for [local] actually... 
*) + let rec contents_of_dir ~file_f (dir : Path.Build.t) : ('a dir_contents, Error.t) result + = + let open Result.O in + let init = empty in + match Path.readdir_unsorted_with_kinds (Path.build dir) with + | Error (Unix.ENOENT, _, _) -> Error (Missing_dir dir) + | Error e -> Error (Unreadable_dir (dir, e)) + | Ok dir_contents -> + let+ results = + Result.List.fold_left dir_contents ~init ~f:(fun dir_contents (name, kind) -> + match (kind : File_kind.t) with + | S_LNK | S_REG -> + let files = + match file_f (Path.Local.relative (Path.Build.local dir) name) with + | Some payload -> Filename.Map.add_exn dir_contents.files name payload + | None -> dir_contents.files + in + Ok { dir_contents with files } + | S_DIR -> + let+ subdirs_contents = + contents_of_dir ~file_f (Path.Build.relative dir name) + in + { dir_contents with + subdirs = Filename.Map.add_exn dir_contents.subdirs name subdirs_contents + } + | _ -> Error (Unsupported_file (Path.Build.relative dir name, kind))) + in + (if debug_create + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "In contents_of_dir %S => " (Path.Build.to_string dir) + ++ pp_dir_conts results + ++ Pp.space)); + results + ;; + + let of_validated (validated : Validated.t) = + let open Result.O in + (* We assume here that [dir_name] is either a child of [root], or that we're ok with having [root/a/b] but not [root/a]. *) + let aggregate_dir { root; contents } dir_name = + let dir = Path.Build.relative root dir_name in + let* new_contents = contents_of_dir ~file_f:(fun _ -> Some ()) dir in + if is_empty_dir_conts new_contents + then Error (Empty_dir dir) + else ( + let contents = + { contents with + subdirs = Filename.Map.add_exn contents.subdirs dir_name new_contents + } in - Ok { root = validated.root; files; dirs } + Ok { root; contents }) + in + let rooted_files = Filename.Set.to_map validated.files ~f:(Fun.const ()) in + let+ result = + Filename.Set.to_list validated.dirs + |> Result.List.fold_left + ~init: + { root = validated.root + ; contents = + { files = rooted_files; subdirs = Filename.Map.empty; metadata = None } + } + ~f:aggregate_dir + in + (if debug_create + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.hovbox + (Pp.paragraph "In of_validated(" + ++ Pp.cut + ++ Validated.pp validated + ++ Pp.text ") => " + ++ pp ~payload_printer:(fun () -> Pp.text "()") result) + ++ Pp.text "\n\n")); + result ;; - let of_files root files = - let f file payload t = - let parent = Path.Local.parent_exn file in - if Path.Local.is_root parent - then - { t with - files = Filename.Map.add_exn t.files (Path.Local.to_string file) payload + let of_files root (file_list : ('a * bool) Path.Local.Map.t) : 'a t = + let rec aux payload is_file contents = function + | [] -> + Code_error.raise + "I've been hoisted by my own petard! 
(path.explode)" + [ "file_list", Path.Local.Map.to_dyn Dyn.opaque file_list ] + | [ final ] when is_file -> + { contents with files = Filename.Map.add_exn contents.files final payload } + | [ final ] -> + { contents with + subdirs = Filename.Map.add_exn contents.subdirs final empty + ; metadata = Some payload } - else ( - let fn = Path.Local.basename file in - { t with - dirs = - Path.Local.Map.update t.dirs parent ~f:(fun files -> - let files = Option.value files ~default:Filename.Map.empty in - Some (Filename.Map.add_exn files fn payload)) - }) + | parent :: rest -> + let subdirs = + Filename.Map.update contents.subdirs parent ~f:(fun contents_opt -> + Some (aux payload is_file (Option.value contents_opt ~default:empty) rest)) + in + { contents with subdirs } + in + let init = empty in + let contents = + Path.Local.Map.foldi file_list ~init ~f:(fun file (payload, is_file) contents -> + let parent = Path.Local.parent_exn file in + if Path.Local.is_root parent + then + if is_file + then + { contents with + files = + Filename.Map.add_exn contents.files (Path.Local.to_string file) payload + } + else + { contents with + subdirs = + Filename.Map.add_exn contents.subdirs (Path.Local.to_string file) empty + ; metadata = Some payload + } + else aux payload is_file contents (Path.Local.explode file)) in - let init = { root; files = Filename.Map.empty; dirs = Path.Local.Map.empty } in - Path.Local.Map.foldi files ~init ~f + (if debug_create + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.text "In of_files (" + ++ Pp.hovbox + (Pp.concat + ~sep:(Pp.text ", ") + (Path.Local.Map.to_list_map file_list ~f:(fun file_name _payload -> + Pp.textf "%S -> ?" (Path.Local.to_string file_name)))) + ++ Pp.text ") => " + ++ pp { root; contents })); + { root; contents } ;; - let all_files_seq { root = _; files; dirs } = - Seq.append - (Filename.Map.to_seq files - |> Seq.map ~f:(fun (file, payload) -> Path.Local.of_string file, payload)) - (Seq.concat - (Path.Local.Map.to_seq dirs - |> Seq.map ~f:(fun (dir, filenames) -> - Filename.Map.to_seq filenames - |> Seq.map ~f:(fun (filename, payload) -> - Path.Local.relative dir filename, payload)))) + let all_files_seq { contents; root = _ } = + let rec aux path { files; subdirs; metadata = _ } = + Seq.append + (Filename.Map.to_seq files + |> Seq.map ~f:(fun (file_name, payload) -> + let file = Path.Local.relative path file_name in + (if debug_consume + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[FileSeq %S]" (Path.Local.to_string file) ++ Pp.space)); + file, payload)) + (Seq.concat + (Filename.Map.to_seq subdirs + |> Seq.map ~f:(fun (dir_name, subdir_contents) -> + aux (Path.Local.relative path dir_name) subdir_contents))) + in + aux Path.Local.root contents + ;; + + let all_dirs_seq { root = _; contents } = + (* TODO: why ignore meta here? 
*) + let rec aux path { subdirs; files = _; metadata = _ } = + Seq.concat + (Filename.Map.to_seq subdirs + |> Seq.map ~f:(fun (subdir_name, subdir_contents) -> + let subdir = Path.Local.relative path subdir_name in + (if debug_consume + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[DirSeq %S] " (Path.Local.to_string subdir) ++ Pp.space)); + Seq.cons (subdir, subdir_contents) (aux subdir subdir_contents))) + in + aux Path.Local.root contents ;; - let find { root; files; dirs } path = + let find ({ root; contents } as r) file = let open Option.O in - let* path = - Path.Local.descendant (Path.Build.local path) ~of_:(Path.Build.local root) + let rec find_aux path { files; subdirs; metadata = _ } = function + | [] -> + Code_error.raise + "I've been hoisted by my own petard! (path.explode)" + [ "file", Path.Build.to_dyn file ] + | [ final ] -> Filename.Map.find files final + | parent :: rest -> + let path = Path.Local.relative path parent in + let* subdir = Filename.Map.find subdirs parent in + find_aux path subdir rest in - let* parent = Path.Local.parent path in - if Path.Local.is_root parent - then Filename.Map.find files (Path.Local.to_string path) - else - let* files = Path.Local.Map.find dirs parent in - Filename.Map.find files (Path.Local.basename path) + let root = Path.Build.local root in + let* path = Path.Local.descendant (Path.Build.local file) ~of_:root in + let result = find_aux root contents (Path.Local.explode path) in + (let open Pp.O in + if debug_consume + then + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "In find (%S):" (Path.Build.to_string file) + ++ Pp.space + ++ pp r + ++ Pp.text " => " + ++ Pp.paragraph + (if Option.is_some result then "found something!" else "found nothing!") + ++ Pp.text "\n\n")); + result ;; let mem t path = Option.is_some (find t path) - let find_dir { root; files; dirs } path = - match Path.Local.descendant (Path.Build.local path) ~of_:(Path.Build.local root) with - | Some dir when Path.Local.is_root dir -> Some files - | Some dir -> Path.Local.Map.find dirs dir - | None -> None + let find_dir ({ root; contents } as r) dir = + let open Option.O in + let rec find_dir_aux path { subdirs; files = _; metadata = _ } = function + | [] -> + Code_error.raise + "I've been hoisted by my own petard! (path.explode)" + [ "dir", Path.Build.to_dyn dir ] + | [ final ] -> + let+ subdir = + Filename.Map.find subdirs final + (* (Path.Local.relative path final) *) + in + subdir.files + | parent :: rest -> + let path = Path.Local.relative path parent in + let* subdir = Filename.Map.find subdirs parent in + find_dir_aux path subdir rest + in + let root = Path.Build.local root in + let* path = Path.Local.descendant (Path.Build.local dir) ~of_:root in + let result = find_dir_aux root contents (Path.Local.explode path) in + (let open Pp.O in + if debug_consume + then + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "In find_dir (%S): " (Path.Build.to_string dir) + ++ pp r + ++ Pp.text " => " + ++ Pp.paragraph + (if Option.is_some result then "found something!" 
else "found nothing!") + ++ Pp.text "\n\n")); + result ;; - let equal - { root = root1; files = files1; dirs = dirs1 } - { root = root2; files = files2; dirs = dirs2 } - ~equal - = - Path.Build.equal root1 root2 - && Filename.Map.equal files1 files2 ~equal - && Path.Local.Map.equal dirs1 dirs2 ~equal:(Filename.Map.equal ~equal) - ;; + let mem_dir t path = Option.is_some (find_dir t path) - let exists { root = _; files; dirs } ~f = - Filename.Map.exists files ~f || Path.Local.Map.exists dirs ~f:(String.Map.exists ~f) + let exists { root = _; contents } ~f = + let rec aux { files; subdirs; metadata = _ } = + Filename.Map.exists files ~f || Filename.Map.exists subdirs ~f:aux + in + aux contents ;; - let foldi { root = _; files; dirs } ~init ~f = - let acc = - Filename.Map.foldi files ~init ~f:(fun file acc -> - f (Path.Local.of_string file) acc) + let foldi { contents; root = _ } ~init ~f = + let rec aux path { files; subdirs; metadata } acc = + let acc = + match metadata with + | None -> acc + | Some meta -> f path ~is_file:false meta acc + in + let acc = + Filename.Map.foldi files ~init:acc ~f:(fun file_name -> + let file = Path.Local.relative path file_name in + (if debug_consume + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Foldi %S] " (Path.Local.to_string file) ++ Pp.space)); + f file ~is_file:true) + in + Filename.Map.foldi subdirs ~init:acc ~f:(fun dir_name -> + aux (Path.Local.relative path dir_name)) in - Path.Local.Map.foldi dirs ~init:acc ~f:(fun dir filenames acc -> - String.Map.foldi filenames ~init:acc ~f:(fun filename payload acc -> - f (Path.Local.relative dir filename) payload acc)) + aux Path.Local.root contents init ;; - let iteri { root = _; files; dirs } ~f = - Filename.Map.iteri files ~f:(fun file acc -> f (Path.Local.of_string file) acc); - Path.Local.Map.iteri dirs ~f:(fun dir filenames -> - String.Map.iteri filenames ~f:(fun filename payload -> - f (Path.Local.relative dir filename) payload)) + let iteri { contents; root = _ } ~f ~d = + let rec aux path { files; subdirs; metadata } = + (if debug_consume + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Iteri M %S]" (Path.Local.to_string path) ++ Pp.space)); + d path metadata; + Filename.Map.iteri files ~f:(fun file_name payload -> + let file = Path.Local.relative path file_name in + (if debug_consume + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Iteri F %S]" (Path.Local.to_string file) ++ Pp.space)); + f file payload); + Filename.Map.iteri subdirs ~f:(fun dir_name dir_contents -> + let dir = Path.Local.relative path dir_name in + (if debug_consume + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Iteri D %S]" (Path.Local.to_string dir) ++ Pp.space)); + (* Depth-first traversal here. 
*) + aux dir dir_contents) + in + aux Path.Local.root contents ;; + let iter_files t ~f = iteri t ~f ~d:(fun _ _ -> ()) + let iter_dirs t ~f = iteri t ~f:(fun _ _ -> ()) ~d:f + module Path_traversal = Fiber.Make_parallel_map (Path.Local.Map) module Filename_traversal = Fiber.Make_parallel_map (String.Map) - let parallel_map { root; files; dirs } ~f = + let parallel_map { root; contents } ~f ~d = let open Fiber.O in - let+ files, dirs = - Fiber.fork_and_join - (fun () -> - Filename_traversal.parallel_map files ~f:(fun file -> - f (Path.Local.of_string file))) - (fun () -> - Path_traversal.parallel_map dirs ~f:(fun dir files -> - Filename_traversal.parallel_map files ~f:(fun file payload -> - f (Path.Local.relative dir file) payload))) + let rec aux path { files; subdirs; metadata } = + let+ files, (subdirs, metadata) = + Fiber.fork_and_join + (fun () -> + Filename_traversal.parallel_map files ~f:(fun file_name -> + let file = Path.Local.relative path file_name in + (if debug_consume + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Paramap F %S]" (Path.Local.to_string file) ++ Pp.space)); + f file)) + (fun () -> + Fiber.fork_and_join + (fun () -> + Filename_traversal.parallel_map subdirs ~f:(fun dir_name -> + let dir = Path.Local.relative path dir_name in + aux dir)) + (fun () -> + (if debug_consume + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Paramap D %S]" (Path.Local.to_string path) + ++ Pp.space)); + d path metadata)) + in + { files; subdirs; metadata } in - { root; files; dirs } + let+ contents = aux Path.Local.root contents in + { root; contents } ;; - let digest { root = _; files; dirs } = - let all_digests = - Filename.Map.values files - :: Path.Local.Map.to_list_map dirs ~f:(fun _ -> String.Map.values) + let digest { root = _; contents } = + (* FIXME: a mistake here might have bad consequences *) + let rec all_digests _ { files; subdirs; metadata } = + let ffiles = Filename.Map.values files in + let files_and_subdirs = + List.concat (ffiles :: Filename.Map.to_list_map subdirs ~f:all_digests) + in + match metadata with + | None -> files_and_subdirs + | Some meta -> meta :: files_and_subdirs in - Digest.generic (List.concat all_digests) + Digest.generic (all_digests "ignored" contents) ;; exception Short_circuit let map_with_errors - { root; files; dirs } + { root; contents } ~all_errors ~(f : Path.Build.t -> 'a -> ('b, 'e) result) + ~(d : Path.Build.t -> 'a option -> ('b option, 'e) result) = let errors = ref [] in let f path a = @@ -401,32 +711,47 @@ module Produced = struct errors := (path, e) :: !errors; if all_errors then None else raise_notrace Short_circuit in + let rec aux path { files; subdirs; metadata } = + (if debug_consume + then + let open Pp.O in + Pp.to_fmt + Format.std_formatter + (Pp.paragraphf "[Map/w/E %S]" (Path.Build.to_string path) ++ Pp.space)); + let files = + Filename.Map.filter_mapi files ~f:(fun file -> f (Path.Build.relative path file)) + in + let subdirs = + Filename.Map.mapi subdirs ~f:(fun dir subdirs_contents -> + let dir = Path.Build.relative path dir in + aux dir subdirs_contents) + in + let metadata = + match d path metadata with + | Ok s -> s + | Error e -> + errors := (path, e) :: !errors; + if all_errors then None else raise_notrace Short_circuit + in + { files; subdirs; metadata } + in let result = - try - let files = - Filename.Map.filter_mapi files ~f:(fun file -> - f (Path.Build.relative root file)) - in - let dirs = - Path.Local.Map.mapi dirs ~f:(fun dir -> - let dir = 
Path.Build.append_local root dir in - Filename.Map.filter_mapi ~f:(fun filename -> - f (Path.Build.relative dir filename))) - in - { root; files; dirs } - with - | Short_circuit -> { root; files = Filename.Map.empty; dirs = Path.Local.Map.empty } + try { root; contents = aux root contents } with + | Short_circuit -> { root; contents = empty } in match Nonempty_list.of_list !errors with | None -> Ok result | Some list -> Error list ;; - let to_dyn { root; files; dirs } = - Dyn.record - [ "root", Path.Build.to_dyn root - ; "files", Filename.Map.to_dyn Dyn.opaque files - ; "dirs", Path.Local.Map.to_dyn (Filename.Map.to_dyn Dyn.opaque) dirs - ] + let to_dyn { root; contents } = + let rec aux { files; subdirs; metadata } = + Dyn.record + [ "metadata", Dyn.opaque metadata + ; "files", Filename.Map.to_dyn Dyn.opaque files + ; "dirs", Filename.Map.to_dyn aux subdirs + ] + in + Dyn.record [ "root", Path.Build.to_dyn root; "contents", aux contents ] ;; end diff --git a/src/dune_targets/dune_targets.mli b/src/dune_targets/dune_targets.mli index 5b024d1decf..e345304a2e9 100644 --- a/src/dune_targets/dune_targets.mli +++ b/src/dune_targets/dune_targets.mli @@ -40,6 +40,7 @@ module Validated : sig ; dirs : Filename.Set.t } + val pp : t -> _ Pp.t val iter : t -> file:(Path.Build.t -> unit) -> dir:(Path.Build.t -> unit) -> unit val fold @@ -79,12 +80,26 @@ val all : t -> Path.Build.t list (** The set of targets produced by an action. Each target may be tagged with a payload, for example, the target's digest. *) module Produced : sig + (** All file names and directory names are relative to the root (['a t]). *) + type 'a dir_contents = private + { metadata : 'a option + ; files : 'a Filename.Map.t (* mapping file name -> 'a *) + ; subdirs : + 'a dir_contents Filename.Map.t (* mapping directory name -> 'a dir_contents *) + } + + val pp_dir_conts : ?payload_printer:('a -> 'b Pp.t) -> 'a dir_contents -> 'b Pp.t + type 'a t = private - { root : Path.Build.t (** [files] and [dirs] are relative to [root] *) - ; files : 'a Filename.Map.t - ; dirs : 'a Filename.Map.t Path.Local.Map.t + { root : Path.Build.t + ; contents : 'a dir_contents } + val equal : 'a t -> 'a t -> equal:('a -> 'a -> bool) -> bool + val merge : 'a t -> 'a t -> 'a t + val pp : ?payload_printer:('a -> 'b Pp.t) -> 'a t -> 'b Pp.t + val debug_out : bool + module Error : sig type t @@ -97,26 +112,59 @@ module Produced : sig val of_validated : Validated.t -> (unit t, Error.t) result (** Construct from a set of files in the root directory. *) - val of_files : Path.Build.t -> 'a Path.Local.Map.t -> 'a t + val of_files : Path.Build.t -> ('a * bool) Path.Local.Map.t -> 'a t - (** Union of [t.files] and all files in [t.dirs] as [Seq.t] for efficient traversal. + (** Union of all files in [t] and any [subdirs] as [Seq.t] for efficient traversal. The resulting [Path.Local.t]s are relative to [t.root]. *) val all_files_seq : 'a t -> (Path.Local.t * 'a) Seq.t + (** Union of all dirs and subdirs in [t] as [Seq.t], in depth-first order. *) + val all_dirs_seq : 'a t -> (Path.Local.t * 'a dir_contents) Seq.t + (** Check if a file is present in the targets. *) val mem : 'a t -> Path.Build.t -> bool + (* Check if a directory is present in the targets. *) + val mem_dir : 'a t -> Path.Build.t -> bool + (** Find the value associated with the file, if any. *) val find : 'a t -> Path.Build.t -> 'a option (** Find all files in a directory target or a subdirectory. 
*) val find_dir : 'a t -> Path.Build.t -> 'a Filename.Map.t option - val equal : 'a t -> 'a t -> equal:('a -> 'a -> bool) -> bool + (* type 'a target_kind = + | File_target of 'a + | Dir_target of 'a option * 'a t + + val target_kind_equal : Digest.t target_kind -> Digest.t target_kind -> bool *) + + (* val find_any : 'a t -> Path.Build.t -> 'a target_kind option *) val exists : 'a t -> f:('a -> bool) -> bool - val foldi : 'a t -> init:'acc -> f:(Path.Local.t -> 'a -> 'acc -> 'acc) -> 'acc - val iteri : 'a t -> f:(Path.Local.t -> 'a -> unit) -> unit - val parallel_map : 'a t -> f:(Path.Local.t -> 'a -> 'b Fiber.t) -> 'b t Fiber.t + + val foldi + : 'a t + -> init:'acc + -> f:(Path.Local.t -> is_file:bool -> 'a -> 'acc -> 'acc) + -> 'acc + + val iter_files : 'a t -> f:(Path.Local.t -> 'a -> unit) -> unit + val iter_dirs : 'a t -> f:(Path.Local.t -> 'a option -> unit) -> unit + + (** Iterate on all [f]iles & [d]irs in the targets, in depth-first order. + Will hit [dirA/fileA] before [dirA/dirB] before [dirA/dirB/fileB]. + All [Path.Local.t]s are relative to [t.root]. *) + val iteri + : 'a t + -> f:(Path.Local.t -> 'a -> unit) + -> d:(Path.Local.t -> 'a option -> unit) + -> unit + + val parallel_map + : 'a t + -> f:(Path.Local.t -> 'a -> 'b Fiber.t) + -> d:(Path.Local.t -> 'a option -> 'b option Fiber.t) + -> 'b t Fiber.t (** Aggregate all content digests. *) val digest : Digest.t t -> Digest.t @@ -125,6 +173,7 @@ module Produced : sig : 'a t -> all_errors:bool -> f:(Path.Build.t -> 'a -> ('b, 'e) result) + -> d:(Path.Build.t -> 'a option -> ('b option, 'e) result) -> ('b t, (Path.Build.t * 'e) Nonempty_list.t) result val to_dyn : _ t -> Dyn.t diff --git a/src/fs/fs.ml b/src/fs/fs.ml index 46bd1d4faa9..b1ae365602a 100644 --- a/src/fs/fs.ml +++ b/src/fs/fs.ml @@ -16,18 +16,18 @@ let dir_contents (dir : Path.t) = >>| Result.map ~f:(fun contents -> Fs_cache.Dir_contents.to_list contents |> List.map ~f:fst) | `Inside _ -> - let* () = Build_system.build_file dir in + let* () = Build_system.build_dir dir in Memo.return (Path.readdir_unsorted dir) ;; -let exists file kind = - Build_system.file_exists file +let exists path kind = + Build_system.path_exists path >>= function | false -> Memo.return false | true -> - let+ () = Build_system.build_file file in - (match Path.stat file with - | Ok { st_kind; _ } when kind = st_kind -> true + let+ () = Build_system.build_file path in + (match Path.stat path with + | Ok { st_kind; _ } -> kind = st_kind | _ -> false) ;; @@ -45,7 +45,8 @@ let dir_exists dir = | `Inside _ -> (* CR-rgrinberg: unfortunately, [Build_system.file_exists] always returns false for directories. *) - Memo.return true + (* CR-ElectreAAS: sike! [path_exists] now takes both into account! *) + exists dir Unix.S_DIR ;; let with_lexbuf_from_file file ~f = diff --git a/test/blackbox-tests/test-cases/directory-targets/subdirs-only.t b/test/blackbox-tests/test-cases/directory-targets/subdirs-only.t new file mode 100644 index 00000000000..0caf52f6481 --- /dev/null +++ b/test/blackbox-tests/test-cases/directory-targets/subdirs-only.t @@ -0,0 +1,23 @@ +We test that a directory target with only other subdirs can be +properly promoted. 
+
+  $ cat > dune-project <<EOF
+  > (lang dune 3.16)
+  > (using directory-targets 0.1)
+  > EOF
+
+  $ cat > dune <<EOF
+  > (rule
+  >  (targets (dir foo))
+  >  (mode (promote (until-clean)))
+  >  (action
+  >   (progn
+  >    (run mkdir -p foo/bar)
+  >    (run touch foo/bar/file1)
+  >    (run mkdir -p foo/bar/baz/qux)
+  >    (run touch foo/bar/baz/qux/file2))))
+  > EOF
+
+  $ dune build foo
+  $ ls foo/bar/baz/qux
+  file2
diff --git a/test/blackbox-tests/test-cases/dune-cache/empty-dir.t b/test/blackbox-tests/test-cases/dune-cache/empty-dir.t
index 743b8020638..c2668805d80 100644
--- a/test/blackbox-tests/test-cases/dune-cache/empty-dir.t
+++ b/test/blackbox-tests/test-cases/dune-cache/empty-dir.t
@@ -19,16 +19,23 @@ Check the cache restores empty directories
 
 Build an empty directory.
 
-  $ dune build output
-  $ find _build/default/output | sort
-  _build/default/output
-  _build/default/output/child
-  _build/default/output/file
+  $ OCAMLRUNPARAM=b dune build output
+  $ tree -F _build/default
+  _build/default/
+  `-- output/
+      |-- child/
+      `-- file
+
+  2 directories, 1 file
 
 Restore it from cache.
 
   $ rm -rf _build
   $ dune build output
-  $ find _build/default/output | sort
-  _build/default/output
-  _build/default/output/file
+  $ tree -F _build/default
+  _build/default/
+  `-- output/
+      |-- child/
+      `-- file
+
+  2 directories, 1 file
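
Note (not part of the patch): as a rough illustration of the representation this patch moves to, below is a minimal standalone OCaml sketch of the nested directory-contents trie and the depth-first traversal with separate file/directory callbacks. It uses Stdlib's Map.Make (String) and Filename.concat in place of Dune's Filename.Map and Path.Local, so every name and the tiny example are illustrative assumptions, not Dune's actual code.

(* Minimal sketch of the payload-carrying trie introduced in
   [Dune_targets.Produced]: one node per directory. *)
module String_map = Map.Make (String)

type 'a dir_contents =
  { metadata : 'a option                    (* payload attached to the directory itself *)
  ; files : 'a String_map.t                 (* file name -> payload *)
  ; subdirs : 'a dir_contents String_map.t  (* directory name -> nested contents *)
  }

let empty = { metadata = None; files = String_map.empty; subdirs = String_map.empty }

(* Depth-first traversal: [d] runs on every directory (with its optional
   metadata), [f] on every file, mirroring the [~f]/[~d] pair added to
   [Targets.Produced.iteri]. Paths are accumulated relative to the root. *)
let rec iteri path { metadata; files; subdirs } ~f ~d =
  d path metadata;
  String_map.iter (fun name payload -> f (Filename.concat path name) payload) files;
  String_map.iter
    (fun name contents -> iteri (Filename.concat path name) contents ~f ~d)
    subdirs

(* Usage example: a root with one file and one empty subdirectory, the same
   shape as the [empty-dir.t] test above. Prints the directory before its
   contents, so [dir .], [file ./file], [dir ./child]. *)
let () =
  let t =
    { empty with
      files = String_map.singleton "file" ()
    ; subdirs = String_map.singleton "child" empty
    }
  in
  iteri "." t
    ~f:(fun p () -> Printf.printf "file %s\n" p)
    ~d:(fun p _meta -> Printf.printf "dir  %s\n" p)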