diff --git a/CHANGELOG.md b/CHANGELOG.md index f84f50d8..b804d964 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added +- (cli) Added support for `add-feature` and `remove-feature` types to file import group modifiers. `add-feature` will add all files determined by evaluation of a named feature flag exposed by the import group's referenced pallet, while `remove-feature` will remove those files. Pallet feature flags are constructed with the same file schema as file import groups, but are located in the pallet's `/features` directory and have a `.feature.yml` file extension instead. +- (cli) Added a `[dev] plt ls-feat` command to list feature flags exposed by the local/development pallet. +- (cli) Added a `[dev] plt show-feat` command to show the specified feature exposed by the local/development pallet, including any deprecation notices of deprecated features referenced directly or indirectly by this feature. +- (cli) Added a `[dev] plt ls-plt-feat` command to list feature flags exposed by the specified pallet required by the local/development pallet. +- (cli) Added a `[dev] plt show-plt-feat` command to show the specified feature exposed by the specified pallet required by the local/development pallet, including any deprecation notices of deprecated features referenced directly or indirectly by this feature. - (cli) Added a `[dev] plt ls-plt-file` command to list files in the specified pallet required by the local/development pallet, including files imported by that required pallet from its own required pallets. - (cli) Added a `[dev] plt locate-plt-file` command to print the actual filesystem path of the specified file in the specified pallet required by the local/development pallet. - (cli) Added a `[dev] plt show-plt-file` command to print the contents of the specified file in the specified pallet required by the local/development pallet. @@ -17,7 +22,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - (Breaking change; cli) Removed some aliases for `[dev] plt add-plt` and `[dev] plt add-repo` which should not have been added, because they were constructed as a combination of an abbrebiation and an unabbreviated word. +- (Breaking change; cli) Now, by default `[dev] plt ls-file` and `[dev] plt ls-plt-file` don't list files in hidden directories (i.e. directories whose names start with `.`) at the root of the pallet. To list all files including those in hidden directories, you should now specify `**` as the file path glob (e.g. by running `[dev] plt ls-file '**'` or `[dev] plt ls-plt-file required_pallet_path '**'`). - (cli) Suppressed some noisy Git cloning output in `[dev] plt cache-plt`, `[dev] plt cache-all`, and other related commands. +- (cli) `[dev] plt show-imp` now shows any deprecation notices of deprecated features referenced directly or indirectly by the specified import group.
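To make the new modifier types concrete, here is a minimal sketch of a pallet feature flag and of a file import group that references it; every pallet path, feature name, and file path below is hypothetical, and the fields follow the shared `ImportDef`/`ImportModifier` schema shown in the `internal/app/forklift/pallets-models.go` changes further down:

  # features/echo.feature.yml, in the pallet exposing the feature (hypothetical names/paths):
  description: Files needed to run the example echo service
  modifiers:
    - description: Add the echo service's deployment files
      type: add
      target: /deployments/echo

  # A file import group in a pallet that requires the pallet above (hypothetical); it uses the
  # same schema, but its `add-feature`/`remove-feature` modifiers name features exposed by the
  # import group's referenced pallet rather than listing file paths directly:
  description: Import the echo service via its feature flag
  modifiers:
    - description: Add every file flagged by the echo feature
      type: add-feature
      source: echo
    - description: Drop the files flagged by the (hypothetical) echo-debug feature
      type: remove-feature
      source: echo-debug

With declarations like these, `[dev] plt show-feat echo` in the exposing pallet and `[dev] plt show-plt-feat <required_pallet_path> echo` in the importing pallet would describe the feature, per the commands added above.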
### Fixed diff --git a/cmd/forklift/dev/plt/cli.go b/cmd/forklift/dev/plt/cli.go index 1709f0b9..aeb4ccb3 100644 --- a/cmd/forklift/dev/plt/cli.go +++ b/cmd/forklift/dev/plt/cli.go @@ -26,14 +26,14 @@ func MakeCmd(versions Versions) *cli.Command { "directory", Flags: []cli.Flag{ &cli.StringSliceFlag{ - Name: "repos", - Aliases: []string{"repositories"}, + Name: "repo", + Aliases: []string{"repos", "repository", "repositories"}, Usage: "Replaces version-locked required repos from the cache with the corresponding " + "repos in the specified directory paths", }, &cli.StringSliceFlag{ - Name: "plts", - Aliases: []string{"pallets"}, + Name: "plt", + Aliases: []string{"plts", "pallet", "pallets"}, Usage: "Replaces version-locked required pallets from the cache with the corresponding " + "pallets in the specified directory paths", }, @@ -153,6 +153,7 @@ func makeQuerySubcmds() []*cli.Command { makeQueryFileSubcmds(category), makeQueryPkgSubcmds(category), makeQueryDeplSubcmds(category), + makeQueryFeatSubcmds(category), []*cli.Command{ { Name: "ls-dl", @@ -204,6 +205,7 @@ func makeQueryReqSubcmds(category string) []*cli.Command { }, }, makeQueryPltFileSubcmds(category), + makeQueryPltFeatSubcmds(category), []*cli.Command{ { Name: "ls-repo", @@ -265,6 +267,29 @@ func makeQueryPltFileSubcmds(category string) []*cli.Command { } } +func makeQueryPltFeatSubcmds(category string) []*cli.Command { + return []*cli.Command{ + { + Name: "ls-plt-feat", + Aliases: []string{"list-pallet-features"}, + Category: category, + Usage: "Lists feature flags exposed by the specified pallet which the development pallet " + + "may import files from", + ArgsUsage: "pallet_path", + Action: lsPltFeatAction, + }, + { + Name: "show-plt-feat", + Aliases: []string{"show-pallet-feature"}, + Category: category, + Usage: "Prints the specified feature exposed by the specified pallet which the development " + + "pallet may import files from", + ArgsUsage: "pallet_path feature_name", + Action: showPltFeatAction, + }, + } +} + func makeQueryImportSubcmds(category string) []*cli.Command { return []*cli.Command{ { @@ -376,6 +401,28 @@ func makeQueryDeplSubcmds(category string) []*cli.Command { } } +func makeQueryFeatSubcmds(category string) []*cli.Command { + return []*cli.Command{ + { + Name: "ls-feat", + Aliases: []string{"list-features"}, + Category: category, + Usage: "Lists the feature flags exposed by the development pallet for other pallets " + + "to import", + Action: lsFeatAction, + }, + { + Name: "show-feat", + Aliases: []string{"show-feature"}, + Category: category, + Usage: "Describes a feature exposed by the development pallet for other pallets " + + "to import", + ArgsUsage: "feature_name", + Action: showFeatAction, + }, + } +} + func makeModifySubcmds(versions Versions) []*cli.Command { return slices.Concat( makeModifyFileSubcmds(), @@ -542,8 +589,9 @@ func makeModifyDeplSubcmds( //nolint:funlen // this is already decomposed; it's Flags: slices.Concat( []cli.Flag{ &cli.StringSliceFlag{ - Name: "feature", - Usage: "Enable the specified feature flag in the package deployment", + Name: "feat", + Aliases: []string{"feature", "features"}, + Usage: "Enable the specified feature in the package deployment", }, &cli.BoolFlag{ Name: "disabled", @@ -582,7 +630,7 @@ func makeModifyDeplSubcmds( //nolint:funlen // this is already decomposed; it's &cli.BoolFlag{ Name: "force", Usage: "Use the specified package path even if it cannot be resolved or makes the " + - "enabled feature flags invalid", + "enabled package features invalid", }, 
}, baseFlags, @@ -605,7 +653,7 @@ func makeModifyDeplSubcmds( //nolint:funlen // this is already decomposed; it's []cli.Flag{ &cli.BoolFlag{ Name: "force", - Usage: "Enable the specified feature flags even if they're not allowed by the " + + Usage: "Enable the specified package features even if they're not allowed by the " + "deployment's package", }, }, diff --git a/cmd/forklift/dev/plt/deployments.go b/cmd/forklift/dev/plt/deployments.go index 84201353..3b7beacf 100644 --- a/cmd/forklift/dev/plt/deployments.go +++ b/cmd/forklift/dev/plt/deployments.go @@ -74,7 +74,7 @@ func addDeplAction(versions Versions) cli.ActionFunc { deplName := c.Args().Slice()[0] pkgPath := c.Args().Slice()[1] if err = fcli.AddDepl( - 0, plt, caches.r, deplName, pkgPath, c.StringSlice("feature"), c.Bool("disabled"), + 0, plt, caches.r, deplName, pkgPath, c.StringSlice("feat"), c.Bool("disabled"), c.Bool("force"), ); err != nil { return err diff --git a/cmd/forklift/dev/plt/features.go b/cmd/forklift/dev/plt/features.go new file mode 100644 index 00000000..4b83d670 --- /dev/null +++ b/cmd/forklift/dev/plt/features.go @@ -0,0 +1,37 @@ +package plt + +import ( + "github.com/urfave/cli/v2" + + fcli "github.com/PlanktoScope/forklift/internal/app/forklift/cli" +) + +// ls-feat + +func lsFeatAction(c *cli.Context) error { + plt, _, err := processFullBaseArgs(c, processingOptions{ + requirePalletCache: true, + enableOverrides: true, + merge: true, + }) + if err != nil { + return err + } + + return fcli.PrintPalletFeatures(0, plt) +} + +// show-feat + +func showFeatAction(c *cli.Context) error { + plt, caches, err := processFullBaseArgs(c, processingOptions{ + requirePalletCache: true, + enableOverrides: true, + merge: true, + }) + if err != nil { + return err + } + + return fcli.PrintFeatureInfo(0, plt, caches.p, c.Args().First()) +} diff --git a/cmd/forklift/dev/plt/files.go b/cmd/forklift/dev/plt/files.go index 2fff297b..d2f92108 100644 --- a/cmd/forklift/dev/plt/files.go +++ b/cmd/forklift/dev/plt/files.go @@ -19,7 +19,12 @@ func lsFileAction(c *cli.Context) error { return err } - paths, err := fcli.ListPalletFiles(plt, c.Args().First()) + filter := c.Args().First() + if filter == "" { + // Exclude hidden directories such as `.git` + filter = "{*,[^.]*/**}" + } + paths, err := fcli.ListPalletFiles(plt, filter) if err != nil { return err } diff --git a/cmd/forklift/dev/plt/pallets.go b/cmd/forklift/dev/plt/pallets.go index 939c9b1d..54715166 100644 --- a/cmd/forklift/dev/plt/pallets.go +++ b/cmd/forklift/dev/plt/pallets.go @@ -50,7 +50,7 @@ func processFullBaseArgs( } if opts.enableOverrides { if caches.p, err = overlayPalletCacheOverrides( - caches.p.Underlay, c.StringSlice("plts"), plt, + caches.p.Underlay, c.StringSlice("plt"), plt, ); err != nil { return nil, workspaceCaches{}, err } @@ -67,7 +67,7 @@ func processFullBaseArgs( } if opts.enableOverrides { if caches.r, err = overlayRepoCacheOverrides( - caches.r, c.StringSlice("repos"), plt, + caches.r, c.StringSlice("repo"), plt, ); err != nil { return nil, workspaceCaches{}, err } @@ -530,7 +530,12 @@ func lsPltFileAction(c *cli.Context) error { if err != nil { return nil } - paths, err := fcli.ListPalletFiles(plt, c.Args().Get(1)) + filter := c.Args().Get(1) + if filter == "" { + // Exclude hidden directories such as `.git` + filter = "{*,[^.]*/**}" + } + paths, err := fcli.ListPalletFiles(plt, filter) if err != nil { return err } @@ -578,3 +583,37 @@ func showPltFileAction(c *cli.Context) error { } return fcli.PrintFile(plt, c.Args().Get(1)) } + +// ls-plt-feat + 
+func lsPltFeatAction(c *cli.Context) error { + plt, caches, err := processFullBaseArgs(c, processingOptions{ + enableOverrides: true, + }) + if err != nil { + return err + } + + plt, err = fcli.GetRequiredPallet(plt, caches.p, c.Args().First()) + if err != nil { + return nil + } + return fcli.PrintPalletFeatures(0, plt) +} + +// show-plt-feat + +func showPltFeatAction(c *cli.Context) error { + plt, caches, err := processFullBaseArgs(c, processingOptions{ + enableOverrides: true, + }) + if err != nil { + return err + } + + plt, err = fcli.GetRequiredPallet(plt, caches.p, c.Args().First()) + if err != nil { + return nil + } + return fcli.PrintFeatureInfo(0, plt, caches.p, c.Args().Get(1)) +} diff --git a/cmd/forklift/plt/cli.go b/cmd/forklift/plt/cli.go index 3779f9d0..bb9c0c1f 100644 --- a/cmd/forklift/plt/cli.go +++ b/cmd/forklift/plt/cli.go @@ -220,6 +220,7 @@ func makeQuerySubcmds() []*cli.Command { makeQueryFileSubcmds(category), makeQueryPkgSubcmds(category), makeQueryDeplSubcmds(category), + makeQueryFeatSubcmds(category), []*cli.Command{ { Name: "ls-dl", @@ -271,6 +272,7 @@ func makeQueryReqSubcmds(category string) []*cli.Command { }, }, makeQueryPltFileSubcmds(category), + makeQueryPltFeatSubcmds(category), []*cli.Command{ { Name: "ls-repo", @@ -307,7 +309,7 @@ func makeQueryPltFileSubcmds(category string) []*cli.Command { Category: category, Usage: "Lists non-directory files in the specified pallet which the local pallet may " + "import files from", - ArgsUsage: "[path_glob]", + ArgsUsage: "pallet_path [path_glob]", Action: lsPltFileAction, }, { @@ -316,7 +318,7 @@ func makeQueryPltFileSubcmds(category string) []*cli.Command { Category: category, Usage: "Prints the absolute filesystem path of the specified file in the specified " + "pallet which the local pallet may import files from", - ArgsUsage: "file_path", + ArgsUsage: "pallet_path file_path", Action: locatePltFileAction, }, { @@ -325,12 +327,35 @@ func makeQueryPltFileSubcmds(category string) []*cli.Command { Category: category, Usage: "Prints the specified file in the specified pallet which the local pallet may " + "import files from", - ArgsUsage: "file_path", + ArgsUsage: "pallet_path file_path", Action: showPltFileAction, }, } } +func makeQueryPltFeatSubcmds(category string) []*cli.Command { + return []*cli.Command{ + { + Name: "ls-plt-feat", + Aliases: []string{"list-pallet-features"}, + Category: category, + Usage: "Lists features flags exposed by the specified pallet which the local pallet may " + + "import files from", + ArgsUsage: "pallet_path", + Action: lsPltFeatAction, + }, + { + Name: "show-plt-feat", + Aliases: []string{"show-pallet-feature"}, + Category: category, + Usage: "Prints the specified feature exposed by the specified pallet which the local " + + "pallet may import files from", + ArgsUsage: "pallet_path feature_name", + Action: showPltFeatAction, + }, + } +} + func makeQueryImportSubcmds(category string) []*cli.Command { return []*cli.Command{ { @@ -440,6 +465,26 @@ func makeQueryDeplSubcmds(category string) []*cli.Command { } } +func makeQueryFeatSubcmds(category string) []*cli.Command { + return []*cli.Command{ + { + Name: "ls-feat", + Aliases: []string{"list-features"}, + Category: category, + Usage: "Lists the feature flags exposed by the local pallet for other pallets to import", + Action: lsFeatAction, + }, + { + Name: "show-feat", + Aliases: []string{"show-feature"}, + Category: category, + Usage: "Describes a feature exposed by the local pallet for other pallets to import", + ArgsUsage: 
"feature_name", + Action: showFeatAction, + }, + } +} + func makeModifySubcmds(versions Versions) []*cli.Command { const category = "Modify the pallet" return slices.Concat( @@ -686,8 +731,9 @@ func makeModifyDeplSubcmds( //nolint:funlen // this is already decomposed; it's Flags: slices.Concat( []cli.Flag{ &cli.StringSliceFlag{ - Name: "feature", - Usage: "Enable the specified feature flag in the package deployment", + Name: "feat", + Aliases: []string{"feature", "features"}, + Usage: "Enable the specified feature in the package deployment", }, &cli.BoolFlag{ Name: "disabled", @@ -726,7 +772,7 @@ func makeModifyDeplSubcmds( //nolint:funlen // this is already decomposed; it's &cli.BoolFlag{ Name: "force", Usage: "Use the specified package path even if it cannot be resolved or makes the " + - "enabled feature flags invalid", + "enabled package features invalid", }, }, modifyDeplBaseFlags, @@ -749,7 +795,7 @@ func makeModifyDeplSubcmds( //nolint:funlen // this is already decomposed; it's []cli.Flag{ &cli.BoolFlag{ Name: "force", - Usage: "Enable the specified feature flags even if they're not allowed by the " + + Usage: "Enable the specified package features even if they're not allowed by the " + "deployment's package", }, }, diff --git a/cmd/forklift/plt/deployments.go b/cmd/forklift/plt/deployments.go index a430df44..e3935681 100644 --- a/cmd/forklift/plt/deployments.go +++ b/cmd/forklift/plt/deployments.go @@ -70,7 +70,7 @@ func addDeplAction(versions Versions) cli.ActionFunc { deplName := c.Args().Slice()[0] pkgPath := c.Args().Slice()[1] if err = fcli.AddDepl( - 0, plt, caches.r, deplName, pkgPath, c.StringSlice("feature"), c.Bool("disabled"), + 0, plt, caches.r, deplName, pkgPath, c.StringSlice("feat"), c.Bool("disabled"), c.Bool("force"), ); err != nil { return err diff --git a/cmd/forklift/plt/features.go b/cmd/forklift/plt/features.go new file mode 100644 index 00000000..735b73f4 --- /dev/null +++ b/cmd/forklift/plt/features.go @@ -0,0 +1,33 @@ +package plt + +import ( + "github.com/urfave/cli/v2" + + fcli "github.com/PlanktoScope/forklift/internal/app/forklift/cli" +) + +// ls-feat + +func lsFeatAction(c *cli.Context) error { + plt, _, err := processFullBaseArgs(c.String("workspace"), processingOptions{ + merge: true, + }) + if err != nil { + return err + } + + return fcli.PrintPalletFeatures(0, plt) +} + +// show-feat + +func showFeatAction(c *cli.Context) error { + plt, caches, err := processFullBaseArgs(c.String("workspace"), processingOptions{ + merge: true, + }) + if err != nil { + return err + } + + return fcli.PrintFeatureInfo(0, plt, caches.p, c.Args().First()) +} diff --git a/cmd/forklift/plt/files.go b/cmd/forklift/plt/files.go index 6e137815..ed0c708e 100644 --- a/cmd/forklift/plt/files.go +++ b/cmd/forklift/plt/files.go @@ -18,7 +18,12 @@ func lsFileAction(c *cli.Context) error { return err } - paths, err := fcli.ListPalletFiles(plt, c.Args().First()) + filter := c.Args().First() + if filter == "" { + // Exclude hidden directories such as `.git` + filter = "{*,[^.]*/**}" + } + paths, err := fcli.ListPalletFiles(plt, filter) if err != nil { return err } diff --git a/cmd/forklift/plt/pallets.go b/cmd/forklift/plt/pallets.go index ad40a466..ca9e544c 100644 --- a/cmd/forklift/plt/pallets.go +++ b/cmd/forklift/plt/pallets.go @@ -1039,7 +1039,12 @@ func lsPltFileAction(c *cli.Context) error { if err != nil { return nil } - paths, err := fcli.ListPalletFiles(plt, c.Args().Get(1)) + filter := c.Args().Get(1) + if filter == "" { + // Exclude hidden directories such as `.git` + 
filter = "{*,[^.]*/**}" + } + paths, err := fcli.ListPalletFiles(plt, filter) if err != nil { return err } @@ -1083,3 +1088,33 @@ func showPltFileAction(c *cli.Context) error { } return fcli.PrintFile(plt, c.Args().Get(1)) } + +// ls-plt-feat + +func lsPltFeatAction(c *cli.Context) error { + plt, caches, err := processFullBaseArgs(c.String("workspace"), processingOptions{}) + if err != nil { + return err + } + + plt, err = fcli.GetRequiredPallet(plt, caches.p, c.Args().First()) + if err != nil { + return nil + } + return fcli.PrintPalletFeatures(0, plt) +} + +// show-plt-feat + +func showPltFeatAction(c *cli.Context) error { + plt, caches, err := processFullBaseArgs(c.String("workspace"), processingOptions{}) + if err != nil { + return err + } + + plt, err = fcli.GetRequiredPallet(plt, caches.p, c.Args().First()) + if err != nil { + return nil + } + return fcli.PrintFeatureInfo(0, plt, caches.p, c.Args().Get(1)) +} diff --git a/internal/app/forklift/caching-pallets.go b/internal/app/forklift/caching-pallets.go index 163a083e..4e85fb86 100644 --- a/internal/app/forklift/caching-pallets.go +++ b/internal/app/forklift/caching-pallets.go @@ -35,21 +35,21 @@ func (c *FSPalletCache) Path() string { // LoadFSPallet loads the FSPallet with the specified path and version. // The loaded FSPallet instance is fully initialized. -func (c *FSPalletCache) LoadFSPallet(repoPath string, version string) (*FSPallet, error) { +func (c *FSPalletCache) LoadFSPallet(pltPath string, version string) (*FSPallet, error) { if c == nil { return nil, errors.New("cache is nil") } - repo, err := LoadFSPallet(c.FS, fmt.Sprintf("%s@%s", repoPath, version)) + plt, err := LoadFSPallet(c.FS, fmt.Sprintf("%s@%s", pltPath, version)) if err != nil { return nil, err } - repo.Version = version - return repo, nil + plt.Version = version + return plt, nil } // LoadFSPallets loads all FSPallets from the cache matching the specified search pattern. -// The search pattern should be a [doublestar] pattern, such as `**`, matching repo directories to +// The search pattern should be a [doublestar] pattern, such as `**`, matching pallet directories to // search for. // The loaded FSPallet instances are fully initialized. 
func (c *FSPalletCache) LoadFSPallets(searchPattern string) ([]*FSPallet, error) { @@ -57,30 +57,30 @@ func (c *FSPalletCache) LoadFSPallets(searchPattern string) ([]*FSPallet, error) return nil, nil } - repos, err := LoadFSPallets(c.FS, searchPattern) + plts, err := LoadFSPallets(c.FS, searchPattern) if err != nil { - return nil, errors.Wrap(err, "couldn't load repos from cache") + return nil, errors.Wrap(err, "couldn't load pallets from cache") } - // set the Version field of the repo based on its path in the cache - for _, repo := range repos { - var repoPath string + // set the Version field of the pallet based on its path in the cache + for _, plt := range plts { + var pltPath string var ok bool - if repoPath, repo.Version, ok = strings.Cut(core.GetSubdirPath(c, repo.FS.Path()), "@"); !ok { + if pltPath, plt.Version, ok = strings.Cut(core.GetSubdirPath(c, plt.FS.Path()), "@"); !ok { return nil, errors.Wrapf( - err, "couldn't parse path of cached repo configured at %s as repo_path@version", - repo.FS.Path(), + err, "couldn't parse path of cached pallet configured at %s as pallet_path@version", + plt.FS.Path(), ) } - if repoPath != repo.Path() { + if pltPath != plt.Path() { return nil, errors.Errorf( - "cached repo %s is in cache at %s@%s instead of %s@%s", - repo.Path(), repoPath, repo.Version, repo.Path(), repo.Version, + "cached pallet %s is in cache at %s@%s instead of %s@%s", + plt.Path(), pltPath, plt.Version, plt.Path(), plt.Version, ) } } - return repos, nil + return plts, nil } // LayeredPalletCache @@ -92,27 +92,27 @@ func (c *LayeredPalletCache) Path() string { // LoadFSPallet loads the FSPallet with the specified path and version. // The loaded FSPallet instance is fully initialized. -// If the overlay cache expects to have the repo, it will attempt to load the repo; otherwise, -// the underlay cache will attempt to load the repo. -func (c *LayeredPalletCache) LoadFSPallet(repoPath string, version string) (*FSPallet, error) { +// If the overlay cache expects to have the pallet, it will attempt to load the pallet; otherwise, +// the underlay cache will attempt to load the pallet. +func (c *LayeredPalletCache) LoadFSPallet(pltPath string, version string) (*FSPallet, error) { if c == nil { return nil, errors.New("cache is nil") } - if c.Overlay != nil && c.Overlay.IncludesFSPallet(repoPath, version) { - repo, err := c.Overlay.LoadFSPallet(repoPath, version) - return repo, errors.Wrap(err, "couldn't load repo from overlay") + if c.Overlay != nil && c.Overlay.IncludesFSPallet(pltPath, version) { + plt, err := c.Overlay.LoadFSPallet(pltPath, version) + return plt, errors.Wrap(err, "couldn't load pallet from overlay") } - repo, err := c.Underlay.LoadFSPallet(repoPath, version) - return repo, errors.Wrap(err, "couldn't load repo from underlay") + plt, err := c.Underlay.LoadFSPallet(pltPath, version) + return plt, errors.Wrap(err, "couldn't load pallet from underlay") } // LoadFSPallets loads all FSPallets from the cache matching the specified search pattern. -// The search pattern should be a [doublestar] pattern, such as `**`, matching repo directories to +// The search pattern should be a [doublestar] pattern, such as `**`, matching pallet directories to // search for. // The loaded FSPallet instances are fully initialized. 
-// All matching repos from the overlay cache will be included; all matching repos from the -// underlay cache will also be included, except for those repos which the overlay cache expected +// All matching pallets from the overlay cache will be included; all matching pallets from the +// underlay cache will also be included, except for those pallets which the overlay cache expected // to have. func (c *LayeredPalletCache) LoadFSPallets(searchPattern string) ([]*FSPallet, error) { if c == nil { @@ -121,18 +121,18 @@ func (c *LayeredPalletCache) LoadFSPallets(searchPattern string) ([]*FSPallet, e loadedPallets, err := c.Overlay.LoadFSPallets(searchPattern) if err != nil { - return nil, errors.Wrap(err, "couldn't load repos from overlay") + return nil, errors.Wrap(err, "couldn't load pallets from overlay") } underlayPallets, err := c.Underlay.LoadFSPallets(searchPattern) if err != nil { - return nil, errors.Wrap(err, "couldn't load repos from underlay") + return nil, errors.Wrap(err, "couldn't load pallets from underlay") } - for _, repo := range underlayPallets { - if c.Overlay.IncludesFSPallet(repo.Path(), repo.Version) { + for _, pallet := range underlayPallets { + if c.Overlay.IncludesFSPallet(pallet.Path(), pallet.Version) { continue } - loadedPallets = append(loadedPallets, repo) + loadedPallets = append(loadedPallets, pallet) } sort.Slice(loadedPallets, func(i, j int) bool { diff --git a/internal/app/forklift/cli/features-printing.go b/internal/app/forklift/cli/features-printing.go new file mode 100644 index 00000000..b44e7375 --- /dev/null +++ b/internal/app/forklift/cli/features-printing.go @@ -0,0 +1,96 @@ +package cli + +import ( + "fmt" + "slices" + + "github.com/pkg/errors" + + "github.com/PlanktoScope/forklift/internal/app/forklift" +) + +func PrintPalletFeatures(indent int, pallet *forklift.FSPallet) error { + imps, err := pallet.LoadFeatures("**/*") + if err != nil { + return err + } + for _, imp := range imps { + IndentedPrintf(indent, "%s\n", imp.Name) + } + return nil +} + +func PrintFeatureInfo( + indent int, pallet *forklift.FSPallet, cache forklift.PathedPalletCache, featureName string, +) error { + imp, err := pallet.LoadFeature(featureName, cache) + if err != nil { + return errors.Wrapf( + err, "couldn't find feature declaration %s in pallet %s", featureName, pallet.FS.Path(), + ) + } + resolved := &forklift.ResolvedImport{ + Import: imp, + Pallet: pallet, + } + resolved.Pallet, err = forklift.MergeFSPallet(resolved.Pallet, cache, nil) + if err != nil { + return errors.Wrapf( + err, "couldn't print merge pallet referenced by feature %s resolved as import group %s", + featureName, imp.Name, + ) + } + if err = PrintFeature(indent, resolved, cache); err != nil { + return errors.Wrapf( + err, "couldn't print feature %s resolved as import group %s", featureName, imp.Name, + ) + } + return nil +} + +func PrintFeature(indent int, imp *forklift.ResolvedImport, loader forklift.FSPalletLoader) error { + IndentedPrintf(indent, "Feature %s:\n", imp.Name) + indent++ + deprecations, err := imp.CheckDeprecations(loader) + if err != nil { + return errors.Wrapf(err, "couldn't check deprecations for import %s", imp.Name) + } + if len(deprecations) > 0 { + IndentedPrintln(indent, "Deprecation warnings:") + for _, deprecation := range deprecations { + BulletedPrintln(indent+1, deprecation) + } + } + + if err := printModifiers(indent, imp.Def.Modifiers, imp.Pallet, loader); err != nil { + return err + } + + fmt.Println() + IndentedPrintln(indent, "Files grouped for import:") + if err := 
printFeatureEvaluation(indent+1, imp, loader); err != nil { + return err + } + + return nil +} + +func printFeatureEvaluation( + indent int, imp *forklift.ResolvedImport, loader forklift.FSPalletLoader, +) error { + importMappings, err := imp.Evaluate(loader) + if err != nil { + return errors.Wrapf(err, "couldn't evaluate import group") + } + + targets := make([]string, 0, len(importMappings)) + for target := range importMappings { + targets = append(targets, target) + } + slices.Sort(targets) + for _, target := range targets { + BulletedPrintln(indent, target) + } + + return nil +} diff --git a/internal/app/forklift/cli/imports-printing.go b/internal/app/forklift/cli/imports-printing.go index 773ab0bb..abe874ed 100644 --- a/internal/app/forklift/cli/imports-printing.go +++ b/internal/app/forklift/cli/imports-printing.go @@ -40,34 +40,53 @@ func PrintImportInfo( err, "couldn't print merge pallet referenced by resolved import group %s", imp.Name, ) } - if err = PrintResolvedImport(indent, resolved); err != nil { + if err = PrintResolvedImport(indent, resolved, cache); err != nil { return errors.Wrapf(err, "couldn't print resolved import group %s", imp.Name) } return nil } -func PrintResolvedImport(indent int, imp *forklift.ResolvedImport) error { +func PrintResolvedImport( + indent int, imp *forklift.ResolvedImport, loader forklift.FSPalletLoader, +) error { IndentedPrint(indent, "Import group") if imp.Import.Def.Disabled { fmt.Print(" (disabled!)") } - fmt.Printf(": %s\n", imp.Name) + fmt.Printf(" %s:\n", imp.Name) indent++ + deprecations, err := imp.CheckDeprecations(loader) + if err != nil { + return errors.Wrapf(err, "couldn't check deprecations for import %s", imp.Name) + } + if len(deprecations) > 0 { + IndentedPrintln(indent, "Deprecation warnings:") + for _, deprecation := range deprecations { + BulletedPrintln(indent+1, deprecation) + } + } + IndentedPrintf(indent, "Import source: %s\n", imp.Pallet.Path()) - printModifiers(indent, imp.Def.Modifiers) + if err := printModifiers(indent, imp.Def.Modifiers, imp.Pallet, loader); err != nil { + return err + } fmt.Println() - if err := printEvaluation(indent, imp); err != nil { + IndentedPrintln(indent, "Imported files:") + if err := printImportEvaluation(indent+1, imp, loader); err != nil { return err } return nil } -func printModifiers(indent int, modifiers []forklift.ImportModifier) { - IndentedPrint(indent, "Group modifiers:") +func printModifiers( + indent int, modifiers []forklift.ImportModifier, plt *forklift.FSPallet, + loader forklift.FSPalletLoader, +) error { + IndentedPrint(indent, "Sequential definition:") if len(modifiers) == 0 { fmt.Print(" (none)") } @@ -79,10 +98,19 @@ func printModifiers(indent int, modifiers []forklift.ImportModifier) { printAddModifier(indent, i, modifier) case forklift.ImportModifierTypeRemove: printRemoveModifier(indent, i, modifier) + case forklift.ImportModifierTypeAddFeature: + if err := printAddFeatureModifier(indent, i, modifier, plt, loader); err != nil { + return err + } + case forklift.ImportModifierTypeRemoveFeature: + if err := printRemoveFeatureModifier(indent, i, modifier, plt, loader); err != nil { + return err + } default: BulletedPrintf(indent, "[%d] Unknown modifier type %s: %+v\n", i, modifier.Type, modifier) } } + return nil } func printAddModifier(indent, index int, modifier forklift.ImportModifier) { @@ -118,13 +146,77 @@ func printRemoveModifier(indent, index int, modifier forklift.ImportModifier) { } } -func printEvaluation(indent int, imp *forklift.ResolvedImport) error { - 
IndentedPrintln(indent, "Imported files:") - importMappings, err := imp.Evaluate() +func printAddFeatureModifier( + indent, index int, modifier forklift.ImportModifier, plt *forklift.FSPallet, + loader forklift.FSPalletLoader, +) error { + BulletedPrintf(indent, "[%d] Add feature-flagged files to group", index) + if modifier.Description == "" { + fmt.Println() + } else { + fmt.Printf(": %s\n", modifier.Description) + } + return errors.Wrap( + printReferencedFeature(indent+1, modifier.Source, plt, loader), + "couldn't load feature in modifier", + ) +} + +func printReferencedFeature( + indent int, name string, plt *forklift.FSPallet, loader forklift.FSPalletLoader, +) error { + IndentedPrintf(indent, "Feature %s", name) + feature, err := plt.LoadFeature(name, loader) + if err != nil { + return errors.Wrapf(err, "couldn't load feature %s", name) + } + + if feature.Def.Description != "" { + fmt.Printf(": %s\n", feature.Def.Description) + } else { + fmt.Println(" (no description)") + } + + resolved := &forklift.ResolvedImport{ + Import: feature, + Pallet: plt, + } + deprecations, err := resolved.CheckDeprecations(loader) + if err != nil { + return errors.Wrapf(err, "couldn't check deprecations for import %s", resolved.Name) + } + if len(deprecations) > 0 { + IndentedPrintln(indent, "Deprecation notices:") + for _, deprecation := range deprecations { + BulletedPrintln(indent+1, deprecation) + } + } + return nil +} + +func printRemoveFeatureModifier( + indent, index int, modifier forklift.ImportModifier, plt *forklift.FSPallet, + loader forklift.FSPalletLoader, +) error { + BulletedPrintf(indent, "[%d] Remove feature-flagged files from group", index) + if modifier.Description == "" { + fmt.Println() + } else { + fmt.Printf(": %s\n", modifier.Description) + } + return errors.Wrap( + printReferencedFeature(indent+1, modifier.Source, plt, loader), + "couldn't load feature in modifier", + ) +} + +func printImportEvaluation( + indent int, imp *forklift.ResolvedImport, loader forklift.FSPalletLoader, +) error { + importMappings, err := imp.Evaluate(loader) if err != nil { return errors.Wrapf(err, "couldn't evaluate import group") } - indent++ targets := make([]string, 0, len(importMappings)) for target := range importMappings { diff --git a/internal/app/forklift/cli/packages-printing.go b/internal/app/forklift/cli/packages-printing.go index 01e3c7b9..7124439c 100644 --- a/internal/app/forklift/cli/packages-printing.go +++ b/internal/app/forklift/cli/packages-printing.go @@ -233,7 +233,7 @@ func PrintPalletPkgs(indent int, pallet *forklift.FSPallet, loader forklift.FSPk // List local packages provided by the pallet itself loaded, err := pallet.LoadFSPkgs("**") if err != nil { - return errors.Wrapf(err, "couldn't load local packages pallet at %s", pallet.Path()) + return errors.Wrapf(err, "couldn't load local packages defined by pallet at %s", pallet.Path()) } for _, pkg := range loaded { pkg.Repo.Def.Repo.Path = "/" diff --git a/internal/app/forklift/cli/requirements-pallets.go b/internal/app/forklift/cli/requirements-pallets.go index 8ddb6cad..bdfd027f 100644 --- a/internal/app/forklift/cli/requirements-pallets.go +++ b/internal/app/forklift/cli/requirements-pallets.go @@ -249,6 +249,7 @@ func DownloadRequiredPallets( indent++ allSkip := make(structures.Set[string]) maps.Insert(allSkip, maps.All(skipPalletPaths)) + downloadedPallets = make(structures.Set[string]) for _, req := range loadedPalletReqs { palletPath := fmt.Sprintf("%s@%s", req.Path(), req.VersionLock.Version) if allSkip.Has(palletPath) { 
diff --git a/internal/app/forklift/pallets-imports.go b/internal/app/forklift/pallets-imports.go index 9c2626de..cb5c9974 100644 --- a/internal/app/forklift/pallets-imports.go +++ b/internal/app/forklift/pallets-imports.go @@ -2,6 +2,7 @@ package forklift import ( "io/fs" + "maps" "path" "slices" "strings" @@ -32,14 +33,21 @@ func ResolveImports( return resolvedImports, nil } -// ResolveImport loads the pallet from the [FSPalletLoader] instance based on the requirements in -// the provided file import group and the pallet. +// ResolveImport loads the import from a pallet loaded from the [FSPalletLoader] instance based on +// the requirements in the provided file import group and the pallet. func ResolveImport( pallet *FSPallet, palletLoader FSPalletLoader, imp Import, ) (resolved *ResolvedImport, err error) { resolved = &ResolvedImport{ Import: imp, } + if _, err = fs.Stat(pallet.FS, path.Join(FeaturesDirName, imp.Name+FeatureDefFileExt)); err == nil { + // Attach the import to the current pallet + resolved.Pallet = pallet + return resolved, nil + } + + // Attach the import to a required pallet palletReqsFS, err := pallet.GetPalletReqsFS() if err != nil { return nil, errors.Wrap(err, "couldn't open directory for pallet requirements from pallet") @@ -64,7 +72,7 @@ func ResolveImport( // Evaluate returns a list of target file paths and a mapping between target file paths and source // file paths relative to the attached pallet's FS member. Directories are excluded from this // mapping. -func (i *ResolvedImport) Evaluate() (map[string]string, error) { +func (i *ResolvedImport) Evaluate(loader FSPalletLoader) (map[string]string, error) { pathMappings := make(map[string]string) // target -> source for _, modifier := range i.Def.Modifiers { switch modifier.Type { @@ -78,6 +86,14 @@ func (i *ResolvedImport) Evaluate() (map[string]string, error) { if err := applyRemoveModifier(modifier, pathMappings); err != nil { return pathMappings, err } + case ImportModifierTypeAddFeature: + if err := applyAddFeatureModifier(modifier, i.Pallet, pathMappings, loader); err != nil { + return pathMappings, err + } + case ImportModifierTypeRemoveFeature: + if err := applyRemoveFeatureModifier(modifier, i.Pallet, pathMappings, loader); err != nil { + return pathMappings, err + } } } return pathMappings, nil @@ -167,6 +183,78 @@ func matchWithChildren(pattern, name string) (bool, error) { return childMatches, nil } +func applyAddFeatureModifier( + modifier ImportModifier, pallet *FSPallet, pathMappings map[string]string, loader FSPalletLoader, +) error { + feature, err := pallet.LoadFeature(modifier.Source, loader) + if err != nil { + return errors.Wrapf(err, "couldn't load feature %s", modifier.Source) + } + resolved := &ResolvedImport{ + Import: feature, + Pallet: pallet, + } + featureMappings, err := resolved.Evaluate(loader) + if err != nil { + return errors.Wrapf( + err, "couldn't evaluate feature %s to determine file imports to add", modifier.Source, + ) + } + maps.Insert(pathMappings, maps.All(featureMappings)) + return nil +} + +func applyRemoveFeatureModifier( + modifier ImportModifier, pallet *FSPallet, pathMappings map[string]string, loader FSPalletLoader, +) error { + feature, err := pallet.LoadFeature(modifier.Source, loader) + if err != nil { + return errors.Wrapf(err, "couldn't load feature %s", modifier.Source) + } + resolved := &ResolvedImport{ + Import: feature, + Pallet: pallet, + } + featureMappings, err := resolved.Evaluate(loader) + if err != nil { + return errors.Wrapf( + err, "couldn't 
evaluate feature %s to determine file imports to remove", modifier.Source, + ) + } + maps.DeleteFunc(pathMappings, func(target, source string) bool { + _, ok := featureMappings[target] + return ok + }) + return nil +} + +// CheckDeprecations returns a list of [error]s for any directly-referenced or +// transitively-referenced features which are deprecated. +func (i *ResolvedImport) CheckDeprecations( + loader FSPalletLoader, +) (deprecations []error, err error) { + if i.Def.Deprecated != "" { + return []error{errors.New(i.Def.Deprecated)}, nil + } + + for _, modifier := range i.Def.Modifiers { + switch modifier.Type { + default: + continue + case ImportModifierTypeAddFeature, ImportModifierTypeRemoveFeature: + checked, err := modifier.CheckDeprecations(i.Pallet, loader) + if err != nil { + return deprecations, err + } + deprecations = append(deprecations, checked...) + } + } + return deprecations, nil +} + +// TODO: add a method to check whether any import modifiers don't match any files, so that we can +// issue a warning when that happens! + // Import // FilterImportsForEnabled filters a slice of Imports to only include those which are not disabled. @@ -183,11 +271,12 @@ // loadImport loads the Import from a file path in the provided base filesystem, assuming the file path // is the specified name of the import followed by the import group file extension. -func loadImport(fsys core.PathedFS, name string) (imp Import, err error) { +func loadImport(fsys core.PathedFS, name, fileExt string) (imp Import, err error) { imp.Name = name - if imp.Def, err = loadImportDef(fsys, name+ImportDefFileExt); err != nil { + if imp.Def, err = loadImportDef(fsys, name+fileExt); err != nil { return Import{}, errors.Wrapf(err, "couldn't load import group") } + // TODO: if the import is deprecated, print a warning with the deprecation message return imp, nil } @@ -195,8 +284,8 @@ // the specified search pattern. // The search pattern should not include the file extension for import group files - the // file extension will be appended to the search pattern by LoadImports. -func loadImports(fsys core.PathedFS, searchPattern string) ([]Import, error) { - searchPattern += ImportDefFileExt +func loadImports(fsys core.PathedFS, searchPattern, fileExt string) ([]Import, error) { + searchPattern += fileExt impDefFiles, err := doublestar.Glob(fsys, searchPattern) if err != nil { return nil, errors.Wrapf( @@ -206,12 +295,12 @@ imps := make([]Import, 0, len(impDefFiles)) for _, impDefFilePath := range impDefFiles { - if !strings.HasSuffix(impDefFilePath, ImportDefFileExt) { + if !strings.HasSuffix(impDefFilePath, fileExt) { continue } - impName := strings.TrimSuffix(impDefFilePath, ImportDefFileExt) - imp, err := loadImport(fsys, impName) + impName := strings.TrimSuffix(impDefFilePath, fileExt) + imp, err := loadImport(fsys, impName, fileExt) if err != nil { return nil, errors.Wrapf(err, "couldn't load import group from %s", impDefFilePath) } @@ -281,4 +370,32 @@ func (d ImportDef) RemoveDefaults() ImportDef { return d } -// TODO: add a method to validate the import definition +// ImportModifier + +// CheckDeprecations returns a list of [error]s for any directly-referenced or +// transitively-referenced features in the specified pallet which are deprecated.
+func (m ImportModifier) CheckDeprecations( + pallet *FSPallet, loader FSPalletLoader, +) (deprecations []error, err error) { + feature, err := pallet.LoadFeature(m.Source, loader) + if err != nil { + return nil, errors.Wrapf(err, "couldn't load referenced feature %s", m.Source) + } + if deprecation := feature.Def.Deprecated; deprecation != "" { + return []error{errors.Errorf("feature %s is deprecated: %s", feature.Name, deprecation)}, nil + } + + resolved, err := ResolveImport(pallet, loader, feature) + if err != nil { + return deprecations, errors.Wrapf(err, "couldn't resolve feature %s", feature.Name) + } + deprecations, err = resolved.CheckDeprecations(loader) + if err != nil { + return deprecations, err + } + wrapped := make([]error, 0, len(deprecations)) + for _, deprecation := range deprecations { + wrapped = append(wrapped, errors.Wrapf(deprecation, "referenced by feature %s", feature.Name)) + } + return wrapped, nil +} diff --git a/internal/app/forklift/pallets-merging.go b/internal/app/forklift/pallets-merging.go index 2f5a099e..8b089ae7 100644 --- a/internal/app/forklift/pallets-merging.go +++ b/internal/app/forklift/pallets-merging.go @@ -120,7 +120,7 @@ func evaluatePalletImports( }) } if palletFileMappings[palletPath], err = consolidatePalletImports( - mergedPalletResolved, + mergedPalletResolved, palletLoader, ); err != nil { return nil, nil, errors.Wrapf( err, "couldn't evaluate import groups for pallet %s", palletPath, @@ -133,11 +133,13 @@ func evaluatePalletImports( // consolidatePalletImports checks the import groups loaded for a single required pallet and // consolidates into a single mapping between target paths and source paths relative to the // required pallet. -func consolidatePalletImports(imports []*ResolvedImport) (map[string]string, error) { +func consolidatePalletImports( + imports []*ResolvedImport, loader FSPalletLoader, +) (map[string]string, error) { union := make(map[string]string) // target -> source mappingOrigin := make(map[string][]string) // target -> import group names for _, imp := range imports { - importMappings, err := imp.Evaluate() + importMappings, err := imp.Evaluate(loader) if err != nil { return nil, errors.Wrapf(err, "couldn't evaluate import group %s", imp.Import.Name) } diff --git a/internal/app/forklift/pallets-models.go b/internal/app/forklift/pallets-models.go index 775821f6..002517e9 100644 --- a/internal/app/forklift/pallets-models.go +++ b/internal/app/forklift/pallets-models.go @@ -177,9 +177,9 @@ type ResolvedImport struct { // An Import is an import group, a declaration of a group of files to import from a required pallet. type Import struct { - // Name is the name of the package file import. + // Name is the name of the file import group. Name string - // Def is the file import definition for the file import. + // Def is the file import definition for the file import group. Def ImportDef } @@ -192,6 +192,9 @@ type ImportDef struct { Modifiers []ImportModifier `yaml:"modifiers"` // Disabled represents whether the import should be ignored. Disabled bool `yaml:"disabled,omitempty"` + // Deprecated is a deprecation notice which, if specified as a non-empty string, causes warnings + // to be issued whenever the file import group is used via a feature flag. 
+ Deprecated string `yaml:"deprecated,omitempty"` } // An ImportModifier defines an operation for transforming a set of files for importing into a @@ -199,11 +202,14 @@ type ImportDef struct { type ImportModifier struct { // Description is a short description of the import modifier to be shown to users. Description string `yaml:"description,omitempty"` - // Type is either `add` (for adding one or more files to the set of files to import) or `remove` - // (for removing one or more files from the set of files to import) + // Type is either `add` (for adding one or more files to the set of files to import), `remove` + // (for removing one or more files from the set of files to import), `add-feature` (for adding + // files specified by a feature flag to the set of files to import), or `remove-feature` (for + // removing one or more files specified by a feature flag from the set of files to import). Type string `yaml:"type,omitempty"` // Source is the path in the required pallet of the file/directory to be imported, for an `add` - // modifier. If omitted, the source path will be inferred from the Target path. + // modifier; or the name of a feature flag, for an `add-feature` or `remove-feature` modifier. If + // omitted, the source path will be inferred from the Target path. Source string `yaml:"source,omitempty"` // Target is the path which the file/directory will be imported as, for an `add` modifier; or the // path of the file/directory which will be removed from the set of files to import, for a @@ -217,6 +223,18 @@ type ImportModifier struct { } const ( - ImportModifierTypeAdd = "add" - ImportModifierTypeRemove = "remove" + ImportModifierTypeAdd = "add" + ImportModifierTypeRemove = "remove" + ImportModifierTypeAddFeature = "add-feature" + ImportModifierTypeRemoveFeature = "remove-feature" +) + +// Features + +const ( + // FeaturesDirName is the directory in a pallet containing declarations of file import groups + // which can be referenced by name in file import groups. + FeaturesDirName = "features" + // FeatureDefFileExt is the file extension for import group files. + FeatureDefFileExt = ".feature.yml" ) diff --git a/internal/app/forklift/pallets.go b/internal/app/forklift/pallets.go index e7ba192b..02e16b90 100644 --- a/internal/app/forklift/pallets.go +++ b/internal/app/forklift/pallets.go @@ -320,7 +320,7 @@ func (p *FSPallet) LoadImport(name string) (imp Import, err error) { if err != nil { return Import{}, errors.Wrap(err, "couldn't open directory for import groups from pallet") } - if imp, err = loadImport(impsFS, name); err != nil { + if imp, err = loadImport(impsFS, name, ImportDefFileExt); err != nil { return Import{}, errors.Wrapf(err, "couldn't load import group for %s", name) } return imp, nil @@ -334,7 +334,66 @@ func (p *FSPallet) LoadImports(searchPattern string) ([]Import, error) { if err != nil { return nil, errors.Wrap(err, "couldn't open directory for import groups from pallet") } - return loadImports(fsys, searchPattern) + return loadImports(fsys, searchPattern, ImportDefFileExt) +} + +// FSPallet: Features + +// GetFeaturesFS returns the [fs.FS] in the pallet which contains pallet feature flag declarations. +func (p *FSPallet) GetFeaturesFS() (core.PathedFS, error) { + return p.FS.Sub(FeaturesDirName) +} + +// LoadFeature loads the Import declared by the specified feature flag name. 
The feature name is +// assumed to be either a path relative to the root of the pallet's filesystem, beginning with a +// "/", or (if the provided pallet loader is non-nil) a fully-qualified path in the form +// "github.com/repo-owner/repo-name/feature-subdir-path" +// (e.g. "github.com/PlanktoScope/pallet-standard/features/all"). +func (p *FSPallet) LoadFeature(name string, loader FSPalletLoader) (imp Import, err error) { + featuresFS, err := p.GetFeaturesFS() + if err != nil { + return Import{}, errors.Wrap(err, "couldn't open directory for feature declarations in pallet") + } + if imp, err = loadImport(featuresFS, name, FeatureDefFileExt); err != nil { + reqsFS, err := p.GetPalletReqsFS() + if err != nil { + return Import{}, errors.Wrap( + err, "couldn't open directory for pallet requirements from pallet", + ) + } + req, err := LoadFSPalletReqContaining(reqsFS, name) + if err != nil { + return Import{}, errors.Wrapf( + err, "couldn't find pallet requirement declaration for feature %s", name, + ) + } + if loader == nil { + return Import{}, errors.Errorf("no pallet loader provided for loading feature %s", name) + } + loaded, _, err := LoadRequiredFSPallet(p, loader, req.RequiredPath) + if err != nil { + return Import{}, errors.Wrapf( + err, "couldn't load pallet %s providing feature %s", req.RequiredPath, name, + ) + } + feature, err := loaded.LoadFeature( + strings.TrimPrefix(name, path.Join(loaded.Path(), FeaturesDirName)+"/"), nil, + ) + feature.Name = name + return feature, errors.Wrapf(err, "couldn't load import group for feature %s", name) + } + return imp, nil +} + +// LoadFeatures loads all Imports from the pallet matching the specified search pattern. +// The search pattern should be a [doublestar] pattern, such as `**`, matching the feature paths to +// search for (but excluding the file extension for feature declaration files). +func (p *FSPallet) LoadFeatures(searchPattern string) ([]Import, error) { + featuresFS, err := p.GetFeaturesFS() + if err != nil { + return nil, errors.Wrap(err, "couldn't open directory for feature declarations in pallet") + } + return loadImports(featuresFS, searchPattern, FeatureDefFileExt) } // Pallet
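As a closing illustration of how the deprecation machinery above fits together: a feature flag may carry a deprecation notice in the new `deprecated` field of `ImportDef`, and `CheckDeprecations` surfaces that notice whenever the feature is referenced, directly or transitively, through an `add-feature` or `remove-feature` modifier; this is what `[dev] plt show-feat`, `[dev] plt show-plt-feat`, and `[dev] plt show-imp` print as deprecation warnings. A minimal sketch with hypothetical names and paths:

  # features/legacy-echo.feature.yml (hypothetical deprecated feature flag):
  deprecated: superseded by the echo feature; this flag will be removed in a future release
  description: Files for the old echo service layout
  modifiers:
    - description: Add the old deployment files
      type: add
      target: /deployments/legacy-echo

An import group (or another feature) containing a modifier with `type: add-feature` and `source: legacy-echo` would then surface a warning of the form "feature legacy-echo is deprecated: superseded by the echo feature; ..." via `ImportModifier.CheckDeprecations`.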