diff --git a/.gitignore b/.gitignore
index fe78e7b6..6810fd9c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
changelog-temp.md
+mangal
######
# Go #
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 516984f5..467ef792 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,66 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com), and this project adheres to
[Semantic Versioning](https://semver.org).
+## 4.0.0
+
+I've been actively working on this update lately, and I'm happy to finally share the 4th version of Mangal! 🐳
+
+The most important feature this major version brings is a significantly improved caching mechanism,
+which makes Mangal extremely fast and responsive.
+
+Now, mangal makes almost no requests to the servers.
+This includes Anilist, scrapers, the update checker and so on!
+
+
+⚠️ BREAKING!!! ⚠️ Please read!
+
+1. `mangal sources` no longer lists available sources; use `mangal sources list` instead.
+2. `mangal gen` and `mangal install` were removed. Use `mangal sources gen` and `mangal sources install` instead.
+3. The `mangal sources remove` command was improved and now accepts flags instead of args.
+
+The inline JSON output has also changed:
+
+- JSON fields now follow the [camelCase](https://en.wikipedia.org/wiki/Camel_case) style instead of `PascalCase`
+  (PascalCase was never intentional, I just forgot to configure it properly, and a major release is the right time to fix it)
+- The structure was changed
+- Additional fields were added
+
+See [Inline mode wiki](https://github.com/metafates/mangal/wiki/Inline-mode) for new output schemas.
+
+Please consider these changes when migrating applications that use mangal from the 3rd version to the 4th.
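+
+For a quick look at the new output, you can dump the schemas and compare them against a sample query
+(assuming you have `jq` installed; the source and query below are only examples, use whatever you have configured):
+
+```shell
+# JSON schema of the regular inline output
+mangal inline schema | jq .
+
+# JSON schema of the `inline anilist` output
+mangal inline schema --anilist | jq .
+
+# sample search with the new camelCase fields
+mangal inline --source Mangapill --query "death note" --manga first --json | jq .
+```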
+
+
+- Improved TUI experience
+- Search completions in the TUI. See `mangal config info -k search.show_query_suggestions`
+- Anilist caching was significantly improved. All search results are now cached (for 2 days)
+- Update metadata of already downloaded manga (ComicInfo.xml, series.json, cover image) after changing the Anilist bind. #124
+  See `mangal inline anilist update` for more info
+- New command to generate a JSON schema of the inline output. See `mangal help inline schema`
+- **Breaking** `downloader.default_source` was changed to `downloader.default_sources` and now accepts an array of strings
+  (see the examples after this list). See `mangal config info -k downloader.default_sources` for more info
+- New `config reset` command
+- Add caching for custom (Lua) sources
+- Include different cover sizes and the average color in the JSON output #116
+- Add option to omit dates for ComicInfo.xml #117
+- By default, when reading a chapter, mangal will look for its downloaded copy instead of downloading it again.
+  See `mangal config info -k downloader.read_downloaded`
+- Overwrite the old `series.json` file each time a chapter is downloaded
+- Detect sources that use headless Chrome and show that in the item description when selecting a source
+- Option to use alternative ComicInfo.xml date.
+ See `mangal config info -k metadata.comic_info_xml_alternative_date` for more info
+- Notify about a new version in the `help` command
+- Include staff in ComicInfo.xml #119
+- Add `--set-only` and `--unset-only` flags to the `env` command. The old `--filter` flag was removed
+- The `version` command now has a `--short` flag to print just the version without extra information
+- **Breaking!** Your old reading history (via `mangal --continue`) will be reset
+- Improved `clear` command
+- Option to set a relevance threshold for tags to be included in ComicInfo.xml #121
+- Improved inline command JSON output and various fixes
+- Internal improvements
+
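+A few of the new commands and flags from the list above in action
+(values here are purely illustrative; check each command's `--help` for details):
+
+```shell
+# downloader.default_sources now takes a list of sources
+mangal config set --key downloader.default_sources --value "Mangapill,Mangadex"
+
+# reset a key back to its default value
+mangal config reset --key downloader.default_sources
+
+# clear the anilist binds and the queries history
+mangal clear --anilist --queries
+
+# show only the environment variables that are set
+mangal env --set-only
+```
+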
+Enjoy!
+
## 3.14.2
- Do not put an invalid value for dates #114
diff --git a/Makefile b/Makefile
index 137569b6..2e264201 100644
--- a/Makefile
+++ b/Makefile
@@ -18,6 +18,7 @@ help:
@echo " install Install the mangal binary"
@echo " uninstall Uninstall the mangal binary"
@echo " test Run the tests"
+ @echo " gif Generate usage gifs"
@echo " help Show this help message"
@echo ""
@@ -33,3 +34,7 @@ test:
uninstall:
@rm -f $(shell which mangal)
+
+gif:
+ @vhs assets/tui.tape
+ @vhs assets/inline.tape
diff --git a/README.md b/README.md
index 171143e3..324a3930 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,6 @@
-
Mangal 3 🪐
+
+Mangal 4 ☄️
+
@@ -11,7 +13,10 @@
The most advanced CLI manga downloader in the entire universe!
-https://user-images.githubusercontent.com/62389790/191430795-cb9859cc-5252-4155-b34b-ecf727003407.mp4
+
+
+
+
## Try it!
@@ -36,15 +41,17 @@ curl -sSL mangal.metafates.one/run | sh
- __Lua Scrapers!!!__ You can add any source you want by creating your own _(or using someone else's)_ scraper with
__Lua 5.1__. See [mangal-scrapers repository](https://github.com/metafates/mangal-scrapers)
-- __4 Built-in sources__ - [Mangadex](https://mangadex.org), [Manganelo](https://m.manganelo.com/wwww), [Mangakakalot](https://mangakakalot.com) & [Mangapill](https://mangapill.com)
+- __4 Built-in sources__ - [Mangadex](https://mangadex.org), [Manganelo](https://m.manganelo.com/wwww), [Manganato](https://manganato.com) & [Mangapill](https://mangapill.com)
- __Download & Read Manga__ - I mean, it would be strange if you couldn't, right?
+- __Caching__ - Mangal will cache as much data as possible, so you don't have to wait for it to download the same data over and over again.
- __4 Different export formats__ - PDF, CBZ, ZIP and plain images
-- __3 Different modes__ - TUI, Mini and Inline
+- __TUI ✨__ - You already know how to use it! (ノ>ω<)ノ :。・::・゚’★,。・:・゚’☆
+- __Scriptable__ - You can use Mangal in your scripts, it's just a CLI app after all. [Examples](https://github.com/metafates/mangal/wiki/Inline-mode)
+- __History__ - Resume your reading from where you left off!
- __Fast?__ - YES.
-- __Monolith__ - ZERO runtime dependencies. Even Lua is built in.
-- __Fancy__ - (ノ>ω<)ノ :。・::・゚’★,。・:・゚’☆
-- __Cross-Platform__ - Linux, macOS, Windows, Termux
-- __Anilist integration__ - Track your manga progress on Anilist when reading with Mangal.
+- __Monolith__ - ZERO runtime dependencies. Even Lua is built in. Easy to install and use.
+- __Cross-Platform__ - Linux, macOS, Windows, Termux, even your toaster. (¬‿¬ )
+- __Anilist integration__ - Mangal will collect additional data from Anilist and use it to improve your reading experience. It can also sync your progress!
## Installation
@@ -129,7 +136,7 @@ make build # if you want to just build the binary
If you don't have GNU Make use this
-
+
```shell
# To build
@@ -139,6 +146,8 @@ go build -ldflags "-X 'github.com/metafates/mangal/constant.BuiltAt=$(date -u)'
go install -ldflags "-X 'github.com/metafates/mangal/constant.BuiltAt=$(date -u)' -X 'github.com/metafates/mangal/constant.BuiltBy=$(whoami)' -X 'github.com/metafates/mangal/constant.Revision=$(git rev-parse --short HEAD)' -s -w"
```
+
+
If you want to build mangal for other architecture, say ARM, you'll have to set env variables `GOOS` and `GOARCH`
```shell
@@ -183,7 +192,11 @@ Just run `mangal` and you're ready to go.
-
+![TUI](https://user-images.githubusercontent.com/62389790/198830334-fd85c74f-cf3b-4e56-9262-5d62f7f829f4.png)
+
+> If you wonder what those icons mean: `D` stands for "downloaded", `*` marks a chapter selected for download.
+> You can choose different icons, e.g. Nerd Font ones, by running mangal with `--icons nerd`.
+> Available options are `nerd`, `emoji`, `kaomoji` and `squares`.
### Mini
@@ -191,21 +204,19 @@ Mini mode tries to mimic [ani-cli](https://github.com/pystardust/ani-cli)
To run: `mangal mini`
-
+![mini](https://user-images.githubusercontent.com/62389790/198830544-f2005ec4-c206-4fe0-bd08-862ffd08320e.png)
### Inline
Inline mode is intended for use with other scripts.
-Example of usage:
-
- mangal inline --source Manganelo --query "death note" --manga first --chapters all -d
-
-> This will download all chapters of the "Death Note" from Manganelo.
+Type `mangal help inline` for more information.
-See [Wiki](https://github.com/metafates/mangal/wiki/Inline-mode) for more information
+See [Wiki](https://github.com/metafates/mangal/wiki/Inline-mode) for more examples.
-
+
+
+
### Other
@@ -261,12 +272,11 @@ It should automatically appear in the list of available scrapers.
Mangal also supports integration with anilist.
-It will mark chapters as read on Anilsit when you read them inside mangal.
+Besides fetching metadata for each manga when downloading,
+mangal can also mark chapters as read on your Anilist profile when you read them inside mangal.
For more information see [wiki](https://github.com/metafates/mangal/wiki/Anilist-Integration)
-> Maybe I'll add more sites in the future, like [myanimelist](https://myanimelist.net/). Open for suggestions!
-
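+Since the integration is also exposed through the inline mode, it can be scripted as well.
+A couple of illustrative calls (the title and ID below are just the ones used in the demo recordings):
+
+```shell
+# search Anilist for a manga
+mangal inline anilist search --name "chainsaw man" | jq .
+
+# manually bind a manga title to a specific Anilist ID
+mangal inline anilist set --name "Chainsaw Man" --id 138829
+```
+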
## Honorable mentions
### Projects using mangal
@@ -294,11 +304,11 @@ For more information see [wiki](https://github.com/metafates/mangal/wiki/Anilist
### Contributors
-And of course, thanks to all the contributors! You are awesome!
+And of course, thanks to all contributors! You are awesome!
-
+
@@ -311,6 +321,6 @@ please consider starring it, that would mean a lot to me ⭐
-
+
diff --git a/anilist/cache.go b/anilist/cache.go
index 518aeebb..a4f6a46e 100644
--- a/anilist/cache.go
+++ b/anilist/cache.go
@@ -1,94 +1,98 @@
package anilist
import (
- "encoding/json"
- "github.com/metafates/mangal/filesystem"
- "github.com/metafates/mangal/log"
- "github.com/metafates/mangal/util"
+ "github.com/metafates/mangal/cache"
"github.com/metafates/mangal/where"
- "io"
- "os"
+ "github.com/samber/mo"
"path/filepath"
+ "time"
)
-var cache = anilistCache{
- data: &anilistCacheData{Mangas: make(map[string]*Manga)},
+type cacheData[K comparable, T any] struct {
+ Mangas map[K]T `json:"mangas"`
}
-type anilistCacheData struct {
- Mangas map[string]*Manga `json:"mangas"`
+type cacher[K comparable, T any] struct {
+ internal *cache.Cache[*cacheData[K, T]]
+ keyWrapper func(K) K
}
-type anilistCache struct {
- data *anilistCacheData
- path string
- initialized bool
-}
-
-func (a *anilistCache) Init() error {
- if a.initialized {
- return nil
+func (c *cacher[K, T]) Get(key K) mo.Option[T] {
+ data := c.internal.Get()
+ if data.IsPresent() {
+ mangas, ok := data.MustGet().Mangas[c.keyWrapper(key)]
+ if ok {
+ return mo.Some(mangas)
+ }
}
- log.Debug("Initializing anilist cacher")
-
- path := filepath.Join(where.Cache(), "anilist_cache.json")
- a.path = path
- log.Debugf("Opening anilist cache file at %s", path)
- file, err := filesystem.Api().OpenFile(path, os.O_RDONLY|os.O_CREATE, os.ModePerm)
-
- if err != nil {
- log.Warn(err)
- return err
- }
-
- defer util.Ignore(file.Close)
-
- contents, err := io.ReadAll(file)
- if err != nil {
- log.Warn(err)
- return err
- }
+ return mo.None[T]()
+}
- if len(contents) == 0 {
- log.Debug("Anilist cache file is empty, skipping unmarshal")
- return nil
+func (c *cacher[K, T]) Set(key K, t T) error {
+ data := c.internal.Get()
+ if data.IsPresent() {
+ internal := data.MustGet()
+ internal.Mangas[c.keyWrapper(key)] = t
+ return c.internal.Set(internal)
+ } else {
+ internal := &cacheData[K, T]{Mangas: make(map[K]T)}
+ internal.Mangas[c.keyWrapper(key)] = t
+ return c.internal.Set(internal)
}
+}
- err = json.Unmarshal(contents, a.data)
- if err != nil {
- log.Warn(err)
- return err
+func (c *cacher[K, T]) Delete(key K) error {
+ data := c.internal.Get()
+ if data.IsPresent() {
+ internal := data.MustGet()
+ delete(internal.Mangas, c.keyWrapper(key))
+ return c.internal.Set(internal)
}
- log.Debugf("Anilist cache file unmarshalled successfully, len is %d", len(a.data.Mangas))
return nil
}
-func (a *anilistCache) Get(name string) (*Manga, bool) {
- _ = a.Init()
-
- mangas, ok := a.data.Mangas[normalizeName(name)]
- return mangas, ok
+var relationCacher = &cacher[string, int]{
+ internal: cache.New[*cacheData[string, int]](
+ where.AnilistBinds(),
+ &cache.Options{
+ // never expire
+ ExpireEvery: mo.None[time.Duration](),
+ },
+ ),
+ keyWrapper: normalizedName,
}
-func (a *anilistCache) Set(name string, manga *Manga) error {
- _ = a.Init()
-
- log.Debug("Setting anilist cacher entry")
- a.data.Mangas[normalizeName(name)] = manga
- marshalled, err := json.Marshal(a.data)
- if err != nil {
- log.Warn(err)
- return err
- }
-
- file, err := filesystem.Api().OpenFile(a.path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
+var searchCacher = &cacher[string, []int]{
+ internal: cache.New[*cacheData[string, []int]](
+ filepath.Join(where.Cache(), "anilist_search_cache.json"),
+ &cache.Options{
+ // update ids every 10 days, since new manga are not added that often
+ ExpireEvery: mo.Some(time.Hour * 24 * 10),
+ },
+ ),
+ keyWrapper: normalizedName,
+}
- _, err = file.Write(marshalled)
- if err != nil {
- log.Warn(err)
- }
+var idCacher = &cacher[int, *Manga]{
+ internal: cache.New[*cacheData[int, *Manga]](
+ filepath.Join(where.Cache(), "anilist_id_cache.json"),
+ &cache.Options{
+ // update manga data every 2 days since it can change often
+ ExpireEvery: mo.Some(time.Hour * 24 * 2),
+ },
+ ),
+ keyWrapper: func(id int) int { return id },
+}
- return err
+var failCacher = &cacher[string, bool]{
+ internal: cache.New[*cacheData[string, bool]](
+ filepath.Join(where.Cache(), "anilist_fail_cache.json"),
+ &cache.Options{
+ // expire every minute
+ ExpireEvery: mo.Some(time.Minute),
+ },
+ ),
+ keyWrapper: normalizedName,
}
diff --git a/anilist/find.go b/anilist/find.go
index bea2f480..0b0fbe61 100644
--- a/anilist/find.go
+++ b/anilist/find.go
@@ -9,35 +9,54 @@ import (
"strings"
)
-var (
- retries uint8
- limit uint8 = 3
-)
-
-func normalizeName(name string) string {
+// normalizedName returns a normalized name for comparison
+func normalizedName(name string) string {
return strings.ToLower(strings.TrimSpace(name))
}
+// SetRelation sets the relation between a manga name and an anilist id
func SetRelation(name string, to *Manga) error {
- return cache.Set(name, to)
-}
+ err := relationCacher.Set(name, to.ID)
+ if err != nil {
+ return err
+ }
-func GetRelation(name string) (*Manga, bool) {
- return cache.Get(name)
+ if id := idCacher.Get(to.ID); id.IsAbsent() {
+ return idCacher.Set(to.ID, to)
+ }
+
+ return nil
}
+// FindClosest returns the closest manga to the given name.
+// It will compare the given name against the Anilist search results using the Levenshtein distance.
func FindClosest(name string) (*Manga, error) {
- if retries >= limit {
- retries = 0
+ name = normalizedName(name)
+ return findClosest(name, name, 0, 3)
+}
+
+// findClosest returns the closest manga to the given name.
+// It will compare the given name against the Anilist search results using the Levenshtein distance.
+func findClosest(name, originalName string, try, limit int) (*Manga, error) {
+ if try >= limit {
err := fmt.Errorf("no results found on Anilist for manga %s", name)
log.Error(err)
+ _ = relationCacher.Set(originalName, -1)
return nil, err
}
- name = normalizeName(name)
+ id := relationCacher.Get(name)
+ if id.IsPresent() {
+ if id.MustGet() == -1 {
+ return nil, fmt.Errorf("no results found on Anilist for manga %s", name)
+ }
- if manga, ok := cache.Get(name); ok {
- return manga, nil
+ if manga, ok := idCacher.Get(id.MustGet()).Get(); ok {
+ if try > 0 {
+ _ = relationCacher.Set(originalName, manga.ID)
+ }
+ return manga, nil
+ }
}
// search for manga on anilist
@@ -47,35 +66,58 @@ func FindClosest(name string) (*Manga, error) {
return nil, err
}
+ if id.IsPresent() {
+ found, ok := lo.Find(mangas, func(item *Manga) bool {
+ return item.ID == id.MustGet()
+ })
+
+ if ok {
+ return found, nil
+ }
+
+ // there should be a manga with the id in the cache, but it wasn't found
+ // this means that the manga was deleted from anilist
+ // remove the id from the cache
+ _ = relationCacher.Delete(name)
+ log.Infof("Manga with id %d was deleted from Anilist", id.MustGet())
+ }
+
if len(mangas) == 0 {
// try again with a different name
- retries++
words := strings.Split(name, " ")
- if len(words) == 1 {
- // trigger limit
- retries = limit
- return FindClosest("")
+ if len(words) <= 2 {
+ // trigger limit, proceeding further will only make things worse
+ return findClosest(name, originalName, limit, limit)
}
// one word less
alternateName := strings.Join(words[:util.Max(len(words)-1, 1)], " ")
log.Infof(`No results found on Anilist for manga "%s", trying "%s"`, name, alternateName)
- return FindClosest(alternateName)
+ return findClosest(alternateName, originalName, try+1, limit)
}
// find the closest match
closest := lo.MinBy(mangas, func(a, b *Manga) bool {
return levenshtein.Distance(
name,
- normalizeName(a.Name()),
+ normalizedName(a.Name()),
) < levenshtein.Distance(
name,
- normalizeName(b.Name()),
+ normalizedName(b.Name()),
)
})
log.Info("Found closest match: " + closest.Name())
- retries = 0
- _ = cache.Set(name, closest)
+
+ save := func(n string) {
+ if id := relationCacher.Get(n); id.IsAbsent() {
+ _ = relationCacher.Set(n, closest.ID)
+ }
+ }
+
+ save(name)
+ save(originalName)
+
+ _ = idCacher.Set(closest.ID, closest)
return closest, nil
}
diff --git a/anilist/manga.go b/anilist/manga.go
index 983017ec..0879cd7a 100644
--- a/anilist/manga.go
+++ b/anilist/manga.go
@@ -7,40 +7,88 @@ type date struct {
}
type Manga struct {
- URL string `json:"url"`
+ // Title of the manga
Title struct {
- Romaji string `json:"romaji"`
- English string `json:"english"`
- Native string `json:"native"`
+ // Romaji is the romanized title of the manga.
+ Romaji string `json:"romaji" jsonschema:"description=Romanized title of the manga."`
+ // English is the english title of the manga.
+ English string `json:"english" jsonschema:"description=English title of the manga."`
+ // Native is the native title of the manga. (Usually in kanji)
+ Native string `json:"native" jsonschema:"description=Native title of the manga. Usually in kanji."`
} `json:"title"`
- ID int `json:"id"`
- Description string `json:"description"`
- CoverImage struct {
- ExtraLarge string `json:"extraLarge"`
- } `json:"coverImage"`
+ // ID is the id of the manga on Anilist.
+ ID int `json:"id" jsonschema:"description=ID of the manga on Anilist."`
+ // Description is the description of the manga in html format.
+ Description string `json:"description" jsonschema:"description=Description of the manga in html format."`
+ // CoverImage is the cover image of the manga.
+ CoverImage struct {
+ // ExtraLarge is the url of the extra large cover image.
+ // If the image is not available, large will be used instead.
+ ExtraLarge string `json:"extraLarge" jsonschema:"description=URL of the extra large cover image. If the image is not available, large will be used instead."`
+ // Large is the url of the large cover image.
+ Large string `json:"large" jsonschema:"description=URL of the large cover image."`
+ // Medium is the url of the medium cover image.
+ Medium string `json:"medium" jsonschema:"description=URL of the medium cover image."`
+ // Color is the average color of the cover image.
+ Color string `json:"color" jsonschema:"description=Average color of the cover image."`
+ } `json:"coverImage" jsonschema:"description=Cover image of the manga."`
+ // BannerImage of the media
+ BannerImage string `json:"bannerImage" jsonschema:"description=Banner image of the manga."`
+ // Tags are the tags of the manga.
Tags []struct {
- Name string `json:"name"`
+ // Name of the tag.
+ Name string `json:"name" jsonschema:"description=Name of the tag."`
+ // Description of the tag.
+ Description string `json:"description" jsonschema:"description=Description of the tag."`
+ // Rank of the tag. How relevant it is to the manga from 1 to 100.
+ Rank int `json:"rank" jsonschema:"description=Rank of the tag. How relevant it is to the manga from 1 to 100."`
} `json:"tags"`
- Genres []string `json:"genres"`
+ // Genres of the manga
+ Genres []string `json:"genres" jsonschema:"description=Genres of the manga."`
+ // Characters are the primary characters of the manga.
Characters struct {
Nodes []struct {
Name struct {
- Full string `json:"full"`
+ // Full is the full name of the character.
+ Full string `json:"full" jsonschema:"description=Full name of the character."`
+ // Native is the native name of the character. Usually in kanji.
+ Native string `json:"native" jsonschema:"description=Native name of the character. Usually in kanji."`
} `json:"name"`
} `json:"nodes"`
} `json:"characters"`
- StartDate date `json:"startDate"`
- EndDate date `json:"endDate"`
- Synonyms []string `json:"synonyms"`
- Status string `json:"status"`
- IDMal int `json:"idMal"`
- SiteURL string `json:"siteUrl"`
- Country string `json:"countryOfOrigin"`
- External []struct {
- URL string `json:"url"`
- } `json:"externalLinks"`
+ Staff struct {
+ Edges []struct {
+ Role string `json:"role" jsonschema:"description=Role of the staff member."`
+ Node struct {
+ Name struct {
+ Full string `json:"full" jsonschema:"description=Full name of the staff member."`
+ } `json:"name"`
+ } `json:"node"`
+ } `json:"edges"`
+ } `json:"staff"`
+ // StartDate is the date the manga started publishing.
+ StartDate date `json:"startDate" jsonschema:"description=Date the manga started publishing."`
+ // EndDate is the date the manga ended publishing.
+ EndDate date `json:"endDate" jsonschema:"description=Date the manga ended publishing."`
+ // Synonyms are the synonyms of the manga (Alternative titles).
+ Synonyms []string `json:"synonyms" jsonschema:"description=Synonyms of the manga (Alternative titles)."`
+ // Status is the status of the manga. (FINISHED, RELEASING, NOT_YET_RELEASED, CANCELLED)
+ Status string `json:"status" jsonschema:"enum=FINISHED,enum=RELEASING,enum=NOT_YET_RELEASED,enum=CANCELLED,enum=HIATUS"`
+ // IDMal is the id of the manga on MyAnimeList.
+ IDMal int `json:"idMal" jsonschema:"description=ID of the manga on MyAnimeList."`
+ // Chapters is the amount of chapters the manga has when complete.
+ Chapters int `json:"chapters" jsonschema:"description=Amount of chapters the manga has when complete."`
+ // SiteURL is the url of the manga on Anilist.
+ SiteURL string `json:"siteUrl" jsonschema:"description=URL of the manga on Anilist."`
+ // Country of origin of the manga.
+ Country string `json:"countryOfOrigin" jsonschema:"description=Country of origin of the manga."`
+ // External urls related to the manga.
+ External []struct {
+ URL string `json:"url" jsonschema:"description=URL of the external link."`
+ } `json:"externalLinks" jsonschema:"description=External links related to the manga."`
}
+// Name returns the name of the manga. The English title is used if available, otherwise the Romaji title.
func (m *Manga) Name() string {
if m.Title.English == "" {
return m.Title.Romaji
diff --git a/anilist/query.go b/anilist/query.go
index ccffb295..7ea12724 100644
--- a/anilist/query.go
+++ b/anilist/query.go
@@ -2,6 +2,7 @@ package anilist
import "fmt"
+// mangaSubquery common manga query used for getting manga by id or searching it by name
var mangaSubquery = `
id
idMal
@@ -13,15 +14,23 @@ title {
description(asHtml: false)
tags {
name
+ description
+ rank
}
genres
coverImage {
extraLarge
+ large
+ medium
+ color
}
+bannerImage
characters (page: 1, perPage: 10, role: MAIN) {
nodes {
+ id
name {
full
+ native
}
}
}
@@ -35,15 +44,27 @@ endDate {
month
day
}
+staff {
+ edges {
+ role
+ node {
+ name {
+ full
+ }
+ }
+ }
+}
status
synonyms
siteUrl
+chapters
countryOfOrigin
externalLinks {
url
}
`
+// searchByNameQuery query used for searching manga by name
var searchByNameQuery = fmt.Sprintf(`
query ($query: String) {
Page (page: 1, perPage: 30) {
@@ -54,6 +75,7 @@ query ($query: String) {
}
`, mangaSubquery)
+// searchByIDQuery query used for searching manga by id
var searchByIDQuery = fmt.Sprintf(`
query ($id: Int) {
Media (id: $id, type: MANGA) {
diff --git a/anilist/search.go b/anilist/search.go
index 0c5e4a2c..d855e16d 100644
--- a/anilist/search.go
+++ b/anilist/search.go
@@ -6,6 +6,8 @@ import (
"fmt"
"github.com/metafates/mangal/log"
"github.com/metafates/mangal/network"
+ "github.com/metafates/mangal/query"
+ "github.com/samber/lo"
"net/http"
"strconv"
)
@@ -24,9 +26,13 @@ type searchByIDResponse struct {
} `json:"data"`
}
-var searchCache = make(map[string][]*Manga)
-
+// GetByID returns the manga with the given id.
+// If the manga is not found, it returns nil.
func GetByID(id int) (*Manga, error) {
+ if manga := idCacher.Get(id); manga.IsPresent() {
+ return manga.MustGet(), nil
+ }
+
// prepare body
log.Infof("Searching anilist for manga with id: %d", id)
body := map[string]interface{}{
@@ -75,19 +81,38 @@ func GetByID(id int) (*Manga, error) {
manga := response.Data.Media
log.Infof("Got response from Anilist, found manga with id %d", manga.ID)
+ _ = idCacher.Set(id, manga)
return manga, nil
}
+// SearchByName returns a list of mangas that match the given name.
+// Failed searches are cached for a minute to avoid repeating failing requests.
func SearchByName(name string) ([]*Manga, error) {
- if mangas, ok := searchCache[name]; ok {
+ name = normalizedName(name)
+ _ = query.Remember(name, 1)
+
+ if _, failed := failCacher.Get(name).Get(); failed {
+ return nil, fmt.Errorf("failed to search for %s", name)
+ }
+
+ if ids, ok := searchCacher.Get(name).Get(); ok {
+ mangas := lo.FilterMap(ids, func(item, _ int) (*Manga, bool) {
+ return idCacher.Get(item).Get()
+ })
+
+ if len(mangas) == 0 {
+ _ = searchCacher.Delete(name)
+ return SearchByName(name)
+ }
+
return mangas, nil
}
// prepare body
- log.Info("Searching anilist for manga: " + name)
- body := map[string]interface{}{
+ log.Infof("Searching anilist for manga %s", name)
+ body := map[string]any{
"query": searchByNameQuery,
- "variables": map[string]interface{}{
+ "variables": map[string]any{
"query": name,
},
}
@@ -113,11 +138,13 @@ func SearchByName(name string) ([]*Manga, error) {
if err != nil {
log.Error(err)
+ _ = failCacher.Set(name, true)
return nil, err
}
if resp.StatusCode != http.StatusOK {
log.Error("Anilist returned status code " + strconv.Itoa(resp.StatusCode))
+ _ = failCacher.Set(name, true)
return nil, fmt.Errorf("invalid response code %d", resp.StatusCode)
}
@@ -130,7 +157,12 @@ func SearchByName(name string) ([]*Manga, error) {
}
mangas := response.Data.Page.Media
- log.Info("Got response from Anilist, found " + strconv.Itoa(len(mangas)) + " results")
- searchCache[name] = mangas
+ log.Infof("Got response from Anilist, found %d results", len(mangas))
+ ids := make([]int, len(mangas))
+ for i, manga := range mangas {
+ ids[i] = manga.ID
+ _ = idCacher.Set(manga.ID, manga)
+ }
+ _ = searchCacher.Set(name, ids)
return mangas, nil
}
diff --git a/assets/anilist.schema.json b/assets/anilist.schema.json
new file mode 100644
index 00000000..45fd15c8
--- /dev/null
+++ b/assets/anilist.schema.json
@@ -0,0 +1,297 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$defs": {
+ "anilist.Manga": {
+ "properties": {
+ "title": {
+ "properties": {
+ "romaji": {
+ "type": "string",
+ "description": "Romanized title of the manga."
+ },
+ "english": {
+ "type": "string",
+ "description": "English title of the manga."
+ },
+ "native": {
+ "type": "string",
+ "description": "Native title of the manga. Usually in kanji."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "romaji",
+ "english",
+ "native"
+ ]
+ },
+ "id": {
+ "type": "integer",
+ "description": "ID of the manga on Anilist."
+ },
+ "description": {
+ "type": "string",
+ "description": "Description of the manga in html format."
+ },
+ "coverImage": {
+ "properties": {
+ "extraLarge": {
+ "type": "string",
+ "description": "URL of the extra large cover image. If the image is not available"
+ },
+ "large": {
+ "type": "string",
+ "description": "URL of the large cover image."
+ },
+ "medium": {
+ "type": "string",
+ "description": "URL of the medium cover image."
+ },
+ "color": {
+ "type": "string",
+ "description": "Average color of the cover image."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "extraLarge",
+ "large",
+ "medium",
+ "color"
+ ],
+ "description": "Cover image of the manga."
+ },
+ "bannerImage": {
+ "type": "string",
+ "description": "Banner image of the manga."
+ },
+ "tags": {
+ "items": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Name of the tag."
+ },
+ "description": {
+ "type": "string",
+ "description": "Description of the tag."
+ },
+ "rank": {
+ "type": "integer",
+ "description": "Rank of the tag. How relevant it is to the manga from 1 to 100."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "name",
+ "description",
+ "rank"
+ ]
+ },
+ "type": "array"
+ },
+ "genres": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Genres of the manga."
+ },
+ "characters": {
+ "properties": {
+ "nodes": {
+ "items": {
+ "properties": {
+ "name": {
+ "properties": {
+ "full": {
+ "type": "string",
+ "description": "Full name of the character."
+ },
+ "native": {
+ "type": "string",
+ "description": "Native name of the character. Usually in kanji."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "full",
+ "native"
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "name"
+ ]
+ },
+ "type": "array"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "nodes"
+ ]
+ },
+ "staff": {
+ "properties": {
+ "edges": {
+ "items": {
+ "properties": {
+ "role": {
+ "type": "string",
+ "description": "Role of the staff member."
+ },
+ "node": {
+ "properties": {
+ "name": {
+ "properties": {
+ "full": {
+ "type": "string",
+ "description": "Full name of the staff member."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "full"
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "name"
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "role",
+ "node"
+ ]
+ },
+ "type": "array"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "edges"
+ ]
+ },
+ "startDate": {
+ "$ref": "#/$defs/anilist.date",
+ "description": "Date the manga started publishing."
+ },
+ "endDate": {
+ "$ref": "#/$defs/anilist.date",
+ "description": "Date the manga ended publishing."
+ },
+ "synonyms": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Synonyms of the manga (Alternative titles)."
+ },
+ "status": {
+ "type": "string",
+ "enum": [
+ "FINISHED",
+ "RELEASING",
+ "NOT_YET_RELEASED",
+ "CANCELLED",
+ "HIATUS"
+ ]
+ },
+ "idMal": {
+ "type": "integer",
+ "description": "ID of the manga on MyAnimeList."
+ },
+ "chapters": {
+ "type": "integer",
+ "description": "Amount of chapters the manga has when complete."
+ },
+ "siteUrl": {
+ "type": "string",
+ "description": "URL of the manga on Anilist."
+ },
+ "countryOfOrigin": {
+ "type": "string",
+ "description": "Country of origin of the manga."
+ },
+ "externalLinks": {
+ "items": {
+ "properties": {
+ "url": {
+ "type": "string",
+ "description": "URL of the external link."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "url"
+ ]
+ },
+ "type": "array",
+ "description": "External links related to the manga."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "title",
+ "id",
+ "description",
+ "coverImage",
+ "bannerImage",
+ "tags",
+ "genres",
+ "characters",
+ "staff",
+ "startDate",
+ "endDate",
+ "synonyms",
+ "status",
+ "idMal",
+ "chapters",
+ "siteUrl",
+ "countryOfOrigin",
+ "externalLinks"
+ ]
+ },
+ "anilist.date": {
+ "properties": {
+ "year": {
+ "type": "integer"
+ },
+ "month": {
+ "type": "integer"
+ },
+ "day": {
+ "type": "integer"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "year",
+ "month",
+ "day"
+ ]
+ }
+ },
+ "items": {
+ "$ref": "#/$defs/anilist.Manga"
+ },
+ "type": "array"
+}
diff --git a/assets/inline.gif b/assets/inline.gif
new file mode 100644
index 00000000..2fbd60bd
Binary files /dev/null and b/assets/inline.gif differ
diff --git a/assets/inline.schema.json b/assets/inline.schema.json
new file mode 100644
index 00000000..1b1c8463
--- /dev/null
+++ b/assets/inline.schema.json
@@ -0,0 +1,615 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$ref": "#/$defs/inline.Output",
+ "$defs": {
+ "anilist.Manga": {
+ "properties": {
+ "title": {
+ "properties": {
+ "romaji": {
+ "type": "string",
+ "description": "Romanized title of the manga."
+ },
+ "english": {
+ "type": "string",
+ "description": "English title of the manga."
+ },
+ "native": {
+ "type": "string",
+ "description": "Native title of the manga. Usually in kanji."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "romaji",
+ "english",
+ "native"
+ ]
+ },
+ "id": {
+ "type": "integer",
+ "description": "ID of the manga on Anilist."
+ },
+ "description": {
+ "type": "string",
+ "description": "Description of the manga in html format."
+ },
+ "coverImage": {
+ "properties": {
+ "extraLarge": {
+ "type": "string",
+ "description": "URL of the extra large cover image. If the image is not available"
+ },
+ "large": {
+ "type": "string",
+ "description": "URL of the large cover image."
+ },
+ "medium": {
+ "type": "string",
+ "description": "URL of the medium cover image."
+ },
+ "color": {
+ "type": "string",
+ "description": "Average color of the cover image."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "extraLarge",
+ "large",
+ "medium",
+ "color"
+ ],
+ "description": "Cover image of the manga."
+ },
+ "bannerImage": {
+ "type": "string",
+ "description": "Banner image of the manga."
+ },
+ "tags": {
+ "items": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Name of the tag."
+ },
+ "description": {
+ "type": "string",
+ "description": "Description of the tag."
+ },
+ "rank": {
+ "type": "integer",
+ "description": "Rank of the tag. How relevant it is to the manga from 1 to 100."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "name",
+ "description",
+ "rank"
+ ]
+ },
+ "type": "array"
+ },
+ "genres": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Genres of the manga."
+ },
+ "characters": {
+ "properties": {
+ "nodes": {
+ "items": {
+ "properties": {
+ "name": {
+ "properties": {
+ "full": {
+ "type": "string",
+ "description": "Full name of the character."
+ },
+ "native": {
+ "type": "string",
+ "description": "Native name of the character. Usually in kanji."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "full",
+ "native"
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "name"
+ ]
+ },
+ "type": "array"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "nodes"
+ ]
+ },
+ "staff": {
+ "properties": {
+ "edges": {
+ "items": {
+ "properties": {
+ "role": {
+ "type": "string",
+ "description": "Role of the staff member."
+ },
+ "node": {
+ "properties": {
+ "name": {
+ "properties": {
+ "full": {
+ "type": "string",
+ "description": "Full name of the staff member."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "full"
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "name"
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "role",
+ "node"
+ ]
+ },
+ "type": "array"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "edges"
+ ]
+ },
+ "startDate": {
+ "$ref": "#/$defs/anilist.date",
+ "description": "Date the manga started publishing."
+ },
+ "endDate": {
+ "$ref": "#/$defs/anilist.date",
+ "description": "Date the manga ended publishing."
+ },
+ "synonyms": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Synonyms of the manga (Alternative titles)."
+ },
+ "status": {
+ "type": "string",
+ "enum": [
+ "FINISHED",
+ "RELEASING",
+ "NOT_YET_RELEASED",
+ "CANCELLED",
+ "HIATUS"
+ ]
+ },
+ "idMal": {
+ "type": "integer",
+ "description": "ID of the manga on MyAnimeList."
+ },
+ "chapters": {
+ "type": "integer",
+ "description": "Amount of chapters the manga has when complete."
+ },
+ "siteUrl": {
+ "type": "string",
+ "description": "URL of the manga on Anilist."
+ },
+ "countryOfOrigin": {
+ "type": "string",
+ "description": "Country of origin of the manga."
+ },
+ "externalLinks": {
+ "items": {
+ "properties": {
+ "url": {
+ "type": "string",
+ "description": "URL of the external link."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "url"
+ ]
+ },
+ "type": "array",
+ "description": "External links related to the manga."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "title",
+ "id",
+ "description",
+ "coverImage",
+ "bannerImage",
+ "tags",
+ "genres",
+ "characters",
+ "staff",
+ "startDate",
+ "endDate",
+ "synonyms",
+ "status",
+ "idMal",
+ "chapters",
+ "siteUrl",
+ "countryOfOrigin",
+ "externalLinks"
+ ]
+ },
+ "anilist.date": {
+ "properties": {
+ "year": {
+ "type": "integer"
+ },
+ "month": {
+ "type": "integer"
+ },
+ "day": {
+ "type": "integer"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "year",
+ "month",
+ "day"
+ ]
+ },
+ "inline.Manga": {
+ "properties": {
+ "source": {
+ "type": "string",
+ "description": "Source that the manga belongs to."
+ },
+ "mangal": {
+ "$ref": "#/$defs/source.Manga",
+ "description": "Mangal variant of the manga"
+ },
+ "anilist": {
+ "$ref": "#/$defs/anilist.Manga",
+ "description": "Anilist is the closest anilist match to mangal manga"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "source",
+ "mangal",
+ "anilist"
+ ]
+ },
+ "inline.Output": {
+ "properties": {
+ "query": {
+ "type": "string",
+ "description": "Query that was used to search for the manga."
+ },
+ "result": {
+ "items": {
+ "$ref": "#/$defs/inline.Manga"
+ },
+ "type": "array",
+ "description": "Result of the search."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "query",
+ "result"
+ ]
+ },
+ "source.Chapter": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Name of the chapter"
+ },
+ "url": {
+ "type": "string",
+ "description": "URL of the chapter"
+ },
+ "index": {
+ "type": "integer",
+ "description": "Index of the chapter in the manga"
+ },
+ "id": {
+ "type": "string",
+ "description": "ID of the chapter in the source"
+ },
+ "volume": {
+ "type": "string",
+ "description": "Volume which the chapter belongs to"
+ },
+ "pages": {
+ "items": {
+ "$ref": "#/$defs/source.Page"
+ },
+ "type": "array",
+ "description": "Pages of the chapter"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "name",
+ "url",
+ "index",
+ "id",
+ "volume",
+ "pages"
+ ]
+ },
+ "source.Manga": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Name of the manga"
+ },
+ "url": {
+ "type": "string",
+ "description": "URL of the manga"
+ },
+ "index": {
+ "type": "integer",
+ "description": "Index of the manga in the source"
+ },
+ "id": {
+ "type": "string",
+ "description": "ID of manga in the source"
+ },
+ "chapters": {
+ "items": {
+ "$ref": "#/$defs/source.Chapter"
+ },
+ "type": "array",
+ "description": "Chapters of the manga"
+ },
+ "metadata": {
+ "properties": {
+ "genres": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Genres of the manga"
+ },
+ "summary": {
+ "type": "string",
+ "description": "Summary in the plain text with newlines"
+ },
+ "staff": {
+ "properties": {
+ "story": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Story authors"
+ },
+ "art": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Art authors"
+ },
+ "translation": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Translation group"
+ },
+ "lettering": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Lettering group"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "story",
+ "art",
+ "translation",
+ "lettering"
+ ],
+ "description": "Staff that worked on the manga"
+ },
+ "cover": {
+ "properties": {
+ "extraLarge": {
+ "type": "string",
+ "description": "ExtraLarge is the largest cover image. If not available"
+ },
+ "large": {
+ "type": "string",
+ "description": "Large is the second-largest cover image."
+ },
+ "medium": {
+ "type": "string",
+ "description": "Medium cover image. The smallest one."
+ },
+ "color": {
+ "type": "string",
+ "description": "Color average color of the cover image."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "extraLarge",
+ "large",
+ "medium",
+ "color"
+ ],
+ "description": "Cover images of the manga"
+ },
+ "bannerImage": {
+ "type": "string",
+ "description": "BannerImage is the banner image of the manga."
+ },
+ "tags": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Tags of the manga"
+ },
+ "characters": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Characters of the manga"
+ },
+ "status": {
+ "type": "string",
+ "enum": [
+ "FINISHED",
+ "RELEASING",
+ "NOT_YET_RELEASED",
+ "CANCELLED",
+ "HIATUS"
+ ]
+ },
+ "startDate": {
+ "$ref": "#/$defs/source.date",
+ "description": "StartDate is the date when the manga started."
+ },
+ "endDate": {
+ "$ref": "#/$defs/source.date",
+ "description": "EndDate is the date when the manga ended."
+ },
+ "synonyms": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "Synonyms other names of the manga."
+ },
+ "chapters": {
+ "type": "integer",
+ "description": "The amount of chapters the manga will have when completed."
+ },
+ "urls": {
+ "items": {
+ "type": "string"
+ },
+ "type": "array",
+ "description": "External URLs of the manga."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "genres",
+ "summary",
+ "staff",
+ "cover",
+ "bannerImage",
+ "tags",
+ "characters",
+ "status",
+ "startDate",
+ "endDate",
+ "synonyms",
+ "chapters",
+ "urls"
+ ]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "name",
+ "url",
+ "index",
+ "id",
+ "chapters",
+ "metadata"
+ ]
+ },
+ "source.Page": {
+ "properties": {
+ "url": {
+ "type": "string",
+ "description": "URL of the page. Used to download the image."
+ },
+ "index": {
+ "type": "integer",
+ "description": "Index of the page in the chapter."
+ },
+ "extension": {
+ "type": "string",
+ "description": "Extension of the page image."
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "url",
+ "index",
+ "extension"
+ ]
+ },
+ "source.date": {
+ "properties": {
+ "year": {
+ "type": "integer"
+ },
+ "month": {
+ "type": "integer"
+ },
+ "day": {
+ "type": "integer"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": [
+ "year",
+ "month",
+ "day"
+ ]
+ }
+ }
+}
diff --git a/assets/inline.tape b/assets/inline.tape
new file mode 100644
index 00000000..c76ffeca
--- /dev/null
+++ b/assets/inline.tape
@@ -0,0 +1,28 @@
+Require "mangal"
+Require "bat"
+Require "jq"
+Output assets/inline.gif
+
+
+Set FontSize 21
+Set Width 1200
+Set Height 1000
+
+Type@20ms "# see 'mangal inline schema' to view output schema" Enter
+Type@70ms "mangal inline -S ComicK -q 'chainsaw' --manga first --json --include-anilist-manga | jq . | bat -l json"
+Sleep 1s
+Enter
+Sleep 3s
+Type@200ms "ddddddd"
+Sleep 2
+Type "q"
+
+Enter
+
+Type@20ms "# see 'mangal inline schema --anilist' to view output schema" Enter
+Type@70ms "mangal inline anilist search --name 'chainsaw man' | jq . | bat -l json"
+Sleep 1s
+Enter
+Sleep 3s
+Type@200ms "ddddddd"
+Sleep 2
diff --git a/assets/tui.gif b/assets/tui.gif
new file mode 100644
index 00000000..438d5006
Binary files /dev/null and b/assets/tui.gif differ
diff --git a/assets/tui.tape b/assets/tui.tape
new file mode 100644
index 00000000..955584c0
--- /dev/null
+++ b/assets/tui.tape
@@ -0,0 +1,70 @@
+Require "mangal"
+Output assets/tui.gif
+
+Set FontSize 21
+Set Width 1200
+Set Height 1000
+
+Hide
+Type "mangal inline anilist set --name "Chainsaw Man" --id 138829" Enter
+Type "rm -rf $(mangal where --downloads)/Chainsaw_Man" Enter
+Type "clear" Enter
+Show
+
+Type@200ms "mangal" Sleep 200ms Enter
+Sleep 3s
+
+# move cursor up and down
+# select the first source
+Down@400ms 2 Up@400ms 2 Enter
+Sleep 2s
+
+# start searching for "chain"
+Type@200ms "chain"
+Sleep 1s
+
+# Accept auto-complete
+Tab Sleep 1s Enter Sleep 4s
+
+# Open filter
+Type "/" Sleep 1s
+
+# Filter out mangas that has "man" in their title
+# Choose the first one
+Type@200ms "man" Enter
+Sleep 1s
+
+# Open chapters
+Enter
+
+Sleep 5s
+
+# Open anilist mangas that corresponds to the manga
+Type "a"
+Sleep 4s
+# Move cursor up
+Type "k"
+Sleep 2s
+# Select it to bind with
+Enter
+Sleep 3s
+
+# Mark chapter for download
+Type " "
+Sleep 1s
+# Move cursor down and mark another chapter
+Type "j"
+Sleep 1s
+Type " "
+Sleep 1s
+
+# Download
+Sleep 2s
+Enter
+
+# Confirm download
+Sleep 3s
+Enter
+
+# Wait for download to finish
+Sleep 20s
diff --git a/cache/cache.go b/cache/cache.go
new file mode 100644
index 00000000..98efd4d6
--- /dev/null
+++ b/cache/cache.go
@@ -0,0 +1,45 @@
+package cache
+
+import (
+ "github.com/metafates/mangal/util"
+ "github.com/samber/mo"
+ "path/filepath"
+ "sync"
+ "time"
+)
+
+// internalData is a struct that contains the data that is stored in the cache file with time of last update.
+// Used to expire the cache.
+type internalData[T any] struct {
+ Internal mo.Option[T] `json:"internal"`
+ Time mo.Option[time.Time] `json:"time"`
+}
+
+// Cache is a generic thread-safe cache that can be used to cache any type of data.
+// It is used to cache data that is expensive to fetch, such as API responses.
+// Cached data is stored in a file, and is automatically expired after a certain amount of time
+// (if an expiration time is specified).
+type Cache[T any] struct {
+ data *internalData[T]
+ name string
+ path string
+ expireEvery mo.Option[time.Duration]
+ initialized bool
+ mutex *sync.RWMutex
+}
+
+// New creates a new cache backed by a file at the given path.
+// The cache name (used in log messages) is derived from the file name.
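+//
+// Illustrative usage (this mirrors how the anilist package constructs its
+// cachers; the path and expiry here are just an example):
+//
+//	c := cache.New[[]int](
+//		filepath.Join(where.Cache(), "example_cache.json"),
+//		&cache.Options{ExpireEvery: mo.Some(time.Hour)},
+//	)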
+func New[T any](path string, options *Options) *Cache[T] {
+ name := util.FileStem(filepath.Base(path))
+ return &Cache[T]{
+ data: &internalData[T]{
+ Internal: mo.None[T](),
+ },
+ expireEvery: options.ExpireEvery,
+ name: name,
+ path: path,
+ initialized: false,
+ mutex: &sync.RWMutex{},
+ }
+}
diff --git a/cache/get.go b/cache/get.go
new file mode 100644
index 00000000..e69e17e5
--- /dev/null
+++ b/cache/get.go
@@ -0,0 +1,13 @@
+package cache
+
+import "github.com/samber/mo"
+
+// Get returns the cached data if it exists and is not expired, otherwise none.
+func (c *Cache[T]) Get() mo.Option[T] {
+ c.mutex.RLock()
+ defer c.mutex.RUnlock()
+
+ _ = c.init()
+
+ return c.data.Internal
+}
diff --git a/cache/init.go b/cache/init.go
new file mode 100644
index 00000000..b4951dbb
--- /dev/null
+++ b/cache/init.go
@@ -0,0 +1,67 @@
+package cache
+
+import (
+ "encoding/json"
+ "github.com/metafates/mangal/filesystem"
+ "github.com/metafates/mangal/log"
+ "github.com/metafates/mangal/util"
+ "github.com/samber/mo"
+ "io"
+ "os"
+ "time"
+)
+
+// init initializes the cache.
+func (c *Cache[T]) init() error {
+ if c.initialized {
+ return nil
+ }
+
+ c.initialized = true
+ log.Debugf("Initializing %s cacher", c.name)
+
+ log.Debugf("Opening cache file at %s", c.path)
+ file, err := filesystem.Api().OpenFile(c.path, os.O_RDONLY|os.O_CREATE, os.ModePerm)
+
+ if err != nil {
+ log.Warn(err)
+ return err
+ }
+
+ defer util.Ignore(file.Close)
+
+ contents, err := io.ReadAll(file)
+ if err != nil {
+ log.Warn(err)
+ return err
+ }
+
+ if len(contents) == 0 {
+ log.Debugf("%s cache file is empty, skipping unmarshal", c.name)
+ if c.expireEvery.IsPresent() {
+ c.data.Time = mo.Some(time.Now())
+ }
+ return nil
+ }
+
+ var unmarshalled internalData[T]
+ err = json.Unmarshal(contents, &unmarshalled)
+ if err != nil {
+ log.Warn(err)
+ return err
+ }
+
+ c.data = &unmarshalled
+
+ if c.expireEvery.IsPresent() &&
+ c.data.Time.IsPresent() &&
+ time.Since(c.data.Time.MustGet()) >= c.expireEvery.MustGet() {
+		log.Debugf("%s cache is expired, resetting cache", c.name)
+ c.data.Time = mo.Some(time.Now())
+ c.data.Internal = mo.None[T]()
+ return filesystem.Api().WriteFile(c.path, []byte{}, os.ModePerm)
+ }
+
+ log.Debugf("%s cache file unmarshalled successfully", c.name)
+ return nil
+}
diff --git a/cache/options.go b/cache/options.go
new file mode 100644
index 00000000..4365985b
--- /dev/null
+++ b/cache/options.go
@@ -0,0 +1,13 @@
+package cache
+
+import (
+ "github.com/samber/mo"
+ "time"
+)
+
+// Options is a struct that contains options for the cache.
+type Options struct {
+ // ExpireEvery is the duration after which the cache will be expired.
+ // If the value is not specified (mo.None), the cache will never expire.
+ ExpireEvery mo.Option[time.Duration]
+}
diff --git a/cache/set.go b/cache/set.go
new file mode 100644
index 00000000..ff5dd779
--- /dev/null
+++ b/cache/set.go
@@ -0,0 +1,33 @@
+package cache
+
+import (
+ "encoding/json"
+ "github.com/metafates/mangal/filesystem"
+ "github.com/metafates/mangal/log"
+ "github.com/samber/mo"
+ "os"
+)
+
+// Set sets the cache data.
+// May return error if writing to file failed
+func (c *Cache[T]) Set(data T) error {
+ c.mutex.Lock()
+ defer c.mutex.Unlock()
+
+ _ = c.init()
+
+ c.data.Internal = mo.Some(data)
+ marshalled, err := json.Marshal(c.data)
+ if err != nil {
+ log.Warn(err)
+ return err
+ }
+
+ log.Debugf("Writing %s cache file to %s", c.name, c.path)
+ err = filesystem.Api().WriteFile(c.path, marshalled, os.ModePerm)
+ if err != nil {
+ log.Warn(err)
+ }
+
+ return err
+}
diff --git a/cmd/clear.go b/cmd/clear.go
index f5567c37..ea6e3382 100644
--- a/cmd/clear.go
+++ b/cmd/clear.go
@@ -7,13 +7,35 @@ import (
"github.com/metafates/mangal/util"
"github.com/metafates/mangal/where"
"github.com/samber/lo"
+ "github.com/samber/mo"
"github.com/spf13/cobra"
)
+type clearTarget struct {
+ name string
+ argLong string
+ argShort mo.Option[string]
+ location func() string
+}
+
+var clearTargets = []clearTarget{
+ {"cache directory", "cache", mo.Some("c"), where.Cache},
+ {"history file", "history", mo.Some("s"), where.History},
+ {"anilist binds", "anilist", mo.Some("a"), where.AnilistBinds},
+ {"queries history", "queries", mo.Some("q"), where.Queries},
+}
+
func init() {
rootCmd.AddCommand(clearCmd)
- clearCmd.Flags().Bool("cache", false, "Clear cache files")
- clearCmd.Flags().Bool("history", false, "Clear history")
+
+ for _, target := range clearTargets {
+ help := fmt.Sprintf("clear %s", target.name)
+ if target.argShort.IsPresent() {
+ clearCmd.Flags().BoolP(target.argLong, target.argShort.MustGet(), false, help)
+ } else {
+ clearCmd.Flags().Bool(target.argLong, false, help)
+ }
+ }
}
var clearCmd = &cobra.Command{
@@ -26,16 +48,14 @@ var clearCmd = &cobra.Command{
return lo.Must(cmd.Flags().GetBool(what))
}
- for name, clear := range map[string]func(){
- "cache": clearCache,
- "history": clearHistory,
- } {
- if doClear(name) {
+ for _, target := range clearTargets {
+ if doClear(target.argLong) {
anyCleared = true
- e := util.PrintErasable(fmt.Sprintf("%s Clearing %s...", icon.Get(icon.Progress), util.Capitalize(name)))
- clear()
+ e := util.PrintErasable(fmt.Sprintf("%s Clearing %s...", icon.Get(icon.Progress), util.Capitalize(target.name)))
+ _ = util.Delete(target.location())
e()
- fmt.Printf("%s %s cleared\n", icon.Get(icon.Success), util.Capitalize(name))
+ fmt.Printf("%s %s cleared\n", icon.Get(icon.Success), util.Capitalize(target.name))
+ handleErr(filesystem.Api().RemoveAll(target.location()))
}
}
@@ -44,18 +64,3 @@ var clearCmd = &cobra.Command{
}
},
}
-
-func clearCache() {
- path := where.Cache()
- handleErr(filesystem.Api().RemoveAll(path))
-}
-
-func clearTemp() {
- path := where.Temp()
- handleErr(filesystem.Api().RemoveAll(path))
-}
-
-func clearHistory() {
- path := where.History()
- handleErr(filesystem.Api().Remove(path))
-}
diff --git a/cmd/config.go b/cmd/config.go
index 04206c5a..9d09becb 100644
--- a/cmd/config.go
+++ b/cmd/config.go
@@ -3,6 +3,7 @@ package cmd
import (
"errors"
"fmt"
+ "github.com/metafates/mangal/color"
"path/filepath"
"sort"
"strconv"
@@ -25,8 +26,8 @@ func errUnknownKey(key string) error {
})
msg := fmt.Sprintf(
"unknown key %s, did you mean %s?",
- style.Red(key),
- style.Yellow(closest),
+ style.Fg(color.Red)(key),
+ style.Fg(color.Yellow)(closest),
)
return errors.New(msg)
@@ -96,7 +97,7 @@ func init() {
lo.Must0(configSetCmd.MarkFlagRequired("key"))
_ = configSetCmd.RegisterFlagCompletionFunc("key", completionConfigKeys)
- configSetCmd.Flags().StringP("value", "v", "", "The value to set")
+ configSetCmd.Flags().StringSliceP("value", "v", []string{}, "The value to set")
lo.Must0(configSetCmd.MarkFlagRequired("value"))
// deprecated flags for backwards compatibility
@@ -110,7 +111,7 @@ var configSetCmd = &cobra.Command{
Run: func(cmd *cobra.Command, args []string) {
var (
key = lo.Must(cmd.Flags().GetString("key"))
- value = lo.Must(cmd.Flags().GetString("value"))
+ value = lo.Must(cmd.Flags().GetStringSlice("value"))
)
if _, ok := config.Default[key]; !ok {
@@ -122,19 +123,21 @@ var configSetCmd = &cobra.Command{
case string:
v = value
case int:
- parsedInt, err := strconv.ParseInt(value, 10, 64)
+ parsedInt, err := strconv.ParseInt(value[0], 10, 64)
if err != nil {
handleErr(fmt.Errorf("invalid integer value: %s", value))
}
v = int(parsedInt)
case bool:
- parsedBool, err := strconv.ParseBool(value)
+ parsedBool, err := strconv.ParseBool(value[0])
if err != nil {
handleErr(fmt.Errorf("invalid boolean value: %s", value))
}
v = parsedBool
+ case []string:
+ v = value
}
viper.Set(key, v)
@@ -147,9 +150,9 @@ var configSetCmd = &cobra.Command{
fmt.Printf(
"%s set %s to %s\n",
- style.Green(icon.Get(icon.Success)),
- style.Magenta(key),
- style.Yellow(fmt.Sprintf("%v", v)),
+ style.Fg(color.Green)(icon.Get(icon.Success)),
+ style.Fg(color.Purple)(key),
+ style.Fg(color.Yellow)(fmt.Sprintf("%v", v)),
)
},
}
@@ -205,7 +208,7 @@ var configWriteCmd = &cobra.Command{
handleErr(viper.SafeWriteConfig())
fmt.Printf(
"%s wrote config to %s\n",
- style.Green(icon.Get(icon.Success)),
+ style.Fg(color.Green)(icon.Get(icon.Success)),
configFilePath,
)
},
@@ -232,7 +235,43 @@ var configDeleteCmd = &cobra.Command{
handleErr(err)
fmt.Printf(
"%s deleted config\n",
- style.Green(icon.Get(icon.Success)),
+ style.Fg(color.Green)(icon.Get(icon.Success)),
+ )
+ },
+}
+
+func init() {
+ configCmd.AddCommand(configResetCmd)
+
+ configResetCmd.Flags().StringP("key", "k", "", "The key to reset the value for")
+ _ = configResetCmd.RegisterFlagCompletionFunc("key", completionConfigKeys)
+}
+
+var configResetCmd = &cobra.Command{
+ Use: "reset",
+ Short: "Reset the config key to default",
+ Run: func(cmd *cobra.Command, args []string) {
+ var (
+ key = lo.Must(cmd.Flags().GetString("key"))
+ )
+
+ if _, ok := config.Default[key]; !ok {
+ handleErr(errUnknownKey(key))
+ }
+
+ viper.Set(key, config.Default[key].Value)
+ switch err := viper.WriteConfig(); err.(type) {
+ case viper.ConfigFileNotFoundError:
+ handleErr(viper.SafeWriteConfig())
+ default:
+ handleErr(err)
+ }
+
+ fmt.Printf(
+ "%s reset %s to default value %s\n",
+ style.Fg(color.Green)(icon.Get(icon.Success)),
+ style.Fg(color.Purple)(key),
+ style.Fg(color.Yellow)(fmt.Sprintf("%v", config.Default[key].Value)),
)
},
}
diff --git a/cmd/env.go b/cmd/env.go
index 6879ca4d..5795601f 100644
--- a/cmd/env.go
+++ b/cmd/env.go
@@ -1,6 +1,7 @@
package cmd
import (
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/config"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/style"
@@ -14,7 +15,10 @@ import (
func init() {
rootCmd.AddCommand(envCmd)
- envCmd.Flags().BoolP("filter", "f", false, "filter out variables that are not set")
+ envCmd.Flags().BoolP("set-only", "s", false, "only show variables that are set")
+ envCmd.Flags().BoolP("unset-only", "u", false, "only show variables that are unset")
+
+ envCmd.MarkFlagsMutuallyExclusive("set-only", "unset-only")
}
var envCmd = &cobra.Command{
@@ -22,26 +26,35 @@ var envCmd = &cobra.Command{
Short: "Show available environment variables",
Long: `Show available environment variables.`,
Run: func(cmd *cobra.Command, args []string) {
- filter := lo.Must(cmd.Flags().GetBool("filter"))
+ setOnly := lo.Must(cmd.Flags().GetBool("set-only"))
+ unsetOnly := lo.Must(cmd.Flags().GetBool("unset-only"))
config.EnvExposed = append(config.EnvExposed, where.EnvConfigPath)
slices.Sort(config.EnvExposed)
for _, env := range config.EnvExposed {
- env = strings.ToUpper(constant.Mangal + "_" + config.EnvKeyReplacer.Replace(env))
+ if env != where.EnvConfigPath {
+ env = strings.ToUpper(constant.Mangal + "_" + config.EnvKeyReplacer.Replace(env))
+ }
value := os.Getenv(env)
present := value != ""
- if !present && filter {
- continue
+ if setOnly || unsetOnly {
+ if !present && setOnly {
+ continue
+ }
+
+ if present && unsetOnly {
+ continue
+ }
}
- cmd.Print(style.Combined(style.Bold, style.Magenta)(env))
+ cmd.Print(style.New().Bold(true).Foreground(color.Purple).Render(env))
cmd.Print("=")
if present {
- cmd.Println(style.Green(value))
+ cmd.Println(style.Fg(color.Green)(value))
} else {
- cmd.Println(style.Red("unset"))
+ cmd.Println(style.Fg(color.Red)("unset"))
}
}
},
diff --git a/cmd/gen.go b/cmd/gen.go
deleted file mode 100644
index 48ada7b7..00000000
--- a/cmd/gen.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package cmd
-
-import (
- "github.com/samber/lo"
- "github.com/spf13/cobra"
-)
-
-func init() {
- rootCmd.AddCommand(genCmd)
-
- genCmd.Flags().StringP("name", "n", "", "name of the source")
- genCmd.Flags().StringP("url", "u", "", "url of the website")
-
- lo.Must0(genCmd.MarkFlagRequired("name"))
- lo.Must0(genCmd.MarkFlagRequired("url"))
-}
-
-var genCmd = &cobra.Command{
- Use: "gen",
- Short: "Generate a new lua source",
- Long: `Generate a new lua source.`,
- Deprecated: "use `mangal sources gen` instead.",
- Run: sourcesGenCmd.Run,
-}
diff --git a/cmd/inline.go b/cmd/inline.go
index 72c11a02..b7173369 100644
--- a/cmd/inline.go
+++ b/cmd/inline.go
@@ -4,18 +4,25 @@ import (
"encoding/json"
"errors"
"fmt"
+ "github.com/invopop/jsonschema"
"github.com/metafates/mangal/anilist"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/converter"
"github.com/metafates/mangal/filesystem"
"github.com/metafates/mangal/inline"
"github.com/metafates/mangal/provider"
- "github.com/metafates/mangal/util"
+ "github.com/metafates/mangal/query"
+ "github.com/metafates/mangal/source"
+ "github.com/metafates/mangal/update"
"github.com/samber/lo"
+ "github.com/samber/mo"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"io"
"os"
+ "path/filepath"
+ "reflect"
+ "strings"
)
func init() {
@@ -28,12 +35,18 @@ func init() {
inlineCmd.Flags().BoolP("json", "j", false, "JSON output")
inlineCmd.Flags().BoolP("populate-pages", "p", false, "Populate chapters pages")
inlineCmd.Flags().BoolP("fetch-metadata", "f", false, "Populate manga metadata")
+ inlineCmd.Flags().BoolP("include-anilist-manga", "a", false, "Include anilist manga in the output")
lo.Must0(viper.BindPFlag(constant.MetadataFetchAnilist, inlineCmd.Flags().Lookup("fetch-metadata")))
inlineCmd.Flags().StringP("output", "o", "", "output file")
lo.Must0(inlineCmd.MarkFlagRequired("query"))
inlineCmd.MarkFlagsMutuallyExclusive("download", "json")
+ inlineCmd.MarkFlagsMutuallyExclusive("include-anilist-manga", "download")
+
+ inlineCmd.RegisterFlagCompletionFunc("query", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ return query.SuggestMany(toComplete), cobra.ShellCompDirectiveNoFileComp
+ })
}
var inlineCmd = &cobra.Command{
@@ -73,17 +86,26 @@ When using the json flag manga selector could be omitted. That way, it will sele
}
},
Run: func(cmd *cobra.Command, args []string) {
- sourceName := viper.GetString(constant.DownloaderDefaultSource)
- if sourceName == "" {
- handleErr(errors.New("source not set"))
- }
- p, ok := provider.Get(sourceName)
- if !ok {
- handleErr(fmt.Errorf("source not found: %s", sourceName))
- }
+ var (
+ sources []source.Source
+ err error
+ )
+
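+ // resolve every configured default source name into a Source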
+ for _, name := range viper.GetStringSlice(constant.DownloaderDefaultSources) {
+ if name == "" {
+ handleErr(errors.New("source not set"))
+ }
+
+ p, ok := provider.Get(name)
+ if !ok {
+ handleErr(fmt.Errorf("source not found: %s", name))
+ }
+
+ src, err := p.CreateSource()
+ handleErr(err)
- src, err := p.CreateSource()
- handleErr(err)
+ sources = append(sources, src)
+ }
output := lo.Must(cmd.Flags().GetString("output"))
var writer io.Writer
@@ -95,30 +117,31 @@ When using the json flag manga selector could be omitted. That way, it will sele
}
mangaFlag := lo.Must(cmd.Flags().GetString("manga"))
- mangaPicker := util.None[inline.MangaPicker]()
+ mangaPicker := mo.None[inline.MangaPicker]()
if mangaFlag != "" {
fn, err := inline.ParseMangaPicker(lo.Must(cmd.Flags().GetString("manga")))
handleErr(err)
- mangaPicker = util.Some(fn)
+ mangaPicker = mo.Some(fn)
}
chapterFlag := lo.Must(cmd.Flags().GetString("chapters"))
- chapterFilter := util.None[inline.ChaptersFilter]()
+ chapterFilter := mo.None[inline.ChaptersFilter]()
if chapterFlag != "" {
fn, err := inline.ParseChaptersFilter(chapterFlag)
handleErr(err)
- chapterFilter = util.Some(fn)
+ chapterFilter = mo.Some(fn)
}
options := &inline.Options{
- Source: src,
- Download: lo.Must(cmd.Flags().GetBool("download")),
- Json: lo.Must(cmd.Flags().GetBool("json")),
- Query: lo.Must(cmd.Flags().GetString("query")),
- PopulatePages: lo.Must(cmd.Flags().GetBool("populate-pages")),
- MangaPicker: mangaPicker,
- ChaptersFilter: chapterFilter,
- Out: writer,
+ Sources: sources,
+ Download: lo.Must(cmd.Flags().GetBool("download")),
+ Json: lo.Must(cmd.Flags().GetBool("json")),
+ Query: lo.Must(cmd.Flags().GetString("query")),
+ PopulatePages: lo.Must(cmd.Flags().GetBool("populate-pages")),
+ IncludeAnilistManga: lo.Must(cmd.Flags().GetBool("include-anilist-manga")),
+ MangaPicker: mangaPicker,
+ ChaptersFilter: chapterFilter,
+ Out: writer,
}
handleErr(inline.Run(options))
@@ -182,16 +205,20 @@ var inlineAnilistGetCmd = &cobra.Command{
Use: "get",
Short: "Get anilist manga that is bind to manga name",
Run: func(cmd *cobra.Command, args []string) {
+ var (
+ m *anilist.Manga
+ err error
+ )
+
name := lo.Must(cmd.Flags().GetString("name"))
- anilistManga, ok := anilist.GetRelation(name)
+ m, err = anilist.FindClosest(name)
- if !ok {
- var err error
- anilistManga, err = anilist.FindClosest(name)
+ if err != nil {
+ m, err = anilist.FindClosest(name)
handleErr(err)
}
- handleErr(json.NewEncoder(os.Stdout).Encode(anilistManga))
+ handleErr(json.NewEncoder(os.Stdout).Encode(m))
},
}
@@ -219,3 +246,54 @@ var inlineAnilistBindCmd = &cobra.Command{
handleErr(anilist.SetRelation(mangaName, anilistManga))
},
}
+
+func init() {
+ inlineAnilistCmd.AddCommand(inlineAnilistUpdateCmd)
+
+ inlineAnilistUpdateCmd.Flags().StringP("path", "p", "", "path to the manga")
+ lo.Must0(inlineAnilistUpdateCmd.MarkFlagRequired("path"))
+}
+
+var inlineAnilistUpdateCmd = &cobra.Command{
+ Use: "update",
+ Short: "Update old manga metadata according to the current anilist bind",
+ Run: func(cmd *cobra.Command, args []string) {
+ path := lo.Must(cmd.Flags().GetString("path"))
+ handleErr(update.Metadata(path))
+ },
+}
+
+func init() {
+ inlineCmd.AddCommand(inlineSchemaCmd)
+
+ inlineSchemaCmd.Flags().BoolP("anilist", "a", false, "generate anilist search output schema")
+}
+
+var inlineSchemaCmd = &cobra.Command{
+ Use: "schema",
+ Short: "Schemas for the inline json outputs",
+ Run: func(cmd *cobra.Command, args []string) {
+ reflector := new(jsonschema.Reflector)
+ reflector.Anonymous = true
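+ // qualify common type names with their package so identically named types from different packages stay distinct in the schema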
+ reflector.Namer = func(t reflect.Type) string {
+ name := t.Name()
+ switch strings.ToLower(name) {
+ case "manga", "chapter", "page", "date", "output":
+ return filepath.Base(t.PkgPath()) + "." + name
+ }
+
+ return name
+ }
+
+ var schema *jsonschema.Schema
+
+ switch {
+ case lo.Must(cmd.Flags().GetBool("anilist")):
+ schema = reflector.Reflect([]*anilist.Manga{})
+ default:
+ schema = reflector.Reflect(&inline.Output{})
+ }
+
+ handleErr(json.NewEncoder(os.Stdout).Encode(schema))
+ },
+}
diff --git a/cmd/install.go b/cmd/install.go
deleted file mode 100644
index 4314e566..00000000
--- a/cmd/install.go
+++ /dev/null
@@ -1,18 +0,0 @@
-package cmd
-
-import (
- "github.com/spf13/cobra"
-)
-
-func init() {
- rootCmd.AddCommand(installCmd)
-}
-
-var installCmd = &cobra.Command{
- Use: "install",
- Short: "Browse and install custom scrapers",
- Deprecated: "use `mangal sources install` instead.",
- Long: `Browse and install custom scrapers from official GitHub repo.
-https://github.com/metafates/mangal-scrapers`,
- Run: sourcesInstallCmd.Run,
-}
diff --git a/cmd/integration.go b/cmd/integration.go
index 3067282b..4a1b8201 100644
--- a/cmd/integration.go
+++ b/cmd/integration.go
@@ -4,8 +4,10 @@ import (
"fmt"
"github.com/AlecAivazis/survey/v2"
"github.com/metafates/mangal/constant"
- "github.com/metafates/mangal/integration/anilistintegration"
+ "github.com/metafates/mangal/icon"
+ "github.com/metafates/mangal/integration/anilist"
"github.com/metafates/mangal/log"
+ "github.com/metafates/mangal/open"
"github.com/samber/lo"
"github.com/spf13/cobra"
"github.com/spf13/viper"
@@ -102,13 +104,30 @@ See https://github.com/metafates/mangal/wiki/Anilist-Integration for more inform
}
if viper.GetString(constant.AnilistCode) == "" {
- fmt.Println(anilistintegration.New().AuthURL())
+ authURL := anilist.New().AuthURL()
+ confirmOpenInBrowser := survey.Confirm{
+ Message: "Open browser to authenticate with Anilist?",
+ Default: false,
+ }
+
+ var openInBrowser bool
+ err := survey.AskOne(&confirmOpenInBrowser, &openInBrowser)
+ if err == nil && openInBrowser {
+ err = open.Start(authURL)
+ }
+
+ if err != nil || !openInBrowser {
+ fmt.Println("Please open the following URL in your browser:")
+ fmt.Println(authURL)
+ }
+
input := survey.Input{
- Message: "Anilsit code is not set. Please copy it from the link above and paste in here:",
+ Message: "Anilsit code is not set. Please copy it from the link and paste in here:",
Help: "",
}
+
var response string
- err := survey.AskOne(&input, &response)
+ err = survey.AskOne(&input, &response)
handleErr(err)
if response == "" {
@@ -119,5 +138,7 @@ See https://github.com/metafates/mangal/wiki/Anilist-Integration for more inform
err = viper.WriteConfig()
handleErr(err)
}
+
+ fmt.Printf("%s Anilist integration was set up\n", icon.Get(icon.Success))
},
}
diff --git a/cmd/root.go b/cmd/root.go
index a8cebd78..f3adfb03 100644
--- a/cmd/root.go
+++ b/cmd/root.go
@@ -3,6 +3,7 @@ package cmd
import (
"fmt"
cc "github.com/ivanpirog/coloredcobra"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/converter"
"github.com/metafates/mangal/icon"
@@ -10,6 +11,9 @@ import (
"github.com/metafates/mangal/provider"
"github.com/metafates/mangal/style"
"github.com/metafates/mangal/tui"
+ "github.com/metafates/mangal/util"
+ "github.com/metafates/mangal/version"
+ "github.com/metafates/mangal/where"
"github.com/samber/lo"
"github.com/spf13/cobra"
"github.com/spf13/viper"
@@ -18,6 +22,8 @@ import (
)
func init() {
+ rootCmd.Flags().BoolP("version", "v", false, "Print version")
+
rootCmd.PersistentFlags().StringP("format", "F", "", "output format")
lo.Must0(rootCmd.RegisterFlagCompletionFunc("format", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return converter.Available(), cobra.ShellCompDirectiveDefault
@@ -33,7 +39,7 @@ func init() {
rootCmd.PersistentFlags().BoolP("write-history", "H", true, "write history of the read chapters")
lo.Must0(viper.BindPFlag(constant.HistorySaveOnRead, rootCmd.PersistentFlags().Lookup("write-history")))
- rootCmd.PersistentFlags().StringP("source", "S", "", "default source to use")
+ rootCmd.PersistentFlags().StringArrayP("source", "S", []string{}, "default source to use")
lo.Must0(rootCmd.RegisterFlagCompletionFunc("source", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
var sources []string
@@ -47,27 +53,39 @@ func init() {
return sources, cobra.ShellCompDirectiveDefault
}))
- lo.Must0(viper.BindPFlag(constant.DownloaderDefaultSource, rootCmd.PersistentFlags().Lookup("source")))
+ lo.Must0(viper.BindPFlag(constant.DownloaderDefaultSources, rootCmd.PersistentFlags().Lookup("source")))
rootCmd.Flags().BoolP("continue", "c", false, "continue reading")
+ helpFunc := rootCmd.HelpFunc()
+ rootCmd.SetHelpFunc(func(cmd *cobra.Command, args []string) {
+ helpFunc(cmd, args)
+ version.Notify()
+ })
+
// Clear temporary files on startup
- go clearTemp()
+ go func() {
+ _ = util.Delete(where.Temp())
+ }()
}
// rootCmd represents the base command when called without any subcommands
var rootCmd = &cobra.Command{
- Use: constant.Mangal,
- Version: constant.Version,
- Short: "The ultimate manga downloader",
+ Use: constant.Mangal,
+ Short: "The ultimate manga downloader",
Long: constant.AsciiArtLogo + "\n" +
- style.Combined(style.HiRed, style.Italic)(" - The ultimate cli manga downloader"),
+ style.New().Italic(true).Foreground(color.HiRed).Render(" - The ultimate cli manga downloader"),
PreRun: func(cmd *cobra.Command, args []string) {
if _, err := converter.Get(viper.GetString(constant.FormatsUse)); err != nil {
handleErr(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
+ if cmd.Flags().Changed("version") {
+ versionCmd.Run(versionCmd, args)
+ return
+ }
+
options := tui.Options{
Continue: lo.Must(cmd.Flags().GetBool("continue")),
}
diff --git a/cmd/sources.go b/cmd/sources.go
index 9b628d62..362f8181 100644
--- a/cmd/sources.go
+++ b/cmd/sources.go
@@ -2,6 +2,7 @@ package cmd
import (
"fmt"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/tui"
"github.com/metafates/mangal/util"
@@ -23,21 +24,30 @@ import (
func init() {
rootCmd.AddCommand(sourcesCmd)
- sourcesCmd.Flags().BoolP("raw", "r", false, "do not print headers")
- sourcesCmd.Flags().BoolP("custom", "c", false, "show only custom sources")
- sourcesCmd.Flags().BoolP("builtin", "b", false, "show only builtin sources")
-
- sourcesCmd.MarkFlagsMutuallyExclusive("custom", "builtin")
- sourcesCmd.SetOut(os.Stdout)
}
var sourcesCmd = &cobra.Command{
- Use: "sources",
- Short: "List an available sources",
- Example: "mangal sources",
+ Use: "sources",
+ Short: "Manage sources",
+}
+
+func init() {
+ sourcesCmd.AddCommand(sourcesListCmd)
+
+ sourcesListCmd.Flags().BoolP("raw", "r", false, "do not print headers")
+ sourcesListCmd.Flags().BoolP("custom", "c", false, "show only custom sources")
+ sourcesListCmd.Flags().BoolP("builtin", "b", false, "show only builtin sources")
+
+ sourcesListCmd.MarkFlagsMutuallyExclusive("custom", "builtin")
+ sourcesListCmd.SetOut(os.Stdout)
+}
+
+var sourcesListCmd = &cobra.Command{
+ Use: "list",
+ Short: "List an available sources",
Run: func(cmd *cobra.Command, args []string) {
printHeader := !lo.Must(cmd.Flags().GetBool("raw"))
- headerStyle := style.Combined(style.Bold, style.HiBlue)
+ headerStyle := style.New().Foreground(color.HiBlue).Bold(true).Render
h := func(s string) {
if printHeader {
cmd.Println(headerStyle(s))
@@ -65,7 +75,9 @@ var sourcesCmd = &cobra.Command{
printCustom()
default:
printBuiltin()
- cmd.Println()
+ if printHeader {
+ cmd.Println()
+ }
printCustom()
}
},
@@ -73,22 +85,33 @@ var sourcesCmd = &cobra.Command{
func init() {
sourcesCmd.AddCommand(sourcesRemoveCmd)
+
+ sourcesRemoveCmd.Flags().StringArrayP("name", "n", []string{}, "name of the source to remove")
+ lo.Must0(sourcesRemoveCmd.RegisterFlagCompletionFunc("name", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ sources, err := filesystem.Api().ReadDir(where.Sources())
+ if err != nil {
+ return nil, cobra.ShellCompDirectiveError
+ }
+
+ return lo.FilterMap(sources, func(item os.FileInfo, _ int) (string, bool) {
+ name := item.Name()
+ if !strings.HasSuffix(name, provider.CustomProviderExtension) {
+ return "", false
+ }
+
+ return util.FileStem(filepath.Base(name)), true
+ }), cobra.ShellCompDirectiveNoFileComp
+ }))
}
var sourcesRemoveCmd = &cobra.Command{
- Use: "remove",
- Short: "Remove a custom source",
- Example: "mangal sources remove ",
+ Use: "remove",
+ Short: "Remove a custom source",
Run: func(cmd *cobra.Command, args []string) {
- if len(args) == 0 {
- handleErr(cmd.Help())
- return
- }
-
- for _, name := range args {
+ for _, name := range lo.Must(cmd.Flags().GetStringArray("name")) {
path := filepath.Join(where.Sources(), name+provider.CustomProviderExtension)
handleErr(filesystem.Api().Remove(path))
- fmt.Printf("%s successfully removed %s\n", icon.Get(icon.Success), style.Yellow(name))
+ fmt.Printf("%s successfully removed %s\n", icon.Get(icon.Success), style.Fg(color.Yellow)(name))
}
},
}
diff --git a/cmd/version.go b/cmd/version.go
index 7f5241cc..b1fc0910 100644
--- a/cmd/version.go
+++ b/cmd/version.go
@@ -1,11 +1,10 @@
package cmd
import (
- "fmt"
- "github.com/metafates/mangal/icon"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/style"
- "github.com/metafates/mangal/updater"
- "github.com/metafates/mangal/util"
+ "github.com/metafates/mangal/version"
+ "github.com/samber/lo"
"os"
"runtime"
"strings"
@@ -18,6 +17,7 @@ import (
func init() {
rootCmd.AddCommand(versionCmd)
versionCmd.SetOut(os.Stdout)
+ versionCmd.Flags().BoolP("short", "s", false, "print short version")
}
var versionCmd = &cobra.Command{
@@ -25,44 +25,36 @@ var versionCmd = &cobra.Command{
Short: "Print the version number of mangal",
Long: `All software has versions. This is mangal's`,
Run: func(cmd *cobra.Command, args []string) {
- erase := util.PrintErasable(fmt.Sprintf("%s Checking if new version is available...", icon.Get(icon.Progress)))
- var newVersion string
-
- version, err := updater.LatestVersion()
- if err == nil {
- comp, err := util.CompareVersions(constant.Version, version)
- if err == nil && comp == -1 {
- newVersion = version
- }
+ if lo.Must(cmd.Flags().GetBool("short")) {
+ cmd.Println(constant.Version)
+ return
}
- erase()
+ defer version.Notify()
versionInfo := struct {
- Version string
- OS string
- Arch string
- BuiltAt string
- BuiltBy string
- Revision string
- App string
- NewVersion string
+ Version string
+ OS string
+ Arch string
+ BuiltAt string
+ BuiltBy string
+ Revision string
+ App string
}{
- Version: constant.Version,
- App: constant.Mangal,
- OS: runtime.GOOS,
- Arch: runtime.GOARCH,
- BuiltAt: strings.TrimSpace(constant.BuiltAt),
- BuiltBy: constant.BuiltBy,
- Revision: constant.Revision,
- NewVersion: newVersion,
+ Version: constant.Version,
+ App: constant.Mangal,
+ OS: runtime.GOOS,
+ Arch: runtime.GOARCH,
+ BuiltAt: strings.TrimSpace(constant.BuiltAt),
+ BuiltBy: constant.BuiltBy,
+ Revision: constant.Revision,
}
t, err := template.New("version").Funcs(map[string]any{
"faint": style.Faint,
"bold": style.Bold,
- "magenta": style.Magenta,
- "green": style.Green,
+ "magenta": style.Fg(color.Purple),
+ "green": style.Fg(color.Green),
"repeat": strings.Repeat,
"concat": func(a, b string) string {
return a + b
@@ -74,11 +66,7 @@ var versionCmd = &cobra.Command{
{{ faint "Build Date" }} {{ bold .BuiltAt }}
{{ faint "Built By" }} {{ bold .BuiltBy }}
{{ faint "Platform" }} {{ bold .OS }}/{{ bold .Arch }}
-
-{{ if not (eq .NewVersion "") }}
-{{ green "▇▇▇" }} New version available {{ bold .NewVersion }}
-{{ faint (concat "https://github.com/metafates/mangal/releases/tag/v" .NewVersion) }}
-{{ end }}`)
+`)
handleErr(err)
handleErr(t.Execute(cmd.OutOrStdout(), versionInfo))
},
diff --git a/cmd/where.go b/cmd/where.go
index b6a306b7..1cf618ab 100644
--- a/cmd/where.go
+++ b/cmd/where.go
@@ -1,32 +1,53 @@
package cmd
import (
+ "github.com/metafates/mangal/color"
+ "github.com/samber/mo"
"os"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/style"
- "github.com/metafates/mangal/util"
"github.com/metafates/mangal/where"
"github.com/samber/lo"
"github.com/spf13/cobra"
)
-var wherePaths = []lo.Tuple2[string, func() string]{
- {"downloads", where.Downloads},
- {"config", where.Config},
- {"sources", where.Sources},
- {"logs", where.Logs},
+type whereTarget struct {
+ name string
+ where func() string
+ argLong string
+ argShort mo.Option[string]
+ hidden bool
+}
+
+var wherePaths = []*whereTarget{
+ {"Downloads", where.Downloads, "downloads", mo.Some("d"), false},
+ {"Config", where.Config, "config", mo.Some("c"), false},
+ {"Sources", where.Sources, "sources", mo.Some("s"), false},
+ {"Logs", where.Logs, "logs", mo.Some("l"), false},
+ {"Cache", where.Cache, "cache", mo.None[string](), true},
+ {"Temp", where.Temp, "temp", mo.None[string](), true},
+ {"History", where.History, "history", mo.None[string](), true},
}
func init() {
rootCmd.AddCommand(whereCmd)
for _, n := range wherePaths {
- whereCmd.Flags().BoolP(n.A, string(n.A[0]), false, n.A+" path")
+ if n.argShort.IsPresent() {
+ whereCmd.Flags().BoolP(n.argLong, n.argShort.MustGet(), false, n.name+" path")
+ } else {
+ whereCmd.Flags().Bool(n.argLong, false, n.name+" path")
+ }
+
+ if n.hidden {
+ lo.Must0(whereCmd.Flags().MarkHidden(n.argLong))
+ }
+
}
- whereCmd.MarkFlagsMutuallyExclusive(lo.Map(wherePaths, func(t lo.Tuple2[string, func() string], _ int) string {
- return t.A
+ whereCmd.MarkFlagsMutuallyExclusive(lo.Map(wherePaths, func(t *whereTarget, _ int) string {
+ return t.argLong
})...)
whereCmd.SetOut(os.Stdout)
@@ -36,18 +57,26 @@ var whereCmd = &cobra.Command{
Use: "where",
Short: "Show the paths for a files related to the " + constant.Mangal,
Run: func(cmd *cobra.Command, args []string) {
- headerStyle := style.Combined(style.Bold, style.HiMagenta)
+ headerStyle := style.New().Bold(true).Foreground(color.HiPurple).Render
for _, n := range wherePaths {
- if lo.Must(cmd.Flags().GetBool(n.A)) {
- cmd.Println(n.B())
+ if lo.Must(cmd.Flags().GetBool(n.argLong)) {
+ cmd.Println(n.where())
return
}
}
+ wherePaths = lo.Filter(wherePaths, func(t *whereTarget, _ int) bool {
+ return !t.hidden
+ })
+
for i, n := range wherePaths {
- cmd.Printf("%s %s\n", headerStyle(util.Capitalize(n.A)+"?"), style.Yellow("--"+n.A))
- cmd.Println(n.B())
+ if n.hidden {
+ continue
+ }
+
+ cmd.Printf("%s %s\n", headerStyle(n.name+"?"), style.Fg(color.Yellow)("--"+n.argLong))
+ cmd.Println(n.where())
if i < len(wherePaths)-1 {
cmd.Println()
diff --git a/color/color.go b/color/color.go
new file mode 100644
index 00000000..ae9113a9
--- /dev/null
+++ b/color/color.go
@@ -0,0 +1,29 @@
+package color
+
+import "github.com/charmbracelet/lipgloss"
+
+var (
+ Red = New("1")
+ Green = New("2")
+ Yellow = New("3")
+ Blue = New("4")
+ Purple = New("5")
+ Cyan = New("6")
+ White = New("7")
+ Black = New("8")
+)
+
+var (
+ HiRed = New("9")
+ HiGreen = New("10")
+ HiYellow = New("11")
+ HiBlue = New("12")
+ HiPurple = New("13")
+ HiCyan = New("14")
+ HiWhite = New("15")
+ HiBlack = New("16")
+)
+
+func New(color string) lipgloss.Color {
+ return lipgloss.Color(color)
+}
diff --git a/color/extra.go b/color/extra.go
new file mode 100644
index 00000000..8d53cbea
--- /dev/null
+++ b/color/extra.go
@@ -0,0 +1,5 @@
+package color
+
+var (
+ Orange = New("#ffb703")
+)
diff --git a/config/default.go b/config/default.go
index af397c9f..69905e5d 100644
--- a/config/default.go
+++ b/config/default.go
@@ -3,19 +3,42 @@ package config
import (
"encoding/json"
"fmt"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/style"
"github.com/samber/lo"
"github.com/spf13/viper"
- "reflect"
)
+// Field represents a single config field
type Field struct {
- Key string
- Value any
+ // Key is the key of the field
+ Key string
+ // Value is the default value of the field
+ Value any
+ // Description is the description of the field
Description string
}
+// typeName returns the type of the field without reflection
+func (f *Field) typeName() string {
+ switch f.Value.(type) {
+ case string:
+ return "string"
+ case int:
+ return "int"
+ case bool:
+ return "bool"
+ case []string:
+ return "[]string"
+ case []int:
+ return "[]int"
+ default:
+ return "unknown"
+ }
+}
+
+// Json returns the field as a json string
func (f *Field) Json() string {
field := struct {
Key string `json:"key"`
@@ -26,39 +49,41 @@ func (f *Field) Json() string {
Key: f.Key,
Value: f.Value,
Description: f.Description,
- Type: reflect.TypeOf(f.Value).String(),
+ Type: f.typeName(),
}
output := lo.Must(json.Marshal(field))
return string(output)
}
+// Pretty formats the field as a string for cli output
func (f *Field) Pretty() string {
return fmt.Sprintf(
`%s
%s: %s = %s
`,
style.Faint(f.Description),
- style.Magenta(f.Key),
- style.Yellow(reflect.TypeOf(f.Value).String()),
- style.Cyan(fmt.Sprintf("%v", viper.Get(f.Key))),
+ style.Fg(color.Purple)(f.Key),
+ style.Fg(color.Yellow)(f.typeName()),
+ style.Fg(color.Cyan)(fmt.Sprintf("%v", viper.Get(f.Key))),
)
}
-func init() {
- fields := []Field{
- {
- constant.DownloaderPath,
- ".",
- `Where to download manga
+// defaults contains all default values for the config.
+// It must contain all fields defined in the constant package.
+var defaults = [constant.DefinedFieldsCount]Field{
+ {
+ constant.DownloaderPath,
+ ".",
+ `Where to download manga
Absolute or relative.
You can also use tilde (~) to refer to your home directory or use env variables.
Examples: ~/... or $HOME/... or ${MANGA_PATH}-mangal`,
- },
- {
- constant.DownloaderChapterNameTemplate,
- "[{padded-index}] {chapter}",
- `Key template of the downloaded chapters
+ },
+ {
+ constant.DownloaderChapterNameTemplate,
+ "[{padded-index}] {chapter}",
+ `Key template of the downloaded chapters
Path forbidden symbols will be replaced with "_"
Available variables:
{index} - index of the chapters
@@ -68,245 +93,281 @@ Available variables:
{manga} - name of the manga
{volume} - volume of the chapter
{source} - name of the source`,
- },
- {
- constant.DownloaderAsync,
- true,
- `Use asynchronous downloader (faster)
+ },
+ {
+ constant.DownloaderAsync,
+ true,
+ `Use asynchronous downloader (faster)
Do no turn it off unless you have some issues`,
- },
- {
- constant.DownloaderCreateMangaDir,
- true,
- `Create a subdirectory for each manga`,
- },
- {
- constant.DownloaderCreateVolumeDir,
- false,
- `Create a subdirectory for each volume`,
- },
- {
- constant.DownloaderRedownloadExisting,
- false,
- `Redownload chapters that already exist`,
- },
- {
- constant.DownloaderDefaultSource,
- "",
- `Default source to use.
+ },
+ {
+ constant.DownloaderCreateMangaDir,
+ true,
+ `Create a subdirectory for each manga`,
+ },
+ {
+ constant.DownloaderCreateVolumeDir,
+ false,
+ `Create a subdirectory for each volume`,
+ },
+ {
+ constant.DownloaderReadDownloaded,
+ true,
+ "If chapter is already downloaded, read it instead of downloading it to temp",
+ },
+ {
+ constant.DownloaderRedownloadExisting,
+ false,
+ `Redownload chapters that already exist`,
+ },
+ {
+ constant.DownloaderDefaultSources,
+ []string{},
+ `Default sources to use.
Will prompt if not set.
-Type "mangal sources" to show available sources`,
- },
- {
- constant.DownloaderStopOnError,
- false,
- `Stop downloading other chapters on error`,
- },
- {
- constant.DownloaderDownloadCover,
- false,
- `Whether to download manga cover or not`,
- },
- {
- constant.FormatsUse,
- "pdf",
- `Default format to export chapters
+Type "mangal sources list" to show available sources`,
+ },
+ {
+ constant.DownloaderStopOnError,
+ false,
+ `Stop downloading other chapters on error`,
+ },
+ {
+ constant.DownloaderDownloadCover,
+ true,
+ `Whether to download manga cover or not`,
+ },
+ {
+ constant.FormatsUse,
+ "pdf",
+ `Default format to export chapters
Available options are: pdf, zip, cbz, plain`,
- },
- {
- constant.FormatsSkipUnsupportedImages,
- true,
- `Will skip images that can't be converted to the specified format
+ },
+ {
+ constant.FormatsSkipUnsupportedImages,
+ true,
+ `Will skip images that can't be converted to the specified format
Example: if you want to export to pdf, but some images are gifs, they will be skipped`,
- },
+ },
- {
- constant.MetadataFetchAnilist,
- true,
- `Fetch metadata from Anilist
+ {
+ constant.MetadataFetchAnilist,
+ true,
+ `Fetch metadata from Anilist
It will also cache the results to not spam the API`,
- },
+ },
- {
- constant.MetadataComicInfoXML,
- true,
- `Generate ComicInfo.xml file for each chapter`,
- },
- {
- constant.MetadataSeriesJSON,
- true,
- `Generate series.json file for each manga`,
- },
- {
- constant.MiniSearchLimit,
- 20,
- `Limit of search results to show`,
- },
- {
- constant.IconsVariant,
- "plain",
- `Icons variant.
+ {
+ constant.MetadataComicInfoXML,
+ true,
+ `Generate ComicInfo.xml file for each chapter`,
+ },
+ {
+ constant.MetadataComicInfoXMLAddDate,
+ true,
+ `Add series release date to each chapter in ComicInfo.xml file`,
+ },
+ {
+ constant.MetadataComicInfoXMLAlternativeDate,
+ false,
+ "Use download date instead of series release date in ComicInfo.xml file",
+ },
+ {
+ constant.MetadataComicInfoXMLTagRelevanceThreshold,
+ 60,
+ "Minimum relevance of a tag to be added to ComicInfo.xml file. From 0 to 100",
+ },
+ {
+ constant.MetadataSeriesJSON,
+ true,
+ `Generate series.json file for each manga`,
+ },
+ {
+ constant.MiniSearchLimit,
+ 20,
+ `Limit of search results to show`,
+ },
+ {
+ constant.IconsVariant,
+ "plain",
+ `Icons variant.
Available options are: emoji, kaomoji, plain, squares, nerd (nerd-font required)`,
- },
- {
- constant.ReaderPDF,
- "",
- "What app to use to open pdf files",
- },
- {
- constant.ReaderCBZ,
- "",
- "What app to use to open cbz files",
- },
- {
- constant.ReaderZIP,
- "",
- "What app to use to open zip files",
- },
- {
- constant.RaderPlain,
- "",
- "What app to use to open folders",
- },
- {
- constant.ReaderBrowser,
- "",
- "What browser to use to open webpages",
- },
- {
- constant.ReaderFolder,
- "",
- "What app to use to open folders",
- },
- {
- constant.ReaderReadInBrowser,
- false,
- "Open chapter url in browser instead of downloading it",
- },
- {
- constant.HistorySaveOnRead,
- true,
- "Save history on chapter read",
- },
-
- {
- constant.HistorySaveOnDownload,
- false,
- "Save history on chapter download",
- },
- {
- constant.MangadexLanguage,
- "en",
- `Preferred language for mangadex
+ },
+ {
+ constant.ReaderPDF,
+ "",
+ "What app to use to open pdf files",
+ },
+ {
+ constant.ReaderCBZ,
+ "",
+ "What app to use to open cbz files",
+ },
+ {
+ constant.ReaderZIP,
+ "",
+ "What app to use to open zip files",
+ },
+ {
+ constant.RaderPlain,
+ "",
+ "What app to use to open folders",
+ },
+ {
+ constant.ReaderBrowser,
+ "",
+ "What browser to use to open webpages",
+ },
+ {
+ constant.ReaderFolder,
+ "",
+ "What app to use to open folders",
+ },
+ {
+ constant.ReaderReadInBrowser,
+ false,
+ "Open chapter url in browser instead of downloading it",
+ },
+ {
+ constant.HistorySaveOnRead,
+ true,
+ "Save history on chapter read",
+ },
+ {
+ constant.HistorySaveOnDownload,
+ false,
+ "Save history on chapter download",
+ },
+ {
+ constant.SearchShowQuerySuggestions,
+ true,
+ "Show query suggestions in when searching",
+ },
+ {
+ constant.MangadexLanguage,
+ "en",
+ `Preferred language for mangadex
Use "any" to show all languages`,
- },
- {
- constant.MangadexNSFW,
- false,
- "Show NSFW content",
- },
- {
- constant.MangadexShowUnavailableChapters,
- false,
- "Show chapters that cannot be downloaded",
- },
- {
- constant.InstallerUser,
- "metafates",
- "Custom scrapers repository owner",
- },
- {
- constant.InstallerRepo,
- "mangal-scrapers",
- "Custom scrapers repository name",
- },
- {
- constant.InstallerBranch,
- "main",
- "Custom scrapers repository branch",
- },
- {
- constant.GenAuthor,
- "",
- "Key to use in generated scrapers as author",
- },
- {
- constant.LogsWrite,
- false,
- "Write logs",
- },
- {
- constant.LogsLevel,
- "info",
- `Available options are: (from less to most verbose)
+ },
+ {
+ constant.MangadexNSFW,
+ false,
+ "Show NSFW content",
+ },
+ {
+ constant.MangadexShowUnavailableChapters,
+ false,
+ "Show chapters that cannot be downloaded",
+ },
+ {
+ constant.InstallerUser,
+ "metafates",
+ "Custom scrapers repository owner",
+ },
+ {
+ constant.InstallerRepo,
+ "mangal-scrapers",
+ "Custom scrapers repository name",
+ },
+ {
+ constant.InstallerBranch,
+ "main",
+ "Custom scrapers repository branch",
+ },
+ {
+ constant.GenAuthor,
+ "",
+ "Key to use in generated scrapers as author",
+ },
+ {
+ constant.LogsWrite,
+ false,
+ "Write logs",
+ },
+ {
+ constant.LogsLevel,
+ "info",
+ `Available options are: (from less to most verbose)
panic, fatal, error, warn, info, debug, trace`,
- },
- {
- constant.LogsJson,
- false,
- "Use json format for logs",
- },
- {
- constant.AnilistEnable,
- false,
- "Enable Anilist integration",
- },
- {
- constant.AnilistCode,
- "",
- "Anilist code to use for authentication",
- },
- {
- constant.AnilistID,
- "",
- "Anilist ID to use for authentication",
- },
- {
- constant.AnilistSecret,
- "",
- "Anilist secret to use for authentication",
- },
- {
- constant.AnilistLinkOnMangaSelect,
- true,
- "Show link to Anilist on manga select",
- },
- {
- constant.TUIItemSpacing,
- 1,
- "Spacing between items in the TUI",
- },
- {
- constant.TUIReadOnEnter,
- true,
- "Read chapter on enter if other chapters aren't selected",
- },
- {
- constant.TUISearchPromptString,
- "> ",
- "Search prompt string to use",
- },
- {
- constant.TUIShowURLs,
- true,
- "Show URLs under list items",
- },
- {
- constant.TUIReverseChapters,
- false,
- "Reverse chapters order",
- },
- {
- constant.TUIShowDownloadedPath,
- true,
- "Show path where chapters were downloaded",
- },
- }
+ },
+ {
+ constant.LogsJson,
+ false,
+ "Use json format for logs",
+ },
+ {
+ constant.AnilistEnable,
+ false,
+ "Enable Anilist integration",
+ },
+ {
+ constant.AnilistCode,
+ "",
+ "Anilist code to use for authentication",
+ },
+ {
+ constant.AnilistID,
+ "",
+ "Anilist ID to use for authentication",
+ },
+ {
+ constant.AnilistSecret,
+ "",
+ "Anilist secret to use for authentication",
+ },
+ {
+ constant.AnilistLinkOnMangaSelect,
+ true,
+ "Show link to Anilist on manga select",
+ },
+ {
+ constant.TUIItemSpacing,
+ 1,
+ "Spacing between items in the TUI",
+ },
+ {
+ constant.TUIReadOnEnter,
+ true,
+ "Read chapter on enter if other chapters aren't selected",
+ },
+ {
+ constant.TUISearchPromptString,
+ "> ",
+ "Search prompt string to use",
+ },
+ {
+ constant.TUIShowURLs,
+ true,
+ "Show URLs under list items",
+ },
+ {
+ constant.TUIReverseChapters,
+ false,
+ "Reverse chapters order",
+ },
+ {
+ constant.TUIShowDownloadedPath,
+ true,
+ "Show path where chapters were downloaded",
+ },
+}
+
+func init() {
+ var count int
+
+ for _, field := range defaults {
+ if _, ok := Default[field.Key]; ok {
+ panic("Duplicate key in defaults: " + field.Key)
+ }
- for _, field := range fields {
Default[field.Key] = field
EnvExposed = append(EnvExposed, field.Key)
+ count++
+ }
+
+ if count != constant.DefinedFieldsCount {
+ panic(fmt.Sprintf("Expected %d default values, got %d", constant.DefinedFieldsCount, count))
}
}
-var Default = make(map[string]Field)
+var Default = make(map[string]Field, constant.DefinedFieldsCount)
diff --git a/constant/ascii.go b/constant/ascii.go
index 4286b789..73f80f64 100644
--- a/constant/ascii.go
+++ b/constant/ascii.go
@@ -1,12 +1,6 @@
package constant
-import "github.com/metafates/mangal/style"
+import _ "embed"
-var AsciiArtLogo = style.Combined(style.Yellow, style.Bold)(`
- _ _____
- /\/\ __ _ _ __ __ _ __ _| | |___ /
- / \ / _' | '_ \ / _' |/ _' | | |_ \
-/ /\/\ \ (_| | | | | (_| | (_| | | ___) |
-\/ \/\__,_|_| |_|\__, |\__,_|_| |____/
- |___/
-`)
+//go:embed ascii.txt
+var AsciiArtLogo string
diff --git a/constant/ascii.txt b/constant/ascii.txt
new file mode 100644
index 00000000..b84b5afe
--- /dev/null
+++ b/constant/ascii.txt
@@ -0,0 +1,5 @@
+• ▌ ▄ ·. ▄▄▄· ▐ ▄ ▄▄ • ▄▄▄· ▄▄▌
+·██ ▐███▪▐█ ▀█ •█▌▐█▐█ ▀ ▪▐█ ▀█ ██•
+▐█ ▌▐▌▐█·▄█▀▀█ ▐█▐▐▌▄█ ▀█▄▄█▀▀█ ██▪
+██ ██▌▐█▌▐█ ▪▐▌██▐█▌▐█▄▪▐█▐█ ▪▐▌▐█▌▐▌
+▀▀ █▪▀▀▀ ▀ ▀ ▀▀ █▪·▀▀▀▀ ▀ ▀ .▀▀▀
diff --git a/constant/config.go b/constant/config.go
index 3a5d9503..6add2886 100644
--- a/constant/config.go
+++ b/constant/config.go
@@ -1,15 +1,21 @@
package constant
+// DefinedFieldsCount is the number of fields defined in this package.
+// You have to manually update this number when you add a new field
+// to check later if every field has a defined default value
+const DefinedFieldsCount = 51
+
const (
DownloaderPath = "downloader.path"
DownloaderChapterNameTemplate = "downloader.chapter_name_template"
DownloaderAsync = "downloader.async"
DownloaderCreateMangaDir = "downloader.create_manga_dir"
DownloaderCreateVolumeDir = "downloader.create_volume_dir"
- DownloaderDefaultSource = "downloader.default_source"
+ DownloaderDefaultSources = "downloader.default_sources"
DownloaderStopOnError = "downloader.stop_on_error"
DownloaderDownloadCover = "downloader.download_cover"
DownloaderRedownloadExisting = "downloader.redownload_existing"
+ DownloaderReadDownloaded = "downloader.read_downloaded"
)
const (
@@ -18,9 +24,12 @@ const (
)
const (
- MetadataFetchAnilist = "metadata.fetch_anilist"
- MetadataComicInfoXML = "metadata.comic_info_xml"
- MetadataSeriesJSON = "metadata.series_json"
+ MetadataFetchAnilist = "metadata.fetch_anilist"
+ MetadataComicInfoXML = "metadata.comic_info_xml"
+ MetadataComicInfoXMLAddDate = "metadata.comic_info_xml_add_date"
+ MetadataComicInfoXMLAlternativeDate = "metadata.comic_info_xml_alternative_date"
+ MetadataComicInfoXMLTagRelevanceThreshold = "metadata.comic_info_xml_tag_relevance_threshold"
+ MetadataSeriesJSON = "metadata.series_json"
)
const (
@@ -38,6 +47,10 @@ const (
HistorySaveOnDownload = "history.save_on_download"
)
+const (
+ SearchShowQuerySuggestions = "search.show_query_suggestions"
+)
+
const (
MiniSearchLimit = "mini.search_limit"
)
diff --git a/constant/duration.go b/constant/duration.go
new file mode 100644
index 00000000..bb7d40f2
--- /dev/null
+++ b/constant/duration.go
@@ -0,0 +1,5 @@
+package constant
+
+import "time"
+
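+// Forever is the maximum value a time.Duration can hold (math.MaxInt64 nanoseconds, roughly 292 years)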
+const Forever = time.Duration(1<<63 - 1)
diff --git a/constant/meta.go b/constant/meta.go
index 97ee8c28..267f36b9 100644
--- a/constant/meta.go
+++ b/constant/meta.go
@@ -2,6 +2,6 @@ package constant
const (
Mangal = "mangal"
- Version = "3.14.2"
+ Version = "4.0.0"
UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36"
)
diff --git a/constant/prefix.go b/constant/prefix.go
deleted file mode 100644
index 33f79fa7..00000000
--- a/constant/prefix.go
+++ /dev/null
@@ -1,6 +0,0 @@
-package constant
-
-const (
- TempPrefix = Mangal + "Temp_"
- CachePrefix = Mangal + "Cache_"
-)
diff --git a/converter/cbz/cbz.go b/converter/cbz/cbz.go
index 46601234..dbb6390d 100644
--- a/converter/cbz/cbz.go
+++ b/converter/cbz/cbz.go
@@ -2,6 +2,8 @@ package cbz
import (
"archive/zip"
+ "bytes"
+ "encoding/xml"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/filesystem"
"github.com/metafates/mangal/source"
@@ -30,9 +32,18 @@ func save(chapter *source.Chapter, temp bool) (path string, err error) {
return
}
- cbzFile, err := filesystem.Api().Create(path)
+ err = SaveTo(chapter, path)
if err != nil {
- return
+ return "", err
+ }
+
+ return path, nil
+}
+
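+// SaveTo writes the chapter as a cbz archive (optionally with ComicInfo.xml) to the given path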
+func SaveTo(chapter *source.Chapter, to string) error {
+ cbzFile, err := filesystem.Api().Create(to)
+ if err != nil {
+ return err
}
defer util.Ignore(cbzFile.Close)
@@ -42,15 +53,20 @@ func save(chapter *source.Chapter, temp bool) (path string, err error) {
for _, page := range chapter.Pages {
if err = addToZip(zipWriter, page.Contents, page.Filename()); err != nil {
- return
+ return err
}
}
if viper.GetBool(constant.MetadataComicInfoXML) {
- err = addToZip(zipWriter, chapter.ComicInfoXML(), "ComicInfo.xml")
+ comicInfo := chapter.ComicInfo()
+ marshalled, err := xml.MarshalIndent(comicInfo, "", " ")
+ if err == nil {
+ buf := bytes.NewBuffer(marshalled)
+ err = addToZip(zipWriter, buf, "ComicInfo.xml")
+ }
}
- return
+ return err
}
func addToZip(writer *zip.Writer, file io.Reader, name string) error {
diff --git a/downloader/download.go b/downloader/download.go
index 72d50369..84fcd41a 100644
--- a/downloader/download.go
+++ b/downloader/download.go
@@ -1,7 +1,9 @@
package downloader
import (
+ "encoding/json"
"fmt"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/converter"
"github.com/metafates/mangal/filesystem"
@@ -18,16 +20,8 @@ import (
func Download(chapter *source.Chapter, progress func(string)) (string, error) {
log.Info("downloading " + chapter.Name)
- log.Info("checking if chapter is already downloaded")
path, err := chapter.Path(false)
if err != nil {
- log.Error(err)
- return "", err
- }
-
- exists, err := filesystem.Api().Exists(path)
- if err != nil {
- log.Error(err)
return "", err
}
@@ -38,7 +32,8 @@ func Download(chapter *source.Chapter, progress func(string)) (string, error) {
log.Warn(err)
}
} else {
- if exists {
+ log.Info("checking if chapter is already downloaded")
+ if chapter.IsDownloaded() {
log.Info("chapter already downloaded, skipping")
return path, nil
}
@@ -52,7 +47,7 @@ func Download(chapter *source.Chapter, progress func(string)) (string, error) {
}
log.Info("found " + fmt.Sprintf("%d", len(pages)) + " pages")
- err = chapter.DownloadPages(progress)
+ err = chapter.DownloadPages(false, progress)
if err != nil {
log.Error(err)
return "", err
@@ -71,13 +66,13 @@ func Download(chapter *source.Chapter, progress func(string)) (string, error) {
log.Warn(err)
} else {
path = filepath.Join(path, "series.json")
- exists, err := filesystem.Api().Exists(path)
+ progress("Generating series.json")
+ seriesJSON := chapter.Manga.SeriesJSON()
+ buf, err := json.Marshal(seriesJSON)
if err != nil {
log.Warn(err)
- } else if !exists {
- progress("Generating series.json")
- data := chapter.Manga.SeriesJSON()
- err = filesystem.Api().WriteFile(path, data.Bytes(), os.ModePerm)
+ } else {
+ err = filesystem.Api().WriteFile(path, buf, os.ModePerm)
if err != nil {
log.Warn(err)
}
@@ -86,14 +81,17 @@ func Download(chapter *source.Chapter, progress func(string)) (string, error) {
}
if viper.GetBool(constant.DownloaderDownloadCover) {
- _ = chapter.Manga.DownloadCover(progress)
+ coverDir, err := chapter.Manga.Path(false)
+ if err == nil {
+ _ = chapter.Manga.DownloadCover(false, coverDir, progress)
+ }
}
log.Info("getting " + viper.GetString(constant.FormatsUse) + " converter")
progress(fmt.Sprintf(
"Converting %d pages to %s %s",
len(pages),
- style.Yellow(viper.GetString(constant.FormatsUse)),
+ style.Fg(color.Yellow)(viper.GetString(constant.FormatsUse)),
style.Faint(chapter.SizeHuman())),
)
conv, err := converter.Get(viper.GetString(constant.FormatsUse))
diff --git a/downloader/read.go b/downloader/read.go
index 725ac72d..238b4131 100644
--- a/downloader/read.go
+++ b/downloader/read.go
@@ -2,6 +2,7 @@ package downloader
import (
"fmt"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/converter"
"github.com/metafates/mangal/history"
@@ -22,8 +23,15 @@ func Read(chapter *source.Chapter, progress func(string)) error {
)
}
- log.Info(fmt.Sprintf("downloading %s for reading. Provider is %s", chapter.Name, chapter.Source().ID()))
- log.Info("getting pages of " + chapter.Name)
+ if viper.GetBool(constant.DownloaderReadDownloaded) && chapter.IsDownloaded() {
+ path, err := chapter.Path(false)
+ if err == nil {
+ return openRead(path, chapter, progress)
+ }
+ }
+
+ log.Infof("downloading %s for reading. Provider is %s", chapter.Name, chapter.Source().ID())
+ log.Infof("getting pages of %s", chapter.Name)
progress("Getting pages")
pages, err := chapter.Source().PagesOf(chapter)
if err != nil {
@@ -31,7 +39,7 @@ func Read(chapter *source.Chapter, progress func(string)) error {
return err
}
- err = chapter.DownloadPages(progress)
+ err = chapter.DownloadPages(true, progress)
if err != nil {
log.Error(err)
return err
@@ -48,7 +56,7 @@ func Read(chapter *source.Chapter, progress func(string)) error {
progress(fmt.Sprintf(
"Converting %d pages to %s %s",
len(pages),
- style.Yellow(viper.GetString(constant.FormatsUse)),
+ style.Fg(color.Yellow)(viper.GetString(constant.FormatsUse)),
style.Faint(chapter.SizeHuman())),
)
path, err := conv.SaveTemp(chapter)
@@ -57,12 +65,17 @@ func Read(chapter *source.Chapter, progress func(string)) error {
return err
}
- err = openRead(path, progress)
+ err = openRead(path, chapter, progress)
if err != nil {
log.Error(err)
return err
}
+ progress("Done")
+ return nil
+}
+
+func openRead(path string, chapter *source.Chapter, progress func(string)) error {
if viper.GetBool(constant.HistorySaveOnRead) {
go func() {
err := history.Save(chapter)
@@ -74,11 +87,6 @@ func Read(chapter *source.Chapter, progress func(string)) error {
}()
}
- progress("Done")
- return nil
-}
-
-func openRead(path string, progress func(string)) error {
var (
reader string
err error
diff --git a/go.mod b/go.mod
index add98be8..24f0e451 100644
--- a/go.mod
+++ b/go.mod
@@ -11,19 +11,22 @@ require (
github.com/darylhjd/mangodex v0.0.0-20211231093527-e4a91c518fa0
github.com/dustin/go-humanize v1.0.0
github.com/gocolly/colly/v2 v2.1.0
+ github.com/invopop/jsonschema v0.6.0
github.com/ivanpirog/coloredcobra v1.0.1
github.com/ka-weihe/fast-levenshtein v0.0.0-20201227151214-4c99ee36a1ba
- github.com/metafates/mangal-lua-libs v0.4.1
+ github.com/lithammer/fuzzysearch v1.1.5
+ github.com/metafates/mangal-lua-libs v0.4.2
github.com/muesli/reflow v0.3.0
github.com/pdfcpu/pdfcpu v0.3.13
github.com/samber/lo v1.33.0
+ github.com/samber/mo v1.5.1
github.com/sirupsen/logrus v1.9.0
github.com/smartystreets/goconvey v1.7.2
github.com/spf13/afero v1.9.2
- github.com/spf13/cobra v1.6.0
+ github.com/spf13/cobra v1.6.1
github.com/spf13/viper v1.13.0
github.com/yuin/gopher-lua v0.0.0-20220504180219-658193537a64
- golang.org/x/exp v0.0.0-20221019170559-20944726eadf
+ golang.org/x/exp v0.0.0-20221028150844-83b7d23a625f
golang.org/x/term v0.1.0
)
@@ -48,6 +51,7 @@ require (
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/hhrutter/lzw v0.0.0-20190829144645-6f07a24e8650 // indirect
github.com/hhrutter/tiff v0.0.0-20190829141212-736cae8d0bc7 // indirect
+ github.com/iancoleman/orderedmap v0.2.0 // indirect
github.com/inconshreveable/mousetrap v1.0.1 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
diff --git a/go.sum b/go.sum
index 5238f764..00e03f65 100644
--- a/go.sum
+++ b/go.sum
@@ -202,11 +202,17 @@ github.com/hhrutter/tiff v0.0.0-20190829141212-736cae8d0bc7 h1:o1wMw7uTNyA58IlEd
github.com/hhrutter/tiff v0.0.0-20190829141212-736cae8d0bc7/go.mod h1:WkUxfS2JUu3qPo6tRld7ISb8HiC0gVSU91kooBMDVok=
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog=
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68=
+github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 h1:i462o439ZjprVSFSZLZxcsoAe592sZB1rci2Z8j4wdk=
+github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
+github.com/iancoleman/orderedmap v0.2.0 h1:sq1N/TFpYH++aViPcaKjys3bDClUEU7s5B+z6jq8pNA=
+github.com/iancoleman/orderedmap v0.2.0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/invopop/jsonschema v0.6.0 h1:8e+xY8ZEn8gDHUYylSlLHy22P+SLeIRIHv3nM3hCbmY=
+github.com/invopop/jsonschema v0.6.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
github.com/ivanpirog/coloredcobra v1.0.1 h1:aURSdEmlR90/tSiWS0dMjdwOvCVUeYLfltLfbgNxrN4=
github.com/ivanpirog/coloredcobra v1.0.1/go.mod h1:iho4nEKcnwZFiniGSdcgdvRgZNjxm+h20acv8vqmN6Q=
github.com/jawher/mow.cli v1.1.0/go.mod h1:aNaQlc7ozF3vw6IJ2dHjp2ZFiA4ozMIYY6PyuRJwlUg=
@@ -230,6 +236,8 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
+github.com/lithammer/fuzzysearch v1.1.5 h1:Ag7aKU08wp0R9QCfF4GoGST9HbmAIeLP7xwMrOBEp1c=
+github.com/lithammer/fuzzysearch v1.1.5/go.mod h1:1R1LRNk7yKid1BaQkmuLQaHruxcC4HmAH30Dh61Ih1Q=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo=
@@ -250,10 +258,10 @@ github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRC
github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
-github.com/metafates/mangal-lua-libs v0.4.1-0.20220920195433-ce228b4abd95 h1:8EbMD890jS9WWjtgSzqnURJMTvX1Eat4BKq3Ydpqz6s=
-github.com/metafates/mangal-lua-libs v0.4.1-0.20220920195433-ce228b4abd95/go.mod h1:/g3V2cAx3iZHdUcDt1Hr4O69xKhI+jlEiRdvwAReBiA=
github.com/metafates/mangal-lua-libs v0.4.1 h1:wqHgFbZ82gOqiPIFlWAeJGPMvZNSvA8CaQ98hVjPXOo=
github.com/metafates/mangal-lua-libs v0.4.1/go.mod h1:+0gexBk9l//rXWwjcPRmO222qnet+Akm3Sps4jNYnHk=
+github.com/metafates/mangal-lua-libs v0.4.2 h1:zTSYhQ0uz/2SSBOjvi1oflJv3faAWt5k4g8mDhRl2mE=
+github.com/metafates/mangal-lua-libs v0.4.2/go.mod h1:+0gexBk9l//rXWwjcPRmO222qnet+Akm3Sps4jNYnHk=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
@@ -299,6 +307,8 @@ github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxT
github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
github.com/samber/lo v1.33.0 h1:2aKucr+rQV6gHpY3bpeZu69uYoQOzVhGT3J22Op6Cjk=
github.com/samber/lo v1.33.0/go.mod h1:HLeWcJRRyLKp3+/XBJvOrerCQn9mhdKMHyd7IRlgeQ8=
+github.com/samber/mo v1.5.1 h1:5dRSevAB33Q/OrYwTmtksHHxquuf2urnRSUTsdTFysY=
+github.com/samber/mo v1.5.1/go.mod h1:pDuQgWscOVGGoEz+NAeth/Xq+MPAcXxCeph1XIAm/DU=
github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=
@@ -310,8 +320,8 @@ github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcD
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
-github.com/spf13/cobra v1.6.0 h1:42a0n6jwCot1pUmomAp4T7DeMD+20LFv4Q54pxLf2LI=
-github.com/spf13/cobra v1.6.0/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
+github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
+github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
@@ -323,6 +333,7 @@ github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoH
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.3.1-0.20190311161405-34c6fa2dc709/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
@@ -380,8 +391,10 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
-golang.org/x/exp v0.0.0-20221019170559-20944726eadf h1:nFVjjKDgNY37+ZSYCJmtYf7tOlfQswHqplG2eosjOMg=
-golang.org/x/exp v0.0.0-20221019170559-20944726eadf/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE=
+golang.org/x/exp v0.0.0-20221026153819-32f3d567a233 h1:9bNbSKT4RPLEzne0Xh1v3NaNecsa1DKjkOuTbY6V9rI=
+golang.org/x/exp v0.0.0-20221026153819-32f3d567a233/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
+golang.org/x/exp v0.0.0-20221028150844-83b7d23a625f h1:Al51T6tzvuh3oiwX11vex3QgJ2XTedFPGmbEVh8cdoc=
+golang.org/x/exp v0.0.0-20221028150844-83b7d23a625f/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20190823064033-3a9bac650e44/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
diff --git a/history/chapter.go b/history/chapter.go
new file mode 100644
index 00000000..98878c61
--- /dev/null
+++ b/history/chapter.go
@@ -0,0 +1,40 @@
+package history
+
+import (
+ "fmt"
+ "github.com/metafates/mangal/source"
+)
+
+type SavedChapter struct {
+ SourceID string `json:"source_id"`
+ MangaName string `json:"manga_name"`
+ MangaURL string `json:"manga_url"`
+ MangaChaptersTotal int `json:"manga_chapters_total"`
+ Name string `json:"name"`
+ URL string `json:"url"`
+ ID string `json:"id"`
+ Index int `json:"index"`
+ MangaID string `json:"manga_id"`
+}
+
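+// encode returns the map key under which the chapter is stored in the history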
+func (c *SavedChapter) encode() string {
+ return fmt.Sprintf("%s (%s)", c.MangaName, c.SourceID)
+}
+
+func (c *SavedChapter) String() string {
+ return fmt.Sprintf("%s : %d / %d", c.MangaName, c.Index, c.MangaChaptersTotal)
+}
+
+func newSavedChapter(chapter *source.Chapter) *SavedChapter {
+ return &SavedChapter{
+ SourceID: chapter.Manga.Source.ID(),
+ MangaName: chapter.Manga.Name,
+ MangaURL: chapter.Manga.URL,
+ Name: chapter.Name,
+ URL: chapter.URL,
+ ID: chapter.ID,
+ MangaID: chapter.Manga.ID,
+ MangaChaptersTotal: len(chapter.Manga.Chapters),
+ Index: int(chapter.Index),
+ }
+}
\ No newline at end of file
diff --git a/history/history.go b/history/history.go
index cade44b7..164983f4 100644
--- a/history/history.go
+++ b/history/history.go
@@ -1,161 +1,65 @@
package history
import (
- "encoding/json"
- "fmt"
+ "github.com/metafates/mangal/cache"
"github.com/metafates/mangal/constant"
- "github.com/metafates/mangal/filesystem"
"github.com/metafates/mangal/integration"
"github.com/metafates/mangal/log"
"github.com/metafates/mangal/source"
"github.com/metafates/mangal/where"
+ "github.com/samber/mo"
"github.com/spf13/viper"
- "os"
+ "time"
)
-type SavedChapter struct {
- SourceID string `json:"source_id"`
- MangaName string `json:"manga_name"`
- MangaURL string `json:"manga_url"`
- MangaChaptersTotal int `json:"manga_chapters_total"`
- Name string `json:"name"`
- URL string `json:"url"`
- ID string `json:"id"`
- Index int `json:"index"`
- MangaID string `json:"manga_id"`
-}
-
-func (c *SavedChapter) String() string {
- return fmt.Sprintf("%s : %d / %d", c.MangaName, c.Index, c.MangaChaptersTotal)
-}
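+// cacher persists the reading history on disk; ExpireEvery is None, so entries never expire on their own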
+var cacher = cache.New[map[string]*SavedChapter](
+ where.History(),
+ &cache.Options{
+ ExpireEvery: mo.None[time.Duration](),
+ },
+)
// Get returns all chapters from the history file
func Get() (chapters map[string]*SavedChapter, err error) {
- log.Info("Getting history location")
- historyFile := where.History()
-
- // decode json into slice of structs
- log.Info("Reading history file")
- contents, err := filesystem.Api().ReadFile(historyFile)
- if err != nil {
- log.Error(err)
- return
+ cached, ok := cacher.Get().Get()
+ if !ok {
+ return make(map[string]*SavedChapter), nil
}
- log.Info("Decoding history from json")
- err = json.Unmarshal(contents, &chapters)
- if err != nil {
- log.Error(err)
- return
- }
-
- return
+ return cached, nil
}
// Save saves the chapter to the history file
func Save(chapter *source.Chapter) error {
if viper.GetBool(constant.AnilistEnable) {
- defer func() {
+ go func() {
log.Info("Saving chapter to anilist")
err := integration.Anilist.MarkRead(chapter)
if err != nil {
- log.Error("Saving chapter to anilist failed: " + err.Error())
+ log.Warn("Saving chapter to anilist failed: " + err.Error())
}
}()
}
- log.Info("Saving chapter to history")
-
- historyFile := where.History()
-
- // decode json into slice of structs
- var chapters map[string]*SavedChapter
- log.Info("Reading history file")
- contents, err := filesystem.Api().ReadFile(historyFile)
- if err != nil {
- log.Error(err)
- return err
- }
-
- log.Info("Decoding history from json")
- err = json.Unmarshal(contents, &chapters)
+ saved, err := Get()
if err != nil {
- log.Error(err)
return err
}
- jsonChapter := SavedChapter{
- SourceID: chapter.Manga.Source.ID(),
- MangaName: chapter.Manga.Name,
- MangaURL: chapter.Manga.URL,
- Name: chapter.Name,
- URL: chapter.URL,
- ID: chapter.ID,
- MangaID: chapter.Manga.ID,
- MangaChaptersTotal: len(chapter.Manga.Chapters),
- Index: int(chapter.Index),
- }
-
- chapters[fmt.Sprintf("%s (%s)", jsonChapter.MangaName, jsonChapter.SourceID)] = &jsonChapter
+ savedChapter := newSavedChapter(chapter)
+ saved[savedChapter.encode()] = savedChapter
- // encode json
- log.Info("Encoding history to json")
- encoded, err := json.Marshal(chapters)
- if err != nil {
- log.Error(err)
- return err
- }
-
- // write to file
- log.Info("Writing history to file")
- err = filesystem.Api().WriteFile(historyFile, encoded, os.ModePerm)
- if err != nil {
- log.Error(err)
- return err
- }
-
- return nil
+ return cacher.Set(saved)
}
// Remove removes the chapter from the history file
func Remove(chapter *SavedChapter) error {
- log.Info("Removing chapter from history")
-
- historyFile := where.History()
-
- // decode json into slice of structs
- var chapters map[string]*SavedChapter
- log.Info("Reading history file")
- contents, err := filesystem.Api().ReadFile(historyFile)
+ saved, err := Get()
if err != nil {
- log.Error(err)
return err
}
- log.Info("Decoding history from json")
- err = json.Unmarshal(contents, &chapters)
- if err != nil {
- log.Error(err)
- return err
- }
-
- delete(chapters, fmt.Sprintf("%s (%s)", chapter.MangaName, chapter.SourceID))
-
- // encode json
- log.Info("Encoding history to json")
- encoded, err := json.Marshal(chapters)
- if err != nil {
- log.Error(err)
- return err
- }
-
- // write to file
- log.Info("Writing history to file")
- err = filesystem.Api().WriteFile(historyFile, encoded, os.ModePerm)
- if err != nil {
- log.Error(err)
- return err
- }
+ delete(saved, chapter.encode())
- return nil
+ return cacher.Set(saved)
}
diff --git a/icon/list.go b/icon/list.go
index 6b605ae8..c2ec7462 100644
--- a/icon/list.go
+++ b/icon/list.go
@@ -1,6 +1,9 @@
package icon
-import "github.com/metafates/mangal/style"
+import (
+ "github.com/metafates/mangal/color"
+ "github.com/metafates/mangal/style"
+)
type Icon int
@@ -11,57 +14,81 @@ const (
Success
Question
Mark
+ Downloaded
Progress
+ Search
+ Link
)
var icons = map[Icon]*iconDef{
Lua: {
emoji: "🌙",
- nerd: style.Blue("\uE620"),
- plain: style.Blue("Lua"),
- kaomoji: style.Blue("(=^・ω・^=)"),
- squares: style.Blue("◧"),
+ nerd: style.Fg(color.Blue)("\uE620"),
+ plain: style.Fg(color.Blue)("Lua"),
+ kaomoji: style.Fg(color.Blue)("(=^・ω・^=)"),
+ squares: style.Fg(color.Blue)("◧"),
},
Go: {
emoji: "🐹",
- nerd: style.Cyan("\uE627"),
- plain: style.Cyan("Go"),
- kaomoji: style.Cyan("ʕ •ᴥ• ʔ"),
- squares: style.Cyan("◨"),
+ nerd: style.Fg(color.Cyan)("\uE627"),
+ plain: style.Fg(color.Cyan)("Go"),
+ kaomoji: style.Fg(color.Cyan)("ʕ •ᴥ• ʔ"),
+ squares: style.Fg(color.Cyan)("◨"),
},
Fail: {
emoji: "💀",
- nerd: style.Red("ﮊ"),
- plain: style.Red("X"),
- kaomoji: style.Red("┐('~`;)┌"),
- squares: style.Red("▨"),
+ nerd: style.Fg(color.Red)("ﮊ"),
+ plain: style.Fg(color.Red)("X"),
+ kaomoji: style.Fg(color.Red)("┐('~`;)┌"),
+ squares: style.Fg(color.Red)("▨"),
},
Success: {
emoji: "🎉",
- nerd: style.Green("\uF65F "),
- plain: style.Green("✓"),
- kaomoji: style.Green("(ᵔ◡ᵔ)"),
- squares: style.Green("▣"),
+ nerd: style.Fg(color.Green)("\uF65F "),
+ plain: style.Fg(color.Green)("✓"),
+ kaomoji: style.Fg(color.Green)("(ᵔ◡ᵔ)"),
+ squares: style.Fg(color.Green)("▣"),
},
Mark: {
emoji: "🦐",
- nerd: style.Green("\uF6D9"),
- plain: style.Combined(style.Green, style.Bold)("*"),
- kaomoji: style.Combined(style.Red, style.Bold)("炎"),
- squares: style.Combined(style.Green, style.Bold)("■"),
+ nerd: style.Fg(color.Green)("\uF6D9"),
+ plain: style.New().Bold(true).Foreground(color.Orange).Render("*"),
+ kaomoji: style.New().Bold(true).Foreground(color.Red).Render("炎"),
+ squares: style.New().Bold(true).Foreground(color.Orange).Render("■"),
},
Question: {
emoji: "🤨",
- nerd: style.Yellow("\uF128"),
- plain: style.Yellow("?"),
- kaomoji: style.Yellow("(¬ ¬)"),
- squares: style.Yellow("◲"),
+ nerd: style.Fg(color.Yellow)("\uF128"),
+ plain: style.Fg(color.Yellow)("?"),
+ kaomoji: style.Fg(color.Yellow)("(¬ ¬)"),
+ squares: style.Fg(color.Yellow)("◲"),
},
Progress: {
emoji: "👾",
- nerd: style.Blue("\uF0ED "),
- plain: style.Blue("@"),
- kaomoji: style.Blue("┌( >_<)┘"),
- squares: style.Blue("◫"),
+ nerd: style.Fg(color.Blue)("\uF0ED "),
+ plain: style.Fg(color.Blue)("@"),
+ kaomoji: style.Fg(color.Blue)("┌( >_<)┘"),
+ squares: style.Fg(color.Blue)("◫"),
+ },
+ Downloaded: {
+ emoji: "📦",
+ nerd: style.Bold("\uF0C5 "),
+ plain: style.New().Bold(true).Faint(true).Render("D"),
+ kaomoji: style.Bold("⊂(◉‿◉)つ"),
+ squares: style.Bold("◬"),
+ },
+ Search: {
+ emoji: "🔍",
+ nerd: style.Fg(color.Blue)("\uF002"),
+ plain: style.Fg(color.Blue)("S"),
+ kaomoji: style.Fg(color.Blue)("⌐■-■"),
+ squares: style.Fg(color.Blue)("◪"),
+ },
+ Link: {
+ emoji: "🔗",
+ nerd: style.Fg(color.Blue)("\uF0C1"),
+ plain: style.Fg(color.Blue)("L"),
+ kaomoji: style.Fg(color.Blue)("⌐■-■"),
+ squares: style.Fg(color.Blue)("◪"),
},
}
diff --git a/inline/inline.go b/inline/inline.go
index e6cd0bcf..1301f2d1 100644
--- a/inline/inline.go
+++ b/inline/inline.go
@@ -9,24 +9,29 @@ import (
"os"
)
-func Run(options *Options) error {
+func Run(options *Options) (err error) {
if options.Out == nil {
options.Out = os.Stdout
}
- mangas, err := options.Source.Search(options.Query)
- if err != nil {
- return err
+ var mangas []*source.Manga
+ for _, src := range options.Sources {
+ m, err := src.Search(options.Query)
+ if err != nil {
+ return err
+ }
+
+ mangas = append(mangas, m...)
}
- if options.MangaPicker.IsNone() && options.ChaptersFilter.IsNone() {
+ if options.MangaPicker.IsAbsent() && options.ChaptersFilter.IsAbsent() {
if viper.GetBool(constant.MetadataFetchAnilist) {
for _, manga := range mangas {
_ = manga.PopulateMetadata(func(string) {})
}
}
- marshalled, err := asJson(mangas)
+ marshalled, err := asJson(mangas, options)
if err != nil {
return err
}
@@ -36,15 +41,15 @@ func Run(options *Options) error {
}
// manga picker can only be none if json is set
- if options.MangaPicker.IsNone() {
+ if options.MangaPicker.IsAbsent() {
// preload all chapters
for _, manga := range mangas {
- if err = jsonUpdateChapters(manga, options); err != nil {
+ if err = prepareManga(manga, options); err != nil {
return err
}
}
- marshalled, err := asJson(mangas)
+ marshalled, err := asJson(mangas, options)
if err != nil {
return err
}
@@ -56,35 +61,34 @@ func Run(options *Options) error {
var chapters []*source.Chapter
if len(mangas) == 0 {
- chapters = []*source.Chapter{}
- } else {
- manga := options.MangaPicker.Unwrap()(mangas)
+ return nil
+ }
- chapters, err = options.Source.ChaptersOf(manga)
+ manga := options.MangaPicker.MustGet()(mangas)
+ chapters, err = manga.Source.ChaptersOf(manga)
+ if err != nil {
+ return err
+ }
+
+ if options.ChaptersFilter.IsPresent() {
+ chapters, err = options.ChaptersFilter.MustGet()(chapters)
if err != nil {
return err
}
+ }
- if options.ChaptersFilter.IsSome() {
- chapters, err = options.ChaptersFilter.Unwrap()(chapters)
- if err != nil {
- return err
- }
+ if options.Json {
+ if err = prepareManga(manga, options); err != nil {
+ return err
}
- if options.Json {
- if err = jsonUpdateChapters(manga, options); err != nil {
- return err
- }
-
- marshalled, err := asJson([]*source.Manga{manga})
- if err != nil {
- return err
- }
-
- _, err = options.Out.Write(marshalled)
+ marshalled, err := asJson([]*source.Manga{manga}, options)
+ if err != nil {
return err
}
+
+ _, err = options.Out.Write(marshalled)
+ return err
}
for _, chapter := range chapters {
diff --git a/inline/json.go b/inline/json.go
index caa60de4..42c9b6ff 100644
--- a/inline/json.go
+++ b/inline/json.go
@@ -2,38 +2,81 @@ package inline
import (
"encoding/json"
+ "github.com/metafates/mangal/anilist"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/source"
"github.com/spf13/viper"
)
-func asJson(manga []*source.Manga) (marshalled []byte, err error) {
- return json.Marshal(&struct {
- Manga []*source.Manga
- }{
- Manga: manga,
+type Manga struct {
+ // Source that the manga belongs to.
+ Source string `json:"source" jsonschema:"description=Source that the manga belongs to."`
+ // Mangal variant of the manga
+ Mangal *source.Manga `json:"mangal" jsonschema:"description=Mangal variant of the manga"`
+ // Anilist is the closest anilist match to mangal manga
+ Anilist *anilist.Manga `json:"anilist" jsonschema:"description=Anilist is the closest anilist match to mangal manga"`
+}
+
+type Output struct {
+ Query string `json:"query" jsonschema:"description=Query that was used to search for the manga."`
+ Result []*Manga `json:"result" jsonschema:"description=Result of the search."`
+}
+
+func asJson(manga []*source.Manga, options *Options) (marshalled []byte, err error) {
+ var m = make([]*Manga, len(manga))
+ for i, manga := range manga {
+ al := manga.Anilist.OrElse(nil)
+ if !options.IncludeAnilistManga {
+ al = nil
+ }
+
+ m[i] = &Manga{
+ Mangal: manga,
+ Anilist: al,
+ Source: manga.Source.Name(),
+ }
+ }
+
+ return json.Marshal(&Output{
+ Result: m,
+ Query: options.Query,
})
}
-func jsonUpdateChapters(manga *source.Manga, options *Options) error {
+func prepareManga(manga *source.Manga, options *Options) error {
var err error
- chapters, _ := options.Source.ChaptersOf(manga)
- if options.ChaptersFilter.IsSome() {
- chapters, err = options.ChaptersFilter.Unwrap()(chapters)
+
+ if options.IncludeAnilistManga {
+ err = manga.BindWithAnilist()
if err != nil {
return err
}
}
- manga.Chapters = chapters
+ if options.ChaptersFilter.IsPresent() {
+ chapters, err := manga.Source.ChaptersOf(manga)
+ if err != nil {
+ return err
+ }
+
+ chapters, err = options.ChaptersFilter.MustGet()(chapters)
+ if err != nil {
+ return err
+ }
+
+ manga.Chapters = chapters
- if options.PopulatePages {
- for _, chapter := range chapters {
- _, err := options.Source.PagesOf(chapter)
- if err != nil {
- return err
+ if options.PopulatePages {
+ for _, chapter := range chapters {
+ _, err := chapter.Source().PagesOf(chapter)
+ if err != nil {
+ return err
+ }
}
}
+ } else {
+ // clear chapters in case they were loaded from cache or something
+ manga.Chapters = make([]*source.Chapter, 0)
}
if viper.GetBool(constant.MetadataFetchAnilist) {
diff --git a/inline/options.go b/inline/options.go
index 39fcfb60..22214dd0 100644
--- a/inline/options.go
+++ b/inline/options.go
@@ -5,6 +5,7 @@ import (
"github.com/metafates/mangal/source"
"github.com/metafates/mangal/util"
"github.com/samber/lo"
+ "github.com/samber/mo"
"io"
"regexp"
"strconv"
@@ -17,14 +18,15 @@ type (
)
type Options struct {
- Out io.Writer
- Source source.Source
- Download bool
- Json bool
- PopulatePages bool
- Query string
- MangaPicker util.Option[MangaPicker]
- ChaptersFilter util.Option[ChaptersFilter]
+ Out io.Writer
+ Sources []source.Source
+ IncludeAnilistManga bool
+ Download bool
+ Json bool
+ PopulatePages bool
+ Query string
+ MangaPicker mo.Option[MangaPicker]
+ ChaptersFilter mo.Option[ChaptersFilter]
}
func ParseMangaPicker(description string) (MangaPicker, error) {
diff --git a/integration/anilistintegration/anilist.go b/integration/anilist/anilist.go
similarity index 82%
rename from integration/anilistintegration/anilist.go
rename to integration/anilist/anilist.go
index 75fea13c..078c1933 100644
--- a/integration/anilistintegration/anilist.go
+++ b/integration/anilist/anilist.go
@@ -1,4 +1,4 @@
-package anilistintegration
+package anilist
import (
"github.com/metafates/mangal/constant"
@@ -9,6 +9,7 @@ type Anilist struct {
token string
}
+// New creates a new Anilist integration instance
func New() *Anilist {
return &Anilist{}
}
@@ -25,6 +26,7 @@ func (a *Anilist) code() string {
return viper.GetString(constant.AnilistCode)
}
+// AuthURL returns the URL to authenticate with Anilist
func (a *Anilist) AuthURL() string {
return "https://anilist.co/api/v2/oauth/authorize?client_id=" + a.id() + "&response_type=code&redirect_uri=https://anilist.co/api/v2/oauth/pin"
}
diff --git a/integration/anilistintegration/login.go b/integration/anilist/login.go
similarity index 98%
rename from integration/anilistintegration/login.go
rename to integration/anilist/login.go
index 81a691c2..fed3a717 100644
--- a/integration/anilistintegration/login.go
+++ b/integration/anilist/login.go
@@ -1,4 +1,4 @@
-package anilistintegration
+package anilist
import (
"bytes"
diff --git a/integration/anilistintegration/mark.go b/integration/anilist/mark.go
similarity index 98%
rename from integration/anilistintegration/mark.go
rename to integration/anilist/mark.go
index 31f8783e..762279df 100644
--- a/integration/anilistintegration/mark.go
+++ b/integration/anilist/mark.go
@@ -1,4 +1,4 @@
-package anilistintegration
+package anilist
import (
"bytes"
diff --git a/integration/integration.go b/integration/integration.go
index 2e126f29..2f7be452 100644
--- a/integration/integration.go
+++ b/integration/integration.go
@@ -1,14 +1,16 @@
package integration
import (
- "github.com/metafates/mangal/integration/anilistintegration"
+ "github.com/metafates/mangal/integration/anilist"
"github.com/metafates/mangal/source"
)
+// Integrator is the interface that wraps the basic integration methods.
type Integrator interface {
+ // MarkRead marks a chapter as read
MarkRead(chapter *source.Chapter) error
}
var (
- Anilist Integrator = anilistintegration.New()
+ Anilist Integrator = anilist.New()
)
diff --git a/mini/input.go b/mini/input.go
index 69f0c189..753fc624 100644
--- a/mini/input.go
+++ b/mini/input.go
@@ -3,6 +3,7 @@ package mini
import (
"bufio"
"fmt"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/style"
"os"
"strconv"
@@ -20,7 +21,7 @@ func (o input) asInt() (n int64, ok bool) {
}
func getInput(validator func(string) bool) (*input, error) {
- fmt.Print(style.Magenta("> "))
+ fmt.Print(style.Fg(color.Purple)("> "))
reader := bufio.NewReader(os.Stdin)
in, err := reader.ReadString('\n')
if err != nil {
@@ -34,7 +35,7 @@ func getInput(validator func(string) bool) (*input, error) {
}
if !validator(in) {
- fmt.Println(style.Red("Invalid choice entered"))
+ fmt.Println(style.Fg(color.Red)("Invalid choice entered"))
return getInput(validator)
}
diff --git a/mini/states.go b/mini/states.go
index 74b7a788..a39db722 100644
--- a/mini/states.go
+++ b/mini/states.go
@@ -32,7 +32,7 @@ const (
func (m *mini) handleSourceSelectState() error {
var err error
- if name := viper.GetString(constant.DownloaderDefaultSource); name != "" {
+ if name := viper.GetString(constant.DownloaderDefaultSources); name != "" {
p, ok := provider.Get(name)
if !ok {
return fmt.Errorf("unknown source \"%s\"", name)
@@ -339,7 +339,7 @@ func (m *mini) handleChaptersDownloadState() error {
}
util.ClearScreen()
- title(fmt.Sprintf("%s downloaded.", util.Quantity(len(m.selectedChapters), "chapter")))
+ title(fmt.Sprintf("%s downloaded.", util.Quantify(len(m.selectedChapters), "chapter", "chapters")))
b, _, err := menu([]fmt.Stringer{}, back, search)
if err != nil {
return err
diff --git a/mini/ui.go b/mini/ui.go
index ee533acf..861bf85b 100644
--- a/mini/ui.go
+++ b/mini/ui.go
@@ -2,6 +2,7 @@ package mini
import (
"fmt"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/style"
"github.com/samber/lo"
"os"
@@ -10,7 +11,7 @@ import (
)
func progress(msg string) (eraser func()) {
- msg = style.Combined(style.Truncate(truncateAt), style.Blue)(msg)
+ msg = style.New().Foreground(color.Blue).Render(msg)
fmt.Printf("\r%s", msg)
return func() {
@@ -19,18 +20,18 @@ func progress(msg string) (eraser func()) {
}
func title(t string) {
- fmt.Println(style.Combined(style.Truncate(truncateAt), style.Magenta, style.Bold)(t))
+ fmt.Println(style.New().Bold(true).Width(truncateAt).Foreground(color.Purple).Render(t))
}
func fail(t string) {
- fmt.Println(style.Combined(style.Truncate(truncateAt), style.Red, style.Bold)(t))
+ fmt.Println(style.New().Bold(true).Width(truncateAt).Foreground(color.Red).Render(t))
}
func menu[T fmt.Stringer](items []T, options ...*bind) (*bind, T, error) {
styles := map[int]func(string) string{
- 0: style.Combined(style.Truncate(truncateAt), style.Yellow),
- 1: style.Combined(style.Truncate(truncateAt), style.Cyan),
- 2: style.Combined(style.Truncate(truncateAt), style.Bold, style.Red),
+ 0: style.New().Width(truncateAt).Foreground(color.Yellow).Render,
+ 1: style.New().Width(truncateAt).Foreground(color.Cyan).Render,
+ 2: style.New().Width(truncateAt).Foreground(color.Red).Render,
}
for i, item := range items {
diff --git a/network/client.go b/network/client.go
index 5a059126..b61ea260 100644
--- a/network/client.go
+++ b/network/client.go
@@ -10,13 +10,13 @@ var transport = http.DefaultTransport.(*http.Transport).Clone()
func init() {
transport.MaxIdleConns = 100
transport.MaxIdleConnsPerHost = 100
- transport.MaxConnsPerHost = 100
+ transport.MaxConnsPerHost = 200
transport.IdleConnTimeout = 30 * time.Second
transport.ResponseHeaderTimeout = 30 * time.Second
transport.ExpectContinueTimeout = 30 * time.Second
}
var Client = &http.Client{
- Timeout: 30 * time.Second,
+ Timeout: time.Minute,
Transport: transport,
}
diff --git a/provider/custom/cache.go b/provider/custom/cache.go
new file mode 100644
index 00000000..aac9d391
--- /dev/null
+++ b/provider/custom/cache.go
@@ -0,0 +1,50 @@
+package custom
+
+import (
+ "github.com/metafates/mangal/cache"
+ "github.com/metafates/mangal/util"
+ "github.com/metafates/mangal/where"
+ "github.com/samber/mo"
+ "path/filepath"
+ "time"
+)
+
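+// cacher is a small wrapper around cache.Cache that stores a string-keyed map and expires entries every 24 hours.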
+type cacher[T any] struct {
+ internal *cache.Cache[map[string]T]
+}
+
+func newCacher[T any](name string) *cacher[T] {
+ return &cacher[T]{
+ internal: cache.New[map[string]T](filepath.Join(where.Cache(), util.SanitizeFilename(name)+".json"), &cache.Options{
+ ExpireEvery: mo.Some(time.Hour * 24),
+ }),
+ }
+}
+
+func (c *cacher[T]) Get(key string) mo.Option[T] {
+ if c.internal.Get().IsAbsent() {
+ return mo.None[T]()
+ }
+
+ data := c.internal.Get().MustGet()
+
+ if x, ok := data[key]; ok {
+ return mo.Some(x)
+ }
+
+ return mo.None[T]()
+}
+
+func (c *cacher[T]) Set(key string, t T) error {
+ var data map[string]T
+
+ if c.internal.Get().IsAbsent() {
+ data = make(map[string]T)
+ } else {
+ data = c.internal.Get().MustGet()
+ }
+
+ data[key] = t
+
+ return c.internal.Set(data)
+}
diff --git a/provider/custom/chapters.go b/provider/custom/chapters.go
index e26cb4dd..b847ec64 100644
--- a/provider/custom/chapters.go
+++ b/provider/custom/chapters.go
@@ -8,8 +8,13 @@ import (
)
func (s *luaSource) ChaptersOf(manga *source.Manga) ([]*source.Chapter, error) {
- if cached, ok := s.cachedChapters[manga.URL]; ok {
- return cached, nil
+ if chapters := s.cache.chapters.Get(manga.URL); chapters.IsPresent() {
+ c := chapters.MustGet()
+ for _, chapter := range c {
+ chapter.Manga = manga
+ }
+
+ return c, nil
}
_, err := s.call(constant.MangaChaptersFn, lua.LTTable, lua.LString(manga.URL))
@@ -19,8 +24,7 @@ func (s *luaSource) ChaptersOf(manga *source.Manga) ([]*source.Chapter, error) {
}
table := s.state.CheckTable(-1)
- chapters := make([]*source.Chapter, table.Len())
- var i uint16
+ chapters := make([]*source.Chapter, 0)
table.ForEach(func(k lua.LValue, v lua.LValue) {
if k.Type() != lua.LTNumber {
@@ -42,10 +46,9 @@ func (s *luaSource) ChaptersOf(manga *source.Manga) ([]*source.Chapter, error) {
s.state.RaiseError(err.Error())
}
- chapters[i] = chapter
- i++
+ chapters = append(chapters, chapter)
})
- s.cachedChapters[manga.URL] = chapters
+ _ = s.cache.chapters.Set(manga.URL, chapters)
return chapters, nil
}
diff --git a/provider/custom/pages.go b/provider/custom/pages.go
index 7b084866..2608b708 100644
--- a/provider/custom/pages.go
+++ b/provider/custom/pages.go
@@ -7,10 +7,6 @@ import (
)
func (s *luaSource) PagesOf(chapter *source.Chapter) ([]*source.Page, error) {
- if cached, ok := s.cachedPages[chapter.URL]; ok {
- return cached, nil
- }
-
_, err := s.call(constant.ChapterPagesFn, lua.LTTable, lua.LString(chapter.URL))
if err != nil {
@@ -18,8 +14,7 @@ func (s *luaSource) PagesOf(chapter *source.Chapter) ([]*source.Page, error) {
}
table := s.state.CheckTable(-1)
- pages := make([]*source.Page, table.Len())
- var i uint16
+ pages := make([]*source.Page, 0)
table.ForEach(func(k lua.LValue, v lua.LValue) {
if k.Type() != lua.LTNumber {
@@ -36,10 +31,8 @@ func (s *luaSource) PagesOf(chapter *source.Chapter) ([]*source.Page, error) {
s.state.RaiseError(err.Error())
}
- pages[i] = page
- i++
+ pages = append(pages, page)
})
- s.cachedPages[chapter.URL] = pages
return pages, nil
}
diff --git a/provider/custom/search.go b/provider/custom/search.go
index c7f13821..71de1459 100644
--- a/provider/custom/search.go
+++ b/provider/custom/search.go
@@ -8,8 +8,13 @@ import (
)
func (s *luaSource) Search(query string) ([]*source.Manga, error) {
- if cached, ok := s.cachedMangas[query]; ok {
- return cached, nil
+ if mangas := s.cache.mangas.Get(query); mangas.IsPresent() {
+ m := mangas.MustGet()
+ for _, manga := range m {
+ manga.Source = s
+ }
+
+ return m, nil
}
_, err := s.call(constant.SearchMangaFn, lua.LTTable, lua.LString(query))
@@ -45,6 +50,6 @@ func (s *luaSource) Search(query string) ([]*source.Manga, error) {
mangas = append(mangas, manga)
})
- s.cachedMangas[query] = mangas
+ _ = s.cache.mangas.Set(query, mangas)
return mangas, nil
}
diff --git a/provider/custom/source.go b/provider/custom/source.go
index 5e17c91b..a7c400f7 100644
--- a/provider/custom/source.go
+++ b/provider/custom/source.go
@@ -1,16 +1,18 @@
package custom
import (
+ "fmt"
"github.com/metafates/mangal/source"
lua "github.com/yuin/gopher-lua"
)
type luaSource struct {
- name string
- state *lua.LState
- cachedMangas map[string][]*source.Manga
- cachedChapters map[string][]*source.Chapter
- cachedPages map[string][]*source.Page
+ name string
+ state *lua.LState
+ cache struct {
+ mangas *cacher[[]*source.Manga]
+ chapters *cacher[[]*source.Chapter]
+ }
}
func (s *luaSource) Name() string {
@@ -18,13 +20,19 @@ func (s *luaSource) Name() string {
}
func newLuaSource(name string, state *lua.LState) (*luaSource, error) {
- return &luaSource{
- name: name,
- state: state,
- cachedMangas: make(map[string][]*source.Manga),
- cachedChapters: make(map[string][]*source.Chapter),
- cachedPages: make(map[string][]*source.Page),
- }, nil
+ s := &luaSource{
+ name: name,
+ state: state,
+ }
+
+ cacheName := func(cacheFor string) string {
+ return fmt.Sprintf("%s_%s", s.ID(), cacheFor)
+ }
+
+ s.cache.mangas = newCacher[[]*source.Manga](cacheName("mangas"))
+ s.cache.chapters = newCacher[[]*source.Chapter](cacheName("chapters"))
+
+ return s, nil
}
func (s *luaSource) call(fn string, ret lua.LValueType, args ...lua.LValue) (lua.LValue, error) {
diff --git a/provider/custom/translator.go b/provider/custom/translator.go
index 7f0e687d..38fecff6 100644
--- a/provider/custom/translator.go
+++ b/provider/custom/translator.go
@@ -66,7 +66,7 @@ func mangaFromTable(table *lua.LTable, index uint16) (manga *source.Manga, err e
return err
}
- manga.Metadata.Cover = v
+ manga.Metadata.Cover.ExtraLarge = v
return nil
}},
"genres": {A: lua.LTString, B: false, C: func(v string) error {
@@ -108,7 +108,7 @@ func chapterFromTable(table *lua.LTable, manga *source.Manga, index uint16) (cha
return err
}
- manga.Metadata.Cover = v
+ manga.Metadata.Cover.ExtraLarge = v
return nil
}},
}
diff --git a/provider/generic/chapters.go b/provider/generic/chapters.go
index e4474950..03e4bb1f 100644
--- a/provider/generic/chapters.go
+++ b/provider/generic/chapters.go
@@ -6,6 +6,7 @@ import (
"net/http"
)
+// ChaptersOf returns the chapters of the given source.Manga
func (s *Scraper) ChaptersOf(manga *source.Manga) ([]*source.Chapter, error) {
if chapters, ok := s.chapters[manga.URL]; ok {
return chapters, nil
diff --git a/provider/generic/configuration.go b/provider/generic/configuration.go
index a842b1aa..857f1d9e 100644
--- a/provider/generic/configuration.go
+++ b/provider/generic/configuration.go
@@ -5,26 +5,43 @@ import (
"time"
)
+// Extractor is responsible for finding specified elements by selector and extracting required data from them
type Extractor struct {
+ // Selector CSS selector
Selector string
- Name func(*goquery.Selection) string
- URL func(*goquery.Selection) string
- Volume func(*goquery.Selection) string
- Cover func(*goquery.Selection) string
+ // Name function to get name from element found by selector.
+ Name func(*goquery.Selection) string
+ // URL function to get URL from element found by selector.
+ URL func(*goquery.Selection) string
+ // Volume function to get volume from element found by selector. Used by chapters extractor
+ Volume func(*goquery.Selection) string
+ // Cover function to get cover from element found by selector. Used by manga extractor
+ Cover func(*goquery.Selection) string
}
+// Configuration is a generic scraper configuration that defines the behavior of the scraper
type Configuration struct {
- Name string
- Delay time.Duration
+ // Name of the scraper
+ Name string
+ // Delay between requests
+ Delay time.Duration
+ // Parallelism of the scraper
Parallelism uint8
+ // ReverseChapters if true, chapters will be shown in reverse order
ReverseChapters bool
- BaseURL string
+ // BaseURL of the source
+ BaseURL string
+ // GenerateSearchURL function to create search URL from the query.
+ // E.g. "one piece" -> "https://manganelo.com/search/story/one%20piece"
GenerateSearchURL func(query string) string
+ // MangaExtractor is responsible for finding manga elements and extracting required data from them
MangaExtractor,
+ // ChapterExtractor is responsible for finding chapter elements and extracting required data from them
ChapterExtractor,
+ // PageExtractor is responsible for finding page elements and extracting required data from them
PageExtractor *Extractor
}
diff --git a/provider/generic/new.go b/provider/generic/new.go
index c9863554..e2050a11 100644
--- a/provider/generic/new.go
+++ b/provider/generic/new.go
@@ -7,9 +7,11 @@ import (
"github.com/metafates/mangal/source"
"github.com/metafates/mangal/where"
"path/filepath"
+ "strings"
"time"
)
+// New generates a new scraper with the given configuration
func New(conf *Configuration) source.Source {
s := Scraper{
mangas: make(map[string][]*source.Manga),
@@ -53,7 +55,7 @@ func New(conf *Configuration) source.Source {
ID: filepath.Base(url),
Source: &s,
}
- manga.Metadata.Cover = s.config.MangaExtractor.Cover(selection)
+ manga.Metadata.Cover.ExtraLarge = s.config.MangaExtractor.Cover(selection)
s.mangas[path][i] = &manga
})
@@ -122,6 +124,9 @@ func New(conf *Configuration) source.Source {
elements.Each(func(i int, selection *goquery.Selection) {
link := s.config.PageExtractor.URL(selection)
ext := filepath.Ext(link)
+ // strip any query parameters that were captured as part of the extension
+ ext = strings.Split(ext, "?")[0]
+
page := source.Page{
URL: link,
Index: uint16(i),
diff --git a/provider/generic/pages.go b/provider/generic/pages.go
index 45689cfb..a48e8a44 100644
--- a/provider/generic/pages.go
+++ b/provider/generic/pages.go
@@ -6,6 +6,7 @@ import (
"net/http"
)
+// PagesOf returns the pages of the given source.Chapter
func (s *Scraper) PagesOf(chapter *source.Chapter) ([]*source.Page, error) {
if pages, ok := s.pages[chapter.URL]; ok {
return pages, nil
diff --git a/provider/generic/scraper.go b/provider/generic/scraper.go
index 916c2c89..ea9b5a90 100644
--- a/provider/generic/scraper.go
+++ b/provider/generic/scraper.go
@@ -5,6 +5,7 @@ import (
"github.com/metafates/mangal/source"
)
+// Scraper is a generic scraper that downloads HTML pages and parses them
type Scraper struct {
mangasCollector *colly.Collector
chaptersCollector *colly.Collector
@@ -17,10 +18,12 @@ type Scraper struct {
config *Configuration
}
+// Name of the scraper
func (s *Scraper) Name() string {
return s.config.Name
}
+// ID of the scraper
func (s *Scraper) ID() string {
return s.config.ID()
}
diff --git a/provider/generic/search.go b/provider/generic/search.go
index 7e0f850f..43b625d3 100644
--- a/provider/generic/search.go
+++ b/provider/generic/search.go
@@ -4,6 +4,7 @@ import (
"github.com/metafates/mangal/source"
)
+// Search for mangas by the given title
func (s *Scraper) Search(query string) ([]*source.Manga, error) {
address := s.config.GenerateSearchURL(query)
diff --git a/provider/init.go b/provider/init.go
new file mode 100644
index 00000000..690c4a32
--- /dev/null
+++ b/provider/init.go
@@ -0,0 +1,39 @@
+package provider
+
+import (
+ "github.com/metafates/mangal/provider/generic"
+ "github.com/metafates/mangal/provider/mangadex"
+ "github.com/metafates/mangal/provider/manganato"
+ "github.com/metafates/mangal/provider/manganelo"
+ "github.com/metafates/mangal/provider/mangapill"
+ "github.com/metafates/mangal/source"
+)
+
+const CustomProviderExtension = ".lua"
+
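+// builtinProviders are the providers that ship with mangal itself; custom Lua providers are loaded separately.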
+var builtinProviders = []*Provider{
+ {
+ ID: mangadex.ID,
+ Name: mangadex.Name,
+ CreateSource: func() (source.Source, error) {
+ return mangadex.New(), nil
+ },
+ },
+}
+
+func init() {
+ for _, conf := range []*generic.Configuration{
+ manganelo.Config,
+ manganato.Config,
+ mangapill.Config,
+ } {
+ conf := conf
+ builtinProviders = append(builtinProviders, &Provider{
+ ID: conf.ID(),
+ Name: conf.Name,
+ CreateSource: func() (source.Source, error) {
+ return generic.New(conf), nil
+ },
+ })
+ }
+}
diff --git a/provider/mangadex/cache.go b/provider/mangadex/cache.go
new file mode 100644
index 00000000..4eaa3fda
--- /dev/null
+++ b/provider/mangadex/cache.go
@@ -0,0 +1,47 @@
+package mangadex
+
+import (
+ "github.com/metafates/mangal/cache"
+ "github.com/metafates/mangal/where"
+ "github.com/samber/mo"
+ "path/filepath"
+ "time"
+)
+
+type cacher[T any] struct {
+ internal *cache.Cache[map[string]T]
+}
+
+func newCacher[T any](name string) *cacher[T] {
+ return &cacher[T]{
+ internal: cache.New[map[string]T](
+ filepath.Join(where.Cache(), name+".json"),
+ &cache.Options{
+ ExpireEvery: mo.Some(time.Hour * 24),
+ },
+ ),
+ }
+}
+
+func (c *cacher[T]) Get(key string) mo.Option[T] {
+ cached, ok := c.internal.Get().Get()
+ if !ok {
+ return mo.None[T]()
+ }
+
+ if value, ok := cached[key]; ok {
+ return mo.Some[T](value)
+ }
+
+ return mo.None[T]()
+}
+
+func (c *cacher[T]) Set(key string, value T) error {
+ cached, ok := c.internal.Get().Get()
+ if !ok {
+ cached = map[string]T{}
+ }
+
+ cached[key] = value
+ return c.internal.Set(cached)
+}
diff --git a/provider/mangadex/chapters.go b/provider/mangadex/chapters.go
index 57e7bec3..35985398 100644
--- a/provider/mangadex/chapters.go
+++ b/provider/mangadex/chapters.go
@@ -12,7 +12,11 @@ import (
)
func (m *Mangadex) ChaptersOf(manga *source.Manga) ([]*source.Chapter, error) {
- if cached, ok := m.cachedChapters[manga.URL]; ok {
+ if cached, ok := m.cache.chapters.Get(manga.URL).Get(); ok {
+ for _, chapter := range cached {
+ chapter.Manga = manga
+ }
+
return cached, nil
}
@@ -99,6 +103,6 @@ func (m *Mangadex) ChaptersOf(manga *source.Manga) ([]*source.Chapter, error) {
})
manga.Chapters = chapters
- m.cachedChapters[manga.URL] = chapters
+ _ = m.cache.chapters.Set(manga.URL, chapters)
return chapters, nil
}
diff --git a/provider/mangadex/mangadex.go b/provider/mangadex/mangadex.go
index 07e4995b..179c7e61 100644
--- a/provider/mangadex/mangadex.go
+++ b/provider/mangadex/mangadex.go
@@ -11,9 +11,11 @@ const (
)
type Mangadex struct {
- client *mangodex.DexClient
- cachedMangas map[string][]*source.Manga
- cachedChapters map[string][]*source.Chapter
+ client *mangodex.DexClient
+ cache struct {
+ mangas *cacher[[]*source.Manga]
+ chapters *cacher[[]*source.Chapter]
+ }
}
func (*Mangadex) Name() string {
@@ -25,9 +27,12 @@ func (*Mangadex) ID() string {
}
func New() *Mangadex {
- return &Mangadex{
- client: mangodex.NewDexClient(),
- cachedMangas: make(map[string][]*source.Manga),
- cachedChapters: make(map[string][]*source.Chapter),
+ dex := &Mangadex{
+ client: mangodex.NewDexClient(),
}
+
+ dex.cache.mangas = newCacher[[]*source.Manga](ID + "_mangas")
+ dex.cache.chapters = newCacher[[]*source.Chapter](ID + "_chapters")
+
+ return dex
}
diff --git a/provider/mangadex/search.go b/provider/mangadex/search.go
index c09d34f9..b3c839c0 100644
--- a/provider/mangadex/search.go
+++ b/provider/mangadex/search.go
@@ -12,7 +12,11 @@ import (
)
func (m *Mangadex) Search(query string) ([]*source.Manga, error) {
- if cached, ok := m.cachedMangas[query]; ok {
+ if cached, ok := m.cache.mangas.Get(query).Get(); ok {
+ for _, manga := range cached {
+ manga.Source = m
+ }
+
return cached, nil
}
@@ -53,6 +57,6 @@ func (m *Mangadex) Search(query string) ([]*source.Manga, error) {
mangas = append(mangas, &m)
}
- m.cachedMangas[query] = mangas
+ _ = m.cache.mangas.Set(query, mangas)
return mangas, nil
}
diff --git a/provider/provider.go b/provider/provider.go
index 601628fe..505a5e2c 100644
--- a/provider/provider.go
+++ b/provider/provider.go
@@ -3,11 +3,6 @@ package provider
import (
"github.com/metafates/mangal/filesystem"
"github.com/metafates/mangal/provider/custom"
- "github.com/metafates/mangal/provider/generic"
- "github.com/metafates/mangal/provider/mangadex"
- "github.com/metafates/mangal/provider/manganato"
- "github.com/metafates/mangal/provider/manganelo"
- "github.com/metafates/mangal/provider/mangapill"
"github.com/metafates/mangal/source"
"github.com/metafates/mangal/util"
"github.com/metafates/mangal/where"
@@ -16,26 +11,11 @@ import (
"path/filepath"
)
-func init() {
- for _, conf := range []*generic.Configuration{
- manganelo.Config,
- manganato.Config,
- mangapill.Config,
- } {
- conf := conf
- builtinProviders = append(builtinProviders, &Provider{
- ID: conf.ID(),
- Name: conf.Name,
- CreateSource: func() (source.Source, error) {
- return generic.New(conf), nil
- },
- })
- }
-}
-
type Provider struct {
ID string
Name string
+ UsesHeadless bool
+ IsCustom bool
CreateSource func() (source.Source, error)
}
@@ -43,18 +23,6 @@ func (p Provider) String() string {
return p.Name
}
-const CustomProviderExtension = ".lua"
-
-var builtinProviders = []*Provider{
- {
- ID: mangadex.ID,
- Name: mangadex.Name,
- CreateSource: func() (source.Source, error) {
- return mangadex.New(), nil
- },
- },
-}
-
func Builtins() []*Provider {
return builtinProviders
}
@@ -75,11 +43,23 @@ func Customs() []*Provider {
providers := make([]*Provider, len(paths))
for i, path := range paths {
+ // Check if the source contains a `require("headless")` line;
+ // if so, set UsesHeadless to true.
+ // This approach is not ideal, but it's the only way to detect headless usage
+ // without actually loading the source.
+ usesHeadless, _ := filesystem.Api().FileContainsAnyBytes(path, [][]byte{
+ []byte("require(\"headless\")"),
+ []byte("require('headless')"),
+ []byte("require(headless)"),
+ })
+
name := util.FileStem(path)
path := path
providers[i] = &Provider{
- ID: custom.IDfromName(name),
- Name: name,
+ ID: custom.IDfromName(name),
+ UsesHeadless: usesHeadless,
+ IsCustom: true,
+ Name: name,
CreateSource: func() (source.Source, error) {
return custom.LoadSource(path, true)
},
diff --git a/query/query.go b/query/query.go
new file mode 100644
index 00000000..0252e9ea
--- /dev/null
+++ b/query/query.go
@@ -0,0 +1,20 @@
+package query
+
+import (
+ "github.com/metafates/mangal/cache"
+ "github.com/metafates/mangal/where"
+ "github.com/samber/mo"
+ "time"
+)
+
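+// queryRecord is a single remembered search query together with its rank.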
+type queryRecord struct {
+ Rank int `json:"rank"`
+ Query string `json:"query"`
+}
+
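+// cacher persists query records to the queries file; entries never expire.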
+var cacher = cache.New[map[string]*queryRecord](
+ where.Queries(),
+ &cache.Options{
+ ExpireEvery: mo.None[time.Duration](),
+ },
+)
diff --git a/query/remember.go b/query/remember.go
new file mode 100644
index 00000000..4e809192
--- /dev/null
+++ b/query/remember.go
@@ -0,0 +1,25 @@
+package query
+
+// Remember adds a query to the search history.
+// If the query is already in the history, its rank is incremented by the given weight.
+func Remember(query string, weight int) error {
+ query = sanitize(query)
+
+ cached, ok := cacher.Get().Get()
+ if !ok {
+ cached = map[string]*queryRecord{}
+ }
+
+ // if the query is already in the cache
+ // increment its rank
+ if record, ok := cached[query]; ok {
+ record.Rank += weight
+ } else {
+ cached[query] = &queryRecord{
+ Rank: weight,
+ Query: query,
+ }
+ }
+
+ return cacher.Set(cached)
+}
diff --git a/query/sanitize.go b/query/sanitize.go
new file mode 100644
index 00000000..ea41e491
--- /dev/null
+++ b/query/sanitize.go
@@ -0,0 +1,7 @@
+package query
+
+import "strings"
+
+func sanitize(query string) string {
+ return strings.TrimSpace(strings.ToLower(query))
+}
diff --git a/query/suggest.go b/query/suggest.go
new file mode 100644
index 00000000..e8d39a79
--- /dev/null
+++ b/query/suggest.go
@@ -0,0 +1,64 @@
+package query
+
+import (
+ "github.com/lithammer/fuzzysearch/fuzzy"
+ "github.com/metafates/mangal/constant"
+ "github.com/samber/lo"
+ "github.com/samber/mo"
+ "github.com/spf13/viper"
+ "golang.org/x/exp/slices"
+)
+
+var (
+ suggestionCache = make(map[string][]*queryRecord)
+)
+
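+// SuggestMany returns remembered queries that fuzzy-match the given query, sorted by rank in descending order.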
+func SuggestMany(query string) []string {
+ if !viper.GetBool(constant.SearchShowQuerySuggestions) {
+ return []string{}
+ }
+
+ query = sanitize(query)
+
+ var records []*queryRecord
+
+ if prev, ok := suggestionCache[query]; ok {
+ records = prev
+ } else {
+ cached, ok := cacher.Get().Get()
+ if !ok {
+ return []string{}
+ }
+
+ for _, record := range cached {
+ if fuzzy.Match(query, record.Query) {
+ records = append(records, record)
+ }
+ }
+
+ slices.SortFunc(records, func(a, b *queryRecord) bool {
+ return a.Rank > b.Rank
+ })
+
+ suggestionCache[query] = records
+ }
+
+ return lo.Map(records, func(record *queryRecord, _ int) string {
+ return record.Query
+ })
+}
+
+// Suggest gives a suggestion for a query
+func Suggest(query string) mo.Option[string] {
+ records := SuggestMany(query)
+
+ var suggestion mo.Option[string]
+
+ if len(records) == 0 {
+ suggestion = mo.None[string]()
+ } else {
+ suggestion = mo.Some(records[0])
+ }
+
+ return suggestion
+}
diff --git a/source/chapter.go b/source/chapter.go
index ae212ac0..462bb876 100644
--- a/source/chapter.go
+++ b/source/chapter.go
@@ -1,41 +1,39 @@
package source
import (
- "bytes"
"fmt"
"github.com/dustin/go-humanize"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/filesystem"
"github.com/metafates/mangal/style"
"github.com/metafates/mangal/util"
- "github.com/samber/lo"
+ "github.com/samber/mo"
"github.com/spf13/viper"
- "html"
"os"
"path/filepath"
"strings"
"sync"
- "text/template"
)
// Chapter is a struct that represents a chapter of a manga.
type Chapter struct {
// Name of the chapter
- Name string
+ Name string `json:"name" jsonschema:"description=Name of the chapter"`
// URL of the chapter
- URL string
+ URL string `json:"url" jsonschema:"description=URL of the chapter"`
// Index of the chapter in the manga.
- Index uint16
+ Index uint16 `json:"index" jsonschema:"description=Index of the chapter in the manga"`
// ID of the chapter in the source.
- ID string
+ ID string `json:"id" jsonschema:"description=ID of the chapter in the source"`
// Volume which the chapter belongs to.
- Volume string
+ Volume string `json:"volume" jsonschema:"description=Volume which the chapter belongs to"`
// Manga that the chapter belongs to.
Manga *Manga `json:"-"`
// Pages of the chapter.
- Pages []*Page
+ Pages []*Page `json:"pages" jsonschema:"description=Pages of the chapter"`
- size uint64
+ isDownloaded mo.Option[bool]
+ size uint64
}
func (c *Chapter) String() string {
@@ -44,12 +42,12 @@ func (c *Chapter) String() string {
// DownloadPages downloads the Pages contents of the Chapter.
// Pages needs to be set before calling this function.
-func (c *Chapter) DownloadPages(progress func(string)) (err error) {
+func (c *Chapter) DownloadPages(temp bool, progress func(string)) (err error) {
c.size = 0
status := func() string {
return fmt.Sprintf(
"Downloading %s %s",
- util.Quantity(len(c.Pages), "page"),
+ util.Quantify(len(c.Pages), "page", "pages"),
style.Faint(c.SizeHuman()),
)
}
@@ -80,6 +78,7 @@ func (c *Chapter) DownloadPages(progress func(string)) (err error) {
}
wg.Wait()
+ c.isDownloaded = mo.Some(!temp && err == nil)
return
}
@@ -124,13 +123,19 @@ func (c *Chapter) Filename() (filename string) {
return
}
-func (c *Chapter) Path(temp bool) (path string, err error) {
- path, err = c.Manga.Path(temp)
- if err != nil {
- return
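+// IsDownloaded reports whether the chapter already exists at its expected download path.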
+func (c *Chapter) IsDownloaded() bool {
+ if c.isDownloaded.IsPresent() {
+ return c.isDownloaded.MustGet()
}
- if c.Volume != "" && viper.GetBool(constant.DownloaderCreateVolumeDir) {
+ path, _ := c.path(c.Manga.peekPath(), false)
+ exists, _ := filesystem.Api().Exists(path)
+ c.isDownloaded = mo.Some(exists)
+ return exists
+}
+
+func (c *Chapter) path(relativeTo string, createVolumeDir bool) (path string, err error) {
+ if createVolumeDir {
path = filepath.Join(path, util.SanitizeFilename(c.Volume))
err = filesystem.Api().MkdirAll(path, os.ModePerm)
if err != nil {
@@ -138,45 +143,48 @@ func (c *Chapter) Path(temp bool) (path string, err error) {
}
}
- path = filepath.Join(path, c.Filename())
+ path = filepath.Join(relativeTo, c.Filename())
return
}
+func (c *Chapter) Path(temp bool) (path string, err error) {
+ var manga string
+ manga, err = c.Manga.Path(temp)
+ if err != nil {
+ return
+ }
+
+ return c.path(manga, c.Volume != "" && viper.GetBool(constant.DownloaderCreateVolumeDir))
+}
+
func (c *Chapter) Source() Source {
return c.Manga.Source
}
-func (c *Chapter) ComicInfoXML() *bytes.Buffer {
- // language=gotemplate
- t := `
-<ComicInfo xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
- <Title>{{ escape .Name }}</Title>
- <Series>{{ escape .Manga.Name }}</Series>
- <Number>{{ .Index }}</Number>
- <Web>{{ .URL }}</Web>
- <Genre>{{ join .Manga.Metadata.Genres "," }}</Genre>
- <PageCount>{{ len .Pages }}</PageCount>
- <Summary>{{ escape .Manga.Metadata.Summary }}</Summary>
- <Count>{{ len .Manga.Chapters }}</Count>
- <Writer>{{ escape .Manga.Metadata.Author }}</Writer>
- <Characters>{{ join .Manga.Metadata.Characters "," }}</Characters>
- <Year>{{ if geq .Manga.Metadata.StartDate.Year 1 }}{{ .Manga.Metadata.StartDate.Year }}{{ end }}</Year>
- <Month>{{ if geq .Manga.Metadata.StartDate.Month 1 }}{{ .Manga.Metadata.StartDate.Month }}{{ end }}</Month>
- <Day>{{ if geq .Manga.Metadata.StartDate.Day 1 }}{{ .Manga.Metadata.StartDate.Day }}{{ end }}</Day>
- <Tags>{{ join .Manga.Metadata.Tags "," }}</Tags>
- <Notes>Downloaded with Mangal. https://github.com/metafates/mangal</Notes>
- <Manga>YesAndRightToLeft</Manga>
-</ComicInfo>`
-
- funcs := template.FuncMap{
- "join": strings.Join,
- "escape": html.EscapeString,
- "geq": func(a, b int) bool { return a >= b },
+func (c *Chapter) ComicInfo() *ComicInfo {
+ return &ComicInfo{
+ XmlnsXsd: "http://www.w3.org/2001/XMLSchema",
+ XmlnsXsi: "http://www.w3.org/2001/XMLSchema-instance",
+
+ Title: c.Manga.Name,
+ Series: c.Manga.Name,
+ Number: int(c.Index),
+ Web: c.URL,
+ Genre: strings.Join(c.Manga.Metadata.Tags, ","),
+ PageCount: len(c.Pages),
+ Summary: c.Manga.Metadata.Summary,
+ Count: c.Manga.Metadata.Chapters,
+ Characters: strings.Join(c.Manga.Metadata.Characters, ","),
+ Year: c.Manga.Metadata.StartDate.Year,
+ Month: c.Manga.Metadata.StartDate.Month,
+ Day: c.Manga.Metadata.StartDate.Day,
+ Writer: strings.Join(c.Manga.Metadata.Staff.Story, ","),
+ Penciller: strings.Join(c.Manga.Metadata.Staff.Art, ","),
+ Letterer: strings.Join(c.Manga.Metadata.Staff.Lettering, ","),
+ Translator: strings.Join(c.Manga.Metadata.Staff.Translation, ","),
+ Tags: strings.Join(c.Manga.Metadata.Tags, ","),
+ Notes: "Downloaded with Mangal. https://github.com/metafates/mangal",
+ Manga: "YesAndRightToLeft",
}
- parsed := lo.Must(template.New("ComicInfo").Funcs(funcs).Parse(t))
- buf := bytes.NewBufferString("")
- lo.Must0(parsed.Execute(buf, c))
-
- return buf
}
diff --git a/source/chapter_test.go b/source/chapter_test.go
index c66fe673..783ceb61 100644
--- a/source/chapter_test.go
+++ b/source/chapter_test.go
@@ -43,9 +43,9 @@ func TestChapter_Filename(t *testing.T) {
func TestChapter_ComicInfoXML(t *testing.T) {
Convey("Given a chapter", t, func() {
- Convey("When ComicInfoXML is called", func() {
+ Convey("When ComicInfo is called", func() {
Convey("It should return a ComicInfo XML", func() {
- xml := testChapter.ComicInfoXML()
+ xml := testChapter.ComicInfo()
So(xml, ShouldNotBeEmpty)
})
})
diff --git a/source/comicinfo.go b/source/comicinfo.go
new file mode 100644
index 00000000..7c657ef7
--- /dev/null
+++ b/source/comicinfo.go
@@ -0,0 +1,30 @@
+package source
+
+import "encoding/xml"
+
+type ComicInfo struct {
+ XMLName xml.Name `xml:"ComicInfo"`
+ XmlnsXsi string `xml:"xmlns:xsi,attr"`
+ XmlnsXsd string `xml:"xmlns:xsd,attr"`
+
+ // General
+ Title string `xml:"Title,omitempty"`
+ Series string `xml:"Series,omitempty"`
+ Number int `xml:"Number,omitempty"`
+ Web string `xml:"Web,omitempty"`
+ Genre string `xml:"Genre,omitempty"`
+ PageCount int `xml:"PageCount,omitempty"`
+ Summary string `xml:"Summary,omitempty"`
+ Count int `xml:"Count,omitempty"`
+ Characters string `xml:"Characters,omitempty"`
+ Year int `xml:"Year,omitempty"`
+ Month int `xml:"Month,omitempty"`
+ Day int `xml:"Day,omitempty"`
+ Writer string `xml:"Writer,omitempty"`
+ Penciller string `xml:"Penciller,omitempty"`
+ Letterer string `xml:"Letterer,omitempty"`
+ Translator string `xml:"Translator,omitempty"`
+ Tags string `xml:"Tags,omitempty"`
+ Notes string `xml:"Notes,omitempty"`
+ Manga string `xml:"Manga,omitempty"`
+}
diff --git a/source/manga.go b/source/manga.go
index 3e208003..3169cd66 100644
--- a/source/manga.go
+++ b/source/manga.go
@@ -1,8 +1,6 @@
package source
import (
- "bytes"
- "encoding/json"
"fmt"
"github.com/metafates/mangal/anilist"
"github.com/metafates/mangal/constant"
@@ -11,6 +9,7 @@ import (
"github.com/metafates/mangal/util"
"github.com/metafates/mangal/where"
"github.com/samber/lo"
+ "github.com/samber/mo"
"github.com/spf13/viper"
"io"
"net/http"
@@ -21,37 +20,73 @@ import (
)
type date struct {
- Year int
- Month int
- Day int
+ Year int `json:"year"`
+ Month int `json:"month"`
+ Day int `json:"day"`
}
// Manga is a manga from a source.
type Manga struct {
// Name of the manga
- Name string
+ Name string `json:"name" jsonschema:"description=Name of the manga"`
// URL of the manga
- URL string
+ URL string `json:"url" jsonschema:"description=URL of the manga"`
// Index of the manga in the source.
- Index uint16
+ Index uint16 `json:"index" jsonschema:"description=Index of the manga in the source"`
// ID of manga in the source.
- ID string
+ ID string `json:"id" jsonschema:"description=ID of manga in the source"`
// Chapters of the manga
- Chapters []*Chapter
- Source Source `json:"-"`
+ Chapters []*Chapter `json:"chapters" jsonschema:"description=Chapters of the manga"`
+ // Source that the manga belongs to.
+ Source Source `json:"-"`
+ // Anilist is the closest anilist match
+ Anilist mo.Option[*anilist.Manga] `json:"-"`
Metadata struct {
- Genres []string
- Summary string
- Author string
- Cover string
- Tags []string
- Characters []string
- Status string
- StartDate date
- EndDate date
- Synonyms []string
- URLs []string
- }
+ // Genres of the manga
+ Genres []string `json:"genres" jsonschema:"description=Genres of the manga"`
+ // Summary in the plain text with newlines
+ Summary string `json:"summary" jsonschema:"description=Summary in the plain text with newlines"`
+ // Staff that worked on the manga
+ Staff struct {
+ // Story authors
+ Story []string `json:"story" jsonschema:"description=Story authors"`
+ // Art authors
+ Art []string `json:"art" jsonschema:"description=Art authors"`
+ // Translation group
+ Translation []string `json:"translation" jsonschema:"description=Translation group"`
+ // Lettering group
+ Lettering []string `json:"lettering" jsonschema:"description=Lettering group"`
+ } `json:"staff" jsonschema:"description=Staff that worked on the manga"`
+ // Cover images of the manga
+ Cover struct {
+ // ExtraLarge is the largest cover image. If not available, Large will be used.
+ ExtraLarge string `json:"extraLarge" jsonschema:"description=ExtraLarge is the largest cover image. If not available, Large will be used."`
+ // Large is the second-largest cover image.
+ Large string `json:"large" jsonschema:"description=Large is the second-largest cover image."`
+ // Medium cover image. The smallest one.
+ Medium string `json:"medium" jsonschema:"description=Medium cover image. The smallest one."`
+ // Color average color of the cover image.
+ Color string `json:"color" jsonschema:"description=Color average color of the cover image."`
+ } `json:"cover" jsonschema:"description=Cover images of the manga"`
+ // BannerImage is the banner image of the manga.
+ BannerImage string `json:"bannerImage" jsonschema:"description=BannerImage is the banner image of the manga."`
+ // Tags of the manga
+ Tags []string `json:"tags" jsonschema:"description=Tags of the manga"`
+ // Characters of the manga
+ Characters []string `json:"characters" jsonschema:"description=Characters of the manga"`
+ // Status of the manga
+ Status string `json:"status" jsonschema:"enum=FINISHED,enum=RELEASING,enum=NOT_YET_RELEASED,enum=CANCELLED,enum=HIATUS"`
+ // StartDate is the date when the manga started.
+ StartDate date `json:"startDate" jsonschema:"description=StartDate is the date when the manga started."`
+ // EndDate is the date when the manga ended.
+ EndDate date `json:"endDate" jsonschema:"description=EndDate is the date when the manga ended."`
+ // Synonyms other names of the manga.
+ Synonyms []string `json:"synonyms" jsonschema:"description=Synonyms other names of the manga."`
+ // Chapters is the number of chapters the manga will have when completed.
+ Chapters int `json:"chapters" jsonschema:"description=The number of chapters the manga will have when completed."`
+ // URLs external URLs of the manga.
+ URLs []string `json:"urls" jsonschema:"description=External URLs of the manga."`
+ } `json:"metadata"`
cachedTempPath string
populated bool
coverDownloaded bool
@@ -61,10 +96,20 @@ func (m *Manga) String() string {
return m.Name
}
-func (m *Manga) Filename() string {
+func (m *Manga) Dirname() string {
return util.SanitizeFilename(m.Name)
}
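+// peekPath returns the manga's download directory without creating it on disk.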
+func (m *Manga) peekPath() string {
+ path := where.Downloads()
+
+ if viper.GetBool(constant.DownloaderCreateMangaDir) {
+ path = filepath.Join(path, m.Dirname())
+ }
+
+ return path
+}
+
func (m *Manga) Path(temp bool) (path string, err error) {
if temp {
if path = m.cachedTempPath; path != "" {
@@ -76,17 +121,28 @@ func (m *Manga) Path(temp bool) (path string, err error) {
return
}
- path = where.Downloads()
+ path = m.peekPath()
+ _ = filesystem.Api().MkdirAll(path, os.ModePerm)
+ return
+}
+
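+// GetCover returns the first non-empty cover image URL, preferring ExtraLarge, then Large, then Medium.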
+func (m *Manga) GetCover() (string, error) {
+ var covers = []string{
+ m.Metadata.Cover.ExtraLarge,
+ m.Metadata.Cover.Large,
+ m.Metadata.Cover.Medium,
+ }
- if viper.GetBool(constant.DownloaderCreateMangaDir) {
- path = filepath.Join(path, m.Filename())
+ for _, cover := range covers {
+ if cover != "" {
+ return cover, nil
+ }
}
- _ = filesystem.Api().MkdirAll(path, os.ModePerm)
- return
+ return "", fmt.Errorf("no cover found")
}
-func (m *Manga) DownloadCover(progress func(string)) error {
+func (m *Manga) DownloadCover(overwrite bool, path string, progress func(string)) error {
if m.coverDownloaded {
return nil
}
@@ -95,36 +151,33 @@ func (m *Manga) DownloadCover(progress func(string)) error {
log.Info("Downloading cover for ", m.Name)
progress("Downloading cover")
- if m.Metadata.Cover == "" {
- log.Warn("No cover to download")
- return nil
- }
-
- path, err := m.Path(false)
+ cover, err := m.GetCover()
if err != nil {
- log.Error(err)
- return err
+ log.Warn(err)
+ return nil
}
- extension := ".jpg"
- if ext := filepath.Ext(m.Metadata.Cover); ext != "" {
- extension = ext
+ var extension string
+ if extension = filepath.Ext(cover); extension == "" {
+ extension = ".jpg"
}
path = filepath.Join(path, "cover"+extension)
- exists, err := filesystem.Api().Exists(path)
- if err != nil {
- log.Error(err)
- return err
- }
+ if !overwrite {
+ exists, err := filesystem.Api().Exists(path)
+ if err != nil {
+ log.Error(err)
+ return err
+ }
- if exists {
- log.Warn("Cover already exists")
- return nil
+ if exists {
+ log.Warn("Cover already exists")
+ return nil
+ }
}
- resp, err := http.Get(m.Metadata.Cover)
+ resp, err := http.Get(cover)
if err != nil {
log.Error(err)
return err
@@ -143,7 +196,31 @@ func (m *Manga) DownloadCover(progress func(string)) error {
return err
}
- return filesystem.Api().WriteFile(path, data, os.ModePerm)
+ err = filesystem.Api().WriteFile(path, data, os.ModePerm)
+ if err != nil {
+ log.Error(err)
+ return err
+ }
+
+ log.Info("Cover downloaded")
+ return nil
+}
+
+func (m *Manga) BindWithAnilist() error {
+ if m.Anilist.IsPresent() {
+ return nil
+ }
+
+ log.Infof("binding %s with anilist", m.Name)
+
+ manga, err := anilist.FindClosest(m.Name)
+ if err != nil {
+ log.Error(err)
+ return err
+ }
+
+ m.Anilist = mo.Some(manga)
+ return nil
}
func (m *Manga) PopulateMetadata(progress func(string)) error {
@@ -154,14 +231,16 @@ func (m *Manga) PopulateMetadata(progress func(string)) error {
progress("Fetching metadata from anilist")
log.Infof("Populating metadata for %s", m.Name)
-
- manga, err := anilist.FindClosest(m.Name)
- if err != nil {
- log.Error(err)
+ if err := m.BindWithAnilist(); err != nil {
progress("Failed to fetch metadata")
return err
}
+ manga, ok := m.Anilist.Get()
+ if !ok || manga == nil {
+ return fmt.Errorf("manga '%s' not found on Anilist", m.Name)
+ }
+
m.Metadata.Genres = manga.Genres
 // replace <br> with newlines and remove other html tags
m.Metadata.Summary = regexp.
@@ -182,23 +261,53 @@ func (m *Manga) PopulateMetadata(progress func(string)) error {
}
m.Metadata.Characters = characters
- var tags = make([]string, len(manga.Tags))
- for i, tag := range manga.Tags {
- tags[i] = tag.Name
+ var tags = make([]string, 0)
+ for _, tag := range manga.Tags {
+ if tag.Rank >= viper.GetInt(constant.MetadataComicInfoXMLTagRelevanceThreshold) {
+ tags = append(tags, tag.Name)
+ }
}
m.Metadata.Tags = tags
- m.Metadata.Cover = manga.CoverImage.ExtraLarge
+ m.Metadata.Cover.ExtraLarge = manga.CoverImage.ExtraLarge
+ m.Metadata.Cover.Large = manga.CoverImage.Large
+ m.Metadata.Cover.Medium = manga.CoverImage.Medium
+ m.Metadata.Cover.Color = manga.CoverImage.Color
+
+ m.Metadata.BannerImage = manga.BannerImage
+
m.Metadata.StartDate = date(manga.StartDate)
m.Metadata.EndDate = date(manga.EndDate)
m.Metadata.Status = strings.ReplaceAll(manga.Status, "_", " ")
m.Metadata.Synonyms = manga.Synonyms
- urls := []string{manga.URL}
- urls = append(urls, manga.SiteURL)
- for _, e := range manga.External {
- urls = append(urls, e.URL)
+ m.Metadata.Staff.Story = make([]string, 0)
+ m.Metadata.Staff.Art = make([]string, 0)
+ m.Metadata.Staff.Translation = make([]string, 0)
+ m.Metadata.Staff.Lettering = make([]string, 0)
+
+ m.Metadata.Chapters = manga.Chapters
+
+ for _, staff := range manga.Staff.Edges {
+ role := strings.ToLower(staff.Role)
+ switch {
+ case strings.Contains(role, "story"):
+ m.Metadata.Staff.Story = append(m.Metadata.Staff.Story, staff.Node.Name.Full)
+ case strings.Contains(role, "art"):
+ m.Metadata.Staff.Art = append(m.Metadata.Staff.Art, staff.Node.Name.Full)
+ case strings.Contains(role, "translator"):
+ m.Metadata.Staff.Translation = append(m.Metadata.Staff.Translation, staff.Node.Name.Full)
+ case strings.Contains(role, "lettering"):
+ m.Metadata.Staff.Lettering = append(m.Metadata.Staff.Lettering, staff.Node.Name.Full)
+ }
+ }
+
+ // Anilist & Myanimelist + external
+ urls := make([]string, 2+len(manga.External))
+ urls[0] = manga.SiteURL
+ for i, e := range manga.External {
+ urls[i+1] = e.URL
}
urls = lo.Filter(urls, func(url string, _ int) bool {
@@ -211,22 +320,7 @@ func (m *Manga) PopulateMetadata(progress func(string)) error {
return nil
}
-func (m *Manga) SeriesJSON() *bytes.Buffer {
- type metadata struct {
- Type string `json:"type"`
- Name string `json:"name"`
- DescriptionFormatted string `json:"description_formatted"`
- DescriptionText string `json:"description_text"`
- Status string `json:"status"`
- Year int `json:"year"`
- ComicImage string `json:"ComicImage"`
- Publisher string `json:"publisher"`
- ComicID int `json:"comicId"`
- BookType string `json:"booktype"`
- TotalIssues int `json:"total_issues"`
- PublicationRun string `json:"publication_run"`
- }
-
+func (m *Manga) SeriesJSON() *SeriesJSON {
var status string
switch m.Metadata.Status {
case "FINISHED":
@@ -237,24 +331,22 @@ func (m *Manga) SeriesJSON() *bytes.Buffer {
status = "Unknown"
}
- seriesJSON := struct {
- Metadata metadata `json:"metadata"`
- }{
- Metadata: metadata{
- Type: "comicSeries",
- Name: m.Name,
- DescriptionFormatted: m.Metadata.Summary,
- Status: status,
- Year: m.Metadata.StartDate.Year,
- ComicImage: m.Metadata.Cover,
- Publisher: m.Metadata.Author,
- BookType: "Print",
- TotalIssues: len(m.Chapters),
- PublicationRun: fmt.Sprintf("%d %d - %d %d", m.Metadata.StartDate.Month, m.Metadata.StartDate.Year, m.Metadata.EndDate.Month, m.Metadata.EndDate.Year),
- },
+ var publisher string
+ if len(m.Metadata.Staff.Story) > 0 {
+ publisher = m.Metadata.Staff.Story[0]
}
- var buf bytes.Buffer
- lo.Must0(json.NewEncoder(&buf).Encode(seriesJSON))
- return &buf
+ seriesJSON := &SeriesJSON{}
+ seriesJSON.Metadata.Type = "comicSeries"
+ seriesJSON.Metadata.Name = m.Name
+ seriesJSON.Metadata.DescriptionFormatted = m.Metadata.Summary
+ seriesJSON.Metadata.Status = status
+ seriesJSON.Metadata.Year = m.Metadata.StartDate.Year
+ seriesJSON.Metadata.ComicImage = m.Metadata.Cover.ExtraLarge
+ seriesJSON.Metadata.Publisher = publisher
+ seriesJSON.Metadata.BookType = "Print"
+ seriesJSON.Metadata.TotalIssues = m.Metadata.Chapters
+ seriesJSON.Metadata.PublicationRun = fmt.Sprintf("%d %d - %d %d", m.Metadata.StartDate.Month, m.Metadata.StartDate.Year, m.Metadata.EndDate.Month, m.Metadata.EndDate.Year)
+
+ return seriesJSON
}
diff --git a/source/manga_test.go b/source/manga_test.go
index 61df987b..b6eb86e2 100644
--- a/source/manga_test.go
+++ b/source/manga_test.go
@@ -49,7 +49,7 @@ func TestManga_Filename(t *testing.T) {
Convey("Given a manga", t, func() {
Convey("When Filename is called", func() {
Convey("It should return a sanitized filename", func() {
- So(testManga.Filename(), ShouldEqual, util.SanitizeFilename(testManga.Name))
+ So(testManga.Dirname(), ShouldEqual, util.SanitizeFilename(testManga.Name))
})
})
})
diff --git a/source/page.go b/source/page.go
index 36fcadd6..33cf9c67 100644
--- a/source/page.go
+++ b/source/page.go
@@ -16,11 +16,11 @@ import (
// Page represents a page in a chapter
type Page struct {
// URL of the page. Used to download the page.
- URL string
+ URL string `json:"url" jsonschema:"description=URL of the page. Used to download the image."`
// Index of the page in the chapter.
- Index uint16
+ Index uint16 `json:"index" jsonschema:"description=Index of the page in the chapter."`
// Extension of the page image.
- Extension string
+ Extension string `json:"extension" jsonschema:"description=Extension of the page image."`
// Size of the page in bytes
Size uint64 `json:"-"`
// Contents of the page
@@ -29,16 +29,16 @@ type Page struct {
Chapter *Chapter `json:"-"`
}
-func (p *Page) request() (req *http.Request, err error) {
- req, err = http.NewRequest(http.MethodGet, p.URL, nil)
+func (p *Page) request() (*http.Request, error) {
+ req, err := http.NewRequest(http.MethodGet, p.URL, nil)
if err != nil {
log.Error(err)
- return
+ return nil, err
}
req.Header.Set("Referer", p.Chapter.URL)
req.Header.Set("User-Agent", constant.UserAgent)
- return
+ return req, nil
}
// Download Page contents.
@@ -108,7 +108,7 @@ func (p *Page) Close() error {
// Read reads from the page contents.
func (p *Page) Read(b []byte) (int, error) {
- log.Debugf("Reading page contents #%d", p.Index)
+ log.Tracef("Reading page contents #%d", p.Index)
if p.Contents == nil {
err := errors.New("page not downloaded")
log.Error(err)
diff --git a/source/series.go b/source/series.go
new file mode 100644
index 00000000..9d1a274e
--- /dev/null
+++ b/source/series.go
@@ -0,0 +1,18 @@
+package source
+
+type SeriesJSON struct {
+ Metadata struct {
+ Type string `json:"type"`
+ Name string `json:"name"`
+ DescriptionFormatted string `json:"description_formatted"`
+ DescriptionText string `json:"description_text"`
+ Status string `json:"status"`
+ Year int `json:"year"`
+ ComicImage string `json:"ComicImage"`
+ Publisher string `json:"publisher"`
+ ComicID int `json:"comicId"`
+ BookType string `json:"booktype"`
+ TotalIssues int `json:"total_issues"`
+ PublicationRun string `json:"publication_run"`
+ } `json:"metadata"`
+}
diff --git a/style/bg.go b/style/bg.go
deleted file mode 100644
index d6282c47..00000000
--- a/style/bg.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package style
-
-import "github.com/charmbracelet/lipgloss"
-
-var (
- BgRed = lipgloss.NewStyle().Background(lipgloss.Color("1")).Render
- BgGreen = lipgloss.NewStyle().Background(lipgloss.Color("2")).Render
- BgYellow = lipgloss.NewStyle().Background(lipgloss.Color("3")).Render
- BgBlue = lipgloss.NewStyle().Background(lipgloss.Color("4")).Render
- BgMagenta = lipgloss.NewStyle().Background(lipgloss.Color("5")).Render
- BgCyan = lipgloss.NewStyle().Background(lipgloss.Color("6")).Render
- BgWhite = lipgloss.NewStyle().Background(lipgloss.Color("7")).Render
- BgBlack = lipgloss.NewStyle().Background(lipgloss.Color("8")).Render
-)
-
-var (
- BgHiBlack = lipgloss.NewStyle().Background(lipgloss.Color("9")).Render
- BgHiRed = lipgloss.NewStyle().Background(lipgloss.Color("10")).Render
- BgHiGreen = lipgloss.NewStyle().Background(lipgloss.Color("11")).Render
- BgHiYellow = lipgloss.NewStyle().Background(lipgloss.Color("12")).Render
- BgHiBlue = lipgloss.NewStyle().Background(lipgloss.Color("13")).Render
- BgHiMagenta = lipgloss.NewStyle().Background(lipgloss.Color("14")).Render
- BgHiCyan = lipgloss.NewStyle().Background(lipgloss.Color("15")).Render
- BgHiWhite = lipgloss.NewStyle().Background(lipgloss.Color("16")).Render
-)
-
-func BgColor(color string) func(string) string {
- return func(s string) string {
- return lipgloss.NewStyle().Background(lipgloss.Color(color)).Render(s)
- }
-}
diff --git a/style/extra.go b/style/extra.go
index 620b8971..1b7a840c 100644
--- a/style/extra.go
+++ b/style/extra.go
@@ -1,6 +1,15 @@
package style
+import (
+ "github.com/charmbracelet/lipgloss"
+ "github.com/metafates/mangal/color"
+)
+
var (
- Title = Combined(Padding(0, 1), BgColor("62"), Color("230"))
- ErrorTitle = Combined(Padding(0, 1), BgRed, Color("230"))
+ Title = NewColored(color.New("230"), color.New("62")).Padding(0, 1).Render
+ ErrorTitle = NewColored(color.New("230"), color.Red).Padding(0, 1).Render
)
+
+func Tag(foreground, background lipgloss.Color) func(string) string {
+ return NewColored(foreground, background).Padding(0, 1).Render
+}
diff --git a/style/fg.go b/style/fg.go
deleted file mode 100644
index e42ef2ac..00000000
--- a/style/fg.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package style
-
-import "github.com/charmbracelet/lipgloss"
-
-var (
- Red = lipgloss.NewStyle().Foreground(lipgloss.Color("1")).Render
- Green = lipgloss.NewStyle().Foreground(lipgloss.Color("2")).Render
- Yellow = lipgloss.NewStyle().Foreground(lipgloss.Color("3")).Render
- Blue = lipgloss.NewStyle().Foreground(lipgloss.Color("4")).Render
- Magenta = lipgloss.NewStyle().Foreground(lipgloss.Color("5")).Render
- Cyan = lipgloss.NewStyle().Foreground(lipgloss.Color("6")).Render
- White = lipgloss.NewStyle().Foreground(lipgloss.Color("7")).Render
- Black = lipgloss.NewStyle().Foreground(lipgloss.Color("8")).Render
-)
-
-var (
- HiBlack = lipgloss.NewStyle().Foreground(lipgloss.Color("9")).Render
- HiRed = lipgloss.NewStyle().Foreground(lipgloss.Color("10")).Render
- HiGreen = lipgloss.NewStyle().Foreground(lipgloss.Color("11")).Render
- HiYellow = lipgloss.NewStyle().Foreground(lipgloss.Color("12")).Render
- HiBlue = lipgloss.NewStyle().Foreground(lipgloss.Color("13")).Render
- HiMagenta = lipgloss.NewStyle().Foreground(lipgloss.Color("14")).Render
- HiCyan = lipgloss.NewStyle().Foreground(lipgloss.Color("15")).Render
- HiWhite = lipgloss.NewStyle().Foreground(lipgloss.Color("16")).Render
-)
-
-func Color(color string) func(string) string {
- return func(s string) string {
- return lipgloss.NewStyle().Foreground(lipgloss.Color(color)).Render(s)
- }
-}
diff --git a/style/font.go b/style/font.go
deleted file mode 100644
index 31e6cc5b..00000000
--- a/style/font.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package style
-
-import "github.com/charmbracelet/lipgloss"
-
-var (
- Bold = lipgloss.NewStyle().Bold(true).Render
- Italic = lipgloss.NewStyle().Italic(true).Render
- Underline = lipgloss.NewStyle().Underline(true).Render
- Faint = lipgloss.NewStyle().Faint(true).Render
-)
diff --git a/style/style.go b/style/style.go
index 62efcd2b..dde356a4 100644
--- a/style/style.go
+++ b/style/style.go
@@ -2,31 +2,29 @@ package style
import "github.com/charmbracelet/lipgloss"
-type Style func(string) string
+func New() lipgloss.Style {
+ return lipgloss.NewStyle()
+}
-// Combined combines multiple styles into one.
-func Combined(styles ...func(string) string) func(string) string {
- return func(s string) string {
- for _, style := range styles {
- s = style(s)
- }
- return s
- }
+func NewColored(foreground, background lipgloss.Color) lipgloss.Style {
+ return New().Foreground(foreground).Background(background)
}
-func Padding(padding ...int) Style {
- return func(s string) string {
- return lipgloss.NewStyle().Padding(padding...).Render(s)
- }
+func Fg(color lipgloss.Color) func(string) string {
+ return NewColored(color, "").Render
}
-func Truncate(max int) Style {
- return func(s string) string {
- if len(s) <= max {
- return s
- }
+func Bg(color lipgloss.Color) func(string) string {
+ return NewColored("", color).Render
+}
- // Minus one for the ellipsis
- return s[:max-1] + "…"
- }
+func Truncate(max int) func(string) string {
+ return New().Width(max).Render
}
+
+var (
+ Faint = New().Faint(true).Render
+ Bold = New().Bold(true).Render
+ Italic = New().Italic(true).Render
+ Underline = New().Underline(true).Render
+)
diff --git a/style/style_test.go b/style/style_test.go
deleted file mode 100644
index 1c0e4292..00000000
--- a/style/style_test.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package style
-
-import (
- . "github.com/smartystreets/goconvey/convey"
- "testing"
-)
-
-func TestTrim(t *testing.T) {
- Convey("Given a string", t, func() {
- s := "lorem ipsum dolor sit amet"
- Convey("When trimming with a max of 10", func() {
- result := Truncate(10)(s)
- Convey("Then the result should be 'lorem ipsu…'", func() {
- So(result, ShouldEqual, "lorem ips…")
- })
- })
-
- Convey("When trimming with a max of 30h", func() {
- result := Truncate(30)(s)
- Convey("Then the result should be lorem ipsum dolor sit amet", func() {
- So(result, ShouldEqual, "lorem ipsum dolor sit amet")
- })
- })
- })
-}
-
-func TestCombined(t *testing.T) {
- Convey("Given a string", t, func() {
- s := "lorem ipsum dolor sit amet"
- Convey("When using combined with red and italic", func() {
- res := Combined(Red, Italic)(s)
- Convey("Then the result should be the same as Italic(Red(string))", func() {
- So(res, ShouldEqual, Italic(Red(s)))
- })
- })
-
- Convey("When using combined without arguments", func() {
- res := Combined()(s)
- Convey("Then the result should be the same as original", func() {
- So(res, ShouldEqual, s)
- })
- })
- })
-}
diff --git a/tui/bubble.go b/tui/bubble.go
index 5bb9fec4..0ac9a827 100644
--- a/tui/bubble.go
+++ b/tui/bubble.go
@@ -10,14 +10,16 @@ import (
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
"github.com/metafates/mangal/anilist"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/history"
- "github.com/metafates/mangal/icon"
"github.com/metafates/mangal/installer"
"github.com/metafates/mangal/provider"
"github.com/metafates/mangal/source"
+ "github.com/metafates/mangal/style"
"github.com/metafates/mangal/util"
"github.com/samber/lo"
+ "github.com/samber/mo"
"github.com/spf13/viper"
"golang.org/x/exp/slices"
"strings"
@@ -71,6 +73,8 @@ type statefulBubble struct {
failedChapters []*source.Chapter
succededChapters []*source.Chapter
+
+ searchSuggestion mo.Option[string]
}
func (b *statefulBubble) raiseError(err error) {
@@ -186,7 +190,11 @@ func newBubble() *statefulBubble {
succededChapters: make([]*source.Chapter, 0),
}
- makeList := func(title string, description bool) list.Model {
+ type listOptions struct {
+ TitleStyle mo.Option[lipgloss.Style]
+ }
+
+ makeList := func(title string, description bool, options *listOptions) list.Model {
delegate := list.NewDefaultDelegate()
delegate.SetSpacing(viper.GetInt(constant.TUIItemSpacing))
delegate.ShowDescription = description
@@ -207,6 +215,10 @@ func newBubble() *statefulBubble {
}
listC.Title = title
listC.Styles.NoItems = paddingStyle
+ if titleStyle, ok := options.TitleStyle.Get(); ok {
+ listC.Styles.Title = titleStyle
+ }
+
//listC.StatusMessageLifetime = time.Second * 5
listC.StatusMessageLifetime = time.Hour * 999 // forever
@@ -226,23 +238,43 @@ func newBubble() *statefulBubble {
bubble.progressC = progress.New(progress.WithDefaultGradient())
- bubble.scrapersInstallC = makeList("Install Scrapers", true)
+ bubble.scrapersInstallC = makeList("Install Scrapers", true, &listOptions{
+ TitleStyle: mo.Some(
+ style.NewColored("#212529", "#ced4da").Padding(0, 1),
+ ),
+ })
bubble.scrapersInstallC.SetStatusBarItemName("scraper", "scrapers")
- bubble.historyC = makeList("History", true)
+ bubble.historyC = makeList("History", true, &listOptions{})
bubble.sourcesC.SetStatusBarItemName("chapter", "chapters")
- bubble.sourcesC = makeList("Select Source", true)
+ bubble.sourcesC = makeList("Select Source", true, &listOptions{
+ TitleStyle: mo.Some(
+ style.NewColored("#fefae0", "#bc6c25").Padding(0, 1),
+ ),
+ })
bubble.sourcesC.SetStatusBarItemName("source", "sources")
showURLs := viper.GetBool(constant.TUIShowURLs)
- bubble.mangasC = makeList("Mangas", showURLs)
+ bubble.mangasC = makeList("Mangas", showURLs, &listOptions{
+ TitleStyle: mo.Some(
+ style.NewColored("#f2e8cf", "#386641").Padding(0, 1),
+ ),
+ })
bubble.mangasC.SetStatusBarItemName("manga", "mangas")
- bubble.chaptersC = makeList("Chapters", showURLs)
+ bubble.chaptersC = makeList("Chapters", showURLs, &listOptions{
+ TitleStyle: mo.Some(
+ style.NewColored("#000814", color.Orange).Padding(0, 1),
+ ),
+ })
bubble.chaptersC.SetStatusBarItemName("chapter", "chapters")
- bubble.anilistC = makeList("Anilist Mangas", showURLs)
+ bubble.anilistC = makeList("Anilist Mangas", showURLs, &listOptions{
+ TitleStyle: mo.Some(
+ style.NewColored("#bcbedc", "#2b2d42").Padding(0, 1),
+ ),
+ })
bubble.anilistC.SetStatusBarItemName("manga", "mangas")
if w, h, err := util.TerminalSize(); err == nil {
@@ -261,9 +293,7 @@ func (b *statefulBubble) loadProviders() tea.Cmd {
var items []list.Item
for _, p := range providers {
items = append(items, &listItem{
- title: p.Name,
- description: "Built-in provider " + icon.Get(icon.Go),
- internal: p,
+ internal: p,
})
}
slices.SortFunc(items, func(a, b list.Item) bool {
@@ -275,9 +305,7 @@ func (b *statefulBubble) loadProviders() tea.Cmd {
var customItems []list.Item
for _, p := range customProviders {
customItems = append(customItems, &listItem{
- title: p.Name,
- description: "Custom provider " + icon.Get(icon.Lua),
- internal: p,
+ internal: p,
})
}
slices.SortFunc(customItems, func(a, b list.Item) bool {
@@ -294,10 +322,18 @@ func (b *statefulBubble) loadHistory() (tea.Cmd, error) {
return nil, err
}
+ chapters := lo.Values(saved)
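+ // sort by manga name, then by chapter name, so the history listing order is deterministic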
+ slices.SortFunc(chapters, func(a, b *history.SavedChapter) bool {
+ if a.MangaName == b.MangaName {
+ return a.Name < b.Name
+ }
+ return a.MangaName < b.MangaName
+ })
+
var items []list.Item
- for _, s := range saved {
+ for _, c := range chapters {
items = append(items, &listItem{
- internal: s,
+ internal: c,
})
}
diff --git a/tui/handlers.go b/tui/handlers.go
index 51aeab29..5c98a94a 100644
--- a/tui/handlers.go
+++ b/tui/handlers.go
@@ -5,6 +5,7 @@ import (
"github.com/charmbracelet/bubbles/list"
tea "github.com/charmbracelet/bubbletea"
"github.com/metafates/mangal/anilist"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/downloader"
"github.com/metafates/mangal/installer"
@@ -37,9 +38,7 @@ func (b *statefulBubble) loadScrapers() tea.Cmd {
var items = make([]list.Item, len(scrapers))
for i, s := range scrapers {
items[i] = &listItem{
- title: s.Name,
- description: s.GithubURL(),
- internal: s,
+ internal: s,
}
}
@@ -145,7 +144,7 @@ func (b *statefulBubble) waitForSourcesLoaded() tea.Cmd {
func (b *statefulBubble) searchManga(query string) tea.Cmd {
return func() tea.Msg {
log.Info("searching for " + query)
- b.progressStatus = fmt.Sprintf("Searching among %s", util.Quantity(len(b.selectedSources), "source"))
+ b.progressStatus = fmt.Sprintf("Searching among %s", util.Quantify(len(b.selectedSources), "source", "sources"))
var mangas = make([]*source.Manga, 0)
@@ -161,7 +160,7 @@ func (b *statefulBubble) searchManga(query string) tea.Cmd {
b.errorChannel <- err
}
- log.Infof("found %s from source %s", util.Quantity(len(sourceMangas), "manga"), s.Name())
+ log.Infof("found %s from source %s", util.Quantify(len(sourceMangas), "manga", "mangas"), s.Name())
mangas = append(mangas, sourceMangas...)
}(s)
}
@@ -196,7 +195,7 @@ func (b *statefulBubble) getChapters(manga *source.Manga) tea.Cmd {
log.Error(err)
b.errorChannel <- err
} else {
- log.Infof("found %s", util.Quantity(len(chapters), "chapter"))
+ log.Infof("found %s", util.Quantify(len(chapters), "chapter", "chapters"))
b.foundChaptersChannel <- chapters
}
@@ -303,13 +302,13 @@ func (b *statefulBubble) waitForAnilistFetchAndSet() tea.Cmd {
func (b *statefulBubble) fetchAnilist(manga *source.Manga) tea.Cmd {
return func() tea.Msg {
log.Info("fetching anilist for " + manga.Name)
- b.progressStatus = fmt.Sprintf("Fetching anilist for %s", style.Magenta(manga.Name))
+ b.progressStatus = fmt.Sprintf("Fetching anilist for %s", style.Fg(color.Purple)(manga.Name))
mangas, err := anilist.SearchByName(manga.Name)
if err != nil {
log.Error(err)
b.errorChannel <- err
} else {
- log.Infof("found %s", util.Quantity(len(mangas), "manga"))
+ log.Infof("found %s", util.Quantify(len(mangas), "manga", "mangas"))
b.fetchedAnilistMangasChannel <- mangas
}
diff --git a/tui/init.go b/tui/init.go
index 65dcf5b6..bb9aab97 100644
--- a/tui/init.go
+++ b/tui/init.go
@@ -10,15 +10,21 @@ import (
)
func (b *statefulBubble) Init() tea.Cmd {
- if name := viper.GetString(constant.DownloaderDefaultSource); name != "" {
- p, ok := provider.Get(name)
- if !ok {
- b.raiseError(fmt.Errorf("provider %s not found", name))
- return nil
+ if names := viper.GetStringSlice(constant.DownloaderDefaultSources); len(names) != 0 {
+ var providers []*provider.Provider
+
+ for _, name := range names {
+ p, ok := provider.Get(name)
+ if !ok {
+ b.raiseError(fmt.Errorf("provider %s not found", name))
+ return nil
+ }
+
+ providers = append(providers, p)
}
b.setState(loadingState)
- return tea.Batch(b.startLoading(), b.loadSources([]*provider.Provider{p}), b.waitForSourcesLoaded())
+ return tea.Batch(b.startLoading(), b.loadSources(providers), b.waitForSourcesLoaded())
}
return tea.Batch(textinput.Blink, b.loadProviders())
diff --git a/tui/item.go b/tui/item.go
index b6b53bd2..1511835d 100644
--- a/tui/item.go
+++ b/tui/item.go
@@ -5,41 +5,59 @@ import (
"github.com/metafates/mangal/anilist"
"github.com/metafates/mangal/history"
"github.com/metafates/mangal/icon"
+ "github.com/metafates/mangal/installer"
+ "github.com/metafates/mangal/provider"
"github.com/metafates/mangal/source"
"github.com/metafates/mangal/style"
"strings"
)
type listItem struct {
- title string
- description string
- internal interface{}
- marked bool
+ internal interface{}
+ marked bool
}
func (t *listItem) toggleMark() {
t.marked = !t.marked
}
+func (t *listItem) getMark() string {
+ switch t.internal.(type) {
+ case *source.Chapter:
+ return style.Bold(icon.Get(icon.Mark))
+ case *anilist.Manga:
+ return icon.Get(icon.Link)
+ case *provider.Provider:
+ return icon.Get(icon.Search)
+ default:
+ return ""
+ }
+}
+
func (t *listItem) Title() (title string) {
switch e := t.internal.(type) {
case *source.Chapter:
- title = e.Name
+ var sb = strings.Builder{}
+
+ sb.WriteString(t.FilterValue())
if e.Volume != "" {
- title += " " + style.Faint(e.Volume)
+ sb.WriteString(" ")
+ sb.WriteString(style.Faint(e.Volume))
}
- case *source.Manga:
- title = e.Name
- case *history.SavedChapter:
- title = e.MangaName
- case *anilist.Manga:
- title = e.Name()
+
+ if e.IsDownloaded() {
+ sb.WriteString(" ")
+ sb.WriteString(icon.Get(icon.Downloaded))
+ }
+
+ title = sb.String()
default:
- title = t.title
+ title = t.FilterValue()
}
if title != "" && t.marked {
- title = fmt.Sprintf("%s %s", title, icon.Get(icon.Mark))
+ //title = fmt.Sprintf("%s %s", title, icon.Get(icon.Mark))
+ title = fmt.Sprintf("%s %s", title, t.getMark())
}
return
@@ -51,17 +69,45 @@ func (t *listItem) Description() (description string) {
description = e.URL
case *source.Manga:
description = e.URL
+ case *installer.Scraper:
+ description = e.GithubURL()
case *history.SavedChapter:
description = fmt.Sprintf("%s : %d / %d", e.Name, e.Index, e.MangaChaptersTotal)
+ case *provider.Provider:
+ sb := strings.Builder{}
+ if e.IsCustom {
+ sb.WriteString("Custom")
+ } else {
+ sb.WriteString("Builtin")
+ }
+
+ if e.UsesHeadless {
+ sb.WriteString(", uses headless chrome")
+ }
+
+ description = sb.String()
case *anilist.Manga:
description = e.SiteURL
- default:
- description = t.description
}
return
}
func (t *listItem) FilterValue() string {
- return strings.Split(t.Title(), "\033")[0]
+ switch e := t.internal.(type) {
+ case *source.Chapter:
+ return e.Name
+ case *source.Manga:
+ return e.Name
+ case *history.SavedChapter:
+ return e.MangaName
+ case *anilist.Manga:
+ return e.Name()
+ case *provider.Provider:
+ return e.Name
+ case *installer.Scraper:
+ return e.Name
+ default:
+ return ""
+ }
}
diff --git a/tui/keymap.go b/tui/keymap.go
index 1efab220..292b0fb2 100644
--- a/tui/keymap.go
+++ b/tui/keymap.go
@@ -3,6 +3,7 @@ package tui
import (
"github.com/charmbracelet/bubbles/key"
"github.com/charmbracelet/bubbles/list"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/style"
)
@@ -11,6 +12,7 @@ type statefulKeymap struct {
quit, forceQuit,
selectOne, selectAll, selectVolume, clearSelection,
+ acceptSearchSuggestion,
anilistSelect,
remove,
redownloadFailed,
@@ -73,7 +75,11 @@ func newStatefulKeymap() *statefulKeymap {
),
read: k(
keys("r"),
- help(style.Yellow("r"), style.Yellow("read")),
+ help(style.Fg(color.Orange)("r"), style.Fg(color.Orange)("read")),
+ ),
+ acceptSearchSuggestion: k(
+ keys("tab"),
+ help("tab", "accept search suggestion"),
),
redownloadFailed: k(
keys("r"),
@@ -150,7 +156,7 @@ func (k *statefulKeymap) help() ([]key.Binding, []key.Binding) {
search := withDescription(k.confirm, "search with selected")
return h(k.selectOne, k.selectAll, search), h(k.selectOne, k.selectAll, k.clearSelection, search)
case searchState:
- return to2(h(k.confirm, k.forceQuit))
+ return to2(h(k.confirm, k.acceptSearchSuggestion, k.forceQuit))
case mangasState:
return to2(h(k.confirm, k.back, k.openURL))
case chaptersState:
diff --git a/tui/update.go b/tui/update.go
index 90d67bb7..afd9b28d 100644
--- a/tui/update.go
+++ b/tui/update.go
@@ -7,15 +7,18 @@ import (
"github.com/charmbracelet/bubbles/progress"
tea "github.com/charmbracelet/bubbletea"
"github.com/metafates/mangal/anilist"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/history"
"github.com/metafates/mangal/installer"
"github.com/metafates/mangal/open"
"github.com/metafates/mangal/provider"
+ "github.com/metafates/mangal/query"
"github.com/metafates/mangal/source"
"github.com/metafates/mangal/style"
"github.com/metafates/mangal/util"
"github.com/samber/lo"
+ "github.com/samber/mo"
"github.com/spf13/viper"
"golang.org/x/exp/slices"
"time"
@@ -173,14 +176,19 @@ func (b *statefulBubble) updateLoading(msg tea.Msg) (tea.Model, tea.Cmd) {
b.previousState()
}
case []*anilist.Manga:
- manga, ok := anilist.GetRelation(b.selectedManga.Name)
+ manga, err := anilist.FindClosest(b.selectedManga.Name)
id := -1
- if ok {
+ if err == nil {
id = manga.ID
}
items := make([]list.Item, len(msg))
+ var marked int
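+ // remember which manga is already linked so it can be pre-selected below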
for i, manga := range msg {
+ if manga.ID == id {
+ marked = i
+ }
+
items[i] = &listItem{
internal: manga,
marked: manga.ID == id,
@@ -189,6 +197,7 @@ func (b *statefulBubble) updateLoading(msg tea.Msg) (tea.Model, tea.Cmd) {
cmd = b.anilistC.SetItems(items)
b.newState(anilistSelectState)
+ b.anilistC.Select(marked)
return b, tea.Batch(cmd, b.stopLoading())
case []*installer.Scraper:
b.newState(scrapersInstallState)
@@ -385,12 +394,28 @@ func (b *statefulBubble) updateSearch(msg tea.Msg) (tea.Model, tea.Cmd) {
case key.Matches(msg, b.keymap.confirm) && b.inputC.Value() != "":
b.startLoading()
b.newState(loadingState)
+ go query.Remember(b.inputC.Value(), 1)
return b, tea.Batch(b.searchManga(b.inputC.Value()), b.waitForMangas(), b.spinnerC.Tick)
+ case key.Matches(msg, b.keymap.acceptSearchSuggestion) && b.searchSuggestion.IsPresent():
+ b.inputC.SetValue(b.searchSuggestion.MustGet())
+ b.searchSuggestion = mo.None[string]()
+ b.inputC.SetCursor(len(b.inputC.Value()))
+ return b, nil
}
-
}
b.inputC, cmd = b.inputC.Update(msg)
+
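+ // recompute the search suggestion after every input update, clearing it when the query is empty or already matches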
+ if b.inputC.Value() != "" {
+ if suggestion, ok := query.Suggest(b.inputC.Value()).Get(); ok && suggestion != b.inputC.Value() {
+ b.searchSuggestion = mo.Some(suggestion)
+ } else {
+ b.searchSuggestion = mo.None[string]()
+ }
+ } else if b.searchSuggestion.IsPresent() {
+ b.searchSuggestion = mo.None[string]()
+ }
+
return b, cmd
}
@@ -409,6 +434,7 @@ func (b *statefulBubble) updateMangas(msg tea.Msg) (tea.Model, tea.Cmd) {
m, _ := b.mangasC.SelectedItem().(*listItem).internal.(*source.Manga)
b.selectedManga = m
+ go query.Remember(m.Name, 2)
return b, tea.Batch(b.getChapters(m), b.waitForChapters(), b.startLoading())
case key.Matches(msg, b.keymap.openURL):
if b.mangasC.SelectedItem() == nil {
@@ -454,7 +480,7 @@ func (b *statefulBubble) updateChapters(msg tea.Msg) (tea.Model, tea.Cmd) {
switch msg := msg.(type) {
case *anilist.Manga:
- cmd = b.chaptersC.NewStatusMessage(fmt.Sprintf(`Linked to %s %s`, style.Blue(msg.Name()), style.Faint(msg.SiteURL)))
+ cmd = b.chaptersC.NewStatusMessage(fmt.Sprintf(`Linked to %s %s`, style.Fg(color.Orange)(msg.Name()), style.Faint(msg.SiteURL)))
return b, cmd
case tea.KeyMsg:
switch {
@@ -576,7 +602,7 @@ func (b *statefulBubble) updateAnilistSelect(msg tea.Msg) (tea.Model, tea.Cmd) {
if err != nil {
b.raiseError(err)
}
- case key.Matches(msg, b.keymap.confirm):
+ case key.Matches(msg, b.keymap.confirm, b.keymap.selectOne):
if b.anilistC.SelectedItem() == nil {
break
}
@@ -589,7 +615,7 @@ func (b *statefulBubble) updateAnilistSelect(msg tea.Msg) (tea.Model, tea.Cmd) {
}
b.previousState()
- cmd = b.chaptersC.NewStatusMessage(fmt.Sprintf(`Linked %s to %s %s`, style.Magenta(b.selectedManga.Name), style.Blue(manga.Name()), style.Faint(manga.SiteURL)))
+ cmd = b.chaptersC.NewStatusMessage(fmt.Sprintf(`Linked to %s %s`, style.Fg(color.Orange)(manga.Name()), style.Faint(manga.SiteURL)))
return b, cmd
}
}
diff --git a/tui/view.go b/tui/view.go
index c64d109d..80248479 100644
--- a/tui/view.go
+++ b/tui/view.go
@@ -3,6 +3,7 @@ package tui
import (
"fmt"
"github.com/charmbracelet/lipgloss"
+ "github.com/metafates/mangal/color"
"github.com/metafates/mangal/constant"
"github.com/metafates/mangal/icon"
"github.com/metafates/mangal/style"
@@ -67,13 +68,25 @@ func (b *statefulBubble) viewSources() string {
}
func (b *statefulBubble) viewSearch() string {
+ lines := []string{
+ style.Title("Search Manga"),
+ "",
+ b.inputC.View(),
+ }
+
+ if b.searchSuggestion.IsPresent() {
+ lines = append(
+ lines,
+ "",
+ fmt.Sprintf("Search %s ?", style.Fg(color.Orange)(b.searchSuggestion.MustGet())),
+ "",
+ fmt.Sprintf("Press %s to accept", style.Bold(style.Faint(b.keymap.acceptSearchSuggestion.Help().Key))),
+ )
+ }
+
return b.renderLines(
true,
- []string{
- style.Title("Search Manga"),
- "",
- b.inputC.View(),
- },
+ lines,
)
}
@@ -95,11 +108,27 @@ func (b *statefulBubble) viewConfirm() string {
[]string{
style.Title("Confirm"),
"",
- fmt.Sprintf("%s Download %s?", icon.Get(icon.Question), util.Quantity(len(b.selectedChapters), "chapter")),
+ fmt.Sprintf("%s Download %s?", icon.Get(icon.Question), util.Quantify(len(b.selectedChapters), "chapter", "chapters")),
},
)
}
+func (b *statefulBubble) downloadingChapterMetainfo() string {
+ metainfo := strings.Builder{}
+
+ // Even though the chapter isn't supposed to be nil when this function is called,
+ // it can be nil for a brief moment.
+ // I assume this is because View() is called before Update()
+ if b.currentDownloadingChapter != nil {
+ metainfo.WriteString("From ")
+ metainfo.WriteString(style.Fg(color.Orange)(b.currentDownloadingChapter.Source().Name()))
+ metainfo.WriteString(" as ")
+ }
+
+ metainfo.WriteString(style.Fg(color.Purple)(viper.GetString(constant.FormatsUse)))
+ return metainfo.String()
+}
+
func (b *statefulBubble) viewRead() string {
var chapterName string
@@ -113,9 +142,11 @@ func (b *statefulBubble) viewRead() string {
[]string{
style.Title("Reading"),
"",
- style.Truncate(b.width)(fmt.Sprintf(icon.Get(icon.Progress)+" Downloading %s", style.Magenta(chapterName))),
+ style.Truncate(b.width)(fmt.Sprintf(icon.Get(icon.Progress)+" Downloading %s", style.Fg(color.Purple)(chapterName))),
"",
style.Truncate(b.width)(b.spinnerC.View() + b.progressStatus),
+ "",
+ style.Truncate(b.width)(b.downloadingChapterMetainfo()),
},
)
}
@@ -133,11 +164,13 @@ func (b *statefulBubble) viewDownload() string {
[]string{
style.Title("Downloading"),
"",
- style.Truncate(b.width)(fmt.Sprintf(icon.Get(icon.Progress)+" Downloading %s", style.Magenta(chapterName))),
+ style.Truncate(b.width)(fmt.Sprintf(icon.Get(icon.Progress)+" Downloading %s", style.Fg(color.Purple)(chapterName))),
"",
b.progressC.View(),
"",
style.Truncate(b.width)(b.spinnerC.View() + b.progressStatus),
+ "",
+ style.Truncate(b.width)(b.downloadingChapterMetainfo()),
},
)
}
@@ -149,10 +182,10 @@ func (b *statefulBubble) viewDownloadDone() string {
var msg string
{
- temp := strings.Split(util.Quantity(succeded, "chapter"), " ")
- temp[0] = style.Green(temp[0])
+ temp := strings.Split(util.Quantify(succeded, "chapter", "chapters"), " ")
+ temp[0] = style.Fg(color.Green)(temp[0])
s := strings.Join(temp, " ") + " downloaded"
- f := fmt.Sprintf("%s failed", style.Red(strconv.Itoa(failed)))
+ f := fmt.Sprintf("%s failed", style.Fg(color.Red)(strconv.Itoa(failed)))
msg = fmt.Sprintf("%s, %s", s, f)
}
@@ -178,7 +211,7 @@ func (b *statefulBubble) viewDownloadDone() string {
}
func (b *statefulBubble) viewError() string {
- errorMsg := wrap.String(style.Combined(style.Italic, style.Red)(b.lastError.Error()), b.width)
+ errorMsg := wrap.String(style.New().Italic(true).Foreground(color.Red).Render(b.lastError.Error()), b.width)
return b.renderLines(
true,
append([]string{
diff --git a/update/chapter.go b/update/chapter.go
new file mode 100644
index 00000000..6ee0dda6
--- /dev/null
+++ b/update/chapter.go
@@ -0,0 +1,40 @@
+package update
+
+import (
+ "github.com/metafates/mangal/constant"
+ "github.com/metafates/mangal/filesystem"
+ "github.com/metafates/mangal/log"
+ "os"
+ "path/filepath"
+)
+
+type downloadedChapter struct {
+ path string
+ format string
+}
+
+func getChapters(manga string) ([]*downloadedChapter, error) {
+ log.Infof("getting chapters for %s", manga)
+ var chapters []*downloadedChapter
+
+ err := filesystem.Api().Walk(manga, func(path string, info os.FileInfo, err error) error {
+ // ignore plain (folder-based) chapters for the sake of simplicity
+ if info.IsDir() {
+ return nil
+ }
+
+ name := info.Name()
+ ext := filepath.Ext(name)
+ // skip files without an extension to avoid slicing an empty string below
+ if ext == "" {
+ return nil
+ }
+
+ switch ext[1:] {
+ case constant.CBZ:
+ chapters = append(chapters, &downloadedChapter{path: path, format: constant.CBZ})
+ case constant.PDF:
+ chapters = append(chapters, &downloadedChapter{path: path, format: constant.PDF})
+ case constant.ZIP:
+ chapters = append(chapters, &downloadedChapter{path: path, format: constant.ZIP})
+ }
+
+ return nil
+ })
+
+ return chapters, err
+}
diff --git a/update/comicinfo.go b/update/comicinfo.go
new file mode 100644
index 00000000..43e588bf
--- /dev/null
+++ b/update/comicinfo.go
@@ -0,0 +1,82 @@
+package update
+
+import (
+ "encoding/xml"
+ "fmt"
+ "github.com/metafates/mangal/filesystem"
+ "github.com/metafates/mangal/source"
+ "github.com/metafates/mangal/util"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+func getAnyChapterComicInfo(mangaPath string) (*source.ComicInfo, error) {
+ // recursively search for .cbz files
+ // find the first one and get the name from it
+ var cbzFiles []string
+ err := filepath.Walk(mangaPath, func(path string, info os.FileInfo, err error) error {
+ if strings.HasSuffix(path, ".cbz") {
+ cbzFiles = append(cbzFiles, path)
+ }
+ return nil
+ })
+
+ if err != nil {
+ return nil, err
+ }
+
+ if len(cbzFiles) == 0 {
+ return nil, fmt.Errorf("no .cbz files found")
+ }
+
+ comicInfo, err := getComicInfoXML(cbzFiles[0])
+ if err != nil {
+ return nil, err
+ }
+
+ return comicInfo, nil
+}
+
+func getComicInfoXML(chapter string) (*source.ComicInfo, error) {
+ if !strings.HasSuffix(chapter, ".cbz") {
+ return nil, fmt.Errorf("chapter must be a .cbz file")
+ }
+
+ // open chapter as ReaderAt
+ file, err := filesystem.Api().Open(chapter)
+ if err != nil {
+ return nil, err
+ }
+
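+ // switch to the in-memory filesystem so the archive is extracted without touching the disk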
+ filesystem.SetMemMapFs()
+ defer filesystem.SetOsFs()
+
+ // extract ComicInfo.xml
+ stat, err := file.Stat()
+ if err != nil {
+ return nil, err
+ }
+
+ // No need to delete the file as it will be deleted when the memmap filesystem is reset
+ err = util.Unzip(file, stat.Size(), "T")
+ if err != nil {
+ return nil, err
+ }
+
+ // read ComicInfo.xml
+ contents, err := filesystem.Api().ReadFile(filepath.Join("T", "ComicInfo.xml"))
+ if err != nil {
+ return nil, err
+ }
+
+ // parse ComicInfo.xml
+ var comicInfo source.ComicInfo
+ err = xml.Unmarshal(contents, &comicInfo)
+ if err != nil {
+ return nil, err
+ }
+
+ return &comicInfo, nil
+
+}
diff --git a/update/name.go b/update/name.go
new file mode 100644
index 00000000..ed5e91d7
--- /dev/null
+++ b/update/name.go
@@ -0,0 +1,23 @@
+package update
+
+var nameCache = make(map[string]string)
+
+func GetName(manga string) (string, error) {
+ if name, ok := nameCache[manga]; ok {
+ return name, nil
+ }
+
+ seriesJSON, err := getSeriesJSON(manga)
+ if err == nil {
+ nameCache[manga] = seriesJSON.Metadata.Name
+ return seriesJSON.Metadata.Name, nil
+ }
+
+ comicInfo, err := getAnyChapterComicInfo(manga)
+ if err != nil {
+ return "", err
+ }
+
+ nameCache[manga] = comicInfo.Series
+ return comicInfo.Series, nil
+}
diff --git a/update/series.go b/update/series.go
new file mode 100644
index 00000000..fe179dd8
--- /dev/null
+++ b/update/series.go
@@ -0,0 +1,32 @@
+package update
+
+import (
+ "encoding/json"
+ "fmt"
+ "github.com/metafates/mangal/filesystem"
+ "github.com/metafates/mangal/source"
+ "path/filepath"
+)
+
+func getSeriesJSON(manga string) (*source.SeriesJSON, error) {
+ // check if series.json exists at manga dir
+ serisJSONPath := filepath.Join(manga, "series.json")
+ exists, err := filesystem.Api().Exists(serisJSONPath)
+ if err != nil {
+ return nil, err
+ }
+
+ if !exists {
+ return nil, fmt.Errorf("series.json must be present")
+ }
+
+ contents, err := filesystem.Api().ReadFile(serisJSONPath)
+ if err != nil {
+ return nil, err
+ }
+
+ var seriesJSON source.SeriesJSON
+
+ err = json.Unmarshal(contents, &seriesJSON)
+ return &seriesJSON, err
+}
diff --git a/update/update.go b/update/update.go
new file mode 100644
index 00000000..9ba0ff74
--- /dev/null
+++ b/update/update.go
@@ -0,0 +1,178 @@
+package update
+
+import (
+ "bytes"
+ "encoding/json"
+ "github.com/metafates/mangal/constant"
+ "github.com/metafates/mangal/converter/cbz"
+ "github.com/metafates/mangal/filesystem"
+ "github.com/metafates/mangal/log"
+ "github.com/metafates/mangal/source"
+ "github.com/metafates/mangal/util"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+func Metadata(mangaPath string) error {
+ log.Infof("extracting series name from %s", mangaPath)
+ name, err := GetName(mangaPath)
+ if err != nil {
+ log.Error(err)
+ return err
+ }
+
+ log.Infof("extracted name: %s", name)
+ log.Infof("finding %s on anilist", name)
+ manga := &source.Manga{
+ Name: name,
+ }
+
+ // will set new metadata from anilist
+ err = manga.PopulateMetadata(func(string) {})
+ if err != nil {
+ log.Error(err)
+ return err
+ }
+
+ chapters, err := getChapters(mangaPath)
+ if err != nil {
+ log.Error(err)
+ return err
+ }
+
+ manga.Chapters = make([]*source.Chapter, 0)
+ chaptersPaths := make(map[*source.Chapter]string)
+ for _, chapter := range chapters {
+ // since we are updating ComicInfo.xml here, we only care about CBZ archives
+ if chapter.format != constant.CBZ {
+ continue
+ }
+
+ log.Infof("getting ComicInfoXML from %s", chapter.path)
+ comicInfo, err := getComicInfoXML(chapter.path)
+ if err != nil {
+ log.Error(err)
+ continue
+ }
+
+ chap := &source.Chapter{
+ Name: comicInfo.Title,
+ Manga: manga,
+ URL: comicInfo.Web,
+ Index: uint16(comicInfo.Number),
+ }
+ manga.Chapters = append(manga.Chapters, chap)
+ chaptersPaths[chap] = chapter.path
+ }
+
+ // okay, we're ready to regenerate series.json and ComicInfo.xml now
+ seriesJSON := manga.SeriesJSON()
+ buf, err := json.Marshal(seriesJSON)
+ if err != nil {
+ log.Error(err)
+ return err
+ }
+
+ // update series.json
+ log.Info("updating series json")
+ err = filesystem.Api().WriteFile(filepath.Join(mangaPath, "series.json"), buf, os.ModePerm)
+ if err != nil {
+ log.Error(err)
+ return err
+ }
+
+ log.Info("downloading new cover")
+ // remove old cover(s).
+ // even though DownloadCover() will overwrite the previous one,
+ // the new cover may have a different extension,
+ // which would result in duplicates
+ files, err := filesystem.Api().ReadDir(mangaPath)
+ if err == nil {
+ for _, file := range files {
+ if util.FileStem(file.Name()) == "cover" {
+ _ = filesystem.Api().Remove(filepath.Join(mangaPath, file.Name()))
+ }
+ }
+ }
+ err = manga.DownloadCover(true, mangaPath, func(string) {})
+ if err != nil {
+ log.Error(err)
+ }
+
+ log.Infof("updating ComicInfo.xml for %d chapters", len(manga.Chapters))
+ for _, chapter := range manga.Chapters {
+ path := chaptersPaths[chapter]
+ file, err := filesystem.Api().Open(path)
+ if err != nil {
+ log.Error(err)
+ continue
+ }
+
+ stat, err := file.Stat()
+ if err != nil {
+ _ = file.Close()
+ continue
+ }
+
+ // go to memmap fs to unzip
+ filesystem.SetMemMapFs()
+ err = util.Unzip(file, stat.Size(), chapter.Name)
+ if err != nil {
+ log.Error(err)
+ _ = file.Close()
+ continue
+ }
+
+ // add pages before converting back to cbz
+ files, err := filesystem.Api().ReadDir(chapter.Name)
+ if err != nil {
+ log.Error(err)
+ _ = file.Close()
+ continue
+ }
+
+ for _, file := range files {
+ // skip ComicInfo.xml
+ if strings.HasSuffix(file.Name(), ".xml") {
+ continue
+ }
+
+ image, err := filesystem.Api().ReadFile(filepath.Join(chapter.Name, file.Name()))
+ // we cannot afford to lose any pages,
+ // so if we fail to open one, the whole process should stop
+ if err != nil {
+ log.Error(err)
+ return err
+ }
+
+ chapter.Pages = append(chapter.Pages, &source.Page{
+ Chapter: chapter,
+ Size: uint64(file.Size()),
+ Index: uint16(len(chapter.Pages)),
+ Extension: filepath.Ext(file.Name()),
+ Contents: bytes.NewBuffer(image),
+ })
+ }
+
+ _ = file.Close()
+
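+ // switch back to the real filesystem before replacing the archive on disk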
+ filesystem.SetOsFs()
+
+ log.Debugf("removing old %s", path)
+ err = filesystem.Api().Remove(path)
+ if err != nil {
+ log.Error(err)
+ continue
+ }
+
+ log.Debugf("saving to %s", path)
+ err = cbz.SaveTo(chapter, path)
+ if err != nil {
+ log.Error(err)
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/updater/version.go b/updater/version.go
deleted file mode 100644
index d8af8b8e..00000000
--- a/updater/version.go
+++ /dev/null
@@ -1,108 +0,0 @@
-package updater
-
-import (
- "encoding/json"
- "errors"
- "github.com/metafates/mangal/filesystem"
- "github.com/metafates/mangal/util"
- "github.com/metafates/mangal/where"
- "net/http"
- "os"
- "path/filepath"
- "time"
-)
-
-var (
- cachedLatestVersion string
- versionCacheFile = filepath.Join(where.Cache(), "version.json")
-)
-
-type versionCache struct {
- Version string `json:"version"`
- Updated time.Time `json:"updated"`
-}
-
-func getCachedVersion() (version string, err error) {
- if cachedLatestVersion != "" {
- return cachedLatestVersion, nil
- }
-
- exists, err := filesystem.Api().Exists(versionCacheFile)
- if err != nil {
- return
- }
-
- if !exists {
- return
- }
-
- var data []byte
- data, err = filesystem.Api().ReadFile(versionCacheFile)
- if err != nil {
- return
- }
-
- var cache versionCache
-
- err = json.Unmarshal(data, &cache)
- if err != nil {
- return
- }
-
- if time.Since(cache.Updated) > time.Hour {
- return
- }
-
- version = cache.Version
- return
-}
-
-func cacheVersion(version string) error {
- cachedLatestVersion = version
- cache := versionCache{
- Version: version,
- Updated: time.Now(),
- }
-
- data, err := json.Marshal(cache)
- if err != nil {
- return err
- }
-
- return filesystem.Api().WriteFile(versionCacheFile, data, os.ModePerm)
-}
-
-// LatestVersion returns the latest version of mangal.
-// It will fetch the latest version from the GitHub API.
-func LatestVersion() (version string, err error) {
- version, err = getCachedVersion()
- if err == nil && version != "" {
- return
- }
-
- resp, err := http.Get("https://api.github.com/repos/metafates/mangal/releases/latest")
- if err != nil {
- return
- }
-
- defer util.Ignore(resp.Body.Close)
-
- var release struct {
- TagName string `json:"tag_name"`
- }
-
- err = json.NewDecoder(resp.Body).Decode(&release)
- if err != nil {
- return
- }
-
- // remove the v from the tag name
- if release.TagName == "" {
- err = errors.New("empty tag name")
- return
- }
-
- version = release.TagName[1:]
- _ = cacheVersion(version)
- return
-}
diff --git a/util/option.go b/util/option.go
deleted file mode 100644
index f17a2a1e..00000000
--- a/util/option.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package util
-
-// Option encapsulates an optional value.
-// Similar to the Maybe monad in Haskell or Option in Rust.
-type Option[T any] struct {
- value T
- isSome bool
-}
-
-// Unwrap returns an internal value of the option.
-// Panics if the option is None.
-func (o Option[T]) Unwrap() T {
- if !o.IsSome() {
- panic("called `Option.Unwrap()` on a `None` value")
- }
-
- return o.value
-}
-
-// UnwrapOr returns an internal value of the option.
-// Returns the default value if the option is None.
-func (o Option[T]) UnwrapOr(or T) T {
- if !o.IsSome() {
- return or
- }
-
- return o.value
-}
-
-// IsSome returns true if the option is Some.
-func (o Option[T]) IsSome() bool {
- return o.isSome
-}
-
-// IsNone returns true if the option is None.
-func (o Option[T]) IsNone() bool {
- return !o.isSome
-}
-
-// Some returns a new Some option.
-func Some[T any](value T) Option[T] {
- return Option[T]{value: value, isSome: true}
-}
-
-// None returns a new None option.
-func None[T any]() Option[T] {
- return Option[T]{isSome: false}
-}
diff --git a/util/unzip.go b/util/unzip.go
new file mode 100644
index 00000000..b83f16d8
--- /dev/null
+++ b/util/unzip.go
@@ -0,0 +1,76 @@
+package util
+
+import (
+ "archive/zip"
+ "fmt"
+ "github.com/metafates/mangal/filesystem"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+func Unzip(zipStream io.ReaderAt, size int64, dest string) error {
+ r, err := zip.NewReader(zipStream, size)
+ if err != nil {
+ return err
+ }
+
+ err = filesystem.Api().MkdirAll(dest, os.ModePerm)
+ if err != nil {
+ return err
+ }
+
+ // Closure to address file descriptors issue with all the deferred .Close() methods
+ extractAndWriteFile := func(f *zip.File) error {
+ rc, err := f.Open()
+ if err != nil {
+ return err
+ }
+
+ defer Ignore(rc.Close)
+
+ path := filepath.Join(dest, f.Name)
+
+ // Check for ZipSlip (Directory traversal)
+ if !strings.HasPrefix(path, filepath.Clean(dest)+string(os.PathSeparator)) {
+ return fmt.Errorf("illegal file path: %s", path)
+ }
+
+ if f.FileInfo().IsDir() {
+ err = filesystem.Api().MkdirAll(path, f.Mode())
+ if err != nil {
+ return err
+ }
+ } else {
+ err = filesystem.Api().MkdirAll(filepath.Dir(path), f.Mode())
+ if err != nil {
+ return err
+ }
+
+ f, err := filesystem.Api().OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())
+ if err != nil {
+ return err
+ }
+
+ defer Ignore(f.Close)
+
+ _, err = io.Copy(f, rc)
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }
+
+ for _, f := range r.File {
+ err := extractAndWriteFile(f)
+
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/util/unzip_test.go b/util/unzip_test.go
new file mode 100644
index 00000000..8d74b30d
--- /dev/null
+++ b/util/unzip_test.go
@@ -0,0 +1,48 @@
+package util
+
+import (
+ "github.com/metafates/mangal/filesystem"
+ "github.com/samber/lo"
+ . "github.com/smartystreets/goconvey/convey"
+ "path/filepath"
+ "testing"
+)
+
+func TestUnzip(t *testing.T) {
+ Convey("Given a zip file", t, func() {
+ // Set system filesystem to access the testdata folder.
+ filesystem.SetOsFs()
+
+ path := filepath.Join(filepath.Dir(lo.Must(filepath.Abs("."))), filepath.Join("assets", "testdata", "zipdata.zip"))
+ file := lo.Must(filesystem.Api().Open(path))
+
+ // zip file acquired, switch back to memory filesystem.
+ filesystem.SetMemMapFs()
+
+ Convey("When unzipping it", func() {
+ err := Unzip(file, lo.Must(file.Stat()).Size(), "a")
+ Convey("Then the error should be nil", func() {
+ So(err, ShouldBeNil)
+ Convey("And the files should be extracted", func() {
+ for _, info := range []lo.Tuple2[string, bool]{
+ {filepath.Join("a", "zipdata", "hey.jpeg"), false},
+ {filepath.Join("a", "zipdata", "a"), true},
+ {filepath.Join("a", "zipdata", "a", "b"), true},
+ {filepath.Join("a", "zipdata", "a", "hello.txt"), false},
+ } {
+ filename := info.A
+ isDir := info.B
+
+ exists := lo.Must(filesystem.Api().Exists(filename))
+ So(exists, ShouldBeTrue)
+
+ if isDir {
+ isDir := lo.Must(filesystem.Api().IsDir(filename))
+ So(isDir, ShouldBeTrue)
+ }
+ }
+ })
+ })
+ })
+ })
+}
diff --git a/util/util.go b/util/util.go
index 444ebc45..57109b0a 100644
--- a/util/util.go
+++ b/util/util.go
@@ -3,6 +3,7 @@ package util
import (
"fmt"
"github.com/metafates/mangal/constant"
+ "github.com/metafates/mangal/filesystem"
"github.com/samber/lo"
"golang.org/x/exp/constraints"
"golang.org/x/term"
@@ -35,18 +36,13 @@ func SanitizeFilename(filename string) string {
return filename
}
-// Quantity returns formatted quantity.
-// Example:
-//
-// Quantity(1, "manga") -> "1 manga"
-// Quantity(2, "manga") -> "2 mangas"
-func Quantity(count int, thing string) string {
- thing = strings.TrimSuffix(thing, "s")
+// Quantify returns the count followed by the appropriate singular or plural form.
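+// Example:
+//
+//	Quantify(1, "manga", "mangas") -> "1 manga"
+//	Quantify(2, "manga", "mangas") -> "2 mangas"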
+func Quantify(count int, singular, plural string) string {
if count == 1 {
- return fmt.Sprintf("%d %s", count, thing)
+ return fmt.Sprintf("%d %s", count, singular)
}
- return fmt.Sprintf("%d %ss", count, thing)
+ return fmt.Sprintf("%d %s", count, plural)
}
// TerminalSize returns the dimensions of the given terminal.
@@ -134,43 +130,24 @@ func PrintErasable(msg string) (eraser func()) {
// Capitalize returns a string with the first letter capitalized.
func Capitalize(s string) string {
- return strings.ToUpper(s[:1]) + s[1:]
-}
-
-func CompareVersions(a, b string) (int, error) {
- type version struct {
- major, minor, patch int
+ if len(s) == 0 {
+ return s
}
- parse := func(s string) (version, error) {
- var v version
- _, err := fmt.Sscanf(strings.TrimPrefix(s, "v"), "%d.%d.%d", &v.major, &v.minor, &v.patch)
- return v, err
- }
-
- av, err := parse(a)
- if err != nil {
- return 0, err
- }
+ return strings.ToUpper(s[:1]) + s[1:]
+}
- bv, err := parse(b)
+// Delete removes the given path from the filesystem.
+// It can handle both files and directories (recursively).
+func Delete(path string) error {
+ stat, err := filesystem.Api().Stat(path)
if err != nil {
- return 0, err
+ return err
}
- for _, pair := range []lo.Tuple2[int, int]{
- {av.major, bv.major},
- {av.minor, bv.minor},
- {av.patch, bv.patch},
- } {
- if pair.A > pair.B {
- return 1, nil
- }
-
- if pair.A < pair.B {
- return -1, nil
- }
+ if stat.IsDir() {
+ return filesystem.Api().RemoveAll(path)
}
- return 0, nil
+ return filesystem.Api().Remove(path)
}
diff --git a/util/util_test.go b/util/util_test.go
index 0809809b..faf4267c 100644
--- a/util/util_test.go
+++ b/util/util_test.go
@@ -60,24 +60,27 @@ func TestFileStem(t *testing.T) {
}
func TestQuantity(t *testing.T) {
- plural := "Apples"
- Convey("Given a string "+plural, t, func() {
- Convey("When the quantity is 1", func() {
- result := Quantity(1, plural)
- Convey("Then the result should be '1 Apple'", func() {
- So(result, ShouldEqual, "1 Apple")
+ var (
+ singular = "singular"
+ plural = "plural"
+ )
+
+ Convey("Given a quantity of 1", t, func() {
+ quantity := 1
+ Convey("When the quantity is converted to a string", func() {
+ result := Quantify(quantity, singular, plural)
+ Convey("Then the result should be '1 singular'", func() {
+ So(result, ShouldEqual, "1 "+singular)
})
})
- Convey("When the quantity is 2", func() {
- result := Quantity(2, plural)
- Convey("Then the result should be '2 Apples'", func() {
- So(result, ShouldEqual, "2 Apples")
- })
- Convey("When the quantity is 0", func() {
- result := Quantity(0, plural)
- Convey("Then the result should be '0 Apples'", func() {
- So(result, ShouldEqual, "0 Apples")
- })
+ })
+
+ Convey("Given a quantity of 2", t, func() {
+ quantity := 2
+ Convey("When the quantity is converted to a string", func() {
+ result := Quantify(quantity, singular, plural)
+ Convey("Then the result should be '2 plural'", func() {
+ So(result, ShouldEqual, "2 "+plural)
})
})
})
@@ -108,87 +111,3 @@ func TestSanitizeFilename(t *testing.T) {
func TestTerminalSize(t *testing.T) {
t.Skipf("Cannot test terminal size")
}
-
-func TestCompareVersions(t *testing.T) {
- Convey("Given two versions with different patches", t, func() {
- v1, v2 := "1.0.0", "1.0.1"
- Convey("When comparing "+v1+" to "+v2, func() {
- result, err := CompareVersions(v1, v2)
- Convey("Error should be nil", func() {
- So(err, ShouldBeNil)
- Convey("Then the result should be -1", func() {
- So(result, ShouldEqual, -1)
- })
- })
- })
-
- Convey("When comparing "+v2+" to "+v1, func() {
- result, err := CompareVersions(v2, v1)
- Convey("Error should be nil", func() {
- So(err, ShouldBeNil)
- Convey("Then the result should be 1", func() {
- So(result, ShouldEqual, 1)
- })
- })
- })
- })
-
- Convey("Given two versions with different minor versions", t, func() {
- v1, v2 := "1.0.0", "1.1.0"
- Convey("When comparing "+v1+" to "+v2, func() {
- result, err := CompareVersions(v1, v2)
- Convey("Error should be nil", func() {
- So(err, ShouldBeNil)
- Convey("Then the result should be -1", func() {
- So(result, ShouldEqual, -1)
- })
- })
- })
-
- Convey("When comparing "+v2+" to "+v1, func() {
- result, err := CompareVersions(v2, v1)
- Convey("Error should be nil", func() {
- So(err, ShouldBeNil)
- Convey("Then the result should be 1", func() {
- So(result, ShouldEqual, 1)
- })
- })
- })
- })
-
- Convey("Given two versions with different major versions", t, func() {
- v1, v2 := "1.0.0", "2.0.0"
- Convey("When comparing "+v1+" to "+v2, func() {
- result, err := CompareVersions(v1, v2)
- Convey("Error should be nil", func() {
- So(err, ShouldBeNil)
- Convey("Then the result should be -1", func() {
- So(result, ShouldEqual, -1)
- })
- })
- })
-
- Convey("When comparing "+v2+" to "+v1, func() {
- result, err := CompareVersions(v2, v1)
- Convey("Error should be nil", func() {
- So(err, ShouldBeNil)
- Convey("Then the result should be 1", func() {
- So(result, ShouldEqual, 1)
- })
- })
- })
- })
-
- Convey("Given two same versions", t, func() {
- v1, v2 := "1.0.0", "1.0.0"
- Convey("When comparing "+v1+" to "+v2, func() {
- result, err := CompareVersions(v1, v2)
- Convey("Error should be nil", func() {
- So(err, ShouldBeNil)
- Convey("Then the result should be 0", func() {
- So(result, ShouldEqual, 0)
- })
- })
- })
- })
-}
diff --git a/vendor/github.com/iancoleman/orderedmap/LICENSE b/vendor/github.com/iancoleman/orderedmap/LICENSE
new file mode 100644
index 00000000..2732e379
--- /dev/null
+++ b/vendor/github.com/iancoleman/orderedmap/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Ian Coleman
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/iancoleman/orderedmap/orderedmap.go b/vendor/github.com/iancoleman/orderedmap/orderedmap.go
new file mode 100644
index 00000000..2341f916
--- /dev/null
+++ b/vendor/github.com/iancoleman/orderedmap/orderedmap.go
@@ -0,0 +1,262 @@
+package orderedmap
+
+import (
+ "bytes"
+ "encoding/json"
+ "sort"
+)
+
+type Pair struct {
+ key string
+ value interface{}
+}
+
+func (kv *Pair) Key() string {
+ return kv.key
+}
+
+func (kv *Pair) Value() interface{} {
+ return kv.value
+}
+
+type ByPair struct {
+ Pairs []*Pair
+ LessFunc func(a *Pair, j *Pair) bool
+}
+
+func (a ByPair) Len() int { return len(a.Pairs) }
+func (a ByPair) Swap(i, j int) { a.Pairs[i], a.Pairs[j] = a.Pairs[j], a.Pairs[i] }
+func (a ByPair) Less(i, j int) bool { return a.LessFunc(a.Pairs[i], a.Pairs[j]) }
+
+type OrderedMap struct {
+ keys []string
+ values map[string]interface{}
+ escapeHTML bool
+}
+
+func New() *OrderedMap {
+ o := OrderedMap{}
+ o.keys = []string{}
+ o.values = map[string]interface{}{}
+ o.escapeHTML = true
+ return &o
+}
+
+func (o *OrderedMap) SetEscapeHTML(on bool) {
+ o.escapeHTML = on
+}
+
+func (o *OrderedMap) Get(key string) (interface{}, bool) {
+ val, exists := o.values[key]
+ return val, exists
+}
+
+func (o *OrderedMap) Set(key string, value interface{}) {
+ _, exists := o.values[key]
+ if !exists {
+ o.keys = append(o.keys, key)
+ }
+ o.values[key] = value
+}
+
+func (o *OrderedMap) Delete(key string) {
+ // check key is in use
+ _, ok := o.values[key]
+ if !ok {
+ return
+ }
+ // remove from keys
+ for i, k := range o.keys {
+ if k == key {
+ o.keys = append(o.keys[:i], o.keys[i+1:]...)
+ break
+ }
+ }
+ // remove from values
+ delete(o.values, key)
+}
+
+func (o *OrderedMap) Keys() []string {
+ return o.keys
+}
+
+// SortKeys Sort the map keys using your sort func
+func (o *OrderedMap) SortKeys(sortFunc func(keys []string)) {
+ sortFunc(o.keys)
+}
+
+// Sort Sort the map using your sort func
+func (o *OrderedMap) Sort(lessFunc func(a *Pair, b *Pair) bool) {
+ pairs := make([]*Pair, len(o.keys))
+ for i, key := range o.keys {
+ pairs[i] = &Pair{key, o.values[key]}
+ }
+
+ sort.Sort(ByPair{pairs, lessFunc})
+
+ for i, pair := range pairs {
+ o.keys[i] = pair.key
+ }
+}
+
+func (o *OrderedMap) UnmarshalJSON(b []byte) error {
+ if o.values == nil {
+ o.values = map[string]interface{}{}
+ }
+ err := json.Unmarshal(b, &o.values)
+ if err != nil {
+ return err
+ }
+ dec := json.NewDecoder(bytes.NewReader(b))
+ if _, err = dec.Token(); err != nil { // skip '{'
+ return err
+ }
+ o.keys = make([]string, 0, len(o.values))
+ return decodeOrderedMap(dec, o)
+}
+
+func decodeOrderedMap(dec *json.Decoder, o *OrderedMap) error {
+ hasKey := make(map[string]bool, len(o.values))
+ for {
+ token, err := dec.Token()
+ if err != nil {
+ return err
+ }
+ if delim, ok := token.(json.Delim); ok && delim == '}' {
+ return nil
+ }
+ key := token.(string)
+ if hasKey[key] {
+ // duplicate key
+ for j, k := range o.keys {
+ if k == key {
+ copy(o.keys[j:], o.keys[j+1:])
+ break
+ }
+ }
+ o.keys[len(o.keys)-1] = key
+ } else {
+ hasKey[key] = true
+ o.keys = append(o.keys, key)
+ }
+
+ token, err = dec.Token()
+ if err != nil {
+ return err
+ }
+ if delim, ok := token.(json.Delim); ok {
+ switch delim {
+ case '{':
+ if values, ok := o.values[key].(map[string]interface{}); ok {
+ newMap := OrderedMap{
+ keys: make([]string, 0, len(values)),
+ values: values,
+ escapeHTML: o.escapeHTML,
+ }
+ if err = decodeOrderedMap(dec, &newMap); err != nil {
+ return err
+ }
+ o.values[key] = newMap
+ } else if oldMap, ok := o.values[key].(OrderedMap); ok {
+ newMap := OrderedMap{
+ keys: make([]string, 0, len(oldMap.values)),
+ values: oldMap.values,
+ escapeHTML: o.escapeHTML,
+ }
+ if err = decodeOrderedMap(dec, &newMap); err != nil {
+ return err
+ }
+ o.values[key] = newMap
+ } else if err = decodeOrderedMap(dec, &OrderedMap{}); err != nil {
+ return err
+ }
+ case '[':
+ if values, ok := o.values[key].([]interface{}); ok {
+ if err = decodeSlice(dec, values, o.escapeHTML); err != nil {
+ return err
+ }
+ } else if err = decodeSlice(dec, []interface{}{}, o.escapeHTML); err != nil {
+ return err
+ }
+ }
+ }
+ }
+}
+
+func decodeSlice(dec *json.Decoder, s []interface{}, escapeHTML bool) error {
+ for index := 0; ; index++ {
+ token, err := dec.Token()
+ if err != nil {
+ return err
+ }
+ if delim, ok := token.(json.Delim); ok {
+ switch delim {
+ case '{':
+ if index < len(s) {
+ if values, ok := s[index].(map[string]interface{}); ok {
+ newMap := OrderedMap{
+ keys: make([]string, 0, len(values)),
+ values: values,
+ escapeHTML: escapeHTML,
+ }
+ if err = decodeOrderedMap(dec, &newMap); err != nil {
+ return err
+ }
+ s[index] = newMap
+ } else if oldMap, ok := s[index].(OrderedMap); ok {
+ newMap := OrderedMap{
+ keys: make([]string, 0, len(oldMap.values)),
+ values: oldMap.values,
+ escapeHTML: escapeHTML,
+ }
+ if err = decodeOrderedMap(dec, &newMap); err != nil {
+ return err
+ }
+ s[index] = newMap
+ } else if err = decodeOrderedMap(dec, &OrderedMap{}); err != nil {
+ return err
+ }
+ } else if err = decodeOrderedMap(dec, &OrderedMap{}); err != nil {
+ return err
+ }
+ case '[':
+ if index < len(s) {
+ if values, ok := s[index].([]interface{}); ok {
+ if err = decodeSlice(dec, values, escapeHTML); err != nil {
+ return err
+ }
+ } else if err = decodeSlice(dec, []interface{}{}, escapeHTML); err != nil {
+ return err
+ }
+ } else if err = decodeSlice(dec, []interface{}{}, escapeHTML); err != nil {
+ return err
+ }
+ case ']':
+ return nil
+ }
+ }
+ }
+}
+
+func (o OrderedMap) MarshalJSON() ([]byte, error) {
+ var buf bytes.Buffer
+ buf.WriteByte('{')
+ encoder := json.NewEncoder(&buf)
+ encoder.SetEscapeHTML(o.escapeHTML)
+ for i, k := range o.keys {
+ if i > 0 {
+ buf.WriteByte(',')
+ }
+ // add key
+ if err := encoder.Encode(k); err != nil {
+ return nil, err
+ }
+ buf.WriteByte(':')
+ // add value
+ if err := encoder.Encode(o.values[k]); err != nil {
+ return nil, err
+ }
+ }
+ buf.WriteByte('}')
+ return buf.Bytes(), nil
+}
diff --git a/vendor/github.com/iancoleman/orderedmap/readme.md b/vendor/github.com/iancoleman/orderedmap/readme.md
new file mode 100644
index 00000000..5da0e1cd
--- /dev/null
+++ b/vendor/github.com/iancoleman/orderedmap/readme.md
@@ -0,0 +1,79 @@
+# orderedmap
+
+A Go data type equivalent to Python's collections.OrderedDict
+
+Retains order of keys in maps
+
+Can be JSON serialized / deserialized
+
+# Usage
+
+```go
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+	"sort"
+
+	"github.com/iancoleman/orderedmap"
+)
+
+func main() {
+	// use New() instead of o := map[string]interface{}{}
+	o := orderedmap.New()
+
+	// use SetEscapeHTML() to control whether problematic HTML characters are escaped; the default is true
+	o.SetEscapeHTML(false)
+
+	// use Set instead of o["a"] = 1
+	o.Set("a", 1)
+
+	// add some value with special characters
+	o.Set("b", "\\.<>[]{}_-")
+
+	// use Get instead of i, ok := o["a"]
+	val, ok := o.Get("a")
+	fmt.Println(val, ok)
+
+	// use Keys instead of for k, v := range o
+	for _, k := range o.Keys() {
+		v, _ := o.Get(k)
+		fmt.Println(k, v)
+	}
+
+	// use o.Delete instead of delete(o, key)
+	o.Delete("a")
+
+	// serialize to a json string using encoding/json
+	bytes, err := json.Marshal(o)
+	prettyBytes, _ := json.MarshalIndent(o, "", "  ")
+	fmt.Println(string(bytes), string(prettyBytes), err)
+
+	// deserialize a json string using encoding/json
+	// all maps (including nested maps) will be parsed as orderedmaps
+	s := `{"a": 1}`
+	decoded := orderedmap.New()
+	if err := json.Unmarshal([]byte(s), decoded); err != nil {
+		fmt.Println(err)
+	}
+
+	// sort the keys
+	decoded.SortKeys(sort.Strings)
+
+	// sort by Pair
+	decoded.Sort(func(a *orderedmap.Pair, b *orderedmap.Pair) bool {
+		return a.Value().(float64) < b.Value().(float64)
+	})
+}
+```
+
+# Caveats
+
+* OrderedMap only takes strings for the key, as per [the JSON spec](http://json.org/).
+
+# Tests
+
+```
+go test
+```
+
+# Alternatives
+
+None of the alternatives offer JSON serialization.
+
+* [cevaris/ordered_map](https://github.com/cevaris/ordered_map)
+* [mantyr/iterator](https://github.com/mantyr/iterator)
diff --git a/vendor/github.com/invopop/jsonschema/.golangci.yml b/vendor/github.com/invopop/jsonschema/.golangci.yml
new file mode 100644
index 00000000..f1922d81
--- /dev/null
+++ b/vendor/github.com/invopop/jsonschema/.golangci.yml
@@ -0,0 +1,89 @@
+run:
+ tests: true
+ max-same-issues: 50
+ skip-dirs:
+ - resources
+ - old
+ skip-files:
+ - cmd/protopkg/main.go
+
+output:
+ print-issued-lines: false
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - megacheck
+ - lll
+ - typecheck # `go build` catches this, and it doesn't currently work with Go 1.11 modules
+ - goimports # horrendously slow with go modules :(
+ - dupl # has never been actually useful
+ - gochecknoglobals
+ - gochecknoinits
+ - interfacer # author deprecated it because it provides bad suggestions
+ - funlen
+ - whitespace
+ - godox
+ - wsl
+ - dogsled
+ - gomnd
+ - gocognit
+ - gocyclo
+ - scopelint
+ - godot
+ - nestif
+ - testpackage
+ - goerr113
+ - gci
+ - gofumpt
+ - exhaustivestruct
+ - nlreturn
+ - forbidigo
+ - cyclop
+ - paralleltest
+ - ifshort # so annoying
+ - golint
+ - tagliatelle
+ - forcetypeassert
+ - wrapcheck
+ - revive
+ - structcheck
+ - stylecheck
+ - exhaustive
+ - varnamelen
+
+linters-settings:
+ govet:
+ check-shadowing: true
+ use-installed-packages: true
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 8
+ min-occurrences: 3
+ gocyclo:
+ min-complexity: 20
+ gocritic:
+ disabled-checks:
+ - ifElseChain
+
+
+issues:
+ max-per-linter: 0
+ max-same: 0
+ exclude-use-default: false
+ exclude:
+ # Captured by errcheck.
+ - '^(G104|G204):'
+ # Very commonly not checked.
+ - 'Error return value of .(.*\.Help|.*\.MarkFlagRequired|(os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*Print(f|ln|)|os\.(Un)?Setenv). is not checked'
+ # Weird error only seen on Kochiku...
+ - 'internal error: no range for'
+ - 'exported method `.*\.(MarshalJSON|UnmarshalJSON|URN|Payload|GoString|Close|Provides|Requires|ExcludeFromHash|MarshalText|UnmarshalText|Description|Check|Poll|Severity)` should have comment or be unexported'
+ - 'composite literal uses unkeyed fields'
+ - 'declaration of "err" shadows declaration'
+ - 'by other packages, and that stutters'
+ - 'Potential file inclusion via variable'
+ - 'at least one file in a package should have a package comment'
+ - 'bad syntax for struct tag pair'
diff --git a/vendor/github.com/invopop/jsonschema/COPYING b/vendor/github.com/invopop/jsonschema/COPYING
new file mode 100644
index 00000000..2993ec08
--- /dev/null
+++ b/vendor/github.com/invopop/jsonschema/COPYING
@@ -0,0 +1,19 @@
+Copyright (C) 2014 Alec Thomas
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/invopop/jsonschema/README.md b/vendor/github.com/invopop/jsonschema/README.md
new file mode 100644
index 00000000..100ad0c7
--- /dev/null
+++ b/vendor/github.com/invopop/jsonschema/README.md
@@ -0,0 +1,363 @@
+# Go JSON Schema Reflection
+
+[![Lint](https://github.com/invopop/jsonschema/actions/workflows/lint.yaml/badge.svg)](https://github.com/invopop/jsonschema/actions/workflows/lint.yaml)
+[![Test Go](https://github.com/invopop/jsonschema/actions/workflows/test.yaml/badge.svg)](https://github.com/invopop/jsonschema/actions/workflows/test.yaml)
+[![Go Report Card](https://goreportcard.com/badge/github.com/invopop/jsonschema)](https://goreportcard.com/report/github.com/invopop/jsonschema)
+[![GoDoc](https://godoc.org/github.com/invopop/jsonschema?status.svg)](https://godoc.org/github.com/invopop/jsonschema)
+![Latest Tag](https://img.shields.io/github/v/tag/invopop/jsonschema)
+
+This package can be used to generate [JSON Schemas](http://json-schema.org/latest/json-schema-validation.html) from Go types through reflection.
+
+- Supports arbitrarily complex types, including `interface{}`, maps, slices, etc.
+- Supports json-schema features such as minLength, maxLength, pattern, format, etc.
+- Supports simple string and numeric enums.
+- Supports custom property fields via the `jsonschema_extras` struct tag.
+
+This repository is a fork of the original [jsonschema](https://github.com/alecthomas/jsonschema) by [@alecthomas](https://github.com/alecthomas). At [Invopop](https://invopop.com) we use jsonschema as a cornerstone in our [GOBL library](https://github.com/invopop/gobl), and wanted to be able to continue building and adding features without taking up Alec's time. There have been a few significant changes that probably mean this version is not compatible with Alec's:
+
+- The original was stuck on the draft-04 version of JSON Schema; we've now moved to the latest JSON Schema Draft 2020-12.
+- Schema IDs are added automatically from the current Go package's URL in order to be unique, and can be disabled with the `Anonymous` option.
+- Support for the `FullyQualifyTypeName` option has been removed. If you have conflicts, you should use multiple schema files with different IDs, set the `DoNotReference` option to true to hide definitions completely, or add your own naming strategy using the `Namer` property.
+- Support for `yaml` tags and related options has been dropped for the sake of simplification. There were a [few inconsistencies](https://github.com/invopop/jsonschema/pull/21) around this that have now been fixed.
+
+## Versions
+
+This project is still under the v0 scheme; as per Go convention, breaking changes are likely. Please pin your Go modules to specific branches, and reach out if you think something can be improved.
+
+## Example
+
+The following Go type:
+
+```go
+type TestUser struct {
+ ID int `json:"id"`
+ Name string `json:"name" jsonschema:"title=the name,description=The name of a friend,example=joe,example=lucy,default=alex"`
+ Friends []int `json:"friends,omitempty" jsonschema_description:"The list of IDs, omitted when empty"`
+ Tags map[string]interface{} `json:"tags,omitempty" jsonschema_extras:"a=b,foo=bar,foo=bar1"`
+ BirthDate time.Time `json:"birth_date,omitempty" jsonschema:"oneof_required=date"`
+ YearOfBirth string `json:"year_of_birth,omitempty" jsonschema:"oneof_required=year"`
+ Metadata interface{} `json:"metadata,omitempty" jsonschema:"oneof_type=string;array"`
+ FavColor string `json:"fav_color,omitempty" jsonschema:"enum=red,enum=green,enum=blue"`
+}
+```
+
+Results in the following JSON Schema:
+
+```go
+jsonschema.Reflect(&TestUser{})
+```
+
+```json
+{
+ "$schema": "http://json-schema.org/draft/2020-12/schema",
+  "$ref": "#/$defs/TestUser",
+  "$defs": {
+    "TestUser": {
+ "oneOf": [
+ {
+ "required": ["birth_date"],
+ "title": "date"
+ },
+ {
+ "required": ["year_of_birth"],
+ "title": "year"
+ }
+ ],
+ "properties": {
+ "id": {
+ "type": "integer"
+ },
+ "name": {
+ "type": "string",
+ "title": "the name",
+ "description": "The name of a friend",
+ "default": "alex",
+ "examples": ["joe", "lucy"]
+ },
+ "friends": {
+ "items": {
+ "type": "integer"
+ },
+ "type": "array",
+ "description": "The list of IDs, omitted when empty"
+ },
+ "tags": {
+ "type": "object",
+ "a": "b",
+ "foo": ["bar", "bar1"]
+ },
+ "birth_date": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "year_of_birth": {
+ "type": "string"
+ },
+ "metadata": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ }
+ ]
+ },
+ "fav_color": {
+ "type": "string",
+ "enum": ["red", "green", "blue"]
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "required": ["id", "name"]
+ }
+ }
+}
+```
+
+## YAML
+
+Support for `yaml` tags has now been removed. If you feel very strongly about this, we've opened a discussion to hear your comments: https://github.com/invopop/jsonschema/discussions/28
+
+The recommended approach if you need to deal with YAML data is to first convert to JSON. The [invopop/yaml](https://github.com/invopop/yaml) library will make this trivial.
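+
+As a rough, hedged sketch of that workflow (the `YAMLToJSON` helper and the `User` type below are assumptions for illustration only, not part of this package):
+
+```go
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"github.com/invopop/yaml"
+)
+
+// User is a hypothetical type used only for this sketch.
+type User struct {
+	ID   int    `json:"id"`
+	Name string `json:"name"`
+}
+
+func main() {
+	src := []byte("id: 1\nname: alice\n")
+
+	// First convert the YAML document to JSON...
+	data, err := yaml.YAMLToJSON(src)
+	if err != nil {
+		panic(err)
+	}
+
+	// ...then use the regular encoding/json tooling on the result.
+	var u User
+	if err := json.Unmarshal(data, &u); err != nil {
+		panic(err)
+	}
+	fmt.Printf("%+v\n", u)
+}
+```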
+
+## Configurable behaviour
+
+The behaviour of the schema generator can be altered with parameters when a `jsonschema.Reflector`
+instance is created.
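+
+For instance, a minimal sketch of a customised reflector might look like this (the option fields shown are the ones documented on `Reflector`; `TestUser` is reused from the example above):
+
+```go
+r := &jsonschema.Reflector{
+	// Don't add "additionalProperties": false to generated objects.
+	AllowAdditionalProperties: true,
+	// Only fields tagged `jsonschema:"required"` become required.
+	RequiredFromJSONSchemaTags: true,
+	// Inline definitions instead of referencing them through $defs.
+	DoNotReference: true,
+}
+schema := r.Reflect(&TestUser{})
+_ = schema
+```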
+
+### ExpandedStruct
+
+If set to `true`, the top-level struct is expanded in the schema root instead of referencing itself in the definitions. The type passed must be a struct type.
+
+e.g.
+
+```go
+type GrandfatherType struct {
+ FamilyName string `json:"family_name" jsonschema:"required"`
+}
+
+type SomeBaseType struct {
+ SomeBaseProperty int `json:"some_base_property"`
+ // The jsonschema required tag is nonsensical for private and ignored properties.
+ // Their presence here tests that the fields *will not* be required in the output
+ // schema, even if they are tagged required.
+ somePrivateBaseProperty string `json:"i_am_private" jsonschema:"required"`
+ SomeIgnoredBaseProperty string `json:"-" jsonschema:"required"`
+ SomeSchemaIgnoredProperty string `jsonschema:"-,required"`
+ SomeUntaggedBaseProperty bool `jsonschema:"required"`
+ someUnexportedUntaggedBaseProperty bool
+ Grandfather GrandfatherType `json:"grand"`
+}
+```
+
+will output:
+
+```json
+{
+ "$schema": "http://json-schema.org/draft/2020-12/schema",
+ "required": ["some_base_property", "grand", "SomeUntaggedBaseProperty"],
+ "properties": {
+ "SomeUntaggedBaseProperty": {
+ "type": "boolean"
+ },
+ "grand": {
+ "$schema": "http://json-schema.org/draft/2020-12/schema",
+      "$ref": "#/$defs/GrandfatherType"
+ },
+ "some_base_property": {
+ "type": "integer"
+ }
+ },
+ "type": "object",
+ "$defs": {
+ "GrandfatherType": {
+ "required": ["family_name"],
+ "properties": {
+ "family_name": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object"
+ }
+ }
+}
+```
+
+### Using Go Comments
+
+Writing a good schema with descriptions inside tags can become cumbersome and tedious, especially if you already have some Go comments around your types and field definitions. If you'd like to take advantage of these existing comments, you can use the `AddGoComments(base, path string)` method on the reflector to parse your Go files and automatically build a dictionary mapping Go import paths, types, and fields to their comments. These will then be used automatically as description fields, and can be overridden with a manual definition if needed.
+
+Take a simplified example of a User struct which, for the sake of simplicity, we assume is defined inside this package:
+
+```go
+package main
+
+// User is used as a base to provide tests for comments.
+type User struct {
+ // Unique sequential identifier.
+ ID int `json:"id" jsonschema:"required"`
+ // Name of the user
+ Name string `json:"name"`
+}
+```
+
+To get the comments provided into your JSON schema, use a regular `Reflector` and add the go code using an import module URL and path. Fully qualified go module paths cannot be determined reliably by the `go/parser` library, so we need to introduce this manually:
+
+```go
+r := new(Reflector)
+if err := r.AddGoComments("github.com/invopop/jsonschema", "./"); err != nil {
+ // deal with error
+}
+s := r.Reflect(&User{})
+// output
+```
+
+Expect the results to be similar to:
+
+```json
+{
+ "$schema": "http://json-schema.org/draft/2020-12/schema",
+ "$ref": "#/$defs/User",
+ "$defs": {
+ "User": {
+ "required": ["id"],
+ "properties": {
+ "id": {
+ "type": "integer",
+ "description": "Unique sequential identifier."
+ },
+ "name": {
+ "type": "string",
+ "description": "Name of the user"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "description": "User is used as a base to provide tests for comments."
+ }
+ }
+}
+```
+
+### Custom Key Naming
+
+In some situations, the keys actually written to the output are different from the Go struct field names.
+
+This is often the case when writing a configuration file to YAML or JSON from a Go struct, or when returning a JSON response for a Web API: APIs typically use snake_case, while Go uses PascalCase.
+
+You can pass a `func(string) string` function to `Reflector`'s `KeyNamer` option to map Go field names to JSON key names and reflect the aforementioned transformations, without having to specify `json:"..."` on every struct field.
+
+For example, consider the following struct:
+
+```go
+type User struct {
+ GivenName string
+ PasswordSalted []byte `json:"salted_password"`
+}
+```
+
+We can transform field names to snake_case in the generated JSON schema:
+
+```go
+r := new(jsonschema.Reflector)
+r.KeyNamer = strcase.SnakeCase // from package github.com/stoewer/go-strcase
+
+r.Reflect(&User{})
+```
+
+Will yield
+
+```diff
+ {
+ "$schema": "http://json-schema.org/draft/2020-12/schema",
+ "$ref": "#/$defs/User",
+ "$defs": {
+ "User": {
+ "properties": {
+- "GivenName": {
++ "given_name": {
+ "type": "string"
+ },
+ "salted_password": {
+ "type": "string",
+ "contentEncoding": "base64"
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+- "required": ["GivenName", "salted_password"]
++ "required": ["given_name", "salted_password"]
+ }
+ }
+ }
+```
+
+As you can see, if a field name has a `json:""` tag set, the `key` argument to `KeyNamer` will have the value of that tag.
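+
+Since `KeyNamer` cannot tell whether the name it receives came from a tag or from the Go field, one possible, purely illustrative approach is a wrapper that leaves names which already look like snake_case untouched (this sketch assumes the standard `strings` package alongside the `strcase` helper used above):
+
+```go
+r.KeyNamer = func(key string) string {
+	// Heuristic sketch: names coming from json tags (e.g. "salted_password")
+	// usually already contain an underscore, so pass them through unchanged.
+	if strings.Contains(key, "_") {
+		return key
+	}
+	return strcase.SnakeCase(key)
+}
+```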
+
+### Custom Type Definitions
+
+Sometimes it can be useful to have custom JSON Marshal and Unmarshal methods in your structs that automatically convert, for example, a string into an object.
+
+To override auto-generating an object type for your type, implement the `JSONSchema() *Schema` method and whatever is defined will be provided in the schema definitions.
+
+Take the following simplified example of a `CompactDate` that only includes the Year and Month:
+
+```go
+type CompactDate struct {
+ Year int
+ Month int
+}
+
+func (d *CompactDate) UnmarshalJSON(data []byte) error {
+ if len(data) != 9 {
+ return errors.New("invalid compact date length")
+ }
+ var err error
+ d.Year, err = strconv.Atoi(string(data[1:5]))
+ if err != nil {
+ return err
+ }
+ d.Month, err = strconv.Atoi(string(data[7:8]))
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func (d *CompactDate) MarshalJSON() ([]byte, error) {
+ buf := new(bytes.Buffer)
+ buf.WriteByte('"')
+ buf.WriteString(fmt.Sprintf("%d-%02d", d.Year, d.Month))
+ buf.WriteByte('"')
+ return buf.Bytes(), nil
+}
+
+func (CompactDate) JSONSchema() *Schema {
+ return &Schema{
+ Type: "string",
+ Title: "Compact Date",
+ Description: "Short date that only includes year and month",
+ Pattern: "^[0-9]{4}-[0-1][0-9]$",
+ }
+}
+```
+
+The resulting schema generated for this struct would look like:
+
+```json
+{
+ "$schema": "http://json-schema.org/draft/2020-12/schema",
+ "$ref": "#/$defs/CompactDate",
+ "$defs": {
+ "CompactDate": {
+ "pattern": "^[0-9]{4}-[0-1][0-9]$",
+ "type": "string",
+ "title": "Compact Date",
+ "description": "Short date that only includes year and month"
+ }
+ }
+}
+```
diff --git a/vendor/github.com/invopop/jsonschema/comment_extractor.go b/vendor/github.com/invopop/jsonschema/comment_extractor.go
new file mode 100644
index 00000000..0088b412
--- /dev/null
+++ b/vendor/github.com/invopop/jsonschema/comment_extractor.go
@@ -0,0 +1,90 @@
+package jsonschema
+
+import (
+ "fmt"
+ "io/fs"
+ gopath "path"
+ "path/filepath"
+ "strings"
+
+ "go/ast"
+ "go/doc"
+ "go/parser"
+ "go/token"
+)
+
+// ExtractGoComments will read all the go files contained in the provided path,
+// including sub-directories, in order to generate a dictionary of comments
+// associated with Types and Fields. The results will be added to the `commentsMap`
+// provided in the parameters and expected to be used for Schema "description" fields.
+//
+// The `go/parser` library is used to extract all the comments and unfortunately doesn't
+// have a built-in way to determine the fully qualified name of a package. The `base` parameter,
+// the URL used to import that package, is thus required to be able to match reflected types.
+//
+// When parsing type comments, we use the `go/doc`'s Synopsis method to extract the first phrase
+// only. Field comments, which tend to be much shorter, will include everything.
+func ExtractGoComments(base, path string, commentMap map[string]string) error {
+ fset := token.NewFileSet()
+ dict := make(map[string][]*ast.Package)
+ err := filepath.Walk(path, func(path string, info fs.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if info.IsDir() {
+ d, err := parser.ParseDir(fset, path, nil, parser.ParseComments)
+ if err != nil {
+ return err
+ }
+ for _, v := range d {
+ // paths may have multiple packages, like for tests
+ k := gopath.Join(base, path)
+ dict[k] = append(dict[k], v)
+ }
+ }
+ return nil
+ })
+ if err != nil {
+ return err
+ }
+
+ for pkg, p := range dict {
+ for _, f := range p {
+ gtxt := ""
+ typ := ""
+ ast.Inspect(f, func(n ast.Node) bool {
+ switch x := n.(type) {
+ case *ast.TypeSpec:
+ typ = x.Name.String()
+ if !ast.IsExported(typ) {
+ typ = ""
+ } else {
+ txt := x.Doc.Text()
+ if txt == "" && gtxt != "" {
+ txt = gtxt
+ gtxt = ""
+ }
+ txt = doc.Synopsis(txt)
+ commentMap[fmt.Sprintf("%s.%s", pkg, typ)] = strings.TrimSpace(txt)
+ }
+ case *ast.Field:
+ txt := x.Doc.Text()
+ if typ != "" && txt != "" {
+ for _, n := range x.Names {
+ if ast.IsExported(n.String()) {
+ k := fmt.Sprintf("%s.%s.%s", pkg, typ, n)
+ commentMap[k] = strings.TrimSpace(txt)
+ }
+ }
+ }
+ case *ast.GenDecl:
+ // remember for the next type
+ gtxt = x.Doc.Text()
+ }
+ return true
+ })
+ }
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/invopop/jsonschema/id.go b/vendor/github.com/invopop/jsonschema/id.go
new file mode 100644
index 00000000..73fafb38
--- /dev/null
+++ b/vendor/github.com/invopop/jsonschema/id.go
@@ -0,0 +1,76 @@
+package jsonschema
+
+import (
+ "errors"
+ "fmt"
+ "net/url"
+ "strings"
+)
+
+// ID represents a Schema ID type which should always be a URI.
+// See draft-bhutton-json-schema-00 section 8.2.1
+type ID string
+
+// EmptyID is used to explicitly define an ID with no value.
+const EmptyID ID = ""
+
+// Validate is used to check if the ID looks like a proper schema.
+// This is done by parsing the ID as a URL and checking it has all the
+// relevant parts.
+func (id ID) Validate() error {
+ u, err := url.Parse(id.String())
+ if err != nil {
+ return fmt.Errorf("invalid URL: %w", err)
+ }
+ if u.Hostname() == "" {
+ return errors.New("missing hostname")
+ }
+ if !strings.Contains(u.Hostname(), ".") {
+ return errors.New("hostname does not look valid")
+ }
+ if u.Path == "" {
+ return errors.New("path is expected")
+ }
+ if u.Scheme != "https" && u.Scheme != "http" {
+ return errors.New("unexpected schema")
+ }
+ return nil
+}
+
+// Anchor sets the anchor part of the schema URI.
+func (id ID) Anchor(name string) ID {
+ b := id.Base()
+ return ID(b.String() + "#" + name)
+}
+
+// Def adds or replaces a definition identifier.
+func (id ID) Def(name string) ID {
+ b := id.Base()
+ return ID(b.String() + "#/$defs/" + name)
+}
+
+// Add appends the provided path to the id, and removes any
+// anchor data that might be there.
+func (id ID) Add(path string) ID {
+ b := id.Base()
+ if !strings.HasPrefix(path, "/") {
+ path = "/" + path
+ }
+ return ID(b.String() + path)
+}
+
+// Base removes any anchor information from the schema
+func (id ID) Base() ID {
+ s := id.String()
+ i := strings.LastIndex(s, "#")
+ if i != -1 {
+ s = s[0:i]
+ }
+ s = strings.TrimRight(s, "/")
+ return ID(s)
+}
+
+// String provides string version of ID
+func (id ID) String() string {
+ return string(id)
+}
diff --git a/vendor/github.com/invopop/jsonschema/reflect.go b/vendor/github.com/invopop/jsonschema/reflect.go
new file mode 100644
index 00000000..1b6732d2
--- /dev/null
+++ b/vendor/github.com/invopop/jsonschema/reflect.go
@@ -0,0 +1,1070 @@
+// Package jsonschema uses reflection to generate JSON Schemas from Go types [1].
+//
+// If json tags are present on struct fields, they will be used to infer
+// property names and whether a property is required (a field with omitempty set is not required).
+//
+// [1] http://json-schema.org/latest/json-schema-validation.html
+package jsonschema
+
+import (
+ "bytes"
+ "encoding/json"
+ "net"
+ "net/url"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/iancoleman/orderedmap"
+)
+
+// Version is the JSON Schema version.
+var Version = "https://json-schema.org/draft/2020-12/schema"
+
+// Schema represents a JSON Schema object type.
+// RFC draft-bhutton-json-schema-00 section 4.3
+type Schema struct {
+ // RFC draft-bhutton-json-schema-00
+ Version string `json:"$schema,omitempty"` // section 8.1.1
+ ID ID `json:"$id,omitempty"` // section 8.2.1
+ Anchor string `json:"$anchor,omitempty"` // section 8.2.2
+ Ref string `json:"$ref,omitempty"` // section 8.2.3.1
+ DynamicRef string `json:"$dynamicRef,omitempty"` // section 8.2.3.2
+ Definitions Definitions `json:"$defs,omitempty"` // section 8.2.4
+ Comments string `json:"$comment,omitempty"` // section 8.3
+ // RFC draft-bhutton-json-schema-00 section 10.2.1 (Sub-schemas with logic)
+ AllOf []*Schema `json:"allOf,omitempty"` // section 10.2.1.1
+ AnyOf []*Schema `json:"anyOf,omitempty"` // section 10.2.1.2
+ OneOf []*Schema `json:"oneOf,omitempty"` // section 10.2.1.3
+ Not *Schema `json:"not,omitempty"` // section 10.2.1.4
+ // RFC draft-bhutton-json-schema-00 section 10.2.2 (Apply sub-schemas conditionally)
+ If *Schema `json:"if,omitempty"` // section 10.2.2.1
+ Then *Schema `json:"then,omitempty"` // section 10.2.2.2
+ Else *Schema `json:"else,omitempty"` // section 10.2.2.3
+ DependentSchemas map[string]*Schema `json:"dependentSchemas,omitempty"` // section 10.2.2.4
+ // RFC draft-bhutton-json-schema-00 section 10.3.1 (arrays)
+ PrefixItems []*Schema `json:"prefixItems,omitempty"` // section 10.3.1.1
+ Items *Schema `json:"items,omitempty"` // section 10.3.1.2 (replaces additionalItems)
+ Contains *Schema `json:"contains,omitempty"` // section 10.3.1.3
+ // RFC draft-bhutton-json-schema-00 section 10.3.2 (sub-schemas)
+ Properties *orderedmap.OrderedMap `json:"properties,omitempty"` // section 10.3.2.1
+ PatternProperties map[string]*Schema `json:"patternProperties,omitempty"` // section 10.3.2.2
+ AdditionalProperties *Schema `json:"additionalProperties,omitempty"` // section 10.3.2.3
+ PropertyNames *Schema `json:"propertyNames,omitempty"` // section 10.3.2.4
+ // RFC draft-bhutton-json-schema-validation-00, section 6
+ Type string `json:"type,omitempty"` // section 6.1.1
+ Enum []interface{} `json:"enum,omitempty"` // section 6.1.2
+ Const interface{} `json:"const,omitempty"` // section 6.1.3
+ MultipleOf int `json:"multipleOf,omitempty"` // section 6.2.1
+ Maximum int `json:"maximum,omitempty"` // section 6.2.2
+ ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"` // section 6.2.3
+ Minimum int `json:"minimum,omitempty"` // section 6.2.4
+ ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"` // section 6.2.5
+ MaxLength int `json:"maxLength,omitempty"` // section 6.3.1
+ MinLength int `json:"minLength,omitempty"` // section 6.3.2
+ Pattern string `json:"pattern,omitempty"` // section 6.3.3
+ MaxItems int `json:"maxItems,omitempty"` // section 6.4.1
+ MinItems int `json:"minItems,omitempty"` // section 6.4.2
+ UniqueItems bool `json:"uniqueItems,omitempty"` // section 6.4.3
+ MaxContains uint `json:"maxContains,omitempty"` // section 6.4.4
+ MinContains uint `json:"minContains,omitempty"` // section 6.4.5
+ MaxProperties int `json:"maxProperties,omitempty"` // section 6.5.1
+ MinProperties int `json:"minProperties,omitempty"` // section 6.5.2
+ Required []string `json:"required,omitempty"` // section 6.5.3
+ DependentRequired map[string][]string `json:"dependentRequired,omitempty"` // section 6.5.4
+ // RFC draft-bhutton-json-schema-validation-00, section 7
+ Format string `json:"format,omitempty"`
+ // RFC draft-bhutton-json-schema-validation-00, section 8
+ ContentEncoding string `json:"contentEncoding,omitempty"` // section 8.3
+ ContentMediaType string `json:"contentMediaType,omitempty"` // section 8.4
+ ContentSchema *Schema `json:"contentSchema,omitempty"` // section 8.5
+ // RFC draft-bhutton-json-schema-validation-00, section 9
+ Title string `json:"title,omitempty"` // section 9.1
+ Description string `json:"description,omitempty"` // section 9.1
+ Default interface{} `json:"default,omitempty"` // section 9.2
+ Deprecated bool `json:"deprecated,omitempty"` // section 9.3
+ ReadOnly bool `json:"readOnly,omitempty"` // section 9.4
+ WriteOnly bool `json:"writeOnly,omitempty"` // section 9.4
+ Examples []interface{} `json:"examples,omitempty"` // section 9.5
+
+ Extras map[string]interface{} `json:"-"`
+
+ // Special boolean representation of the Schema - section 4.3.2
+ boolean *bool
+}
+
+var (
+ // TrueSchema defines a schema with a true value
+ TrueSchema = &Schema{boolean: &[]bool{true}[0]}
+ // FalseSchema defines a schema with a false value
+ FalseSchema = &Schema{boolean: &[]bool{false}[0]}
+)
+
+// customSchemaImpl is used to detect if the type provides its own
+// custom Schema Type definition to use instead. Very useful for situations
+// where there are custom JSON Marshal and Unmarshal methods.
+type customSchemaImpl interface {
+ JSONSchema() *Schema
+}
+
+var customType = reflect.TypeOf((*customSchemaImpl)(nil)).Elem()
+
+// customSchemaGetFieldDocString
+type customSchemaGetFieldDocString interface {
+ GetFieldDocString(fieldName string) string
+}
+
+type customGetFieldDocString func(fieldName string) string
+
+var customStructGetFieldDocString = reflect.TypeOf((*customSchemaGetFieldDocString)(nil)).Elem()
+
+// Reflect reflects to Schema from a value using the default Reflector
+func Reflect(v interface{}) *Schema {
+ return ReflectFromType(reflect.TypeOf(v))
+}
+
+// ReflectFromType generates root schema using the default Reflector
+func ReflectFromType(t reflect.Type) *Schema {
+ r := &Reflector{}
+ return r.ReflectFromType(t)
+}
+
+// A Reflector reflects values into a Schema.
+type Reflector struct {
+ // BaseSchemaID defines the URI that will be used as a base to determine Schema
+ // IDs for models. For example, a base Schema ID of `https://invopop.com/schemas`
+ // when defined with a struct called `User{}`, will result in a schema with an
+ // ID set to `https://invopop.com/schemas/user`.
+ //
+ // If no `BaseSchemaID` is provided, we'll take the type's complete package path
+// and use that as a base instead. Set `Anonymous` to true if you do not want to
+ // include a schema ID.
+ BaseSchemaID ID
+
+ // Anonymous when true will hide the auto-generated Schema ID and provide what is
+ // known as an "anonymous schema". As a rule, this is not recommended.
+ Anonymous bool
+
+ // AssignAnchor when true will use the original struct's name as an anchor inside
+ // every definition, including the root schema. These can be useful for having a
+ // reference to the original struct's name in CamelCase instead of the snake-case used
+ // by default for URI compatibility.
+ //
+ // Anchors do not appear to be widely used out in the wild, so at this time the
+ // anchors themselves will not be used inside generated schema.
+ AssignAnchor bool
+
+ // AllowAdditionalProperties will cause the Reflector to generate a schema
+ // without additionalProperties set to 'false' for all struct types. This means
+ // the presence of additional keys in JSON objects will not cause validation
+ // to fail. Note said additional keys will simply be dropped when the
+ // validated JSON is unmarshaled.
+ AllowAdditionalProperties bool
+
+ // RequiredFromJSONSchemaTags will cause the Reflector to generate a schema
+ // that requires any key tagged with `jsonschema:required`, overriding the
+ // default of requiring any key *not* tagged with `json:,omitempty`.
+ RequiredFromJSONSchemaTags bool
+
+ // Do not reference definitions. This will remove the top-level $defs map and
+ // instead cause the entire structure of types to be output in one tree. The
+ // list of type definitions (`$defs`) will not be included.
+ DoNotReference bool
+
+ // ExpandedStruct when true will include the reflected type's definition in the
+ // root as opposed to a definition with a reference.
+ ExpandedStruct bool
+
+ // IgnoredTypes defines a slice of types that should be ignored in the schema,
+ // switching to just allowing additional properties instead.
+ IgnoredTypes []interface{}
+
+ // Lookup allows a function to be defined that will provide a custom mapping of
+ // types to Schema IDs. This allows existing schema documents to be referenced
+ // by their ID instead of being embedded into the current schema definitions.
+ // Reflected types will never be pointers, only underlying elements.
+ Lookup func(reflect.Type) ID
+
+ // Mapper is a function that can be used to map custom Go types to jsonschema schemas.
+ Mapper func(reflect.Type) *Schema
+
+ // Namer allows customizing of type names. The default is to use the type's name
+ // provided by the reflect package.
+ Namer func(reflect.Type) string
+
+ // KeyNamer allows customizing of key names.
+ // The default is to use the key's name as is, or the json tag if present.
+ // If a json tag is present, KeyNamer will receive the tag's name as an argument, not the original key name.
+ KeyNamer func(string) string
+
+	// AdditionalFields allows adding struct fields for a given type
+ AdditionalFields func(reflect.Type) []reflect.StructField
+
+ // CommentMap is a dictionary of fully qualified go types and fields to comment
+ // strings that will be used if a description has not already been provided in
+ // the tags. Types and fields are added to the package path using "." as a
+ // separator.
+ //
+ // Type descriptions should be defined like:
+ //
+ // map[string]string{"github.com/invopop/jsonschema.Reflector": "A Reflector reflects values into a Schema."}
+ //
+ // And Fields defined as:
+ //
+ // map[string]string{"github.com/invopop/jsonschema.Reflector.DoNotReference": "Do not reference definitions."}
+ //
+ // See also: AddGoComments
+ CommentMap map[string]string
+}
+
+// Reflect reflects to Schema from a value.
+func (r *Reflector) Reflect(v interface{}) *Schema {
+ return r.ReflectFromType(reflect.TypeOf(v))
+}
+
+// ReflectFromType generates root schema
+func (r *Reflector) ReflectFromType(t reflect.Type) *Schema {
+ if t.Kind() == reflect.Ptr {
+ t = t.Elem() // re-assign from pointer
+ }
+
+ name := r.typeName(t)
+
+ s := new(Schema)
+ definitions := Definitions{}
+ s.Definitions = definitions
+ bs := r.reflectTypeToSchemaWithID(definitions, t)
+ if r.ExpandedStruct {
+ *s = *definitions[name]
+ delete(definitions, name)
+ } else {
+ *s = *bs
+ }
+
+ // Attempt to set the schema ID
+ if !r.Anonymous && s.ID == EmptyID {
+ baseSchemaID := r.BaseSchemaID
+ if baseSchemaID == EmptyID {
+ id := ID("https://" + t.PkgPath())
+ if err := id.Validate(); err == nil {
+ // it's okay to silently ignore URL errors
+ baseSchemaID = id
+ }
+ }
+ if baseSchemaID != EmptyID {
+ s.ID = baseSchemaID.Add(ToSnakeCase(name))
+ }
+ }
+
+ s.Version = Version
+ if !r.DoNotReference {
+ s.Definitions = definitions
+ }
+
+ return s
+}
+
+// Definitions hold schema definitions.
+// http://json-schema.org/latest/json-schema-validation.html#rfc.section.5.26
+// RFC draft-wright-json-schema-validation-00, section 5.26
+type Definitions map[string]*Schema
+
+// Available Go defined types for JSON Schema Validation.
+// RFC draft-wright-json-schema-validation-00, section 7.3
+var (
+ timeType = reflect.TypeOf(time.Time{}) // date-time RFC section 7.3.1
+ ipType = reflect.TypeOf(net.IP{}) // ipv4 and ipv6 RFC section 7.3.4, 7.3.5
+ uriType = reflect.TypeOf(url.URL{}) // uri RFC section 7.3.6
+)
+
+// Byte slices will be encoded as base64
+var byteSliceType = reflect.TypeOf([]byte(nil))
+
+// Except for json.RawMessage
+var rawMessageType = reflect.TypeOf(json.RawMessage{})
+
+// Go code generated from protobuf enum types should fulfil this interface.
+type protoEnum interface {
+ EnumDescriptor() ([]byte, []int)
+}
+
+var protoEnumType = reflect.TypeOf((*protoEnum)(nil)).Elem()
+
+// SetBaseSchemaID is a helper used to set the reflector's base
+// schema ID from a string, as opposed to an ID instance.
+func (r *Reflector) SetBaseSchemaID(id string) {
+ r.BaseSchemaID = ID(id)
+}
+
+func (r *Reflector) refOrReflectTypeToSchema(definitions Definitions, t reflect.Type) *Schema {
+ id := r.lookupID(t)
+ if id != EmptyID {
+ return &Schema{
+ Ref: id.String(),
+ }
+ }
+
+ // Already added to definitions?
+ if def := r.refDefinition(definitions, t); def != nil {
+ return def
+ }
+
+ return r.reflectTypeToSchemaWithID(definitions, t)
+}
+
+func (r *Reflector) reflectTypeToSchemaWithID(defs Definitions, t reflect.Type) *Schema {
+ s := r.reflectTypeToSchema(defs, t)
+ if s != nil {
+ if r.Lookup != nil {
+ id := r.Lookup(t)
+ if id != EmptyID {
+ s.ID = id
+ }
+ }
+ }
+ return s
+}
+
+func (r *Reflector) reflectTypeToSchema(definitions Definitions, t reflect.Type) *Schema {
+ // only try to reflect non-pointers
+ if t.Kind() == reflect.Ptr {
+ return r.refOrReflectTypeToSchema(definitions, t.Elem())
+ }
+
+ // Do any pre-definitions exist?
+ if r.Mapper != nil {
+ if t := r.Mapper(t); t != nil {
+ return t
+ }
+ }
+ if rt := r.reflectCustomSchema(definitions, t); rt != nil {
+ return rt
+ }
+
+ // Prepare a base to which details can be added
+ st := new(Schema)
+
+ // jsonpb will marshal protobuf enum options as either strings or integers.
+ // It will unmarshal either.
+ if t.Implements(protoEnumType) {
+ st.OneOf = []*Schema{
+ {Type: "string"},
+ {Type: "integer"},
+ }
+ return st
+ }
+
+ // Defined format types for JSON Schema Validation
+ // RFC draft-wright-json-schema-validation-00, section 7.3
+ // TODO email RFC section 7.3.2, hostname RFC section 7.3.3, uriref RFC section 7.3.7
+ if t == ipType {
+ // TODO differentiate ipv4 and ipv6 RFC section 7.3.4, 7.3.5
+ st.Type = "string"
+ st.Format = "ipv4"
+ return st
+ }
+
+ switch t.Kind() {
+ case reflect.Struct:
+ r.reflectStruct(definitions, t, st)
+
+ case reflect.Slice, reflect.Array:
+ r.reflectSliceOrArray(definitions, t, st)
+
+ case reflect.Map:
+ r.reflectMap(definitions, t, st)
+
+ case reflect.Interface:
+ // empty
+
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
+ reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ st.Type = "integer"
+
+ case reflect.Float32, reflect.Float64:
+ st.Type = "number"
+
+ case reflect.Bool:
+ st.Type = "boolean"
+
+ case reflect.String:
+ st.Type = "string"
+
+ default:
+ panic("unsupported type " + t.String())
+ }
+
+ // Always try to reference the definition which may have just been created
+ if def := r.refDefinition(definitions, t); def != nil {
+ return def
+ }
+
+ return st
+}
+
+func (r *Reflector) reflectCustomSchema(definitions Definitions, t reflect.Type) *Schema {
+ if t.Kind() == reflect.Ptr {
+ return r.reflectCustomSchema(definitions, t.Elem())
+ }
+
+ if t.Implements(customType) {
+ v := reflect.New(t)
+ o := v.Interface().(customSchemaImpl)
+ st := o.JSONSchema()
+ r.addDefinition(definitions, t, st)
+ if ref := r.refDefinition(definitions, t); ref != nil {
+ return ref
+ }
+ return st
+ }
+
+ return nil
+}
+
+func (r *Reflector) reflectSliceOrArray(definitions Definitions, t reflect.Type, st *Schema) {
+ if t == rawMessageType {
+ return
+ }
+
+ r.addDefinition(definitions, t, st)
+
+ if st.Description == "" {
+ st.Description = r.lookupComment(t, "")
+ }
+
+ if t.Kind() == reflect.Array {
+ st.MinItems = t.Len()
+ st.MaxItems = st.MinItems
+ }
+ if t.Kind() == reflect.Slice && t.Elem() == byteSliceType.Elem() {
+ st.Type = "string"
+ // NOTE: ContentMediaType is not set here
+ st.ContentEncoding = "base64"
+ } else {
+ st.Type = "array"
+ st.Items = r.refOrReflectTypeToSchema(definitions, t.Elem())
+ }
+}
+
+func (r *Reflector) reflectMap(definitions Definitions, t reflect.Type, st *Schema) {
+ r.addDefinition(definitions, t, st)
+
+ st.Type = "object"
+ if st.Description == "" {
+ st.Description = r.lookupComment(t, "")
+ }
+
+ switch t.Key().Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ st.PatternProperties = map[string]*Schema{
+ "^[0-9]+$": r.refOrReflectTypeToSchema(definitions, t.Elem()),
+ }
+ st.AdditionalProperties = FalseSchema
+ return
+ }
+ if t.Elem().Kind() != reflect.Interface {
+ st.PatternProperties = map[string]*Schema{
+ ".*": r.refOrReflectTypeToSchema(definitions, t.Elem()),
+ }
+ }
+}
+
+// Reflects a struct to a JSON Schema type.
+func (r *Reflector) reflectStruct(definitions Definitions, t reflect.Type, s *Schema) {
+ // Handle special types
+ switch t {
+ case timeType: // date-time RFC section 7.3.1
+ s.Type = "string"
+ s.Format = "date-time"
+ return
+ case uriType: // uri RFC section 7.3.6
+ s.Type = "string"
+ s.Format = "uri"
+ return
+ }
+
+ r.addDefinition(definitions, t, s)
+ s.Type = "object"
+ s.Properties = orderedmap.New()
+ s.Description = r.lookupComment(t, "")
+ if r.AssignAnchor {
+ s.Anchor = t.Name()
+ }
+ if !r.AllowAdditionalProperties {
+ s.AdditionalProperties = FalseSchema
+ }
+
+ ignored := false
+ for _, it := range r.IgnoredTypes {
+ if reflect.TypeOf(it) == t {
+ ignored = true
+ break
+ }
+ }
+ if !ignored {
+ r.reflectStructFields(s, definitions, t)
+ }
+}
+
+func (r *Reflector) reflectStructFields(st *Schema, definitions Definitions, t reflect.Type) {
+ if t.Kind() == reflect.Ptr {
+ t = t.Elem()
+ }
+ if t.Kind() != reflect.Struct {
+ return
+ }
+
+ var getFieldDocString customGetFieldDocString
+ if t.Implements(customStructGetFieldDocString) {
+ v := reflect.New(t)
+ o := v.Interface().(customSchemaGetFieldDocString)
+ getFieldDocString = o.GetFieldDocString
+ }
+
+ handleField := func(f reflect.StructField) {
+ name, shouldEmbed, required, nullable := r.reflectFieldName(f)
+ // if anonymous and exported type should be processed recursively
+		// an anonymous, exported type should be processed recursively;
+		// the current type should inherit the properties of the anonymous one
+ if shouldEmbed {
+ r.reflectStructFields(st, definitions, f.Type)
+ }
+ return
+ }
+
+ property := r.refOrReflectTypeToSchema(definitions, f.Type)
+ property.structKeywordsFromTags(f, st, name)
+ if property.Description == "" {
+ property.Description = r.lookupComment(t, f.Name)
+ }
+ if getFieldDocString != nil {
+ property.Description = getFieldDocString(f.Name)
+ }
+
+ if nullable {
+ property = &Schema{
+ OneOf: []*Schema{
+ property,
+ {
+ Type: "null",
+ },
+ },
+ }
+ }
+
+ st.Properties.Set(name, property)
+ if required {
+ st.Required = appendUniqueString(st.Required, name)
+ }
+ }
+
+ for i := 0; i < t.NumField(); i++ {
+ f := t.Field(i)
+ handleField(f)
+ }
+ if r.AdditionalFields != nil {
+ if af := r.AdditionalFields(t); af != nil {
+ for _, sf := range af {
+ handleField(sf)
+ }
+ }
+ }
+}
+
+func appendUniqueString(base []string, value string) []string {
+ for _, v := range base {
+ if v == value {
+ return base
+ }
+ }
+ return append(base, value)
+}
+
+func (r *Reflector) lookupComment(t reflect.Type, name string) string {
+ if r.CommentMap == nil {
+ return ""
+ }
+
+ n := fullyQualifiedTypeName(t)
+ if name != "" {
+ n = n + "." + name
+ }
+
+ return r.CommentMap[n]
+}
+
+// addDefinition will append the provided schema. If needed, an ID and anchor will also be added.
+func (r *Reflector) addDefinition(definitions Definitions, t reflect.Type, s *Schema) {
+ name := r.typeName(t)
+ if name == "" {
+ return
+ }
+ definitions[name] = s
+}
+
+// refDefinition will provide a schema with a reference to an existing definition.
+func (r *Reflector) refDefinition(definitions Definitions, t reflect.Type) *Schema {
+ if r.DoNotReference {
+ return nil
+ }
+ name := r.typeName(t)
+ if name == "" {
+ return nil
+ }
+ if _, ok := definitions[name]; !ok {
+ return nil
+ }
+ return &Schema{
+ Ref: "#/$defs/" + name,
+ }
+}
+
+func (r *Reflector) lookupID(t reflect.Type) ID {
+ if r.Lookup != nil {
+ if t.Kind() == reflect.Ptr {
+ t = t.Elem()
+ }
+ return r.Lookup(t)
+
+ }
+ return EmptyID
+}
+
+func (t *Schema) structKeywordsFromTags(f reflect.StructField, parent *Schema, propertyName string) {
+ t.Description = f.Tag.Get("jsonschema_description")
+
+ tags := splitOnUnescapedCommas(f.Tag.Get("jsonschema"))
+ t.genericKeywords(tags, parent, propertyName)
+
+ switch t.Type {
+ case "string":
+ t.stringKeywords(tags)
+ case "number":
+ t.numbericKeywords(tags)
+ case "integer":
+ t.numbericKeywords(tags)
+ case "array":
+ t.arrayKeywords(tags)
+ case "boolean":
+ t.booleanKeywords(tags)
+ }
+ extras := strings.Split(f.Tag.Get("jsonschema_extras"), ",")
+ t.extraKeywords(extras)
+}
+
+// read struct tags for generic keywords
+func (t *Schema) genericKeywords(tags []string, parent *Schema, propertyName string) {
+ for _, tag := range tags {
+ nameValue := strings.Split(tag, "=")
+ if len(nameValue) == 2 {
+ name, val := nameValue[0], nameValue[1]
+ switch name {
+ case "title":
+ t.Title = val
+ case "description":
+ t.Description = val
+ case "type":
+ t.Type = val
+ case "anchor":
+ t.Anchor = val
+ case "oneof_required":
+ var typeFound *Schema
+ for i := range parent.OneOf {
+ if parent.OneOf[i].Title == nameValue[1] {
+ typeFound = parent.OneOf[i]
+ }
+ }
+ if typeFound == nil {
+ typeFound = &Schema{
+ Title: nameValue[1],
+ Required: []string{},
+ }
+ parent.OneOf = append(parent.OneOf, typeFound)
+ }
+ typeFound.Required = append(typeFound.Required, propertyName)
+ case "oneof_type":
+ if t.OneOf == nil {
+ t.OneOf = make([]*Schema, 0, 1)
+ }
+ t.Type = ""
+ types := strings.Split(nameValue[1], ";")
+ for _, ty := range types {
+ t.OneOf = append(t.OneOf, &Schema{
+ Type: ty,
+ })
+ }
+ case "enum":
+ switch t.Type {
+ case "string":
+ t.Enum = append(t.Enum, val)
+ case "integer":
+ i, _ := strconv.Atoi(val)
+ t.Enum = append(t.Enum, i)
+ case "number":
+ f, _ := strconv.ParseFloat(val, 64)
+ t.Enum = append(t.Enum, f)
+ }
+ }
+ }
+ }
+}
+
+// read struct tags for boolean type keywords
+func (t *Schema) booleanKeywords(tags []string) {
+ for _, tag := range tags {
+ nameValue := strings.Split(tag, "=")
+ if len(nameValue) != 2 {
+ continue
+ }
+ name, val := nameValue[0], nameValue[1]
+ if name == "default" {
+ if val == "true" {
+ t.Default = true
+ } else if val == "false" {
+ t.Default = false
+ }
+ }
+ }
+}
+
+// read struct tags for string type keywords
+func (t *Schema) stringKeywords(tags []string) {
+ for _, tag := range tags {
+ nameValue := strings.Split(tag, "=")
+ if len(nameValue) == 2 {
+ name, val := nameValue[0], nameValue[1]
+ switch name {
+ case "minLength":
+ i, _ := strconv.Atoi(val)
+ t.MinLength = i
+ case "maxLength":
+ i, _ := strconv.Atoi(val)
+ t.MaxLength = i
+ case "pattern":
+ t.Pattern = val
+ case "format":
+ switch val {
+ case "date-time", "email", "hostname", "ipv4", "ipv6", "uri", "uuid":
+ t.Format = val
+ break
+ }
+ case "readOnly":
+ i, _ := strconv.ParseBool(val)
+ t.ReadOnly = i
+ case "writeOnly":
+ i, _ := strconv.ParseBool(val)
+ t.WriteOnly = i
+ case "default":
+ t.Default = val
+ case "example":
+ t.Examples = append(t.Examples, val)
+ }
+ }
+ }
+}
+
+// read struct tags for numeric type keywords
+func (t *Schema) numbericKeywords(tags []string) {
+ for _, tag := range tags {
+ nameValue := strings.Split(tag, "=")
+ if len(nameValue) == 2 {
+ name, val := nameValue[0], nameValue[1]
+ switch name {
+ case "multipleOf":
+ i, _ := strconv.Atoi(val)
+ t.MultipleOf = i
+ case "minimum":
+ i, _ := strconv.Atoi(val)
+ t.Minimum = i
+ case "maximum":
+ i, _ := strconv.Atoi(val)
+ t.Maximum = i
+ case "exclusiveMaximum":
+ b, _ := strconv.ParseBool(val)
+ t.ExclusiveMaximum = b
+ case "exclusiveMinimum":
+ b, _ := strconv.ParseBool(val)
+ t.ExclusiveMinimum = b
+ case "default":
+ i, _ := strconv.Atoi(val)
+ t.Default = i
+ case "example":
+ if i, err := strconv.Atoi(val); err == nil {
+ t.Examples = append(t.Examples, i)
+ }
+ }
+ }
+ }
+}
+
+// read struct tags for object type keywords
+// func (t *Type) objectKeywords(tags []string) {
+// for _, tag := range tags{
+// nameValue := strings.Split(tag, "=")
+// name, val := nameValue[0], nameValue[1]
+// switch name{
+// case "dependencies":
+// t.Dependencies = val
+// break;
+// case "patternProperties":
+// t.PatternProperties = val
+// break;
+// }
+// }
+// }
+
+// read struct tags for array type keywords
+func (t *Schema) arrayKeywords(tags []string) {
+ var defaultValues []interface{}
+ for _, tag := range tags {
+ nameValue := strings.Split(tag, "=")
+ if len(nameValue) == 2 {
+ name, val := nameValue[0], nameValue[1]
+ switch name {
+ case "minItems":
+ i, _ := strconv.Atoi(val)
+ t.MinItems = i
+ case "maxItems":
+ i, _ := strconv.Atoi(val)
+ t.MaxItems = i
+ case "uniqueItems":
+ t.UniqueItems = true
+ case "default":
+ defaultValues = append(defaultValues, val)
+ case "enum":
+ switch t.Items.Type {
+ case "string":
+ t.Items.Enum = append(t.Items.Enum, val)
+ case "integer":
+ i, _ := strconv.Atoi(val)
+ t.Items.Enum = append(t.Items.Enum, i)
+ case "number":
+ f, _ := strconv.ParseFloat(val, 64)
+ t.Items.Enum = append(t.Items.Enum, f)
+ }
+ case "format":
+ t.Items.Format = val
+ }
+ }
+ }
+ if len(defaultValues) > 0 {
+ t.Default = defaultValues
+ }
+}
+
+func (t *Schema) extraKeywords(tags []string) {
+ for _, tag := range tags {
+ nameValue := strings.Split(tag, "=")
+ if len(nameValue) == 2 {
+ t.setExtra(nameValue[0], nameValue[1])
+ }
+ }
+}
+
+func (t *Schema) setExtra(key, val string) {
+ if t.Extras == nil {
+ t.Extras = map[string]interface{}{}
+ }
+ if existingVal, ok := t.Extras[key]; ok {
+ switch existingVal := existingVal.(type) {
+ case string:
+ t.Extras[key] = []string{existingVal, val}
+ case []string:
+ t.Extras[key] = append(existingVal, val)
+ case int:
+ t.Extras[key], _ = strconv.Atoi(val)
+ }
+ } else {
+ switch key {
+ case "minimum":
+ t.Extras[key], _ = strconv.Atoi(val)
+ default:
+ t.Extras[key] = val
+ }
+ }
+}
+
+func requiredFromJSONTags(tags []string) bool {
+ if ignoredByJSONTags(tags) {
+ return false
+ }
+
+ for _, tag := range tags[1:] {
+ if tag == "omitempty" {
+ return false
+ }
+ }
+ return true
+}
+
+func requiredFromJSONSchemaTags(tags []string) bool {
+ if ignoredByJSONSchemaTags(tags) {
+ return false
+ }
+ for _, tag := range tags {
+ if tag == "required" {
+ return true
+ }
+ }
+ return false
+}
+
+func nullableFromJSONSchemaTags(tags []string) bool {
+ if ignoredByJSONSchemaTags(tags) {
+ return false
+ }
+ for _, tag := range tags {
+ if tag == "nullable" {
+ return true
+ }
+ }
+ return false
+}
+
+func ignoredByJSONTags(tags []string) bool {
+ return tags[0] == "-"
+}
+
+func ignoredByJSONSchemaTags(tags []string) bool {
+ return tags[0] == "-"
+}
+
+func (r *Reflector) reflectFieldName(f reflect.StructField) (string, bool, bool, bool) {
+ jsonTagString, _ := f.Tag.Lookup("json")
+ jsonTags := strings.Split(jsonTagString, ",")
+
+ if ignoredByJSONTags(jsonTags) {
+ return "", false, false, false
+ }
+
+ schemaTags := strings.Split(f.Tag.Get("jsonschema"), ",")
+ if ignoredByJSONSchemaTags(schemaTags) {
+ return "", false, false, false
+ }
+
+ required := requiredFromJSONTags(jsonTags)
+ if r.RequiredFromJSONSchemaTags {
+ required = requiredFromJSONSchemaTags(schemaTags)
+ }
+
+ nullable := nullableFromJSONSchemaTags(schemaTags)
+
+ if f.Anonymous && jsonTags[0] == "" {
+ // As per JSON Marshal rules, anonymous structs are inherited
+ if f.Type.Kind() == reflect.Struct {
+ return "", true, false, false
+ }
+ }
+
+ // Try to determine the name from the different combos
+ name := f.Name
+ if jsonTags[0] != "" {
+ name = jsonTags[0]
+ }
+ if !f.Anonymous && f.PkgPath != "" {
+		// field is not anonymous and not exported, so it has no exported name
+ name = ""
+ } else if r.KeyNamer != nil {
+ name = r.KeyNamer(name)
+ }
+
+ return name, false, required, nullable
+}
+
+// UnmarshalJSON is used to parse a schema object or boolean.
+func (t *Schema) UnmarshalJSON(data []byte) error {
+ if bytes.Equal(data, []byte("true")) {
+ *t = *TrueSchema
+ return nil
+ } else if bytes.Equal(data, []byte("false")) {
+ *t = *FalseSchema
+ return nil
+ }
+ type Schema_ Schema
+ aux := &struct {
+ *Schema_
+ }{
+ Schema_: (*Schema_)(t),
+ }
+ return json.Unmarshal(data, aux)
+}
+
+func (t *Schema) MarshalJSON() ([]byte, error) {
+ if t.boolean != nil {
+ if *t.boolean {
+ return []byte("true"), nil
+ } else {
+ return []byte("false"), nil
+ }
+ }
+ if reflect.DeepEqual(&Schema{}, t) {
+ // Don't bother returning empty schemas
+ return []byte("true"), nil
+ }
+ type Schema_ Schema
+ b, err := json.Marshal((*Schema_)(t))
+ if err != nil {
+ return nil, err
+ }
+ if t.Extras == nil || len(t.Extras) == 0 {
+ return b, nil
+ }
+ m, err := json.Marshal(t.Extras)
+ if err != nil {
+ return nil, err
+ }
+ if len(b) == 2 {
+ return m, nil
+ }
+ b[len(b)-1] = ','
+ return append(b, m[1:]...), nil
+}
+
+func (r *Reflector) typeName(t reflect.Type) string {
+ if r.Namer != nil {
+ if name := r.Namer(t); name != "" {
+ return name
+ }
+ }
+ return t.Name()
+}
+
+// Split on commas that are not preceded by `\`.
+// This way, we prevent splitting regexes
+func splitOnUnescapedCommas(tagString string) []string {
+ ret := make([]string, 0)
+ separated := strings.Split(tagString, ",")
+ ret = append(ret, separated[0])
+ i := 0
+ for _, nextTag := range separated[1:] {
+ if len(ret[i]) == 0 {
+ ret = append(ret, nextTag)
+ i++
+ continue
+ }
+
+ if ret[i][len(ret[i])-1] == '\\' {
+ ret[i] = ret[i][:len(ret[i])-1] + "," + nextTag
+ } else {
+ ret = append(ret, nextTag)
+ i++
+ }
+ }
+
+ return ret
+}
+
+func fullyQualifiedTypeName(t reflect.Type) string {
+ return t.PkgPath() + "." + t.Name()
+}
+
+// AddGoComments will update the reflectors comment map with all the comments
+// found in the provided source directories. See the #ExtractGoComments method
+// for more details.
+func (r *Reflector) AddGoComments(base, path string) error {
+ if r.CommentMap == nil {
+ r.CommentMap = make(map[string]string)
+ }
+ return ExtractGoComments(base, path, r.CommentMap)
+}
diff --git a/vendor/github.com/invopop/jsonschema/utils.go b/vendor/github.com/invopop/jsonschema/utils.go
new file mode 100644
index 00000000..9813b11c
--- /dev/null
+++ b/vendor/github.com/invopop/jsonschema/utils.go
@@ -0,0 +1,18 @@
+package jsonschema
+
+import (
+ "regexp"
+ "strings"
+)
+
+var matchFirstCap = regexp.MustCompile("(.)([A-Z][a-z]+)")
+var matchAllCap = regexp.MustCompile("([a-z0-9])([A-Z])")
+
+// ToSnakeCase converts the provided string into snake case using dashes.
+// This is useful for Schema IDs and definitions to be coherent with
+// common JSON Schema examples.
+func ToSnakeCase(str string) string {
+ snake := matchFirstCap.ReplaceAllString(str, "${1}-${2}")
+ snake = matchAllCap.ReplaceAllString(snake, "${1}-${2}")
+ return strings.ToLower(snake)
+}
diff --git a/vendor/github.com/lithammer/fuzzysearch/LICENSE b/vendor/github.com/lithammer/fuzzysearch/LICENSE
new file mode 100644
index 00000000..dee3d1de
--- /dev/null
+++ b/vendor/github.com/lithammer/fuzzysearch/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2018 Peter Lithammer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/lithammer/fuzzysearch/fuzzy/fuzzy.go b/vendor/github.com/lithammer/fuzzysearch/fuzzy/fuzzy.go
new file mode 100644
index 00000000..7ae7091f
--- /dev/null
+++ b/vendor/github.com/lithammer/fuzzysearch/fuzzy/fuzzy.go
@@ -0,0 +1,273 @@
+// Fuzzy searching allows for flexibly matching a string with partial input,
+// useful for filtering data very quickly based on lightweight user input.
+package fuzzy
+
+import (
+ "bytes"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/text/runes"
+ "golang.org/x/text/transform"
+ "golang.org/x/text/unicode/norm"
+)
+
+func noopTransformer() transform.Transformer {
+ return nopTransformer{}
+}
+
+func foldTransformer() transform.Transformer {
+ return unicodeFoldTransformer{}
+}
+
+func normalizeTransformer() transform.Transformer {
+ return transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC)
+}
+
+func normalizedFoldTransformer() transform.Transformer {
+ return transform.Chain(normalizeTransformer(), foldTransformer())
+}
+
+// Match returns true if source matches target using a fuzzy-searching
+// algorithm. Note that it doesn't implement Levenshtein distance (see
+// RankMatch instead), but rather a simplified version where there's no
+// approximation. The method will return true only if each character in the
+// source can be found in the target and occurs after the preceding matches.
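+// For example, Match("pine", "pineapple") is true, while Match("pine", "paint")
+// is false because no "e" occurs after the matched "n".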
+func Match(source, target string) bool {
+ return match(source, target, noopTransformer())
+}
+
+// MatchFold is a case-insensitive version of Match.
+func MatchFold(source, target string) bool {
+ return match(source, target, foldTransformer())
+}
+
+// MatchNormalized is a unicode-normalized version of Match.
+func MatchNormalized(source, target string) bool {
+ return match(source, target, normalizeTransformer())
+}
+
+// MatchNormalizedFold is a unicode-normalized and case-insensitive version of Match.
+func MatchNormalizedFold(source, target string) bool {
+ return match(source, target, normalizedFoldTransformer())
+}
+
+func match(source, target string, transformer transform.Transformer) bool {
+ source = stringTransform(source, transformer)
+ target = stringTransform(target, transformer)
+
+ lenDiff := len(target) - len(source)
+
+ if lenDiff < 0 {
+ return false
+ }
+
+ if lenDiff == 0 && source == target {
+ return true
+ }
+
+Outer:
+ for _, r1 := range source {
+ for i, r2 := range target {
+ if r1 == r2 {
+ target = target[i+utf8.RuneLen(r2):]
+ continue Outer
+ }
+ }
+ return false
+ }
+
+ return true
+}
+
+// Find will return a list of strings in targets that fuzzy-match source.
+func Find(source string, targets []string) []string {
+ return find(source, targets, noopTransformer())
+}
+
+// FindFold is a case-insensitive version of Find.
+func FindFold(source string, targets []string) []string {
+ return find(source, targets, foldTransformer())
+}
+
+// FindNormalized is a unicode-normalized version of Find.
+func FindNormalized(source string, targets []string) []string {
+ return find(source, targets, normalizeTransformer())
+}
+
+// FindNormalizedFold is a unicode-normalized and case-insensitive version of Find.
+func FindNormalizedFold(source string, targets []string) []string {
+ return find(source, targets, normalizedFoldTransformer())
+}
+
+func find(source string, targets []string, transformer transform.Transformer) []string {
+ var matches []string
+
+ for _, target := range targets {
+ if match(source, target, transformer) {
+ matches = append(matches, target)
+ }
+ }
+
+ return matches
+}
+
+// RankMatch is similar to Match except it will measure the Levenshtein
+// distance between the source and the target and return its result. If there
+// was no match, it will return -1.
+// Given the requirements of match, RankMatch only needs to perform a subset of
+// the Levenshtein calculation: only deletions need to be considered, since
+// required additions and substitutions would fail the match test.
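+// For example, RankMatch("tor", "torrent") returns 4, the number of characters
+// of "torrent" that would have to be deleted to obtain "tor".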
+func RankMatch(source, target string) int {
+ return rank(source, target, noopTransformer())
+}
+
+// RankMatchFold is a case-insensitive version of RankMatch.
+func RankMatchFold(source, target string) int {
+ return rank(source, target, foldTransformer())
+}
+
+// RankMatchNormalized is a unicode-normalized version of RankMatch.
+func RankMatchNormalized(source, target string) int {
+ return rank(source, target, normalizeTransformer())
+}
+
+// RankMatchNormalizedFold is a unicode-normalized and case-insensitive version of RankMatch.
+func RankMatchNormalizedFold(source, target string) int {
+ return rank(source, target, normalizedFoldTransformer())
+}
+
+func rank(source, target string, transformer transform.Transformer) int {
+ lenDiff := len(target) - len(source)
+
+ if lenDiff < 0 {
+ return -1
+ }
+
+ source = stringTransform(source, transformer)
+ target = stringTransform(target, transformer)
+
+ if lenDiff == 0 && source == target {
+ return 0
+ }
+
+ runeDiff := 0
+
+Outer:
+ for _, r1 := range source {
+ for i, r2 := range target {
+ if r1 == r2 {
+ target = target[i+utf8.RuneLen(r2):]
+ continue Outer
+ } else {
+ runeDiff++
+ }
+ }
+ return -1
+ }
+
+ // Count up remaining char
+ runeDiff += utf8.RuneCountInString(target)
+
+ return runeDiff
+}
+
+// RankFind is similar to Find, except it will also rank all matches using
+// Levenshtein distance.
+func RankFind(source string, targets []string) Ranks {
+ return rankFind(source, targets, noopTransformer())
+}
+
+// RankFindFold is a case-insensitive version of RankFind.
+func RankFindFold(source string, targets []string) Ranks {
+ return rankFind(source, targets, foldTransformer())
+}
+
+// RankFindNormalized is a unicode-normalized version of RankFind.
+func RankFindNormalized(source string, targets []string) Ranks {
+ return rankFind(source, targets, normalizeTransformer())
+}
+
+// RankFindNormalizedFold is a unicode-normalized and case-insensitive version of RankFind.
+func RankFindNormalizedFold(source string, targets []string) Ranks {
+ return rankFind(source, targets, normalizedFoldTransformer())
+}
+
+func rankFind(source string, targets []string, transformer transform.Transformer) Ranks {
+ var r Ranks
+
+ for index, target := range targets {
+ if match(source, target, transformer) {
+ distance := LevenshteinDistance(source, target)
+ r = append(r, Rank{source, target, distance, index})
+ }
+ }
+ return r
+}
+
+type Rank struct {
+ // Source is used as the source for matching.
+ Source string
+
+ // Target is the word matched against.
+ Target string
+
+ // Distance is the Levenshtein distance between Source and Target.
+ Distance int
+
+ // Location of Target in original list
+ OriginalIndex int
+}
+
+type Ranks []Rank
+
+func (r Ranks) Len() int {
+ return len(r)
+}
+
+func (r Ranks) Swap(i, j int) {
+ r[i], r[j] = r[j], r[i]
+}
+
+func (r Ranks) Less(i, j int) bool {
+ return r[i].Distance < r[j].Distance
+}
+
+func stringTransform(s string, t transform.Transformer) (transformed string) {
+ // Fast path for the nop transformer to prevent unnecessary allocations.
+ if _, ok := t.(nopTransformer); ok {
+ return s
+ }
+
+ var err error
+ transformed, _, err = transform.String(t, s)
+ if err != nil {
+ transformed = s
+ }
+
+ return
+}
+
+type unicodeFoldTransformer struct{ transform.NopResetter }
+
+func (unicodeFoldTransformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
+ runes := bytes.Runes(src)
+ var lowerRunes []rune
+ for _, r := range runes {
+ lowerRunes = append(lowerRunes, unicode.ToLower(r))
+ }
+
+ srcBytes := []byte(string(lowerRunes))
+ n := copy(dst, srcBytes)
+ if n < len(srcBytes) {
+ err = transform.ErrShortDst
+ }
+
+ return n, n, err
+}
+
+type nopTransformer struct{ transform.NopResetter }
+
+func (nopTransformer) Transform(dst []byte, src []byte, atEOF bool) (int, int, error) {
+ return 0, len(src), nil
+}
diff --git a/vendor/github.com/lithammer/fuzzysearch/fuzzy/levenshtein.go b/vendor/github.com/lithammer/fuzzysearch/fuzzy/levenshtein.go
new file mode 100644
index 00000000..4fb5838c
--- /dev/null
+++ b/vendor/github.com/lithammer/fuzzysearch/fuzzy/levenshtein.go
@@ -0,0 +1,43 @@
+package fuzzy
+
+// LevenshteinDistance measures the difference between two strings.
+// The Levenshtein distance between two words is the minimum number of
+// single-character edits (i.e. insertions, deletions or substitutions)
+// required to change one word into the other.
+//
+// This implementation is optimized to use O(min(m,n)) space and is based on the
+// optimized C version found here:
+// http://en.wikibooks.org/wiki/Algorithm_implementation/Strings/Levenshtein_distance#C
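+// For example, LevenshteinDistance("kitten", "sitting") returns 3.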
+func LevenshteinDistance(s, t string) int {
+ r1, r2 := []rune(s), []rune(t)
+ column := make([]int, 1, 64)
+
+ for y := 1; y <= len(r1); y++ {
+ column = append(column, y)
+ }
+
+ for x := 1; x <= len(r2); x++ {
+ column[0] = x
+
+ for y, lastDiag := 1, x-1; y <= len(r1); y++ {
+ oldDiag := column[y]
+ cost := 0
+ if r1[y-1] != r2[x-1] {
+ cost = 1
+ }
+ column[y] = min(column[y]+1, column[y-1]+1, lastDiag+cost)
+ lastDiag = oldDiag
+ }
+ }
+
+ return column[len(r1)]
+}
+
+func min(a, b, c int) int {
+ if a < b && a < c {
+ return a
+ } else if b < c {
+ return b
+ }
+ return c
+}
diff --git a/vendor/github.com/metafates/mangal-lua-libs/http/client/client.go b/vendor/github.com/metafates/mangal-lua-libs/http/client/client.go
index a8dbfb43..6a13057e 100644
--- a/vendor/github.com/metafates/mangal-lua-libs/http/client/client.go
+++ b/vendor/github.com/metafates/mangal-lua-libs/http/client/client.go
@@ -17,9 +17,9 @@ import (
const (
// default http User Agent
- DefaultUserAgent = `gopher-lua`
+ DefaultUserAgent = `Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36`
// default http timeout
- DefaultTimeout = 20 * time.Second
+ DefaultTimeout = 2 * time.Minute
// default don't ignore ssl
insecureSkipVerify = false
)
diff --git a/vendor/github.com/samber/mo/.gitignore b/vendor/github.com/samber/mo/.gitignore
new file mode 100644
index 00000000..3aa3a0ad
--- /dev/null
+++ b/vendor/github.com/samber/mo/.gitignore
@@ -0,0 +1,36 @@
+
+# Created by https://www.toptal.com/developers/gitignore/api/go
+# Edit at https://www.toptal.com/developers/gitignore?templates=go
+
+### Go ###
+# If you prefer the allow list template instead of the deny list, see community template:
+# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
+#
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Dependency directories (remove the comment below to include it)
+# vendor/
+
+# Go workspace file
+go.work
+
+### Go Patch ###
+/vendor/
+/Godeps/
+
+# End of https://www.toptal.com/developers/gitignore/api/go
+
+cover.out
+cover.html
+.vscode
diff --git a/vendor/github.com/samber/mo/Dockerfile b/vendor/github.com/samber/mo/Dockerfile
new file mode 100644
index 00000000..ceb17087
--- /dev/null
+++ b/vendor/github.com/samber/mo/Dockerfile
@@ -0,0 +1,8 @@
+
+FROM golang:1.18-bullseye
+
+WORKDIR /go/src/github.com/samber/mo
+
+COPY Makefile go.* ./
+
+RUN make tools
diff --git a/vendor/github.com/samber/mo/LICENSE b/vendor/github.com/samber/mo/LICENSE
new file mode 100644
index 00000000..c3dc72d9
--- /dev/null
+++ b/vendor/github.com/samber/mo/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 Samuel Berthe
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/samber/mo/Makefile b/vendor/github.com/samber/mo/Makefile
new file mode 100644
index 00000000..de1ac20e
--- /dev/null
+++ b/vendor/github.com/samber/mo/Makefile
@@ -0,0 +1,46 @@
+
+BIN=go
+
+build:
+ ${BIN} build -v ./...
+
+test:
+ go test -v ./...
+watch-test:
+ reflex -t 50ms -s -- sh -c 'gotest -v ./...'
+
+bench:
+ go test -benchmem -count 3 -bench ./...
+watch-bench:
+ reflex -t 50ms -s -- sh -c 'go test -benchmem -count 3 -bench ./...'
+
+coverage:
+ ${BIN} test -v -coverprofile=cover.out -covermode=atomic .
+ ${BIN} tool cover -html=cover.out -o cover.html
+
+# tools
+tools:
+ ${BIN} install github.com/cespare/reflex@latest
+ ${BIN} install github.com/rakyll/gotest@latest
+ ${BIN} install github.com/psampaz/go-mod-outdated@latest
+ ${BIN} install github.com/jondot/goweight@latest
+ ${BIN} install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
+ ${BIN} get -t -u golang.org/x/tools/cmd/cover
+ ${BIN} get -t -u github.com/sonatype-nexus-community/nancy@latest
+ go mod tidy
+
+lint:
+ golangci-lint run --timeout 60s --max-same-issues 50 ./...
+lint-fix:
+ golangci-lint run --timeout 60s --max-same-issues 50 --fix ./...
+
+audit:
+ ${BIN} mod tidy
+ ${BIN} list -json -m all | nancy sleuth
+
+outdated:
+ ${BIN} mod tidy
+ ${BIN} list -u -m -json all | go-mod-outdated -update -direct
+
+weight:
+ goweight
diff --git a/vendor/github.com/samber/mo/README.md b/vendor/github.com/samber/mo/README.md
new file mode 100644
index 00000000..b8e2e162
--- /dev/null
+++ b/vendor/github.com/samber/mo/README.md
@@ -0,0 +1,374 @@
+# mo - Monads
+
+[![tag](https://img.shields.io/github/tag/samber/mo.svg)](https://github.com/samber/mo/releases)
+[![GoDoc](https://godoc.org/github.com/samber/mo?status.svg)](https://pkg.go.dev/github.com/samber/mo)
+![Build Status](https://github.com/samber/mo/actions/workflows/go.yml/badge.svg)
+[![Go report](https://goreportcard.com/badge/github.com/samber/mo)](https://goreportcard.com/report/github.com/samber/mo)
+[![codecov](https://codecov.io/gh/samber/mo/branch/master/graph/badge.svg)](https://codecov.io/gh/samber/mo)
+
+🦄 **`samber/mo` brings monads and popular FP abstractions to Go projects. `samber/mo` uses the recent Go 1.18+ Generics.**
+
+**Inspired by:**
+
+- Scala
+- Rust
+- FP-TS
+
+**See also:**
+
+- [samber/lo](https://github.com/samber/lo): A Lodash-style Go library based on Go 1.18+ Generics
+- [samber/do](https://github.com/samber/do): A dependency injection toolkit based on Go 1.18+ Generics
+
+**Why this name?**
+
+I love a **short name** for such a utility library. The name is similar to "Monad Go", and no Go package currently uses it.
+
+## 💡 Features
+
+We currently support the following data types:
+
+- `Option[T]` (Maybe)
+- `Result[T]`
+- `Either[A, B]`
+- `EitherX[T1, ..., TX]` (With X between 3 and 5)
+- `Future[T]`
+- `IO[T]`
+- `IOEither[T]`
+- `Task[T]`
+- `TaskEither[T]`
+- `State[S, A]`
+
+## 🚀 Install
+
+```sh
+go get github.com/samber/mo@v1
+```
+
+This library is v1 and follows SemVer strictly.
+
+No breaking changes will be made to exported APIs before v2.0.0.
+
+## 💡 Quick start
+
+You can import `mo` using:
+
+```go
+import (
+ "github.com/samber/mo"
+)
+```
+
+Then use one of the helpers below:
+
+```go
+option1 := mo.Some(42)
+// Some(42)
+
+option1.
+ FlatMap(func (value int) Option[int] {
+ return Some(value*2)
+ }).
+ FlatMap(func (value int) Option[int] {
+ return Some(value%2)
+ }).
+ FlatMap(func (value int) Option[int] {
+ return Some(value+21)
+ }).
+ OrElse(1234)
+// 21
+
+option2 := mo.None[int]()
+// None
+
+option2.OrElse(1234)
+// 1234
+
+option3 := option1.Match(
+ func(i int) (int, bool) {
+ // when value is present
+ return i * 2, true
+ },
+ func() (int, bool) {
+ // when value is absent
+ return 0, false
+ },
+)
+// Some(42)
+```
+
+More examples in [documentation](https://godoc.org/github.com/samber/mo).
+
+## 🤠 Documentation and examples
+
+[GoDoc: https://godoc.org/github.com/samber/mo](https://godoc.org/github.com/samber/mo)
+
+### Option[T any]
+
+`Option` is a container for an optional value of type `T`. If a value is present, `Option` is of type `Some`. If the value is absent, `Option` is of type `None`.
+
+Constructors:
+
+- `mo.Some()` [doc](https://pkg.go.dev/github.com/samber/mo#Some) - [play](https://go.dev/play/p/iqz2n9n0tDM)
+- `mo.None()` [doc](https://pkg.go.dev/github.com/samber/mo#None) - [play](https://go.dev/play/p/yYQPsYCSYlD)
+- `mo.TupleToOption()` [doc](https://pkg.go.dev/github.com/samber/mo#TupleToOption) - [play](https://go.dev/play/p/gkrg2pZwOty)
+- `mo.EmptyableToOption()` [doc](https://pkg.go.dev/github.com/samber/mo#EmptyableToOption) - [play](https://go.dev/play/p/GSpQQ-q-UES)
+
+Methods:
+
+- `.IsPresent()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.IsPresent) - [play](https://go.dev/play/p/nDqIaiihyCA)
+- `.IsAbsent()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.IsAbsent) - [play](https://go.dev/play/p/23e2zqyVOQm)
+- `.Size()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.Size) - [play](https://go.dev/play/p/7ixCNG1E9l7)
+- `.Get()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.Get) - [play](https://go.dev/play/p/0-JBa1usZRT)
+- `.MustGet()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.MustGet) - [play](https://go.dev/play/p/RVBckjdi5WR)
+- `.OrElse()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.OrElse) - [play](https://go.dev/play/p/TrGByFWCzXS)
+- `.OrEmpty()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.OrEmpty) - [play](https://go.dev/play/p/SpSUJcE-tQm)
+- `.ForEach()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.ForEach)
+- `.Match()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.Match) - [play](https://go.dev/play/p/1V6st3LDJsM)
+- `.Map()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.Map) - [play](https://go.dev/play/p/mvfP3pcP_eJ)
+- `.MapNone()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.MapNone) - [play](https://go.dev/play/p/_KaHWZ6Q17b)
+- `.FlatMap()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.FlatMap) - [play](https://go.dev/play/p/OXO-zJx6n5r)
+- `.MarshalJSON()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.MarshalJSON)
+- `.UnmarshalJSON()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.UnmarshalJSON)
+- `.MarshalText()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.MarshalText)
+- `.UnmarshalText()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.UnmarshalText)
+- `.MarshalBinary()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.MarshalBinary)
+- `.UnmarshalBinary()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.UnmarshalBinary)
+- `.GobEncode()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.GobEncode)
+- `.GobDecode()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.GobDecode)
+- `.Scan()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.Scan)
+- `.Value()` [doc](https://pkg.go.dev/github.com/samber/mo#Option.Value)
+
+### Result[T any]
+
+`Result` represents the result of an action that either succeeds or fails. An instance of `Result` is an instance of either `Ok` or `Err`. It can be compared to `Either[error, T]`.
+
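+A minimal added sketch (not from the upstream docs; it assumes `Ok`, `Err`, `IsOk`, `IsError` and `OrElse` have the signatures implied by the lists below):
+
+```go
+ok := mo.Ok(42)
+ok.IsOk()    // true
+ok.OrElse(0) // 42
+
+ko := mo.Err[int](fmt.Errorf("boom"))
+ko.IsError() // true
+ko.OrElse(0) // 0
+```
+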
+Constructors:
+
+- `mo.Ok()` [doc](https://pkg.go.dev/github.com/samber/mo#Ok) - [play](https://go.dev/play/p/PDwADdzNoyZ)
+- `mo.Err()` [doc](https://pkg.go.dev/github.com/samber/mo#Err) - [play](https://go.dev/play/p/PDwADdzNoyZ)
+- `mo.TupleToResult()` [doc](https://pkg.go.dev/github.com/samber/mo#TupleToResult) - [play](https://go.dev/play/p/KWjfqQDHQwa)
+- `mo.Try()` [doc](https://pkg.go.dev/github.com/samber/mo#Try) - [play](https://go.dev/play/p/ilOlQx-Mx42)
+
+Methods:
+
+- `.IsOk()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.IsOk) - [play](https://go.dev/play/p/sfNvBQyZfgU)
+- `.IsError()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.IsError) - [play](https://go.dev/play/p/xkV9d464scV)
+- `.Error()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.Error) - [play](https://go.dev/play/p/CSkHGTyiXJ5)
+- `.Get()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.Get) - [play](https://go.dev/play/p/8KyX3z6TuNo)
+- `.MustGet()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.MustGet) - [play](https://go.dev/play/p/8LSlndHoTAE)
+- `.OrElse()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.OrElse) - [play](https://go.dev/play/p/MN_ULx0soi6)
+- `.OrEmpty()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.OrEmpty) - [play](https://go.dev/play/p/rdKtBmOcMLh)
+- `.ToEither()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.ToEither) - [play](https://go.dev/play/p/Uw1Zz6b952q)
+- `.ForEach()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.ForEach)
+- `.Match()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.Match) - [play](https://go.dev/play/p/-_eFaLJ31co)
+- `.Map()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.Map) - [play](https://go.dev/play/p/-ndpN_b_OSc)
+- `.MapErr()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.MapErr) - [play](https://go.dev/play/p/WraZixg9GGf)
+- `.FlatMap()` [doc](https://pkg.go.dev/github.com/samber/mo#Result.FlatMap) - [play](https://go.dev/play/p/Ud5QjZOqg-7)
+
+### Either[L any, R any]
+
+`Either` represents a value of 2 possible types. An instance of `Either` is an instance of either `L` or `R`.
+
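+An added illustration (using the constructors and methods listed below):
+
+```go
+e := mo.Right[error, int](42)
+e.IsRight()      // true
+e.RightOrElse(0) // 42
+e.LeftOrEmpty()  // nil
+```
+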
+Constructors:
+
+- `mo.Left()` [doc](https://pkg.go.dev/github.com/samber/mo#Left)
+- `mo.Right()` [doc](https://pkg.go.dev/github.com/samber/mo#Right)
+
+Methods:
+
+- `.IsLeft()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.IsLeft)
+- `.IsRight()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.IsRight)
+- `.Left()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.Left)
+- `.Right()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.Right)
+- `.MustLeft()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.MustLeft)
+- `.MustRight()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.MustRight)
+- `.LeftOrElse()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.LeftOrElse)
+- `.RightOrElse()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.RightOrElse)
+- `.LeftOrEmpty()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.LeftOrEmpty)
+- `.RightOrEmpty()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.RightOrEmpty)
+- `.Swap()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.Swap)
+- `.ForEach()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.ForEach)
+- `.Match()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.Match)
+- `.MapLeft()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.MapLeft)
+- `.MapRight()` [doc](https://pkg.go.dev/github.com/samber/mo#Either.MapRight)
+
+### EitherX[T1, ..., TX] (With X between 3 and 5)
+
+`EitherX` represents a value of X possible types. For example, an `Either3` value is either `T1`, `T2` or `T3`.
+
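+An added illustration with `Either3` (using the constructors and accessors listed below):
+
+```go
+v := mo.NewEither3Arg2[int, string, bool]("hello")
+v.IsArg2()        // true
+v.Arg2OrElse("?") // "hello"
+v.Arg1OrEmpty()   // 0
+```
+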
+Constructors:
+
+- `mo.NewEitherXArgY()` [doc](https://pkg.go.dev/github.com/samber/mo#NewEither5Arg1). Eg:
+ - `mo.NewEither3Arg1[A, B, C](A)`
+ - `mo.NewEither3Arg2[A, B, C](B)`
+ - `mo.NewEither3Arg3[A, B, C](C)`
+ - `mo.NewEither4Arg1[A, B, C, D](A)`
+ - `mo.NewEither4Arg2[A, B, C, D](B)`
+ - ...
+
+Methods:
+
+- `.IsArgX()` [doc](https://pkg.go.dev/github.com/samber/mo#Either5.IsArg1)
+- `.ArgX()` [doc](https://pkg.go.dev/github.com/samber/mo#Either5.Arg1)
+- `.MustArgX()` [doc](https://pkg.go.dev/github.com/samber/mo#Either5.MustArg1)
+- `.ArgXOrElse()` [doc](https://pkg.go.dev/github.com/samber/mo#Either5.Arg1OrElse)
+- `.ArgXOrEmpty()` [doc](https://pkg.go.dev/github.com/samber/mo#Either5.Arg1OrEmpty)
+- `.ForEach()` [doc](https://pkg.go.dev/github.com/samber/mo#Either5.ForEach)
+- `.Match()` [doc](https://pkg.go.dev/github.com/samber/mo#Either5.Match)
+- `.MapArgX()` [doc](https://pkg.go.dev/github.com/samber/mo#Either5.MapArg1)
+
+### Future[T any]
+
+`Future` represents a value which may or may not currently be available, but will be available at some point, or an exception if that value could not be made available.
+
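+A rough added sketch (result collection via `.Collect()`/`.Result()` is omitted here; see the methods listed below):
+
+```go
+f := mo.NewFuture(func(resolve func(int), reject func(error)) {
+    // ... asynchronous work, then either:
+    resolve(42)
+    // or: reject(err)
+})
+
+f.Then(func(v int) (int, error) {
+    return v * 2, nil
+}).Catch(func(err error) (int, error) {
+    return 0, err
+})
+```
+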
+Constructors:
+
+- `mo.NewFuture()` [doc](https://pkg.go.dev/github.com/samber/mo#NewFuture)
+
+Methods:
+
+- `.Then()` [doc](https://pkg.go.dev/github.com/samber/mo#Future.Then)
+- `.Catch()` [doc](https://pkg.go.dev/github.com/samber/mo#Future.Catch)
+- `.Finally()` [doc](https://pkg.go.dev/github.com/samber/mo#Future.Finally)
+- `.Collect()` [doc](https://pkg.go.dev/github.com/samber/mo#Future.Collect)
+- `.Result()` [doc](https://pkg.go.dev/github.com/samber/mo#Future.Result)
+- `.Cancel()` [doc](https://pkg.go.dev/github.com/samber/mo#Future.Cancel)
+
+### IO[T any]
+
+`IO` represents a non-deterministic synchronous computation that can cause side effects, yields a value of type `R` and never fails.
+
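+For example (an added sketch, assuming `NewIO` wraps a `func() T` and `Run()` evaluates it):
+
+```go
+io := mo.NewIO(func() int {
+    // side effects happen here
+    return 42
+})
+
+io.Run() // 42
+```
+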
+Constructors:
+
+- `mo.NewIO()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIO)
+- `mo.NewIO1()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIO1)
+- `mo.NewIO2()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIO2)
+- `mo.NewIO3()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIO3)
+- `mo.NewIO4()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIO4)
+- `mo.NewIO5()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIO5)
+
+Methods:
+
+- `.Run()` [doc](https://pkg.go.dev/github.com/samber/mo#IO.Run)
+
+### IOEither[T any]
+
+`IOEither` represents a non-deterministic synchronous computation that can cause side effects, yields a value of type `R` and can fail.
+
+Constructors:
+
+- `mo.NewIOEither()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIOEither)
+- `mo.NewIOEither1()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIOEither1)
+- `mo.NewIOEither2()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIOEither2)
+- `mo.NewIOEither3()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIOEither3)
+- `mo.NewIOEither4()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIOEither4)
+- `mo.NewIOEither5()` [doc](https://pkg.go.dev/github.com/samber/mo#NewIOEither5)
+
+Methods:
+
+- `.Run()` [doc](https://pkg.go.dev/github.com/samber/mo#IOEither.Run)
+
+### Task[T any]
+
+`Task` represents a non-deterministic asynchronous computation that can cause side effects, yields a value of type `R` and never fails.
+
+Constructors:
+
+- `mo.NewTask()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTask)
+- `mo.NewTask1()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTask1)
+- `mo.NewTask2()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTask2)
+- `mo.NewTask3()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTask3)
+- `mo.NewTask4()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTask4)
+- `mo.NewTask5()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTask5)
+- `mo.NewTaskFromIO()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTaskFromIO)
+- `mo.NewTaskFromIO1()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTaskFromIO1)
+- `mo.NewTaskFromIO2()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTaskFromIO2)
+- `mo.NewTaskFromIO3()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTaskFromIO3)
+- `mo.NewTaskFromIO4()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTaskFromIO4)
+- `mo.NewTaskFromIO5()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTaskFromIO5)
+
+Methods:
+
+- `.Run()` [doc](https://pkg.go.dev/github.com/samber/mo#Task.Run)
+
+### TaskEither[T any]
+
+`TaskEither` represents a non-deterministic asynchronous computation that can cause side effects, yields a value of type `R` and can fail.
+
+Constructors:
+
+- `mo.NewTaskEither()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTaskEither)
+- `mo.NewTaskEitherFromIOEither()` [doc](https://pkg.go.dev/github.com/samber/mo#NewTaskEitherFromIOEither)
+
+Methods:
+
+- `.Run()` [doc](https://pkg.go.dev/github.com/samber/mo#TaskEither.Run)
+- `.OrElse()` [doc](https://pkg.go.dev/github.com/samber/mo#TaskEither.OrElse)
+- `.Match()` [doc](https://pkg.go.dev/github.com/samber/mo#TaskEither.Match)
+- `.TryCatch()` [doc](https://pkg.go.dev/github.com/samber/mo#TaskEither.TryCatch)
+- `.ToTask()` [doc](https://pkg.go.dev/github.com/samber/mo#TaskEither.ToTask)
+- `.ToEither()` [doc](https://pkg.go.dev/github.com/samber/mo#TaskEither.ToEither)
+
+### State[S any, A any]
+
+`State` represents a function `(S) -> (A, S)`, where `S` is the state and `A` is the result.
+
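+A hypothetical sketch (assuming `NewState` wraps a `func(S) (A, S)` and `Run` feeds it an initial state):
+
+```go
+counter := mo.NewState(func(s int) (string, int) {
+    return fmt.Sprintf("count=%d", s), s + 1
+})
+
+result, newState := counter.Run(0)
+// result == "count=0", newState == 1
+```
+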
+Constructors:
+
+- `mo.NewState()` [doc](https://pkg.go.dev/github.com/samber/mo#NewState)
+- `mo.ReturnState()` [doc](https://pkg.go.dev/github.com/samber/mo#ReturnState)
+
+Methods:
+
+- `.Run()` [doc](https://pkg.go.dev/github.com/samber/mo#State.Run)
+- `.Get()` [doc](https://pkg.go.dev/github.com/samber/mo#State.Get)
+- `.Modify()` [doc](https://pkg.go.dev/github.com/samber/mo#State.Modify)
+- `.Put()` [doc](https://pkg.go.dev/github.com/samber/mo#State.Put)
+
+## 🛩 Benchmark
+
+// @TODO
+
+This library does not use the `reflect` package, so we don't expect any overhead.
+
+## 🤝 Contributing
+
+- Ping me on twitter [@samuelberthe](https://twitter.com/samuelberthe) (DMs, mentions, whatever :))
+- Fork the [project](https://github.com/samber/mo)
+- Fix [open issues](https://github.com/samber/mo/issues) or request new features
+
+Don't hesitate ;)
+
+### With Docker
+
+```bash
+docker-compose run --rm dev
+```
+
+### Without Docker
+
+```bash
+# Install some dev dependencies
+make tools
+
+# Run tests
+make test
+# or
+make watch-test
+```
+
+## 👤 Authors
+
+- Samuel Berthe
+
+## 💫 Show your support
+
+Give a ⭐️ if this project helped you!
+
+[![support us](https://c5.patreon.com/external/logo/become_a_patron_button.png)](https://www.patreon.com/samber)
+
+## 📝 License
+
+Copyright © 2022 [Samuel Berthe](https://github.com/samber).
+
+This project is [MIT](./LICENSE) licensed.
diff --git a/vendor/github.com/samber/mo/docker-compose.yml b/vendor/github.com/samber/mo/docker-compose.yml
new file mode 100644
index 00000000..51393361
--- /dev/null
+++ b/vendor/github.com/samber/mo/docker-compose.yml
@@ -0,0 +1,9 @@
+version: '3'
+
+services:
+ dev:
+ build: .
+ volumes:
+ - ./:/go/src/github.com/samber/mo
+ working_dir: /go/src/github.com/samber/mo
+ command: bash -c 'make tools ; make watch-test'
diff --git a/vendor/github.com/samber/mo/either.go b/vendor/github.com/samber/mo/either.go
new file mode 100644
index 00000000..f3aadeb8
--- /dev/null
+++ b/vendor/github.com/samber/mo/either.go
@@ -0,0 +1,163 @@
+package mo
+
+import "fmt"
+
+var eitherShouldBeLeftOrRight = fmt.Errorf("either should be Left or Right")
+var eitherMissingLeftValue = fmt.Errorf("no such Left value")
+var eitherMissingRightValue = fmt.Errorf("no such Right value")
+
+// Left builds the left side of the Either struct, as opposed to the Right side.
+func Left[L any, R any](value L) Either[L, R] {
+ return Either[L, R]{
+ isLeft: true,
+ left: value,
+ }
+}
+
+// Right builds the right side of the Either struct, as opposed to the Left side.
+func Right[L any, R any](value R) Either[L, R] {
+ return Either[L, R]{
+ isLeft: false,
+ right: value,
+ }
+}
+
+// Either represents a value of 2 possible types.
+// An instance of Either is an instance of either L or R.
+type Either[L any, R any] struct {
+ isLeft bool
+
+ left L
+ right R
+}
+
+// IsLeft returns true if Either is an instance of Left.
+func (e Either[L, R]) IsLeft() bool {
+ return e.isLeft
+}
+
+// IsRight returns true if Either is an instance of Right.
+func (e Either[L, R]) IsRight() bool {
+ return !e.isLeft
+}
+
+// Left returns left value of a Either struct.
+func (e Either[L, R]) Left() (L, bool) {
+ if e.IsLeft() {
+ return e.left, true
+ }
+ return empty[L](), false
+}
+
+// Right returns right value of a Either struct.
+func (e Either[L, R]) Right() (R, bool) {
+ if e.IsRight() {
+ return e.right, true
+ }
+ return empty[R](), false
+}
+
+// MustLeft returns left value of a Either struct or panics.
+func (e Either[L, R]) MustLeft() L {
+ if !e.IsLeft() {
+ panic(eitherMissingLeftValue)
+ }
+
+ return e.left
+}
+
+// MustRight returns right value of a Either struct or panics.
+func (e Either[L, R]) MustRight() R {
+ if !e.IsRight() {
+ panic(eitherMissingRightValue)
+ }
+
+ return e.right
+}
+
+// LeftOrElse returns left value of a Either struct or fallback.
+func (e Either[L, R]) LeftOrElse(fallback L) L {
+ if e.IsLeft() {
+ return e.left
+ }
+
+ return fallback
+}
+
+// RightOrElse returns right value of a Either struct or fallback.
+func (e Either[L, R]) RightOrElse(fallback R) R {
+ if e.IsRight() {
+ return e.right
+ }
+
+ return fallback
+}
+
+// LeftOrEmpty returns left value of a Either struct or empty value.
+func (e Either[L, R]) LeftOrEmpty() L {
+ if e.IsLeft() {
+ return e.left
+ }
+
+ return empty[L]()
+}
+
+// RightOrEmpty returns right value of a Either struct or empty value.
+func (e Either[L, R]) RightOrEmpty() R {
+ if e.IsRight() {
+ return e.right
+ }
+
+ return empty[R]()
+}
+
+// Swap returns the left value in Right and vice versa.
+func (e Either[L, R]) Swap() Either[R, L] {
+ if e.IsLeft() {
+ return Right[R, L](e.left)
+ }
+
+ return Left[R, L](e.right)
+}
+
+// ForEach executes the given side-effecting function, depending on whether the value is Left or Right.
+func (e Either[L, R]) ForEach(leftCb func(L), rightCb func(R)) {
+ if e.IsLeft() {
+ leftCb(e.left)
+ } else if e.IsRight() {
+ rightCb(e.right)
+ }
+}
+
+// Match executes the given function, depending on whether the value is Left or Right, and returns the result.
+func (e Either[L, R]) Match(onLeft func(L) Either[L, R], onRight func(R) Either[L, R]) Either[L, R] {
+ if e.IsLeft() {
+ return onLeft(e.left)
+ } else if e.IsRight() {
+ return onRight(e.right)
+ }
+
+ panic(eitherShouldBeLeftOrRight)
+}
+
+// MapLeft executes the given function if Either is of type Left, and returns the result.
+func (e Either[L, R]) MapLeft(mapper func(L) Either[L, R]) Either[L, R] {
+ if e.IsLeft() {
+ return mapper(e.left)
+ } else if e.IsRight() {
+ return Right[L, R](e.right)
+ }
+
+ panic(eitherShouldBeLeftOrRight)
+}
+
+// MapRight executes the given function if Either is of type Right, and returns the result.
+func (e Either[L, R]) MapRight(mapper func(R) Either[L, R]) Either[L, R] {
+ if e.isLeft {
+ return Left[L, R](e.left)
+ } else if e.IsRight() {
+ return mapper(e.right)
+ }
+
+ panic(eitherShouldBeLeftOrRight)
+}
diff --git a/vendor/github.com/samber/mo/either3.go b/vendor/github.com/samber/mo/either3.go
new file mode 100644
index 00000000..6660a964
--- /dev/null
+++ b/vendor/github.com/samber/mo/either3.go
@@ -0,0 +1,218 @@
+package mo
+
+import "fmt"
+
+const (
+ either3ArgId1 = iota
+ either3ArgId2
+ either3ArgId3
+)
+
+var (
+ either3InvalidArgumentId = fmt.Errorf("either3 argument should be between 1 and 3")
+ either3MissingArg1 = fmt.Errorf("either3 doesn't contain expected argument 1")
+ either3MissingArg2 = fmt.Errorf("either3 doesn't contain expected argument 2")
+ either3MissingArg3 = fmt.Errorf("either3 doesn't contain expected argument 3")
+)
+
+// NewEither3Arg1 builds the first argument of the Either3 struct.
+func NewEither3Arg1[T1 any, T2 any, T3 any](value T1) Either3[T1, T2, T3] {
+ return Either3[T1, T2, T3]{
+ argId: either3ArgId1,
+ arg1: value,
+ }
+}
+
+// NewEither3Arg2 builds the second argument of the Either3 struct.
+func NewEither3Arg2[T1 any, T2 any, T3 any](value T2) Either3[T1, T2, T3] {
+ return Either3[T1, T2, T3]{
+ argId: either3ArgId2,
+ arg2: value,
+ }
+}
+
+// NewEither3Arg3 builds the third argument of the Either3 struct.
+func NewEither3Arg3[T1 any, T2 any, T3 any](value T3) Either3[T1, T2, T3] {
+ return Either3[T1, T2, T3]{
+ argId: either3ArgId3,
+ arg3: value,
+ }
+}
+
+// Either3 represents a value of 3 possible types.
+// An instance of Either3 is an instance of either T1, T2 or T3.
+type Either3[T1 any, T2 any, T3 any] struct {
+ argId int8
+
+ arg1 T1
+ arg2 T2
+ arg3 T3
+}
+
+// IsArg1 returns true if Either3 uses the first argument.
+func (e Either3[T1, T2, T3]) IsArg1() bool {
+ return e.argId == either3ArgId1
+}
+
+// IsArg2 returns true if Either3 uses the second argument.
+func (e Either3[T1, T2, T3]) IsArg2() bool {
+ return e.argId == either3ArgId2
+}
+
+// IsArg3 returns true if Either3 uses the third argument.
+func (e Either3[T1, T2, T3]) IsArg3() bool {
+ return e.argId == either3ArgId3
+}
+
+// Arg1 returns the first argument of a Either3 struct.
+func (e Either3[T1, T2, T3]) Arg1() (T1, bool) {
+ if e.IsArg1() {
+ return e.arg1, true
+ }
+ return empty[T1](), false
+}
+
+// Arg2 returns the second argument of a Either3 struct.
+func (e Either3[T1, T2, T3]) Arg2() (T2, bool) {
+ if e.IsArg2() {
+ return e.arg2, true
+ }
+ return empty[T2](), false
+}
+
+// Arg3 returns the third argument of a Either3 struct.
+func (e Either3[T1, T2, T3]) Arg3() (T3, bool) {
+ if e.IsArg3() {
+ return e.arg3, true
+ }
+ return empty[T3](), false
+}
+
+// MustArg1 returns the first argument of a Either3 struct or panics.
+func (e Either3[T1, T2, T3]) MustArg1() T1 {
+ if !e.IsArg1() {
+ panic(either3MissingArg1)
+ }
+ return e.arg1
+}
+
+// MustArg2 returns the second argument of a Either3 struct or panics.
+func (e Either3[T1, T2, T3]) MustArg2() T2 {
+ if !e.IsArg2() {
+ panic(either3MissingArg2)
+ }
+ return e.arg2
+}
+
+// MustArg3 returns the third argument of a Either3 struct or panics.
+func (e Either3[T1, T2, T3]) MustArg3() T3 {
+ if !e.IsArg3() {
+ panic(either3MissingArg3)
+ }
+ return e.arg3
+}
+
+// Arg1OrElse returns the first argument of a Either3 struct or fallback.
+func (e Either3[T1, T2, T3]) Arg1OrElse(fallback T1) T1 {
+ if e.IsArg1() {
+ return e.arg1
+ }
+ return fallback
+}
+
+// Arg2OrElse returns the second argument of a Either3 struct or fallback.
+func (e Either3[T1, T2, T3]) Arg2OrElse(fallback T2) T2 {
+ if e.IsArg2() {
+ return e.arg2
+ }
+ return fallback
+}
+
+// Arg3OrElse returns the third argument of a Either3 struct or fallback.
+func (e Either3[T1, T2, T3]) Arg3OrElse(fallback T3) T3 {
+ if e.IsArg3() {
+ return e.arg3
+ }
+ return fallback
+}
+
+// Arg1OrEmpty returns the first argument of a Either3 struct or empty value.
+func (e Either3[T1, T2, T3]) Arg1OrEmpty() T1 {
+ if e.IsArg1() {
+ return e.arg1
+ }
+ return empty[T1]()
+}
+
+// Arg2OrEmpty returns the second argument of a Either3 struct or empty value.
+func (e Either3[T1, T2, T3]) Arg2OrEmpty() T2 {
+ if e.IsArg2() {
+ return e.arg2
+ }
+ return empty[T2]()
+}
+
+// Arg3OrEmpty returns the third argument of a Either3 struct or empty value.
+func (e Either3[T1, T2, T3]) Arg3OrEmpty() T3 {
+ if e.IsArg3() {
+ return e.arg3
+ }
+ return empty[T3]()
+}
+
+// ForEach executes the given side-effecting function, depending on which argument is set.
+func (e Either3[T1, T2, T3]) ForEach(arg1Cb func(T1), arg2Cb func(T2), arg3Cb func(T3)) {
+ switch e.argId {
+ case either3ArgId1:
+ arg1Cb(e.arg1)
+ case either3ArgId2:
+ arg2Cb(e.arg2)
+ case either3ArgId3:
+ arg3Cb(e.arg3)
+ }
+}
+
+// Match executes the given function, depending on which argument is set, and returns the result.
+func (e Either3[T1, T2, T3]) Match(
+ onArg1 func(T1) Either3[T1, T2, T3],
+ onArg2 func(T2) Either3[T1, T2, T3],
+ onArg3 func(T3) Either3[T1, T2, T3]) Either3[T1, T2, T3] {
+
+ switch e.argId {
+ case either3ArgId1:
+ return onArg1(e.arg1)
+ case either3ArgId2:
+ return onArg2(e.arg2)
+ case either3ArgId3:
+ return onArg3(e.arg3)
+ }
+
+ panic(either3InvalidArgumentId)
+}
+
+// MapArg1 executes the given function if Either3 uses the first argument, and returns the result.
+func (e Either3[T1, T2, T3]) MapArg1(mapper func(T1) Either3[T1, T2, T3]) Either3[T1, T2, T3] {
+ if e.IsArg1() {
+ return mapper(e.arg1)
+ }
+
+ return e
+}
+
+// MapArg2 executes the given function if Either3 uses the second argument, and returns the result.
+func (e Either3[T1, T2, T3]) MapArg2(mapper func(T2) Either3[T1, T2, T3]) Either3[T1, T2, T3] {
+ if e.IsArg2() {
+ return mapper(e.arg2)
+ }
+
+ return e
+}
+
+// MapArg3 executes the given function if Either3 uses the third argument, and returns the result.
+func (e Either3[T1, T2, T3]) MapArg3(mapper func(T3) Either3[T1, T2, T3]) Either3[T1, T2, T3] {
+ if e.IsArg3() {
+ return mapper(e.arg3)
+ }
+
+ return e
+}
diff --git a/vendor/github.com/samber/mo/either4.go b/vendor/github.com/samber/mo/either4.go
new file mode 100644
index 00000000..fb4cf812
--- /dev/null
+++ b/vendor/github.com/samber/mo/either4.go
@@ -0,0 +1,280 @@
+package mo
+
+import "fmt"
+
+const (
+ either4ArgId1 = iota
+ either4ArgId2
+ either4ArgId3
+ either4ArgId4
+)
+
+var (
+ either4InvalidArgumentId = fmt.Errorf("either4 argument should be between 1 and 4")
+ either4MissingArg1 = fmt.Errorf("either4 doesn't contain expected argument 1")
+ either4MissingArg2 = fmt.Errorf("either4 doesn't contain expected argument 2")
+ either4MissingArg3 = fmt.Errorf("either4 doesn't contain expected argument 3")
+ either4MissingArg4 = fmt.Errorf("either4 doesn't contain expected argument 4")
+)
+
+// NewEither4Arg1 builds the first argument of the Either4 struct.
+func NewEither4Arg1[T1 any, T2 any, T3 any, T4 any](value T1) Either4[T1, T2, T3, T4] {
+ return Either4[T1, T2, T3, T4]{
+ argId: either4ArgId1,
+ arg1: value,
+ }
+}
+
+// NewEither4Arg2 builds the second argument of the Either4 struct.
+func NewEither4Arg2[T1 any, T2 any, T3 any, T4 any](value T2) Either4[T1, T2, T3, T4] {
+ return Either4[T1, T2, T3, T4]{
+ argId: either4ArgId2,
+ arg2: value,
+ }
+}
+
+// NewEither4Arg3 builds the third argument of the Either4 struct.
+func NewEither4Arg3[T1 any, T2 any, T3 any, T4 any](value T3) Either4[T1, T2, T3, T4] {
+ return Either4[T1, T2, T3, T4]{
+ argId: either4ArgId3,
+ arg3: value,
+ }
+}
+
+// NewEither4Arg4 builds the fourth argument of the Either4 struct.
+func NewEither4Arg4[T1 any, T2 any, T3 any, T4 any](value T4) Either4[T1, T2, T3, T4] {
+ return Either4[T1, T2, T3, T4]{
+ argId: either4ArgId4,
+ arg4: value,
+ }
+}
+
+// Either4 represents a value of 4 possible types.
+// An instance of Either4 is an instance of either T1, T2, T3 or T4.
+type Either4[T1 any, T2 any, T3 any, T4 any] struct {
+ argId int8
+
+ arg1 T1
+ arg2 T2
+ arg3 T3
+ arg4 T4
+}
+
+// IsArg1 returns true if Either4 uses the first argument.
+func (e Either4[T1, T2, T3, T4]) IsArg1() bool {
+ return e.argId == either4ArgId1
+}
+
+// IsArg2 returns true if Either4 uses the second argument.
+func (e Either4[T1, T2, T3, T4]) IsArg2() bool {
+ return e.argId == either4ArgId2
+}
+
+// IsArg3 returns true if Either4 uses the third argument.
+func (e Either4[T1, T2, T3, T4]) IsArg3() bool {
+ return e.argId == either4ArgId3
+}
+
+// IsArg4 returns true if Either4 uses the fourth argument.
+func (e Either4[T1, T2, T3, T4]) IsArg4() bool {
+ return e.argId == either4ArgId4
+}
+
+// Arg1 returns the first argument of a Either4 struct.
+func (e Either4[T1, T2, T3, T4]) Arg1() (T1, bool) {
+ if e.IsArg1() {
+ return e.arg1, true
+ }
+ return empty[T1](), false
+}
+
+// Arg2 returns the second argument of a Either4 struct.
+func (e Either4[T1, T2, T3, T4]) Arg2() (T2, bool) {
+ if e.IsArg2() {
+ return e.arg2, true
+ }
+ return empty[T2](), false
+}
+
+// Arg3 returns the third argument of a Either4 struct.
+func (e Either4[T1, T2, T3, T4]) Arg3() (T3, bool) {
+ if e.IsArg3() {
+ return e.arg3, true
+ }
+ return empty[T3](), false
+}
+
+// Arg4 returns the fourth argument of a Either4 struct.
+func (e Either4[T1, T2, T3, T4]) Arg4() (T4, bool) {
+ if e.IsArg4() {
+ return e.arg4, true
+ }
+ return empty[T4](), false
+}
+
+// MustArg1 returns the first argument of a Either4 struct or panics.
+func (e Either4[T1, T2, T3, T4]) MustArg1() T1 {
+ if !e.IsArg1() {
+ panic(either4MissingArg1)
+ }
+ return e.arg1
+}
+
+// MustArg2 returns the second argument of a Either4 struct or panics.
+func (e Either4[T1, T2, T3, T4]) MustArg2() T2 {
+ if !e.IsArg2() {
+ panic(either4MissingArg2)
+ }
+ return e.arg2
+}
+
+// MustArg3 returns the third argument of a Either4 struct or panics.
+func (e Either4[T1, T2, T3, T4]) MustArg3() T3 {
+ if !e.IsArg3() {
+ panic(either4MissingArg3)
+ }
+ return e.arg3
+}
+
+// MustArg4 returns the fourth argument of a Either4 struct or panics.
+func (e Either4[T1, T2, T3, T4]) MustArg4() T4 {
+ if !e.IsArg4() {
+ panic(either4MissingArg4)
+ }
+ return e.arg4
+}
+
+// Arg1OrElse returns the first argument of a Either4 struct or fallback.
+func (e Either4[T1, T2, T3, T4]) Arg1OrElse(fallback T1) T1 {
+ if e.IsArg1() {
+ return e.arg1
+ }
+ return fallback
+}
+
+// Arg2OrElse returns the second argument of a Either4 struct or fallback.
+func (e Either4[T1, T2, T3, T4]) Arg2OrElse(fallback T2) T2 {
+ if e.IsArg2() {
+ return e.arg2
+ }
+ return fallback
+}
+
+// Arg3OrElse returns the third argument of a Either4 struct or fallback.
+func (e Either4[T1, T2, T3, T4]) Arg3OrElse(fallback T3) T3 {
+ if e.IsArg3() {
+ return e.arg3
+ }
+ return fallback
+}
+
+// Arg4OrElse returns the fourth argument of a Either4 struct or fallback.
+func (e Either4[T1, T2, T3, T4]) Arg4OrElse(fallback T4) T4 {
+ if e.IsArg4() {
+ return e.arg4
+ }
+ return fallback
+}
+
+// Arg1OrEmpty returns the first argument of a Either4 struct or empty value.
+func (e Either4[T1, T2, T3, T4]) Arg1OrEmpty() T1 {
+ if e.IsArg1() {
+ return e.arg1
+ }
+ return empty[T1]()
+}
+
+// Arg2OrEmpty returns the second argument of a Either4 struct or empty value.
+func (e Either4[T1, T2, T3, T4]) Arg2OrEmpty() T2 {
+ if e.IsArg2() {
+ return e.arg2
+ }
+ return empty[T2]()
+}
+
+// Arg3OrEmpty returns the third argument of a Either4 struct or empty value.
+func (e Either4[T1, T2, T3, T4]) Arg3OrEmpty() T3 {
+ if e.IsArg3() {
+ return e.arg3
+ }
+ return empty[T3]()
+}
+
+// Arg4OrEmpty returns the fourth argument of a Either4 struct or empty value.
+func (e Either4[T1, T2, T3, T4]) Arg4OrEmpty() T4 {
+ if e.IsArg4() {
+ return e.arg4
+ }
+ return empty[T4]()
+}
+
+// ForEach executes the given side-effecting function, depending on which argument is set.
+func (e Either4[T1, T2, T3, T4]) ForEach(arg1Cb func(T1), arg2Cb func(T2), arg3Cb func(T3), arg4Cb func(T4)) {
+ switch e.argId {
+ case either4ArgId1:
+ arg1Cb(e.arg1)
+ case either4ArgId2:
+ arg2Cb(e.arg2)
+ case either4ArgId3:
+ arg3Cb(e.arg3)
+ case either4ArgId4:
+ arg4Cb(e.arg4)
+ }
+}
+
+// Match executes the given function, depending on which argument is set, and returns the result.
+func (e Either4[T1, T2, T3, T4]) Match(
+ onArg1 func(T1) Either4[T1, T2, T3, T4],
+ onArg2 func(T2) Either4[T1, T2, T3, T4],
+ onArg3 func(T3) Either4[T1, T2, T3, T4],
+ onArg4 func(T4) Either4[T1, T2, T3, T4]) Either4[T1, T2, T3, T4] {
+
+ switch e.argId {
+ case either4ArgId1:
+ return onArg1(e.arg1)
+ case either4ArgId2:
+ return onArg2(e.arg2)
+ case either4ArgId3:
+ return onArg3(e.arg3)
+ case either4ArgId4:
+ return onArg4(e.arg4)
+ }
+
+ panic(either4InvalidArgumentId)
+}
+
+// MapArg1 executes the given function if Either4 uses the first argument, and returns the result.
+func (e Either4[T1, T2, T3, T4]) MapArg1(mapper func(T1) Either4[T1, T2, T3, T4]) Either4[T1, T2, T3, T4] {
+ if e.IsArg1() {
+ return mapper(e.arg1)
+ }
+
+ return e
+}
+
+// MapArg2 executes the given function if Either4 uses the second argument, and returns the result.
+func (e Either4[T1, T2, T3, T4]) MapArg2(mapper func(T2) Either4[T1, T2, T3, T4]) Either4[T1, T2, T3, T4] {
+ if e.IsArg2() {
+ return mapper(e.arg2)
+ }
+
+ return e
+}
+
+// MapArg3 executes the given function if Either4 uses the third argument, and returns the result.
+func (e Either4[T1, T2, T3, T4]) MapArg3(mapper func(T3) Either4[T1, T2, T3, T4]) Either4[T1, T2, T3, T4] {
+ if e.IsArg3() {
+ return mapper(e.arg3)
+ }
+
+ return e
+}
+
+// MapArg4 executes the given function if Either4 uses the fourth argument, and returns the result.
+func (e Either4[T1, T2, T3, T4]) MapArg4(mapper func(T4) Either4[T1, T2, T3, T4]) Either4[T1, T2, T3, T4] {
+ if e.IsArg4() {
+ return mapper(e.arg4)
+ }
+
+ return e
+}
diff --git a/vendor/github.com/samber/mo/either5.go b/vendor/github.com/samber/mo/either5.go
new file mode 100644
index 00000000..1eadc073
--- /dev/null
+++ b/vendor/github.com/samber/mo/either5.go
@@ -0,0 +1,342 @@
+package mo
+
+import "fmt"
+
+const (
+ either5ArgId1 = iota
+ either5ArgId2
+ either5ArgId3
+ either5ArgId4
+ either5ArgId5
+)
+
+var (
+ either5InvalidArgumentId = fmt.Errorf("either5 argument should be between 1 and 5")
+ either5MissingArg1 = fmt.Errorf("either5 doesn't contain expected argument 1")
+ either5MissingArg2 = fmt.Errorf("either5 doesn't contain expected argument 2")
+ either5MissingArg3 = fmt.Errorf("either5 doesn't contain expected argument 3")
+ either5MissingArg4 = fmt.Errorf("either5 doesn't contain expected argument 4")
+ either5MissingArg5 = fmt.Errorf("either5 doesn't contain expected argument 5")
+)
+
+// NewEither5Arg1 builds the first argument of the Either5 struct.
+func NewEither5Arg1[T1 any, T2 any, T3 any, T4 any, T5 any](value T1) Either5[T1, T2, T3, T4, T5] {
+ return Either5[T1, T2, T3, T4, T5]{
+ argId: either5ArgId1,
+ arg1: value,
+ }
+}
+
+// NewEither5Arg2 builds the second argument of the Either5 struct.
+func NewEither5Arg2[T1 any, T2 any, T3 any, T4 any, T5 any](value T2) Either5[T1, T2, T3, T4, T5] {
+ return Either5[T1, T2, T3, T4, T5]{
+ argId: either5ArgId2,
+ arg2: value,
+ }
+}
+
+// NewEither5Arg3 builds the third argument of the Either5 struct.
+func NewEither5Arg3[T1 any, T2 any, T3 any, T4 any, T5 any](value T3) Either5[T1, T2, T3, T4, T5] {
+ return Either5[T1, T2, T3, T4, T5]{
+ argId: either5ArgId3,
+ arg3: value,
+ }
+}
+
+// NewEither5Arg4 builds the fourth argument of the Either5 struct.
+func NewEither5Arg4[T1 any, T2 any, T3 any, T4 any, T5 any](value T4) Either5[T1, T2, T3, T4, T5] {
+ return Either5[T1, T2, T3, T4, T5]{
+ argId: either5ArgId4,
+ arg4: value,
+ }
+}
+
+// NewEither5Arg5 builds the fifth argument of the Either5 struct.
+func NewEither5Arg5[T1 any, T2 any, T3 any, T4 any, T5 any](value T5) Either5[T1, T2, T3, T4, T5] {
+ return Either5[T1, T2, T3, T4, T5]{
+ argId: either5ArgId5,
+ arg5: value,
+ }
+}
+
+// Either5 represents a value of 5 possible types.
+// An instance of Either5 is an instance of either T1, T2, T3, T4, or T5.
+type Either5[T1 any, T2 any, T3 any, T4 any, T5 any] struct {
+ argId int8
+
+ arg1 T1
+ arg2 T2
+ arg3 T3
+ arg4 T4
+ arg5 T5
+}
+
+// IsArg1 returns true if Either5 uses the first argument.
+func (e Either5[T1, T2, T3, T4, T5]) IsArg1() bool {
+ return e.argId == either5ArgId1
+}
+
+// IsArg2 returns true if Either5 uses the second argument.
+func (e Either5[T1, T2, T3, T4, T5]) IsArg2() bool {
+ return e.argId == either5ArgId2
+}
+
+// IsArg3 returns true if Either5 uses the third argument.
+func (e Either5[T1, T2, T3, T4, T5]) IsArg3() bool {
+ return e.argId == either5ArgId3
+}
+
+// IsArg4 returns true if Either5 uses the fourth argument.
+func (e Either5[T1, T2, T3, T4, T5]) IsArg4() bool {
+ return e.argId == either5ArgId4
+}
+
+// IsArg5 returns true if Either5 uses the fifth argument.
+func (e Either5[T1, T2, T3, T4, T5]) IsArg5() bool {
+ return e.argId == either5ArgId5
+}
+
+// Arg1 returns the first argument of a Either5 struct.
+func (e Either5[T1, T2, T3, T4, T5]) Arg1() (T1, bool) {
+ if e.IsArg1() {
+ return e.arg1, true
+ }
+ return empty[T1](), false
+}
+
+// Arg2 returns the second argument of a Either5 struct.
+func (e Either5[T1, T2, T3, T4, T5]) Arg2() (T2, bool) {
+ if e.IsArg2() {
+ return e.arg2, true
+ }
+ return empty[T2](), false
+}
+
+// Arg3 returns the third argument of a Either5 struct.
+func (e Either5[T1, T2, T3, T4, T5]) Arg3() (T3, bool) {
+ if e.IsArg3() {
+ return e.arg3, true
+ }
+ return empty[T3](), false
+}
+
+// Arg4 returns the fourth argument of a Either5 struct.
+func (e Either5[T1, T2, T3, T4, T5]) Arg4() (T4, bool) {
+ if e.IsArg4() {
+ return e.arg4, true
+ }
+ return empty[T4](), false
+}
+
+// Arg5 returns the fifth argument of a Either5 struct.
+func (e Either5[T1, T2, T3, T4, T5]) Arg5() (T5, bool) {
+ if e.IsArg5() {
+ return e.arg5, true
+ }
+ return empty[T5](), false
+}
+
+// MustArg1 returns the first argument of a Either5 struct or panics.
+func (e Either5[T1, T2, T3, T4, T5]) MustArg1() T1 {
+ if !e.IsArg1() {
+ panic(either5MissingArg1)
+ }
+ return e.arg1
+}
+
+// MustArg2 returns the second argument of a Either5 struct or panics.
+func (e Either5[T1, T2, T3, T4, T5]) MustArg2() T2 {
+ if !e.IsArg2() {
+ panic(either5MissingArg2)
+ }
+ return e.arg2
+}
+
+// MustArg3 returns the third argument of a Either5 struct or panics.
+func (e Either5[T1, T2, T3, T4, T5]) MustArg3() T3 {
+ if !e.IsArg3() {
+ panic(either5MissingArg3)
+ }
+ return e.arg3
+}
+
+// MustArg4 returns the fourth argument of a Either5 struct or panics.
+func (e Either5[T1, T2, T3, T4, T5]) MustArg4() T4 {
+ if !e.IsArg4() {
+ panic(either5MissingArg4)
+ }
+ return e.arg4
+}
+
+// MustArg5 returns the fifth argument of a Either5 struct or panics.
+func (e Either5[T1, T2, T3, T4, T5]) MustArg5() T5 {
+ if !e.IsArg5() {
+ panic(either5MissingArg5)
+ }
+ return e.arg5
+}
+
+// Arg1OrElse returns the first argument of a Either5 struct or fallback.
+func (e Either5[T1, T2, T3, T4, T5]) Arg1OrElse(fallback T1) T1 {
+ if e.IsArg1() {
+ return e.arg1
+ }
+ return fallback
+}
+
+// Arg2OrElse returns the second argument of a Either5 struct or fallback.
+func (e Either5[T1, T2, T3, T4, T5]) Arg2OrElse(fallback T2) T2 {
+ if e.IsArg2() {
+ return e.arg2
+ }
+ return fallback
+}
+
+// Arg3OrElse returns the third argument of a Either5 struct or fallback.
+func (e Either5[T1, T2, T3, T4, T5]) Arg3OrElse(fallback T3) T3 {
+ if e.IsArg3() {
+ return e.arg3
+ }
+ return fallback
+}
+
+// Arg4OrElse returns the fourth argument of a Either5 struct or fallback.
+func (e Either5[T1, T2, T3, T4, T5]) Arg4OrElse(fallback T4) T4 {
+ if e.IsArg4() {
+ return e.arg4
+ }
+ return fallback
+}
+
+// Arg5OrElse returns the fifth argument of a Either5 struct or fallback.
+func (e Either5[T1, T2, T3, T4, T5]) Arg5OrElse(fallback T5) T5 {
+ if e.IsArg5() {
+ return e.arg5
+ }
+ return fallback
+}
+
+// Arg1OrEmpty returns the first argument of a Either5 struct or empty value.
+func (e Either5[T1, T2, T3, T4, T5]) Arg1OrEmpty() T1 {
+ if e.IsArg1() {
+ return e.arg1
+ }
+ return empty[T1]()
+}
+
+// Arg2OrEmpty returns the second argument of a Either5 struct or empty value.
+func (e Either5[T1, T2, T3, T4, T5]) Arg2OrEmpty() T2 {
+ if e.IsArg2() {
+ return e.arg2
+ }
+ return empty[T2]()
+}
+
+// Arg3OrEmpty returns the third argument of a Either5 struct or empty value.
+func (e Either5[T1, T2, T3, T4, T5]) Arg3OrEmpty() T3 {
+ if e.IsArg3() {
+ return e.arg3
+ }
+ return empty[T3]()
+}
+
+// Arg4OrEmpty returns the fourth argument of a Either5 struct or empty value.
+func (e Either5[T1, T2, T3, T4, T5]) Arg4OrEmpty() T4 {
+ if e.IsArg4() {
+ return e.arg4
+ }
+ return empty[T4]()
+}
+
+// Arg5OrEmpty returns the fifth argument of a Either5 struct or empty value.
+func (e Either5[T1, T2, T3, T4, T5]) Arg5OrEmpty() T5 {
+ if e.IsArg5() {
+ return e.arg5
+ }
+ return empty[T5]()
+}
+
+// ForEach executes the given side-effecting function, depending on which argument is set.
+func (e Either5[T1, T2, T3, T4, T5]) ForEach(arg1Cb func(T1), arg2Cb func(T2), arg3Cb func(T3), arg4Cb func(T4), arg5Cb func(T5)) {
+ switch e.argId {
+ case either5ArgId1:
+ arg1Cb(e.arg1)
+ case either5ArgId2:
+ arg2Cb(e.arg2)
+ case either5ArgId3:
+ arg3Cb(e.arg3)
+ case either5ArgId4:
+ arg4Cb(e.arg4)
+ case either5ArgId5:
+ arg5Cb(e.arg5)
+ }
+}
+
+// Match executes the given function, depending on which argument is set, and returns the result.
+func (e Either5[T1, T2, T3, T4, T5]) Match(
+ onArg1 func(T1) Either5[T1, T2, T3, T4, T5],
+ onArg2 func(T2) Either5[T1, T2, T3, T4, T5],
+ onArg3 func(T3) Either5[T1, T2, T3, T4, T5],
+ onArg4 func(T4) Either5[T1, T2, T3, T4, T5],
+ onArg5 func(T5) Either5[T1, T2, T3, T4, T5]) Either5[T1, T2, T3, T4, T5] {
+
+ switch e.argId {
+ case either5ArgId1:
+ return onArg1(e.arg1)
+ case either5ArgId2:
+ return onArg2(e.arg2)
+ case either5ArgId3:
+ return onArg3(e.arg3)
+ case either5ArgId4:
+ return onArg4(e.arg4)
+ case either5ArgId5:
+ return onArg5(e.arg5)
+ }
+
+ panic(either5InvalidArgumentId)
+}
+
+// MapArg1 executes the given function if Either5 uses the first argument, and returns the result.
+func (e Either5[T1, T2, T3, T4, T5]) MapArg1(mapper func(T1) Either5[T1, T2, T3, T4, T5]) Either5[T1, T2, T3, T4, T5] {
+ if e.IsArg1() {
+ return mapper(e.arg1)
+ }
+
+ return e
+}
+
+// MapArg2 executes the given function if Either5 uses the second argument, and returns the result.
+func (e Either5[T1, T2, T3, T4, T5]) MapArg2(mapper func(T2) Either5[T1, T2, T3, T4, T5]) Either5[T1, T2, T3, T4, T5] {
+ if e.IsArg2() {
+ return mapper(e.arg2)
+ }
+
+ return e
+}
+
+// MapArg3 executes the given function if Either5 uses the third argument, and returns the result.
+func (e Either5[T1, T2, T3, T4, T5]) MapArg3(mapper func(T3) Either5[T1, T2, T3, T4, T5]) Either5[T1, T2, T3, T4, T5] {
+ if e.IsArg3() {
+ return mapper(e.arg3)
+ }
+
+ return e
+}
+
+// MapArg4 executes the given function if Either5 uses the fourth argument, and returns the result.
+func (e Either5[T1, T2, T3, T4, T5]) MapArg4(mapper func(T4) Either5[T1, T2, T3, T4, T5]) Either5[T1, T2, T3, T4, T5] {
+ if e.IsArg4() {
+ return mapper(e.arg4)
+ }
+
+ return e
+}
+
+// MapArg5 executes the given function if Either5 uses the fifth argument, and returns the result.
+func (e Either5[T1, T2, T3, T4, T5]) MapArg5(mapper func(T5) Either5[T1, T2, T3, T4, T5]) Either5[T1, T2, T3, T4, T5] {
+ if e.IsArg5() {
+ return mapper(e.arg5)
+ }
+
+ return e
+}
diff --git a/vendor/github.com/samber/mo/future.go b/vendor/github.com/samber/mo/future.go
new file mode 100644
index 00000000..faf2345d
--- /dev/null
+++ b/vendor/github.com/samber/mo/future.go
@@ -0,0 +1,181 @@
+package mo
+
+import (
+ "sync"
+)
+
+// NewFuture instantiates a new Future.
+func NewFuture[T any](cb func(resolve func(T), reject func(error))) *Future[T] {
+ future := Future[T]{
+ mu: sync.RWMutex{},
+ next: nil,
+ cancelCb: func() {},
+ }
+
+ go func() {
+ cb(future.resolve, future.reject)
+ }()
+
+ return &future
+}
+
+// Future represents a value which may or may not currently be available, but will be
+// available at some point, or an error if that value could not be made available.
+type Future[T any] struct {
+ mu sync.RWMutex
+
+ next func(T, error)
+ cancelCb func()
+}
+
+func (f *Future[T]) resolve(value T) {
+ f.mu.RLock()
+ defer f.mu.RUnlock()
+
+ if f.next != nil {
+ f.next(value, nil)
+ }
+}
+
+func (f *Future[T]) reject(err error) {
+ f.mu.RLock()
+ defer f.mu.RUnlock()
+
+ if f.next != nil {
+ f.next(empty[T](), err)
+ }
+}
+
+// Then registers a callback that is invoked when the Future is resolved. It returns a new Future.
+func (f *Future[T]) Then(cb func(T) (T, error)) *Future[T] {
+ f.mu.Lock()
+ defer f.mu.Unlock()
+
+ future := &Future[T]{
+ mu: sync.RWMutex{},
+ next: nil,
+ cancelCb: func() {
+ f.Cancel()
+ },
+ }
+
+ f.next = func(value T, err error) {
+ if err != nil {
+ future.reject(err)
+ return
+ }
+
+ newValue, err := cb(value)
+ if err != nil {
+ future.reject(err)
+ return
+ }
+
+ future.resolve(newValue)
+ }
+
+ return future
+}
+
+// Catch registers a callback that is invoked when the Future is rejected. It returns a new Future.
+func (f *Future[T]) Catch(cb func(error) (T, error)) *Future[T] {
+ f.mu.Lock()
+ defer f.mu.Unlock()
+
+ future := &Future[T]{
+ mu: sync.RWMutex{},
+ next: nil,
+ cancelCb: func() {
+ f.Cancel()
+ },
+ }
+
+ f.next = func(value T, err error) {
+ if err == nil {
+ future.resolve(value)
+ return
+ }
+
+ newValue, err := cb(err)
+ if err != nil {
+ future.reject(err)
+ return
+ }
+
+ future.resolve(newValue)
+ }
+
+ return future
+}
+
+// Finally registers a callback that is invoked once the Future settles, whether resolved or rejected. It returns a new Future.
+func (f *Future[T]) Finally(cb func(T, error) (T, error)) *Future[T] {
+ f.mu.Lock()
+ defer f.mu.Unlock()
+
+ future := &Future[T]{
+ mu: sync.RWMutex{},
+ next: nil,
+ cancelCb: func() {
+ f.Cancel()
+ },
+ }
+
+ f.next = func(value T, err error) {
+ newValue, err := cb(value, err)
+ if err != nil {
+ future.reject(err)
+ return
+ }
+
+ future.resolve(newValue)
+ }
+
+ return future
+}
+
+// Cancel cancels the Future chain.
+func (f *Future[T]) Cancel() {
+ f.mu.Lock()
+ defer f.mu.Unlock()
+
+ f.next = nil
+ if f.cancelCb != nil {
+ f.cancelCb()
+ }
+}
+
+// Collect awaits and returns the result of the Future.
+func (f *Future[T]) Collect() (T, error) {
+ done := make(chan struct{})
+
+ var a T
+ var b error
+
+ f.mu.Lock()
+ f.next = func(value T, err error) {
+ a = value
+ b = err
+
+ done <- struct{}{}
+ }
+ f.mu.Unlock()
+
+ <-done
+
+ return a, b
+}
+
+// Result wraps Collect and returns a Result.
+func (f *Future[T]) Result() Result[T] {
+ return TupleToResult(f.Collect())
+}
+
+// Either wraps Collect and returns an Either.
+func (f *Future[T]) Either() Either[error, T] {
+ v, err := f.Collect()
+ if err != nil {
+ return Left[error, T](err)
+ }
+ return Right[error, T](v)
+}
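As an aside (not part of the diff): a minimal usage sketch of the `Future` type vendored above. The values are made up, and because this implementation only delivers a result to a continuation that is already registered, the callback waits briefly before resolving.

```go
package main

import (
	"fmt"
	"time"

	"github.com/samber/mo"
)

func main() {
	// The callback runs in its own goroutine; the short delay gives the
	// caller time to attach Then/Collect before resolve fires.
	f := mo.NewFuture[string](func(resolve func(string), reject func(error)) {
		time.Sleep(10 * time.Millisecond)
		resolve("pong")
	})

	value, err := f.
		Then(func(v string) (string, error) { return v + "!", nil }).
		Collect() // blocks until the chain is resolved or rejected

	fmt.Println(value, err) // pong! <nil>
}
```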
diff --git a/vendor/github.com/samber/mo/io.go b/vendor/github.com/samber/mo/io.go
new file mode 100644
index 00000000..6dae4f3b
--- /dev/null
+++ b/vendor/github.com/samber/mo/io.go
@@ -0,0 +1,109 @@
+package mo
+
+// NewIO instantiates a new IO.
+func NewIO[R any](f f0[R]) IO[R] {
+ return IO[R]{
+ unsafePerform: f,
+ }
+}
+
+// IO represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type IO[R any] struct {
+ unsafePerform f0[R]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IO[R]) Run() R {
+ return io.unsafePerform()
+}
+
+// NewIO1 instantiates a new IO1.
+func NewIO1[R any, A any](f f1[R, A]) IO1[R, A] {
+ return IO1[R, A]{
+ unsafePerform: f,
+ }
+}
+
+// IO1 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type IO1[R any, A any] struct {
+ unsafePerform f1[R, A]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IO1[R, A]) Run(a A) R {
+ return io.unsafePerform(a)
+}
+
+// NewIO2 instantiates a new IO2.
+func NewIO2[R any, A any, B any](f f2[R, A, B]) IO2[R, A, B] {
+ return IO2[R, A, B]{
+ unsafePerform: f,
+ }
+}
+
+// IO2 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type IO2[R any, A any, B any] struct {
+ unsafePerform f2[R, A, B]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IO2[R, A, B]) Run(a A, b B) R {
+ return io.unsafePerform(a, b)
+}
+
+// NewIO3 instantiates a new IO3.
+func NewIO3[R any, A any, B any, C any](f f3[R, A, B, C]) IO3[R, A, B, C] {
+ return IO3[R, A, B, C]{
+ unsafePerform: f,
+ }
+}
+
+// IO3 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type IO3[R any, A any, B any, C any] struct {
+ unsafePerform f3[R, A, B, C]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IO3[R, A, B, C]) Run(a A, b B, c C) R {
+ return io.unsafePerform(a, b, c)
+}
+
+// NewIO4 instantiates a new IO4.
+func NewIO4[R any, A any, B any, C any, D any](f f4[R, A, B, C, D]) IO4[R, A, B, C, D] {
+ return IO4[R, A, B, C, D]{
+ unsafePerform: f,
+ }
+}
+
+// IO4 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type IO4[R any, A any, B any, C any, D any] struct {
+ unsafePerform f4[R, A, B, C, D]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IO4[R, A, B, C, D]) Run(a A, b B, c C, d D) R {
+ return io.unsafePerform(a, b, c, d)
+}
+
+// NewIO5 instantiates a new IO5.
+func NewIO5[R any, A any, B any, C any, D any, E any](f f5[R, A, B, C, D, E]) IO5[R, A, B, C, D, E] {
+ return IO5[R, A, B, C, D, E]{
+ unsafePerform: f,
+ }
+}
+
+// IO5 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type IO5[R any, A any, B any, C any, D any, E any] struct {
+ unsafePerform f5[R, A, B, C, D, E]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IO5[R, A, B, C, D, E]) Run(a A, b B, c C, d D, e E) R {
+ return io.unsafePerform(a, b, c, d, e)
+}
diff --git a/vendor/github.com/samber/mo/io_either.go b/vendor/github.com/samber/mo/io_either.go
new file mode 100644
index 00000000..079683fc
--- /dev/null
+++ b/vendor/github.com/samber/mo/io_either.go
@@ -0,0 +1,139 @@
+package mo
+
+// NewIOEither instantiates a new IOEither.
+func NewIOEither[R any](f fe0[R]) IOEither[R] {
+ return IOEither[R]{
+ unsafePerform: f,
+ }
+}
+
+// IOEither represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and can fail.
+type IOEither[R any] struct {
+ unsafePerform fe0[R]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IOEither[R]) Run() Either[error, R] {
+ v, err := io.unsafePerform()
+ if err != nil {
+ return Left[error, R](err)
+ }
+
+ return Right[error, R](v)
+}
+
+// NewIOEither1 instantiates a new IOEither1.
+func NewIOEither1[R any, A any](f fe1[R, A]) IOEither1[R, A] {
+ return IOEither1[R, A]{
+ unsafePerform: f,
+ }
+}
+
+// IOEither1 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and can fail.
+type IOEither1[R any, A any] struct {
+ unsafePerform fe1[R, A]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IOEither1[R, A]) Run(a A) Either[error, R] {
+ v, err := io.unsafePerform(a)
+ if err != nil {
+ return Left[error, R](err)
+ }
+
+ return Right[error, R](v)
+}
+
+// NewIOEither2 instantiates a new IOEither2.
+func NewIOEither2[R any, A any, B any](f fe2[R, A, B]) IOEither2[R, A, B] {
+ return IOEither2[R, A, B]{
+ unsafePerform: f,
+ }
+}
+
+// IOEither2 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and can fail.
+type IOEither2[R any, A any, B any] struct {
+ unsafePerform fe2[R, A, B]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IOEither2[R, A, B]) Run(a A, b B) Either[error, R] {
+ v, err := io.unsafePerform(a, b)
+ if err != nil {
+ return Left[error, R](err)
+ }
+
+ return Right[error, R](v)
+}
+
+// NewIOEither3 instantiates a new IOEither3.
+func NewIOEither3[R any, A any, B any, C any](f fe3[R, A, B, C]) IOEither3[R, A, B, C] {
+ return IOEither3[R, A, B, C]{
+ unsafePerform: f,
+ }
+}
+
+// IOEither3 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and can fail.
+type IOEither3[R any, A any, B any, C any] struct {
+ unsafePerform fe3[R, A, B, C]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IOEither3[R, A, B, C]) Run(a A, b B, c C) Either[error, R] {
+ v, err := io.unsafePerform(a, b, c)
+ if err != nil {
+ return Left[error, R](err)
+ }
+
+ return Right[error, R](v)
+}
+
+// NewIOEither4 instantiates a new IOEither4.
+func NewIOEither4[R any, A any, B any, C any, D any](f fe4[R, A, B, C, D]) IOEither4[R, A, B, C, D] {
+ return IOEither4[R, A, B, C, D]{
+ unsafePerform: f,
+ }
+}
+
+// IOEither4 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and can fail.
+type IOEither4[R any, A any, B any, C any, D any] struct {
+ unsafePerform fe4[R, A, B, C, D]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IOEither4[R, A, B, C, D]) Run(a A, b B, c C, d D) Either[error, R] {
+ v, err := io.unsafePerform(a, b, c, d)
+ if err != nil {
+ return Left[error, R](err)
+ }
+
+ return Right[error, R](v)
+}
+
+// NewIOEither5 instantiates a new IOEither5.
+func NewIOEither5[R any, A any, B any, C any, D any, E any](f fe5[R, A, B, C, D, E]) IOEither5[R, A, B, C, D, E] {
+ return IOEither5[R, A, B, C, D, E]{
+ unsafePerform: f,
+ }
+}
+
+// IOEither5 represents a non-deterministic synchronous computation that
+// can cause side effects, yields a value of type `R` and can fail.
+type IOEither5[R any, A any, B any, C any, D any, E any] struct {
+ unsafePerform fe5[R, A, B, C, D, E]
+}
+
+// Run executes the non-deterministic synchronous computation, with side effects.
+func (io IOEither5[R, A, B, C, D, E]) Run(a A, b B, c C, d D, e E) Either[error, R] {
+ v, err := io.unsafePerform(a, b, c, d, e)
+ if err != nil {
+ return Left[error, R](err)
+ }
+
+ return Right[error, R](v)
+}
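Another aside: a sketch of how `IOEither` defers a fallible side effect until `Run` is called (illustration only; the file path is hypothetical).

```go
package main

import (
	"fmt"
	"os"

	"github.com/samber/mo"
)

func main() {
	// Nothing is read until Run is called.
	readConfig := mo.NewIOEither(func() ([]byte, error) {
		return os.ReadFile("config.toml") // hypothetical path
	})

	either := readConfig.Run() // Either[error, []byte]

	if data, ok := either.Right(); ok {
		fmt.Println("read", len(data), "bytes")
	} else {
		err, _ := either.Left()
		fmt.Println("failed:", err)
	}
}
```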
diff --git a/vendor/github.com/samber/mo/option.go b/vendor/github.com/samber/mo/option.go
new file mode 100644
index 00000000..29bdaaae
--- /dev/null
+++ b/vendor/github.com/samber/mo/option.go
@@ -0,0 +1,276 @@
+package mo
+
+import (
+ "bytes"
+ "database/sql/driver"
+ "encoding/gob"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+var optionNoSuchElement = fmt.Errorf("no such element")
+
+// Some builds an Option when value is present.
+// Play: https://go.dev/play/p/iqz2n9n0tDM
+func Some[T any](value T) Option[T] {
+ return Option[T]{
+ isPresent: true,
+ value: value,
+ }
+}
+
+// None builds an Option when value is absent.
+// Play: https://go.dev/play/p/yYQPsYCSYlD
+func None[T any]() Option[T] {
+ return Option[T]{
+ isPresent: false,
+ }
+}
+
+// TupleToOption builds a Some Option when second argument is true, or None.
+// Play: https://go.dev/play/p/gkrg2pZwOty
+func TupleToOption[T any](value T, ok bool) Option[T] {
+ if ok {
+ return Some(value)
+ }
+ return None[T]()
+}
+
+// EmptyableToOption builds a Some Option when value is not empty, or None.
+// Play: https://go.dev/play/p/GSpQQ-q-UES
+func EmptyableToOption[T any](value T) Option[T] {
+ // 🤮
+ isZero := reflect.ValueOf(&value).Elem().IsZero()
+ if isZero {
+ return None[T]()
+ }
+
+ return Some(value)
+}
+
+// Option is a container for an optional value of type T. If value exists, Option is
+// of type Some. If the value is absent, Option is of type None.
+type Option[T any] struct {
+ isPresent bool
+ value T
+}
+
+// IsPresent returns true when value is present.
+// Play: https://go.dev/play/p/nDqIaiihyCA
+func (o Option[T]) IsPresent() bool {
+ return o.isPresent
+}
+
+// IsAbsent returns true when value is absent.
+// Play: https://go.dev/play/p/23e2zqyVOQm
+func (o Option[T]) IsAbsent() bool {
+ return !o.isPresent
+}
+
+// Size returns 1 when value is present, or 0 otherwise.
+// Play: https://go.dev/play/p/7ixCNG1E9l7
+func (o Option[T]) Size() int {
+ if o.isPresent {
+ return 1
+ }
+
+ return 0
+}
+
+// Get returns value and presence.
+// Play: https://go.dev/play/p/0-JBa1usZRT
+func (o Option[T]) Get() (T, bool) {
+ if !o.isPresent {
+ return empty[T](), false
+ }
+
+ return o.value, true
+}
+
+// MustGet returns value if present or panics instead.
+// Play: https://go.dev/play/p/RVBckjdi5WR
+func (o Option[T]) MustGet() T {
+ if !o.isPresent {
+ panic(optionNoSuchElement)
+ }
+
+ return o.value
+}
+
+// OrElse returns value if present or default value.
+// Play: https://go.dev/play/p/TrGByFWCzXS
+func (o Option[T]) OrElse(fallback T) T {
+ if !o.isPresent {
+ return fallback
+ }
+
+ return o.value
+}
+
+// OrEmpty returns value if present or empty value.
+// Play: https://go.dev/play/p/SpSUJcE-tQm
+func (o Option[T]) OrEmpty() T {
+ return o.value
+}
+
+// ForEach executes the given side-effecting function if value is present.
+func (o Option[T]) ForEach(onValue func(value T)) {
+ if o.isPresent {
+ onValue(o.value)
+ }
+}
+
+// Match executes the first function if value is present and second function if absent.
+// It returns a new Option.
+// Play: https://go.dev/play/p/1V6st3LDJsM
+func (o Option[T]) Match(onValue func(value T) (T, bool), onNone func() (T, bool)) Option[T] {
+ if o.isPresent {
+ return TupleToOption(onValue(o.value))
+ }
+ return TupleToOption(onNone())
+}
+
+// Map executes the mapper function if value is present or returns None if absent.
+// Play: https://go.dev/play/p/mvfP3pcP_eJ
+func (o Option[T]) Map(mapper func(value T) (T, bool)) Option[T] {
+ if o.isPresent {
+ return TupleToOption(mapper(o.value))
+ }
+
+ return None[T]()
+}
+
+// MapNone executes the mapper function if value is absent, or returns the Option unchanged.
+// Play: https://go.dev/play/p/_KaHWZ6Q17b
+func (o Option[T]) MapNone(mapper func() (T, bool)) Option[T] {
+ if o.isPresent {
+ return Some(o.value)
+ }
+
+ return TupleToOption(mapper())
+}
+
+// FlatMap executes the mapper function if value is present or returns None if absent.
+// Play: https://go.dev/play/p/OXO-zJx6n5r
+func (o Option[T]) FlatMap(mapper func(value T) Option[T]) Option[T] {
+ if o.isPresent {
+ return mapper(o.value)
+ }
+
+ return None[T]()
+}
+
+// MarshalJSON encodes Option into json.
+func (o Option[T]) MarshalJSON() ([]byte, error) {
+ if o.isPresent {
+ return json.Marshal(o.value)
+ }
+
+ // if anybody finds a way to support the `omitempty` param, please contribute!
+ return json.Marshal(nil)
+}
+
+// UnmarshalJSON decodes Option from json.
+func (o *Option[T]) UnmarshalJSON(b []byte) error {
+ if bytes.Equal(b, []byte("null")) {
+ o.isPresent = false
+ return nil
+ }
+
+ err := json.Unmarshal(b, &o.value)
+ if err != nil {
+ return err
+ }
+
+ o.isPresent = true
+ return nil
+}
+
+// MarshalText implements the encoding.TextMarshaler interface.
+func (o Option[T]) MarshalText() ([]byte, error) {
+ return json.Marshal(o)
+}
+
+// UnmarshalText implements the encoding.TextUnmarshaler interface.
+func (o *Option[T]) UnmarshalText(data []byte) error {
+ return json.Unmarshal(data, o)
+}
+
+// MarshalBinary implements the encoding.BinaryMarshaler interface: it marshals the Option into a binary form.
+func (o Option[T]) MarshalBinary() ([]byte, error) {
+ if !o.isPresent {
+ return []byte{0}, nil
+ }
+
+ var buf bytes.Buffer
+
+ enc := gob.NewEncoder(&buf)
+ if err := enc.Encode(o.value); err != nil {
+ return []byte{}, err
+ }
+
+ return append([]byte{1}, buf.Bytes()...), nil
+}
+
+// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface: it unmarshals the Option from a binary form.
+func (o *Option[T]) UnmarshalBinary(data []byte) error {
+ if len(data) == 0 {
+ return errors.New("Option[T].UnmarshalBinary: no data")
+ }
+
+ if data[0] == 0 {
+ o.isPresent = false
+ o.value = empty[T]()
+ return nil
+ }
+
+ buf := bytes.NewBuffer(data[1:])
+ dec := gob.NewDecoder(buf)
+ err := dec.Decode(&o.value)
+ if err != nil {
+ return err
+ }
+
+ o.isPresent = true
+ return nil
+}
+
+// GobEncode implements the gob.GobEncoder interface.
+func (o Option[T]) GobEncode() ([]byte, error) {
+ return o.MarshalBinary()
+}
+
+// GobDecode implements the gob.GobDecoder interface.
+func (o *Option[T]) GobDecode(data []byte) error {
+ return o.UnmarshalBinary(data)
+}
+
+// Scan implements the sql.Scanner interface.
+func (o *Option[T]) Scan(src any) error {
+ if src == nil {
+ o.isPresent = false
+ o.value = empty[T]()
+ return nil
+ }
+
+ if av, err := driver.DefaultParameterConverter.ConvertValue(src); err == nil {
+ if v, ok := av.(T); ok {
+ o.isPresent = true
+ o.value = v
+ return nil
+ }
+ }
+
+ return fmt.Errorf("failed to scan Option[T]")
+}
+
+// Value implements the driver.Valuer interface.
+func (o Option[T]) Value() (driver.Value, error) {
+ if !o.isPresent {
+ return nil, nil
+ }
+
+ return o.value, nil
+}
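A quick sketch of `Option` in use, including the JSON behaviour defined above (illustration only; the struct and values are made up).

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/samber/mo"
)

type profile struct {
	Name     string            `json:"name"`
	Nickname mo.Option[string] `json:"nickname"`
}

func main() {
	withNick := profile{Name: "ana", Nickname: mo.Some("an")}
	without := profile{Name: "bob", Nickname: mo.None[string]()}

	a, _ := json.Marshal(withNick)
	b, _ := json.Marshal(without)
	fmt.Println(string(a)) // {"name":"ana","nickname":"an"}
	fmt.Println(string(b)) // {"name":"bob","nickname":null}

	// Fallbacks instead of nil checks.
	fmt.Println(without.Nickname.OrElse("n/a")) // n/a
	fmt.Println(withNick.Nickname.OrEmpty())    // an
}
```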
diff --git a/vendor/github.com/samber/mo/result.go b/vendor/github.com/samber/mo/result.go
new file mode 100644
index 00000000..fa0a3b3b
--- /dev/null
+++ b/vendor/github.com/samber/mo/result.go
@@ -0,0 +1,155 @@
+package mo
+
+// Ok builds a Result when value is valid.
+// Play: https://go.dev/play/p/PDwADdzNoyZ
+func Ok[T any](value T) Result[T] {
+ return Result[T]{
+ value: value,
+ isErr: false,
+ }
+}
+
+// Err builds a Result when value is invalid.
+// Play: https://go.dev/play/p/PDwADdzNoyZ
+func Err[T any](err error) Result[T] {
+ return Result[T]{
+ err: err,
+ isErr: true,
+ }
+}
+
+// TupleToResult converts a pair of T and error into a Result.
+// Play: https://go.dev/play/p/KWjfqQDHQwa
+func TupleToResult[T any](value T, err error) Result[T] {
+ if err != nil {
+ return Err[T](err)
+ }
+ return Ok(value)
+}
+
+// Try returns either an Ok or an Err object.
+// Play: https://go.dev/play/p/ilOlQx-Mx42
+func Try[T any](f func() (T, error)) Result[T] {
+ return TupleToResult(f())
+}
+
+// Result represents the result of an action having one
+// of the following outcomes: success or failure.
+// An instance of Result is an instance of either Ok or Err.
+// It could be compared to `Either[error, T]`.
+type Result[T any] struct {
+ isErr bool
+ value T
+ err error
+}
+
+// IsOk returns true when value is valid.
+// Play: https://go.dev/play/p/sfNvBQyZfgU
+func (r Result[T]) IsOk() bool {
+ return !r.isErr
+}
+
+// IsError returns true when value is invalid.
+// Play: https://go.dev/play/p/xkV9d464scV
+func (r Result[T]) IsError() bool {
+ return r.isErr
+}
+
+// Error returns the error when value is invalid, or nil otherwise.
+// Play: https://go.dev/play/p/CSkHGTyiXJ5
+func (r Result[T]) Error() error {
+ return r.err
+}
+
+// Get returns value and error.
+// Play: https://go.dev/play/p/8KyX3z6TuNo
+func (r Result[T]) Get() (T, error) {
+ if r.isErr {
+ return empty[T](), r.err
+ }
+
+ return r.value, nil
+}
+
+// MustGet returns value when Result is valid or panics.
+// Play: https://go.dev/play/p/8LSlndHoTAE
+func (r Result[T]) MustGet() T {
+ if r.isErr {
+ panic(r.err)
+ }
+
+ return r.value
+}
+
+// OrElse returns value when Result is valid or default value.
+// Play: https://go.dev/play/p/MN_ULx0soi6
+func (r Result[T]) OrElse(fallback T) T {
+ if r.isErr {
+ return fallback
+ }
+
+ return r.value
+}
+
+// OrEmpty returns value when Result is valid or empty value.
+// Play: https://go.dev/play/p/rdKtBmOcMLh
+func (r Result[T]) OrEmpty() T {
+ return r.value
+}
+
+// ToEither transforms a Result into an Either type.
+// Play: https://go.dev/play/p/Uw1Zz6b952q
+func (r Result[T]) ToEither() Either[error, T] {
+ if r.isErr {
+ return Left[error, T](r.err)
+ }
+
+ return Right[error, T](r.value)
+}
+
+// ForEach executes the given side-effecting function if Result is valid.
+func (r Result[T]) ForEach(mapper func(value T)) {
+ if !r.isErr {
+ mapper(r.value)
+ }
+}
+
+// Match executes the first function if Result is valid and second function if invalid.
+// It returns a new Result.
+// Play: https://go.dev/play/p/-_eFaLJ31co
+func (r Result[T]) Match(onSuccess func(value T) (T, error), onError func(err error) (T, error)) Result[T] {
+ if r.isErr {
+ return TupleToResult(onError(r.err))
+ }
+ return TupleToResult(onSuccess(r.value))
+}
+
+// Map executes the mapper function if Result is valid. It returns a new Result.
+// Play: https://go.dev/play/p/-ndpN_b_OSc
+func (r Result[T]) Map(mapper func(value T) (T, error)) Result[T] {
+ if !r.isErr {
+ return TupleToResult(mapper(r.value))
+ }
+
+ return Err[T](r.err)
+}
+
+// MapErr executes the mapper function if Result is invalid. It returns a new Result.
+// Play: https://go.dev/play/p/WraZixg9GGf
+func (r Result[T]) MapErr(mapper func(error) (T, error)) Result[T] {
+ if r.isErr {
+ return TupleToResult(mapper(r.err))
+ }
+
+ return Ok(r.value)
+}
+
+// FlatMap executes the mapper function if Result is valid. It returns a new Result.
+// Play: https://go.dev/play/p/Ud5QjZOqg-7
+func (r Result[T]) FlatMap(mapper func(value T) Result[T]) Result[T] {
+ if !r.isErr {
+ return mapper(r.value)
+ }
+
+ return Err[T](r.err)
+}
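A corresponding sketch for `Result` (illustration only; values are made up).

```go
package main

import (
	"fmt"
	"strconv"

	"github.com/samber/mo"
)

func main() {
	// Wrap an error-returning call and keep chaining on the happy path.
	r := mo.Try(func() (int, error) { return strconv.Atoi("42") }).
		Map(func(n int) (int, error) { return n * 2, nil })

	fmt.Println(r.OrElse(0)) // 84

	bad := mo.Err[int](fmt.Errorf("boom"))
	fmt.Println(bad.IsError(), bad.OrElse(-1)) // true -1
}
```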
diff --git a/vendor/github.com/samber/mo/state.go b/vendor/github.com/samber/mo/state.go
new file mode 100644
index 00000000..a1bb0a60
--- /dev/null
+++ b/vendor/github.com/samber/mo/state.go
@@ -0,0 +1,52 @@
+package mo
+
+func NewState[S any, A any](f func(state S) (A, S)) State[S, A] {
+ return State[S, A]{
+ run: f,
+ }
+}
+
+func ReturnState[S any, A any](x A) State[S, A] {
+ return State[S, A]{
+ run: func(state S) (A, S) {
+ return x, state
+ },
+ }
+}
+
+// State represents a function `(S) -> (A, S)`, where `S` is state, `A` is result.
+type State[S any, A any] struct {
+ run func(state S) (A, S)
+}
+
+// Run executes a computation in the State monad.
+func (s State[S, A]) Run(state S) (A, S) {
+ return s.run(state)
+}
+
+// Get returns the current state.
+func (s State[S, A]) Get() State[S, S] {
+ return State[S, S]{
+ run: func(state S) (S, S) {
+ return state, state
+ },
+ }
+}
+
+// Modify modifies the state by applying a function to the current state.
+func (s State[S, A]) Modify(f func(state S) S) State[S, A] {
+ return State[S, A]{
+ run: func(state S) (A, S) {
+ return empty[A](), f(state)
+ },
+ }
+}
+
+// Put sets the state.
+func (s State[S, A]) Put(state S) State[S, A] {
+ return State[S, A]{
+ run: func(state S) (A, S) {
+ return empty[A](), state
+ },
+ }
+}
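And a tiny sketch of `State`, which threads a piece of state through a computation (illustration only).

```go
package main

import (
	"fmt"

	"github.com/samber/mo"
)

func main() {
	// Produce a label and advance the counter in one step.
	counter := mo.NewState(func(s int) (string, int) {
		return fmt.Sprintf("count=%d", s), s + 1
	})

	label, next := counter.Run(41)
	fmt.Println(label, next) // count=41 42
}
```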
diff --git a/vendor/github.com/samber/mo/task.go b/vendor/github.com/samber/mo/task.go
new file mode 100644
index 00000000..39a28ef3
--- /dev/null
+++ b/vendor/github.com/samber/mo/task.go
@@ -0,0 +1,175 @@
+package mo
+
+// NewTask instantiates a new Task.
+func NewTask[R any](f ff0[R]) Task[R] {
+ return Task[R]{
+ unsafePerform: f,
+ }
+}
+
+// NewTaskFromIO instantiates a new Task from an existing IO.
+func NewTaskFromIO[R any](io IO[R]) Task[R] {
+ return Task[R]{
+ unsafePerform: func() *Future[R] {
+ return NewFuture[R](func(resolve func(R), reject func(error)) {
+ resolve(io.unsafePerform())
+ })
+ },
+ }
+}
+
+// Task represents a non-deterministic asynchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type Task[R any] struct {
+ unsafePerform ff0[R]
+}
+
+// Run executes the non-deterministic asynchronous computation, with side effects.
+func (t Task[R]) Run() *Future[R] {
+ return t.unsafePerform()
+}
+
+// NewTask1 instantiates a new Task1.
+func NewTask1[R any, A any](f ff1[R, A]) Task1[R, A] {
+ return Task1[R, A]{
+ unsafePerform: f,
+ }
+}
+
+// NewTaskFromIO1 instantiates a new Task1 from an existing IO1.
+func NewTaskFromIO1[R any, A any](io IO1[R, A]) Task1[R, A] {
+ return Task1[R, A]{
+ unsafePerform: func(a A) *Future[R] {
+ return NewFuture[R](func(resolve func(R), reject func(error)) {
+ resolve(io.unsafePerform(a))
+ })
+ },
+ }
+}
+
+// Task1 represents a non-deterministic asynchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type Task1[R any, A any] struct {
+ unsafePerform ff1[R, A]
+}
+
+// Run executes the non-deterministic asynchronous computation, with side effects.
+func (t Task1[R, A]) Run(a A) *Future[R] {
+ return t.unsafePerform(a)
+}
+
+// NewTask2 instantiates a new Task2.
+func NewTask2[R any, A any, B any](f ff2[R, A, B]) Task2[R, A, B] {
+ return Task2[R, A, B]{
+ unsafePerform: f,
+ }
+}
+
+// NewTaskFromIO2 instantiates a new Task2 from an existing IO2.
+func NewTaskFromIO2[R any, A any, B any](io IO2[R, A, B]) Task2[R, A, B] {
+ return Task2[R, A, B]{
+ unsafePerform: func(a A, b B) *Future[R] {
+ return NewFuture[R](func(resolve func(R), reject func(error)) {
+ resolve(io.unsafePerform(a, b))
+ })
+ },
+ }
+}
+
+// Task2 represents a non-deterministic asynchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type Task2[R any, A any, B any] struct {
+ unsafePerform ff2[R, A, B]
+}
+
+// Run executes the non-deterministic asynchronous computation, with side effects.
+func (t Task2[R, A, B]) Run(a A, b B) *Future[R] {
+ return t.unsafePerform(a, b)
+}
+
+// NewTask3 instantiates a new Task3.
+func NewTask3[R any, A any, B any, C any](f ff3[R, A, B, C]) Task3[R, A, B, C] {
+ return Task3[R, A, B, C]{
+ unsafePerform: f,
+ }
+}
+
+// NewTaskFromIO3 instantiates a new Task3 from an existing IO3.
+func NewTaskFromIO3[R any, A any, B any, C any](io IO3[R, A, B, C]) Task3[R, A, B, C] {
+ return Task3[R, A, B, C]{
+ unsafePerform: func(a A, b B, c C) *Future[R] {
+ return NewFuture[R](func(resolve func(R), reject func(error)) {
+ resolve(io.unsafePerform(a, b, c))
+ })
+ },
+ }
+}
+
+// Task3 represents a non-deterministic asynchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type Task3[R any, A any, B any, C any] struct {
+ unsafePerform ff3[R, A, B, C]
+}
+
+// Run executes the non-deterministic asynchronous computation, with side effects.
+func (t Task3[R, A, B, C]) Run(a A, b B, c C) *Future[R] {
+ return t.unsafePerform(a, b, c)
+}
+
+// NewTask4 instantiates a new Task4.
+func NewTask4[R any, A any, B any, C any, D any](f ff4[R, A, B, C, D]) Task4[R, A, B, C, D] {
+ return Task4[R, A, B, C, D]{
+ unsafePerform: f,
+ }
+}
+
+// NewTaskFromIO4 instantiates a new Task4 from an existing IO4.
+func NewTaskFromIO4[R any, A any, B any, C any, D any](io IO4[R, A, B, C, D]) Task4[R, A, B, C, D] {
+ return Task4[R, A, B, C, D]{
+ unsafePerform: func(a A, b B, c C, d D) *Future[R] {
+ return NewFuture[R](func(resolve func(R), reject func(error)) {
+ resolve(io.unsafePerform(a, b, c, d))
+ })
+ },
+ }
+}
+
+// Task4 represents a non-deterministic asynchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type Task4[R any, A any, B any, C any, D any] struct {
+ unsafePerform ff4[R, A, B, C, D]
+}
+
+// Run executes the non-deterministic asynchronous computation, with side effects.
+func (t Task4[R, A, B, C, D]) Run(a A, b B, c C, d D) *Future[R] {
+ return t.unsafePerform(a, b, c, d)
+}
+
+// NewTask5 instantiates a new Task5.
+func NewTask5[R any, A any, B any, C any, D any, E any](f ff5[R, A, B, C, D, E]) Task5[R, A, B, C, D, E] {
+ return Task5[R, A, B, C, D, E]{
+ unsafePerform: f,
+ }
+}
+
+// NewTaskFromIO5 instantiates a new Task5 from an existing IO5.
+func NewTaskFromIO5[R any, A any, B any, C any, D any, E any](io IO5[R, A, B, C, D, E]) Task5[R, A, B, C, D, E] {
+ return Task5[R, A, B, C, D, E]{
+ unsafePerform: func(a A, b B, c C, d D, e E) *Future[R] {
+ return NewFuture[R](func(resolve func(R), reject func(error)) {
+ resolve(io.unsafePerform(a, b, c, d, e))
+ })
+ },
+ }
+}
+
+// Task5 represents a non-deterministic asynchronous computation that
+// can cause side effects, yields a value of type `R` and never fails.
+type Task5[R any, A any, B any, C any, D any, E any] struct {
+ unsafePerform ff5[R, A, B, C, D, E]
+}
+
+// Run execute the non-deterministic asynchronous computation, with side effect.
+func (t Task5[R, A, B, C, D, E]) Run(a A, b B, c C, d D, e E) *Future[R] {
+ return t.unsafePerform(a, b, c, d, e)
+}
diff --git a/vendor/github.com/samber/mo/task_either.go b/vendor/github.com/samber/mo/task_either.go
new file mode 100644
index 00000000..08e23a96
--- /dev/null
+++ b/vendor/github.com/samber/mo/task_either.go
@@ -0,0 +1,56 @@
+package mo
+
+// NewTaskEither instantiates a new TaskEither.
+func NewTaskEither[R any](f ff0[R]) TaskEither[R] {
+ return TaskEither[R]{NewTask[R](f)}
+}
+
+// NewTaskEitherFromIO instantiates a new TaskEither from an existing IO.
+func NewTaskEitherFromIO[R any](io IO[R]) TaskEither[R] {
+ return TaskEither[R]{NewTaskFromIO[R](io)}
+}
+
+// TaskEither represents a non-deterministic asynchronous computation that
+// can cause side effects, yields a value of type `R` and can fail.
+type TaskEither[R any] struct {
+ Task[R]
+}
+
+// OrElse returns the value if the task succeeded, or the default value otherwise.
+func (t TaskEither[R]) OrElse(fallback R) R {
+ either := t.Run().Either()
+
+ right, isRight := either.Right()
+ if !isRight {
+ return fallback
+ }
+
+ return right
+}
+
+// Match executes the first function if the task failed and the second function if it succeeded.
+// It returns a new Either.
+func (t TaskEither[R]) Match(onLeft func(error) Either[error, R], onRight func(R) Either[error, R]) Either[error, R] {
+ either := t.Run().Either()
+ return either.Match(onLeft, onRight)
+}
+
+// TryCatch is an alias for Match.
+func (t TaskEither[R]) TryCatch(onLeft func(error) Either[error, R], onRight func(R) Either[error, R]) Either[error, R] {
+ return t.Match(onLeft, onRight)
+}
+
+// ToTask converts TaskEither to Task
+func (t TaskEither[R]) ToTask(fallback R) Task[R] {
+ return NewTask(func() *Future[R] {
+ return t.Run().
+ Catch(func(err error) (R, error) {
+ return fallback, nil
+ })
+ })
+}
+
+// ToEither converts TaskEither to Either.
+func (t TaskEither[R]) ToEither() Either[error, R] {
+ return t.Run().Either()
+}
diff --git a/vendor/github.com/samber/mo/types.go b/vendor/github.com/samber/mo/types.go
new file mode 100644
index 00000000..c85d4039
--- /dev/null
+++ b/vendor/github.com/samber/mo/types.go
@@ -0,0 +1,22 @@
+package mo
+
+type f0[R any] func() R
+type f1[R any, A any] func(A) R
+type f2[R any, A any, B any] func(A, B) R
+type f3[R any, A any, B any, C any] func(A, B, C) R
+type f4[R any, A any, B any, C any, D any] func(A, B, C, D) R
+type f5[R any, A any, B any, C any, D any, E any] func(A, B, C, D, E) R
+
+type ff0[R any] func() *Future[R]
+type ff1[R any, A any] func(A) *Future[R]
+type ff2[R any, A any, B any] func(A, B) *Future[R]
+type ff3[R any, A any, B any, C any] func(A, B, C) *Future[R]
+type ff4[R any, A any, B any, C any, D any] func(A, B, C, D) *Future[R]
+type ff5[R any, A any, B any, C any, D any, E any] func(A, B, C, D, E) *Future[R]
+
+type fe0[R any] func() (R, error)
+type fe1[R any, A any] func(A) (R, error)
+type fe2[R any, A any, B any] func(A, B) (R, error)
+type fe3[R any, A any, B any, C any] func(A, B, C) (R, error)
+type fe4[R any, A any, B any, C any, D any] func(A, B, C, D) (R, error)
+type fe5[R any, A any, B any, C any, D any, E any] func(A, B, C, D, E) (R, error)
diff --git a/vendor/github.com/samber/mo/utils.go b/vendor/github.com/samber/mo/utils.go
new file mode 100644
index 00000000..5233f671
--- /dev/null
+++ b/vendor/github.com/samber/mo/utils.go
@@ -0,0 +1,5 @@
+package mo
+
+func empty[T any]() (t T) {
+ return
+}
diff --git a/vendor/github.com/spf13/cobra/command.go b/vendor/github.com/spf13/cobra/command.go
index 9d5e9cf5..6ff47dd5 100644
--- a/vendor/github.com/spf13/cobra/command.go
+++ b/vendor/github.com/spf13/cobra/command.go
@@ -998,6 +998,10 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
// initialize completion at the last point to allow for user overriding
c.InitDefaultCompletionCmd()
+ // Now that all commands have been created, let's make sure all groups
+ // are properly created also
+ c.checkCommandGroups()
+
args := c.args
// Workaround FAIL with "go test -v" or "cobra.test -test.v", see #155
@@ -1092,6 +1096,19 @@ func (c *Command) ValidateRequiredFlags() error {
return nil
}
+// checkCommandGroups checks if a command has been added to a group that does not exist.
+// If so, we panic because it indicates a coding error that should be corrected.
+func (c *Command) checkCommandGroups() {
+ for _, sub := range c.commands {
+ // if Group is not defined let the developer know right away
+ if sub.GroupID != "" && !c.ContainsGroup(sub.GroupID) {
+ panic(fmt.Sprintf("group id '%s' is not defined for subcommand '%s'", sub.GroupID, sub.CommandPath()))
+ }
+
+ sub.checkCommandGroups()
+ }
+}
+
// InitDefaultHelpFlag adds default help flag to c.
// It is called automatically by executing the c or by calling help and usage.
// If c already has help flag, it will do nothing.
@@ -1218,10 +1235,6 @@ func (c *Command) AddCommand(cmds ...*Command) {
panic("Command can't be a child of itself")
}
cmds[i].parent = c
- // if Group is not defined let the developer know right away
- if x.GroupID != "" && !c.ContainsGroup(x.GroupID) {
- panic(fmt.Sprintf("Group id '%s' is not defined for subcommand '%s'", x.GroupID, cmds[i].CommandPath()))
- }
// update max lengths
usageLen := len(x.Use)
if usageLen > c.commandsMaxUseLen {
diff --git a/vendor/github.com/spf13/cobra/user_guide.md b/vendor/github.com/spf13/cobra/user_guide.md
index 977306aa..e55367e8 100644
--- a/vendor/github.com/spf13/cobra/user_guide.md
+++ b/vendor/github.com/spf13/cobra/user_guide.md
@@ -492,10 +492,11 @@ around it. In fact, you can provide your own if you want.
### Grouping commands in help
-Cobra supports grouping of available commands. Groups must be explicitly defined by `AddGroup` and set by
-the `GroupId` element of a subcommand. The groups will appear in the same order as they are defined.
-If you use the generated `help` or `completion` commands, you can set the group ids by `SetHelpCommandGroupId`
-and `SetCompletionCommandGroupId`, respectively.
+Cobra supports grouping of available commands in the help output. To group commands, each group must be explicitly
+defined using `AddGroup()` on the parent command. Then a subcommand can be added to a group using the `GroupID` element
+of that subcommand. The groups will appear in the help output in the same order as they are defined using different
+calls to `AddGroup()`. If you use the generated `help` or `completion` commands, you can set their group ids using
+`SetHelpCommandGroupId()` and `SetCompletionCommandGroupId()` on the root command, respectively.
### Defining your own help
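To make the grouping behaviour described above concrete, a hedged sketch of the API the guide refers to (illustration only; the command and group names are made up, and it assumes the `AddGroup`/`GroupID`/`Set*CommandGroupID` spellings exported by cobra v1.6).

```go
package main

import "github.com/spf13/cobra"

func main() {
	root := &cobra.Command{Use: "mangal"}

	// Groups must be declared on the parent; with the checkCommandGroups()
	// change above, an unknown GroupID now panics when Execute runs rather
	// than at AddCommand time.
	root.AddGroup(&cobra.Group{ID: "sources", Title: "Source commands:"})

	root.AddCommand(&cobra.Command{
		Use:     "sources",
		GroupID: "sources", // ties the subcommand to the group declared above
		Run:     func(cmd *cobra.Command, args []string) {},
	})

	// The generated help and completion commands can be grouped as well.
	root.SetHelpCommandGroupID("sources")
	root.SetCompletionCommandGroupID("sources")

	_ = root.Execute()
}
```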
diff --git a/vendor/golang.org/x/exp/slices/slices.go b/vendor/golang.org/x/exp/slices/slices.go
index a9fe63f5..0c756c46 100644
--- a/vendor/golang.org/x/exp/slices/slices.go
+++ b/vendor/golang.org/x/exp/slices/slices.go
@@ -162,6 +162,23 @@ func Delete[S ~[]E, E any](s S, i, j int) S {
return append(s[:i], s[j:]...)
}
+// Replace replaces the elements s[i:j] by the given v, and returns the
+// modified slice. Replace panics if s[i:j] is not a valid slice of s.
+func Replace[S ~[]E, E any](s S, i, j int, v ...E) S {
+ tot := len(s[:i]) + len(v) + len(s[j:])
+ if tot <= cap(s) {
+ s2 := s[:tot]
+ copy(s2[i+len(v):], s[j:])
+ copy(s2[i:], v)
+ return s2
+ }
+ s2 := make(S, tot)
+ copy(s2, s[:i])
+ copy(s2[i:], v)
+ copy(s2[i+len(v):], s[j:])
+ return s2
+}
+
// Clone returns a copy of the slice.
// The elements are copied using assignment, so this is a shallow clone.
func Clone[S ~[]E, E any](s S) S {
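For reference, the semantics of the new `Replace` helper (illustration only).

```go
package main

import (
	"fmt"

	"golang.org/x/exp/slices"
)

func main() {
	s := []int{1, 2, 3, 4, 5}

	// Replace the window s[1:3] (the elements 2 and 3) with three values;
	// the result grows because the replacement is longer than the window.
	s = slices.Replace(s, 1, 3, 20, 30, 40)

	fmt.Println(s) // [1 20 30 40 4 5]
}
```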
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 09cefb60..9a97a360 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -118,9 +118,15 @@ github.com/hhrutter/lzw
# github.com/hhrutter/tiff v0.0.0-20190829141212-736cae8d0bc7
## explicit; go 1.12
github.com/hhrutter/tiff
+# github.com/iancoleman/orderedmap v0.2.0
+## explicit
+github.com/iancoleman/orderedmap
# github.com/inconshreveable/mousetrap v1.0.1
## explicit; go 1.18
github.com/inconshreveable/mousetrap
+# github.com/invopop/jsonschema v0.6.0
+## explicit; go 1.16
+github.com/invopop/jsonschema
# github.com/ivanpirog/coloredcobra v1.0.1
## explicit; go 1.15
github.com/ivanpirog/coloredcobra
@@ -136,6 +142,9 @@ github.com/kballard/go-shellquote
# github.com/kennygrant/sanitize v1.2.4
## explicit
github.com/kennygrant/sanitize
+# github.com/lithammer/fuzzysearch v1.1.5
+## explicit; go 1.15
+github.com/lithammer/fuzzysearch/fuzzy
# github.com/lucasb-eyer/go-colorful v1.2.0
## explicit; go 1.12
github.com/lucasb-eyer/go-colorful
@@ -154,7 +163,7 @@ github.com/mattn/go-localereader
# github.com/mattn/go-runewidth v0.0.14
## explicit; go 1.9
github.com/mattn/go-runewidth
-# github.com/metafates/mangal-lua-libs v0.4.1
+# github.com/metafates/mangal-lua-libs v0.4.2
## explicit; go 1.18
github.com/metafates/mangal-lua-libs
github.com/metafates/mangal-lua-libs/base64
@@ -246,6 +255,9 @@ github.com/saintfish/chardet
# github.com/samber/lo v1.33.0
## explicit; go 1.18
github.com/samber/lo
+# github.com/samber/mo v1.5.1
+## explicit; go 1.18
+github.com/samber/mo
# github.com/sirupsen/logrus v1.9.0
## explicit; go 1.13
github.com/sirupsen/logrus
@@ -268,7 +280,7 @@ github.com/spf13/afero/mem
# github.com/spf13/cast v1.5.0
## explicit; go 1.18
github.com/spf13/cast
-# github.com/spf13/cobra v1.6.0
+# github.com/spf13/cobra v1.6.1
## explicit; go 1.15
github.com/spf13/cobra
# github.com/spf13/jwalterweatherman v1.1.0
@@ -314,7 +326,7 @@ github.com/yuin/gopher-lua
github.com/yuin/gopher-lua/ast
github.com/yuin/gopher-lua/parse
github.com/yuin/gopher-lua/pm
-# golang.org/x/exp v0.0.0-20221019170559-20944726eadf
+# golang.org/x/exp v0.0.0-20221028150844-83b7d23a625f
## explicit; go 1.18
golang.org/x/exp/constraints
golang.org/x/exp/slices
diff --git a/version/compare.go b/version/compare.go
new file mode 100644
index 00000000..11f31162
--- /dev/null
+++ b/version/compare.go
@@ -0,0 +1,45 @@
+package version
+
+import (
+ "fmt"
+ "github.com/samber/lo"
+ "strings"
+)
+
+func Compare(a, b string) (int, error) {
+ type version struct {
+ major, minor, patch int
+ }
+
+ parse := func(s string) (version, error) {
+ var v version
+ _, err := fmt.Sscanf(strings.TrimPrefix(s, "v"), "%d.%d.%d", &v.major, &v.minor, &v.patch)
+ return v, err
+ }
+
+ av, err := parse(a)
+ if err != nil {
+ return 0, err
+ }
+
+ bv, err := parse(b)
+ if err != nil {
+ return 0, err
+ }
+
+ for _, pair := range []lo.Tuple2[int, int]{
+ {av.major, bv.major},
+ {av.minor, bv.minor},
+ {av.patch, bv.patch},
+ } {
+ if pair.A > pair.B {
+ return 1, nil
+ }
+
+ if pair.A < pair.B {
+ return -1, nil
+ }
+ }
+
+ return 0, nil
+}
diff --git a/version/compare_test.go b/version/compare_test.go
new file mode 100644
index 00000000..a6468741
--- /dev/null
+++ b/version/compare_test.go
@@ -0,0 +1,90 @@
+package version
+
+import (
+ . "github.com/smartystreets/goconvey/convey"
+ "testing"
+)
+
+func TestCompareVersions(t *testing.T) {
+ Convey("Given two versions with different patches", t, func() {
+ v1, v2 := "1.0.0", "1.0.1"
+ Convey("When comparing "+v1+" to "+v2, func() {
+ result, err := Compare(v1, v2)
+ Convey("Error should be nil", func() {
+ So(err, ShouldBeNil)
+ Convey("Then the result should be -1", func() {
+ So(result, ShouldEqual, -1)
+ })
+ })
+ })
+
+ Convey("When comparing "+v2+" to "+v1, func() {
+ result, err := Compare(v2, v1)
+ Convey("Error should be nil", func() {
+ So(err, ShouldBeNil)
+ Convey("Then the result should be 1", func() {
+ So(result, ShouldEqual, 1)
+ })
+ })
+ })
+ })
+
+ Convey("Given two versions with different minor versions", t, func() {
+ v1, v2 := "1.0.0", "1.1.0"
+ Convey("When comparing "+v1+" to "+v2, func() {
+ result, err := Compare(v1, v2)
+ Convey("Error should be nil", func() {
+ So(err, ShouldBeNil)
+ Convey("Then the result should be -1", func() {
+ So(result, ShouldEqual, -1)
+ })
+ })
+ })
+
+ Convey("When comparing "+v2+" to "+v1, func() {
+ result, err := Compare(v2, v1)
+ Convey("Error should be nil", func() {
+ So(err, ShouldBeNil)
+ Convey("Then the result should be 1", func() {
+ So(result, ShouldEqual, 1)
+ })
+ })
+ })
+ })
+
+ Convey("Given two versions with different major versions", t, func() {
+ v1, v2 := "1.0.0", "2.0.0"
+ Convey("When comparing "+v1+" to "+v2, func() {
+ result, err := Compare(v1, v2)
+ Convey("Error should be nil", func() {
+ So(err, ShouldBeNil)
+ Convey("Then the result should be -1", func() {
+ So(result, ShouldEqual, -1)
+ })
+ })
+ })
+
+ Convey("When comparing "+v2+" to "+v1, func() {
+ result, err := Compare(v2, v1)
+ Convey("Error should be nil", func() {
+ So(err, ShouldBeNil)
+ Convey("Then the result should be 1", func() {
+ So(result, ShouldEqual, 1)
+ })
+ })
+ })
+ })
+
+ Convey("Given two same versions", t, func() {
+ v1, v2 := "1.0.0", "1.0.0"
+ Convey("When comparing "+v1+" to "+v2, func() {
+ result, err := Compare(v1, v2)
+ Convey("Error should be nil", func() {
+ So(err, ShouldBeNil)
+ Convey("Then the result should be 0", func() {
+ So(result, ShouldEqual, 0)
+ })
+ })
+ })
+ })
+}
diff --git a/version/notify.go b/version/notify.go
new file mode 100644
index 00000000..f6023d47
--- /dev/null
+++ b/version/notify.go
@@ -0,0 +1,34 @@
+package version
+
+import (
+ "fmt"
+ "github.com/metafates/mangal/color"
+ "github.com/metafates/mangal/constant"
+ "github.com/metafates/mangal/icon"
+ "github.com/metafates/mangal/style"
+ "github.com/metafates/mangal/util"
+)
+
+func Notify() {
+ erase := util.PrintErasable(fmt.Sprintf("%s Checking if new version is available...", icon.Get(icon.Progress)))
+ version, err := Latest()
+ erase()
+ if err != nil {
+ return
+ }
+ if comp, err := Compare(version, constant.Version); err != nil || comp <= 0 {
+ return
+ }
+
+ fmt.Printf(`
+%s New version is available %s %s
+%s
+
+`,
+ style.Fg(color.Green)("▇▇▇"),
+ style.Bold(version),
+ style.Faint(fmt.Sprintf("(You're on %s)", constant.Version)),
+ style.Faint("https://github.com/metafates/mangal/releases/tag/v"+version),
+ )
+
+}
diff --git a/version/version.go b/version/version.go
new file mode 100644
index 00000000..8838f59b
--- /dev/null
+++ b/version/version.go
@@ -0,0 +1,51 @@
+package version
+
+import (
+ "encoding/json"
+ "errors"
+ "github.com/metafates/mangal/cache"
+ "github.com/metafates/mangal/util"
+ "github.com/metafates/mangal/where"
+ "github.com/samber/mo"
+ "net/http"
+ "path/filepath"
+ "time"
+)
+
+var versionCacher = cache.New[string](filepath.Join(where.Cache(), "version.json"), &cache.Options{
+ ExpireEvery: mo.Some(time.Hour * 24),
+})
+
+// Latest returns the latest version of mangal.
+// It will fetch the latest version from the GitHub API.
+func Latest() (version string, err error) {
+ if ver := versionCacher.Get(); ver.IsPresent() {
+ return ver.MustGet(), nil
+ }
+
+ resp, err := http.Get("https://api.github.com/repos/metafates/mangal/releases/latest")
+ if err != nil {
+ return
+ }
+
+ defer util.Ignore(resp.Body.Close)
+
+ var release struct {
+ TagName string `json:"tag_name"`
+ }
+
+ err = json.NewDecoder(resp.Body).Decode(&release)
+ if err != nil {
+ return
+ }
+
+ if release.TagName == "" {
+ err = errors.New("empty tag name")
+ return
+ }
+
+ // strip the "v" prefix from the tag name
+ version = release.TagName[1:]
+ _ = versionCacher.Set(version)
+ return
+}
diff --git a/updater/version_test.go b/version/version_test.go
similarity index 93%
rename from updater/version_test.go
rename to version/version_test.go
index 817870b4..af4ef1e7 100644
--- a/updater/version_test.go
+++ b/version/version_test.go
@@ -1,4 +1,4 @@
-package updater
+package version
import (
"github.com/metafates/mangal/constant"
@@ -15,7 +15,7 @@ func TestLatestVersion(t *testing.T) {
}
Convey("When getting the latest version", t, func() {
- version, err := LatestVersion()
+ version, err := Latest()
Convey("It should not return an error", func() {
So(err, ShouldBeNil)
diff --git a/where/where.go b/where/where.go
index af571fed..75da21fc 100644
--- a/where/where.go
+++ b/where/where.go
@@ -38,28 +38,26 @@ func Sources() string {
return mkdir(filepath.Join(Config(), "sources"))
}
+func AnilistBinds() string {
+ return filepath.Join(Config(), "anilist.json")
+}
+
// Logs path
// Will create the directory if it doesn't exist
func Logs() string {
return mkdir(filepath.Join(Config(), "logs"))
}
+// Queries path
+// Will create the directory if it doesn't exist
+func Queries() string {
+ return filepath.Join(Cache(), "queries.json")
+}
+
// History path to the file
// Will create the directory if it doesn't exist
func History() string {
- genericCacheDir, err := os.UserCacheDir()
- if err != nil {
- genericCacheDir = "."
- }
-
- path := filepath.Join(genericCacheDir, constant.CachePrefix+"history.json")
-
- exists := lo.Must(filesystem.Api().Exists(path))
- if !exists {
- lo.Must0(filesystem.Api().WriteFile(path, []byte("{}"), os.ModePerm))
- }
-
- return path
+ return filepath.Join(Config(), "history.json")
}
// Downloads path
@@ -80,18 +78,18 @@ func Downloads() string {
// Cache path
// Will create the directory if it doesn't exist
func Cache() string {
- genericCacheDir, err := os.UserCacheDir()
+ cacheDir, err := os.UserCacheDir()
if err != nil {
- genericCacheDir = "."
+ cacheDir = filepath.Join(".", "cache")
}
- cacheDir := filepath.Join(genericCacheDir, constant.CachePrefix)
+ cacheDir = filepath.Join(cacheDir, constant.Mangal)
return mkdir(cacheDir)
}
// Temp path
// Will create the directory if it doesn't exist
func Temp() string {
- tempDir := filepath.Join(os.TempDir(), constant.TempPrefix)
+ tempDir := filepath.Join(os.TempDir(), constant.Mangal)
return mkdir(tempDir)
}