From 564c5475c2f4d17e33948b30a849a267fda49ff9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois=20Chastanet?= Date: Sat, 28 Dec 2024 07:52:48 +0100 Subject: [PATCH] Maintenance bash-tools-framework 6.1.1 - Update GitHub workflows cron scheduling - Update GitHub workflows versions - bash-compiler uses DEFAULT_TEMPLATE_FOLDER - recompiled all binaries to use bash-tools-framework 6.1.1 - updated pre-commit versions - fixed issues found by megalinter --- .bash-compiler | 2 +- .cspell/bash.txt | 4 --- .cspell/config.txt | 9 ------ .cspell/dirColors.txt | 3 -- .cspell/loremIpsum.txt | 4 --- .cspell/plantUml.txt | 1 - .cspell/postman.txt | 1 - .cspell/readme.txt | 3 -- .cspell/softwares.txt | 31 ++----------------- .github/dependabot.yml | 4 ++- .github/workflows/docsify-gh-pages.yml | 6 ++-- .github/workflows/lint-test.yml | 20 ++++++------ .github/workflows/precommit-autoupdate.yml | 6 ++-- .../set-github-status-on-pr-approved.yml | 1 + .grype.yaml | 2 +- .lycheeignore | 1 + .mega-linter-githubAction.yml | 2 +- .mega-linter.yml | 10 ++++++ .pre-commit-config-github.yaml | 16 ++++++++-- .pre-commit-config.yaml | 16 ++++++++-- .proselintrc | 4 ++- .proselintrc.json | 5 +++ Commands.tmpl.md | 2 +- LICENSE | 2 +- bin/cli | 15 +++++++-- bin/dbImport | 13 +++++++- bin/dbImportProfile | 13 +++++++- bin/dbImportStream | 13 +++++++- bin/dbQueryAllDatabases | 13 +++++++- bin/dbScriptAllDatabases | 13 +++++++- bin/doc | 13 +++++++- bin/gitIsAncestorOf | 13 +++++++- bin/gitIsBranch | 13 +++++++- bin/gitRenameBranch | 13 +++++++- bin/installRequirements | 13 +++++++- bin/mysql2puml | 13 +++++++- bin/postmanCli | 13 +++++++- bin/upgradeGithubRelease | 13 +++++++- bin/waitForIt | 13 +++++++- bin/waitForMysql | 13 +++++++- conf/dbScripts/extractData | 13 +++++++- .../MongoDBDataAPI.postman_collection.json | 2 +- install | 13 +++++++- src/_binaries/Docker/cli/cli-options.sh | 2 +- .../Docker/cli/testsData/cli.help.txt | 2 +- 45 files changed, 295 insertions(+), 102 deletions(-) create mode 100644 .proselintrc.json diff --git a/.bash-compiler b/.bash-compiler index 825ed727..8e514680 100644 --- a/.bash-compiler +++ b/.bash-compiler @@ -1,3 +1,3 @@ FRAMEWORK_ROOT_DIR=${ROOT_DIR}/vendor/bash-tools-framework BASH_TOOLS_ROOT_DIR=${ROOT_DIR} -TEMPLATES_ROOT_DIR=${HOME}/fchastanet/bash-compiler/examples/templates +TEMPLATES_ROOT_DIR=${DEFAULT_TEMPLATE_FOLDER} diff --git a/.cspell/bash.txt b/.cspell/bash.txt index c1540018..20990329 100644 --- a/.cspell/bash.txt +++ b/.cspell/bash.txt @@ -1,5 +1,4 @@ ABRT -ABRT addgroup asort AUTOCOMMIT @@ -23,7 +22,6 @@ Datash datetime DELIMS deps -Deps dhclient DISTRO distutils @@ -60,7 +58,6 @@ keychain KHTML killall lastpipe -lastpipe libkrb libloadandcheckconfigsh loadandcheckconfig @@ -76,7 +73,6 @@ newuser noargs nohup noninteractive -NONINTERACTIVE nullglob onbuild PATHCONV diff --git a/.cspell/config.txt b/.cspell/config.txt index 99df9c4d..bf0e702c 100644 --- a/.cspell/config.txt +++ b/.cspell/config.txt @@ -59,7 +59,6 @@ gtid heightwhenhidden histappend HISTCONTROL -HISTCONTROL HISTFILESIZE HISTIGNORE HISTSIZE @@ -91,8 +90,6 @@ maximised mcrypt mdformat megalinter -megalinter -meslo Meslo Msys mthis @@ -117,7 +114,6 @@ oldmode oneline ovpn oxsecurity -oxsecurity pagedown pageup Parens @@ -127,15 +123,12 @@ pcre pgdn pgup Phar -phar phing phpcollection phpcolors -PHPCS phpcs phpcsfixer PHPDOC -PHPDOC phpinfo phpmailer phpoption @@ -189,7 +182,6 @@ tigrc tintcolor Titlebar twosuperior -UNINDEXED unindexed unstaged upgrader @@ -201,7 +193,6 @@ wddx wekyb widthtype WORKON
-WSLCONFIG wslconfig xapi XDMCP diff --git a/.cspell/dirColors.txt b/.cspell/dirColors.txt index 14ee2122..3716ca41 100644 --- a/.cspell/dirColors.txt +++ b/.cspell/dirColors.txt @@ -5,7 +5,6 @@ divx dtterm dvtm eterm -Eterm fbterm flac jfbterm @@ -17,9 +16,7 @@ Rakefile rmvb rxvt setgid -SETGID setuid -SETUID svgz webm webp diff --git a/.cspell/loremIpsum.txt b/.cspell/loremIpsum.txt index c461d347..cf7c757e 100644 --- a/.cspell/loremIpsum.txt +++ b/.cspell/loremIpsum.txt @@ -2,7 +2,6 @@ abitur adipiscing aliquet amet -amet commodo condimentum consectetur @@ -16,10 +15,8 @@ et fini finibus id -id in ipsum -ipsum libero lobortis Lorem @@ -35,6 +32,5 @@ quis sapien semper sit -sit turpis vel diff --git a/.cspell/plantUml.txt b/.cspell/plantUml.txt index ab6ee05e..f4719b85 100644 --- a/.cspell/plantUml.txt +++ b/.cspell/plantUml.txt @@ -1,5 +1,4 @@ endfunction -endfunction enduml nullable skinparam diff --git a/.cspell/postman.txt b/.cspell/postman.txt index 4dcf9bb9..25ee28de 100644 --- a/.cspell/postman.txt +++ b/.cspell/postman.txt @@ -1,5 +1,4 @@ clairefro -EJSON ejson octocat prerequest diff --git a/.cspell/readme.txt b/.cspell/readme.txt index 13ba2d06..9f23ffa9 100644 --- a/.cspell/readme.txt +++ b/.cspell/readme.txt @@ -17,14 +17,12 @@ endfunction enduml fchastanet functionsretryparameterized -functionsretryparameterized installationconfiguration installdir installfile Jetbrains libassertsh libloadandcheckconfigsh -libloadandcheckconfigsh libutilssh loadandcheckconfig loadprofile @@ -33,7 +31,6 @@ markdownlint openapi PATHCONV Powerlevel -powerlevel proot refactorings Scrasnups diff --git a/.cspell/softwares.txt b/.cspell/softwares.txt index 68d3bb93..d117e0e8 100644 --- a/.cspell/softwares.txt +++ b/.cspell/softwares.txt @@ -3,12 +3,8 @@ actionlint adduser alacritty anacron -Anacron awscli -Awscli awsume -Awsume -AWSUME baincd Bashtools buildkit @@ -34,17 +30,13 @@ dotnet dpkg ecma EJSON -ejson enduml ETAG fasd -Fasd fchastanet gdebi getent github -GITHUB -Github gitleaks gofmt golxdebindings @@ -62,26 +54,18 @@ iconify Inno jekyll jetbrains -Jetbrains -JETBRAINS keygen keyscan -KICS kics konsole kterm kube -Kube kubectl kubectx -Kubectx kubens kubeps -Kubeps kubernetes -Kubernetes kubie -Kubie lastname launchbar lesspipe @@ -99,8 +83,6 @@ ltrim lubuntu lxappearance lxde -Lxde -LXDE lxpanel lxsession lxterminal @@ -110,30 +92,23 @@ mattrose mbstring megalinter minikube -Minikube mlocate -Mlocate MSYS MULTIPARTS mypy mysql -MYSQL -mysqldump mysqldump mysqlshow nojekyll obconf octocat -octocat openbox pcmanfm phpmd phpstorm -Phpstorm +pipx pixbuf plantuml -Plantuml -plantuml podman powerlevel Powerline @@ -161,14 +136,12 @@ tzdata updatedb usermod vcxsrv -Vcxsrv venv virtualenv wholename wincmd winpty wslg -Wslg wslpath wslvar xcalc @@ -180,4 +153,4 @@ xorg xsession Xsrv xvfb -Xvfb +yapf diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 9ef12bc8..9e670394 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -6,6 +6,8 @@ updates: directory: "/" schedule: # Check for updates to GitHub Actions every week - interval: "weekly" + interval: "monthly" day: "friday" + time: "22:00" + timezone: "Europe/Paris" open-pull-requests-limit: 1 diff --git a/.github/workflows/docsify-gh-pages.yml b/.github/workflows/docsify-gh-pages.yml index e82f4a50..749be702 100644 --- a/.github/workflows/docsify-gh-pages.yml +++ b/.github/workflows/docsify-gh-pages.yml @@ -1,5 +1,5 @@ --- -# kics-scan disable=555ab8f9-2001-455e-a077-f2d0f41e2fb9 +# kics-scan 
disable=555ab8f9-2001-455e-a077-f2d0f41e2fb9,555ab8f9-2001-455e-a077-f2d0f41e2fb9 # build and deploy Docsify site to GitHub Pages name: Deploy Docsify @@ -16,6 +16,7 @@ concurrency: group: "pages" cancel-in-progress: true +permissions: read-all jobs: # Build job build: @@ -42,7 +43,7 @@ jobs: ./bin/doc --ci - name: Setup Pages - uses: actions/configure-pages@v4 + uses: actions/configure-pages@v5 - name: Upload artifact uses: actions/upload-pages-artifact@v3 @@ -58,6 +59,7 @@ jobs: needs: build permissions: # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages pages: write # to deploy to Pages + # kics-scan disable=CWE-798 id-token: write # to verify the deployment originates from an appropriate source actions: read # actions: read needed by actions/deploy-pages diff --git a/.github/workflows/lint-test.yml b/.github/workflows/lint-test.yml index a06b11d4..e600b83f 100644 --- a/.github/workflows/lint-test.yml +++ b/.github/workflows/lint-test.yml @@ -1,4 +1,5 @@ --- +# kics-scan disable=555ab8f9-2001-455e-a077-f2d0f41e2fb9,555ab8f9-2001-455e-a077-f2d0f41e2fb9 # Lint the code base and launch unit test at each push or pull request name: Lint and test on: # yamllint disable-line rule:truthy @@ -34,6 +35,7 @@ env: COND_UPDATED_SOURCES: false COND_APPLY_FIXES_NEEDED: false +permissions: read-all jobs: # ------------------------------------------------------- # Pre-commit @@ -49,7 +51,7 @@ jobs: # needed by megalinter pull-requests: write steps: - - uses: akatov/commit-status-updater@a9e988ec5454692ff7745a509452422a35172ad6 + - uses: ouzi-dev/commit-status-updater@v2 with: name: build-bash-tools status: pending @@ -81,7 +83,7 @@ jobs: app_id: ${{ secrets.APP_ID }} private_key: ${{ secrets.APP_PRIVATE_KEY }} - - uses: akatov/commit-status-updater@a9e988ec5454692ff7745a509452422a35172ad6 + - uses: ouzi-dev/commit-status-updater@v2 with: name: pre-commit-megalinter status: pending @@ -121,10 +123,10 @@ jobs: id: ml if: ${{ always() }} # You can override MegaLinter flavor used to have faster performances - # More info at https://megalinter.io/flavors/ + # More info at https://megalinter.io/latest/flavors/ uses: oxsecurity/megalinter/flavors/terraform@v8 # All available variables are described in documentation - # https://megalinter.io/configuration/ + # https://megalinter.io/latest/configuration/ env: # Validates all source when push on master, # else just the git diff with master. 
@@ -171,7 +173,7 @@ jobs: env.COND_UPDATED_SOURCES == 'true' && env.COND_APPLY_FIXES_NEEDED == 'true' && !contains(github.event.head_commit.message, 'skip fix') - uses: peter-evans/create-pull-request@v6 + uses: peter-evans/create-pull-request@v7 with: token: ${{ steps.generate-token.outputs.token }} committer: fchastanet @@ -191,7 +193,7 @@ jobs: echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}" - - uses: akatov/commit-status-updater@a9e988ec5454692ff7745a509452422a35172ad6 + - uses: ouzi-dev/commit-status-updater@v2 if: ${{ always() }} with: name: pre-commit-megalinter @@ -246,7 +248,7 @@ jobs: app_id: ${{ secrets.APP_ID }} private_key: ${{ secrets.APP_PRIVATE_KEY }} - - uses: akatov/commit-status-updater@a9e988ec5454692ff7745a509452422a35172ad6 + - uses: ouzi-dev/commit-status-updater@v2 with: name: unit-tests-${{matrix.vendor}}-${{matrix.bashTarVersion}} status: pending @@ -315,7 +317,7 @@ jobs: path: | logs/** - - uses: akatov/commit-status-updater@a9e988ec5454692ff7745a509452422a35172ad6 + - uses: ouzi-dev/commit-status-updater@v2 with: name: unit-tests-${{matrix.vendor}}-${{matrix.bashTarVersion}} status: ${{ job.status }} @@ -334,7 +336,7 @@ jobs: # You can get the conclusion via env (env.WORKFLOW_CONCLUSION) - uses: AbsoLouie/workflow-conclusion-status@v1.0.2 - - uses: akatov/commit-status-updater@a9e988ec5454692ff7745a509452422a35172ad6 + - uses: ouzi-dev/commit-status-updater@v2 with: name: build-bash-tools # neutral, success, skipped, cancelled, timed_out, action_required, failure diff --git a/.github/workflows/precommit-autoupdate.yml b/.github/workflows/precommit-autoupdate.yml index bdc986d6..565e1f80 100644 --- a/.github/workflows/precommit-autoupdate.yml +++ b/.github/workflows/precommit-autoupdate.yml @@ -1,11 +1,13 @@ --- +# kics-scan disable=555ab8f9-2001-455e-a077-f2d0f41e2fb9,555ab8f9-2001-455e-a077-f2d0f41e2fb9 # Check if precommit packages need to be updated and create PR if this is the case name: Pre-commit auto-update on: # yamllint disable-line rule:truthy workflow_dispatch: schedule: # https://crontab.cronhub.io/ - - cron: "30 10 * * *" + - cron: "30 22 1-7 */3 FRI" +permissions: read-all jobs: auto-update: runs-on: ubuntu-22.04 @@ -37,7 +39,7 @@ jobs: private_key: ${{ secrets.APP_PRIVATE_KEY }} - name: Create Pull Request - uses: peter-evans/create-pull-request@v6 + uses: peter-evans/create-pull-request@v7 with: token: ${{ steps.generate-token.outputs.token }} committer: fchastanet diff --git a/.github/workflows/set-github-status-on-pr-approved.yml b/.github/workflows/set-github-status-on-pr-approved.yml index 1c01d922..c7799ddb 100644 --- a/.github/workflows/set-github-status-on-pr-approved.yml +++ b/.github/workflows/set-github-status-on-pr-approved.yml @@ -1,4 +1,5 @@ --- +# kics-scan disable=555ab8f9-2001-455e-a077-f2d0f41e2fb9,555ab8f9-2001-455e-a077-f2d0f41e2fb9 # set git commit status when PR is approved name: Set PR approved git status on: # yamllint disable-line rule:truthy diff --git a/.grype.yaml b/.grype.yaml index 1e099512..76c1d2e7 100644 --- a/.grype.yaml +++ b/.grype.yaml @@ -49,7 +49,7 @@ fail-on-severity: "high" # enable: false # maven: # search-upstream-by-sha1: true -# base-url: https://search.maven.org/solrsearch/select +# base-url: https://central.sonatype.com/artifact/org.testcontainers/solr # db: # check for database updates on execution diff --git a/.lycheeignore b/.lycheeignore index 089d95dd..25916286 100644 --- a/.lycheeignore +++ 
b/.lycheeignore @@ -14,3 +14,4 @@ https://github.com/docker/docker-credential-helpers/releases/download/v@version@ https://github.com/Versent/saml2aws/releases/download/v@version@/saml2aws_@version@_linux_amd64.tar.gz https://github.com/fchastanet/bash-tools/tree/master/src/_binaries* Commands.md +https://www.gnu.org/software/parallel/ diff --git a/.mega-linter-githubAction.yml b/.mega-linter-githubAction.yml index e28dff17..55d1ab02 100644 --- a/.mega-linter-githubAction.yml +++ b/.mega-linter-githubAction.yml @@ -1,7 +1,7 @@ --- EXTENDS: - .mega-linter.yml -DISABLE: +DISABLE_LINTERS: - BASH_SHELLCHECK SHOW_ELAPSED_TIME: false VALIDATE_ALL_CODEBASE: true diff --git a/.mega-linter.yml b/.mega-linter.yml index 2aadebe3..ecc569a1 100644 --- a/.mega-linter.yml +++ b/.mega-linter.yml @@ -129,6 +129,16 @@ SPELL_FILTER_REGEX_EXCLUDE: | \.idea/ ) +SPELL_PROSELINT_FILTER_REGEX_EXCLUDE: | + (?x)( + ^\.git/| + ^\.history/| + ^\.idea/| + ^pages/_sidebar.md| + /testsData/| + ^.cspell/config.txt$ + ) + MARKDOWN_MARKDOWN_LINK_CHECK_FILTER_REGEX_EXCLUDE: | (?x)( report/ diff --git a/.pre-commit-config-github.yaml b/.pre-commit-config-github.yaml index 421ced7d..1493ab4f 100644 --- a/.pre-commit-config-github.yaml +++ b/.pre-commit-config-github.yaml @@ -9,6 +9,16 @@ default_stages: [pre-commit, manual] minimum_pre_commit_version: 3.5.0 fail_fast: false repos: + # enables pre-commit to install the hooks using pipx + - repo: https://github.com/pypa/pipx + rev: 1.7.1 + hooks: + - id: pipx + alias: yapf + name: yapf + args: ["yapf", "-i"] + types: ["python"] + - repo: local # this hook is not intended to be run on github # it just allows to generate the same pre-commit @@ -25,7 +35,7 @@ repos: - repo: https://github.com/executablebooks/mdformat # Do this before other tools "fixing" the line endings - rev: 0.7.19 + rev: 0.7.21 hooks: - id: mdformat name: Format Markdown @@ -161,7 +171,7 @@ repos: exclude: /testsData/ - repo: https://github.com/fchastanet/bash-tools-framework - rev: 6.0.0 + rev: 6.1.1 hooks: - id: fixShebangExecutionBit - id: awkLint @@ -210,6 +220,6 @@ repos: stages: [manual] # GITHUB - repo: https://github.com/fchastanet/bash-compiler - rev: v3.0.0 + rev: 3.1.2 hooks: - id: buildBashBinaries diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f308bbfb..463f8ef9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,6 +4,16 @@ default_stages: [pre-commit, manual] minimum_pre_commit_version: 3.5.0 fail_fast: true repos: + # enables pre-commit to install the hooks using pipx + - repo: https://github.com/pypa/pipx + rev: 1.7.1 + hooks: + - id: pipx + alias: yapf + name: yapf + args: ["yapf", "-i"] + types: ["python"] + - repo: local # this hook is not intended to be run on github # it just allows to generate the same pre-commit @@ -20,7 +30,7 @@ repos: - repo: https://github.com/executablebooks/mdformat # Do this before other tools "fixing" the line endings - rev: 0.7.19 + rev: 0.7.21 hooks: - id: mdformat name: Format Markdown @@ -156,7 +166,7 @@ repos: exclude: /testsData/ - repo: https://github.com/fchastanet/bash-tools-framework - rev: 6.0.0 + rev: 6.1.1 hooks: - id: fixShebangExecutionBit - id: awkLint @@ -205,6 +215,6 @@ repos: stages: [] # GITHUB - repo: https://github.com/fchastanet/bash-compiler - rev: v3.0.0 + rev: 3.1.2 hooks: - id: buildBashBinaries diff --git a/.proselintrc b/.proselintrc index 6fe6418a..8b822d26 100644 --- a/.proselintrc +++ b/.proselintrc @@ -1,5 +1,7 @@ { "checks": { - "typography.diacritical_marks": false + 
"typography.diacritical_marks": false, + "typography.symbols.curly_quotes": false, + "typography.symbols.copyright": false } } diff --git a/.proselintrc.json b/.proselintrc.json new file mode 100644 index 00000000..9eb2d04f --- /dev/null +++ b/.proselintrc.json @@ -0,0 +1,5 @@ +{ + "checks": { + "typography.symbols": false + } +} diff --git a/Commands.tmpl.md b/Commands.tmpl.md index 05797617..9007a610 100644 --- a/Commands.tmpl.md +++ b/Commands.tmpl.md @@ -107,7 +107,7 @@ Plantuml diagram generated using plantuml software, here an example of resulting diagram -![resulting database diagram](src/_binaries/Converters/testsData/mysql2puml-model.png) +![resulting database diagram](src/_binaries/Converters/mysql2puml/testsData/mysql2puml-model.png) ## 3. Git tools diff --git a/LICENSE b/LICENSE index 432c30a1..7a5c3644 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2023 François Chastanet +Copyright © 2023 François Chastanet Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/bin/cli b/bin/cli index f329b68b..fee28490 100755 --- a/bin/cli +++ b/bin/cli @@ -801,6 +801,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -809,7 +810,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 @@ -1585,7 +1596,7 @@ longDescriptionFunction() { echo echo -e "${__HELP_TITLE}EXAMPLES:${__HELP_EXAMPLE}" echo -e " to connect to mysql container in bash mode with user mysql" - echo -e " ${SCRIPT_NAME} mysql mysql '/bin/bash'" + echo -e " ${SCRIPT_NAME} mysql-container-name mysql '/bin/bash'" echo -e " to connect to web container with user root" echo -e " ${SCRIPT_NAME} web root" echo -e "${__HELP_NORMAL}" diff --git a/bin/dbImport b/bin/dbImport index 48705d79..1807ba5b 100755 --- a/bin/dbImport +++ b/bin/dbImport @@ -1031,6 +1031,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -1039,7 +1040,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/dbImportProfile b/bin/dbImportProfile index 5a81bafc..a86b53b1 100755 --- a/bin/dbImportProfile +++ b/bin/dbImportProfile @@ -910,6 +910,7 
@@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -918,7 +919,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/dbImportStream b/bin/dbImportStream index 4cd87707..29007f2c 100755 --- a/bin/dbImportStream +++ b/bin/dbImportStream @@ -938,6 +938,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -946,7 +947,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/dbQueryAllDatabases b/bin/dbQueryAllDatabases index 7cf6d504..de3b476b 100755 --- a/bin/dbQueryAllDatabases +++ b/bin/dbQueryAllDatabases @@ -988,6 +988,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -996,7 +997,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/dbScriptAllDatabases b/bin/dbScriptAllDatabases index 61572dab..cc13251e 100755 --- a/bin/dbScriptAllDatabases +++ b/bin/dbScriptAllDatabases @@ -890,6 +890,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -898,7 +899,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + 
deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/doc b/bin/doc index 020cb164..f2edb486 100755 --- a/bin/doc +++ b/bin/doc @@ -728,6 +728,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -736,7 +737,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/gitIsAncestorOf b/bin/gitIsAncestorOf index 02879ac5..9673cdb2 100755 --- a/bin/gitIsAncestorOf +++ b/bin/gitIsAncestorOf @@ -571,6 +571,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -579,7 +580,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/gitIsBranch b/bin/gitIsBranch index 8572d4e5..caa4c10c 100755 --- a/bin/gitIsBranch +++ b/bin/gitIsBranch @@ -571,6 +571,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -579,7 +580,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/gitRenameBranch b/bin/gitRenameBranch index ea236017..e17340c7 100755 --- a/bin/gitRenameBranch +++ b/bin/gitRenameBranch @@ -571,6 +571,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -579,7 +580,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) 
delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/installRequirements b/bin/installRequirements index 8f9ee853..6172f0a3 100755 --- a/bin/installRequirements +++ b/bin/installRequirements @@ -601,6 +601,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -609,7 +610,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/mysql2puml b/bin/mysql2puml index 1b89a0bf..96620a01 100755 --- a/bin/mysql2puml +++ b/bin/mysql2puml @@ -732,6 +732,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -740,7 +741,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/postmanCli b/bin/postmanCli index 3d6a6f88..26c04425 100755 --- a/bin/postmanCli +++ b/bin/postmanCli @@ -570,6 +570,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -578,7 +579,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/upgradeGithubRelease b/bin/upgradeGithubRelease index ad4433e1..11fc332d 100755 --- a/bin/upgradeGithubRelease +++ b/bin/upgradeGithubRelease @@ -709,6 +709,7 @@ Log::computeDuration() { if 
((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -717,7 +718,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/waitForIt b/bin/waitForIt index e9d6e8e5..e262709d 100755 --- a/bin/waitForIt +++ b/bin/waitForIt @@ -578,6 +578,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -586,7 +587,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/bin/waitForMysql b/bin/waitForMysql index 95404813..37a7e4a5 100755 --- a/bin/waitForMysql +++ b/bin/waitForMysql @@ -561,6 +561,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -569,7 +570,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/conf/dbScripts/extractData b/conf/dbScripts/extractData index a461ca80..f802cbad 100755 --- a/conf/dbScripts/extractData +++ b/conf/dbScripts/extractData @@ -827,6 +827,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -835,7 +836,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + 
fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/conf/postmanCli/MongoDbData/MongoDBDataAPI.postman_collection.json b/conf/postmanCli/MongoDbData/MongoDBDataAPI.postman_collection.json index 116fe589..7fbb97a4 100644 --- a/conf/postmanCli/MongoDbData/MongoDBDataAPI.postman_collection.json +++ b/conf/postmanCli/MongoDbData/MongoDBDataAPI.postman_collection.json @@ -2,7 +2,7 @@ "info": { "_postman_id": "62332564-addf-40c0-8ad7-537732d2d6f5", "name": "MongoDB Data API", - "description": "This collection is an introduction to the [MongoDB Data API](https://www.mongodb.com/data-api/l). The Data API provides you with a REST-like access to your data in [MongoDB Atlas](https://www.mongodb.com/atlas), the database-as-a-service offering by MongoDB.\n\nYou can find the full documentation on the MongoDB Data API on the [documentation website](https://docs.atlas.mongodb.com/api/data-api-resources/#data-api-resources).\n\n## Getting Started\n\nTo test out the MongoDB Data API collection, start by [creating a free MongoDB Atlas cluster](https://docs.atlas.mongodb.com/tutorial/deploy-free-tier-cluster/).\n\nOnce you have a cluster, you can [fork this collection](https://www.postman.com/mongodb-devrel/workspace/mongodb-public/collection/17898583-25682080-e247-4d25-8e5c-1798461c7db4/fork) into your own workspace so you'll be able to use it with your own variables.\n\nOnce you have a cluster up and running, [enable the Data API](https://docs.atlas.mongodb.com/api/data-api/#1.-enable-the-data-api).\n\nFrom the Atlas UI, copy the URL endpoint provided for the Data API, and paste the value in your URL_ENDPOINT collection variable.\n\nStill in the Atlas UI, create a new API key, copy the value and paste it in the API_KEY collection variable.\n\nWith the [EJSON format](https://www.mongodb.com/docs/atlas/api/data-api/#extended-json-responses), you can benefit from MongoDB’s Extended JSON format, which preserves many data types that are normally not supported by JSON (such as ObjectID, Date, and more).\n\nFor writes, the {{CONTENT_TYPE}} variable is required and can be set to either `json` or `ejson`. This variable is used by Postman in the *Content-Type* header.\n\nFor reads, the Data API returns JSON by default, but can be changed to EJSON by setting the *Accept* header. You can set the {{CONTENT_TYPE}} variable to `json` or `ejson` to change the default behavior. Changing this variable will change the *Accept* header when Postman does a request to the Data API. The default content type returned by Atlas can also be changed in the Atlas Admin UI.\n\nFill in the other variables with the appropriate values for your cluster, database, and collection names.\n\nYou can now explore the various endpoints and see how to perform CRUD operations on your collection using the MongoDB Data API.", + "description": "This collection is an introduction to the [MongoDB Data API](https://www.mongodb.com/docs/atlas/app-services/data-api/). 
The Data API provides you with a REST-like access to your data in [MongoDB Atlas](https://www.mongodb.com/atlas), the database-as-a-service offering by MongoDB.\n\nYou can find the full documentation on the MongoDB Data API on the [documentation website](https://docs.atlas.mongodb.com/api/data-api-resources/#data-api-resources).\n\n## Getting Started\n\nTo test out the MongoDB Data API collection, start by [creating a free MongoDB Atlas cluster](https://docs.atlas.mongodb.com/tutorial/deploy-free-tier-cluster/).\n\nOnce you have a cluster, you can [fork this collection](https://www.postman.com/mongodb-devrel/workspace/mongodb-public/collection/17898583-25682080-e247-4d25-8e5c-1798461c7db4/fork) into your own workspace so you'll be able to use it with your own variables.\n\nOnce you have a cluster up and running, [enable the Data API](https://docs.atlas.mongodb.com/api/data-api/#1.-enable-the-data-api).\n\nFrom the Atlas UI, copy the URL endpoint provided for the Data API, and paste the value in your URL_ENDPOINT collection variable.\n\nStill in the Atlas UI, create a new API key, copy the value and paste it in the API_KEY collection variable.\n\nWith the [EJSON format](https://www.mongodb.com/docs/atlas/api/data-api/#extended-json-responses), you can benefit from MongoDB’s Extended JSON format, which preserves many data types that are normally not supported by JSON (such as ObjectID, Date, and more).\n\nFor writes, the {{CONTENT_TYPE}} variable is required and can be set to either `json` or `ejson`. This variable is used by Postman in the *Content-Type* header.\n\nFor reads, the Data API returns JSON by default, but can be changed to EJSON by setting the *Accept* header. You can set the {{CONTENT_TYPE}} variable to `json` or `ejson` to change the default behavior. Changing this variable will change the *Accept* header when Postman does a request to the Data API. 
The default content type returned by Atlas can also be changed in the Atlas Admin UI.\n\nFill in the other variables with the appropriate values for your cluster, database, and collection names.\n\nYou can now explore the various endpoints and see how to perform CRUD operations on your collection using the MongoDB Data API.", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", "updatedAt": "2024-01-04T21:41:57.000Z", "uid": "27246549-62332564-addf-40c0-8ad7-537732d2d6f5", diff --git a/install b/install index 359a2749..16933a6a 100755 --- a/install +++ b/install @@ -558,6 +558,7 @@ Log::computeDuration() { if ((${DISPLAY_DURATION:-0} == 1)); then local -i duration=0 local -i delta=0 + local durationStr deltaStr local -i currentLogDate currentLogDate="${EPOCHREALTIME/[^0-9]/}" if ((LOG_LAST_LOG_DATE_INIT == 1)); then @@ -566,7 +567,17 @@ Log::computeDuration() { else duration=$(((currentLogDate - FIRST_LOG_DATE) / 1000000)) delta=$(((currentLogDate - LOG_LAST_LOG_DATE) / 1000000)) - LOG_LAST_DURATION_STR="${duration}s/+${delta}s" + if ((duration > 59)); then + durationStr=$(date -ud "@${duration}" +'%H:%M:%S') + else + durationStr="${duration}s" + fi + if ((delta > 59)); then + deltaStr=$(date -ud "@${delta}" +'%H:%M:%S') + else + deltaStr="${delta}s" + fi + LOG_LAST_DURATION_STR="${durationStr}/+${deltaStr}" fi LOG_LAST_LOG_DATE="${currentLogDate}" # shellcheck disable=SC2034 diff --git a/src/_binaries/Docker/cli/cli-options.sh b/src/_binaries/Docker/cli/cli-options.sh index 0a112e03..00bb5042 100755 --- a/src/_binaries/Docker/cli/cli-options.sh +++ b/src/_binaries/Docker/cli/cli-options.sh @@ -53,7 +53,7 @@ longDescriptionFunction() { echo echo -e "${__HELP_TITLE}EXAMPLES:${__HELP_EXAMPLE}" echo -e " to connect to mysql container in bash mode with user mysql" - echo -e " ${SCRIPT_NAME} mysql mysql '/bin/bash'" + echo -e " ${SCRIPT_NAME} mysql-container-name mysql '/bin/bash'" echo -e " to connect to web container with user root" echo -e " ${SCRIPT_NAME} web root" echo -e "${__HELP_NORMAL}" diff --git a/src/_binaries/Docker/cli/testsData/cli.help.txt b/src/_binaries/Docker/cli/testsData/cli.help.txt index edb7108c..b0dec505 100644 --- a/src/_binaries/Docker/cli/testsData/cli.help.txt +++ b/src/_binaries/Docker/cli/testsData/cli.help.txt @@ -97,7 +97,7 @@ This list can be overridden in home/.bash-tools/cliProfiles EXAMPLES: to connect to mysql container in bash mode with user mysql - cli mysql mysql '/bin/bash' + cli mysql-container-name mysql '/bin/bash' to connect to web container with user root cli web root