diff --git a/.bazelversion b/.bazelversion
index f3b5af39e43..21c8c7b46b8 100644
--- a/.bazelversion
+++ b/.bazelversion
@@ -1 +1 @@
-6.1.1
+7.1.1
diff --git a/.dir-locals.el b/.dir-locals.el
deleted file mode 100644
index 15412afcd92..00000000000
--- a/.dir-locals.el
+++ /dev/null
@@ -1,37 +0,0 @@
-((nil . (
- (grep-find-ignored-files . (
- "*.min.js" "*.standalone.js" "*.map" "*.gz" ".#*" "*.o" "*~" "*.bin"
- "*.lbin" "*.so" "*.a" "*.ln" "*.blg" "*.bbl" "*.elc" "*.lof"
- "*.glo" "*.idx" "*.lot" "*.fmt" "*.tfm" "*.class" "*.fas"
- "*.lib" "*.mem" "*.x86f" "*.sparcf" "*.dfsl" "*.pfsl"
- "*.d64fsl" "*.p64fsl" "*.lx64fsl" "*.lx32fsl" "*.dx64fsl"
- "*.dx32fsl" "*.fx64fsl" "*.fx32fsl" "*.sx64fsl" "*.sx32fsl"
- "*.wx64fsl" "*.wx32fsl" "*.fasl" "*.ufsl" "*.fsl" "*.dxl"
- "*.lo" "*.la" "*.gmo" "*.mo" "*.toc" "*.aux" "*.cp" "*.fn"
- "*.ky" "*.pg" "*.tp" "*.vr" "*.cps" "*.fns" "*.kys" "*.pgs"
- "*.tps" "*.vrs" "*.pyc" "*.pyo"))
- (grep-find-ignored-directories . (
- ".git" ".venv" "node_modules" ".mypy_cache" ".pytest_cache"
- "SCCS" "RCS" "CVS" "MCVS" ".src" ".svn" ".git" ".hg" ".bzr" "_MTN" "_darcs" "{arch}"))
- ))
- (c++-mode . ((flycheck-cppcheck-suppressions . ("passedByValue"))))
- (python-mode . ((eval setq flycheck-python-mypy-executable
- (concat (projectile-locate-dominating-file default-directory dir-locals-file)
- "scripts/run-mypy"))
- (eval setq flycheck-python-pylint-executable
- (concat (projectile-locate-dominating-file default-directory dir-locals-file)
- "scripts/run-pylint"))
- (eval eval-after-load "yapfify"
- '(defun yapfify-call-bin (input-buffer output-buffer start-line end-line)
- "Call process yapf on INPUT-BUFFER saving the output to OUTPUT-BUFFER.
-
-Return the exit code. START-LINE and END-LINE specify region to
-format."
- (with-current-buffer input-buffer
- (call-process-region (point-min) (point-max)
- (concat (projectile-locate-dominating-file default-directory dir-locals-file)
- "scripts/run-pipenv")
- nil output-buffer nil
- "run" "yapf"
- "-l" (concat (number-to-string start-line) "-" (number-to-string end-line))))))))
- )
diff --git a/.eslintignore b/.eslintignore
deleted file mode 100644
index 39ffe4ccb11..00000000000
--- a/.eslintignore
+++ /dev/null
@@ -1,21 +0,0 @@
-# These are just copied files and should be replaced
-# with npm dependencies in the future
-web/htdocs/js/modules/cbor_ext.*s
-web/htdocs/js/modules/colorpicker.*s
-
-
-
-# These are generated files or included libraries
-
-web/htdocs/js/mobile_min.js
-web/htdocs/js/side_min.js
-web/htdocs/jquery/jquery.mobile-1.4.5.js
-web/htdocs/jquery/jquery.mobile-1.4.5.min.js
-web/htdocs/js/main_min.js
-
-web/htdocs/openapi/swagger-ui-3/swagger-ui-bundle.js
-web/htdocs/openapi/swagger-ui-3/swagger-ui-es-bundle.js
-web/htdocs/openapi/swagger-ui-3/swagger-ui.js
-web/htdocs/openapi/swagger-ui-3/swagger-ui-standalone-preset.js
-web/htdocs/openapi/swagger-ui-3/swagger-ui-es-bundle-core.js
-web/htdocs/openapi/redoc.standalone.js
diff --git a/.gitignore b/.gitignore
index 95db89e0191..963be9dccc4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,8 +20,8 @@
.testmondata
.venv.lock
/.venv/
-/.docker_workspace/
-/build_user_home/
+/container_shadow_workspace*/
+/shared_cargo_folder/
*.cookie
*.gcno
*.gcov
@@ -61,6 +61,7 @@ GPATH
GRTAGS
GTAGS
htmlcov/
+logback.log
mk-livestatus-*.tar.gz
node_modules/
omd/bazel-*
@@ -76,7 +77,6 @@ tests/var/*
tests/results/*
tests/.hypothesis/
/results
-/buildscripts/infrastructure/build-nodes/scripts/defines.make
/buildscripts/docker_image_aliases/docker-image-alias-resolve-error.txt
/bazel-*
@@ -99,3 +99,7 @@ tests/.hypothesis/
.ionide
# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode
+
+# gui-e2e tests: playwright specific ignores
+test-results/
+*.png
diff --git a/.gitmodules b/.gitmodules
index 79ef9bfd7d0..da9592bbf93 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,4 +1,4 @@
[submodule "tests/qa-test-data"]
path = tests/qa-test-data
url = ../qa-test-data
- branch = main
+ branch = master
diff --git a/.groovylintrc.json b/.groovylintrc.json
new file mode 100644
index 00000000000..a943fbed31c
--- /dev/null
+++ b/.groovylintrc.json
@@ -0,0 +1,78 @@
+{
+ "extends": "recommended",
+ "rules":
+ {
+ "basic.DeadCode": "error",
+ "convention.CompileStatic": "off",
+ "convention.FieldTypeRequired": "info",
+ "convention.IfStatementCouldBeTernary": "info",
+ "convention.MethodReturnTypeRequired": "off",
+ "convention.NoDef": "off",
+ "convention.TrailingComma": "warning",
+ "convention.VariableTypeRequired": "off",
+ "design.Instanceof": "off",
+ "dry.DuplicateListLiteral": "info",
+ "dry.DuplicateMapLiteral": "warning",
+ "dry.DuplicateStringLiteral":
+ {
+ "ignoreStrings": "nexus",
+ "severity": "info"
+ },
+ "exceptions.ThrowException": "warning",
+ "exceptions.ThrowNullPointerException": "info",
+ "exceptions.ThrowRuntimeException": "info",
+ "exceptions.ThrowThrowable": "info",
+ "formatting.BracesForClass": "info",
+ "formatting.BracesForForLoop": "info",
+ "formatting.BracesForIfElse": "info",
+ "formatting.BracesForMethod": "info",
+ "formatting.BracesForTryCatchFinally": "info",
+ "formatting.ConsecutiveBlankLines": "warning",
+ "formatting.FileEndsWithoutNewline": "warning",
+ "formatting.Indentation":
+ {
+ "severity": "warning",
+ "spacesPerIndentLevel": 4
+ },
+ "formatting.LineLength":
+ {
+ "length": 140,
+ "severity": "warning"
+ },
+ "formatting.SpaceAroundMapEntryColon": "off",
+ "groovyism.ExplicitCallToEqualsMethod": "info",
+ "logging.Println": "off",
+ "naming.FactoryMethodName": "off",
+ "naming.ParameterName":
+ {
+ "regex": "^[a-zA-Z0-9_]*$",
+ "severity": "info"
+ },
+ "naming.VariableName":
+ {
+ "regex": "^[a-zA-Z0-9_]*$",
+ "severity": "info"
+ },
+ "size.MethodSize":
+ {
+ "maxLines": 150,
+ "severity": "warning"
+ },
+ "size.NestedBlockDepth":
+ {
+ "maxNestedBlockDepth": 8,
+ "severity": "warning"
+ },
+ "unnecessary.UnnecessaryGString": "off",
+ "unnecessary.UnnecessaryParenthesesForMethodCallWithClosure": "off",
+ "unnecessary.UnnecessaryReturnKeyword": "off",
+ "unnecessary.UnnecessarySemicolon": "off",
+ "unused.UnusedArray": "error",
+ "unused.UnusedMethodParameter": "error",
+ "unused.UnusedObject": "error",
+ "unused.UnusedPrivateField": "error",
+ "unused.UnusedPrivateMethod": "error",
+ "unused.UnusedPrivateMethodParameter": "error",
+ "unused.UnusedVariable": "error"
+ }
+}
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0fe32343a01..b9d90c90cec 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -34,11 +34,16 @@ repos:
exclude: |
(?x)^(
^agents/plugins/|
- ^enterprise/agents/plugins/|
+ ^non-free/cmk-update-agent/|
^tests/agent-plugin-unit/
)
- repo: local
hooks:
+ - id: check-cmk-namespace
+ name: Check cmk namespace package
+ language: script
+ entry: scripts/check-cmk-namespace
+ types: [file, python]
- id: remove-stale-imports
name: Remove stale imports
language: script
@@ -54,12 +59,25 @@ repos:
entry: scripts/run-black
language: script
types: [file, python]
+ - id: bandit
+ name: Run bandit
+ # -l level low -ll = level medium -lll level high
+ entry: scripts/run-pipenv run bandit --config bandit.yaml -ll
+ language: script
+ types: [file, python]
- id: omd-python-modules
name: Check Python modules in OMD
entry: scripts/check-omd-python-modules
language: script
files: omd/packages/python3?-modules
types: [file]
+ - id: sphinx
+ name: Sphinx Documentation
+ entry: scripts/run-pipenv run make -C doc/documentation html
+ files: ^doc/documentation/.*(rst|puml)$
+ pass_filenames: false
+ language: script
+ types: [file]
- id: pylint
name: Check pylint
entry: scripts/check-pylint
diff --git a/.prettierignore b/.prettierignore
deleted file mode 100644
index 0c042f49056..00000000000
--- a/.prettierignore
+++ /dev/null
@@ -1,5 +0,0 @@
-web/htdocs/js/*_min.js*
-# colorpicker.js is a 3rd party module, so do not format it
-web/htdocs/js/modules/colorpicker.js
-
-web/htdocs/js/modules/figures/cmk_stats.ts
diff --git a/.pylintrc b/.pylintrc
index 1dec6c9af0a..7412c9487a6 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -48,11 +48,6 @@ disable=
# programming practice. In some cases, they can even hide bugs.
broad-except,
#---------------------------------------------------------------------------
- # Enabling this would be nice, but not crucial. Nevertheless, this would
- # improve readability and involve some cleanups in our class hierarchy, so
- # we should do this some day.
- protected-access,
- #---------------------------------------------------------------------------
# Enabling this would be nice, but not crucial. At the moment, we have quite
# a few violations, so we postpone fixing this. When we do it eventually, we
# probably want to use "include-naming-hint=yes" in the BASIC section.
@@ -99,9 +94,15 @@ disable=
# Black doesn't split long strings, we'll have to this by ourselves, see
# https://github.com/psf/black/issues/1331
line-too-long,
- #---------------------------------------------------------------------------
- # We are light years away from enabling this...
- missing-docstring,
+ # Since Black 24 black and pylint started to disagree on `def foo(): ...`
+ # https://github.com/psf/black/issues/4173
+ # black is our formatter so the black way is the proper way...
+ multiple-statements,
+ #---------------------------------------------------------------------------
+ # We are light years away from enabling these...
+ missing-module-docstring,
+ missing-class-docstring,
+ missing-function-docstring,
#---------------------------------------------------------------------------
# Enabling the two spelling-related checks increases pylints runtime from
# 11 min to 40 min, so we better keep those disabled for normal runs.
diff --git a/.pylintrc-windows b/.pylintrc-windows
index 3a95a7d3141..55a235f8bc6 100644
--- a/.pylintrc-windows
+++ b/.pylintrc-windows
@@ -96,8 +96,10 @@ disable=
# https://github.com/psf/black/issues/1331
line-too-long,
#---------------------------------------------------------------------------
- # We are light years away from enabling this...
- missing-docstring,
+ # We are light years away from enabling these...
+ missing-module-docstring,
+ missing-class-docstring,
+ missing-function-docstring,
#---------------------------------------------------------------------------
# Enabling the two spelling-related checks increases pylints runtime from
# 11 min to 40 min, so we better keep those disabled for normal runs.
diff --git a/.werks/14219 b/.werks/14219
new file mode 100644
index 00000000000..bdf73a0f84b
--- /dev/null
+++ b/.werks/14219
@@ -0,0 +1,9 @@
+Title: Setup: Improved speed of ineffective rule search
+Class: feature
+Compatible: compat
+Component: wato
+Date: 1702891112
+Edition: cre
+Level: 1
+Version: 2.3.0b1
+
diff --git a/.werks/14220.md b/.werks/14220.md
new file mode 100644
index 00000000000..78aa979adf8
--- /dev/null
+++ b/.werks/14220.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# BI: Fixed exception in check when aggregation was in state PENDING
+
+key | value
+---------- | ---
+date | 2024-01-30T08:17:38+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | bi
+level | 1
+compatible | yes
+
+
diff --git a/.werks/14221.md b/.werks/14221.md
new file mode 100644
index 00000000000..31b1a16ab09
--- /dev/null
+++ b/.werks/14221.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Parent/Child topology: No longer ignoring additionally configured filters
+
+key | value
+---------- | ---
+date | 2024-01-30T14:11:43+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | multisite
+level | 1
+compatible | yes
+
+
diff --git a/.werks/14222.md b/.werks/14222.md
new file mode 100644
index 00000000000..b09143cbedf
--- /dev/null
+++ b/.werks/14222.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# BI configuration: Changed element order of "restrict severity to at worst"
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-01-31T09:55:00+00:00
+level | 1
+class | feature
+component | bi
+edition | cre
+
+This werk only introduces a visual fix; there are no functional changes.
+The order of the dropdown choice elements did not reflect the severity of the states.
+The correct severity order for BI is OK->WARN->UNKNOWN->CRIT, which differs
+from the order of the monitoring states OK->WARN->CRIT->UNKNOWN.
\ No newline at end of file
diff --git a/.werks/14223.md b/.werks/14223.md
new file mode 100644
index 00000000000..d51998765e7
--- /dev/null
+++ b/.werks/14223.md
@@ -0,0 +1,47 @@
+[//]: # (werk v2)
+# Introduced topology visualization
+
+key | value
+---------- | ---
+date | 2024-02-25T15:22:55+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | multisite
+level | 2
+compatible | yes
+
+
+The topology visualization is a new feature that allows the visualization of complex interconnected networks.
+A simple example of this visualization is the parent/child topology. The new mechanism that comes with this werk allows the linking of external data with the data of the monitoring core.
+When it comes to the display, you simply define some starting points via the filter form.
+Based on these, the topology visualization then builds a mesh of incoming and outgoing connections.
+
+The type of external data might be
+* Netstat, showing connections between the interfaces/ips/ports
+* LLDP/CDP, showing the network neighbors
+
+
+There is a common data format specification for all external data.
+So you can just create your own data file that provides information about the relationships between hosts, services or generic objects that are not linked to the core.
+If you drop this file into a specific folder, the visualization will handle the rest. There is no need to write Python code.
+
+Right now you can configure
+* Objects - either linked to an entity in the core or some standalone object
+* Icons/emblems which should be added to the object
+* Connections between objects
+* Line style/color of specific connections
+
+Since this is quite a visualization-heavy topic that is hard to explain via text alone, feel free to check out this
+[thread](https://forum.checkmk.com/t/network-visualization-now-in-version-2-3/44467) in our Checkmk forum.
+
+We will also publish a blog article in the coming weeks.
+
+
+```
+Important:
+The visualization only works if external data is provided in a special folder.
+At the moment this data is not created by Checkmk, but comes from external MKP developments.
+```
+
+
diff --git a/.werks/14224.md b/.werks/14224.md
new file mode 100644
index 00000000000..7338e5f8580
--- /dev/null
+++ b/.werks/14224.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Frozen BI: Frozen icon now also indicates if the non-frozen version differs from the frozen one
+
+key | value
+---------- | ---
+date | 2024-02-27T15:09:59+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | bi
+level | 1
+compatible | yes
+
+
diff --git a/.werks/14226.md b/.werks/14226.md
new file mode 100644
index 00000000000..e3ca094b70d
--- /dev/null
+++ b/.werks/14226.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Fixed stuck activate changes on bulk discovery and when using the DCD
+
+key | value
+---------- | ---
+date | 2024-03-08T13:17:45+00:00
+version | 2.4.0b1
+class | fix
+edition | cme
+component | wato
+level | 1
+compatible | yes
+
+Activate changes stopped working when initiating a bulk discovery or using the DCD with automatic service discovery.
\ No newline at end of file
diff --git a/.werks/14228.md b/.werks/14228.md
new file mode 100644
index 00000000000..896df51cef6
--- /dev/null
+++ b/.werks/14228.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Fixed broken CSV bulk import for hosts
+
+key | value
+---------- | ---
+date | 2024-04-03T07:08:09+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+
diff --git a/.werks/14229.md b/.werks/14229.md
new file mode 100644
index 00000000000..0ba1e137919
--- /dev/null
+++ b/.werks/14229.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Valuespecs: Fixed confusion of values when ListOf contains another ListOf and all have the same magic
+
+key | value
+---------- | ---
+date | 2024-04-10T09:56:58+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Normally, ListOf elements embedded in other ListOf elements should contain different magic values; otherwise errors will occur when processing forms.
+This werk allows the use of identical magic values.
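+
+A minimal sketch of the previously problematic pattern (illustrative only, not
+actual Checkmk code; it assumes the GUI valuespec API where `ListOf` uses a
+default magic separator shared by both lists):
+
+```python
+# Both lists keep the default magic value. Before this werk, parsing such a
+# nested form could confuse inner and outer entries; identical magic values
+# are now handled correctly.
+from cmk.gui.valuespec import ListOf, TextInput
+
+nested = ListOf(
+    ListOf(TextInput(title="Entry")),  # inner list, default magic
+    title="Groups",                    # outer list, same default magic
+)
+```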
diff --git a/.werks/14230.md b/.werks/14230.md
new file mode 100644
index 00000000000..3de0f8c7620
--- /dev/null
+++ b/.werks/14230.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# BI: Fixed incorrect aggregation of downtimes
+
+key | value
+---------- | ---
+date | 2024-04-10T12:05:48+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | bi
+level | 1
+compatible | yes
+
+The configuration option "Escalate downtimes based on aggregated WARN state" did not work correctly since version 1.5.
+The behaviour is now fixed and works as described in our [official documentation](https://docs.checkmk.com/latest/en/bi.html#_tuning_options).
diff --git a/.werks/14231.md b/.werks/14231.md
new file mode 100644
index 00000000000..35f7ba17fb4
--- /dev/null
+++ b/.werks/14231.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Aggregation visualization: Fixed displaying aggregations names containing special characters
+
+key | value
+---------- | ---
+date | 2024-04-27T14:22:44+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | bi
+level | 1
+compatible | yes
+
+
diff --git a/.werks/14573 b/.werks/14573
index cbcc7a872fb..4036af699b0 100644
--- a/.werks/14573
+++ b/.werks/14573
@@ -26,5 +26,5 @@ instance, assume your rule specifies
and your host has the address 1.2.3.4. Then the new rule needs to specify
C+:
-https://1.2.3.4:9000/my_prefix/api/v1/status/buildinfo
+1.2.3.4:9000/my_prefix/api/v1/status/buildinfo
C-:
diff --git a/.werks/14616.md b/.werks/14616.md
new file mode 100644
index 00000000000..3b40bc838b6
--- /dev/null
+++ b/.werks/14616.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# ucd_disk: fs_size, fs_used and fs_free have wrongly scaled values in performance graphs
+
+key | value
+---------- | ---
+date | 2024-01-15T13:22:37+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+For `ucd_disk` a performance value translation had been missing; this change adds it.
diff --git a/.werks/14617.md b/.werks/14617.md
new file mode 100644
index 00000000000..a1fe1b88eac
--- /dev/null
+++ b/.werks/14617.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# check_uniserv: running the active check results in exception "TypeError: a bytes-like object is required, not 'str'"
+
+key | value
+---------- | ---
+date | 2024-01-17T15:06:16+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The `check_uniserv` implementation didn't encode the `close` command, resulting in the exception
+`TypeError: a bytes-like object is required, not 'str'` being raised.
+This change adds the missing encoding, along with some general modernization.
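+
+A minimal sketch of the kind of fix involved (illustrative, not the actual
+plugin code): data written to a socket must be bytes under Python 3, so the
+command string needs to be encoded first.
+
+```python
+import socket
+
+def send_command(sock: socket.socket, command: str) -> None:
+    # Before: sock.send(command) raised "TypeError: a bytes-like object
+    # is required, not 'str'" under Python 3.
+    sock.send(command.encode("utf-8"))
+
+# e.g. send_command(sock, "close")
+```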
diff --git a/.werks/14618.md b/.werks/14618.md
new file mode 100644
index 00000000000..30deda97368
--- /dev/null
+++ b/.werks/14618.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# check_mail_loop: if configured to delete processed mails, check_mail_loop would delete all mails instead
+
+key | value
+---------- | ---
+date | 2024-01-23T06:59:04+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+As a result of a refactoring, `check_mail_loop` would delete all previously
+fetched mails instead of just the pre-filtered ones.
+This change restores the former behavior.
diff --git a/.werks/14620.md b/.werks/14620.md
new file mode 100644
index 00000000000..1462425060b
--- /dev/null
+++ b/.werks/14620.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Lower deprecation age for mails sent by check_mail_loop from 24h to 2h
+
+key | value
+---------- | ---
+date | 2024-01-30T12:49:59+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Mails sent by `check_mail_loop` were deprecated only after 24h, which potentially resulted
+in thousands of mails in circumstances where sent mails could not be processed for whatever
+reason.
+This change lowers this age to 2h, resulting in 'only' 120 mails (for this active check being
+executed every minute).
diff --git a/.werks/14859.md b/.werks/14859.md
new file mode 100644
index 00000000000..2203cc669ac
--- /dev/null
+++ b/.werks/14859.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Event Console: Fix EC always activating the config written by the previous activation
+
+key | value
+---------- | ---
+date | 2024-01-16T14:13:44+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | ec
+level | 2
+compatible | yes
+
+The bug was introduced by werk [#16012](https://checkmk.com/werk/16012) in version 2.2.0p18.
+A workaround would be to make a trivial change to the rules/rule packs (e.g. edit a description)
+and activate the changes, or to reload the EC manually after the initial reload to apply the correct config.
diff --git a/.werks/14943.md b/.werks/14943.md
new file mode 100644
index 00000000000..38f2401c828
--- /dev/null
+++ b/.werks/14943.md
@@ -0,0 +1,34 @@
+[//]: # (werk v2)
+# Agent updater: allow automation user to login with password
+
+key | value
+---------- | ---
+date | 2024-01-02T16:50:40+00:00
+version | 2.3.0b1
+class | fix
+edition | cee
+component | agents
+level | 1
+compatible | yes
+
+Up to now, when using the agent updater CLI, you would have to use the
+`--password` (or `-P`) parameter to specify the password for a human
+user and the `--secret` (or `-S`) parameter to specify the secret for
+an automation user.
+
+This becomes confusing with the interactive mode: in that case the
+program assumes that you are using a human user and will fail if the
+credentials that you enter are valid credentials for an automation user.
+On top of that, the error message is completely misleading.
+
+With this commit, we are changing the behavior of the agent updater so
+that automation user credentials will work even if the secret is
+specified with the `--password` (or `-P`) param.
+
+This way the end user doesn't have to care about which param name is the
+right one to use: they can just specify the password, or the secret,
+with the `--password` param and it will work.
+This also allows the interactive mode to work with an automation user.
+
+This change is backward compatible, meaning that everything that used to
+work up until now will keep working even after this.
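+
+A hedged sketch of the fallback behavior described above; the helper names are
+hypothetical and do not reflect the actual agent updater internals:
+
+```python
+class AuthenticationError(Exception):
+    """Raised when a login attempt is rejected."""
+
+def _login_human_user(username: str, password: str) -> str:
+    raise AuthenticationError  # stand-in for the password-based login
+
+def _login_automation_user(username: str, secret: str) -> str:
+    return f"session for {username}"  # stand-in for the secret-based login
+
+def login(username: str, credential: str) -> str:
+    """Try the credential as a password first, then as an automation secret."""
+    try:
+        return _login_human_user(username, credential)
+    except AuthenticationError:
+        return _login_automation_user(username, credential)
+```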
diff --git a/.werks/15026.md b/.werks/15026.md
new file mode 100644
index 00000000000..a0c340e171a
--- /dev/null
+++ b/.werks/15026.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Disallow python_plugins and lnx_remote_alert_handlers agent config options for users without the "add_or_modify_executables" permission
+
+key | value
+---------- | ---
+date | 2024-03-15T10:37:41+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Without the "add_or_modify_executables" permission users do not have the right
+to change any executable run by checkmk, either on the site or via the agent.
+The agent config options "python_plugins" and "lnx_remote_alert_handlers" have
+not yet checked for that permission.
+
+In the UI "python_plugins" and "lnx_remote_alert_handlers are called
+"Python agent plugin execution (UNIX)" and "Remote alert handler (Linux)" respectively.
diff --git a/.werks/15028.md b/.werks/15028.md
new file mode 100644
index 00000000000..41f4ba5bea6
--- /dev/null
+++ b/.werks/15028.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Fix crash on user page with SAML connector
+
+key | value
+---------- | ---
+date | 2024-04-29T12:01:22+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | wato
+level | 1
+compatible | yes
+
+When viewing the users page with a user using a SAML connector, a crash report
+with "Internal error: locked" was shown. This is fixed now.
diff --git a/.werks/15198.md b/.werks/15198.md
new file mode 100644
index 00000000000..c0ddd39aa3c
--- /dev/null
+++ b/.werks/15198.md
@@ -0,0 +1,39 @@
+[//]: # (werk v2)
+# Brute-force protection ineffective for some login methods
+
+key | value
+---------- | ---
+date | 2024-04-09T12:24:12+00:00
+version | 2.4.0b1
+class | security
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Prior to this Werk, the mechanism to lock user accounts after too many failed login attempts was only effective for the web form login method.
+Login attempts via the REST API and basic authentication did not count towards the lockout mechanism.
+As a result, an attacker could try to brute-force user passwords without triggering the lockout mechanism.
+
+This Werk adds the same locking mechanism to login via the REST API and basic authentication _for human user accounts_.
+
+Note that automation accounts remain unaffected by the lockout mechanism to avoid having them locked by malicious intent.
+It is therefore important to use long, random automation secrets.
+
+This issue was found during internal review.
+
+**Affected Versions**:
+
+* 2.3.0 (beta)
+* 2.2.0
+* 2.1.0
+* 2.0.0 (EOL)
+
+**Mitigations**:
+
+If updating is not possible, the brute-force attempts can be hindered by using a strong password policy.
+
+**Vulnerability Management**:
+
+We have rated the issue with a CVSS Score of 5.9 (Medium) with the following CVSS vector: `CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:N/A:N`
+and assigned CVE `CVE-2024-28825`.
diff --git a/.werks/15199.md b/.werks/15199.md
new file mode 100644
index 00000000000..9b2a2038c91
--- /dev/null
+++ b/.werks/15199.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Update OpenSSL to version 3.0.13
+
+key | value
+---------- | ---
+date | 2024-04-17T10:08:23+00:00
+version | 2.4.0b1
+class | security
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+OpenSSL was updated to version 3.0.13.
+
+OpenSSL 3 enforces stricter requirements regarding allowed configurations, such as allowed ciphers, renegotiation, and so on.
+In some scenarios, this can break monitoring for hosts with TLS configurations that are no longer considered secure.
+We have published a blog post to help you mitigate these issues, should they affect you: https://checkmk.com/blog/how-monitor-servers-broken-tls-checkmk.
+
+To aid automated scanning we assign a CVSS score of 0.0 (None) (CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:N).
diff --git a/.werks/15292 b/.werks/15292
index 6389da148b9..5bca5affcca 100644
--- a/.werks/15292
+++ b/.werks/15292
@@ -22,3 +22,7 @@ https://checkmk.com/werks
When updating to a new major version, the update process will check if there
are unacknowledged werks and tell the user about this, as this list will no
longer be available after the update.
+
+This check is omitted for remote sites in a distributed configuration
+environment, as long as both the remote site and the central site are running
+at least version 2.3.0 of Checkmk.
diff --git a/.werks/15295 b/.werks/15295
new file mode 100644
index 00000000000..624db075b4c
--- /dev/null
+++ b/.werks/15295
@@ -0,0 +1,11 @@
+Title: snmp inline backend ignores no Access error
+Class: fix
+Compatible: compat
+Component: core
+Date: 1697714323
+Edition: cee
+Level: 1
+Version: 2.3.0b1
+
+SNMP walks that result in a "no Access" error are now treated as if the walk
+returned no data. A debug log entry informs the user about this.
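+
+Schematically, the new tolerance looks like this (a sketch with stand-in
+names, not the actual inline backend code):
+
+```python
+import logging
+
+class NoAccessError(Exception):
+    """Stand-in for the 'no Access' error raised by the SNMP backend."""
+
+def tolerant_walk(do_walk, oid):
+    try:
+        return do_walk(oid)
+    except NoAccessError:
+        logging.getLogger("snmp").debug("no access for %s, treating walk as empty", oid)
+        return []
+```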
diff --git a/.werks/15297.md b/.werks/15297.md
new file mode 100644
index 00000000000..37b76fa3c2d
--- /dev/null
+++ b/.werks/15297.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# mk_oracle.ps1: port changes made in werk 14839 to windows agent plugin
+
+key | value
+---------- | ---
+compatible | no
+version | 2.3.0b1
+date | 2023-10-24T08:48:21+00:00
+level | 1
+class | fix
+component | agents
+edition | cre
+
+Changes made in Werk #14839 were
+only applied to the Linux agent plugin of mk_oracle. Now those changes are also
+applied to mk_oracle.ps1.
\ No newline at end of file
diff --git a/.werks/15298.md b/.werks/15298.md
new file mode 100644
index 00000000000..bd8f3d5e067
--- /dev/null
+++ b/.werks/15298.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# mk_oracle: restore compatibility with oracle 12c
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2023-10-24T09:01:15+00:00
+level | 1
+class | fix
+component | agents
+edition | cre
+
+Werk #14839 added the function
+listagg to one of our queries.
+
+Older versions of Oracle Database expect a WITHIN statement after the
+listagg function. This was added to ensure compatibility with older
+databases.
\ No newline at end of file
diff --git a/.werks/15314 b/.werks/15314
new file mode 100644
index 00000000000..4ef7abdc671
--- /dev/null
+++ b/.werks/15314
@@ -0,0 +1,20 @@
+Title: mssql_backup: fix 'no backup found' for case-insensitive databases
+Class: fix
+Compatible: compat
+Component: checks
+Date: 1702370026
+Edition: cre
+Level: 1
+Version: 2.3.0b1
+
+In certain circumstances it was possible that the `mssql.vbs` agent plugin
+returned `no backup found` because it could not map the found backup to the
+database.
+
+The reason for that was that the comparison between the backup's database name
+and the database name was case-sensitive, but it is possible to configure MSSQL
+in a way that the backup's database name and the database name do not match
+case-sensitively.
+
+We now compare both names on the MSSQL server so the configured case
+sensitivity is respected.
diff --git a/.werks/15317.md b/.werks/15317.md
new file mode 100644
index 00000000000..452d9a1480e
--- /dev/null
+++ b/.werks/15317.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# primekey_fan: rename service description to 'Primekey Fan'
+
+key | value
+---------- | ---
+date | 2023-12-20T09:43:06+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+You are affected by this change if you monitor a Primekey appliance and use
+searches or rules that rely on the service description.
+
+In order to unify the Primekey checks, the `primekey_fan` service description was
+renamed from 'Fan Primekey' to 'Primekey Fan'.
diff --git a/.werks/15318.md b/.werks/15318.md
new file mode 100644
index 00000000000..025deddf054
--- /dev/null
+++ b/.werks/15318.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Fix logging statement
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-24T07:27:37+00:00
+level | 1
+class | fix
+component | ec
+edition | cre
+
+Prior to this change, you may have seen the following log output:
+
+```
+undefined action "{aname}, must be one of {", ".join(table.keys()}"
+```
+
+The variables are now interpolated correctly.
\ No newline at end of file
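+
+The underlying bug pattern was most likely a missing f-string prefix; a
+minimal illustration (not the actual Event Console source):
+
+```python
+aname = "notify"
+table = {"delete": None, "archive": None}
+
+# Buggy: a plain string literal, so the placeholders are never interpolated.
+msg = 'undefined action "{aname}, must be one of {", ".join(table.keys()}"'
+
+# Fixed: an f-string with the expression completed and quoted correctly.
+msg = f'undefined action "{aname}", must be one of {", ".join(table.keys())}'
+```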
diff --git a/.werks/15319.md b/.werks/15319.md
new file mode 100644
index 00000000000..3ef975aed75
--- /dev/null
+++ b/.werks/15319.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# check_mail: create ec spool files in correct folder
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-24T09:50:01+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Previously the path was prefixed with a space, so the spool files were saved in:
+/omd/sites/cmk220d/var/check_mk/core/ /omd/sites/cmk220d/var/mkeventd/spool
\ No newline at end of file
diff --git a/.werks/15320.md b/.werks/15320.md
new file mode 100644
index 00000000000..e8d144d5fa7
--- /dev/null
+++ b/.werks/15320.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# heartbeat_crm_resources: unmanaged stopped resources could not go critical
+
+key | value
+---------- | ---
+date | 2024-01-25T13:39:59+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
+Stopped resources are marked `CRIT`.
+
+If a resource was stopped and unmanaged, it was not marked as `CRIT`.
diff --git a/.werks/15321.md b/.werks/15321.md
new file mode 100644
index 00000000000..6b5c4bea1bf
--- /dev/null
+++ b/.werks/15321.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# Fix "State if specific check plugins receive no monitoring data" of Rule "Status of the Checkmk service"
+
+key | value
+---------- | ---
+date | 2024-01-29T12:49:03+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Rule "Status of the Checkmk service" provides a setting called "State if
+specific check plugins receive no monitoring data" where you can specify a
+regular expression to match specific check plugins, and assign a status for
+the "Check_MK" service if this check plugins receives no data.
+
+The feature did work correctly if you specified a Status worse than "WARN".
+
+But the "Check_MK" service went to "WARN" even if there was an rule to set the
+status to "OK" if the specific section did not receive any data. This is fixed now.
diff --git a/.werks/15322.md b/.werks/15322.md
new file mode 100644
index 00000000000..4003c2df34e
--- /dev/null
+++ b/.werks/15322.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# BGP Peer State Mapping: add connect state
+
+key | value
+---------- | ---
+date | 2024-01-30T13:09:08+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+BGP peers can be in the "connect" state. This state was missing in Checkmk and thus
+the service was in "UNKNOWN" state.
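+
+For reference, these are the BGP peer FSM states from the BGP4-MIB (RFC 4273)
+that such a mapping has to cover; a sketch, not the plugin's actual table:
+
+```python
+BGP_PEER_STATES = {
+    1: "idle",
+    2: "connect",      # the state that was previously unmapped
+    3: "active",
+    4: "opensent",
+    5: "openconfirm",
+    6: "established",
+}
+```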
diff --git a/.werks/15324.md b/.werks/15324.md
new file mode 100644
index 00000000000..6deed429103
--- /dev/null
+++ b/.werks/15324.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# agent_bakery: mk_postgres.py: restore required keys
+
+key | value
+---------- | ---
+date | 2024-02-01T06:19:27+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+[Werk #15645](https://checkmk.com/werk/15645) made the Inputs of "Instance
+settings" of the Agent rule "PostgreSQL database and sessions (Linux, Windows)"
+optional by accident. If you did not specify all keys, baking agents failed with
+a `KeyError` on the automation call.
diff --git a/.werks/15326.md b/.werks/15326.md
new file mode 100644
index 00000000000..66773ef9f18
--- /dev/null
+++ b/.werks/15326.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# oracle_instance: Fix ValidationError
+
+key | value
+---------- | ---
+date | 2024-02-05T07:35:36+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Both the oracle instance and oracle uptime services vanished due to too strict
+validation. The crash report showed a `ValidationError`.
diff --git a/.werks/15327.md b/.werks/15327.md
new file mode 100644
index 00000000000..8bdb17075c4
--- /dev/null
+++ b/.werks/15327.md
@@ -0,0 +1,62 @@
+[//]: # (werk v2)
+# mk_oracle: Follow-up to privilege escalation fix
+
+key | value
+---------- | ---
+compatible | no
+version | 2.4.0b1
+date | 2024-04-04T07:59:38+00:00
+level | 2
+class | fix
+component | checks
+edition | cre
+
+You might be affected by this Werk if you use mk_oracle on a unix
+system.
+
+You might be affected by this Werk if you use oracle wallet to connect to your
+database.
+
+You are definitely affected by this Werk if you use oracle wallet to connect to your
+database and used the instructions of our official documentation to set up your
+configuration.
+
+This Werk fixes connection problems introduced with 2.1.0p41, 2.2.0p24 and 2.3.0b4.
+
+
+Since Werk #16232 we switch to an
+unprivileged user when executing oracle binaries. This causes problems when
+using an oracle wallet as the unprivileged user might not be able to access
+files defining the connection details and credentials.
+
+We introduced an additional permission check to the `-t` ("Just check
+the connection") option of `mk_oracle`. It should help you modify
+the permissions to continue using `mk_oracle` with oracle wallet.
+
+You can execute it with the following command:
+
+```
+MK_CONFDIR=/etc/check_mk/ MK_VARDIR=/var/lib/check_mk_agent /usr/lib/check_mk_agent/plugins/mk_oracle --no-spool -t
+```
+
+The path to mk_oracle might be different if you execute it asynchronously. For a
+60 second interval the path would be `/usr/lib/check_mk_agent/plugins/60/mk_oracle`.
+
+The script will test the permissions of the files needed to connect to the database. It boils down to the following:
+
+`mk_oracle` will switch to the owner of `$ORACLE_HOME/bin/sqlplus` before
+executing `sqlplus`. So this user has to have the following permissions:
+
+- read `$TNS_ADMIN/sqlnet.ora`
+- read `$TNS_ADMIN/tnsnames.ora`
+- execute the wallet folder (`/etc/check_mk/oracle_wallet` if you followed the official documentation)
+- read files inside the wallet folder (`/etc/check_mk/oracle_wallet/*` if you followed the official documentation)
+
+Besides that, we also fixed some bash syntax errors we introduced with
+Werk #16232.
+
+See "Troubleshooting mk_oracle for Windows and Linux" for more information
+about troubleshooting this problem.
\ No newline at end of file
diff --git a/.werks/15328.md b/.werks/15328.md
new file mode 100644
index 00000000000..1c93ba465bc
--- /dev/null
+++ b/.werks/15328.md
@@ -0,0 +1,41 @@
+[//]: # (werk v2)
+# mk_oracle: Follow-up to privilege escalation fix: sqlnet.ora
+
+key | value
+---------- | ---
+compatible | no
+version | 2.4.0b1
+date | 2024-04-05T09:38:28+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+You are affected by this Werk if you use the mk_oracle agent plugin on Unix.
+
+mk_oracle only works if it can find a sqlnet.ora in your
+$TNS_ADMIN folder. In the past, mk_oracle executed all oracle
+binaries as root, so sqlnet.ora was always readable. With Werk #16232 the oracle binaries are
+executed with a low privileged user, so it might be the case that
+sqlnet.ora cannot be read by this user.
+
+mk_oracle will exit early if it cannot read sqlnet.ora. The
+error message might look like:
+
+```
+/etc/check_mk/sqlnet.ora can not be read by user "oracle"! Either use 'sqlnet.ora permission group' bakery rule, or directly modify permissions of the file.
+```
+
+The error message will also be visible in the oracle_instance check.
+
+If you use the agent bakery to roll out mk_oracle to Unix servers using
+.rpm, .deb or Solaris .pkg packages, you have to use
+the 'sqlnet.ora permission group' bakery rule to adapt the group of the
+sqlnet.ora file, otherwise your permission changes might be
+overwritten by updating the agent.
+
+Otherwise it is sufficient to adapt the permissions.
+
+If you install the agent on Unix using the tgz package, you will have
+to manually adjust the permissions of the sqlnet.ora file.
\ No newline at end of file
diff --git a/.werks/15329.md b/.werks/15329.md
new file mode 100644
index 00000000000..6b5b7cb09a6
--- /dev/null
+++ b/.werks/15329.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# mk_oracle: fix two parse errors
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-09T06:01:31+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Due to fixes introduced with
+Werk #16232, new error messages
+have been introduced to sections which previously did not have to handle any
+errors.
+
+Now the oracle_processes and oracle_recovery_area services can
+handle the new error messages.
\ No newline at end of file
diff --git a/.werks/15330.md b/.werks/15330.md
new file mode 100644
index 00000000000..ee423483b76
--- /dev/null
+++ b/.werks/15330.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# mk_oracle: report failed login
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-10T08:38:00+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Due to fixes introduced with
+Werk #16234, a failed login to the
+oracle database was not reported as critical, but the services were going
+stale. This is now fixed.
\ No newline at end of file
diff --git a/.werks/15331.md b/.werks/15331.md
new file mode 100644
index 00000000000..711d4fd2b5f
--- /dev/null
+++ b/.werks/15331.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# postgres_stat_database_size: Don't discover 'access_to_shared_objects'
+
+key | value
+---------- | ---
+date | 2024-04-16T07:10:21+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+Checkmk discovered Services like "PostgreSQL DB MAIN/access_to_shared_objects
+Size" but the Services only showed "Database size not available" and a WARN
+status.
+
+Those Services are no longer discovered.
diff --git a/.werks/15332.md b/.werks/15332.md
new file mode 100644
index 00000000000..825fa9839af
--- /dev/null
+++ b/.werks/15332.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Inventory: Add Windows support for Hardware > System > Uuid
+
+key | value
+---------- | ---
+date | 2024-04-16T13:09:47+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | inv
+level | 1
+compatible | yes
+
+This element is already available for Linux; now the Windows agent also supports
+reading this value.
+
+You have to update `mk_inventory.vbs` on the monitored host to provide the
+necessary data.
diff --git a/.werks/15333.md b/.werks/15333.md
new file mode 100644
index 00000000000..dcd3396b1d6
--- /dev/null
+++ b/.werks/15333.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Fix parsing of win_computersystemproduct
+
+key | value
+---------- | ---
+date | 2024-04-29T08:53:34+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | inv
+level | 1
+compatible | yes
+
+On some Windows systems `Win32_ComputerSystemProduct` seems to return no data.
+This call was introduced with [Werk #15332](https://checkmk.com/werk/15332).
+
+Now the parser of this data (`win_computersystemproduct`) can handle empty data.
+
+This change also reduces the data transported with the
+`win_computersystemproduct` section, as we only use the UUID.
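+
+A hedged sketch of a tolerant parser (names illustrative; only the UUID field
+is kept, as described above):
+
+```python
+def parse_win_computersystemproduct(string_table):
+    # Some Windows systems return an entirely empty section.
+    if not string_table or not string_table[0]:
+        return None
+    return {"uuid": string_table[0][0]}
+```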
diff --git a/.werks/15514.md b/.werks/15514.md
new file mode 100644
index 00000000000..3d2f4de9bd9
--- /dev/null
+++ b/.werks/15514.md
@@ -0,0 +1,38 @@
+[//]: # (werk v2)
+# check_httpv2: Introduce a reworked way to test web sites
+
+key | value
+---------- | ---
+date | 2024-03-08T10:06:58+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 2
+compatible | no
+
+The legacy http monitoring plugin caused quite some trouble over the last
+years. This included lots of effort to add features or simply fix
+bugs.
+
+With the new plugin, the functionality is moved to maintainable and
+extendable code completely under the control of Checkmk. This also means
+breaking changes compared to the old plugin:
+
+* Some metrics are not available anymore in the form they were known. We
+  discovered that these are simply not directly understandable. Instead, we
+  will add metrics as needed in the future; some metrics are already
+  added in this first release.
+* Some functionality has been a workaround and is now implemented directly
+  in the new plugin. This makes it hard to migrate rules automatically.
+* Users are now able to decide on their own which functionality should go
+  into its own service. This means that it is now possible to test
+  certificate validity and response times in one service, if needed.
+* Users are able to configure multiple http checks within one rule. You can
+  provide standard settings to be used for all endpoints and overwrite
+  them per entry for each endpoint. Migrating manually makes absolute
+  sense here.
+
+Please note that we will not remove the old plugin for now. We understand
+that you need some time to migrate your configurations. Nevertheless, we
+will deprecate the old plugin and eventually remove it from Checkmk.
diff --git a/.werks/15515.md b/.werks/15515.md
new file mode 100644
index 00000000000..f5d87bd6b01
--- /dev/null
+++ b/.werks/15515.md
@@ -0,0 +1,27 @@
+[//]: # (werk v2)
+# check_http: Soft deprecation of old HTTP monitoring plug-in
+
+key | value
+---------- | ---
+date | 2024-04-03T13:15:48+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 2
+compatible | no
+
+The old plug-in is being deprecated in a soft way with this werk. Unlike
+hard deprecation, the deprecated rule set "Check HTTP service" will remain
+fully functional. However, new rules should only be created if absolutely
+necessary, such as when experiencing issues with the new "Check HTTP web
+service" implementation and needing to roll back to the old one.
+
+Please note that the rule set will be hard deprecated in version 2.4.0,
+meaning that you will no longer be able to create new rules. However, the
+plug-in itself will remain available as this is a component of the
+monitoring-plugins collection that comes with Checkmk.
+
+Please let us know if you find any features that were present in the old
+plug-in but are missing in the new one.
+
diff --git a/.werks/15516.md b/.werks/15516.md
new file mode 100644
index 00000000000..b670f59b19c
--- /dev/null
+++ b/.werks/15516.md
@@ -0,0 +1,33 @@
+[//]: # (werk v2)
+# check_cert: New active check for advanced certificate monitoring
+
+key | value
+---------- | ---
+date | 2024-04-03T13:42:35+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 2
+compatible | yes
+
+The _check_http_ plug-in was previously the only method to monitor
+certificates out-of-the-box with Checkmk. With the new plug-in, Checkmk
+provides extensive functionality to monitor certificates. This includes
+but is not limited to certificates provided by the HTTP protocol.
+
+With the new plug-in you can monitor all certificates provided through
+a TCP connection to encrypt communication. This includes the monitoring
+of
+
+* validity times (max and remaining)
+* issuer fields
+* subject fields
+* encryption algorithm
+* alternative names
+* response times
+* public key algorithm and size
+* serial number
+
+As with the reworked plugin to monitor web services, you are able to
+configure multiple services within a single rule.
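+
+As a generic illustration of the certificate facts listed above (independent
+of the plugin's actual implementation), most of them can already be fetched
+with Python's standard library:
+
+```python
+import socket
+import ssl
+
+def fetch_cert_facts(host: str, port: int = 443) -> dict:
+    ctx = ssl.create_default_context()
+    with socket.create_connection((host, port)) as raw:
+        with ctx.wrap_socket(raw, server_hostname=host) as tls:
+            cert = tls.getpeercert()
+    return {
+        "subject": cert["subject"],
+        "issuer": cert["issuer"],
+        "not_after": ssl.cert_time_to_seconds(cert["notAfter"]),
+        "subject_alt_names": cert.get("subjectAltName", ()),
+        "serial_number": cert.get("serialNumber"),
+    }
+```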
diff --git a/.werks/15584 b/.werks/15584
deleted file mode 100644
index f31042c0bed..00000000000
--- a/.werks/15584
+++ /dev/null
@@ -1,13 +0,0 @@
-Title: mk_docker: Added podman support
-Class: feature
-Compatible: compat
-Component: checks
-Date: 1683627763
-Edition: cre
-Knowledge: undoc
-Level: 1
-Version: 2.3.0b1
-
-Added podman support in mk_docker.py.
-This way the plugin ends in case it is being executed on a non docker or podman host.
-
diff --git a/.werks/15619 b/.werks/15619
index ec202865e57..b2e731be379 100644
--- a/.werks/15619
+++ b/.werks/15619
@@ -1,4 +1,4 @@
-Title: mk_postgres.py: Allow Declaring PG_BINARY_NAME in postgres.cfg
+Title: mk_postgres.py: Allow Declaring PG_BINARY_PATH in postgres.cfg
Class: feature
Compatible: compat
Component: checks
diff --git a/.werks/15648 b/.werks/15648
new file mode 100644
index 00000000000..8afecabaafc
--- /dev/null
+++ b/.werks/15648
@@ -0,0 +1,17 @@
+Title: KUBE: Addition of support for Kubernetes version 1.28
+Class: feature
+Compatible: compat
+Component: checks
+Date: 1697615780
+Edition: cre
+Level: 1
+Version: 2.3.0b1
+
+With this release of Checkmk, we introduce support for version 1.28 of Kubernetes. In Checkmk 2.3,
+support for Kubernetes version 1.23 is removed. The supported versions are listed below:
+
+Checkmk 2.2: 1.22, 1.23, 1.24, 1.25, 1.26, 1.27
+Checkmk 2.3: 1.24, 1.25, 1.26, 1.27, 1.28
+
+The list of supported versions may not apply to future patch versions. For such cases, a
+new werk will be released.
diff --git a/.werks/15649 b/.werks/15649
new file mode 100644
index 00000000000..3ea37dbb570
--- /dev/null
+++ b/.werks/15649
@@ -0,0 +1,25 @@
+Title: windows_tasks: Display Correct Scheduled Task State
+Class: fix
+Compatible: incomp
+Component: checks
+Date: 1705315505
+Edition: cre
+Knowledge: doc
+Level: 1
+Version: 2.3.0b1
+
+Previously, the `Scheduled Task State` could be N/A if the Action of a Windows
+task contained a comma. Moreover, the plugin only supported German or English
+localization settings for Windows. Finally, with German localization settings,
+disabled scheduled tasks would be discovered (despite what the checkman page
+claimed); this worked correctly with English localization settings. With this
+Werk, `windows_tasks` uses the new PowerShell API to obtain the data. This
+fixes the aforementioned issues.
+
+Users who want to discover disabled tasks need to configure the new service
+discovery rule `Windows Tasks`.
+
+The changes in this Werk are only applicable if the underlying Windows version
+supports the PowerShell API. The Windows versions supported by Checkmk 2.2.0 and
+later all support the required PowerShell functions. Versions such as
+Windows 7 do not support the PowerShell API.
diff --git a/.werks/15650 b/.werks/15650
new file mode 100644
index 00000000000..fe24ff8b0a6
--- /dev/null
+++ b/.werks/15650
@@ -0,0 +1,17 @@
+Title: mk_tsm: Fix export: =: is not an identifier
+Class: fix
+Compatible: compat
+Component: checks
+Date: 1705329209
+Edition: cre
+Knowledge: doc
+Level: 1
+Version: 2.3.0b1
+
+Since Werk 16273 `eval` is no longer used to create the required environment.
+In some setups, the following error occurs.
+```
+export: =: is not an identifier
+```
+With this Werk the error is fixed.
+The error occurred if the output of `get_dsmserv_processes` consisted of a single linebreak.
diff --git a/.werks/15694 b/.werks/15694
index 50c4521e90a..838d5db6323 100644
--- a/.werks/15694
+++ b/.werks/15694
@@ -11,5 +11,5 @@ Version: 2.3.0b1
With Checkmk 2.2, announced by Werk #14977, the usage of mod_auth_mellon was deprecated. This release now removes mod_auth_mellon.
If you still have mod_auth_mellon in your apache config, the apache service will not be able to start.
-Errors are logged to var/lib/apache2/error_log.
+Errors are logged to var/log/apache2/error_log.
If you want to continue to use SAML you can do it in the Enterprise Edition via Setup -> Users -> SAML connections.
diff --git a/.werks/15717.md b/.werks/15717.md
new file mode 100644
index 00000000000..afc4cbc8370
--- /dev/null
+++ b/.werks/15717.md
@@ -0,0 +1,13 @@
+[//]: # (werk v2)
+# NagVis: Updated to 1.9.40
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-09T10:43:25+00:00
+level | 1
+class | fix
+component | packages
+edition | cre
+
diff --git a/.werks/15718.md b/.werks/15718.md
new file mode 100644
index 00000000000..48bc0330b0d
--- /dev/null
+++ b/.werks/15718.md
@@ -0,0 +1,30 @@
+[//]: # (werk v2)
+# Improve main menu keyboard navigation
+
+key | value
+---------- | ---
+date | 2024-01-16T11:21:43+00:00
+version | 2.3.0b1
+class | feature
+edition | cre
+component | multisite
+level | 2
+compatible | yes
+
+The main menu of the UI now has extended support for keyboard navigation.
+
+You can open up the menus with the following key combinations:
+
+* Monitoring: `ALT + m`
+* Setup: `ALT + s`
+* Customize: `ALT + c`
+
+The search field is automatically focused (as before), so you can directly enter
+your search term. By using `TAB` you can navigate the search results and confirm
+your choice with `ENTER`.
+
+Pressing `ESC` for the first time clears the current search term. A second `ESC`
+closes the open menu.
+
+Utilizing this workflow can make navigating to the desired pages in Checkmk much
+faster.
diff --git a/.werks/15719.md b/.werks/15719.md
new file mode 100644
index 00000000000..698552e1a65
--- /dev/null
+++ b/.werks/15719.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# mk_docker: Prevent "Missing monitoring data for plugins" warnings
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-18T06:57:22+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+In case a docker node section cannot be computed by the mk_docker agent plugin, the "Docker node
+info" service reports the error in detail. Until this werk, the "Check_MK" service would also
+raise an issue regarding the missing agent section, like: "Missing monitoring data for plugins:
+docker_node_disk_usage". This duplicate information is now suppressed, reducing the number of
+notifications Checkmk produces in such situations.
\ No newline at end of file
diff --git a/.werks/15720.md b/.werks/15720.md
new file mode 100644
index 00000000000..ae87729bef9
--- /dev/null
+++ b/.werks/15720.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# docker_node_info: Display all errors reported by the agent plugin
+
+key | value
+---------- | ---
+date | 2024-01-18T13:15:53+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The "Docker node info" service reports about failures of the mk_docker
+agent plugin. In the past it always only reported one error per check
+cycle, which now changed to displaying all reported issues.
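+
+Schematically, the change looks like this (hypothetical names, not the actual
+check source):
+
+```python
+def check_docker_node_info(parsed_errors):
+    # Before: only the first entry of parsed_errors was reported.
+    for error in parsed_errors:
+        yield 2, error  # 2 == CRIT, one result per reported plugin error
+```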
diff --git a/.werks/15721.md b/.werks/15721.md
new file mode 100644
index 00000000000..6e372d21ed0
--- /dev/null
+++ b/.werks/15721.md
@@ -0,0 +1,25 @@
+[//]: # (werk v2)
+# Fix wrong edition reported via livestatus in enterprise sites running the Nagios core
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-19T08:47:31+00:00
+level | 1
+class | fix
+component | core
+edition | cee
+
+This change fixes a bug in the Nagios core edition detection logic which could occur when using the
+Nagios core with an edition other than the Raw Edition. This is a rare use case, but can happen
+temporarily during the migration from the Raw Edition to an Enterprise Edition.
+
+The bug manifests itself by reporting the wrong edition via the livestatus interface. This can be
+observed with the following query:
+
+```
+lq "GET status\nColumns: edition"
+```
+
+The edition column was introduced in 2.2.0.
\ No newline at end of file
diff --git a/.werks/15722.md b/.werks/15722.md
new file mode 100644
index 00000000000..22e335abae0
--- /dev/null
+++ b/.werks/15722.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# omd update: Fix setting world read permissions on etc and local files
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-19T18:46:45+00:00
+level | 1
+class | fix
+component | omd
+edition | cre
+
+Starting with 2.2.0 (see #15062), we use a umask of 0077 to prevent files and directories owned by
+the site user from being world readable. However, when updating from the CRE to a non-CRE edition,
+some files and directories were still set to be world readable during 'omd update'. They are now
+handled in line with the other files.
\ No newline at end of file
diff --git a/.werks/15723.md b/.werks/15723.md
new file mode 100644
index 00000000000..17cb18d7695
--- /dev/null
+++ b/.werks/15723.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Ensure site is stopped when running cmk-update-config
+
+key | value
+---------- | ---
+date | 2024-01-23T11:58:49+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | core
+level | 1
+compatible | yes
+
+The `cmk-update-config` command, which is usually executed as part of `omd
+update`, expects the site to be stopped during execution. This is always ensured
+during `omd update`.
+
+However, the command can also be executed manually later on. In this situation
+the site can be running. To prevent unexpected results, the command now protects
+against an accidentally running site on invocation.
diff --git a/.werks/15724.md b/.werks/15724.md
new file mode 100644
index 00000000000..d7b6c06a6c1
--- /dev/null
+++ b/.werks/15724.md
@@ -0,0 +1,39 @@
+[//]: # (werk v2)
+# Change API specification computation
+
+key | value
+---------- | ---
+date | 2024-02-17T13:24:38+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 2
+compatible | yes
+
+The specification of the REST API defines the structure of the API. It is
+computed automatically from the implementation in Checkmk.
+
+Previously the specification was computed during runtime when something
+requested access to the specification. This could be a user opening ReDoc or the
+Swagger UI. The specification was then computed ad-hoc and cached in the memory of the
+apache process. This caused several issues:
+
+* After spawning a new apache, the specification needed to be recomputed for
+ every process. This caused a delay in the first request hitting an
+ apache process asking for it.
+* It was held in memory by every process consuming a few MB.
+* The invalidation of the cache and computation of new specification could not
+ be triggered manually.
+
+With this change the specification is now stored in the site and made available
+to all apache processes from there.
+
+With the dedicated command `cmk-compute-api-spec` the computation can now be
+triggered in specific situations automatically or manually for debugging.
+
+The specification is now updated in these situations:
+
+* post-create hook: Create the initial spec after a site has been created
+* post rename action: Update the spec after a site has been copied, restored or renamed
+* update-config action: Update the spec after the site has been updated
diff --git a/.werks/15725.md b/.werks/15725.md
new file mode 100644
index 00000000000..8b94691c519
--- /dev/null
+++ b/.werks/15725.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# Cleanup old Microcore config during update procedure
+
+key | value
+---------- | ---
+date | 2024-02-27T09:23:01+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | core
+level | 1
+compatible | yes
+
+This change prevents a problem that might occur in case `omd update` did
+not finish successfully. In this situation, the Microcore might be started with
+a configuration file from the previous version. This could lead to unexpected
+behavior.
+
+Instead of keeping the old configuration, the update procedure now deletes the
+file, which makes the Microcore fail during startup with a more helpful error
+message.
diff --git a/.werks/15745.md b/.werks/15745.md
new file mode 100644
index 00000000000..d8931d67f1c
--- /dev/null
+++ b/.werks/15745.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Checkmk now redacts site secrets during support diagnostics generation
+
+key | value
+---------- | ---
+date | 2024-01-18T07:51:01+00:00
+version | 2.3.0b1
+class | feature
+edition | cee
+component | multisite
+level | 1
+compatible | yes
+
+Prior to this werk, generating a support diagnostic for deployments with distributed monitoring of multiple Checkmk instances would expose site secrets within the "etc/check_mk/multisite.d/sites.mk" file.
+
+All site secrets are now replaced with "redacted" during the generation of support diagnostics (Setup > Maintenance > Support diagnostics) whenever Checkmk configuration files are selected with at least low sensitivity or the sites.mk file is selected individually.
diff --git a/.werks/15838.md b/.werks/15838.md
new file mode 100644
index 00000000000..1e7f6a898ef
--- /dev/null
+++ b/.werks/15838.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Limit the service description length to 250
+
+key | value
+---------- | ---
+date | 2024-01-08T11:56:11+00:00
+version | 2.3.0b1
+class | feature
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Starting with this release, WATO prevents the creation of services with a
+description exceeding the limit of 250 characters.
+
diff --git a/.werks/15841.md b/.werks/15841.md
new file mode 100644
index 00000000000..70810abc745
--- /dev/null
+++ b/.werks/15841.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# The configuration is correctly loaded by RRD helper processes
+
+key | value
+---------- | ---
+date | 2024-03-26T10:03:03+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | core
+level | 2
+compatible | yes
+
+This change ensures that already running RRD helper processes reload the
+configuration, thereby guaranteeing that those processes use the correct
+configuration.
+
+SUP-17787
+CMK-16318
diff --git a/.werks/15842.md b/.werks/15842.md
new file mode 100644
index 00000000000..eb0ef97150a
--- /dev/null
+++ b/.werks/15842.md
@@ -0,0 +1,38 @@
+[//]: # (werk v2)
+# Enhanced MS SQL Server monitoring
+
+key | value
+---------- | ---
+date | 2024-04-03T07:47:56+00:00
+version | 2.3.0b4
+class | feature
+edition | cre
+component | checks
+level | 2
+compatible | yes
+
+With this release, MS SQL Server is monitored using a new plugin and a new GUI.
+
+The old plugin is still supported but is considered deprecated.
+
+Key enhancements out of the box:
+
+- Configuration flexibility: The plugin can be configured through a YAML config file in any edition and/or through a graphical user interface (GUI) in the enterprise edition or higher (see the sketch after this list).
+- Cross platform: The plugin can be deployed on Linux and Windows.
+- Enhanced monitoring capabilities: Supports monitoring of remote databases on both Linux and Windows hosts, in addition to local monitoring on Windows hosts.
+- Customizable monitoring sections: Sections are now selectable and configurable.
+- Customizable SQL statements: You may change SQL statements either manually (place a file in the `mssql` subdirectory of the config directory) or by using the `Custom files` rule in the GUI.
+- Multi-instance support: Enables the selection of different instances for monitoring. Every instance can be configured separately.
+- Multi-host support: It is possible to monitor databases on various hosts using one deployed plugin.
+- Security enhancements: Limited support for certificates is now available.
+- Asynchronous operation: Any section except `instances` can be set up for asynchronous operation.
+- Piggyback: It's possible to direct the output of a plugin to a different host, rather than to the host that retrieves the data.
+- Other improvements:
+  - Automatic detection of instances is possible for any Windows host, local and remote, depending on the SQL Server setup.
+  - Full logging support, including rotation and file limits.
+  - A limit for the maximal connection count.
+  - Cache time and timeout can be configured, too.
+
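+As an illustration of the YAML-based configuration mentioned in the list above,
+a heavily abridged sketch (all key names are assumptions; consult the example
+config shipped with the plugin for the authoritative structure):
+
+```yaml
+mssql:
+  main:
+    authentication:
+      username: "monitoring_user"  # assumption: placeholder credentials
+      password: "secret"
+    connection:
+      hostname: "localhost"        # assumption: local default instance
+      port: 1433
+```
+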
+Compared to the old plug-in, there are currently also a few restrictions:
+- The database instances must be accessible via TCP/IP.
+- If several databases are running on a system, each using their own IP addresses, these must be explicitly specified in the configuration of the agent plug-in, as the addresses and ports are currently not yet found automatically.
\ No newline at end of file
diff --git a/.werks/15844.md b/.werks/15844.md
new file mode 100644
index 00000000000..f77b239830f
--- /dev/null
+++ b/.werks/15844.md
@@ -0,0 +1,25 @@
+[//]: # (werk v2)
+# Microsoft SQL Server (Windows) ruleset is deprecated
+
+key | value
+---------- | ---
+date | 2024-04-17T13:40:06+00:00
+version | 2.3.0b6
+class | feature
+edition | cre
+component | checks
+level | 2
+compatible | no
+
+We've introduced a new `Microsoft SQL Server (Linux, Windows)` plug-in for MS SQL
+database monitoring, see
+[werk 15842: Enhanced MS SQL Server monitoring](https://checkmk.com/werk/15842).
+The new plugin extends the functionality of `Microsoft SQL Server (Windows)`
+by adding more options and features.
+
+We recommend that you upgrade to the `Microsoft SQL Server (Linux, Windows)` plug-in to monitor MS SQL databases. This new agent plugin can be deployed
+alongside the Checkmk agent on your database systems, just like the previous
+plugin. You can also use this plugin on any Windows or Linux
+server to monitor remote MSSQL servers over the network.
+
+The previous `Microsoft SQL Server (Windows)` rule set is deprecated and renamed to `Microsoft SQL Server (deprecated)`. Please note that you may need to adjust settings on your databases, or continue running the old plug-in for the time being, as the new agent plug-in cannot connect to local database instances that are not available over a TCP/IP connection.
\ No newline at end of file
diff --git a/.werks/15845.md b/.werks/15845.md
new file mode 100644
index 00000000000..9a54da0ebe6
--- /dev/null
+++ b/.werks/15845.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# winperf_if check correctly discovers and processes interfaces with spaces
+
+key | value
+---------- | ---
+date | 2024-04-22T15:31:08+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | checks
+level | 1
+compatible | no
+
+Previously, some Windows network interface names with additional spaces —
+particularly before the hash symbol — were not recognized by the check engine.
+This led to the absence of some services, especially Windows teaming network
+interfaces.
+
+In this release, such interface names are handled correctly, thus fixing the
+problem. Note that you may need to rediscover the affected services.
diff --git a/.werks/15976 b/.werks/15976
new file mode 100644
index 00000000000..23fbe597802
--- /dev/null
+++ b/.werks/15976
@@ -0,0 +1,18 @@
+Title: mssql_backup: Correct timezone difference for last backup date
+Class: fix
+Compatible: compat
+Component: checks
+Date: 1696949130
+Edition: cre
+Knowledge: doc
+Level: 1
+Version: 2.4.0b1
+
+This werk is relevant for users monitoring the age of the last backup of MSSQL databases across different timezones.
+
+The date/time of the last backup of an MSSQL database is currently stored in local host time without information about the host's timezone. When this time is used to check the age of the last backup, it is interpreted in the Checkmk server's timezone.
+When different timezones are involved, this leads to incorrect values for "Age of last database backup" and, if the age is negative, in newer Checkmk versions to the warning "Cannot reasonably calculate time since last backup (hosts time running ahead)".
+
+The mssql agent plugin will now store the time in UTC and the mssql_backup check will interpret the time accordingly.
+
+You will need to update the agent plugin mssql.vbs to receive the corrected times.
diff --git a/.werks/16012 b/.werks/16012
new file mode 100644
index 00000000000..6950cc7dea6
--- /dev/null
+++ b/.werks/16012
@@ -0,0 +1,9 @@
+Title: Event Console: Fix events on central site if these events are dedicated to remote sites
+Class: fix
+Compatible: compat
+Component: ec
+Date: 1702905058
+Edition: cre
+Level: 1
+Version: 2.3.0b1
+
diff --git a/.werks/16013.md b/.werks/16013.md
new file mode 100644
index 00000000000..ba7e9e7650b
--- /dev/null
+++ b/.werks/16013.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Fix missing event console rules after site update
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-17T09:18:22+00:00
+level | 1
+class | fix
+component | ec
+edition | cee
+
+With werk 16012, the event console rules are filtered and saved to the location
+`var/mkeventd/active_config` during activate changes.
+This werk adds the missing step to the procedure for updating to a new Checkmk version.
\ No newline at end of file
diff --git a/.werks/16015.md b/.werks/16015.md
new file mode 100644
index 00000000000..ab6cb4e469a
--- /dev/null
+++ b/.werks/16015.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# ldap & saml: resolve error when connection config is edited or created
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2023-07-28T08:14:29+00:00
+level | 1
+class | fix
+component | wato
+edition | cme
+
+Prior to this werk, Checkmk raised an error in the following cases:
+
+* when the user attempted to create an LDAP connection config with the customer option set to "Global"
+* when the user attempted to change an LDAP connection config with the customer option set to "Global"
+* when the user attempted to create a SAML connection config
+* when the user attempted to delete an existing SAML connection config
+
+This werk resolves these issues and Checkmk will not throw an error anymore.
\ No newline at end of file
diff --git a/.werks/16025 b/.werks/16025
new file mode 100644
index 00000000000..ab3ce4cda5f
--- /dev/null
+++ b/.werks/16025
@@ -0,0 +1,32 @@
+Title: Update PHP version in SLES15SP3 from 7 to 8
+Class: fix
+Compatible: incomp
+Component: rpm
+Date: 1701254497
+Edition: cre
+Level: 2
+Version: 2.3.0b1
+
+Checkmk was shipped with a dependency on PHP7 for SLES15SP3. Since PHP7 is
+part of the legacy module, this Werk updates the dependency from PHP7 to PHP8.
+
+As SLES only allows one version of PHP to be installed, the following steps
+will uninstall PHP7 from the system and install the new version of Checkmk
+with PHP8. Be aware that this procedure updates PHP from version 7 to 8 for the whole OS. In case you run additional PHP applications next to Checkmk, the update will also affect them.
+
+Run the following commands to perform the update to the new Checkmk version:
+
+* add SLES-15SP4 repo to get PHP8 with zypper addrepo https://updates.suse.com/SUSE/Products/SLE-BCI/15-SP4/x86_64/product/ sles15sp4
+* install the new Checkmk version with zypper install NEW_CHECKMK.rpm
+* Zypper will now complain about a conflict with several PHP packages and ask you to select a solution. There, select solution 1 to confirm the uninstallation of the current Checkmk version and the PHP7 modules and to continue with the installation
+* confirm the installation of the new Checkmk version and PHP8 with yes
+* removing the existing Checkmk version will throw an error like `Site is still using this version! Removal of (@System) failed:`; proceed by choosing ignore, which creates an inconsistent state for the old Checkmk version package that we will resolve in a later step
+* PHP7 will be removed and PHP8 gets installed
+* change to the site user with omd su SITE_NAME
+* stop the site with omd stop
+* perform the update to the new Checkmk version with omd update, select Update at the user prompt
+* in case further prompts regarding wrong permissions of BUILD files appear, choose the default value with d
+* start the site again with omd start
+* exit from the site user
+* list all installed Checkmk versions with omd versions
+* finally remove the old Checkmk installation with zypper remove OLD_CHECKMK
diff --git a/.werks/16037.md b/.werks/16037.md
new file mode 100644
index 00000000000..9be28ed75b3
--- /dev/null
+++ b/.werks/16037.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# folder_config/host_config: No longer accept non-existent site
+
+
+key | value
+---------- | ---
+date | 2023-12-22T08:11:28+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+You can no longer set a non-existent site on folders and hosts.
+
+When called with a non-existent site as an attribute, the
+endpoints now return the status code 400.
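+
+A hedged example of the new behavior (site ID, host name and credentials are
+illustrative):
+
+```
+curl -X POST "http://example.com/mysite/check_mk/api/1.0/domain-types/host_config/collections/all" \
+  -H "Authorization: Bearer automation yoursecret" \
+  -H "Content-Type: application/json" \
+  -d '{"host_name": "myhost", "folder": "/", "attributes": {"site": "no_such_site"}}'
+# -> HTTP 400, since "no_such_site" does not exist
+```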
diff --git a/.werks/16038.md b/.werks/16038.md
new file mode 100644
index 00000000000..c7d3fef5eb5
--- /dev/null
+++ b/.werks/16038.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# SLA tooltip missing UNKN state
+
+key | value
+---------- | ---
+date | 2024-01-04T08:28:52+00:00
+version | 2.3.0b1
+class | fix
+edition | cee
+component | multisite
+level | 1
+compatible | yes
+
+
+The SLA tooltip would always show UNKN (0%), even if the UNKN state had occurred;
+the corresponding value was aggregated as a PEND state instead.
+
+Now the UNKN state is displayed correctly.
diff --git a/.werks/16039.md b/.werks/16039.md
new file mode 100644
index 00000000000..af57e6c84d2
--- /dev/null
+++ b/.werks/16039.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# host_config: add inherited labels to effective attributes
+
+key | value
+---------- | ---
+date | 2024-01-26T11:20:55+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+Previously, when using the effective attributes query parameter with the
+```
+GET /objects/host_config/{host_name}
+```
+endpoint, only the labels defined directly on the host were returned.
+
+Now all of the effective labels are aggregated and returned.
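+
+For example (site ID, host name and credentials illustrative):
+
+```
+curl "http://example.com/mysite/check_mk/api/1.0/objects/host_config/myhost?effective_attributes=true" \
+  -H "Authorization: Bearer automation yoursecret" \
+  -H "Accept: application/json"
+```
+
+Labels inherited from parent folders now appear among the effective attributes
+in the response.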
diff --git a/.werks/16040.md b/.werks/16040.md
new file mode 100644
index 00000000000..4a56e7a5d22
--- /dev/null
+++ b/.werks/16040.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Add Top list dashlet
+
+key | value
+---------- | ---
+date | 2024-01-31T07:14:24+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | multisite
+level | 1
+compatible | yes
+
+The Top list dashlet displays the top (or bottom) X values of a selected metric.
+
+The number of displayed values can be selected, but is limited to 50.
diff --git a/.werks/16047.md b/.werks/16047.md
new file mode 100644
index 00000000000..717a881cf6c
--- /dev/null
+++ b/.werks/16047.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Changed host labels for Azure Resource Groups and Virtual Machine
+
+key | value
+---------- | ---
+date | 2024-01-26T12:20:23+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This werk affects users who use the tags of Azure Resource Groups or Virtual Machines (if monitored as hosts) as host labels in Checkmk.
+The host labels are renamed from the original Azure format "{key}":"{value}" to "cmk/azure/tag/{key}":"{value}" in order to stay consistent with other host labels and to avoid overwriting existing Checkmk labels.
+Additionally, label keys and values are now validated to 1) not hold empty values - empty values are replaced by "true" and 2) not contain colons (":") - colons are replaced by underscores ("_"). So a former label "label:key":"" will now be shown as "label_key":"true".
+The old labels will be removed in version 2.4.0.
diff --git a/.werks/16048.md b/.werks/16048.md
new file mode 100644
index 00000000000..f8e3456fd86
--- /dev/null
+++ b/.werks/16048.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Import Azure tags as service labels
+
+key | value
+---------- | ---
+date | 2024-01-29T07:26:41+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Azure resource tags are now imported as service labels as well. Each service that is discovered for an Azure resource now holds the resource's tags as service labels.
+Also, you can now restrict the import of Azure tags via regex or disable the import altogether. This can be done in the rule "Microsoft Azure" via option "Import tags as host/service labels".
+Note that tags are validated to 1) not hold empty values - empty values are replaced by "true" and 2) not contain colons (":") - colons are replaced by underscores ("_"). So an Azure resource tag "tag:key":"" will now be shown as "tag_key":"true".
diff --git a/.werks/16049.md b/.werks/16049.md
new file mode 100644
index 00000000000..c4d8543ffe0
--- /dev/null
+++ b/.werks/16049.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# Render service graphs of host independent of historic metrics
+
+key | value
+---------- | ---
+date | 2024-02-19T07:41:52+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | multisite
+level | 1
+compatible | yes
+
+The view "Service graphs of host" used to show the error message "No historic metrics recorded but performance data is available. Maybe performance data processing is disabled." even if the underlying single service graphs existed.
+This occured especially when using a host check command.
+
+The behavior is fixed to always rendering the service graphs in the view "Service graphs of host", if their performance data is available.
+
+Note that this change does not affect the same error message shown for the "Host graph" row of the "Status of Host" view, as the available performance data belongs to the services and not to the host.
+
+
diff --git a/.werks/16050.md b/.werks/16050.md
new file mode 100644
index 00000000000..ac86cfbfacc
--- /dev/null
+++ b/.werks/16050.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Change default OS dashboards "Linux hosts" and "Windows hosts"
+
+key | value
+---------- | ---
+date | 2024-03-12T07:58:50+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | multisite
+level | 1
+compatible | yes
+
+We changed the default dashboards "Linux hosts" and "Windows hosts" (Monitor > Overview) to a new layout and, in part, to new dashboard elements.
+This change is intended to improve the user experience and to provide the most relevant Linux/Windows host information on these boards.
diff --git a/.werks/16067 b/.werks/16067
index b730256a6cf..f22567153a0 100644
--- a/.werks/16067
+++ b/.werks/16067
@@ -6,9 +6,8 @@ Date: 1698237843
Edition: cre
Knowledge: doc
Level: 1
-Version: 2.3.0i1
+Version: 2.3.0b1
Werk #15393 already solved this for most users but in rare cases, the
formulars were still so big that an "Internal server error" occurred on saving.
This change should fix that behaviour even for such cases.
-
diff --git a/.werks/16075 b/.werks/16075
new file mode 100644
index 00000000000..d31bbb62297
--- /dev/null
+++ b/.werks/16075
@@ -0,0 +1,14 @@
+Title: Notification spooler: Fix possible wrong order of notification processing
+Class: fix
+Compatible: compat
+Component: notifications
+Date: 1700481159
+Edition: cee
+Level: 1
+Version: 2.3.0b1
+
+The notification spooler used the mtime of the spool files to determine the
+order of execution.
+
+In rare cases, the mtime was too imprecise, so we now use the mtime in
+nanoseconds.
diff --git a/.werks/16084.md b/.werks/16084.md
new file mode 100644
index 00000000000..5ae17b6ed27
--- /dev/null
+++ b/.werks/16084.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Show full agent hash in Check_MK Agent service and Agent update status
+
+key | value
+---------- | ---
+date | 2024-02-15T08:39:14+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | agents
+level | 1
+compatible | yes
+
+As an addition to Werk #15424, the *Check_MK Agent* service and the
+*Agent update status* page now also show the full 16-digit agent hash.
diff --git a/.werks/16085.md b/.werks/16085.md
new file mode 100644
index 00000000000..e6a87a5fde8
--- /dev/null
+++ b/.werks/16085.md
@@ -0,0 +1,24 @@
+[//]: # (werk v2)
+# Agent Updater: Better detection of underlying platform
+
+key | value
+---------- | ---
+date | 2024-02-21T10:36:36+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | agents
+level | 1
+compatible | yes
+
+Previously, the agent updater used to detect the underlying platform,
+in terms of the combination of OS and package manager (e.g., Linux + RPM),
+automatically by scanning for certain directories.
+
+As multiple package managers may be available on one system, this sometimes
+led to unexpected behavior on agent updates when the update mechanism decided
+to switch to a different platform.
+
+Now, the platform will be specified by a file that comes with the agent installation,
+and the agent updater will rely on this static information instead of the dynamic detection.
+
diff --git a/.werks/16088.md b/.werks/16088.md
new file mode 100644
index 00000000000..640098c33db
--- /dev/null
+++ b/.werks/16088.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Support Diagnostics: Also add the latest Crash Dumps to the SD Dump
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2023-09-27T16:54:11+00:00
+level | 1
+class | feature
+component | setup
+edition | cre
+
+For each category of crash dumps (base, check, ec, gui, rest_api, section), the latest dump
+is now added to the Support Diagnostics dump.
\ No newline at end of file
diff --git a/.werks/16114 b/.werks/16114
index 2c753c5b54e..894ab178adc 100644
--- a/.werks/16114
+++ b/.werks/16114
@@ -8,5 +8,9 @@ Knowledge: doc
Level: 1
Version: 2.3.0b1
-Previously, the folder name pattern for GET and DELETE endpoints were stricter than the CREATE, provoking that a new folder could not be retrieved nor deleted. This fix widens the folder name pattern on GET and DELETE endpoints to align with the CREATE one.
+Prior to this Werk, the folder name pattern for GET and DELETE endpoints did not allow the use of Unicode characters, while they were supported by the CREATE endpoint. As a result, folders created with such characters could not be accessed or deleted via the REST API.
+
+For example, the user was able to create a folder named û亿Ï8Ĺ, which then could not be read or deleted from the API.
+
+This Werk widens the folder name pattern on GET and DELETE endpoints to align it with the CREATE one; now all of them support Unicode characters.
diff --git a/.werks/16116.md b/.werks/16116.md
new file mode 100644
index 00000000000..cd35a80687d
--- /dev/null
+++ b/.werks/16116.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Fixed association of contacts with hosts/services/contactgroups
+
+key | value
+---------- | ---
+date | 2024-04-05T13:48:37+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | livestatus
+level | 3
+compatible | yes
+
+Checkmk 2.3 beta introduced a regression regarding contacts when
+the Nagios core was used: The association of contacts with hosts,
+services and contact groups was incorrect. Symptoms of this bug
+were, for example, missing hosts or services in the GUI.
diff --git a/.werks/16147.md b/.werks/16147.md
new file mode 100644
index 00000000000..9140cf20590
--- /dev/null
+++ b/.werks/16147.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# Abort CMC on irrecoverable filesystem errors
+
+key | value
+---------- | ---
+date | 2024-01-03T15:27:33+00:00
+version | 2.3.0b1
+class | fix
+edition | cee
+component | cmc
+level | 1
+compatible | yes
+
+The errors
+
+ * too many files open (EMFILE)
+ * too many files open in system (ENFILE)
+ * no buffer space (ENOBUFS)
+ * not enough memory (ENOMEM)
+
+now make the core exit. Correct monitoring cannot be
+guaranteed when the server is in this state.
diff --git a/.werks/16148.md b/.werks/16148.md
new file mode 100644
index 00000000000..86539d0401c
--- /dev/null
+++ b/.werks/16148.md
@@ -0,0 +1,28 @@
+[//]: # (werk v2)
+# New livestatus column performance_data
+
+key | value
+---------- | ---
+date | 2024-01-12T06:35:01+00:00
+version | 2.3.0b1
+class | feature
+edition | cre
+component | livestatus
+level | 1
+compatible | yes
+
+The hosts and services tables have a new column named
+`performance_data` that returns a mapping where the keys
+are the metric names and the values are the numeric values
+of the performance data.
+
+For example,
+```
+OMD[heute]:~$ lq << EOF
+> GET services
+> Columns: description performance_data
+> Filter: description = CPU load
+> OutputFormat: python
+> EOF
+[["CPU load",{"load5":0.64,"load1":0.62,"load15":1.13}]]
+```
diff --git a/.werks/16149.md b/.werks/16149.md
new file mode 100644
index 00000000000..8c6f6fbe8c2
--- /dev/null
+++ b/.werks/16149.md
@@ -0,0 +1,56 @@
+[//]: # (werk v2)
+# New OrderBy header in livestatus
+
+key | value
+---------- | ---
+date | 2024-01-29T10:12:24+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | livestatus
+level | 1
+compatible | yes
+
+We provide a new header for livestatus that returns the rows in
+sorted order.
+
+For example, to sort host names in descending order
+```
+$ lq 'GET hosts\nColumns: name\nOrderBy: name desc'
+zhost
+yhost
+xhost
+...
+```
+in ascending order
+```
+$ lq 'GET hosts\nColumns: name\nOrderBy: name asc'
+ahost
+bhost
+...
+```
+or, alternatively without `asc`,
+```
+$ lq 'GET hosts\nColumns: name\nOrderBy: name'
+ahost
+bhost
+...
+```
+
+The `OrderBy` header can be combined with the `Limit` header to
+limit the number of results as expected.
+```
+$ lq 'GET hosts\nColumns: name\nOrderBy: name\nLimit: 1'
+ahost
+```
+
+Furthermore, it is possible to sort on dictionary keys with the
+following syntax
+```
+$ lq << EOF
+GET services
+Columns: host_name description performance_data
+OrderBy: performance_data.user_time
+EOF
+...
+```
diff --git a/.werks/16163 b/.werks/16163
new file mode 100644
index 00000000000..694602f8ab4
--- /dev/null
+++ b/.werks/16163
@@ -0,0 +1,46 @@
+Title: jar_signature: Prevent privilege escalation to root
+Class: security
+Compatible: incomp
+Component: checks
+Date: 1702395666
+Edition: cre
+Level: 3
+Version: 2.3.0b1
+
+The jar_signature agent plugin (configured by the 'Signatures of certificates in JAR files' bakery rule)
+was vulnerable to privilege escalation to root by the oracle user.
+
+A malicious oracle user could replace the jarsigner binary with another script and put
+it in the JAVA_HOME directory. The script would be executed by the root user.
+
+The jarsigner is now executed by the oracle user, preventing the privilege escalation.
+
+This werk is incompatible for users of the jar_signature plugin. To avoid risk, users
+should deploy the new version of the plugin or disable it.
+
+This issue was found during internal review.
+
+
+### Affected Versions
+
+ * 2.2.0
+ * 2.1.0
+ * 2.0.0 (EOL) and older
+
+
+### Mitigations
+
+If updating is not possible, disable the jar_signature plugin.
+
+
+### Vulnerability Management
+
+We have rated the issue with a CVSS score of 8.8 (High) with the following CVSS vector:
+`CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H`
+
+We have assigned `CVE-2023-6740`.
+
+
+### Changes
+
+The jarsigner binary is now executed by the oracle user.
\ No newline at end of file
diff --git a/.werks/16164.md b/.werks/16164.md
new file mode 100644
index 00000000000..a43be5f3f02
--- /dev/null
+++ b/.werks/16164.md
@@ -0,0 +1,23 @@
+[//]: # (werk v2)
+# veeam_cdp_jobs: Handle last sync time from the future
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2023-12-29T08:24:59+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Previously, the veeam_cdp_jobs check would crash when receiving a last
+sync time from the future, with the message:
+```
+raise ValueError("Cannot render negative timespan")
+```
+
+Now, the affected service will be in state WARN and report the following message:
+```
+"The timestamp of the file is in the future. Please investigate your host times"
+```
\ No newline at end of file
diff --git a/.werks/16165.md b/.werks/16165.md
new file mode 100644
index 00000000000..a6af7ab0e8a
--- /dev/null
+++ b/.werks/16165.md
@@ -0,0 +1,23 @@
+[//]: # (werk v2)
+# check_mk_agent: Set LC_ALL before running the agent
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-02T10:09:48+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Previously, Checkmk agents would be run with a preset LC_ALL
+environment variable if neither the C.UTF-8 nor the C.utf-8 locale was
+installed.
+
+That led to invalid agent output and crashes in section parsing
+in multiple checks for some of the locales.
+
+Linux, AIX, Solaris, FreeBSD and OpenWrt agents were affected.
+
+Now, the LC_ALL variable is set to C in the described case.
\ No newline at end of file
diff --git a/.werks/16166.md b/.werks/16166.md
new file mode 100644
index 00000000000..6676bcc7579
--- /dev/null
+++ b/.werks/16166.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# ibm storwize: Fix missing data when monitoring nodes
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-03T09:05:20+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+The lsnodestats command was used for monitoring nodes in IBM Storwize devices.
+However, Storwize devices don't have the lsnodestats command, which led to missing data
+in ibm_svc_nodestats services.
+
+Now, the IBM SVC agent uses lsnodestats if it's available and lsnodecanisterstats
+otherwise.
\ No newline at end of file
diff --git a/.werks/16167.md b/.werks/16167.md
new file mode 100644
index 00000000000..e86b842bc10
--- /dev/null
+++ b/.werks/16167.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# pandacom_psu: New PSU types added
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-04T13:07:52+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Pandacom devices with PSU types 65025-65033 caused a crash during
+section parsing.
+
+Now, Checkmk recognizes the new type codes and reports the correct
+PSU type.
\ No newline at end of file
diff --git a/.werks/16168.md b/.werks/16168.md
new file mode 100644
index 00000000000..e14d2994e87
--- /dev/null
+++ b/.werks/16168.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# fortiauthenticator_auth_fail: Detect new FortiAuthenticator devices
+
+key | value
+---------- | ---
+date | 2024-01-05T13:30:46+00:00
+version | 2.3.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Additional FortiAuthenticator devices with SysObjectID starting with
+".1.3.6.1.4.1.12356.113" are now detected.
diff --git a/.werks/16169.md b/.werks/16169.md
new file mode 100644
index 00000000000..12f0862128d
--- /dev/null
+++ b/.werks/16169.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# opsgenie: Fix notification acknowledgement if host or service are back to OK
+
+key | value
+---------- | ---
+date | 2024-01-05T14:04:50+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | notifications
+level | 1
+compatible | yes
+
+Previously, Opsgenie notifications wouldn't be acknowledged if the
+host or service state went back to OK in the meantime.
diff --git a/.werks/16170.md b/.werks/16170.md
new file mode 100644
index 00000000000..cbb70ec2884
--- /dev/null
+++ b/.werks/16170.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# snmp: Store OID cache per context group
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-07T09:54:40+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+SNMP caching didn't take contexts into consideration when storing
+OID data. This led to the same result being reported for the OID
+in different sections, even if the sections used different contexts.
+
+Now, SNMP caching stores fetched OID data for every group of contexts
+it was called with.
\ No newline at end of file
diff --git a/.werks/16171.md b/.werks/16171.md
new file mode 100644
index 00000000000..05377c96bae
--- /dev/null
+++ b/.werks/16171.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# aws: Fix Cloudwatch alarms fetching
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-21T13:16:55+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Cloudwatch alarms weren't fetched properly in environments with a lot
+of alarms. This resulted in missing alarms in the 'AWS/CloudWatch Alarms' service.
\ No newline at end of file
diff --git a/.werks/16172.md b/.werks/16172.md
new file mode 100644
index 00000000000..e3d9c3a492a
--- /dev/null
+++ b/.werks/16172.md
@@ -0,0 +1,31 @@
+[//]: # (werk v2)
+# kaspersky_av: Don't run kav4fs-control or kesl-control if they aren't owned by root
+
+key | value
+---------- | ---
+date | 2024-02-27T09:14:50+00:00
+version | 2.4.0b1
+class | security
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The Kaspersky Anti-Virus plugin uses the /opt/kaspersky/kav4fs/bin/kav4fs-control and
+/opt/kaspersky/kesl/bin/kesl-control commands to monitor a Kaspersky Anti-Virus
+installation.
+
+To prevent privilege escalation, the plugin (which is run by the root user) must
+not run executables which can be changed by less privileged users.
+
+In the default installation, the kav4fs-control and kesl-control commands are owned
+by root and root is the only user with write permissions, which prevents privilege
+escalation attacks.
+
+With this Werk, the plugin checks whether the control commands are owned by root and
+root is the only user with write permissions before running them. If that's not
+the case, the commands won't be run. This prevents privilege escalation attacks if
+the permissions of the control commands have been changed.
+
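+An illustrative shell sketch of the described check (not the shipped plugin
+code; `stat -c` assumes GNU coreutils, and the `--app-info` flag is only an
+example invocation):
+
+```
+control="/opt/kaspersky/kesl/bin/kesl-control"
+owner=$(stat -c %u "$control")
+mode=$(stat -c %a "$control")
+# require root ownership and no group/world write bits (022 mask)
+if [ "$owner" = "0" ] && [ $((0$mode & 022)) -eq 0 ]; then
+    "$control" --app-info
+fi
+```
+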
+We rate this with a CVSS of 0 (None) (CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:N).
+This CVSS is primarily meant to please automatic scanners.
diff --git a/.werks/16173.md b/.werks/16173.md
new file mode 100644
index 00000000000..8b053277aac
--- /dev/null
+++ b/.werks/16173.md
@@ -0,0 +1,31 @@
+[//]: # (werk v2)
+# symantec_av: Don't run sav command if it isn't owned by root
+
+key | value
+---------- | ---
+date | 2024-02-28T08:58:09+00:00
+version | 2.4.0b1
+class | security
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The Symantec Anti-Virus plugin uses the /opt/Symantec/symantec_antivirus/sav command
+to monitor a Symantec Anti-Virus installation.
+
+To prevent privilege escalation, the plugin (which is run by the root user) must
+not run executables which can be changed by less privileged users.
+
+In the default installation, the sav command is owned by root and root is the only
+user with write permissions, which prevents privilege escalation attacks.
+
+With this Werk, the plugin checks whether the sav command is owned by root and root
+is the only user with write permissions before running it. If that's not
+the case, the command won't be run. This prevents privilege escalation attacks if
+the permissions of the sav command have been changed.
+
+We rate this with a CVSS of 0 (None) (CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:N).
+This CVSS is primarily meant to please automatic scanners.
+
+CMK-15318
diff --git a/.werks/16174.md b/.werks/16174.md
new file mode 100644
index 00000000000..69d9977fddb
--- /dev/null
+++ b/.werks/16174.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# mongodb_replica_set: Fix replication lag and last replication time
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-07T09:48:38+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Checkmk previously assumed that timestamps collected from the MongoDB oplog
+are provided in milliseconds. This wasn't the case, which led to wrong values for
+replication lag and last replication time being shown in
+the 'MongoDB Replication Lag' service.
\ No newline at end of file
diff --git a/.werks/16175.md b/.werks/16175.md
new file mode 100644
index 00000000000..ce3c99bb690
--- /dev/null
+++ b/.werks/16175.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# apc_netbotz_sensors, apc_netbotz_other_sensors: Add monitoring for Netbotz 50 devices
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-08T10:39:25+00:00
+level | 1
+class | feature
+component | checks
+edition | cre
+
+The apc_netbotz_sensors and apc_netbotz_other_sensors checks provided monitoring for APC Netbotz v2 devices.
+Now, the checks can additionally monitor APC Netbotz 50 devices.
\ No newline at end of file
diff --git a/.werks/16176.md b/.werks/16176.md
new file mode 100644
index 00000000000..4dec6b7a14c
--- /dev/null
+++ b/.werks/16176.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# postfix: Fix Postfix status monitoring for agents run in Docker
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-13T09:57:01+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Previously, the Checkmk agent used data from /proc to determine whether a Postfix instance is running.
+Since Docker containers don't have permission to read /proc, the agent always reported
+the Postfix instance as 'not running'.
+
+This resulted in a CRIT 'Postfix status' service even if the Postfix instance was running correctly.
\ No newline at end of file
diff --git a/.werks/16177.md b/.werks/16177.md
new file mode 100644
index 00000000000..f891c77b4cc
--- /dev/null
+++ b/.werks/16177.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# bi_aggregation: Better visualization of aggregation errors
+
+key | value
+---------- | ---
+date | 2024-03-15T15:27:21+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Aggregation errors are shown in the details of 'Aggr' services.
+Previously, all aggregation errors were shown in the same tree,
+independently of whether they influenced the state of the service,
+which led to confusion.
+Now, two trees are shown, 'Aggregation problems affecting the state' and
+'Aggregation problems not affecting the state'.
diff --git a/.werks/16178.md b/.werks/16178.md
new file mode 100644
index 00000000000..1dab1cfc43e
--- /dev/null
+++ b/.werks/16178.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# dns: Reintroduce macro replacement in 'Expected DNS answers' config
+
+key | value
+---------- | ---
+date | 2024-03-20T08:49:31+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+With version 2.3.0b1, the macros in the 'Expected DNS answers' field of 'Check DNS service'
+were no longer replaced. This change reintroduces macro replacement.
diff --git a/.werks/16179.md b/.werks/16179.md
new file mode 100644
index 00000000000..494fdf03bda
--- /dev/null
+++ b/.werks/16179.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# size_trend: Treat negative free space as 0 in all cases
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-20T10:04:09+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Before version 2.2.0p21, 'Time left until full' was reported to be 0 for
+devices reporting negative free space.
+With werk 16330, we stopped reporting the metric for very small size changes
+because it led to infinite values. With that change, the behavior was unintentionally
+also changed for negative free space values.
+This werk restores the previous functionality for negative free space.
\ No newline at end of file
diff --git a/.werks/16180.md b/.werks/16180.md
new file mode 100644
index 00000000000..83fe5080350
--- /dev/null
+++ b/.werks/16180.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Ruleset API: Datamodel changes for Proxy FormSpec
+
+key | value
+---------- | ---
+date | 2024-03-28T13:45:23+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This only affects plugin developers using the new API `cmk.rulesets.v1`.
+The data model for the `Proxy`, `Levels`, `TimePeriod` and `Password` form specs has changed.
+Use the corresponding `migrate_to_...` migration function to update your stored configurations to the newer data model.
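+
+A minimal sketch of such a migration, assuming the `migrate_to_password`
+helper from `cmk.rulesets.v1.form_specs` (the surrounding names are
+illustrative):
+
+```python
+from cmk.rulesets.v1.form_specs import DictElement, Dictionary, Password, migrate_to_password
+
+parameter_form = Dictionary(
+    elements={
+        "auth": DictElement(
+            # converts values stored in the old data model when they are loaded
+            parameter_form=Password(migrate=migrate_to_password),
+        ),
+    },
+)
+```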
diff --git a/.werks/16194.md b/.werks/16194.md
new file mode 100644
index 00000000000..caf32f50d20
--- /dev/null
+++ b/.werks/16194.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Licensing: Allow UI to be used in trial and free state when CMC is not running
+
+key | value
+---------- | ---
+date | 2024-01-08T12:25:13+00:00
+version | 2.3.0b1
+class | fix
+edition | cce
+component | wato
+level | 1
+compatible | yes
+
+When using a CCE in the trial phase or in the free license state, the UI was mostly unusable when the CMC was not running (with the pages showing the error "Cannot connect to 'unix:/omd/sites/monitoring_eval/tmp/run/live'....")
+
+Since the CMC is prohibited from starting if too many services are being monitored in the free license state, this meant that, in order to get out of the free state, the license could only be applied via the REST API.
+
+This has now been fixed.
diff --git a/.werks/16195.md b/.werks/16195.md
new file mode 100644
index 00000000000..84fd75c794d
--- /dev/null
+++ b/.werks/16195.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Licensing: Improve process of applying a license for non-running CMC
+
+key | value
+---------- | ---
+date | 2024-01-11T13:14:15+00:00
+version | 2.3.0b1
+class | fix
+edition | cce
+component | wato
+level | 1
+compatible | yes
+
+In werk #16194, an issue was fixed where the UI was not reachable for applying a license when the CMC was not running.
+However, if the core was not running due to a license issue, a new core configuration would have to be generated in order to restart the core.
+This has been improved so that the core can now be started without further interaction.
diff --git a/.werks/16197.md b/.werks/16197.md
new file mode 100644
index 00000000000..8487a488770
--- /dev/null
+++ b/.werks/16197.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# quantum_libsmall_*: Improve SNMP detection
+
+key | value
+---------- | ---
+date | 2024-02-27T12:11:36+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously, the SNMP detection for `quantum_libsmall_status` and `quantum_libsmall_door` checked whether "linux" and "library" are contained in the sysDescr and sysLocation OIDs. To make the detection more reliable, the sysObjectID is now checked against the Linux object identifier, and the libraryProductName (.1.3.6.1.4.1.3697.1.10.10.1.10.0) against "Quantum Small Library Product".
diff --git a/.werks/16198.md b/.werks/16198.md
new file mode 100644
index 00000000000..9f6fb231d62
--- /dev/null
+++ b/.werks/16198.md
@@ -0,0 +1,33 @@
+[//]: # (werk v2)
+# mk_informix: Do not allow privilege escalation
+
+key | value
+---------- | ---
+date | 2024-03-08T14:57:50+00:00
+version | 2.4.0b1
+class | security
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The Informix database monitoring plugin would previously `eval` statements parsed from the output of `$INFORMIXDIR/bin/onstat`. Since the plugin is usually run as root, this could cause statements injected via `$INFORMIXDIR/bin/onstat` to be run as root as well.
+By adding scripts named the same as other functionality found in `$PATH` to `$INFORMIXDIR/bin`, `$PATH` functionality could also be overshadowed and the custom script executed as root.
+Finally, `$INFORMIXDIR/bin/onstat` would be executed as root, allowing a substituted script to be run with elevated privileges.
+
+With this werk, the environment variables will be exported instead and `$PATH` will now be searched before `$INFORMIXDIR/bin`.
+The plugin will now also check if `$INFORMIXDIR/bin/onstat` belongs to root if the plugin is executed as root. If not, it will be executed as the user owning the executable.
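+
+An illustrative sketch of the ownership-based execution (simplified shell, not
+the shipped plugin code; `stat -c` assumes GNU coreutils):
+
+```
+onstat="$INFORMIXDIR/bin/onstat"
+if [ "$(id -u)" = "0" ] && [ "$(stat -c %u "$onstat")" != "0" ]; then
+    # onstat is not owned by root: run it as the owning user instead
+    su "$(stat -c %U "$onstat")" -c "$onstat -"
+else
+    "$onstat" -
+fi
+```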
+
+
+This issue was found during internal review.
+
+*Affected Versions*:
+
+* 2.3.0 (beta)
+* 2.2.0
+* 2.1.0
+* 2.0.0 (EOL)
+
+*Vulnerability Management*:
+
+We have rated the issue with a CVSS Score of 8.8 (High) with the following CVSS vector: `CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H` and assigned CVE `CVE-2024-28824`.
diff --git a/.werks/16210.md b/.werks/16210.md
new file mode 100644
index 00000000000..ee4c197232f
--- /dev/null
+++ b/.werks/16210.md
@@ -0,0 +1,24 @@
+[//]: # (werk v2)
+# folder_config: Prevent unintentional deletion of a non-empty folder
+
+key | value
+---------- | ---
+date | 2024-01-11T09:34:55+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | no
+
+Prior to this Werk, the REST API could delete non-empty folders without any further check. With this Werk, the endpoint now accepts the **delete_method** query parameter. The possible values are:
+- **recursive**: Deletes the folder and all the elements it contains.
+- **abort_on_nonempty**: Deletes the folder only if it is empty; otherwise the deletion is aborted.
+
+If no delete_method is provided, **recursive** is assumed and the behaviour is the same as before this Werk.
+
+The endpoint will return a 409 status code when trying to delete a folder that contains hosts, rules or subfolders, or is referenced by another object.
+
+Example usage:
+`curl -X 'DELETE' 'http://example.com/my_site/check_mk/api/1.0/objects/folder_config/my_folder?delete_method=abort_on_nonempty' -H 'accept: */*'`
+
diff --git a/.werks/16227 b/.werks/16227
new file mode 100644
index 00000000000..f84a1dcda1e
--- /dev/null
+++ b/.werks/16227
@@ -0,0 +1,32 @@
+Title: Disabled automation users could still authenticate
+Class: security
+Compatible: incomp
+Component: wato
+Date: 1702309789
+Edition: cre
+Level: 1
+Version: 2.3.0b1
+
+Prior to this Werk, an automation user whose password was disabled (also described as "disable the login to this account") was still able to authenticate.
+The information that a user was disabled was not checked for automation users.
+
+We found this vulnerability internally.
+
+Affected Versions:
+LI: 2.2.0
+LI: 2.1.0
+LI: 2.0.0
+LI: 1.6.0
+LI: 1.5.0 (probably older versions as well)
+
+Mitigations:
+If the need arises to block an automation user, one can change the password or remove that user from the system.
+
+Vulnerability Management:
+We have rated the issue with a CVSS Score of 8.8 (High) with the following CVSS vector:
+CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H.
+We assigned CVE-2023-31211 to this vulnerability.
+
+Changes:
+This Werk adds a check for the disabled information. During the update you will be warned if an automation user is currently disabled.
+
diff --git a/.werks/16232.md b/.werks/16232.md
new file mode 100644
index 00000000000..ef26da4a921
--- /dev/null
+++ b/.werks/16232.md
@@ -0,0 +1,42 @@
+[//]: # (werk v2)
+# mk_oracle(ps1): Prevent privilege escalation to root
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-01-17T08:20:43+00:00
+level | 3
+class | security
+component | checks
+edition | cre
+
+The agent plugins mk_oracle, mk_oracle.ps1 and mk_oracle_crs were vulnerable to privilege escalation to root by the oracle user.
+
+A malicious oracle user could replace a binary (e.g. sqlplus) with another script and put
+it in the corresponding directory. The script would be executed by the root user.
+
+All binaries called by the plugins are now checked to determine whether they need to be executed as a non-root (non-administrator under Windows) user, preventing the privilege escalation.
+Affected binaries are: sqlplus, tnsping, crsctl.
+
+Affected Versions
+
+* 2.3.0 (beta)
+* 2.2.0
+* 2.1.0
+* 2.0.0 (EOL) and older
+
+Mitigations
+
+If updating is not possible, disable the mk_oracle plugin.
+
+Vulnerability Management
+
+We have rated the issue with a CVSS score of 8.2 (High) with the following CVSS vector:
+CVSS:3.1/AV:L/AC:L/PR:H/UI:N/S:C/C:H/I:H/A:H
+
+We have assigned CVE-2024-0638.
+
+Changes
+
+All called binaries are now executed in a safe way.
\ No newline at end of file
diff --git a/.werks/16233.md b/.werks/16233.md
new file mode 100644
index 00000000000..ad02cbdc06d
--- /dev/null
+++ b/.werks/16233.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Cease to provide builds for Ubuntu 23.04 (Lunar Lobster)
+
+key | value
+---------- | ---
+date | 2024-01-23T13:20:36+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | omd
+level | 1
+compatible | no
+
+
diff --git a/.werks/16234.md b/.werks/16234.md
new file mode 100644
index 00000000000..dc55d8d275a
--- /dev/null
+++ b/.werks/16234.md
@@ -0,0 +1,33 @@
+[//]: # (werk v2)
+# Hide credentials in ps output for mk_oracle
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-20T18:39:35+00:00
+level | 1
+class | security
+component | checks
+edition | cre
+
+In the mk_oracle plugin, sqlplus used to be called with the connection string as an argument.
+This connection string could contain credentials necessary to authenticate against the database.
+These arguments could be extracted by other users (e.g. with the use of ps).
+
+This vulnerability was reported to us, we are not aware of any exploitations.
+
+Affected Versions:
+
+* 2.2.0
+* 2.1.0
+* 2.0.0 (probably older versions as well)
+
+Vulnerability Management:
+
+We have rated the issue with a CVSS Score of 3.8 (Low) with the following CVSS vector:
+CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:C/C:L/I:N/A:N.
+We assigned CVE-2024-1742 to this vulnerability.
+
+Changes:
+With this Werk the connection string is now piped via stdin to sqlplus.
\ No newline at end of file
diff --git a/.werks/16235.md b/.werks/16235.md
new file mode 100644
index 00000000000..6b5ed411111
--- /dev/null
+++ b/.werks/16235.md
@@ -0,0 +1,13 @@
+[//]: # (werk v2)
+# oracle_sql: don't crash on wrong keyword
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-20T19:17:54+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
diff --git a/.werks/16236.md b/.werks/16236.md
new file mode 100644
index 00000000000..e27de7ea277
--- /dev/null
+++ b/.werks/16236.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Regression when obtaining mails from mailbox
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-21T13:17:42+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+This regression has existed since 2.2.0b1. Monitoring mails in your inbox may not have been possible.
+This is fixed now.
\ No newline at end of file
diff --git a/.werks/16237.md b/.werks/16237.md
new file mode 100644
index 00000000000..f3d970d737f
--- /dev/null
+++ b/.werks/16237.md
@@ -0,0 +1,25 @@
+[//]: # (werk v2)
+# Path to mysql.ini under Windows for mk_sql
+
+key | value
+---------- | ---
+date | 2024-02-23T11:26:08+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+If you've been using MySQL and the corresponding agent plugin *mk_sql*
+under Windows, the plugin may have crashed, and the agent output would then
+show the following error:
+```
+<<>>
+[[MySQL83]]
+mysqladmin: File '\etc\check_mk\mysql.local.ini' not found (OS errno 2 - No such file or directory)
+mysqladmin: [ERROR] Stopped processing the 'include' directive in file C:\ProgramData\checkmk\agent\config\mysql.ini at line 8.
+```
+Under Windows, the plugin config path `C:\ProgramData\checkmk\agent\config` is now used.
+
+In contrast to the corresponding Linux plugin `mk_mysql`, the config path under Windows cannot be changed.
diff --git a/.werks/16238.md b/.werks/16238.md
new file mode 100644
index 00000000000..2a59e65282f
--- /dev/null
+++ b/.werks/16238.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Add m7i.large as aws resource type
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-27T12:50:29+00:00
+level | 1
+class | feature
+component | checks
+edition | cre
+
+You're affected if your aws_ec2_limits check reported "Unknown resource" and you're using "m7i.large".
+The AWS resource names change from time to time, and we will need to find a more stable solution for that in the future.
+For now, this is fixed by adding the resource name "m7i.large" to our internal list of AWS resources.
\ No newline at end of file
diff --git a/.werks/16239.md b/.werks/16239.md
new file mode 100644
index 00000000000..3763f049ea4
--- /dev/null
+++ b/.werks/16239.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Extend devices found by apc_ats_status
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-28T12:37:31+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+This werk affects you if you monitor your APC Rack Automatic Transfer Switch with apc_ats_status.
+Previously, some devices were not discovered due to a too strict scan function; this is now fixed.
+Furthermore, we now enable monitoring of other power supplies available on such devices, for example 1V and 3.3V.
\ No newline at end of file
diff --git a/.werks/16240.md b/.werks/16240.md
new file mode 100644
index 00000000000..ff8d2d325f4
--- /dev/null
+++ b/.werks/16240.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# Crash on missing thread count on AIX
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-28T14:12:59+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+You're affected if you want to monitor your CPU and the cpu check crashes with something like:
+```
+ File "/omd/sites/YOURSITE/lib/python3/cmk/base/plugins/agent_based/cpu.py", line 48, in parse_cpu
+ count=int(row[3].split("/")[1]),
+ValueError (invalid literal for int() with base 10: '')
+```
+
+We saw this happening on AIX systems when the ps information cannot be obtained due to low memory.
+The parsing is now more resilient against such missing data.
\ No newline at end of file
diff --git a/.werks/16241.md b/.werks/16241.md
new file mode 100644
index 00000000000..cc2d8eeb281
--- /dev/null
+++ b/.werks/16241.md
@@ -0,0 +1,23 @@
+[//]: # (werk v2)
+# Crash during parsing of systemd unit when description is missing
+
+key | value
+---------- | ---
+date | 2024-02-29T10:20:38+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+You're affected if you're monitoring your systemd units with an old agent and
+a unit is missing a description.
+
+Parsing may have failed with:
+```
+> loaded_status, active_status, current_state, descr = remains.split(" ", 3)
+E ValueError: not enough values to unpack (expected 4, got 3)
+```
+
+This is fixed now.
diff --git a/.werks/16242.md b/.werks/16242.md
new file mode 100644
index 00000000000..09812bcf45a
--- /dev/null
+++ b/.werks/16242.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Kill forked processes by mk_oracle under AIX
+
+key | value
+---------- | ---
+date | 2024-03-06T12:43:13+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The agent plugin `mk_oracle` creates forked processes, e.g. from `sqlplus`.
+In order to reliably clean up stale processes, we now kill the whole process chain under AIX
+that corresponds to the stored `PID`.
+For now, we introduce this only for AIX, as we have customers affected under that OS.
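+
+An illustrative sketch of killing such a process chain on AIX (hypothetical
+helper, not the shipped plugin code; `$pidfile` stands in for wherever the
+`PID` is stored):
+
+```
+kill_chain() {
+    # recurse into all children (ps -ef: column 2 = PID, column 3 = PPID)
+    for child in $(ps -ef | awk -v p="$1" '$3 == p { print $2 }'); do
+        kill_chain "$child"
+    done
+    kill -9 "$1"
+}
+kill_chain "$(cat "$pidfile")"
+```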
diff --git a/.werks/16259.md b/.werks/16259.md
new file mode 100644
index 00000000000..1c73954487c
--- /dev/null
+++ b/.werks/16259.md
@@ -0,0 +1,97 @@
+[//]: # (werk v2)
+# New APIs for plugin development
+
+key | value
+---------- | ---
+date | 2024-02-26T21:27:58+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 2
+compatible | yes
+
+
+Checkmk 2.3 features new APIs for plugin development.
+There are three new APIs, and a new version of the agent based API (also known as "Check API").
+The replaced APIs will not be supported after 2.3 (details below).
+
+Plugin APIs in general provide the means to write your own code that interacts with the main application using well-defined and stable code libraries.
+
+While Checkmk has always offered the possibility to add custom plugins, the APIs were often ad-hoc and undocumented.
+In Checkmk 2.0 we started to address this with the introduction of the agent-based API.
+With this werk, all important elements of creating your own monitoring plugins are covered by an API.
+
+With the APIs we introduce here, we clarify what is meant to be used by plugin developers and what are internal modules, which can and likely will change over time and should not be used.
+This is beneficial for all involved parties:
+
+ * Checkmk developers can easily figure out what parts of the code can be changed without breaking plugins.
+ We can focus on providing stable APIs while gaining the freedom to rapidly refactor the backend code.
+ * Plugin developers know what features to use to prevent their plugin from unexpectedly breaking during an upgrade, without having to closely follow the changes we make to the code.
+ * Checkmk users can have reliable information on which of their extensions will continue to work after a major upgrade.
+
+As a result, the increased transparency leads to better stability and hence a better user experience on all sides.
+
+While the APIs are also intended to be stable, the main focus now is on transparency.
+All of the APIs have a limited scope, and we have tried to have them feature complete within these limits.
+However, as the development of Checkmk progresses, we might have to add some features or redesign others.
+The versioning of the APIs will allow us in these cases to maintain the old version (for a while) and provide documentation on how to migrate to the newer API version.
+
+**While we recommend testing the APIs and highly appreciate any feedback: Be aware that during the beta phase incompatible changes to the APIs might occur.
+Start with a small subset of your plugins to check out the APIs capabilities and limitations.
+Wait for the stable release before migrating a large amount of plugins to avoid having to make adjustments in all of them.**
+
+You can find detailed technical documentation of the APIs in a running site's "Help" menu, under "Plugin API references".
+
+
+## Compatibility
+
+For all affected plugins (see below) the following migration timeline is supported:
+
+For Checkmk 2.3 we tried our best to ensure all plugins will continue working as in 2.2.
+Note that, for the reasons above, we can't guarantee that all plugins will work.
+
+We strongly recommend users migrate to the new APIs during the lifetime of 2.3.
+
+With the update to 2.4 all measures to ensure that older plugins still work are dropped, effectively making it extremely unlikely that these plugins will continue to work.
+
+## General changes and common API properties
+
+The main changes in these APIs are to reduce side effects when importing the code (for better testability) and to allow for a better component-oriented structure:
+
+ * We move away from the "registry" approach we had in the past, to a discovery-based approach.
+ Plugins are instances of specific classes that are created in a certain place with a certain naming scheme.
+ * All plugins (rulesets, check plugins, metrics, ...) of the same "plugin family" can now live in a common folder.
+
+A detailed description can be found in the technical documentation mentioned above.
+
+Plugins already migrated by us can be found in the [cmk/plugins](https://github.com/Checkmk/checkmk/tree/master/cmk/plugins) folder of the source code.
+
+## Rulesets API: `cmk.rulesets.v1`
+
+This is the new API for the creation of rulesets used by the users to configure their plugins.
+The supported Ruleset types and input form elements can be found in the technical documentation.
+These are the plugins formerly known to sit in your site's `local/share/check_mk/web/plugins/wato`.
+To be discovered by Checkmk they now have to be in `local/lib/python3/cmk_addons/plugins/<family>/rulesets`.
+
+## Graphing API: `cmk.graphing.v1`
+
+This is the new API for the creation of objects required for metric visualization, such as perfometers and graphs.
+The supported metric objects can be found in the technical documentation.
+These plugins were previously located at `local/share/check_mk/web/plugins/metrics`.
+To be discovered by Checkmk they now have to be in `local/lib/python3/cmk_addons/plugins/<family>/graphing`.
+
+## Server side calls API: `cmk.server_side_calls.v1`
+
+This is the new API for plugins that convert a configured ruleset for a special agent or active check to the command line command that is used to run the special agent or active check.
+Details on the exposed classes and their usage can be found in the technical documentation.
+These plugins were previously located at `local/share/check_mk/checks`, and filled a `special_agent_info` or `active_check_info` dictionary.
+To be discovered by Checkmk they now have to be in `local/lib/python3/cmk_addons/plugins/<family>/server_side_calls`.
+
+## New version of agent based API: `cmk.agent_based.v2`
+
+The new version of the agent based API was mostly added to increase consistency with the other three APIs (discovery mechanism, plugin location).
+It also features a couple of less important improvements. Details can be found in the technical documentation.
+These plugins were previously located at `local/lib/check_mk/base/plugins/agent_based`.
+To be discovered by Checkmk they now have to be in `local/lib/python3/cmk_addons/plugins/<family>/agent_based`.
+
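+A minimal sketch of what a check plugin looks like under the new API (all names and values are hypothetical; the technical documentation is the authoritative reference):
+
+```python
+from cmk.agent_based.v2 import AgentSection, CheckPlugin, Result, Service, State
+
+
+def parse_my_example(string_table):
+    # Hypothetical agent section: one "<name> <value>" pair per line.
+    return {name: int(value) for name, value, *_rest in string_table}
+
+
+# Discovered by instantiation with a naming scheme instead of explicit registration.
+agent_section_my_example = AgentSection(name="my_example", parse_function=parse_my_example)
+
+
+def discover_my_example(section):
+    yield from (Service(item=name) for name in section)
+
+
+def check_my_example(item, section):
+    if (value := section.get(item)) is not None:
+        yield Result(state=State.OK, summary=f"Value: {value}")
+
+
+check_plugin_my_example = CheckPlugin(
+    name="my_example",
+    service_name="My example %s",
+    discovery_function=discover_my_example,
+    check_function=check_my_example,
+)
+```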
diff --git a/.werks/16261.md b/.werks/16261.md
new file mode 100644
index 00000000000..84c860b2685
--- /dev/null
+++ b/.werks/16261.md
@@ -0,0 +1,24 @@
+[//]: # (werk v2)
+# Split up rule "Mails in outgoing mail queue"
+
+key | value
+---------- | ---
+date | 2023-12-25T17:11:32+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This affects all users that had rules for "Mails in outgoing mail queue" configured.
+
+To clean up inconsistencies that the new APIs no longer tolerate, we had to split the ruleset into two.
+
+We renamed "Mails in outgoing mail queue" to "Mails in outgoing mail queue (multiple queues)" and added a new ruleset "Mails in outgoing mail queue (single queue)".
+The required new rules are created automatically during upgrade, but we advise users to check if they have rules that are not needed anymore.
+
+The plugins using the new ruleset are
+ * Barracuda SPAM Firewall: Active and Deferred Mail Queue Length (`barracuda_mailqueues`)
+ * Nullmailer: Mailqueue Length (`nullmailer_mailq`)
+ * qmail: Mailqueue Length (`qmail_stats`)
diff --git a/.werks/16273 b/.werks/16273
new file mode 100644
index 00000000000..7e2a4ea0550
--- /dev/null
+++ b/.werks/16273
@@ -0,0 +1,39 @@
+Title: Local privilege escalation in agent plugin 'mk_tsm'
+Class: security
+Compatible: incomp
+Component: checks
+Date: 1702411459
+Edition: cre
+Level: 1
+Version: 2.3.0b1
+
+By crafting a malicious command that then shows up in the output of `ps`, users of monitored hosts could gain root privileges.
+This was achieved by exploiting the insufficient quoting when using ksh's `eval` to create the required environment.
+
+This issue was found during internal review.
+
+
+### Affected Versions
+
+ * 2.2.0
+ * 2.1.0
+ * 2.0.0 (EOL) and older
+
+
+### Mitigations
+
+If updating is not possible, disable the Tivoli Storage Manager plugin.
+
+
+### Vulnerability Management
+
+We have rated the issue with a CVSS score of 8.8 (High) with the following CVSS vector:
+`CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H`
+
+We have assigned `CVE-2023-6735`.
+
+
+### Changes
+
+With this change we no longer use `eval` and fix the quoting.
+This prevents variable exports from being misinterpreted as commands to execute.
diff --git a/.werks/16274.md b/.werks/16274.md
index 5e13a6f5d18..18461bf21ca 100644
--- a/.werks/16274.md
+++ b/.werks/16274.md
@@ -16,15 +16,15 @@ This werk only affects you if you observe unknown "Logical device <ITEM>"
In case you are affected please run a discovery on the affected hosts and fix the monitoring history as described below.
-We observed some devices to send `""` (the null-byte) as their name (`OID .1.3.6.1.4.1.232.3.2.3.1.1.14`).
+We observed some devices to send `"\x00"` (the null-byte) as their name (`OID .1.3.6.1.4.1.232.3.2.3.1.1.14`).
 Not all components dealt well with it, leading to unacknowledgeable downtimes, non-matching rules and the like.
-We now replace all null-bytes with `"\x00"` (the literal containing the four characters backslash, 'x', 'zero', 'zero').
+We now replace all null-bytes with `"\\x00"` (the literal containing the four characters backslash, 'x', 'zero', 'zero').
As of Checkmk 2.3, this should in fact no longer be necessary, but as we need a backportable solution, we change the item consistently in all versions.
If this affects you, you might also want to fix the services in the monitoring history by running
```
-sed -i 's||\x00|' var/check_mk/core/history var/check_mk/core/archive/*
+sed -i 's|\\x00|\\\\x00|' var/check_mk/core/history var/check_mk/core/archive/*
```
diff --git a/.werks/16276.md b/.werks/16276.md
new file mode 100644
index 00000000000..8d52a9cf87a
--- /dev/null
+++ b/.werks/16276.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# "Cisco Devices: Temperature Sensors" used wrong lower device levels
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2023-12-18T11:39:31+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+The services picked the device levels meant for a "less than" comparison,
+when in fact comparing "less or equal" (as Checkmk usually does).
+They now pick the levels meant for the "less or equal" comparison.
\ No newline at end of file
diff --git a/.werks/16277.md b/.werks/16277.md
new file mode 100644
index 00000000000..93cb99c61ec
--- /dev/null
+++ b/.werks/16277.md
@@ -0,0 +1,24 @@
+[//]: # (werk v2)
+# Split up rule "Main memory usage of simple devices"
+
+key | value
+---------- | ---
+date | 2023-12-23T14:52:41+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This affects all users that had rules for "Main memory usage of simple devices" configured.
+
+To clean up inconsistencies that the new APIs no longer tolerate, we had to split the ruleset into two.
+
+We renamed "Main memory usage of simple devices" to "Main memory usage of simple devices with multiple services" and added a new ruleset "Main memory usage of simple devices with single services".
+The required new rules are created automatically during upgrade, but we advise users to check if they have rules that are not needed anymore.
+
+The plugins using the new ruleset are
+ * Check Point Firewalls: Memory Usage (`checkpoint_memory`)
+ * HPE Procurve Switches: Memory Usage (`hp_procurve_mem`)
+ * UCD SNMP Daemon: Memory Check (`ucd_mem`)
diff --git a/.werks/16287.md b/.werks/16287.md
new file mode 100644
index 00000000000..170d5a7b93b
--- /dev/null
+++ b/.werks/16287.md
@@ -0,0 +1,50 @@
+[//]: # (werk v2)
+# rule_notification: match_hosttag now accepts custom tag ids
+
+key | value
+---------- | ---
+date | 2024-01-12T16:30:38+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | no
+
+Previously, when creating or updating a notification rule,
+the match_host_tags field would only allow builtin tag ids.
+With this werk we now accept any configured host tag id.
+The schema has also changed in order to make this possible:
+instead of a field representing each tag, you now have to
+pass a list of aux tag ids / tag group tag ids along with an
+operator that determines whether the tag id should match.
+
+For example:
+
+```
+{
+ "match_host_tags": {
+ "state": "enabled",
+ "value": [
+ {
+ "tag_type": "aux_tag",
+ "tag_id": "snmp",
+ "operator": "is_set",
+ },
+ {
+ "tag_type": "tag_group",
+ "tag_group_id": "criticality",
+ "operator": "is",
+ "tag_id": "prod",
+ },
+ {
+ "tag_type": "tag_group",
+ "tag_group_id": "networking",
+ "operator": "is_not",
+ "tag_id": "lan",
+ }
+ ],
+ },
+}
+```
diff --git a/.werks/16289.md b/.werks/16289.md
new file mode 100644
index 00000000000..521b58f2ccb
--- /dev/null
+++ b/.werks/16289.md
@@ -0,0 +1,13 @@
+[//]: # (werk v2)
+# jolokia_jvm_memory: translate old heap and nonheap metrics
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2023-11-13T13:25:53+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
diff --git a/.werks/16292.md b/.werks/16292.md
new file mode 100644
index 00000000000..306963f1510
--- /dev/null
+++ b/.werks/16292.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# user_config: add verification for contact groups and locked attributes
+
+key | value
+---------- | ---
+date | 2024-01-16T14:42:21+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+This werk introduces two changes:
+* it now verifies for create & edit if the provided contact groups actually exist
+* it verifies that for an edit request, locked attributes are not being modified
diff --git a/.werks/16295 b/.werks/16295
index 810b672c626..2b997718fb3 100644
--- a/.werks/16295
+++ b/.werks/16295
@@ -1,6 +1,6 @@
Title: postfix_mailq_status: Rework discovery
Class: fix
-Compatible: incomp
+Compatible: compat
Component: checks
Date: 1700560692
Edition: cre
@@ -9,6 +9,3 @@ Version: 2.3.0b1
With this werk the postfix status service is not discovered if Postfix is not running.
The default mail queue is now discovered as "Postfix Status default".
-
-In order to make the check plugin work you have to perform a re-discovery on the affected hosts.
-Otherwise, the service "Postfix Status" (dicovered before this change) will stop working.
diff --git a/.werks/16306.md b/.werks/16306.md
new file mode 100644
index 00000000000..c94e6d530a4
--- /dev/null
+++ b/.werks/16306.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Fix PDF export of host- and servicegroup views
+
+key | value
+---------- | ---
+date | 2024-01-02T09:35:31+00:00
+version | 2.3.0b1
+class | fix
+edition | cee
+component | reporting
+level | 1
+compatible | yes
+
+If you exported a view with host- or servicegroup context, an error like "Error
+while rendering element type" was shown because of missing context information.
diff --git a/.werks/16307.md b/.werks/16307.md
new file mode 100644
index 00000000000..9dfa9a6502f
--- /dev/null
+++ b/.werks/16307.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Validate empty settings for "Maximum long output size"
+
+key | value
+---------- | ---
+date | 2024-01-02T10:33:32+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+It was possible to unset the settings for the global option "Maximum long
+output size" leading to an error on activating of changes.
diff --git a/.werks/16308.md b/.werks/16308.md
new file mode 100644
index 00000000000..20ea6a05ab8
--- /dev/null
+++ b/.werks/16308.md
@@ -0,0 +1,32 @@
+[//]: # (werk v2)
+# New option to test notification rulesets
+
+key | value
+---------- | ---
+date | 2024-01-11T12:12:45+00:00
+version | 2.3.0b1
+class | feature
+edition | cre
+component | notifications
+level | 2
+compatible | yes
+
+Previously, you could only test your notification rulesets using the "Analyze"
+option against a limited set of notifications in the backlog or with the "Fake
+check result" command.
+
+We now introduce the possibility to define a custom notification and test it
+against your rulesets. The option can be found in "Setup" - "Notifications" -
+"Test notifications".
+
+In the popup, select whether you want to test on a host or a service
+notification. Select the host and service (if you want to test on a service
+notification) and the type of simulation. Currently supported are "Start of
+downtime" and "Status change". Optionally, you can specify a custom plugin
+output.
+
+A checkbox allows you to decide whether to test only (default) or to send a
+real notification according to your notification rules.
+
+Within the "Advanced condition simulation" options you can set a custom
+notification date and time to test period matching and the notification number.
diff --git a/.werks/16310.md b/.werks/16310.md
new file mode 100644
index 00000000000..0dc4cc455bd
--- /dev/null
+++ b/.werks/16310.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Rework of "Automatically update service configuration" option
+
+key | value
+---------- | ---
+date | 2024-01-19T12:59:21+00:00
+version | 2.3.0b1
+class | feature
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+This option is used in the service discovery rule "Periodic service discovery"
+and the global option "Bulk discovery". It now offers the possibility to choose
+between a refresh of all services and host labels or a custom configuration. If
+you choose a custom one, you can select "Monitor undecided services", "Remove
+vanished services" and/or "Update host labels".
diff --git a/.werks/16312.md b/.werks/16312.md
new file mode 100644
index 00000000000..d6990c6d5f1
--- /dev/null
+++ b/.werks/16312.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Fix error on "Save & go to list" while editing dashboards
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-23T14:21:12+00:00
+level | 1
+class | fix
+component | multisite
+edition | cre
+
+If you used the "Properties" option within the "Dashboard" page menu in
+dashboards and chose "Save & go to list" after editing the dashboard, you saw
+an error like "This page was not found. Sorry.".
\ No newline at end of file
diff --git a/.werks/16313.md b/.werks/16313.md
new file mode 100644
index 00000000000..2238c210875
--- /dev/null
+++ b/.werks/16313.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Fix tree of folders snapin filter for statistics dashlets
+
+key | value
+---------- | ---
+date | 2024-01-24T09:17:40+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | multisite
+level | 1
+compatible | no
+
+If you used the "Tree of folders" snapin and the shown dashboard contained a
+dashlet "Host statistics" or "Service statistics" the wato folder was not
+filtered for that dashlet.
diff --git a/.werks/16314.md b/.werks/16314.md
new file mode 100644
index 00000000000..8b7f7953189
--- /dev/null
+++ b/.werks/16314.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Fix error on edit of host or service statistics dashlet
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-24T14:06:42+00:00
+level | 1
+class | fix
+component | multisite
+edition | cre
+
+If you edited the builtin dashlets "Host statistics" or "Service statistics" on
+the main dashboard (CRE) / problem dashboard (CEE) you got an error like
+"Internal error: size" on saving.
\ No newline at end of file
diff --git a/.werks/16315.md b/.werks/16315.md
new file mode 100644
index 00000000000..0a03945b997
--- /dev/null
+++ b/.werks/16315.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Respect sort index for listed dashboards in page menu
+
+key | value
+---------- | ---
+date | 2024-02-05T13:05:04+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | multisite
+level | 1
+compatible | yes
+
+The page menu "Dashboards" in dashboards listed all other available dashboards
+in a random order. The defined sort index of each dashboard is now respected.
+
diff --git a/.werks/16316.md b/.werks/16316.md
new file mode 100644
index 00000000000..7fe90c7b016
--- /dev/null
+++ b/.werks/16316.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Respect timerange for all view elements in report
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-09T10:20:05+00:00
+level | 1
+class | fix
+component | reporting
+edition | cee
+
+If a report with more than one view element was created, e.g. via the report
+snapin, the defined time range was only applied from the second view element
+onwards.
\ No newline at end of file
diff --git a/.werks/16317.md b/.werks/16317.md
new file mode 100644
index 00000000000..d32152a54ca
--- /dev/null
+++ b/.werks/16317.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# "Aggregation of Downtimes" BI option had no effect
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-15T07:48:10+00:00
+level | 1
+class | fix
+component | bi
+edition | cre
+
+The option should change the computation of downtimes from CRIT to WARN but did
+not. This has been fixed.
\ No newline at end of file
diff --git a/.werks/16318.md b/.werks/16318.md
new file mode 100644
index 00000000000..bba6401d8fc
--- /dev/null
+++ b/.werks/16318.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Connection test using SNMP credentials configured on host page
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-16T07:42:06+00:00
+level | 1
+class | fix
+component | wato
+edition | cre
+
+If you used "Save & run connection tests" on the host properties page with SNMP
+credentials configured, the configured password was not used for the executed
+tests.
+
+A workaround was to enter the password on the "Test connection" page again.
\ No newline at end of file
diff --git a/.werks/16319.md b/.werks/16319.md
new file mode 100644
index 00000000000..0733acca41a
--- /dev/null
+++ b/.werks/16319.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# Reduce context/search filter for "Metrics graph of a single service"
+
+key | value
+---------- | ---
+compatible | no
+version | 2.4.0b1
+date | 2024-02-27T09:10:58+00:00
+level | 1
+class | fix
+component | reporting
+edition | cee
+
+Since 2.2 the content element "Metrics graph of a single service" allowed
+configuring multiple context/search filters.
+This was wrong because this element is limited to a specific host and service.
+
+This change should have no effect on your configuration, but we recommend
+checking reports with such content elements.
\ No newline at end of file
diff --git a/.werks/16320.md b/.werks/16320.md
new file mode 100644
index 00000000000..37c9a47b2c3
--- /dev/null
+++ b/.werks/16320.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# Publish permission handling
+
+key | value
+---------- | ---
+compatible | no
+version | 2.4.0b1
+date | 2024-03-05T12:32:34+00:00
+level | 1
+class | fix
+component | multisite
+edition | cre
+
+Werk 13498 introduced the possibility to set publish permissions independently
+of each other.
+
+Still, the respective publish permission (e.g. "Publish views") was needed
+to see the published views.
+
+This has been fixed.
+
+Note: Please check your publish configuration in views, dashboards, etc.
\ No newline at end of file
diff --git a/.werks/16321.md b/.werks/16321.md
new file mode 100644
index 00000000000..212f9e0581e
--- /dev/null
+++ b/.werks/16321.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Fix metric history painter with inherit time range option
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-07T08:35:37+00:00
+level | 1
+class | fix
+component | reporting
+edition | cee
+
+If you used the option "Inherit from report time range" for the painter "Metric
+history" in report content elements, an error was shown in the report.
+
+This affected all previous 2.2 versions.
\ No newline at end of file
diff --git a/.werks/16322.md b/.werks/16322.md
new file mode 100644
index 00000000000..08d7ff8158f
--- /dev/null
+++ b/.werks/16322.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Show used filter of virtual host tree
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-08T13:45:32+00:00
+level | 1
+class | fix
+component | multisite
+edition | cre
+
+If you opened the "Filter" option of a view that was visited via a link from
+the "Virtual host tree" snapin, all filters were empty.
\ No newline at end of file
diff --git a/.werks/16324.md b/.werks/16324.md
new file mode 100644
index 00000000000..8e236ed2279
--- /dev/null
+++ b/.werks/16324.md
@@ -0,0 +1,49 @@
+[//]: # (werk v2)
+# NetApp: addition of datasource program and check plugins for NetApp ONTAP
+
+key | value
+---------- | ---
+date | 2024-01-29T12:15:27+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Since NetApp has discontinued support for their old API this werk adds datasource program and check plugins for the new NetApp ONTAP REST API.
+
+However, the new API does not provide certain metrics that were available in the old API. As a result, the new check plugins have been modified accordingly.
+
+The plugins that have been migrated with this werk are the ones that monitor:
+
+- NetApp aggregates
+- NetApp node's CPUs
+- NetApp disks
+- NetApp sensors of environment and shelves
+- NetApp node's fans
+- NetApp node's interfaces
+- NetApp LUNs
+- NetApp node's ports
+- NetApp power supply units
+- NetApp volumes (and snapshots)
+- NetApp Storage Virtual Machines' (SVMs, formerly called "vservers") traffic and status
+
+These plugins and metrics are under development:
+- NetApp snapvault
+- NetApp qtree quota
+- NetApp FCP
+- NetApp Storage Virtual Machines' (SVMs, formerly called "vservers")
+ - Nfsv3_read_ios (read ops)
+ - Nfsv3_write_ios (write ops)
+
+Here is a list of missing metrics and plugins as the corresponding data cannot be found in the new NetApp API:
+- NetApp Disk Summary
+ - Parity disks
+ - Total number of disks type data
+ - Total number of disks type parity
+- Traffic SVM (formerly vServer)
+ - Nfsv4_read_ios (read ops)
+ - Nfsv4_write_ios (write ops)
+- System time
+ - Missing service
\ No newline at end of file
diff --git a/.werks/16325.md b/.werks/16325.md
new file mode 100644
index 00000000000..0bb0f26714b
--- /dev/null
+++ b/.werks/16325.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# netapp_ontap_psu: fix discovery ruleset
+
+key | value
+---------- | ---
+date | 2024-01-31T13:08:07+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This problem affected users who wanted to discover the 'summary' mode of the PSU service: it was not possible to configure the appropriate discovery rule. This werk fixes this behaviour.
+
+A rediscovery is required for the change to take effect.
diff --git a/.werks/16327.md b/.werks/16327.md
new file mode 100644
index 00000000000..ecde3ed7ad2
--- /dev/null
+++ b/.werks/16327.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# netapp_ontap_environment: show unit of measurement in summary
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-01T13:34:16+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+The service summary now displays the units of measurement of the monitored value.
\ No newline at end of file
diff --git a/.werks/16329.md b/.werks/16329.md
new file mode 100644
index 00000000000..007e0e46d69
--- /dev/null
+++ b/.werks/16329.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# mk_redis: Use REDISCLI_AUTH instead of -a argument in order to hide password
+
+key | value
+---------- | ---
+date | 2024-01-24T09:23:40+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+mk_redis used to expose the password in the process name because of the '-a' argument.
+This has been fixed using the environment variable REDISCLI_AUTH.
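+
+The mechanism, sketched in Python for illustration (the actual plugin is a shell script; the helper is hypothetical):
+
+```python
+import os
+import subprocess
+
+
+def run_redis_cli(password: str, *args: str) -> str:
+    # REDISCLI_AUTH is read by redis-cli; unlike '-a PASSWORD' it does not
+    # show up in the process list.
+    env = dict(os.environ, REDISCLI_AUTH=password)
+    return subprocess.run(
+        ["redis-cli", *args], env=env, capture_output=True, text=True, check=True
+    ).stdout
+```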
diff --git a/.werks/16330.md b/.werks/16330.md
new file mode 100644
index 00000000000..4b8461a22f7
--- /dev/null
+++ b/.werks/16330.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# size_trend: avoid 'infinite to integer' crash
+
+key | value
+---------- | ---
+date | 2024-01-24T14:08:45+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+While calculating how much time is left until the resource is full, the check used to crash with 'infinite to integer' if the growth value was too low (e.g. 1e-320).
+This has now been fixed: if the value is so low that division with it results in 'inf', the check will not be executed.
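+
+A minimal sketch of the guard (hypothetical code, not the actual `size_trend` implementation):
+
+```python
+import math
+
+
+def hours_until_full(free_mb: float, growth_mb_per_hour: float) -> float | None:
+    if growth_mb_per_hour <= 0:
+        return None  # not growing, nothing to predict
+    hours = free_mb / growth_mb_per_hour
+    if math.isinf(hours):
+        return None  # rate too small (e.g. 1e-320): skip instead of crashing on int()
+    return hours
+```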
diff --git a/.werks/16331.md b/.werks/16331.md
new file mode 100644
index 00000000000..8e2421c7c24
--- /dev/null
+++ b/.werks/16331.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# mcafee_webgateway: Services not being discovered
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-01-29T10:17:43+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+Some McAfee services were not discovered because of the detection criteria.
+This has now been fixed by using the 'OBJECTID' for the detection.
\ No newline at end of file
diff --git a/.werks/16333.md b/.werks/16333.md
new file mode 100644
index 00000000000..1245d5f739a
--- /dev/null
+++ b/.werks/16333.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# if_lancom: Switch port discovery does not work for Lancom routers when the router description is configured to be used for the Service Description
+
+key | value
+---------- | ---
+date | 2024-02-01T16:48:01+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+Sometimes Lancom routers don't provide a name. During data parsing this caused the description to be left as an empty string.
+As a result, when the router description was configured to be used for the service description, the discovery did not work properly.
+This has now been fixed by adding a fallback mechanism: if the router name is not present, the router description will be used.
+Because of the change, the user is required to execute the service discovery again in order to discover any new services,
+as well as to get rid of any vanished services.
diff --git a/.werks/16336.md b/.werks/16336.md
new file mode 100644
index 00000000000..b5e9a159589
--- /dev/null
+++ b/.werks/16336.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# lnx_video: Fix section parsing
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-08T17:22:52+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+The output of the 'lspci' command was not consistent. This caused the parsing of the lnx_video section to fail.
+As a consequence, the graphics cards were not shown in the HW/SW inventory.
+This has now been fixed and the graphics cards will be shown.
\ No newline at end of file
diff --git a/.werks/16337.md b/.werks/16337.md
new file mode 100644
index 00000000000..8814690ec0e
--- /dev/null
+++ b/.werks/16337.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# mk_inventory: Fix for lnx_video when multiple graphics cards are available
+
+key | value
+---------- | ---
+date | 2024-02-12T12:30:17+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+When multiple graphics cards were present, the 'lspci' command failed with an 'Invalid bus number' error.
+This left the section blank and no cards were detected in the HW/SW inventory.
+This fix may cause new graphics cards to appear in the HW/SW inventory.
+For the fix to take effect, the user will need to re-deploy the plugin.
diff --git a/.werks/16338.md b/.werks/16338.md
new file mode 100644
index 00000000000..7db6752d9c5
--- /dev/null
+++ b/.werks/16338.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# mk_redis: Fix for Werk #16329
+
+key | value
+---------- | ---
+date | 2024-02-21T10:40:17+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+After werk #16329, the plugin did not work when a password was set.
+This has now been fixed and configuring a password shouldn't cause any issues.
diff --git a/.werks/16339.md b/.werks/16339.md
new file mode 100644
index 00000000000..6c603f5687e
--- /dev/null
+++ b/.werks/16339.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# mtr: Fix section parsing error
+
+key | value
+---------- | ---
+date | 2024-02-25T22:30:51+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+When the mtr section contained a line that started with `**ERROR**`, the parsing of the section failed.
+This has now been fixed.
+The lines starting with `**ERROR**` will be ignored.
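+
+A minimal sketch of the filtering (hypothetical code, not the actual parse function):
+
+```python
+def drop_error_lines(string_table):
+    # Lines such as "**ERROR** ..." carry no hop data and are skipped.
+    return [line for line in string_table if not line[0].startswith("**ERROR**")]
+```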
diff --git a/.werks/16340.md b/.werks/16340.md
new file mode 100644
index 00000000000..24d50e380ba
--- /dev/null
+++ b/.werks/16340.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# docker_node_images: KeyError: 'VirtualSize'
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-06T16:28:34+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+In newer versions of the Docker API the information about the 'VirtualSize' of a node image is not available, because it has been deprecated.
+From now on the plugin will use 'VirtualSize' if available, and 'Size' if not.
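+
+A minimal sketch of the fallback (hypothetical code, not the actual plugin):
+
+```python
+def image_size(image: dict) -> int:
+    # Newer Docker APIs drop 'VirtualSize'; fall back to 'Size' in that case.
+    size = image.get("VirtualSize", image.get("Size"))
+    if size is None:
+        raise KeyError("image entry contains neither 'VirtualSize' nor 'Size'")
+    return int(size)
+```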
\ No newline at end of file
diff --git a/.werks/16341.md b/.werks/16341.md
new file mode 100644
index 00000000000..001db3b7c1e
--- /dev/null
+++ b/.werks/16341.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# rmk: Ignore RCC suites and RCC profile configuration if CORE mode is active
+
+key | value
+---------- | ---
+date | 2024-03-21T15:36:04+00:00
+version | 2.3.0b4
+class | fix
+edition | cee
+component | checks
+level | 1
+compatible | yes
+
+When the Robotmk Core MKP is installed, RCC Suites configuration and RCC Profile configuration are not available as they are Enterprise features.
+Previously, there were scenarios where RCC suites were running even though the Robotmk Core MKP was installed.
+The Agent Bakery would use previously saved Enterprise configurations without first migrating them to their CoreMode counterparts.
+
+This has now been fixed and the licensing mode is checked when the agent is baked. This means the RCC Suites/RCC Profile configuration will be ignored during the bake process.
+This prevents users from inadvertently relying on a paid feature when CoreMode is enabled.
diff --git a/.werks/16342.md b/.werks/16342.md
new file mode 100644
index 00000000000..46056489c11
--- /dev/null
+++ b/.werks/16342.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Rename service labels for Robotmk
+
+key | value
+---------- | ---
+date | 2024-03-25T12:28:56+00:00
+version | 2.3.0b4
+class | feature
+edition | cee
+component | multisite
+level | 2
+compatible | yes
+
+This is a follow-up for werk #13872.
+The service labels have been renamed to 'cmk/rmk/html_last_log:yes' and 'cmk/rmk/html_last_error_log:yes'.
+The icons for the last log file and the last error log file are now based on the new labels as well as the old ones from werk #13872.
diff --git a/.werks/16343.md b/.werks/16343.md
new file mode 100644
index 00000000000..349c9df5059
--- /dev/null
+++ b/.werks/16343.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# rmk: Remove 'Load environment variables from file' field in Robotmk Scheduler bakery rule
+
+key | value
+---------- | ---
+date | 2024-03-27T14:51:09+00:00
+version | 2.3.0b4
+class | feature
+edition | cee
+component | wato
+level | 1
+compatible | no
+
+Users who have configured the 'Load environment variable from file' field in the Robotmk Scheduler rule are affected by this incompatible werk. Any rules that contain the value for this field will be automatically migrated during the update and the value will be removed from the rules.
+
+Originally, this field was designed to be fully compatible with Robots that could be used within Robocorp's cloud environment.
+
+However, as Robocorp shifted its focus from Robot Framework to Python developers, the VS Code extensions provided by Robocorp became redundant: their language server for Robot Framework would no longer be maintained and would no longer serve Robot Framework users. In addition, both extensions had a rather confusing interface and didn't work well together. Now the "RobotCode" extension is the only necessary extension for VS Code, and it works very well.
+
+The env.json file generated from this field was used exclusively by the Robocorp extension. This approach allowed local initiation and debugging of automations with the exact set of environment variables configured, mirroring those set later in the Cloud UI.
+
+For the above reasons, we decided to remove this field.
diff --git a/.werks/16344.md b/.werks/16344.md
new file mode 100644
index 00000000000..c606314f693
--- /dev/null
+++ b/.werks/16344.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Pure Storage FlashArray: Devices with status 'not_installed' will now have an OK state
+
+key | value
+---------- | ---
+date | 2024-04-28T17:04:15+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Devices with a reported status of 'not_installed' previously caused the check state to be CRIT.
+This behaviour has now been changed. These devices will now have an OK state.
diff --git a/.werks/16349.md b/.werks/16349.md
new file mode 100644
index 00000000000..e84a8d34d92
--- /dev/null
+++ b/.werks/16349.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Support Diagnostics: UI improvements
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-17T07:36:58+00:00
+level | 1
+class | fix
+component | wato
+edition | cre
+
+This fix
+
+* renames "Crash Dumps" to "Crash Reports" and mark them as medium sensitive,
+* renames "Local Files" to "Local Files and MKPs". That's somewhat redundant, but it helps to better understand what is packed.
diff --git a/.werks/16350.md b/.werks/16350.md
new file mode 100644
index 00000000000..6ed845fb4fd
--- /dev/null
+++ b/.werks/16350.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Support Diagnostics: The timeout for creating a dump is now configurable
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-12T11:07:00+00:00
+level | 1
+class | feature
+component | wato
+edition | cre
+
+Before this werk, the process of creating a Support Diagnostics Dump sometimes led to a timeout exception. The hard-coded timeout was 110s.
+Now, it's possible to configure the timeout in the user interface.
\ No newline at end of file
diff --git a/.werks/16355.md b/.werks/16355.md
new file mode 100644
index 00000000000..08eecf2525e
--- /dev/null
+++ b/.werks/16355.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# Graph object lists: Avoid strange help texts in list views (non-English only)
+
+key | value
+---------- | ---
+date | 2024-04-15T10:20:16+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | multisite
+level | 1
+compatible | yes
+
+When using Checkmk in a language other than English, the list views for graph collections, graph
+tunings, custom graphs and forecast graphs might have displayed strange help texts such as
+"Project-Id-Version: Checkmk user interface translation 0.1 ...".
+
+This happened only for graph objects with empty descriptions and only if the inline help was
+activated for the corresponding page.
diff --git a/.werks/16356.md b/.werks/16356.md
new file mode 100644
index 00000000000..b304bd1bc57
--- /dev/null
+++ b/.werks/16356.md
@@ -0,0 +1,25 @@
+[//]: # (werk v2)
+# Synthetic Monitoring: Incompatible overhauls
+
+key | value
+---------- | ---
+date | 2024-04-17T11:05:50+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | checks
+level | 1
+compatible | no
+
+This werk only affects users who have configured the rule *Robotmk scheduler (Windows)* during the
+2.3.0 beta phase. The following incompatible changes have been made:
+
+* The plan naming convention introduced in [werk 16421](https://checkmk.com/werk/16421) has been adopted in more places, both internally and user-facing.
+* The service items of the *RMK Plan* and *RMK Test* services have been reworked to include the name of the corresponding top-level Robot Framework suite.
+* Previously, the scheduler terminated in case of permission issues, for example with its working directory. As of this werk, the scheduler instead skips affected plans and forwards these issues to the Checkmk server, where they are reported to the user.
+
+After updating, the *RMK scheduler status* service will report UNKNOWN. The plan and test services
+will go stale. Furthermore, the *Check_MK* service will report that there is monitoring data missing
+for the plugins `robotmk_plan` and `robotmk_test`. To remedy these issues, users first have to
+re-bake and then update the Checkmk agent on affected systems. After updating the agent, users
+have to re-discover the services of affected Checkmk hosts.
diff --git a/.werks/16357.md b/.werks/16357.md
new file mode 100644
index 00000000000..d9de542f747
--- /dev/null
+++ b/.werks/16357.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# Check certificates: Fix checking of common name of issuer
+
+key | value
+---------- | ---
+date | 2024-04-18T11:02:41+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The active certificate check (configured via the ruleset *Check certificates*) did not check the
+common name (CN) of the certificate issuer correctly:
+
+* If only the checking of the issuer CN was activated, the CN of the subject was checked instead.
+* If both the checking of the issuer and the subject CN were activated, the active check crashed with\
+error: the argument '--subject-cn \' cannot be used multiple times.
diff --git a/.werks/16359.md b/.werks/16359.md
new file mode 100644
index 00000000000..08835b178a1
--- /dev/null
+++ b/.werks/16359.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Remove deprecated windows plugins
+
+key | value
+---------- | ---
+date | 2024-01-16T11:18:30+00:00
+version | 2.3.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+In previous versions some Windows agent plugins were included in `share/check_mk/agents/windows/plugins/deprecated`.
+As the name already suggested, these were deprecated for a long time and are now removed.
+
+If you still require them you can use the plugins from former versions.
diff --git a/.werks/16360.md b/.werks/16360.md
new file mode 100644
index 00000000000..c29cba67d83
--- /dev/null
+++ b/.werks/16360.md
@@ -0,0 +1,27 @@
+[//]: # (werk v2)
+# Dedicated security logging
+
+key | value
+---------- | ---
+date | 2024-02-16T09:38:25+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+To make it easier to detect certain security-relevant events, a dedicated security log is introduced. You can find it in `var/log/security.log`.
+
+The format of each line is:
+1. The date and time the log entry was created (local time).
+2. The security domain and the process id.
+3. The message as JSON with a `summary` and a `details` key. The contents of `details` vary by domain.
+
+Currently the following domains exist:
+* `application_errors`: e.g. if a CSRF token could not be found/validated
+* `auth`: e.g. successful / unsuccessful authentication attempts. (Successful authentication attempts without opening a session are currently not logged.)
+* `service`: e.g. the start of a site
+* `user_management`: e.g. change of a password
+
+Please note that this logfile is still subject to change. Additional events might be added and details may change with p-releases.
diff --git a/.werks/16361.md b/.werks/16361.md
new file mode 100644
index 00000000000..b4066ce3c4f
--- /dev/null
+++ b/.werks/16361.md
@@ -0,0 +1,36 @@
+[//]: # (werk v2)
+# Privilege escalation in Windows agent
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-26T14:44:18+00:00
+level | 1
+class | security
+component | checks
+edition | cre
+
+In order to execute some system commands, the Checkmk Windows agent writes cmd files to `C:\Windows\Temp\` and afterwards executes them.
+The permissions of the files were set restrictively, but existing files were not properly handled.
+If a cmd file already existed and was write-protected, the agent was not able to rewrite the file, but did not handle this case and executed the existing file nevertheless.
+
+We thank Michael Baer (SEC Consult Vulnerability Lab) for reporting this issue.
+
+**Affected Versions**:
+* 2.2.0
+* 2.1.0
+* 2.0.0
+
+**Indicators of Compromise**:
+The filename of the cmd file needed to be guessed; therefore, the proof-of-concept creates a lot of files in `C:\Windows\Temp` with filenames matching `cmk_all_\d+_1.cmd`.
+These file-creation events could be monitored.
+
+**Vulnerability Management**:
+We have rated the issue with a CVSS Score of 8.8 (High) with the following CVSS vector:
+`CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H`.
+We assigned CVE-2024-0670 to this vulnerability.
+
+**Changes**:
+This Werk changes the temp folder and adds a subfolder with more restrictive permissions in which the files are created.
+Errors are also handled better.
\ No newline at end of file
diff --git a/.werks/16364.md b/.werks/16364.md
new file mode 100644
index 00000000000..edd96301f17
--- /dev/null
+++ b/.werks/16364.md
@@ -0,0 +1,43 @@
+[//]: # (werk v2)
+# tag_group: change the identification field from 'ident' to 'id'
+
+key | value
+---------- | ---
+date | 2024-01-29T13:13:54+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | no
+
+Before this Werk, when creating a tag group, the **ident** field was used to provide its identification as well as that of the tags associated with the group; however, the information was returned in the **id** field. This Werk unifies the names: the **ident** fields are now renamed to **id**. Users should adapt their scripts accordingly.
+
+For compatibility reasons Checkmk 2.2.0 and 2.3.0 will support both **id** and **ident**, but **ident** will be removed in the next version.
+
+The following example shows the changes that need to be applied to the payload to use this endpoint:
+
+
+Original payload:
+```json
+{
+ "ident": "test_group",
+ "title": "Test group",
+ "help_text": "My test groupd",
+ "tags": [
+ {"ident": "test", "title": "Test Tag"}
+ ]
+}
+```
+
+Updated payload:
+```json
+{
+ "id": "test_group",
+ "title": "Test group",
+ "help_text": "My test groupd",
+ "tags": [
+ {"id": "test", "title": "Test Tag"}
+ ]
+}
+```
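+
+For scripts that build such payloads, a small helper can perform the rename (a hypothetical sketch, assuming plain dict payloads):
+
+```python
+def migrate_ident_to_id(payload: dict) -> dict:
+    # Rename the deprecated 'ident' key to 'id', recursing into the tag list.
+    migrated = {("id" if key == "ident" else key): value for key, value in payload.items()}
+    if isinstance(migrated.get("tags"), list):
+        migrated["tags"] = [migrate_ident_to_id(tag) for tag in migrated["tags"]]
+    return migrated
+```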
diff --git a/.werks/16365.md b/.werks/16365.md
new file mode 100644
index 00000000000..a8a824246ff
--- /dev/null
+++ b/.werks/16365.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# baked_agents: Fix version displayed on agent files
+
+key | value
+---------- | ---
+date | 2024-02-08T08:30:06+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | wato
+level | 1
+compatible | yes
+
+Prior to this Werk, the running version of Checkmk was used to display
+the version of the agents. This meant that after updating Checkmk to a
+new version, the wrong version of the agents was displayed until they
+were baked again.
+
+To fix this bug, the agent version is now displayed instead of the
+Checkmk version.
+
+SUP-14846
diff --git a/.werks/16366.md b/.werks/16366.md
new file mode 100644
index 00000000000..1a15ad2ec62
--- /dev/null
+++ b/.werks/16366.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# rule: Fix crash on rule creation via REST API without conditions or properties
+
+key | value
+---------- | ---
+date | 2024-02-09T07:06:04+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+Before this Werk, a status code 500 (Internal Server Error) was returned when creating a
+rule without conditions or properties. This Werk fixes that, and it is now possible to
+create such a rule.
+
diff --git a/.werks/16367.md b/.werks/16367.md
new file mode 100644
index 00000000000..e77390fa591
--- /dev/null
+++ b/.werks/16367.md
@@ -0,0 +1,23 @@
+[//]: # (werk v2)
+# audit_log: Deprecate 'delete' method and replace it for 'archive'
+
+key | value
+---------- | ---
+date | 2024-02-09T13:24:13+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | no
+
+Since the DELETE method actually archives the audit log entries, this endpoint is
+replaced by `domain-types/audit_log/actions/archive/invoke`.
+
+The DELETE endpoint is deprecated and will be removed in version 2.4.0.
+
+In order to update their scripts, users should replace
+`curl -X DELETE "http://host/site/api/1.0/domain-types/audit_log/collections/all"`
+
+with
+`curl -X POST "http://host/site/api/1.0/domain-types/audit_log/actions/archive/invoke"`
diff --git a/.werks/16368.md b/.werks/16368.md
new file mode 100644
index 00000000000..371c9b6aa3d
--- /dev/null
+++ b/.werks/16368.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# service_discovery: Fixed internal server error on service discovery when IP cannot be resolved
+
+key | value
+---------- | ---
+date | 2024-02-12T12:34:21+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+Prior to this Werk, when a service discovery was requested for a host whose IP could not be resolved, the endpoint returned a 500 error status (Internal Server Error). This Werk corrects this behavior; the endpoint now returns error code 400.
+
diff --git a/.werks/16369.md b/.werks/16369.md
new file mode 100644
index 00000000000..b149e1e8444
--- /dev/null
+++ b/.werks/16369.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# downtimes: Added service_description field to services downtimes
+
+key | value
+---------- | ---
+date | 2024-02-20T14:52:12+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+When querying downtimes through the "show all downtimes" endpoint, the service_description field for service downtimes was not included. This werk introduces this field, which is not present in the host downtimes.
diff --git a/.werks/16370.md b/.werks/16370.md
new file mode 100644
index 00000000000..8a8a3378826
--- /dev/null
+++ b/.werks/16370.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# time_periods: Fix error when using time ranges shorter than a minute
+
+key | value
+---------- | ---
+date | 2024-02-22T10:48:51+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+Prior to this Werk, when trying to create a time period with a time range
+shorter than 1 minute, an error code 500 was returned due to a validation
+error. This Werk solves that problem, and time periods shorter than 1 minute
+are validated correctly, so the endpoint now accepts them.
diff --git a/.werks/16371.md b/.werks/16371.md
new file mode 100644
index 00000000000..01a5de77918
--- /dev/null
+++ b/.werks/16371.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# sla: range field for predefined_time_range parameter is mandatory
+
+key | value
+---------- | ---
+date | 2024-02-23T08:00:57+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | rest-api
+level | 1
+compatible | yes
+
+Before this werk, when the SLA was computed for a predefined time
+range without specifying the range field, an error status 500
+Internal Server Error was returned. This werk solves the problem
+by checking for the existence of the range field, and if it does
+not exist, the endpoint returns an error status 400 - Bad Request.
+
diff --git a/.werks/16372.md b/.werks/16372.md
new file mode 100644
index 00000000000..b20249693dc
--- /dev/null
+++ b/.werks/16372.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# swagger: Fix parameters not working in Swagger
+
+key | value
+---------- | ---
+date | 2024-03-01T09:27:29+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+Since not all Python regexes are ECMA (JavaScript) compliant, several parameter fields did not work in Swagger. This werk solves the problem by adjusting the regexes to be compatible with JavaScript.
diff --git a/.werks/16373.md b/.werks/16373.md
new file mode 100644
index 00000000000..8a9059dd078
--- /dev/null
+++ b/.werks/16373.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# metrics: allow dots in metric ids
+
+key | value
+---------- | ---
+date | 2024-02-28T08:35:14+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+Before this Werk, the REST API was not able to access metrics named after IP addresses. This Werk fixes that and allows users to access metrics whose ids contain dots.
diff --git a/.werks/16374.md b/.werks/16374.md
new file mode 100644
index 00000000000..c830f8c8ee4
--- /dev/null
+++ b/.werks/16374.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# mem_win: Base prediction on original measurement
+
+key | value
+---------- | ---
+date | 2024-01-11T22:47:00+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This only affects users that monitor the memory usage of Windows hosts using both averaging and predictive levels.
+
+The prediction is now based on the unaveraged original value, potentially leading to a slightly larger standard deviation.
+The resulting levels are still applied to the averaged value, if averaging is configured.
\ No newline at end of file
diff --git a/.werks/16375.md b/.werks/16375.md
new file mode 100644
index 00000000000..848dc04592e
--- /dev/null
+++ b/.werks/16375.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Rule "Check Email": Allow all makros
+
+key | value
+---------- | ---
+date | 2024-01-15T13:59:03+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The ruleset "Check Email" recently featured stricter validation.
+As a result, macros (like `$HOSTNAME$`) could not be used anymore.
+This was partially compensated for in [#15203](https://checkmk.com/werk/15203), but this was still too restrictive.
+Users can now configure validated host addresses or unvalidated strings containing macros.
diff --git a/.werks/16376.md b/.werks/16376.md
new file mode 100644
index 00000000000..b55007cda9d
--- /dev/null
+++ b/.werks/16376.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# DCD: Not respecting "Validity of missing data" setting
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-01-23T14:05:03+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+This fixes disappearing hosts in case of temporarily missing data around the time when
+a cleanup background job is run (around midnight).
+
+In case a piggybacked host temporarily did not receive any data while the background
+(cron) job `cmk --cleanup-piggyback` was executed, a subsequent run of the DCD would not
+respect the "Validity of missing data" setting, wrongly removing the affected host
+from the monitoring configuration.
\ No newline at end of file
diff --git a/.werks/16377.md b/.werks/16377.md
new file mode 100644
index 00000000000..3c4d0e26790
--- /dev/null
+++ b/.werks/16377.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# postfix_mailq: Rename "Postfix Queue" to "Postfix Queue default"
+
+key | value
+---------- | ---
+date | 2024-02-01T08:44:18+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This affects users monitoring hosts that run one single postfix instance.
+
+For new installations, the service "Postfix Queue" will be renamed to "Postfix Queue default".
+
+In general, Checkmk is able to monitor multiple postfix instances running on the monitored host.
+This will change the name in the common case where users are not running multiple postfix instances, but only the "default" one.
+If you want to switch to the new service description after an upgrade, you can do so using the setting "Use new service descriptions".
+Be aware that you will lose the historic data in case you do that.
diff --git a/.werks/16378.md b/.werks/16378.md
new file mode 100644
index 00000000000..a5a85bdef04
--- /dev/null
+++ b/.werks/16378.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Checkmk Linux agent: ignore \*.dpkg-tmp files in plugin folder
+
+key | value
+---------- | ---
+date | 2024-02-20T21:25:44+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The Checkmk agents for Linux, Solaris, AIX, OpenWrt and FreeBSD now ignore \*.dpkg-tmp files in the plugins folder.
+
+They previously executed them inadvertently as plugins.
+This mostly fails silently (or even succeeds), but sometimes it is reported by the "Check_MK Agent" service.
diff --git a/.werks/16380.md b/.werks/16380.md
new file mode 100644
index 00000000000..d5874e316c6
--- /dev/null
+++ b/.werks/16380.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Ruleset API: add option to configure the default levels type
+
+key | value
+---------- | ---
+date | 2024-03-11T13:21:51+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+For details refer to the developer documentation in your site's "Help" menu.
diff --git a/.werks/16381.md b/.werks/16381.md
new file mode 100644
index 00000000000..9465de9fbbc
--- /dev/null
+++ b/.werks/16381.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# HP-UX: Mirror State of LVM Logical Volumes: be more robust in state evaluation
+
+key | value
+---------- | ---
+date | 2024-03-11T13:39:19+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The plugin previously only considered the status to be OK if the device reported a literal "available,syncd".
+It is now OK for any combination of (some of) the words "available", "syncd", "snapshot" and "space_efficient".
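+
+A minimal sketch of the relaxed evaluation (hypothetical code, not the actual plugin):
+
+```python
+_OK_WORDS = {"available", "syncd", "snapshot", "space_efficient"}
+
+
+def mirror_state_is_ok(reported: str) -> bool:
+    # "available,syncd", "available,snapshot,space_efficient", ... are all OK.
+    words = reported.replace(",", " ").split()
+    return bool(words) and all(word in _OK_WORDS for word in words)
+```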
diff --git a/.werks/16382.md b/.werks/16382.md
new file mode 100644
index 00000000000..e029b705ac4
--- /dev/null
+++ b/.werks/16382.md
@@ -0,0 +1,41 @@
+[//]: # (werk v2)
+# Cleanup SNMP version and bulkwalk rulesets
+
+key | value
+---------- | ---
+date | 2024-03-13T06:09:17+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | no
+
+This werk is incompatible for users using the rule previously named "Disable bulk walks on SNMPv2c/v3" (see below for details).
+
+The ruleset for disabling bulkwalks has not been correctly applied to SNMPv3 hosts using the inline backend in the past.
+In addition, it has been interfering with the ruleset to enable SNMP version 2 (over version 1).
+
+## Change
+
+The following new ruleset _names_ are introduced:
+
+ * "Disable bulkwalks" (formerly known as "Disable bulk walks on SNMPv2c/v3")
+ * "Enable SNMPv2c for hosts" (formerly known as "Enable SNMPv2c and bulk walk for hosts")
+ * "Enable SNMPv2c for management boards" (formerly known as "Enable SNMPv2c and bulk walk for management boards")
+
+With this change the following logic applies:
+
+ * **bulkwalk**:
+ The "bulkwalk" query is used if and only if the ruleset "Disable bulkwalks" does not match the host and it is available in the used SNMP version (v1 does not have "bulkwalk").
+ * **SNMP version**:
+ Checkmk will use SNMP v3 if and only if the host configuration contains SNMP v3 style credentials.
+ The remaining hosts will use SNMP v2c if and only if the ruleset "Enable SNMPv2c for hosts" matches, otherwise SNMPv1.
+
+This applies to both the "inline" and the "classic" backend.
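+
+The resulting selection can be sketched as follows (a minimal sketch with hypothetical helper names, not the actual Checkmk implementation):
+
+```python
+def select_snmp_settings(has_v3_credentials: bool, v2c_ruleset_matches: bool,
+                         disable_bulkwalk_matches: bool) -> tuple[str, bool]:
+    """Return the SNMP version and whether bulkwalk is used, per the rules above."""
+    if has_v3_credentials:
+        version = "v3"
+    elif v2c_ruleset_matches:
+        version = "v2c"
+    else:
+        version = "v1"
+    # Bulkwalk is only available from v2c on, and only if not disabled by the ruleset.
+    use_bulkwalk = version != "v1" and not disable_bulkwalk_matches
+    return version, use_bulkwalk
+```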
+
+## Incompatibility
+
+Previously, in order to successfully disable SNMP bulkwalks, users had to make sure the ruleset "Disable bulk walks on SNMPv2c/v3" matched the host, and the ruleset "Enable SNMPv2c and bulk walk for hosts" did not match the host.
+This is no longer the case.
+All hosts that are neither configured for SNMPv3 (see above) nor matched by the "Enable SNMPv2c" ruleset will use SNMPv1.
diff --git a/.werks/16383.md b/.werks/16383.md
new file mode 100644
index 00000000000..bd5cb9b3037
--- /dev/null
+++ b/.werks/16383.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Handle empty operational status during interface inventory
+
+key | value
+---------- | ---
+date | 2024-03-14T13:48:35+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The inventory plugin `inv_if.py` crashed on an empty oper_status, a value that at least
+some Cisco ASA/FirePower devices report. This is now fixed.
diff --git a/.werks/16384.md b/.werks/16384.md
new file mode 100644
index 00000000000..3c0b4c9a377
--- /dev/null
+++ b/.werks/16384.md
@@ -0,0 +1,40 @@
+[//]: # (werk v2)
+# notification rule: allow for non builtin service levels
+
+key | value
+---------- | ---
+date | 2024-01-17T11:19:06+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | no
+
+
+When configuring a notification rule via the REST API, you could not
+set the value for "match_service_levels" to anything but the default
+service levels. This werk addresses the issue by allowing any
+of the configured service levels to be used. As a consequence,
+the request schema changes: previously, we accepted
+the service level string value, whereas now we accept the integer
+value.
+
+Previous schema
+```
+{"match_service_levels": {
+ "state": "enabled",
+ "value": {"from_level": "silver", "to_level": "gold"}
+ }
+}
+```
+
+New schema
+```
+{"match_service_levels: {
+ "state": "enabled",
+ "value": {"from_level": 10, "to_level": 20}
+ }
+}
+```
+
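+If you maintain scripts against this endpoint, a small helper can translate the old
+fragment into the new one (a sketch; the mapping is derived from the example above
+and must be extended for custom service levels):
+
+```python
+# Builtin service levels as used in the example above; extend for custom levels.
+SERVICE_LEVELS = {"silver": 10, "gold": 20}
+
+def to_new_schema(old: dict) -> dict:
+    """Translate the old string-based match_service_levels value into the new integer-based one."""
+    return {
+        "state": old["state"],
+        "value": {
+            "from_level": SERVICE_LEVELS[old["value"]["from_level"]],
+            "to_level": SERVICE_LEVELS[old["value"]["to_level"]],
+        },
+    }
+```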
diff --git a/.werks/16385.md b/.werks/16385.md
new file mode 100644
index 00000000000..f7c92c9b85b
--- /dev/null
+++ b/.werks/16385.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# revert_changes: internal changes can be reverted only if the user has the correct permission
+
+key | value
+---------- | ---
+date | 2024-01-17T11:41:59+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Changes made by the Checkmk internal user can now only be reverted when the
+logged-in user has the permission "Discard foreign changes".
diff --git a/.werks/16386.md b/.werks/16386.md
new file mode 100644
index 00000000000..a90ca9b0f29
--- /dev/null
+++ b/.werks/16386.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# notification rule: match service levels and match time period being saved with wrong key
+
+key | value
+---------- | ---
+date | 2024-01-19T11:43:30+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+
+Previously, when creating or updating a notification rule via the REST API, the
+matching conditions for service levels and time periods were saved to
+file with an incorrect key. This werk addresses the issue by correcting the
+keys being saved.
+
diff --git a/.werks/16387.md b/.werks/16387.md
new file mode 100644
index 00000000000..3f6cdaa523d
--- /dev/null
+++ b/.werks/16387.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# notification_rules: custom plugins now created correctly when using the REST-API
+
+key | value
+---------- | ---
+date | 2024-01-22T08:18:31+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+
+Prior to this werk, when creating a notification rule with a custom
+plugin via the REST API, the parameters were not shown in the UI
+as expected, because an incorrect format was saved to file.
+
+This werk addresses the issue by correcting the format that is saved
+to file.
\ No newline at end of file
diff --git a/.werks/16388.md b/.werks/16388.md
new file mode 100644
index 00000000000..220c371c6c7
--- /dev/null
+++ b/.werks/16388.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# host: query on mk_inventory no longer causing crash
+
+key | value
+---------- | ---
+date | 2024-01-24T09:17:52+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+
+Previously, when a host had no check running,
+calling the following endpoint would cause a crash.
+
+GET /domain-type/host/collections/all?columns=mkinventory
+
+This werk addresses the issue by preventing the crash;
+an appropriate response is now returned.
\ No newline at end of file
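+
+A minimal sketch of such a query using Python (site URL and credentials are
+placeholders; the endpoint path is the one quoted above):
+
+```python
+import requests
+
+resp = requests.get(
+    "http://localhost/mysite/check_mk/api/1.0/domain-type/host/collections/all",
+    params={"columns": "mkinventory"},
+    headers={"Authorization": "Bearer automation <secret>", "Accept": "application/json"},
+)
+print(resp.status_code)  # an appropriate response now, instead of a crash
+```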
diff --git a/.werks/16389.md b/.werks/16389.md
new file mode 100644
index 00000000000..15aa10f866d
--- /dev/null
+++ b/.werks/16389.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# authentication: remove user profile dir on failed login of unknown users
+
+key | value
+---------- | ---
+date | 2024-01-26T15:59:51+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+
+Previously, failed login attempts with an unknown user would create a
+user profile directory. This is no longer the case. The profile
+directory is now only created for valid users.
+
diff --git a/.werks/16390.md b/.werks/16390.md
new file mode 100644
index 00000000000..f211bdfcb75
--- /dev/null
+++ b/.werks/16390.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# notification_rule: cancel previous notifications now working with custom plugin scripts
+
+key | value
+---------- | ---
+date | 2024-01-31T15:15:29+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+When creating or updating notification rules, the option to "cancel_previous_notifications"
+was previously not available when the plugin name selected was a custom plugin script.
+
+This werk addresses the issue and now allows custom plugin scripts when setting the
+option to cancel previous notifications.
diff --git a/.werks/16391.md b/.werks/16391.md
new file mode 100644
index 00000000000..600cf484225
--- /dev/null
+++ b/.werks/16391.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# ldap_connection: When you create/clone a new ldap connection, the ID will now be unique
+
+key | value
+---------- | ---
+date | 2024-02-09T15:54:09+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Previously, when creating a new LDAP connection via the UI, you could provide
+an ID that was already in use by another user connection. This is no longer
+the case with this werk. Now, when you clone a connection, a new unique ID
+will be generated from the cloned connection. If you create a new connection,
+the ID must be unique or you will get an error.
diff --git a/.werks/16392.md b/.werks/16392.md
new file mode 100644
index 00000000000..e77852fc7f4
--- /dev/null
+++ b/.werks/16392.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# global_settings: LSI MegaRAID: Logical Disks selected by default
+
+key | value
+---------- | ---
+date | 2024-02-13T15:13:19+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16393.md b/.werks/16393.md
new file mode 100644
index 00000000000..d59e5b71a42
--- /dev/null
+++ b/.werks/16393.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# notification_rule: allow custom plugin names when selecting cancel without a restart
+
+key | value
+---------- | ---
+date | 2024-02-14T13:46:22+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+We recently made a change that allows you to create a notification rule
+via the REST API using a custom plugin while also setting the option
+"cancel_previous_notifications". However, an Apache restart was required,
+since we were verifying the custom plugin via the runtime-generated
+schema. This werk fixes the issue by removing this verification from the
+schema and checking at the endpoint instead.
diff --git a/.werks/16394.md b/.werks/16394.md
new file mode 100644
index 00000000000..90d57907ca1
--- /dev/null
+++ b/.werks/16394.md
@@ -0,0 +1,13 @@
+[//]: # (werk v2)
+# check_mk_agent.aix: Fix Missing Plugin Details
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-17T16:15:00+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
diff --git a/.werks/16395.md b/.werks/16395.md
new file mode 100644
index 00000000000..0ecc4c23bb5
--- /dev/null
+++ b/.werks/16395.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# "Cisco Devices: Temperature Sensors" Revert: used wrong lower device levels
+
+key | value
+---------- | ---
+date | 2024-01-18T09:22:17+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+In Werk [#16276](https://checkmk.com/werk/16276), we replaced the "less than" comparison with the
+"less or equal" comparison. This is not consistent with how Checkmk handles lower levels, despite
+what the Werk claims. This Werk restores the behavior from before Werk
+[#16276](https://checkmk.com/werk/16276). Note that Werk [#16276](https://checkmk.com/werk/16276)
+also uncovered some Cisco devices that report faulty thresholds for "less or equal". These
+thresholds are now ignored again.
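+
+For illustration, Checkmk evaluates lower levels with a strict "less than" comparison,
+roughly like this (a sketch, not the actual plugin code):
+
+```python
+def state_for_lower_levels(value: float, warn: float, crit: float) -> int:
+    """Return the monitoring state for a value checked against lower levels."""
+    if value < crit:
+        return 2  # CRIT: value dropped below the critical level
+    if value < warn:
+        return 1  # WARN: value dropped below the warning level
+    return 0  # OK: a value exactly on the level is still OK
+```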
diff --git a/.werks/16396.md b/.werks/16396.md
new file mode 100644
index 00000000000..cddc7308619
--- /dev/null
+++ b/.werks/16396.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# KUBE: Fixing Validation Error for ContainerWaitingState
+
+key | value
+---------- | ---
+date | 2024-01-22T13:57:03+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16397.md b/.werks/16397.md
new file mode 100644
index 00000000000..d92bbc623f4
--- /dev/null
+++ b/.werks/16397.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# special_zerto: Error Message in Check_MK HW/SW Inventory
+
+key | value
+---------- | ---
+date | 2024-01-23T08:14:35+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously, the special agent special_zerto would show the following error message in the Check_MK HW/SW
+Inventory:
+```
+NOOPParser: Ignoring invalid data b'<<>>Initialized OK<<<>>>
+```
+Moreover, the service `Zerto Agent Status` was not discoverable and some piggyback data would
+incorrectly be mapped to the source host. This is now fixed.
diff --git a/.werks/16398.md b/.werks/16398.md
new file mode 100644
index 00000000000..67f03ea93e2
--- /dev/null
+++ b/.werks/16398.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# mk_jolokia: Respect Timeout Settings
+
+key | value
+---------- | ---
+date | 2024-01-23T09:28:00+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously, passing a timeout parameter to the agent plugin `mk_jolokia` had no effect.
+This has been fixed.
diff --git a/.werks/16400.md b/.werks/16400.md
new file mode 100644
index 00000000000..9d205e0a4cb
--- /dev/null
+++ b/.werks/16400.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# omd restart: Exit Code is 2 on Failure
+
+key | value
+---------- | ---
+date | 2024-01-26T13:27:19+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+`omd restart` executes `omd start` and `omd stop`, both of which can terminate with exit code zero
+or two. Previously, the exit code of these commands would be ignored. `omd restart` now reports the
+worst exit code reported by the two commands.
diff --git a/.werks/16401.md b/.werks/16401.md
new file mode 100644
index 00000000000..23605df3723
--- /dev/null
+++ b/.werks/16401.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# omd start mkeventd: Error Code of Event Daemon
+
+key | value
+---------- | ---
+date | 2024-02-02T12:32:48+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+The command `omd start mkeventd` starts the event daemon. Previously, any exit code produced
+by this daemon would be ignored. Now, if the exit code is non-zero, then the message `Failed` is
+shown and the command `omd start mkeventd` exits with error code 2.
diff --git a/.werks/16402.md b/.werks/16402.md
new file mode 100644
index 00000000000..f9a12af32b7
--- /dev/null
+++ b/.werks/16402.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# azure_vm_network_io: KeyError (total_Network_In_Total)
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-05T14:11:34+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+The Microsoft Azure API sometimes omits the metrics total_Network_In_Total and
+total_Network_Out_Total. The check plugin azure_vm_network_io now omits computations based on these
+metrics rather than crashing.
\ No newline at end of file
diff --git a/.werks/16403.md b/.werks/16403.md
new file mode 100644
index 00000000000..9d340ae1780
--- /dev/null
+++ b/.werks/16403.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# Fix assert self._rulespec.item_name is not None
+
+key | value
+---------- | ---
+date | 2024-02-13T10:08:20+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+A `rulespec` may have an `item_spec` without a title.
+[Werk #13387](https://checkmk.com/werk/13387) changed this so that, if the title is missing,
+the following crash occurs:
+```
+assert self._rulespec.item_name is not None
+```
+The crash occurred if one navigated to the parameters via `Parameters for this service` and clicked
+`Parameters`. It is now fixed.
diff --git a/.werks/16405.md b/.werks/16405.md
new file mode 100644
index 00000000000..fffbe5051f6
--- /dev/null
+++ b/.werks/16405.md
@@ -0,0 +1,25 @@
+[//]: # (werk v2)
+# cmk-update-config: Don't Read Characters Pressed before Prompt Appears
+
+key | value
+---------- | ---
+date | 2024-02-14T14:29:36+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+`cmk-update-config` can prompt you with questions about how to continue the update. This
+is an example:
+```
+Exception while trying to load rulesets:
+
+You can abort the update process (A) and try to fix the incompatibilities or try to continue the update (c).
+Abort update? [A/c]
+```
+Previously, these prompts would also read input that was typed before the prompt was shown. Now,
+only input typed after the prompt is shown is read.
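+
+One common way to achieve this on Unix-like systems is to flush the terminal's input
+queue right before prompting (a sketch, not necessarily Checkmk's implementation):
+
+```python
+import sys
+import termios
+
+def prompt(question: str) -> str:
+    # Discard anything typed before the prompt appears (Unix only).
+    termios.tcflush(sys.stdin.fileno(), termios.TCIFLUSH)
+    return input(question)
+```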
+
+
diff --git a/.werks/16406.md b/.werks/16406.md
new file mode 100644
index 00000000000..c022bda06a8
--- /dev/null
+++ b/.werks/16406.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# cmk-update-config: Don't Prompt User if Using Conflict Mode "install" or "keepold"
+
+key | value
+---------- | ---
+date | 2024-03-07T13:04:36+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 2
+compatible | yes
+
+While upgrading with `cmk-update-config`, the user can be prompted with questions about the next
+update steps. This questioning can be disabled by using one of the conflict options `install`,
+`keepold` or `abort`. Due to a regression in 2.3.0b1, the options `install` and `keepold` did not
+suppress these questions. In particular, if there was a problem while `Verifying the Checkmk
+configuration...`, the update of Checkmk on Checkmk appliances would exit with a traceback.
+Upgrading to 2.3.0b1 was thus only possible if all problems were fixed beforehand.
diff --git a/.werks/16407.md b/.werks/16407.md
new file mode 100644
index 00000000000..81f9e5bd1a4
--- /dev/null
+++ b/.werks/16407.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# omd update: Don't Delete "config.pb" During Pre-Update
+
+key | value
+---------- | ---
+date | 2024-03-07T13:27:55+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | omd
+level | 1
+compatible | yes
+
+The `omd update` command has the capability to undo the changes it has made up until `Verifying
+Checkmk configuration...`. However, any change after `Verifying Checkmk configuration...` is
+persisted. Due to a regression caused by Werk #15725, the file `config.pb` was deleted during this
+verification. If the update aborted during the verification, then users would see the following error:
+```
+Starting cmc...Failed (Config /omd/sites/prod_dmz/var/check_mk/core/config.pb missing, run "cmk -U" and try again)
+```
+With this Werk, `config.pb` is deleted only during `Updating Checkmk configuration...`.
diff --git a/.werks/16408.md b/.werks/16408.md
new file mode 100644
index 00000000000..4b563a55f75
--- /dev/null
+++ b/.werks/16408.md
@@ -0,0 +1,24 @@
+[//]: # (werk v2)
+# omd update: Allow Aborting Before "Completed verifying site configuration."
+
+key | value
+---------- | ---
+date | 2024-03-07T13:47:39+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 2
+compatible | yes
+
+Sites may have configuration, MKPs and other local files which are incompatible with the version
+targeted by `omd update`. If such a problem occurs, then aborting the update may be necessary. In
+earlier versions, users were advised to perform a downgrade, which was not user-friendly and had
+several pitfalls; downgrading is not supported, as it has many potential downsides. With this Werk,
+`omd update` is better able to deal with these situations. `omd update` will show the message
+```
+Completed verifying site configuration. Your site now has version {target version}.
+```
+If the update is aborted before this message is shown, then the site is restored to its previous
+state. This includes selecting the `abort` option, unexpected internal errors, and aborting the
+update using CTRL-C.
diff --git a/.werks/16409.md b/.werks/16409.md
new file mode 100644
index 00000000000..66f357a156e
--- /dev/null
+++ b/.werks/16409.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Prometheus & Alertmanager: Removal of 'IP Address' and 'Host name' options
+
+key | value
+---------- | ---
+date | 2024-03-07T17:16:21+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+In Werk #14573, the deprecation of the options 'IP Address' and 'Host name' of the rules Prometheus
+and Alertmanager was announced and described. This Werk removes these options.
diff --git a/.werks/16410.md b/.werks/16410.md
new file mode 100644
index 00000000000..c71d741ef08
--- /dev/null
+++ b/.werks/16410.md
@@ -0,0 +1,27 @@
+[//]: # (werk v2)
+# omd update: Fix Aborting in Docker Container
+
+key | value
+---------- | ---
+date | 2024-03-12T15:30:46+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+This bug affects users of the 2.3.0 beta.
+
+In Werk #16408, a mechanism was introduced to restore a site after an aborted update. If this
+mechanism triggered inside a Docker container, the site may have been left in an inconsistent state. Any
+further update will then show the following error:
+```
+FileNotFoundError: [Errno 2] No such file or directory: '/omd/versions/2.3.0b1.cee/share/omd/skel.permissions'
+```
+You can check whether you are affected by this issue by comparing the output of `omd update` and
+`cat "${OMD_ROOT}"/.version_meta/version`. If the versions do not match, then you are affected by
+this issue.
+
+This Werk fixes the update procedure. In case you are affected, you can delete `.version_meta` and
+upgrade to the version which is shown by `omd update`.
diff --git a/.werks/16411.md b/.werks/16411.md
new file mode 100644
index 00000000000..7b15199b004
--- /dev/null
+++ b/.werks/16411.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Robotmk: Add Inventory
+
+key | value
+---------- | ---
+date | 2024-03-13T10:26:52+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | checks
+level | 1
+compatible | yes
+
+This Werk adds a HW/SW inventory for Robotmk. The content is under active development.
diff --git a/.werks/16412.md b/.werks/16412.md
new file mode 100644
index 00000000000..83841293ff4
--- /dev/null
+++ b/.werks/16412.md
@@ -0,0 +1,35 @@
+[//]: # (werk v2)
+# cmk-update-config: Correct Assigning Files to Extension Package
+
+key | value
+---------- | ---
+date | 2024-03-14T12:54:20+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+This Werk affects those who are testing the 2.3.0 beta. It affects users who have enabled MKPs and
+then use either `omd update` or `cmk-update-config`.
+
+During the pre-update steps of Checkmk, all rulesets are loaded, including those that are part of an MKP.
+Previously, if an error occurred during this step, the user was prompted with the following error:
+```
+ 02/05 UI extensions...
+Error loading rulespecs:
+[ValueError('cmk.plugins.redfish.rulesets.datasource: boom')]
+Incompatible local file 'cmk/plugins/redfish/rulesets/datasource.py'.
+Error: cmk.plugins.redfish.rulesets.datasource: boom
+
+You can abort the update process (A) and try to fix the incompatibilities or continue the update (c).
+Abort the update process? [A/c]
+```
+Thus, even though the file belongs to an MKP, if that file is part of the ruleset API v1,
+Checkmk did not correctly recognize during the update that the file belongs to an MKP. Now, Checkmk
+offers to disable the MKP instead, i.e.,
+```
+You can abort the update process (A) or disable the extension package (d) and continue the update process.
+Abort the update process? [A/d]
+```
diff --git a/.werks/16414.md b/.werks/16414.md
new file mode 100644
index 00000000000..31d930828f4
--- /dev/null
+++ b/.werks/16414.md
@@ -0,0 +1,23 @@
+[//]: # (werk v2)
+# Migrate Old Configuration Format For 'Disable notifications'
+
+key | value
+---------- | ---
+date | 2024-03-18T13:59:16+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | notifications
+level | 1
+compatible | yes
+
+In Werk #6417, we announced that some changes to `etc/check_mk/conf.d/wato/contacts.mk` are
+necessary before updating. In case users did not migrate their configuration, they would encounter
+the following crash in 2.2.0, but not in 2.1.0:
+```
+ File "/omd/sites/edding/lib/python3/cmk/base/notify.py", line 968, in rbn_rule_contacts
+ if disable_notifications_opts.get("disable", False):
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+AttributeError: 'bool' object has no attribute 'get'
+```
+With this Werk, the configuration is migrated automatically.
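+
+Judging from the traceback above, the old format stored a plain boolean, while the new
+format is a dictionary with a "disable" key. The automatic migration can therefore be
+sketched as follows (an illustration under that assumption, not the actual update code):
+
+```python
+def migrate_disable_notifications(value):
+    """Convert the old boolean format into the new dictionary format."""
+    if isinstance(value, bool):
+        return {"disable": value}
+    return value  # already a dictionary, nothing to do
+```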
diff --git a/.werks/16415.md b/.werks/16415.md
new file mode 100644
index 00000000000..b968aed3c2e
--- /dev/null
+++ b/.werks/16415.md
@@ -0,0 +1,13 @@
+[//]: # (werk v2)
+# Prometheus & Alertmanager: Allow $HOSTNAME$ and $HOSTADDRESS$ in 'URL server address'
+
+key | value
+---------- | ---
+date | 2024-03-27T14:58:45+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
diff --git a/.werks/16416.md b/.werks/16416.md
new file mode 100644
index 00000000000..7a3f54bee19
--- /dev/null
+++ b/.werks/16416.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Make scp command work as site user on SLES 15
+
+key | value
+---------- | ---
+date | 2024-03-28T08:38:12+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+On SUSE Linux Enterprise Server 15 systems, the `scp` command could crash with
+```
+/usr/bin/ssh: symbol lookup error: /usr/bin/ssh: undefined symbol: EVP_KDF_CTX_free, version OPENSSL_1_1_1d lost connection
+```
+when executed as a site user. This has been fixed.
diff --git a/.werks/16420.md b/.werks/16420.md
new file mode 100644
index 00000000000..5ec0f265e6b
--- /dev/null
+++ b/.werks/16420.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# jolokia_info: Support Version 2.0
+
+key | value
+---------- | ---
+date | 2024-04-08T09:28:55+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Jolokia 2.0 was released on 2023-12-19. With this release, some changes to the Jolokia protocol were
+introduced. If a user installed this version of Jolokia, the check plugin `jolokia_info` showed
+`unknown` instead of the product information. After updating the agent plugin `mk_jolokia.py`, the
+correct information is shown again.
diff --git a/.werks/16421.md b/.werks/16421.md
new file mode 100644
index 00000000000..42cd87269c8
--- /dev/null
+++ b/.werks/16421.md
@@ -0,0 +1,23 @@
+[//]: # (werk v2)
+# Synthetic Monitoring: Adopt 'Plan' Naming Convention
+
+key | value
+---------- | ---
+date | 2024-04-09T11:39:57+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | checks
+level | 1
+compatible | no
+
+This Werk affects users, who have configured the rule `Robotmk Scheduler (Windows)` during the 2.3.0
+beta. The naming conventions of the check plugin `robotmk_suite` have been reworked.
+
+* Plugin was renamed from `robotmk_suite` to `robotmk_plan`.
+* Service label value changed from `cmk/rmk/type:suite` to `cmk/rmk/type:plan`.
+* The service item has changed its suffix from `Suite` to `Plan`.
+* The metric `robotmk_suite_runtime` was renamed to `robotmk_peak_attempt_runtime`.
+* The metric `robotmk_runtime_timeout_ratio` was renamed to `robotmk_runtime_limit_ratio`.
+
+Users affected by this change must run re-discovery.
diff --git a/.werks/16423.md b/.werks/16423.md
new file mode 100644
index 00000000000..57c49152bcd
--- /dev/null
+++ b/.werks/16423.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# mk_postgres.py: Add 'PG_BINARY_PATH' to Bakery Rule
+
+key | value
+---------- | ---
+date | 2024-04-16T15:15:06+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | agents
+level | 1
+compatible | yes
+
+[Werk #15619](https://checkmk.com/werk/15619) added support for reading `PG_BINARY_PATH` from
+`postgres.cfg`. This Werk allows setting this value with the agent bakery.
diff --git a/.werks/16424.md b/.werks/16424.md
new file mode 100644
index 00000000000..691ddfc8067
--- /dev/null
+++ b/.werks/16424.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# omd start redis: Don't Start If Process Already Running
+
+key | value
+---------- | ---
+date | 2024-04-18T16:06:48+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+With this Werk, `omd start` will no longer create a new redis process if redis is already started.
+This aligns the behaviour with the other services of a site.
diff --git a/.werks/16425.md b/.werks/16425.md
new file mode 100644
index 00000000000..418c3b39dfa
--- /dev/null
+++ b/.werks/16425.md
@@ -0,0 +1,23 @@
+[//]: # (werk v2)
+# APACHE_TCP_ADDR now allows IPv6 address
+
+key | value
+---------- | ---
+date | 2024-04-22T11:05:16+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+Previously, using the command `omd config set APACHE_TCP_ADDR` with an IPv6 address would fail. For
+example, the command
+```
+$ omd config set APACHE_TCP_ADDR "[::]"
+```
+resulted in `Invalid value for '[::]'. Does not match allowed pattern.`
+
+With this Werk, `omd` only checks that a `Listen` directive for Apache can be constructed from the
+`APACHE_TCP_ADDR`. In particular, an IPv6 address can now be configured. Such an address must be
+surrounded by square brackets.
diff --git a/.werks/16426.md b/.werks/16426.md
new file mode 100644
index 00000000000..32af1fe3323
--- /dev/null
+++ b/.werks/16426.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# NetApp Snapshot Reserve: Don't Restrict 'Levels for used configured reserve'
+
+key | value
+---------- | ---
+date | 2024-05-02T07:01:49+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously, the option `Levels for used configured reserve` of the rule `NetApp Snapshot Reserve` did not allow percent values beyond 101.0 %.
+Now any non-negative value can be set.
diff --git a/.werks/16444.md b/.werks/16444.md
new file mode 100644
index 00000000000..b0802012ef9
--- /dev/null
+++ b/.werks/16444.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# EC: Fix missing configuration files
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-22T09:32:56+00:00
+level | 1
+class | fix
+component | ec
+edition | cee
+
+With werk 16012, the event console rules are filtered and saved to the location
+var/mkeventd/active_config during activate changes.
+Unfortunately, other configuration files like global.mk were missing; they are
+now copied recursively, too.
\ No newline at end of file
diff --git a/.werks/16445.md b/.werks/16445.md
new file mode 100644
index 00000000000..621731911d2
--- /dev/null
+++ b/.werks/16445.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# HW/SW Inventory: Improve displaying retention information
+
+key | value
+---------- | ---
+date | 2024-01-24T11:35:45+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | inv
+level | 1
+compatible | yes
+
+This fixes missing retention information in views with HW/SW inventory columns. The
+retention icon is no longer displayed if the retention information is valid and not outdated.
diff --git a/.werks/16446.md b/.werks/16446.md
new file mode 100644
index 00000000000..77de2cec5fa
--- /dev/null
+++ b/.werks/16446.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Event console: Fix missing customer if configured within rule packs
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.3.0b1
+date | 2024-01-25T09:19:59+00:00
+level | 1
+class | fix
+component | wato
+edition | cme
+
+If the customer field of a rule pack was configured, it was gone again after "save".
+This bug was introduced with Checkmk 2.2.0 and has now been fixed.
diff --git a/.werks/16447.md b/.werks/16447.md
new file mode 100644
index 00000000000..da5e89f8625
--- /dev/null
+++ b/.werks/16447.md
@@ -0,0 +1,13 @@
+[//]: # (werk v2)
+# Fix inventory sync of subsequent hosts if a previous one has invalid data
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-05T11:32:34+00:00
+level | 1
+class | fix
+component | liveproxy
+edition | cee
+
diff --git a/.werks/16448.md b/.werks/16448.md
new file mode 100644
index 00000000000..9471e957461
--- /dev/null
+++ b/.werks/16448.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Fix inventory sync of hosts if remote data is updated at the time when liveproxyd sync starts or ends
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-02-05T11:36:15+00:00
+level | 1
+class | fix
+component | liveproxy
+edition | cee
+
+When remote inventory data of a host was written at the very moment a
+liveproxyd sync started or ended, the inventory sync could miss that data.
+This has been fixed.
diff --git a/.werks/16510.md b/.werks/16510.md
new file mode 100644
index 00000000000..36e424a1c6a
--- /dev/null
+++ b/.werks/16510.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Improve InfluxDB logging
+
+key | value
+---------- | ---
+date | 2024-02-16T09:58:19+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | core
+level | 1
+compatible | yes
+
+The InfluxDB logger now logs the line protocol on debug level.
diff --git a/.werks/16511.md b/.werks/16511.md
new file mode 100644
index 00000000000..569c35a5cf4
--- /dev/null
+++ b/.werks/16511.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Let cmcdump handle semicolons in plugin output
+
+key | value
+---------- | ---
+date | 2024-02-21T13:39:02+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | multisite
+level | 1
+compatible | yes
+
+cmcdump would not handle semicolons correctly, leading to
+garbled or incomplete output and spurious errors.
+
+This has been fixed by escaping semicolons in cmcdump
+and unescaping them in livestatus.
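+
+The idea can be sketched as follows (the concrete escape sequence here is an assumption
+for illustration, not necessarily the one cmcdump actually uses):
+
+```python
+def escape_semicolons(plugin_output: str) -> str:
+    """Applied by cmcdump before writing a line."""
+    return plugin_output.replace(";", "\\;")
+
+def unescape_semicolons(plugin_output: str) -> str:
+    """Applied by livestatus when reading the line back."""
+    return plugin_output.replace("\\;", ";")
+```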
diff --git a/.werks/16519.md b/.werks/16519.md
new file mode 100644
index 00000000000..de57b00d656
--- /dev/null
+++ b/.werks/16519.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# BI: creating rules should allow the same host/service label group format as the response
+
+key | value
+---------- | ---
+date | 2024-02-16T13:37:01+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+This werk makes it possible to create a BI rule via the REST-API using the same schema
+that is returned in a response. Previously, this was not possible due to changes
+in how our label_groups are configured.
diff --git a/.werks/16521.md b/.werks/16521.md
new file mode 100644
index 00000000000..48db221e3b6
--- /dev/null
+++ b/.werks/16521.md
@@ -0,0 +1,55 @@
+[//]: # (werk v2)
+# bi_rule: schema update to match the api docs
+
+key | value
+---------- | ---
+date | 2024-02-19T14:34:33+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | no
+
+
+The Open API schema previously did not reflect the response or the request
+schema format that was required to create or show BI rules. This werk
+addresses this issue.
+
+Previously, when creating or getting a BI rule, via the REST-API, the
+schema for host_label_groups or service_label_groups looked similar
+to the following:
+
+```
+ "host_label_groups": [
+ [
+ "and",
+ [
+ ["and", "mystery/switch:yes"],
+ ["or", "mystery/switch:no"],
+ ],
+ ],
+ ]
+```
+
+This did not match the schema documented in the Open API docs.
+To fix this, we have now changed the format to the following
+
+```
+ "host_label_groups": [
+ {
+ "operator": "and",
+ "label_group": [
+ {"operator": "and", "label": "mystery/switch:yes"},
+ {"operator": "or", "label": "mystery/switch:no"},
+ ],
+ },
+ ]
+```
+
+This also aligns with other endpoints that use our new
+host_label_groups or service_label_groups, for example the
+rules endpoints.
+
+As this is a breaking change, user scripts should be adjusted
+accordingly.
\ No newline at end of file
diff --git a/.werks/16522.md b/.werks/16522.md
new file mode 100644
index 00000000000..8474a257d2e
--- /dev/null
+++ b/.werks/16522.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# comment: site_id only required when deleting comments by id
+
+key | value
+---------- | ---
+date | 2024-02-19T17:24:03+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | no
+
+When deleting a comment via the REST-API, only the 'by_id' option
+leaves the comment selection ambiguous, while the options 'params'
+or 'query' already have the ability to pinpoint the comment
+requested. Therefore, the site_id is no longer required in these
+cases.
+
+
diff --git a/.werks/16523.md b/.werks/16523.md
new file mode 100644
index 00000000000..5062a9a693b
--- /dev/null
+++ b/.werks/16523.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# event_console: site_id only required when deleting ec events by_id
+
+key | value
+---------- | ---
+date | 2024-02-20T06:46:32+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | no
+
+When deleting an ec event via the REST-API, only the 'by_id' option
+leaves the event selection ambiguous, while the options 'params'
+or 'query' already have the ability to pinpoint the event
+requested. Therefore, the site_id is no longer required in these
+cases.
+
diff --git a/.werks/16524.md b/.werks/16524.md
new file mode 100644
index 00000000000..adba0ebdf94
--- /dev/null
+++ b/.werks/16524.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# apidocs: improve the request/response examples
+
+key | value
+---------- | ---
+date | 2024-02-21T11:07:55+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+Previously, the requests and urllib examples were hard-coded to show
+the same response samples and the same response status codes in
+the request samples for every endpoint.
+
+This werk addresses the issue by showing the correct possible
+status codes for each endpoint.
+
diff --git a/.werks/16525.md b/.werks/16525.md
new file mode 100644
index 00000000000..93aab071850
--- /dev/null
+++ b/.werks/16525.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# password: improve the openapi documentation
+
+key | value
+---------- | ---
+date | 2024-03-07T08:13:23+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+This werk improves the existing documentation for the password
+endpoints.
diff --git a/.werks/16526.md b/.werks/16526.md
new file mode 100644
index 00000000000..9b3736fe486
--- /dev/null
+++ b/.werks/16526.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# password: the response schema now matches what is returned
+
+key | value
+---------- | ---
+date | 2024-03-07T09:18:40+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+This werk addresses an issue with the REST-API password endpoint
+response. The response schema listed the title and the ident as
+fields that should be returned, but they were not returned as
+part of the password object. They have now been removed from the
+schema.
+
+Also, the members field was returning invalid information and
+hence has been removed.
diff --git a/.werks/16527.md b/.werks/16527.md
new file mode 100644
index 00000000000..87ffaffb8b6
--- /dev/null
+++ b/.werks/16527.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# ldap_connection: implementation of new ldap connection endpoints
+
+key | value
+---------- | ---
+date | 2024-03-07T09:59:38+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+This werk implements endpoints that provide a way to manage LDAP connections via the
+REST-API in the same way the user interface does. This includes creating, updating,
+deleting and listing LDAP connections.
+
+
+
diff --git a/.werks/16528.md b/.werks/16528.md
new file mode 100644
index 00000000000..2efc974d8fb
--- /dev/null
+++ b/.werks/16528.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# reports: remove site_id of other customers when rendering reports
+
+key | value
+---------- | ---
+date | 2024-04-18T07:21:06+00:00
+version | 2.4.0b1
+class | fix
+edition | cme
+component | reporting
+level | 1
+compatible | yes
+
+Previously, when you tried to create a report and a remote site was not reachable, the
+report would include the site_id of said site, even if it belonged to another customer.
+This werk addresses the issue by only showing errors that belong to the respective customer.
diff --git a/.werks/16529.md b/.werks/16529.md
new file mode 100644
index 00000000000..8a1f283f660
--- /dev/null
+++ b/.werks/16529.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# EC: fix a wrong message on a matched rule
+
+key | value
+---------- | ---
+date | 2024-02-23T08:14:30+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | ec
+level | 1
+compatible | yes
+
+Previously, the tooltip on a matched rule would say it is a cancelling rule.
+This message was misleading and was changed.
+
diff --git a/.werks/16530.md b/.werks/16530.md
new file mode 100644
index 00000000000..c68f1ee703e
--- /dev/null
+++ b/.werks/16530.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Make EC UPDATE command use a list of events
+
+key | value
+---------- | ---
+date | 2024-02-26T14:48:45+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | ec
+level | 1
+compatible | yes
+
+
+The Event Console UPDATE command now accepts a list of events instead of a single event.
+With this change, the GUI sends a list of events to be updated to the Event Console.
+This allows multiple events to be updated in a single command and avoids the situation where
+some events are updated and others are not.
diff --git a/.werks/16531.md b/.werks/16531.md
new file mode 100644
index 00000000000..5d649b2feb6
--- /dev/null
+++ b/.werks/16531.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# EC: Fix erroneous truncation of messages by the EC
+
+key | value
+---------- | ---
+date | 2024-03-27T15:41:36+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | ec
+level | 1
+compatible | yes
+
+Messages missing the end-of-line terminator were ignored by mkeventd in some cases.
+
+This has been fixed.
+
+
diff --git a/.werks/16532.md b/.werks/16532.md
new file mode 100644
index 00000000000..50ad57fdbc2
--- /dev/null
+++ b/.werks/16532.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Add syslog format TP-Link T1500G-8T to Event Console
+
+key | value
+---------- | ---
+date | 2024-04-03T07:16:07+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | ec
+level | 1
+compatible | yes
+
+The Event Console now supports the syslog format of the TP-Link T1500G-8T device.
diff --git a/.werks/16549.md b/.werks/16549.md
new file mode 100644
index 00000000000..a07cd451b48
--- /dev/null
+++ b/.werks/16549.md
@@ -0,0 +1,24 @@
+[//]: # (werk v2)
+# Agent updates failing on Solaris 10
+
+key | value
+---------- | ---
+date | 2024-03-01T08:43:58+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | agents
+level | 1
+compatible | no
+
+On some Solaris 10 systems, an agent update crashed with the error message
+```
+/var/sadm/pkg/check-mk-agent/install/postremove: syntax error at line 19: `(' unexpected
+pkgrm: ERROR: postremove script did not complete successfully
+```
+
+If you ran into this error, please delete the file
+`/var/sadm/pkg/check-mk-agent/install/postremove` on affected systems to make updates work again.
+
+Technical background:\
+The postremove script used the subshell evaluation syntax `$(...)`, which is incompatible with the standard `bin/sh` shell found on some Solaris 10 systems.
\ No newline at end of file
diff --git a/.werks/16550.md b/.werks/16550.md
new file mode 100644
index 00000000000..26fee6c55e2
--- /dev/null
+++ b/.werks/16550.md
@@ -0,0 +1,33 @@
+[//]: # (werk v2)
+# Linux remote alert handlers not running under non-root user
+
+key | value
+---------- | ---
+date | 2024-03-12T09:14:38+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | agents
+level | 1
+compatible | yes
+
+In the ruleset *Remote alert handlers (Linux)*, you have to specify
+the user under which the remote alert handler will be executed on the agent side.
+This user is set to *root* by default, but it's possible to choose
+an arbitrary user.
+
+However, when choosing a non-root user, the alert handlers previously
+failed to execute, because the handler files were deployed with root ownership
+and were not readable by others.
+To fix the problem, the ownership of the files is now changed to the specified
+user.
+
+Security note:
+In general, it's important that all internal files of the Checkmk
+agent have root ownership, as they might be read/executed by the Checkmk agent
+under root.
+However, this is not the case for remote alert handlers, as they
+always get executed under the specified user.
+As an additional security measure, the dispatcher on the agent side
+checks the ownership of installed remote alert handlers and refuses to execute
+non-root-owned handlers when called via SSH with root rights.
diff --git a/.werks/16551.md b/.werks/16551.md
new file mode 100644
index 00000000000..39021c0c405
--- /dev/null
+++ b/.werks/16551.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# Crash in agent bakery GUI after cloning a site
+
+key | value
+---------- | ---
+date | 2024-03-22T09:13:48+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | agents
+level | 1
+compatible | yes
+
+After cloning a site with `omd cp` or renaming it with `omd mv`, opening the agent bakery GUI
+resulted in a crash, showing a message like
+```
+Internal error: '/omd/sites/SITE/share/check_mk/agents/check_mk_agent.linux' is not in the subpath of '/omd/sites/SITE_COPY/share/check_mk/agents' OR one path is relative and the other is absolute.
+```
+
+As a workaround, you can delete the file `~/var/check_mk/wato/bakery_file_status.mk`
+on the site.
diff --git a/.werks/16552.md b/.werks/16552.md
new file mode 100644
index 00000000000..abc3526f229
--- /dev/null
+++ b/.werks/16552.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Crash on activate changes when re-registering agents
+
+key | value
+---------- | ---
+date | 2024-03-28T15:50:10+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+When re-registering agents, a call to activate changes could crash with an error message like
+```
+[Errno 2] No such file or directory: '/omd/sites//var/agent-receiver/received-outputs/
+```
+
diff --git a/.werks/16553.md b/.werks/16553.md
new file mode 100644
index 00000000000..87aac5f15b6
--- /dev/null
+++ b/.werks/16553.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# check_httpv2: Lookup password containing whitespace
+
+key | value
+---------- | ---
+date | 2024-04-16T13:05:49+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously, the active check "HTTP web service" failed to parse the site's
+password store when there was at least one stored password containing whitespace.
+
+This was observable when configuring "Authentication" with a password from password store,
+and resulted in a *CRIT* state and an error message like
+```
+error: invalid value 'http_pass' for '--auth-pw-pwstore ': Can't parse password store: Unexpected format.
+```
+in the service's summary.
diff --git a/.werks/16554.md b/.werks/16554.md
new file mode 100644
index 00000000000..c2c10b16a34
--- /dev/null
+++ b/.werks/16554.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# df: Wrong handling of lower levels for free space
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-19T12:35:12+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+This is a regression since Checkmk 2.2.0.
+
+When configuring the Service Monitoring Rule "Filesystems (used space and growth)",
+configured levels for free space were evaluated incorrectly.
+
+As a result, affected services erroneously showed up as *CRIT*.
+
+This happened because of incorrect rounding while evaluating the levels, and only affected
+small filesystems with a size below 1 MB.
\ No newline at end of file
diff --git a/.werks/16559.md b/.werks/16559.md
new file mode 100644
index 00000000000..c09572cf227
--- /dev/null
+++ b/.werks/16559.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Host properties: Make "Additional IPv4/6 addresses" depend on "IP address family" attribute
+
+key | value
+---------- | ---
+date | 2024-03-01T09:06:53+00:00
+version | 2.3.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+The attributes "IPv4/6 address" are toggled based on the "IP address family"
+attribute in the host properties dialog. This behavior is now also applied to
+the "Additional IPv4/6 addresses" attributes.
diff --git a/.werks/16579.md b/.werks/16579.md
new file mode 100644
index 00000000000..6a6947efb86
--- /dev/null
+++ b/.werks/16579.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# downtimes: Added 'modify downtimes' endpoint
+
+key | value
+---------- | ---
+date | 2024-03-01T10:46:38+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+With this Werk, a new endpoint for modifying downtimes is introduced.
+It is possible to modify the comment and the end timestamp as in the GUI.
+The selection of downtimes to modify can be done by id, by query, or by
+hostname and service description.
+
+Method: PUT
+URL: domain-types/downtime/actions/modify/invoke
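+
+A minimal sketch of a call to this endpoint (base URL and credentials are placeholders,
+and the payload field names are assumptions for illustration, not the documented schema):
+
+```python
+import requests
+
+resp = requests.put(
+    "http://localhost/mysite/check_mk/api/1.0/domain-types/downtime/actions/modify/invoke",
+    headers={"Authorization": "Bearer automation <secret>", "Accept": "application/json"},
+    # Hypothetical payload: select one downtime by its ID and change its comment.
+    json={"modify_type": "by_id", "downtime_id": 123, "comment": "extended maintenance"},
+)
+resp.raise_for_status()
+```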
diff --git a/.werks/16580.md b/.werks/16580.md
new file mode 100644
index 00000000000..ef326def692
--- /dev/null
+++ b/.werks/16580.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Dashboard: Fix event statistics not displaying information if no events found
+
+key | value
+---------- | ---
+date | 2024-03-20T10:34:20+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | multisite
+level | 1
+compatible | yes
+
+Prior to this Werk, if no events were found, the event statistics
+dashlet was drawn empty. This Werk corrects that behaviour: if
+no events are found, the graph is now shown with a count of zero.
diff --git a/.werks/16581.md b/.werks/16581.md
new file mode 100644
index 00000000000..e96fa0cff7e
--- /dev/null
+++ b/.werks/16581.md
@@ -0,0 +1,92 @@
+[//]: # (werk v2)
+# Automatic creation of labels based on OS information from the agent
+
+key | value
+---------- | ---
+date | 2024-03-08T12:39:02+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Checkmk automatically creates host labels based on OS data sent by the agents in the check\_mk section.
+
+* `cmk/os_type`: Value taken from the `OSType` line. No label is created if the line is not present.
+* `cmk/os_platform`: Value taken from the `OSPlatform` line. If the line is not present, the `AgentOS` value is used instead.
+* `cmk/os_name`: Value taken from the `OSName` line. No label is created if the line is not present.
+* `cmk/os_version`: Value taken from the `OSVersion` line. No label is created if the line is not present.
+
+The following list shows examples of the information that agents send for label creation. The source is noted in square brackets.
+
+* AIX
+ * AgentOS: aix
+ * OSType: unix
+ * OSName: AIX
+ * OSVersion: [oslevel -s]: 7100-05-04-1914
+
+* FreeBSD
+ * AgentOS: freebsd
+ * OSType: unix
+ * OSName: [/etc/os-release (NAME)]: FreeBSD
+ * OSVersion: [/etc/os-release (VERSION\_ID)]: 13.2
+
+* HP-UX
+ * AgentOS: hpux
+ * OSType: unix
+ * OSName: HP-UX
+ * OSVersion: [uname -r | cut -d' ' -f1]: B.11.31
+
+* Linux
+ * AgentOS: linux
+ * OSType: linux
+ * OSPlatform: [/etc/os-release (ID)]: ubuntu
+ * OSName: [/etc/os-release (NAME)]: Ubuntu
+ * OSVersion: [/etc/os-release (VERSION\_ID)]: 22.04
+
+* MacOS
+ * AgentOS: macosx
+ * OSType: macos
+ * OSName: [sw\_vers -productName]: macOS
+ * OSVersion: [sw\_vers -productVersion]: 13.0
+
+* NetBSD
+ * AgentOS: netbsd
+ * OSType: unix
+ * OSName: [uname -s]: NetBSD
+ * OSVersion: [uname -r]: 9.3
+
+* OpenBSD
+ * AgentOS: openbsd
+ * OSType: unix
+ * OSName: [uname -s]: OpenBSD
+ * OSVersion: [uname -r]: 7.4
+
+* OpenVMS
+ * AgentOS: openvms
+ * OSName: OpenVMS
+
+* OpenWRT
+ * AgentOS: openwrt
+ * OSType: linux
+ * OSName: [/etc/os-release (NAME)]: OpenWRT
+ * OSVersion: [/etc/os-release (VERSION\_ID)]: snapshot
+
+* Solaris
+ * AgentOS: solaris
+ * OSType: unix
+ * OSName: [/etc/os-release (NAME)]: Oracle Solaris
+ * OSVersion: [/etc/os-release (VERSION\_ID)]: 11.4
+
+* Windows
+ * AgentOS: windows
+ * OSType: windows
+ * OSName: [wmi]: Microsoft Windows 10 Pro
+ * OSVersion: [wmi]: 10.0.19045
+
+* z/OS
+ * AgentOS: z/OS
+ * OSType: z/os
+ * OSName: z/OS
+
diff --git a/.werks/16583.md b/.werks/16583.md
new file mode 100644
index 00000000000..a6f5345ee7e
--- /dev/null
+++ b/.werks/16583.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# folders: Fix folders with same title were not displayed
+
+key | value
+---------- | ---
+date | 2024-03-14T13:43:44+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Before this Werk, when subfolders were listed and some of them had the
+same title, only the last one of the group was displayed. This Werk
+fixes that problem and now all folders are displayed.
diff --git a/.werks/16584.md b/.werks/16584.md
new file mode 100644
index 00000000000..856b7856938
--- /dev/null
+++ b/.werks/16584.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# REST API: Fix httpie examples
+
+key | value
+---------- | ---
+date | 2024-03-20T12:51:32+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+Some httpie examples had a backslash at the end of the last line causing
+these examples to fail when executed. This Werk fixes the way REST API
+examples are generated to prevent backslashes at the end of the last line.
diff --git a/.werks/16585.md b/.werks/16585.md
new file mode 100644
index 00000000000..b2a9a8604cf
--- /dev/null
+++ b/.werks/16585.md
@@ -0,0 +1,25 @@
+[//]: # (werk v2)
+# mk_jolokia: Add compatibility for / in MBeans
+
+key | value
+---------- | ---
+date | 2024-03-21T16:10:04+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously, it was not possible to select an MBean whose name contains a path separator (`/`). This Werk implements the Jolokia path separator `!/`.
+
+An example is shown in the following fragment of the jolokia.cfg file:
+
+```
+...
+custom_vars = [('Catalina:J2EEApplication=none,J2EEServer=none,WebModule=*localhost!/docs,j2eeType=Servlet,name=default','requestCount','myspecialmetric',[],False,'number')]
+...
+```
+
+This will match the entry `myinstance,Catalina:J2EEApplication=none,J2EEServer=none,WebModule=//localhost/docs,j2eeType=Servlet,name=defaultmyspecialmetric.requestCount0number`
+
diff --git a/.werks/16586.md b/.werks/16586.md
new file mode 100644
index 00000000000..470e7c21a81
--- /dev/null
+++ b/.werks/16586.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Fix network scan fail to run due to invalid user
+
+key | value
+---------- | ---
+date | 2024-03-27T09:40:30+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Before this Werk, when running the scheduled network scan for a folder
+it failed with the message "An exception occured: Invalid username:
+'Network scan'". This Werk fixes that problem and now the network scan
+runs correctly with the configured user.
diff --git a/.werks/16587.md b/.werks/16587.md
new file mode 100644
index 00000000000..2fee6574841
--- /dev/null
+++ b/.werks/16587.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# tags: Prevent builtin auxiliary tags and host tag groups override
+
+key | value
+---------- | ---
+date | 2024-04-09T11:57:18+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Before this Werk it was possible to create auxiliary tags with the same
+name as a host tag group and vice versa. This Werk adds an additional
+check when creating such elements, both in WATO and in the REST API.
diff --git a/.werks/16588.md b/.werks/16588.md
new file mode 100644
index 00000000000..5dcfe66138f
--- /dev/null
+++ b/.werks/16588.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# time_periods: Fix timeperiod iCalendar (ics) import
+
+key | value
+---------- | ---
+date | 2024-04-17T09:19:29+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+This Werk solves a problem with importing events that take more than
+one day. Before this Werk, only the first day of the event was
+imported. Now all days involved are imported.
diff --git a/.werks/16589.md b/.werks/16589.md
new file mode 100644
index 00000000000..3ca6aec917f
--- /dev/null
+++ b/.werks/16589.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Monitor Redfish compatible management boards / BMCs via optional MKP
+
+key | value
+---------- | ---
+date | 2024-03-06T16:44:06+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+You can now monitor _Redfish_ compatible management boards / BMCs with Checkmk.
+To do so, please enable the natively shipped MKP redfish in _Setup --> Extension packages_ (in commercial editions of Checkmk) or via the command line tool `mkp` (in Checkmk Raw).
+This will enable a new datasource program under _Setup --> Other integrations --> Redfish Compatible Management Controller_.
+This is an experimental integration created by the Checkmk community (Andreas Döhler from Bechtle), which has already been tested in many environments.
+However, due to the diverse nature of server hardware, we plan to integrate it entirely for Checkmk 2.4.0, once we have gathered further feedback.
diff --git a/.werks/16599.md b/.werks/16599.md
new file mode 100644
index 00000000000..95ec654961c
--- /dev/null
+++ b/.werks/16599.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# jolokia metrics: restores 'default product' behavior
+
+key | value
+---------- | ---
+date | 2024-03-11T13:50:14+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously, the check plugin did not show any metrics if a product was not specified in the ruleset configuration.
+This werk restores the original behaviour of using the product reported in the info section of the agent output as the default.
diff --git a/.werks/16600.md b/.werks/16600.md
new file mode 100644
index 00000000000..77355a77180
--- /dev/null
+++ b/.werks/16600.md
@@ -0,0 +1,53 @@
+[//]: # (werk v2)
+# NetApp via WebAPI: deprecate agent and plugins
+
+key | value
+---------- | ---
+date | 2024-03-15T14:10:08+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
+As of Checkmk version 2.3.0b4, the special agent "NetApp via WebAPI" (_agent_netapp_) is deprecated
+and will be removed in a future version. This also affects the associated
+checks and inventory plugins:
+
+- NetApp Filer: Cluster-Mode CPU Utilization (_netapp_api_aggr_)
+- NetApp Ontap Filer: 7Mode Cluster Status (_netapp_api_cluster_)
+- NetApp API Connection (_netapp_api_connection_)
+- NetApp Filer: Cluster-Mode CPU Utilization (_netapp_api_cpu_)
+- NetApp Clustermode Filer: NVRAM Battery (_netapp_api_cpu_nvram_bat_)
+- NetApp Filer: 7Mode Global CPU Utilization (_netapp_api_cpu_utilization_)
+- NetApp Filer: Disk Summary (_netapp_api_disk_summary_)
+- NetApp Filer Clustermode: PSU Fault Info (_netapp_api_environment_)
+- NetApp Filer Clustermode: System Electrical Current (_netapp_api_environment_current_)
+- NetApp Filer Clustermode: Fan Fault Info (_netapp_api_environment_fan_faults_)
+- NetApp Filer Clustermode: System Fan Speed (_netapp_api_environment_fans_)
+- NetApp Filer Clustermode: System Temperature (_netapp_api_environment_temperature_)
+- NetApp Filer Clustermode: System Electrical Voltage (_netapp_api_environment_voltage_)
+- NetApp Filer: FANs (_netapp_api_fan_)
+- NetApp Filer: FANs Summary (_netapp_api_fan_summary_)
+- NetApp Cluster-Mode: State of Fibrechannel Interfaces (_netapp_api_fcp_)
+- NetApp Filer: State of Network Interfaces (_netapp_api_if_)
+- NetApp Filer: Version Info (_netapp_api_info_)
+- NetApp Filer: Used Space of LUNs (_netapp_api_luns_)
+- NetApp Filer: Ports (_netapp_api_ports_)
+- NetApp Filer 7Mode: Protocols (_netapp_api_protocol_)
+- NetApp Filer: Power Supplies (_netapp_api_psu_)
+- NetApp Filer: Power Supplies Summary (_netapp_api_psu_summary_)
+- NetApp Filer: Used Space of qtrees in Volumes (_netapp_api_qtree_quota_)
+- NetApp Filer: Used Space in Snapshots of Volumes (_netapp_api_snapshots_)
+- NetApp Filer: Snapvault/Snapmirror Lag-time (_netapp_api_snapvault_)
+- NetApp Filer: Overall System Health (_netapp_api_status_)
+- NetApp Filer: Systemtime (_netapp_api_systemtime_)
+- NetApp Filer: Temperature Sensors (_netapp_api_temp_)
+- NetApp Filer 7Mode: vFiler CPU Utilization (_netapp_api_vf_stats_)
+- NetApp Filer: vFiler Traffic (_netapp_api_vf_stats_traffic_)
+- NetApp Filer: vFiler Status (_netapp_api_vf_status_)
+- NetApp Filer: Used Space and Traffic of Volumes (_netapp_api_volumes_)
+- NetApp Filer: vServer Status (_netapp_api_vs_status_)
+- NetApp Filer: vServer Traffic Summary (_netapp_api_vs_traffic_)
diff --git a/.werks/16601.md b/.werks/16601.md
new file mode 100644
index 00000000000..ee67b7e8dd5
--- /dev/null
+++ b/.werks/16601.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# mcafee_webgateway: base OID changed
+
+key | value
+---------- | ---
+date | 2024-03-19T07:49:40+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The McAfee Web Gateway has been rebranded to Skyhigh Secure Web Gateway with its release 12.2.2.
+The base OID has been changed from `.1.3.6.1.4.1.1230*` to `.1.3.6.1.4.1.59732*`.
+
+Where possible, the "McAfee" string has been removed in favor of more generic terms.
+The old plugin names and ruleset names have been kept for compatibility/history-keeping reasons.
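+
+If you want to check whether a rebranded device already responds under the new base OID, a plain `snmpwalk` of the new enterprise subtree can help. Host name and community string below are placeholders:
+
+```
+snmpwalk -v2c -c public gateway.example.com .1.3.6.1.4.1.59732
+```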
diff --git a/.werks/16602.md b/.werks/16602.md
new file mode 100644
index 00000000000..6e0adfc6ef7
--- /dev/null
+++ b/.werks/16602.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# agent_gcp: reduced query rate
+
+key | value
+---------- | ---
+date | 2024-03-25T12:43:27+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+An exception was sometimes raised because queries were sent to the GCP API too frequently.
+The agent now queries the GCP API only once per metric.
diff --git a/.werks/16603.md b/.werks/16603.md
new file mode 100644
index 00000000000..1fbde7e2629
--- /dev/null
+++ b/.werks/16603.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# mk_postgres: Ensure coherent string encoding
+
+key | value
+---------- | ---
+date | 2024-03-27T08:43:36+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Postgres.cfg was always read as decoded Unicode text.
+This werk ensures that the text read from it is correctly converted to a byte string.
diff --git a/.werks/16604.md b/.werks/16604.md
new file mode 100644
index 00000000000..d0eec12460b
--- /dev/null
+++ b/.werks/16604.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# jolokia_metrics: fix monitoring of tomcat requestCount metric
+
+key | value
+---------- | ---
+date | 2024-03-27T17:14:30+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The `requestCount` metric for Tomcat servers was converted by the plugin into a rate of requests per second,
+but the value obtained by the special agent is already a rate.
+
+It is therefore sufficient to monitor the value without further processing.
diff --git a/.werks/16605.md b/.werks/16605.md
new file mode 100644
index 00000000000..05e073c3139
--- /dev/null
+++ b/.werks/16605.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# check_wmi_webservices: fix CurrentConnections monitoring
+
+key | value
+---------- | ---
+date | 2024-04-02T06:44:07+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The CurrentConnections metric was calculated "per second".
+We now directly show the number of connections returned by the service.
diff --git a/.werks/16606.md b/.werks/16606.md
new file mode 100644
index 00000000000..a663fca33bb
--- /dev/null
+++ b/.werks/16606.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# agent_netapp_ontap: handle shelves without elements
+
+key | value
+---------- | ---
+date | 2024-04-10T12:26:35+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The agent did not handle the cases where shelves had no fans, temperature sensors or PSUs.
+This led to crashes during the agent execution.
+
+With this werk we now correctly handle these scenarios and the corresponding services are not discovered if no items are found.
\ No newline at end of file
diff --git a/.werks/16607.md b/.werks/16607.md
new file mode 100644
index 00000000000..6a4bb8f56f3
--- /dev/null
+++ b/.werks/16607.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Handle the uptime of Docker containers across time zones
+
+key | value
+---------- | ---
+date | 2024-04-12T09:35:03+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+As of version 2.3, the uptime monitoring service for Docker containers could crash if the container's start time was later than the Checkmk site's current time. This resulted in a negative uptime calculation error (ValueError: "Cannot render negative timespan").
+This could occur in situations where the host system and the container had different configured timezones.
+
+We now correctly handle all timestamps, ensuring accurate uptime calculations regardless of timezone configurations.
diff --git a/.werks/16608.md b/.werks/16608.md
new file mode 100644
index 00000000000..6fe8a5d18f4
--- /dev/null
+++ b/.werks/16608.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# netapp_ontap_volumes: also monitor volumes without counters data
+
+key | value
+---------- | ---
+date | 2024-04-17T15:06:17+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+In the NetApp Ontap plugin _NetApp Filer: Used Space and Traffic of Volumes_, volumes were not discovered
+if the counter information for these volumes was not present.
+
+With this werk we now handle this condition by discovering and monitoring these volumes.
diff --git a/.werks/16609.md b/.werks/16609.md
new file mode 100644
index 00000000000..356e68e5546
--- /dev/null
+++ b/.werks/16609.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# Publish permission handling for various components
+
+key | value
+---------- | ---
+compatible | no
+version | 2.4.0b1
+date | 2024-03-14T09:54:25+00:00
+level | 1
+class | fix
+component | multisite
+edition | cre
+
+Werk 13498 introduced the possibility to limit publish permissions
+to certain contact groups, sites etc. Still, the general permission
+(e.g. "Publish views" for views) was needed to see the published items.
+With Werk 16320 this has been fixed for dashboards, views and reports.
+
+This werk fixes the behavior for the remaining components (Bookmarks, Graphs,
+SLAs and Reports).
+
+Note: Please check the respective publish configuration.
\ No newline at end of file
diff --git a/.werks/16611.md b/.werks/16611.md
new file mode 100644
index 00000000000..29c431a5aff
--- /dev/null
+++ b/.werks/16611.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# notifications: Crash on config page when rule exists with missing permissions
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-27T09:39:50+00:00
+level | 1
+class | fix
+component | notifications
+edition | cre
+
+When a user tried to access the notification setup page while
+a rule was listed for which the user did not have access rights,
+the page would crash, making any changes impossible.
+
+With this werk, the table generation is fixed and the page will
+no longer crash the GUI.
\ No newline at end of file
diff --git a/.werks/16612.md b/.werks/16612.md
new file mode 100644
index 00000000000..11a876bc4a8
--- /dev/null
+++ b/.werks/16612.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# notifications: Fix plugin permissions not loaded automatically
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-27T14:22:37+00:00
+level | 1
+class | fix
+component | notifications
+edition | cre
+
+Previously, it was possible for users with the "Notification configuration"
+permission to edit notification rules even if they did not have the permission
+to the plugin that was being used in the rule. When such a user edited
+such a rule, they were able to overwrite the notification plugin with
+any plugin they were allowed to use.
+
+This werk stops users from editing rules that use notification plugins
+they don't have access to.
\ No newline at end of file
diff --git a/.werks/16614.md b/.werks/16614.md
new file mode 100644
index 00000000000..57237033e26
--- /dev/null
+++ b/.werks/16614.md
@@ -0,0 +1,30 @@
+[//]: # (werk v2)
+# Ignore CAs with negative serial numbers
+
+key | value
+---------- | ---
+date | 2024-03-11T10:43:27+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | core
+level | 1
+compatible | no
+
+When Checkmk is configured to *Trust system wide configured CAs* the system CA store is traversed and the certificates are added to the trusted CAs.
+RFC 5280 requires certificate serial numbers to be positive. Unfortunately, there are CA certificates that predate this RFC, and they might contain negative serial numbers.
+One we encountered several times while testing is:
+
+ commonName = EC-ACC
+ organizationalUnitName = Jerarquia Entitats de Certificacio Catalanes
+ organizationalUnitName = Vegeu https://www.catcert.net/verarrel (c)03
+ organizationalUnitName = Serveis Publics de Certificacio
+ organizationName = Agencia Catalana de Certificacio (NIF Q-0801176-I)
+ countryName = ES
+
+The underlying library we use for handling certificates has announced that it will no longer support certificates with negative serial numbers in one of its next versions. Therefore, we decided to ignore certificates with negative serial numbers so that we can update this library during the lifetime of this Checkmk release without changing this behaviour.
+
+Since the mentioned `EC-ACC` certificate was encountered multiple times during testing and is not widely used, the fact that this certificate was encountered and ignored is NOT logged.
+
+If you use certificates issued by CA certificates with negative serial numbers, you can add them manually to your list of trusted certificates via the UI.
+This might cause warnings in console output and in logfiles and may stop working in the future.
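+
+To check whether a certificate you want to trust manually is affected, you can inspect its serial number with OpenSSL (the file name is a placeholder; how a negative serial is rendered depends on your OpenSSL version):
+
+```
+openssl x509 -in ca-certificate.pem -noout -text | grep -A1 'Serial Number'
+```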
diff --git a/.werks/16615.md b/.werks/16615.md
new file mode 100644
index 00000000000..5583b28e941
--- /dev/null
+++ b/.werks/16615.md
@@ -0,0 +1,46 @@
+[//]: # (werk v2)
+# Remove websphere_mq plugin
+
+key | value
+---------- | ---
+date | 2024-03-11T11:09:48+00:00
+version | 2.4.0b1
+class | security
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+With this Werk the `websphere_mq` plugin is removed for security reasons.
+
+In this plugin the output of `ps` is used to determine an argument for
+`runmqsc`. This meant that anybody who can launch processes with an arbitrary
+command line could manipulate one argument to `runmqsc`.
+
+The plugin was already superseded by the agent plugin `ibm_mq` and deprecated with Werk [10752](https://checkmk.com/werk/10752) and version 2.0.0.
+
+Since this plugin was already deprecated and was not configurable via the
+*agent bakery*, we assumed that it is not frequently used. Therefore we
+decided not to fix the issue but to proceed with the removal.
+
+We found this vulnerability internally.
+
+__Affected versions__:
+
+* 2.3.0
+* 2.2.0
+* 2.1.0
+* 2.0.0
+
+__Mitigations__:
+
+Migrate to the `ibm_mq` plugin.
+
+__Vulnerability Management__:
+
+We have rated the issue with a CVSS Score of 6.5 (Medium) with the following CVSS vector: `CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:C/C:N/I:H/A:N`.
+We assigned CVE-2024-3367 to this vulnerability.
+
+__Changes__:
+
+The plugin was removed.
diff --git a/.werks/16616.md b/.werks/16616.md
new file mode 100644
index 00000000000..0841d3fe8b2
--- /dev/null
+++ b/.werks/16616.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Terminate all GUI sessions during update
+
+key | value
+---------- | ---
+date | 2024-03-15T15:16:28+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+By default a GUI session is terminated after 90 minutes of inactivity (configurable via the global setting **Session management**).
+A user could therefore start a session, e.g. begin configuring a complex check, and while doing something else (e.g. researching some options) the site could be updated.
+If the user does not interact with the site in that period, they won't notice that the site was updated.
+Since updates might change some behaviour, the session might not work as intended.
+
+Therefore, all sessions are now terminated during an update.
+This will cause users to re-authenticate after a site update.
diff --git a/.werks/16617.md b/.werks/16617.md
new file mode 100644
index 00000000000..cb93243b85f
--- /dev/null
+++ b/.werks/16617.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Use session specific key for ValueSpec encryption
+
+key | value
+---------- | ---
+date | 2024-03-15T15:28:00+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+When a user edits a configuration, e.g. for a special agent with an explicit password, the complete configuration is transferred to the user.
+To not reveal the password in cleartext, this field is encrypted.
+
+Prior to this Werk, the key for that encryption was shared amongst all users (a salt was used, though).
+With this Werk every user session now has a secret dedicated to this encryption, so the key is rotated often and not shared with other users.
diff --git a/.werks/16618.md b/.werks/16618.md
new file mode 100644
index 00000000000..6550ee18e4f
--- /dev/null
+++ b/.werks/16618.md
@@ -0,0 +1,31 @@
+[//]: # (werk v2)
+# Fix XSS in graph rendering
+
+key | value
+---------- | ---
+date | 2024-04-04T14:24:50+00:00
+version | 2.4.0b1
+class | security
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Prior to this Werk, a service name with HTML tags led to cross-site scripting in the graph rendering.
+
+We found this vulnerability internally.
+
+**Affected Versions**:
+
+Only 2.3.0 is affected, older versions are NOT affected.
+
+**Vulnerability Management**:
+
+We have rated the issue with a CVSS Score of 4.6 (Medium) with the following CVSS vector:
+`CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:U/C:L/I:L/A:N`.
+We assigned CVE-2024-2380 to this vulnerability.
+
+**Changes**:
+
+This Werk changes the encoding engine to use our customized JSON encoder.
+
diff --git a/.werks/16619.md b/.werks/16619.md
new file mode 100644
index 00000000000..276966ff9b2
--- /dev/null
+++ b/.werks/16619.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Ruleset API: Ruleset validation
+
+key | value
+---------- | ---
+date | 2024-03-11T12:46:16+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+Rulespec/ruleset names now have to be valid [python identifiers](https://docs.python.org/3/reference/lexical_analysis.html#identifiers) that are not [reserved keywords](https://docs.python.org/3/reference/lexical_analysis.html#keywords). This also applies to choice identifiers in FormSpecs.
+
+The validation occurs during instantiation and will raise a `ValueError` when a violation is found.
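+
+The rule matches Python's own notion of an identifier. A minimal sketch of the check (simplified, not the actual implementation):
+
+```
+import keyword
+
+def is_valid_ruleset_name(name: str) -> bool:
+    # must be a valid Python identifier and not a reserved keyword
+    return name.isidentifier() and not keyword.iskeyword(name)
+
+assert is_valid_ruleset_name("my_ruleset")
+assert not is_valid_ruleset_name("my-ruleset")  # "-" is not allowed
+assert not is_valid_ruleset_name("class")       # reserved keyword
+```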
diff --git a/.werks/16621.md b/.werks/16621.md
new file mode 100644
index 00000000000..329979f8913
--- /dev/null
+++ b/.werks/16621.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Ruleset API: Remove TupleDoNotUseWillbeRemoved from API
+
+key | value
+---------- | ---
+date | 2024-03-11T13:47:41+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | no
+
+As the name indicates, the previous `Tuple` FormSpec should no longer be used, and with this Werk it is no longer available in the API.
+Use `SimpleLevels` or `Levels` instead if you want to configure levels or a `Dictionary` otherwise.
+
diff --git a/.werks/16622.md b/.werks/16622.md
new file mode 100644
index 00000000000..64afba9f057
--- /dev/null
+++ b/.werks/16622.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# HW/SW Inventory: Improve filtering for number of sites for Checkmk version
+
+key | value
+---------- | ---
+date | 2024-03-11T14:01:12+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | inv
+level | 1
+compatible | no
+
+Previously a regex was applied to the Checkmk versions -> #Sites inventory column to filter on the number of sites.
+This has been changed to instead filter on a range of numbers to better match the column type. For example, a previous filter of `[0-9]|10` now translates to an explicit range from `0` to `10`.
+If you currently have any filters for the number of sites configured, you need to reconfigure them.
diff --git a/.werks/16623.md b/.werks/16623.md
new file mode 100644
index 00000000000..c3d9faedc54
--- /dev/null
+++ b/.werks/16623.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# HW/SW Inventory: Fix crash when filtering for number of sites for Checkmk version
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-03-11T14:37:28+00:00
+level | 1
+class | fix
+component | inv
+edition | cre
+
+When filtering the Checkmk versions -> #Sites inventory column, a crash occurs with
+```
+TypeError (expected string or bytes-like object)
+...
+File "/omd/sites/oldstable/lib/python3/cmk/gui/query_filters.py", line 510, in
+ return lambda row: bool(regex.search(row.get(column, "")))
+```
+
+This crash has been fixed.
\ No newline at end of file
diff --git a/.werks/16626.md b/.werks/16626.md
new file mode 100644
index 00000000000..4dd7aa8ac6c
--- /dev/null
+++ b/.werks/16626.md
@@ -0,0 +1,25 @@
+[//]: # (werk v2)
+# Trigger openapi-spec generation job during start, restart and reload
+
+key | value
+---------- | ---
+date | 2024-03-20T13:23:59+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | omd
+level | 1
+compatible | yes
+
+Werk 16501 introduced a command to start a background job which
+triggers the regeneration of the API specification. This werk now
+also executes this command during `omd start`, `omd restart`,
+and `omd reload`. With this mechanism the execution during `cmk-update-config`
+is no longer needed.
+
+Based on Werk 15724 the specification is now updated in these situations:
+
+* Create the initial spec after a site has been created
+* Update the spec after a site has been copied, restored or renamed
+* Update the spec when the apache process is started, restarted or reloaded
+
diff --git a/.werks/16627.md b/.werks/16627.md
new file mode 100644
index 00000000000..0d39388d7dd
--- /dev/null
+++ b/.werks/16627.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# kube_persistent_volume_claim: resolve KeyError crash when Volume parameters are configured
+
+key | value
+---------- | ---
+date | 2024-04-03T10:30:30+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Before this update, configuring 'Volume parameters' in the PVC check plugin led to a crash. This
+issue arose because the plugin was not designed to process trend size-related levels, which,
+however, were anticipated by the general filesystem function handler shared among filesystem-related
+check plugins. To resolve this, the general function has been modified to bypass trend computation
+when a trend rule is not set, a scenario always applicable to the PVC check plugin.
diff --git a/.werks/16628.md b/.werks/16628.md
new file mode 100644
index 00000000000..dbb6f55fbd9
--- /dev/null
+++ b/.werks/16628.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Enforcing password change redirect with 2FA enabled
+
+key | value
+---------- | ---
+date | 2024-04-04T14:47:34+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Before this werk, the site failed to redirect users to the
+"Change Password" page following a successful login when
+two-factor authentication (2FA) was enabled. This werk resolves
+the issue by ensuring that, after completing 2FA, users are now
+redirected correctly.
diff --git a/.werks/16629.md b/.werks/16629.md
new file mode 100644
index 00000000000..23dcb57fcf0
--- /dev/null
+++ b/.werks/16629.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Virtual host tree links work for more than three host tag groups
+
+key | value
+---------- | ---
+date | 2024-03-15T09:24:42+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | multisite
+level | 1
+compatible | yes
+
+A virtual host tree (Setup > General > Global settings > User interface > Virtual host trees) can be configured with more than three host tag tree levels. Yet, the corresponding views that are linked to from the sidebar element "Virtual host trees" were not able to display more than three rows in the "Host tags" filter and thus only filtered for the first three.
+This is fixed. A virtual host tree link as described above now leads to a properly filtered view with all the given host tag filters shown in the filter popup.
+
diff --git a/.werks/16630.md b/.werks/16630.md
new file mode 100644
index 00000000000..15d826d2aa3
--- /dev/null
+++ b/.werks/16630.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Prevent check_mail crash for "Move to subfolder" option
+
+key | value
+---------- | ---
+date | 2024-03-18T09:19:03+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The active check check_mail can be configured to move mail messages from the inbox to a subfolder via the options "Forward mails as events to Event Console" > "Cleanup messages" > "Move to subfolder".
+
+For IMAP, the copying of mails crashed when there were no mails available in the inbox.
+This is fixed by skipping the copy command in case there are no mails.
diff --git a/.werks/16631.md b/.werks/16631.md
new file mode 100644
index 00000000000..7e6d6e223ca
--- /dev/null
+++ b/.werks/16631.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# check_mailboxes: Fixed handling of error "Not allowed to access Non IPM folder."
+
+key | value
+---------- | ---
+date | 2024-04-12T08:34:09+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | packages
+level | 1
+compatible | yes
+
+Due to a recent change in Microsoft 365, the access to Exchange mailbox folders via the active check `check_mailboxes` could fail with an error message like this:
+```
+Unhandled exception: ErrorAccessDenied('Not allowed to access Non IPM folder.')
+```
+
+With this werk we update the version of the package `exchangelib` to v5.2.1, fixing the respective error handling.
diff --git a/.werks/16632.md b/.werks/16632.md
new file mode 100644
index 00000000000..b728c4ae236
--- /dev/null
+++ b/.werks/16632.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# Reporting: Add time range options for first/last work day of this/last month
+
+key | value
+---------- | ---
+date | 2024-04-15T09:44:50+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | reporting
+level | 1
+compatible | yes
+
+When creating/editing a report users can now select the following additional options within the report property "Default time range":
+
+* "First work day of this month"
+* "Last work day of this month"
+* "First work day of last month"
+* "Last work day of last month"
+
+Note that these time ranges do not consider holidays.
diff --git a/.werks/16633.md b/.werks/16633.md
new file mode 100644
index 00000000000..6768b20c4b3
--- /dev/null
+++ b/.werks/16633.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Logfile pattern analyzer: Fix crash for first rule without regex pattern
+
+key | value
+---------- | ---
+date | 2024-04-17T08:10:14+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | multisite
+level | 1
+compatible | yes
+
+The "Logfile pattern analyzer" page crashed when the first "Logfile pattern" rule in place did not hold a regex pattern and a later rule did hold a regex pattern.
+The rendering of the table of rules would crash with
+```
+Internal error: list index out of range
+```
+
+This is fixed and all rules are rendered as expected.
diff --git a/.werks/16634.md b/.werks/16634.md
new file mode 100644
index 00000000000..0ef8c91c906
--- /dev/null
+++ b/.werks/16634.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Dashboard element "Site overview": enable larger hexagon rendering
+
+key | value
+---------- | ---
+date | 2024-04-24T06:56:55+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | multisite
+level | 1
+compatible | yes
+
+In the properties of the dashboard element "Site overview" users can now set the "Maximum hexagon size" to either "Default" or "Large". Rendering large hexagons can be useful when displaying the dashboard element on a large screen, as e.g. on a wall monitor.
diff --git a/.werks/16639.md b/.werks/16639.md
new file mode 100644
index 00000000000..717b9d4a8ee
--- /dev/null
+++ b/.werks/16639.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Ruleset API: Help text for SimpleLevels, Levels
+
+key | value
+---------- | ---
+date | 2024-03-13T09:20:05+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously the help text configured for SimpleLevels and Levels was not shown in the UI; this is now fixed.
+
diff --git a/.werks/16640.md b/.werks/16640.md
new file mode 100644
index 00000000000..dc89f68d9fa
--- /dev/null
+++ b/.werks/16640.md
@@ -0,0 +1,26 @@
+[//]: # (werk v2)
+# Ruleset API: Improve custom validation
+
+key | value
+---------- | ---
+date | 2024-03-14T14:49:51+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+To better support the combination of different validation functions, FormSpecs now expect a sequence of validation functions instead of just one for `custom_validate`.
+
+The validation of empty inputs is now handled in the new validator `LengthInRange` instead of `DisallowEmpty`.
+If you used
+```
+custom_validate=DisallowEmpty()
+```
+before, use
+```
+custom_validate=LengthInRange(min_value=1)
+```
+now.
+For consistency, `InRange` is renamed to `NumberInRange`.
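+
+Since `custom_validate` now takes a sequence, validators can be combined. A sketch, assuming a second validator such as `MatchRegex` from the same module:
+
+```
+custom_validate=(
+    LengthInRange(min_value=1),
+    MatchRegex(regex=r"^[a-zA-Z0-9_]+$"),
+)
+```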
diff --git a/.werks/16641.md b/.werks/16641.md
new file mode 100644
index 00000000000..e3a0450560f
--- /dev/null
+++ b/.werks/16641.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Ruleset API: Allow configuration of String field size setting
+
+key | value
+---------- | ---
+date | 2024-03-20T15:34:24+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This only affects plugin developers.
+
+The size of the String input field can now be adapted to the specific purpose of the field: The developer can select between different size settings.
diff --git a/.werks/16642.md b/.werks/16642.md
new file mode 100644
index 00000000000..fd9f552b073
--- /dev/null
+++ b/.werks/16642.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Ruleset API: Datamodel changes for Password FormSpec
+
+key | value
+---------- | ---
+date | 2024-03-21T12:35:39+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This only affects plugin developers.
+The datamodel for the `Password` FormSpec is changed.
+Use the `migrate_to_password` migration function to update your stored configurations to the newer datamodel.
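+
+A minimal sketch of wiring up the migration, assuming the form spec's `migrate` argument:
+
+```
+Password(
+    title=Title("Password"),
+    migrate=migrate_to_password,
+)
+```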
diff --git a/.werks/16643.md b/.werks/16643.md
new file mode 100644
index 00000000000..3632bee70ff
--- /dev/null
+++ b/.werks/16643.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Licensing: Introduce grace period for unlicensed state
+
+key | value
+---------- | ---
+date | 2024-03-27T15:55:26+00:00
+version | 2.4.0b1
+class | fix
+edition | cce
+component | wato
+level | 1
+compatible | yes
+
+To lessen the impact of a setup becoming unlicensed, there is now a 7-day grace period before the unlicensed state takes effect.
+During this time only warnings will be shown, so that users have the opportunity to fix the licensing issues.
+
diff --git a/.werks/16644.md b/.werks/16644.md
new file mode 100644
index 00000000000..21b67dea3ca
--- /dev/null
+++ b/.werks/16644.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Check SQL: Allow to configure port via custom macros
+
+key | value
+---------- | ---
+date | 2024-03-15T13:24:34+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+In the ruleset _"Check SQL Database"_ you can now configure the port using
+macros.
diff --git a/.werks/16645.md b/.werks/16645.md
new file mode 100644
index 00000000000..6fc7095ba9d
--- /dev/null
+++ b/.werks/16645.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Effective parameters of Check_MK Discovery
+
+key | value
+---------- | ---
+date | 2024-03-16T23:26:40+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+In the page "Effective parameters of \ / Check_MK Discovery" the parameters of the periodic service discovery where not shown.
+
+In addition some values have been shown that are not relevant to this service.
diff --git a/.werks/16646.md b/.werks/16646.md
new file mode 100644
index 00000000000..0c377086d0b
--- /dev/null
+++ b/.werks/16646.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Nagios / CRE: Error precompiling checks: Cannot find check file
+
+key | value
+---------- | ---
+date | 2024-03-18T20:52:03+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This fixes a regression introduced in Checkmk 2.3.0b1 for Nagios / CRE users.
+The reported error during config creation was
+"Error precompiling checks for host \: Cannot find check file needed for \".
diff --git a/.werks/16647.md b/.werks/16647.md
new file mode 100644
index 00000000000..5a221aeaa85
--- /dev/null
+++ b/.werks/16647.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Repeated scaling during migration to new Ruleset API
+
+key | value
+---------- | ---
+date | 2024-03-22T09:19:58+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This only affects some beta users that tested the new ruleset API for plugin
+development.
+
+When migrating and scaling fixed levels to the new `Levels` form spec,
+values were scaled over and over again.
diff --git a/.werks/16648.md b/.werks/16648.md
new file mode 100644
index 00000000000..5e30d61d01d
--- /dev/null
+++ b/.werks/16648.md
@@ -0,0 +1,13 @@
+[//]: # (werk v2)
+# Hitachi HUS DKU / Hitachi HUS DKC: Hardware State: Discover HM900 devices
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-02T10:14:01+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
diff --git a/.werks/16649.md b/.werks/16649.md
new file mode 100644
index 00000000000..f34dccdbbc6
--- /dev/null
+++ b/.werks/16649.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Server side calls API: respect libexec folder with Nagios core
+
+key | value
+---------- | ---
+date | 2024-04-03T05:33:49+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+When running the Nagios core, the `libexec` folder for active
+checks was not considered when creating the core configuration.
diff --git a/.werks/16650.md b/.werks/16650.md
new file mode 100644
index 00000000000..860896df834
--- /dev/null
+++ b/.werks/16650.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Extension manager crashes if local 'cmk_addon' folder is missing
+
+key | value
+---------- | ---
+date | 2024-04-03T21:37:00+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This regression only affected the 2.3.0b\* versions.
diff --git a/.werks/16651.md b/.werks/16651.md
new file mode 100644
index 00000000000..c527a371147
--- /dev/null
+++ b/.werks/16651.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# "TSM - IBM Tivoli Storage Manager (Linux, Unix)": Agent plugin rules are merged
+
+key | value
+---------- | ---
+date | 2024-04-04T13:12:06+00:00
+version | 2.4.0b1
+class | feature
+edition | cee
+component | agents
+level | 1
+compatible | yes
+
+Multiple matching rules of the bakery configuration ruleset "TSM - IBM Tivoli Storage Manager (Linux, Unix)" will now be merged to compute the set of effective parameters.
+Previously only the first matching rule was applied.
+
+During the migration to Checkmk 2.4 existing rules will be "filled", such that the outcome of the rule evaluation will not change on existing configurations.
diff --git a/.werks/16652.md b/.werks/16652.md
new file mode 100644
index 00000000000..d480e15b357
--- /dev/null
+++ b/.werks/16652.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# NVIDIA Graphics Card: Fix parsing error on new data format
+
+key | value
+---------- | ---
+date | 2024-04-05T10:17:08+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16653.md b/.werks/16653.md
new file mode 100644
index 00000000000..b42696d18c2
--- /dev/null
+++ b/.werks/16653.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Windows DHCP: Fix rendering glitch in 2.3 beta
+
+key | value
+---------- | ---
+date | 2024-04-05T11:14:49+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16654.md b/.werks/16654.md
new file mode 100644
index 00000000000..b617af7b633
--- /dev/null
+++ b/.werks/16654.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# Read-only internal folder name when editing folders
+
+key | value
+---------- | ---
+date | 2024-03-20T08:45:21+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+When disabling the "Hide internal folder names in Setup" setting, the internal
+name must be set by the user when creating folders. Previously, the field was
+still modifiable when editing the folder properties; however, no changes to it
+were saved.
+
+This werk now marks the field as read-only when editing the folder properties.
diff --git a/.werks/16655.md b/.werks/16655.md
new file mode 100644
index 00000000000..528eb5d2eb9
--- /dev/null
+++ b/.werks/16655.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Bulk host rename: loosen requirements for adding prefixes and suffixes
+
+key | value
+---------- | ---
+date | 2024-03-25T07:55:46+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+When bulk renaming hosts, the "Add Prefix" and "Add Suffix" options only allowed
+valid hostnames. This requirement has now been relaxed, so that only the resulting
+hostname is validated.
diff --git a/.werks/16656.md b/.werks/16656.md
new file mode 100644
index 00000000000..3fa7b12cfe1
--- /dev/null
+++ b/.werks/16656.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# REST API: improve validation of host names
+
+key | value
+---------- | ---
+date | 2024-03-26T09:56:52+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+This werk improves upon the validation of host names in the REST API, so that it
+is in line with the GUI. In some cases the REST API will still be more
+restrictive, but invalid host names will no longer be allowed.
diff --git a/.werks/16657.md b/.werks/16657.md
new file mode 100644
index 00000000000..ecf74bba6c3
--- /dev/null
+++ b/.werks/16657.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# REST API: add columns parameter to service status endpoint
+
+key | value
+---------- | ---
+date | 2024-03-26T15:44:17+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+This werk adds the columns parameter to the single service status endpoint.
+This parameter already exists for the other service status endpoints, with the
+same behaviour. By default, the same columns as before are returned.
diff --git a/.werks/16658.md b/.werks/16658.md
new file mode 100644
index 00000000000..9e7d8cde43d
--- /dev/null
+++ b/.werks/16658.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# REST API: add mode parameter to host tag group delete endpoint
+
+key | value
+---------- | ---
+date | 2024-04-09T12:01:49+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+This werk adds the `mode` query parameter to the delete endpoint for host tag
+groups. It is an alternative to the existing `repair` parameter, with the
+following options (see the example below):
+* `mode=abort` default behaviour, cancel deletion if tag group is in use
+* `mode=delete` same as `repair=True`, will delete affected rules
+* `mode=remove` will remove the tag group condition from rules instead of
+deleting the entire rule
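+
+For example, a hypothetical call that removes the tag group condition from affected rules (server, site, tag group name and credentials are placeholders):
+
+```
+curl -X DELETE \
+  "https://monitoring.example.com/mysite/check_mk/api/1.0/objects/host_tag_group/criticality?mode=remove" \
+  -H "Authorization: Bearer automation myautomationsecret"
+```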
diff --git a/.werks/16659.md b/.werks/16659.md
new file mode 100644
index 00000000000..ba54eaec1cc
--- /dev/null
+++ b/.werks/16659.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# ldap: allow manually updating locked status of users
+
+key | value
+---------- | ---
+date | 2024-04-24T08:37:31+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+When using the "Authentication Expiration" sync plug-in for LDAP, users can be
+stuck in a locked state after too many failed login attempts.
+
+With this werk, the "Disable password" option in the UI (or "disable_login" in
+the REST API) can now be edited for users managed by LDAP. Please note that
+a sync with the LDAP server will restore the original value.
diff --git a/.werks/16664.md b/.werks/16664.md
new file mode 100644
index 00000000000..30d38b01afe
--- /dev/null
+++ b/.werks/16664.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# interface inventory: recombine default interface inventory row with cisco specific row
+
+key | value
+---------- | ---
+date | 2024-03-22T11:52:27+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | inv
+level | 1
+compatible | yes
+
+The inventory data of `inv_cisco_vlans` and `inv_if` used to be combined in one row.
+
+This behavior was broken as of 2.3, where two different rows were displayed for each
+inventory plugin.
+
+Now the rows are unified again.
+
+This werk also fixes the missing color of the status displayed in the inventory rows.
diff --git a/.werks/16665.md b/.werks/16665.md
new file mode 100644
index 00000000000..7fb98912754
--- /dev/null
+++ b/.werks/16665.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# Crash when accessing overridden built-in dashboard
+
+key | value
+---------- | ---
+date | 2024-04-03T12:32:28+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | multisite
+level | 1
+compatible | yes
+
+Accessing a built-in dashboard after overriding it with a custom dashboard
+could cause certain dashlets to crash.
+
+For example, you could access the built-in dashboard by clicking the link
+in Customize > Dashboards > Built-in. Another way to access the built-in
+dashboard is via a bookmark to it.
+
+Now this crash no longer occurs and all dashlets render correctly.
diff --git a/.werks/16666.md b/.werks/16666.md
new file mode 100644
index 00000000000..b7ce73f4dbe
--- /dev/null
+++ b/.werks/16666.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Deprecate "Asynchronous execution of plug-ins" rule
+
+key | value
+---------- | ---
+date | 2024-04-22T06:19:27+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | setup
+level | 1
+compatible | yes
+
+The rule "Asynchronous execution of plug-ins" has no effect on the execution of the scripts;
+therefore it is being deprecated.
+
+This means it will eventually be removed in future versions.
+
diff --git a/.werks/16667.md b/.werks/16667.md
new file mode 100644
index 00000000000..a8dbc25ce23
--- /dev/null
+++ b/.werks/16667.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# discovery: fix writing of autochecks file for nodes
+
+key | value
+---------- | ---
+date | 2024-04-25T12:29:22+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+For clustered services the nodes' autocheck files would be written
+with the aggregated clustered service information.
+
+Now, at least for the autodiscovery, this is fixed and the individual
+node information is written.
+
diff --git a/.werks/16674.md b/.werks/16674.md
new file mode 100644
index 00000000000..349b6cf392b
--- /dev/null
+++ b/.werks/16674.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Ruleset API: Add option to group Dictionary elements
+
+key | value
+---------- | ---
+date | 2024-04-02T15:57:33+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This is only relevant for plugin developers.
+
+If some Dictionary entries are thematically more closely related to each other than others, they can now be configured and displayed as such (see the sketch below).
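+
+A hypothetical sketch of such a grouping; the `group` argument and the `DictGroup` name are illustrative, please consult the API reference for the exact spelling:
+
+```
+Dictionary(
+    elements={
+        "warn": DictElement(
+            parameter_form=Integer(title=Title("Warning at")),
+            group=DictGroup(title=Title("Levels")),  # illustrative grouping
+        ),
+        "crit": DictElement(
+            parameter_form=Integer(title=Title("Critical at")),
+            group=DictGroup(title=Title("Levels")),
+        ),
+    },
+)
+```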
+
diff --git a/.werks/16675.md b/.werks/16675.md
new file mode 100644
index 00000000000..8595305f776
--- /dev/null
+++ b/.werks/16675.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# Ruleset API: Remove unused evaluation type specification
+
+key | value
+---------- | ---
+date | 2024-04-04T10:58:02+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This change is only relevant for plugin developers.
+
+Some rule specs allow the configuration of the `eval_type` to specify how the rules are meant to be evaluated with respect to each other.
+However, this configuration is not considered in some cases; instead, Checkmk uses a predetermined evaluation type.
+For the affected rule specs this option was removed:
+* AgentConfig
+* NotificationParameters
+* DiscoveryParameters
+
diff --git a/.werks/16677.md b/.werks/16677.md
new file mode 100644
index 00000000000..c46b2c5b4c3
--- /dev/null
+++ b/.werks/16677.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Ruleset API: Rename PredictiveLevels.prefill_stddev_diff to prefill_stdev_diff
+
+key | value
+---------- | ---
+date | 2024-04-08T14:16:32+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This change is only relevant for plugin developers.
+
diff --git a/.werks/16678.md b/.werks/16678.md
new file mode 100644
index 00000000000..558eecb8a4e
--- /dev/null
+++ b/.werks/16678.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# HW-/SW-Inventory: Do not run autoinventory for inventorized hosts
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-10T09:08:54+00:00
+level | 1
+class | fix
+component | inv
+edition | cre
+
+Previously, hosts marked for inventorization were not unmarked afterwards, meaning they would be processed again every time the autoinventory ran.
+This has been fixed.
\ No newline at end of file
diff --git a/.werks/16680.md b/.werks/16680.md
new file mode 100644
index 00000000000..465f49a13a0
--- /dev/null
+++ b/.werks/16680.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# Ruleset API: Changes to available rule spec topics
+
+key | value
+---------- | ---
+date | 2024-04-15T06:44:00+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+
+This change is only relevant for plugin developers.
+
+
+The available topics for rule specs have changed:
+
+* added: `SYNTHETIC_MONITORING`
+* removed: `AGENT_PLUGINS`
+
diff --git a/.werks/16681.md b/.werks/16681.md
new file mode 100644
index 00000000000..9db133d5a2c
--- /dev/null
+++ b/.werks/16681.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# check_http: Stricter address family configuration requirements
+
+key | value
+---------- | ---
+date | 2024-04-17T10:07:49+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+The active check "http" allows different configurations for the address family in the host settings, among others the primary address family.
+If this option is selected, a primary IP address now has to be configured for the relevant host.
+An IP address also has to be configured for the host if no explicit address is configured in the rule.
+
+
+Note that this active check is being deprecated in favor of the newer "httpv2".
diff --git a/.werks/16682.md b/.werks/16682.md
new file mode 100644
index 00000000000..79e6ff58c92
--- /dev/null
+++ b/.werks/16682.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Ruleset API: Fix migration with scaling of SimpleLevels
+
+key | value
+---------- | ---
+date | 2024-04-17T11:19:36+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This change is relevant to plugin developers.
+
+The migration helper functions `migrate_to_integer_simple_levels` and `migrate_to_float_simple_levels` for `SimpleLevels` used to apply the scaling factor (if given) every time the migration was run, i.e. also to already migrated values.
+This means any rule where these helpers were used with a scaling factor will have incorrect values and will have to be manually corrected.
+
+No shipped rules are affected by this.
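+
+As a generic illustration in plain Python (not the actual helper), a migration that scales must leave already migrated values untouched to be safe to run repeatedly; this sketch assumes the `("fixed", ...)`/`("no_levels", None)` model of `SimpleLevels`:
+
+```
+def migrate_mb_to_bytes(value: object):
+    # already migrated: ("fixed", (warn, crit)) or ("no_levels", None)
+    if isinstance(value, tuple) and len(value) == 2 and value[0] in ("fixed", "no_levels"):
+        return value
+    # legacy format: plain (warn, crit) tuple with levels in MB
+    if isinstance(value, tuple) and len(value) == 2:
+        warn, crit = value
+        return ("fixed", (warn * 1024**2, crit * 1024**2))
+    raise ValueError(f"unexpected value: {value!r}")
+```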
+
diff --git a/.werks/16683.md b/.werks/16683.md
new file mode 100644
index 00000000000..b676e262f77
--- /dev/null
+++ b/.werks/16683.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Licensing: Reset license state when updating from Enterprise to non-Enterprise
+
+key | value
+---------- | ---
+date | 2024-04-17T16:10:14+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | setup
+level | 1
+compatible | no
+
+When an upgrade from the Enterprise edition to another edition is performed, the site will start a new trial period, even if licensing credentials had already been configured.
+To license the product, a license verification needs to be performed (on the licensing page: Setup > Maintenance > Licensing > Online/Offline verification).
+
diff --git a/.werks/16684.md b/.werks/16684.md
new file mode 100644
index 00000000000..e852f6f601b
--- /dev/null
+++ b/.werks/16684.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Ruleset API: rename Dictionaries 'deprecated_elements'
+
+key | value
+---------- | ---
+date | 2024-04-09T10:11:41+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This only affects plugin developers using the new ruleset API.
+
+The `Dictionary` attribute `deprecated_elements` is renamed to
+`ignored_elements`.
+A validation now ensures that no present elements are marked as 'ignored'.
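+
+A short sketch; the element names are illustrative:
+
+```
+Dictionary(
+    elements={
+        "timeout": DictElement(parameter_form=Integer(title=Title("Timeout"))),
+    },
+    # formerly `deprecated_elements`: stored keys to drop silently,
+    # must not overlap with the keys in `elements`
+    ignored_elements=("legacy_option",),
+)
+```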
diff --git a/.werks/16685.md b/.werks/16685.md
new file mode 100644
index 00000000000..29c489926b9
--- /dev/null
+++ b/.werks/16685.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# emcvnx_info: Fix regression in 2.3.0-beta
+
+key | value
+---------- | ---
+date | 2024-04-10T14:54:18+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16686.md b/.werks/16686.md
new file mode 100644
index 00000000000..9740dc8bbdd
--- /dev/null
+++ b/.werks/16686.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Rendering of fractional values in a few rulesets
+
+key | value
+---------- | ---
+date | 2024-04-11T10:25:58+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This only affects the 2.3-beta versions.
+There were issues when rendering fractional values in some specific rulesets.
diff --git a/.werks/16687.md b/.werks/16687.md
new file mode 100644
index 00000000000..06481c99248
--- /dev/null
+++ b/.werks/16687.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# New APIs: Crash during config generation for custom special agent or active check calls
+
+key | value
+---------- | ---
+date | 2024-04-15T07:19:19+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This only affects plugin developers using the new API in `cmk.server_side_calls`.
+The config generation for custom plugins crashed when passwords were included in the command.
+
+The mechanism for passing `Secret`s has changed since 2.3.0b3.
+To learn how to adjust your special agent or active check to the new API, please refer to the Sphinx documentation of the `cmk.server_side_calls.v1.Secret` object.
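+
+As a rough sketch of passing a `Secret` in a command (the surrounding names are illustrative and the `unsafe()` call is an assumption; see the Sphinx documentation for the authoritative form):
+
+```
+def commands(params, host_config):
+    # params.password is assumed to be a Secret; unsafe() yields a
+    # placeholder that is replaced by the real password at call time
+    yield SpecialAgentCommand(
+        command_arguments=[
+            "--user", params.user,
+            "--password", params.password.unsafe(),
+        ]
+    )
+```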
diff --git a/.werks/16688.md b/.werks/16688.md
new file mode 100644
index 00000000000..4c7149ae81b
--- /dev/null
+++ b/.werks/16688.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# veeam_tapejobs: TypeError (not all arguments converted during string formatting)
+
+key | value
+---------- | ---
+date | 2024-04-16T14:08:54+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This fixes a regression in Checkmk 2.3.0-beta.
diff --git a/.werks/16689.md b/.werks/16689.md
new file mode 100644
index 00000000000..c49130abd84
--- /dev/null
+++ b/.werks/16689.md
@@ -0,0 +1,36 @@
+[//]: # (werk v2)
+# Decommission legacy check API
+
+key | value
+---------- | ---
+date | 2024-04-17T06:27:51+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This werk only affects users and maintainers of custom check plugins developed against the API that was replaced in Checkmk 2.0.0.
+
+The old API for the plugins residing in `local/share/check_mk/checks` will no longer be stable in Checkmk version 2.3.
+Plugins not maintained by Checkmk will almost certainly be incompatible and therefore ignored.
+
+Compatible plugins _will_ be considered, but the notion of what constitutes a compatible plugin may change any time.
+
+As of Checkmk version 2.2 plugins in that folder generated a warning on the commandline and resulted in an "Analyze Configuration" test issuing a WARNING.
+
+We now escalate this to CRITICAL.
+Incompatible plugins are reported during `omd update`.
+
+If you maintain such plugins, please migrate them to the new API before upgrading to Checkmk 2.3.
+You can find a blog post on how to migrate these plugins
+[here](https://checkmk.com/blog/migrating-check-plug-ins-to-checkmk-2-0).
+A comprehensive guide on how to write plugins is found
+[in our documentation](https://docs.checkmk.com/2.3.0-beta/en/devel_check_plugins.html).
+Please also refer to the APIs documentation found in your site (Help -> Check plugin API reference).
+
+Commandline call plugins for special agents and active checks in this folder will still work,
+but we provide a new API for those as well now (see [Werk #16259](https://checkmk.com/werk/16259)).
+They will stop working in Checkmk 2.4.
+
diff --git a/.werks/16690.md b/.werks/16690.md
new file mode 100644
index 00000000000..5a1432e5cf9
--- /dev/null
+++ b/.werks/16690.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# pfsense_if: stale services in 2.3 beta
+
+key | value
+---------- | ---
+date | 2024-04-17T11:38:38+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16691.md b/.werks/16691.md
new file mode 100644
index 00000000000..01ac8a05849
--- /dev/null
+++ b/.werks/16691.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# Linux agent: drop support for FreeIPMI 0.8.0 and earlier
+
+key | value
+---------- | ---
+date | 2024-04-17T20:21:00+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This change is only incompatible for users monitoring hosts with a FreeIPMI
+version of 0.8.0 or earlier.
+
+FreeIPMI 0.8.1 was released in December 2009.
+
+
diff --git a/.werks/16692.md b/.werks/16692.md
new file mode 100644
index 00000000000..e390dbb7b6f
--- /dev/null
+++ b/.werks/16692.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# inventory_primekey: do not crash upon empty node ID
+
+key | value
+---------- | ---
+date | 2024-04-20T10:06:26+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16693.md b/.werks/16693.md
new file mode 100644
index 00000000000..795bd08ba48
--- /dev/null
+++ b/.werks/16693.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Re-enable inline SNMP for SNMPv1
+
+key | value
+---------- | ---
+date | 2024-04-20T13:55:35+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Due to a memory leak in the underlying library, Checkmk was using the
+'classic' SNMP backend for all SNMPv1 hosts regardless of the user
+configuration.
+
+This memory leak has since been fixed, so we remove the fallback.
diff --git a/.werks/16694.md b/.werks/16694.md
new file mode 100644
index 00000000000..bd33f815f0b
--- /dev/null
+++ b/.werks/16694.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Fix event statistics dashlet filters
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-09T09:59:29+00:00
+level | 1
+class | fix
+component | multisite
+edition | cre
+
+For the dashlet "Event statistics" you were able to configure "Host" and
+"Service" Context/Search filter.
+
+The "Service" filter had no effect and is now replaced with the "Event Console
+event" filter option.
\ No newline at end of file
diff --git a/.werks/16695.md b/.werks/16695.md
new file mode 100644
index 00000000000..35d23492260
--- /dev/null
+++ b/.werks/16695.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Just show an error message if redis is not reachable
+
+key | value
+---------- | ---
+date | 2024-04-17T06:14:37+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | multisite
+level | 1
+compatible | yes
+
+The mega menu search produced a crash report if redis was not reachable.
+Now only an error message is shown.
diff --git a/.werks/16697.md b/.werks/16697.md
new file mode 100644
index 00000000000..eb74bc9a029
--- /dev/null
+++ b/.werks/16697.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Ignore empty "Multiple sites" filter if "Site" filter is set
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-23T06:07:04+00:00
+level | 1
+class | fix
+component | multisite
+edition | cre
+
+If both filters "Multiple sites" and "Sites" were set, the "Multiple sites"
+filter was used, even if empty.
\ No newline at end of file
diff --git a/.werks/16698.md b/.werks/16698.md
new file mode 100644
index 00000000000..8c50dd0e30a
--- /dev/null
+++ b/.werks/16698.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# mknotifyd: Log to correct file after logrotate
+
+key | value
+---------- | ---
+date | 2024-04-25T08:08:50+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | notifications
+level | 1
+compatible | yes
+
+The logrotate cronjob rotated the logfile mknotifyd.log correctly, but
+the mknotifyd was not aware of the changed logfile, resulting in logging to the
+rotated file. The mknotifyd now detects the change of the logfile.
diff --git a/.werks/16699.md b/.werks/16699.md
new file mode 100644
index 00000000000..c4e20a60c30
--- /dev/null
+++ b/.werks/16699.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Do not escape HTML output of ps check in HTML Emails
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-30T11:24:24+00:00
+level | 1
+class | fix
+component | notifications
+edition | cre
+
+If you configured a process discovery rule with the option "Enable per-process
+details in long-output", the "Details" column in HTML emails showed the escaped
+output.
\ No newline at end of file
diff --git a/.werks/16700.md b/.werks/16700.md
new file mode 100644
index 00000000000..e32326bc4b9
--- /dev/null
+++ b/.werks/16700.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Log to notify.log after logrotate
+
+key | value
+---------- | ---
+date | 2024-05-02T07:09:40+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | notifications
+level | 1
+compatible | yes
+
+The logrotate cronjob rotated the logfile notify.log correctly, but the logs
+were still written to the archived file afterwards until the core was restarted.
diff --git a/.werks/16701.md b/.werks/16701.md
new file mode 100644
index 00000000000..fb2d2bf5227
--- /dev/null
+++ b/.werks/16701.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Fix unescaped details for process discovery checks in availability
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-05-07T07:53:43+00:00
+level | 1
+class | fix
+component | multisite
+edition | cre
+
+Werk #15523 introduced a change in the escaping of ps checks, but the
+availability view still showed a broken table.
+
+This is fixed now.
\ No newline at end of file
diff --git a/.werks/16702.md b/.werks/16702.md
new file mode 100644
index 00000000000..835232dfe5e
--- /dev/null
+++ b/.werks/16702.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Log to alert.log and dcd.log after logrotate
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-05-07T09:32:55+00:00
+level | 1
+class | fix
+component | wato
+edition | cee
+
+Same as werks #16700 and #16698, but for alert.log and dcd.log.
\ No newline at end of file
diff --git a/.werks/16703.md b/.werks/16703.md
new file mode 100644
index 00000000000..95c1255d114
--- /dev/null
+++ b/.werks/16703.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Respect all sites in multiple sites filter for statistics dashlets
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-05-08T06:53:55+00:00
+level | 1
+class | fix
+component | multisite
+edition | cre
+
+The dashlets "Host statistics" and "Service statistics" always showed results
+for just one site, even if several sites were selected in the "Multiple sites" filter.
\ No newline at end of file
diff --git a/.werks/16734.md b/.werks/16734.md
new file mode 100644
index 00000000000..e67ca76cb08
--- /dev/null
+++ b/.werks/16734.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Fix missing inventory macros in dashlets
+
+key | value
+---------- | ---
+date | 2024-04-15T11:02:13+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | multisite
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16735.md b/.werks/16735.md
new file mode 100644
index 00000000000..57ae43f54de
--- /dev/null
+++ b/.werks/16735.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Fix unknown METRIC element if combined graph is added to dashboard
+
+key | value
+---------- | ---
+date | 2024-04-22T12:34:09+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | multisite
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16736.md b/.werks/16736.md
new file mode 100644
index 00000000000..f6ab38d80ad
--- /dev/null
+++ b/.werks/16736.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# Warn during update if contact groups are used in rulesets but not available
+
+key | value
+---------- | ---
+date | 2024-04-23T07:42:11+00:00
+version | 2.4.0b1
+class | fix
+edition | cme
+component | wato
+level | 1
+compatible | yes
+
+Some rulesets like 'Assignment of hosts (or services) to contact groups' use
+contact groups which may not be synced because they are assigned to a specific
+customer or to 'Provider' (default). This inconsistency might have already been
+present in your configuration but went unnoticed in the previous versions. With
+2.3, we introduced new validation mechanics which now warn you about this
+potential issue.
+
+With this change we make the shown error message more helpful, so that users
+get a hint on how to resolve the situation.
diff --git a/.werks/16737.md b/.werks/16737.md
new file mode 100644
index 00000000000..68938615206
--- /dev/null
+++ b/.werks/16737.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Warn during update if contact groups are used in rulesets 'host_groups' or 'service_groups'
+
+key | value
+---------- | ---
+date | 2024-05-03T08:31:13+00:00
+version | 2.4.0b1
+class | fix
+edition | cme
+component | wato
+level | 1
+compatible | yes
+
+This werk extends werk #16376 (see that werk for more detailed information).
diff --git a/.werks/16738.md b/.werks/16738.md
new file mode 100644
index 00000000000..40e16cd89aa
--- /dev/null
+++ b/.werks/16738.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# winperf_processor: Remove (constant) undefined metric "Cpus"
+
+key | value
+---------- | ---
+date | 2024-05-06T12:24:16+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+
diff --git a/.werks/16754.md b/.werks/16754.md
new file mode 100644
index 00000000000..3e26516eaa3
--- /dev/null
+++ b/.werks/16754.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# docker_container: skip on incomplete data for diskstat and memory
+
+key | value
+---------- | ---
+date | 2024-04-16T10:28:20+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+During the data parsing of a container, it is possible to encounter
+incomplete metric sets, which previously caused a crash. Since the
+data in these instances is simply unavailable, we now skip the
+discovery or check cycle for such cases. This adjustment applies
+to the docker container diskstat and memory check plugins.
diff --git a/.werks/16755.md b/.werks/16755.md
new file mode 100644
index 00000000000..8e8cba6bccc
--- /dev/null
+++ b/.werks/16755.md
@@ -0,0 +1,23 @@
+[//]: # (werk v2)
+# mem_win: rename 'Commit Charge' to 'Virtual Memory' for correctness
+
+key | value
+---------- | ---
+date | 2024-04-19T12:34:59+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The value previously displayed as 'Commit Charge' did not match its
+description. Therefore, this value has been accurately renamed to
+'Virtual Memory,' while maintaining the original calculation method.
+Correspondingly, the titles of related graphs have been adjusted to
+reflect this change.
+
+In versions prior to 2.3, the service summary included a metric labeled
+'Pagefile installed.' This has now been correctly renamed to
+'Total Virtual Memory,' as it never accurately represented the
+'Pagefile installed.'
diff --git a/.werks/16756.md b/.werks/16756.md
new file mode 100644
index 00000000000..c2de53edf2b
--- /dev/null
+++ b/.werks/16756.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# host_config: introduce include_links option for Show all hosts endpoint
+
+key | value
+---------- | ---
+date | 2024-04-23T06:37:18+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | rest-api
+level | 1
+compatible | no
+
+This werk introduces the flag 'include_links' for the 'Show all hosts'
+host config endpoint. It toggles whether the links field of the individual
+hosts should be populated and serves as a mechanism to reduce the generated
+payload. The flag defaults to False, so existing scripts that rely on the
+links field have to be adjusted accordingly.
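+
+A minimal sketch of a client opting back in to the links after this change;
+the URL, site name, automation user and secret are placeholders:
+
+```
+# Hedged sketch: server, site and credentials below are assumptions.
+import requests
+
+resp = requests.get(
+    "https://monitoring.example.com/mysite/check_mk/api/1.0"
+    "/domain-types/host_config/collections/all",
+    params={"include_links": True},  # now defaults to False
+    headers={"Authorization": "Bearer automation mysecret"},
+)
+resp.raise_for_status()
+print(len(resp.json()["value"]))  # one entry per host
+```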
diff --git a/.werks/16757.md b/.werks/16757.md
new file mode 100644
index 00000000000..16d9af73d17
--- /dev/null
+++ b/.werks/16757.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# agent_kube: resolve case when referenced PVC is no longer present
+
+key | value
+---------- | ---
+date | 2024-05-06T10:47:15+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Prior to this werk, the Kubernetes special agent failed whenever
+the API returned Kubernetes objects with at least one reference
+to a no longer existing PVC. This werk fixes this issue by ignoring
+such references.
diff --git a/.werks/16760.md b/.werks/16760.md
new file mode 100644
index 00000000000..3ea1b2d79b5
--- /dev/null
+++ b/.werks/16760.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# netapp_ontap: monitor S3 Subvolumes
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-04-18T15:07:04+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+The Netapp Ontap plugin _NetApp Filer: Used Space and Traffic of Volumes_ did not monitor S3 subvolumes correctly.
+
+With this werk, the S3 volumes with their respective counters are discovered and monitored.
\ No newline at end of file
diff --git a/.werks/16761.md b/.werks/16761.md
new file mode 100644
index 00000000000..923463d9ec8
--- /dev/null
+++ b/.werks/16761.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# agent_azure: handling case insensitivity in Azure resource group names
+
+key | value
+---------- | ---
+compatible | no
+version | 2.4.0b1
+date | 2024-04-23T08:25:11+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+This change impacts all those who are monitoring Azure systems.
+
+Azure resource group names aren't case sensitive.
+The APIs may return values with different casing than the originally specified name.
+(see: https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/frequently-asked-questions#are-resource-group-names-case-sensitive)
+
+We modified the Azure agent to convert resource group names to lower case as soon as they are retrieved from the API.
+
+This is an incompatible change, so a rediscovery is needed for the services to be properly monitored.
\ No newline at end of file
diff --git a/.werks/16779.md b/.werks/16779.md
new file mode 100644
index 00000000000..f513b098e33
--- /dev/null
+++ b/.werks/16779.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# ldap: show the correct customer for ldap users
+
+key | value
+---------- | ---
+date | 2024-04-18T14:03:11+00:00
+version | 2.4.0b1
+class | fix
+edition | cme
+component | wato
+level | 1
+compatible | yes
+
+
+When an LDAP connection is configured for a specific customer, this
+wasn't reflected in the users of that LDAP connection. This werk
+addresses the issue by showing the correct customer.
+
diff --git a/.werks/16780.md b/.werks/16780.md
new file mode 100644
index 00000000000..343230a2f50
--- /dev/null
+++ b/.werks/16780.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# APIDocs: missing ETag response header for 2 endpoints.
+
+key | value
+---------- | ---
+date | 2024-04-24T14:41:14+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+The following endpoints did not show that they returned an ETag header
+as part of their 200 OK response.
+
+* Show all pending changes
+* Show password
+
+This werk addresses this issue. Both now show the correct headers in
+their responses.
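+
+A hedged illustration (URL, password ID and credentials are placeholders) of
+reading the now-documented header from the 'Show password' endpoint:
+
+```
+# Sketch only: the object ID and credentials are assumptions.
+import requests
+
+resp = requests.get(
+    "https://monitoring.example.com/mysite/check_mk/api/1.0"
+    "/objects/password/mypassword",
+    headers={"Authorization": "Bearer automation mysecret"},
+)
+resp.raise_for_status()
+etag = resp.headers["ETag"]  # can be sent back via If-Match on updates
+```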
diff --git a/.werks/16781.md b/.werks/16781.md
new file mode 100644
index 00000000000..94cea2d833a
--- /dev/null
+++ b/.werks/16781.md
@@ -0,0 +1,22 @@
+[//]: # (werk v2)
+# BI: service/host_label group operator now has a default value of "and"
+
+key | value
+---------- | ---
+date | 2024-04-29T11:10:04+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | rest-api
+level | 1
+compatible | yes
+
+When creating BI rules via the REST API, the host_label_groups and
+the service_label_groups fields both accepted a list of conditions.
+These conditions previously required an operator field. However,
+we realized that the operator field for the first item in these lists
+really wasn't needed and had no effect. So that users don't have
+to provide this value, all conditions in these lists now default to
+the operator "and".
+
+
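+A sketch of what this means for a request body; everything except the
+operator key is a hypothetical placeholder:
+
+```
+# Hypothetical excerpt of a BI rule creation payload; "..." stands for
+# the omitted label conditions.
+host_label_groups = [
+    {"label_group": [...]},                    # operator now defaults to "and"
+    {"operator": "or", "label_group": [...]},  # explicit operators still work
+]
+```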
diff --git a/.werks/16789.md b/.werks/16789.md
new file mode 100644
index 00000000000..1b6e3a8defd
--- /dev/null
+++ b/.werks/16789.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# check_http: Improve handling of old service description
+
+key | value
+---------- | ---
+date | 2024-04-18T16:02:21+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+If not configured otherwise, the service description of the active check "http" will be prepended with either "HTTP" or "HTTPS".
+In older installations only the "HTTP" prefix was possible and special handling was introduced to keep the old prefix for existing services.
+
+To improve the handling, existing configurations of the "Service name" in the "Check HTTP service (deprecated)" rule may now be updated to contain the prefix "^HTTP" if the configured value does not start with a "^".
+This only changes the saved rule, the actual description of the service will not change.
diff --git a/.werks/16790.md b/.werks/16790.md
new file mode 100644
index 00000000000..f3fffdd26a1
--- /dev/null
+++ b/.werks/16790.md
@@ -0,0 +1,21 @@
+[//]: # (werk v2)
+# Ruleset API: Fix error during AgentConfig creation
+
+key | value
+---------- | ---
+date | 2024-04-19T11:48:42+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
+When creating an `AgentConfig` rulespec using the Ruleset API an error
+```
+KeyError: 'cmk-match-type'
+```
+was raised.
+
+
diff --git a/.werks/16791.md b/.werks/16791.md
new file mode 100644
index 00000000000..e8dc678e1c4
--- /dev/null
+++ b/.werks/16791.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Fix categorization of some rule specs using new Ruleset API
+
+key | value
+---------- | ---
+date | 2024-04-19T12:04:19+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The following rule specs/topic combinations from cmk.rulesets.v1 will now be shown in different categories in the UI:
+
+* SpecialAgent with Topic.CLOUD: under `Setup > Agents > VM, cloud, container`
+* AgentConfig with Topic.LINUX: under `Setup > Agents > Windows, Linux, Solaris, AIX > Agent rules > Linux Agent` (not available in Raw)
+* AgentConfig with Topic.WINDOWS: under `Setup > Agents > Windows, Linux, Solaris, AIX > Agent rules > Windows Agent` (not available in Raw)
+* AgentConfig with Topic.GENERAL: under `Setup > Agents > Windows, Linux, Solaris, AIX > Agent rules > Generic Options`
+* AgentConfig with another topic: under `Setup > Agents > Windows, Linux, Solaris, AIX > Agent rules > Agent plug-ins` (not available in Raw)
diff --git a/.werks/16793.md b/.werks/16793.md
new file mode 100644
index 00000000000..824fda1b24f
--- /dev/null
+++ b/.werks/16793.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# infoblox_temp: Add support for Nios version > 8.6
+
+key | value
+---------- | ---
+date | 2024-04-26T06:56:26+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The relevant OIDs have changed for newer Nios versions. The check now differentiates between the versions, meaning it no longer crashes when parsing newer sections.
diff --git a/.werks/16794.md b/.werks/16794.md
new file mode 100644
index 00000000000..aac84012884
--- /dev/null
+++ b/.werks/16794.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# "checkgroup_parameters:if": Rename to "checkgroup_parameters:interfaces"
+
+key | value
+---------- | ---
+date | 2024-04-26T11:27:20+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | no
+
+This only affects you if you are configuring rules through the REST API.
+In order to make the "checkgroup_parameters:if" ruleset compliant with the new Ruleset API, it has been renamed to "checkgroup_parameters:interfaces".
+Any configuration inside Checkmk will be updated automatically; however, any outside references to the old name have to be adjusted manually.
+
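+As a hedged example of adjusting such an outside reference (URL, credentials
+and the rule value are placeholders), a rule created via the REST API would
+now use the new name:
+
+```
+# Sketch only; value_raw and conditions depend on the actual rule.
+import requests
+
+requests.post(
+    "https://monitoring.example.com/mysite/check_mk/api/1.0"
+    "/domain-types/rule/collections/all",
+    headers={"Authorization": "Bearer automation mysecret"},
+    json={
+        "ruleset": "checkgroup_parameters:interfaces",  # was "...:if"
+        "folder": "/",
+        "value_raw": "{'errors': ('abs', (10, 20))}",
+        "conditions": {},
+    },
+).raise_for_status()
+```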
diff --git a/.werks/16795.md b/.werks/16795.md
new file mode 100644
index 00000000000..8e43ac87edd
--- /dev/null
+++ b/.werks/16795.md
@@ -0,0 +1,15 @@
+[//]: # (werk v2)
+# Ruleset API: SingleChoice rendering when using DictGroups
+
+key | value
+---------- | ---
+date | 2024-05-03T13:31:07+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+When a SingleChoice (rendered as a dropdown) was grouped together with another element via a DictGroup, the dropdown could not be closed after opening it.
+This affected the "cert" ruleset among the shipped rulesets.
diff --git a/.werks/16799.md b/.werks/16799.md
new file mode 100644
index 00000000000..64da68b3323
--- /dev/null
+++ b/.werks/16799.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Custom & forecast graphs: More descriptive error messages in case of missing user input when configuring metrics
+
+key | value
+---------- | ---
+date | 2024-04-19T10:57:49+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | metrics
+level | 1
+compatible | yes
+
+When configuring the metrics rendered in custom and forecast graphs, users have to select a host,
+a service and a metric. Previously, if any of these fields were missing, the Checkmk UI displayed
+the message "Cannot calculate graph recipes" and an uninformative traceback. As of this werk, the UI instead displays a descriptive error message.
diff --git a/.werks/16800.md b/.werks/16800.md
new file mode 100644
index 00000000000..6b3ebb5b49a
--- /dev/null
+++ b/.werks/16800.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Fix crash in SNMPv1 and SNMPv2 connection tests
+
+key | value
+---------- | ---
+date | 2024-04-19T15:28:03+00:00
+version | 2.3.0b6
+class | fix
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+When running the SNMP connection tests for a host that has SNMPv3 credentials configured, the SNMPv1
+and SNMPv2 connection tests crashed. With the Inline SNMP backend, the corresponding error message
+read "argument 2 must be str, not tuple". With the Classic backend, there was no error message at
+all.
diff --git a/.werks/16801.md b/.werks/16801.md
new file mode 100644
index 00000000000..3e71b8782d9
--- /dev/null
+++ b/.werks/16801.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Custom graphs: Fix crash in case of unavailable scalars
+
+key | value
+---------- | ---
+date | 2024-04-23T10:22:44+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | metrics
+level | 1
+compatible | yes
+
+When adding a scalar to a custom graph, it is possible that no value is available for this scalar.
+For example, this is the case when adding the CRIT threshold of a metric for which no thresholds are
+configured. In such cases, no graph was rendered. Instead, the Checkmk UI displayed the message
+"Cannot calculate graph recipes" and showed a traceback.
+
+This werk restores the correct behavior: No lines are rendered for unavailable scalars and they are
+denoted with "n/a" in the graph legend.
diff --git a/.werks/16802.md b/.werks/16802.md
new file mode 100644
index 00000000000..ad69653e59b
--- /dev/null
+++ b/.werks/16802.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Synthetic monitoring: Fix crash in "Robotmk scheduler (Windows)" bakery rule when attempting to save with invalid settings
+
+key | value
+---------- | ---
+date | 2024-04-24T12:38:16+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | setup
+level | 1
+compatible | yes
+
+When attempting to save the bakery rule "Robotmk scheduler (Windows)" with invalid settings such as
+an empty base directory or an invalid application name, the UI page crashed with
+"TypeError: argument of type 'NoneType' is not iterable".
diff --git a/.werks/16803.md b/.werks/16803.md
new file mode 100644
index 00000000000..27b2fb18837
--- /dev/null
+++ b/.werks/16803.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Custom graphs: Fix crash in case of missing host/service/metric
+
+key | value
+---------- | ---
+date | 2024-04-24T14:21:32+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | metrics
+level | 1
+compatible | yes
+
+Custom graphs can contain elements whose host or service is non-existent. This happens for example
+when a host is removed from the monitoring after one of its metrics has been added to a custom
+graph. In such cases, no graph was rendered. Instead, the Checkmk UI displayed the message "Cannot
+calculate graph recipes" and showed a traceback.
+
+As of this werk, the UI instead renders no lines for such elements and denotes them with "n/a" in
+the graph legend.
diff --git a/.werks/16804.md b/.werks/16804.md
new file mode 100644
index 00000000000..6aaf963a538
--- /dev/null
+++ b/.werks/16804.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Printer cartridge levels: Allow to configure receptacles and containers individually
+
+key | value
+---------- | ---
+date | 2024-04-22T09:37:09+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Some printers only report "some remaining" in case the exact percentage of
+the filling level is unknown.
+For these cases, users could already explicitly configure the monitoring state.
+This state can now be configured depending on whether the supply is a container
+or a receptacle.
diff --git a/.werks/16805.md b/.werks/16805.md
new file mode 100644
index 00000000000..f8ea0d0adda
--- /dev/null
+++ b/.werks/16805.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# mkp-tool: Fix the CLI's 'package' command
+
+key | value
+---------- | ---
+date | 2024-04-23T11:57:59+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+The `mkp package` command would fail with "File conflict: /omd/sites/mydevsite/local/... (already existing)" if called in a site context.
+
+Additionally, we no longer write the mkp-tool version into the "version.min_required" field of the manifest template.
diff --git a/.werks/16806.md b/.werks/16806.md
new file mode 100644
index 00000000000..34b0a303cb0
--- /dev/null
+++ b/.werks/16806.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Handle unexpected files in 'cmk_addons/plugins'
+
+key | value
+---------- | ---
+date | 2024-04-23T12:04:08+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Checkmk 2.3.0 beta crashed during various operations if unexpected files were
+put into `local/lib/python3/cmk_addons/plugins/` or `local/lib/python3/cmk/plugins/`.
+Such files are now ignored.
diff --git a/.werks/16807.md b/.werks/16807.md
new file mode 100644
index 00000000000..cd509996a6b
--- /dev/null
+++ b/.werks/16807.md
@@ -0,0 +1,23 @@
+[//]: # (werk v2)
+# Ignore unknown "Disabled checks" during update config
+
+key | value
+---------- | ---
+date | 2024-04-24T12:25:30+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+If users had disabled checks that have since been removed or are temporarily unavailable (due to disabled MKPs, for instance), they were presented with a message like
+
+```
+WARNING: Invalid rule configuration detected (Ruleset: ignored_checks, Title: Disabled checks, Folder: ,
+-| Rule nr: 1, Exception: ifoperstatus is not an allowed value)
+```
+
+These invalid values are ignored now.
+They do no harm and are dropped upon editing the rule.
+
diff --git a/.werks/16808.md b/.werks/16808.md
new file mode 100644
index 00000000000..1219fb10787
--- /dev/null
+++ b/.werks/16808.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# agent_threepar: The agent mistakenly only accepted default values as valid ones
+
+key | value
+---------- | ---
+date | 2024-04-24T14:01:07+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This fixes a regression in Checkmk 2.3 beta.
+The integration to monitor "3PAR Configuration" mistakenly accepted only
+subsets of the default values as valid.
diff --git a/.werks/16809.md b/.werks/16809.md
new file mode 100644
index 00000000000..f084a135baa
--- /dev/null
+++ b/.werks/16809.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# check_by_ssh: Crashed in 2.3 beta if 'timeout' was configured
+
+key | value
+---------- | ---
+date | 2024-04-25T10:27:15+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16810.md b/.werks/16810.md
new file mode 100644
index 00000000000..d0b0d9fe0e6
--- /dev/null
+++ b/.werks/16810.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Fixed sed command in Werk #16274
+
+key | value
+---------- | ---
+date | 2024-04-25T12:03:09+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+
diff --git a/.werks/16811.md b/.werks/16811.md
new file mode 100644
index 00000000000..c128294bd0e
--- /dev/null
+++ b/.werks/16811.md
@@ -0,0 +1,16 @@
+[//]: # (werk v2)
+# Performance regression during config generation in 2.3.0b6
+
+key | value
+---------- | ---
+date | 2024-04-26T08:27:28+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+This fixes a performance regression introduced in Checkmk version 2.3.0b6.
+The introduced issue slowed config generation down in proportion to the
+number of monitored hosts.
diff --git a/.werks/16812.md b/.werks/16812.md
new file mode 100644
index 00000000000..2d0c8808c46
--- /dev/null
+++ b/.werks/16812.md
@@ -0,0 +1,13 @@
+[//]: # (werk v2)
+# Simulation mode: Critical discovery service for SNMP hosts
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-05-02T08:43:35+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
diff --git a/.werks/16814.md b/.werks/16814.md
new file mode 100644
index 00000000000..ac84a45dce6
--- /dev/null
+++ b/.werks/16814.md
@@ -0,0 +1,18 @@
+[//]: # (werk v2)
+# Broken password store in case of partial config generation
+
+key | value
+---------- | ---
+date | 2024-05-08T14:00:51+00:00
+version | 2.4.0b1
+class | fix
+edition | cee
+component | core
+level | 1
+compatible | yes
+
+This fixes a regression in Checkmk 2.3.0 and 2.3.0p1.
+
+In case of a partial configuration update (changes that only affect individual hosts) the password store would break.
+
+Affected users can change an arbitrary ruleset to provoke a regeneration of the full core config.
diff --git a/.werks/16826.md b/.werks/16826.md
new file mode 100644
index 00000000000..b28401ba98e
--- /dev/null
+++ b/.werks/16826.md
@@ -0,0 +1,19 @@
+[//]: # (werk v2)
+# cisco_temperature: Fix some CISCO-ENVMON-MIB sensors not getting discovered
+
+key | value
+---------- | ---
+compatible | yes
+version | 2.4.0b1
+date | 2024-05-02T13:48:44+00:00
+level | 1
+class | fix
+component | checks
+edition | cre
+
+There was a bug in how temperature sensors from Cisco devices using
+CISCO-ENVMON-MIB were parsed, which stopped them from being
+discovered.
+
+This werk fixes the bug, allowing all Cisco temperature sensors using
+that MIB to be discovered.
\ No newline at end of file
diff --git a/.werks/16829.md b/.werks/16829.md
new file mode 100644
index 00000000000..f2e0674002d
--- /dev/null
+++ b/.werks/16829.md
@@ -0,0 +1,14 @@
+[//]: # (werk v2)
+# Notify users on account security changes
+
+key | value
+---------- | ---
+date | 2024-04-26T12:20:30+00:00
+version | 2.4.0b1
+class | feature
+edition | cre
+component | wato
+level | 1
+compatible | yes
+
+Checkmk will now notify users about security changes to their accounts within
+Checkmk. By default, users will be emailed if this option is configured;
+otherwise, users will be notified via the internal user messaging system.
+Notifications within Checkmk cannot be deleted by any user; however, the
+display duration of these notifications can be configured. The default is
+7 days, with a minimum of 15 minutes.
diff --git a/.werks/16834.md b/.werks/16834.md
new file mode 100644
index 00000000000..c30133ce65d
--- /dev/null
+++ b/.werks/16834.md
@@ -0,0 +1,20 @@
+[//]: # (werk v2)
+# Windows agent internal users are permanent now
+
+key | value
+---------- | ---
+date | 2024-05-07T09:18:50+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously, the Windows agent service deleted internal users
+created to execute plugins in group mode. As a result, the
+permission list for certain files may look strange over time,
+filled with UUIDs of non-existent users.
+
+With this release, the problem has been resolved. The agent
+will create or reuse an existing user and will not delete it.
diff --git a/.werks/16835.md b/.werks/16835.md
new file mode 100644
index 00000000000..48c4caa232f
--- /dev/null
+++ b/.werks/16835.md
@@ -0,0 +1,17 @@
+[//]: # (werk v2)
+# Correctly disable async mode in the mk-sql plugin config
+
+key | value
+---------- | ---
+date | 2024-05-07T14:56:50+00:00
+version | 2.4.0b1
+class | fix
+edition | cre
+component | checks
+level | 1
+compatible | yes
+
+Previously, it was not possible to disable async
+mode for sections that are async by default.
+
+With this release, the problem has been fixed.
diff --git a/.werks/first_free b/.werks/first_free
index 4036f9309f1..19991f4aabf 100644
--- a/.werks/first_free
+++ b/.werks/first_free
@@ -1 +1 @@
-16363
+16874
diff --git a/.yamllint.yml b/.yamllint.yml
new file mode 100644
index 00000000000..67b26aa62dc
--- /dev/null
+++ b/.yamllint.yml
@@ -0,0 +1,6 @@
+---
+extends: default
+
+rules:
+ line-length:
+ max: 100
diff --git a/Makefile b/Makefile
index 3fd2129d9f5..77acac1c700 100644
--- a/Makefile
+++ b/Makefile
@@ -10,20 +10,10 @@ DIST_ARCHIVE := check-mk-$(EDITION)-$(OMD_VERSION).tar.gz
TAROPTS := --owner=root --group=root --exclude=.svn --exclude=*~ \
--exclude=.gitignore --exclude=*.swp --exclude=.f12 \
--exclude=__pycache__ --exclude=*.pyc
-ARTIFACT_STORAGE := https://artifacts.lan.tribe29.com
# TODO: Prefixing the command with the environment variable breaks xargs usage below!
PIPENV := PIPENV_PYPI_MIRROR=$(PIPENV_PYPI_MIRROR) scripts/run-pipenv
BLACK := scripts/run-black
-JAVASCRIPT_SOURCES := $(filter-out %_min.js, \
- $(wildcard \
- $(foreach subdir,* */* */*/* */*/*/* */*/*/*/*,web/htdocs/js/$(subdir).[jt]s)))
-
-SCSS_SOURCES := $(wildcard $(foreach subdir,* */* */*/*,web/htdocs/themes/$(subdir)/*.scss))
-
-
-WEBPACK_MODE ?= production
-
OPENAPI_SPEC := web/htdocs/openapi/checkmk.yaml
LOCK_FD := 200
@@ -39,11 +29,11 @@ endif
CI ?= false
.PHONY: announcement all build check-setup \
- clean css dist documentation \
+ clean dist docker-context-clean documentation \
format format-c test-format-c format-python format-shell \
- format-js help install mrproper mrclean \
+ help install mrproper mrclean \
packages setup setversion version openapi \
- protobuf-files
+ protobuf-files frontend-vue
help:
@echo "setup --> Prepare system for development and building"
@@ -94,7 +84,7 @@ $(SOURCE_BUILT_OHM) $(SOURCE_BUILT_WINDOWS):
# is currently not used by most distros
# Would also use --exclude-vcs, but this is also not available
# And --transform is also missing ...
-dist: $(SOURCE_BUILT_AGENTS) $(SOURCE_BUILT_AGENT_UPDATER) protobuf-files $(JAVASCRIPT_MINI) $(THEME_RESOURCES)
+dist: $(SOURCE_BUILT_AGENTS) $(SOURCE_BUILT_AGENT_UPDATER) protobuf-files cmk-frontend frontend-vue
$(MAKE) -C agents/plugins
set -e -o pipefail ; EXCLUDES= ; \
if [ -d .git ]; then \
@@ -105,7 +95,7 @@ dist: $(SOURCE_BUILT_AGENTS) $(SOURCE_BUILT_AGENT_UPDATER) protobuf-files $(JAVA
fi ; \
done ; \
else \
- for F in $(DIST_ARCHIVE) enterprise/agents/plugins/{build,build-32,src} enterprise/agents/plugins/{build,build-32,src} enterprise/agents/winbuild; do \
+ for F in $(DIST_ARCHIVE) non-free/cmk-update-agent/{build,build-32,src} non-free/cmk-update-agent/{build,build-32,src} enterprise/agents/winbuild; do \
EXCLUDES+=" --exclude $$F" ; \
done ; \
fi ; \
@@ -130,6 +120,12 @@ dist: $(SOURCE_BUILT_AGENTS) $(SOURCE_BUILT_AGENT_UPDATER) protobuf-files $(JAVA
check-mk-$(EDITION)-$(OMD_VERSION)
rm -rf check-mk-$(EDITION)-$(OMD_VERSION)
+cmk-frontend:
+ cd packages/cmk-frontend && ENTERPRISE=$(ENTERPRISE) ./run --setup-environment --all
+
+frontend-vue:
+ cd packages/cmk-frontend-vue && ./run
+
announcement:
mkdir -p $(CHECK_MK_ANNOUNCE_FOLDER)
PYTHONPATH=${PYTHONPATH}:$(REPO_PATH) $(PIPENV) run python -m cmk.utils.werks announce .werks $(VERSION) --format=md > $(CHECK_MK_ANNOUNCE_MD)
@@ -157,7 +153,7 @@ setversion:
$(MAKE) -C agents NEW_VERSION=$(NEW_VERSION) setversion
sed -i 's/^ARG CMK_VERSION=.*$$/ARG CMK_VERSION="$(NEW_VERSION)"/g' docker_image/Dockerfile
ifeq ($(ENTERPRISE),yes)
- sed -i 's/^__version__ = ".*/__version__ = "$(NEW_VERSION)"/' enterprise/agents/plugins/cmk_update_agent.py
+ sed -i 's/^__version__ = ".*/__version__ = "$(NEW_VERSION)"/' non-free/cmk-update-agent/cmk_update_agent.py
sed -i 's/^VERSION = ".*/VERSION = "$(NEW_VERSION)"/' omd/packages/enterprise/bin/cmcdump
sed -i 's/^set(CMK_VERSION .*)/set(CMK_VERSION ${NEW_VERSION})/' packages/cmc/CMakeLists.txt
endif
@@ -174,60 +170,6 @@ openapi-clean:
openapi: $(OPENAPI_SPEC)
-# TODO: The --unsafe-perm was added because the CI executes this as root during
-# tests and building versions. Once we have the then build system this should not
-# be necessary anymore.
-#
-# NOTE 1: What we actually want are grouped targets, but this would require GNU
-# make >= 4.3, so we use the common workaround of an intermediate target.
-#
-# NOTE 2: NPM people have a totally braindead idea about reproducible builds
-# which almost all other people consider a bug, so we have to touch our target
-# files. Read https://github.com/npm/npm/issues/20439 and be amazed...
-#
-# NOTE 3: NPM sometimes terminates with a very unhelpful "npm ERR! cb() never
-# called!" message, where the underlying reason seems to be quite obscure, see
-# https://npm.community/t/crash-npm-err-cb-never-called/858.
-#
-# NOTE 4: The sed call is to get the same "resolved" entries independent of the
-# used registry. The resolved entry is only a hint for npm.
-.INTERMEDIATE: .ran-npm
-node_modules/.bin/webpack: .ran-npm
-node_modules/.bin/prettier: .ran-npm
-.ran-npm: package.json package-lock.json
- @echo "npm version: $$(npm --version)"
- npm --version | grep "^$(NPM_VERSION)\." >/dev/null 2>&1
- @echo "node version: $$(node --version)"
- node --version | grep "^v$(NODEJS_VERSION)\." >/dev/null 2>&1
- @echo "open file descriptor limit (soft): $$(ulimit -Sn)"
- @echo "open file descriptor limit (hard): $$(ulimit -Hn)"
- @if curl --silent --output /dev/null --head '${ARTIFACT_STORAGE}/#browse/browse:npm-proxy'; then \
- REGISTRY=--registry=${ARTIFACT_STORAGE}/repository/npm-proxy/ ; \
- export SASS_BINARY_SITE='${ARTIFACT_STORAGE}/repository/archives/'; \
- echo "Installing from local registry ${ARTIFACT_STORAGE}" ; \
- else \
- REGISTRY= ; \
- echo "Installing from public registry" ; \
- fi ; \
- npm ci --yes --audit=false --unsafe-perm $$REGISTRY
- sed -i 's#"resolved": "https://artifacts.lan.tribe29.com/repository/npm-proxy/#"resolved": "https://registry.npmjs.org/#g' package-lock.json
- touch node_modules/.bin/webpack node_modules/.bin/prettier
-
-# NOTE 1: Match anything patterns % cannot be used in intermediates. Therefore, we
-# list all targets separately.
-#
-# NOTE 2: For the touch command refer to the notes above.
-#
-# NOTE 3: The cma_facelift.scss target is used to generate a css file for the virtual
-# appliance. It is called from the cma git's makefile and the built css file is moved
-# to ~/git/cma/skel/usr/share/cma/webconf/htdocs/
-.INTERMEDIATE: .ran-webpack
-$(JAVASCRIPT_MINI): .ran-webpack
-$(THEME_CSS_FILES): .ran-webpack
-.ran-webpack: node_modules/.bin/webpack webpack.config.js postcss.config.js $(JAVASCRIPT_SOURCES) $(SCSS_SOURCES)
- WEBPACK_MODE=$(WEBPACK_MODE) ENTERPRISE=$(ENTERPRISE) node_modules/.bin/webpack --mode=$(WEBPACK_MODE:quick=development)
- touch $(JAVASCRIPT_MINI) $(THEME_CSS_FILES)
-
# TODO(sp) The target below is not correct, we should not e.g. remove any stuff
# which is needed to run configure, this should live in a separate target. In
# fact, we should really clean up all this cleaning-chaos and finally follow the
@@ -237,10 +179,7 @@ clean:
$(MAKE) -C omd clean
rm -rf *.rpm *.deb *.exe \
*~ counters autochecks \
- precompiled cache web/htdocs/js/*_min.js \
- web/htdocs/themes/*/theme.css announce*
-
-css: .ran-webpack
+ precompiled cache announce*
EXCLUDE_PROPER= \
--exclude="**/.vscode" \
@@ -257,12 +196,12 @@ EXCLUDE_CLEAN=$(EXCLUDE_PROPER) \
# The list of files and folders to be protected from remove after "buildclean" is called
# Rust dirs are kept due to heavy load when compiled: .cargo, controller
AGENT_CTL_TARGET_PATH=packages/cmk-agent-ctl/target
-CHECK_SQL_TARGET_PATH=packages/check-sql/target
+MK_SQL_TARGET_PATH=packages/mk-sql/target
EXCLUDE_BUILD_CLEAN=$(EXCLUDE_CLEAN) \
--exclude="doc/plugin-api/build" \
--exclude=".cargo" \
--exclude=$(AGENT_CTL_TARGET_PATH) \
- --exclude=$(CHECK_SQL_TARGET_PATH) \
+ --exclude=$(MK_SQL_TARGET_PATH) \
--exclude="agents/plugins/*_2.py" \
--exclude="agents/plugins/*.py.checksum"
@@ -279,73 +218,21 @@ buildclean:
git clean -d --force -x $(EXCLUDE_BUILD_CLEAN)
+# This target cleans up everything that may have been built previously in the same working directory
+# within a specific docker image and would be *falsely* re-used in another (incompatible) docker image.
+# One example:
+# - a python-ldap wheel has been built in the docker image ubuntu:20.04
+# - it includes a shared object file linked against the system's libldap in version 2.4
+# - another docker image, e.g. ubuntu:22.04, does not provide libldap in that version (but in version 2.5):
+# ldd .venv/lib/python3.12/site-packages/_ldap.cpython-312-x86_64-linux-gnu.so | grep "ldap"
+# libldap_r-2.4.so.2 => not found
+# TODO: The list of to-be-cleaned artifacts is by no means complete. Add more as soon as we know about them.
+docker-context-clean:
+ rm -rf .cache .venv
+
setup:
-# librrd-dev is still needed by the python rrd package we build in our virtual environment
- sudo apt-get install \
- build-essential \
- clang-$(CLANG_VERSION) \
- clang-format-$(CLANG_VERSION) \
- clang-tidy-$(CLANG_VERSION) \
- clang-tools-$(CLANG_VERSION) \
- clangd-$(CLANG_VERSION) \
- cmake \
- curl \
- direnv \
- doxygen \
- figlet \
- gawk \
- git \
- ksh \
- libclang-$(CLANG_VERSION)-dev \
- libjpeg-dev \
- libkrb5-dev \
- libldap2-dev \
- libmariadb-dev-compat \
- libpango1.0-dev \
- libpcap-dev \
- librrd-dev \
- libsasl2-dev \
- libsqlite3-dev \
- libtool-bin \
- libxml2-dev \
- libreadline-dev \
- libxml2-dev \
- libxslt-dev \
- libpq-dev \
- libreadline-dev \
- lld-$(CLANG_VERSION) \
- lldb-$(CLANG_VERSION) \
- musl-tools \
- p7zip-full \
- patchelf \
- python3-pip \
- python3-venv \
- shellcheck \
- valgrind \
- zlib1g-dev
- if type pyenv >/dev/null 2>&1 && pyenv shims --short | grep '^pipenv$$'; then \
- CMD="pyenv exec" ; \
- else \
- CMD="" ; \
- fi ; \
- $$CMD pip3 install --user --upgrade \
- pip \
- pipenv=="$(PIPENV_VERSION)" \
- virtualenv=="$(VIRTUALENV_VERSION)" \
- wheel
- if ! type rustup >/dev/null 2>&1; then \
- curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh; \
- source $$HOME/.cargo/env; \
- fi ; \
- rustup target add x86_64-unknown-linux-musl
- $(MAKE) -C web setup
- curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
- sudo add-apt-repository \
- "deb [arch=amd64] https://download.docker.com/linux/ubuntu $$(lsb_release -cs) stable"
- sudo apt-get update
- sudo apt-get install docker-ce
+ sudo buildscripts/infrastructure/build-nodes/scripts/install-development.sh --profile all
sudo bash -c 'usermod -a -G docker $$SUDO_USER'
- $(MAKE) -C locale setup
$(MAKE) check-setup
linesofcode:
@@ -353,10 +240,10 @@ linesofcode:
protobuf-files:
ifeq ($(ENTERPRISE),yes)
- $(MAKE) -C enterprise protobuf-files
+ $(MAKE) -C non-free/cmc-protocols protobuf-files
endif
-format: format-python format-c format-shell format-js format-css format-bazel
+format: format-python format-c format-shell format-bazel
format-c:
packages/livestatus/run --format
@@ -390,12 +277,6 @@ format-shell:
what-gerrit-makes:
$(MAKE) -C tests what-gerrit-makes
-format-js:
- scripts/run-prettier --no-color --ignore-path ./.prettierignore --write "{{enterprise/web,web}/htdocs/js/**/,}*.{js,ts,vue}"
-
-format-css:
- scripts/run-prettier --no-color --ignore-path ./.prettierignore --write "web/htdocs/themes/**/*.scss"
-
format-bazel:
scripts/run-buildifier --lint=fix --mode=fix
diff --git a/Pipfile b/Pipfile
index 7741f7fbbe3..37a748463c3 100644
--- a/Pipfile
+++ b/Pipfile
@@ -7,12 +7,14 @@ name = "pypi"
3to2 = "*" # used for converting agent plugins from py3 to 2
astroid = "*" # used by testlib.pylint_checker_localization
autoflake = "*" # used to remove unused import statements
+azure-mgmt-resource = "*" # used by the release process to publish the azure image to the marketplace
+azure-mgmt-compute = "*" # used by the release process to add a new image version
bandit = "*" # used by test/Makefile's test-bandit target
"beautifulsoup4" = "*" # used by the GUI crawler and various tests
-boto3-stubs = {extras = ["logs"], version = "==1.26.105"} # used for type checking, keep version in sync with boto3
-botocore-stubs = "==1.29.105" # used for type checking, keep version in sync with botocore
+boto3-stubs = {extras = ["logs"], version = "==1.34.15"} # used for type checking, keep version in sync with boto3
+botocore-stubs = "==1.34.15" # used for type checking, keep version in sync with botocore
bson = "*" # used by test_mk_mongodb unit test
-checkmk-dev-tools = "~=0.1.42" # provides tooling for build artifacts
+checkmk-dev-tools = "~=0.1.60" # provides tooling for build artifacts
compiledb = "*" # used by the Livestatus/CMC Makefiles for building compile_command.json
devpi-client = "*" # Managing the devpi server
dill = "*"
@@ -21,11 +23,11 @@ dockerpty = "*" # used by dockerized tests for opening debug shells
fakeredis = {version = "*", extras = ["lua"]} # used for testing code which uses Redis (lua is needed for faking features like "lock")
fastapi = "*" # needed to run fake idp for cse tests
flake8 = "*"
-freezegun = "==1.2.2" # used by various unit tests, pinned because of 36 unit test failures (to be analyzed, probably https://github.com/spulec/freezegun/issues/521, hopefully fixed when https://github.com/spulec/freezegun/pull/525 is landed)
hypothesis = "*" # used by unit tests
-hypothesis-jsonschema = "==0.22.1" # pinned for: AttributeError: module 'jsonschema.exceptions' has no attribute '_RefResolutionError'. Did you mean: 'RefResolutionError'?
+hypothesis-jsonschema = "*" # used by unit tests
isort = "*" # used as a plugin for editors
jsonschema = "*"
+msrest = "*"
mypy = "*" # used by test/static/Makefile's test-mypy target
mypy-protobuf = "*" # for mypy
networkx = "*" # needed for py-import-cycles
@@ -34,34 +36,34 @@ pipfile = "*" # for generating import names during int tests
playwright = "*" # used for in-browser testing
polyfactory = "*" # used for generating mock data for unit tests
pre-commit = "*" # used to fix / find issues before committing changes
-preprocess = "*" # implicit dependency of pytest-testmon, might be removed when testmon is updated
py-import-cycles = "*" # used in tests/Makefile
pylint = "*" # used by test/Makefile's test-pylint target
pylint-pydantic = "*"
pyls-isort = "*" # isort plugin for python-lsp-server
pylsp-mypy = "*" # mypy plugin for python-lsp-server
pymongo = "*" # used by mk_mongodb agent plugin
-pytest = "*" # used by various test/Makefile targets
+pytest = "*"
pytest-cov = "*" # used (indirectly) by test/Makefile's test-unit-coverage-html target, see comment there
pytest-html = "*" # used to generate HTML reports for test suites
pytest-mock = "*" # used by quite a few unit/integration tests via the mocker fixture
-pytest-profiling = "*" # used to analyze test run performance
pytest-random-order = "*" # used to test resiliency
pytest-repeat = "*" # used to test resiliency
pytest-testmon = "*" # used for pre-commit checking via .pre-commit-config.yaml
pytest-xdist = "*" # used to limit number of procs in e2e tests
+pytest-check = "*" # used to queue failed assertions and continue test execution
python-lsp-black = "*" # black plugin for python-lsp-server
python-lsp-server = "*" # Note: There are extras, but the default seems sensible.
responses = "*" # used for unit tests
ruff = "*"
schemathesis = "*" # used for openapi tests
semver = "*"
-sphinx = "==6.2.1" # used for the plugin API documentation, pinned for now see https://github.com/kivy/kivy/issues/8230)
+sphinx = "*" # used for the plugin API documentation
sphinx-autodoc-typehints = "*" # used for the plugin API documentation
sphinx-rtd-theme = "*" # used for the plugin API documentation
sphinxcontrib-plantuml = "*" # used for the API and architecture
sphinxcontrib-spelling = "*"
telnetlib3 = "*" # used by Windows Agent tests
+time-machine = "*" # used by various unit tests. Substitute for freezegun
twine = "*" # used to upload packages to internal PyPi mirror
types-beautifulsoup4 = "*" # used for type checking
types-docutils = "*" # used for type checking
@@ -81,7 +83,6 @@ types-pymysql = "*" # used for type checking
types-pyopenssl = "*" # used for type checking
types-pysaml2 = "*" # used for type checking
types-python-dateutil = "*" # used for type checking
-types-pytz = "*" # used for type checking
types-pyyaml = "*" # used for type checking
types-redis = "*" # used for type checking
types-requests = "*" # used for type checking
@@ -97,84 +98,79 @@ webtest = "*" # used by WSGI based tests
[packages]
setuptools-scm = "==4.1.2" # needed by various setup.py
six = "==1.16.0" # direct dependency + needed by python-dateutil, vcrpy, python-active-directory
-python-dateutil = "==2.8.2" # direct dependency
+python-dateutil = "~=2.9.0" # direct dependency
pyyaml = "==6.0.1" # needed by vcrpy
wrapt = "==1.15.0" # needed by vcrpy
-yarl = "==1.8.2" # needed by vcrpy
+yarl = "==1.9.4" # needed by vcrpy
multidict = "==6.0.4" # needed by yarl
idna = "==3.4" # needed by yarl
-vcrpy = "==4.2.1" # used by various unit tests to mock HTTP transactions in some special agents (only)
+vcrpy = "==6.0.1" # used by various unit tests to mock HTTP transactions in some special agents (only)
cryptography = "*"
bcrypt = "*"
paramiko = "*"
pyasn1 = "==0.4.8" # needed by pysnmp
pyasn1-modules = "==0.2.8" # needed by kubernetes
ply = "==3.11" # needed by pysmi, python-active-directory
-pysmi-lextudio = "==1.1.13" # needed by pysnmp
-pysnmp-lextudio = "==5.0.29" # needed by Event Console
-snmpsim-lextudio = "*" # needed by pysnmp integration tests
-urllib3 = "~=1.26.15" # needed by requests
-pyopenssl = "~=23.0"
+pysmi-lextudio = "==1.4.3" # needed by pysnmp
+pysnmp-lextudio = "==6.1.2" # needed by Event Console
+snmpsim-lextudio = "==1.1.0" # needed by pysnmp integration tests
+urllib3 = "*" # needed by requests
+pyopenssl = "*"
pbr = "==5.11.1" # needed by jira
pyghmi = "==1.5.60" # needed by base for IPMI
-requests = "~=2.28"
-pykerberos = "==1.2.4" # needed by check_bi_aggr
-requests-kerberos = "==0.12.0" # needed by check_bi_aggr, versions >=0.13.0 depend on pyspnego, which pulls in gssapi, which in turn fails on SLES because of https://github.com/pythongssapi/python-gssapi/issues/212
+requests = "~=2.31"
+requests-kerberos = "==0.14.0" # needed by check_bi_aggr
markupsafe = "==2.1.2" # needed by jinja2
itsdangerous = "==2.1.2" # needed by flask
-jinja2 = "==3.1.2" # needed by flask
-more-itertools = "==9.1.0" # needed by connexion through zipp, pytest
-zipp = "==3.15.0" # needed by importlib-metadata
+jinja2 = "==3.1.3" # needed by flask
+more-itertools = "==10.2.0" # needed by connexion through zipp, pytest
+zipp = "==3.18.1" # needed by importlib-metadata
pyrsistent = "==0.19.3" # needed by jsonschema
-werkzeug = "==2.2.3" # direct dependency and needed by flask
-flask = "~=2.2" # direct dependency
-pytz = "==2023.3" # direct dependency
-openapi-spec-validator = "==0.5.6" # direct dependency
-psutil = "==5.9.4" # needed for omdlib
+werkzeug = "==3.0.2" # direct dependency and needed by flask
+flask = "==3.0.3" # direct dependency
+openapi-spec-validator = "==0.7.1" # direct dependency
+psutil = "==5.9.8" # needed for omdlib
oauthlib = "==3.2.2"
requests-oauthlib = "==1.3.1"
-docutils = "*" # needed by boto3 (aws)
-jira = "==3.5.0" # needed by jira
-adal = "==1.2.7" # needed by agent_azure
-pillow = "~=10.0"
+jira = "==3.6.0" # needed by jira
+msal = "==1.26.0" # needed by agent_azure
+pillow = "~=10.0" # TODO(kb): unpin (temporary pinned)
python-ldap = "==3.4.3" # needed by GUI (User sync), python-active-directory
dicttoxml = "==1.7.16" # needed by GUI (API XML format)
cython = "==0.29.34" # needed by numpy, change also in omd/packages/python3-modules/build-python3-modules.bzl
-numpy = "==1.24.2" # needed by GUI (metrics)
-reportlab = "~=3.6.12" # needed by GUI (reporting)
-pypdf3 = "==1.0.6" # needed by GUI (reporting)
+numpy = "==1.26.4" # needed by GUI (metrics)
+reportlab = "==4.1.0" # needed by GUI (reporting)
+pypdf = "==4.2.0" # needed by GUI (reporting)
roman = "==4.0" # needed by GUI (reporting)
google-auth = "==2.17.1" # needed by kubernetes
websocket-client = "==1.5.1" # needed by kubernetes
kubernetes = "==21.7.0" # needed by kubernetes
jmespath = "==1.0.1" # needed by boto3 (aws)
-botocore = "==1.29.105" # needed by boto3 (aws), keep version in sync with botocore-stubs
-s3transfer = "==0.6.0" # needed by boto3 (aws)
-boto3 = "==1.26.105" # needed by boto3 (aws), keep version in sync with boto3-stubs
+botocore = "==1.34.15" # needed by boto3 (aws), keep version in sync with botocore-stubs
+s3transfer = "==0.10.0" # needed by boto3 (aws)
+boto3 = "==1.34.15" # needed by boto3 (aws), keep version in sync with boto3-stubs
python-snap7 = "==1.3" # needed by Siemens PLC special agent
-pymssql = "==2.2.8" # needed by check_sql active check
+pymssql = "==2.2.11" # needed by check_sql active check
pymysql = "==1.0.3" # needed by check_sql active check
psycopg2-binary = "==2.9.6" # needed by check_sql active check
-apispec = "==5.2.2" # direct dependency
-marshmallow = "==3.19.0" # direct dependency
-marshmallow-oneofschema = "==3.0.1" # direct dependency
+apispec = "==6.4.0" # direct dependency
+marshmallow = "==3.21.1" # direct dependency
+marshmallow-oneofschema = "==3.1.1" # direct dependency
apispec-oneofschema = "==3.0.0" # direct dependency
pydantic = "==2.5.2"
pydantic_core = "==2.14.5" # used by pydantic and by us
-annotated-types = "==0.5.0" # provides validators for pydantic2
+annotated-types = "==0.6.0" # provides validators for pydantic2
paho-mqtt = "==1.6.1" # needed for MQTT special agent
python-active-directory = "==2.0.1"
rrdtool = "==0.1.16" # Actually we ship the binding which comes with rrdtool itself...
-docstring-parser = "==0.15" # direct dependency
+docstring-parser = "==0.16" # direct dependency
pyprof2calltree = "==1.4.5" # used for GUI/API profiling
-pyparsing = "==3.0.9" # direct dependency
-ordered-set = "==4.1.0" # needed by deepdiff
-deepdiff = "==6.3.0" # used for recording setup audit log
+pyparsing = "==3.1.2" # direct dependency
redis = "==4.5.4" # needed by GUI (caching)
fido2 = "*" # used for GUI 2fa webauthn
-tenacity = "==8.2.2" # needed by opsgenie-sdk
+tenacity = "==8.2.3" # needed by opsgenie-sdk
opsgenie-sdk = "==2.1.5" # needed by opsgenie_issues
-protobuf = "==3.20.1" # For development, Runtime modules are shipped by omd/packages/protobuf
+protobuf = "*" # For development, Runtime modules are shipped by omd/packages/protobuf
cmk-agent-based = {editable = true, path = "./packages/cmk-agent-based"}
cmk-agent-receiver = {editable = true, path = "./packages/cmk-agent-receiver"}
cmk-graphing = {editable = true, path = "./packages/cmk-graphing"}
@@ -184,15 +180,14 @@ cmk-server-side-calls = {editable = true, path = "./packages/cmk-server-side-cal
cmk-werks = {editable = true, path = "./packages/cmk-werks"}
cmk-livestatus-client = {editable = true, path = "./packages/cmk-livestatus-client"}
pysmb = "==1.2.9.1" # used by SMB share special agent
-google-cloud-monitoring = "~=2.11" # used by the gcp special agent
-google-cloud-asset = "~=3.14" # used by the gcp special agent
-exchangelib = "==4.9.0" # used by active checks for mail
-lxml = "==4.9.2" # used via beautifulsoup4 as a parser and in the agent_netapp special agent
-grpcio = "~=1.53"
-google-api-python-client = "~=2.83" # use rest api to query big query for gcp cost monitoring
-setproctitle = "==1.3.2" # used by mkbench/dcd/liveproxyd
-black = {version = "==23.3.0", extras = ["d"]} # used in our CI, the LSP-integration and in the OpenAPI part, "d" for broken black packaging only, see https://github.com/psf/black/issues/690
-meraki = "==1.30.0" # used by special agent Cisco Meraki
+google-cloud-monitoring = "*" # used by the gcp special agent
+google-cloud-asset = "*" # used by the gcp special agent
+exchangelib = "==5.2.1" # used by active checks for mail
+lxml = "==4.9.3" # used via beautifulsoup4 as a parser and in the agent_netapp special agent
+grpcio = "*"
+google-api-python-client = "*" # use rest api to query big query for gcp cost monitoring
+setproctitle = "==1.3.3" # used by mkbench/dcd/liveproxyd
+meraki = "==1.41.0" # used by special agent Cisco Meraki
feedparser = "==6.0.10" # used by special agent Microsoft Azure Status
pysaml2 = "~=7.4" # used by SSO via SAML2.0
azure-storage-blob = "==12.15.0" # used for backups to Azure Blob Storage
@@ -203,6 +198,8 @@ icalendar = "==5.0.7" # used for importing .ical files
recurring_ical_events = "==2.0.2" # used for parsing recurring events in icalendar objects
typing-extensions = "*"
xmltodict = "==0.13.0" # used for parsing robotmk test results
+netapp-ontap = "==9.14.1.0" # used by netapp special agent
+redfish = "~= 3.2.2" # used for management board monitoring via Redfish, used by optional MKP
[requires]
-python_version = "3.11"
+python_version = "3.12"
diff --git a/Pipfile.lock b/Pipfile.lock
index c6abd14ff2d..92498b4bf33 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,11 +1,11 @@
{
"_meta": {
"hash": {
- "sha256": "e168b1088638b67b99c84fb0d73196d8b973fcbb50f1f27fbb12641623f79b9d"
+ "sha256": "65001a05a0c5af63e363395f0e0d56634f5b88a9f3023661f50e41377f30a1b9"
},
"pipfile-spec": 6,
"requires": {
- "python_version": "3.11"
+ "python_version": "3.12"
},
"sources": [
{
@@ -16,95 +16,87 @@
]
},
"default": {
- "adal": {
- "hashes": [
- "sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d",
- "sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1"
- ],
- "index": "pypi",
- "version": "==1.2.7"
- },
"aiohttp": {
"hashes": [
- "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f",
- "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c",
- "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af",
- "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4",
- "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a",
- "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489",
- "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213",
- "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01",
- "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5",
- "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361",
- "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26",
- "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0",
- "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4",
- "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8",
- "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1",
- "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7",
- "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6",
- "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a",
- "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd",
- "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4",
- "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499",
- "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183",
- "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544",
- "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821",
- "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501",
- "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f",
- "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe",
- "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f",
- "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672",
- "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5",
- "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2",
- "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57",
- "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87",
- "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0",
- "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f",
- "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7",
- "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed",
- "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70",
- "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0",
- "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f",
- "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d",
- "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f",
- "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d",
- "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431",
- "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff",
- "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf",
- "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83",
- "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690",
- "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587",
- "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e",
- "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb",
- "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3",
- "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66",
- "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014",
- "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35",
- "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f",
- "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0",
- "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449",
- "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23",
- "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5",
- "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd",
- "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4",
- "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b",
- "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558",
- "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd",
- "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766",
- "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a",
- "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636",
- "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d",
- "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590",
- "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e",
- "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d",
- "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c",
- "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28",
- "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065",
- "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"
- ],
- "markers": "python_version >= '3.8'",
- "version": "==3.9.1"
+ "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8",
+ "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c",
+ "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475",
+ "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed",
+ "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf",
+ "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372",
+ "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81",
+ "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f",
+ "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1",
+ "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd",
+ "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a",
+ "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb",
+ "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46",
+ "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de",
+ "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78",
+ "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c",
+ "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771",
+ "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb",
+ "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430",
+ "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233",
+ "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156",
+ "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9",
+ "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59",
+ "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888",
+ "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c",
+ "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c",
+ "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da",
+ "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424",
+ "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2",
+ "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb",
+ "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8",
+ "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a",
+ "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10",
+ "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0",
+ "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09",
+ "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031",
+ "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4",
+ "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3",
+ "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa",
+ "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a",
+ "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe",
+ "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a",
+ "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2",
+ "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1",
+ "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323",
+ "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b",
+ "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b",
+ "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106",
+ "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac",
+ "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6",
+ "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832",
+ "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75",
+ "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6",
+ "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d",
+ "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72",
+ "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db",
+ "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a",
+ "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da",
+ "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678",
+ "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b",
+ "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24",
+ "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed",
+ "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f",
+ "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e",
+ "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58",
+ "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a",
+ "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342",
+ "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558",
+ "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2",
+ "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551",
+ "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595",
+ "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee",
+ "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11",
+ "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d",
+ "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7",
+ "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==3.9.5"
},
"aiosignal": {
"hashes": [
@@ -116,29 +108,29 @@
},
"annotated-types": {
"hashes": [
- "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802",
- "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"
+ "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43",
+ "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==0.5.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.6.0"
},
"anyio": {
"hashes": [
- "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780",
- "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"
+ "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8",
+ "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"
],
- "markers": "python_version >= '3.7'",
- "version": "==3.7.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==4.3.0"
},
"apispec": {
"hashes": [
- "sha256:6ea6542e1ebffe9fd95ba01ef3f51351eac6c200a974562c7473059b9cd20aa7",
- "sha256:f5f0d6b452c3e4a0e0922dce8815fac89dc4dbc758acef21fb9e01584d6602a5"
+ "sha256:00ac876f1968d8e1624400c8571d2a2887edd3d6aa9d1a130efaafbdb49e85d7",
+ "sha256:42b8a6833cf154c9dbd22d006b56bf9c49c972d32d24fe716fd734e0f6b739b8"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==5.2.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==6.4.0"
},
"apispec-oneofschema": {
"hashes": [
@@ -148,29 +140,21 @@
"index": "pypi",
"version": "==3.0.0"
},
- "async-timeout": {
- "hashes": [
- "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f",
- "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"
- ],
- "markers": "python_full_version <= '3.11.2'",
- "version": "==4.0.3"
- },
"attrs": {
"hashes": [
- "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04",
- "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"
+ "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30",
+ "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"
],
"markers": "python_version >= '3.7'",
- "version": "==23.1.0"
+ "version": "==23.2.0"
},
"azure-core": {
"hashes": [
- "sha256:13b485252ecd9384ae624894fe51cfa6220966207264c360beada239f88b738a",
- "sha256:604a005bce6a49ba661bb7b2be84a9b169047e52fcfcd0a4e4770affab4178f7"
+ "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f",
+ "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"
],
"markers": "python_version >= '3.7'",
- "version": "==1.29.6"
+ "version": "==1.30.1"
},
"azure-identity": {
"hashes": [
@@ -230,57 +214,31 @@
"markers": "python_version >= '3.7'",
"version": "==4.1.2"
},
- "black": {
- "extras": [
- "d"
- ],
- "hashes": [
- "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5",
- "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915",
- "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326",
- "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940",
- "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b",
- "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30",
- "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c",
- "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c",
- "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab",
- "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27",
- "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2",
- "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961",
- "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9",
- "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb",
- "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70",
- "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331",
- "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2",
- "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266",
- "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d",
- "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6",
- "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b",
- "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925",
- "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8",
- "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4",
- "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"
+ "blinker": {
+ "hashes": [
+ "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9",
+ "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"
],
- "markers": "python_version >= '3.7'",
- "version": "==23.3.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.7.0"
},
"boto3": {
"hashes": [
- "sha256:2914776e0138530ec6464d0e2f05b4aa18e9212ac920c48472f8a93650feaed2",
- "sha256:f4951f8162905b96fd045e32853ba8cf707042faac846a23910817c508ef27d7"
+ "sha256:2b74c58f475ff0dcf2f3637da9367a9465d29fad971ff5d8dc54ac39554e9022",
+ "sha256:f8f16c2d0ec1dca291857f1c138d5c30e01e40f653443cc2679e2f6ae71b05a6"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==1.26.105"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.34.15"
},
"botocore": {
"hashes": [
- "sha256:06a2838daad3f346cba5460d0d3deb198225b556ff9ca729798d787fadbdebde",
- "sha256:17c82391dfd6aaa8f96fbbb08cad2c2431ef3cda0ece89e6e6ba444c5eed45c2"
+ "sha256:16bcf871e67ef0177593f06e9e5bae4db51c9a9a2e953cb14feeb42d53441a85",
+ "sha256:c3c3404962a6d9d5e1634bd70ed53b8eff1ff17ee9d7a6240e9e8c94db48ad6f"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==1.29.105"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.34.15"
},
"cached-property": {
"hashes": [
@@ -291,19 +249,19 @@
},
"cachetools": {
"hashes": [
- "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2",
- "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"
+ "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945",
+ "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"
],
"markers": "python_version >= '3.7'",
- "version": "==5.3.2"
+ "version": "==5.3.3"
},
"certifi": {
"hashes": [
- "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1",
- "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"
+ "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f",
+ "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"
],
"markers": "python_version >= '3.6'",
- "version": "==2023.11.17"
+ "version": "==2024.2.2"
},
"cffi": {
"hashes": [
@@ -464,9 +422,17 @@
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956",
"sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"
],
- "markers": "python_version >= '3.8'",
+ "markers": "platform_python_implementation != 'PyPy'",
"version": "==1.16.0"
},
+ "chardet": {
+ "hashes": [
+ "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7",
+ "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"
+ ],
+ "markers": "python_version >= '3.7'",
+ "version": "==5.2.0"
+ },
"charset-normalizer": {
"hashes": [
"sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027",
@@ -605,33 +571,42 @@
},
"cryptography": {
"hashes": [
- "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960",
- "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a",
- "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc",
- "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a",
- "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf",
- "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1",
- "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39",
- "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406",
- "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a",
- "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a",
- "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c",
- "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be",
- "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15",
- "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2",
- "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d",
- "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157",
- "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003",
- "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248",
- "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a",
- "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec",
- "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309",
- "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7",
- "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==41.0.7"
+ "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee",
+ "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576",
+ "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d",
+ "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30",
+ "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413",
+ "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb",
+ "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da",
+ "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4",
+ "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd",
+ "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc",
+ "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8",
+ "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1",
+ "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc",
+ "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e",
+ "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8",
+ "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940",
+ "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400",
+ "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7",
+ "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16",
+ "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278",
+ "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74",
+ "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec",
+ "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1",
+ "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2",
+ "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c",
+ "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922",
+ "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a",
+ "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6",
+ "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1",
+ "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e",
+ "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac",
+ "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.7'",
+ "version": "==42.0.5"
},
"cython": {
"hashes": [
@@ -680,14 +655,13 @@
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.29.34"
},
- "deepdiff": {
+ "decorator": {
"hashes": [
- "sha256:15838bd1cbd046ce15ed0c41e837cd04aff6b3e169c5e06fca69d7aa11615ceb",
- "sha256:6a3bf1e7228ac5c71ca2ec43505ca0a743ff54ec77aa08d7db22de6bc7b2b644"
+ "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330",
+ "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"
],
- "index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==6.3.0"
+ "markers": "python_version >= '3.5'",
+ "version": "==5.1.1"
},
"defusedxml": {
"hashes": [
@@ -708,54 +682,45 @@
},
"dnspython": {
"hashes": [
- "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8",
- "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"
+ "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50",
+ "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"
],
- "markers": "python_version >= '3.8' and python_version < '4.0'",
- "version": "==2.4.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==2.6.1"
},
"docstring-parser": {
"hashes": [
- "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682",
- "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9"
+ "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e",
+ "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"
],
"index": "pypi",
"markers": "python_version >= '3.6' and python_version < '4.0'",
- "version": "==0.15"
- },
- "docutils": {
- "hashes": [
- "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6",
- "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==0.20.1"
+ "version": "==0.16"
},
"elementpath": {
"hashes": [
- "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55",
- "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"
+ "sha256:cda092281afe508ece1bf65373905b30196c9426f3730cfea46059e103a131bd",
+ "sha256:dfc4b8ca3d87966dcb0df40b5b6d04a98f053683271930fad9e7fa000924dfb2"
],
- "markers": "python_version >= '3.7'",
- "version": "==4.1.5"
+ "markers": "python_version >= '3.8'",
+ "version": "==4.4.0"
},
"exchangelib": {
"hashes": [
- "sha256:43d7d5ef4a18dcdfe6a28c2d534d842697b0302ef2569b5d0af498f77fc78661",
- "sha256:d6a1c3f7c55a42ff2ec33629128eeed09e87b09834e4006ed7a3c9931258f3e1"
+ "sha256:8457cfb7dd43fd441c68a2b34152eab0e73167829fc2c5f5c5c2c8f68d8f2da8",
+ "sha256:c436d395d6235936194ffd71efbec2131d0d99e49dd70028d9ae5d69e0e6ee50"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==4.9.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==5.2.1"
},
"fastapi": {
"hashes": [
- "sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e",
- "sha256:75a11f6bfb8fc4d2bec0bd710c2d5f2829659c0e8c0afd5560fdda6ce25ec653"
+ "sha256:239403f2c0a3dda07a9420f95157a7f014ddb2b770acdbc984f9bdf3ead7afdb",
+ "sha256:b53d673652da3b65e8cd787ad214ec0fe303cad00d2b529b86ce7db13f17518d"
],
- "markers": "python_version >= '3.7'",
- "version": "==0.103.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.110.2"
},
"feedparser": {
"hashes": [
@@ -768,21 +733,21 @@
},
"fido2": {
"hashes": [
- "sha256:6110d913106f76199201b32d262b2857562cc46ba1d0b9c51fbce30dc936c573",
- "sha256:a3b7d7d233dec3a4fa0d6178fc34d1cce17b820005a824f6ab96917a1e3be8d8"
+ "sha256:26100f226d12ced621ca6198528ce17edf67b78df4287aee1285fee3cd5aa9fc",
+ "sha256:6be34c0b9fe85e4911fd2d103cce7ae8ce2f064384a7a2a3bd970b3ef7702931"
],
"index": "pypi",
- "markers": "python_version >= '3.7' and python_version < '4.0'",
- "version": "==1.1.2"
+ "markers": "python_version >= '3.8' and python_version < '4.0'",
+ "version": "==1.1.3"
},
"flask": {
"hashes": [
- "sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf",
- "sha256:edee9b0a7ff26621bd5a8c10ff484ae28737a2410d99b0bb9a6850c7fb977aa0"
+ "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3",
+ "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==2.2.5"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.0.3"
},
"frozenlist": {
"hashes": [
@@ -877,31 +842,33 @@
},
"gitpython": {
"hashes": [
- "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4",
- "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"
+ "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c",
+ "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"
],
"markers": "python_version >= '3.7'",
- "version": "==3.1.40"
+ "version": "==3.1.43"
},
"google-api-core": {
"extras": [
"grpc"
],
"hashes": [
- "sha256:92d17123cfe399b5ef7e026c63babf978d8475e1ac877919eb7933e25dea2273",
- "sha256:e16c15a11789bc5a3457afb2818a3540a03f341e6e710d7f9bbf6cde2ef4a7c8"
+ "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6",
+ "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9",
+ "sha256:a816206facdae788ffa1df17df0aeee0b96ca675593d854ed90c6ba6213c675f",
+ "sha256:d785ba174593d89483b07116dd809ae7ac46f0173ab13c780be361618755ccc7"
],
"markers": "python_version >= '3.7'",
- "version": "==2.10.1"
+ "version": "==2.18.0"
},
"google-api-python-client": {
"hashes": [
- "sha256:3a45a53c031478d1c82c7162dd25c9a965247bca6bd438af0838a9d9b8219405",
- "sha256:b605adee2d09a843b97a59925757802904679e44e5599708cedb8939900dfbc7"
+ "sha256:bbb51b0fbccdf40e536c26341e372d7800f09afebb53103bbcc94e08f14b523b",
+ "sha256:d01c70c7840ec37888aa02b1aea5d9baba4c1701e268d1a0251640afd56e5e90"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==2.111.0"
+ "version": "==2.127.0"
},
"google-auth": {
"hashes": [
@@ -921,196 +888,171 @@
},
"google-cloud-access-context-manager": {
"hashes": [
- "sha256:0faaa40b2c2049de41cd1eb948f02c00aa2b6bcfa7e6d17d6a94f1561b1bee34",
- "sha256:1854bd56f7571f13f52af2734248239985cedeaa4c5e5f3220219912bd7c3bc3"
+ "sha256:7a23d2d08facb6f1d4b5456cce82144235575c6ccc6b6c6d9400de6cf23966d3",
+ "sha256:a5b424312c084b02b6f98c1ebfb6af28132fc01e5d719817fa499e78c87e04b7"
],
"markers": "python_version >= '3.7'",
- "version": "==0.1.14"
+ "version": "==0.2.0"
},
"google-cloud-asset": {
"hashes": [
- "sha256:2c7fb8b119fbbf492eb9b6c6943669c319b6f79a3f8d77677c25a1ea626d2684",
- "sha256:811338b4eab79d757a92804da81c70637392e7be84f209d58f3c0302c4b63a5b"
+ "sha256:1e9079e0e71ac070047c8952c36bb804b3fd24ed6b8824f5023dc25e8e66d052",
+ "sha256:b75d175b4742e5c0d1e9c2a752289c25d295fa39127318a680be030a2d622056"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==3.14.0"
+ "version": "==3.26.0"
},
"google-cloud-monitoring": {
"hashes": [
- "sha256:3d76463cc7abfd8e339b1d94b3c11facb60b9c5d6d805eb76431e60663cf334c",
- "sha256:cc8a4b118b56ce2566ad7dfab56926f35747ca28465dbac21e8f6e258704a16f"
+ "sha256:1b174e656a3bfd767c269bf2ba023b40e0a80a85e36ed0b75bb272be65e76904",
+ "sha256:e7b1c8758fc3563ffb9a347bc5172e2782f44c121bc80fc15283e289cff675bf"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==2.11.1"
+ "version": "==2.21.0"
},
"google-cloud-org-policy": {
"hashes": [
- "sha256:0a95b373fd71707ff4bbec518eceeebedee0ae34769511d65e04c47c9a09e108",
- "sha256:29866f9693ea1b2d33183499177cba32c159c9bf4cfbcf07198a783693c28926"
+ "sha256:01e824ff5747980ca650cbfdf0a6a5375d5acdbead39cbb1abe9a9d9afd67df4",
+ "sha256:dc12434f06c80e1b1c1c347cadc8cb40ffc9619ae4b5c5beb5c2bd585c95d97f"
],
"markers": "python_version >= '3.7'",
- "version": "==1.4.1"
+ "version": "==1.11.0"
},
"google-cloud-os-config": {
"hashes": [
- "sha256:6d326f5e16687a6abef85eb6ed096d30f8d4259b4978573a9f020f6bef9166d1",
- "sha256:a6bfc93daa38192e57ae67912e4ddfd5b09a0ea96a5e8756b27f9c4c4a11f8d3"
+ "sha256:a0a3aa5959803f5e1d2a96d50e5717d8a49171e4ea5444bf50694bb18b675870",
+ "sha256:f8ee31ac9c873acabcb68923cb780b844175e48eba17efb745739c0f82ada59d"
],
"markers": "python_version >= '3.7'",
- "version": "==1.12.2"
+ "version": "==1.17.3"
},
"googleapis-common-protos": {
"extras": [
"grpc"
],
"hashes": [
- "sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394",
- "sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417"
+ "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e",
+ "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"
],
"markers": "python_version >= '3.7'",
- "version": "==1.56.4"
+ "version": "==1.63.0"
},
"grpc-google-iam-v1": {
"hashes": [
- "sha256:312801ae848aeb8408c099ea372b96d253077e7851aae1a9e745df984f81f20c",
- "sha256:3f0ac2c940b9a855d7ce7e31fde28bddb0d9ac362d32d07c67148306931a0e30",
- "sha256:4e088344e27357974e2e29828ad58bc578cbda7d3062d47f9b114128b187f1f1",
- "sha256:9752dcb0394f8fe909f05dd37f1bb04afa11676315a2a20ea11059c466ade571"
+ "sha256:1a8e37e9661e792f9d2968ba70b04d3ee364fda6f002f76d04788f76c7cb4d1e",
+ "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89",
+ "sha256:b9b19a1daabcd9dbcbf330b2e0df156c4e8b46e9ae5a1c748e461bae071dfb23",
+ "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"
],
- "markers": "python_version >= '3.6'",
- "version": "==0.12.4"
+ "markers": "python_version >= '3.7'",
+ "version": "==0.13.0"
},
"grpcio": {
"hashes": [
- "sha256:02887d81da095b49ebae7c493a2401500da3db964e7bac57eaa9f28f12ccba75",
- "sha256:0523c323b36d21094a3f3bb7d2baacb80f14789f868ed96070449090c6e2a723",
- "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6",
- "sha256:099ac33af1aeb012e1faa6f14e6b56cf2d9bf02034b0df60ee583ada5930a60c",
- "sha256:09d930bfdfcd9d69d87e34a5cfa8f62d7451a15982f7c629d60a288e53d2b6e6",
- "sha256:0a02b63ddf6a4e307a5e238200a26088fc7c10b353e329cfea1a600cb4db3b3c",
- "sha256:0af6893f60cb2714d8ce5455a6f79e850d67fd83b98cfc2ac70b3127bf326933",
- "sha256:0cc257a1bc3f33b52b9ae09ed8975f40f593d3fc86bcf233bc2f41a325dd7dc4",
- "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328",
- "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead",
- "sha256:15a6549b86eebf20b87fa4e885c02cc1fae91de82fee45c08ba72db95484cb91",
- "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5",
- "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491",
- "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96",
- "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444",
- "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951",
- "sha256:2e19ecdd1dd56bd74a8d5d6b32d6233c1f4dbd22253bdc90a60370ca033480e7",
- "sha256:2fa17b45160d9696ead6a81de6fff11101287c233eeec5dfcd113ea665867e0f",
- "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf",
- "sha256:344899cb9bfe0bc546effd9ea7aa54c8d79615c5b2f38ff437fa6cac204376ca",
- "sha256:36a8aef4fc0835353ef9467c959b9da4c6d35c368fba544384a1513e70e22b35",
- "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253",
- "sha256:41e201c2b1b1940a91db2ea1db79949b37984d019c5823388fd90b4fa3cd752c",
- "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629",
- "sha256:43cc342e2a748d807e2918fb06f17d213706996359c02fc9383369b42d7a4915",
- "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae",
- "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43",
- "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b",
- "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14",
- "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab",
- "sha256:51dac29a77243726346fc6134c4004d4a09c18dfce153ef712f3f8d569621264",
- "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390",
- "sha256:58a70c614f6f3de3777a51c88a3d2cf8aee9fa8baa9c95acf939f13f8ad35fd9",
- "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2",
- "sha256:619e21f3fe58d66753e95d1c46518f309470b24dfbbcf124337201ccd5a9ca5e",
- "sha256:627e6035fe6f417899f1099af6c654209c9e9b0c4c252daa5b1b99fef6c5ea66",
- "sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0",
- "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590",
- "sha256:6ad27940f21c5653551c34b76767c9df695b709b9881f4c24631c4d47fc774f0",
- "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508",
- "sha256:6eee6ab0207da912c931719a1aae07a72fc390d1a45f6401d1bc55c38343d32e",
- "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b",
- "sha256:70384008284d5dc4997f0dd7d2ae417fb97abfef9bb6bb57e71d41b3e808585f",
- "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08",
- "sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13",
- "sha256:76d7218ae779b1a9f85150bda0a6e6727cc8d7eb1f392167c48f7f8af894244a",
- "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca",
- "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03",
- "sha256:7b447061f5cb9949bf06bc603ed4fccd5f7047149d670a7647759be438716319",
- "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748",
- "sha256:7e24cf409ad99ff9380300bd5d4761ec8835ef82f5cefad606ac0d33fdeaf049",
- "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860",
- "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d",
- "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353",
- "sha256:8e5388c4dd77ea4bdec0f8e2ad9b1a701f684c0926c1d01309cd4f277840215f",
- "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e",
- "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c",
- "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134",
- "sha256:91a95bb8034e33af972e2e1717bd6869f40240aa849a154ada14769796e463cf",
- "sha256:92abb5d63e805be982ccdd8ae9a2e9159eab52a69167fb5db558f3e940a78678",
- "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415",
- "sha256:93ef5d949c8309c226f5ed44801e3dd153c8d0f03ac9d49149c2e443a7b3a8be",
- "sha256:959bb7037fb582bf72aa7abf3b296ec32394fb72478145d355579377feea2bc8",
- "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320",
- "sha256:9a4ae966aadd4c1891c93e06ac5b370c488aab40ebd36a697d237ea1ba768e1a",
- "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179",
- "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324",
- "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18",
- "sha256:a80a0805321a97caf3ff7fae43b269920a5449363ddc9fb3640e5575f25e76e8",
- "sha256:a912dc8b09772fb48783ffa928c319e260649303344408532c2fb6696b5f31cf",
- "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df",
- "sha256:ae9549df8d95125b739eef8eacf8da81a9f21c237953057e308be50a88ae1202",
- "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e",
- "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b",
- "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6",
- "sha256:b4cfb790f517effe860e8471e94d761aa64afc98189075ad1cac0af73e97b533",
- "sha256:b6ff31cb3e4afac24ab67096968043b1abbb7a0c087f27fa5ba02605d5a70a5b",
- "sha256:b708f291b81423f13ee2bb05fcdc2a3c0feeb7885716e6c09d8caa46f1c65513",
- "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d",
- "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff",
- "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968",
- "sha256:c78663a1e9a55f57009d1a0bced20939ec66d8f9b8419f39063fd9ed5fe370c2",
- "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619",
- "sha256:c863ca1faf3fe0fbc399e7888b944087e7a2f80e9ddd7397a7f15113a7333cce",
- "sha256:cea07a9c514a104ed8b495dbf8053e6bd9971c0920f8d0dbad77aa9de567e4b5",
- "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139",
- "sha256:d378ea7527706f14173dcb3ebaf9ae0f7b1e42d92a6a7af9e73cc6ca809ffdb2",
- "sha256:d43ec03e6cb9af8b6fcae05d7566d669d34dd6a6ff365b71c550e7748023b62e",
- "sha256:d4695dfae9fd44227b7168ac461383c9337b4d7b5ed5e8df5b5a37e88460b5d8",
- "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55",
- "sha256:dcdcccdc6cb66bc4efe6d63fa39b744f72f657cde714b54a3b262107d8ed8c1a",
- "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454",
- "sha256:df96ff24d80c800047e78fe458bfded1fdec025895ca4ff98f4f359e4e270438",
- "sha256:dff0c330df1799fd720f14a24ca01b8c600292c773424f144c719507e62005ea",
- "sha256:e0341fa59c169f91d58a03dbb8632215d9c4d9fdfaf07e8fe5a36a6f2b6dc647",
- "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65",
- "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a",
- "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19",
- "sha256:e781b676075978aefbffb80717350f5970691d64726715649209e55482b4e84b",
- "sha256:ec5a642f5cd5800740e30dc96b93a232e819f650861d0fcbc55e9d6f6bbd8d3c",
- "sha256:f3f2ace17edc8046ec6d6ebe4143e5eb69181c325b852bcb6180e2d4e02bf07e",
- "sha256:f556d6f64409bb4edca5d132a2d6c4d94cedd89d9e8a5a9ed7112cc169974be0",
- "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b",
- "sha256:fae6e30f576f4ffe5ebe2045973d052b648c55a028071b563eace0de2121186f",
- "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd",
- "sha256:fd60d27239fbfe846c2a7ef67359433a391b225e4d610e460b95045ce9f66a1d",
- "sha256:fe182f127bca03ef1675ee6a70feaa2470cf21a0d061ae000269b460150e4737",
- "sha256:fe72b8e73ba8cfc925fedbb7d7e2fa519cf96e34eaf30d6194d0e2f6272bfa91"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==1.60.0"
+ "sha256:07ce1f775d37ca18c7a141300e5b71539690efa1f51fe17f812ca85b5e73262f",
+ "sha256:112eaa7865dd9e6d7c0556c8b04ae3c3a2dc35d62ad3373ab7f6a562d8199200",
+ "sha256:162ccf61499c893831b8437120600290a99c0bc1ce7b51f2c8d21ec87ff6af8b",
+ "sha256:16da954692fd61aa4941fbeda405a756cd96b97b5d95ca58a92547bba2c1624f",
+ "sha256:17708db5b11b966373e21519c4c73e5a750555f02fde82276ea2a267077c68ad",
+ "sha256:1bcfe5070e4406f489e39325b76caeadab28c32bf9252d3ae960c79935a4cc36",
+ "sha256:1c1bb80299bdef33309dff03932264636450c8fdb142ea39f47e06a7153d3063",
+ "sha256:2507006c8a478f19e99b6fe36a2464696b89d40d88f34e4b709abe57e1337467",
+ "sha256:262cda97efdabb20853d3b5a4c546a535347c14b64c017f628ca0cc7fa780cc6",
+ "sha256:26f415f40f4a93579fd648f48dca1c13dfacdfd0290f4a30f9b9aeb745026811",
+ "sha256:2a0204532aa2f1afd467024b02b4069246320405bc18abec7babab03e2644e75",
+ "sha256:2e72ddfee62430ea80133d2cbe788e0d06b12f865765cb24a40009668bd8ea05",
+ "sha256:3abe6838196da518863b5d549938ce3159d809218936851b395b09cad9b5d64a",
+ "sha256:3ad00f3f0718894749d5a8bb0fa125a7980a2f49523731a9b1fabf2b3522aa43",
+ "sha256:3c3ed41f4d7a3aabf0f01ecc70d6b5d00ce1800d4af652a549de3f7cf35c4abd",
+ "sha256:404d3b4b6b142b99ba1cff0b2177d26b623101ea2ce51c25ef6e53d9d0d87bcc",
+ "sha256:41955b641c34db7d84db8d306937b72bc4968eef1c401bea73081a8d6c3d8033",
+ "sha256:53d3a59a10af4c2558a8e563aed9f256259d2992ae0d3037817b2155f0341de1",
+ "sha256:55ddaf53474e8caeb29eb03e3202f9d827ad3110475a21245f3c7712022882a9",
+ "sha256:589ea8e75de5fd6df387de53af6c9189c5231e212b9aa306b6b0d4f07520fbb9",
+ "sha256:5dab7ac2c1e7cb6179c6bfad6b63174851102cbe0682294e6b1d6f0981ad7138",
+ "sha256:65034473fc09628a02fb85f26e73885cf1ed39ebd9cf270247b38689ff5942c5",
+ "sha256:66344ea741124c38588a664237ac2fa16dfd226964cca23ddc96bd4accccbde5",
+ "sha256:6e784f60e575a0de554ef9251cbc2ceb8790914fe324f11e28450047f264ee6f",
+ "sha256:80407bc007754f108dc2061e37480238b0dc1952c855e86a4fc283501ee6bb5d",
+ "sha256:82af3613a219512a28ee5c95578eb38d44dd03bca02fd918aa05603c41018051",
+ "sha256:88b4f9ee77191dcdd8810241e89340a12cbe050be3e0d5f2f091c15571cd3930",
+ "sha256:99701979bcaaa7de8d5f60476487c5df8f27483624f1f7e300ff4669ee44d1f2",
+ "sha256:a1511a303f8074f67af4119275b4f954189e8313541da7b88b1b3a71425cdb10",
+ "sha256:a5eb4844e5e60bf2c446ef38c5b40d7752c6effdee882f716eb57ae87255d20a",
+ "sha256:a75af2fc7cb1fe25785be7bed1ab18cef959a376cdae7c6870184307614caa3f",
+ "sha256:a90ac47a8ce934e2c8d71e317d2f9e7e6aaceb2d199de940ce2c2eb611b8c0f4",
+ "sha256:aa787b83a3cd5e482e5c79be030e2b4a122ecc6c5c6c4c42a023a2b581fdf17b",
+ "sha256:aaae70364a2d1fb238afd6cc9fcb10442b66e397fd559d3f0968d28cc3ac929c",
+ "sha256:af15e9efa4d776dfcecd1d083f3ccfb04f876d613e90ef8432432efbeeac689d",
+ "sha256:af7dc3f7a44f10863b1b0ecab4078f0a00f561aae1edbd01fd03ad4dcf61c9e9",
+ "sha256:b7ec9e2f8ffc8436f6b642a10019fc513722858f295f7efc28de135d336ac189",
+ "sha256:b94d41b7412ef149743fbc3178e59d95228a7064c5ab4760ae82b562bdffb199",
+ "sha256:c1624aa686d4b36790ed1c2e2306cc3498778dffaf7b8dd47066cf819028c3ad",
+ "sha256:c5ffeb269f10cedb4f33142b89a061acda9f672fd1357331dbfd043422c94e9e",
+ "sha256:c6ad9c39704256ed91a1cffc1379d63f7d0278d6a0bad06b0330f5d30291e3a3",
+ "sha256:c772f225483905f675cb36a025969eef9712f4698364ecd3a63093760deea1bc",
+ "sha256:c77618071d96b7a8be2c10701a98537823b9c65ba256c0b9067e0594cdbd954d",
+ "sha256:c79b518c56dddeec79e5500a53d8a4db90da995dfe1738c3ac57fe46348be049",
+ "sha256:cfd23ad29bfa13fd4188433b0e250f84ec2c8ba66b14a9877e8bce05b524cf54",
+ "sha256:d0695ae31a89f1a8fc8256050329a91a9995b549a88619263a594ca31b76d756",
+ "sha256:d2c1771d0ee3cf72d69bb5e82c6a82f27fbd504c8c782575eddb7839729fbaad",
+ "sha256:da6a7b6b938c15fa0f0568e482efaae9c3af31963eec2da4ff13a6d8ec2888e4",
+ "sha256:db068bbc9b1fa16479a82e1ecf172a93874540cb84be69f0b9cb9b7ac3c82670",
+ "sha256:db707e3685ff16fc1eccad68527d072ac8bdd2e390f6daa97bc394ea7de4acea",
+ "sha256:e2cc8a308780edbe2c4913d6a49dbdb5befacdf72d489a368566be44cadaef1a",
+ "sha256:f27246d7da7d7e3bd8612f63785a7b0c39a244cf14b8dd9dd2f2fab939f2d7f1",
+ "sha256:f4aa94361bb5141a45ca9187464ae81a92a2a135ce2800b2203134f7a1a1d479",
+ "sha256:fa63245271920786f4cb44dcada4983a3516be8f470924528cf658731864c14b"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.7'",
+ "version": "==1.62.2"
},
"grpcio-status": {
"hashes": [
- "sha256:2c33bbdbe20188b2953f46f31af669263b6ee2a9b2d38fa0d36ee091532e21bf",
- "sha256:53695f45da07437b7c344ee4ef60d370fd2850179f5a28bb26d8e2aa1102ec11"
- ],
- "version": "==1.48.2"
+ "sha256:206ddf0eb36bc99b033f03b2c8e95d319f0044defae9b41ae21408e7e0cda48f",
+ "sha256:62e1bfcb02025a1cd73732a2d33672d3e9d0df4d21c12c51e0bbcaf09bab742a"
+ ],
+ "version": "==1.62.2"
+ },
+ "gssapi": {
+ "hashes": [
+ "sha256:19c373b3ba63ce19cd3163aa1495635e3d01b0de6cc4ff1126095eded1df6e01",
+ "sha256:338db18612e3e6ed64e92b6d849242a535fdc98b365f21122992fb8cae737617",
+ "sha256:37f1a8046d695f2c9b8d640a6e385780d3945c0741571ed6fee6f94c31e431dc",
+ "sha256:3a3f63105f39c4af29ffc8f7b6542053d87fe9d63010c689dd9a9f5571facb8e",
+ "sha256:465c6788f2ac6ef7c738394ba8fde1ede6004e5721766f386add63891d8c90af",
+ "sha256:4d9ed83f2064cda60aad90e6840ae282096801b2c814b8cbd390bf0df4635aab",
+ "sha256:4e4a83e9b275fe69b5d40be6d5479889866b80333a12c51a9243f2712d4f0554",
+ "sha256:5731c5b40ecc3116cfe7fb7e1d1e128583ec8b3df1e68bf8cd12073160793acd",
+ "sha256:5b4bf84d0a6d7779a4bf11dacfd3db57ae02dd53562e2aeadac4219a68eaee07",
+ "sha256:791e44f7bea602b8e3da1ec56fbdb383b8ee3326fdeb736f904c2aa9af13a67d",
+ "sha256:7d91fe6e2a5c89b32102ea8e374b8ae13b9031d43d7b55f3abc1f194ddce820d",
+ "sha256:8d57d67547e18f4e44a688bfb20abbf176d1b8df547da2b31c3f2df03cfdc269",
+ "sha256:8fb8ee70458f47b51ed881a6881f30b187c987c02af16cc0fff0079255d4d465",
+ "sha256:8fdb1ff130cee49bc865ec1624dee8cf445cd6c6e93b04bffef2c6f363a60cb9",
+ "sha256:aa3c8d0b1526f52559552bb2c9d2d6be013d76a8e5db00b39a1db5727e93b0b0",
+ "sha256:b031c0f186ab4275186da385b2c7470dd47c9b27522cb3b753757c9ac4bebf11",
+ "sha256:b03d6b30f1fcd66d9a688b45a97e302e4dd3f1386d5c333442731aec73cdb409",
+ "sha256:ca6ceb17fc15eda2a69f2e8c6cf10d11e2edb32832255e5d4c65b21b6db4680a",
+ "sha256:d5b28237afc0668046934792756dd4b6b7e957b0d95a608d02f296734a2819ad",
+ "sha256:e2bb081f2db2111377effe7d40ba23f9a87359b9d2f4881552b731e9da88b36b",
+ "sha256:e40efc88ccefefd6142f8c47b8af498731938958b808bad49990442a91f45160",
+ "sha256:e556878da197ad115a566d36e46a8082d0079731d9c24d1ace795132d725ff2a",
+ "sha256:edc8ef3a9e397dbe18bb6016f8e2209969677b534316d20bb139da2865a38efe",
+ "sha256:ee74b9211c977b9181ff4652d886d7712c9a221560752a35393b58e5ea07887a"
+ ],
+ "version": "==1.8.3"
},
"gunicorn": {
"hashes": [
- "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e",
- "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"
+ "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9",
+ "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"
],
- "markers": "python_version >= '3.5'",
- "version": "==20.1.0"
+ "markers": "python_version >= '3.7'",
+ "version": "==22.0.0"
},
"h11": {
"hashes": [
@@ -1164,21 +1106,21 @@
},
"jinja2": {
"hashes": [
- "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852",
- "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"
+ "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa",
+ "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==3.1.2"
+ "version": "==3.1.3"
},
"jira": {
"hashes": [
- "sha256:1b82b860d5ad10bda411a532269dcff5031a98d84adcba9ba453036c3b5ebd17",
- "sha256:b188e13f430a95437d3152481799ecfccb9773194a57be076c1ca3e5fb93f6bb"
+ "sha256:08b28388ee498542ebb6b05db87e6c46c37535c268717ccc23c84b377ea309fb",
+ "sha256:4c67497fe8dc2f60f1c4f7b33479f059c928bec3db9dcb5cd7b6a09b6ecc0942"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==3.5.0"
+ "version": "==3.6.0"
},
"jmespath": {
"hashes": [
@@ -1189,21 +1131,68 @@
"markers": "python_version >= '3.7'",
"version": "==1.0.1"
},
+ "jsonpatch": {
+ "hashes": [
+ "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade",
+ "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"
+ ],
+ "markers": "python_version >= '3.5'",
+ "version": "==1.33"
+ },
+ "jsonpath-rw": {
+ "hashes": [
+ "sha256:05c471281c45ae113f6103d1268ec7a4831a2e96aa80de45edc89b11fac4fbec"
+ ],
+ "version": "==1.4.0"
+ },
+ "jsonpointer": {
+ "hashes": [
+ "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a",
+ "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"
+ ],
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'",
+ "version": "==2.4"
+ },
"jsonschema": {
"hashes": [
- "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d",
- "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"
+ "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f",
+ "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"
],
- "markers": "python_version >= '3.7'",
- "version": "==4.17.3"
+ "markers": "python_version >= '3.8'",
+ "version": "==4.21.1"
},
- "jsonschema-spec": {
+ "jsonschema-path": {
"hashes": [
- "sha256:90215863b56e212086641956b20127ccbf6d8a3a38343dad01d6a74d19482f76",
- "sha256:f2206d18c89d1824c1f775ba14ed039743b41a9167bd2c5bdb774b66b3ca0bbf"
+ "sha256:271aedfefcd161a0f467bdf23e1d9183691a61eaabf4b761046a914e369336c7",
+ "sha256:4d0dababf341e36e9b91a5fb2a3e3fd300b0150e7fe88df4e55cc8253c5a3989"
],
- "markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'",
- "version": "==0.1.6"
+ "markers": "python_full_version >= '3.8.0' and python_full_version < '4.0.0'",
+ "version": "==0.3.2"
+ },
+ "jsonschema-specifications": {
+ "hashes": [
+ "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc",
+ "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==2023.12.1"
+ },
+ "krb5": {
+ "hashes": [
+ "sha256:2269fb6c0813cd7f58526a152d746aebb8e48026b92856093865414395c185e9",
+ "sha256:3008124d01da50559ee7ac2ce0045ee069963f3086ec5c2460b07da6fae4fdda",
+ "sha256:6ca9dcb23dc0014f79af0f151bb501bfe4f371b3e54bde78e79ea73dad272eda",
+ "sha256:7125ee240dad951cc0a71e567c51b215238e490e87ad67b1af9a69dd90e63bca",
+ "sha256:73badd6982e8af81e4cca82c4f1a6dbcc50257d700072b8df7c84ea003c1b5e4",
+ "sha256:83d0a7d44130681f6a8168fc3609d783c77868fe1ab4a9861da30ae8212d632a",
+ "sha256:a9b4109adf9f02d0885c96611aba1945970b5319a93cb427617049d6536921ac",
+ "sha256:df15e3fe8b1d03cf715b5866215da6b131e1fd5ddd6e7f659e74bb79498033b2",
+ "sha256:e0412d84484bf37158f040baa86ac3c08604251f9d0afdf2e9659b237ce3cdfa",
+ "sha256:e51c700cf148e63fef60bc4b2c453018218a3170dedbfe2840f122aee5a453e7",
+ "sha256:f65fbbcf6de0fecee56a05370b6f65230c121a0cadad8e6a56f5a852bdeecaa6",
+ "sha256:fb69fe96be7197f007b5b20172346728349d0b03a39b3343e8793fabb3d28626"
+ ],
+ "version": "==0.5.1"
},
"kubernetes": {
"hashes": [
@@ -1263,95 +1252,112 @@
},
"lxml": {
"hashes": [
- "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7",
- "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726",
- "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03",
- "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140",
- "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a",
- "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05",
- "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03",
- "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419",
- "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4",
- "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e",
- "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67",
- "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50",
- "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894",
- "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf",
- "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947",
- "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1",
- "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd",
- "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3",
- "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92",
- "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3",
- "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457",
- "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74",
- "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf",
- "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1",
- "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4",
- "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975",
- "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5",
- "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe",
- "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7",
- "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1",
- "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2",
- "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409",
- "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f",
- "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f",
- "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5",
- "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24",
- "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e",
- "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4",
- "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a",
- "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c",
- "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de",
- "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f",
- "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b",
- "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5",
- "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7",
- "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a",
- "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c",
- "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9",
- "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e",
- "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab",
- "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941",
- "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5",
- "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45",
- "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7",
- "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892",
- "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746",
- "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c",
- "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53",
- "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe",
- "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184",
- "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38",
- "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df",
- "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9",
- "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b",
- "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2",
- "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0",
- "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda",
- "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b",
- "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5",
- "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380",
- "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33",
- "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8",
- "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1",
- "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889",
- "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9",
- "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f",
- "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"
+ "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3",
+ "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d",
+ "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a",
+ "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120",
+ "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305",
+ "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287",
+ "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23",
+ "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52",
+ "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f",
+ "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4",
+ "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584",
+ "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f",
+ "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693",
+ "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef",
+ "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5",
+ "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02",
+ "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc",
+ "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7",
+ "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da",
+ "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a",
+ "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40",
+ "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8",
+ "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd",
+ "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601",
+ "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c",
+ "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be",
+ "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2",
+ "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c",
+ "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129",
+ "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc",
+ "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2",
+ "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1",
+ "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7",
+ "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d",
+ "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477",
+ "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d",
+ "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e",
+ "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7",
+ "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2",
+ "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574",
+ "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf",
+ "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b",
+ "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98",
+ "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12",
+ "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42",
+ "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35",
+ "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d",
+ "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce",
+ "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d",
+ "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f",
+ "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db",
+ "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4",
+ "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694",
+ "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac",
+ "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2",
+ "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7",
+ "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96",
+ "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d",
+ "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b",
+ "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a",
+ "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13",
+ "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340",
+ "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6",
+ "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458",
+ "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c",
+ "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c",
+ "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9",
+ "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432",
+ "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991",
+ "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69",
+ "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf",
+ "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb",
+ "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b",
+ "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833",
+ "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76",
+ "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85",
+ "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e",
+ "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50",
+ "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8",
+ "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4",
+ "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b",
+ "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5",
+ "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190",
+ "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7",
+ "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa",
+ "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0",
+ "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9",
+ "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0",
+ "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b",
+ "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5",
+ "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7",
+ "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"
],
"index": "pypi",
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
- "version": "==4.9.2"
+ "version": "==4.9.3"
},
"markdown": {
"hashes": [
- "sha256:5874b47d4ee3f0b14d764324d2c94c03ea66bee56f2d929da9f2508d65e722dc",
- "sha256:b65d7beb248dc22f2e8a31fb706d93798093c308dc1aba295aedeb9d41a813bd"
+ "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f",
+ "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006",
+ "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3",
+ "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"
],
"markers": "python_version >= '3.8'",
- "version": "==3.5.1"
+ "version": "==3.6"
},
"markupsafe": {
"hashes": [
@@ -1412,44 +1418,46 @@
},
"marshmallow": {
"hashes": [
- "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78",
- "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"
+ "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3",
+ "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==3.19.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.21.1"
},
"marshmallow-oneofschema": {
"hashes": [
- "sha256:62cd2099b29188c92493c2940ee79d1bf2f2619a71721664e5a98ec2faa58237",
- "sha256:bd29410a9f2f7457a2b428286e2a80ef76b8ddc3701527dc1f935a88914b02f2"
+ "sha256:68b4a57d0281a04ac25d4eb7a4c5865a57090a0a8fd30fd6362c8e833ac6a6d9",
+ "sha256:ff4cb2a488785ee8edd521a765682c2c80c78b9dc48894124531bdfa1ec9303b"
],
"index": "pypi",
- "markers": "python_version >= '3.6'",
- "version": "==3.0.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.1.1"
},
"meraki": {
"hashes": [
- "sha256:798f57d46cb6321f7f3cbc5df2be0ab748700606a46aca79eb58d4b8fab2ab44",
- "sha256:b36eb11b0592581faaa4e4d47bc218a39df2c1839a6945a30f39c5aa999195a9"
+ "sha256:697706311aa48953e72c462bcc830b0e21575aeac144d94c8383a745de6396b6",
+ "sha256:6b3f31981d278cbba2cce5229436528da4ecac5054d1cb7da6467f6d854cac84"
],
"index": "pypi",
- "version": "==1.30.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.41.0"
},
"more-itertools": {
"hashes": [
- "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d",
- "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"
+ "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684",
+ "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==9.1.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==10.2.0"
},
"msal": {
"hashes": [
"sha256:224756079fe338be838737682b49f8ebc20a87c1c5eeaf590daae4532b83de15",
"sha256:be77ba6a8f49c9ff598bbcdc5dfcf1c9842f3044300109af738e8c3e371065b5"
],
+ "index": "pypi",
"markers": "python_version >= '2.7'",
"version": "==1.26.0"
},
@@ -1542,48 +1550,57 @@
"markers": "python_version >= '3.7'",
"version": "==6.0.4"
},
- "mypy-extensions": {
+ "netapp-ontap": {
"hashes": [
- "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d",
- "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"
+ "sha256:495e679e10e61b9bb2cf5a432a5cdaf9f2ec56855540ba221c021d31a6b91eb7",
+ "sha256:9e1ed21dccae8d35574a0c506a3440e04b9e37a1dff1c2ae7906ec6e7afafaac"
],
- "markers": "python_version >= '3.5'",
- "version": "==1.0.0"
+ "index": "pypi",
+ "markers": "python_version >= '3.6'",
+ "version": "==9.14.1.0"
},
"numpy": {
"hashes": [
- "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22",
- "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f",
- "sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9",
- "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96",
- "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0",
- "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a",
- "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281",
- "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04",
- "sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468",
- "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253",
- "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756",
- "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a",
- "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb",
- "sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d",
- "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0",
- "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910",
- "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978",
- "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5",
- "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f",
- "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a",
- "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5",
- "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2",
- "sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d",
- "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95",
- "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5",
- "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d",
- "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780",
- "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.8'",
- "version": "==1.24.2"
+ "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b",
+ "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818",
+ "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20",
+ "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0",
+ "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010",
+ "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a",
+ "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea",
+ "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c",
+ "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71",
+ "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110",
+ "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be",
+ "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a",
+ "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a",
+ "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5",
+ "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed",
+ "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd",
+ "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c",
+ "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e",
+ "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0",
+ "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c",
+ "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a",
+ "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b",
+ "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0",
+ "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6",
+ "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2",
+ "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a",
+ "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30",
+ "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218",
+ "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5",
+ "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07",
+ "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2",
+ "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4",
+ "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764",
+ "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef",
+ "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3",
+ "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.9'",
+ "version": "==1.26.4"
},
"oauthlib": {
"hashes": [
@@ -1596,20 +1613,20 @@
},
"openapi-schema-validator": {
"hashes": [
- "sha256:79f37f38ef9fd5206b924ed7a6f382cea7b649b3b56383c47f1906082b7b9015",
- "sha256:c573e2be2c783abae56c5a1486ab716ca96e09d1c3eab56020d1dc680aa57bf8"
+ "sha256:11a95c9c9017912964e3e5f2545a5b11c3814880681fcacfb73b1759bb4f2804",
+ "sha256:c4887c1347c669eb7cded9090f4438b710845cd0f90d1fb9e1b3303fb37339f8"
],
- "markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'",
- "version": "==0.4.4"
+ "markers": "python_full_version >= '3.8.0' and python_full_version < '4.0.0'",
+ "version": "==0.6.2"
},
"openapi-spec-validator": {
"hashes": [
- "sha256:1189d0618ae0678ccf6c883cc1266d381454eece6f21fcf330cc7caea5fc25eb",
- "sha256:4145478f26df16059c147406eaaa59b77ff60e3461ba6edb9aa84d481ed89aaf"
+ "sha256:3c81825043f24ccbcd2f4b149b11e8231abce5ba84f37065e14ec947d8f4e959",
+ "sha256:8577b85a8268685da6f8aa30990b83b7960d4d1117e901d451b5d572605e5ec7"
],
"index": "pypi",
- "markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'",
- "version": "==0.5.6"
+ "markers": "python_full_version >= '3.8.0' and python_full_version < '4.0.0'",
+ "version": "==0.7.1"
},
"opsgenie-sdk": {
"hashes": [
@@ -1619,22 +1636,13 @@
"index": "pypi",
"version": "==2.1.5"
},
- "ordered-set": {
- "hashes": [
- "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562",
- "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==4.1.0"
- },
"packaging": {
"hashes": [
- "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5",
- "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"
+ "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5",
+ "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"
],
"markers": "python_version >= '3.7'",
- "version": "==23.2"
+ "version": "==24.0"
},
"paho-mqtt": {
"hashes": [
@@ -1660,14 +1668,6 @@
"markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'",
"version": "==0.4.3"
},
- "pathspec": {
- "hashes": [
- "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08",
- "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"
- ],
- "markers": "python_version >= '3.8'",
- "version": "==0.12.1"
- },
"pbr": {
"hashes": [
"sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b",
@@ -1679,72 +1679,79 @@
},
"pillow": {
"hashes": [
- "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d",
- "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de",
- "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616",
- "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839",
- "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099",
- "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a",
- "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219",
- "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106",
- "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b",
- "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412",
- "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b",
- "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7",
- "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2",
- "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7",
- "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14",
- "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f",
- "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27",
- "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57",
- "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262",
- "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28",
- "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610",
- "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172",
- "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273",
- "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e",
- "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d",
- "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818",
- "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f",
- "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9",
- "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01",
- "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7",
- "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651",
- "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312",
- "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80",
- "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666",
- "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061",
- "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b",
- "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992",
- "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593",
- "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4",
- "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db",
- "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba",
- "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd",
- "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e",
- "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212",
- "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb",
- "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2",
- "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34",
- "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256",
- "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f",
- "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2",
- "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38",
- "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996",
- "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a",
- "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.8'",
- "version": "==10.1.0"
- },
- "platformdirs": {
- "hashes": [
- "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380",
- "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"
+ "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c",
+ "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2",
+ "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb",
+ "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d",
+ "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa",
+ "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3",
+ "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1",
+ "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a",
+ "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd",
+ "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8",
+ "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999",
+ "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599",
+ "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936",
+ "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375",
+ "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d",
+ "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b",
+ "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60",
+ "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572",
+ "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3",
+ "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced",
+ "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f",
+ "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b",
+ "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19",
+ "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f",
+ "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d",
+ "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383",
+ "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795",
+ "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355",
+ "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57",
+ "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09",
+ "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b",
+ "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462",
+ "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf",
+ "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f",
+ "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a",
+ "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad",
+ "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9",
+ "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d",
+ "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45",
+ "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994",
+ "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d",
+ "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338",
+ "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463",
+ "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451",
+ "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591",
+ "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c",
+ "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd",
+ "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32",
+ "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9",
+ "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf",
+ "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5",
+ "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828",
+ "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3",
+ "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5",
+ "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2",
+ "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b",
+ "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2",
+ "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475",
+ "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3",
+ "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb",
+ "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef",
+ "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015",
+ "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002",
+ "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170",
+ "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84",
+ "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57",
+ "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f",
+ "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27",
+ "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"
],
+ "index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==4.1.0"
+ "version": "==10.3.0"
},
"ply": {
"hashes": [
@@ -1776,79 +1783,44 @@
},
"protobuf": {
"hashes": [
- "sha256:0405c3c1cbcc5f827c4a681558d3c628b0a0ac8a7eaea840e521ea427fbe803c",
- "sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf",
- "sha256:091a3b6bea4b01ad77846598b77e7f56a51c28214abfd31054ef0ea7c666c064",
- "sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f",
- "sha256:109f003328dd46b96e318ba4a4c6a82dd128e4d786c273c45dcc93a4b2630ece",
- "sha256:26355216684829155238c27858a909426423880740d32293e4efc262385c321b",
- "sha256:2845c86bd3dfae3b2d8e4697e7b7afe1bd05ee2d8c71171de1975d3449009e39",
- "sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f",
- "sha256:2a82a269769dd693480b0dd8267dadbadf50dcc33dbf0c602d643c8367896b60",
- "sha256:318e1a0e10fc062b6f52e9c4922f4ce2545d13480f11f1cea67852b560461c56",
- "sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7",
- "sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996",
- "sha256:439712847df0920fbdc4e490240edd8bb025f0bb9b529fb465242d2365a6f6f0",
- "sha256:497cbc7c0d034d6061be631b332433560d12ca8cb603a3132d978c44571d043b",
- "sha256:4b255dc7714eb904a5de2578a5f0358132c6eb28c3b9d8abfc307de274881e4f",
- "sha256:4d5eefb8b11f5904cb226036168120a440451da1b370fbc1315b2a11af026590",
- "sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067",
- "sha256:6960da4d4c16adb02c07ed4f55d1669b1cfe0180d09550d47f2f15b3563b7504",
- "sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c",
- "sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7",
- "sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9",
- "sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c",
- "sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739",
- "sha256:7d8ed8d87a008685f7950a0545180a2457d8601f3150ec2288f185195cb54506",
- "sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91",
- "sha256:8f4b3f2de9559da9ce9f6099e8c0423470d64fc6e88b8a9ccecb104b33c975d3",
- "sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c",
- "sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153",
- "sha256:a80b13b6c31cfe2fd43846d99e740e9f5f22ace756a26d59897185d84d31210f",
- "sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9",
- "sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388",
- "sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e",
- "sha256:af908d773fa818256f6159556d3bcb8db71415c0219299cebad01df123730c51",
- "sha256:c8d375262a9efe44ac73985c62a2722b155b7e33f4a4bd4066c7a1b24fce93c2",
- "sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab",
- "sha256:cc18e48ff46cf0c853713413add97cfdc14672aa4a7a1f7a2e0471712430c85f",
- "sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde",
- "sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531",
- "sha256:cf45ce9e038a19f770e84b5ba5eb4434b044fc633247b903ae728c66b210f7b1",
- "sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8",
- "sha256:dd3d652fec35c01f737b034a8726677bc8a8767981ed25c4fd3eb4dbe4b9ab9b",
- "sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7",
- "sha256:dfe8f342fb5c2f92dcaf3855b532d02e9d7ff847342b2b3ae324aa102c7a2fb3",
- "sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20",
- "sha256:f26f89a4495ea4f2c4abc703b8f68ab1f6c5ebf18a8732df39e8bdf7b9d94da4",
- "sha256:f899a5661f45dbd8ff0261c22a327c1333a317450c836874ab3c34ffd7053bd8",
- "sha256:fcd931cfd80ab29412588c62735b2783e34350bbf03eff277988debea4c3f8a6",
- "sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3"
+ "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4",
+ "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8",
+ "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c",
+ "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d",
+ "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4",
+ "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa",
+ "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c",
+ "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019",
+ "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9",
+ "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c",
+ "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==3.20.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==4.25.3"
},
"psutil": {
"hashes": [
- "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff",
- "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1",
- "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62",
- "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549",
- "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08",
- "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7",
- "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e",
- "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe",
- "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24",
- "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad",
- "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94",
- "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8",
- "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7",
- "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"
+ "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d",
+ "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73",
+ "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8",
+ "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2",
+ "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e",
+ "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36",
+ "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7",
+ "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c",
+ "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee",
+ "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421",
+ "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf",
+ "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81",
+ "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0",
+ "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631",
+ "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4",
+ "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"
],
"index": "pypi",
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==5.9.4"
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
+ "version": "==5.9.8"
},
"psycopg2-binary": {
"hashes": [
@@ -1959,10 +1931,11 @@
},
"pycparser": {
"hashes": [
- "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9",
- "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
+ "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6",
+ "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"
],
- "version": "==2.21"
+ "markers": "python_version >= '3.8'",
+ "version": "==2.22"
},
"pydantic": {
"hashes": [
@@ -2111,50 +2084,79 @@
"markers": "python_version >= '3.7'",
"version": "==2.8.0"
},
- "pykerberos": {
- "hashes": [
- "sha256:9d701ebd8fc596c99d3155d5ba45813bd5908d26ef83ba0add250edb622abed4"
- ],
- "index": "pypi",
- "version": "==1.2.4"
- },
"pymssql": {
"hashes": [
- "sha256:03903bdf23a2aac26e9b772b3998efeba079fcb6fcfa6df7abc614e9afa14af0",
- "sha256:049f2e3de919e8e02504780a21ebbf235e21ca8ed5c7538c5b6e705aa6c43d8c",
- "sha256:0dd86d8e3e346e34f3f03d12e333747b53a1daa74374a727f4714d5b82ee0dd5",
- "sha256:2446645eb8684c0cb246a3294110455dd89a29608dfa7a58ea88aa42aa1cf005",
- "sha256:253af3d39fc0235627966817262d5c4c94ad09dcbea59664748063470048c29c",
- "sha256:293cb4d0339e221d877d6b19a1905082b658f0100a1e2ccc9dda10de58938901",
- "sha256:2c9d109df536dc5f7dd851a88d285a4c9cb12a9314b621625f4f5ab1197eb312",
- "sha256:30bfd7b8edef78097ccd3f52ac3f3a5c3cf0019f8a280f306cacbbb165caaf63",
- "sha256:358d5acf0298d6618edf7fedc4ce3dc8fb5ce8a9db85e7332d5196d29d841821",
- "sha256:381d8a47c4665d99f114849bed23bcba1922c9d005accc3ac19cee8a1d3522dc",
- "sha256:3906993300650844ec140aa58772c0f5f3e9e9d5709c061334fd1551acdcf066",
- "sha256:3933f7f082be74698eea835df51798dab9bc727d94d3d280bffc75ab9265f890",
- "sha256:47859887adeaf184766b5e0bc845dd23611f3808f9521552063bb36eabc10092",
- "sha256:4f365033c9b4263b74b8a332bbdf2d7d8d7230f05805439b4f3fbf0a0164acfe",
- "sha256:508226a0df7cb6faeda9f8e84e85743690ca427d7b27af9a73d75fcf0c1eef6e",
- "sha256:5c83208138f87942c5f08aa50c5fb8d89b7f15340cde58a77b08f49df277e134",
- "sha256:63e1be8936372c07aee2405203ee0161ce76b03893cafe3d46841be9886f5ffe",
- "sha256:6b2d9c6d38a416c6f2db36ff1cd8e69f9a5387a46f9f4f612623192e0c9404b1",
- "sha256:7309c7352e4a87c9995c3183ebfe0ff4135e955bb759109637673c61c9f0ca8d",
- "sha256:7e4538e85d7b5fb3867636391f91e9e18ac2e0aef660d25e97268e04339f2c36",
- "sha256:821945c2214fe666fd456c61e09a29a00e7719c9e136c801bffb3a254e9c579b",
- "sha256:895041edd002a2e91d8a4faf0906b6fbfef29d9164bc6beb398421f5927fa40e",
- "sha256:9b8d603cc1ec7ae585c5a409a1d45e8da067970c79dd550d45c238ae0aa0f79f",
- "sha256:9baefbfbd07d0142756e2dfcaa804154361ac5806ab9381350aad4e780c3033e",
- "sha256:bf31b8b76634c826a91f9999e15b7bfb0c051a0f53b319fd56481a67e5b903bb",
- "sha256:c83ad3ad20951f3a94894b354fa5fa9666dcd5ebb4a635dad507c7d1dd545833",
- "sha256:cc85b609b4e60eac25fa38bbac1ff854fd2c2a276e0ca4a3614c6f97efb644bb",
- "sha256:d63d6f25cf40fe6a03c49be2d4d337858362b8ab944d6684c268e4990807cf0c",
- "sha256:d873e553374d5b1c57fe1c43bb75e3bcc2920678db1ef26f6bfed396c7d21b30",
- "sha256:de313375b90b0f554058992f35c4a4beb3f6ec2f5912d8cd6afb649f95b03a9f",
- "sha256:e920d6f805a525f19e770e48326a5f96b83d7b8dfd093f5b7015b54ef84bcf4c",
- "sha256:ebe7f64d5278d807f14bea08951e02512bfbc6219fd4d4f15bb45ded885cf3d4"
- ],
- "index": "pypi",
- "version": "==2.2.8"
+ "sha256:001242cedc73587cbb10aec4069de50febbff3c4c50f9908a215476496b3beab",
+ "sha256:070181361ab94bdaeb14b591a35d853f327bc90c660b04047d474274fbb80357",
+ "sha256:09075e129655ab1178d2d60efb9b3fbf5cdb6da2338ecdb3a92c53a4ad7efa0c",
+ "sha256:0bdd1fb49b0e331e47e83f39d4af784c857e230bfc73519654bab29285c51c63",
+ "sha256:0c26af25991715431559cb5b37f243b8ff676540f504ed0317774dfc71827af1",
+ "sha256:0dcd76a8cc757c7cfe2d235f232a20d74ac8cebf9feabcdcbda5ef33157d14b1",
+ "sha256:139a833e6e72a624e4f2cde803a34a616d5661dd9a5b2ae0402d9d8a597b2f1f",
+ "sha256:139c5032e0a2765764987803f1266132fcc5da572848ccc4d29cebba794a4260",
+ "sha256:15257c7bd89c0283f70d6eaafd9b872201818572b8ba1e8576408ae23ef50c7c",
+ "sha256:15815bf1ff9edb475ec4ef567f23e23c4e828ce119ff5bf98a072b66b8d0ac1b",
+ "sha256:167313d91606dc7a3c05b2ad60491a138b7408a8779599ab6430a48a67f133f0",
+ "sha256:1956c111debe67f69a9c839b33ce420f0e8def1ef5ff9831c03d8ac840f82376",
+ "sha256:1a75afa17746972bb61120fb6ea907657fc1ab68250bbbd8b21a00d0720ed0f4",
+ "sha256:1bc0ba19b4426c57509f065a03748d9ac230f1543ecdac57175e6ebd213a7bc0",
+ "sha256:1d5aa1a090b17f4ba75ffac3bb371f6c8c869692b653689396f9b470fde06981",
+ "sha256:1f7ba71cf81af65c005173f279928bf86700d295f97e4965e169b5764bc6c4f2",
+ "sha256:2609bbd3b715822bb4fa6d457b2985d32ad6ab9580fdb61ae6e0eee251791d24",
+ "sha256:287c8f79a7eca0c6787405797bac0f7c502d9be151f3f823aae12042235f8426",
+ "sha256:2b621c5e32136dabc2fea25696beab0647ec336d25c04ab6d8eb8c8ee92f0e52",
+ "sha256:34ab2373ca607174ad7244cfe955c07b6bc77a1e21d3c3143dbe934dec82c3a4",
+ "sha256:410e8c40b7c1b421e750cf80ccf2da8d802ed815575758ac9a78c5f6cd995723",
+ "sha256:428e32e53c554798bc2d0682a169fcb681df6b68544c4aedd1186018ea7e0447",
+ "sha256:452b88a4ceca7efb934b5babb365851a3c52e723642092ebc92777397c2cacdb",
+ "sha256:4551f50c8a3b6ffbd71f794ee1c0c0134134c5d6414302c2fa28b67fe4470d07",
+ "sha256:492e49616b58b2d6caf4a2598cb344572870171a7b65ba1ac61a5e248b6a8e1c",
+ "sha256:4eeaacc1dbbc678f4e80c6fd6fc279468021fdf2e486adc8631ec0de6b6c0e62",
+ "sha256:5928324a09de7466368c15ece1de4ab5ea968d24943ceade758836f9fc7149f5",
+ "sha256:5b081aa7b02911e3f299f7d1f68ce8ca585a5119d44601bf4483da0aae8c2181",
+ "sha256:6452326cecd4dcee359a6f8878b827118a8c8523cd24de5b3a971a7a172e4275",
+ "sha256:658c85474ea01ca3a30de769df06f46681e882524b05c6994cd6fd985c485f27",
+ "sha256:65bb674c0ba35379bf93d1b2cf06fdc5e7ec56e1d0e9de525bdcf977190b2865",
+ "sha256:692ab328ac290bd2031bc4dd6deae32665dfffda1b12aaa92928d3ebc667d5ad",
+ "sha256:6ddaf0597138179517bdbf5b5aa3caffee65987316dc906359a5d0801d0847ee",
+ "sha256:723a4612421027a01b51e42e786678a18c4a27613a3ccecf331c026e0cc41353",
+ "sha256:7332db36a537cbc16640a0c3473a2e419aa5bc1f9953cada3212e7b2587de658",
+ "sha256:7bac28aed1d625a002e0289e0c18d1808cecbdc12e2a1a3927dbbaff66e5fff3",
+ "sha256:803122aec31fbd52f5d65ef3b30b3bd2dc7b2a9e3a8223d16078a25805155c45",
+ "sha256:84aff3235ad1289c4079c548cfcdf7eaaf2475b9f81557351deb42e8f45a9c2d",
+ "sha256:85ea4ea296afcae34bc61e4e0ef2f503270fd4bb097b308a07a9194f1f063aa1",
+ "sha256:8d418f4dca245421242ed9df59d3bcda0cd081650df6deb1bef7f157b6a6f9dd",
+ "sha256:984d99ee6a2579f86c536b1b0354ad3dc9701e98a4b3953f1301b4695477cd2f",
+ "sha256:9a644e4158fed30ae9f3846f2f1c74d36fa1610eb552de35b7f611d063fa3c85",
+ "sha256:a0ebb0e40c93f8f1e40aad80f512ae4aa89cb1ec8a96964b9afedcff1d5813fd",
+ "sha256:a114633fa02b7eb5bc63520bf07954106c0ed0ce032449c871abb8b8c435a872",
+ "sha256:a5308507c2c4e94ede7e5b164870c1ba2be55abab6daf795b5529e2da4e838b6",
+ "sha256:ae9818df40588d5a49e7476f05e31cc83dea630d607178d66762ca8cf32e9f77",
+ "sha256:aec64022a2419fad9f496f8e310522635e39d092970e1d55375ea0be86725174",
+ "sha256:b4a8377527702d746c490c2ce67d17f1c351d182b49b82fae6e67ae206bf9663",
+ "sha256:b78032e45ea33c55d430b93e55370b900479ea324fae5d5d32486cc0fdc0fedd",
+ "sha256:bdca43c42d5f370358535b2107140ed550d74f9ef0fc95d2d7fa8c4e40ee48c2",
+ "sha256:c1bde266dbc91b100abd0311102a6585df09cc963599421cc12fd6b4cfa8e3d3",
+ "sha256:c382aea9adaaee189f352d7a493e3f76c13f9337ec2b6aa40e76b114fa13ebac",
+ "sha256:c389c8041c94d4058827faf5735df5f8e4c1c1eebdd051859536dc393925a667",
+ "sha256:c8b35b3d5e326729e5edb73d593103d2dbfb474bd36ee95b4e85e1f8271ba98a",
+ "sha256:cd7292d872948c1f67c8cc12158f2c8ed9873d54368139ce1f67b2262ac34029",
+ "sha256:d2ae69d8e46637a203cfb48e05439fc9e2ff7646fa1f5396aa3577ce52810031",
+ "sha256:d315f08c106c884d6b42f70c9518e765a5bc23f6d3a587346bc4e6f198768c7a",
+ "sha256:d7234b0f61dd9ccb2304171b5fd7ed9db133b4ea7c835c9942c9dc5bfc00c1cb",
+ "sha256:da492482b923b9cc9ad37f0f5592c776279299db2a89c0b7fc931aaefec652d4",
+ "sha256:e0ed115902956efaca9d9a20fa9b2b604e3e11d640416ca74900d215cdcbf3ab",
+ "sha256:e57fbfad252434d64bdf4b6a935e4241616a4cf8df7af58b9772cd91fce9309a",
+ "sha256:e8d9d42a50f6e8e6b356e4e8b2fa1da725344ec0be6f8a6107b7196e5bd74906",
+ "sha256:ee8b10f797d0bfec626b803891cf9e98480ee11f2e8459a7616cdb7e4e4bf2de",
+ "sha256:f0c44169df8d23c7ce172bd90ef5deb44caf19f15990e4db266e3193071988a4",
+ "sha256:f13710240457ace5b8c9cca7f4971504656f5703b702895a86386e87c7103801",
+ "sha256:fa1767239ed45e1fa91d82fc0c63305750530787cd64089cabbe183eb538a35b",
+ "sha256:fbca115e11685b5891755cc22b3db4348071b8d100a41e1ce93526d9c3dbf2d5",
+ "sha256:fe0cc975aac87b364fdb55cb89642435c3e859dcd99d7260f48af94111ba2673"
+ ],
+ "index": "pypi",
+ "version": "==2.2.11"
},
"pymysql": {
"hashes": [
@@ -2185,28 +2187,30 @@
},
"pyopenssl": {
"hashes": [
- "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2",
- "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"
+ "sha256:17ed5be5936449c5418d1cd269a1a9e9081bc54c17aed272b45856a3d3dc86ad",
+ "sha256:cabed4bfaa5df9f1a16c0ef64a0cb65318b5cd077a7eda7d6970131ca2f41a6f"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==23.3.0"
+ "version": "==24.1.0"
},
"pyparsing": {
"hashes": [
- "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb",
- "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"
+ "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad",
+ "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"
],
"index": "pypi",
"markers": "python_full_version >= '3.6.8'",
- "version": "==3.0.9"
+ "version": "==3.1.2"
},
- "pypdf3": {
+ "pypdf": {
"hashes": [
- "sha256:c946f3273419e37258e35e72273f49904ab15723d87a761c1115ef99799f8c5f"
+ "sha256:dc035581664e0ad717e3492acebc1a5fc23dba759e788e3d4a9fc9b1a32e72c1",
+ "sha256:fe63f3f7d1dcda1c9374421a94c1bba6c6f8c4a62173a59b64ffd52058f846b1"
],
"index": "pypi",
- "version": "==1.0.6"
+ "markers": "python_version >= '3.6'",
+ "version": "==4.2.0"
},
"pyprof2calltree": {
"hashes": [
@@ -2251,12 +2255,12 @@
},
"pysaml2": {
"hashes": [
- "sha256:2bc5147b3b2f902a9131bf08240c068becea29994aafb7654a63d7270ac5b63b",
- "sha256:6616abe0526915cabef6af3a81570bd4c339bedd8db3ab12dcd4fa0612896837"
+ "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318",
+ "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"
],
"index": "pypi",
"markers": "python_version >= '3.9' and python_version < '4.0'",
- "version": "==7.4.2"
+ "version": "==7.5.0"
},
"pysmb": {
"hashes": [
@@ -2267,21 +2271,21 @@
},
"pysmi-lextudio": {
"hashes": [
- "sha256:749fc6c7e139bcdc821ed2430eb8b113cd51266241e97be7f6aa401c319bab69",
- "sha256:d4518b2747b49fbcda191a29acebe4408bdf6f5011945152fe306e6fdef916bd"
+ "sha256:7d255fb38669410835acf6c2e8ab41975a6d8e64593b119552e36ecba004054f",
+ "sha256:cb629c6386a30c976f83c29fc71e53b06d60f15094d0c0114cf8d095351b76e5"
],
"index": "pypi",
- "markers": "python_version >= '3.7' and python_version < '4.0'",
- "version": "==1.1.13"
+ "markers": "python_version >= '3.8' and python_version < '4.0'",
+ "version": "==1.4.3"
},
"pysnmp-lextudio": {
"hashes": [
- "sha256:073e93cac4b29be9f40ea5514ad5ac3ba6c26824a96b7a2a96dab7cfa6422e5d",
- "sha256:eacc5f33d3a8c7a30122e5f06fd97393a0f101961ffb3f549eabd33ea22f5f6c"
+ "sha256:4035677d236f9fb6da5dbcfae2dc9122ee3652f535efeb904a56f54f162f28c9",
+ "sha256:5a30d289f73fbdd56ca4a6b83e43cef3a1871eee402651748c8ff47f603e5cb2"
],
"index": "pypi",
"markers": "python_version >= '3.7' and python_version < '4.0'",
- "version": "==5.0.29"
+ "version": "==6.1.2"
},
"pysnmpcrypto": {
"hashes": [
@@ -2302,6 +2306,9 @@
"version": "==0.0.4"
},
"pyspnego": {
+ "extras": [
+ "kerberos"
+ ],
"hashes": [
"sha256:3d5c5c28dbd0cd6a679acf45219630254db3c0e5ad4a16de521caa0585b088c0",
"sha256:9a22c23aeae7b4424fdb2482450d3f8302ac012e2644e1cfe735cf468fcd12ed"
@@ -2363,12 +2370,12 @@
},
"python-dateutil": {
"hashes": [
- "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
- "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
+ "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3",
+ "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"
],
"index": "pypi",
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.8.2"
+ "version": "==2.9.0.post0"
},
"python-ldap": {
"hashes": [
@@ -2380,11 +2387,11 @@
},
"python-multipart": {
"hashes": [
- "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132",
- "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"
+ "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026",
+ "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"
],
- "markers": "python_version >= '3.7'",
- "version": "==0.0.6"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.0.9"
},
"python-snap7": {
"hashes": [
@@ -2401,13 +2408,10 @@
},
"pytz": {
"hashes": [
- "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588",
- "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b",
- "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb",
- "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"
+ "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812",
+ "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"
],
- "index": "pypi",
- "version": "==2023.3"
+ "version": "==2024.1"
},
"pyyaml": {
"hashes": [
@@ -2440,6 +2444,7 @@
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4",
"sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba",
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8",
+ "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef",
"sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5",
"sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd",
"sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3",
@@ -2473,6 +2478,14 @@
],
"version": "==2.0.2"
},
+ "redfish": {
+ "hashes": [
+ "sha256:217ef1fc9a3366396c206ea4c4626008c3d4c75d0c44d56058a314ef9b247794",
+ "sha256:7f18458b6fce6c7e41af7a24e2da7f4218f1b647a68d81ad4a840a8788349174"
+ ],
+ "index": "pypi",
+ "version": "==3.2.2"
+ },
"redis": {
"hashes": [
"sha256:2c19e6767c474f2e85167909061d525ed65bea9301c0770bb151e041b7ac89a2",
@@ -2482,57 +2495,22 @@
"markers": "python_version >= '3.7'",
"version": "==4.5.4"
},
+ "referencing": {
+ "hashes": [
+ "sha256:81a1471c68c9d5e3831c30ad1dd9815c45b558e596653db751a2bfdd17b3b9ec",
+ "sha256:c19c4d006f1757e3dd75c4f784d38f8698d87b649c54f9ace14e5e8c9667c01d"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==0.31.1"
+ },
"reportlab": {
"hashes": [
- "sha256:0b94e4f65a5f77a631cc010c9a7892d69e33f3251b760639dcc76420e138ce95",
- "sha256:11a71c314183532d889ad4b3941f61c3fe4bfdda769c768a7f02d93cb69dd1bb",
- "sha256:149718c3eaee937f28094325f0dd9ae1add3172c2dacbb93ff5403f37c9d3c57",
- "sha256:21d6b6bcdecee9c7ce047156d0553a30d736b8172629e4c0fcacab35ba261f3b",
- "sha256:269c59e508df08be498ab9e5278addb2cc16989677a03f800b17f8a31f8c5cc7",
- "sha256:36568d3cb4101a210c4d821d9101635c2ef6e06bd649335938c01eb197f50c5d",
- "sha256:3cb0da4975dbade6cc2ea6b0b0b17578af266dc3f669e959648f3306af993369",
- "sha256:48eadd93237c7e2739525c74cf6615dd6c1a767c839f4b0d7c12167dc0b09911",
- "sha256:57add04824bca89a130f9d428ace1b003cce4061386e0ec2a1b45b554ffe7aa3",
- "sha256:58ea3471b9b4b8e7952bd357e8487789da11213470be328ffb3e5b7d7690c2c7",
- "sha256:5a460f4c0c30bdf9d7bef46a816671a4386a9253670a53d35c694c666544261f",
- "sha256:6172481e8acffcf72042653e977281fbd807a41705a39456d92d2606d8b8c5e2",
- "sha256:65b441e22d8fe93154567a30662d8539e639b78142815afcaf92b388846eb3c1",
- "sha256:6ea46fef07c588fef84d1164d4788fef322b39feb2bfb2df0a0706181dff79b8",
- "sha256:6f75d33f7a3720cf47371ab63ced0f0ebd1aeb6db19386ae92f8977a09be9611",
- "sha256:6fdac930dfdc6227720545ec44fdb396e92d53ec227a6f5ae58cc8cb9a6cbe89",
- "sha256:701290747662d2b3be49fc0de33898ecc9ce3fafe0e2887d406e24693465e5ae",
- "sha256:753485bb2b18cbd11340e227e4aaf9bde3bb64f83406dfa011e92ad0231a42c9",
- "sha256:7b690bc30f58931b0abd47635d93a43a82d67972e83a6511cc8adbcd7da25310",
- "sha256:7efdf68e97e8fea8683bfc17f25747fefbda729b9018bc2e3221658ac41ee0bd",
- "sha256:7ff89011b5ee30209b3106641e3b7b4959f10aa6e9d6f3030205123c178f605d",
- "sha256:8260c002e4845a5af65908d5ee2099bcc25a16c7646c5c417fa27f1e4b844bc1",
- "sha256:8e4983486d419daa45cade40874bb869976e27ba11f77fb4b9ae32417284ade7",
- "sha256:8f00175f8e12e6f7d3a01309de6d7008fac94a2cdce6837ad066f0961472c9e5",
- "sha256:9f869286fcefa7f8e89e38448309891ff110ad74f58a7317ec204f3d4b8ad5f5",
- "sha256:a0330322c6c8123745ac7667fcc6ae3e0de3b73c15bdfaa28c788a9eaa0f50da",
- "sha256:a043cff1781ddb2a0ba0e8e760a79fc5be2430957c4f2a1f51bd4528cc53178f",
- "sha256:a477f652e6c417ad40387a8498d9ad827421006f156aab16f67adc9b81699a72",
- "sha256:a4dbc28fede7f504b9ac65ce9cbea35585e999d63f9fa68bc73f5a75b4929302",
- "sha256:afb418409e0d323c6cb5e3be7ea4d14dfbf8a07eb03ab0b0062904cacf819878",
- "sha256:b0d91663d450c11404ec189ebc5a4abdf20f7c4eca5954a920427cdbf5601525",
- "sha256:ba6f533b262f4ee1636b754992bb2fb349df0500d765ac9be014a375c047f4db",
- "sha256:bbdbba1ec3498b17eefca14d424ee90bb95b53e1423ecb22f1c17733c3406559",
- "sha256:ca8eb7a6607f8a664187a330bab9f8d11c9f81ed885e063dfbb29a130944a72a",
- "sha256:cca2d4c783f985b91b98e80d09ac79b6ed3f317a729cba5ba86edfe5eb9a2d9c",
- "sha256:d59e62faa03003be81aa14d37ac34ea110e5ac59c8678fd4c0daa7d8b8f42096",
- "sha256:d95fc8bc177a009053548c6d851a513b2147c465a5e8fea82287ea22d6825c4e",
- "sha256:dbddadca6f08212732e83a60e30a42cfc7d2695892cedea208b3c3e7131c9993",
- "sha256:e13a4e81761636591f5b60104f6e1eec70832ffd9aa781db68d7ebb576970d4b",
- "sha256:e28a8d9cf462e2b4c9e71abd0630f9ec245d88b976b283b0dbb4602c9ddb3938",
- "sha256:e5949f3b4e207fa7901c0cc3b49470b2a3372617a47dfbc892db31c2b56af296",
- "sha256:e98965c6e60d76ff63989d9400ae8e65efd67c665d785b377f438f166a57c053",
- "sha256:f1993a68c0edc45895d3df350d01b0456efe79aaf309cef777762742be501f2a",
- "sha256:faeebde62f0f6ad86985bec5685411260393d2eb7ba907972da56af586b644e8",
- "sha256:ff09a0a1e5cef05309ac09dfc5185e8151d927bcf45470d2f540c96260f8a355"
+ "sha256:28a40d5000afbd8ccae15a47f7abe2841768461354bede1a9d42841132997c98",
+ "sha256:3a99faf412691159c068b3ff01c15307ce2fd2cf6b860199434874e002040a84"
],
"index": "pypi",
"markers": "python_version >= '3.7' and python_version < '4'",
- "version": "==3.6.13"
+ "version": "==4.1.0"
},
"requests": {
"hashes": [
@@ -2545,13 +2523,12 @@
},
"requests-kerberos": {
"hashes": [
- "sha256:1c3c32242bf0f5f1986656fa2ca913f486e47d9f31598640f47c914ffb8d5642",
- "sha256:5733abc0b6524815f6fc72d5c0ec9f3fb89137b852adea2a461c45931f5675e0",
- "sha256:5ef3966b4db9e9e5f0027815e350d3e81a70e4bc3caace89a9be94fc6e0e1b22",
- "sha256:9d21f15241c53c2ad47e813138b9aee4b9acdd04b82048c4388ade15f40a52fd"
+ "sha256:cda9d1240ae5392e081869881c8742d0e171fd6a893a7ac0875db2748e966fd1",
+ "sha256:da74ea478ccd8584de88092bdcd17a7c29d494374a340d1d8677189903c9ac6a"
],
"index": "pypi",
- "version": "==0.12.0"
+ "markers": "python_version >= '3.6'",
+ "version": "==0.14.0"
},
"requests-ntlm": {
"hashes": [
@@ -2578,6 +2555,13 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.0.0"
},
+ "requests-unixsocket": {
+ "hashes": [
+ "sha256:28304283ea9357d45fff58ad5b11e47708cfbf5806817aa59b2a363228ee971e",
+ "sha256:c685c680f0809e1b2955339b1e5afc3c0022b3066f4f7eb343f43a6065fc0e5d"
+ ],
+ "version": "==0.3.0"
+ },
"rfc3339-validator": {
"hashes": [
"sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b",
@@ -2596,6 +2580,111 @@
"markers": "python_version >= '3.7'",
"version": "==4.0"
},
+ "rpds-py": {
+ "hashes": [
+ "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f",
+ "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c",
+ "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76",
+ "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e",
+ "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157",
+ "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f",
+ "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5",
+ "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05",
+ "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24",
+ "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1",
+ "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8",
+ "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b",
+ "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb",
+ "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07",
+ "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1",
+ "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6",
+ "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e",
+ "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e",
+ "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1",
+ "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab",
+ "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4",
+ "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17",
+ "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594",
+ "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d",
+ "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d",
+ "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3",
+ "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c",
+ "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66",
+ "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f",
+ "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80",
+ "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33",
+ "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f",
+ "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c",
+ "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022",
+ "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e",
+ "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f",
+ "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da",
+ "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1",
+ "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688",
+ "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795",
+ "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c",
+ "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98",
+ "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1",
+ "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20",
+ "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307",
+ "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4",
+ "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18",
+ "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294",
+ "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66",
+ "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467",
+ "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948",
+ "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e",
+ "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1",
+ "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0",
+ "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7",
+ "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd",
+ "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641",
+ "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d",
+ "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9",
+ "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1",
+ "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da",
+ "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3",
+ "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa",
+ "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7",
+ "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40",
+ "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496",
+ "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124",
+ "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836",
+ "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434",
+ "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984",
+ "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f",
+ "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6",
+ "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e",
+ "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461",
+ "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c",
+ "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432",
+ "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73",
+ "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58",
+ "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88",
+ "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337",
+ "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7",
+ "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863",
+ "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475",
+ "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3",
+ "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51",
+ "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf",
+ "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024",
+ "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40",
+ "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9",
+ "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec",
+ "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb",
+ "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7",
+ "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861",
+ "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880",
+ "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f",
+ "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd",
+ "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca",
+ "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58",
+ "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==0.18.0"
+ },
"rrdtool": {
"hashes": [
"sha256:5f0aff8b3e0a0f701652fa88bf605a54be9e6b25fba52a13b67c71f7b35a1451"
@@ -2614,107 +2703,115 @@
},
"s3transfer": {
"hashes": [
- "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd",
- "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"
+ "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e",
+ "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==0.6.0"
- },
- "semver": {
- "hashes": [
- "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc",
- "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"
- ],
- "markers": "python_version >= '3.7'",
- "version": "==3.0.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.10.0"
},
"setproctitle": {
"hashes": [
- "sha256:1c5d5dad7c28bdd1ec4187d818e43796f58a845aa892bb4481587010dc4d362b",
- "sha256:1c8d9650154afaa86a44ff195b7b10d683c73509d085339d174e394a22cccbb9",
- "sha256:1f0cde41857a644b7353a0060b5f94f7ba7cf593ebde5a1094da1be581ac9a31",
- "sha256:1f29b75e86260b0ab59adb12661ef9f113d2f93a59951373eb6d68a852b13e83",
- "sha256:1fa1a0fbee72b47dc339c87c890d3c03a72ea65c061ade3204f285582f2da30f",
- "sha256:1ff863a20d1ff6ba2c24e22436a3daa3cd80be1dfb26891aae73f61b54b04aca",
- "sha256:265ecbe2c6eafe82e104f994ddd7c811520acdd0647b73f65c24f51374cf9494",
- "sha256:288943dec88e178bb2fd868adf491197cc0fc8b6810416b1c6775e686bab87fe",
- "sha256:2a97d51c17d438cf5be284775a322d57b7ca9505bb7e118c28b1824ecaf8aeaa",
- "sha256:2e3ac25bfc4a0f29d2409650c7532d5ddfdbf29f16f8a256fc31c47d0dc05172",
- "sha256:2fbd8187948284293f43533c150cd69a0e4192c83c377da837dbcd29f6b83084",
- "sha256:37ece938110cab2bb3957e3910af8152ca15f2b6efdf4f2612e3f6b7e5459b80",
- "sha256:4058564195b975ddc3f0462375c533cce310ccdd41b80ac9aed641c296c3eff4",
- "sha256:4749a2b0c9ac52f864d13cee94546606f92b981b50e46226f7f830a56a9dc8e1",
- "sha256:4bba3be4c1fabf170595b71f3af46c6d482fbe7d9e0563999b49999a31876f77",
- "sha256:4d8938249a7cea45ab7e1e48b77685d0f2bab1ebfa9dde23e94ab97968996a7c",
- "sha256:5194b4969f82ea842a4f6af2f82cd16ebdc3f1771fb2771796e6add9835c1973",
- "sha256:55ce1e9925ce1765865442ede9dca0ba9bde10593fcd570b1f0fa25d3ec6b31c",
- "sha256:570d255fd99c7f14d8f91363c3ea96bd54f8742275796bca67e1414aeca7d8c3",
- "sha256:587c7d6780109fbd8a627758063d08ab0421377c0853780e5c356873cdf0f077",
- "sha256:589be87172b238f839e19f146b9ea47c71e413e951ef0dc6db4218ddacf3c202",
- "sha256:5b932c3041aa924163f4aab970c2f0e6b4d9d773f4d50326e0ea1cd69240e5c5",
- "sha256:5fb4f769c02f63fac90989711a3fee83919f47ae9afd4758ced5d86596318c65",
- "sha256:630f6fe5e24a619ccf970c78e084319ee8be5be253ecc9b5b216b0f474f5ef18",
- "sha256:65d884e22037b23fa25b2baf1a3316602ed5c5971eb3e9d771a38c3a69ce6e13",
- "sha256:6c877691b90026670e5a70adfbcc735460a9f4c274d35ec5e8a43ce3f8443005",
- "sha256:710e16fa3bade3b026907e4a5e841124983620046166f355bbb84be364bf2a02",
- "sha256:7a55fe05f15c10e8c705038777656fe45e3bd676d49ad9ac8370b75c66dd7cd7",
- "sha256:7aa0aac1711fadffc1d51e9d00a3bea61f68443d6ac0241a224e4d622489d665",
- "sha256:7f0bed90a216ef28b9d227d8d73e28a8c9b88c0f48a082d13ab3fa83c581488f",
- "sha256:7f2719a398e1a2c01c2a63bf30377a34d0b6ef61946ab9cf4d550733af8f1ef1",
- "sha256:7fe9df7aeb8c64db6c34fc3b13271a363475d77bc157d3f00275a53910cb1989",
- "sha256:88486e6cce2a18a033013d17b30a594f1c5cb42520c49c19e6ade40b864bb7ff",
- "sha256:8e4f8f12258a8739c565292a551c3db62cca4ed4f6b6126664e2381acb4931bf",
- "sha256:8ff3c8cb26afaed25e8bca7b9dd0c1e36de71f35a3a0706b5c0d5172587a3827",
- "sha256:9124bedd8006b0e04d4e8a71a0945da9b67e7a4ab88fdad7b1440dc5b6122c42",
- "sha256:92c626edc66169a1b09e9541b9c0c9f10488447d8a2b1d87c8f0672e771bc927",
- "sha256:a149a5f7f2c5a065d4e63cb0d7a4b6d3b66e6e80f12e3f8827c4f63974cbf122",
- "sha256:a47d97a75fd2d10c37410b180f67a5835cb1d8fdea2648fd7f359d4277f180b9",
- "sha256:a499fff50387c1520c085a07578a000123f519e5f3eee61dd68e1d301659651f",
- "sha256:a8e0881568c5e6beff91ef73c0ec8ac2a9d3ecc9edd6bd83c31ca34f770910c4",
- "sha256:ab45146c71ca6592c9cc8b354a2cc9cc4843c33efcbe1d245d7d37ce9696552d",
- "sha256:b2c9cb2705fc84cb8798f1ba74194f4c080aaef19d9dae843591c09b97678e98",
- "sha256:b34baef93bfb20a8ecb930e395ccd2ae3268050d8cf4fe187de5e2bd806fd796",
- "sha256:b617f12c9be61e8f4b2857be4a4319754756845dbbbd9c3718f468bbb1e17bcb",
- "sha256:b9fb97907c830d260fa0658ed58afd48a86b2b88aac521135c352ff7fd3477fd",
- "sha256:bae283e85fc084b18ffeb92e061ff7ac5af9e183c9d1345c93e178c3e5069cbe",
- "sha256:c2c46200656280a064073447ebd363937562debef329482fd7e570c8d498f806",
- "sha256:c8a09d570b39517de10ee5b718730e171251ce63bbb890c430c725c8c53d4484",
- "sha256:c91b9bc8985d00239f7dc08a49927a7ca1ca8a6af2c3890feec3ed9665b6f91e",
- "sha256:ca58cd260ea02759238d994cfae844fc8b1e206c684beb8f38877dcab8451dfc",
- "sha256:d7d17c8bd073cbf8d141993db45145a70b307385b69171d6b54bcf23e5d644de",
- "sha256:dad42e676c5261eb50fdb16bdf3e2771cf8f99a79ef69ba88729aeb3472d8575",
- "sha256:db684d6bbb735a80bcbc3737856385b55d53f8a44ce9b46e9a5682c5133a9bf7",
- "sha256:de3a540cd1817ede31f530d20e6a4935bbc1b145fd8f8cf393903b1e02f1ae76",
- "sha256:e00c9d5c541a2713ba0e657e0303bf96ddddc412ef4761676adc35df35d7c246",
- "sha256:e1aafc91cbdacc9e5fe712c52077369168e6b6c346f3a9d51bf600b53eae56bb",
- "sha256:e425be62524dc0c593985da794ee73eb8a17abb10fe692ee43bb39e201d7a099",
- "sha256:e43f315c68aa61cbdef522a2272c5a5b9b8fd03c301d3167b5e1343ef50c676c",
- "sha256:e49ae693306d7624015f31cb3e82708916759d592c2e5f72a35c8f4cc8aef258",
- "sha256:e5c50e164cd2459bc5137c15288a9ef57160fd5cbf293265ea3c45efe7870865",
- "sha256:e8579a43eafd246e285eb3a5b939e7158073d5087aacdd2308f23200eac2458b",
- "sha256:e85e50b9c67854f89635a86247412f3ad66b132a4d8534ac017547197c88f27d",
- "sha256:e932089c35a396dc31a5a1fc49889dd559548d14cb2237adae260382a090382e",
- "sha256:f0452282258dfcc01697026a8841258dd2057c4438b43914b611bccbcd048f10",
- "sha256:f4bfc89bd33ebb8e4c0e9846a09b1f5a4a86f5cb7a317e75cc42fee1131b4f4f",
- "sha256:fa2f50678f04fda7a75d0fe5dd02bbdd3b13cbe6ed4cf626e4472a7ccf47ae94",
- "sha256:faec934cfe5fd6ac1151c02e67156c3f526e82f96b24d550b5d51efa4a5527c6",
- "sha256:fcd3cf4286a60fdc95451d8d14e0389a6b4f5cebe02c7f2609325eb016535963",
- "sha256:fe8a988c7220c002c45347430993830666e55bc350179d91fcee0feafe64e1d4",
- "sha256:fed18e44711c5af4b681c2b3b18f85e6f0f1b2370a28854c645d636d5305ccd8",
- "sha256:ffc61a388a5834a97953d6444a2888c24a05f2e333f9ed49f977a87bb1ad4761"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==1.3.2"
+ "sha256:00e6e7adff74796ef12753ff399491b8827f84f6c77659d71bd0b35870a17d8f",
+ "sha256:059f4ce86f8cc92e5860abfc43a1dceb21137b26a02373618d88f6b4b86ba9b2",
+ "sha256:088b9efc62d5aa5d6edf6cba1cf0c81f4488b5ce1c0342a8b67ae39d64001120",
+ "sha256:0d3a953c50776751e80fe755a380a64cb14d61e8762bd43041ab3f8cc436092f",
+ "sha256:1342f4fdb37f89d3e3c1c0a59d6ddbedbde838fff5c51178a7982993d238fe4f",
+ "sha256:184239903bbc6b813b1a8fc86394dc6ca7d20e2ebe6f69f716bec301e4b0199d",
+ "sha256:195c961f54a09eb2acabbfc90c413955cf16c6e2f8caa2adbf2237d1019c7dd8",
+ "sha256:1f5d9027eeda64d353cf21a3ceb74bb1760bd534526c9214e19f052424b37e42",
+ "sha256:200620c3b15388d7f3f97e0ae26599c0c378fdf07ae9ac5a13616e933cbd2086",
+ "sha256:200ede6fd11233085ba9b764eb055a2a191fb4ffb950c68675ac53c874c22e20",
+ "sha256:21112fcd2195d48f25760f0eafa7a76510871bbb3b750219310cf88b04456ae3",
+ "sha256:224602f0939e6fb9d5dd881be1229d485f3257b540f8a900d4271a2c2aa4e5f4",
+ "sha256:287490eb90e7a0ddd22e74c89a92cc922389daa95babc833c08cf80c84c4df0a",
+ "sha256:2982efe7640c4835f7355fdb4da313ad37fb3b40f5c69069912f8048f77b28c8",
+ "sha256:2df2b67e4b1d7498632e18c56722851ba4db5d6a0c91aaf0fd395111e51cdcf4",
+ "sha256:2e4a8104db15d3462e29d9946f26bed817a5b1d7a47eabca2d9dc2b995991503",
+ "sha256:2e71f6365744bf53714e8bd2522b3c9c1d83f52ffa6324bd7cbb4da707312cd8",
+ "sha256:334f7ed39895d692f753a443102dd5fed180c571eb6a48b2a5b7f5b3564908c8",
+ "sha256:33c5609ad51cd99d388e55651b19148ea99727516132fb44680e1f28dd0d1de9",
+ "sha256:37a62cbe16d4c6294e84670b59cf7adcc73faafe6af07f8cb9adaf1f0e775b19",
+ "sha256:38ae9a02766dad331deb06855fb7a6ca15daea333b3967e214de12cfae8f0ef5",
+ "sha256:38da436a0aaace9add67b999eb6abe4b84397edf4a78ec28f264e5b4c9d53cd5",
+ "sha256:415bfcfd01d1fbf5cbd75004599ef167a533395955305f42220a585f64036081",
+ "sha256:417de6b2e214e837827067048f61841f5d7fc27926f2e43954567094051aff18",
+ "sha256:477d3da48e216d7fc04bddab67b0dcde633e19f484a146fd2a34bb0e9dbb4a1e",
+ "sha256:4a6ba2494a6449b1f477bd3e67935c2b7b0274f2f6dcd0f7c6aceae10c6c6ba3",
+ "sha256:4fe1c49486109f72d502f8be569972e27f385fe632bd8895f4730df3c87d5ac8",
+ "sha256:507e8dc2891021350eaea40a44ddd887c9f006e6b599af8d64a505c0f718f170",
+ "sha256:53bc0d2358507596c22b02db079618451f3bd720755d88e3cccd840bafb4c41c",
+ "sha256:554eae5a5b28f02705b83a230e9d163d645c9a08914c0ad921df363a07cf39b1",
+ "sha256:59335d000c6250c35989394661eb6287187854e94ac79ea22315469ee4f4c244",
+ "sha256:5a740f05d0968a5a17da3d676ce6afefebeeeb5ce137510901bf6306ba8ee002",
+ "sha256:5bc94cf128676e8fac6503b37763adb378e2b6be1249d207630f83fc325d9b11",
+ "sha256:64286f8a995f2cd934082b398fc63fca7d5ffe31f0e27e75b3ca6b4efda4e353",
+ "sha256:664698ae0013f986118064b6676d7dcd28fefd0d7d5a5ae9497cbc10cba48fa5",
+ "sha256:68f960bc22d8d8e4ac886d1e2e21ccbd283adcf3c43136161c1ba0fa509088e0",
+ "sha256:69d565d20efe527bd8a9b92e7f299ae5e73b6c0470f3719bd66f3cd821e0d5bd",
+ "sha256:6a143b31d758296dc2f440175f6c8e0b5301ced3b0f477b84ca43cdcf7f2f476",
+ "sha256:6a249415f5bb88b5e9e8c4db47f609e0bf0e20a75e8d744ea787f3092ba1f2d0",
+ "sha256:6b9e62ddb3db4b5205c0321dd69a406d8af9ee1693529d144e86bd43bcb4b6c0",
+ "sha256:7f1d36a1e15a46e8ede4e953abb104fdbc0845a266ec0e99cc0492a4364f8c44",
+ "sha256:816330675e3504ae4d9a2185c46b573105d2310c20b19ea2b4596a9460a4f674",
+ "sha256:87e668f9561fd3a457ba189edfc9e37709261287b52293c115ae3487a24b92f6",
+ "sha256:897a73208da48db41e687225f355ce993167079eda1260ba5e13c4e53be7f754",
+ "sha256:8c331e91a14ba4076f88c29c777ad6b58639530ed5b24b5564b5ed2fd7a95452",
+ "sha256:950f6476d56ff7817a8fed4ab207727fc5260af83481b2a4b125f32844df513a",
+ "sha256:9617b676b95adb412bb69645d5b077d664b6882bb0d37bfdafbbb1b999568d85",
+ "sha256:9e3b99b338598de0bd6b2643bf8c343cf5ff70db3627af3ca427a5e1a1a90dd9",
+ "sha256:a1fcac43918b836ace25f69b1dca8c9395253ad8152b625064415b1d2f9be4fb",
+ "sha256:a680d62c399fa4b44899094027ec9a1bdaf6f31c650e44183b50d4c4d0ccc085",
+ "sha256:a6d50252377db62d6a0bb82cc898089916457f2db2041e1d03ce7fadd4a07381",
+ "sha256:a83ca086fbb017f0d87f240a8f9bbcf0809f3b754ee01cec928fff926542c450",
+ "sha256:a911b26264dbe9e8066c7531c0591cfab27b464459c74385b276fe487ca91c12",
+ "sha256:ab2900d111e93aff5df9fddc64cf51ca4ef2c9f98702ce26524f1acc5a786ae7",
+ "sha256:ab92e51cd4a218208efee4c6d37db7368fdf182f6e7ff148fb295ecddf264287",
+ "sha256:accb66d7b3ccb00d5cd11d8c6e07055a4568a24c95cf86109894dcc0c134cc89",
+ "sha256:ad6d20f9541f5f6ac63df553b6d7a04f313947f550eab6a61aa758b45f0d5657",
+ "sha256:aeaa71fb9568ebe9b911ddb490c644fbd2006e8c940f21cb9a1e9425bd709574",
+ "sha256:af2c67ae4c795d1674a8d3ac1988676fa306bcfa1e23fddb5e0bd5f5635309ca",
+ "sha256:af4061f67fd7ec01624c5e3c21f6b7af2ef0e6bab7fbb43f209e6506c9ce0092",
+ "sha256:b1067647ac7aba0b44b591936118a22847bda3c507b0a42d74272256a7a798e9",
+ "sha256:b5901a31012a40ec913265b64e48c2a4059278d9f4e6be628441482dd13fb8b5",
+ "sha256:bbbd6c7de0771c84b4aa30e70b409565eb1fc13627a723ca6be774ed6b9d9fa3",
+ "sha256:bdfd7254745bb737ca1384dee57e6523651892f0ea2a7344490e9caefcc35e64",
+ "sha256:c05ac48ef16ee013b8a326c63e4610e2430dbec037ec5c5b58fcced550382b74",
+ "sha256:c1c84beab776b0becaa368254801e57692ed749d935469ac10e2b9b825dbdd8e",
+ "sha256:c32c41ace41f344d317399efff4cffb133e709cec2ef09c99e7a13e9f3b9483c",
+ "sha256:c3ba57029c9c50ecaf0c92bb127224cc2ea9fda057b5d99d3f348c9ec2855ad3",
+ "sha256:c7951820b77abe03d88b114b998867c0f99da03859e5ab2623d94690848d3e45",
+ "sha256:c913e151e7ea01567837ff037a23ca8740192880198b7fbb90b16d181607caae",
+ "sha256:c9a402881ec269d0cc9c354b149fc29f9ec1a1939a777f1c858cdb09c7a261df",
+ "sha256:cbf16381c7bf7f963b58fb4daaa65684e10966ee14d26f5cc90f07049bfd8c1e",
+ "sha256:d4460795a8a7a391e3567b902ec5bdf6c60a47d791c3b1d27080fc203d11c9dc",
+ "sha256:d7f27e0268af2d7503386e0e6be87fb9b6657afd96f5726b733837121146750d",
+ "sha256:d876d355c53d975c2ef9c4f2487c8f83dad6aeaaee1b6571453cb0ee992f55f6",
+ "sha256:da0d57edd4c95bf221b2ebbaa061e65b1788f1544977288bdf95831b6e44e44d",
+ "sha256:ddedd300cd690a3b06e7eac90ed4452348b1348635777ce23d460d913b5b63c3",
+ "sha256:df3f4274b80709d8bcab2f9a862973d453b308b97a0b423a501bcd93582852e3",
+ "sha256:e18b7bd0898398cc97ce2dfc83bb192a13a087ef6b2d5a8a36460311cb09e775",
+ "sha256:e5119a211c2e98ff18b9908ba62a3bd0e3fabb02a29277a7232a6fb4b2560aa0",
+ "sha256:e5e08e232b78ba3ac6bc0d23ce9e2bee8fad2be391b7e2da834fc9a45129eb87",
+ "sha256:eae8988e78192fd1a3245a6f4f382390b61bce6cfcc93f3809726e4c885fa68d",
+ "sha256:f05e66746bf9fe6a3397ec246fe481096664a9c97eb3fea6004735a4daf867fd",
+ "sha256:f1da82c3e11284da4fcbf54957dafbf0655d2389cd3d54e4eaba636faf6d117a",
+ "sha256:f38d48abc121263f3b62943f84cbaede05749047e428409c2c199664feb6abc7",
+ "sha256:f5e7266498cd31a4572378c61920af9f6b4676a73c299fce8ba93afd694f8ae7",
+ "sha256:fc74e84fdfa96821580fb5e9c0b0777c1c4779434ce16d3d62a9c4d8c710df39",
+ "sha256:ff814dea1e5c492a4980e3e7d094286077054e7ea116cbeda138819db194b2cd"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.7'",
+ "version": "==1.3.3"
},
"setuptools": {
"hashes": [
- "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2",
- "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"
+ "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987",
+ "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"
],
"markers": "python_version >= '3.8'",
- "version": "==69.0.2"
+ "version": "==69.5.1"
},
"setuptools-scm": {
"hashes": [
@@ -2756,64 +2853,64 @@
},
"sniffio": {
"hashes": [
- "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101",
- "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"
+ "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2",
+ "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"
],
"markers": "python_version >= '3.7'",
- "version": "==1.3.0"
+ "version": "==1.3.1"
},
"snmpsim-lextudio": {
"hashes": [
- "sha256:5eaf21c72da67ffd9bd965ac183b78e67d4f667b87363c744833abd1c2f94842",
- "sha256:7754b0aa275be5224853aa9c6ee9b197a05efafd4c84fb0360c5b0756c1d7351"
+ "sha256:0d6fe597652b8c0fd517433f0c92b67ad9febbc8733f6658d2aea3b1e3d35e59",
+ "sha256:24eaeacac89fea434126ec3f9c8cf96ebfb27fa7a81b4d4736137c977391b281"
],
"index": "pypi",
- "markers": "python_version >= '3.7' and python_version < '4.0'",
- "version": "==1.0.5"
+ "markers": "python_version >= '3.8' and python_version < '4.0'",
+ "version": "==1.1.0"
},
"starlette": {
"hashes": [
- "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75",
- "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"
+ "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee",
+ "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"
],
- "markers": "python_version >= '3.7'",
- "version": "==0.27.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.37.2"
},
"tenacity": {
"hashes": [
- "sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0",
- "sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"
+ "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a",
+ "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"
],
"index": "pypi",
- "markers": "python_version >= '3.6'",
- "version": "==8.2.2"
+ "markers": "python_version >= '3.7'",
+ "version": "==8.2.3"
},
"tqdm": {
"hashes": [
- "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386",
- "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"
+ "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9",
+ "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"
],
"markers": "python_version >= '3.7'",
- "version": "==4.66.1"
+ "version": "==4.66.2"
},
"typing-extensions": {
"hashes": [
- "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783",
- "sha256:56a8f7a8776ea160e59ef0af6fc3a3a03b7d42156b90e47f0241515fcec620c2",
- "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd",
- "sha256:cc22327e22d9b583d1565ce1ed9f5ecc22831afa743f8789a403cad849fb702b"
+ "sha256:6f1117ac0cbe64536f34520c4688cd144794f9b1d79690bfe0389aa12a347976",
+ "sha256:7427ef26efa5e4e465e3765af0e52d3897e3684c908efe20e3331e1ce51884b3",
+ "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0",
+ "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==4.9.0"
+ "version": "==4.11.0"
},
"tzdata": {
"hashes": [
- "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a",
- "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"
+ "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd",
+ "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"
],
"markers": "python_version >= '2'",
- "version": "==2023.3"
+ "version": "==2024.1"
},
"tzlocal": {
"hashes": [
@@ -2833,12 +2930,12 @@
},
"urllib3": {
"hashes": [
- "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07",
- "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"
+ "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84",
+ "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"
],
"index": "pypi",
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
- "version": "==1.26.18"
+ "markers": "python_version >= '3.7'",
+ "version": "==2.0.7"
},
"uvicorn": {
"hashes": [
@@ -2850,12 +2947,11 @@
},
"vcrpy": {
"hashes": [
- "sha256:7cd3e81a2c492e01c281f180bcc2a86b520b173d2b656cb5d89d99475423e013",
- "sha256:efac3e2e0b2af7686f83a266518180af7a048619b2f696e7bad9520f5e2eac09"
+ "sha256:9e023fee7f892baa0bbda2f7da7c8ac51165c1c6e38ff8688683a12a4bde9278"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==4.2.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==6.0.1"
},
"websocket-client": {
"hashes": [
@@ -2868,12 +2964,12 @@
},
"werkzeug": {
"hashes": [
- "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe",
- "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"
+ "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795",
+ "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==2.2.3"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.0.2"
},
"wrapt": {
"hashes": [
@@ -3034,18 +3130,18 @@
},
"x-wr-timezone": {
"hashes": [
- "sha256:0a5e20c603c0876b03b6ce3b8682c69c18da8ca107aec70caefa58a8f396bf48",
- "sha256:26d98b5f5ae190b68df8b9b9856c4867389956f5b295e0e14f632d7341b60f67"
+ "sha256:0b5e16f677c8f51ce41087a0b3d4f786c5fdcf78af4f8a75d4d960107dcb6d3a",
+ "sha256:ccbaae164a52086535fd2eb1dced164a5250acc83a672fbb044d11e81a268965"
],
- "version": "==0.0.6"
+ "version": "==0.0.7"
},
"xmlschema": {
"hashes": [
- "sha256:276a03e0fd3c94c148d528bff4d9482f9b99bf8c7b4056a2e8e703d28149d454",
- "sha256:f2b29c45485fac414cc1fdb38d18a220c5987d7d3aa996e6df6ff35ee94d5a63"
+ "sha256:4f7497de6c8b6dc2c28ad7b9ed6e21d186f4afe248a5bea4f54eedab4da44083",
+ "sha256:ec2b2a15c8896c1fcd14dcee34ca30032b99456c3c43ce793fdb9dca2fb4b869"
],
"markers": "python_version >= '3.7'",
- "version": "==2.5.0"
+ "version": "==2.5.1"
},
"xmltodict": {
"hashes": [
@@ -3058,93 +3154,109 @@
},
"yarl": {
"hashes": [
- "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87",
- "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89",
- "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a",
- "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08",
- "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996",
- "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077",
- "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901",
- "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e",
- "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee",
- "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574",
- "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165",
- "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634",
- "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229",
- "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b",
- "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f",
- "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7",
- "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf",
- "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89",
- "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0",
- "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1",
- "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe",
- "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf",
- "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76",
- "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951",
- "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863",
- "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06",
- "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562",
- "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6",
- "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c",
- "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e",
- "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1",
- "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3",
- "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3",
- "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778",
- "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8",
- "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2",
- "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b",
- "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d",
- "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f",
- "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c",
- "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581",
- "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918",
- "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c",
- "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e",
- "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220",
- "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37",
- "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739",
- "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77",
- "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6",
- "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42",
- "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946",
- "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5",
- "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d",
- "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146",
- "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a",
- "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83",
- "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef",
- "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80",
- "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588",
- "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5",
- "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2",
- "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef",
- "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826",
- "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05",
- "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516",
- "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0",
- "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4",
- "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2",
- "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0",
- "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd",
- "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8",
- "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b",
- "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1",
- "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==1.8.2"
+ "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51",
+ "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce",
+ "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559",
+ "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0",
+ "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81",
+ "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc",
+ "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4",
+ "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c",
+ "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130",
+ "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136",
+ "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e",
+ "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec",
+ "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7",
+ "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1",
+ "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455",
+ "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099",
+ "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129",
+ "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10",
+ "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142",
+ "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98",
+ "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa",
+ "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7",
+ "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525",
+ "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c",
+ "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9",
+ "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c",
+ "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8",
+ "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b",
+ "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf",
+ "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23",
+ "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd",
+ "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27",
+ "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f",
+ "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece",
+ "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434",
+ "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec",
+ "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff",
+ "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78",
+ "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d",
+ "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863",
+ "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53",
+ "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31",
+ "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15",
+ "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5",
+ "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b",
+ "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57",
+ "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3",
+ "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1",
+ "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f",
+ "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad",
+ "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c",
+ "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7",
+ "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2",
+ "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b",
+ "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2",
+ "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b",
+ "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9",
+ "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be",
+ "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e",
+ "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984",
+ "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4",
+ "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074",
+ "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2",
+ "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392",
+ "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91",
+ "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541",
+ "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf",
+ "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572",
+ "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66",
+ "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575",
+ "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14",
+ "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5",
+ "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1",
+ "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e",
+ "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551",
+ "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17",
+ "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead",
+ "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0",
+ "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe",
+ "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234",
+ "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0",
+ "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7",
+ "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34",
+ "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42",
+ "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385",
+ "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78",
+ "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be",
+ "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958",
+ "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749",
+ "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.7'",
+ "version": "==1.9.4"
},
"zipp": {
"hashes": [
- "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b",
- "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"
+ "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b",
+ "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==3.15.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.18.1"
}
},
"develop": {
@@ -3157,62 +3269,95 @@
},
"alabaster": {
"hashes": [
- "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3",
- "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"
+ "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65",
+ "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"
],
- "markers": "python_version >= '3.6'",
- "version": "==0.7.13"
+ "markers": "python_version >= '3.9'",
+ "version": "==0.7.16"
},
"annotated-types": {
"hashes": [
- "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802",
- "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"
+ "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43",
+ "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==0.5.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.6.0"
},
"anyio": {
"hashes": [
- "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780",
- "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"
+ "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8",
+ "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"
],
- "markers": "python_version >= '3.7'",
- "version": "==3.7.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==4.3.0"
},
"astroid": {
"hashes": [
- "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91",
- "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"
+ "sha256:951798f922990137ac090c53af473db7ab4e70c770e6d7fae0cec59f74411819",
+ "sha256:ac248253bfa4bd924a0de213707e7ebeeb3138abeb48d798784ead1e56d419d4"
],
"index": "pypi",
"markers": "python_full_version >= '3.8.0'",
- "version": "==3.0.2"
+ "version": "==3.1.0"
},
- "async-timeout": {
+ "attrs": {
"hashes": [
- "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f",
- "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"
+ "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30",
+ "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"
],
- "markers": "python_full_version <= '3.11.2'",
- "version": "==4.0.3"
+ "markers": "python_version >= '3.7'",
+ "version": "==23.2.0"
},
- "attrs": {
+ "autoflake": {
+ "hashes": [
+ "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840",
+ "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.8'",
+ "version": "==2.3.1"
+ },
+ "azure-common": {
+ "hashes": [
+ "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3",
+ "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"
+ ],
+ "version": "==1.1.28"
+ },
+ "azure-core": {
"hashes": [
- "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04",
- "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"
+ "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f",
+ "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"
],
"markers": "python_version >= '3.7'",
- "version": "==23.1.0"
+ "version": "==1.30.1"
},
- "autoflake": {
+ "azure-mgmt-compute": {
"hashes": [
- "sha256:265cde0a43c1f44ecfb4f30d95b0437796759d07be7706a2f70e4719234c0f79",
- "sha256:62b7b6449a692c3c9b0c916919bbc21648da7281e8506bcf8d3f8280e431ebc1"
+ "sha256:4d80d723ec6d4cb9583617ebec0716e7d74b2732acbaed023ed2e3cc7053d00e",
+ "sha256:9f6d29864ebe080796d4020533e79e4c8508512d3c53ec5a7a8930e4bd2f0bd4"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==2.2.1"
+ "version": "==30.6.0"
+ },
+ "azure-mgmt-core": {
+ "hashes": [
+ "sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d",
+ "sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae"
+ ],
+ "markers": "python_version >= '3.7'",
+ "version": "==1.4.0"
+ },
+ "azure-mgmt-resource": {
+ "hashes": [
+ "sha256:c2ba6cfd99df95f55f36eadc4245e3dc713257302a1fd0277756d94bd8cb28e0",
+ "sha256:f185eec72bbc39f42bcb83ae6f1bad744f0e3f20a12d9b2b3e70d16c74ad9cc0"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.7'",
+ "version": "==23.0.1"
},
"babel": {
"hashes": [
@@ -3232,12 +3377,12 @@
},
"bandit": {
"hashes": [
- "sha256:36da17c67fc87579a5d20c323c8d0b1643a890a2b93f00b3d1229966624694ff",
- "sha256:72ce7bc9741374d96fb2f1c9a8960829885f1243ffde743de70a19cee353e8f3"
+ "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b",
+ "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==1.7.6"
+ "version": "==1.7.8"
},
"bashlex": {
"hashes": [
@@ -3249,66 +3394,60 @@
},
"beautifulsoup4": {
"hashes": [
- "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da",
- "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"
+ "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051",
+ "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"
],
"index": "pypi",
"markers": "python_full_version >= '3.6.0'",
- "version": "==4.12.2"
+ "version": "==4.12.3"
},
"black": {
- "extras": [
- "d"
- ],
- "hashes": [
- "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5",
- "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915",
- "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326",
- "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940",
- "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b",
- "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30",
- "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c",
- "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c",
- "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab",
- "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27",
- "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2",
- "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961",
- "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9",
- "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb",
- "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70",
- "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331",
- "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2",
- "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266",
- "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d",
- "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6",
- "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b",
- "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925",
- "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8",
- "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4",
- "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"
+ "hashes": [
+ "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d",
+ "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd",
+ "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33",
+ "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965",
+ "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070",
+ "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397",
+ "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745",
+ "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1",
+ "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665",
+ "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436",
+ "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb",
+ "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e",
+ "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6",
+ "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702",
+ "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8",
+ "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8",
+ "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3",
+ "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad",
+ "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf",
+ "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e",
+ "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641",
+ "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"
],
- "markers": "python_version >= '3.7'",
- "version": "==23.3.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==24.4.0"
},
"boto3-stubs": {
"extras": [
"logs"
],
"hashes": [
- "sha256:2fa3dfde8b06f8f6362ba74ac2873ee036354892dc2b34a22e65aaeb90da527a",
- "sha256:b66c4b1a64cca8d429ebcf4512e029d2bf3870532a0c6fc567299f98d8a81ed6"
+ "sha256:93321cfd3b2f161ed3b9fdf4176e5aed4a6f5a0d124cb9661cc238843180e6e0",
+ "sha256:d9b21c2771e2f067097400ee34d92e08e71986e4bc20f1f4f3b1be0fc3f7f61b"
],
- "markers": "python_version >= '3.7'",
- "version": "==1.26.105"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.34.15"
},
"botocore-stubs": {
"hashes": [
- "sha256:dbd6c14739cba8fa89930921279fb499e15b292bcf2869198a347d821ccfb2a6",
- "sha256:fc2407fe899c6f5698262ea88e0c3f2f36c1b4fc741ed037065c96ad3c1a3ba0"
+ "sha256:4cd79dd4419e2a229ffa8512154e91c91cad2376675f3ee3b07cbcbb5fd7a256",
+ "sha256:b42c5aa7bd00a11ff299cac20516db2901254a9b1abad531bddc8400213d91cd"
],
"index": "pypi",
- "markers": "python_version >= '3.7' and python_version < '4.0'",
- "version": "==1.29.105"
+ "markers": "python_version >= '3.8' and python_version < '4.0'",
+ "version": "==1.34.15"
},
"bson": {
"hashes": [
@@ -3319,19 +3458,19 @@
},
"build": {
"hashes": [
- "sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b",
- "sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f"
+ "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d",
+ "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"
],
- "markers": "python_version >= '3.7'",
- "version": "==1.0.3"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.2.1"
},
"certifi": {
"hashes": [
- "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1",
- "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"
+ "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f",
+ "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"
],
"markers": "python_version >= '3.6'",
- "version": "==2023.11.17"
+ "version": "==2024.2.2"
},
"cffi": {
"hashes": [
@@ -3492,7 +3631,7 @@
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956",
"sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"
],
- "markers": "python_version >= '3.8'",
+ "markers": "platform_python_implementation != 'PyPy'",
"version": "==1.16.0"
},
"cfgv": {
@@ -3609,12 +3748,12 @@
},
"checkmk-dev-tools": {
"hashes": [
- "sha256:324e685c91bbf8df5534020f76af6c7ce62b97b8e40a2c3844eaef7b63a44465",
- "sha256:5acbb31c0843ce37593acd657f619c384484694d036ef7a5e6f91e7c841b5ae4"
+ "sha256:592da0e829a1af9f51ca69605b3153d42ab5e575f128b20dff5658e869aaa0c8",
+ "sha256:deae3403e08f7242ac91d355afec4f2a5cf5afe3faed9e7cf746961b55bbb8ed"
],
"index": "pypi",
"markers": "python_full_version >= '3.10.4' and python_full_version < '4.0.0'",
- "version": "==0.1.42"
+ "version": "==0.1.60"
},
"click": {
"hashes": [
@@ -3649,117 +3788,134 @@
"toml"
],
"hashes": [
- "sha256:007a7e49831cfe387473e92e9ff07377f6121120669ddc39674e7244350a6a29",
- "sha256:1191270b06ecd68b1d00897b2daddb98e1719f63750969614ceb3438228c088e",
- "sha256:1367aa411afb4431ab58fd7ee102adb2665894d047c490649e86219327183134",
- "sha256:1f0f8f0c497eb9c9f18f21de0750c8d8b4b9c7000b43996a094290b59d0e7523",
- "sha256:222b038f08a7ebed1e4e78ccf3c09a1ca4ac3da16de983e66520973443b546bc",
- "sha256:243576944f7c1a1205e5cd658533a50eba662c74f9be4c050d51c69bd4532936",
- "sha256:2e9223a18f51d00d3ce239c39fc41410489ec7a248a84fab443fbb39c943616c",
- "sha256:307aecb65bb77cbfebf2eb6e12009e9034d050c6c69d8a5f3f737b329f4f15fb",
- "sha256:31c0b1b8b5a4aebf8fcd227237fc4263aa7fa0ddcd4d288d42f50eff18b0bac4",
- "sha256:3b15e03b8ee6a908db48eccf4e4e42397f146ab1e91c6324da44197a45cb9132",
- "sha256:3c854c1d2c7d3e47f7120b560d1a30c1ca221e207439608d27bc4d08fd4aeae8",
- "sha256:475de8213ed95a6b6283056d180b2442eee38d5948d735cd3d3b52b86dd65b92",
- "sha256:50c472c1916540f8b2deef10cdc736cd2b3d1464d3945e4da0333862270dcb15",
- "sha256:593efa42160c15c59ee9b66c5f27a453ed3968718e6e58431cdfb2d50d5ad284",
- "sha256:65d716b736f16e250435473c5ca01285d73c29f20097decdbb12571d5dfb2c94",
- "sha256:733537a182b5d62184f2a72796eb6901299898231a8e4f84c858c68684b25a70",
- "sha256:757453848c18d7ab5d5b5f1827293d580f156f1c2c8cef45bfc21f37d8681069",
- "sha256:79c32f875fd7c0ed8d642b221cf81feba98183d2ff14d1f37a1bbce6b0347d9f",
- "sha256:7f3bad1a9313401ff2964e411ab7d57fb700a2d5478b727e13f156c8f89774a0",
- "sha256:7fbf3f5756e7955174a31fb579307d69ffca91ad163467ed123858ce0f3fd4aa",
- "sha256:811ca7373da32f1ccee2927dc27dc523462fd30674a80102f86c6753d6681bc6",
- "sha256:89400aa1752e09f666cc48708eaa171eef0ebe3d5f74044b614729231763ae69",
- "sha256:8c944cf1775235c0857829c275c777a2c3e33032e544bcef614036f337ac37bb",
- "sha256:9437a4074b43c177c92c96d051957592afd85ba00d3e92002c8ef45ee75df438",
- "sha256:9e17d9cb06c13b4f2ef570355fa45797d10f19ca71395910b249e3f77942a837",
- "sha256:9ede881c7618f9cf93e2df0421ee127afdfd267d1b5d0c59bcea771cf160ea4a",
- "sha256:a1f76cfc122c9e0f62dbe0460ec9cc7696fc9a0293931a33b8870f78cf83a327",
- "sha256:a2ac4245f18057dfec3b0074c4eb366953bca6787f1ec397c004c78176a23d56",
- "sha256:a702e66483b1fe602717020a0e90506e759c84a71dbc1616dd55d29d86a9b91f",
- "sha256:ad2453b852a1316c8a103c9c970db8fbc262f4f6b930aa6c606df9b2766eee06",
- "sha256:af75cf83c2d57717a8493ed2246d34b1f3398cb8a92b10fd7a1858cad8e78f59",
- "sha256:afdcc10c01d0db217fc0a64f58c7edd635b8f27787fea0a3054b856a6dff8717",
- "sha256:c59a3e59fb95e6d72e71dc915e6d7fa568863fad0a80b33bc7b82d6e9f844973",
- "sha256:cad9afc1644b979211989ec3ff7d82110b2ed52995c2f7263e7841c846a75348",
- "sha256:d299d379b676812e142fb57662a8d0d810b859421412b4d7af996154c00c31bb",
- "sha256:d31650d313bd90d027f4be7663dfa2241079edd780b56ac416b56eebe0a21aab",
- "sha256:d874434e0cb7b90f7af2b6e3309b0733cde8ec1476eb47db148ed7deeb2a9494",
- "sha256:db0338c4b0951d93d547e0ff8d8ea340fecf5885f5b00b23be5aa99549e14cfd",
- "sha256:df04c64e58df96b4427db8d0559e95e2df3138c9916c96f9f6a4dd220db2fdb7",
- "sha256:e995efb191f04b01ced307dbd7407ebf6e6dc209b528d75583277b10fd1800ee",
- "sha256:eda7f6e92358ac9e1717ce1f0377ed2b9320cea070906ece4e5c11d172a45a39",
- "sha256:ee453085279df1bac0996bc97004771a4a052b1f1e23f6101213e3796ff3cb85",
- "sha256:ee6621dccce8af666b8c4651f9f43467bfbf409607c604b840b78f4ff3619aeb",
- "sha256:eee5e741b43ea1b49d98ab6e40f7e299e97715af2488d1c77a90de4a663a86e2",
- "sha256:f3bfd2c2f0e5384276e12b14882bf2c7621f97c35320c3e7132c156ce18436a1",
- "sha256:f501e36ac428c1b334c41e196ff6bd550c0353c7314716e80055b1f0a32ba394",
- "sha256:f9191be7af41f0b54324ded600e8ddbcabea23e1e8ba419d9a53b241dece821d",
- "sha256:fbd8a5fe6c893de21a3c6835071ec116d79334fbdf641743332e442a3466f7ea",
- "sha256:fc200cec654311ca2c3f5ab3ce2220521b3d4732f68e1b1e79bef8fcfc1f2b97",
- "sha256:ff4800783d85bff132f2cc7d007426ec698cdce08c3062c8d501ad3f4ea3d16c",
- "sha256:ffb0eacbadb705c0a6969b0adf468f126b064f3362411df95f6d4f31c40d31c1",
- "sha256:fff0b2f249ac642fd735f009b8363c2b46cf406d3caec00e4deeb79b5ff39b40"
- ],
- "markers": "python_version >= '3.8'",
- "version": "==7.3.3"
+ "sha256:075299460948cd12722a970c7eae43d25d37989da682997687b34ae6b87c0ef0",
+ "sha256:07dfdd492d645eea1bd70fb1d6febdcf47db178b0d99161d8e4eed18e7f62fe7",
+ "sha256:0cbdf2cae14a06827bec50bd58e49249452d211d9caddd8bd80e35b53cb04631",
+ "sha256:2055c4fb9a6ff624253d432aa471a37202cd8f458c033d6d989be4499aed037b",
+ "sha256:262fffc1f6c1a26125d5d573e1ec379285a3723363f3bd9c83923c9593a2ac25",
+ "sha256:280132aada3bc2f0fac939a5771db4fbb84f245cb35b94fae4994d4c1f80dae7",
+ "sha256:2b57780b51084d5223eee7b59f0d4911c31c16ee5aa12737c7a02455829ff067",
+ "sha256:2bd7065249703cbeb6d4ce679c734bef0ee69baa7bff9724361ada04a15b7e3b",
+ "sha256:3235d7c781232e525b0761730e052388a01548bd7f67d0067a253887c6e8df46",
+ "sha256:33c020d3322662e74bc507fb11488773a96894aa82a622c35a5a28673c0c26f5",
+ "sha256:357754dcdfd811462a725e7501a9b4556388e8ecf66e79df6f4b988fa3d0b39a",
+ "sha256:39793731182c4be939b4be0cdecde074b833f6171313cf53481f869937129ed3",
+ "sha256:3c2b77f295edb9fcdb6a250f83e6481c679335ca7e6e4a955e4290350f2d22a4",
+ "sha256:41327143c5b1d715f5f98a397608f90ab9ebba606ae4e6f3389c2145410c52b1",
+ "sha256:427e1e627b0963ac02d7c8730ca6d935df10280d230508c0ba059505e9233475",
+ "sha256:432949a32c3e3f820af808db1833d6d1631664d53dd3ce487aa25d574e18ad1c",
+ "sha256:4ba01d9ba112b55bfa4b24808ec431197bb34f09f66f7cb4fd0258ff9d3711b1",
+ "sha256:4d0e206259b73af35c4ec1319fd04003776e11e859936658cb6ceffdeba0f5be",
+ "sha256:51431d0abbed3a868e967f8257c5faf283d41ec882f58413cf295a389bb22e58",
+ "sha256:565b2e82d0968c977e0b0f7cbf25fd06d78d4856289abc79694c8edcce6eb2de",
+ "sha256:6782cd6216fab5a83216cc39f13ebe30adfac2fa72688c5a4d8d180cd52e8f6a",
+ "sha256:6afd2e84e7da40fe23ca588379f815fb6dbbb1b757c883935ed11647205111cb",
+ "sha256:710c62b6e35a9a766b99b15cdc56d5aeda0914edae8bb467e9c355f75d14ee95",
+ "sha256:84921b10aeb2dd453247fd10de22907984eaf80901b578a5cf0bb1e279a587cb",
+ "sha256:85a5dbe1ba1bf38d6c63b6d2c42132d45cbee6d9f0c51b52c59aa4afba057517",
+ "sha256:9c6384cc90e37cfb60435bbbe0488444e54b98700f727f16f64d8bfda0b84656",
+ "sha256:9dd88fce54abbdbf4c42fb1fea0e498973d07816f24c0e27a1ecaf91883ce69e",
+ "sha256:a81eb64feded34f40c8986869a2f764f0fe2db58c0530d3a4afbcde50f314880",
+ "sha256:a898c11dca8f8c97b467138004a30133974aacd572818c383596f8d5b2eb04a9",
+ "sha256:a9960dd1891b2ddf13a7fe45339cd59ecee3abb6b8326d8b932d0c5da208104f",
+ "sha256:a9a7ef30a1b02547c1b23fa9a5564f03c9982fc71eb2ecb7f98c96d7a0db5cf2",
+ "sha256:ad97ec0da94b378e593ef532b980c15e377df9b9608c7c6da3506953182398af",
+ "sha256:adf032b6c105881f9d77fa17d9eebe0ad1f9bfb2ad25777811f97c5362aa07f2",
+ "sha256:bbfe6389c5522b99768a93d89aca52ef92310a96b99782973b9d11e80511f932",
+ "sha256:bd4bacd62aa2f1a1627352fe68885d6ee694bdaebb16038b6e680f2924a9b2cc",
+ "sha256:bf0b4b8d9caa8d64df838e0f8dcf68fb570c5733b726d1494b87f3da85db3a2d",
+ "sha256:c379cdd3efc0658e652a14112d51a7668f6bfca7445c5a10dee7eabecabba19d",
+ "sha256:c58536f6892559e030e6924896a44098bc1290663ea12532c78cef71d0df8493",
+ "sha256:cbe6581fcff7c8e262eb574244f81f5faaea539e712a058e6707a9d272fe5b64",
+ "sha256:ced268e82af993d7801a9db2dbc1d2322e786c5dc76295d8e89473d46c6b84d4",
+ "sha256:cf3539007202ebfe03923128fedfdd245db5860a36810136ad95a564a2fdffff",
+ "sha256:cf62d17310f34084c59c01e027259076479128d11e4661bb6c9acb38c5e19bb8",
+ "sha256:d0194d654e360b3e6cc9b774e83235bae6b9b2cac3be09040880bb0e8a88f4a1",
+ "sha256:d3d117890b6eee85887b1eed41eefe2e598ad6e40523d9f94c4c4b213258e4a4",
+ "sha256:db2de4e546f0ec4b2787d625e0b16b78e99c3e21bc1722b4977c0dddf11ca84e",
+ "sha256:e768d870801f68c74c2b669fc909839660180c366501d4cc4b87efd6b0eee375",
+ "sha256:e7c211f25777746d468d76f11719e64acb40eed410d81c26cefac641975beb88",
+ "sha256:eed462b4541c540d63ab57b3fc69e7d8c84d5957668854ee4e408b50e92ce26a",
+ "sha256:f0bfe42523893c188e9616d853c47685e1c575fe25f737adf473d0405dcfa7eb",
+ "sha256:f609ebcb0242d84b7adeee2b06c11a2ddaec5464d21888b2c8255f5fd6a98ae4",
+ "sha256:fea9d3ca80bcf17edb2c08a4704259dadac196fe5e9274067e7a20511fad1743",
+ "sha256:fed7a72d54bd52f4aeb6c6e951f363903bd7d70bc1cad64dd1f087980d309ab9"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==7.5.0"
},
"cryptography": {
"hashes": [
- "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960",
- "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a",
- "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc",
- "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a",
- "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf",
- "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1",
- "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39",
- "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406",
- "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a",
- "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a",
- "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c",
- "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be",
- "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15",
- "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2",
- "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d",
- "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157",
- "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003",
- "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248",
- "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a",
- "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec",
- "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309",
- "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7",
- "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"
+ "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee",
+ "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576",
+ "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d",
+ "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30",
+ "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413",
+ "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb",
+ "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da",
+ "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4",
+ "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd",
+ "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc",
+ "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8",
+ "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1",
+ "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc",
+ "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e",
+ "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8",
+ "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940",
+ "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400",
+ "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7",
+ "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16",
+ "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278",
+ "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74",
+ "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec",
+ "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1",
+ "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2",
+ "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c",
+ "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922",
+ "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a",
+ "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6",
+ "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1",
+ "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e",
+ "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac",
+ "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.7'",
+ "version": "==42.0.5"
+ },
+ "cssselect": {
+ "hashes": [
+ "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc",
+ "sha256:da1885f0c10b60c03ed5eccbb6b68d6eff248d91976fcde348f395d54c9fd35e"
],
- "index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==41.0.7"
+ "version": "==1.2.0"
},
"devpi-client": {
"hashes": [
- "sha256:a0e5f967c59780d258b205ea1c4d82b177439c0dd798317a6b10f50f7d7c6cf4",
- "sha256:fb63fd088dce1ddd7cbe9879f6cc95bf464eb3d48cbdf51fc9c7bdef5e4d1ba0"
+ "sha256:3090788aa5bffe3d35f5f8e5789eab77c28b211b4982b1c2b4b744fdb3f64e0b",
+ "sha256:e5a17a10816785fcb00de01f59237e79952968ee9d88f08fe501d3d7563f2102"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==7.0.2"
+ "version": "==7.0.3"
},
"devpi-common": {
"hashes": [
- "sha256:d96be2e842fa76d7c67c185ec037e80e250b323271edaa716ccb31eecd0d73eb",
- "sha256:f8e01b4f6df080f62284cce58c5bb1ce1e869b0c234aac7ad135609745fc173d"
+ "sha256:235a0a9a45c96e54c60ba6ba2f77d856cf90f1a69c1bee949887e9edc03a41cc",
+ "sha256:8a83b788ac821ade1e348d571db083575c5c5d92ba0932a3a8f75c6f8dde4d88"
],
"markers": "python_version >= '3.7'",
- "version": "==4.0.3"
+ "version": "==4.0.4"
},
"dill": {
"hashes": [
- "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e",
- "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"
+ "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca",
+ "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==0.3.7"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.3.8"
},
"distlib": {
"hashes": [
@@ -3770,11 +3926,11 @@
},
"dnspython": {
"hashes": [
- "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8",
- "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"
+ "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50",
+ "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"
],
- "markers": "python_version >= '3.8' and python_version < '4.0'",
- "version": "==2.4.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==2.6.1"
},
"docker": {
"hashes": [
@@ -3798,112 +3954,71 @@
},
"docstring-to-markdown": {
"hashes": [
- "sha256:3025c428638ececae920d6d26054546a20335af3504a145327e657e7ad7ce1ce",
- "sha256:aa487059d0883e70e54da25c7b230e918d9e4d40f23d6dfaa2b73e4225b2d7dd"
+ "sha256:27afb3faedba81e34c33521c32bbd258d7fbb79eedf7d29bc4e81080e854aec0",
+ "sha256:e146114d9c50c181b1d25505054a8d0f7a476837f0da2c19f07e06eaed52b73d"
],
"markers": "python_version >= '3.6'",
- "version": "==0.13"
+ "version": "==0.15"
},
"docutils": {
"hashes": [
"sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6",
"sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"
],
- "index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==0.20.1"
},
"execnet": {
"hashes": [
- "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41",
- "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"
+ "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc",
+ "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"
],
- "markers": "python_version >= '3.7'",
- "version": "==2.0.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==2.1.1"
},
"faker": {
"hashes": [
- "sha256:2d8a350e952225a145307d7461881c44a1c9320e90fbe8bd903d5947f133f3ec",
- "sha256:ff61cca42547795bee8a11319792a8fee6d0f0cd191e831f7f3050c5851fcd8a"
+ "sha256:34b947581c2bced340c39b35f89dbfac4f356932cfff8fe893bde854903f0e6e",
+ "sha256:adb98e771073a06bdc5d2d6710d8af07ac5da64c8dc2ae3b17bb32319e66fd82"
],
"markers": "python_version >= '3.8'",
- "version": "==21.0.0"
+ "version": "==24.11.0"
},
"fakeredis": {
"extras": [
"lua"
],
"hashes": [
- "sha256:a2a5ccfcd72dc90435c18cde284f8cdd0cb032eb67d59f3fed907cde1cbffbbd",
- "sha256:d1cb22ed76b574cbf807c2987ea82fc0bd3e7d68a7a1e3331dd202cc39d6b4e5"
+ "sha256:13ac8bd57c852d8b3c0684fa6755fac4abb4feab6483a52212b932d11c795bf3",
+ "sha256:d063085fe962d16637cfe21044f277cfc54d6fb456d12a7c87514990c3fac98e"
],
"markers": "python_version >= '3.7' and python_version < '4.0'",
- "version": "==2.20.1"
+ "version": "==2.22.0"
},
"fastapi": {
"hashes": [
- "sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e",
- "sha256:75a11f6bfb8fc4d2bec0bd710c2d5f2829659c0e8c0afd5560fdda6ce25ec653"
+ "sha256:239403f2c0a3dda07a9420f95157a7f014ddb2b770acdbc984f9bdf3ead7afdb",
+ "sha256:b53d673652da3b65e8cd787ad214ec0fe303cad00d2b529b86ce7db13f17518d"
],
- "markers": "python_version >= '3.7'",
- "version": "==0.103.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.110.2"
},
"filelock": {
"hashes": [
- "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e",
- "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"
+ "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f",
+ "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"
],
"markers": "python_version >= '3.8'",
- "version": "==3.13.1"
+ "version": "==3.13.4"
},
"flake8": {
"hashes": [
- "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23",
- "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"
+ "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132",
+ "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"
],
"index": "pypi",
"markers": "python_full_version >= '3.8.1'",
- "version": "==6.1.0"
- },
- "freezegun": {
- "hashes": [
- "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446",
- "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.6'",
- "version": "==1.2.2"
- },
- "future": {
- "hashes": [
- "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"
- ],
- "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==0.18.3"
- },
- "gitdb": {
- "hashes": [
- "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4",
- "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"
- ],
- "markers": "python_version >= '3.7'",
- "version": "==4.0.11"
- },
- "gitpython": {
- "hashes": [
- "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4",
- "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"
- ],
- "markers": "python_version >= '3.7'",
- "version": "==3.1.40"
- },
- "gprof2dot": {
- "hashes": [
- "sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5",
- "sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6"
- ],
- "markers": "python_version >= '2.7'",
- "version": "==2022.7.29"
+ "version": "==7.0.0"
},
"graphql-core": {
"hashes": [
@@ -3915,74 +4030,75 @@
},
"graphviz": {
"hashes": [
- "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977",
- "sha256:8c58f14adaa3b947daf26c19bc1e98c4e0702cdc31cf99153e6f06904d492bf8"
+ "sha256:09d6bc81e6a9fa392e7ba52135a9d49f1ed62526f96499325930e87ca1b5925d",
+ "sha256:81f848f2904515d8cd359cc611faba817598d2feaac4027b266aa3eda7b3dde5"
],
- "markers": "python_version >= '3.7'",
- "version": "==0.20.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.20.3"
},
"greenlet": {
"hashes": [
- "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174",
- "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd",
- "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa",
- "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a",
- "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec",
- "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565",
- "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d",
- "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c",
- "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234",
- "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d",
- "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546",
- "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2",
- "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74",
- "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de",
- "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd",
- "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9",
- "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3",
- "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846",
- "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2",
- "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353",
- "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8",
- "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166",
- "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206",
- "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b",
- "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d",
- "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe",
- "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997",
- "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445",
- "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0",
- "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96",
- "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884",
- "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6",
- "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1",
- "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619",
- "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94",
- "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4",
- "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1",
- "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63",
- "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd",
- "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a",
- "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376",
- "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57",
- "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16",
- "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e",
- "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc",
- "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a",
- "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c",
- "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5",
- "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a",
- "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72",
- "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9",
- "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9",
- "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e",
- "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8",
- "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65",
- "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064",
- "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"
+ "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67",
+ "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6",
+ "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257",
+ "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4",
+ "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676",
+ "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61",
+ "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc",
+ "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca",
+ "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7",
+ "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728",
+ "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305",
+ "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6",
+ "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379",
+ "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414",
+ "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04",
+ "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a",
+ "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf",
+ "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491",
+ "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559",
+ "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e",
+ "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274",
+ "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb",
+ "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b",
+ "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9",
+ "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b",
+ "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be",
+ "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506",
+ "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405",
+ "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113",
+ "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f",
+ "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5",
+ "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230",
+ "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d",
+ "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f",
+ "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a",
+ "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e",
+ "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61",
+ "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6",
+ "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d",
+ "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71",
+ "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22",
+ "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2",
+ "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3",
+ "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067",
+ "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc",
+ "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881",
+ "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3",
+ "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e",
+ "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac",
+ "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53",
+ "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0",
+ "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b",
+ "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83",
+ "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41",
+ "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c",
+ "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf",
+ "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da",
+ "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"
],
"markers": "python_version >= '3.7'",
- "version": "==3.0.1"
+ "version": "==3.0.3"
},
"h11": {
"hashes": [
@@ -3994,28 +4110,28 @@
},
"httpcore": {
"hashes": [
- "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7",
- "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"
+ "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61",
+ "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"
],
"markers": "python_version >= '3.8'",
- "version": "==1.0.2"
+ "version": "==1.0.5"
},
"httpx": {
"hashes": [
- "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8",
- "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"
+ "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5",
+ "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"
],
"markers": "python_version >= '3.8'",
- "version": "==0.25.2"
+ "version": "==0.27.0"
},
"hypothesis": {
"hashes": [
- "sha256:3cba76a7389bd7245c350fcf7234663314dc81a5be0bbef72a07d8c249bfc210",
- "sha256:fa755ded526e50b7e2f642cdc5d64519f88d4e4ee71d9d29ec3eb2f2fddf1274"
+ "sha256:3dacf6ec90e8d14aaee02cde081ac9a17d5b70105e45e6ac822db72052c0195b",
+ "sha256:ebff09d7fa4f1fb6a855a812baf17e578b4481b7b70ec6d96496210d1a4c6c35"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==6.92.1"
+ "version": "==6.100.1"
},
"hypothesis-graphql": {
"hashes": [
@@ -4027,20 +4143,20 @@
},
"hypothesis-jsonschema": {
"hashes": [
- "sha256:082968cb86a6aac2369627b08753cbf714c08054b1ebfce3588e3756e652cde6",
- "sha256:5dd7449009f323e408a9aa64afb4d18bd1f60ea2eabf5bf152a510da728b34f2"
+ "sha256:a4d74d9516dd2784fbbae82e009f62486c9104ac6f4e3397091d98a1d5ee94a2",
+ "sha256:f4ac032024342a4149a10253984f5a5736b82b3fe2afb0888f3834a31153f215"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==0.22.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.23.1"
},
"identify": {
"hashes": [
- "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d",
- "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"
+ "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa",
+ "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"
],
"markers": "python_version >= '3.8'",
- "version": "==2.5.33"
+ "version": "==2.5.36"
},
"idna": {
"hashes": [
@@ -4061,11 +4177,11 @@
},
"importlib-metadata": {
"hashes": [
- "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7",
- "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"
+ "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570",
+ "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"
],
"markers": "python_version >= '3.8'",
- "version": "==7.0.0"
+ "version": "==7.1.0"
},
"iniconfig": {
"hashes": [
@@ -4075,6 +4191,13 @@
"markers": "python_version >= '3.7'",
"version": "==2.0.0"
},
+ "isodate": {
+ "hashes": [
+ "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96",
+ "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"
+ ],
+ "version": "==0.6.1"
+ },
"isort": {
"hashes": [
"sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109",
@@ -4086,11 +4209,27 @@
},
"jaraco.classes": {
"hashes": [
- "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb",
- "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621"
+ "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd",
+ "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==3.4.0"
+ },
+ "jaraco.context": {
+ "hashes": [
+ "sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266",
+ "sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2"
],
"markers": "python_version >= '3.8'",
- "version": "==3.3.0"
+ "version": "==5.3.0"
+ },
+ "jaraco.functools": {
+ "hashes": [
+ "sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664",
+ "sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==4.0.1"
},
"jedi": {
"hashes": [
@@ -4110,28 +4249,28 @@
},
"jinja2": {
"hashes": [
- "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852",
- "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"
+ "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa",
+ "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==3.1.2"
+ "version": "==3.1.3"
},
"jsonschema": {
"hashes": [
- "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d",
- "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"
+ "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f",
+ "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"
],
- "markers": "python_version >= '3.7'",
- "version": "==4.17.3"
+ "markers": "python_version >= '3.8'",
+ "version": "==4.21.1"
},
"jsonschema-specifications": {
"hashes": [
- "sha256:9472fc4fea474cd74bea4a2b190daeccb5a9e4db2ea80efcf7a1b582fc9a81b8",
- "sha256:e74ba7c0a65e8cb49dc26837d6cfe576557084a8b423ed16a420984228104f93"
+ "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc",
+ "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"
],
"markers": "python_version >= '3.8'",
- "version": "==2023.11.2"
+ "version": "==2023.12.1"
},
"junit-xml": {
"hashes": [
@@ -4142,11 +4281,11 @@
},
"keyring": {
"hashes": [
- "sha256:4446d35d636e6a10b8bce7caa66913dd9eca5fd222ca03a3d42c38608ac30836",
- "sha256:e730ecffd309658a08ee82535a3b5ec4b4c8669a9be11efb66249d8e0aeb9a25"
+ "sha256:26fc12e6a329d61d24aa47b22a7c5c3f35753df7d8f2860973cf94f4e1fb3427",
+ "sha256:7230ea690525133f6ad536a9b5def74a4bd52642abe594761028fc044d7c7893"
],
"markers": "python_version >= '3.8'",
- "version": "==24.3.0"
+ "version": "==25.1.0"
},
"lazy": {
"hashes": [
@@ -4158,89 +4297,94 @@
},
"lupa": {
"hashes": [
- "sha256:0068d75f0df5f2fb85230b1df7a05305645ee28ef89551997eb09009c70d7f8a",
- "sha256:019e10a56c50ba60e94ff8c3e60a9a239d6438f1dc6ac17bcf2d44d4ada8f171",
- "sha256:02a0e45ada08e5694ab3f3c06523ec16322dfb875668ce9ff3e04a01d3e18e81",
- "sha256:02ed2848a33dfe43013c5a86d2c155a9669d3c438a847a4e3816b7f1bf17cec6",
- "sha256:033a14fe291ef532db11c3f3b65b364b5b3b3d3b6146aa7f7412f8f4d89471ce",
- "sha256:0432ec532513eaf5ae8961000baf56d550fed4a7b91c0a9759b6f17c1dafc8af",
- "sha256:06792b86f9410bd26936728e7f903e2eee76642cbf51e435622637a3d752a2ea",
- "sha256:0e66da3bc40cde8edeb4d7d8141afad67ec6a5da0ee07ce5265df7e899e0883c",
- "sha256:17fd814523b9fa268df8f0995874218a9be008dbcd1c1c7bd28207814a209491",
- "sha256:1be2e1015d8481511852ae0f9f05f3722715d7aadb48207480eb50edc45a7510",
- "sha256:200544d259a054c5d0c6696499d0c66ccd924d42efb41b09b19c2af9771f5c31",
- "sha256:201fc894d257132e90e42ce9396c5b45aa5f5bdc4cd4dfc8076c8476f04dd44b",
- "sha256:282126096ba71c1926f28da59cd1cf6913b7e9e7020d577b42dc52ca3c359e93",
- "sha256:29c46d79273a72c010a2949d41336bbb5ebafd09e2c2a4342d2f2f4238d378c8",
- "sha256:2a3dbf85baf66f0a8b862293c3cd61430d2d379652e3db3e5f979b16db7e374b",
- "sha256:2c11eafd262ff47ccb0bf9c28126dde21d3d01205cf6f5b5c2c4dbf04b99f5e9",
- "sha256:2d02d4af2682169b8aa744e7eae59d1e05f9b0071a59fb140852dae9b5c8d86c",
- "sha256:32d1e7cdced4e29771dacfed68abc92da9ba2300a2929ec5782467316ea4a715",
- "sha256:345032ef77bd474d288ea2c4ddd14b552b93d60a40a9b0daf0a82bc078625982",
- "sha256:3b3e02b920b61601e2d9713b1e197d8cbab0bd3709774ec6823357cd83ee7b9d",
- "sha256:3c953b9430751e792b721dd2265af1759251cdac0ade5642f25e16a6174bcc58",
- "sha256:3d34870912bf7501d2a9e7dc75319e55f836fd8412b783afa44c5bfb72be0867",
- "sha256:404bda126a34eef839e29fc94fd65c1092b53301b2d0abc9388f02cc5ba87ac9",
- "sha256:43353ae1e204b1f7fb18150f7dc5357592be37431e84f799c6cf21a4b7a52dcc",
- "sha256:4649a5501f0d8e5c96c297896377e9f73d0167df139109536187c57c60be1e90",
- "sha256:46b77e4a545d5ba00d17432853b26b50299129047d4f999c007fb9b6db3cfdd6",
- "sha256:47d3eb18511e83068a8ce476a9f7ad8642a35189e682f5a1053970ec9d98272a",
- "sha256:4c776290a06b03e8dd5ca061d9fefde13be37fb25700c56bb513343262ea1729",
- "sha256:4e00664780836b353113804f8e0f860322abf5ef723d615ba6f49d9e78874944",
- "sha256:50c529e5ecf3ec5b3e57efbb9a5def5125ceb7b95f12e2c89c34535856abb1ac",
- "sha256:5396ebb51753a8243a18080e2efa9f085bac5d43185d5a1dd9a3679ff7fb09c5",
- "sha256:5c249d83655942ebe7db99c4e981de547867a7d30ace34e61f3ccc5b7a14402c",
- "sha256:5e980571081c93152bb04de07bbde6852462e1674349eb3eafe703f5fa81a836",
- "sha256:65d5971eb8c060eb3c9218c25181001e25982dfdf88e0b284447f837a4318a5f",
- "sha256:682860cd6ed84e0ffdaf84c82c21b192858261964b3ed126bc54d52cc8a480b4",
- "sha256:690c0654b92c6de0893c004d0a46d5d5b5fd76e9017dda328a2435afdf3c55a0",
- "sha256:6e9ece8e7e4399473e1f9a4733445d93148c3205e1b87c158894287f3213bf6b",
- "sha256:71e517327bff75cc5e60fe105da7da6621a75ba05a5050869e33b4bdbe838288",
- "sha256:7563c4a015f51eb36d92874c0448bb8df504041d894e61e6c9cb9e6613132470",
- "sha256:769d7747056380ca4fb7923b7031b5732c1b9b9d0d160324cc88a32d7c98127c",
- "sha256:7762c6780fe7ab64d64f8658ab54d79cb5d3d0fbdcc76290f5fc19b41fc01ad5",
- "sha256:793bddad1a36eb7c8c04775867942cf2adfe09d482311791022c4ab4802169b4",
- "sha256:7c10d4f0fa592b798a71c0b2e273e4b899a14b3634a48cbc444917b254ddce37",
- "sha256:7caa1ce59fe1cefd845093d1354244c59d286fcc1196a15297fb189a5bb749c6",
- "sha256:8214a8b0fb1277e026301f60101af323c93868eefcad69984e7285bea5c1ac3f",
- "sha256:88495333e79937cdf7edac35ec36aca41d50134dbb23f2f1684a1685a4295433",
- "sha256:8f3e6ea86053ec0c9945ae313fba8ba06dc4ccc397369709bba956dd48db95a7",
- "sha256:90788d250f727720747784e67fbc50917f5ce051e24bc49661850f98b1b9ed42",
- "sha256:974de113c63e35668fbbbff656fef718e586abed3fc875eae4fece279a1e8a11",
- "sha256:9a5843fbfb22b70ea13ec624d43c818b396ff1f62d9bd84f9ed10e3fef06ccf0",
- "sha256:9add3d9ba86fa2fb5604e429ca811b9fa6b4c55fe5330bd9f0fcf51f2c5bebf8",
- "sha256:9b7c9799a45e6fff8c38395d370b318b8ce6841710c2082f180ea7d189f7d229",
- "sha256:9c7ec361e05d932c5355825982613077ac8cb5b63d95022d571290d8ca667188",
- "sha256:9fa9d5013a06aa09392f1d02d9724a9856f4f4111794ca9be17a016c83c6546a",
- "sha256:a6847c2541f9cbdd596df821a575222f471175cd710fb967ffc51801dae58d68",
- "sha256:a91eacc06ac89a2134c6b0f35ac65c45e18c984baf24b03d0f5187071074a597",
- "sha256:a97e647ac11ca5131a73628ee063233378c03100f0f408c77f9b45cb358619ab",
- "sha256:ab2ca1c51724b779a2531d2bef1480faae203c8917b9cc3d0a3d3acb37c1d7ad",
- "sha256:ad3fef486be7adddd349fe9a9c393789061312cf98ebc533b489be34f484cb79",
- "sha256:b0d5481e3af166d73da373ffda0eab1bd709b0177daa2616ce95816483942c21",
- "sha256:b3f6837c1e2fd7c66100828953063dfe8a1d283bc48e1144d621b35bf19ce79f",
- "sha256:becb01602dc6d5439101e1ac5877b25e35817b1bd131b9af709a5a181e6b8026",
- "sha256:c0be42065ad39219eaf890c224cc7cc140ed72691b97b0905dd7a89abebdf474",
- "sha256:c19482a595deed90e5b8542df1ed861e2a4a9d99bd8a9ff108e3a7c66bc7c6c0",
- "sha256:d225e06748aca078a02529054c6678ba3e5b7cc2080b5be30e33ede9eac5efb2",
- "sha256:d412925a73b6b848fd1076fbc392d445ff4a1ab5b5bb278e358f78768677c963",
- "sha256:d85c20691dbd2db5b7c60f40e4a5ced6a35be60264a81dc08804483917b41ea9",
- "sha256:dd9af8e86b3c811ce74f11a12f275c873bd38f40de6ce76b7ddc3664e113a98e",
- "sha256:dea916b28ee38c904ece3a26986b6943a073666c038ae6b6d6d131668da20f59",
- "sha256:e051969dc712d7050d0f3d6c6c8ed063941a004381e84f072815350476118f81",
- "sha256:e361efe6c8a667fa221d42b7fa2beb7fada86e901a0f0e1e17c7c7927d66b2ff",
- "sha256:eece0bc316c2b050e8c3596320e124c8ccea2a7872e593193d30eecab7f0acf6",
- "sha256:f04c7a8d4e5b50a570681b990ff3be09bce5efbd91a521442c0ebfc36e0ce422",
- "sha256:f3f962a499f95b3a5e90de36ac396cdb59c0c46b8003fbfcc1e2d78d7edc14f8",
- "sha256:f50a395dc3c950974ac73b2476136785c6995f611a81e14d2a7c6aa59b342abf",
- "sha256:f576699ca59f3f76127d70210a0ba20e7def93ab1a7e3587d55dd4b770775788",
- "sha256:f7c1cfa9dac4f1363d9620384f9881a1ec968ff825be1e9b2ecdb4cb5375fbf2",
- "sha256:f8368f0d5131f47da60f7cea4a5932418ca0bcd12c22fcf700f36af93fdf2a6a",
- "sha256:fb4426cddefb48683068e94ed4748710507bbd3f0a4d71574535443c75a16e36",
- "sha256:fb5efacbb5dd568d44f4f31a4764a52eefb78288f0445da016652fe7143cdde3",
- "sha256:fcedc43012527edb4ca2b97a6c8176dd2384a006e47549d4e73143f7982deaff"
- ],
- "version": "==2.0"
+ "sha256:05616bc5c467d7ec0b26de99d1586bdd4e034cd3b9068be9306e128d0d005d34",
+ "sha256:05fa474ae5617a77bdb9e09c42d45f3b4b869cd3c412914eaf7913a0a38cf03d",
+ "sha256:0912e46a398831d4299f6fb4bb75ba5a8de9cd73a3461cbc4a37123a0c660d51",
+ "sha256:09ade981e97c8267029c89fb374f92f327b55198eded6b386065963d93157a62",
+ "sha256:0cc42e41f82ed6812a930a2c3599d1964583a482adbed4599f9a94a6e2aff7c8",
+ "sha256:12f4591da2c7ff5b84a69a5363c0f5ce646fcff8519b49200d17e5fdb987a6cd",
+ "sha256:151077023b2be939c09a6393142be6d70b92cac2fea38e21cfb976ea28c022dc",
+ "sha256:1e2ad3329e89fbc20a8c32eb64bb6416207c12e60b30ce002e0e4a425c7eb0ea",
+ "sha256:23852fb56d14853cc0a62c0f93decdb4d2b476ce7e512c4488fe8a186e6d060e",
+ "sha256:23c28564dd5812ba31e07e0bb0e7334ca33b46ded233935982074db7088832fc",
+ "sha256:27a23b70bd995688925e8c64fbc2119cc2577e266aa40b8c8ff5c3eee51b0a62",
+ "sha256:29f50f1d2a53071c6eb3b89289753ba6306417cb4bf55c00897251e2e813fe7e",
+ "sha256:2d82bea5aa6eb98208f3a07f7feea253998b7fa7e76ef2e4ab5510e0156a0ce3",
+ "sha256:35350b8f70f0e9422c7c96be478cdb0afb09aac1724e2eccc4f3bf60881073b9",
+ "sha256:354ab722b30711de8e30a11f9383bb68fd4acf68b87915f26960477906690455",
+ "sha256:35630eef63d8f363d768beec5c14e7ccaf4cfc2a979e0662fce998b26678dc2e",
+ "sha256:35f44781de55a4ebf8270e1ae1d50975c43f6e04ef91efb5f60b4fdbc3141c98",
+ "sha256:3c8956ea9a3cf930cdda50e985232dea813662ff7afd4e9595cacd8509d55aff",
+ "sha256:43a15a366dea073072cccf800fdbd9c63fb83b77c783674e1e0900013fddd833",
+ "sha256:45f4194d1d72d01cc1034ab2ed3e1d34c5e9b58652dab5222f54e6051456ecd1",
+ "sha256:48bc5e40218f4e20e6734d9f945c634d5c8e2514b98ed1cf5650961f65c71501",
+ "sha256:4b136250e3abf6cd366db3516c0df8fc3bdf485dbb681e09cda6f58ea63a6db0",
+ "sha256:4bebb8792220b91d7d97a8f0fe1b07002e3947471f80c7b872f8a994ee4c0926",
+ "sha256:4ea6a0137d02dcc87db56099d79ec859d0b3dece7557cae02c1bb4e332be440b",
+ "sha256:553b94b068a3fe22dc7c5724d1a312d3bc6daed40ad36138b0ad4b3667e34c09",
+ "sha256:5579bcf9e99ff85c7bba3eb98642059a9580e2d4aa038a19fef814512c4392c2",
+ "sha256:5988d7a7d0c469eebbe30a59442980dd950369ea824bffef499eeb7920e63db5",
+ "sha256:59dcc5a65af2e8b35594466b1ca4005e03c4ee5dd90d88113334c4cef45ee035",
+ "sha256:5a69abf48ba14df28901d00156023799dd6d9d25489018f8dca0f784d5b48003",
+ "sha256:5a8ff2bb744d17c7ba4fd1158feada8a49c77b28105c077858b1d8ac90e0e8ff",
+ "sha256:5cddbf849e6292da3cd9e0e2352392817db041cf368517ac0618c273188e4aaf",
+ "sha256:5d91f1ad69e4012c4afc0aa7287339d036b6b7c554ebfc583b06ec47751963a3",
+ "sha256:5dfd149622d688d2aefd50f74dea6ced1663e5ddedda0fb040bfc0fa0ddb15c7",
+ "sha256:604609f8c636c16795426233691e35ab1877fd2b7833331aec62d5dac57ffb63",
+ "sha256:607955e6d8faf304ef9c0186f11e479b7e175c894d1eb312ea1234b997d1e5a4",
+ "sha256:60eb8ffde52d989ddd2a403c3d7c0268447b663e75bd52e6e10fecdcf673c90e",
+ "sha256:63d4991769497044531ac25390d6dcb960402425eb670022274a830c505bda07",
+ "sha256:689099fbe46258f6e4722a3ec595fd785375fadc853020543f75bdf3e23ffbf4",
+ "sha256:68ffa2545329144ec419587175620f67882c0d062d0dd749f6524d608a92d63c",
+ "sha256:6b53faece345c5b711713337777cf2e8c148359df44ec819949022072372d1ac",
+ "sha256:6b5a50b598064d4cf0f0b417fbe0136f0eb059c3a9c0b671ced299d6c4214267",
+ "sha256:70cba7ca6b7e64071524d43f1af0921085f8585c80714605e4d968fb947cf25d",
+ "sha256:7230cc64bcc661ed92c7d94ea3f394c3e79a24588e988203214847d15f3ef7a7",
+ "sha256:738295b071749da7e25f81f25245fdafbf310cbf68e1a9a91e61658f6542fd0b",
+ "sha256:760030712d5273396f5e963dd8731aefb5ac65d92eff8bf8fd4124c1630fe950",
+ "sha256:7713b5fd295e0934cf6c7778944bf750c7a78d69b7efb3fd68ba7ca1e12ddbd2",
+ "sha256:7a58963c9cd335d092d11c7242a6433806e70410fa66aafefe0cefd9bba30f42",
+ "sha256:7d4f876d42236d47ef247076501a2c74849b52070637d8cca905d06a710794ce",
+ "sha256:7edf57a0f5f9da3fe8997bb7a11007c6e01b757bd72beee99ecdb7491877c5a9",
+ "sha256:7f34eef2370377f55df184c033864f4d371bef50688867929b1cf85e796e8c22",
+ "sha256:809ce9a77eef51089c98360312ef59ece7839af331f9aea7afbf40842d7116f5",
+ "sha256:8b0636b1fc9f97d416005ddd3c59d5ce0ae98580534d830625c692d31053f486",
+ "sha256:8b64ea3ea1d3988a10227507f122b8b1ae65d7491a7f21e622fade6af313c29c",
+ "sha256:9899df13e8518a807392febc9922372f904f72fc7b07c3b849e651bb2c51cdcc",
+ "sha256:98d260af271353d3eaea3a44ab610db25c7eb3a489d39cfdd20a6ccb482dba92",
+ "sha256:aab836f17b9625b8511f5f9c76fd4598c16e9d7a27d314cd12fc1de987f3bf58",
+ "sha256:aada43e1a378eef21418b34fe33194d42f74ca98e9541cfacd4e49470050937a",
+ "sha256:b0503575acd52a828017b10b5358f39bdb3a55918e10ac5ee96533db374f7d94",
+ "sha256:b43404eee3f543696d55583283b0df919ded8a152f5a1226efdc2a0694189a27",
+ "sha256:b518e7e38cb47c22243fbddd12ef85f24852f60f1a7152fd92a8290128cc1643",
+ "sha256:b91157e7d431c146acf694bf6cb8657bd76aa66805dd79fa03aef13e14d9a2ff",
+ "sha256:ba6a0ec6df9e75f18c8bf33cad1e983b55ad8f6965c99ae2d9f6e7f73bac6cdb",
+ "sha256:bd24a43ebef9deb5bea8f9f63ce0e0e1831fa0ffd663404bc06460ed53cbf0e4",
+ "sha256:bdf4e0d935fd1c7c7f1e4e97ae63b646eddf23dac2e06178f5238b10c3c1d2d8",
+ "sha256:c1c0a0270e41a2dd982824cd2fd4960f4c09c97514c6ed58056834054637de39",
+ "sha256:c2a96fa5fcc10eef350bf3cf685fd5c9c90cd5548e57369881b736bb5848dcf9",
+ "sha256:c33ee203ab6310ba0f43069a6b7acf89313da9acedc4c9a1df21b250cd9dc69f",
+ "sha256:c82c96f0982eadfa5552a95df93ae563cc46a7948ba15542e03999ed82d3b6f8",
+ "sha256:c8ec99552cd5f2b1caba63d082ea3cbdf0872d8634d04233b9000ac0c1aebfcf",
+ "sha256:ca36d2337064a980e2f565ea28618744d85e75ea1b5b47be18d543810c413102",
+ "sha256:caf3bed9165ff503b9a381ce13655e0487499094b2065e8d90f55d98b28623ba",
+ "sha256:ce67c0de8d0aaa707d45dec3a4da360e7432fb396d832dda608bc1ab3534abe2",
+ "sha256:d0b046d05a60ce4026c3732e35e99e0c876e143b4dc22bf875ecd6fc87a90e48",
+ "sha256:d19171e45156935eb75879d39f9dc69d21140fdcba40c441ba5e866eacdd3804",
+ "sha256:d25089fe7d6160ff98613e9e28844aad431453abd7fad820117ab901c36c1fae",
+ "sha256:d29bafd459d925339771ef0cb5c83bd7f5f4b5743fc717d55428b77d41032145",
+ "sha256:d3faf580c2b0c70f778b1a22a0afc4bc225076d50ae3f9e354237259d83af97b",
+ "sha256:d76c75c032c674897338df93dc660d02316f5217c8075f2e9ebfcfdbc798a6e0",
+ "sha256:db39dbb443ad89fe6c2059dd4a2bcb80bfbe6b9d2ed137c4c83b476e826b76ad",
+ "sha256:db60e884ba66182eddf62081f262f4080d2f34dd9fcac4ed941ccf0199f7ad28",
+ "sha256:e7876d07cdd1709c7890e0b51ef595600fb72dee40351d0327056300becce601",
+ "sha256:e93adbabe49d2a548cdeb5c9862aacfc21d55899de795cf5de88a56f3e045115",
+ "sha256:f0cbd41c23bf18d3ae6bc65c0ec88f711a1e012bca56a19e6cd04265da1bdf5a",
+ "sha256:f2dcac388cf6995e5c6b4b3cb3acfa8af70e2542c3ae50c294a02a8a06e1534f",
+ "sha256:fb683e0affa423614ea4cd518c6a4d8ac68f0d09928e4188f26be1668d3c0bc7",
+ "sha256:fdfd08101ddbbd178977f05bff94b9dbed677b5f218028412a98361c65a830d5"
+ ],
+ "version": "==2.1"
},
"markdown-it-py": {
"hashes": [
@@ -4325,12 +4469,21 @@
},
"more-itertools": {
"hashes": [
- "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d",
- "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"
+ "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684",
+ "sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==9.1.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==10.2.0"
+ },
+ "msrest": {
+ "hashes": [
+ "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32",
+ "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.6'",
+ "version": "==0.7.1"
},
"multi-key-dict": {
"hashes": [
@@ -4424,44 +4577,44 @@
},
"mypy": {
"hashes": [
- "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340",
- "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49",
- "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82",
- "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce",
- "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb",
- "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51",
- "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5",
- "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e",
- "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7",
- "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33",
- "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9",
- "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1",
- "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6",
- "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a",
- "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe",
- "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7",
- "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200",
- "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7",
- "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a",
- "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28",
- "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea",
- "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120",
- "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d",
- "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42",
- "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea",
- "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2",
- "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.8'",
- "version": "==1.7.1"
+ "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6",
+ "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913",
+ "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129",
+ "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc",
+ "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974",
+ "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374",
+ "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150",
+ "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03",
+ "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9",
+ "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02",
+ "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89",
+ "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2",
+ "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d",
+ "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3",
+ "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612",
+ "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e",
+ "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3",
+ "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e",
+ "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd",
+ "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04",
+ "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed",
+ "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185",
+ "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf",
+ "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b",
+ "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4",
+ "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f",
+ "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.8'",
+ "version": "==1.9.0"
},
"mypy-boto3-logs": {
"hashes": [
- "sha256:4a592d7d8180d45197197af4dc959f61b5c4d989a0ba615ecde6e21661c4013f",
- "sha256:6d913c9be74c9eff71910b5af18f8beeeae524dae273fea7b47ead0ee1a09134"
+ "sha256:53c4988f655e21d2834dadcc600f3c182f34924c37d7a25bbd1b10857acb8b18",
+ "sha256:cf5fac4801dd92f05007fb1b4444ff98258544d1f21e64e9228e34188046f841"
],
- "version": "==1.26.149"
+ "version": "==1.34.66"
},
"mypy-extensions": {
"hashes": [
@@ -4473,42 +4626,47 @@
},
"mypy-protobuf": {
"hashes": [
- "sha256:15604f6943b16c05db646903261e3b3e775cf7f7990b7c37b03d043a907b650d",
- "sha256:24f3b0aecb06656e983f58e07c732a90577b9d7af3e1066fc2b663bbf0370248"
+ "sha256:02f242eb3409f66889f2b1a3aa58356ec4d909cdd0f93115622e9e70366eca3c",
+ "sha256:56176e4d569070e7350ea620262478b49b7efceba4103d468448f1d21492fd6c"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==3.3.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.6.0"
},
"networkx": {
"hashes": [
- "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6",
- "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"
+ "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9",
+ "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2",
+ "sha256:34aa6c15ab30af14fb5304096872e573494c87f909929169121dd7f8dc375a9d",
+ "sha256:99abc769d26d4008974e6da5a0e9509d5c05d645b002e6e96030b7a50a951a5c",
+ "sha256:a7cafccea6b22ebe320ab6a2f13d76ae6b47054bf24bfa3674ddef91e74c914f",
+ "sha256:c75056e63ab1438680e56753fc89c4f32f3a7038215cfa0c4483a023b97e0f3a",
+ "sha256:ce0779e4827a646078fd8bfc4225988430d8bfea5c03f86afc4ec403896618dc"
],
"index": "pypi",
- "markers": "python_version >= '3.9'",
- "version": "==3.2.1"
+ "markers": "python_version >= '3.10'",
+ "version": "==3.3"
},
"nh3": {
"hashes": [
- "sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770",
- "sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf",
- "sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305",
- "sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601",
- "sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28",
- "sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7",
- "sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3",
- "sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911",
- "sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf",
- "sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0",
- "sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5",
- "sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97",
- "sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d",
- "sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e",
- "sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3",
- "sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6"
- ],
- "version": "==0.2.15"
+ "sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a",
+ "sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911",
+ "sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb",
+ "sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a",
+ "sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc",
+ "sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028",
+ "sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9",
+ "sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3",
+ "sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351",
+ "sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10",
+ "sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71",
+ "sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f",
+ "sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b",
+ "sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a",
+ "sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062",
+ "sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a"
+ ],
+ "version": "==0.2.17"
},
"nodeenv": {
"hashes": [
@@ -4518,21 +4676,37 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'",
"version": "==1.8.0"
},
+ "oauthlib": {
+ "hashes": [
+ "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca",
+ "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.6'",
+ "version": "==3.2.2"
+ },
"packaging": {
"hashes": [
- "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5",
- "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"
+ "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5",
+ "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"
],
"markers": "python_version >= '3.7'",
- "version": "==23.2"
+ "version": "==24.0"
+ },
+ "packaging-legacy": {
+ "hashes": [
+ "sha256:6cd21cd283c09409349bccc10bb55bfd837b4aab86a7b0f87bfcb8dd9831a8a3",
+ "sha256:c974a42291a77112313f0198b87ad96e07a3c357295d572560a4b9c368f7d9db"
+ ],
+ "version": "==23.0.post0"
},
"parso": {
"hashes": [
- "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0",
- "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"
+ "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18",
+ "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"
],
"markers": "python_version >= '3.6'",
- "version": "==0.8.3"
+ "version": "==0.8.4"
},
"pathspec": {
"hashes": [
@@ -4568,122 +4742,77 @@
},
"pkginfo": {
"hashes": [
- "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546",
- "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046"
+ "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297",
+ "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097"
],
"markers": "python_version >= '3.6'",
- "version": "==1.9.6"
+ "version": "==1.10.0"
},
"platformdirs": {
"hashes": [
- "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380",
- "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"
+ "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf",
+ "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"
],
"markers": "python_version >= '3.8'",
- "version": "==4.1.0"
+ "version": "==4.2.1"
},
"playwright": {
"hashes": [
- "sha256:35b7e0b389df2aa632f3614d35be7bace35f6f634d880db44b035c83e4481312",
- "sha256:382a7465cc0ea3bf7fa66716bd37fd53f66af4bcc5c72283a8eff3f6e87758a8",
- "sha256:3ae90ea5ad776fe5e1300a9c730244c8e57a183c6eb261044418710d51ae03c0",
- "sha256:4ec3746de69e7ba912b70e0fe3a3c6b8af97f21ece793c5db27c251da4d2f3e6",
- "sha256:6a842dca4dd53feda1d7bd0e14aa65140e4e816452ebddd307e90cad184d92bd",
- "sha256:ba5a89953aedb158025e4581eafb6fdeebb3d58acd9ce24b59f691b1e2a861bc",
- "sha256:f11e1ec32f3b3dbd7f24d1481c313cb527001955004ee88a73f9b4a610d0db28"
+ "sha256:50d9a5c07c76456945a2296d63f78fdf6eb11aed3e8d39bb5ccbda760a8d6d41",
+ "sha256:87191272c40b4c282cf2c9449ca3acaf705f38ac6e2372270f1617ce16b661b8",
+ "sha256:9b7bd707eeeaebee47f656b2de90aa9bd85e9ca2c6af7a08efd73896299e4d50",
+ "sha256:b03b12bd4da9c2cfb78dff820deac8b52892fe3c2f89a4d95d6f08c59e41deb9",
+ "sha256:bd8b818904b17e2914be23e7bc2a340b203f57fe81678520b10f908485b056ea",
+ "sha256:e05a8d8fb2040c630429cca07e843c8fa33059717837c8f50c01b7d1fc651ce1",
+ "sha256:e9ec21b141727392f630761c7f4dec46d80c98243614257cc501b64ff636d337"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==1.40.0"
+ "version": "==1.43.0"
},
"pluggy": {
"hashes": [
- "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12",
- "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"
+ "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1",
+ "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"
],
"markers": "python_version >= '3.8'",
- "version": "==1.3.0"
+ "version": "==1.5.0"
},
"polyfactory": {
"hashes": [
- "sha256:03acb0718f4efb2458c62eb8a2c888294c5b5bf2db31e0efc15a57ecc9eb3c2e",
- "sha256:d1e6d8952789de61dca2c32f3e3c9362d7681cf405cf9a41267915e0e33f7639"
+ "sha256:a3ff5263756ad74acf4001f04c1b6aab7d1197cbaa070352df79573a8dcd85ec",
+ "sha256:ff5b6a8742cbd6fbde9f81310b9732d5421fbec31916d6ede5a977753110fbe9"
],
"index": "pypi",
"markers": "python_version >= '3.8' and python_version < '4.0'",
- "version": "==2.13.0"
+ "version": "==2.15.0"
},
"pre-commit": {
"hashes": [
- "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376",
- "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"
+ "sha256:5eae9e10c2b5ac51577c3452ec0a490455c45a0533f7960f993a0d01e59decab",
+ "sha256:e209d61b8acdcf742404408531f0c37d49d2c734fd7cff2d6076083d191cb060"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
- "version": "==3.6.0"
- },
- "preprocess": {
- "hashes": [
- "sha256:ae799e3ab836efeb4b9958bf0e54b5248a368bcdfd77e2c5467b8c56fe00dea1",
- "sha256:fb751f3218b7948a23cd821588c296695c0141f75a2e48201afe8e266518abaa"
- ],
- "index": "pypi",
- "version": "==2.0.0"
+ "version": "==3.7.0"
},
"protobuf": {
"hashes": [
- "sha256:0405c3c1cbcc5f827c4a681558d3c628b0a0ac8a7eaea840e521ea427fbe803c",
- "sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf",
- "sha256:091a3b6bea4b01ad77846598b77e7f56a51c28214abfd31054ef0ea7c666c064",
- "sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f",
- "sha256:109f003328dd46b96e318ba4a4c6a82dd128e4d786c273c45dcc93a4b2630ece",
- "sha256:26355216684829155238c27858a909426423880740d32293e4efc262385c321b",
- "sha256:2845c86bd3dfae3b2d8e4697e7b7afe1bd05ee2d8c71171de1975d3449009e39",
- "sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f",
- "sha256:2a82a269769dd693480b0dd8267dadbadf50dcc33dbf0c602d643c8367896b60",
- "sha256:318e1a0e10fc062b6f52e9c4922f4ce2545d13480f11f1cea67852b560461c56",
- "sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7",
- "sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996",
- "sha256:439712847df0920fbdc4e490240edd8bb025f0bb9b529fb465242d2365a6f6f0",
- "sha256:497cbc7c0d034d6061be631b332433560d12ca8cb603a3132d978c44571d043b",
- "sha256:4b255dc7714eb904a5de2578a5f0358132c6eb28c3b9d8abfc307de274881e4f",
- "sha256:4d5eefb8b11f5904cb226036168120a440451da1b370fbc1315b2a11af026590",
- "sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067",
- "sha256:6960da4d4c16adb02c07ed4f55d1669b1cfe0180d09550d47f2f15b3563b7504",
- "sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c",
- "sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7",
- "sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9",
- "sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c",
- "sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739",
- "sha256:7d8ed8d87a008685f7950a0545180a2457d8601f3150ec2288f185195cb54506",
- "sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91",
- "sha256:8f4b3f2de9559da9ce9f6099e8c0423470d64fc6e88b8a9ccecb104b33c975d3",
- "sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c",
- "sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153",
- "sha256:a80b13b6c31cfe2fd43846d99e740e9f5f22ace756a26d59897185d84d31210f",
- "sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9",
- "sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388",
- "sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e",
- "sha256:af908d773fa818256f6159556d3bcb8db71415c0219299cebad01df123730c51",
- "sha256:c8d375262a9efe44ac73985c62a2722b155b7e33f4a4bd4066c7a1b24fce93c2",
- "sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab",
- "sha256:cc18e48ff46cf0c853713413add97cfdc14672aa4a7a1f7a2e0471712430c85f",
- "sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde",
- "sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531",
- "sha256:cf45ce9e038a19f770e84b5ba5eb4434b044fc633247b903ae728c66b210f7b1",
- "sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8",
- "sha256:dd3d652fec35c01f737b034a8726677bc8a8767981ed25c4fd3eb4dbe4b9ab9b",
- "sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7",
- "sha256:dfe8f342fb5c2f92dcaf3855b532d02e9d7ff847342b2b3ae324aa102c7a2fb3",
- "sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20",
- "sha256:f26f89a4495ea4f2c4abc703b8f68ab1f6c5ebf18a8732df39e8bdf7b9d94da4",
- "sha256:f899a5661f45dbd8ff0261c22a327c1333a317450c836874ab3c34ffd7053bd8",
- "sha256:fcd931cfd80ab29412588c62735b2783e34350bbf03eff277988debea4c3f8a6",
- "sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3"
+ "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4",
+ "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8",
+ "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c",
+ "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d",
+ "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4",
+ "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa",
+ "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c",
+ "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019",
+ "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9",
+ "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c",
+ "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==3.20.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==4.25.3"
},
"ptyprocess": {
"hashes": [
@@ -4709,10 +4838,11 @@
},
"pycparser": {
"hashes": [
- "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9",
- "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
+ "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6",
+ "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"
],
- "version": "==2.21"
+ "markers": "python_version >= '3.8'",
+ "version": "==2.22"
},
"pydantic": {
"hashes": [
@@ -4836,11 +4966,11 @@
},
"pyee": {
"hashes": [
- "sha256:9bcc9647822234f42c228d88de63d0f9ffa881e87a87f9d36ddf5211f6ac977d",
- "sha256:a642c51e3885a33ead087286e35212783a4e9b8d6514a10a5db4e57ac57b2b29"
+ "sha256:5d346a7d0f861a4b2e6c47960295bd895f816725b27d656181947346be98d7c1",
+ "sha256:b53af98f6990c810edd9b56b87791021a8f54fd13db4edd1142438d44ba2263f"
],
"markers": "python_version >= '3.8'",
- "version": "==11.0.1"
+ "version": "==11.1.0"
},
"pyenchant": {
"hashes": [
@@ -4854,11 +4984,11 @@
},
"pyflakes": {
"hashes": [
- "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774",
- "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"
+ "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f",
+ "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"
],
"markers": "python_version >= '3.8'",
- "version": "==3.1.0"
+ "version": "==3.2.0"
},
"pygments": {
"hashes": [
@@ -4870,12 +5000,12 @@
},
"pylint": {
"hashes": [
- "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b",
- "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"
+ "sha256:507a5b60953874766d8a366e8e8c7af63e058b26345cfcb5f91f89d987fd6b74",
+ "sha256:6a69beb4a6f63debebaab0a3477ecd0f559aa726af4954fc948c51f7a2549e23"
],
"index": "pypi",
"markers": "python_full_version >= '3.8.0'",
- "version": "==3.0.3"
+ "version": "==3.1.0"
},
"pylint-plugin-utils": {
"hashes": [
@@ -4911,92 +5041,92 @@
},
"pymongo": {
"hashes": [
- "sha256:00c199e1c593e2c8b033136d7a08f0c376452bac8a896c923fcd6f419e07bdd2",
- "sha256:010bc9aa90fd06e5cc52c8fac2c2fd4ef1b5f990d9638548dde178005770a5e8",
- "sha256:026a24a36394dc8930cbcb1d19d5eb35205ef3c838a7e619e04bd170713972e7",
- "sha256:061598cbc6abe2f382ab64c9caa83faa2f4c51256f732cdd890bcc6e63bfb67e",
- "sha256:13552ca505366df74e3e2f0a4f27c363928f3dff0eef9f281eb81af7f29bc3c5",
- "sha256:13d613c866f9f07d51180f9a7da54ef491d130f169e999c27e7633abe8619ec9",
- "sha256:144a31391a39a390efce0c5ebcaf4bf112114af4384c90163f402cec5ede476b",
- "sha256:1461199b07903fc1424709efafe379205bf5f738144b1a50a08b0396357b5abf",
- "sha256:154b361dcb358ad377d5d40df41ee35f1cc14c8691b50511547c12404f89b5cb",
- "sha256:1c5654bb8bb2bdb10e7a0bc3c193dd8b49a960b9eebc4381ff5a2043f4c3c441",
- "sha256:1de3c6faf948f3edd4e738abdb4b76572b4f4fdfc1fed4dad02427e70c5a6219",
- "sha256:1ed23b0e2dac6f84f44c8494fbceefe6eb5c35db5c1099f56ab78fc0d94ab3af",
- "sha256:1f2b856518bfcfa316c8dae3d7b412aecacf2e8ba30b149f5eb3b63128d703b9",
- "sha256:2346450a075625c4d6166b40a013b605a38b6b6168ce2232b192a37fb200d588",
- "sha256:262356ea5fcb13d35fb2ab6009d3927bafb9504ef02339338634fffd8a9f1ae4",
- "sha256:27b81ecf18031998ad7db53b960d1347f8f29e8b7cb5ea7b4394726468e4295e",
- "sha256:2940aa20e9cc328e8ddeacea8b9a6f5ddafe0b087fedad928912e787c65b4909",
- "sha256:2d4ccac3053b84a09251da8f5350bb684cbbf8c8c01eda6b5418417d0a8ab198",
- "sha256:2dd2f6960ee3c9360bed7fb3c678be0ca2d00f877068556785ec2eb6b73d2414",
- "sha256:3071ec998cc3d7b4944377e5f1217c2c44b811fae16f9a495c7a1ce9b42fb038",
- "sha256:3094c7d2f820eecabadae76bfec02669567bbdd1730eabce10a5764778564f7b",
- "sha256:30b2c9caf3e55c2e323565d1f3b7e7881ab87db16997dc0cbca7c52885ed2347",
- "sha256:3177f783ae7e08aaf7b2802e0df4e4b13903520e8380915e6337cdc7a6ff01d8",
- "sha256:31dab1f3e1d0cdd57e8df01b645f52d43cc1b653ed3afd535d2891f4fc4f9712",
- "sha256:33bb16a07d3cc4e0aea37b242097cd5f7a156312012455c2fa8ca396953b11c4",
- "sha256:349093675a2d3759e4fb42b596afffa2b2518c890492563d7905fac503b20daa",
- "sha256:39d77d8bbb392fa443831e6d4ae534237b1f4eee6aa186f0cdb4e334ba89536e",
- "sha256:3a7f02a58a0c2912734105e05dedbee4f7507e6f1bd132ebad520be0b11d46fd",
- "sha256:3b287e814a01deddb59b88549c1e0c87cefacd798d4afc0c8bd6042d1c3d48aa",
- "sha256:3c74f4725485f0a7a3862cfd374cc1b740cebe4c133e0c1425984bcdcce0f4bb",
- "sha256:3cadf7f4c8e94d8a77874b54a63c80af01f4d48c4b669c8b6867f86a07ba994f",
- "sha256:3d18a9b9b858ee140c15c5bfcb3e66e47e2a70a03272c2e72adda2482f76a6ad",
- "sha256:3f0e6a6c807fa887a0c51cc24fe7ea51bb9e496fe88f00d7930063372c3664c3",
- "sha256:4344c30025210b9fa80ec257b0e0aab5aa1d5cca91daa70d82ab97b482cc038e",
- "sha256:4497d49d785482cc1a44a0ddf8830b036a468c088e72a05217f5b60a9e025012",
- "sha256:547dc5d7f834b1deefda51aedb11a7af9c51c45e689e44e14aa85d44147c7657",
- "sha256:5556e306713e2522e460287615d26c0af0fe5ed9d4f431dad35c6624c5d277e9",
- "sha256:55dac73316e7e8c2616ba2e6f62b750918e9e0ae0b2053699d66ca27a7790105",
- "sha256:56816e43c92c2fa8c11dc2a686f0ca248bea7902f4a067fa6cbc77853b0f041e",
- "sha256:5bd94c503271e79917b27c6e77f7c5474da6930b3fb9e70a12e68c2dff386b9a",
- "sha256:5ec31adc2e988fd7db3ab509954791bbc5a452a03c85e45b804b4bfc31fa221d",
- "sha256:69247f7a2835fc0984bbf0892e6022e9a36aec70e187fcfe6cae6a373eb8c4de",
- "sha256:6a0ae7a48a6ef82ceb98a366948874834b86c84e288dbd55600c1abfc3ac1d88",
- "sha256:6a1810c2cbde714decf40f811d1edc0dae45506eb37298fd9d4247b8801509fe",
- "sha256:6dcc95f4bb9ed793714b43f4f23a7b0c57e4ef47414162297d6f650213512c19",
- "sha256:76013fef1c9cd1cd00d55efde516c154aa169f2bf059b197c263a255ba8a9ddf",
- "sha256:77e0df59b1a4994ad30c6d746992ae887f9756a43fc25dec2db515d94cf0222d",
- "sha256:7bb0e9049e81def6829d09558ad12d16d0454c26cabe6efc3658e544460688d9",
- "sha256:88beb444fb438385e53dc9110852910ec2a22f0eab7dd489e827038fdc19ed8d",
- "sha256:8b47ebd89e69fbf33d1c2df79759d7162fc80c7652dacfec136dae1c9b3afac7",
- "sha256:8d219b4508f71d762368caec1fc180960569766049bbc4d38174f05e8ef2fe5b",
- "sha256:8ec75f35f62571a43e31e7bd11749d974c1b5cd5ea4a8388725d579263c0fdf6",
- "sha256:9167e735379ec43d8eafa3fd675bfbb12e2c0464f98960586e9447d2cf2c7a83",
- "sha256:9a710c184ba845afb05a6f876edac8f27783ba70e52d5eaf939f121fc13b2f59",
- "sha256:9aafd036f6f2e5ad109aec92f8dbfcbe76cff16bad683eb6dd18013739c0b3ae",
- "sha256:9c79d597fb3a7c93d7c26924db7497eba06d58f88f58e586aa69b2ad89fee0f8",
- "sha256:a2831e05ce0a4df10c4ac5399ef50b9a621f90894c2a4d2945dc5658765514ed",
- "sha256:a5e641f931c5cd95b376fd3c59db52770e17bec2bf86ef16cc83b3906c054845",
- "sha256:b10d8cda9fc2fcdcfa4a000aa10413a2bf8b575852cd07cb8a595ed09689ca98",
- "sha256:b435b13bb8e36be11b75f7384a34eefe487fe87a6267172964628e2b14ecf0a7",
- "sha256:b7b1a83ce514700276a46af3d9e481ec381f05b64939effc9065afe18456a6b9",
- "sha256:b8729dbf25eb32ad0dc0b9bd5e6a0d0b7e5c2dc8ec06ad171088e1896b522a74",
- "sha256:bbed8cccebe1169d45cedf00461b2842652d476d2897fd1c42cf41b635d88746",
- "sha256:c258dbacfff1224f13576147df16ce3c02024a0d792fd0323ac01bed5d3c545d",
- "sha256:c30a9e06041fbd7a7590693ec5e407aa8737ad91912a1e70176aff92e5c99d20",
- "sha256:c91ea3915425bd4111cb1b74511cdc56d1d16a683a48bf2a5a96b6a6c0f297f7",
- "sha256:d0355cff58a4ed6d5e5f6b9c3693f52de0784aa0c17119394e2a8e376ce489d4",
- "sha256:d483793a384c550c2d12cb794ede294d303b42beff75f3b3081f57196660edaf",
- "sha256:d4c2be9760b112b1caf649b4977b81b69893d75aa86caf4f0f398447be871f3c",
- "sha256:d8e62d06e90f60ea2a3d463ae51401475568b995bafaffd81767d208d84d7bb1",
- "sha256:da08ea09eefa6b960c2dd9a68ec47949235485c623621eb1d6c02b46765322ac",
- "sha256:dd1fa413f8b9ba30140de198e4f408ffbba6396864c7554e0867aa7363eb58b2",
- "sha256:e2aced6fb2f5261b47d267cb40060b73b6527e64afe54f6497844c9affed5fd0",
- "sha256:e438417ce1dc5b758742e12661d800482200b042d03512a8f31f6aaa9137ad40",
- "sha256:e470fa4bace5f50076c32f4b3cc182b31303b4fefb9b87f990144515d572820b",
- "sha256:eaf2f65190c506def2581219572b9c70b8250615dc918b3b7c218361a51ec42e",
- "sha256:ef102a67ede70e1721fe27f75073b5314911dbb9bc27cde0a1c402a11531e7bd",
- "sha256:ef801027629c5b511cf2ba13b9be29bfee36ae834b2d95d9877818479cdc99ea",
- "sha256:f7acc03a4f1154ba2643edeb13658d08598fe6e490c3dd96a241b94f09801626",
- "sha256:f9756f1d25454ba6a3c2f1ef8b7ddec23e5cdeae3dc3c3377243ae37a383db00",
- "sha256:ff62ba8ff70f01ab4fe0ae36b2cb0b5d1f42e73dfc81ddf0758cd9f77331ad25",
- "sha256:ff925f1cca42e933376d09ddc254598f8c5fcd36efc5cac0118bb36c36217c41"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==4.6.1"
+ "sha256:00e6cfce111883ca63a3c12878286e0b89871f4b840290e61fb6f88ee0e687be",
+ "sha256:01277a7e183c59081368e4efbde2b8f577014431b257959ca98d3a4e8682dd51",
+ "sha256:0182899aafe830f25cf96c5976d724efeaaf7b6646c15424ad8dd25422b2efe1",
+ "sha256:098d420a8214ad25f872de7e8b309441995d12ece0376218a04d9ed5d2222cf3",
+ "sha256:0a4ea44e5a913bdb7c9abd34c69e9fcfac10dfaf49765463e0dc1ea922dd2a9d",
+ "sha256:0e208f2ab7b495eff8fd175022abfb0abce6307ac5aee3f4de51fc1a459b71c9",
+ "sha256:138b9fa18d40401c217bc038a48bcde4160b02d36d8632015b1804971a2eaa2f",
+ "sha256:14a82593528cddc93cfea5ee78fac95ae763a3a4e124ca79ee0b24fbbc6da1c9",
+ "sha256:151361c101600a85cb1c1e0db4e4b28318b521fcafa9b62d389f7342faaaee80",
+ "sha256:17c1c143ba77d6e21fc8b48e93f0a5ed982a23447434e9ee4fbb6d633402506b",
+ "sha256:18e5c161b18660f1c9d1f78236de45520a436be65e42b7bb51f25f74ad22bdde",
+ "sha256:1c2761302b6cbfd12e239ce1b8061d4cf424a361d199dcb32da534985cae9350",
+ "sha256:26d036e0f5de09d0b21d0fc30314fcf2ae6359e4d43ae109aa6cf27b4ce02d30",
+ "sha256:2a6ae9a600bbc2dbff719c98bf5da584fb8a4f2bb23729a09be2e9c3dbc61c8a",
+ "sha256:2ef1b4992ee1cb8bb16745e70afa0c02c5360220a7a8bb4775888721f052d0a6",
+ "sha256:36d7049fc183fe4edda3eae7f66ea14c660921429e082fe90b4b7f4dc6664a70",
+ "sha256:391aea047bba928006114282f175bc8d09c53fe1b7d8920bf888325e229302fe",
+ "sha256:3b909e5b1864de01510079b39bbdc480720c37747be5552b354bc73f02c24a3c",
+ "sha256:3e1ba5a037c526a3f4060c28f8d45d71ed9626e2bf954b0cd9a8dcc3b45172ee",
+ "sha256:400074090b9a631f120b42c61b222fd743490c133a5d2f99c0208cefcccc964e",
+ "sha256:462684a6f5ce6f2661c30eab4d1d459231e0eed280f338e716e31a24fc09ccb3",
+ "sha256:4670edbb5ddd71a4d555668ef99b032a5f81b59e4145d66123aa0d831eac7883",
+ "sha256:48c60bd32ec141c0d45d8471179430003d9fb4490da181b8165fb1dce9cc255c",
+ "sha256:4955be64d943b30f2a7ff98d818ca530f7cb37450bc6b32c37e0e74821907ef8",
+ "sha256:4a0660ce32d8459b7f12dc3ca0141528fead62d3cce31b548f96f30902074cc0",
+ "sha256:4d167d546352869125dc86f6fda6dffc627d8a9c8963eaee665825f2520d542b",
+ "sha256:53451190b8628e1ce7d1fe105dc376c3f10705127bd3b51fe3e107b9ff1851e6",
+ "sha256:5c8a4982f5eb767c6fbfb8fb378683d09bcab7c3251ba64357eef600d43f6c23",
+ "sha256:5f465cca9b178e7bb782f952dd58e9e92f8ba056e585959465f2bb50feddef5f",
+ "sha256:60931b0e07448afe8866ffff764cd5bf4b1a855dc84c7dcb3974c6aa6a377a59",
+ "sha256:664c64b6bdb31aceb80f0556951e5e2bf50d359270732268b4e7af00a1cf5d6c",
+ "sha256:6b5aec78aa4840e8d6c3881900259892ab5733a366696ca10d99d68c3d73eaaf",
+ "sha256:6cec7279e5a1b74b257d0270a8c97943d745811066630a6bc6beb413c68c6a33",
+ "sha256:6d5b35da9e16cda630baed790ffc3d0d01029d269523a7cec34d2ec7e6823e75",
+ "sha256:6de33f1b2eed91b802ec7abeb92ffb981d052f3604b45588309aae9e0f6e3c02",
+ "sha256:705a9bfd619301ee7e985d6f91f68b15dfcb2f6f36b8cc225cc82d4260d2bce5",
+ "sha256:722f2b709b63311c0efda4fa4c603661faa4bec6bad24a6cc41a3bc6d841bf09",
+ "sha256:731a92dfc4022db763bfa835c6bd160f2d2cba6ada75749c2ed500e13983414b",
+ "sha256:7330245253fbe2e09845069d2f4d35dd27f63e377034c94cb0ddac18bc8b0d82",
+ "sha256:75107a386d4ccf5291e75cce8ca3898430e7907f4cc1208a17c9efad33a1ea84",
+ "sha256:7df8b166d3db6cfead4cf55b481408d8f0935d8bd8d6dbf64507c49ef82c7200",
+ "sha256:7ee79e02a7c5ed34706ecb5dad19e6c7d267cf86d28c075ef3127c58f3081279",
+ "sha256:872bad5c83f7eec9da11e1fef5f858c6a4c79fe4a83c7780e7b0fe95d560ae3f",
+ "sha256:8b3853fb66bf34ce1b6e573e1bbb3cb28763be9d1f57758535757faf1ab2f24a",
+ "sha256:8d0ea740a2faa56f930dc82c5976d96c017ece26b29a1cddafb58721c7aab960",
+ "sha256:8e97c138d811e9367723fcd07c4402a9211caae20479fdd6301d57762778a69f",
+ "sha256:90525454546536544307e6da9c81f331a71a1b144e2d038fec587cc9f9250285",
+ "sha256:9066dff9dc0a182478ca5885d0b8a2b820b462e19459ada109df7a3ced31b272",
+ "sha256:9757602fb45c8ecc1883fe6db7c59c19d87eb3c645ec9342d28a6026837da931",
+ "sha256:98877a9c4ad42df8253a12d8d17a3265781d1feb5c91c767bd153f88feb0b670",
+ "sha256:994386a4d6ad39e18bcede6dc8d1d693ec3ed897b88f86b1841fbc37227406da",
+ "sha256:9b35f8bded43ff91475305445fedf0613f880ff7e25c75ae1028e1260a9b7a86",
+ "sha256:9c9340c7161e112e36ebb97fbba1cdbe7db3dfacb694d2918b1f155a01f3d859",
+ "sha256:9e51e30d67b468a2a634ade928b30cb3e420127f148a9aec60de33f39087bdc4",
+ "sha256:a023804a3ac0f85d4510265b60978522368b5815772262e61e3a2222a8b315c9",
+ "sha256:aa310096450e9c461b7dfd66cbc1c41771fe36c06200440bb3e062b1d4a06b6e",
+ "sha256:af039afc6d787502c02089759778b550cb2f25dbe2780f5b050a2e37031c3fbf",
+ "sha256:af5c5112db04cf62a5d9d224a24f289aaecb47d152c08a457cca81cee061d5bd",
+ "sha256:b3d10bdd46cbc35a2109737d36ffbef32e7420569a87904738ad444ccb7ac2c5",
+ "sha256:b7cf28d9c90e40d4e385b858e4095739829f466f23e08674085161d86bb4bb10",
+ "sha256:bec8e4e88984be157408f1923d25869e1b575c07711cdbdde596f66931800934",
+ "sha256:becfa816545a48c8e740ac2fd624c1c121e1362072d68ffcf37a6b1be8ea187e",
+ "sha256:c2ad3e5bfcd345c0bfe9af69a82d720860b5b043c1657ffb513c18a0dee19c19",
+ "sha256:c4726e36a2f7e92f09f5b8e92ba4db7525daffe31a0dcbcf0533edc0ade8c7d8",
+ "sha256:c67c19f653053ef2ebd7f1837c2978400058d6d7f66ec5760373a21eaf660158",
+ "sha256:c701de8e483fb5e53874aab642235361aac6de698146b02c644389eaa8c137b6",
+ "sha256:cc7a26edf79015c58eea46feb5b262cece55bc1d4929a8a9e0cbe7e6d6a9b0eb",
+ "sha256:ccc15a7c7a99aed7d0831eaf78a607f1db0c7a255f96e3d18984231acd72f70c",
+ "sha256:cd6c15242d9306ff1748681c3235284cbe9f807aeaa86cd17d85e72af626e9a7",
+ "sha256:cdbea2aac1a4caa66ee912af3601557d2bda2f9f69feec83601c78c7e53ece64",
+ "sha256:d30d5d7963453b478016bf7b0d87d7089ca24d93dbdecfbc9aa32f1b4772160a",
+ "sha256:dde9fb6e105ce054339256a8b7a9775212ebb29596ef4e402d7bbc63b354d202",
+ "sha256:e097f877de4d6af13a33ef938bf2a2350f424be5deabf8b857da95f5b080487a",
+ "sha256:e1e1586ebdebe0447a24842480defac17c496430a218486c96e2da3f164c0f05",
+ "sha256:e344d0afdd7c06c1f1e66a4736593293f432defc2191e6b411fc9c82fa8c5adc",
+ "sha256:e4056bc421d4df2c61db4e584415f2b0f1eebb92cbf9222f7f38303467c37117",
+ "sha256:e420e74c6db4594a6d09f39b58c0772679006cb0b4fc40901ba608794d87dad2",
+ "sha256:e458e6fc2b7dd40d15cda04898bd2d8c9ff7ae086c516bc261628d54eb4e3158",
+ "sha256:eaf3d594ebfd5e1f3503d81e06a5d78e33cda27418b36c2491c3d4ad4fca5972",
+ "sha256:ebcc145c74d06296ce0cad35992185064e5cb2aadef719586778c144f0cd4d37",
+ "sha256:f4330c022024e7994b630199cdae909123e4b0e9cf15335de71b146c0f6a2435",
+ "sha256:ff7d1f449fcad23d9bc8e8dc2b9972be38bcd76d99ea5f7d29b2efa929c2a7ff"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.7'",
+ "version": "==4.6.3"
},
"pyproject-hooks": {
"hashes": [
@@ -5008,29 +5138,38 @@
},
"pyrate-limiter": {
"hashes": [
- "sha256:98cc52cdbe058458e945ae87d4fd5a73186497ffa545ee6e98372f8599a5bd34",
- "sha256:a99e52159f5ed5eb58118bed8c645e30818e7c0e0d127a0585c8277c776b0f7f"
+ "sha256:390f97066b322732e498c9e921fbdfd31d9ec0070a14e06da9af0efc62e091e4",
+ "sha256:715e9f08c6fe2a00d0cae5b1a6647d68ffeb2a54dc0cc2cff4e046b067ce6da4"
],
- "markers": "python_version >= '3.7' and python_version < '4.0'",
- "version": "==2.10.0"
+ "markers": "python_version >= '3.8' and python_version < '4.0'",
+ "version": "==3.6.0"
},
"pytest": {
"hashes": [
- "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac",
- "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"
+ "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7",
+ "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.8'",
+ "version": "==8.1.1"
+ },
+ "pytest-check": {
+ "hashes": [
+ "sha256:51b8f18a8ccaa426c5d913c4e0e46f014aaa7579481ea03d22d7e1f498f689b2",
+ "sha256:c54c18f0b890cac1c610c78ef2bb3d8ecb29cf33d1cf09fc1166802d6ab88e28"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==7.4.3"
+ "version": "==2.3.1"
},
"pytest-cov": {
"hashes": [
- "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6",
- "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"
+ "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652",
+ "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==4.1.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==5.0.0"
},
"pytest-html": {
"hashes": [
@@ -5043,39 +5182,29 @@
},
"pytest-metadata": {
"hashes": [
- "sha256:769a9c65d2884bd583bc626b0ace77ad15dbe02dd91a9106d47fd46d9c2569ca",
- "sha256:a17b1e40080401dc23177599208c52228df463db191c1a573ccdffacd885e190"
+ "sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b",
+ "sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8"
],
- "markers": "python_version >= '3.7'",
- "version": "==3.0.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.1.1"
},
"pytest-mock": {
"hashes": [
- "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f",
- "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"
+ "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f",
+ "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==3.12.0"
- },
- "pytest-profiling": {
- "hashes": [
- "sha256:3b255f9db36cb2dd7536a8e7e294c612c0be7f7850a7d30754878e4315d56600",
- "sha256:6bce4e2edc04409d2f3158c16750fab8074f62d404cc38eeb075dff7fcbb996c",
- "sha256:93938f147662225d2b8bd5af89587b979652426a8a6ffd7e73ec4a23e24b7f29",
- "sha256:999cc9ac94f2e528e3f5d43465da277429984a1c237ae9818f8cfd0b06acb019"
- ],
- "index": "pypi",
- "version": "==1.7.0"
+ "version": "==3.14.0"
},
"pytest-random-order": {
"hashes": [
- "sha256:6cb1e59ab0f798bb0c3488c11ae0c70d7d3340306a466d28b28ccd8ef8c20b7e",
- "sha256:dbe6debb9353a7af984cc9eddbeb3577dd4dbbcc1529a79e3d21f68ed9b45605"
+ "sha256:4472d7d34f1f1c5f3a359c4ffc5c13ed065232f31eca19c8844c1ab406e79080",
+ "sha256:882727a8b597ecd06ede28654ffeb8a6d511a1e4abe1054cca7982f2e42008cd"
],
"index": "pypi",
"markers": "python_full_version >= '3.5.0'",
- "version": "==1.1.0"
+ "version": "==1.1.1"
},
"pytest-repeat": {
"hashes": [
@@ -5096,14 +5225,14 @@
},
"pytest-testmon": {
"hashes": [
- "sha256:1a0b53c81b335c70dec780cb21ec0faafc7f4925dfc49e976527727ceb9de654",
- "sha256:87aa6ccd20f299454562c5a1d4b0c2973a439a111b372fac9258930122d28408",
- "sha256:a9848735b53381bf97a421c5c40828f0e1973d8a30748d345edc2108315cbe8d",
- "sha256:b3d20a3ceb099e36727217096a7b3fc662877bd8b0768d2439983924c2a807a6"
+ "sha256:16925a7208c3f543bfa6ad909aeb2712fc835c02323e5def7f87b143783ba32f",
+ "sha256:8271ca47bc8c80760c4fc7fd7895ea786b111bbb31f13eeea879a6fd11fe2226",
+ "sha256:875a0e9336bbaf5bdabccb340ae7ace016ceba4998c6927677e16389443341ee",
+ "sha256:8ebe2c3de42d99306ee54cd4536fed0fc48346a954420da904b18e8d59b5da98"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==2.1.0"
+ "version": "==2.1.1"
},
"pytest-xdist": {
"hashes": [
@@ -5116,12 +5245,12 @@
},
"python-dateutil": {
"hashes": [
- "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86",
- "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"
+ "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3",
+ "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"
],
"index": "pypi",
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
- "version": "==2.8.2"
+ "version": "==2.9.0.post0"
},
"python-jenkins": {
"hashes": [
@@ -5132,12 +5261,12 @@
},
"python-lsp-black": {
"hashes": [
- "sha256:5aa257e9e7b7e5a2316ef2a9fbcd242e82e0f695bf1622e31c0bf5cd69e6113f",
- "sha256:5f583b4395d8d048885974095088ab81e36e501de369cc49a621a82473bb9070"
+ "sha256:8286d2d310c566844b3c116b824ada6fccfa6ba228b1a09a0526b74c04e0805f",
+ "sha256:d5efdee45f5fa9e5241f5d4d396cd46127f45c85817916b1fd92c2986652bf7e"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==1.3.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==2.0.0"
},
"python-lsp-jsonrpc": {
"hashes": [
@@ -5149,12 +5278,12 @@
},
"python-lsp-server": {
"hashes": [
- "sha256:6b947cf9dc33d7bed9abc936bb173140fcf606b6eb50cf02e27d4cb09f10d3fb",
- "sha256:dc0c8298f0222fd66a52aa3170f3a5c8fe3021007a02098bb72f7fd8df353d13"
+ "sha256:278cb41ea69ca9f84ec99d4edc96ff5f2f9e795d240771dc46dc1653f56ddfe3",
+ "sha256:89edd6fb3f7852e4bf5a3d1d95ea41484d1a28fa94b6e3cbff12b9db123b8e86"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==1.9.0"
+ "version": "==1.11.0"
},
"pyyaml": {
"hashes": [
@@ -5187,6 +5316,7 @@
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4",
"sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba",
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8",
+ "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef",
"sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5",
"sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd",
"sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3",
@@ -5215,11 +5345,11 @@
},
"readme-renderer": {
"hashes": [
- "sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d",
- "sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1"
+ "sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311",
+ "sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9"
],
"markers": "python_version >= '3.8'",
- "version": "==42.0"
+ "version": "==43.0"
},
"redis": {
"hashes": [
@@ -5232,11 +5362,11 @@
},
"referencing": {
"hashes": [
- "sha256:689e64fe121843dcfd57b71933318ef1f91188ffb45367332700a86ac8fd6161",
- "sha256:bdcd3efb936f82ff86f993093f6da7435c7de69a3b3a5a06678a6050184bee99"
+ "sha256:81a1471c68c9d5e3831c30ad1dd9815c45b558e596653db751a2bfdd17b3b9ec",
+ "sha256:c19c4d006f1757e3dd75c4f784d38f8698d87b649c54f9ace14e5e8c9667c01d"
],
"markers": "python_version >= '3.8'",
- "version": "==0.32.0"
+ "version": "==0.31.1"
},
"requests": {
"hashes": [
@@ -5247,6 +5377,15 @@
"markers": "python_version >= '3.7'",
"version": "==2.31.0"
},
+ "requests-oauthlib": {
+ "hashes": [
+ "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5",
+ "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
+ "version": "==1.3.1"
+ },
"requests-toolbelt": {
"hashes": [
"sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6",
@@ -5257,12 +5396,12 @@
},
"responses": {
"hashes": [
- "sha256:a2b43f4c08bfb9c9bd242568328c65a34b318741d3fab884ac843c5ceeb543f9",
- "sha256:b127c6ca3f8df0eb9cc82fd93109a3007a86acb24871834c47b77765152ecf8c"
+ "sha256:01ae6a02b4f34e39bffceb0fc6786b67a25eae919c6368d05eabc8d9576c2a66",
+ "sha256:2f0b9c2b6437db4b528619a77e5d565e4ec2a9532162ac1a131a83529db7be1a"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==0.24.1"
+ "version": "==0.25.0"
},
"rfc3986": {
"hashes": [
@@ -5274,149 +5413,149 @@
},
"rich": {
"hashes": [
- "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa",
- "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"
+ "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222",
+ "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"
],
"markers": "python_full_version >= '3.7.0'",
- "version": "==13.7.0"
+ "version": "==13.7.1"
},
"rpds-py": {
"hashes": [
- "sha256:02744236ac1895d7be837878e707a5c35fb8edc5137602f253b63623d7ad5c8c",
- "sha256:03f9c5875515820633bd7709a25c3e60c1ea9ad1c5d4030ce8a8c203309c36fd",
- "sha256:044f6f46d62444800402851afa3c3ae50141f12013060c1a3a0677e013310d6d",
- "sha256:07a2e1d78d382f7181789713cdf0c16edbad4fe14fe1d115526cb6f0eef0daa3",
- "sha256:082e0e55d73690ffb4da4352d1b5bbe1b5c6034eb9dc8c91aa2a3ee15f70d3e2",
- "sha256:13152dfe7d7c27c40df8b99ac6aab12b978b546716e99f67e8a67a1d441acbc3",
- "sha256:13716e53627ad97babf72ac9e01cf9a7d4af2f75dd5ed7b323a7a9520e948282",
- "sha256:13ff62d3561a23c17341b4afc78e8fcfd799ab67c0b1ca32091d71383a98ba4b",
- "sha256:1607cda6129f815493a3c184492acb5ae4aa6ed61d3a1b3663aa9824ed26f7ac",
- "sha256:164fcee32f15d04d61568c9cb0d919e37ff3195919cd604039ff3053ada0461b",
- "sha256:1c24e30d720c0009b6fb2e1905b025da56103c70a8b31b99138e4ed1c2a6c5b0",
- "sha256:1e6fcd0a0f62f2997107f758bb372397b8d5fd5f39cc6dcb86f7cb98a2172d6c",
- "sha256:1fd0f0b1ccd7d537b858a56355a250108df692102e08aa2036e1a094fd78b2dc",
- "sha256:2181e86d4e1cdf49a7320cb72a36c45efcb7670d0a88f09fd2d3a7967c0540fd",
- "sha256:2974e6dff38afafd5ccf8f41cb8fc94600b3f4fd9b0a98f6ece6e2219e3158d5",
- "sha256:2dccc623725d0b298f557d869a68496a2fd2a9e9c41107f234fa5f7a37d278ac",
- "sha256:2df3d07a16a3bef0917b28cd564778fbb31f3ffa5b5e33584470e2d1b0f248f0",
- "sha256:2e7e5633577b3bd56bf3af2ef6ae3778bbafb83743989d57f0e7edbf6c0980e4",
- "sha256:2ee066a64f0d2ba45391cac15b3a70dcb549e968a117bd0500634754cfe0e5fc",
- "sha256:2f1f295a5c28cfa74a7d48c95acc1c8a7acd49d7d9072040d4b694fe11cd7166",
- "sha256:2faa97212b0dc465afeedf49045cdd077f97be1188285e646a9f689cb5dfff9e",
- "sha256:30479a9f1fce47df56b07460b520f49fa2115ec2926d3b1303c85c81f8401ed1",
- "sha256:337a8653fb11d2fbe7157c961cc78cb3c161d98cf44410ace9a3dc2db4fad882",
- "sha256:3423007fc0661827e06f8a185a3792c73dda41f30f3421562f210cf0c9e49569",
- "sha256:373b76eeb79e8c14f6d82cb1d4d5293f9e4059baec6c1b16dca7ad13b6131b39",
- "sha256:3b79c63d29101cbaa53a517683557bb550462394fb91044cc5998dd2acff7340",
- "sha256:3bbc89ce2a219662ea142f0abcf8d43f04a41d5b1880be17a794c39f0d609cb0",
- "sha256:3c11bc5814554b018f6c5d6ae0969e43766f81e995000b53a5d8c8057055e886",
- "sha256:3cd61e759c4075510052d1eca5cddbd297fe1164efec14ef1fce3f09b974dfe4",
- "sha256:3d40fb3ca22e3d40f494d577441b263026a3bd8c97ae6ce89b2d3c4b39ac9581",
- "sha256:3db0c998c92b909d7c90b66c965590d4f3cd86157176a6cf14aa1f867b77b889",
- "sha256:422b0901878a31ef167435c5ad46560362891816a76cc0d150683f3868a6f0d1",
- "sha256:46b4f3d47d1033db569173be62365fbf7808c2bd3fb742314d251f130d90d44c",
- "sha256:485fbdd23becb822804ed05622907ee5c8e8a5f43f6f43894a45f463b2217045",
- "sha256:53304cc14b1d94487d70086e1cb0cb4c29ec6da994d58ae84a4d7e78c6a6d04d",
- "sha256:5595c80dd03d7e6c6afb73f3594bf3379a7d79fa57164b591d012d4b71d6ac4c",
- "sha256:56b51ba29a18e5f5810224bcf00747ad931c0716e3c09a76b4a1edd3d4aba71f",
- "sha256:580182fa5b269c2981e9ce9764367cb4edc81982ce289208d4607c203f44ffde",
- "sha256:5e99d6510c8557510c220b865d966b105464740dcbebf9b79ecd4fbab30a13d9",
- "sha256:5eb05b654a41e0f81ab27a7c3e88b6590425eb3e934e1d533ecec5dc88a6ffff",
- "sha256:62b292fff4739c6be89e6a0240c02bda5a9066a339d90ab191cf66e9fdbdc193",
- "sha256:6a5122b17a4faf5d7a6d91fa67b479736c0cacc7afe791ddebb7163a8550b799",
- "sha256:6a8ff8e809da81363bffca2b965cb6e4bf6056b495fc3f078467d1f8266fe27f",
- "sha256:6c43e1b89099279cc03eb1c725c5de12af6edcd2f78e2f8a022569efa639ada3",
- "sha256:709dc11af2f74ba89c68b1592368c6edcbccdb0a06ba77eb28c8fe08bb6997da",
- "sha256:7e072f5da38d6428ba1fc1115d3cc0dae895df671cb04c70c019985e8c7606be",
- "sha256:813a65f95bfcb7c8f2a70dd6add9b51e9accc3bdb3e03d0ff7a9e6a2d3e174bf",
- "sha256:86c01299942b0f4b5b5f28c8701689181ad2eab852e65417172dbdd6c5b3ccc8",
- "sha256:893e38d0f4319dfa70c0f36381a37cc418985c87b11d9784365b1fff4fa6973b",
- "sha256:8a5f574b92b3ee7d254e56d56e37ec0e1416acb1ae357c4956d76a1788dc58fb",
- "sha256:8b9650f92251fdef843e74fc252cdfd6e3c700157ad686eeb0c6d7fdb2d11652",
- "sha256:8ec464f20fe803ae00419bd1610934e3bda963aeba1e6181dfc9033dc7e8940c",
- "sha256:8f333bfe782a2d05a67cfaa0cc9cd68b36b39ee6acfe099f980541ed973a7093",
- "sha256:8ffdeb7dbd0160d4e391e1f857477e4762d00aa2199c294eb95dfb9451aa1d9f",
- "sha256:911e600e798374c0d86235e7ef19109cf865d1336942d398ff313375a25a93ba",
- "sha256:9235be95662559141934fced8197de6fee8c58870f36756b0584424b6d708393",
- "sha256:938518a11780b39998179d07f31a4a468888123f9b00463842cd40f98191f4d3",
- "sha256:93c18a1696a8e0388ed84b024fe1a188a26ba999b61d1d9a371318cb89885a8c",
- "sha256:97532802f14d383f37d603a56e226909f825a83ff298dc1b6697de00d2243999",
- "sha256:98ee201a52a7f65608e5494518932e1473fd43535f12cade0a1b4ab32737fe28",
- "sha256:9d2ae79f31da5143e020a8d4fc74e1f0cbcb8011bdf97453c140aa616db51406",
- "sha256:9d38494a8d21c246c535b41ecdb2d562c4b933cf3d68de03e8bc43a0d41be652",
- "sha256:9d41ebb471a6f064c0d1c873c4f7dded733d16ca5db7d551fb04ff3805d87802",
- "sha256:9e09d017e3f4d9bd7d17a30d3f59e4d6d9ba2d2ced280eec2425e84112cf623f",
- "sha256:a6945c2d61c42bb7e818677f43638675b8c1c43e858b67a96df3eb2426a86c9d",
- "sha256:a72e00826a2b032dda3eb25aa3e3579c6d6773d22d8446089a57a123481cc46c",
- "sha256:aa1e626c524d2c7972c0f3a8a575d654a3a9c008370dc2a97e46abd0eaa749b9",
- "sha256:ab095edf1d840a6a6a4307e1a5b907a299a94e7b90e75436ee770b8c35d22a25",
- "sha256:ac2ac84a4950d627d84b61f082eba61314373cfab4b3c264b62efab02ababe83",
- "sha256:ac7187bee72384b9cfedf09a29a3b2b6e8815cc64c095cdc8b5e6aec81e9fd5f",
- "sha256:ae9d83a81b09ce3a817e2cbb23aabc07f86a3abc664c613cd283ce7a03541e95",
- "sha256:afeabb382c1256a7477b739820bce7fe782bb807d82927102cee73e79b41b38b",
- "sha256:b2a4cd924d0e2f4b1a68034abe4cadc73d69ad5f4cf02db6481c0d4d749f548f",
- "sha256:b414ef79f1f06fb90b5165db8aef77512c1a5e3ed1b4807da8476b7e2c853283",
- "sha256:b4ecbba7efd82bd2a4bb88aab7f984eb5470991c1347bdd1f35fb34ea28dba6e",
- "sha256:b61d5096e75fd71018b25da50b82dd70ec39b5e15bb2134daf7eb7bbbc103644",
- "sha256:b629db53fe17e6ce478a969d30bd1d0e8b53238c46e3a9c9db39e8b65a9ef973",
- "sha256:b70b45a40ad0798b69748b34d508259ef2bdc84fb2aad4048bc7c9cafb68ddb3",
- "sha256:b88c3ab98556bc351b36d6208a6089de8c8db14a7f6e1f57f82a334bd2c18f0b",
- "sha256:baf744e5f9d5ee6531deea443be78b36ed1cd36c65a0b95ea4e8d69fa0102268",
- "sha256:bbc7421cbd28b4316d1d017db338039a7943f945c6f2bb15e1439b14b5682d28",
- "sha256:c31272c674f725dfe0f343d73b0abe8c878c646967ec1c6106122faae1efc15b",
- "sha256:c51a899792ee2c696072791e56b2020caff58b275abecbc9ae0cb71af0645c95",
- "sha256:c61e42b4ceb9759727045765e87d51c1bb9f89987aca1fcc8a040232138cad1c",
- "sha256:c7cd0841a586b7105513a7c8c3d5c276f3adc762a072d81ef7fae80632afad1e",
- "sha256:c827a931c6b57f50f1bb5de400dcfb00bad8117e3753e80b96adb72d9d811514",
- "sha256:d2aa3ca9552f83b0b4fa6ca8c6ce08da6580f37e3e0ab7afac73a1cfdc230c0e",
- "sha256:d46ee458452727a147d7897bb33886981ae1235775e05decae5d5d07f537695a",
- "sha256:d64a657de7aae8db2da60dc0c9e4638a0c3893b4d60101fd564a3362b2bfeb34",
- "sha256:d800a8e2ac62db1b9ea5d6d1724f1a93c53907ca061de4d05ed94e8dfa79050c",
- "sha256:d9d7ebcd11ea76ba0feaae98485cd8e31467c3d7985210fab46983278214736b",
- "sha256:dd7d3608589072f63078b4063a6c536af832e76b0b3885f1bfe9e892abe6c207",
- "sha256:ec19e823b4ccd87bd69e990879acbce9e961fc7aebe150156b8f4418d4b27b7f",
- "sha256:ee40206d1d6e95eaa2b7b919195e3689a5cf6ded730632de7f187f35a1b6052c",
- "sha256:f138f550b83554f5b344d6be35d3ed59348510edc3cb96f75309db6e9bfe8210",
- "sha256:f3e6e2e502c4043c52a99316d89dc49f416acda5b0c6886e0dd8ea7bb35859e8",
- "sha256:fb10bb720348fe1647a94eb605accb9ef6a9b1875d8845f9e763d9d71a706387",
- "sha256:fc066395e6332da1e7525d605b4c96055669f8336600bef8ac569d5226a7c76f",
- "sha256:fc33267d58dfbb2361baed52668c5d8c15d24bc0372cecbb79fed77339b55e0d"
- ],
- "markers": "python_version >= '3.8'",
- "version": "==0.15.2"
+ "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f",
+ "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c",
+ "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76",
+ "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e",
+ "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157",
+ "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f",
+ "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5",
+ "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05",
+ "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24",
+ "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1",
+ "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8",
+ "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b",
+ "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb",
+ "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07",
+ "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1",
+ "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6",
+ "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e",
+ "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e",
+ "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1",
+ "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab",
+ "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4",
+ "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17",
+ "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594",
+ "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d",
+ "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d",
+ "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3",
+ "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c",
+ "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66",
+ "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f",
+ "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80",
+ "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33",
+ "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f",
+ "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c",
+ "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022",
+ "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e",
+ "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f",
+ "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da",
+ "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1",
+ "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688",
+ "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795",
+ "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c",
+ "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98",
+ "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1",
+ "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20",
+ "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307",
+ "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4",
+ "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18",
+ "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294",
+ "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66",
+ "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467",
+ "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948",
+ "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e",
+ "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1",
+ "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0",
+ "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7",
+ "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd",
+ "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641",
+ "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d",
+ "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9",
+ "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1",
+ "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da",
+ "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3",
+ "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa",
+ "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7",
+ "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40",
+ "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496",
+ "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124",
+ "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836",
+ "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434",
+ "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984",
+ "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f",
+ "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6",
+ "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e",
+ "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461",
+ "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c",
+ "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432",
+ "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73",
+ "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58",
+ "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88",
+ "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337",
+ "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7",
+ "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863",
+ "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475",
+ "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3",
+ "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51",
+ "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf",
+ "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024",
+ "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40",
+ "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9",
+ "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec",
+ "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb",
+ "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7",
+ "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861",
+ "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880",
+ "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f",
+ "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd",
+ "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca",
+ "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58",
+ "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==0.18.0"
},
"ruff": {
"hashes": [
- "sha256:05ffe9dbd278965271252704eddb97b4384bf58b971054d517decfbf8c523f05",
- "sha256:5daaeaf00ae3c1efec9742ff294b06c3a2a9db8d3db51ee4851c12ad385cda30",
- "sha256:7d076717c67b34c162da7c1a5bda16ffc205e0e0072c03745275e7eab888719f",
- "sha256:7de792582f6e490ae6aef36a58d85df9f7a0cfd1b0d4fe6b4fb51803a3ac96fa",
- "sha256:a05b0ddd7ea25495e4115a43125e8a7ebed0aa043c3d432de7e7d6e8e8cd6448",
- "sha256:aa8ee4f8440023b0a6c3707f76cadce8657553655dcbb5fc9b2f9bb9bee389f6",
- "sha256:b6a21ab023124eafb7cef6d038f835cb1155cd5ea798edd8d9eb2f8b84be07d9",
- "sha256:bd8ee69b02e7bdefe1e5da2d5b6eaaddcf4f90859f00281b2333c0e3a0cc9cd6",
- "sha256:c8e3255afd186c142eef4ec400d7826134f028a85da2146102a1172ecc7c3696",
- "sha256:ce697c463458555027dfb194cb96d26608abab920fa85213deb5edf26e026664",
- "sha256:db6cedd9ffed55548ab313ad718bc34582d394e27a7875b4b952c2d29c001b26",
- "sha256:e49fbdfe257fa41e5c9e13c79b9e79a23a79bd0e40b9314bc53840f520c2c0b3",
- "sha256:e6f08ca730f4dc1b76b473bdf30b1b37d42da379202a059eae54ec7fc1fbcfed",
- "sha256:f35960b02df6b827c1b903091bb14f4b003f6cf102705efc4ce78132a0aa5af3",
- "sha256:f41f692f1691ad87f51708b823af4bb2c5c87c9248ddd3191c8f088e66ce590a",
- "sha256:f7ee467677467526cfe135eab86a40a0e8db43117936ac4f9b469ce9cdb3fb62",
- "sha256:ff78a7583020da124dd0deb835ece1d87bb91762d40c514ee9b67a087940528b"
+ "sha256:0926cefb57fc5fced629603fbd1a23d458b25418681d96823992ba975f050c2b",
+ "sha256:1c859f294f8633889e7d77de228b203eb0e9a03071b72b5989d89a0cf98ee262",
+ "sha256:2c6e37f2e3cd74496a74af9a4fa67b547ab3ca137688c484749189bf3a686ceb",
+ "sha256:2d9ef6231e3fbdc0b8c72404a1a0c46fd0dcea84efca83beb4681c318ea6a953",
+ "sha256:6e68d248ed688b9d69fd4d18737edcbb79c98b251bba5a2b031ce2470224bdf9",
+ "sha256:9485f54a7189e6f7433e0058cf8581bee45c31a25cd69009d2a040d1bd4bfaef",
+ "sha256:a1eaf03d87e6a7cd5e661d36d8c6e874693cb9bc3049d110bc9a97b350680c43",
+ "sha256:b34510141e393519a47f2d7b8216fec747ea1f2c81e85f076e9f2910588d4b64",
+ "sha256:b90506f3d6d1f41f43f9b7b5ff845aeefabed6d2494307bc7b178360a8805252",
+ "sha256:b92f03b4aa9fa23e1799b40f15f8b95cdc418782a567d6c43def65e1bbb7f1cf",
+ "sha256:baa27d9d72a94574d250f42b7640b3bd2edc4c58ac8ac2778a8c82374bb27984",
+ "sha256:c7d391e5936af5c9e252743d767c564670dc3889aff460d35c518ee76e4b26d7",
+ "sha256:d2921ac03ce1383e360e8a95442ffb0d757a6a7ddd9a5be68561a671e0e5807e",
+ "sha256:d592116cdbb65f8b1b7e2a2b48297eb865f6bdc20641879aa9d7b9c11d86db79",
+ "sha256:eec8d185fe193ad053eda3a6be23069e0c8ba8c5d20bc5ace6e3b9e37d246d3f",
+ "sha256:efd703a5975ac1998c2cc5e9494e13b28f31e66c616b0a76e206de2562e0843c",
+ "sha256:f1ee41580bff1a651339eb3337c20c12f4037f6110a36ae4a2d864c52e5ef954"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
- "version": "==0.1.8"
+ "version": "==0.4.1"
},
"schemathesis": {
"hashes": [
- "sha256:d02fe7aefc071a8a0a81cc76d2e18ac83bc647a3ecb6204e9acae7ef94b10576",
- "sha256:f3be922432cb89e44ef6283891c4bd79fab6975094b6bdfaf7a6932aed7e024a"
+ "sha256:15ce5e2bf8a9c3cce425022db30fd506f241272fd99daec45ba4f491e5a8ea3e",
+ "sha256:d17dfbf0f1e085b14199e831fdc5d524ded7f461e0bdc9b18446e9ad29ff6495"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==3.21.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.27.0"
},
"secretstorage": {
"hashes": [
@@ -5431,16 +5570,17 @@
"sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc",
"sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"
],
+ "index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==3.0.2"
},
"setuptools": {
"hashes": [
- "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2",
- "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"
+ "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987",
+ "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"
],
"markers": "python_version >= '3.8'",
- "version": "==69.0.2"
+ "version": "==69.5.1"
},
"shutilwhich": {
"hashes": [
@@ -5457,21 +5597,13 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
- "smmap": {
- "hashes": [
- "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62",
- "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"
- ],
- "markers": "python_version >= '3.7'",
- "version": "==5.0.1"
- },
"sniffio": {
"hashes": [
- "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101",
- "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"
+ "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2",
+ "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"
],
"markers": "python_version >= '3.7'",
- "version": "==1.3.0"
+ "version": "==1.3.1"
},
"snowballstemmer": {
"hashes": [
@@ -5497,21 +5629,21 @@
},
"sphinx": {
"hashes": [
- "sha256:6d56a34697bb749ffa0152feafc4b19836c755d90a7c59b72bc7dfd371b9cc6b",
- "sha256:97787ff1fa3256a3eef9eda523a63dbf299f7b47e053cfcf684a1c2a8380c912"
+ "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3",
+ "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"
],
"index": "pypi",
- "markers": "python_version >= '3.8'",
- "version": "==6.2.1"
+ "markers": "python_version >= '3.9'",
+ "version": "==7.3.7"
},
"sphinx-autodoc-typehints": {
"hashes": [
- "sha256:5d44e2996633cdada499b6d27a496ddf9dbc95dd1f0f09f7b37940249e61f6e9",
- "sha256:ac099057e66b09e51b698058ba7dd76e57e1fe696cd91b54e121d3dad188f91d"
+ "sha256:46f1a710b3ed35904f63a77c5e68334c5ee1c2e22828b75fdcd147f1c52c199b",
+ "sha256:51bf8dc77c4fba747e32f0735002a91500747d0553cae616863848e8f5e49fe8"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==1.23.0"
+ "markers": "python_version >= '3.9'",
+ "version": "==2.1.0"
},
"sphinx-rtd-theme": {
"hashes": [
@@ -5532,27 +5664,27 @@
},
"sphinxcontrib-applehelp": {
"hashes": [
- "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d",
- "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"
+ "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619",
+ "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"
],
"markers": "python_version >= '3.9'",
- "version": "==1.0.7"
+ "version": "==1.0.8"
},
"sphinxcontrib-devhelp": {
"hashes": [
- "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212",
- "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"
+ "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f",
+ "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"
],
"markers": "python_version >= '3.9'",
- "version": "==1.0.5"
+ "version": "==1.0.6"
},
"sphinxcontrib-htmlhelp": {
"hashes": [
- "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a",
- "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"
+ "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015",
+ "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"
],
"markers": "python_version >= '3.9'",
- "version": "==2.0.4"
+ "version": "==2.0.5"
},
"sphinxcontrib-jquery": {
"hashes": [
@@ -5572,26 +5704,26 @@
},
"sphinxcontrib-plantuml": {
"hashes": [
- "sha256:8975778eed9c142a42ecdc4ecd83132dec0690c035cae788c123750ead526703"
+ "sha256:97a4f2a26af91db88770ccf8a3b2e03305bcda7ec41a7f969fc8cb27b84a3c44"
],
"index": "pypi",
- "version": "==0.27"
+ "version": "==0.29"
},
"sphinxcontrib-qthelp": {
"hashes": [
- "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d",
- "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"
+ "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6",
+ "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"
],
"markers": "python_version >= '3.9'",
- "version": "==1.0.6"
+ "version": "==1.0.7"
},
"sphinxcontrib-serializinghtml": {
"hashes": [
- "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54",
- "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"
+ "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7",
+ "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"
],
"markers": "python_version >= '3.9'",
- "version": "==1.1.9"
+ "version": "==1.1.10"
},
"sphinxcontrib-spelling": {
"hashes": [
@@ -5604,27 +5736,27 @@
},
"starlette": {
"hashes": [
- "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75",
- "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"
+ "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee",
+ "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"
],
- "markers": "python_version >= '3.7'",
- "version": "==0.27.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.37.2"
},
"starlette-testclient": {
"hashes": [
- "sha256:3fb6681d1dc7e9ab6dc05b5ab455b822a03f37e7371316a828e2d8380a198a4a",
- "sha256:dfbcceba46302d58bec086645c789032707a3bb0256d4cf0de66d40c13ded20e"
+ "sha256:31c28c10abd240beb327ef1ee4dc395403c87da07d4665126b7d3c7b60444e04",
+ "sha256:84306a5ca443f81b2d5e838071ed175e9f1ece4ff0806b94778f5122b1c57ee6"
],
"markers": "python_version >= '3.7'",
- "version": "==0.2.0"
+ "version": "==0.3.0"
},
"stevedore": {
"hashes": [
- "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d",
- "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"
+ "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9",
+ "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d"
],
"markers": "python_version >= '3.8'",
- "version": "==5.1.0"
+ "version": "==5.2.0"
},
"telnetlib3": {
"hashes": [
@@ -5635,6 +5767,69 @@
"markers": "python_version >= '3.7'",
"version": "==2.0.4"
},
+ "time-machine": {
+ "hashes": [
+ "sha256:0312b47f220e46f1bbfaded7fc1469882d9c2a27c6daf44e119aea7006b595cc",
+ "sha256:06e913d570d7ee3e199e3316f10f10c8046287049141b0a101197712b4eac106",
+ "sha256:0a39dba3033d9c28347d2db16bcb16041bbf4e9032e2b70023686b6f95deac9d",
+ "sha256:0e120f95c17bf8e0c097fd8863a8eb24054f9b17d9b17c465694be50f8348a3a",
+ "sha256:107caed387438d689180b692e8d84aa1ebe8918790df83dc5e2146e60e5e0859",
+ "sha256:15cf3623a4ba2bb4fce4529295570acd5f6c6b44bcbfd1b8d0756ce56c38fe82",
+ "sha256:19db257117739b2dda1d57e149bb715a593313899b3902a7e6d752c5f1d22542",
+ "sha256:27f735cba4c6352ad7bc53ce2d86b715379261a634e690b79fac329081e26fb6",
+ "sha256:2c774f4b603a36ca2611327c57aa8ce0d5042298da008238ee5234b31ce7b22c",
+ "sha256:30a4a18357fa6cf089eeefcb37e9549b42523aebb5933894770a8919e6c398e1",
+ "sha256:31e6e9bff89b7c6e4cbc169ba1d00d6c107b3abc43173b2799352b6995cf7cb2",
+ "sha256:364353858708628655bf9fa4c2825febd679c729d9e1dd424ff86845828bac05",
+ "sha256:36aa4f17adcd73a6064bf4991a29126cac93521f0690805edb91db837c4e1453",
+ "sha256:39de6d37a14ff8882d4f1cbd50c53268b54e1cf4ef9be2bfe590d10a51ccd314",
+ "sha256:39fceeb131e6c07b386de042ce1016be771576e9516124b78e75cbab94ae5041",
+ "sha256:3b94274abe24b6a90d8a5c042167a9a7af2d3438b42ac8eb5ede50fbc73c08db",
+ "sha256:416d94eab7723c7d8a37fe6b3b1882046fdbf3c31b9abec3cac87cf35dbb8230",
+ "sha256:442d42f1b0ef006f03a5a34905829a1d3ac569a5bcda64d29706e6dc60832f94",
+ "sha256:4f00f67d532da82538c4dfbbddc587e70c82664f168c11e1c2915d0c85ec2fc8",
+ "sha256:528d588d1e8ba83e45319a74acab4be0569eb141113fdf50368045d0a7d79cee",
+ "sha256:57dc7efc1dde4331902d1bdefd34e8ee890a5c28533157e3b14a429c86b39533",
+ "sha256:59a02c3d3b3b29e2dc3a708e775c5d6b951b0024c4013fed883f0d2205305c9e",
+ "sha256:5e19b19d20bfbff8c97949e06e150998cf9d0a676e1641fb90597e59a9d7d5e2",
+ "sha256:5f3d5c21884aee10e13b00ef45fab893a43db9d59ec27271573528bd359b0ef5",
+ "sha256:6706eb06487354a5e219cacea709fb3ec44dec3842c6218237d5069fa5f1ad64",
+ "sha256:6ced9de5eff1fb37efb12984ab7b63f31f0aeadeedec4be6d0404ec4fa91f2e7",
+ "sha256:7161cea2ff3244cc6075e365fab89000df70ead63a3da9d473983d580558d2de",
+ "sha256:72a153b085b4aee652d6b3bf9019ca897f1597ba9869b640b06f28736b267182",
+ "sha256:7fd7d188b4f9d358c6bd477daf93b460d9b244a4c296ddd065945f2b6193c2bd",
+ "sha256:87e80408e6b6670e9ce33f94b1cc6b72b1a9b646f5e19f586908129871f74b40",
+ "sha256:90725f936ad8b123149bc82a46394dd7057e63157ee11ba878164053fa5bd8ad",
+ "sha256:993ab140eb5678d1ee7f1197f08e4499dc8ea883ad6b8858737de70d509ec5b5",
+ "sha256:99e6f013e67c4f74a9d8f57e34173b2047f2ad48f764e44c38f3ee5344a38c01",
+ "sha256:a75e24e59f58059bbbc50e7f97aa6d126bbc2f603a8a5cd1e884beffcf130d8f",
+ "sha256:a927d87501da8b053a27e80f5d0e1e58fbde4b50d70df2d3853ed67e89a731cf",
+ "sha256:adfbfa796dd96383400b44681eacc5ab06d3cbfad39c30878e5ead0bfdca808a",
+ "sha256:b0f8ba70fbb71d7fbc6d6adb90bed72a83db15b3318c7af0060467539b2f1b63",
+ "sha256:b951b6f4b8a752ab8c441df422e21954a721a0a5276aa3814ce8cf7205aeb6da",
+ "sha256:bb3a2518c52aa944989b541e5297b833388eb3fe72d91eb875b21fe771597b04",
+ "sha256:be215eb63d74a3d580f7924bb4209c783fabcfb3253073f4dcb3424d57d0f518",
+ "sha256:c69c0cb498c86ef843cd15964714e76465cc25d64464da57d5d1318f499de099",
+ "sha256:c77a616561dd4c7c442e9eee8cbb915750496e9a5a7fca6bcb11a9860226d2d0",
+ "sha256:cab4abf4d1490a7da35db5a321ff8a4d4a2195f4832a792c75b626ffc4a5584c",
+ "sha256:d45bd60bea85869615b117667f10a821e3b0d3603c47bfd105b45d1f67156fc8",
+ "sha256:d63ef00d389fa6d2c76c863af580b3e4a8f0ccc6a9aea8e64590588e37f13c00",
+ "sha256:dc48d3934109b0bdbbdc5e9ce577213f7148a92fed378420ee13453503fe4db9",
+ "sha256:dd26039a9ffea2d5ee1309f2ec9b656d4925371c65563822d52e4037a4186eca",
+ "sha256:ddbbba954e9a409e7d66d60df2b6b8daeb897f8338f909a92d9d20e431ec70d1",
+ "sha256:e030d2051bb515251d7f6edd9bbcf79b2b47811e2c402aba9c126af713843d26",
+ "sha256:e7fa70a6bdca40cc4a8386fd85bc1bae0a23ab11e49604ef853ab3ce92be127f",
+ "sha256:edea570f3835a036e8860bb8d6eb8d08473c59313db86e36e3b207f796fd7b14",
+ "sha256:ee68597bd3fa5ab94633c8a9d3ebd4032091559610e078381818a732910002bc",
+ "sha256:f5d371a5218318121a6b44c21438258b6408b8bfe7ccccb754cf8eb880505576",
+ "sha256:fb467d6c9e9ab615c8cf22d751d34296dacf801be323a57adeb4ff345cf72473",
+ "sha256:fd8645b820f7895fdafbc4412d1ce376956e36ad4fd05a43269aa06c3132afc3",
+ "sha256:fe508a6c43fb72fa4f66b50b14684cf58d3db95fed617177ec197a7a90427bae"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.8'",
+ "version": "==2.14.1"
+ },
"toml": {
"hashes": [
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
@@ -5661,147 +5856,169 @@
},
"tomlkit": {
"hashes": [
- "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4",
- "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"
+ "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b",
+ "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"
],
"markers": "python_version >= '3.7'",
- "version": "==0.12.3"
+ "version": "==0.12.4"
+ },
+ "trickkiste": {
+ "hashes": [
+ "sha256:2014109041c73c4bad9bfe25c84960ce66dc4b6cc6008e4098000ddca0c6d521",
+ "sha256:8e85e647d1e70ec7633803be7899c56404e98882554ed326e0857520d3b8f7a7"
+ ],
+ "markers": "python_version < '4' and python_full_version >= '3.10.4'",
+ "version": "==0.0.9"
},
"twine": {
"hashes": [
- "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8",
- "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"
+ "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4",
+ "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==4.0.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==5.0.0"
},
"types-awscrt": {
"hashes": [
- "sha256:99778c952e1eae10cc7a53468413001177026c9434345bf00120bb2ea5b79109",
- "sha256:e872b65d041687ec7fb49fb4dcb871ff10ade5efeca02722e037a03bff81db7e"
+ "sha256:3ae374b553e7228ba41a528cf42bd0b2ad7303d806c73eff4aaaac1515e3ea4e",
+ "sha256:64898a2f4a2468f66233cb8c29c5f66de907cf80ba1ef5bb1359aef2f81bb521"
],
"markers": "python_version >= '3.7' and python_version < '4.0'",
- "version": "==0.20.0"
+ "version": "==0.20.9"
},
"types-beautifulsoup4": {
"hashes": [
- "sha256:59980028d29bf55d0db359efa305b75bacf0cb92e3f3f6b3fd408f2531df274c",
- "sha256:8b03b054cb2e62abf82bbbeda57a07257026f4ed9010ef17d8f8eff43bb1f9b7"
+ "sha256:000cdddb8aee4effb45a04be95654de8629fb8594a4f2f1231cff81108977324",
+ "sha256:e37e4cfa11b03b01775732e56d2c010cb24ee107786277bae6bc0fa3e305b686"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==4.12.0.7"
+ "markers": "python_version >= '3.8'",
+ "version": "==4.12.0.20240229"
+ },
+ "types-cffi": {
+ "hashes": [
+ "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0",
+ "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"
+ ],
+ "markers": "python_version >= '3.8'",
+ "version": "==1.16.0.20240331"
},
"types-docutils": {
"hashes": [
- "sha256:4928e790f42b99d5833990f99c8dd9fa9f16825f6ed30380ca981846d36870cd",
- "sha256:a930150d8e01a9170f9bca489f46808ddebccdd8bc1e47c07968a77e49fb9321"
+ "sha256:7716ec6c68b5179b7ba1738cace2f1326e64df9f44b7ab08d9904d32c23fc15f",
+ "sha256:7f6e84ba8fcd2454c5b8bb8d77384d091a901929cc2b31079316e10eb346580a"
],
"index": "pypi",
- "version": "==0.20.0.3"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.21.0.20240423"
},
"types-html5lib": {
"hashes": [
- "sha256:16fe936d99b9f7fc210e2e21a2aed1b6bbbc554ad8242a6ef75f6f2bddb27e58",
- "sha256:80e1a2062d22a3affe5c28d97da30bffbf3a076d393c80fc6f1671216c1bd492"
+ "sha256:22736b7299e605ec4ba539d48691e905fd0c61c3ea610acc59922232dc84cede",
+ "sha256:af5de0125cb0fe5667543b158db83849b22e25c0e36c9149836b095548bf1020"
],
- "version": "==1.1.11.15"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.1.11.20240228"
},
"types-jmespath": {
"hashes": [
- "sha256:d1d3ff9eff8a4abbb4f78e35a02c9efa5743b042d66d21414227c1a1978d71c8",
- "sha256:dfa091f4f4e54f2e74e5667c6ff1d300a24399b43707a3c44254438aa76f3729"
+ "sha256:b4a65a116bfc1c700a4fd9d24e2e397f4a431122e0320a77b7f1989a6b5d819e",
+ "sha256:c3e715fcaae9e5f8d74e14328fdedc4f2b3f0e18df17f3e457ae0a18e245bde0"
],
"index": "pypi",
- "version": "==1.0.2.7"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.0.2.20240106"
},
"types-jsonschema": {
"hashes": [
- "sha256:0de1032d243f1d3dba8b745ad84efe8c1af71665a9deb1827636ac535dcb79c1",
- "sha256:e6d5df18aaca4412f0aae246a294761a92040e93d7bc840f002b7329a8b72d26"
+ "sha256:3a5ed0a72ab7bc304ca4accbb709272c620f396abf2fb19570b80d949e357eb6",
+ "sha256:78dec1d88c5aec77e46e6bddce2a082157ce3059ec7aab19169b13b2ee553a51"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==4.20.0.0"
+ "version": "==4.21.0.20240331"
},
"types-lxml": {
"hashes": [
- "sha256:545097ca5f69d568827416d671285cae203a5b3d99937294e155c4cc0e46e712",
- "sha256:daf1458b7d9b2fb421354137b97c6029c4fe75cef2cbab0d8e144d8c548d9b98"
+ "sha256:7e5f836067cde4fddce3cdbf2bac7192c764bf5ee6d3eb86c732ad1b84f265c5",
+ "sha256:dd8105b579925af1b6ae77469f4fc835be3872b15e86cb46ad4fcc33b20c781d"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==2023.10.21"
+ "version": "==2024.4.14"
},
"types-mypy-extensions": {
"hashes": [
- "sha256:1b2f28994587a8a6656843c013b6834c1408b47fd5751bf1c4d2122884cbcfe4",
- "sha256:ff0b5da4ea9fb2fcdc73aeed68edd232e35eef720ae84457875427633d36f37f"
+ "sha256:5bd57d5c6bf07b5a8db3c1b70df52c1570cab34a69a631e31d755c003d1a9da7",
+ "sha256:a482ec81320658aaf14600f7dd8e6e3bc76394cdae1a8f403c5fa36300068248"
],
"index": "pypi",
- "version": "==1.0.0.5"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.0.0.20240311"
},
"types-oauthlib": {
"hashes": [
- "sha256:132476c79c2fbd691ce70dd06782c5ec852b71df74272101da0602eb20c99078",
- "sha256:e771f8ef199d1e40030f5fd680f9715bbbda68e87e155aabdeff7ed251802d24"
+ "sha256:2b58e68b549f37ea1fe725cd7e78661a58679c16d2e83d53f1e531588ad72223",
+ "sha256:aa8b50a6737a0c75ea2a19e4598cfabd767ca5fba8d5310dc736afaa179db14d"
],
"index": "pypi",
- "version": "==3.2.0.10"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.2.0.20240217"
},
"types-paho-mqtt": {
"hashes": [
- "sha256:50313d93f63d777da391acaac0278d346cf9e4a2576d814989d6500bd0ca4a35",
- "sha256:fe34c68abc849cd96e1482138bbdf5f465de59629dd367cb3a2423dd9ca3220b"
+ "sha256:694eec160340f2a2b151237dcc3f107a63e1c4e5b8f9fcda0ba392049af9cbec",
+ "sha256:cd275c14f39363c2a0f8286ead9a46962e5421ebd477547b892ae016699f5a4a"
],
"index": "pypi",
- "version": "==1.6.0.7"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.6.0.20240321"
},
"types-paramiko": {
"hashes": [
- "sha256:18fe96e6ef78ca04b2ac2a111f9404409b31a1d28bb2f5dca1a1afd62b8351a9",
- "sha256:4615fa0bc5b78c0f1b68b106071dc29737cb2cf53903712df72785dad5b359c3"
+ "sha256:aaa98dda232c47886563d66743d3a8b66c432790c596bc3bdd3f17f91be2a8c1",
+ "sha256:c56e0d43399a1b909901b1e0375e0ff6ee62e16cd6e00695024abc2e9fe02035"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==3.3.0.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.4.0.20240423"
},
"types-pillow": {
"hashes": [
- "sha256:131078ffa547bf9a201d39ffcdc65633e108148085f4f1b07d4647fcfec6e923",
- "sha256:525c1c5ee67b0ac1721c40d2bc618226ef2123c347e527e14e05b920721a13b9"
+ "sha256:696e68b9b6a58548fc307a8669830469237c5b11809ddf978ac77fafa79251cd",
+ "sha256:bd12923093b96c91d523efcdb66967a307f1a843bcfaf2d5a529146c10a9ced3"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==10.1.0.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==10.2.0.20240423"
},
"types-protobuf": {
"hashes": [
- "sha256:131ab7d0cbc9e444bc89c994141327dcce7bcaeded72b1acb72a94827eb9c7af",
- "sha256:57ab42cb171dfdba2c74bb5b50c250478538cc3c5ed95b8b368929ad0c9f90a5"
+ "sha256:e4dc2554d342501d5aebc3c71203868b51118340e105fc190e3a64ca1be43831",
+ "sha256:e6074178109f97efe9f0b20a035ba61d7c3b03e867eb47d254d2b2ab6a805e36"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==4.24.0.4"
+ "markers": "python_version >= '3.8'",
+ "version": "==5.26.0.20240422"
},
"types-psutil": {
"hashes": [
- "sha256:2161d166256084acf629d30aaf6bda8bee726ae1fea530559650281056b491fc",
- "sha256:f7d8769812d72a4b513d7ec9eb5580fe2f6013fc270394a603cb6534811f3e4d"
+ "sha256:1b976cf86308316c5ac22cec688015b04273c84f8e691c3dfb0c12318f32a6f3",
+ "sha256:93a5f61c05d36804ffaf008e4f4f56661e5ab2beb432645ce5789248f64b5d6f"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==5.9.5.17"
+ "markers": "python_version >= '3.8'",
+ "version": "==5.9.5.20240423"
},
"types-pyasn1": {
"hashes": [
- "sha256:023e903f5920ec9585555235f95bb2d2756b7b58023d3f94890ee8d1d4d9d1ff",
- "sha256:1bbbe3fcf16a65064e4a5bd7f1be43c375ba241054f8f361b5e6c61c8deb3935"
+ "sha256:5d54dcb33f69dd269071ca098e923ac20c5f03c814631fa7f3ed9ee035a5da3a",
+ "sha256:848d01e7313c200acc035a8b3d377fe7b2aecbe77f2be49eb160a7f82835aaaf"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==0.5.0.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==0.6.0.20240402"
},
"types-pymssql": {
"hashes": [
@@ -5821,12 +6038,12 @@
},
"types-pyopenssl": {
"hashes": [
- "sha256:00171433653265843b7469ddb9f3c86d698668064cc33ef10537822156130ebf",
- "sha256:5ffb077fe70b699c88d5caab999ae80e192fe28bf6cda7989b7e79b1e4e2dcd3"
+ "sha256:38e75fb828d2717be173770bbae8c22811fdec68e2bc3f5833954113eb84237d",
+ "sha256:4ce41ddaf383815168b6e21d542fd92135f10a5e82adb3e593a6b79638b0b511"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==23.3.0.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==24.0.0.20240417"
},
"types-pysaml2": {
"hashes": [
@@ -5839,79 +6056,75 @@
},
"types-python-dateutil": {
"hashes": [
- "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b",
- "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"
+ "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202",
+ "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"
],
"index": "pypi",
- "version": "==2.8.19.14"
- },
- "types-pytz": {
- "hashes": [
- "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf",
- "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"
- ],
- "index": "pypi",
- "version": "==2023.3.1.1"
+ "markers": "python_version >= '3.8'",
+ "version": "==2.9.0.20240316"
},
"types-pyyaml": {
"hashes": [
- "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062",
- "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"
+ "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342",
+ "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"
],
"index": "pypi",
- "version": "==6.0.12.12"
+ "markers": "python_version >= '3.8'",
+ "version": "==6.0.12.20240311"
},
"types-redis": {
"hashes": [
- "sha256:94fc61118601fb4f79206b33b9f4344acff7ca1d7bba67834987fb0efcf6a770",
- "sha256:c8cfc84635183deca2db4a528966c5566445fd3713983f0034fb0f5a09e0890d"
+ "sha256:963219f01e1a132bd05e6e7b328c432fb099ea885895900bfc6e2a44a449256c",
+ "sha256:f5e372d9db3c2109f579ff9de5073cb2f239689a2dfd68ef2ca8d244172b19a9"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==4.6.0.11"
+ "markers": "python_version >= '3.8'",
+ "version": "==4.6.0.20240423"
},
"types-requests": {
"hashes": [
- "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9",
- "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"
+ "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1",
+ "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==2.31.0.6"
+ "markers": "python_version >= '3.8'",
+ "version": "==2.31.0.20240406"
},
"types-s3transfer": {
"hashes": [
- "sha256:0f78c95c2ee390faad71735df35b6b81fca5bce4b864ac6a7707da2a845a5e86",
- "sha256:241e8b7b209c4064a451897bace1525ba64098a3ae955bdd0fb4b970cc69db73"
+ "sha256:02154cce46528287ad76ad1a0153840e0492239a0887e8833466eccf84b98da0",
+ "sha256:49a7c81fa609ac1532f8de3756e64b58afcecad8767933310228002ec7adff74"
],
"index": "pypi",
- "markers": "python_version >= '3.7' and python_version < '4.0'",
- "version": "==0.9.0"
+ "markers": "python_version >= '3.8' and python_version < '4.0'",
+ "version": "==0.10.1"
},
"types-setuptools": {
"hashes": [
- "sha256:8c86195bae2ad81e6dea900a570fe9d64a59dbce2b11cc63c046b03246ea77bf",
- "sha256:b0a06219f628c6527b2f8ce770a4f47550e00d3e8c3ad83e2dc31bc6e6eda95d"
+ "sha256:a4381e041510755a6c9210e26ad55b1629bc10237aeb9cb8b6bd24996b73db48",
+ "sha256:a7ba908f1746c4337d13f027fa0f4a5bcad6d1d92048219ba792b3295c58586d"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==69.0.0.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==69.5.0.20240423"
},
"types-simplejson": {
"hashes": [
- "sha256:8ba093dc7884f59b3e62aed217144085e675a269debc32678fd80e0b43b2b86f",
- "sha256:ebc81f886f89d99d6b80c726518aa2228bc77c26438f18fd81455e4f79f8ee1b"
+ "sha256:2831366f70d5d55832c3d978dff7c989575d55fb38621d086dbfee7a30c2b500",
+ "sha256:6f06d8e50112bc931863a40e2cf463a99223d55fe07f275826fa969962bb56a4"
],
"index": "pypi",
- "version": "==3.19.0.2"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.19.0.20240310"
},
"types-six": {
"hashes": [
- "sha256:1591a09430a3035326da5fdb71692d0b3cc36b25a440cc5929ca6241f3984705",
- "sha256:746e6c25b8c48b3c8ab9efe7f68022839111de423d35ba4b206b88b12d75f233"
+ "sha256:abc0377990d38e9b37b3333dd115ec960ca9788d78f3d9c7eb3f778cfc6c925c",
+ "sha256:b5a117193ba0dc7a66507925e95e140b2af52731402cdd71ef9f2a4348e01f60"
],
"index": "pypi",
- "version": "==1.16.21.9"
+ "markers": "python_version >= '3.8'",
+ "version": "==1.16.21.20240311"
},
"types-urllib3": {
"hashes": [
@@ -5931,14 +6144,14 @@
},
"typing-extensions": {
"hashes": [
- "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783",
- "sha256:56a8f7a8776ea160e59ef0af6fc3a3a03b7d42156b90e47f0241515fcec620c2",
- "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd",
- "sha256:cc22327e22d9b583d1565ce1ed9f5ecc22831afa743f8789a403cad849fb702b"
+ "sha256:6f1117ac0cbe64536f34520c4688cd144794f9b1d79690bfe0389aa12a347976",
+ "sha256:7427ef26efa5e4e465e3765af0e52d3897e3684c908efe20e3331e1ce51884b3",
+ "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0",
+ "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
- "version": "==4.9.0"
+ "version": "==4.11.0"
},
"ujson": {
"hashes": [
@@ -6013,12 +6226,12 @@
},
"urllib3": {
"hashes": [
- "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07",
- "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"
+ "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84",
+ "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"
],
"index": "pypi",
- "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
- "version": "==1.26.18"
+ "markers": "python_version >= '3.7'",
+ "version": "==2.0.7"
},
"uvicorn": {
"hashes": [
@@ -6030,19 +6243,19 @@
},
"virtualenv": {
"hashes": [
- "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3",
- "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"
+ "sha256:0846377ea76e818daaa3e00a4365c018bc3ac9760cbb3544de542885aad61fb3",
+ "sha256:ec25a9671a5102c8d2657f62792a27b48f016664c6873f6beed3800008577210"
],
"markers": "python_version >= '3.7'",
- "version": "==20.25.0"
+ "version": "==20.26.0"
},
"waitress": {
"hashes": [
- "sha256:7500c9625927c8ec60f54377d590f67b30c8e70ef4b8894214ac6e4cad233d2a",
- "sha256:780a4082c5fbc0fde6a2fcfe5e26e6efc1e8f425730863c04085769781f51eba"
+ "sha256:005da479b04134cdd9dd602d1ee7c49d79de0537610d653674cc6cbde222b8a1",
+ "sha256:2a06f242f4ba0cc563444ca3d1998959447477363a2d7e9b8b4d75d35cfd1669"
],
- "markers": "python_full_version >= '3.7.0'",
- "version": "==2.1.2"
+ "markers": "python_full_version >= '3.8.0'",
+ "version": "==3.0.0"
},
"webob": {
"hashes": [
@@ -6063,102 +6276,118 @@
},
"werkzeug": {
"hashes": [
- "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe",
- "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612"
+ "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795",
+ "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==2.2.3"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.0.2"
},
"yarl": {
"hashes": [
- "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87",
- "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89",
- "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a",
- "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08",
- "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996",
- "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077",
- "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901",
- "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e",
- "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee",
- "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574",
- "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165",
- "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634",
- "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229",
- "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b",
- "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f",
- "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7",
- "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf",
- "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89",
- "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0",
- "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1",
- "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe",
- "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf",
- "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76",
- "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951",
- "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863",
- "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06",
- "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562",
- "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6",
- "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c",
- "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e",
- "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1",
- "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3",
- "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3",
- "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778",
- "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8",
- "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2",
- "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b",
- "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d",
- "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f",
- "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c",
- "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581",
- "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918",
- "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c",
- "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e",
- "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220",
- "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37",
- "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739",
- "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77",
- "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6",
- "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42",
- "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946",
- "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5",
- "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d",
- "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146",
- "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a",
- "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83",
- "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef",
- "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80",
- "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588",
- "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5",
- "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2",
- "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef",
- "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826",
- "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05",
- "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516",
- "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0",
- "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4",
- "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2",
- "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0",
- "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd",
- "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8",
- "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b",
- "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1",
- "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"
- ],
- "index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==1.8.2"
+ "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51",
+ "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce",
+ "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559",
+ "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0",
+ "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81",
+ "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc",
+ "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4",
+ "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c",
+ "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130",
+ "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136",
+ "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e",
+ "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec",
+ "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7",
+ "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1",
+ "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455",
+ "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099",
+ "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129",
+ "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10",
+ "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142",
+ "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98",
+ "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa",
+ "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7",
+ "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525",
+ "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c",
+ "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9",
+ "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c",
+ "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8",
+ "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b",
+ "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf",
+ "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23",
+ "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd",
+ "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27",
+ "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f",
+ "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece",
+ "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434",
+ "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec",
+ "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff",
+ "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78",
+ "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d",
+ "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863",
+ "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53",
+ "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31",
+ "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15",
+ "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5",
+ "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b",
+ "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57",
+ "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3",
+ "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1",
+ "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f",
+ "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad",
+ "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c",
+ "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7",
+ "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2",
+ "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b",
+ "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2",
+ "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b",
+ "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9",
+ "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be",
+ "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e",
+ "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984",
+ "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4",
+ "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074",
+ "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2",
+ "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392",
+ "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91",
+ "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541",
+ "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf",
+ "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572",
+ "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66",
+ "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575",
+ "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14",
+ "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5",
+ "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1",
+ "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e",
+ "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551",
+ "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17",
+ "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead",
+ "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0",
+ "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe",
+ "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234",
+ "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0",
+ "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7",
+ "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34",
+ "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42",
+ "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385",
+ "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78",
+ "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be",
+ "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958",
+ "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749",
+ "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"
+ ],
+ "index": "pypi",
+ "markers": "python_version >= '3.7'",
+ "version": "==1.9.4"
},
"zipp": {
"hashes": [
- "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b",
- "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"
+ "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b",
+ "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"
],
"index": "pypi",
- "markers": "python_version >= '3.7'",
- "version": "==3.15.0"
+ "markers": "python_version >= '3.8'",
+ "version": "==3.18.1"
}
}
}
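The lock file above newly pins time-machine 2.14.1 as a direct ("index": "pypi") dependency. For orientation only, a minimal sketch of the library's typical test usage, assuming nothing about how this repository wires it in:

    # Minimal time-machine sketch (illustrative; not this repo's test code).
    import datetime as dt

    import time_machine

    def test_frozen_clock() -> None:
        # travel() pins the wall clock; tick=False keeps it from advancing.
        with time_machine.travel(dt.datetime(2024, 4, 23, tzinfo=dt.timezone.utc), tick=False):
            assert dt.datetime.now(tz=dt.timezone.utc).year == 2024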
diff --git a/WORKSPACE b/WORKSPACE
index 8ced5dd4294..13c56dd0106 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -3,45 +3,53 @@ workspace(name = "omd_packages")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("//:bazel_variables.bzl", "UPSTREAM_MIRROR_URL")
+RULES_FOREIGN_CC_VERSION = "0.9.0"
+
http_archive(
name = "rules_foreign_cc",
+ patch_args = ["-p1"],
+ patches = ["//omd/packages/foreign_cc:symlink.patch"],
sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
- strip_prefix = "rules_foreign_cc-0.9.0",
+ strip_prefix = "rules_foreign_cc-" + RULES_FOREIGN_CC_VERSION,
urls = [
- "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",
- UPSTREAM_MIRROR_URL + "rules_foreign_cc-" + "0.9.0" + ".tar.gz",
- ]
+ "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/" + RULES_FOREIGN_CC_VERSION + ".tar.gz",
+ UPSTREAM_MIRROR_URL + "rules_foreign_cc-" + RULES_FOREIGN_CC_VERSION + ".tar.gz",
+ ],
)
load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies")
-# These toolchains are configured by defualt by rules_foreign_cc. We need to register
-# those manually because we set `register_toolchains = False` in order to load our own
-# implimentation of the shell toolchain
-register_toolchains("@rules_foreign_cc//toolchains:preinstalled_autoconf_toolchain")
-register_toolchains("@rules_foreign_cc//toolchains:preinstalled_m4_toolchain")
-register_toolchains("@rules_foreign_cc//toolchains:preinstalled_automake_toolchain")
-register_toolchains("@rules_foreign_cc//toolchains:preinstalled_pkgconfig_toolchain")
-
-# Our implimentation of the shell toolchain in order to fix symlinks and other bugs
-register_toolchains("//foreign_cc_adapted:shell_toolchain")
+rules_foreign_cc_dependencies()
-# This sets up some common toolchains for building targets. For more details, please see
-# https://bazelbuild.github.io/rules_foreign_cc/0.9.0/flatten.html#rules_foreign_cc_dependencies
-rules_foreign_cc_dependencies(
- register_toolchains = False,
-)
+RULES_PKG_VERSION = "0.9.1"
http_archive(
name = "rules_pkg",
sha256 = "8f9ee2dc10c1ae514ee599a8b42ed99fa262b757058f65ad3c384289ff70c4b8",
urls = [
- "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.9.1/rules_pkg-0.9.1.tar.gz",
- "https://github.com/bazelbuild/rules_pkg/releases/download/0.9.1/rules_pkg-0.9.1.tar.gz",
- UPSTREAM_MIRROR_URL + "rules_pkg-" + "0.9.1" + ".tar.gz",
+ "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/" + RULES_PKG_VERSION + "/rules_pkg-" + RULES_PKG_VERSION + ".tar.gz",
+ "https://github.com/bazelbuild/rules_pkg/releases/download/" + RULES_PKG_VERSION + "/rules_pkg-" + RULES_PKG_VERSION + ".tar.gz",
+ UPSTREAM_MIRROR_URL + "rules_pkg-" + RULES_PKG_VERSION + ".tar.gz",
],
)
+RULES_RUST_VERSION = "0.36.2"
+
+http_archive(
+ name = "rules_rust",
+ sha256 = "a761d54e49db06f863468e6bba4a13252b1bd499e8f706da65e279b3bcbc5c52",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/rules_rust/releases/download/" + RULES_RUST_VERSION + "/rules_rust-v" + RULES_RUST_VERSION + ".tar.gz",
+ "https://github.com/bazelbuild/rules_rust/releases/download/" + RULES_RUST_VERSION + "/rules_rust-v" + RULES_RUST_VERSION + ".tar.gz",
+ UPSTREAM_MIRROR_URL + "rules_rust-v" + RULES_RUST_VERSION + ".tar.gz",
+ ],
+)
+
+load("//omd/packages/rules:cargo_deps.bzl", "cargo_deps")
+load("//omd/packages/rules:rust_workspace.bzl", "rust_workspace")
+
+rust_workspace()
+
load("@rules_pkg//:deps.bzl", "rules_pkg_dependencies")
rules_pkg_dependencies()
@@ -70,24 +78,35 @@ load(
"LIBGSF_VERSION",
"MOD_FCGID_SHA256",
"MOD_FCGID_VERSION",
+ "MOD_WSGI_SHA256",
+ "MOD_WSGI_VERSION",
"MONITORING_PLUGINS_SHA256",
"MONITORING_PLUGINS_VERSION",
"MSITOOLS_SHA256",
"MSITOOLS_VERSION",
"NAGIOS_SHA256",
"NAGIOS_VERSION",
+ "NET_SNMP_SHA256",
+ "NET_SNMP_VERSION",
"NRPE_SHA256",
"NRPE_VERSION",
"OPENSSL_SHA256",
"OPENSSL_VERSION",
- "PNP4NAGIOS_SHA256",
- "PNP4NAGIOS_VERSION",
"PATCH_SHA256",
"PATCH_VERSION",
+ "PNP4NAGIOS_SHA256",
+ "PNP4NAGIOS_VERSION",
"PYTHON_SHA256",
"PYTHON_VERSION",
+ "REDFISH_MKP_COMMIT_HASH",
+ "REDFISH_MKP_SHA256",
+ "REDFISH_MKP_VERSION",
"REDIS_SHA256",
"REDIS_VERSION",
+ "ROBOTMK_SHA256",
+ "ROBOTMK_VERSION",
+ "RRDTOOL_SHA256",
+ "RRDTOOL_VERSION",
"SNAP7_SHA256",
"SNAP7_VERSION",
"STUNNEL_SHA256",
@@ -96,13 +115,26 @@ load(
"XINETD_VERSION",
"XMLSEC1_SHA256",
"XMLSEC1_VERSION",
- "MOD_WSGI_SHA256",
- "MOD_WSGI_VERSION",
- "NET_SNMP_SHA256",
- "NET_SNMP_VERSION",
- "ROBOTMK_SHA256",
- "ROBOTMK_VERSION",
)
+
+cargo_deps(
+ name = "check-cert-deps",
+ package = "packages/check-cert",
+)
+
+load("@check-cert-deps//:defs.bzl", check_cert_deps = "crate_repositories")
+
+check_cert_deps()
+
+cargo_deps(
+ name = "check-http-deps",
+ package = "packages/check-http",
+)
+
+load("@check-http-deps//:defs.bzl", check_http_deps = "crate_repositories")
+
+check_http_deps()
+
load("//omd/packages/patch:patch_http.bzl", "patch")
patch(
@@ -202,8 +234,8 @@ perl_modules()
load("//omd/packages/crypt-ssleay:cryptssl_http.bzl", "crypt_ssleay")
crypt_ssleay(
- sha256=CRYPT_SSL_SHA256,
- version_str=CRYPT_SSL_VERSION,
+ sha256 = CRYPT_SSL_SHA256,
+ version_str = CRYPT_SSL_VERSION,
)
load("//omd/packages/nrpe:nrpe_http.bzl", "nrpe")
@@ -249,6 +281,7 @@ nagios(
)
load("//omd/packages/python3-modules:create_python_requirements.bzl", "create_python_requirements")
+
create_python_requirements(
name = "python_modules",
# TODO: differentiate between own code and things we get from other omd packages
@@ -257,6 +290,7 @@ create_python_requirements(
"rrdtool", # don't build with pip -> see rrdtool omd packages
"agent-receiver", # don't build with pip (yet)
"werks", # don't build with pip (yet)
+ "netapp-ontap", # their build process is broken, see https://github.com/NetApp/ontap-rest-python/issues/46
],
requirements = "//:Pipfile",
)
@@ -279,5 +313,20 @@ load("//omd/packages/robotmk:robotmk_http.bzl", "robotmk")
robotmk(
sha256 = ROBOTMK_SHA256,
- version_str= ROBOTMK_VERSION
+ version_str = ROBOTMK_VERSION,
+)
+
+load("//omd/packages/rrdtool:rrdtool_http.bzl", "rrdtool")
+
+rrdtool(
+ sha256 = RRDTOOL_SHA256,
+ version_str = RRDTOOL_VERSION,
+)
+
+load("//omd/packages/redfish_mkp:redfish_mkp_http.bzl", "redfish_mkp")
+
+redfish_mkp(
+ commit_hash = REDFISH_MKP_COMMIT_HASH,
+ sha256 = REDFISH_MKP_SHA256,
+ version_str = REDFISH_MKP_VERSION,
)
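Each http_archive in the WORKSPACE diff above pairs a version variable (RULES_FOREIGN_CC_VERSION, RULES_PKG_VERSION, RULES_RUST_VERSION) with a pinned sha256, so neither GitHub nor UPSTREAM_MIRROR_URL can silently serve different bytes. Bazel performs that check itself; a rough Python equivalent, for illustration only, using the rules_rust pin from the hunk:

    # Illustration of the integrity check Bazel applies to http_archive downloads.
    import hashlib
    import urllib.request

    EXPECTED = "a761d54e49db06f863468e6bba4a13252b1bd499e8f706da65e279b3bcbc5c52"
    URL = ("https://github.com/bazelbuild/rules_rust/releases/download/"
           "0.36.2/rules_rust-v0.36.2.tar.gz")

    with urllib.request.urlopen(URL) as resp:  # nosec - illustration only
        digest = hashlib.sha256(resp.read()).hexdigest()
    if digest != EXPECTED:
        raise RuntimeError(f"checksum mismatch for {URL}: got {digest}")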
diff --git a/active_checks/.f12 b/active_checks/.f12
index 536935e1a60..9bd52ac9482 100755
--- a/active_checks/.f12
+++ b/active_checks/.f12
@@ -11,13 +11,12 @@
echo "Installing to $ROOT/lib/nagios/plugins"
make
-RSYNC_OPTS="-a
- --chown=root:root
+RSYNC_OPTS="-rlD
--exclude=.*.swp
--exclude=Makefile
--exclude=*.cc"
# shellcheck disable=SC2086
-sudo rsync $RSYNC_OPTS ./ "$ROOT/lib/nagios/plugins"
+rsync $RSYNC_OPTS ./ "$ROOT/lib/nagios/plugins"
# shellcheck disable=SC2086
-sudo rsync $RSYNC_OPTS --delete-after ./ "$ROOT/share/doc/check_mk/treasures/active_checks"
+rsync $RSYNC_OPTS --delete-after ./ "$ROOT/share/doc/check_mk/treasures/active_checks"
diff --git a/active_checks/check_bi_aggr b/active_checks/check_bi_aggr
index db7eff1d20a..507c89cdb61 100755
--- a/active_checks/check_bi_aggr
+++ b/active_checks/check_bi_aggr
@@ -10,10 +10,11 @@ import subprocess
import sys
import time
import traceback
+from pathlib import Path
import requests
import urllib3
-from requests_kerberos import HTTPKerberosAuth # type: ignore[import]
+from requests_kerberos import HTTPKerberosAuth # type: ignore[import-untyped]
import cmk.utils.password_store
from cmk.utils.crypto.secrets import AutomationUserSecret
@@ -230,9 +231,9 @@ if track_downtime:
)
sys.exit(1)
- socket_path = os.environ["OMD_ROOT"] + "/tmp/run/live"
+ socket_path = Path(os.environ["OMD_ROOT"]) / "tmp/run/live"
- conn = livestatus.SingleSiteConnection("unix:" + socket_path)
+ conn = livestatus.SingleSiteConnection(f"unix:{socket_path}")
now = time.time()
# find out if, according to previous tracking, there already is a downtime
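The pathlib switch in check_bi_aggr works because Path's / operator accepts multi-segment strings on the right-hand side; a quick sketch with a hypothetical OMD_ROOT value:

    from pathlib import Path

    root = Path("/omd/sites/mysite")  # hypothetical OMD_ROOT
    sock = root / "tmp/run/live"      # '/' joins multi-segment strings cleanly
    assert f"unix:{sock}" == "unix:/omd/sites/mysite/tmp/run/live"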
diff --git a/active_checks/check_elasticsearch_query b/active_checks/check_elasticsearch_query
index 2ed729d0cf3..7577427318c 100755
--- a/active_checks/check_elasticsearch_query
+++ b/active_checks/check_elasticsearch_query
@@ -6,22 +6,22 @@
import argparse
import sys
import urllib.parse
+from pathlib import Path
import requests
import urllib3
-import cmk.utils.password_store
-from cmk.utils.exceptions import MKGeneralException
+from cmk.utils import password_store
-cmk.utils.password_store.replace_passwords()
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def main():
args = parse_arguments()
+ auth = _make_auth(args.user, args.password, args.password_id)
try:
- msg, state, perfdata = handle_request(args)
+ msg, state, perfdata = handle_request(args, auth)
except Exception as exc:
sys.stdout.write("UNKNOWN - %s\n" % exc)
return 3
@@ -30,7 +30,22 @@ def main():
sys.exit(state)
-def handle_request(args):
+def _make_auth(
+ user: str | None,
+ password: str | None,
+ password_ref: str | None,
+) -> tuple[str, str] | None:
+ if user is None:
+ return None
+ if password is not None:
+ return (user, password)
+ if password_ref is not None:
+ pw_id, pw_file = password_ref.split(":", 1)
+ return (user, password_store.lookup(Path(pw_file), pw_id))
+ return None
+
+
+def handle_request(args: argparse.Namespace, auth: tuple[str, str] | None) -> tuple[str, int, str]:
url = urllib.parse.urlunparse(
(
args.protocol,
@@ -56,21 +71,20 @@ def handle_request(args):
if args.fieldname:
query["query"]["bool"]["must"][0]["query_string"]["fields"] = args.fieldname.split(" ")
- if args.user and args.password:
- raw_response = requests.get(url, json=query, auth=(args.user, args.password)) # nosec B113
- else:
- raw_response = requests.get(url, json=query) # nosec B113
+ raw_response = requests.get(url, json=query, auth=auth) # nosec B113
- msg, state, perfdata = handle_query(raw_response, args.warn, args.crit, args.timerange)
+ msg, state, perfdata = handle_query(raw_response, args.warn, args.crit)
return msg, state, perfdata
-def handle_query(raw_response, warn, crit, timerange):
+def handle_query(
+ raw_response: requests.Response, warn: int | None, crit: int | None
+) -> tuple[str, int, str]:
response_data = raw_response.json()
if "count" not in response_data:
- raise MKGeneralException("Missing section count in raw response data")
+ raise ValueError("Missing 'count' section in raw response data")
state = 0
value = response_data["count"]
@@ -101,11 +115,17 @@ def parse_arguments(argv=None):
default=None,
help="Username for elasticsearch login",
)
- parser.add_argument(
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument(
"-s",
"--password",
default=None,
- help="Password for easticsearch login",
+ help="Password for elasticsearch login. Preferred over --password-id",
+ )
+ group.add_argument(
+ "--password-id",
+ default=None,
+ help="Password store reference to the password for elasticsearch login",
)
parser.add_argument(
"-P",
diff --git a/active_checks/check_form_submit b/active_checks/check_form_submit
index 7e8a20d6e4c..6c2c479237e 100755
--- a/active_checks/check_form_submit
+++ b/active_checks/check_form_submit
@@ -3,6 +3,8 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
+# pylint: disable=protected-access
+
"""
This check performs HTTP requests with some advanced features like
a) Detecting, populating and submitting HTML forms
@@ -16,7 +18,6 @@ import enum
import html.parser
import http.cookiejar
import re
-import socket
import ssl
import sys
import urllib.error
@@ -158,9 +159,11 @@ def init_http(validate_server_cert: bool) -> urllib.request.OpenerDirector:
urllib.request.HTTPHandler(debuglevel=0),
urllib.request.HTTPSHandler(
debuglevel=0,
- context=None
- if validate_server_cert
- else ssl._create_unverified_context(), # nosec B323 # BNS:501305
+ context=(
+ None
+ if validate_server_cert
+ else ssl._create_unverified_context() # nosec B323 # BNS:501305
+ ),
),
urllib.request.HTTPCookieProcessor(http.cookiejar.CookieJar()),
)
@@ -201,7 +204,7 @@ def open_url( # type: ignore[no-untyped-def]
except urllib.error.URLError as e:
new_state(2, f"Unable to open {url} : {e.reason}")
- except socket.timeout as e:
+ except TimeoutError as e:
new_state(2, f"Unable to open {url} : {e}")
real_url = fd.geturl()
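Catching the builtin instead of socket.timeout is a behavior-neutral cleanup: since Python 3.10, socket.timeout is an alias of TimeoutError, as a one-liner confirms:

    import socket

    assert socket.timeout is TimeoutError  # holds on Python >= 3.10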
diff --git a/active_checks/check_mail b/active_checks/check_mail
index f67b3ed349a..da919d8bcc4 100755
--- a/active_checks/check_mail
+++ b/active_checks/check_mail
@@ -16,7 +16,7 @@ import time
from email.message import Message as POPIMAPMessage
from pathlib import Path
-from exchangelib import Message as EWSMessage # type: ignore[import]
+from exchangelib import Message as EWSMessage # type: ignore[import-untyped]
from cmk.utils.mailbox import (
active_check_main,
@@ -111,7 +111,7 @@ def syslog_time(localtime: time.struct_time = time.localtime()) -> str:
def _get_imap_or_pop_log_line(msg: POPIMAPMessage, body_limit: int) -> str:
subject = msg.get("Subject", "None")
encoding = msg.get("Content-Transfer-Encoding", "None")
- payload = msg.get_payload()
+ payload = str(msg.get_payload())
# Add the body to the event
if msg.is_multipart():
@@ -121,9 +121,10 @@ def _get_imap_or_pop_log_line(msg: POPIMAPMessage, body_limit: int) -> str:
disposition = str(part.get("Content-Disposition"))
encoding = part.get("Content-Transfer-Encoding", "None")
if content_type == "text/plain" and "attachment" not in disposition:
- payload = part.get_payload()
+ payload = str(part.get_payload())
if encoding == "base64":
+ # note: str() on the decoded bytes yields a repr-style "b'...'" string, not a decode
payload = str(base64.b64decode(payload))
return subject + " | " + payload[:body_limit]
@@ -181,7 +182,7 @@ def forward_to_ec(args: Args, messages: list[str]) -> CheckResult:
if not description:
return "%s/tmp/run/mkeventd/eventsocket" % os.getenv("OMD_ROOT", "")
if description == "spool:":
- return "spool: %s/var/mkeventd/spool" % os.getenv("OMD_ROOT", "")
+ return "spool:%s/var/mkeventd/spool" % os.getenv("OMD_ROOT", "")
if "," in description:
prot_addr_port = description.split(",")
return prot_addr_port[0], prot_addr_port[1], prot_addr_port[3]
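The str(...) wrappers around get_payload() in check_mail keep the old stringly-typed flow, and as the inline note warns, str() on bytes is a repr, not a decode; a quick illustration:

    import base64

    raw = base64.b64decode("SGVsbG8=")
    str(raw)      # "b'Hello'" - repr-style, prefix and quotes included
    raw.decode()  # 'Hello'    - an actual decoding (not what the plugin does)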
diff --git a/active_checks/check_mail_loop b/active_checks/check_mail_loop
index bc5f57858b8..8f498b30f5f 100755
--- a/active_checks/check_mail_loop
+++ b/active_checks/check_mail_loop
@@ -10,23 +10,19 @@ import logging
import os
import re
import time
-from contextlib import suppress
+from collections.abc import Mapping, MutableMapping
+from contextlib import ExitStack, suppress
from email.message import Message as POPIMAPMessage
+from pathlib import Path
-from exchangelib import Message as EWSMessage # type: ignore[import]
+from exchangelib import Message as EWSMessage # type: ignore[import-untyped]
-from cmk.utils.mailbox import (
- active_check_main,
- Args,
- CheckResult,
- FetchMailsError,
- Mailbox,
- MailID,
- MailMessages,
- Message,
-)
+from cmk.utils.mailbox import active_check_main, Args, CheckResult, Mailbox, MailID, Message
-MailDict = dict[str, MailID]
+# "<timestamp>-<key>" -> (sent-timestamp, key)
+MailDict = MutableMapping[str, MailID]
+
+DEPRECATION_AGE = 2 * 3600
def create_argument_parser() -> argparse.ArgumentParser:
@@ -116,14 +112,15 @@ def create_argument_parser() -> argparse.ArgumentParser:
"CRITICAL state",
)
- default_status_dir = (
- (os.environ["OMD_ROOT"] + "/var/check_mk") if "OMD_ROOT" in os.environ else "/tmp" #
- )
parser.add_argument(
"--status-dir",
- type=str,
+ type=Path,
metavar="PATH",
- default=default_status_dir,
+ default=(
+ Path(os.environ["OMD_ROOT"]) / "var/check_mk"
+ if "OMD_ROOT" in os.environ
+ else Path("/tmp") # nosec B108 # BNS:13b2c8
+ ),
help="This plugin needs a file to store information about sent, received "
"and expected mails. Defaults to either '/tmp/' or "
"'/omd/sites//var/check_mk' when executed from within an "
@@ -156,9 +153,9 @@ def create_argument_parser() -> argparse.ArgumentParser:
return parser
-def load_expected_mails(status_path: str) -> MailDict:
+def load_expected_mails(status_path: Path) -> MailDict:
with suppress(IOError):
- with open(status_path) as file:
+ with status_path.open() as file:
return {
ts + "-" + key: (int(ts), int(key)) #
for line in file #
@@ -175,78 +172,23 @@ def save_expected_mails(expected_mails: MailDict, status_path: str) -> None:
file.write("\n")
-def _regex_pattern(subject: str) -> re.Pattern:
- return re.compile(r"(?i)(?:re: |wg: )?%s ([^\s]+) ([^\s]+)" % subject)
-
-
-def subject_and_received_timestamp_from_msg(
- msg: Message, protocol: str, now: int
-) -> tuple[str, int]:
+def subject_and_received_timestamp_from_msg(msg: Message, protocol: str) -> tuple[str, None | int]:
if protocol in {"POP3", "IMAP"}:
assert isinstance(msg, POPIMAPMessage)
- subject = msg.get("Subject", "")
-
if "Received" in msg:
parsed = email.utils.parsedate_tz(msg["Received"].split(";")[-1])
- rx_ts = email.utils.mktime_tz(parsed) if parsed else now
- else:
- # use current time as fallback where no Received header could be found
- rx_ts = now
- elif protocol == "EWS":
- assert isinstance(msg, EWSMessage)
- subject = msg.subject
+ rx_ts = email.utils.mktime_tz(parsed) if parsed else None
+ return msg.get("Subject", ""), rx_ts
+ return msg.get("Subject", ""), None
+ if protocol == "EWS":
+ assert isinstance(msg, EWSMessage)
try:
- rx_ts = int(msg.datetime_received.timestamp()) # cast float to int
+ return msg.subject, int(msg.datetime_received.timestamp())
except Exception:
- rx_ts = now
- else:
- raise NotImplementedError(f"Fetching mails is not implemented for {protocol}")
-
- return subject, rx_ts
-
-
-def fetch_mail_timestamps(
- args: Args,
- mails: MailMessages,
- expected_mails: MailDict,
- protocol: str,
- now: int = int(time.time()),
-) -> tuple[MailDict, MailDict]:
- """Fetch mails and return tuples of dicts containing timestamps of mails which have been
- not expected (anymore) and expected"""
- if not expected_mails:
- return {}, {}
-
- try:
- obsolete_mails: MailDict = {}
- fetched_mails: MailDict = {}
- # Now filter out the messages for this check
- pattern = _regex_pattern(args.subject)
+ return msg.subject, None
- for index, msg in mails.items():
- msg_subject, rx_ts = subject_and_received_timestamp_from_msg(msg, protocol, now)
-
- matches = pattern.match(msg_subject)
- if not matches:
- logging.debug("Skip mail with subject %r", msg_subject)
- continue
-
- ts = matches.group(1).strip()
- key = matches.group(2).strip()
-
- ts_key = f"{ts}-{key}"
- if ts_key not in expected_mails:
- # Delete any "Check_MK-Mail-Loop" messages older than 24 hours, even if
- # they are not in our list
- if args.delete_messages and now - rx_ts > 24 * 3600:
- obsolete_mails[ts_key] = index, rx_ts
- continue
-
- fetched_mails[ts_key] = index, rx_ts
- return obsolete_mails, fetched_mails
- except Exception as exc:
- raise FetchMailsError("Failed to fetch mails: %r" % exc) from exc
+ raise NotImplementedError(f"Fetching mails is not implemented for {protocol}")
def check_mails( # pylint: disable=too-many-branches
@@ -287,7 +229,7 @@ def check_mails( # pylint: disable=too-many-branches
# but keep waiting for other mails which have not yet reached it
if now - send_ts >= critical:
logging.warning(
- "Found mail with critical roundtrip time: %r (%dsec)",
+ "found mail with critical roundtrip time: %r (%dsec)",
ident,
now - send_ts,
)
@@ -339,48 +281,102 @@ def _fetch_config_equals_send_config(args: Args) -> bool:
)
+def subject_regex(subject: str) -> re.Pattern:
+ """Returns regex used for subject matching - extra function for testability"""
+ return re.compile(rf"(?i)(?:re: |wg: )?{subject} ([0-9]+) ([0-9]+)")
+
+
def check_mail_roundtrip(args: Args) -> CheckResult:
# TODO: maybe we should use cmk.utils.paths.tmp_dir?
- status_path = "{}/check_mail_loop{}.status".format(
- args.status_dir,
- (".%s" % args.status_suffix) if args.status_suffix else "",
- )
- logging.debug("status_path: %r", status_path)
+ status_file_components = ("check_mail_loop", args.status_suffix, "status")
+ status_path = args.status_dir / ".".join(filter(bool, status_file_components))
+ logging.debug("status_path: '%s'", status_path)
+
expected_mails = load_expected_mails(status_path) or {}
logging.debug("expected_mails: %r", expected_mails)
- with Mailbox(args) as mailbox:
+ # Store the unmodified list of expected mails for later deletion
+ expected_mails_keys = set(expected_mails.keys())
+
+ # Match subjects of type "[re: |wg: ]Check_MK-Mail-Loop <timestamp> <key>"
+ re_subject = subject_regex(args.subject)
+
+ with ExitStack() as context:
+ mailbox = context.enter_context(Mailbox(args))
mailbox.connect()
- fetched_mail_messages: MailMessages = mailbox.fetch_mails()
- obsolete_mails, fetched_mails = fetch_mail_timestamps(
- args,
- fetched_mail_messages,
- expected_mails,
- mailbox.protocol(),
- )
- logging.debug("obsolete_mails: %r", obsolete_mails)
- logging.debug("fetched_mails: %r", fetched_mails)
+ # re-use already connected Mailbox instance if credentials are the same
if _fetch_config_equals_send_config(args):
- new_mail = mailbox.send_mail(args)
+ send_mailbox = mailbox
else:
- with Mailbox(args, "send") as send_mailbox:
- send_mailbox.connect()
- new_mail = send_mailbox.send_mail(args)
+ send_mailbox = context.enter_context(Mailbox(args, "send"))
+ # note: connect() only has an effect for EWS; IMAP connects
+ # lazily when the email is sent.
+ send_mailbox.connect()
+
+ now = int(time.time())
+
+ def filter_subject(subject: None | str, re_pattern: re.Pattern[str]) -> None | re.Match:
+ if not (match := re_pattern.match(subject or "")):
+ logging.debug("ignore message with subject %r", subject)
+ return None
+ return match
+
+ # create a collection of all mails with their relevant details filtered
+ # by subject
+ # str -> (index, rx-timestamp, subject, raw_message)
+ message_details: Mapping[str, tuple[int, int, str, Message]] = {
+ f"{tx_timestamp}-{key}": (index, rx_timestamp or now, subject, raw_message)
+ # we don't filter for subject here...
+ for index, raw_message in mailbox.fetch_mails().items()
+ for subject, rx_timestamp in (
+ subject_and_received_timestamp_from_msg(raw_message, mailbox.protocol()),
+ )
+ # ... because we need the match groups
+ if (match := filter_subject(subject, re_subject))
+ for tx_timestamp, key in (match.groups(),)
+ }
+ logging.debug("received %d check_mail_loop messages", len(message_details))
+
+ # relevant messages are a subset of above received messages which we expected
+ relevant_mail_loop_messages = {
+ ts_key: (index, rx_timestamp)
+ for ts_key, (index, rx_timestamp, subject, raw_message) in message_details.items()
+ if ts_key in expected_mails
+ }
+ logging.debug("relevant messages: %r", relevant_mail_loop_messages)
+
+ # send a 'sensor-email' with a timestamp we expect to receive next time
+ new_mail = send_mailbox.send_mail(args)
logging.debug("sent new mail: %r", new_mail)
expected_mails.update((new_mail,))
state, output, perfdata = check_mails(
args.warning,
args.critical,
- expected_mails,
- fetched_mails,
+ expected_mails, # WARNING: will be modified!
+ relevant_mail_loop_messages,
)
+
save_expected_mails(expected_mails, status_path)
+
+ deletion_candidates = {
+ index: raw_message
+ for ts_key, (index, rx_timestamp, _subject, raw_message) in message_details.items()
+ if ts_key in expected_mails_keys or now - rx_timestamp > DEPRECATION_AGE
+ }
+ logging.debug(
+ "candidates for deletion (expected messages + those older than %ds): %s",
+ DEPRECATION_AGE,
+ list(deletion_candidates.keys()),
+ )
if args.delete_messages:
# Do not delete all messages in the inbox. Only the ones which were
# processed before! In the meantime new ones might have come in.
- mailbox.delete_mails(fetched_mail_messages)
+ logging.debug("delete messages...")
+ mailbox.delete_mails(deletion_candidates)
+ else:
+ logging.debug("deletion not active (--delete-messages not provided)")
return state, output, perfdata
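The message_details comprehension above leans on single-element tuples (for x in (expr,)) to bind intermediate results to names mid-comprehension; a stripped-down sketch of the idiom:

    # Bind intermediates inside a comprehension via one-element tuples.
    items = ["1700000000-42", "1700000100-7"]
    parsed = {
        s: (ts, key)
        for s in items
        for ts, key in (tuple(map(int, s.split("-"))),)  # one-element tuple
    }
    assert parsed["1700000000-42"] == (1700000000, 42)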
diff --git a/active_checks/check_mailboxes b/active_checks/check_mailboxes
index 84ed8a47135..c5295e6afb5 100755
--- a/active_checks/check_mailboxes
+++ b/active_checks/check_mailboxes
@@ -53,6 +53,7 @@ def create_argument_parser() -> argparse.ArgumentParser:
"--mailbox",
type=str,
nargs="+",
+ action="extend",
help="mailbox to check. Can appear repeatedly to monitor multiple mailboxes",
)
diff --git a/active_checks/check_notify_count b/active_checks/check_notify_count
index eb9b420397c..413075dea15 100755
--- a/active_checks/check_notify_count
+++ b/active_checks/check_notify_count
@@ -7,6 +7,7 @@ import getopt
import os
import sys
import time
+from pathlib import Path
import livestatus
@@ -42,7 +43,7 @@ except getopt.GetoptError as err:
sys.stderr.write("%s\n" % err)
sys.exit(1)
-socket_path = ""
+socket_path = None
timerange = 60
warn, crit = None, None
@@ -57,16 +58,17 @@ for o, a in opts:
elif o == "-c":
crit = int(a)
elif o == "-l":
- socket_path = a
+ socket_path = Path(a)
if len(args) == 1:
sys.stderr.write("ERROR: No service pattern given.\n")
sys.exit(1)
-if not socket_path and "OMD_ROOT" in os.environ:
- socket_path = os.environ["OMD_ROOT"] + "/tmp/run/live"
+if socket_path is None and "OMD_ROOT" in os.environ:
+ socket_path = Path(os.environ["OMD_ROOT"]) / "tmp/run/live"
-if not os.path.exists(socket_path):
+# If socket_path is still None here, OMD_ROOT was not set either, so treat it as an error
+if socket_path is None or not socket_path.exists():
sys.stderr.write("ERROR: Livestatus socket (%s) does not exist\n" % socket_path)
sys.exit(1)
@@ -78,7 +80,7 @@ query = (
"Stats: state != 999\n"
) % (int(time.time() - (timerange * 60)))
-c = livestatus.SingleSiteConnection("unix:" + socket_path)
+c = livestatus.SingleSiteConnection(f"unix:{socket_path}")
total_num = 0
perfdata = []
diff --git a/active_checks/check_sftp b/active_checks/check_sftp
index 05bd86a2dbe..8ab3ec69dda 100755
--- a/active_checks/check_sftp
+++ b/active_checks/check_sftp
@@ -3,294 +3,9 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-import getopt
-import os
import sys
-import time
-
-import paramiko
-
-from cmk.utils.password_store import replace_passwords
-
-
-def usage():
- sys.stderr.write(
- """
-USAGE: check_sftp [OPTIONS] HOST
-
-OPTIONS:
- --host HOST SFTP server address
- --user USER Username for sftp login
- --secret SECRET Secret/Password for sftp login
- --port PORT Alternative port number (default is 22 for the connection)
- --get-remote FILE Path to the file which to pull from SFTP server (e.g.
- /tmp/testfile.txt)
- --get-local PATH Path to store the pulled file locally (e.g. $OMD_ROOT/tmp/)
- --put-local FILE Path to the file to push to the sftp server. See above for example
- --put-remote PATH Path to save the pushed file (e.g. /tmp/)
- --get-timestamp PATH Path to the file for getting the timestamp of this file
- --timeout SECONDS Set timeout for connection (default is 10 seconds)
- --verbose Output some more detailed information
- --look-for-keys Search for discoverable keys in the user's "~/.ssh" directory
- -h, --help Show this help message and exit
- """
- )
- sys.exit(1)
-
-
-def connection(opt_host, opt_user, opt_pass, opt_port, opt_timeout, opt_look_for_keys):
- client = paramiko.SSHClient()
- client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # nosec B507
- client.connect(
- opt_host,
- username=opt_user,
- password=opt_pass,
- port=opt_port,
- timeout=opt_timeout,
- look_for_keys=opt_look_for_keys,
- )
- return client
-
-
-def get_paths(
- opt_put_local,
- opt_get_local,
- opt_put_remote,
- opt_get_remote,
- opt_timestamp,
- omd_root,
- working_dir,
-):
- paths = {}
- if opt_put_local:
- put_filename = opt_put_local.split("/")[-1]
- paths["put_filename"] = put_filename
- paths["local_put_path"] = f"{omd_root}/{opt_put_local}"
- if len(opt_put_remote) > 0:
- paths["remote_put_path"] = f"{working_dir}/{opt_put_remote}/{put_filename}"
- else:
- paths["remote_put_path"] = f"{working_dir}/{put_filename}"
-
- if opt_get_remote:
- get_filename = opt_get_remote.split("/")[-1]
- paths["get_filename"] = get_filename
- paths["remote_get_path"] = f"{working_dir}/{opt_get_remote}"
- if len(opt_get_local) > 0:
- paths["local_get_path"] = f"{omd_root}/{opt_get_local}/{get_filename}"
- else:
- paths["local_get_path"] = f"{omd_root}/{get_filename}"
-
- if opt_timestamp:
- paths["timestamp_filename"] = opt_timestamp.split("/")[-1]
- paths["timestamp_path"] = f"{working_dir}/{opt_timestamp}"
-
- return paths
-
-
-def file_available(opt_put_local, opt_put_remote, sftp, working_dir):
- filename = opt_put_local.split("/")[-1]
- return filename in sftp.listdir(f"{working_dir}/{opt_put_remote}")
-
-
-def create_testfile(paths):
- path = paths["local_put_path"]
- if not os.path.isfile(path):
- with open(path, "w") as f:
- f.write("This is a test by Check_MK\n")
-
-
-def put_file(sftp, paths):
- sftp.put(paths["local_put_path"], paths["remote_put_path"])
-
-
-def get_file(sftp, paths):
- sftp.get(paths["remote_get_path"], paths["local_get_path"])
-
-
-def get_timestamp(sftp, paths):
- return sftp.stat(paths["timestamp_path"])
-
-
-def output_check_result(s):
- sys.stdout.write("%s\n" % s)
-
-
-def parse_arguments(sys_args): # pylint: disable=too-many-branches
- if sys_args is None:
- sys_args = sys.argv[1:]
-
- opt_host = None
- opt_user = None
- opt_pass = None
- opt_port = 22
- opt_get_remote = None
- opt_get_local = None
- opt_put_local = None
- opt_put_remote = None
- opt_timestamp = None
- opt_timeout = 10.0
- opt_verbose = False
- opt_look_for_keys = False
-
- short_options = "hv"
- long_options = [
- "host=",
- "user=",
- "secret=",
- "port=",
- "get-remote=",
- "get-local=",
- "put-local=",
- "put-remote=",
- "get-timestamp=",
- "verbose",
- "help",
- "timeout=",
- "look-for-keys",
- ]
-
- try:
- opts, _args = getopt.getopt(sys_args, short_options, long_options)
- except getopt.GetoptError as err:
- sys.stderr.write("%s\n" % err)
- sys.exit(1)
-
- for opt, arg in opts:
- if opt in ["-h", "help"]:
- usage()
- elif opt in ["--host"]:
- opt_host = arg
- elif opt in ["--user"]:
- opt_user = arg
- elif opt in ["--secret"]:
- opt_pass = arg
- elif opt in ["--port"]:
- opt_port = int(arg)
- elif opt in ["--timeout"]:
- opt_timeout = float(arg)
- elif opt in ["--put-local"]:
- opt_put_local = arg
- elif opt in ["--put-remote"]:
- opt_put_remote = arg
- elif opt in ["--get-local"]:
- opt_get_local = arg
- elif opt in ["--get-remote"]:
- opt_get_remote = arg
- elif opt in ["--get-timestamp"]:
- opt_timestamp = arg
- elif opt in ["--look-for-keys"]:
- opt_look_for_keys = True
- elif opt in ["-v", "--verbose"]:
- opt_verbose = True
-
- return (
- opt_host,
- opt_user,
- opt_pass,
- opt_port,
- opt_get_remote,
- opt_get_local,
- opt_put_local,
- opt_put_remote,
- opt_timestamp,
- opt_timeout,
- opt_verbose,
- opt_look_for_keys,
- )
-
-
-def main(sys_args=None):
- (
- opt_host,
- opt_user,
- opt_pass,
- opt_port,
- opt_get_remote,
- opt_get_local,
- opt_put_local,
- opt_put_remote,
- opt_timestamp,
- opt_timeout,
- opt_verbose,
- opt_look_for_keys,
- ) = parse_arguments(sys_args)
-
- messages = []
- states = []
- try: # Establish connection
- client = connection(opt_host, opt_user, opt_pass, opt_port, opt_timeout, opt_look_for_keys)
- sftp = client.open_sftp()
- messages.append("Login successful")
- states.append(0)
- except Exception:
- if opt_verbose:
- raise
- return 2, "Connection failed!"
-
- # Let's prepare for some other tests...
- omd_root = os.getenv("OMD_ROOT")
- sftp.chdir(".")
- working_dir = sftp.getcwd()
- paths = get_paths(
- opt_put_local,
- opt_get_local,
- opt_put_remote,
- opt_get_remote,
- opt_timestamp,
- omd_root,
- working_dir,
- )
- testfile_remote = True
-
- # .. and eventually execute them!
- try: # Put a file to the server
- if opt_put_local is not None:
- create_testfile(paths)
- testfile_remote = file_available(opt_put_local, opt_put_remote, sftp, working_dir)
- put_file(sftp, paths)
- states.append(0)
- messages.append("Successfully put file to SFTP server")
- except Exception:
- if opt_verbose:
- raise
- states.append(2)
- messages.append("Could not put file to SFTP server! (!!)")
-
- try: # Get a file from the server
- if opt_get_remote is not None:
- get_file(sftp, paths)
- states.append(0)
- messages.append("Successfully got file from SFTP server")
- except Exception:
- if opt_verbose:
- raise
- states.append(2)
- messages.append("Could not get file from SFTP server! (!!)")
-
- try: # Get timestamp of a remote file
- if opt_timestamp is not None:
- file_stats = get_timestamp(sftp, paths)
- states.append(0)
- messages.append(
- "Timestamp of {} is: {}".format(
- paths["timestamp_filename"], time.ctime(file_stats.st_mtime)
- )
- )
- except Exception:
- if opt_verbose:
- raise
- states.append(2)
- messages.append("Could not get timestamp of file! (!!)")
-
- # Remove useless files
- if not testfile_remote:
- sftp.remove(paths["remote_put_path"])
-
- return max(states), ", ".join(messages)
+from cmk.active_checks.check_sftp import main
if __name__ == "__main__":
- replace_passwords()
- exitcode, info = main()
- output_check_result(info)
- sys.exit(exitcode)
+ sys.exit(main())
diff --git a/active_checks/check_sql b/active_checks/check_sql
index 4c07c5d7541..a232e599967 100755
--- a/active_checks/check_sql
+++ b/active_checks/check_sql
@@ -228,7 +228,7 @@ def _default_execute(
def postgres_connect(host: str, port: int, db_name: str, user: str, pwd: str) -> Any:
- import psycopg2 # type: ignore[import] # pylint: disable=import-outside-toplevel
+ import psycopg2 # type: ignore[import-untyped] # pylint: disable=import-outside-toplevel
return psycopg2.connect(host=host, port=port, database=db_name, user=user, password=pwd)
@@ -252,7 +252,7 @@ def mysql_execute(
def mssql_connect(host: str, port: int, db_name: str, user: str, pwd: str) -> Any:
- import pymssql # type: ignore[import] # pylint: disable=import-outside-toplevel
+ import pymssql # type: ignore[import-untyped] # pylint: disable=import-outside-toplevel
return pymssql.connect(host=host, port=port, database=db_name, user=user, password=pwd)
@@ -276,7 +276,7 @@ def oracle_connect(host: str, port: int, db_name: str, user: str, pwd: str) -> A
f"/usr/lib/python{sys.version_info.major}.{sys.version_info.minor}/site-packages"
)
try:
- import oracledb # type: ignore[import] # pylint: disable=import-error,import-outside-toplevel
+ import oracledb # type: ignore[import-not-found] # pylint: disable=import-error,import-outside-toplevel
except ImportError as exc:
bail_out(3, "%s. Please install it via 'pip install oracledb'." % exc)
@@ -325,8 +325,8 @@ def oracle_execute(
def db2_connect(host: str, port: int, db_name: str, user: str, pwd: str) -> Any:
# IBM data server driver
try:
- import ibm_db # type: ignore[import] # pylint: disable=import-error,import-outside-toplevel
- import ibm_db_dbi # type: ignore[import] # pylint: disable=import-error,import-outside-toplevel
+ import ibm_db # type: ignore[import-not-found] # pylint: disable=import-error,import-outside-toplevel
+ import ibm_db_dbi # type: ignore[import-not-found] # pylint: disable=import-error,import-outside-toplevel
except ImportError as exc:
bail_out(3, "%s. Please install it via pip." % exc)
@@ -346,7 +346,7 @@ def db2_execute(
def sqlanywhere_connect(host: str, port: int, db_name: str, user: str, pwd: str) -> Any:
try:
- import sqlanydb # type: ignore[import] # pylint: disable=import-error,import-outside-toplevel
+ import sqlanydb # type: ignore[import-not-found] # pylint: disable=import-error,import-outside-toplevel
except ImportError as exc:
bail_out(3, "%s. Please install it via 'pip install sqlanydb'." % exc)
return sqlanydb.connect(uid=user, pwd=pwd, dbn=db_name, host=f"{host}:{port}")
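
The narrowed ignore comments above follow mypy's split of the former blanket import error code: import-untyped marks a module that is installed but ships no type information, while import-not-found marks one the checker cannot resolve at all. A sketch of the distinction; which code actually fires depends on the local type-checking environment:

    # Same modules as above; runtime absence is tolerated, as in the check itself.
    try:
        import psycopg2  # type: ignore[import-untyped]  # installed, but without type stubs
    except ImportError:
        psycopg2 = None
    try:
        import ibm_db  # type: ignore[import-not-found]  # typically not resolvable for mypy at all
    except ImportError:
        ibm_db = None
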
diff --git a/active_checks/check_uniserv b/active_checks/check_uniserv
index 3a7793f0faf..b0db3e3a002 100755
--- a/active_checks/check_uniserv
+++ b/active_checks/check_uniserv
@@ -6,66 +6,59 @@
import re
import socket
import sys
+from collections.abc import Mapping
-def parse_response(data):
+def parse_response(data: str) -> Mapping[str, str]:
try:
parsed = dict([x.split("=") for x in data.split(";")][:-1])
- response_type = parsed["type"]
+ if parsed["type"] == "1":
+ bail_out(3, f"Invalid response: {data!r}")
except (ValueError, KeyError):
- sys.stdout.write("Invalid data: %s\n" % data)
- sys.exit(3)
+ bail_out(3, f"Invalid data: {data!r}")
- if response_type == "1":
- sys.stdout.write("Invalid response: %s\n" % data)
- sys.exit(3)
return parsed
-def send_and_receive(sock, request_str):
- encoding = "utf-8"
- sock.send(request_str.encode(encoding))
- answer = sock.recv(1024)
- return parse_response(answer.decode(encoding))
+def send_and_receive(sock: socket.socket, request_str: str) -> Mapping[str, str]:
+ sock.send(f"{request_str}\n".encode())
+ return parse_response(sock.recv(1024).decode())
-def check_job(job, s, sid, street, street_nr, city, regex):
+def check_job(
+ job: str,
+ tcp_socket: socket.socket,
+ sid: str,
+ street: str,
+ street_nr: str,
+ city: str,
+ regex: str,
+) -> tuple[int, str]:
if job == "VERSION":
- sendstring = "version:session=" + sid + "\n"
- data = send_and_receive(s, sendstring)
+ data = send_and_receive(tcp_socket, f"version:session={sid}")
try:
- version = data["version_str"]
+ return 0, f"Version: {data['version_str']}"
except KeyError:
return 3, "Unknown version"
- return 0, "Version: %s" % version
if job == "ADDRESS":
- sendstring = (
- "exec:session={};request_type=check_address;in_str={};in_hno={};in_city={}\n".format(
- sid,
- street,
- street_nr,
- city,
- )
+ _data = send_and_receive(
+ tcp_socket,
+ f"exec:session={sid};request_type=check_address;in_str={street};"
+ f"in_hno={street_nr};in_city={city}",
)
- _data = send_and_receive(s, sendstring)
- sendstring = "fetch:session=" + sid + ";out_zip=?;out_city=?\n"
- data = send_and_receive(s, sendstring)
+ data = send_and_receive(tcp_socket, f"fetch:session={sid};out_zip=?;out_city=?")
try:
- city_zip = data["out_zip"]
- city_name = data["out_city"]
+ infotext = f"Address: {data['out_zip']} {data['out_city']}"
except KeyError:
return 3, "Unknown zip or city"
- state = 0
- infotext = f"Address: {city_zip} {city_name}"
- if not re.match(regex, data["out_city"]):
- infotext += " but expects %s" % regex
- state = 2
+ if re.match(regex, data["out_city"]):
+ return 0, infotext
- return state, infotext
+ return 2, f"{infotext} but expects {regex}"
return 3, "Unknown job"
@@ -93,35 +86,40 @@ def parse_arguments(sys_args):
city = sys_args[6]
regex = sys_args[7]
except (IndexError, ValueError):
- sys.stdout.write(
- " Usage: check_uniserv HOSTNAME PORT SERVICE (VERSION|ADDRESS STREET NR CITY SEARCH_REGEX)\n"
+ bail_out(
+ 3,
+ (
+ " Usage: check_uniserv HOSTNAME PORT SERVICE"
+ " (VERSION|ADDRESS STREET NR CITY SEARCH_REGEX)"
+ ),
)
- sys.exit(3)
return host, tcp_port, service, job, street, street_nr, city, regex
+def bail_out(result: int, message: str) -> None:
+ sys.stdout.write(f"{message}\n")
+ raise SystemExit(result)
+
+
def main(sys_args=None):
host, tcp_port, service, job, street, street_nr, city, regex = parse_arguments(sys_args)
- s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- s.connect((host, tcp_port))
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as tcp_socket:
+ tcp_socket.connect((host, tcp_port))
- sendstring = "open:service=" + service + ";servicehost=" + host + "\n"
- data = send_and_receive(s, sendstring)
- sid = data.get("session")
- if not sid:
- return 3, "Error getting SID. Response was: %s\n" % data
+ data = send_and_receive(tcp_socket, f"open:service={service};servicehost={host}")
+ if not (sid := data.get("session")):
+ return 3, f"Error getting SID. Response was: {data}"
- state, infotext = check_job(job, s, sid, street, street_nr, city, regex)
+ state, infotext = check_job(job, tcp_socket, sid, street, street_nr, city, regex)
- s.send("close:session=" + sid + "\n")
- s.close()
+ tcp_socket.send(f"close:session={sid}\n".encode())
- return state, "%s\n" % infotext
+ return state, infotext
if __name__ == "__main__":
exitcode, info = main()
- sys.stdout.write("%s\n" % info)
+ sys.stdout.write(f"{info}\n")
sys.exit(exitcode)
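
The wire format implied by parse_response above is a semicolon-separated list of key=value pairs whose last element is discarded ([:-1]) because it is not a pair. A standalone round-trip under that assumption (the trailing END token is invented for illustration):

    def parse_response(data):
        # Drop whatever follows the last ';': it is not a key=value pair.
        return dict(x.split("=") for x in data.split(";")[:-1])

    print(parse_response("type=2;session=42;END"))  # {'type': '2', 'session': '42'}
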
diff --git a/agents/.f12 b/agents/.f12
index 0467fb519b0..85f40cec549 100755
--- a/agents/.f12
+++ b/agents/.f12
@@ -13,11 +13,11 @@
ROOT=/omd/sites/$SITE
# don't delete *.deb and *.rpm files as those are part of the distribution and can not be found in the source directory
-sudo rsync -varx --delete \
+rsync --verbose --recursive --links --devices --specials --one-file-system --delete \
--exclude=.f12* \
--exclude=__init__.py* \
--exclude cmk-agent-ctl \
- --exclude check-sql \
+ --exclude mk-sql \
--exclude windows \
--exclude linux \
--exclude=.gitignore \
diff --git a/agents/Makefile b/agents/Makefile
index bfe29de44b9..02e6d6972e4 100644
--- a/agents/Makefile
+++ b/agents/Makefile
@@ -24,14 +24,14 @@ AGENT_CTL := $(AGENTS_DIR)/linux/cmk-agent-ctl
AGENT_CTL_GZ := $(AGENT_CTL).gz
AGENT_CTL_DIR := $(REPO_PATH)/packages/cmk-agent-ctl
-CHECK_SQL := $(AGENTS_DIR)/linux/check-sql
-CHECK_SQL_DIR := $(REPO_PATH)/packages/check-sql
+MK_SQL := $(AGENTS_DIR)/linux/mk-sql
+MK_SQL_DIR := $(REPO_PATH)/packages/mk-sql
BUILDROOT_DEPS := \
conffiles/super-server.cfg \
check_mk_agent.linux \
check_mk_caching_agent.linux \
- $(CHECK_SQL) \
+ $(MK_SQL) \
$(AGENT_CTL_GZ) \
mk-job \
waitmax \
@@ -89,9 +89,9 @@ setversion:
sed -i 's/define ProductName = "Checkmk Agent [^"]*"/define ProductName = "Checkmk Agent $(NEW_VERSION_MAJOR)"/' wnx/install/version.wxi
sed -i 's/define ServiceDescription = "Checkmk monitoring agent service - [^"]*"/define ServiceDescription = "Checkmk monitoring agent service - $(NEW_VERSION_MAJOR)"/' wnx/install/Product.wxs
sed -i 's/pub const VERSION: &str = "[^"]*";/pub const VERSION: \&str = "$(NEW_VERSION)";/' $(AGENT_CTL_DIR)/src/constants.rs
- sed -i 's/pub const VERSION: &str = "[^"]*";/pub const VERSION: \&str = "$(NEW_VERSION)";/' $(CHECK_SQL_DIR)/src/constants.rs
+ sed -i 's/pub const VERSION: &str = "[^"]*";/pub const VERSION: \&str = "$(NEW_VERSION)";/' $(MK_SQL_DIR)/src/constants.rs
-build: $(DEB_PACKAGE) $(RPM_PACKAGE) $(WIN_PACKAGE) $(AGENT_CTL) $(CHECK_SQL)
+build: $(DEB_PACKAGE) $(RPM_PACKAGE) $(WIN_PACKAGE) $(AGENT_CTL) $(MK_SQL)
$(AGENT_CTL):
$(AGENT_CTL_DIR)/run --setup-environment --build
@@ -102,11 +102,11 @@ $(AGENT_CTL):
$(AGENT_CTL_GZ): $(AGENT_CTL)
gzip --best --keep --force $(AGENT_CTL)
-$(CHECK_SQL):
- $(CHECK_SQL_DIR)/run --setup-environment --build
+$(MK_SQL):
+ $(MK_SQL_DIR)/run --setup-environment --build
mkdir -p linux
- install -m 755 $(CHECK_SQL_DIR)/target/x86_64-unknown-linux-musl/release/check-sql $(CHECK_SQL)
- strip $(CHECK_SQL)
+ install -m 755 $(MK_SQL_DIR)/target/x86_64-unknown-linux-musl/release/mk-sql $(MK_SQL)
+ strip $(MK_SQL)
packages: $(DEB_PACKAGE) $(RPM_PACKAGE)
deb: $(DEB_PACKAGE)
diff --git a/agents/check_mk_agent.aix b/agents/check_mk_agent.aix
index 974e1ac8899..f2b5630458a 100755
--- a/agents/check_mk_agent.aix
+++ b/agents/check_mk_agent.aix
@@ -44,7 +44,7 @@ is_valid_plugin() {
# test if a file is executable and does not have certain
# extensions (remnants from distro upgrades).
case "${1:?No plugin defined}" in
- *.dpkg-new | *.dpkg-old | *.dpkg-temp) return 1 ;;
+ *.dpkg-new | *.dpkg-old | *.dpkg-temp | *.dpkg-tmp) return 1 ;;
*) [ -f "${1}" ] && [ -x "${1}" ] ;;
esac
}
@@ -215,7 +215,7 @@ set_up_profiling() {
unset_locale() {
# eliminate localized outputs where possible
# The locale logic here is used to make the Python encoding detection work (see CMK-2778).
- unset -v LANG
+ unset -v LANG LC_ALL
if inpath locale && inpath paste; then
# match C.UTF-8 at the beginning, but not e.g. es_EC.UTF-8!
case "$(locale -a | paste -sd ' ' -)" in
@@ -302,7 +302,7 @@ waitmax() {
section_checkmk() {
echo "<<>>"
- echo "Version: 2.3.0b1"
+ echo "Version: 2.4.0b1"
echo "AgentOS: aix"
echo "Hostname: $(hostname)"
echo "AgentDirectory: ${MK_CONFDIR}"
@@ -310,6 +310,9 @@ section_checkmk() {
echo "SpoolDirectory: ${SPOOLDIR}"
echo "PluginsDirectory: ${PLUGINSDIR}"
echo "LocalDirectory: ${LOCALDIR}"
+ echo "OSType: unix"
+ echo "OSName: AIX"
+ echo "OSVersion: $(oslevel -s)"
#
# BEGIN COMMON AGENT CODE
@@ -750,9 +753,11 @@ _run_cached_internal() {
if [ $((NOW - MTIME)) -gt "${REFRESH_INTERVAL}" ] && [ ! -e "$(_cfile_in_use)" ]; then
# Start it. If the command fails the output is thrown away
cat <<HERE | nohup "${CURRENT_SHELL}" >/dev/null 2>&1 &
-eval '${MK_DEFINE_LOG_SECTION_TIME}'
-exec > "${CACHEFILE}.new.\$\$" || exit 1
-$* && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" || rm -f "${CACHEFILE}.new.\$\$"
+ eval '${MK_DEFINE_LOG_SECTION_TIME}'
+ exec > "${CACHEFILE}.new.\$\$" || exit 1
+ $* \
+ && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" \
+ || rm -f "${CACHEFILE}.new.\$\$"
HERE
fi
@@ -942,7 +947,7 @@ run_purely_synchronous_sections() {
_log_section_time section_cmk_agent_ctl_status
- [ -z "${MK_SKIP_CHECKMK_AGENT_PLUGINS}" ] && log_section_time section_checkmk_agent_plugins
+ [ -z "${MK_SKIP_CHECKMK_AGENT_PLUGINS}" ] && _log_section_time section_checkmk_agent_plugins
[ -z "${MK_SKIP_DF}" ] && _log_section_time section_df
diff --git a/agents/check_mk_agent.freebsd b/agents/check_mk_agent.freebsd
index 29b3a8e2ea1..c3e8a89e91f 100755
--- a/agents/check_mk_agent.freebsd
+++ b/agents/check_mk_agent.freebsd
@@ -57,7 +57,7 @@ is_valid_plugin() {
# test if a file is executable and does not have certain
# extensions (remnants from distro upgrades).
case "${1:?No plugin defined}" in
- *.dpkg-new | *.dpkg-old | *.dpkg-temp) return 1 ;;
+ *.dpkg-new | *.dpkg-old | *.dpkg-temp | *.dpkg-tmp) return 1 ;;
*) [ -f "${1}" ] && [ -x "${1}" ] ;;
esac
}
@@ -218,7 +218,7 @@ set_up_profiling() {
unset_locale() {
# eliminate localized outputs where possible
# The locale logic here is used to make the Python encoding detection work (see CMK-2778).
- unset -v LANG
+ unset -v LANG LC_ALL
if inpath locale && inpath paste; then
# match C.UTF-8 at the beginning, but not e.g. es_EC.UTF-8!
case "$(locale -a | paste -sd ' ' -)" in
@@ -236,7 +236,7 @@ unset_locale() {
section_checkmk() {
echo "<<>>"
- echo "Version: 2.3.0b1"
+ echo "Version: 2.4.0b1"
echo "AgentOS: freebsd"
echo "Hostname: $(hostname)"
echo "AgentDirectory: ${MK_CONFDIR}"
@@ -244,6 +244,15 @@ section_checkmk() {
echo "SpoolDirectory: ${SPOOLDIR}"
echo "PluginsDirectory: ${PLUGINSDIR}"
echo "LocalDirectory: ${LOCALDIR}"
+ echo "OSType: unix"
+
+ while read -r line; do
+ raw_line="${line//\"/}"
+ case $line in
+ NAME=*) echo "OSName: ${raw_line##*=}" ;;
+ VERSION_ID=*) echo "OSVersion: ${raw_line##*=}" ;;
+ esac
+ done </etc/os-release 2>/dev/null
#
# BEGIN COMMON AGENT CODE
@@ -707,9 +716,11 @@ _run_cached_internal() {
if [ $((NOW - MTIME)) -gt "${REFRESH_INTERVAL}" ] && [ ! -e "$(_cfile_in_use)" ]; then
# Start it. If the command fails the output is thrown away
cat <<HERE | nohup "${CURRENT_SHELL}" >/dev/null 2>&1 &
-eval '${MK_DEFINE_LOG_SECTION_TIME}'
-exec > "${CACHEFILE}.new.\$\$" || exit 1
-$* && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" || rm -f "${CACHEFILE}.new.\$\$"
+ eval '${MK_DEFINE_LOG_SECTION_TIME}'
+ exec > "${CACHEFILE}.new.\$\$" || exit 1
+ $* \
+ && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" \
+ || rm -f "${CACHEFILE}.new.\$\$"
HERE
fi
diff --git a/agents/check_mk_agent.hpux b/agents/check_mk_agent.hpux
index 25abb77c06a..58e0fddc450 100755
--- a/agents/check_mk_agent.hpux
+++ b/agents/check_mk_agent.hpux
@@ -31,7 +31,7 @@ fi
section_checkmk() {
echo "<<>>"
- echo "Version: 2.3.0b1"
+ echo "Version: 2.4.0b1"
echo "AgentOS: hpux"
echo "Hostname: $(hostname)"
echo "AgentDirectory: $MK_CONFDIR"
@@ -39,7 +39,9 @@ section_checkmk() {
echo "SpoolDirectory: $SPOOLDIR"
echo "PluginsDirectory: $PLUGINSDIR"
echo "LocalDirectory: $LOCALDIR"
-
+ echo "OSType: unix"
+ echo "OSName: HP-UX"
+ echo "OSVersion: $(uname -r | cut -d' ' -f1)"
}
run_purely_synchronous_sections() {
diff --git a/agents/check_mk_agent.linux b/agents/check_mk_agent.linux
index 3ac698d6b52..a502f4bd558 100755
--- a/agents/check_mk_agent.linux
+++ b/agents/check_mk_agent.linux
@@ -44,7 +44,7 @@ is_valid_plugin() {
# test if a file is executable and does not have certain
# extensions (remnants from distro upgrades).
case "${1:?No plugin defined}" in
- *.dpkg-new | *.dpkg-old | *.dpkg-temp) return 1 ;;
+ *.dpkg-new | *.dpkg-old | *.dpkg-temp | *.dpkg-tmp) return 1 ;;
*) [ -f "${1}" ] && [ -x "${1}" ] ;;
esac
}
@@ -229,7 +229,7 @@ set_up_profiling() {
unset_locale() {
# eliminate localized outputs where possible
# The locale logic here is used to make the Python encoding detection work (see CMK-2778).
- unset -v LANG
+ unset -v LANG LC_ALL
if inpath locale && inpath paste; then
# match C.UTF-8 at the beginning, but not e.g. es_EC.UTF-8!
case "$(locale -a | paste -sd ' ' -)" in
@@ -464,7 +464,7 @@ export_utility_functions() {
section_checkmk() {
cat <<HERE
<<<check_mk>>>
-Version: 2.3.0b1
+Version: 2.4.0b1
AgentOS: linux
Hostname: $(uname -n)
AgentDirectory: ${MK_CONFDIR}
@@ -488,6 +488,20 @@ HERE
# However this ("successfully") reports an empty list or '[unprintable]' on older systemd versions :-(
fi
+ #
+ # OS based labels are created from these variables
+ #
+
+ echo "OSType: linux"
+ while read -r line; do
+ raw_line="${line//\"/}"
+ case $line in
+ ID=*) echo "OSPlatform: ${raw_line##*=}" ;;
+ NAME=*) echo "OSName: ${raw_line##*=}" ;;
+ VERSION_ID=*) echo "OSVersion: ${raw_line##*=}" ;;
+ esac
+ done <<<"$(cat /etc/os-release 2>/dev/null)"
+
#
# BEGIN COMMON AGENT CODE
#
@@ -544,17 +558,6 @@ section_checkmk_failed_plugin() {
section_labels() {
echo '<<<labels:sep(0)>>>'
- echo '{"cmk/os_type":"linux"}'
-
- while read -r line; do
- raw_line="${line//\"/}"
- case $line in
- ID=*) echo '{"cmk/os_platform":"'"${raw_line##*=}"'"}' ;;
- NAME=*) echo '{"cmk/os_name":"'"${raw_line##*=}"'"}' ;;
- VERSION_ID=*) echo '{"cmk/os_version":"'"${raw_line##*=}"'"}' ;;
- esac
- done <<<"$(cat /etc/os-release 2>/dev/null)"
-
if [ -n "${IS_DOCKERIZED}" ] || [ -n "${IS_LXC_CONTAINER}" ]; then
echo '{"cmk/device_type":"container"}'
elif grep "hypervisor" /proc/cpuinfo >/dev/null 2>&1; then
@@ -882,13 +885,7 @@ section_ipmitool() {
section_ipmisensors() {
inpath ipmi-sensors && ls /dev/ipmi* >/dev/null || return
- ${MK_RUN_SYNC_PARTS} && echo '<<<ipmi_sensors>>>'
- # Newer ipmi-sensors version have new output format; Legacy format can be used
- if ipmi-sensors --help | grep -q legacy-output; then
- IPMI_FORMAT="--legacy-output"
- else
- IPMI_FORMAT=""
- fi
+
if ipmi-sensors --help | grep -q " \-\-groups"; then
IPMI_GROUP_OPT="-g"
else
@@ -896,8 +893,8 @@ section_ipmisensors() {
fi
# At least with ipmi-sensors 0.7.16 this group is Power_Unit instead of "Power Unit"
- _run_cached_internal "ipmi_sensors" 300 300 900 600 "echo '<<<ipmi_sensors>>>'; for class in Temperature Power_Unit Fan; do
- ipmi-sensors ${IPMI_FORMAT} --sdr-cache-directory /var/cache ${IPMI_GROUP_OPT} \"\${class}\" | sed -e 's/ /_/g' -e 's/:_\?/ /g' -e 's@ \([^(]*\)_(\([^)]*\))@ \2_\1@'
+ _run_cached_internal "ipmi_sensors" 300 300 900 600 "echo '<<<ipmi_sensors>>>'; for class in Temperature Power_Unit Fan; do
+ ipmi-sensors --sdr-cache-directory /var/cache ${IPMI_GROUP_OPT} \"\${class}\"
# In case of a timeout immediately leave loop.
if [ $? = 255 ]; then break; fi
done"
@@ -1091,7 +1088,7 @@ read_postfix_master_pid() {
if [ -e "${postfix_queue_dir}/pid/master.pid" ]; then
if [ -r "${postfix_queue_dir}/pid/master.pid" ]; then
postfix_pid=$(sed 's/ //g' <"${postfix_queue_dir}/pid/master.pid") # handle possible spaces in output
- if readlink -- "/proc/${postfix_pid}/exe" | grep -q ".*postfix/\(s\?bin/\)\?master.*"; then
+ if ps -p "${postfix_pid}" -o cmd= | grep -q ".*postfix/\(s\?bin/\)\?/\?master.*"; then
echo "${postfix_instance_name}:the Postfix mail system is running:PID:${postfix_pid}"
else
echo "${postfix_instance_name}:PID file exists but instance is not running!"
@@ -1122,7 +1119,7 @@ section_mailqueue() {
done
fi
# Always check for the default queue. It can exist even if multiple instances are configured
- read_postfix_queue_dirs "$(postconf -h queue_directory 2>/dev/null)"
+ read_postfix_queue_dirs "$(postconf -h queue_directory 2>/dev/null)" "default"
read_postfix_master_pid "$(postconf -h queue_directory 2>/dev/null)" "default"
elif [ -x /usr/sbin/ssmtp ]; then
@@ -1810,9 +1807,11 @@ _run_cached_internal() {
if [ $((NOW - MTIME)) -gt "${REFRESH_INTERVAL}" ] && [ ! -e "$(_cfile_in_use)" ]; then
# Start it. If the command fails the output is thrown away
cat <<HERE | nohup "${CURRENT_SHELL}" >/dev/null 2>&1 &
-eval '${MK_DEFINE_LOG_SECTION_TIME}'
-exec > "${CACHEFILE}.new.\$\$" || exit 1
-$* && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" || rm -f "${CACHEFILE}.new.\$\$"
+ eval '${MK_DEFINE_LOG_SECTION_TIME}'
+ exec > "${CACHEFILE}.new.\$\$" || exit 1
+ $* \
+ && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" \
+ || rm -f "${CACHEFILE}.new.\$\$"
HERE
fi
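
The loop moved into section_checkmk above turns selected /etc/os-release fields into OS* header lines; the server now derives the former cmk/os_* labels from these headers, which is why section_labels sheds that code. A close Python analogue of the extraction (the shell strips every double quote; this sketch strips only the surrounding ones):

    def os_release_headers(text):
        mapping = {"ID": "OSPlatform", "NAME": "OSName", "VERSION_ID": "OSVersion"}
        headers = []
        for line in text.splitlines():
            key, _, value = line.partition("=")
            if key in mapping:
                headers.append("%s: %s" % (mapping[key], value.strip().strip('"')))
        return headers

    print(os_release_headers('ID=debian\nNAME="Debian GNU/Linux"\nVERSION_ID="12"'))
    # ['OSPlatform: debian', 'OSName: Debian GNU/Linux', 'OSVersion: 12']
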
diff --git a/agents/check_mk_agent.macosx b/agents/check_mk_agent.macosx
index fb4cd7f66d7..30163137325 100755
--- a/agents/check_mk_agent.macosx
+++ b/agents/check_mk_agent.macosx
@@ -36,7 +36,7 @@ fi
section_checkmk() {
echo "<<>>"
- echo "Version: 2.3.0b1"
+ echo "Version: 2.4.0b1"
echo "AgentOS: macosx"
echo "Hostname: $(hostname)"
echo "AgentDirectory: $MK_CONFDIR"
@@ -46,6 +46,7 @@ section_checkmk() {
echo "LocalDirectory: $LOCALDIR"
echo "OSName: $(sw_vers -productName)"
echo "OSVersion: $(sw_vers -productVersion)"
+ echo "OSType: macos"
}
section_lnx_if() {
diff --git a/agents/check_mk_agent.netbsd b/agents/check_mk_agent.netbsd
index d2df9878a9a..d32786f4ff2 100755
--- a/agents/check_mk_agent.netbsd
+++ b/agents/check_mk_agent.netbsd
@@ -53,7 +53,7 @@ fi
section_misc_sections() {
echo "<<>>"
- echo "Version: 2.3.0b1"
+ echo "Version: 2.4.0b1"
echo "AgentOS: netbsd"
echo "Hostname: $(hostname)"
echo "AgentDirectory: $MK_CONFDIR"
@@ -61,6 +61,9 @@ section_misc_sections() {
echo "SpoolDirectory: $SPOOLDIR"
echo "PluginsDirectory: $PLUGINSDIR"
echo "LocalDirectory: $LOCALDIR"
+ echo "OSType: unix"
+ echo "OSName: $(uname -s)"
+ echo "OSVersion: $(uname -r)"
echo '<<<df>>>'
df -kPt ffs | sed -e 's/^\([^ ][^ ]*\) \(.*\)$/\1 ffs \2/' | sed 1d
diff --git a/agents/check_mk_agent.openbsd b/agents/check_mk_agent.openbsd
index 7e12d2df9ed..68667091f0a 100755
--- a/agents/check_mk_agent.openbsd
+++ b/agents/check_mk_agent.openbsd
@@ -1,10 +1,8 @@
-#!/bin/sh
+#!/bin/ksh
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-# shellcheck shell=ksh
-
# Author: Lars Michelsen
# Florian Heigl
# Christian Zigotzky
@@ -124,7 +122,7 @@ section_openbsd_mem() {
section_misc_sections() {
echo "<<>>"
- echo "Version: 2.3.0b1"
+ echo "Version: 2.4.0b1"
echo "AgentOS: openbsd"
echo "Hostname: $(hostname)"
echo "AgentDirectory: $MK_CONFDIR"
@@ -132,6 +130,9 @@ section_misc_sections() {
echo "SpoolDirectory: $SPOOLDIR"
echo "PluginsDirectory: $PLUGINSDIR"
echo "LocalDirectory: $LOCALDIR"
+ echo "OSType: unix"
+ echo "OSName: $(uname -s)"
+ echo "OSVersion: $(uname -r)"
echo '<<<df>>>'
df -kPt ffs | sed -e 's/^\([^ ][^ ]*\) \(.*\)$/\1 ffs \2/' | sed 1d
diff --git a/agents/check_mk_agent.openvms b/agents/check_mk_agent.openvms
index 02032119c2f..4de31a2b159 100755
--- a/agents/check_mk_agent.openvms
+++ b/agents/check_mk_agent.openvms
@@ -142,11 +142,12 @@ $set noVerify
$define sys$output "''cacheFile'_new"
$on error then goto WCFdone
$say "<<>>"
-$say "Version: 2.3.0b1"
+$say "Version: 2.4.0b1"
$say "AgentOS: openvms"
$say "Nodename: ",f$getsyi("nodename")
$say "Architecture: ''HWinfo'"
$say "SWversion: ''SWinfo'"
+$say "OSName: OpenVMS"
$say "<<>>"
$say "''numCPU' ''cpu2' ''cpu3' ''wt0' ''wt1'"
$say "<<>>"
diff --git a/agents/check_mk_agent.openwrt b/agents/check_mk_agent.openwrt
index 92f792b4314..b296b693f9e 100755
--- a/agents/check_mk_agent.openwrt
+++ b/agents/check_mk_agent.openwrt
@@ -46,7 +46,7 @@ is_valid_plugin() {
# test if a file is executable and does not have certain
# extensions (remnants from distro upgrades).
case "${1:?No plugin defined}" in
- *.dpkg-new | *.dpkg-old | *.dpkg-temp) return 1 ;;
+ *.dpkg-new | *.dpkg-old | *.dpkg-temp | *.dpkg-tmp) return 1 ;;
*) [ -f "${1}" ] && [ -x "${1}" ] ;;
esac
}
@@ -218,7 +218,7 @@ set_up_profiling() {
unset_locale() {
# eliminate localized outputs where possible
# The locale logic here is used to make the Python encoding detection work (see CMK-2778).
- unset -v LANG
+ unset -v LANG LC_ALL
if inpath locale && inpath paste; then
# match C.UTF-8 at the beginning, but not e.g. es_EC.UTF-8!
case "$(locale -a | paste -sd ' ' -)" in
@@ -270,7 +270,7 @@ section_cpu() {
section_checkmk() {
echo "<<>>"
- echo "Version: 2.3.0b1"
+ echo "Version: 2.4.0b1"
echo "AgentOS: openwrt"
echo "Hostname: $(hostname)"
echo "AgentDirectory: ${MK_CONFDIR}"
@@ -279,6 +279,15 @@ section_checkmk() {
echo "PluginsDirectory: ${PLUGINSDIR}"
echo "LocalDirectory: ${LOCALDIR}"
+ echo "OSType: linux"
+ while read -r line; do
+ raw_line=$(echo "$line" | tr -d \")
+ case $raw_line in
+ NAME=*) echo "OSName: ${raw_line##*=}" ;;
+ VERSION_ID=*) echo "OSVersion: ${raw_line##*=}" ;;
+ esac
+ done </etc/os-release 2>/dev/null
+
# If we are called via xinetd, try to find only_from configuration
if [ -n "${REMOTE_HOST}" ]; then
printf 'OnlyFrom: '
@@ -1150,9 +1159,11 @@ _run_cached_internal() {
if [ $((NOW - MTIME)) -gt "${REFRESH_INTERVAL}" ] && [ ! -e "$(_cfile_in_use)" ]; then
# Start it. If the command fails the output is thrown away
cat <<HERE | nohup "${CURRENT_SHELL}" >/dev/null 2>&1 &
-eval '${MK_DEFINE_LOG_SECTION_TIME}'
-exec > "${CACHEFILE}.new.\$\$" || exit 1
-$* && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" || rm -f "${CACHEFILE}.new.\$\$"
+ eval '${MK_DEFINE_LOG_SECTION_TIME}'
+ exec > "${CACHEFILE}.new.\$\$" || exit 1
+ $* \
+ && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" \
+ || rm -f "${CACHEFILE}.new.\$\$"
HERE
fi
diff --git a/agents/check_mk_agent.solaris b/agents/check_mk_agent.solaris
index 6111b5dde6a..588555af298 100755
--- a/agents/check_mk_agent.solaris
+++ b/agents/check_mk_agent.solaris
@@ -44,7 +44,7 @@ is_valid_plugin() {
# test if a file is executable and does not have certain
# extensions (remnants from distro upgrades).
case "${1:?No plugin defined}" in
- *.dpkg-new | *.dpkg-old | *.dpkg-temp) return 1 ;;
+ *.dpkg-new | *.dpkg-old | *.dpkg-temp | *.dpkg-tmp) return 1 ;;
*) [ -f "${1}" ] && [ -x "${1}" ] ;;
esac
}
@@ -213,7 +213,7 @@ set_up_profiling() {
unset_locale() {
# eliminate localized outputs where possible
# The locale logic here is used to make the Python encoding detection work (see CMK-2778).
- unset -v LANG
+ unset -v LANG LC_ALL
if inpath locale && inpath paste; then
# match C.UTF-8 at the beginning, but not e.g. es_EC.UTF-8!
case "$(locale -a | paste -sd ' ' -)" in
@@ -300,7 +300,7 @@ export_utility_functions() {
section_checkmk() {
echo "<<>>"
- echo "Version: 2.3.0b1"
+ echo "Version: 2.4.0b1"
echo "AgentOS: solaris"
echo "Hostname: $(hostname)"
echo "AgentDirectory: ${MK_CONFDIR}"
@@ -309,6 +309,15 @@ section_checkmk() {
echo "PluginsDirectory: ${PLUGINSDIR}"
echo "LocalDirectory: ${LOCALDIR}"
+ echo "OSType: unix"
+ while read -r line; do
+ raw_line="${line//\"/}"
+ case $line in
+ NAME=*) echo "OSName: ${raw_line##*=}" ;;
+ VERSION_ID=*) echo "OSVersion: ${raw_line##*=}" ;;
+ esac
+ done <<<"$(cat /etc/os-release 2>/dev/null)"
+
#
# BEGIN COMMON AGENT CODE
#
@@ -793,9 +802,11 @@ _run_cached_internal() {
if [ $((NOW - MTIME)) -gt "${REFRESH_INTERVAL}" ] && [ ! -e "$(_cfile_in_use)" ]; then
# Start it. If the command fails the output is thrown away
cat <<HERE | nohup "${CURRENT_SHELL}" >/dev/null 2>&1 &
-eval '${MK_DEFINE_LOG_SECTION_TIME}'
-exec > "${CACHEFILE}.new.\$\$" || exit 1
-$* && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" || rm -f "${CACHEFILE}.new.\$\$"
+ eval '${MK_DEFINE_LOG_SECTION_TIME}'
+ exec > "${CACHEFILE}.new.\$\$" || exit 1
+ $* \
+ && mv -f "${CACHEFILE}.new.\$\$" "${CACHEFILE}" && rm -f "${FAIL_REPORT_FILE}" \
+ || rm -f "${CACHEFILE}.new.\$\$"
HERE
fi
diff --git a/agents/modules/windows/BUILD_NUM b/agents/modules/windows/BUILD_NUM
index 7273c0fa8c5..64bb6b746dc 100644
--- a/agents/modules/windows/BUILD_NUM
+++ b/agents/modules/windows/BUILD_NUM
@@ -1 +1 @@
-25
+30
diff --git a/agents/modules/windows/Makefile b/agents/modules/windows/Makefile
index 591cc3676f8..0682116b5aa 100644
--- a/agents/modules/windows/Makefile
+++ b/agents/modules/windows/Makefile
@@ -12,8 +12,8 @@
include ../../../defines.make
# commandline
-PY_VER ?= $(PYTHON_MAJOR_DOT_MINOR)
-PY_SUBVER ?= $(PYTHON_VERSION_PATCH)
+PY_VER ?= $(PYTHON_VERSION_WINDOWS_MAJOR_DOT_MINOR)
+PY_SUBVER ?= $(PYTHON_VERSION_WINDOWS_PATCH)
#MSBUILD ?= C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\MSBuild\15.0\Bin\MSBuild.exe
MSBUILD ?= C:\Program Files\Microsoft Visual Studio\2022\Professional\MSBuild\Current\Bin\msbuild.exe
@@ -110,7 +110,9 @@ $(PYTHON_PIPFILE_LOCK): $(PYTHON_PIPFILE) ## piplock building
@powershell Write-Host "Target $@" -foreground white
@call exec_cmd.bat build_pipfile_lock.cmd $(PY_VER) $(PY_SUBVER)
-venv: python_install $(PYTHON_PIPFILE_LOCK) ## venv building
+python_pipfile: $(PYTHON_PIPFILE_LOCK)
+
+venv: python_install python_pipfile ## venv building
@powershell Write-Host "Target '$@'" -foreground white
exec_cmd.bat build_environment.cmd $(PY_VER) $(PY_SUBVER)
exec_cmd.bat clean_environment.cmd $(PY_VER) $(PY_SUBVER)
diff --git a/agents/modules/windows/build_environment.cmd b/agents/modules/windows/build_environment.cmd
index 905ece103fe..d35e007d50c 100644
--- a/agents/modules/windows/build_environment.cmd
+++ b/agents/modules/windows/build_environment.cmd
@@ -13,4 +13,7 @@ set PYTHONPATH=%cd%\Lib\;%cd%\DLLs\
set PYTHONHOME=%cd%
set PATH=%cd%\;%cd%\Scripts\;%PATH%
%cd%\python.exe -m pipenv sync --python=%cd%\python.exe || exit /b 5
+:: we use a plain copy since Python 3.12 (it changed the location of these files)
+xcopy %build_dir%\win32\binaries\*.* %save_dir%\.venv\Scripts /Y /Q || powershell Write-Host "`'%save_dir%`' xcopy failed" -Foreground red && exit /b 8
+
exit /b 0
diff --git a/agents/modules/windows/exec_cmd.bat b/agents/modules/windows/exec_cmd.bat
index c7f56a87d17..ca91e5679af 100644
--- a/agents/modules/windows/exec_cmd.bat
+++ b/agents/modules/windows/exec_cmd.bat
@@ -4,13 +4,13 @@
::
:: 'exec.cmd script PY_VER PY_SUBVER'
::
-:: Example: exec.cmd install_legacy.cmd 3.9 7
+:: Example: exec.cmd install_legacy.cmd 3.12 0
:: Always return back to current dir
:: *********************************************
@echo off
-if "%3" == "" powershell Write-Host "Usage: exec_cmd cmd 3.9 7" -foreground red && exit /b 1
+if "%3" == "" powershell Write-Host "Usage: exec_cmd cmd 3.12 0" -foreground red && exit /b 1
set PY_VER=%2
set PY_SUBVER=%3
:: remove dot from the PY_VER
diff --git a/agents/modules/windows/install_pipenv.cmd b/agents/modules/windows/install_pipenv.cmd
index ddd949f3c73..6f2636aef53 100644
--- a/agents/modules/windows/install_pipenv.cmd
+++ b/agents/modules/windows/install_pipenv.cmd
@@ -11,6 +11,6 @@ powershell Write-Host "Pipenv installing..." -foreground green
:: As of 19.03.2019 we must use virtualenv 20.0.10
powershell Write-Host "virtualenv resetting to the correct version..." -foreground green
-.\python.exe -m pip install virtualenv==20.0.10
+.\python.exe -m pip install virtualenv
powershell Write-Host "Pipenv installed" -foreground green
exit /b 0
diff --git a/agents/modules/windows/patch_pipfile.py b/agents/modules/windows/patch_pipfile.py
index 9984665c8c2..4e3e049a208 100644
--- a/agents/modules/windows/patch_pipfile.py
+++ b/agents/modules/windows/patch_pipfile.py
@@ -8,7 +8,11 @@
import sys
-from colorama import Fore, init, Style # type: ignore[import] # pylint: disable=import-error
+from colorama import ( # type: ignore[import-untyped] # pylint: disable=import-error
+ Fore,
+ init,
+ Style,
+)
init()
diff --git a/agents/modules/windows/pipfiles/3/Pipfile b/agents/modules/windows/pipfiles/3/Pipfile
index 9e9c1956ed9..a502cb954a6 100644
--- a/agents/modules/windows/pipfiles/3/Pipfile
+++ b/agents/modules/windows/pipfiles/3/Pipfile
@@ -6,15 +6,15 @@ name = "pypi"
[dev-packages]
[packages]
-colorama = "==0.4.1" # Windows has to have some color output
-pyyaml = "==5.1.2" # to support the agent config
-certifi= "==2020.4.5.1" # jolokia
-chardet = "==3.0.4" # jolokia
-idna = "==2.9" # jolokia
-requests = {extras = ["socks"], version = "2.23.0"} # jolokia, cmk_update_agent
-urllib3 = "==1.25.9" # jolokia
-pyopenssl = "~=22.0" # cmk_update_agent
-pywin32 = "==303" # logwatch
+colorama = "==0.4.6" # Windows has to have some color output
+pyyaml = "==6.0.1" # to support the agent config
+certifi= "==2023.11.17" # jolokia
+chardet = "==5.2.0" # jolokia
+idna = "==3.6" # jolokia
+requests = {extras = ["socks"], version = "2.31.0"} # jolokia, cmk_update_agent
+urllib3 = "==2.1.0" # jolokia
+pyopenssl = "~=24.0" # cmk_update_agent
+pywin32 = "==306" # logwatch
[requires]
-python_version = "3.10"
+python_version = "3.12"
diff --git a/agents/modules/windows/readme.md b/agents/modules/windows/readme.md
index 096ff3d8f42..6621fc6a844 100644
--- a/agents/modules/windows/readme.md
+++ b/agents/modules/windows/readme.md
@@ -1,16 +1,16 @@
# Windows Modules to deploy with Windows Agent 2.0 and later
-## Python 3.11.5
+## Python 3.12.0
### Source
-PYTHON 3.11.5, provided as source tarball by standard Checkmk development process
+PYTHON 3.12.0, provided as source tarball by standard Checkmk development process
### Changing or Updating the Python
@@ -57,7 +57,7 @@ This procedure may be quite annoying; you have to check the following points:
Usually it is conftest.py and Makefile.
-6. Check build_the_module.cmd for 3.11 and et cetera
+6. Check build_the_module.cmd for 3.12, et cetera
7. Check that the Windows node builds artifacts successfully.
### PROCESS
@@ -65,7 +65,7 @@ This procedure may be quite annoying; you have to check the following points:
#### Execution local
##### Building
-make build PY_VER=3.11 PY_SUBVER=1
+make build PY_VER=3.12 PY_SUBVER=0
##### Testing
make integration
@@ -78,7 +78,7 @@ Main entry:
build_the_module cached
In turn, the script makes two calls:
-build_the_cached artefact_dir credentials url 3.11 1
+build_the_cached artefact_dir credentials url 3.12 1
#### Caching
@@ -86,16 +86,16 @@ All builds of the Python are cached.
Name of the cached file
python-%version%.%subversion%_%git_hash%_%BUILD_NUM%.cab
-This mean that you didn't get a new build till you increase valeu in the file *BUILD_NUM*.
+This means that you won't get a new build until you increase the value in the file *BUILD_NUM*.
Just a commit is not enough, because some builds can't get data about the current git hash.
In the latter case the git_hash is replaced with the string "latest".
-#### Steps 3.11 and newer
+#### Steps 3.12 and newer
1. Deploy package from the *omd/packages*
2. Build and copy results to the *out*.
-3. Uninstall from backuped python-3.11.exe in *uninstall*
+3. Uninstall from the backed-up python-3.12.exe in *uninstall*
4. Install to the *to_install*
5. Upgrade pip
6. Install pipenv
@@ -115,7 +115,7 @@ In the latter case the git_hash is replaced with the string "latest".
|
|-- tmp/
| |
-| +-- 3.11/
+| +-- 3.12/
| | python-3.cab * resulting module file
| |
| |-- to_save/ * to produce module *
@@ -140,8 +140,8 @@ In latter case the git_hash is replaced with string "latest".
|
|-- python/
|
- +-- 3.11/
+ +-- 3.12/
|
- |-- python-3.11.timestamp
+ |-- python-3.12.timestamp
|
- +-- python-3.11/
+ +-- python-3.12/
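
Spelled out, the cache naming above means only a new BUILD_NUM (or a fresh git hash, when one can be determined) yields a new artifact; the values below are placeholders:

    version, subversion, git_hash, build_num = "3.12", "0", "latest", 30
    print("python-%s.%s_%s_%s.cab" % (version, subversion, git_hash, build_num))
    # python-3.12.0_latest_30.cab
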
diff --git a/agents/modules/windows/tests/Makefile b/agents/modules/windows/tests/Makefile
index f52458c726e..812d7a8a806 100644
--- a/agents/modules/windows/tests/Makefile
+++ b/agents/modules/windows/tests/Makefile
@@ -4,8 +4,8 @@
include ../../../../defines.make
# commandline
-PY_VER ?= $(PYTHON_MAJOR_DOT_MINOR)
-PY_SUBVER ?= $(PYTHON_VERSION_PATCH)
+PY_VER ?= $(PYTHON_VERSION_WINDOWS_MAJOR_DOT_MINOR)
+PY_SUBVER ?= $(PYTHON_VERSION_WINDOWS_PATCH)
INTEGRATION_TIMESTAMP := results\integration.timestamp
@@ -13,7 +13,7 @@ INTEGRATION_TIMESTAMP := results\integration.timestamp
PYTHON := ..\..\..\..\artefacts\python-3.cab
# Scripts to be compatible with pythons
-SCRIPT_1 := ..\..\..\..\enterprise\agents\plugins\cmk_update_agent.py
+SCRIPT_1 := ..\..\..\..\non-free\cmk-update-agent\cmk_update_agent.py
SCRIPT_2 := ..\..\..\plugins\mk_logwatch.py
SCRIPT_3 := ..\..\..\plugins\mk_jolokia.py
diff --git a/agents/modules/windows/tests/integration/test_scripts_execution.py b/agents/modules/windows/tests/integration/test_scripts_execution.py
index 65c19a43514..a9c9de133f6 100644
--- a/agents/modules/windows/tests/integration/test_scripts_execution.py
+++ b/agents/modules/windows/tests/integration/test_scripts_execution.py
@@ -35,7 +35,7 @@ def run_script(work_python: Path, *, script: Path) -> tuple[int, str, str]:
"script,expected_code,expected_pipe,expected_err",
[
(
- Path("..\\..\\..\\..\\..\\enterprise\\agents\\plugins\\cmk_update_agent.py"),
+ Path("..\\..\\..\\..\\..\\non-free\\cmk-update-agent\\cmk_update_agent.py"),
1,
"",
"Missing config file at .\\cmk-update-agent.cfg. Configuration",
diff --git a/agents/plugins/.f12 b/agents/plugins/.f12
index 4618ee4ede1..a06739816f1 100755
--- a/agents/plugins/.f12
+++ b/agents/plugins/.f12
@@ -14,7 +14,7 @@
# sudo mkdir -p /usr/lib/check_mk_agent/plugins
# sudo install -m 755 * /usr/lib/check_mk_agent/plugins || true
-sudo rsync -va \
+rsync -vrlD \
--exclude=.f12 \
--exclude=__init__.py* \
./ "$ROOT/share/check_mk/agents/plugins/"
diff --git a/agents/plugins/apache_status.py b/agents/plugins/apache_status.py
index f75b178f9b6..a2f5bd444fc 100755
--- a/agents/plugins/apache_status.py
+++ b/agents/plugins/apache_status.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
USER_AGENT = "checkmk-agent-apache_status-" + __version__
# Checkmk-Agent-Plugin - Apache Server Status
@@ -290,7 +290,7 @@ def main():
try:
response_body = get_response_body(proto, cafile, address, portspec, page)
for line in response_body.split("\n"):
- if not line.strip():
+ if not line or line.isspace():
continue
if line.lstrip()[0] == "<":
# Seems to be html output. Skip this server.
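
The changed blank-line test is behavior-preserving: line.strip() builds a whole new string just to check it for emptiness, while "not line or line.isspace()" evaluates the same condition without the allocation. The two predicates agree for every str:

    for line in ["", "   ", "\t\n", "ExtendedStatus On"]:
        assert (not line.strip()) == (not line or line.isspace())
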
diff --git a/agents/plugins/asmcmd.sh b/agents/plugins/asmcmd.sh
index 7f0aee70106..3277b2e1f88 100755
--- a/agents/plugins/asmcmd.sh
+++ b/agents/plugins/asmcmd.sh
@@ -9,6 +9,6 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
su - griduser -c "asmcmd $*"
diff --git a/agents/plugins/db2_mem b/agents/plugins/db2_mem
index c051e5bdce4..d902ec08de3 100755
--- a/agents/plugins/db2_mem
+++ b/agents/plugins/db2_mem
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
INSTANCES=$(ps -ef | grep db2sysc | awk '{print $1}' | sort -u | grep -v root)
diff --git a/agents/plugins/dnsclient b/agents/plugins/dnsclient
index 809e2a9de64..d5e834fbd5c 100755
--- a/agents/plugins/dnsclient
+++ b/agents/plugins/dnsclient
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# This check can be used to test the name resolution of a given host
# address using the local resolver of the system this script is
diff --git a/agents/plugins/hpux_lunstats b/agents/plugins/hpux_lunstats
index 439911c8614..0180a700f87 100755
--- a/agents/plugins/hpux_lunstats
+++ b/agents/plugins/hpux_lunstats
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Monitor status of LUNs on HP-UX
# Put this file into /usr/lib/check_mk_agent/plugins. Then
diff --git a/agents/plugins/hpux_statgrab b/agents/plugins/hpux_statgrab
index 58d8b599380..7ded5d1c299 100755
--- a/agents/plugins/hpux_statgrab
+++ b/agents/plugins/hpux_statgrab
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# this is for users who compiled statgrab on hp-ux.
# note you'll need a 0.18+ version, from their github page at
diff --git a/agents/plugins/ibm_mq b/agents/plugins/ibm_mq
index 6ac323e9bf9..e387d736827 100755
--- a/agents/plugins/ibm_mq
+++ b/agents/plugins/ibm_mq
@@ -10,7 +10,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
#
# Collects monitoring information of all IBM MQ Queue Managers.
diff --git a/agents/plugins/isc_dhcpd.py b/agents/plugins/isc_dhcpd.py
index e63dd3f360c..aee4be3f7be 100755
--- a/agents/plugins/isc_dhcpd.py
+++ b/agents/plugins/isc_dhcpd.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
# Monitor leases if ISC-DHCPD
import calendar
diff --git a/agents/plugins/jar_signature b/agents/plugins/jar_signature
index 3e980f53c1b..3c08dd291e5 100755
--- a/agents/plugins/jar_signature
+++ b/agents/plugins/jar_signature
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# This agent uses the program "jarsigner" to read ssl certificate
# information of jar files and outputs the information to stdout
@@ -30,7 +30,7 @@ echo "<<<jar_signature>>>"
for JAR in $JAR_PATH; do
if [ -e "$JAR" ]; then # avoid entry for '*.jar'
echo "[[[${JAR##*/}]]]"
- OUTPUT=$(jarsigner -verify -verbose -certs "$JAR")
+ OUTPUT=$(su - oracle -c "jarsigner -verify -verbose -certs $JAR")
LINE=$(echo "$OUTPUT" | grep -n ^s | tail -n1 | cut -d: -f1)
echo "${OUTPUT}" | tail -n +"${LINE}"
echo
diff --git a/agents/plugins/kaspersky_av b/agents/plugins/kaspersky_av
index 660b1622eb4..21fcfbe5b4e 100755
--- a/agents/plugins/kaspersky_av
+++ b/agents/plugins/kaspersky_av
@@ -9,16 +9,60 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
KAV4FS="kav4fs"
KESL="kesl"
+# BEGIN COMMON PLUGIN CODE
+
+# check that no users other than root can change the file
+only_root_can_modify() {
+ permissions=$1
+ owner=$2
+ group=$3
+
+ group_write_perm=$(echo "$permissions" | cut -c 6)
+ other_write_perm=$(echo "$permissions" | cut -c 9)
+
+ if [ "$owner" != "root" ] || [ "$other_write_perm" != "-" ]; then
+ return 1
+ fi
+
+ [ "$group" = "root" ] || [ "$group_write_perm" = "-" ]
+}
+
+get_binary_owner() {
+ BINARY_PATH=$1
+ stat -c '%U' "${BINARY_PATH}"
+}
+
+get_binary_execution_mode() {
+ BINARY_PATH=$1
+ BINARY_USER=$2
+
+ # if the executable belongs to someone besides root, do not execute it as root
+ if needs_user_switch_before_executing "$BINARY_PATH"; then
+ echo "su ${BINARY_USER} -c"
+ return
+ fi
+ echo "bash -c"
+}
+
+needs_user_switch_before_executing() {
+ BINARY_PATH=$1
+
+ [ "$(whoami)" = "root" ] && ! only_root_can_modify "$(stat -c '%A' "$BINARY_PATH")" "$(stat -c '%U' "$BINARY_PATH")" "$(stat -c '%G' "$BINARY_PATH")"
+}
+
+# END COMMON PLUGIN CODE
+
run() {
suite="$1"
control="/opt/kaspersky/$suite/bin/$suite-control"
[ -x "$(command -v "$control")" ] || return
+ only_root_can_modify "$(stat -c '%A' "$control")" "$(stat -c '%U' "$control")" "$(stat -c '%G' "$control")" || return
if [ "$suite" = "$KAV4FS" ]; then
echo "<<>>"
diff --git a/agents/plugins/lnx_container_host_if.linux b/agents/plugins/lnx_container_host_if.linux
index 67952cc7a82..992bba044d8 100755
--- a/agents/plugins/lnx_container_host_if.linux
+++ b/agents/plugins/lnx_container_host_if.linux
@@ -14,7 +14,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
CONFIG_FILE="${MK_CONFDIR}/lnx_container_host_if.cfg"
__read_network_interface_files() {
diff --git a/agents/plugins/lnx_quota b/agents/plugins/lnx_quota
index 86c2c7673ac..9117360af4f 100755
--- a/agents/plugins/lnx_quota
+++ b/agents/plugins/lnx_quota
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
if type repquota >/dev/null; then
echo "<<>>"
diff --git a/agents/plugins/lvm b/agents/plugins/lvm
index ec22ac7db91..755b260b582 100755
--- a/agents/plugins/lvm
+++ b/agents/plugins/lvm
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
echo "<<>>"
vgs --units b --nosuffix --noheadings --separator ' '
diff --git a/agents/plugins/mailman2_lists b/agents/plugins/mailman2_lists
index 0db113cc86e..7505ae4876a 100755
--- a/agents/plugins/mailman2_lists
+++ b/agents/plugins/mailman2_lists
@@ -7,7 +7,7 @@
Monitor Mailman 2 mailing lists.
"""
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
# This Checkmk-Agent plugin gathers information about mailinglists hosted
# by the local mailman instance.
diff --git a/agents/plugins/mailman3_lists b/agents/plugins/mailman3_lists
index d76bb1b914e..5e4ac4c7d51 100755
--- a/agents/plugins/mailman3_lists
+++ b/agents/plugins/mailman3_lists
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Monitor Mailman 3 mailing lists
diff --git a/agents/plugins/mk_apt b/agents/plugins/mk_apt
index 1151cfb0f52..6b2d7c28107 100755
--- a/agents/plugins/mk_apt
+++ b/agents/plugins/mk_apt
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Check for APT updates (Debian, Ubuntu)
# TODO:
diff --git a/agents/plugins/mk_ceph b/agents/plugins/mk_ceph
index 20a3fb05133..c99a95ed8b0 100755
--- a/agents/plugins/mk_ceph
+++ b/agents/plugins/mk_ceph
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Check Ceph storage
# Config file must contain:
diff --git a/agents/plugins/mk_cups_queues b/agents/plugins/mk_cups_queues
index bdb8524a48c..c8b185d50aa 100755
--- a/agents/plugins/mk_cups_queues
+++ b/agents/plugins/mk_cups_queues
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
if type lpstat >/dev/null 2>&1; then
export LC_TIME="en_US.UTF-8"
diff --git a/agents/plugins/mk_db2.aix b/agents/plugins/mk_db2.aix
index 2a05d31fa85..4f8bf8f6223 100755
--- a/agents/plugins/mk_db2.aix
+++ b/agents/plugins/mk_db2.aix
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Monitor DB/2 databases on AIX
# $HOME/sqllib/db2profile
diff --git a/agents/plugins/mk_db2.linux b/agents/plugins/mk_db2.linux
index 7e57d142e88..1604d5185fc 100755
--- a/agents/plugins/mk_db2.linux
+++ b/agents/plugins/mk_db2.linux
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Agent plugin to monitor DB/2 databases on Linux.
diff --git a/agents/plugins/mk_docker.py b/agents/plugins/mk_docker.py
index b46ad36ce6f..5cdb2314d78 100755
--- a/agents/plugins/mk_docker.py
+++ b/agents/plugins/mk_docker.py
@@ -19,7 +19,7 @@
from __future__ import with_statement
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
# NOTE: docker is available for python versions from 2.6 / 3.3
@@ -44,28 +44,17 @@ def which(prg):
# The "import docker" checks below result in agent sections being created. This
-# is a way to end the plugin in case it is being executed on a non docker or podman host
-if os.path.isdir("/var/lib/docker") and os.path.isdir("/var/run/docker") and which("docker"):
- DEFAULT_CFG_SECTION = {
- "base_url": "unix://var/run/docker.sock",
- "skip_sections": "",
- "container_id": "short",
- }
-
-# Use podman CFG_SECTION
-elif os.path.isfile("/usr/bin/runc") and which("podman"):
- DEFAULT_CFG_SECTION = {
- "base_url": "unix://run/podman/podman.sock",
- "skip_sections": "",
- "container_id": "short",
- }
-
-else:
- sys.stderr.write("mk_docker.py: Does not seem to be a docker or podman host. Terminating.\n")
+# is a way to end the plugin in case it is being executed on a non-docker host
+if (
+ not os.path.isdir("/var/lib/docker")
+ and not os.path.isdir("/var/run/docker")
+ and not which("docker")
+):
+ sys.stderr.write("mk_docker.py: Does not seem to be a docker host. Terminating.\n")
sys.exit(1)
try:
- import docker # type: ignore[import]
+ import docker # type: ignore[import-untyped]
except ImportError:
sys.stdout.write(
"<<>>\n"
@@ -92,6 +81,12 @@ def which(prg):
DEFAULT_CFG_FILE = os.path.join(os.getenv("MK_CONFDIR", ""), "docker.cfg")
+DEFAULT_CFG_SECTION = {
+ "base_url": "unix://var/run/docker.sock",
+ "skip_sections": "",
+ "container_id": "short",
+}
+
LOGGER = logging.getLogger(__name__)
@@ -166,10 +161,10 @@ def __init__(self, name=None, piggytarget=None):
if piggytarget is not None:
self.append("<<<<%s>>>>" % piggytarget)
if name is not None:
- self.append("<<>>" % name)
+ self.append("<<<%s:sep(124)>>>" % name)
version_json = json.dumps(Section.version_info)
self.append("@docker_version_info|%s" % version_json)
- self.append("<<>>" % name)
+ self.append("<<<%s:sep(0)>>>" % name)
def write(self):
if self[0].startswith("<<<<"):
@@ -183,7 +178,7 @@ def write(self):
def report_exception_to_server(exc, location):
LOGGER.info("handling exception: %s", exc)
msg = "Plugin exception in %s: %s" % (location, exc)
- sec = Section("node_info")
+ sec = Section("docker_node_info")
sec.append(json.dumps({"Unknown": msg}))
sec.write()
@@ -267,7 +262,7 @@ def _read_df_result(self):
return json.loads(file_.read())
-class MKDockerClient(docker.DockerClient):
+class MKDockerClient(docker.DockerClient): # type: ignore[misc]
"""a docker.DockerClient that caches containers and node info"""
API_VERSION = "auto"
@@ -410,7 +405,7 @@ def is_disabled_section(config, section_name):
@time_it
def section_node_info(client):
LOGGER.debug(client.node_info)
- section = Section("node_info")
+ section = Section("docker_node_info")
section.append(json.dumps(client.node_info))
section.write()
@@ -418,7 +413,7 @@ def section_node_info(client):
@time_it
def section_node_disk_usage(client):
"""docker system df"""
- section = Section("node_disk_usage")
+ section = Section("docker_node_disk_usage")
try:
data = client.df()
except docker.errors.APIError as exc:
@@ -496,7 +491,7 @@ def _robust_inspect(client, docker_object):
@time_it
def section_node_images(client):
"""in subsections list [[[images]]] and [[[containers]]]"""
- section = Section("node_images")
+ section = Section("docker_node_images")
images = _robust_inspect(client, "images")
LOGGER.debug(images)
@@ -515,14 +510,14 @@ def section_node_images(client):
@time_it
def section_node_network(client):
networks = client.networks.list(filters={"driver": "bridge"})
- section = Section("node_network")
+ section = Section("docker_node_network")
section += [json.dumps(n.attrs) for n in networks]
section.write()
def section_container_node_name(client, container_id):
node_name = client.node_info.get("Name")
- section = Section("container_node_name", piggytarget=container_id)
+ section = Section("docker_container_node_name", piggytarget=container_id)
section.append(json.dumps({"NodeName": node_name}))
section.write()
@@ -545,14 +540,14 @@ def section_container_status(client, container_id):
pass
status["NodeName"] = client.node_info.get("Name")
- section = Section("container_status", piggytarget=container_id)
+ section = Section("docker_container_status", piggytarget=container_id)
section.append(json.dumps(status))
section.write()
def section_container_labels(client, container_id):
container = client.all_containers[container_id]
- section = Section("container_labels", piggytarget=container_id)
+ section = Section("docker_container_labels", piggytarget=container_id)
section.append(json.dumps(container.labels))
section.write()
@@ -560,7 +555,7 @@ def section_container_labels(client, container_id):
def section_container_network(client, container_id):
container = client.all_containers[container_id]
network = container.attrs.get("NetworkSettings", {})
- section = Section("container_network", piggytarget=container_id)
+ section = Section("docker_container_network", piggytarget=container_id)
section.append(json.dumps(network))
section.write()
@@ -603,7 +598,7 @@ def section_container_mem(client, container_id):
if stats is None: # container not running
return
container_mem = stats["memory_stats"]
- section = Section("container_mem", piggytarget=container_id)
+ section = Section("docker_container_mem", piggytarget=container_id)
section.append(json.dumps(container_mem))
section.write()
@@ -613,7 +608,7 @@ def section_container_cpu(client, container_id):
if stats is None: # container not running
return
container_cpu = stats["cpu_stats"]
- section = Section("container_cpu", piggytarget=container_id)
+ section = Section("docker_container_cpu", piggytarget=container_id)
section.append(json.dumps(container_cpu))
section.write()
@@ -625,7 +620,7 @@ def section_container_diskstat(client, container_id):
container_blkio = stats["blkio_stats"]
container_blkio["time"] = time.time()
container_blkio["names"] = client.device_map()
- section = Section("container_diskstat", piggytarget=container_id)
+ section = Section("docker_container_diskstat", piggytarget=container_id)
section.append(json.dumps(container_blkio))
section.write()
@@ -660,9 +655,16 @@ def call_node_sections(client, config):
except Exception as exc:
if DEBUG:
raise
+            # The docker_node_info section is always written anyway. Prevent a duplicate @docker_version_info
+ if name != "docker_node_info":
+ write_empty_section(name)
report_exception_to_server(exc, section.__name__)
+def write_empty_section(name, piggytarget=None):
+ Section(name, piggytarget).write()
+
+
def call_container_sections(client, config):
jobs = []
for container_id in client.all_containers:
diff --git a/agents/plugins/mk_errpt.aix b/agents/plugins/mk_errpt.aix
index a060d1a678a..919819fde00 100755
--- a/agents/plugins/mk_errpt.aix
+++ b/agents/plugins/mk_errpt.aix
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Logfile monitoring for AIX via errpt
#
diff --git a/agents/plugins/mk_filehandler b/agents/plugins/mk_filehandler
index f97f41c6477..ff2802c593e 100755
--- a/agents/plugins/mk_filehandler
+++ b/agents/plugins/mk_filehandler
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
FILE=/proc/sys/fs/file-nr
diff --git a/agents/plugins/mk_filestats.py b/agents/plugins/mk_filestats.py
index b33473c83b5..39656681c62 100755
--- a/agents/plugins/mk_filestats.py
+++ b/agents/plugins/mk_filestats.py
@@ -87,7 +87,7 @@
'../cfg_examples/filestats.cfg' relative to this file.
"""
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
import collections
import configparser
@@ -269,7 +269,7 @@ def __iter__(self):
if filestat.isfile:
yield filestat
if filestat.isdir:
- for filestat in self._iterate_folder(item):
+ for filestat in self._iterate_folder(item): # pylint: disable=use-yield-from
yield filestat
diff --git a/agents/plugins/mk_haproxy.freebsd b/agents/plugins/mk_haproxy.freebsd
index fc902c363c6..584d49f4f89 100755
--- a/agents/plugins/mk_haproxy.freebsd
+++ b/agents/plugins/mk_haproxy.freebsd
@@ -4,7 +4,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
if [ -r /var/run/haproxy.stat ]; then
echo "<<>>"
diff --git a/agents/plugins/mk_informix b/agents/plugins/mk_informix
index b945ebbf688..e619a6fc8fa 100755
--- a/agents/plugins/mk_informix
+++ b/agents/plugins/mk_informix
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Informix
# Make ENV-VARs avail for subshells
@@ -24,6 +24,60 @@ set -a
# | |_| |
# '----------------------------------------------------------------------'
+# BEGIN COMMON PLUGIN CODE
+
+# check that no users other than root can change the file
+only_root_can_modify() {
+ permissions=$1
+ owner=$2
+ group=$3
+
+ group_write_perm=$(echo "$permissions" | cut -c 6)
+ other_write_perm=$(echo "$permissions" | cut -c 9)
+
+ if [ "$owner" != "root" ] || [ "$other_write_perm" != "-" ]; then
+ return 1
+ fi
+
+ [ "$group" = "root" ] || [ "$group_write_perm" = "-" ]
+}
+
+get_binary_owner() {
+ BINARY_PATH=$1
+ stat -c '%U' "${BINARY_PATH}"
+}
+
+get_binary_execution_mode() {
+ BINARY_PATH=$1
+ BINARY_USER=$2
+
+ # if the executable belongs to someone besides root, do not execute it as root
+ if needs_user_switch_before_executing "$BINARY_PATH"; then
+ echo "su ${BINARY_USER} -c"
+ return
+ fi
+ echo "bash -c"
+}
+
+needs_user_switch_before_executing() {
+ BINARY_PATH=$1
+
+ [ "$(whoami)" = "root" ] && ! only_root_can_modify "$(stat -c '%A' "$BINARY_PATH")" "$(stat -c '%U' "$BINARY_PATH")" "$(stat -c '%G' "$BINARY_PATH")"
+}
+
+# END COMMON PLUGIN CODE
+
+set_env() {
+ # set environment variables given in the form VARNAME1=value1;VARNAME2=value2;...
+ while IFS=';' read -ra parts; do
+ for part in "${parts[@]}"; do
+ var_name="${part%%=*}"
+ var_value="${part#*=}"
+ export "$var_name"="$var_value"
+ done
+ done <<<"$1"
+}
+
do_check() {
# $1:section, $2:excludelist
if echo "$2" | grep -qe "${1}"; then
@@ -77,10 +131,10 @@ all_sections="sessions locks tabextents dbspaces logusage"
informix_status() {
echo "<<>>"
echo "[[[$INFORMIXSERVER/$SERVERNUM]]]"
- "$INFORMIXDIR/bin/onstat" - >/dev/null 2>&1
+ $EXECUTION_MODE "\"$INFORMIXDIR\"/bin/onstat -" >/dev/null 2>&1
state=$?
echo "Status:"$state
- "$INFORMIXDIR/bin/onstat" -g dis
+ $EXECUTION_MODE "\"$INFORMIXDIR\"/bin/onstat -g dis"
port=$(grep "$INFORMIXSERVER" /etc/services)
echo "PORT:$port"
}
@@ -214,13 +268,16 @@ fi
# | |
# '----------------------------------------------------------------------'
+INFORMIXDIR=${ONINIT_PATH%/bin*}
+
+if [ ! -f "$INFORMIXDIR/bin/onstat" ]; then
+ exit 1
+fi
+
+EXECUTION_MODE="$(get_binary_execution_mode "$INFORMIXDIR/bin/onstat" "$(get_binary_owner "$INFORMIXDIR/bin/onstat")")"
+
for IDSENV in $(
- # FIXME: This is supressed, so we can enable the shellcheck.
- # I think the shellchecker is right here: INFORMIXDIR will not
- # be available in the for-loops body.
- # shellcheck disable=SC2030
- export INFORMIXDIR=${ONINIT_PATH%/bin*}
- "$INFORMIXDIR/bin/onstat" -g dis |
+ $EXECUTION_MODE "$INFORMIXDIR/bin/onstat -g dis" |
grep -E '^Server[ ]*:|^Server Number[ ]*:|^INFORMIX|^SQLHOSTS|^ONCONFIG' |
sed -e 's/Server Number/SERVERNUM/' \
-e 's/Server/INFORMIXSERVER/' \
@@ -231,13 +288,12 @@ for IDSENV in $(
); do
(
- # Set environment
- eval "$IDSENV"
+ set_env "$IDSENV"
# FIXME: This is suppressed, so we can enable the shellcheck.
# I think the shellchecker is right here: INFORMIXDIR will not
# be available in the for-loop's body.
# shellcheck disable=SC2031
- PATH="$INFORMIXDIR/bin:$PATH"
+ PATH="$PATH:$INFORMIXDIR/bin"
# try to set them via 'onstat -g env' otherwise
# DB HAS TO BE RUNNING
diff --git a/agents/plugins/mk_inotify.py b/agents/plugins/mk_inotify.py
index 47e5ae9848e..7ae6f559678 100755
--- a/agents/plugins/mk_inotify.py
+++ b/agents/plugins/mk_inotify.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
import os
import signal
@@ -14,7 +14,7 @@
try:
import configparser
except ImportError: # Python 2
- import ConfigParser as configparser # type: ignore[import,no-redef]
+ import ConfigParser as configparser # type: ignore[import-not-found,no-redef]
try:
from typing import Any # noqa: F401 # pylint: disable=unused-import
@@ -23,7 +23,7 @@
try:
# TODO: We should probably ship this package.
- import pyinotify # type: ignore[import] # pylint: disable=import-error
+ import pyinotify # type: ignore[import-not-found] # pylint: disable=import-error
except ImportError:
sys.stderr.write("Error: Python plugin pyinotify is not installed\n")
sys.exit(1)
@@ -237,7 +237,7 @@ def do_output(what, event):
}
-class NotifyEventHandler(pyinotify.ProcessEvent):
+class NotifyEventHandler(pyinotify.ProcessEvent): # type: ignore[misc]
def process_IN_MOVED_TO(self, event):
do_output("movedto", event)
diff --git a/agents/plugins/mk_inventory.aix b/agents/plugins/mk_inventory.aix
index 35cbc668842..292d2bf461e 100755
--- a/agents/plugins/mk_inventory.aix
+++ b/agents/plugins/mk_inventory.aix
@@ -13,7 +13,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
_load_config() {
# defaults:
diff --git a/agents/plugins/mk_inventory.linux b/agents/plugins/mk_inventory.linux
index d5ae4376c1b..b0616f1e603 100755
--- a/agents/plugins/mk_inventory.linux
+++ b/agents/plugins/mk_inventory.linux
@@ -13,7 +13,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
_load_config() {
# defaults:
@@ -196,7 +196,9 @@ section_lnx_video() {
vgas="$(lspci | grep VGA | cut -d" " -f 1)"
[ -n "$vgas" ] || return
echo "<<>>"
- lspci -v -s "$vgas"
+ printf '%s\n' "$vgas" | while IFS= read -r vga; do
+ lspci -v -s "$vga"
+ done
}
section_lnx_ip_r() {
@@ -215,8 +217,8 @@ section_lnx_sysctl() {
}
section_lnx_block_devices() {
- # Block devices
- (# This subshell is for scoping UEVENT_FILE and DEVICE_DIR
+ # The subshell below is for scoping UEVENT_FILE and DEVICE_DIR.
+ (
# use sep=0 rather than 124 in order to be more flexible in the parse function
echo "<<>>"
find "/sys/block/" -maxdepth 1 -type f -o -type l | while read -r DEVICE; do
diff --git a/agents/plugins/mk_inventory.solaris b/agents/plugins/mk_inventory.solaris
index e246e132011..35d001fa012 100755
--- a/agents/plugins/mk_inventory.solaris
+++ b/agents/plugins/mk_inventory.solaris
@@ -13,7 +13,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
_load_config() {
# defaults:
diff --git a/agents/plugins/mk_iptables b/agents/plugins/mk_iptables
index 86355fab483..f4f7a432cb2 100755
--- a/agents/plugins/mk_iptables
+++ b/agents/plugins/mk_iptables
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# iptables
if type iptables-save >/dev/null; then
diff --git a/agents/plugins/mk_jolokia.py b/agents/plugins/mk_jolokia.py
index c59ff243542..50dbb240e03 100755
--- a/agents/plugins/mk_jolokia.py
+++ b/agents/plugins/mk_jolokia.py
@@ -4,12 +4,13 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
USER_AGENT = "checkmk-agent-mk_jolokia-" + __version__
import io
import os
+import re
import socket
import sys
import urllib.parse
@@ -416,7 +417,6 @@ def _initialize_http_session(self, user_agent):
session.verify = self._config["verify"]
if session.verify is False:
urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning)
- session.timeout = self._config["timeout"] # type: ignore[attr-defined]
session.headers["User-Agent"] = user_agent
auth_method = self._config.get("mode")
@@ -445,7 +445,8 @@ def _initialize_http_session(self, user_agent):
return session
def get_post_data(self, path, function, use_target):
- segments = path.strip("/").split("/")
+        segments = re.split(r"(?<!\\)/", path.strip("/"))

+def _parse_fetched_data(data):
+    # type: (dict) -> tuple[str, str, str]
+ if "details" in data:
+ info = data["details"]
+ # https://github.com/jolokia/jolokia/blob/2.0/src/documentation/manual/modules/ROOT/pages/jolokia_mbeans.adoc
+ product = info.get("server_product", "unknown")
+ version = info.get("server_version", "unknown")
+ else: # jolokia version 1.7.2 or lower
+ # https://github.com/jolokia/jolokia/blob/v1.7.2/src/docbkx/protocol/version.xml
+ info = data.get("info", {})
+ product = info.get("product", "unknown")
+ version = info.get("version", "unknown")
+ agentversion = data.get("agent", "unknown")
+ return product, version, agentversion
+
+
def generate_jolokia_info(inst):
# Determine type of server
try:
@@ -659,15 +679,13 @@ def generate_jolokia_info(inst):
yield inst.name, "ERROR", str(exc)
raise SkipInstance(exc)
- info = data.get("info", {})
- version = info.get("version", "unknown")
- product = info.get("product", "unknown")
+ product, version, agentversion = _parse_fetched_data(data)
+
if inst.product is not None:
product = inst.product
else:
inst.product = product
- agentversion = data.get("agent", "unknown")
yield inst.name, product, version, agentversion
diff --git a/agents/plugins/mk_logins b/agents/plugins/mk_logins
index d828d9356d1..23da771aa67 100755
--- a/agents/plugins/mk_logins
+++ b/agents/plugins/mk_logins
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
if type who >/dev/null; then
echo "<<>>"
diff --git a/agents/plugins/mk_logwatch.py b/agents/plugins/mk_logwatch.py
index f97f6111b19..09b2e6ef132 100755
--- a/agents/plugins/mk_logwatch.py
+++ b/agents/plugins/mk_logwatch.py
@@ -20,7 +20,7 @@
from __future__ import with_statement
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
import sys
diff --git a/agents/plugins/mk_mongodb.py b/agents/plugins/mk_mongodb.py
index ffa0d48d6c9..3f4d3470661 100755
--- a/agents/plugins/mk_mongodb.py
+++ b/agents/plugins/mk_mongodb.py
@@ -24,7 +24,7 @@
"""
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
import argparse
import configparser
diff --git a/agents/plugins/mk_mysql b/agents/plugins/mk_mysql
index 66b3399d2f8..5aa3430ee52 100755
--- a/agents/plugins/mk_mysql
+++ b/agents/plugins/mk_mysql
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# gets optional socket as argument
do_query() {
@@ -43,6 +43,7 @@ do_query() {
}
+# The following logic also exists in the Windows VBS script mk_mysql
if [ ! -f "${MK_CONFDIR}/mysql.local.cfg" ]; then
cat <"${MK_CONFDIR}/mysql.local.cfg"
# This file is created because some versions of mysqladmin
diff --git a/agents/plugins/mk_nfsiostat b/agents/plugins/mk_nfsiostat
index 10a5f97ecb4..428589f5659 100755
--- a/agents/plugins/mk_nfsiostat
+++ b/agents/plugins/mk_nfsiostat
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
if command -v nfsiostat >/dev/null; then
echo '<<<nfsiostat>>>'
diff --git a/agents/plugins/mk_omreport b/agents/plugins/mk_omreport
index c9b444f9b9c..3c6224e1b96 100755
--- a/agents/plugins/mk_omreport
+++ b/agents/plugins/mk_omreport
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
if command -v omreport >/dev/null; then
echo "<<>>"
diff --git a/agents/plugins/mk_oracle b/agents/plugins/mk_oracle
index 5134c88bee0..c62e1c4224d 100755
--- a/agents/plugins/mk_oracle
+++ b/agents/plugins/mk_oracle
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Checkmk agent plugin for monitoring ORACLE databases
# This plugin is a result of the common work of Thorsten Bruhns
@@ -206,6 +206,10 @@ CONFIGURATION CUSTOM SQLS:
Only useful if SQLS_SECTION_NAME is not 'oracle_sql'.
ASCII code, e.g. '58' means ':'.
Can be set globally or within a section definition.
+ If an error occurs, the error message will have the
+ following format, independent of the separator chosen.
+
+                     <sid>|FAILURE|<error message>
SQLS_ITEM_NAME=item_name
Only useful if SQLS_SECTION_NAME is 'oracle_sql'. In
@@ -602,9 +606,8 @@ set_ora_env() {
if [ -f ${OLRLOC} ]; then
# oratab is not supported in Grid-Infrastructure 12.2+
# => fetch ORACLE_HOME from cluster repository for all GI/Restart Environments!
- # OLRLOC holds CRS_HOME
- # shellcheck disable=SC1090
- . ${OLRLOC}
+ # OLRLOC holds crs_home
+ crs_home=$(get_crs_home_from_olrloc "${OLRLOC}")
export ORA_HOME_SOURCE="(GI): "
# set ORACLE_HOME = crs_home for ASM
@@ -612,8 +615,10 @@ set_ora_env() {
# shellcheck disable=SC2154
ORACLE_HOME=${crs_home}
else
+ crsctl_path="${crs_home}"/bin/crsctl
+ EXECUTION_MODE="$(get_binary_execution_mode "$crsctl_path" "$(get_binary_owner "$crsctl_path")")"
# get ORACLE_HOME with crsctl from Oracle Grid Infrastructure / Restart
- ORACLE_HOME=$("${crs_home}"/bin/crsctl stat res -p -w "((TYPE = ora.database.type) and (GEN_USR_ORA_INST_NAME = ${ORACLE_SID}))" | grep -m1 "^ORACLE_HOME=" | cut -d"=" -f2)
+ ORACLE_HOME=$($EXECUTION_MODE "\"${crsctl_path}\" stat res -p -w \"((TYPE = ora.database.type) and (GEN_USR_ORA_INST_NAME = ${ORACLE_SID}))\" | ${GREP} -m1 ^ORACLE_HOME= | cut -d= -f2")
fi
else
# Single Instance with oratab
@@ -634,7 +639,6 @@ set_ora_env() {
fi
fi
- LD_LIBRARY_PATH=$ORACLE_HOME/lib
if [ ! -d "${ORACLE_HOME:-'not_found'}" ]; then
logging -c -e "[${sid}] [set_ora_env]" "ORA-99999 ORACLE_HOME for SID '${ORACLE_SID}' not found or not existing!"
return 2
@@ -645,7 +649,7 @@ set_ora_env() {
logging -c -e "[${sid}] [set_ora_env]" "TNS_ADMIN/sqlnet.ora: ${TNS_ADMIN}/sqlnet.ora"
exit 1
fi
- export ORACLE_HOME TNS_ADMIN ORACLE_SID LD_LIBRARY_PATH
+ export ORACLE_HOME TNS_ADMIN ORACLE_SID
}
set_ora_version() {
@@ -1242,7 +1246,7 @@ sql_dataguard_stats() {
JOIN v\$parameter vp on 1=1
JOIN v\$instance i on 1=1
left outer join V\$dataguard_stats ds on 1=1
- left outer join (select listagg(to_char(inst_id) || '.' || status, ', ') status
+ left outer join (select listagg(to_char(inst_id) || '.' || status, ', ') WITHIN GROUP (ORDER BY to_char(inst_id) || '.' || status) status
from gv\$managed_standby
where process = 'MRP0') ms on 1=1
WHERE vp.name = 'log_archive_config'
@@ -2334,9 +2338,58 @@ handle_custom_sql_errors() {
# | |_| |
# '----------------------------------------------------------------------'
+# BEGIN COMMON PLUGIN CODE
+
+# check that no users other than root can change the file
+only_root_can_modify() {
+ permissions=$1
+ owner=$2
+ group=$3
+
+ group_write_perm=$(echo "$permissions" | cut -c 6)
+ other_write_perm=$(echo "$permissions" | cut -c 9)
+
+ if [ "$owner" != "root" ] || [ "$other_write_perm" != "-" ]; then
+ return 1
+ fi
+
+ [ "$group" = "root" ] || [ "$group_write_perm" = "-" ]
+}
+
+get_binary_owner() {
+ BINARY_PATH=$1
+ stat -c '%U' "${BINARY_PATH}"
+}
+
+get_binary_execution_mode() {
+ BINARY_PATH=$1
+ BINARY_USER=$2
+
+ # if the executable belongs to someone besides root, do not execute it as root
+ if needs_user_switch_before_executing "$BINARY_PATH"; then
+ echo "su ${BINARY_USER} -c"
+ return
+ fi
+ echo "bash -c"
+}
+
+needs_user_switch_before_executing() {
+ BINARY_PATH=$1
+
+ [ "$(whoami)" = "root" ] && ! only_root_can_modify "$(stat -c '%A' "$BINARY_PATH")" "$(stat -c '%U' "$BINARY_PATH")" "$(stat -c '%G' "$BINARY_PATH")"
+}
+
+# END COMMON PLUGIN CODE
+
+get_crs_home_from_olrloc() {
+ "${GREP}" "crs_home" "${1}" | cut -d"=" -f2
+}
+
get_sqlplus_version_with_precision() {
precision="$1"
- "${ORACLE_HOME}"/bin/sqlplus -V | "${GREP}" ^SQL | cut -d" " -f3 | cut -d"." -f-"${precision}"
+ sqlplus_path="${ORACLE_HOME}"/bin/sqlplus
+ EXECUTION_MODE="$(get_binary_execution_mode "$sqlplus_path" "$(get_binary_owner "$sqlplus_path")")"
+ $EXECUTION_MODE "\"${sqlplus_path}\" -V" | ${GREP} ^SQL | cut -d" " -f3 | cut -d"." -f-"${precision}"
}
print_dummy_sections() {
@@ -2431,6 +2484,8 @@ run_cached() {
NAME="oracle_${MK_SID}${3}"
MAXAGE="${1}"
+ # Attention: CMD will contain the shell function to be executed. Make sure to export all needed functions during
+ # that execution (see e.g. export -f logging)
CMD="${2}"
CREATION_TIMEOUT=$((MAXAGE * 2))
@@ -2479,7 +2534,16 @@ run_cached() {
echo "<<>>"
pid="${cfile##*.new.}"
printf "timeout|%s|%s|%s\n" "${NAME}" "${CREATION_TIMEOUT}" "${pid}"
- kill -9 "${pid}" >/dev/null 2>&1 && sleep 2 # TODO: what about child processes?
+
+ # Workaround for AIX to kill child processes
+ if [ "$OS_TYPE" = 'AIX' ] && [ -x "$(command -v proctree 2>/dev/null)" ]; then
+ # shellcheck disable=SC2016
+ pidlist="$(proctree "$pid" 2>/dev/null | $AWK '{ printf $1" "}')"
+ fi
+ pidlist="${pidlist:-"${pid}"}"
+ # shellcheck disable=SC2086
+ kill -9 ${pidlist} >/dev/null 2>&1 && sleep 2 # TODO: what about child processes under non-AIX systems?
+
if [ -n "$(ps -o args= -p "${pid}")" ]; then
printf "killfailed|%s|%s|%s\n" "${NAME}" "${CREATION_TIMEOUT}" "${pid}"
else
@@ -2642,8 +2706,10 @@ mk_ora_db_connect() {
logging "[${sid}] [mk_ora_db_connect]" "TNS Alias PING: $TNSALIAS"
- if [ -f "${ORACLE_HOME}"/bin/tnsping ]; then
- if "${ORACLE_HOME}"/bin/tnsping "$TNSALIAS" >/dev/null 2>&1; then
+ tnsping_path="${ORACLE_HOME}"/bin/tnsping
+ if [ -f "${tnsping_path}" ]; then
+ EXECUTION_MODE="$(get_binary_execution_mode "$tnsping_path" "$(get_binary_owner "$tnsping_path")")"
+ if $EXECUTION_MODE "\"${tnsping_path}\" \"$TNSALIAS\"" >/dev/null 2>&1; then
TNSPINGOK=yes
else
unset TNSALIAS
@@ -2711,18 +2777,24 @@ mk_ora_db_connect() {
# '----------------------------------------------------------------------'
mk_ora_sqlplus() {
+    # Executes a SQL query using the sqlplus binary.
+    # The query is piped in and consumed via cat, so always run cat at the very beginning of the function.
+ function_stdin="$(cat)"
local from_where="$1"
local print_elapsed_time="$2"
local start_time=
local elapsed_time=
- local loc_stdin=
local output=
logging "[${MK_SID}] [${from_where}] [mk_ora_sqlplus]" "Piggyback host: $MK_PIGGYBACK_HOST"
start_time="$(perl -MTime::HiRes=time -wle 'print time')"
- loc_stdin=$(cat)
-
+    read -r -d '' pipe_input <<EOF
+connect ${MK_DB_CONNECT}
+$(ora_session_environment)${function_stdin}
+EOF
+
if [ -n "$MK_PIGGYBACK_HOST" ]; then
echo "<<<<${MK_PIGGYBACK_HOST}>>>>"
fi
- if output=$(echo "$(ora_session_environment)${loc_stdin}" | "$SQLPLUS" -L -s "$MK_DB_CONNECT"); then
+ EXECUTION_USER="$(get_binary_owner "$SQLPLUS")"
+ EXECUTION_MODE="$(get_binary_execution_mode "$SQLPLUS" "$EXECUTION_USER")"
+
+ if ! $EXECUTION_MODE "test -r \"${TNS_ADMIN}/sqlnet.ora\""; then
+    # we cannot read sqlnet.ora!
+    # set_ora_env already checked that the file exists,
+    # and we want to transport a meaningful error message to the UI and the log
+    local sqlnet_ora_error="${TNS_ADMIN}/sqlnet.ora cannot be read by user \"${EXECUTION_USER}\"! Either use the 'sqlnet.ora permission group' bakery rule, or directly modify the permissions of the file."
+ logging -c -e "[${MK_SID}] [${from_where}] [mk_ora_sqlplus]" "$sqlnet_ora_error"
+
+    echo '<<<oracle_instance:sep(124)>>>'
+ echo "$(echo "$MK_SID" | tr '[:lower:]' '[:upper:]')|FAILURE|$sqlnet_ora_error"
+ exit 1
+
+ fi
+
+ if output=$(echo "$pipe_input" | $EXECUTION_MODE "\"$SQLPLUS\" -L -s /nolog"); then
echo -e "$output"
elapsed_time=$(bc <<<"$(perl -MTime::HiRes=time -wle 'print time')-$start_time")
@@ -2756,6 +2844,7 @@ mk_ora_sqlplus() {
if $MK_ORA_DEBUG_CONNECT; then
echo " Logindetails: ${MK_DB_CONNECT}" >&2
echo -e " Error Message: ${output:0:100}"
+ exit 1
else
echo '<<<oracle_instance:sep(124)>>>'
echo -e "$output"
@@ -2790,10 +2879,120 @@ do_async_checks() {
echo "$MK_ASYNC_SECTIONS_QUERY" | mk_ora_sqlplus "do_async_checks"
}
+do_suggest_group() {
+ local MODE=$1
+ local USER=$2
+ local FILE=$3
+ if id -Gn "$USER" | "${GREP}" '\boinstall\b' >/dev/null; then
+        echo "  We suggest changing the group to oinstall and giving $MODE permission for the group:"
+ echo " chgrp oinstall \"$FILE\""
+ local m="x"
+ if [ "$MODE" = "read" ]; then
+ m="r"
+ fi
+ echo " chmod g+$m \"$FILE\""
+ fi
+}
+
+do_test_file_permission() {
+ local MODE=$1
+ local USER=$2
+ local FILE=$3
+
+ local m="-x"
+ if [ "$MODE" = "read" ]; then
+ m="-r"
+ fi
+
+ if su "${USER}" -c "test $m \"$FILE\""; then
+ echo "* user \"$USER\" can $MODE $FILE"
+ true
+ else
+ echo "* ERROR! user \"$USER\" can NOT $MODE $FILE"
+ echo " $(ls -dl "$FILE")"
+ do_suggest_group "$MODE" "$USER" "$FILE"
+ echo
+ false
+ fi
+}
+
+do_test_permissions() {
+ local DEFAULT_SQLNET="LOG_DIRECTORY_CLIENT=/var/log/check_mk/oracle_clientDIAG_ADR_ENABLED=OFFSQLNET.WALLET_OVERRIDE=TRUEWALLET_LOCATION=(SOURCE=(METHOD=FILE)(METHOD_DATA=(DIRECTORY=/etc/check_mk/oracle_wallet)))"
+
+ echo
+ echo "---checking permissions-------------------------------------------------"
+ echo "see https://checkmk.atlassian.net/wiki/spaces/KB/pages/70582273/Troubleshooting+mk+oracle+for+Windows+and+Linux"
+ echo
+
+ local SQLPLUS="${ORACLE_HOME}/bin/sqlplus"
+ echo "* sqlplus binary: ${SQLPLUS}"
+ local BINARY_OWNER
+ BINARY_OWNER="$(get_binary_owner "$SQLPLUS")"
+ echo "* sqlplus binary owner: ${BINARY_OWNER}"
+ if ! needs_user_switch_before_executing "$SQLPLUS"; then
+ echo "* change user: false"
+ echo "------------------------------------------------------------------------"
+ return
+ fi
+ echo "* change user: true"
+
+ echo "* \$TNS_ADMIN: ${TNS_ADMIN}"
+ local PATH_SQLNET="$TNS_ADMIN/sqlnet.ora"
+ local PATH_TNSNAMES="$TNS_ADMIN/tnsnames.ora"
+ if ! do_test_file_permission "read" "$BINARY_OWNER" "$PATH_SQLNET"; then
+        echo "  If you use the AGENT BAKERY you have to use the rule 'sqlnet.ora permission group' to make this change permanent; otherwise it will be overwritten by an agent update."
+ fi
+ if [ -f "$PATH_TNSNAMES" ]; then
+ do_test_file_permission "read" "$BINARY_OWNER" "$PATH_TNSNAMES"
+ fi
+ echo
+
+ local GENERIC_ERROR_MESSAGE="
+ Could not login. In case you are using a wallet to connect, there might be a permission error.
+ Make sure that the wallet folder can be read and executed by user \"$BINARY_OWNER\" and
+ the files inside the wallet can be read by the user.
+ Consult your ora files for hints where the wallet is located:
+ $PATH_SQLNET
+ $PATH_TNSNAMES
+"
+
+ # let's go all in and see if the connection works:
+ if error=$(echo "" | mk_ora_sqlplus "do_testmode" "no" 2>&1); then
+ echo "* test login works"
+ else
+ echo "* test-login does not work!"
+ if echo "$error" | ${GREP} "ORA-12578" >/dev/null; then
+            echo "  ORA-12578 suggests that there is an error reading the wallet."
+        if [ "$(sed '/^#/d' <"$PATH_SQLNET" | tr -d '[:space:]')" = "$DEFAULT_SQLNET" ]; then
diff --git a/agents/plugins/mk_oracle_crs b/agents/plugins/mk_oracle_crs
--- a/agents/plugins/mk_oracle_crs
+++ b/agents/plugins/mk_oracle_crs
function printcrslocal() {
ps -ef | $GREP -v grep | $GREP cssd.bin >/dev/null || exit 0
echo "<<>>"
- $CRSCTL query has releaseversion
+ execute_as_user "${CRSCTL}" "query has releaseversion"
echo "<<>>"
OLS_NODENAME=$(uname -n)
echo "nodename|$OLS_NODENAME"
- $CRSCTL stat res -f | $GREP -E $resourcefilter | sed "s/^/csslocal\|/"
+ execute_as_user "${CRSCTL}" "stat res -f" | $GREP -E $resourcefilter | sed "s/^/csslocal\|/"
}
function printcrsdata() {
ps -ef | $GREP -v grep | $GREP -e ohasd.bin -e crsd.bin >/dev/null || exit 0
echo "<<>>"
- crs_version=$($CRSCTL query crs releaseversion)
+ crs_version=$(execute_as_user "${CRSCTL}" "query crs releaseversion")
echo "$crs_version"
echo "<<>>"
- $CRSCTL query css votedisk | $GREP "^ [0-9]"
+ execute_as_user "${CRSCTL}" "query css votedisk" | $GREP "^ [0-9]"
ps -ef | $GREP -v grep | $GREP crsd.bin >/dev/null || exit 0
echo "<<>>"
- OLS_NODENAME=$($OLSNODES -l)
+ OLS_NODENAME=$(execute_as_user "${OLSNODES}" "-l")
echo "nodename|$OLS_NODENAME"
crs_version_short=$(echo "$crs_version" | cut -d"[" -f2- | cut -d"." -f-2 | sed 's/\.//')
if [ "$crs_version_short" -ge 112 ]; then
- $CRSCTL stat res -v -n "$OLS_NODENAME" -init | $GREP -E "$resourcefilter" | sed "s/^/csslocal\|/"
- for nodelist in $($OLSNODES); do
- $CRSCTL stat res -v -n "$nodelist" | $GREP -E "$resourcefilter" | sed "s/^/$nodelist\|/"
+ execute_as_user "${CRSCTL}" "stat res -v -n \"$OLS_NODENAME\" -init" | $GREP -E "$resourcefilter" | sed "s/^/csslocal\|/"
+ for nodelist in $(execute_as_user "${OLSNODES}"); do
+ execute_as_user "${CRSCTL}" "stat res -v -n \"$nodelist\"" | $GREP -E "$resourcefilter" | sed "s/^/$nodelist\|/"
done
else
- $CRS_STAT -f -c "$OLS_NODENAME" | $GREP -E "$resourcefilter" | sed "s/^/$OLS_NODENAME\|/"
+ execute_as_user "${CRS_STAT}" "-f -c \"$OLS_NODENAME\"" | $GREP -E "$resourcefilter" | sed "s/^/$OLS_NODENAME\|/"
fi
}
diff --git a/agents/plugins/mk_postgres.py b/agents/plugins/mk_postgres.py
index 92f2a0ff4e1..5a109cdb3bb 100755
--- a/agents/plugins/mk_postgres.py
+++ b/agents/plugins/mk_postgres.py
@@ -52,7 +52,7 @@
Different defaults are chosen for Windows.
"""
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
import abc
import io
@@ -517,7 +517,9 @@ def _parse_wmic_logicaldisk(wmic_output):
@classmethod
def _logical_drives(cls):
# type: () -> Iterable[str]
- for drive in cls._parse_wmic_logicaldisk(cls._call_wmic_logicaldisk()):
+ for drive in cls._parse_wmic_logicaldisk( # pylint: disable=use-yield-from # for python2.7
+ cls._call_wmic_logicaldisk()
+ ):
yield drive
def get_psql_binary_path(self):
@@ -1262,7 +1264,7 @@ def parse_postgres_cfg(postgres_cfg, config_separator):
pg_database, pg_port, pg_version = parse_env_file(env_file)
instances.append(
{
- "name": instance_name,
+ "name": instance_name.strip(),
"pg_user": pg_user.strip(),
"pg_passfile": pg_passfile.strip(),
"pg_database": pg_database,
@@ -1311,6 +1313,7 @@ def main(argv=None):
)
with open(postgres_cfg_path) as opened_file:
postgres_cfg = opened_file.readlines()
+ postgres_cfg = [ensure_str(el) for el in postgres_cfg]
dbuser, pg_binary_path, instances = parse_postgres_cfg(postgres_cfg, helper.get_conf_sep())
except Exception:
_, e = sys.exc_info()[:2] # python2 and python3 compatible exception logging
diff --git a/agents/plugins/mk_redis b/agents/plugins/mk_redis
index 71b60fc4cc8..d8856b58dd0 100755
--- a/agents/plugins/mk_redis
+++ b/agents/plugins/mk_redis
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# sample output of pgrep command
# 1051 /usr/bin/redis-server 127.0.0.1:6380
@@ -56,7 +56,9 @@ redis_args() {
fi
if [[ "${!PASSWORD}" ]] && [[ "${!PASSWORD}" != "None" ]]; then
- REDIS_ARGS+=("-a" "${!PASSWORD}")
+ REDIS_CLI_COMMAND="REDISCLI_AUTH='${!PASSWORD}' redis-cli"
+ else
+ REDIS_CLI_COMMAND="redis-cli"
fi
REDIS_ARGS+=("info")
@@ -104,8 +106,9 @@ main() {
# print server section
echo "[[[$INSTANCE|${!HOST}|${!PORT}]]]"
- # execute command
- waitmax 3 redis-cli "${REDIS_ARGS[@]}" || true
+ # TODO: Use an array instead of the suppression for the command, too.
+ # shellcheck disable=SC2086
+ waitmax 3 bash -c "${REDIS_CLI_COMMAND} ${REDIS_ARGS[*]}" || true
echo
done
diff --git a/agents/plugins/mk_sap.aix b/agents/plugins/mk_sap.aix
index 45f86420c53..a244e933022 100755
--- a/agents/plugins/mk_sap.aix
+++ b/agents/plugins/mk_sap.aix
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# cat sapservices
##!/bin/sh
@@ -43,25 +43,25 @@ CMK_VERSION="2.3.0b1"
sapservices="/usr/sap/sapservices"
-if [ -r "$sapservices" ]; then
+_read_sap_services() {
+ grep "^LIBPATH=/usr/sap/" "${sapservices}" | cut -d';' -f3
+}
+
+if [ -r "${sapservices}" ]; then
echo "<<>>"
# loop over ids
- # SC2162: read without -r will mangle backslashes.
- # We suppress it here for compatibility (curretly backslashes e.g. before spaces are dropped).
- # Since escaping of field seperators is not relevant when reading into one variable, we probably
- # would have wanted "read -r".
- # shellcheck disable=SC2162
- while read LINE; do
- command=$(echo "$LINE" | grep "^LIBPATH=/usr/sap/" | grep -v "^LIBPATH=/usr/sap/D" | awk -F" " '{print $5}')
- if [ -n "$command" ]; then
- id2=$(echo "$command" | awk -F"/" '{print $4}')
- path="/sapmnt/$id2/exe"
- sappfpar=$(find "$path" -name sappfpar | head -1)
- sapcontrol=$(find "$path" -name sapcontrol | head -1)
- lib_path=$(find "$path" -name libicuuc\*.a | head -1 | sed -e 's,/[^ /]*$,,')
- id=$(LIBPATH=$LIBPATH:$lib_path $sappfpar SAPSYSTEM "$command")
- echo -n "[$id]"
- LIBPATH=$LIBPATH:$lib_path $sapcontrol -nr "$id" -function GetProcessList
- fi
- done <"$sapservices"
+ # shellcheck disable=SC2034 # _bin appears unused.
+ _read_sap_services | while read -r _bin command; do
+ [ -n "${command}" ] || continue
+
+ id2=$(echo "${command}" | cut -d/ -f4)
+ path="/sapmnt/${id2}/exe"
+ sappfpar=$(find "${path}" -name sappfpar | head -1)
+ sapcontrol=$(find "${path}" -name sapcontrol | head -1)
+ lib_path=$(find "${path}" -name libicuuc\*.a | head -1 | sed -e 's,/[^ /]*$,,')
+ id=$(LIBPATH=${LIBPATH}:${lib_path} ${sappfpar} SAPSYSTEM "${command}")
+ printf "[%s]" "${id}"
+ LIBPATH=${LIBPATH}:${lib_path} ${sapcontrol} -nr "${id}" -function GetProcessList
+
+ done
fi
diff --git a/agents/plugins/mk_sap.py b/agents/plugins/mk_sap.py
index 632316333a8..80fbc383c25 100755
--- a/agents/plugins/mk_sap.py
+++ b/agents/plugins/mk_sap.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
# This agent plugin has been built to collect information from SAP R/3 systems
# using RFC calls. It needs the python module pyrfc.
@@ -477,7 +477,7 @@ def main(): # pylint: disable=too-many-branches
global state_file_changed
try:
- import pyrfc # type: ignore[import]
+ import pyrfc # type: ignore[import-not-found]
except ImportError as e:
if "No module named pyrfc" in str(e):
sys.stderr.write("Missing the Python module pyrfc.\n")
diff --git a/agents/plugins/mk_sap_hana b/agents/plugins/mk_sap_hana
index 125ed818d06..5e66c82252e 100755
--- a/agents/plugins/mk_sap_hana
+++ b/agents/plugins/mk_sap_hana
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# SAP HANA Plugin for Checkmk Agent on Linux
# Copyright Gerd Stolz - SVA - 2016
diff --git a/agents/plugins/mk_saprouter b/agents/plugins/mk_saprouter
index 68551dfe8c9..b7b0244bb74 100755
--- a/agents/plugins/mk_saprouter
+++ b/agents/plugins/mk_saprouter
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Plugin for SAP router
diff --git a/agents/plugins/mk_scaleio b/agents/plugins/mk_scaleio
index dac2ae958e3..c520c4bc00e 100755
--- a/agents/plugins/mk_scaleio
+++ b/agents/plugins/mk_scaleio
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Plugin for EMC's ScaleIO software. This plugin needs to be installed
# on all MDM servers as the information is provided only by an active
diff --git a/agents/plugins/mk_site_object_counts b/agents/plugins/mk_site_object_counts
index 36651d9374e..bb4f6b90cc5 100755
--- a/agents/plugins/mk_site_object_counts
+++ b/agents/plugins/mk_site_object_counts
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
_socket_exists() {
[ -S "/omd/sites/${1}/tmp/run/live" ]
diff --git a/agents/plugins/mk_sshd_config b/agents/plugins/mk_sshd_config
index eaccdebca13..6b34926034a 100755
--- a/agents/plugins/mk_sshd_config
+++ b/agents/plugins/mk_sshd_config
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
if command -v sshd >/dev/null 2>&1; then
echo "<<>>"
diff --git a/agents/plugins/mk_suseconnect b/agents/plugins/mk_suseconnect
index f5e9e723224..66e1c85819e 100755
--- a/agents/plugins/mk_suseconnect
+++ b/agents/plugins/mk_suseconnect
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
if type SUSEConnect >/dev/null; then
echo '<<<suseconnect:sep(58)>>>'
diff --git a/agents/plugins/mk_tinkerforge.py b/agents/plugins/mk_tinkerforge.py
index 7bb94846a4c..2bbd4be793e 100755
--- a/agents/plugins/mk_tinkerforge.py
+++ b/agents/plugins/mk_tinkerforge.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
###################################################
# plugin to retrieve data from tinkerforge devices.
@@ -117,21 +117,27 @@ def print_generic(settings, sensor_type, ident, factor, unit, *values):
def print_ambient_light(conn, settings, uid):
- from tinkerforge.bricklet_ambient_light import BrickletAmbientLight # type: ignore[import]
+ from tinkerforge.bricklet_ambient_light import ( # type: ignore[import-not-found]
+ BrickletAmbientLight,
+ )
br = BrickletAmbientLight(uid, conn)
print_generic(settings, "ambient", br.get_identity(), 0.01, "L", br.get_illuminance())
def print_ambient_light_v2(conn, settings, uid):
- from tinkerforge.bricklet_ambient_light_v2 import BrickletAmbientLightV2 # type: ignore[import]
+ from tinkerforge.bricklet_ambient_light_v2 import ( # type: ignore[import-not-found]
+ BrickletAmbientLightV2,
+ )
br = BrickletAmbientLightV2(uid, conn)
print_generic(settings, "ambient", br.get_identity(), 0.01, "L", br.get_illuminance())
def print_temperature(conn, settings, uid):
- from tinkerforge.bricklet_temperature import BrickletTemperature # type: ignore[import]
+ from tinkerforge.bricklet_temperature import ( # type: ignore[import-not-found]
+ BrickletTemperature,
+ )
br = BrickletTemperature(uid, conn)
print_generic(
@@ -140,7 +146,7 @@ def print_temperature(conn, settings, uid):
def print_temperature_ext(conn, settings, uid):
- from tinkerforge.bricklet_ptc import BrickletPTC # type: ignore[import]
+ from tinkerforge.bricklet_ptc import BrickletPTC # type: ignore[import-not-found]
br = BrickletPTC(uid, conn)
print_generic(
@@ -154,14 +160,14 @@ def print_temperature_ext(conn, settings, uid):
def print_humidity(conn, settings, uid):
- from tinkerforge.bricklet_humidity import BrickletHumidity # type: ignore[import]
+ from tinkerforge.bricklet_humidity import BrickletHumidity # type: ignore[import-not-found]
br = BrickletHumidity(uid, conn)
print_generic(settings, "humidity", br.get_identity(), 0.1, "RH", br.get_humidity())
def print_master(conn, settings, uid):
- from tinkerforge.brick_master import BrickMaster # type: ignore[import]
+ from tinkerforge.brick_master import BrickMaster # type: ignore[import-not-found]
br = BrickMaster(uid, conn)
print_generic(
@@ -177,7 +183,9 @@ def print_master(conn, settings, uid):
def print_motion_detector(conn, settings, uid):
- from tinkerforge.bricklet_motion_detector import BrickletMotionDetector # type: ignore[import]
+ from tinkerforge.bricklet_motion_detector import ( # type: ignore[import-not-found]
+ BrickletMotionDetector,
+ )
br = BrickletMotionDetector(uid, conn)
print_generic(settings, "motion", br.get_identity(), 1.0, "", br.get_motion_detected())
@@ -212,7 +220,7 @@ def display_on_segment(conn, settings, text):
"\N{DEGREE SIGN}": 0x63,
}
- from tinkerforge.bricklet_segment_display_4x7 import ( # type: ignore[import]
+ from tinkerforge.bricklet_segment_display_4x7 import ( # type: ignore[import-not-found]
BrickletSegmentDisplay4x7,
)
@@ -340,7 +348,7 @@ def main():
return install()
try:
- from tinkerforge.ip_connection import IPConnection # type: ignore[import]
+ from tinkerforge.ip_connection import IPConnection # type: ignore[import-not-found]
except ImportError:
sys.stdout.write("<<>>\n")
sys.stdout.write("master,0.0.0,tinkerforge api isn't installed\n")
diff --git a/agents/plugins/mk_tsm b/agents/plugins/mk_tsm
index 1154d767a07..298852d931b 100755
--- a/agents/plugins/mk_tsm
+++ b/agents/plugins/mk_tsm
@@ -9,25 +9,26 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Agent for Linux/UNIX for Tivoli Storage Manager (TSM)
-# Configuration is needed for username and password for dsmadmc
-# You need to create a configuration file /etc/check_mk/tsm.cfg
-# with the following two lines:
-# TSM_USER=foo
-# TSM_PASSWORD=bar
-# If you have more than once instance, make sure that the upper
-# login works on all of them.
-
-# shellcheck source=agents/cfg_examples/tsm.cfg
-. "$MK_CONFDIR/tsm.cfg" || exit 1
-
-if [ -z "${TSM_USER}" ] || [ -z "${TSM_PASSWORD}" ]; then
- echo "Please set TSM_USER and TSM_PASSWORD in ${MK_CONFDIR}/tsm.cfg" >&2
- exit 1
-fi
+read_plugin_config() {
+ # Configuration is needed for username and password for dsmadmc
+ # You need to create a configuration file /etc/check_mk/tsm.cfg
+ # with the following two lines:
+ # TSM_USER=foo
+ # TSM_PASSWORD=bar
+    # If you have more than one instance, make sure that the above
+ # login works on all of them.
+
+ # shellcheck source=agents/cfg_examples/tsm.cfg
+ . "${MK_CONFDIR}/tsm.cfg" || exit 1
+ if [ -z "${TSM_USER}" ] || [ -z "${TSM_PASSWORD}" ]; then
+ echo "Please set TSM_USER and TSM_PASSWORD in ${MK_CONFDIR}/tsm.cfg" >&2
+ exit 1
+ fi
+}
do_tsm_checks() {
INST=${DSMSERV_DIR##*/}
@@ -63,17 +64,26 @@ EOF
}
-# Find in the list of processes TSM daemons. Example output of 'ps xewwg'
-# 8781984 - A 127:26 dsmserv _=/usr/bin/dsmserv LANG=en_US LOGIN=root PATH=/usr/bin:/etc:/usr/sbin:/usr/ucb:/usr/bin/X11:/sbin:/usr/java5/jre/bin:/usr/java5/bin LC_ALL=en_US DSMSERV_CONFIG=/foobar_17g/dsmserv.opt LC__FASTMSG=true LOGNAME=root MAIL=/var/spool/mail/root LOCPATH=/usr/lib/nls/loc DSMSERV_DIR=/foobar_17g USER=root AUTHSTATE=compat AIXTHREAD_MNRATIO=1:1 SHELL=/usr/bin/ksh ODMDIR=/etc/objrepos HOME=/ SSH_CLIENT=192.168.21.199 37725 22 SSH_CONNECTION=192.168.21.199 37725 192.168.21.214 22 PWD=/foobar_17g TZ=Europe/Bucharest AIXTHREAD_SCOPE=S DSMSERV_ACCOUNTING_DIR=/foobar_17g/acc NLSPATH=/usr/lib/nls/msg/%L/%N:/usr/lib/nls/msg/%L/%N.cat LIBPATH=/usr/local/Centera_SDK/lib/64/
-
-# SC2162: read without -r will mangle backslashes.
-# We suppress it here for compatibility (curretly backslashes e.g. before spaces are dropped).
-# Since escaping of field seperators is not relevant when reading into one variable, we probably
-# would have wanted "read -r".
-# shellcheck disable=SC2162
-ps xewwg | sed -n '/[\s\/]dsmserv .* DSMSERV_CONFIG=/s/^.*dsmserv //p' |
- while read line; do
- # Take over all relevant environment into our own
- eval "$(echo "$line" | tr ' ' '\n' | sed -n '/^DSMSERV_/s/^/export /p')"
+get_dsmserv_processes() {
+    # Find TSM daemons in the list of processes. Example output of 'ps xewwg':
+ # 8781984 - A 127:26 dsmserv _=/usr/bin/dsmserv LANG=en_US LOGIN=root PATH=/usr/bin:/etc:/usr/sbin:/usr/ucb:/usr/bin/X11:/sbin:/usr/java5/jre/bin:/usr/java5/bin LC_ALL=en_US DSMSERV_CONFIG=/foobar_17g/dsmserv.opt LC__FASTMSG=true LOGNAME=root MAIL=/var/spool/mail/root LOCPATH=/usr/lib/nls/loc DSMSERV_DIR=/foobar_17g USER=root AUTHSTATE=compat AIXTHREAD_MNRATIO=1:1 SHELL=/usr/bin/ksh ODMDIR=/etc/objrepos HOME=/ SSH_CLIENT=192.168.21.199 37725 22 SSH_CONNECTION=192.168.21.199 37725 192.168.21.214 22 PWD=/foobar_17g TZ=Europe/Bucharest AIXTHREAD_SCOPE=S DSMSERV_ACCOUNTING_DIR=/foobar_17g/acc NLSPATH=/usr/lib/nls/msg/%L/%N:/usr/lib/nls/msg/%L/%N.cat LIBPATH=/usr/local/Centera_SDK/lib/64/
+ ps xewwg | sed -n '/dsmserv .* DSMSERV_CONFIG=/s/^.*dsmserv //p'
+}
+
+export_extracted_env() {
+ while read -r name value; do
+ [ -n "$name" ] && export "${name}"="${value}"
+ done <<<"$(printf "%s\n" "${1}" | tr ' ' '\n' | sed -n '/^DSMSERV_/s/=/ /p')"
+}
+
+main() {
+
+ read_plugin_config
+
+ while read -r line; do
+ export_extracted_env "${line}"
do_tsm_checks
- done
+ done <<<"$(get_dsmserv_processes)"
+}
+
+[ -z "${MK_SOURCE_ONLY}" ] && main "$@"
diff --git a/agents/plugins/mk_zypper b/agents/plugins/mk_zypper
index c1b369d793b..5959a1d0326 100755
--- a/agents/plugins/mk_zypper
+++ b/agents/plugins/mk_zypper
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
#SuSE-release is deprecated and was removed with SLE 15. os-release should be used for the new versions.
diff --git a/agents/plugins/mtr.py b/agents/plugins/mtr.py
index c92fa9bbedc..82ec55bd143 100755
--- a/agents/plugins/mtr.py
+++ b/agents/plugins/mtr.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
# This plugin was sponsored by BenV. Thanks!
# https://notes.benv.junerules.com/mtr/
@@ -20,7 +20,7 @@
try:
import configparser
except ImportError: # Python 2
- import ConfigParser as configparser # type: ignore[import,no-redef]
+ import ConfigParser as configparser # type: ignore[import-not-found,no-redef]
import glob
import os
diff --git a/agents/plugins/netstat.aix b/agents/plugins/netstat.aix
index 521c4fc0009..33fe9fe2cf4 100755
--- a/agents/plugins/netstat.aix
+++ b/agents/plugins/netstat.aix
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# This is not part of the standard agent since it can produce much
# output data if the table is large. This plugin is just needed for
diff --git a/agents/plugins/netstat.linux b/agents/plugins/netstat.linux
index e5b235178cf..75c961b17af 100755
--- a/agents/plugins/netstat.linux
+++ b/agents/plugins/netstat.linux
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# This is not part of the standard agent since it can take very
# long to run if your TCP/UDP table is large. Netstat seems to
diff --git a/agents/plugins/netstat.solaris b/agents/plugins/netstat.solaris
index 6dbf3968efb..65cae1a6001 100755
--- a/agents/plugins/netstat.solaris
+++ b/agents/plugins/netstat.solaris
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# This is not part of the standard agent since it can take very
# long to run if your TCP/UDP table is large. Netstat seems to
diff --git a/agents/plugins/nfsexports b/agents/plugins/nfsexports
index e49a65f80e5..29c84f3de50 100755
--- a/agents/plugins/nfsexports
+++ b/agents/plugins/nfsexports
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# this check will only run if we have a working nfs environment or SHOULD have one.
# not tested for nfs3
diff --git a/agents/plugins/nfsexports.solaris b/agents/plugins/nfsexports.solaris
index 1892f2fa644..8817b6fcfac 100755
--- a/agents/plugins/nfsexports.solaris
+++ b/agents/plugins/nfsexports.solaris
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Checkmk agent plugin for monitoring nfsexports on Solaris. This plugin
# has been tested with solaris 10 in a standalone and cluster setting.
diff --git a/agents/plugins/nginx_status.py b/agents/plugins/nginx_status.py
index d0285d48c4e..66864abaede 100755
--- a/agents/plugins/nginx_status.py
+++ b/agents/plugins/nginx_status.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
USER_AGENT = "checkmk-agent-nginx_status-" + __version__
# Checkmk-Agent-Plugin - Nginx Server Status
@@ -171,7 +171,7 @@ def main(): # pylint: disable=too-many-branches
raise
for line in ensure_str(fd.read()).split("\n"):
- if not line.strip():
+ if not line or line.isspace():
continue
if line.lstrip()[0] == "<":
# seems to be html output. Skip this server.
diff --git a/agents/plugins/plesk_backups.py b/agents/plugins/plesk_backups.py
index 6430b021614..dc2a6f0e124 100755
--- a/agents/plugins/plesk_backups.py
+++ b/agents/plugins/plesk_backups.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
# Monitors FTP backup spaces of plesk domains.
# Data format
@@ -23,7 +23,7 @@
pass
try:
- import MySQLdb # type: ignore[import] # pylint: disable=import-error
+ import MySQLdb # type: ignore[import-untyped] # pylint: disable=import-error
except ImportError as e:
sys.stdout.write(
"<<>>\n%s. Please install missing module via pip install ." % e
diff --git a/agents/plugins/plesk_domains.py b/agents/plugins/plesk_domains.py
index 124bcbfaaa6..234770f82ef 100755
--- a/agents/plugins/plesk_domains.py
+++ b/agents/plugins/plesk_domains.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
# Lists all domains configured in plesk
#
@@ -14,7 +14,7 @@
import sys
try:
- import MySQLdb # type: ignore[import] # pylint: disable=import-error
+ import MySQLdb # type: ignore[import-untyped] # pylint: disable=import-error
except ImportError as e:
sys.stdout.write(
"<<>>\n%s. Please install missing module via pip install ." % e
diff --git a/agents/plugins/runas b/agents/plugins/runas
index 8c6fc685a97..8b813b60576 100755
--- a/agents/plugins/runas
+++ b/agents/plugins/runas
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# This plugin allows executing mrpe, local and plugin scripts with a different user context
# It is configured in the file $MK_CONFDIR/runas.cfg
diff --git a/agents/plugins/smart b/agents/plugins/smart
index 4180ee85c09..a2a95475091 100755
--- a/agents/plugins/smart
+++ b/agents/plugins/smart
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# Function to replace "if type [somecmd]" idiom
# 'command -v' tends to be more robust vs 'which' and 'type' based tests
diff --git a/agents/plugins/symantec_av b/agents/plugins/symantec_av
index 890b2fc8eb0..496cc308212 100755
--- a/agents/plugins/symantec_av
+++ b/agents/plugins/symantec_av
@@ -9,9 +9,37 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
+
+# check that no users other than root can change the file
+root_owned() {
+    read -r permissions _ owner group _ <<EOF
+$1
+EOF
+
+    group_write_perm=$(echo "$permissions" | cut -c 6)
+    other_write_perm=$(echo "$permissions" | cut -c 9)
+
+    if [ "$owner" != "root" ] || [ "$other_write_perm" != "-" ]; then
+        return 1
+    fi
+
+    [ "$group" = "root" ] || [ "$group_write_perm" = "-" ]
+}
+
echo "<<<symantec_av_updates>>>"
/opt/Symantec/symantec_antivirus/sav info -d
diff --git a/agents/plugins/unitrends_backup b/agents/plugins/unitrends_backup
index a29ab176d5a..4c0dcba00b7 100755
--- a/agents/plugins/unitrends_backup
+++ b/agents/plugins/unitrends_backup
@@ -3,7 +3,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
//
//
diff --git a/agents/plugins/unitrends_replication.py b/agents/plugins/unitrends_replication.py
index 8b763d08497..fb2d2aaf148 100755
--- a/agents/plugins/unitrends_replication.py
+++ b/agents/plugins/unitrends_replication.py
@@ -4,7 +4,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
import sys
import time
diff --git a/agents/plugins/vxvm b/agents/plugins/vxvm
index d2dfafa8547..624d893d551 100755
--- a/agents/plugins/vxvm
+++ b/agents/plugins/vxvm
@@ -9,7 +9,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
# This plugin has been tested on Linux and HPUX.
diff --git a/agents/plugins/websphere_mq b/agents/plugins/websphere_mq
deleted file mode 100755
index 406f690902d..00000000000
--- a/agents/plugins/websphere_mq
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/bin/sh
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# Reason for this no-op: shellcheck disable=... before the first command disables the error for the
-# entire script.
-:
-
-# Disable unused variable error (needed to keep track of version)
-# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
-
-# Monitor Websphere MQ
-# WWI Version 18.05.2016
-
-# NOTE: This plugin is deprecated. Please use the ibm_mq plugin.
-# plugin for websphere_mq_* checks
-
-if [ "$1" = "" ]; then
- # wwi --------------------------------------
- # deletion of "-"
- # preventing a faulty "TCP Connection" section of the main agent
- # by using the "-" option, the AIX environment show's the message "[YOU HAVE NEW MAIL]"
-# before the section start: <<<websphere_mq_channels>>>
- # so the next section will be useless for the OMD server check.
- # su - mqm -c "/usr/lib/check_mk_agent/plugins/websphere_mq run"
- su mqm -c "/usr/lib/check_mk_agent/plugins/websphere_mq run"
-
-else
- # Loop over all local mq instances
- for QM in $(ps -ef | grep "[/]usr/mqm/bin/runmqlsr" | awk -v FS="-m" '{print $2}' | awk '{print $1}' | uniq); do
-        echo '<<<websphere_mq_channels>>>'
- for i in $(echo " display CHANNEL (*) TYPE (SDR) " | /usr/bin/runmqsc "$QM" | grep CHLTYPE |
- grep -v SYSTEM | awk '{print $1}'); do
-
- j=$(echo "display $i " | /usr/bin/runmqsc "$QM" | grep XMITQ | tr " " "\n" |
- grep XMITQ | sed '1,$s/(/ /g' | sed '1,$s/)/ /g' | awk '{print $2 }')
-
- a=$(echo " display qlocal ($j) CURDEPTH " | /usr/bin/runmqsc "$QM" | grep CURDEPTH |
- tr " " "\n" | grep CURDEPTH | sed '1,$s/(/ /g' | sed '1,$s/)/ /g' |
- awk '{print $2 }' | tr "\n" " ")
-
- c=$(echo " display qlocal ($j) MAXDEPTH " | /usr/bin/runmqsc "$QM" | grep MAXDEPTH |
- tr " " "\n" | grep MAXDEPTH | sed '1,$s/(/ /g' | sed '1,$s/)/ /g' |
- awk '{print $2 }' | tr "\n" " ")
-
- l=$(echo "$i" | sed '1,$s/(/ /g' | sed '1,$s/)/ /g' | awk '{print $2 }')
-
- s=$(echo " display chstatus($l)" | /usr/bin/runmqsc "$QM" | grep STATUS | tail -1 |
- sed '1,$s/(/ /g' | sed '1,$s/)/ /g' | awk '{print $NF }')
-
- if [ "$s" = "" ]; then
- s="Unknown"
- fi
- echo "$a $i $c $s"
- done
-
-        echo '<<<websphere_mq_queues>>>'
- for t in $(echo " display queue (*) where (USAGE EQ NORMAL) " | /usr/bin/runmqsc "$QM" |
- grep -v SYSTEM | grep -v MQMON | grep -v MONITOR | grep -v _T0 | grep -v _T1 |
- grep -v _T2 | grep -v _T3 | grep -v mqtest | grep QUEUE | awk '{ print $1 }' |
- sed '1,$s/(/ /g' | sed '1,$s/)/ /g' | awk '{print $2 }'); do
-
- # wwi MQ admin change, to get more queues which are needed
- a=$(echo " display qlocal ($t) CURDEPTH " | /usr/bin/runmqsc "$QM" | grep CURDEPTH |
- tr " " "\n" | grep CURDEPTH | sed '1,$s/(/ /g' | sed '1,$s/)/ /g' |
- awk '{print $2 }' | tr "\n" " ")
-
- b=$(echo " display qlocal ($t) MAXDEPTH " | /usr/bin/runmqsc "$QM" | grep MAXDEPTH |
- tr " " "\n" | grep MAXDEPTH | sed '1,$s/(/ /g' | sed '1,$s/)/ /g' |
- awk '{print $2 }' | tr "\n" " ")
-
- c=$(echo " dis qs($t) monitor " | /usr/bin/runmqsc "$QM" | grep -e LGETDATE -e LGETTIME |
- tr '\n' ' ' | awk -v FS="LPUTDATE" '{print $1}' | sed 's/ //g')
-
- NOW=$(date +%Y_%m_%d"-"%H_%M_%S)
-
- # Pattern: number of incoming messages $a on $t, max queues $b
- # wwi -------------------------
-
- if [ "$a" = "" ] || [ "$a" = " " ]; then
- a=" 0"
- t=" $t"
- c="CURDEPTH(0)LGETDATE()LGETTIME()"
- fi
-
- if [ "$b" = "" ] || [ "$b" = " " ]; then
- b=0
- c="CURDEPTH(0)LGETDATE()LGETTIME()"
- fi
-
- if [ "$c" = "" ] || [ "$c" = " " ]; then
- c="CURDEPTH(0)LGETDATE()LGETTIME()"
- fi
-
- echo "$a $t $b $c $NOW"
- done # for t
- done # for QM
-fi
-
-if type dspmq >/dev/null; then
- echo "<<>>"
- dspmq -x
- echo "MQv$(dspmqver | grep -e Version -e ^Level -e Mode | awk -v FS=":" '{print $2","}' | tr -d ' ' | tr -d '\n' | sed 's/,$/;/g')"
- dspmq -o all
-fi
diff --git a/agents/plugins/zorp b/agents/plugins/zorp
index 49c4605cb73..93533ba7b99 100755
--- a/agents/plugins/zorp
+++ b/agents/plugins/zorp
@@ -6,7 +6,7 @@
# Disable unused variable error (needed to keep track of version)
# shellcheck disable=SC2034
-CMK_VERSION="2.3.0b1"
+CMK_VERSION="2.4.0b1"
if type zorpctl >/dev/null; then
 echo '<<<zorp_connections>>>'
diff --git a/agents/scripts/super-server/0_systemd/setup b/agents/scripts/super-server/0_systemd/setup
index 7a13365642a..a827806aa0b 100755
--- a/agents/scripts/super-server/0_systemd/setup
+++ b/agents/scripts/super-server/0_systemd/setup
@@ -66,13 +66,17 @@ _need_ip_access_list() {
}
_deploy_controller() {
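+ # The agent controller ships as a gzipped binary at this path; if it is missing,
+ # this agent package was built without it, so fall back to the legacy setup right away.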
+ [ -e "/var/lib/cmk-agent/cmk-agent-ctl.gz" ] || {
+ printf "%s\n" "Agent Controller is not packaged. Falling back to legacy systemd setup."
+ return 1
+ }
printf "Deploying agent controller: %s\n" "${CONTROLLER_TARGET}"
gunzip --force --quiet -c "/var/lib/cmk-agent/cmk-agent-ctl.gz" >"/var/lib/cmk-agent/cmk-agent-ctl" || {
- printf "%s\n" "...Failed to unpack with gzip."
+ printf "%s\n" "...Failed to unpack with gzip. Falling back to legacy systemd setup."
return 1
}
install -m 755 "/var/lib/cmk-agent/cmk-agent-ctl" "${CONTROLLER_TARGET}" || {
- printf "%s\n" "...Failed to move to target location."
+ printf "%s\n" "...Failed to move to target location. Falling back to legacy systemd setup."
return 1
}
rm -f "/var/lib/cmk-agent/cmk-agent-ctl"
diff --git a/agents/special/.f12 b/agents/special/.f12
index aab3eed04ce..4d130c0e6a0 100755
--- a/agents/special/.f12
+++ b/agents/special/.f12
@@ -12,4 +12,4 @@
SITE=${SITE:-$(omd sites --bare | head -n 1)}
ROOT=/omd/sites/$SITE
-sudo cp agent_* "$ROOT/share/check_mk/agents/special/"
+cp agent_* "$ROOT/share/check_mk/agents/special/"
diff --git a/agents/special/agent_3par b/agents/special/agent_3par
deleted file mode 100755
index 557d9ac3dae..00000000000
--- a/agents/special/agent_3par
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-import sys
-
-from cmk.special_agents.agent_3par import main
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/agents/special/agent_alertmanager b/agents/special/agent_alertmanager
index 0260f278021..47582b4118f 100755
--- a/agents/special/agent_alertmanager
+++ b/agents/special/agent_alertmanager
@@ -5,7 +5,7 @@
import sys
-from cmk.special_agents.agent_alertmanager import main
+from cmk.plugins.alertmanager.special_agents.agent_alertmanager import main
if __name__ == "__main__":
sys.exit(main())
diff --git a/agents/special/agent_kube b/agents/special/agent_kube
index a1872bc5d68..021b5c0eb43 100755
--- a/agents/special/agent_kube
+++ b/agents/special/agent_kube
@@ -5,7 +5,7 @@
import sys
-from cmk.special_agents.agent_kube import main
+from cmk.plugins.kube.special_agents.agent_kube import main
if __name__ == "__main__":
sys.exit(main())
diff --git a/agents/special/agent_netapp_ontap b/agents/special/agent_netapp_ontap
new file mode 100755
index 00000000000..95829a1e8dd
--- /dev/null
+++ b/agents/special/agent_netapp_ontap
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+# Copyright (C) 2024 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+import sys
+
+from cmk.special_agents.agent_netapp_ontap import main
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/agents/special/agent_prometheus b/agents/special/agent_prometheus
index 3dc3015bcb6..a85bcb00e5a 100755
--- a/agents/special/agent_prometheus
+++ b/agents/special/agent_prometheus
@@ -5,7 +5,7 @@
import sys
-from cmk.special_agents.agent_prometheus import main
+from cmk.plugins.prometheus.special_agents.agent_prometheus import main
if __name__ == "__main__":
sys.exit(main())
diff --git a/agents/windows/plugins/.f12 b/agents/windows/plugins/.f12
index b9124f01080..6b9f3f3e78e 100755
--- a/agents/windows/plugins/.f12
+++ b/agents/windows/plugins/.f12
@@ -12,4 +12,4 @@
SITE="${SITE:-$(omd sites --bare | head -n 1)}"
ROOT="/omd/sites/$SITE"
-sudo rsync -r --delete --chmod 755 . "$ROOT/share/check_mk/agents/windows/plugins"
+rsync -r --delete --chmod 755 . "$ROOT/share/check_mk/agents/windows/plugins"
diff --git a/agents/windows/plugins/ad_replication.bat b/agents/windows/plugins/ad_replication.bat
index 4be0aceb98e..89cda8162e5 100644
--- a/agents/windows/plugins/ad_replication.bat
+++ b/agents/windows/plugins/ad_replication.bat
@@ -1,5 +1,5 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
REM ***
REM * Following information concerns only Windows Server <2012R2
diff --git a/agents/windows/plugins/arcserve_backup.ps1 b/agents/windows/plugins/arcserve_backup.ps1
index 4a54c273132..96667c6604f 100644
--- a/agents/windows/plugins/arcserve_backup.ps1
+++ b/agents/windows/plugins/arcserve_backup.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
####
## ArcServe.ps1
####
diff --git a/agents/windows/plugins/citrix_farm.ps1 b/agents/windows/plugins/citrix_farm.ps1
index 94447d1e8ea..e61b67ef6ec 100644
--- a/agents/windows/plugins/citrix_farm.ps1
+++ b/agents/windows/plugins/citrix_farm.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
diff --git a/agents/windows/plugins/citrix_licenses.vbs b/agents/windows/plugins/citrix_licenses.vbs
index a3f3721e65f..bb1cb68c7ad 100644
--- a/agents/windows/plugins/citrix_licenses.vbs
+++ b/agents/windows/plugins/citrix_licenses.vbs
@@ -1,6 +1,6 @@
' Check for citrix licenses
' This is an almost unmodified version of ctx_licensecheck.vbs from Stefan Senftleben.
-Const CMK_VERSION = "2.3.0b1"
+Const CMK_VERSION = "2.4.0b1"
On Error Resume Next
Dim objWMI : Set objWMI = GetObject("winmgmts:\\" & strComputer)
Dim strComputer : strComputer = "."
diff --git a/agents/windows/plugins/citrix_xenapp.ps1 b/agents/windows/plugins/citrix_xenapp.ps1
index debf83f3212..a5bd7946d78 100644
--- a/agents/windows/plugins/citrix_xenapp.ps1
+++ b/agents/windows/plugins/citrix_xenapp.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
Add-PSSnapin Citrix*
### Citrix XenApp Serverload
diff --git a/agents/windows/plugins/deprecated/citrix_serverload.ps1 b/agents/windows/plugins/deprecated/citrix_serverload.ps1
deleted file mode 100644
index 684845012b8..00000000000
--- a/agents/windows/plugins/deprecated/citrix_serverload.ps1
+++ /dev/null
@@ -1,6 +0,0 @@
-$computer = "localhost"
-
-### Citrix XenApp Serverload
-$loadObject = Get-WmiObject -Namespace 'Root\Citrix' -class 'MetaFrame_Server_LoadLevel' -ComputerName $computer -ErrorAction Stop
-"<<>>"
-$loadObject.LoadLevel
diff --git a/agents/windows/plugins/deprecated/citrix_sessions.ps1 b/agents/windows/plugins/deprecated/citrix_sessions.ps1
deleted file mode 100644
index f78ba3efd59..00000000000
--- a/agents/windows/plugins/deprecated/citrix_sessions.ps1
+++ /dev/null
@@ -1,8 +0,0 @@
-$computer = "localhost"
-
-### Citrix XenApp Sessions
-$serverObject = Get-WmiObject -Namespace root\citrix -Class Metaframe_Server -ComputerName $computer
-"<<>>"
-"sessions {0}" -f $serverObject.NumberOfSessions
-"active_sessions {0}" -f $serverObject.NumberOfActiveSessions
-"inactive_sessions {0}" -f $serverObject.NumberOfDisconnectedSessions
diff --git a/agents/windows/plugins/deprecated/dmi_sysinfo.bat b/agents/windows/plugins/deprecated/dmi_sysinfo.bat
deleted file mode 100644
index 6b566b696f0..00000000000
--- a/agents/windows/plugins/deprecated/dmi_sysinfo.bat
+++ /dev/null
@@ -1,15 +0,0 @@
-@echo off
-
-REM ***
-REM * To be able to run this check you need to install dmidecode
-REM * on your windows client.
-REM *
-REM * You can download dmidecode for windows from
-REM *
-REM *
-REM * This plugin should work out of the box if you install dmidecode
-REM * to the default location.
-REM ***
-
-echo ^<^<^<dmi_sysinfo^>^>^>
-C:\Programme\GnuWin32\sbin\dmidecode.exe -t 1 -q
diff --git a/agents/windows/plugins/deprecated/mk_inventory.ps1 b/agents/windows/plugins/deprecated/mk_inventory.ps1
deleted file mode 100644
index e03a5a89d96..00000000000
--- a/agents/windows/plugins/deprecated/mk_inventory.ps1
+++ /dev/null
@@ -1,101 +0,0 @@
-# Configuration
-$delay = 14400 # execute agent only every $delay seconds
-$exe_paths = @("c:\Program Files (x86)")
-
-[System.Threading.Thread]::CurrentThread.CurrentCulture = [Globalization.CultureInfo]::InvariantCulture
-[System.Threading.Thread]::CurrentThread.CurrentUICulture = [Globalization.CultureInfo]::InvariantCulture
-[Console]::OutputEncoding = [System.Text.Encoding]::UTF8
-write-output "" # workaround to prevent the byte order mark to be at the beginning of the first section
-$name = (Get-Item env:\Computername).Value
-$separator = "|"
-# filename for timestamp
-$remote_host = $env:REMOTE_HOST
-$state_dir = $env:MK_STATEDIR
-
-# Fallback if the (old) agent does not provide the MK_STATEDIR
-if (!$state_dir) {
- $state_dir = "c:\Program Files (x86)\check_mk\state"
-}
-
-$timestamp = $state_dir + "\timestamp."+ $remote_host.Replace(":", "_")
-
-# does $timestamp exist?
-If (Test-Path $timestamp){
- $filedate = (ls $timestamp).LastWriteTime
- $now = Get-Date
- $earlier = $now.AddSeconds(-$delay)
- # exit if the timestamp is too young
- if ( $filedate -gt $earlier ) { exit }
-}
-# create new timestamp file
-New-Item $timestamp -type file -force | Out-Null
-
-# calculate unix timestamp
-$epoch=[int][double]::Parse($(Get-Date -date (Get-Date).ToUniversalTime() -uformat %s))
-
-# convert it to integer and add $delay seconds plus 10 minutes
-$until = [int]($epoch -replace ",.*", "") + $delay + 600
-
-# Processor
-write-host "<<>>"
-$cpu = Get-WmiObject Win32_Processor -ComputerName $name
-$cpu_vars = @( "Name","Manufacturer","Caption","DeviceID","MaxClockSpeed","AddressWidth","L2CacheSize","L3CacheSize","Architecture","NumberOfCores","NumberOfLogicalProcessors","CurrentVoltage","Status" )
-foreach ( $entry in $cpu ) { foreach ( $item in $cpu_vars) { write-host $item ":" $entry.$item } }
-
-# OS Version
-write-host "<<>>"
-Get-WmiObject Win32_OperatingSystem -ComputerName $name | foreach-object { write-host -separator $separator $_.csname, $_.caption, $_.version, $_.OSArchitecture, $_.servicepackmajorversion, $_.ServicePackMinorVersion, $_.InstallDate }
-
-# Memory
-#Get-WmiObject Win32_PhysicalMemory -ComputerName $name | select BankLabel,DeviceLocator,Capacity,Manufacturer,PartNumber,SerialNumber,Speed
-
-# BIOS
-write-host "<<>>"
-$bios = Get-WmiObject win32_bios -ComputerName $name
-$bios_vars= @( "Manufacturer","Name","SerialNumber","InstallDate","BIOSVersion","ListOfLanguages","PrimaryBIOS","ReleaseDate","SMBIOSBIOSVersion","SMBIOSMajorVersion","SMBIOSMinorVersion" )
-foreach ( $entry in $bios ) { foreach ( $item in $bios_vars) { write-host $item ":" $entry.$item } }
-
-# System
-write-host "<<>>"
-$system = Get-WmiObject Win32_SystemEnclosure -ComputerName $name
-$system_vars = @( "Manufacturer","Name","Model","HotSwappable","InstallDate","PartNumber","SerialNumber" )
-foreach ( $entry in $system ) { foreach ( $item in $system_vars) { write-host $item ":" $entry.$item } }
-
-# Hard-Disk
-write-host "<<>>"
-$disk = Get-WmiObject win32_diskDrive -ComputerName $name
-$disk_vars = @( "Manufacturer","InterfaceType","Model","Name","SerialNumber","Size","MediaType","Signature" )
-foreach ( $entry in $disk ) { foreach ( $item in $disk_vars) { write-host $item ":" $entry.$item } }
-
-# Graphics Adapter
-write-host "<<>>"
-$adapters=Get-WmiObject Win32_VideoController -ComputerName $name
-$adapter_vars = @( "Name", "Description", "Caption", "AdapterCompatibility", "VideoModeDescription", "VideoProcessor", "DriverVersion", "DriverDate", "MaxMemorySupported")
-foreach ( $entry in $adapters ) { foreach ( $item in $adapter_vars) { write-host $item ":" $entry.$item } }
-
-# Installed Software
-write-host "<<>>"
-Get-WmiObject Win32_Product -ComputerName $name | foreach-object { write-host -separator $separator $_.Name, $_.Vendor, $_.Version, $_.InstallDate }
-
-# Search Registry
-write-host "<<>>"
-$paths = @("HKLM:\Software\Microsoft\Windows\CurrentVersion\Uninstall")
-foreach ($path in $paths) {
- Get-ChildItem $path | foreach-object { $path2 = $path+"\"+$_.PSChildName; get-ItemProperty -path $path2 |
-
- foreach-object {
- $Publisher = $_.Publisher -replace "`0", ""
- write-host -separator $separator $_.DisplayName, $Publisher , $_.InstallLocation, $_.PSChildName, $_.DisplayVersion, $_.EstimatedSize, $_.InstallDate }}
-}
-
-# Search exes
-write-host "<<>>"
-foreach ($item in $exe_paths)
-{
- if ((Test-Path $item -pathType container))
- {
- Get-ChildItem -Path $item -include *.exe -Recurse | foreach-object { write-host -separator $separator $_.Fullname, $_.LastWriteTime, $_.Length, $_.VersionInfo.FileDescription, $_.VersionInfo.ProductVersion, $_.VersionInfo.ProductName }
- }
-}
-
-
diff --git a/agents/windows/plugins/deprecated/psperf.bat b/agents/windows/plugins/deprecated/psperf.bat
deleted file mode 100644
index 283b9e680fb..00000000000
--- a/agents/windows/plugins/deprecated/psperf.bat
+++ /dev/null
@@ -1,8 +0,0 @@
-@echo off
-rem This plugin obsoletes wmicchecks.bat. It is better because it is
-rem directly supported by the normal ps check.
-
-echo ^<^<^<ps:sep(44)^>^>^>
-echo [wmic process]
-wmic process get ProcessId,name,pagefileusage,virtualsize,workingsetsize,usermodetime,kernelmodetime,ThreadCount,HandleCount /format:csv
-echo [wmic process end]
\ No newline at end of file
diff --git a/agents/windows/plugins/deprecated/win_printers.ps1 b/agents/windows/plugins/deprecated/win_printers.ps1
deleted file mode 100644
index 7ea4a4f0935..00000000000
--- a/agents/windows/plugins/deprecated/win_printers.ps1
+++ /dev/null
@@ -1,254 +0,0 @@
-####
-#
-# http://blogs.msdn.com/b/powershell/archive/2012/07/13/join-object.aspx
-#
-####
-
-###
-## http://blogs.technet.com/b/heyscriptingguy/archive/2006/12/04/how-can-i-expand-the-width-of-the-windows-powershell-console.aspx
-
-$pshost = get-host
-$pswindow = $pshost.ui.rawui
-
-$newsize = $pswindow.buffersize
-$newsize.height = 300
-$newsize.width = 150
-$pswindow.buffersize = $newsize
-
-###
-
-Write-Host "<<>>"
-$Data_Set1 = Get-WMIObject Win32_PerfFormattedData_Spooler_PrintQueue | Select Name, @{Expression={$_.jobs};Label="CurrentJobs"}
-$Data_Set2 = Get-WmiObject win32_printer | select name, printerstatus, detectederrorstate
-$Data_Set2 = Get-WmiObject win32_printer | ?{$_.PortName -notmatch '^TS'} | Select name, printerstatus, detectederrorstate
-
-
-function AddItemProperties($item, $properties, $output)
-{
- if($item -ne $null)
- {
- foreach($property in $properties)
- {
- $propertyHash =$property -as [hashtable]
- if($propertyHash -ne $null)
- {
- $hashName=$propertyHash["name"] -as [string]
- if($hashName -eq $null)
- {
- throw "there should be a string Name"
- }
-
- $expression=$propertyHash["expression"] -as [scriptblock]
- if($expression -eq $null)
- {
- throw "there should be a ScriptBlock Expression"
- }
-
- $_=$item
- $expressionValue=& $expression
-
- $output | add-member -MemberType "NoteProperty" -Name $hashName -Value $expressionValue
- }
- else
- {
- # .psobject.Properties allows you to list the properties of any object, also known as "reflection"
- foreach($itemProperty in $item.psobject.Properties)
- {
- if ($itemProperty.Name -like $property)
- {
- $output | add-member -MemberType "NoteProperty" -Name $itemProperty.Name -Value $itemProperty.Value
- }
- }
- }
- }
- }
-}
-
-
-function WriteJoinObjectOutput($leftItem, $rightItem, $leftProperties, $rightProperties, $Type)
-{
- $output = new-object psobject
-
- if($Type -eq "AllInRight")
- {
- # This mix of rightItem with LeftProperties and vice versa is due to
- # the switch of Left and Right arguments for AllInRight
- AddItemProperties $rightItem $leftProperties $output
- AddItemProperties $leftItem $rightProperties $output
- }
- else
- {
- AddItemProperties $leftItem $leftProperties $output
- AddItemProperties $rightItem $rightProperties $output
- }
- $output
-}
-
-<#
-.Synopsis
- Joins two lists of objects
-.DESCRIPTION
- Joins two lists of objects
-.EXAMPLE
- Join-Object $a $b "Id" ("Name","Salary")
-#>
-function Join-Object
-{
- [CmdletBinding()]
- [OutputType([int])]
- Param
- (
- # List to join with $Right
- [Parameter(Mandatory=$true,
- Position=0)]
- [object[]]
- $Left,
-
- # List to join with $Left
- [Parameter(Mandatory=$true,
- Position=1)]
- [object[]]
- $Right,
-
- # Condition in which an item in the left matches an item in the right
- # typically something like: {$args[0].Id -eq $args[1].Id}
- [Parameter(Mandatory=$true,
- Position=2)]
- [scriptblock]
- $Where,
-
- # Properties from $Left we want in the output.
- # Each property can:
- # - Be a plain property name like "Name"
- # - Contain wildcards like "*"
- # - Be a hashtable like @{Name="Product Name";Expression={$_.Name}}. Name is the output property name
- # and Expression is the property value. The same syntax is available in select-object and it is
- # important for join-object because joined lists could have a property with the same name
- [Parameter(Mandatory=$true,
- Position=3)]
- [object[]]
- $LeftProperties,
-
- # Properties from $Right we want in the output.
- # Like LeftProperties, each can be a plain name, wildcard or hashtable. See the LeftProperties comments.
- [Parameter(Mandatory=$true,
- Position=4)]
- [object[]]
- $RightProperties,
-
- # Type of join.
- # AllInLeft will have all elements from Left at least once in the output, and might appear more than once
- # if the where clause is true for more than one element in right, Left elements with matches in Right are
- # preceded by elements with no matches. This is equivalent to an outer left join (or simply left join)
- # SQL statement.
- # AllInRight is similar to AllInLeft.
- # OnlyIfInBoth will cause all elements from Left to be placed in the output, only if there is at least one
- # match in Right. This is equivalent to a SQL inner join (or simply join) statement.
- # AllInBoth will have all entries in right and left in the output. Specifically, it will have all entries
- # in right with at least one match in left, followed by all entries in Right with no matches in left,
- # followed by all entries in Left with no matches in Right. This is equivalent to a SQL full join.
- [Parameter(Mandatory=$false,
- Position=5)]
- [ValidateSet("AllInLeft","OnlyIfInBoth","AllInBoth", "AllInRight")]
- [string]
- $Type="OnlyIfInBoth"
- )
-
- Begin
- {
- # a list of the matches in right for each object in left
- $leftMatchesInRight = new-object System.Collections.ArrayList
-
- # the count for all matches
- $rightMatchesCount = New-Object "object[]" $Right.Count
-
- for($i=0;$i -lt $Right.Count;$i++)
- {
- $rightMatchesCount[$i]=0
- }
- }
-
- Process
- {
- if($Type -eq "AllInRight")
- {
- # for AllInRight we just switch Left and Right
- $aux = $Left
- $Left = $Right
- $Right = $aux
- }
-
- # go over items in $Left and produce the list of matches
- foreach($leftItem in $Left)
- {
- $leftItemMatchesInRight = new-object System.Collections.ArrayList
- $null = $leftMatchesInRight.Add($leftItemMatchesInRight)
-
- for($i=0; $i -lt $right.Count;$i++)
- {
- $rightItem=$right[$i]
-
- if($Type -eq "AllInRight")
- {
- # For AllInRight, we want $args[0] to refer to the left and $args[1] to refer to right,
- # but since we switched left and right, we have to switch the where arguments
- $whereLeft = $rightItem
- $whereRight = $leftItem
- }
- else
- {
- $whereLeft = $leftItem
- $whereRight = $rightItem
- }
-
- if(Invoke-Command -ScriptBlock $where -ArgumentList $whereLeft,$whereRight)
- {
- $null = $leftItemMatchesInRight.Add($rightItem)
- $rightMatchesCount[$i]++
- }
-
- }
- }
-
- # go over the list of matches and produce output
- for($i=0; $i -lt $left.Count;$i++)
- {
- $leftItemMatchesInRight=$leftMatchesInRight[$i]
- $leftItem=$left[$i]
-
- if($leftItemMatchesInRight.Count -eq 0)
- {
- if($Type -ne "OnlyIfInBoth")
- {
- WriteJoinObjectOutput $leftItem $null $LeftProperties $RightProperties $Type
- }
-
- continue
- }
-
- foreach($leftItemMatchInRight in $leftItemMatchesInRight)
- {
- WriteJoinObjectOutput $leftItem $leftItemMatchInRight $LeftProperties $RightProperties $Type
- }
- }
- }
-
- End
- {
- #produce final output for members of right with no matches for the AllInBoth option
- if($Type -eq "AllInBoth")
- {
- for($i=0; $i -lt $right.Count;$i++)
- {
- $rightMatchCount=$rightMatchesCount[$i]
- if($rightMatchCount -eq 0)
- {
- $rightItem=$Right[$i]
- WriteJoinObjectOutput $null $rightItem $LeftProperties $RightProperties $Type
- }
- }
- }
- }
-}
-
-Join-Object -Left $Data_Set1 -Right $Data_Set2 -Where {$args[0].Name -eq $args[1].Name} -LeftProperties "Name","CurrentJobs" -RightProperties "printerstatus","detectederrorstate" -Type OnlyIfInBoth | format-table -HideTableHeaders
diff --git a/agents/windows/plugins/deprecated/windows_time.bat b/agents/windows/plugins/deprecated/windows_time.bat
deleted file mode 100644
index 035ff7c762b..00000000000
--- a/agents/windows/plugins/deprecated/windows_time.bat
+++ /dev/null
@@ -1,3 +0,0 @@
-@echo off
-echo ^<^<^<windows_time^>^>^>
-w32tm /query /status
diff --git a/agents/windows/plugins/deprecated/wmicchecks.bat b/agents/windows/plugins/deprecated/wmicchecks.bat
deleted file mode 100644
index 034ef8d0f0b..00000000000
--- a/agents/windows/plugins/deprecated/wmicchecks.bat
+++ /dev/null
@@ -1,3 +0,0 @@
-@echo off
-echo ^<^<^<wmic_process:sep(44)^>^>^>
-wmic process get name,pagefileusage,virtualsize,workingsetsize,usermodetime,kernelmodetime,ThreadCount /format:csv
diff --git a/agents/windows/plugins/hyperv_vms.ps1 b/agents/windows/plugins/hyperv_vms.ps1
index 7e36a08a8ff..7d8cca86f6f 100644
--- a/agents/windows/plugins/hyperv_vms.ps1
+++ b/agents/windows/plugins/hyperv_vms.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
Write-Host "<<>>"
Get-VM | select Name, State, Uptime, Status | ConvertTo-Csv -Delimiter "`t" -NoTypeInformation
diff --git a/agents/windows/plugins/hyperv_vms_guestinfos.ps1 b/agents/windows/plugins/hyperv_vms_guestinfos.ps1
index 9ccad8e068f..4d645ad5d8d 100644
--- a/agents/windows/plugins/hyperv_vms_guestinfos.ps1
+++ b/agents/windows/plugins/hyperv_vms_guestinfos.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
####
## Hyper-V VM state
####
diff --git a/agents/windows/plugins/iis_app_pool_state.ps1 b/agents/windows/plugins/iis_app_pool_state.ps1
index b3a9bbe0f75..fb0b023badc 100644
--- a/agents/windows/plugins/iis_app_pool_state.ps1
+++ b/agents/windows/plugins/iis_app_pool_state.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
# Copyright (C) 2021 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
diff --git a/agents/windows/plugins/kaspersky_av_client.vbs b/agents/windows/plugins/kaspersky_av_client.vbs
index 8599e5df42e..042cc8f6b90 100644
--- a/agents/windows/plugins/kaspersky_av_client.vbs
+++ b/agents/windows/plugins/kaspersky_av_client.vbs
@@ -13,7 +13,7 @@
' -----------------------------------------------------------------------------
Option Explicit
-Const CMK_VERSION = "2.3.0b1"
+Const CMK_VERSION = "2.4.0b1"
dim strStatisticsLoc, strProtection_BasesDate, strProtection_LastFscan, strProtection_LastConnected
dim strBIASLoc, strBIAS
dim objShell
diff --git a/agents/windows/plugins/mcafee_av_client.bat b/agents/windows/plugins/mcafee_av_client.bat
index c4e7384014b..9cf24ef2eac 100644
--- a/agents/windows/plugins/mcafee_av_client.bat
+++ b/agents/windows/plugins/mcafee_av_client.bat
@@ -1,5 +1,5 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
rem # -----------------------------------------------------------------------------
rem # Check_MK windows agent plugin to gather information about signature date
rem # of Mcafee Virusscan and ENS Anti-Virus software.
diff --git a/agents/windows/plugins/megaraid.bat b/agents/windows/plugins/megaraid.bat
index c0268a5a1d5..d5816a5a201 100644
--- a/agents/windows/plugins/megaraid.bat
+++ b/agents/windows/plugins/megaraid.bat
@@ -1,5 +1,5 @@
@ECHO off & setlocal EnableDelayedExpansion
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
REM **********************************************************************
REM * Script: megaraid.bat
REM * Author: Josef Hack
diff --git a/agents/windows/plugins/mk_dhcp_enabled.bat b/agents/windows/plugins/mk_dhcp_enabled.bat
index 41ba8cbf8d8..bbd53311b47 100644
--- a/agents/windows/plugins/mk_dhcp_enabled.bat
+++ b/agents/windows/plugins/mk_dhcp_enabled.bat
@@ -1,4 +1,4 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
echo ^<^<^^>^>
wmic path Win32_NetworkAdapterConfiguration get Description, dhcpenabled
diff --git a/agents/windows/plugins/mk_inventory.vbs b/agents/windows/plugins/mk_inventory.vbs
index 012671e76fc..0a9fbfb7351 100644
--- a/agents/windows/plugins/mk_inventory.vbs
+++ b/agents/windows/plugins/mk_inventory.vbs
@@ -4,7 +4,7 @@ If UCase(Right(Wscript.FullName, 11)) = "WSCRIPT.EXE" Then
Wscript.Quit
End If
-Const CMK_VERSION = "2.3.0b1"
+Const CMK_VERSION = "2.4.0b1"
CONST HKLM = &H80000002
Dim delay
@@ -296,6 +296,11 @@ Call startSection("win_computersystem",58,timeUntil)
systemVars = Array( "Manufacturer","Name","Model","InstallDate" )
Call getWMIObject("Win32_ComputerSystem",systemVars)
+' ComputerSystemProduct
+Call startSection("win_computersystemproduct",58,timeUntil)
+computerSystemProductVars = Array( "UUID" )
+Call getWMIObject("Win32_ComputerSystemProduct",computerSystemProductVars)
+
' Hard-Disk
Call startSection("win_disks",58,timeUntil)
diskVars = Array( "Manufacturer","InterfaceType","Model","Name","SerialNumber","Size","MediaType","Signature" )
diff --git a/agents/windows/plugins/mk_msoffice.ps1 b/agents/windows/plugins/mk_msoffice.ps1
index 89be7b158d9..86eb35a2e58 100755
--- a/agents/windows/plugins/mk_msoffice.ps1
+++ b/agents/windows/plugins/mk_msoffice.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
## filename for timestamp
$MK_CONFDIR = $env:MK_CONFDIR
diff --git a/agents/windows/plugins/mk_mysql.vbs b/agents/windows/plugins/mk_mysql.vbs
index 1a308835e49..5ad4a4bbcf5 100644
--- a/agents/windows/plugins/mk_mysql.vbs
+++ b/agents/windows/plugins/mk_mysql.vbs
@@ -6,10 +6,10 @@
' is running one or multiple MySQL server instances locally.
Option Explicit
-Const CMK_VERSION = "2.3.0b1"
+Const CMK_VERSION = "2.4.0b1"
Dim SHO, FSO, WMI, PROC
-Dim cfg_dir, cfg_file, service_list, service, instances, instance, cmd
+Dim cfg_dir, cfg_file, service_list, service, instances, instance, cmd, mysqlLocalIni
Dim output, pos, conn_args
Set instances = CreateObject("Scripting.Dictionary")
@@ -59,6 +59,16 @@ Function BuildPrintDefaultsCmd(instance_name, instance_cmd)
BuildPrintDefaultsCmd = print_defaults_cmd
End Function
+' The following logic also exists in the Linux bash script mk_mysql
+mysqlLocalIni = cfg_dir & "\mysql.local.ini"
+If Not FSO.FileExists(mysqlLocalIni) Then
+ dim inifile
+ Set inifile = FSO.CreateTextFile(mysqlLocalIni, True)
+ inifile.WriteLine("# This file is created by mk_mysql.vbs because some versions of mysqladmin")
+ inifile.WriteLine("# issue a warning if there are missing includes.")
+ inifile.Close
+End If
+
For Each instance In instances.Keys
' Use either an instance specific config file named mysql_.ini
' or the default mysql.ini file.
diff --git a/agents/windows/plugins/mk_oracle.ps1 b/agents/windows/plugins/mk_oracle.ps1
index 8d6ef1a38cb..63034cca4d3 100644
--- a/agents/windows/plugins/mk_oracle.ps1
+++ b/agents/windows/plugins/mk_oracle.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
@@ -65,6 +65,16 @@ $SYNC_SECTIONS = @("instance", "sessions", "logswitches", "undostat", "recovery_
# if debug is on, debug messages are shown on the screen
$DEBUG = 0
+# Entries assumed to be safe during the security permission check
+# ----------------------------------------------------------------------
+# Add to this list entries which
+# 1. May have write access to Oracle binaries
+# 2. Are not Administrators
+# Typical example: 'DOMAIN\DbInstaller' or 'MYPC\SpecialUser'
+# For the example above you would add $CURRENT_SAFE_ENTRIES = @("DOMAIN\DbInstaller", "MYPC\SpecialUser")
+$CURRENT_SAFE_ENTRIES = @()
+
+
# Sections that are run in the background and at a larger interval.
# These sections take longer to run and are therefore run in the background
# Note: sections not listed in ASYNC_ASM_SECTIONS, SYNC_SECTIONS or
@@ -107,6 +117,11 @@ $CACHE_MAXAGE = 600
# $EXCLUDE_mysid="sessions logswitches"
#
+function Test-Administrator {
+ return (([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole]::Administrator))
+}
+
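+# Evaluated once at startup: the result selects the debug log file name suffix below
+# and gates the sqlplus/tnsping/crsctl binary safety check further down.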
+$is_admin = Test-Administrator
Function debug_echo {
Param(
[Parameter(Mandatory = $True, Position = 1)]
@@ -117,6 +132,25 @@ Function debug_echo {
$MYTIME = Get-Date -Format o
echo "${MYTIME} DEBUG:${error_message}"
}
+ # log to %PROGRAMDATA%\Temp\cmk_oracle_plugin-{%USERNAME%}{user|admin}.log if %PROGRAMDATA%\Temp\cmk_enable_oracle_logging exists
+ try {
+ $temp = Join-Path -Path $env:PROGRAMDATA -ChildPath "Temp"
+ if (Test-Path (Join-Path -Path $temp -ChildPath "cmk_enable_oracle_logging")) {
+ $MYTIME = Get-Date -Format o
+ if ($is_admin) {
+ $mode = "admin"
+ }
+ else {
+ $mode = "user"
+ }
+ $user_name = (whoami).replace("\", ",")
+ $log_file = Join-Path -Path $temp -ChildPath "cmk_oracle_plugin-{$user_name}{$mode}.log"
+ Add-Content -Path $log_file -Value "${MYTIME} [LOG] ${error_message}"
+ }
+ }
+ catch {
+ # do nothing on "surprise"
+ }
}
# filename for timestamp
@@ -127,14 +161,22 @@ if (!$MK_CONFDIR) {
$MK_CONFDIR = "C:\ProgramData\checkmk\agent\config"
}
-# directory for tempfiles
$MK_TEMPDIR = $env:MK_TEMPDIR
+if ($is_admin) {
+ debug_echo "Admin mode"
+}
+else {
+ debug_echo "User mode"
+}
+
# To execute the script standalone in the environment of the installed agent
if (!$MK_TEMPDIR) {
$MK_TEMPDIR = "C:\ProgramData\checkmk\agent\tmp"
}
+debug_echo "MK_TEMPDIR = $MK_TEMPDIR"
+debug_echo "MK_CONFDIR = $MK_CONFDIR"
# Source the optional configuration file for this agent plugin
$CONFIG_FILE = "${MK_CONFDIR}\mk_oracle_cfg.ps1"
@@ -164,6 +206,8 @@ if ($ORACLE_HOME) {
# setting the output error language to be English
$env:NLS_LANG = "AMERICAN_AMERICA.AL32UTF8"
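+# PID file of the asynchronous SQL background job, evaluated by is_async_running below.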
+$ASYNC_PROC_PATH = "$MK_TEMPDIR\async_proc.txt"
+
#.
# .--SQL Queries---------------------------------------------------------.
# | ____ ___ _ ___ _ |
@@ -189,7 +233,122 @@ set echo on
$LESS_THAN = '<'
Function should_exclude($exclude, $section) {
- return (($exclude -Match "ALL") -or ($exclude -Match $section))
+ return (($exclude -Match "ALL") -or ($exclude -Match $section))
+}
+
+<#
+ .SYNOPSIS
+ Checks whether a given SID is allowed
+ .DESCRIPTION
+ For now the allowed groups are 'Domain Admins' (RID 512) and 'Enterprise Admins' (RID 519).
+ The reason: those groups need not be members of the Administrators group but are certainly safe.
+#>
+function Test-DomainSid([string]$sid) {
+ $domain_sid_pattern = "S-1-5-(.*)-51[29]"
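+ # e.g. the (made-up) SID "S-1-5-21-3623811015-3361044348-30300820-512" matches: RID 512 = Domain Admins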
+ ($sid -match $domain_sid_pattern)[0]
+ # TODO(sk): check whether the domain is valid; matches[1] contains the domain id.
+ # It is highly unlikely that the domain id will mismatch,
+ # but we may still check it in the future.
+}
+
+<#
+ .SYNOPSIS
+ Checks that some entry is allowed to have write permission to the file.
+ .DESCRIPTION
+ Uses two lists of safe entries: CURRENT_SAFE_ENTRIES and WINDOWS_SAFE_ENTRIES where
+ CURRENT_SAFE_ENTRIES is hardcoded in this script
+ WINDOWS_SAFE_ENTRIES is generated by WATO
+#>
+function Test-SafeEntry([string]$entry) {
+ $safe_entries = $CURRENT_SAFE_ENTRIES + $WINDOWS_SAFE_ENTRIES
+ foreach ( $safe in $safe_entries ) {
+ if (-not $safe ) {
+ continue
+ }
+ if ( $entry.ToLower() -eq $safe.ToLower()) {
+ return $True
+ }
+ }
+ return $False
+}
+
+<#
+ .SYNOPSIS
+ Checks that file is safe to run by administrator.
+ .DESCRIPTION
+ If non-admin users have Write, Modify or Full Control to the file
+ then returns error with detailed description.
+#>
+function Invoke-SafetyCheck( [String]$file ) {
+ if (-not (Test-Path -path $file)) {
+ return
+ }
+
+ # the groups are confirmed by Security team too
+ $admin_sids = @(
+ "S-1-5-18", # SYSTEM
+ "S-1-5-32-544" # Administrators
+ )
+ $forbidden_rights = @("Modify", "FullControl", "Write")
+ class Actor {
+ [string]$name
+ [string]$sid
+ [string]$rights
+ }
+ try {
+ $acl = Get-Acl $file -ErrorAction Stop
+ $access = $acl.Access
+ $admins = Get-LocalGroupMember -SID "S-1-5-32-544"
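+ # members of the local Administrators group, resolved once; individual admin accounts are skipped in the loop below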
+ $actors = $access | ForEach-Object {
+ $a = [Actor]::new()
+ $object = New-Object System.Security.Principal.NTAccount -ArgumentList $_.IdentityReference
+ $a.name = $object
+ try {
+ $a.sid = $object.Translate([System.Security.Principal.SecurityIdentifier])
+ $a.rights = $_.FileSystemRights.ToString()
+ $a
+ }
+ catch {
+ # silently skip entries whose name cannot be translated to a SID;
+ # as far as the OS is concerned, such entries do not exist
+ # the debug line below can be enabled to trace entries that could not be translated
+ # debug_echo "$_ $object cannot be translated"
+ }
+ }
+
+ foreach ($entry in $actors ) {
+ $name = $entry.name
+ $sid = $entry.sid
+ if ( $admin_sids -contains $sid ) {
+ # predefined admin groups are safe
+ continue
+ }
+ if (Test-DomainSid $sid) {
+ # 'Domain Admins' and 'Enterprise Admins' are safe too
+ continue
+ }
+ if ( Test-SafeEntry $name ) {
+ # Some entries may be assumed safe, see $CURRENT_SAFE_ENTRIES
+ continue
+ }
+ if ( $admins.Name -contains "$name" ) {
+ # members of local admin groups are safe
+ continue
+ }
+
+ # check for forbidden rights
+ $rights = $entry.rights
+ $forbidden_rights |
+ Foreach-Object {
+ if ($rights -match $_) {
+ return "$name has '$_' access permissions '$file'"
+ }
+ }
+ }
+ }
+ catch {
+ return "Exception '$_' during check '$file'"
+ }
}
Function get_dbversion_database ($ORACLE_HOME) {
@@ -206,24 +365,24 @@ Function get_dbversion_database ($ORACLE_HOME) {
}
-function is_async_running ($async_proc_path, $fullPath) {
- if (-not(Test-Path -path "$async_proc_path")) {
+function is_async_running ($fullPath) {
+ if (-not(Test-Path -path "$ASYNC_PROC_PATH")) {
# no file, no running process
return $false
}
- $proc_pid = (Get-Content ${async_proc_path})
+ $proc_pid = (Get-Content ${ASYNC_PROC_PATH})
# Check if the process with `$proc_pid` is still running AND if its commandline contains `$fullPath`.
# Our async process always contains `$fullPath` in their own command line.
$command_line = (Get-WmiObject -Query "SELECT CommandLine FROM Win32_Process WHERE ProcessID = $proc_pid").commandline
if ($command_line -like "*$fullPath*") {
- return $true
+ return $true
}
# The process to the PID cannot be found, so remove also the proc file
- rm $async_proc_path
+ rm $ASYNC_PROC_PATH
return $false
}
@@ -241,7 +400,7 @@ Function sqlcall {
[Parameter(Mandatory = $True, Position = 3)]
[int]$run_async,
- [Parameter(Mandatory = $True, Position = 5)]
+ [Parameter(Mandatory = $True, Position = 4)]
[string]$sqlsid
)
################################################################################
@@ -510,28 +669,27 @@ Function sqlcall {
#####################################################
# now we ensure that the async SQL Calls have up-to-date SQL outputs, running this job asynchronously...
#####################################################
- $async_proc_path = "$MK_TEMPDIR\async_proc.$sqlsid.txt"
debug_echo "about to call bg task $sql_message"
- if (-not(is_async_running($async_proc_path, $fullPath))) {
+ if (-not(is_async_running($fullPath))) {
$command = {
- param([string]$sql_connect, [string]$sql, [string]$path, [string]$sql_sid)
- $res = ("$sql" | sqlplus -s -L $sql_connect)
- if ($LastExitCode -eq 0) {
- $res | Set-Content $path
- }
- else {
- $stripped_res = '$sql_sid|FAILURE|' + $res | select-string -pattern 'ERROR'
- '<<<oracle_instance:sep(124)>>>' | Set-Content $path
- $stripped_res | Add-Content $path
- }
+ param([string]$sql_connect, [string]$sql, [string]$path, [string]$sql_sid)
+ $res = ("$sql" | sqlplus -s -L $sql_connect)
+ if ($LastExitCode -eq 0) {
+ $res | Set-Content $path
+ }
+ else {
+ $stripped_res = "$sql_sid|FAILURE|" + $res | select-string -pattern 'ERROR'
+ '<<<oracle_instance:sep(124)>>>' | Set-Content $path
+ $stripped_res | Add-Content $path
+ }
}
# This escaping is needed as it seems the here string attribute gets lost or has no effect when passing the
# variable to the script block
$escaped_sql = $THE_SQL.replace("'", "''")
$async_proc = Start-Process -PassThru powershell -windowstyle hidden -ArgumentList "-command invoke-command -scriptblock {$command} -argumentlist '$SQL_CONNECT', '$escaped_sql', '$fullpath', '$sqlsid'"
- $async_proc.id | set-content $async_proc_path
+ $async_proc.id | set-content $ASYNC_PROC_PATH
debug_echo "should be run here $run_async"
}
}
@@ -1034,7 +1192,9 @@ Function sql_dataguard_stats {
JOIN v$parameter vp on 1=1
JOIN v$instance i on 1=1
left outer join V$dataguard_stats ds on 1=1
- left outer join v$managed_standby ms on ms.process = 'MRP0'
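+ -- gv$managed_standby is cluster-wide: aggregate the MRP0 status of every RAC
+ -- instance into a single '<inst_id>.<status>' list instead of one per-instance row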
+ left outer join (select listagg(to_char(inst_id) || '.' || status, ', ') WITHIN GROUP (ORDER BY to_char(inst_id) || '.' || status) status
+ from gv$managed_standby
+ where process = 'MRP0') ms on 1=1
WHERE vp.name = 'log_archive_config'
AND vp.value is not null
ORDER BY 1;
@@ -1656,7 +1816,7 @@ Function sql_systemparameter {
from v$system_parameter, v$instance i
where name not like '!_%' ESCAPE '!';
'@
- echo $query_systemparameter
+ echo $query_systemparameter
}
@@ -2206,15 +2366,31 @@ if ($the_count -gt 0) {
# in some environments HKLM:\SYSTEM\CurrentControlSet\services\OracleService{ORACLE_SID}
# wasn't present, see SUP-10065
try {
- $key = 'HKLM:\SYSTEM\CurrentControlSet\services\OracleService' + $ORACLE_SID
- $val = (Get-ItemProperty -Path $key).ImagePath
+ $key = 'HKLM:\SYSTEM\CurrentControlSet\services\OracleService' + $ORACLE_SID
+ $val = (Get-ItemProperty -Path $key).ImagePath
}
catch {
- $key = 'HKLM:\SYSTEM\CurrentControlSet\services\OracleASMService' + $ORACLE_SID
- $val = (Get-ItemProperty -Path $key).ImagePath
+ $key = 'HKLM:\SYSTEM\CurrentControlSet\services\OracleASMService' + $ORACLE_SID
+ $val = (Get-ItemProperty -Path $key).ImagePath
+ }
+ # $val may contain c:\path\to\file or "c:\path\to\file" or "c:\path\to\file" PARAM
+ $ORACLE_HOME = $val.SubString(0, $val.LastIndexOf('\') - 4).Trim('"')
+
+ if ($is_admin -and ($SKIP_ORACLE_SECURITY_CHECK -ne 1)) {
+ # administrators should use only safe binary
+ $result = Invoke-SafetyCheck($ORACLE_HOME + "\bin\sqlplus.exe")
+ if ($Null -eq $result) {
+ $result = Invoke-SafetyCheck($ORACLE_HOME + "\bin\tnsping.exe")
+ }
+ if ($Null -eq $result) {
+ $result = Invoke-SafetyCheck($ORACLE_HOME + "\bin\crsctl.exe")
+ }
+ if ($Null -ne $result) {
+ Write-Output "<<>>"
+ Write-Output "$ORACLE_SID|FAILURE|$result - Execution is blocked because you try to run unsafe binary as an administrator. Please, disable 'Write', 'Modify' and 'Full control' access to the the file by non-admin users. Alternatively, you can try to adjust the settings in 'ORACLE databases (Linux, Solaris, AIX, Windows)'."
+ continue
+ }
}
- $ORACLE_HOME = $val.SubString(0, $val.LastIndexOf('\') - 4)
-
# reset errors found for this instance to zero
$ERROR_FOUND = 0
diff --git a/agents/windows/plugins/msexch_dag.ps1 b/agents/windows/plugins/msexch_dag.ps1
index f770f6c74f9..beb2bf9b4b5 100644
--- a/agents/windows/plugins/msexch_dag.ps1
+++ b/agents/windows/plugins/msexch_dag.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
## MSExchange Replication
## Load Exchange Management Powershell Plugin
try{ (Add-PSSnapin Microsoft.Exchange.Management.PowerShell.E2010 -ErrorAction:Stop) }
diff --git a/agents/windows/plugins/msexch_database.ps1 b/agents/windows/plugins/msexch_database.ps1
index 9fe87bb7454..dd8a2f01f33 100644
--- a/agents/windows/plugins/msexch_database.ps1
+++ b/agents/windows/plugins/msexch_database.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
## MS Exchange Database counters
## localize counter name
diff --git a/agents/windows/plugins/mssql.vbs b/agents/windows/plugins/mssql.vbs
index 4f2d611c29f..72d0e21c527 100644
--- a/agents/windows/plugins/mssql.vbs
+++ b/agents/windows/plugins/mssql.vbs
@@ -34,7 +34,7 @@
' -----------------------------------------------------------------------------
Option Explicit
-Const CMK_VERSION = "2.3.0b1"
+Const CMK_VERSION = "2.4.0b1"
Dim WMI, FSO, objStdout, SHO, items, objItem, prop, instVersion, registry
Dim sources, instances, instance, instance_id, instance_name, instance_excluded, service_name
@@ -707,29 +707,67 @@ For Each instance_id In instances.Keys: Do ' Continue trick
' Loop all databases to get the date of the last backup. Only show databases
' which have at least one backup
+ ' The last backup date is converted to UTC in the process by removing the timezone offset, which is
+ ' given in 15-minute intervals (or as 127 if unknown)
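+ ' e.g. a backup finished at UTC+2 is stored with time_zone = 8, so 60 * 15 * 8 = 7200 seconds are subtracted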
Dim lastBackupDate, backup_type, is_primary_replica, replica_id, backup_machine_name, backup_database, found_db_backups
addOutput(sections("backup"))
- sqlString = "DECLARE @HADRStatus sql_variant; DECLARE @SQLCommand nvarchar(max); " & _
- "SET @HADRStatus = (SELECT SERVERPROPERTY ('IsHadrEnabled')); " & _
- "IF (@HADRStatus IS NULL or @HADRStatus <> 1) " & _
- "BEGIN " & _
- "SET @SQLCommand = 'SELECT CONVERT(VARCHAR, DATEADD(s, DATEDIFF(s, ''19700101'', MAX(backup_finish_date)), ''19700101''), 120) AS last_backup_date, " & _
- "type, machine_name, ''True'' as is_primary_replica, ''1'' as is_local, '''' as replica_id,database_name FROM msdb.dbo.backupset " & _
- "WHERE UPPER(machine_name) = UPPER(CAST(SERVERPROPERTY(''Machinename'') AS VARCHAR)) " & _
- "GROUP BY type, machine_name,database_name ' " & _
- "END " & _
- "ELSE " & _
- "BEGIN " & _
- "SET @SQLCommand = 'SELECT CONVERT(VARCHAR, DATEADD(s, DATEDIFF(s, ''19700101'', MAX(b.backup_finish_date)), ''19700101''), 120) AS last_backup_date, " & _
- "b.type, b.machine_name, isnull(rep.is_primary_replica,0) as is_primary_replica, rep.is_local, isnull(convert(varchar(40), rep.replica_id), '''') AS replica_id,database_name " & _
- "FROM msdb.dbo.backupset b " & _
- "LEFT OUTER JOIN sys.databases db ON b.database_name = db.name " & _
- "LEFT OUTER JOIN sys.dm_hadr_database_replica_states rep ON db.database_id = rep.database_id " & _
- "WHERE (rep.is_local is null or rep.is_local = 1) " & _
- "AND (rep.is_primary_replica is null or rep.is_primary_replica = ''True'') and UPPER(machine_name) = UPPER(CAST(SERVERPROPERTY(''Machinename'') AS VARCHAR)) " & _
- "GROUP BY type, rep.replica_id, rep.is_primary_replica, rep.is_local, b.database_name, b.machine_name, rep.synchronization_state, rep.synchronization_health' " & _
- "END " & _
- "EXEC (@SQLCommand)"
+ sqlString = "" & _
+ "DECLARE @HADRStatus sql_variant; " & _
+ "DECLARE @SQLCommand nvarchar(max); " & _
+ "SET @HADRStatus = (SELECT SERVERPROPERTY ('IsHadrEnabled')); " & _
+ "IF (@HADRStatus IS NULL or @HADRStatus <> 1) " & _
+ "BEGIN " & _
+ "SET @SQLCommand = ' " & _
+ "SELECT " & _
+ " CONVERT(VARCHAR, DATEADD(s, MAX(DATEDIFF(s, ''19700101'', backup_finish_date) - (CASE WHEN time_zone IS NOT NULL AND time_zone <> 127 THEN 60 * 15 * time_zone ELSE 0 END)), ''19700101''), 120) AS last_backup_date, " & _
+ " type, " & _
+ " machine_name, " & _
+ " ''True'' as is_primary_replica, " & _
+ " ''1'' as is_local, " & _
+ " '''' as replica_id, " & _
+ " sys.databases.name AS database_name " & _
+ "FROM " & _
+ " msdb.dbo.backupset " & _
+ " LEFT OUTER JOIN sys.databases ON sys.databases.name = msdb.dbo.backupset.database_name " & _
+ "WHERE " & _
+ " UPPER(machine_name) = UPPER(CAST(SERVERPROPERTY(''Machinename'') AS VARCHAR)) " & _
+ "GROUP BY " & _
+ " type, " & _
+ " machine_name, " & _
+ " sys.databases.name " & _
+ "' " & _
+ "END " & _
+ "ELSE " & _
+ "BEGIN " & _
+ "SET @SQLCommand = ' " & _
+ "SELECT " & _
+ " CONVERT(VARCHAR, DATEADD(s, MAX(DATEDIFF(s, ''19700101'', b.backup_finish_date) - (CASE WHEN time_zone IS NOT NULL AND time_zone <> 127 THEN 60 * 15 * time_zone ELSE 0 END)), ''19700101''), 120) AS last_backup_date," & _
+ " b.type, " & _
+ " b.machine_name, " & _
+ " isnull(rep.is_primary_replica, 0) as is_primary_replica, " & _
+ " rep.is_local, " & _
+ " isnull(convert(varchar(40), rep.replica_id), '''') AS replica_id, " & _
+ " db.name AS database_name " & _
+ "FROM " & _
+ " msdb.dbo.backupset b " & _
+ " LEFT OUTER JOIN sys.databases db ON b.database_name = db.name " & _
+ " LEFT OUTER JOIN sys.dm_hadr_database_replica_states rep ON db.database_id = rep.database_id " & _
+ "WHERE " & _
+ " (rep.is_local is null or rep.is_local = 1) " & _
+ " AND (rep.is_primary_replica is null or rep.is_primary_replica = ''True'') " & _
+ " AND UPPER(machine_name) = UPPER(CAST(SERVERPROPERTY(''Machinename'') AS VARCHAR)) " & _
+ "GROUP BY " & _
+ " type, " & _
+ " rep.replica_id, " & _
+ " rep.is_primary_replica, " & _
+ " rep.is_local, " & _
+ " db.name, " & _
+ " b.machine_name, " & _
+ " rep.synchronization_state, " & _
+ " rep.synchronization_health " & _
+ "' " & _
+ "END " & _
+ "EXEC (@SQLCommand)"
Set databaseResponse = databaseSession.queryDatabase("master", sqlString)
Set found_db_backups = CreateObject("Scripting.Dictionary")
@@ -742,7 +780,7 @@ For Each instance_id In instances.Keys: Do ' Continue trick
backup_database = Trim(record("database_name"))
If dbNames.Exists(backup_database) Then
backup_database = Replace(backup_database, " ", "_")
- found_db_backups.add backup_database, ""
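+ ' keys are stored lower-cased so the "no backup found" lookup below is case-insensitive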
+ found_db_backups.add LCase(backup_database), ""
lastBackupDate = Trim(record("last_backup_date"))
backup_type = Trim(record("type"))
@@ -756,7 +794,7 @@ For Each instance_id In instances.Keys: Do ' Continue trick
If lastBackupDate <> "" and (replica_id = "" or is_primary_replica = "True") Then
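+ ' the last backup date is UTC now (see query above), hence the explicit +00:00 offset appended below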
addOutput("MSSQL_" & instance_id & "|" & backup_database & _
- "|" & Replace(lastBackupDate, " ", "|") & "|" & backup_type)
+ "|" & Replace(lastBackupDate, " ", "|") & "+00:00" & "|" & backup_type)
End If
End If
Next
@@ -771,7 +809,7 @@ For Each instance_id In instances.Keys: Do ' Continue trick
If databaseResponse.hasError Then
addOutput("MSSQL_" & instance_id & "|" & backup_database & "|-|-|-|" & databaseResponse.errorMessage)
End If
- If Not found_db_backups.Exists(backup_database) Then
+ If Not found_db_backups.Exists(LCase(backup_database)) Then
addOutput("MSSQL_" & instance_id & "|" & backup_database & "|-|-|-|no backup found")
End If
Next
diff --git a/agents/windows/plugins/netstat_an.bat b/agents/windows/plugins/netstat_an.bat
index 70657ab8448..396df7b595a 100644
--- a/agents/windows/plugins/netstat_an.bat
+++ b/agents/windows/plugins/netstat_an.bat
@@ -1,4 +1,4 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
 echo ^<^<^<win_netstat^>^>^>
netstat -anp TCP & netstat -anp TCPv6 & netstat -anp UDP
diff --git a/agents/windows/plugins/nvidia_smi.ps1 b/agents/windows/plugins/nvidia_smi.ps1
index e6e1fa4f6c4..d0ae10aa9db 100644
--- a/agents/windows/plugins/nvidia_smi.ps1
+++ b/agents/windows/plugins/nvidia_smi.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
Write-Host "<<>>"
diff --git a/agents/windows/plugins/rds_licenses.vbs b/agents/windows/plugins/rds_licenses.vbs
index 0ce2f196267..b62ef5364ff 100644
--- a/agents/windows/plugins/rds_licenses.vbs
+++ b/agents/windows/plugins/rds_licenses.vbs
@@ -6,7 +6,7 @@
'----------------------------------------------------------------------------
' The entire argument block is currently not configurable via WATO
-Const CMK_VERSION = "2.3.0b1"
+Const CMK_VERSION = "2.4.0b1"
SET Args = WScript.Arguments
NameSpace = "root\cimv2"
ClassName = "Win32_TSIssuedLicense"
diff --git a/agents/windows/plugins/rstcli.bat b/agents/windows/plugins/rstcli.bat
index 3ff045fed15..8949bb81f75 100644
--- a/agents/windows/plugins/rstcli.bat
+++ b/agents/windows/plugins/rstcli.bat
@@ -1,5 +1,5 @@
@ECHO off & setlocal EnableDelayedExpansion
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
REM ***
 REM * plugin to monitor Intel RST raids
 REM * customize the rstcli path to your needs
diff --git a/agents/windows/plugins/sansymphony.ps1 b/agents/windows/plugins/sansymphony.ps1
index 5dda5e54244..c05603e43c0 100644
--- a/agents/windows/plugins/sansymphony.ps1
+++ b/agents/windows/plugins/sansymphony.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
# check_datacore.ps1
# Version 0.2
# Author : Andre Eckstein, Andre.Eckstein@Bechtle.com
diff --git a/agents/windows/plugins/storcli.bat b/agents/windows/plugins/storcli.bat
index ac79916846e..a0eec99c7d3 100644
--- a/agents/windows/plugins/storcli.bat
+++ b/agents/windows/plugins/storcli.bat
@@ -1,5 +1,5 @@
@ECHO off & setlocal EnableDelayedExpansion
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
REM ***
 REM * plugin to monitor RAID status via StorCLI utility
REM * customize StorCli path to your needs
diff --git a/agents/windows/plugins/tsm_checks.bat b/agents/windows/plugins/tsm_checks.bat
index 48e26333114..b23a1d8d896 100644
--- a/agents/windows/plugins/tsm_checks.bat
+++ b/agents/windows/plugins/tsm_checks.bat
@@ -1,5 +1,5 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
cd C:\Progra~1\Tivoli\TSM\baclient\
SET COMMAND=dsmadmc -dataonly=YES -id=admin -password=password -displaymode=table
diff --git a/agents/windows/plugins/veeam_backup_status.ps1 b/agents/windows/plugins/veeam_backup_status.ps1
index 0d7cd0dcf60..1f73d511eb6 100644
--- a/agents/windows/plugins/veeam_backup_status.ps1
+++ b/agents/windows/plugins/veeam_backup_status.ps1
@@ -1,5 +1,5 @@
param ([switch] $Debug)
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
## VEEAM Backups
## This powershell script needs to be run with the 64bit powershell
## and thus from a 64bit check_mk agent
diff --git a/agents/windows/plugins/win_dhcp_pools.bat b/agents/windows/plugins/win_dhcp_pools.bat
index 1440131636e..912f4174cec 100644
--- a/agents/windows/plugins/win_dhcp_pools.bat
+++ b/agents/windows/plugins/win_dhcp_pools.bat
@@ -1,4 +1,4 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
 echo ^<^<^<win_dhcp_pools^>^>^>
netsh dhcp server show mibinfo | find /V ": dhcp." | find /v "Server may not function properly." | find /v "Unable to determine the DHCP Server version for the Server" | find /V "DHCP-Serverversion wurde" | find /V "nicht richtig funktionieren." | find /V ": dhcp server show mibinfo."
diff --git a/agents/windows/plugins/win_dmidecode.bat b/agents/windows/plugins/win_dmidecode.bat
index b854eef06ea..8d8a861efc6 100644
--- a/agents/windows/plugins/win_dmidecode.bat
+++ b/agents/windows/plugins/win_dmidecode.bat
@@ -1,5 +1,5 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
REM ***
REM * To be able to run this plugin you need to install dmidecode
diff --git a/agents/windows/plugins/win_license.bat b/agents/windows/plugins/win_license.bat
index c5303ab9b82..9a5b6ceb50d 100644
--- a/agents/windows/plugins/win_license.bat
+++ b/agents/windows/plugins/win_license.bat
@@ -1,5 +1,5 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
REM ***
REM * plugin to gather and output Windows activation status
REM ***
diff --git a/agents/windows/plugins/win_printers.ps1 b/agents/windows/plugins/win_printers.ps1
index 28741c85a52..8895680b680 100644
--- a/agents/windows/plugins/win_printers.ps1
+++ b/agents/windows/plugins/win_printers.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
#
# http://blogs.technet.com/b/heyscriptingguy/archive/2006/12/04/how-can-i-expand-the-width-of-the-windows-powershell-console.aspx
# Output is a 4 column table of (Name: str, Jobs: int, PrinterStatus: int, Detectederrorstate: int)
diff --git a/agents/windows/plugins/windows_broadcom_bonding.bat b/agents/windows/plugins/windows_broadcom_bonding.bat
index 491c6f320c8..e07f4c7724b 100644
--- a/agents/windows/plugins/windows_broadcom_bonding.bat
+++ b/agents/windows/plugins/windows_broadcom_bonding.bat
@@ -1,5 +1,5 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
 echo ^<^<^<windows_broadcom_bonding^>^>^>
rem Tested with BroadCom BASP v1.6.3
diff --git a/agents/windows/plugins/windows_if.ps1 b/agents/windows/plugins/windows_if.ps1
index d41fb4cfba7..9d2dc130e68 100644
--- a/agents/windows/plugins/windows_if.ps1
+++ b/agents/windows/plugins/windows_if.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
## runs on windows 2012 or newer
## TeamName TeamingMode LoadBalancingAlgorithm MemberMACAddresses MemberNames MemberDescriptions Speed GUID
diff --git a/agents/windows/plugins/windows_intel_bonding.bat b/agents/windows/plugins/windows_intel_bonding.bat
index e06924c5144..73c04023a11 100644
--- a/agents/windows/plugins/windows_intel_bonding.bat
+++ b/agents/windows/plugins/windows_intel_bonding.bat
@@ -1,5 +1,5 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
 echo ^<^<^<windows_intel_bonding^>^>^>
diff --git a/agents/windows/plugins/windows_multipath.vbs b/agents/windows/plugins/windows_multipath.vbs
index 55e8063619e..936331a0809 100644
--- a/agents/windows/plugins/windows_multipath.vbs
+++ b/agents/windows/plugins/windows_multipath.vbs
@@ -4,7 +4,7 @@
' Date: 2011-02-15
' -----------------------------------------------'
Option Explicit
-Const CMK_VERSION = "2.3.0b1"
+Const CMK_VERSION = "2.4.0b1"
Dim objWMIService, objItem, colItems, strComputer
' On Error Resume Next
diff --git a/agents/windows/plugins/windows_os_bonding.ps1 b/agents/windows/plugins/windows_os_bonding.ps1
index 2fcc805a11d..fd52e473b32 100644
--- a/agents/windows/plugins/windows_os_bonding.ps1
+++ b/agents/windows/plugins/windows_os_bonding.ps1
@@ -1,4 +1,4 @@
-$CMK_VERSION = "2.3.0b1"
+$CMK_VERSION = "2.4.0b1"
## Windows Bonding interfaces
## you need this agent plugin if you want to monitor bonding interfaces
## on windows configured on operating system level
diff --git a/agents/windows/plugins/windows_tasks.ps1 b/agents/windows/plugins/windows_tasks.ps1
index 2617e163d31..383bcd52bf9 100644
--- a/agents/windows/plugins/windows_tasks.ps1
+++ b/agents/windows/plugins/windows_tasks.ps1
@@ -1,44 +1,68 @@
-$CMK_VERSION = "2.3.0b1"
-####
-## Monitor Windows Tasks
-##
+$CMK_VERSION = "2.4.0b1"
Write-Host "<<>>"
-$lang = Get-UICulture | select -expand LCID
+$oldPreference = $ErrorActionPreference
+$ErrorActionPreference = "stop"
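+# With ErrorActionPreference = "stop", a missing Get-ScheduledTask cmdlet (Windows 7
+# or older) raises a terminating error and control passes to the legacy Catch branch.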
+try {
+ ## These PowerShell functions require Windows 8 or above.
+ $tasks = Get-ScheduledTask
+ foreach ($task in $tasks) {
+ $task_name = "$($task.TaskPath.ToString())$($task.TaskName.ToString())"
+ $task_info = $task | Get-ScheduledTaskInfo
+ if (!$task.TaskPath.StartsWith("\Microsoft")){
+ Write-Host "TaskName `t: "$task_name
+ Write-Host "Last Run Time `t: "$task_info.LastRunTime
+ Write-Host "Next Run Time `t: "$task_info.NextRunTime
+ Write-Host "Last Result `t: "$task_info.LastTaskResult
+ if ($task.'State' -eq 'Disabled'){
+ Write-Host "Scheduled Task State `t: Disabled"
+ } else {
+ Write-Host "Scheduled Task State `t: Enabled"
+ }
+ Write-Host ""
+ }
+ }
+} Catch {
+ ## Functionality related to Windows 7 or older. We keep this for compatibility. Do not update
+ ## this. We don't support these versions of Windows.
+ $lang = Get-UICulture | select -expand LCID
-## "..",".\n..","aaa"\n
-## We assume that correct newline can be placed only after "
-## Processing
-## \r\n -> 'Z_Z'
-## " -> 'o_o'
-## o_oZ_Z -> '\"\r\n' # the only valid new line symbols
-## o_o -> '"'
-## Z_Z -> ''
+ ## "..",".\n..","aaa"\n
+ ## We assume that correct newline can be placed only after "
+ ## Processing
+ ## \r\n -> 'Z_Z'
+ ## " -> 'o_o'
+ ## o_oZ_Z -> '\"\r\n' # the only valid new line symbols
+ ## o_o -> '"'
+ ## Z_Z -> ''
-## encoding "\n and "
-$raw = (schtasks /query /fo csv -v | out-string) -replace '\r\n', 'Z_Z'
-$l = $raw -replace '\"', 'o_o'
-## decoding
-$d = $l -replace 'o_oZ_Z', "`"`r`n"
-$d = $d -replace 'o_o', '"'
-$d = $d -replace 'Z_Z', ''
-$tasks = $d | ConvertFrom-Csv
+ ## encoding "\n and "
+ $raw = (schtasks /query /fo csv -v | out-string) -replace '\r\n', 'Z_Z'
+ $l = $raw -replace '\"', 'o_o'
+ ## decoding
+ $d = $l -replace 'o_oZ_Z', "`"`r`n"
+ $d = $d -replace 'o_o', '"'
+ $d = $d -replace 'Z_Z', ''
+ $tasks = $d | ConvertFrom-Csv
-if ($lang -eq 1031){
- foreach ($task in $tasks){
- if (($task.HostName -match "^$($Env:Computername)$") -and ($task.AufgabenName -notlike '\Microsoft\*')){
- Write-Host "TaskName `t: "$task.AufgabenName
- Write-Host "Last Run Time `t: "$task.'Letzte Laufzeit'
- Write-Host "Next Run Time `t: "$task.'Nächste Laufzeit'
- Write-Host "Last Result `t: "$task.'Letztes Ergebnis'
- if ($task.'Status der geplanten Aufgabe' -eq 'Aktiviert'){
- Write-Host "Scheduled Task State `t: Enabled"
- } else {
- Write-Host "Scheduled Task State `t: "$task.'Status der geplanten Aufgabe'
- }
- Write-Host ""
- }
- }
-} elseif ($lang -eq 1033 -or $lang -eq 2057){
- $tasks | ? {$_.HostName -match "^$($Env:Computername)$" -and $_.TaskName -notlike '\Microsoft\*'} | fl taskname,"last run time","next run time","last result","scheduled task state" | out-string -width 4096
+ if ($lang -eq 1031){
+ foreach ($task in $tasks){
+ if (($task.HostName -match "^$($Env:Computername)$") -and ($task.AufgabenName -notlike '\Microsoft\*')){
+ Write-Host "TaskName `t: "$task.AufgabenName
+ Write-Host "Last Run Time `t: "$task.'Letzte Laufzeit'
+ Write-Host "Next Run Time `t: "$task.'Nächste Laufzeit'
+ Write-Host "Last Result `t: "$task.'Letztes Ergebnis'
+ if ($task.'Status der geplanten Aufgabe' -eq 'Aktiviert'){
+ Write-Host "Scheduled Task State `t: Enabled"
+ } else {
+ Write-Host "Scheduled Task State `t: "$task.'Status der geplanten Aufgabe'
+ }
+ Write-Host ""
+ }
+ }
+ } elseif ($lang -eq 1033 -or $lang -eq 2057){
+ $tasks | ? {$_.HostName -match "^$($Env:Computername)$" -and $_.TaskName -notlike '\Microsoft\*'} | fl taskname,"last run time","next run time","last result","scheduled task state" | out-string -width 4096
+ }
+} Finally {
+ $ErrorActionPreference=$oldPreference
}
diff --git a/agents/windows/plugins/windows_updates.vbs b/agents/windows/plugins/windows_updates.vbs
index 7f54c58ab09..4858e42ad4a 100644
--- a/agents/windows/plugins/windows_updates.vbs
+++ b/agents/windows/plugins/windows_updates.vbs
@@ -21,7 +21,7 @@
' -----------------------------------------------------------------------------------------
Option Explicit
-Const CMK_VERSION = "2.3.0b1"
+Const CMK_VERSION = "2.4.0b1"
Dim fso
Dim objStdout
diff --git a/agents/windows/plugins/wmic_if.bat b/agents/windows/plugins/wmic_if.bat
index 95edd3233f3..35357fb8541 100644
--- a/agents/windows/plugins/wmic_if.bat
+++ b/agents/windows/plugins/wmic_if.bat
@@ -1,4 +1,4 @@
@echo off
-set CMK_VERSION="2.3.0b1"
+set CMK_VERSION="2.4.0b1"
echo ^<^<^<winperf_if:sep(44)^>^>^>
wmic path Win32_NetworkAdapter get speed,macaddress,name,netconnectionid,netconnectionstatus /format:csv
diff --git a/agents/wnx/.vscode/settings.json b/agents/wnx/.vscode/settings.json
new file mode 100644
index 00000000000..7a73a41bfdf
--- /dev/null
+++ b/agents/wnx/.vscode/settings.json
@@ -0,0 +1,2 @@
+{
+}
\ No newline at end of file
diff --git a/agents/wnx/Makefile b/agents/wnx/Makefile
index ab7225ce566..17c6c5b7f3b 100644
--- a/agents/wnx/Makefile
+++ b/agents/wnx/Makefile
@@ -25,7 +25,7 @@ ASIO_VERSION := asio-1.24.0-patched
PACKAGE_FMT := $(TOP_SRC_DIR)\third_party\fmt
# ATTENTION: this value is hardcoded in the vcxproj files - fix it, please, too
-FMT_VERSION := fmt-9.0.0
+FMT_VERSION := fmt-10.2.1
PACKAGE_OHM := $(TOP_SRC_DIR)\third_party\openhardwaremonitor
# ATTENTION: this value is hardcoded in the vcxproj files - fix it, please, too
diff --git a/agents/wnx/agent_keeper/Cargo.lock b/agents/wnx/agent_keeper/Cargo.lock
index 811c6c8998f..c0fbdfa2deb 100644
--- a/agents/wnx/agent_keeper/Cargo.lock
+++ b/agents/wnx/agent_keeper/Cargo.lock
@@ -574,6 +574,12 @@ dependencies = [
"cfg-if",
]
+[[package]]
+name = "equivalent"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+
[[package]]
name = "errno"
version = "0.3.1"
@@ -796,9 +802,9 @@ dependencies = [
[[package]]
name = "h2"
-version = "0.3.21"
+version = "0.3.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833"
+checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9"
dependencies = [
"bytes",
"fnv",
@@ -815,9 +821,9 @@ dependencies = [
[[package]]
name = "hashbrown"
-version = "0.12.3"
+version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
[[package]]
name = "heck"
@@ -957,11 +963,11 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "1.9.3"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
+checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
dependencies = [
- "autocfg",
+ "equivalent",
"hashbrown",
]
@@ -1044,9 +1050,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.144"
+version = "0.2.153"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1"
+checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
[[package]]
name = "linux-raw-sys"
@@ -1139,9 +1145,9 @@ dependencies = [
[[package]]
name = "mio"
-version = "0.8.8"
+version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
+checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
dependencies = [
"libc",
"wasi",
@@ -1254,9 +1260,9 @@ checksum = "9670a07f94779e00908f3e686eab508878ebb390ba6e604d3a284c00e8d0487b"
[[package]]
name = "openssl"
-version = "0.10.57"
+version = "0.10.62"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c"
+checksum = "8cde4d2d9200ad5909f8dac647e29482e07c3a35de8a13fce7c9c7747ad9f671"
dependencies = [
"bitflags 2.4.0",
"cfg-if",
@@ -1286,18 +1292,18 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-src"
-version = "111.27.0+1.1.1v"
+version = "300.2.1+3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "06e8f197c82d7511c5b014030c9b1efeda40d7d5f99d23b4ceed3524a5e63f02"
+checksum = "3fe476c29791a5ca0d1273c697e96085bbabbbea2ef7afd5617e78a4b40332d3"
dependencies = [
"cc",
]
[[package]]
name = "openssl-sys"
-version = "0.9.92"
+version = "0.9.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db7e971c2c2bba161b2d2fdf37080177eff520b3bc044787c7f1f5f9e78d869b"
+checksum = "c1665caf8ab2dc9aef43d1c0023bd904633a6a05cb30b0ad59bec2ae986e57a7"
dependencies = [
"cc",
"libc",
@@ -1546,9 +1552,9 @@ dependencies = [
[[package]]
name = "rustix"
-version = "0.37.19"
+version = "0.37.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d"
+checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2"
dependencies = [
"bitflags 1.3.2",
"errno",
diff --git a/agents/wnx/agent_keeper/Cargo.toml b/agents/wnx/agent_keeper/Cargo.toml
index cbf5b9f5873..051029ae30d 100644
--- a/agents/wnx/agent_keeper/Cargo.toml
+++ b/agents/wnx/agent_keeper/Cargo.toml
@@ -14,8 +14,8 @@ cmk-agent-ctl = { path = "../../../packages/cmk-agent-ctl" }
[dependencies.windows-sys]
version = "0.48"
features = [
- "Win32_Foundation",
- "Win32_Security",
- "Win32_System_Threading",
- "Win32_UI_WindowsAndMessaging",
+ "Win32_Foundation",
+ "Win32_Security",
+ "Win32_System_Threading",
+ "Win32_UI_WindowsAndMessaging",
]
diff --git a/agents/wnx/agent_keeper/run.cmd b/agents/wnx/agent_keeper/run.cmd
index c15fe4ce6b0..2cb524506d5 100644
--- a/agents/wnx/agent_keeper/run.cmd
+++ b/agents/wnx/agent_keeper/run.cmd
@@ -1,95 +1,2 @@
-@echo off
-::
-:: Script to build/test/sign/deploy
-::
-:: Prefix worker, standart for NON-COMPOSITE run.cmd
-::
-:: Similar in functionality with run scripts from packages
-::
-
-SETLOCAL EnableDelayedExpansion
-
-if "%*" == "" (
-echo: Run default...
-set worker_arg_all=1
-) else (
-:CheckOpts
-if "%~1"=="-h" goto Usage
-if "%~1"=="-help" goto Usage
-if "%~1"=="-?" goto Usage
-
-if not "%worker_var_value%" == "" (
-set %worker_var_name%=%worker_var_value%
-set worker_var_value=
-)
-
-if "%~1"=="-A" (set worker_arg_all=1) & shift & goto CheckOpts
-if "%~1"=="--all" (set worker_arg_all=1) & shift & goto CheckOpts
-
-if "%~1"=="-c" (set worker_arg_clean=1) & shift & goto CheckOpts
-if "%~1"=="--clean" (set worker_arg_clean=1) & shift & goto CheckOpts
-
-if "%~1"=="-f" (set worker_arg_format=1) & shift & goto CheckOpts
-if "%~1"=="--format" (set worker_arg_format=1) & shift & goto CheckOpts
-
-if "%~1"=="-F" (set worker_arg_check_format=1) & shift & goto CheckOpts
-if "%~1"=="--check-format" (set worker_arg_check_format=1) & shift & goto CheckOpts
-
-if "%~1"=="-C" (set worker_arg_clippy=1) & shift & goto CheckOpts
-if "%~1"=="--clippy" (set worker_arg_clippy=1) & shift & goto CheckOpts
-
-if "%~1"=="-B" (set worker_arg_build=1) & shift & goto CheckOpts
-if "%~1"=="--build" (set worker_arg_build=1) & shift & goto CheckOpts
-
-if "%~1"=="-T" (set worker_arg_test=1) & shift & goto CheckOpts
-if "%~1"=="--test" (set worker_arg_test=1) & shift & goto CheckOpts
-
-if "%~1"=="-D" (set worker_arg_doc=1) & shift & goto CheckOpts
-if "%~1"=="--documentation" (set worker_arg_doc=1) & shift & goto CheckOpts
-
-if "%~1"=="--var" (set worker_var_name=%~2) & (set worker_var_value=%~3) & shift & shift & shift & goto CheckOpts
-
-if "%~1"=="--sign" (set worker_arg_sign_file=%~2) & (set worker_arg_sign_secret=%~3) & (set worker_arg_sign=1) & shift & shift & shift goto CheckOpts
-)
-if "%worker_arg_all%"=="1" (set worker_arg_clippy=1) & (set worker_arg_build=1) & (set worker_arg_test=1) & (set worker_arg_check_format=1)
-
-:: Configure environment variables
-set worker_cur_dir=%cd%
-call setup_config.cmd
-if ERRORLEVEL 1 powershell Write-Host "Failed to configure" -Foreground Red && exit /b 99
-set worker_arte=%worker_root_dir%\artefacts
-mkdir %worker_arte% 2> nul
-
-:: Setup shortcut call for CI(to make names shorter than 255 chars)
-set ci_root_dir=workdir\workspace\checkmk\master
-set ci_junction_to_root_dir=y
-set script_to_run=.\scripts\cargo_build_core.cmd
-powershell -ExecutionPolicy ByPass -File %worker_root_dir%/scripts/windows/shorten_dir_and_call.ps1 %ci_root_dir% %ci_junction_to_root_dir% %script_to_run%
-GOTO :EOF
-
-
-:Usage
-echo.
-echo.Usage:
-echo.
-echo.%~nx0 [arguments]
-echo.
-echo.Available arguments:
-echo. -?, -h, --help display help and exit
-echo. -A, --all shortcut to -F -C -B -T: check format, clippy, build and test
-echo. -c, --clean clean artifacts
-echo. -C, --clippy run clippy for %worker_name%
-echo. -D, --documentation create documentation
-echo. -f, --format format sources
-echo. -F, --check-format check for correct formatting
-echo. -B, --build build %worker_name%
-echo. -T, --test test %worker_name%
-echo. --sign file secret sign %worker_name% with file and secret
-echo.
-echo.Examples:
-echo.
-echo %~nx0 --clippy
-echo %~nx0 --build --test
-echo %~nx0 --build --test -S mypasswd -C
-echo %~nx0 -A
-GOTO :EOF
+@echo "This is a temporary wrapper script for run.ps1."
+@pwsh ./run.ps1 %*
diff --git a/agents/wnx/agent_keeper/run.ps1 b/agents/wnx/agent_keeper/run.ps1
new file mode 100644
index 00000000000..30bb56b856d
--- /dev/null
+++ b/agents/wnx/agent_keeper/run.ps1
@@ -0,0 +1,274 @@
+#!/usr/bin/env pwsh
+# Copyright (C) 2023 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+# This is an adaptation of our standard run script to Windows reality.
+# The most noticeable changes are artifact upload and path shortening.
+
+# CI normally uses the path d:\workspace\checkmk\master\checkout as the repo root.
+# We add a link d:\y pointing to d:\workspace\checkmk\master\
+# and let the script use the path d:\y\checkout instead.
+# The reason is Windows' inability to handle very long paths, especially when
+# we have to build OpenSSL for Rust.
+
+
+Write-Host "run script starts" -ForegroundColor Gray
+
+if ((Get-Host).Version.Major -lt 7) {
+    Write-Host "PowerShell version 7 or higher is required." -ForegroundColor Red
+    exit 1
+}
+
+$package_name = Split-Path -Path (Get-Location) -Leaf
+
+$exe_name = "$package_name.exe"
+$work_dir = "$pwd"
+#set target=x86_64-pc-windows-msvc # 64 bit not used now
+$cargo_target = "i686-pc-windows-msvc"
+$exe_dir = "target/$cargo_target/release"
+
+$packBuild = $false
+$packClippy = $false
+$packFormat = $false
+$packCheckFormat = $false
+$packTest = $false
+$packDoc = $false
+$packClean = $false
+$packAll = $false
+
+# repo/branch specific short path
+# TODO(sk): move it to CI upon confirmation that screen works as intended
+$shortenPath = "workdir\workspace\checkmk\master"
+$shortenLink = "y"
+
+if ("$env:arg_var_value" -ne "") {
+ $env:arg_val_name = $env:arg_var_value
+}
+else {
+ $env:arg_val_name = ""
+}
+
+function Write-Help() {
+ $x = Get-Item $PSCommandPath
+ $name = "pwsh -File " + $x.BaseName + ".ps1"
+
+ Write-Host "Usage:"
+ Write-Host ""
+ Write-Host "$name [arguments]"
+ Write-Host ""
+ Write-Host "Available arguments:"
+ Write-Host " -?, -h, --help display help and exit"
+ Write-Host " -A, --all shortcut to -B -C -T -F: build, cluippy, test, check format"
+ Write-Host " --clean clean"
+ Write-Host " -C, --clippy run $package_name clippy"
+ Write-Host " -D, --documentation create $package_name documentation"
+ Write-Host " -f, --format format $package_name sources"
+ Write-Host " -F, --check-format check for $package_name correct formatting"
+ Write-Host " -B, --build build binary $package_name"
+ Write-Host " -T, --test run $package_name unit tests"
+ Write-Host " --shorten link path change dir from current using link"
+ Write-Host ""
+ Write-Host "Examples:"
+ Write-Host ""
+ Write-Host "$name --clippy"
+ Write-Host "$name --build --test"
+ Write-Host "$name --shorten y workspace\checkout"
+}
+
+
+if ($args.Length -eq 0) {
+ Write-Host "No arguments provided. Running with default flags." -ForegroundColor Yellow
+ $packAll = $true
+}
+else {
+ for ($i = 0; $i -lt $args.Length; $i++) {
+ switch ($args[$i]) {
+ { $("-?", "-h", "--help") -contains "$_" } { Write-Help; return }
+ { $("-A", "--all") -contains $_ } { $packAll = $true }
+ { $("-f", "--format") -contains $_ } { $packFormat = $true }
+ { $("-F", "--check-format") -contains $_ } { $packCheckFormat = $true }
+ { $("-B", "--build") -contains $_ } { $packBuild = $true }
+ { $("-C", "--clippy") -contains $_ } { $packClippy = $true }
+ { $("-T", "--test") -contains $_ } { $packTest = $true }
+ { $("-D", "--documentation") -contains $_ } { $packDoc = $true }
+ "--clean" { $packClean = $true }
+ "--var" {
+ [Environment]::SetEnvironmentVariable($args[++$i], $args[++$i])
+ }
+ "--shorten" {
+ $shortenLink = $args[++$i]
+ $shortenPath = $args[++$i]
+ }
+ }
+ }
+}
+
+
+if ($packAll) {
+ $packBuild = $true
+ $packClippy = $true
+ $packCheckFormat = $true
+ $packTest = $true
+}
+
+
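+# Rebase the current directory onto the short junction (see the header comment);
+# leaves the location unchanged when the pattern does not match.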
+function Start-ShortenPath($tgt_link, $path) {
+ if ($tgt_link -eq "" -and $path -eq "") {
+ Write-Host "No path shortening $tgt_link $path" -ForegroundColor Yellow
+ return
+ }
+
+ [string]$inp = Get-Location
+ [string]$new = $inp.tolower().replace($path, $tgt_link)
+    if ($new -eq $inp.ToLower()) {
+        Write-Host "Can't shorten path: $inp doesn't contain $path" -ForegroundColor Yellow
+ return
+ }
+ Write-Host "propose to shorten to: $new ($path, $tgt_link)"
+ try {
+ Set-Location $new -ErrorAction Stop
+ Write-Host "current dir $pwd" -ForegroundColor White
+ }
+ catch {
+ Write-Host "Failed to shorten path, $new doesn't exist" -ForegroundColor Yellow
+ }
+}
+
+
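+# Run a single cargo subcommand and stop the whole script if it fails.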
+function Invoke-Cargo($cmd) {
+ Write-Host "$cmd $package_name" -ForegroundColor White
+ & cargo $cmd
+
+ if ($lastexitcode -ne 0) {
+ Write-Error "Failed to $cmd $package_name with code $lastexitcode" -ErrorAction Stop
+ }
+}
+
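+# True when the current process runs elevated; the full test run requires admin rights.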
+function Test-Administrator {
+ [OutputType([bool])]
+ param()
+ process {
+ [Security.Principal.WindowsPrincipal]$user = [Security.Principal.WindowsIdentity]::GetCurrent();
+ return $user.IsInRole([Security.Principal.WindowsBuiltinRole]::Administrator);
+ }
+}
+
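+# Find the repo root (the directory containing .werks) and make sure the artefacts dir exists.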
+function Update-Dirs() {
+ $root_dir = "$pwd"
+ While (!(Test-Path "$root_dir/.werks" -ErrorAction SilentlyContinue)) {
+ $root_dir = Split-Path -Parent $root_dir -ErrorAction Stop
+ if ($root_dir -eq "") {
+ Write-Error "Not found repo root" -ErrorAction Stop
+ }
+ }
+ $global:root_dir = $root_dir
+ Write-Host "Found root dir: '$global:root_dir'" -ForegroundColor White
+
+ $arte_dir = "$root_dir/artefacts"
+ If (!(Test-Path -PathType container $arte_dir)) {
+        Remove-Item $arte_dir -ErrorAction SilentlyContinue # a stray file with this name may be left over from bad scripts
+ Write-Host "Creating arte dir: '$arte_dir'" -ForegroundColor White
+        New-Item -ItemType Directory -Path $arte_dir -ErrorAction Stop | Out-Null
+ }
+ $global:arte_dir = "$arte_dir"
+ Write-Host "Using arte dir: '$global:arte_dir'" -ForegroundColor White
+}
+
+$result = 1
+try {
+ $mainStartTime = Get-Date
+
+ & cargo --version > nul
+ if ($lastexitcode -ne 0) {
+ Write-Error "Cargo not found, please install it and/or add to PATH" -ErrorAction Stop
+ }
+ &rustup update
+ &rustup target add $cargo_target
+ & rustc -V
+ & cargo -V
+
+ # Disable assert()s in C/C++ parts (e.g. wepoll-ffi), they map to _assert()/_wassert(),
+ # which is not provided by libucrt. The latter is needed for static linking.
+ # https://github.com/rust-lang/cc-rs#external-configuration-via-environment-variables
+ $env:CFLAGS = "-DNDEBUG"
+
+ # shorten path
+ Start-ShortenPath "$shortenLink" "$shortenPath"
+ Update-Dirs
+
+ if ($packClean) {
+ Invoke-Cargo "clean"
+ }
+ if ($packBuild) {
+ $cwd = Get-Location
+ $target_dir = Join-Path -Path "$cwd" -ChildPath "target/$cargo_target"
+ Write-Host "Killing processes in $target_dir" -ForegroundColor White
+ Get-Process | Where-Object { $_.path -and ($_.path -like "$target_dir\*") } | Stop-Process -Force
+ &cargo build --release --target $cargo_target
+ if ($lastexitcode -ne 0) {
+ Write-Error "Failed to build $package_name with code $lastexitcode" -ErrorAction Stop
+ }
+ }
+ if ($packClippy) {
+ &cargo clippy --release --target $cargo_target --tests -- --deny warnings
+ if ($lastexitcode -ne 0) {
+ Write-Error "Failed to clippy $package_name with code $lastexitcode" -ErrorAction Stop
+ }
+ }
+
+ if ($packFormat) {
+ Invoke-Cargo "fmt"
+ }
+
+ if ($packCheckFormat) {
+ Write-Host "test format $package_name" -ForegroundColor White
+ cargo fmt -- --check
+ if ($lastexitcode -ne 0) {
+ Write-Error "Failed to test format $package_name" -ErrorAction Stop
+ }
+ }
+ if ($packTest) {
+ if (-not (Test-Administrator)) {
+ Write-Error "Testing must be executed as Administrator." -ErrorAction Stop
+ }
+ cargo test --release --target $cargo_target -- --test-threads=4
+ if ($lastexitcode -ne 0) {
+ Write-Error "Failed to test $package_name" -ErrorAction Stop
+ }
+ }
+ if ($packBuild -and $packTest -and $packClippy) {
+ Write-Host "Uploading artifacts: [ $exe_dir/$exe_name -> $arte_dir/$exe_name ] ..." -Foreground White
+ Copy-Item $exe_dir/$exe_name $arte_dir/$exe_name -Force -ErrorAction Stop
+ }
+ if ($packDoc) {
+ Invoke-Cargo "doc"
+ }
+
+ Write-Host "SUCCESS" -ForegroundColor Green
+ $result = 0
+}
+catch {
+ Write-Host "Error: " $_ -ForegroundColor Red
+ Write-Host "Trace stack: " -ForegroundColor Yellow
+ Write-Host $_.ScriptStackTrace -ForegroundColor Yellow
+}
+finally {
+ Write-Host "Restore path to $work_dir" -ForegroundColor White
+ Set-Location $work_dir
+ $endTime = Get-Date
+ $elapsedTime = $endTime - $mainStartTime
+ Write-Host "Elapsed time: $($elapsedTime.Hours):$($elapsedTime.Minutes):$($elapsedTime.Seconds)"
+ Write-Host "run script starts" -ForegroundColor Gray
+}
+
+
+exit $result
\ No newline at end of file
diff --git a/agents/wnx/agent_keeper/rust-toolchain.toml b/agents/wnx/agent_keeper/rust-toolchain.toml
index a24bdd29944..e151a4648e4 100644
--- a/agents/wnx/agent_keeper/rust-toolchain.toml
+++ b/agents/wnx/agent_keeper/rust-toolchain.toml
@@ -1,5 +1,5 @@
[toolchain]
-channel = "1.72" # must be in sync with install-rust-cargo.sh
+channel = "1.75" # must be in sync with install-rust-cargo.sh
[toolchain.windows]
targets = ["i686-pc-windows-msvc"]
diff --git a/agents/wnx/agent_keeper/scripts/cargo_build_core.cmd b/agents/wnx/agent_keeper/scripts/cargo_build_core.cmd
deleted file mode 100644
index 8ab59241868..00000000000
--- a/agents/wnx/agent_keeper/scripts/cargo_build_core.cmd
+++ /dev/null
@@ -1,159 +0,0 @@
-@echo off
-:: Script to Build Rust executable and sign it
-::
-:: Sign mode:
-:: cargo_build_core file password
-:: file is located in c:\common\store and must be well protected from access
-:: The password is delivered by jenkins(in a turn from our password store)
-:: In future we could download file too(from the password store), but this will
-:: not change the requirement to protect the file from free access.
-::
-:: Standard Mode:
-:: cargo_build_core
-::
-
-SETLOCAL EnableDelayedExpansion
-
-set RUST_BACKTRACE=1
-
-if "%worker_arte%" == "" powershell Write-Host "worker_arte is not defined" -Foreground Red && exit /b 79
-if "%worker_cur_dir%" == "" powershell Write-Host "worker_cur_dir is not defined" -Foreground Red && exit /b 79
-if "%worker_root_dir%" == "" powershell Write-Host "worker_root_dir is not defined" -Foreground Red && exit /b 79
-if "%worker_exe_name%" == "" powershell Write-Host "worker_exe_name is not defined" -Foreground Red && exit /b 79
-if "%worker_target%" == "" powershell Write-Host "worker_target is not defined" -Foreground Red && exit /b 79
-
-
-:: Jenkins calls windows scripts in a quite strange manner, better to check is cargo available
-where cargo > nul
-if not %errorlevel% == 0 powershell Write-Host "Cargo not found, please install it and/or add to PATH" -Foreground Red && exit /b 7
-rustup update
-rustup target add %worker_target%
-
-:: 64-bit
-::set target=x86_64-pc-windows-mscvc
-:: 32-bit
-set exe=target\%worker_target%\release\%worker_exe_name%
-@echo RUST versions:
-cargo -V
-rustc -V
-echo Settings:
-echo worker_arg_build=%worker_arg_build%
-echo worker_arg_clippy=%worker_arg_clippy%
-echo worker_arg_test=%worker_arg_test%
-echo worker_target=%worker_target%
-
-:: Disable assert()s in C/C++ parts (e.g. wepoll-ffi), they map to _assert()/_wassert(),
-:: which is not provided by libucrt. The latter is needed for static linking.
-:: https://github.com/rust-lang/cc-rs#external-configuration-via-environment-variables
-set CFLAGS=-DNDEBUG
-
-:: Clean
-if "%worker_arg_clean%" == "1" (
- powershell Write-Host "Run Rust clean" -Foreground White
- cargo clean
-)
-
-:: Check Format
-if "%worker_arg_check_format%" == "1" (
- powershell Write-Host "Run Rust check format" -Foreground White
- cargo fmt -- --check
-)
-
-:: Format
-if "%worker_arg_format%" == "1" (
- powershell Write-Host "Run Rust format" -Foreground White
- cargo fmt
-)
-
-:: Clippy
-if "%worker_arg_clippy%" == "1" (
- powershell Write-Host "Run Rust clippy" -Foreground White
- cargo clippy --release --target %worker_target% --tests -- --deny warnings
- if ERRORLEVEL 1 (
- powershell Write-Host "Failed cargo clippy" -Foreground Red
- exit /b 17
- ) else (
- powershell Write-Host "Checking Rust SUCCESS" -Foreground Green
- )
-) else (
- powershell Write-Host "Skip Rust clippy" -Foreground Yellow
-)
-
-:: Build
-if "%worker_arg_build%" == "1" (
- rem On windows we want to kill exe before starting rebuild.
- rem Use case CI starts testing, for some reasoms process hangs up longer as expected thus
- rem rebuild/retest will be not possible: we get strange/inconsistent results.
- call %worker_root_dir%\scripts\windows\kill_processes_in_targets.cmd %worker_target%\release || echo: ok...
- del /Q %worker_arte%\%worker_exe_name% 2> nul
-
- powershell Write-Host "Building Rust executables" -Foreground White
- cargo build --release --target %worker_target% 2>&1
- if ERRORLEVEL 1 (
- powershell Write-Host "Failed cargo build" -Foreground Red
- exit /b 18
- ) else (
- powershell Write-Host "Building Rust SUCCESS" -Foreground Green
- )
-) else (
- powershell Write-Host "Skip Rust build" -Foreground Yellow
-)
-
-:: Test
-if "%worker_arg_test%" == "1" (
-rem Validate elevation, because full testing is possible only in elevated mode!
-
- if "%worker_need_elevation%" == "1" (
- net session > nul 2>&1
- IF ERRORLEVEL 1 (
- echo You must be elevated. Exiting...
- exit /B 21
- )
- )
- powershell Write-Host "Testing Rust executables" -Foreground White
- cargo test --release --target %worker_target% -- --test-threads=4 2>&1
- if ERRORLEVEL 1 (
- powershell Write-Host "Failed cargo test" -Foreground Red
- exit /b 19
- ) else (
- powershell Write-Host "Testing Rust SUCCESS" -Foreground Green
- )
-) else (
- powershell Write-Host "Skip Rust test" -Foreground Yellow
-)
-
-:: [optional] Signing
-if "%worker_arg_sign%" == "1" (
- powershell Write-Host "Signing Rust executables" -Foreground White
- @call %worker_root_dir%\agents\wnx\sign_windows_exe c:\common\store\%worker_arg_sign_file% %worker_arg_sign_secret% %exe%
- if ERRORLEVEL 1 (
- powershell Write-Host "Failed signing %exe%" -Foreground Red
- exit /b 20
- )
-) else (
- powershell Write-Host "Skip Rust sign" -Foreground Yellow
-)
-
-:: 5. Storing artifacts
-if "%worker_arg_build%" == "1" (
- powershell Write-Host "Uploading artifacts: [ %exe% ] ..." -Foreground White
- copy %exe% %worker_arte%\%worker_exe_name%
- if ERRORLEVEL 1 (
- powershell Write-Host "Failed to copy %exe%" -Foreground Red
- exit /b 22
- ) else (
- powershell Write-Host "Done." -Foreground Green
- )
-) else (
- powershell Write-Host "Skip Rust upload" -Foreground Yellow
-)
-
-:: Documentation
-if "%worker_arg_doc%" == "1" (
- powershell Write-Host "Creating documentation" -Foreground White
- cargo doc
-) else (
- powershell Write-Host "Skip creating documentation" -Foreground Yellow
-)
-
-
diff --git a/agents/wnx/agent_keeper/scripts/kill_all_processes_in_folder.cmd b/agents/wnx/agent_keeper/scripts/kill_all_processes_in_folder.cmd
deleted file mode 100644
index df55e96489f..00000000000
--- a/agents/wnx/agent_keeper/scripts/kill_all_processes_in_folder.cmd
+++ /dev/null
@@ -1,10 +0,0 @@
-@echo off
-if "%1" == "" (
-echo "First parameter should represent target" -Foreground Red
-exit /b 1
-)
-echo "Target is %1"
-dir
-pushd target\%1 || echo "failed to change dir to target\%1 - nothing to do" && exit /b 0
-powershell -ExecutionPolicy ByPass -File ..\..\..\..\..\..\scripts\windows\kill_all_processes_in_folder.ps1 || echo "failed to find kill script" && exit 1
-popd
\ No newline at end of file
diff --git a/agents/wnx/agent_keeper/scripts/kill_processes_in_targets.cmd b/agents/wnx/agent_keeper/scripts/kill_processes_in_targets.cmd
deleted file mode 100644
index 12f4c8c63f2..00000000000
--- a/agents/wnx/agent_keeper/scripts/kill_processes_in_targets.cmd
+++ /dev/null
@@ -1,8 +0,0 @@
-@echo off
-if "%1" == "" (
-echo "First parameter should represent target" -Foreground Red
-exit /b 1
-)
-pushd target\%1 || echo "failed to change dir to target\%1 - nothing to do" && exit /b 0
-powershell -ExecutionPolicy ByPass -File ..\..\..\..\..\..\scripts\windows\kill_all_processes_in_folder.ps1 || echo "failed to find kill script" && exit 1
-popd
\ No newline at end of file
diff --git a/agents/wnx/agent_keeper/scripts/shorten_dir_and_call.ps1 b/agents/wnx/agent_keeper/scripts/shorten_dir_and_call.ps1
deleted file mode 100644
index f37efd92f56..00000000000
--- a/agents/wnx/agent_keeper/scripts/shorten_dir_and_call.ps1
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-# Simple wrapper to change dir for short one and call some script
-# Required for CI with lobg paths
-#
-#
-# Script to Call Build Rust executable using junction(if available)
-#
-# shorten_dit_and_call.ps1 p1 p2 p3
-#
-# Where:
-# Arg What Example from CI
-# p1 = from\path workdir\workspace
-# p2 = to\path x
-# p3 = the script .\scripts\cargo_build_core.cmd
-#
-# We assume that CI is building in the workdir\workspavce and there is a junction x
-# This situation is applicable only for Checkmk CI infrastructure
-
-[string]$inp = Get-Location
-[string]$new = $inp.tolower().replace($args[0], $args[1])
-Write-Host "shorten dir: " $new
-Set-Location $new
-&$args[2]
-[Environment]::Exit($LASTEXITCODE)
\ No newline at end of file
diff --git a/agents/wnx/build_ohm.cmd b/agents/wnx/build_ohm.cmd
deleted file mode 100644
index e5b920e1f2e..00000000000
--- a/agents/wnx/build_ohm.cmd
+++ /dev/null
@@ -1,23 +0,0 @@
-:: Copyright (C) 2022 Checkmk GmbH - License: GNU General Public License v2
-:: This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-:: conditions defined in the file COPYING, which is part of this source code package.
-
-:: Script to build Open Hardware Monitor
-::
-@echo off
-if "%msbuild_exe%" == "" set msbuild_exe=C:\Program Files\Microsoft Visual Studio\2022\Professional\MSBuild\Current\Bin\msbuild.exe
-if not exist "%msbuild_exe%" powershell Write-Host "Install Visual Studio 2022, please" -Foreground Red && exit /b 8
-
-make install_extlibs
-
-set cur_dir=%cd%
-set arte=%cur_dir%\..\..\artefacts
-set all_dir=%cur_dir%\build\ohm\
-
-powershell Write-Host "Building OHM using %msbuild_exe%" -Foreground White
-"%msbuild_exe%" .\ohm\ohm.sln /p:OutDir=%all_dir%;TargetFrameworkVersion=v4.6;Configuration=Release
-if not %errorlevel% == 0 powershell Write-Host "Failed Build" -Foreground Red && exit /b 14
-:: copy %build_dir%\ohm_host.exe %arte%\ /y || powershell Write-Host "Failed to copy ohm_host.exe" -Foreground Red && exit /b 33
-:: copy %build_dir%\ohm_bridge.dll %arte%\ /Y || powershell Write-Host "Failed to copy ohm_host.exe" -Foreground Red && exit /b 35
-copy %all_dir%\OpenHardwareMonitorLib.dll %arte%\ /Y || powershell Write-Host "Failed to copy OpenHardwareMonitorLib.dll" -Foreground Red && exit /b 34
-copy %all_dir%\OpenHardwareMonitorCLI.exe %arte%\ /Y || powershell Write-Host "Failed to copy OpenHardwareMonitorCLI.exe" -Foreground Red && exit /b 35
diff --git a/agents/wnx/build_watest.cmd b/agents/wnx/build_watest.cmd
deleted file mode 100644
index 9f7d5e5f235..00000000000
--- a/agents/wnx/build_watest.cmd
+++ /dev/null
@@ -1,28 +0,0 @@
-@echo off
-rem Short Build File
-rem parameter both build 32 and 64 bit tests
-
-set cur_dir=%cd%
-set build_dir=.\build
-set arte=%cur_dir%\..\..\artefacts
-mkdir %arte% 2> nul
-
-set LOCAL_IMAGES_PDB=%build_dir%
-set LOCAL_IMAGES_EXE=%build_dir%
-
-set ExternalCompilerOptions=/DDECREASE_COMPILE_TIME
-
-powershell Write-Host "Building WATEST with default ms build..." -Foreground Green
-if "%msbuild_exe%" == "" set msbuild_exe="C:\Program Files\Microsoft Visual Studio\2022\Professional\MSBuild\Current\Bin\msbuild.exe"
-if not exist "%msbuild_exe%" powershell Write-Host "Install Visual Studio 2022, please" -Foreground Red && exit /b 8
-:execute
-set exec=watest
-powershell Write-Host "Building WATEST-32..." -Foreground Green
-%msbuild_exe% wamain.sln /t:%exec% /p:Configuration=Release,Platform=x86
-if "%errorlevel%" NEQ "0" powershell Write-Host "Failed %exec%-32" -Foreground Red && exit /b 6
-if "%1" == "both" powershell Write-Host "Building WATEST-64..." -Foreground Green && %msbuild_exe% wamain.sln /t:%exec% /p:Configuration=Release,Platform=x86 || powershell Write-Host "Failed %exec%-64" -Foreground Red && exit /b 7
-copy "%build_dir%\watest\Win32\Release\watest32.exe" "%arte%" /y
-if "%1" == "both" copy "%build_dir%\watest\x64\Release\watest64.exe" "%arte%" /Y
-
-
-
diff --git a/agents/wnx/call_unit_tests.cmd b/agents/wnx/call_unit_tests.cmd
deleted file mode 100644
index 0855cf49f86..00000000000
--- a/agents/wnx/call_unit_tests.cmd
+++ /dev/null
@@ -1,84 +0,0 @@
-@Echo Off & Setlocal DisableDelayedExpansion
-::: do not need this
-::: mode 170,40
-
-::: { Creates variable /AE = Ascii-27 escape code.
-::: - %/AE% can be used with and without DelayedExpansion.
- For /F %%a in ('echo prompt $E ^| cmd')do set "/AE=%%a"
-::: }
-
-(Set \n=^^^
-%=Newline DNR=%
-)
-::: / Color Print Macro -
-::: Usage: %Print%{RRR;GGG;BBB}text to output
-::: \n at the end of the string echo's a new line
-::: valid range for RGB values: 0 - 255
- Set Print=For %%n in (1 2)Do If %%n==2 (%\n%
- For /F "Delims=" %%G in ("!Args!")Do (%\n%
- For /F "Tokens=1 Delims={}" %%i in ("%%G")Do Set "Output=%/AE%[0m%/AE%[38;2;%%im!Args:{%%~i}=!"%\n%
- ^< Nul set /P "=!Output:\n=!%/AE%[0m"%\n%
- If "!Output:~-2!"=="\n" (Echo/^&Endlocal)Else (Endlocal)%\n%
- )%\n%
- )Else Setlocal EnableDelayedExpansion ^& Set Args=
-::: / Erase Macro -
-::: Usage: %Erase%{string of the length to be erased}
- Set Erase=For %%n in (1 2)Do If %%n==2 (%\n%
- For /F "Tokens=1 Delims={}" %%G in ("!Args!")Do (%\n%
- Set "Nul=!Args:{%%G}=%%G!"%\n%
- For /L %%# in (0 1 100) Do (If Not "!Nul:~%%#,1!"=="" ^< Nul set /P "=%/AE%[D%/AE%[K")%\n%
- )%\n%
- Endlocal%\n%
- )Else Setlocal EnableDelayedExpansion ^& Set Args=
-
-
-
-set arte=%cd%\..\..\artefacts
-set results=unit_tests_results.zip
-
-if "%1" == "SIMULATE_OK" powershell Write-Host "Unit test SUCCESS" -Foreground Green && exit /b 0
-if "%1" == "SIMULATE_FAIL" powershell Write-Host "Unit test FAIL" -Foreground Red && del %arte%\check_mk_service.msi && exit /b 100
-if NOT "%1" == "" set param=--gtest_filter=%1
-set sec_param=%2
-if "%param%" == "" powershell Write-Host "Full and Looooooong test was requested." -Foreground Cyan && set sec_param=both
-if "%param%" == "*Integration" powershell Write-Host "Mid tests" -Foreground Cyan && set results=mid_tests_results.zip
-
-%Print%{255;255;255}32-bit test\n
-::call build_watest.cmd %sec_param%
-::if not %errorlevel% == 0 echo "build failed" goto error
-
-set WNX_TEST_ROOT=%temp%\test_%random%
-mkdir %WNX_TEST_ROOT%
-call prepare_to_tests.cmd %WNX_TEST_ROOT%\test
-mklink %WNX_TEST_ROOT%\watest32.exe %arte%\watest32.exe
-net stop WinRing0_1_2_0
-%WNX_TEST_ROOT%\watest32.exe %param%
-if not %errorlevel% == 0 powershell Write-Host "This is ERROR %errorlevel% in testing 32" -Foreground Red && goto error
-if NOT "%sec_param%" == "both" powershell Write-Host "This is end of testing. QUICK test was requested." -Foreground Cyan && goto success
-
-@rem 64-bit is tested quickly
-powershell Write-Host "64-bit test" -Foreground Cyan
-if "%1" == "" set param=--gtest_filter=-PluginTest.Sync*:PluginTest.Async*
-mklink %WNX_TEST_ROOT%\watest32.exe %arte%\watest32.exe
-WNX_TEST_ROOT%\watest64.exe %param%
-if not %errorlevel% == 0 powershell Write-Host "This is ERROR %errorlevel% in testing 64" -Foreground Red && goto error
-%Print%{0;255;255}This is end of testing. FULL test was requested.\n
-:success
-%Print%{0;255;0}Unit test "%param%": SUCCESS\n
-call :zip_results
-exit /b 0
-:error
-%Print%{255;0;0} Unit test "%param%": failed with error level "%errorlevel%" \n
-call :zip_results
-exit /b 78
-
-:zip_results
-ren %arte%\%results% %arte%\%results%.bak 2> nul
-pushd %WNX_TEST_ROOT% && ( call :zip_and_remove & popd )
-exit /b
-
-:zip_and_remove
-echo zipping results with remove
-7z a -r -y -tzip %arte%\%results% >nul
-rmdir /s/q "%WNX_TEST_ROOT%" 2>nul
-exit /b
diff --git a/agents/wnx/call_unit_tests_quick.cmd b/agents/wnx/call_unit_tests_quick.cmd
deleted file mode 100644
index 9175f26993d..00000000000
--- a/agents/wnx/call_unit_tests_quick.cmd
+++ /dev/null
@@ -1 +0,0 @@
-call call_unit_tests.cmd -*_Long
\ No newline at end of file
diff --git a/agents/wnx/extensions/robotmk_ext/Cargo.lock b/agents/wnx/extensions/robotmk_ext/Cargo.lock
index 7705cb1056a..9ee897b9884 100644
--- a/agents/wnx/extensions/robotmk_ext/Cargo.lock
+++ b/agents/wnx/extensions/robotmk_ext/Cargo.lock
@@ -4,84 +4,67 @@ version = 3
[[package]]
name = "anstream"
-version = "0.3.2"
+version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163"
+checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
- "is-terminal",
"utf8parse",
]
[[package]]
name = "anstyle"
-version = "1.0.1"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd"
+checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87"
[[package]]
name = "anstyle-parse"
-version = "0.2.1"
+version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333"
+checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
-version = "1.0.0"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
+checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648"
dependencies = [
"windows-sys",
]
[[package]]
name = "anstyle-wincon"
-version = "1.0.1"
+version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188"
+checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
dependencies = [
"anstyle",
"windows-sys",
]
-[[package]]
-name = "bitflags"
-version = "2.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42"
-
-[[package]]
-name = "cc"
-version = "1.0.82"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01"
-dependencies = [
- "libc",
-]
-
[[package]]
name = "clap"
-version = "4.3.21"
+version = "4.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c27cdf28c0f604ba3f512b0c9a409f8de8513e4816705deb0498b627e7c3a3fd"
+checksum = "33e92c5c1a78c62968ec57dbc2440366a2d6e5a23faf829970ff1585dc6b18e2"
dependencies = [
"clap_builder",
"clap_derive",
- "once_cell",
]
[[package]]
name = "clap_builder"
-version = "4.3.21"
+version = "4.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08a9f1ab5e9f01a9b81f202e8562eb9a10de70abf9eaeac1be465c28b75aa4aa"
+checksum = "f4323769dc8a61e2c39ad7dc26f6f2800524691a44d74fe3d1071a5c24db6370"
dependencies = [
"anstream",
"anstyle",
@@ -91,9 +74,9 @@ dependencies = [
[[package]]
name = "clap_derive"
-version = "4.3.12"
+version = "4.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050"
+checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442"
dependencies = [
"heck",
"proc-macro2",
@@ -103,9 +86,9 @@ dependencies = [
[[package]]
name = "clap_lex"
-version = "0.5.0"
+version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
+checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1"
[[package]]
name = "colorchoice"
@@ -113,82 +96,26 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
-[[package]]
-name = "errno"
-version = "0.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f"
-dependencies = [
- "errno-dragonfly",
- "libc",
- "windows-sys",
-]
-
-[[package]]
-name = "errno-dragonfly"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
-dependencies = [
- "cc",
- "libc",
-]
-
[[package]]
name = "heck"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
-[[package]]
-name = "hermit-abi"
-version = "0.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b"
-
-[[package]]
-name = "is-terminal"
-version = "0.4.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
-dependencies = [
- "hermit-abi",
- "rustix",
- "windows-sys",
-]
-
-[[package]]
-name = "libc"
-version = "0.2.147"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
-
-[[package]]
-name = "linux-raw-sys"
-version = "0.4.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
-
-[[package]]
-name = "once_cell"
-version = "1.18.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
-
[[package]]
name = "proc-macro2"
-version = "1.0.66"
+version = "1.0.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
+checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.32"
+version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965"
+checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
@@ -200,19 +127,6 @@ dependencies = [
"clap",
]
-[[package]]
-name = "rustix"
-version = "0.38.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "172891ebdceb05aa0005f533a6cbfca599ddd7d966f6f5d4d9b2e70478e70399"
-dependencies = [
- "bitflags",
- "errno",
- "libc",
- "linux-raw-sys",
- "windows-sys",
-]
-
[[package]]
name = "strsim"
version = "0.10.0"
@@ -221,9 +135,9 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "syn"
-version = "2.0.28"
+version = "2.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
+checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
dependencies = [
"proc-macro2",
"quote",
@@ -232,9 +146,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
-version = "1.0.11"
+version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"
+checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "utf8parse"
@@ -244,18 +158,18 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "windows-sys"
-version = "0.48.0"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
-version = "0.48.1"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
+checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
@@ -268,42 +182,42 @@ dependencies = [
[[package]]
name = "windows_aarch64_gnullvm"
-version = "0.48.0"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
[[package]]
name = "windows_aarch64_msvc"
-version = "0.48.0"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
[[package]]
name = "windows_i686_gnu"
-version = "0.48.0"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
[[package]]
name = "windows_i686_msvc"
-version = "0.48.0"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
[[package]]
name = "windows_x86_64_gnu"
-version = "0.48.0"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
[[package]]
name = "windows_x86_64_gnullvm"
-version = "0.48.0"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
[[package]]
name = "windows_x86_64_msvc"
-version = "0.48.0"
+version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
+checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
diff --git a/agents/wnx/include/common/wnx_version.h b/agents/wnx/include/common/wnx_version.h
index 310530c0343..f67da648610 100644
--- a/agents/wnx/include/common/wnx_version.h
+++ b/agents/wnx/include/common/wnx_version.h
@@ -1,4 +1,4 @@
-#define CMK_WIN_AGENT_VERSION "2.3.0b1"
+#define CMK_WIN_AGENT_VERSION "2.4.0b1"
// DO NOT CHANGE the structure of the first line
// First line is part of the build script
diff --git a/agents/wnx/include/common/wtools.h b/agents/wnx/include/common/wtools.h
index 6027dea4c71..75c78aecd17 100644
--- a/agents/wnx/include/common/wtools.h
+++ b/agents/wnx/include/common/wtools.h
@@ -11,18 +11,16 @@
#ifndef WTOOLS_H
#define WTOOLS_H
-#if defined(_WIN32)
-#include
-#include
-#include "Windows.h"
-#include "winperf.h"
+#include <winsock2.h>  // here to help iphlpapi.h
#define _WIN32_DCOM // NOLINT
+#include
#include
#include
-#endif
+#include
+#include
#include
#include
@@ -33,11 +31,15 @@
#include
#include
+#include "Windows.h"
#include "datablock.h"
#include "tools/_win.h"
#include "tools/_xlog.h"
+#include "winperf.h"
namespace wtools {
+constexpr std::string_view safe_temp_sub_dir = "cmk_service";
+
inline void *ProcessHeapAlloc(size_t size) noexcept {
return ::HeapAlloc(::GetProcessHeap(), HEAP_ZERO_MEMORY, size);
}
@@ -304,12 +306,15 @@ class DirectPipe {
SECURITY_ATTRIBUTES sa_ = {};
};
-// scans all processes in system and calls op
-// returns false only when something is really bad
+enum class ScanAction { terminate, advance };
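+// terminate: stop the scan after the current entry; advance: continue with the next process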
+
+// scans all processes in system and calls action
+// returns false on error
// based on ToolHelp api family
// normally require elevation
-// if op returns false, scan will be stopped(this is only optimization)
-bool ScanProcessList(const std::function<bool(const PROCESSENTRY32 &)> &op);
+// if action returns ScanAction::terminate, the scan stops early (this is only an optimization)
+bool ScanProcessList(
+    const std::function<ScanAction(const PROCESSENTRY32 &)> &action);
// standard process terminator
bool KillProcess(uint32_t pid, int exit_code) noexcept;
@@ -1055,6 +1060,12 @@ std::filesystem::path ExecuteCommands(std::wstring_view name,
const std::vector<std::wstring> &commands,
ExecuteMode mode);
+/// Create a folder in %TEMP% with owner-only permissions
+///
+/// Returns the path on success
+std::optional<std::filesystem::path> MakeSafeTempFolder(
+    std::string_view sub_dir);
+
/// Changes Access Rights in Windows crazy manner
///
///
@@ -1100,6 +1111,32 @@ bool CheckProcessUsePort(uint16_t port, uint32_t pid, uint16_t peer_port);
std::optional<uint32_t> GetConnectionPid(uint16_t port, uint16_t peer_port);
uint32_t GetServiceStatus(const std::wstring &name) noexcept;
+
+struct AdapterInfo {
+ std::string guid;
+ std::wstring friendly_name;
+ std::wstring description;
+ IFTYPE if_type;
+    std::optional<uint64_t> receive_speed;
+    std::optional<uint64_t> transmit_speed;
+ IF_OPER_STATUS oper_status;
+ std::string mac_address;
+};
+using AdapterInfoStore = std::unordered_map<std::string, AdapterInfo>;
+
+AdapterInfoStore GetAdapterInfoStore();
+
+/// Mangles names for use as perf counter instance names
+/// See: MSDN, PerformanceCounter.InstanceName Property
+/// https://learn.microsoft.com/en-us/dotnet/api/system.diagnostics.performancecounter.instancename?view=dotnet-plat-ext-8.0
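+/// (documented conversions: '(' -> '[', ')' -> ']', and '#', '\', '/' -> '_')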
+std::wstring MangleNameForPerfCounter(std::wstring_view name) noexcept;
+
+struct OsInfo {
+ std::wstring name;
+ std::wstring version;
+};
+
+std::optional<OsInfo> GetOsInfo();
} // namespace wtools
#endif // WTOOLS_H
diff --git a/agents/wnx/include/providers/check_mk.h b/agents/wnx/include/providers/check_mk.h
index f84988a816f..792dadf8a91 100644
--- a/agents/wnx/include/providers/check_mk.h
+++ b/agents/wnx/include/providers/check_mk.h
@@ -7,6 +7,7 @@
#ifndef CHECK_MK_H
#define CHECK_MK_H
+#include <chrono>
#include
#include
@@ -34,6 +35,10 @@ class CheckMk final : public Synchronous {
static std::string makeOnlyFrom();
};
+std::string GetTimezoneOffset();
+
+std::string PrintIsoTime(
+    std::chrono::time_point<std::chrono::system_clock> now);
} // namespace cma::provider
#endif // CHECK_MK_H
diff --git a/agents/wnx/include/providers/p_perf_counters.h b/agents/wnx/include/providers/p_perf_counters.h
index f953c7200c6..24792236a78 100644
--- a/agents/wnx/include/providers/p_perf_counters.h
+++ b/agents/wnx/include/providers/p_perf_counters.h
@@ -44,6 +44,14 @@ class UptimeAsync : public Asynchronous {
std::string makeBody() override;
};
+namespace winperf {
+constexpr std::wstring_view if_section_name = L"if";
+constexpr std::wstring_view if_state_pseudo_counter = L"2002";
+constexpr std::wstring_view if_state_pseudo_counter_type = L"text";
+constexpr std::wstring_view if_mac_pseudo_counter = L"2006";
+constexpr std::wstring_view if_mac_pseudo_counter_type = L"text";
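+// the agent synthesizes these "pseudo" counters (state and MAC) and appends them to the winperf "if" section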
+} // namespace winperf
+
// probably should go in another namespace(used also by skype)
namespace details {
// low level registry scanners
diff --git a/agents/wnx/include/wnx/cma_core.h b/agents/wnx/include/wnx/cma_core.h
index 9cfadd41d9d..b05c843eeb6 100644
--- a/agents/wnx/include/wnx/cma_core.h
+++ b/agents/wnx/include/wnx/cma_core.h
@@ -99,7 +99,8 @@ bool IsExecutable(const std::filesystem::path &file_to_exec);
std::wstring FindPowershellExe() noexcept;
std::wstring LocatePs1Proxy();
-std::wstring MakePowershellWrapper() noexcept;
+std::wstring MakePowershellWrapper(
+ const std::filesystem::path &script) noexcept;
// add to scripts interpreter
inline std::wstring ConstructCommandToExec(
@@ -109,26 +110,19 @@ inline std::wstring ConstructCommandToExec(
std::wstring wrapper;
if (IsExecutable(path)) {
- wrapper = L"\"{}\"";
+ return fmt::format(L"\"{}\"", path.wstring());
} else if (extension == L".pl") {
- wrapper = L"perl.exe \"{}\"";
+ return fmt::format(L"perl.exe \"{}\"", path.wstring());
} else if (extension == L".py") {
- wrapper = L"python.exe \"{}\"";
+ return fmt::format(L"python.exe \"{}\"", path.wstring());
} else if (extension == L".vbs") {
- wrapper = L"cscript.exe //Nologo \"{}\"";
+ return fmt::format(L"cscript.exe //Nologo \"{}\"", path.wstring());
} else if (extension == L".ps1") {
- wrapper = MakePowershellWrapper();
+ return MakePowershellWrapper(path);
} else {
XLOG::l("Not supported extension file {}", path);
return {};
}
-
- if (wrapper.empty()) {
- XLOG::l("impossible to find exe for file {}", path);
- return {};
- }
-
- return fmt::format(wrapper, path.wstring());
} catch (const std::exception &e) {
XLOG::l("Format failed for file '{}' exception: '{}'", path, e);
}
diff --git a/agents/wnx/include/wnx/install_api.h b/agents/wnx/include/wnx/install_api.h
index acd0c21a4e9..21b0822e0b2 100644
--- a/agents/wnx/include/wnx/install_api.h
+++ b/agents/wnx/include/wnx/install_api.h
@@ -59,7 +59,7 @@ class ExecuteUpdate {
constexpr std::wstring_view kDefaultMsiFileName{L"check_mk_agent.msi"};
constexpr std::string_view kMsiLogFileName{"agent_msi.log"};
// string below is patched by Makefile
-constexpr std::wstring_view kAgentProductName{L"Checkmk Agent 2.3"};
+constexpr std::wstring_view kAgentProductName{L"Checkmk Agent 2.4"};
namespace api_err {
constexpr std::string_view kLogFileName{"install_api.log"};
constexpr std::string_view kFailMarker{"fail: "};
diff --git a/agents/wnx/include/wnx/service_processor.h b/agents/wnx/include/wnx/service_processor.h
index 7ee67595e21..c883c406ba1 100644
--- a/agents/wnx/include/wnx/service_processor.h
+++ b/agents/wnx/include/wnx/service_processor.h
@@ -702,6 +702,8 @@ void WaitForAsyncPluginThreads(std::chrono::duration allowed_wait) {
PluginEntry::threadCount(), (allowed_wait - wait_time).count());
}
+std::string FindWinPerfExe(std::string_view exe_name);
+
} // namespace cma::srv
#endif // SERVICE_PROCESSOR_H
diff --git a/agents/wnx/include/wnx/stdafx_defines.h b/agents/wnx/include/wnx/stdafx_defines.h
index 4dfcbfaa83c..95033bec42b 100644
--- a/agents/wnx/include/wnx/stdafx_defines.h
+++ b/agents/wnx/include/wnx/stdafx_defines.h
@@ -45,7 +45,8 @@
#define _CRT_SECURE_NO_WARNINGS 1 // NOLINT
-#define _SILENCE_CXX17_STRSTREAM_DEPRECATION_WARNING // strstream in xlog
+#define _SILENCE_CXX17_STRSTREAM_DEPRECATION_WARNING // strstream in xlog
+#define _SILENCE_STDEXT_ARR_ITERS_DEPRECATION_WARNING // std format 9.0
#define ASIO_STANDALONE // no boost
#define ASIO_HEADER_ONLY // to lazy to add cpp
@@ -58,3 +59,4 @@
#define NOMINMAX // for Windows
#define _SILENCE_CLANG_COROUTINE_MESSAGE // clang coroutines
+#define _SILENCE_STDEXT_ARR_ITERS_DEPRECATION_WARNING
diff --git a/agents/wnx/install/Product.wxs b/agents/wnx/install/Product.wxs
index d69b9f4adbf..89bf34947a9 100644
--- a/agents/wnx/install/Product.wxs
+++ b/agents/wnx/install/Product.wxs
@@ -20,7 +20,7 @@ just to signal that real build script was not called -->
-
+
-
+
diff --git a/agents/wnx/msb.ps1 b/agents/wnx/msb.ps1
deleted file mode 100644
index 9116eca557d..00000000000
--- a/agents/wnx/msb.ps1
+++ /dev/null
@@ -1,175 +0,0 @@
-# Powershell file to start in parallel jobs to build the agent
-# unfortunately we have no good & alternative in Windows
-#
-# TODO: make output visible after job starting
-# TODO: make diagnostic better
-# 2019 (c) Checkmk GmbH
-#
-
-$make_exe = $Env:make_exe
-if( "$make_exe" -eq "" ){
- Write-Host "make_exe should be defined for the build" -foreground Red
- return 1
-}
-
-$sln = (Get-Item -Path ".\").FullName + "\wamain_build.sln" # 'repo\check_mk\agents\wnx\wamain.sln'
-$makefile = (Get-Item -Path ".\").FullName + "\Makefile"
-$host_dir = (Get-Item -Path ".\").FullName
-$cmk_agent_ctl_dir = (Get-Item -Path ".\").FullName + "\..\..\packages\cmk-agent-ctl"
-# string below is used to quckly switch to the Powershell ISE, do not delete it
-# $sln = 'c:\z\m\check_mk\agents\wnx\wamain.sln'
-
-$platforms = "Configuration=Release,Platform=x86", "Configuration=Release,Platform=x64"
-$err = 0
-$StartTime = $(get-date)
-
-function RunningCount($j_all){
- $running_count = 0
- foreach ($job in $j_all) {
- if ($job.State -eq 'Running'){
- $running_count +=1
- }
- }
-
- if($running_count -eq 0){
- Write-Host "end" -foreground Cyan
- return 0
- }
- $elapsedTime = $(get-date) - $global:StartTime
- Write-Host -NoNewLine "`rStill running " $running_count " seconds elapsed: " $elapsedTime.seconds -foreground Cyan
- return $running_count
-}
-
-
-function RcvJob($j, $name){
- if($err -ne 0){
- Stop-Job -Job $j
- return
- }
-
- Receive-Job -Job $j
- if ($j.State -eq 'Failed') {
- Write-Host "On " $name ":" ($job.ChildJobs[0].JobStateInfo.Reason.Message) -ForegroundColor Red
- $script:err = 1
- } else {
- Write-Host "On " $name ": Success" -ForegroundColor Green
- }
- return
-}
-
-# Bases
-$msb = {
-& Set-Location $using:host_dir
-& "$Env:msbuild_exe" $args
-if ($LASTEXITCODE -ne 0) {
- Write-Host "Error: " $LASTEXITCODE -foreground Red
- throw "Failed"
-}
-else {
- Write-Host "Success!" -foreground Green
-}
-}
-
-$mk = {
-& "$Env:make_exe" $args
-if ($LASTEXITCODE -ne 0) {
- Write-Host "Error: " $LASTEXITCODE -foreground Red
- throw "Failed make"
-}
-else {
- Write-Host "Success make!" -foreground Green
-}
-}
-
-$cargo_b = {
-& Set-Location $using:cmk_agent_ctl_dir; .\run.cmd
-if ($LASTEXITCODE -ne 0) {
- Write-Host "Error in cargo build: " $LASTEXITCODE -foreground Red
- throw "Failed cargo build..."
-}
-else {
- Write-Host "Success cargo build!" -foreground Green
-}
-}
-
-# disabled as unstable
-# $j_make = start-job -Init ([ScriptBlock]::Create("Set-Location '$pwd'")) -scriptblock $mk -argumentlist "-w", "-j", "2", "frozen_binaries"
-
-#$j_r = @()
-#Write-Host "Starting Rust Job" -foreground White
-#$j_r += start-job -name Other -scriptblock $cargo_b
-
-# Write-Host "Starting Ohm Job" -foreground Blue
-# $j_r += start-job -scriptblock $msb -argumentlist ".\ohm\ohm.sln", "/p:Configuration=Release"
-
-
-# Exe 32 & 64 bits
-$j_s = @()
-$target = "engine"
-foreach($p in $platforms){
- Write-Host "Starting Job $target - $p" -foreground Blue
- $j_s += start-job -scriptblock $msb -argumentlist $sln, "/m:4", "/t:$target", "/p:$p"
-}
-Write-Host "Jobs waiting... This may take few minutes" -foreground White
-
-do {
- Wait-Job -Job $j_s -Timeout 5 | Out-Null
-} while(RunningCount($j_s) -eq 0 )
-
-Write-Host "Jobs ready" -foreground Blue
-foreach ($job in $j_s) {
- RcvJob $job "engine"
-}
-
-if($err -ne 0){
- Write-Host "Exiting powershell script" -foreground Red
- exit 1
-}
-
-#
-$j_w = @()
-$target = "check_mk_service"
-foreach($p in $platforms){
- Write-Host "Starting Job $target - $p" -foreground Blue
- $n = "$target" + "_"+ $pid.ToString() + "_""$p"
- $j_w += start-job -Name $n -scriptblock $msb -argumentlist $sln, "/m:4", "/t:$target", "/p:$p"
-}
-$target = "watest"
-foreach($p in $platforms){
- Write-Host "Starting Job $target - $p" -foreground Blue
- $n = "$target" + "_"+ $pid.ToString() + "_""$p"
- $j_w += start-job -Name $n -scriptblock $msb -argumentlist $sln, "/m:4", "/t:$target", "/p:$p"
-}
-Write-Host "Jobs waiting... This may take few minutes" -foreground White
-do {
- Wait-Job -Job $j_w -Timeout 5 | Out-Null
-} while(RunningCount($j_w) -eq 0 )
-
-Write-Host "Jobs ready" -foreground Blue
-foreach ($job in $j_w) {
- RcvJob $job $job.Name
-}
-
-#Write-Host "Job rust/ohm waiting... This may take few minutes" -foreground White
-#do {
-# Wait-Job -Job $j_r -Timeout 5 | Out-Null
-#} while(RunningCount($j_r) -eq 0 )
-
-#Write-Host "Job rust/ohm ready" -foreground Blue
-#foreach ($job in $j_r) {
-# RcvJob $job Other
-#}
-
-
-# disabled as unstable ###############
-# Wait-Job -Job $j_make | Out-Null
-#
-# Write-Host "Make ready" -foreground Blue
-# RcvJob $j_make $j_make.Name
-
-
-if($err -ne 0)
-{
- Write-Host "Exiting powershell script" -foreground Red
- exit 1
-}
diff --git a/agents/wnx/parallel.ps1 b/agents/wnx/parallel.ps1
new file mode 100644
index 00000000000..0e9796f1017
--- /dev/null
+++ b/agents/wnx/parallel.ps1
@@ -0,0 +1,183 @@
+# PowerShell script to start parallel jobs that build the agent;
+# unfortunately we have no good alternative on Windows.
+#
+# TODO: make output visible after job starting
+# TODO: make diagnostic better
+# 2023 (c) Checkmk GmbH
+#
+
+$make_exe = $Env:make_exe
+if ( "$make_exe" -eq "" ) {
+ $make_exe = where.exe make | Out-String
+}
+
+$msbuild_exe = $Env:msbuild_exe
+if ( "$msbuild_exe" -eq "" ) {
+    $msbuild_exe = "C:\Program Files\Microsoft Visual Studio\2022\Professional\MSBuild\Current\Bin\msbuild.exe"
+}
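+# Note: the path above assumes Visual Studio 2022 Professional. A sketch for
+# other editions is to locate msbuild via vswhere, e.g.:
+#   & "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -find "MSBuild\**\Bin\msbuild.exe"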
+
+$sln = (Get-Item -Path ".\").FullName + "\wamain_build.sln" # 'repo\check_mk\agents\wnx\wamain.sln'
+$makefile = (Get-Item -Path ".\").FullName + "\Makefile"
+$host_dir = (Get-Item -Path ".\").FullName
+$cmk_agent_ctl_dir = (Get-Item -Path ".\").FullName + "\..\..\packages\cmk-agent-ctl"
+# string below is used to quickly switch to the PowerShell ISE, do not delete it
+# $sln = 'c:\z\m\check_mk\agents\wnx\wamain.sln'
+
+$platforms = "Configuration=Release,Platform=x86", "Configuration=Release,Platform=x64"
+$err = 0
+$env:StartTime = "$(get-date)"
+
+function RunningCount($j_all) {
+ $running_count = 0
+ foreach ($job in $j_all) {
+ if ($job.State -eq 'Running') {
+ $running_count += 1
+ }
+ }
+
+ if ($running_count -eq 0) {
+ Write-Host "end" -foreground Cyan
+ return 0
+ }
+ $t1 = [datetime]$env:StartTime
+ $t2 = [datetime]$(get-date)
+ $elapsedTime = [int]($t2 - $t1).TotalSeconds
+    Write-Host -NoNewLine "`r Still running " $running_count " seconds elapsed: " $elapsedTime "....." -foreground Cyan
+ return $running_count
+}
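+# NB: invoke this as `RunningCount $jobs` (command syntax). Something like
+# `RunningCount($jobs) -eq 0` would not compare the return value; PowerShell
+# would pass `-eq` and `0` into $args as extra arguments instead.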
+
+
+function RcvJob($j, $name) {
+ if ($err -ne 0) {
+ Stop-Job -Job $j
+ return
+ }
+
+ Receive-Job -Job $j
+ if ($j.State -eq 'Failed') {
+ Write-Host "On " $name ":" ($job.ChildJobs[0].JobStateInfo.Reason.Message) -ForegroundColor Red
+ $script:err = 1
+ }
+ else {
+ Write-Host "On " $name ": Success" -ForegroundColor Green
+ }
+ return
+}
+
+# Bases
+$msb = {
+ & Set-Location $using:host_dir
+ & "$Env:msbuild_exe" $args
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Error: " $LASTEXITCODE -foreground Red
+ throw "Failed"
+ }
+ else {
+ Write-Host "Success!" -foreground Green
+ }
+}
+
+$mk = {
+ & "$Env:make_exe" $args
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Error: " $LASTEXITCODE -foreground Red
+ throw "Failed make"
+ }
+ else {
+ Write-Host "Success make!" -foreground Green
+ }
+}
+
+$cargo_b = {
+ & Set-Location $using:cmk_agent_ctl_dir; .\run.cmd
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Error in cargo build: " $LASTEXITCODE -foreground Red
+ throw "Failed cargo build..."
+ }
+ else {
+ Write-Host "Success cargo build!" -foreground Green
+ }
+}
+
+# disabled as unstable
+# $j_make = start-job -Init ([ScriptBlock]::Create("Set-Location '$pwd'")) -scriptblock $mk -argumentlist "-w", "-j", "2", "frozen_binaries"
+
+#$j_r = @()
+#Write-Host "Starting Rust Job" -foreground White
+#$j_r += start-job -name Other -scriptblock $cargo_b
+
+# Write-Host "Starting Ohm Job" -foreground Blue
+# $j_r += start-job -scriptblock $msb -argumentlist ".\ohm\ohm.sln", "/p:Configuration=Release"
+
+
+# Exe 32 & 64 bits
+$j_s = @()
+$target = "engine"
+foreach ($p in $platforms) {
+ Write-Host "Starting Job $target - $p" -foreground Blue
+ $j_s += start-job -scriptblock $msb -argumentlist $sln, "/m:4", "/t:$target", "/p:$p"
+ # for sequential execution
+ # & $Env:msbuild_exe $sln "/m:4" "/t:$target" "/p:$p"
+}
+Write-Host "Jobs waiting... This may take few minutes" -foreground White
+
+do {
+ Wait-Job -Job $j_s -Timeout 5 | Out-Null
+} while ((RunningCount $j_s) -ne 0)
+
+Write-Host "Jobs ready" -foreground Blue
+foreach ($job in $j_s) {
+ RcvJob $job "engine"
+}
+
+if ($err -ne 0) {
+ Write-Host "Exiting powershell script" -foreground Red
+ throw "Failed!"
+}
+
+#
+$j_w = @()
+$target = "check_mk_service"
+foreach ($p in $platforms) {
+ Write-Host "Starting Job $target - $p" -foreground Blue
+ $n = "$target" + "_" + $pid.ToString() + "_""$p"
+ $j_w += start-job -Name $n -scriptblock $msb -argumentlist $sln, "/m:4", "/t:$target", "/p:$p"
+}
+$target = "watest"
+foreach ($p in $platforms) {
+ Write-Host "Starting Job $target - $p" -foreground Blue
+ $n = "$target" + "_" + $pid.ToString() + "_""$p"
+ $j_w += start-job -Name $n -scriptblock $msb -argumentlist $sln, "/m:4", "/t:$target", "/p:$p"
+}
+Write-Host "Jobs waiting... This may take few minutes" -foreground White
+do {
+ Wait-Job -Job $j_w -Timeout 5 | Out-Null
+} while ((RunningCount $j_w) -ne 0)
+
+Write-Host "Jobs ready" -foreground Blue
+foreach ($job in $j_w) {
+ RcvJob $job $job.Name
+}
+
+#Write-Host "Job rust/ohm waiting... This may take few minutes" -foreground White
+#do {
+# Wait-Job -Job $j_r -Timeout 5 | Out-Null
+#} while(RunningCount($j_r) -eq 0 )
+
+#Write-Host "Job rust/ohm ready" -foreground Blue
+#foreach ($job in $j_r) {
+# RcvJob $job Other
+#}
+
+
+# disabled as unstable ###############
+# Wait-Job -Job $j_make | Out-Null
+#
+# Write-Host "Make ready" -foreground Blue
+# RcvJob $j_make $j_make.Name
+
+
+if ($err -ne 0) {
+ Write-Host "Exiting powershell script" -foreground Red
+ throw "Failed!"
+}
diff --git a/agents/wnx/prepare_to_tests.cmd b/agents/wnx/prepare_to_tests.cmd
deleted file mode 100644
index aa67e69d52f..00000000000
--- a/agents/wnx/prepare_to_tests.cmd
+++ /dev/null
@@ -1,73 +0,0 @@
-@Echo Off & Setlocal DisableDelayedExpansion
-::: do not need this
-::: mode 170,40
-
-::: { Creates variable /AE = Ascii-27 escape code.
-::: - %/AE% can be used with and without DelayedExpansion.
- For /F %%a in ('echo prompt $E ^| cmd')do set "/AE=%%a"
-::: }
-
-(Set \n=^^^
-%=Newline DNR=%
-)
-::: / Color Print Macro -
-::: Usage: %Print%{RRR;GGG;BBB}text to output
-::: \n at the end of the string echo's a new line
-::: valid range for RGB values: 0 - 255
- Set Print=For %%n in (1 2)Do If %%n==2 (%\n%
- For /F "Delims=" %%G in ("!Args!")Do (%\n%
- For /F "Tokens=1 Delims={}" %%i in ("%%G")Do Set "Output=%/AE%[0m%/AE%[38;2;%%im!Args:{%%~i}=!"%\n%
- ^< Nul set /P "=!Output:\n=!%/AE%[0m"%\n%
- If "!Output:~-2!"=="\n" (Echo/^&Endlocal)Else (Endlocal)%\n%
- )%\n%
- )Else Setlocal EnableDelayedExpansion ^& Set Args=
-::: / Erase Macro -
-::: Usage: %Erase%{string of the length to be erased}
- Set Erase=For %%n in (1 2)Do If %%n==2 (%\n%
- For /F "Tokens=1 Delims={}" %%G in ("!Args!")Do (%\n%
- Set "Nul=!Args:{%%G}=%%G!"%\n%
- For /L %%# in (0 1 100) Do (If Not "!Nul:~%%#,1!"=="" ^< Nul set /P "=%/AE%[D%/AE%[K")%\n%
- )%\n%
- Endlocal%\n%
- )Else Setlocal EnableDelayedExpansion ^& Set Args=
-
-
-if "%1" == "" (
- %Print%{255;0;0}Dir is absent \n
- exit /b 33
-)
-%Print%{0;255;0}%0 is running... \n
-set root=%1\root
-set data=%1\data
-set user_dir=%data%
-if not exist "%root%" (
- %Print%{0;255;255}Making folder %root% ... \n
- mkdir %root% 2> nul
-)
-mkdir %root%\plugins 2> nul
-
-if not exist "%user_dir%" (
- %Print%{0;255;255}Making folder %user_dir% \n
- mkdir %user_dir% 2> nul
-)
-mkdir %user_dir%\bin 2> nul
-
-if not exist "..\windows\plugins" powershell Write-Host "Folder agents\windows\plugins doesnt exist. Check prep\checkout routine" -Foreground Red && exit /b 33
-
-%Print%{128;255;0}Installation simulation Root Folder: plugins, ohm, yml\n
-xcopy ..\windows\plugins\*.* %root%\plugins /D /Y > nul || powershell Write-Host "Failed plugins copy" -Foreground Red && exit /b 3
-xcopy .\tests\files\ohm\cli\*.* %user_dir%\bin /D /Y > nul || powershell Write-Host "Failed ohm copy. Try to kill Open Hardware Monitor: taskkill /F /IM OpenhardwareMonitorCLI.exe" -Foreground Yellow
-xcopy .\install\resources\check_mk.yml %root% /D /Y> nul || powershell Write-Host "Failed check_mk.yml copy" -Foreground Red && exit /b 5
-
-%Print%{128;255;0}1. Test machine preparation: Root Folder\n
-xcopy .\test_files\config\*.yml %root% /D /Y> nul || powershell Write-Host "Failed test ymls copy" -Foreground Red && exit /b 7
-
-%Print%{128;255;0}2. Test machine preparation: User Folder\n
-xcopy .\test_files\config\*.cfg %user_dir% /D /Y> nul || powershell Write-Host "Failed test cfgs copy" -Foreground Red && exit /b 8
-xcopy .\test_files\config\*.test.ini %user_dir% /D /Y> nul || powershell Write-Host "Failed test inis copy" -Foreground Red && exit /b 9
-xcopy .\test_files\cap\*.test.cap %user_dir% /D /Y> nul || powershell Write-Host "Failed test caps copy" -Foreground Red && exit /b 11
-xcopy .\test_files\unit_test\*.ini %user_dir% /D /Y> nul || powershell Write-Host "Failed test ini copy" -Foreground Red && exit /b 12
-xcopy .\test_files\unit_test\*.dat %user_dir% /D /Y> nul || powershell Write-Host "Failed test dat copy" -Foreground Red && exit /b 13
-xcopy .\test_files\unit_test\*.state %user_dir% /D /Y> nul || powershell Write-Host "Failed test state copy" -Foreground Red && exit /b 14
-xcopy .\test_files\config\*.yml %user_dir% /D /Y > nul || powershell Write-Host "Failed test ymls copy" -Foreground Red && exit /b 15
-
diff --git a/agents/wnx/python_package_install.cmd b/agents/wnx/python_package_install.cmd
deleted file mode 100644
index 92a6999908f..00000000000
--- a/agents/wnx/python_package_install.cmd
+++ /dev/null
@@ -1,11 +0,0 @@
-@rem installs one package of the python
-@rem command line is python path + package name
-@echo off
-set ppath=%1
-set pack=%2
-@%ppath%\pip install %pack% > nul 2>&1
-if "%errorlevel%" == "0" (
-powershell Write-Host "%pack% installed" -Foreground Green
-) else (
- powershell Write-Host "%pack% install Failed" -Foreground Red
-)
diff --git a/agents/wnx/python_packages.cmd b/agents/wnx/python_packages.cmd
index 2e9c455b64b..02c78c0b7b3 100644
--- a/agents/wnx/python_packages.cmd
+++ b/agents/wnx/python_packages.cmd
@@ -4,56 +4,34 @@ SETLOCAL EnableDelayedExpansion
@rem Installs all required packages GLOBALLY
if "%1"=="" (
-set ppath=C:\Python310\Scripts
-powershell Write-Host "Using default directory !ppath!" -Foreground Green
+:: Windows method to save output to a variable :(
+:: Run default python to find install
+powershell Write-Host "Using default python!" -Foreground Yellow
+for /f %%i in ('python -c "import os;import sys;print(os.path.dirname(sys.executable))"') do set python_dir=%%i
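+:: Note: for /f keeps only the first whitespace-delimited token, so a Python
+:: installed under a path containing spaces would be truncated unless "delims=" is set.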
) else (
-set ppath=%1\Scripts
-powershell Write-Host "Using directory !ppath!" -Foreground Green
+powershell Write-Host "Using custom python" -Foreground Green
+set python_dir=%1
)
-@if not exist !ppath! powershell Write-Host "You have to install 32 bit python in directory !ppath!" -Foreground Red && exit /B 1
+set ppath=%python_dir%
+powershell Write-Host "Using directory !ppath!" -Foreground Green
+@if not exist !ppath! powershell Write-Host "You have to install 32-bit Python in directory !python_dir!" -Foreground Red && exit /B 1
-@%ppath%\pip install --upgrade pip > nul 2>&1
+@%ppath%\python -m pip install --upgrade pip > nul 2>&1
if "%errorlevel%" == "0" (
-powershell Write-Host "Pip Upgraded" -Foreground Green
+ powershell Write-Host "Pip Upgraded" -Foreground Green
) else (
powershell Write-Host "Pip Upgrade Failed" -Foreground Red
)
-@!ppath!\pip install pyinstaller > nul 2>&1
-if "%errorlevel%" == "0" (
-powershell Write-Host "pyinstaller installed" -Foreground Green
-) else (
- powershell Write-Host "pyinstaller install Failed" -Foreground Red
-)
+set uniqueFileName=%tmp%\bat~%RANDOM%.tmp
-@!ppath!\pip install future> nul 2>&1
-if "%errorlevel%" == "0" (
-powershell Write-Host "future installed" -Foreground Green
-) else (
- powershell Write-Host "future install Failed" -Foreground Red
-)
+set packages=pyinstaller future pipenv pyyaml pypiwin32 requests pyOpenSSL requests[socks] pathlib pathlib2 typing pytest time-machine bs4 black isort pytest-mock telnetlib3 asyncio marshmallow
-@!ppath!\pip install pipenv> nul 2>&1
+%ppath%\python -m pip install %packages% > %uniqueFileName%
if "%errorlevel%" == "0" (
-powershell Write-Host "pipenv installed" -Foreground Green
+ powershell Write-Host "Installed" -Foreground Green
) else (
- powershell Write-Host "pipenv install Failed" -Foreground Red
+ powershell Write-Host "Failed" -Foreground Red
+ type %uniqueFileName%
)
-
-call python_package_install !ppath! pyyaml
-call python_package_install !ppath! pypiwin32
-call python_package_install !ppath! requests
-call python_package_install !ppath! pyOpenSSL
-call python_package_install !ppath! requests[socks]
-call python_package_install !ppath! pathlib
-call python_package_install !ppath! pathlib2
-call python_package_install !ppath! typing
-call python_package_install !ppath! pytest
-call python_package_install !ppath! freezegun
-call python_package_install !ppath! bs4
-call python_package_install !ppath! black
-call python_package_install !ppath! isort
-call python_package_install !ppath! pytest-mock
-call python_package_install !ppath! telnetlib3
-call python_package_install !ppath! asyncio
diff --git a/agents/wnx/readme.md b/agents/wnx/readme.md
index ab68e67d6c9..4ab0fd4de53 100644
--- a/agents/wnx/readme.md
+++ b/agents/wnx/readme.md
@@ -19,12 +19,11 @@ Run *windows_setup.cmd*. This is **Simplest** method to install some required Wi
Alternatively you can **choco install make** and use Makefile
## Build Scripts
-1. run.cmd - to build/unit test agent
-2. Dev tool: build_watest.cmd- to build 32-bit watest to be used later
+1. run.ps1 - to build and unit-test the agent
## Test Scripts
-1. All Other Tests: run_tests.cmd
+1. run_tests.cmd
## Assorted
To build and measure time use ptime
diff --git a/agents/wnx/run.cmd b/agents/wnx/run.cmd
index cf13c00eccc..2cb524506d5 100644
--- a/agents/wnx/run.cmd
+++ b/agents/wnx/run.cmd
@@ -1,410 +1,2 @@
-@echo off
-:: File to Build MSI usingMS BUild system
-:: Problem based on the fact that we have one x86 Playfrom for MSI
-:: but two, x86 and x64, for exe and MSI must have both targets
-:: build before owm build start.
-:: this is for command line only
-:: In GUI we should do Batch Rebuild of everything
-:: variables to set OPTOIONALLY, when you are using the same git checkout multiple times
-:: arte - final artefacts, expected bz build script
-:: WNX_BUILD - in the future this is name of subfloder to build out
-:: creates # artefacts in the output folder
-
-::
-:: Sign mode:
-:: run --all --sign file password
-:: file is always in c:\common\store should be well protected from access
-::
-:: Standard Mode:
-:: run --all
-::
-
-SETLOCAL EnableDelayedExpansion
-
-if "%*" == "" (
-echo: Run default...
-set arg_all=1
-) else (
-:CheckOpts
-if "%~1"=="-h" goto Usage
-if "%~1"=="--help" goto Usage
-if "%~1"=="-?" goto Usage
-
-if not "%arg_var_value%" == "" (
-set %arg_var_name%=%arg_var_value%
-set arg_var_value=
-)
-
-
-if "%~1"=="-A" (set arg_all=1) & shift & goto CheckOpts
-if "%~1"=="--all" (set arg_all=1) & shift & goto CheckOpts
-
-if "%~1"=="-c" (set arg_clean=1) & shift & goto CheckOpts
-if "%~1"=="--clean" (set arg_clean=1) & shift & goto CheckOpts
-
-if "%~1"=="-S" (set arg_setup=1) & shift & goto CheckOpts
-if "%~1"=="--setup" (set arg_setup=1) & shift & goto CheckOpts
-
-if "%~1"=="-f" (set arg_format=1) & shift & goto CheckOpts
-if "%~1"=="--format" (set arg_format=1) & shift & goto CheckOpts
-
-if "%~1"=="-F" (set arg_check_format=1) & shift & goto CheckOpts
-if "%~1"=="--check-format" (set arg_check_format=1) & shift & goto CheckOpts
-
-if "%~1"=="-C" (set arg_ctl=1) & shift & goto CheckOpts
-if "%~1"=="--controller" (set arg_ctl=1) & shift & goto CheckOpts
-
-if "%~1"=="-B" (set arg_build=1) & shift & goto CheckOpts
-if "%~1"=="--build" (set arg_build=1) & shift & goto CheckOpts
-
-if "%~1"=="-M" (set arg_msi=1) & shift & goto CheckOpts
-if "%~1"=="--msi" (set arg_msi=1) & shift & goto CheckOpts
-
-if "%~1"=="-O" (set arg_ohm=1) & shift & goto CheckOpts
-if "%~1"=="--ohm" (set arg_ohm=1) & shift & goto CheckOpts
-
-if "%~1"=="-Q" (set arg_check_sql=1) & shift & goto CheckOpts
-if "%~1"=="--check-sql" (set arg_check_sql=1) & shift & goto CheckOpts
-
-if "%~1"=="-E" (set arg_ext=1) & shift & goto CheckOpts
-if "%~1"=="--extensions" (set arg_ext=1) & shift & goto CheckOpts
-
-if "%~1"=="-T" (set arg_test=1) & shift & goto CheckOpts
-if "%~1"=="--test" (set arg_test=1) & shift & goto CheckOpts
-
-if "%~1"=="-D" (set arg_doc=1) & shift & goto CheckOpts
-if "%~1"=="--documentation" (set arg_doc=1) & shift & goto CheckOpts
-
-if "%~1"=="--detach" (set arg_detach=1) & shift & goto CheckOpts
-
-if "%~1"=="--var" (set arg_var_name=%~2) & (set arg_var_value=%~3) & shift & shift & shift & goto CheckOpts
-
-if "%~1"=="--sign" (set arg_detach=1) & (set arg_sign_file=%~2) & (set arg_sign_secret=%~3) & (set arg_sign=1) & shift & shift & shift & goto CheckOpts
-)
-if "%arg_all%"=="1" (set arg_ctl=1) & (set arg_build=1) & (set arg_test=1) & (set arg_setup=1) & (set arg_ohm=1) & (set arg_check_sql=1) & (set arg_ext=1) & (set arg_msi=1)
-
-@echo logonserver: "%LOGONSERVER%" user: "%USERNAME%"
-
-::Get start time:
-for /F "tokens=1-4 delims=:.," %%a in ("%time%") do (
- set /A "start=(((%%a*60)+1%%b %% 100)*60+1%%c %% 100)*100+1%%d %% 100"
-)
-
-:: arg_setup
-call :check_choco
-call :check_make
-call :check_repo_crlf
-call :check_msvc
-
-set cur_dir=%cd%
-set arte=%cur_dir%\..\..\artefacts
-set build_dir=.\build
-set SKIP_MINOR_BINARIES=YES
-set ExternalCompilerOptions=/DDECREASE_COMPILE_TIME
-set hash_file=%arte%\windows_files_hashes.txt
-set usbip_exe=c:\common\usbip-win-0.3.6-dev\usbip.exe
-
-:: arg_clean
-call :clean
-
-:: arg_build
-call :set_wnx_version
-if "%arg_build%" == "1" call %cur_dir%\scripts\clean_artifacts.cmd
-if "%arg_build%" == "1" call scripts\unpack_packs.cmd
-if "%arg_build%" == "1" make install_extlibs || ( powershell Write-Host "Failed to install packages" -Foreground Red & call :halt 33 )
-call :build_windows_agent || call :halt 81
-
-:: arg_test
-call :unit_test || call :halt 81
-
-:: arg_ctl
-call :build_agent_controller || call :halt 81
-
-:: arg_check_sql
-call :build_check_sql || call :halt 81
-
-:: arg_ohm
-call :build_ohm || call :halt 81
-
-:: arg_ext
-call :build_ext || call :halt 81
-
-:: arg_sign
-call :sign_binaries || call :halt 81
-
-:: arg_msi
-call :build_msi || call :halt 81
-call :set_msi_version || call :halt 81
-call :deploy_to_artifacts || call :halt 81
-
-::Get end time:
-for /F "tokens=1-4 delims=:.," %%a in ("%time%") do (
- set /A "end=(((%%a*60)+1%%b %% 100)*60+1%%c %% 100)*100+1%%d %% 100"
-)
-
-:: Get elapsed time:
-set /A elapsed=end-start
-
-:: Show elapsed time:
-set /A hh=elapsed/(60*60*100), rest=elapsed%%(60*60*100), mm=rest/(60*100), rest%%=60*100, ss=rest/100, cc=rest%%100
-if %mm% lss 10 set mm=0%mm%
-if %ss% lss 10 set ss=0%ss%
-if %cc% lss 10 set cc=0%cc%
-powershell Write-Host "Elapsed time: %hh%:%mm%:%ss%,%cc%" -Foreground Blue
-
-call :patch_msi_code
-call :sign_msi
-call :detach
-powershell Write-Host "FULL SUCCESS" -Foreground Blue
-exit /b 0
-
-:: CHECK FOR CHOCO
-:: if choco is absent then build is not possible(we can't dynamically control environment)
-:check_choco
-if not "%arg_setup%" == "1" powershell Write-Host "Skipped setup check" -Foreground Yellow & goto :eof
-powershell Write-Host "Looking for choco..." -Foreground White
-@choco -v > nul
-@if "%errorlevel%" NEQ "0" powershell Write-Host "choco must be installed!" -Foreground Red & call :halt 55
-powershell Write-Host "[+] choco" -Foreground Green
-goto :eof
-
-
-:: CHECK FOR make
-:: if make is absent then we try to install it using choco. Failure meand build fail, make is mandatory
-:check_make
-if not "%arg_setup%" == "1" goto :eof
-powershell Write-Host "Looking for make..." -Foreground White
-for /f %%i in ('where make') do set make_exe=%%i
-if "!make_exe!" == "" (
-powershell Write-Host "make not found, try to install" -Foreground Yellow
-choco install make -y
-for /f %%i in ('where make') do set make_exe=%%i
-if "!make_exe!" == "" powershell Write-Host "make not found, something is really bad" -Foreground Red & call :halt 57
-)
-powershell Write-Host "[+] make" -Foreground Green
-goto :eof
-
-:: CHECK for line ending
-:check_repo_crlf
-if not "%arg_setup%" == "1" goto :eof
-@py -3 scripts\check_crlf.py
-@if errorlevel 1 powershell Write-Host "Line Encoding Error`r`n`tPlease check how good repo was checked out" -Foreground Red & call :halt 113
-goto :eof
-
-
-:: CHECK for MSVC
-:check_msvc
-if not "%arg_setup%" == "1" goto :eof
-powershell Write-Host "Looking for MSVC 2022..." -Foreground White
-set msbuild_exe=C:\Program Files\Microsoft Visual Studio\2022\Professional\MSBuild\Current\Bin\msbuild.exe
-if not exist "%msbuild_exe%" powershell Write-Host "Install Visual Studio 2022, please" -Foreground Red & call :halt 8
-powershell Write-Host "[+] Found MSVC 2022" -Foreground Green
-goto :eof
-
-:: clean artifacts
-:clean
-if not "%arg_clean%" == "1" powershell Write-Host "Skipped clean" & goto :eof
-powershell Write-Host "Cleaning..." -Foreground White
-if "%arte%" == "" powershell Write-Host "arte is not defined" -Foreground Red & call :halt 99
-del /Q %arte%\*.msi > nul
-del /Q %arte%\*.exe > nul
-del /Q %arte%\*.yml > nul
-del /Q %arte%\*.log > nul
-del /Q %arte%\*.log > nul
-powershell Write-Host "Done." -Foreground Green
-goto :eof
-
-:set_wnx_version
-if not "%arg_build%" == "1" goto :eof
-:: read version from the C++ agent
-set /p wnx_version_raw=nul
-powershell Write-Host "Signing Executables" -Foreground White
-@call scripts\sign_code.cmd %build_dir%\check_mk_service\x64\Release\check_mk_service64.exe %hash_file%
-@call scripts\sign_code.cmd %build_dir%\check_mk_service\Win32\Release\check_mk_service32.exe %hash_file%
-@call scripts\sign_code.cmd %arte%\cmk-agent-ctl.exe %hash_file%
-@call scripts\sign_code.cmd %arte%\check-sql.exe %hash_file%
-@call scripts\sign_code.cmd %build_dir%\ohm\OpenHardwareMonitorLib.dll %hash_file%
-@call scripts\sign_code.cmd %build_dir%\ohm\OpenHardwareMonitorCLI.exe %hash_file%
-goto :eof
-
-
-:: Deploy Phase: post processing/build special modules using make
-:deploy_to_artifacts
-if not "%arg_msi%" == "1" goto :eof
-powershell Write-Host "run:Artifacts deploy..." -Foreground White
-copy %build_dir%\install\Release\check_mk_service.msi %arte%\check_mk_agent.msi /y || call :halt 33
-copy %build_dir%\check_mk_service\x64\Release\check_mk_service64.exe %arte%\check_mk_agent-64.exe /Y || call :halt 34
-copy %build_dir%\check_mk_service\Win32\Release\check_mk_service32.exe %arte%\check_mk_agent.exe /Y || call :halt 35
-copy %build_dir%\ohm\OpenHardwareMonitorCLI.exe %arte%\OpenHardwareMonitorCLI.exe /Y || call :halt 36
-copy %build_dir%\ohm\OpenHardwareMonitorLib.dll %arte%\OpenHardwareMonitorLib.dll /Y || call :halt 37
-copy install\resources\check_mk.user.yml %arte%
-copy install\resources\check_mk.yml %arte%
-powershell Write-Host "File Deployment succeeded" -Foreground Green
-goto :eof
-
-:: Additional Phase: post processing/build special modules using make
-:patch_msi_code
-if not "%arg_msi%" == "1" goto :eof
-!make_exe! msi_patch
-if errorlevel 1 powershell Write-Host "Failed to patch MSI exec" -Foreground Red & call :halt 36
-copy /Y %arte%\check_mk_agent.msi %arte%\check_mk_agent_unsigned.msi > nul
-goto :eof
-
-:sign_msi
-if not "%arg_sign%" == "1" powershell Write-Host "Signing MSI skipped" -Foreground Yellow & goto :eof
-powershell Write-Host "run:Signing MSI" -Foreground White
-@call scripts\sign_code.cmd %arte%\check_mk_agent.msi %hash_file%
-call scripts\detach_usb_token.cmd %usbip_exe%
-call scripts\call_signing_tests.cmd
-if errorlevel 1 call powershell Write-Host "Failed MSI signing test %errorlevel%" -Foreground Red & :halt 41
-@py -3 scripts\check_hashes.py %hash_file%
-if errorlevel 1 call powershell Write-Host "Failed hashing test %errorlevel%" -Foreground Red & :halt 42
-powershell Write-Host "MSI signing succeeded" -Foreground Green
-goto :eof
-
-:detach
-if "%arg_detach%" == "1" call scripts\detach_usb_token.cmd %usbip_exe%
-goto :eof
-
-:: Sets the errorlevel and stops the batch immediately
-:halt
-if exist %usbip_exe% call scripts\detach_usb_token.cmd %usbip_exe%
-call :__SetErrorLevel %1
-call :__ErrorExit 2> nul
-goto :eof
-
-:__ErrorExit
-rem Creates a syntax error, stops immediately
-()
-goto :eof
-
-:__SetErrorLevel
-exit /b %time:~-2%
-goto :eof
-
-:Usage
-echo.
-echo.Usage:
-echo.
-echo.%~nx0 [arguments]
-echo.
-echo.Available arguments:
-echo. -?, -h, --help display help and exit
-echo. -A, --all shortcut to -S -B -E -C -T -M: setup, build, ctl, ohm, unit, msi, extensions
-echo. -c, --clean clean artifacts
-echo. -S, --setup check setup
-echo. -C, --ctl build controller
-echo. -D, --documentation create documentation
-echo. -f, --format format sources
-echo. -F, --check-format check for correct formatting
-echo. -B, --build build controller
-echo. -M, --msi build msi
-echo. -O, --ohm build ohm
-echo. -E, --extensions build extensions
-echo. -T, --test run unit test controller
-echo. --sign file secret sign controller with file in c:\common and secret
-echo.
-echo.Examples:
-echo.
-echo %~nx0 --ctl
-echo %~nx0 --build --test
-echo %~nx0 --build -T --sign the_file secret
-echo %~nx0 -A
-GOTO :EOF
+@echo "This is a temporary wrapper script for run.ps1."
+@pwsh ./run.ps1 %*
diff --git a/agents/wnx/run.ps1 b/agents/wnx/run.ps1
new file mode 100644
index 00000000000..e60a576a6d7
--- /dev/null
+++ b/agents/wnx/run.ps1
@@ -0,0 +1,576 @@
+# Copyright (C) 2023 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+# This is a reinterpretation of our standard run script.
+# Some features are not implemented because this is a top-level script:
+# -format
+# -check-format
+# -documentation
+# -setup
+
+if ((get-host).version.major -lt 7) {
+ Write-Host "PowerShell version 7 or higher is required." -ForegroundColor Red
+    exit 1
+}
+
+$argAll = $false
+$argClean = $false
+$argCtl = $false
+$argBuild = $false
+$argTest = $false
+$argSign = $false
+$argMsi = $false
+$argOhm = $false
+$argExt = $false
+$argSql = $false
+$argDetach = $false
+
+$msbuild_exe = "C:\Program Files\Microsoft Visual Studio\2022\Professional\MSBuild\Current\Bin\msbuild.exe"
+$repo_root = (get-item $pwd).parent.parent.FullName
+$arte = "$repo_root/artefacts"
+$build_dir = "$pwd/build"
+$ohm_dir = "$build_dir/ohm/"
+$env:ExternalCompilerOptions = "/DDECREASE_COMPILE_TIME"
+$hash_file = "$arte\windows_files_hashes.txt"
+$usbip_exe = "c:\common\usbip-win-0.3.6-dev\usbip.exe"
+$make_exe = where.exe make | Out-String
+
+
+if ("$env:arg_var_value" -ne "") {
+ $env:arg_val_name = $env:arg_var_value
+}
+else {
+ $env:arg_val_name = ""
+}
+
+function Write-Help() {
+ $x = Get-Item $PSCommandPath
+ $name = "powershell -File " + $x.BaseName + ".ps1"
+
+ Write-Host "Usage:"
+ Write-Host ""
+ Write-Host "$name [arguments]"
+ Write-Host ""
+ Write-Host "Available arguments:"
+ Write-Host " -?, -h, --help display help and exit"
+ Write-Host " -A, --all shortcut to -B -C -O -T -M -E -Q: build, ctl, ohm, unit, msi, extensions, mk-sql"
+ Write-Host " --clean-all clean literally all, use with care"
+ Write-Host " --clean-artifacts clean artifacts"
+ Write-Host " -C, --ctl build controller"
+ Write-Host " -Q, --mk-sql build mk-sql"
+ Write-Host " -B, --build build agent"
+ Write-Host " -M, --msi build msi"
+ Write-Host " -O, --ohm build ohm"
+ Write-Host " -E, --extensions build extensions"
+ Write-Host " -T, --test run agent component tests using binary in repo_root/artefacts"
+ Write-Host " --detach detach USB before running"
+ Write-Host " --sign sign controller using Yubikey based Code Certificate"
+ Write-Host ""
+ Write-Host "Examples:"
+ Write-Host ""
+ Write-Host "$name --ctl"
+ Write-Host "$name --build --test"
+ Write-Host "$name --build -T --sign"
+ Write-Host "$name -A"
+ Write-Host "$name --all --sign"
+}
+
+
+if ($args.Length -eq 0) {
+ Write-Host "No arguments provided. Running with default flags." -ForegroundColor Yellow
+ $argAll = $true
+}
+else {
+ for ($i = 0; $i -lt $args.Length; $i++) {
+ switch ($args[$i]) {
+ { $("-?", "-h", "--help") -contains "$_" } { Write-Help; return }
+ { $("-A", "--all") -contains $_ } { $argAll = $true }
+ { $("-C", "--controller") -contains $_ } { $argCtl = $true }
+ { $("-B", "--build") -contains $_ } { $argBuild = $true }
+ { $("-M", "--msi") -contains $_ } { $argMsi = $true }
+ { $("-O", "--ohm") -contains $_ } { $argOhm = $true }
+ { $("-Q", "--mk-sql") -contains $_ } { $argSql = $true }
+ { $("-E", "--extensions") -contains $_ } { $argExt = $true }
+ { $("-T", "--test") -contains $_ } { $argTest = $true }
+ "--clean-all" { $argClean = $true; $argCleanArtifacts = $true }
+ "--clean-artifacts" { $argCleanArtifacts = $true }
+ "--detach" { $argDetach = $true }
+ "--var" {
+ [Environment]::SetEnvironmentVariable($args[++$i], $args[++$i])
+ }
+ "--sign" { $argSign = $true }
+ }
+ }
+}
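+# Sketch: `--var NAME VALUE` exports NAME=VALUE for the build, e.g.
+#   pwsh ./run.ps1 --build --var msbuild_exe "C:\path\to\msbuild.exe"
+# (the variable name and value above are illustrative only).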
+
+
+if ($argAll) {
+ $argBuild = $true
+ $argOhm = $true
+ $argCtl = $true
+ $argTest = $true
+ $argSql = $true
+ $argExt = $true
+ $argMsi = $true
+}
+
+
+Write-Host "logonserver: '$env:LOGONSERVER' user: '$env:USERNAME'"
+
+function Invoke-CheckApp( [String]$title, [String]$cmdline ) {
+ try {
+ Invoke-Expression $cmdline > $null
+ if ($LASTEXITCODE -ne 0) {
+ throw
+ }
+ Write-Host "[+] $title" -Fore Green
+ }
+ catch {
+ Write-Host "[-] $title :$_" -Fore Red
+ Exit 55
+ }
+}
+
+function Add-HashLine($file_to_hash, $out_file) {
+ Write-Host "$file_to_hash is to be hashed to $out_file"
+
+ try {
+ $file_to_hash_name = Get-ChildItem -Path $file_to_hash | Select-Object Name -ExpandProperty Name
+ Add-Content -Path $out_file -Value ($file_to_hash_name + " ") -NoNewLine
+ Get-FileHash $file_to_hash -Algorithm SHA256 -ErrorAction Stop | Select-Object Hash -ExpandProperty Hash | Add-Content -Path $out_file
+ }
+ catch {
+ Write-Host "Failed to hash $file_to_hash with error $_" -ForegroundColor Red
+ }
+}
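+# Each call appends one "<file name> <SHA256 hash>" line to $out_file.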
+
+
+function Get-Version {
+ $first_line = Get-Content -Path "include\common\wnx_version.h" -TotalCount 1
+ if ($first_line.Substring(0, 29) -eq "#define CMK_WIN_AGENT_VERSION") {
+ return $first_line.Substring(30, $first_line.Length - 30)
+ }
+
+ Write-Error "wnx_version not found in include\common\wnx_version.h" -ErrorAction Stop
+}
+
+function Build-Agent {
+ if ($argBuild -ne $true) {
+ Write-Host "Skipping Agent build..." -ForegroundColor Yellow
+ return
+ }
+
+ Write-Host "Building agent..." -ForegroundColor White
+ $env:msbuild_exe = $msbuild_exe
+ $env:make_exe = $make_exe.trim()
+ $env:wnx_version = Get-Version
+ Write-Host "Used version: $env:wnx_version"
+ Write-Host make is $env:make_exe
+ & $env:make_exe install_extlibs
+ if ($lastexitcode -ne 0) {
+ Write-Error "Failed to install extlibs, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+ Write-Host "Start build" -ForegroundColor White
+ & "$PSScriptRoot\parallel.ps1"
+ if ($lastexitcode -ne 0) {
+ Write-Error "Failed to build Agent, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+
+ # upload test artifacts for separate testing
+ Copy-Item $build_dir/watest/Win32/Release/watest32.exe $arte -Force -ErrorAction Stop
+ Copy-Item $build_dir/watest/x64/Release/watest64.exe $arte -Force -ErrorAction Stop
+
+ Write-Host "Success building agent" -ForegroundColor Green
+}
+
+function Build-Package([bool]$exec, [System.IO.FileInfo]$dir, [string]$name, [string]$cmd) {
+ if ($exec -ne $true) {
+ Write-Host "Skipping $name build..." -ForegroundColor Yellow
+ return
+ }
+
+ Write-Host "Building $name..." -ForegroundColor White
+ $cwd = Get-Location
+ Set-Location "../../packages/$dir"
+ & ./run.cmd $cmd
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Error building $name, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+ Set-Location $cwd
+ Write-Host "Success building $name :" -foreground Green
+}
+
+function Build-Ext {
+ if ($argExt -ne $true) {
+ Write-Host "Skipping Ext build..." -ForegroundColor Yellow
+ return
+ }
+ Write-Host "Building Ext..." -ForegroundColor White
+ $cwd = Get-Location
+ Set-Location "extensions\robotmk_ext"
+ & ../../scripts/cargo_build_robotmk.cmd
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Error building Ext, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+
+ Write-Host "Success building Ext" -foreground Green
+ Set-Location $cwd
+}
+
+function Build-OHM() {
+ if ($argOhm -ne $true) {
+ Write-Host "Skipping OHM build..." -ForegroundColor Yellow
+ return
+ }
+ Write-Host "Building OHM..." -ForegroundColor White
+ & $msbuild_exe .\ohm\ohm.sln "/p:OutDir=$ohm_dir;TargetFrameworkVersion=v4.6;Configuration=Release"
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Error building OHM, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+
+ Write-Host "Uploading OHM" -foreground Green
+ Copy-Item "$ohm_dir/OpenHardwareMonitorLib.dll" $arte -Force -ErrorAction Stop
+ Copy-Item "$ohm_dir/OpenHardwareMonitorCLI.exe" $arte -Force -ErrorAction Stop
+ Write-Host "Success building OHM" -foreground Green
+}
+
+function Build-MSI {
+ if ($argMsi -ne $true) {
+ Write-Host "Skipping Ext build..." -ForegroundColor Yellow
+ return
+ }
+ Write-Host "Building MSI..." -ForegroundColor White
+ Remove-Item "$build_dir/install/Release/check_mk_service.msi" -Force -ErrorAction SilentlyContinue
+
+ & $msbuild_exe wamain.sln "/t:install" "/p:Configuration=Release,Platform=x86"
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Error building MSI, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+ Write-Host "Success building MSI" -foreground Green
+}
+
+
+function Invoke-ChangeMsiProperties([string]$file, $version) {
+ $Installer = new-object -comobject WindowsInstaller.Installer
+ $MSIOpenDatabaseModeTransact = 2
+ $MsiFilePath = $file
+
+ $MsiDBCom = $Installer.GetType().InvokeMember(
+ "OpenDatabase",
+ "InvokeMethod",
+ $Null,
+ $Installer,
+ @($MsiFilePath, $MSIOpenDatabaseModeTransact)
+ )
+    $query = "UPDATE `Property` SET `Property`.`Value`='$version' WHERE `Property`.`Property`='ProductVersion'"
+ $Insert = $MsiDBCom.GetType().InvokeMember("OpenView", "InvokeMethod", $Null, $MsiDBCom, ($query))
+ $Insert.GetType().InvokeMember("Execute", "InvokeMethod", $Null, $Insert, $Null)
+ $Insert.GetType().InvokeMember("Close", "InvokeMethod", $Null, $Insert, $Null)
+ [System.Runtime.Interopservices.Marshal]::ReleaseComObject($Insert) | Out-Null
+
+ $MsiDBCom.GetType().InvokeMember("Commit", "InvokeMethod", $Null, $MsiDBCom, $Null)
+ [System.Runtime.Interopservices.Marshal]::ReleaseComObject($MsiDBCom) | Out-Null
+}
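+# Usage sketch (requires the WindowsInstaller COM API, i.e. a Windows host;
+# the version value below is illustrative):
+#   Invoke-ChangeMsiProperties "$build_dir\install\Release\check_mk_service.msi" "2.4.0.0"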
+
+function Set-MSI-Version {
+ if ($argMsi -ne $true) {
+ Write-Host "Skipping Set MSI version..." -ForegroundColor Yellow
+ return
+ }
+
+ $version = Get-Version
+ $version_base = $version.substring(1, $version.length - 2)
+ Write-Host "Setting MSI version: $version_base" -ForegroundColor White
+ Invoke-ChangeMsiProperties $build_dir\install\Release\check_mk_service.msi $version_base
+ # deprecated:
+ # & echo cscript.exe //nologo scripts\WiRunSQL.vbs $build_dir\install\Release\check_mk_service.msi "UPDATE `Property` SET `Property`.`Value`='$version_base' WHERE `Property`.`Property`='ProductVersion'"
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Error setting version MSI, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+ Write-Host "Success setting version MSI" -foreground Green
+}
+
+function Start-UnitTests {
+ if ($argTest -ne $true) {
+ Write-Host "Skipping unit testing..." -ForegroundColor Yellow
+ return
+ }
+ Write-Host "Running unit tests..." -ForegroundColor White
+ & ./run_tests.ps1 --unit
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Error unit Testing, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+ Write-Host "Success unit tests" -foreground Green
+}
+
+function Invoke-Attach($usbip, $addr, $port) {
+ if ($argSign -ne $true) {
+ Write-Host "Skipping attach" -ForegroundColor Yellow
+ return
+ }
+ &$usbip attach -r $addr -b $port
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Failed to attach USB token" $LASTEXITCODE -foreground Red
+        $script:argSign = $false
+ return
+ }
+ Write-Host "Attached USB" -ForegroundColor Green
+ return
+}
+
+function Test-Administrator {
+ [OutputType([bool])]
+ param()
+ process {
+ [Security.Principal.WindowsPrincipal]$user = [Security.Principal.WindowsIdentity]::GetCurrent();
+ return $user.IsInRole([Security.Principal.WindowsBuiltinRole]::Administrator);
+ }
+}
+
+function Invoke-TestSigning($usbip) {
+ if ($argSign -ne $true) {
+ Write-Host "Skipping Test Signing..." -ForegroundColor Yellow
+ return
+ }
+
+ if (-not(Test-Path -Path $usbip -PathType Leaf)) {
+ Write-Host "$usbip doesn't exist" -ForegroundColor Red
+        $script:argSign = $false
+ return
+ }
+
+ if (-not (Test-Administrator)) {
+ Write-Host "This script must be executed as Administrator." -ForegroundColor Red
+        $script:argSign = $false
+ return
+ }
+
+ Write-Host "check port"
+ &$usbip port
+ if ($LastExitCode -eq 3) {
+ Write-Host "No chance"
+        $script:argSign = $false
+ return
+ }
+ Write-Host "try to detach"
+
+ &$usbip detach -p 00
+ if ($LastExitCode -eq 0) {
+ Write-Host "Should not happen: connection has been established"
+ }
+
+}
+
+function Start-MsiControlBuild {
+ if ($argSign -ne $true) {
+ Write-Host "Skipping MSI Control Build..." -ForegroundColor Yellow
+ return
+ }
+
+ Write-Host "Building controlly MSI..." -ForegroundColor White
+ & $msbuild_exe wamain.sln "/t:install" "/p:Configuration=Release,Platform=x86"
+ if ($LASTEXITCODE -ne 0 ) {
+ Write-Error "Build Failed, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+}
+
+function Start-BinarySigning {
+ if ($argSign -ne $true) {
+ Write-Host "Skipping Signing..." -ForegroundColor Yellow
+ return
+ }
+
+ Write-Host "Binary signing..." -ForegroundColor White
+    Remove-Item $hash_file -Force -ErrorAction SilentlyContinue
+
+ $files_to_sign = @(
+ "$build_dir/check_mk_service/x64/Release/check_mk_service64.exe",
+ "$build_dir/check_mk_service/Win32/Release/check_mk_service32.exe",
+ "$arte/cmk-agent-ctl.exe",
+ "$arte/cmk-sql.exe",
+ "$ohm_dir/OpenHardwareMonitorLib.dll",
+ "$ohm_dir/OpenHardwareMonitorCLI.exe"
+ )
+
+ foreach ($file in $files_to_sign) {
+ & ./scripts/sign_code.cmd $file
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Error Signing, error code is $LASTEXITCODE" -ErrorAction Stop
+ }
+ Add-HashLine $file $hash_file
+
+ }
+ Write-Host "Success binary signing" -foreground Green
+}
+
+
+function Start-ArtifactUploading {
+ if ($argMsi -ne $true) {
+ Write-Host "Skipping upload to artifacts..." -ForegroundColor Yellow
+ return
+ }
+
+ Write-Host "Artifact upload..." -ForegroundColor White
+ $artifacts = @(
+ @("$build_dir/install/Release/check_mk_service.msi", "$arte/check_mk_agent.msi"),
+ @("$build_dir/check_mk_service/x64/Release/check_mk_service64.exe", "$arte/check_mk_agent-64.exe"),
+ @("$build_dir/check_mk_service/Win32/Release/check_mk_service32.exe", "$arte/check_mk_agent.exe"),
+ @("$build_dir/ohm/OpenHardwareMonitorCLI.exe", "$arte/OpenHardwareMonitorCLI.exe"),
+ @("$build_dir/ohm/OpenHardwareMonitorLib.dll", "$arte/OpenHardwareMonitorLib.dll"),
+ @("./install/resources/check_mk.user.yml", "$arte/check_mk.user.yml"),
+ @("./install/resources/check_mk.yml", "$arte/check_mk.yml")
+ )
+ foreach ($artifact in $artifacts) {
+ Copy-Item $artifact[0] $artifact[1] -Force -ErrorAction Stop
+ }
+ Write-Host "Success artifact uploading" -foreground Green
+}
+
+
+function Start-MsiPatching {
+ if ($argMsi -ne $true) {
+ Write-Host "Skipping MSI patching..." -ForegroundColor Yellow
+ return
+ }
+
+ Write-Host "MSI Patching..." -ForegroundColor White
+ & "$make_exe" msi_patch
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Failed to patch MSI " $LASTEXITCODE -ErrorAction Stop
+ }
+ Copy-Item $arte/check_mk_agent.msi $arte/check_mk_agent_unsigned.msi -Force
+ Write-Host "Success artifact uploading" -foreground Green
+}
+
+function Invoke-Detach($argFlag) {
+ if ($argFlag -ne $true) {
+ return
+ }
+ & $usbip_exe detach -p 00
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Failed to detach " $LASTEXITCODE -foreground Yellow
+ return
+ }
+ Start-Sleep -Seconds 2
+ Write-Host "Detached!" -ForegroundColor Green
+}
+
+
+
+function Start-MsiSigning {
+ if ($argSign -ne $true) {
+ Write-Host "Skipping MSI signing..." -ForegroundColor Yellow
+ return
+ }
+
+ Write-Host "MSI signing..." -ForegroundColor White
+ & ./scripts/sign_code.cmd $arte/check_mk_agent.msi
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Failed sign MSI " $LASTEXITCODE -foreground Red
+ return
+ }
+ Add-HashLine $arte/check_mk_agent.msi $hash_file
+ Invoke-Detach $argSign
+ & ./scripts/call_signing_tests.cmd
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Failed test MSI " $LASTEXITCODE -foreground Red
+ return
+ }
+ & py "-3" "./scripts/check_hashes.py" "$hash_file"
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Failed hashing test " $LASTEXITCODE -foreground Red
+ return
+ }
+    Write-Host "MSI signing succeeded" -Foreground Green
+}
+
+function Clear-Artifacts() {
+ if ($argCleanArtifacts -ne $true) {
+ return
+ }
+ Write-Host "Cleaning artifacts..."
+ $masks = "*.msi", "*.exe", "*.log", "*.yml"
+ foreach ($mask in $masks) {
+ Remove-Item -Path "$arte\$mask" -Force -ErrorAction SilentlyContinue
+ }
+}
+
+function Clear-All() {
+ if ($argClean -ne $true) {
+ return
+ }
+
+ Write-Host "Cleaning packages..."
+ Build-Package $true "cmk-agent-ctl" "Controller" "--clean"
+ Build-Package $true "mk-sql" "MK-SQL" "--clean"
+
+ Clear-Artifacts
+
+ Write-Host "Cleaning $build_dir..."
+ Remove-Item -Path "$build_dir" -Recurse -Force -ErrorAction SilentlyContinue
+}
+
+function Update-ArtefactDirs() {
+ If (Test-Path -PathType container $arte) {
+ Write-Host "Using arte dir: '$arte'" -ForegroundColor White
+ }
+ else {
+        Remove-Item $arte -ErrorAction SilentlyContinue # we may find stray files left behind by bad scripts
+        Write-Host "Creating arte dir: '$arte'" -ForegroundColor White
+        New-Item -ItemType Directory -Path $arte -ErrorAction Stop > $null
+ }
+}
+
+
+Invoke-CheckApp "choco" "choco -v"
+Invoke-CheckApp "perl" "perl -v"
+Invoke-CheckApp "make" "make -v"
+Invoke-CheckApp "msvc" "& ""$msbuild_exe"" --version"
+Invoke-CheckApp "is_crlf" "python .\scripts\check_crlf.py"
+
+$argAttached = $false
+$result = 1
+try {
+ $mainStartTime = Get-Date
+ Invoke-Detach $argDetach
+ Update-ArtefactDirs
+ Clear-Artifacts
+ Clear-All
+ Build-Agent
+ Build-Package $argCtl "cmk-agent-ctl" "Controller"
+ Build-Package $argSql "mk-sql" "MK-SQL"
+ Build-Ohm
+ Build-Ext
+ Build-MSI
+ Set-Msi-Version
+ Start-UnitTests
+ Invoke-TestSigning $usbip_exe
+ Start-MsiControlBuild
+ Invoke-Attach $usbip_exe "yubi-usbserver.lan.checkmk.net" "1-1.2"
+ if ($argSign -eq $true) {
+ $argAttached = $true
+ }
+ Start-BinarySigning
+ Start-ArtifactUploading
+ Start-MsiPatching
+ Start-MsiSigning
+ $endTime = Get-Date
+ $elapsedTime = $endTime - $mainStartTime
+ Write-Host "Elapsed time: $($elapsedTime.Hours):$($elapsedTime.Minutes):$($elapsedTime.Seconds)"
+ $result = 0
+}
+catch {
+ Write-Host "Error: " $_ -ForegroundColor Red
+ Write-Host "Trace stack: " -ForegroundColor Yellow
+ Write-Host $_.ScriptStackTrace -ForegroundColor Yellow
+}
+finally {
+ Invoke-Detach $argAttached
+}
+exit $result
+
+
diff --git a/agents/wnx/run_tests.cmd b/agents/wnx/run_tests.cmd
index aa44efb8e4c..a6fc961377b 100644
--- a/agents/wnx/run_tests.cmd
+++ b/agents/wnx/run_tests.cmd
@@ -1,169 +1,2 @@
-@echo off
-:: To execute all complicated tests of windows agent
-:: params regression, component, ext, simulation, integration, all
-::
-:: CI must run regression, component, integration, all
-:: Dev machine must run also ext and simulation
-:: later tests may require some additional package installed which ae not suitable for CI VM
-SETLOCAL EnableDelayedExpansion
-
-if "%*" == "" (
-echo: Run default...
-set int_arg_component=1
-set int_arg_ext=
-set int_arg_simulation=
-set int_arg_integration=
-set int_arg_regression=
-set int_arg_plugins=
-) else (
-:CheckOpts
-if "%~1"=="-h" goto Usage
-if "%~1"=="--help" goto Usage
-if "%~1"=="-?" goto Usage
-
-if "%~1"=="-A" (set int_arg_all=1) & shift & goto CheckOpts
-if "%~1"=="--all" (set int_arg_all=1) & shift & goto CheckOpts
-
-if "%~1"=="-C" (set int_arg_component=1) & (set int_arg_build=1) & shift & goto CheckOpts
-if "%~1"=="--component" (set int_arg_component=1) & (set int_arg_build=1) & shift & goto CheckOpts
-
-if "%~1"=="-E" (set int_arg_ext=1) & (set int_arg_build=1) & shift & goto CheckOpts
-if "%~1"=="--ext" (set int_arg_ext=1) & (set int_arg_build=1) & shift & goto CheckOpts
-
-if "%~1"=="-S" (set int_arg_simulation=1) & shift & goto CheckOpts
-if "%~1"=="--simulation" (set int_arg_simulation=1) & shift & goto CheckOpts
-
-if "%~1"=="-I" (set int_arg_integration=1) & (set int_arg_build=1) & shift & goto CheckOpts
-if "%~1"=="--integration" (set int_arg_integration=1) & (set int_arg_build=1) & shift & goto CheckOpts
-
-if "%~1"=="-P" (set int_arg_plugins=1) & shift & goto CheckOpts
-if "%~1"=="--plugins" (set int_arg_plugins=1) & shift & goto CheckOpts
-
-if "%~1"=="-R" (set int_arg_regression=1) & shift & goto CheckOpts
-if "%~1"=="--regression" (set int_arg_regression=1) & shift & goto CheckOpts
-
-)
-if "%int_arg_all%"=="1" (
-set int_arg_component=1
-set int_arg_ext=1
-set int_arg_simulation=1
-set int_arg_integration=1
-set int_arg_regression=1
-set int_arg_plugins=1
-)
-
-set cur_dir=%cd%
-set arte=%cur_dir%\..\..\artefacts
-set CHECKMK_GIT_DIR=%cur_dir%\..\..\
-
-
-call :watest_build
-call :component
-call :ext
-call :simulation
-call :regression
-call :integration
-call :plugins
-goto :end
-
-goto :end
-
-
-
-:watest_build
-if not "%int_arg_build%" == "1" powershell Write-Host "Skipped build watest" -Foreground Yellow & goto :eof
-call scripts\unpack_packs.cmd
-make install_extlibs || ( powershell Write-Host "Failed to install packages" -Foreground Red & call :halt 33 )
-call build_watest.cmd
-if errorlevel 1 powershell write-Host "Build watest FAIL!" -Foreground Red & call :halt 19
-powershell write-Host "Build watest SUCCESS!" -Foreground Green
-goto :eof
-
-
-:component
-if not "%int_arg_component%" == "1" powershell Write-Host "Skipped component tests" -Foreground Yellow & goto :eof
-call call_unit_tests.cmd *Component
-if errorlevel 1 powershell write-Host "Component FAIL!" -Foreground Red & call :halt 20
-powershell write-Host "Component SUCCESS!" -Foreground Green
-goto :eof
-
-
-:ext
-if not "%int_arg_ext%" == "1" powershell Write-Host "Skipped ext tests" -Foreground Yellow & goto :eof
-call call_unit_tests.cmd *ComponentExt
-if errorlevel 1 powershell write-Host "Ext FAIL!" -Foreground Red & call :halt 21
-powershell write-Host "Ext SUCCESS!" -Foreground Green
-goto :eof
-
-:simulation
-if not "%int_arg_simulation%" == "1" powershell Write-Host "Skipped simulation tests" -Foreground Yellow & goto :eof
-call call_unit_tests.cmd *_Simulation
-if errorlevel 1 powershell write-Host "Simulation FAIL!" -Foreground Red & call :halt 21
-powershell write-Host "Simulation SUCCESS!" -Foreground Green
-goto :eof
-
-:regression
-if not "%int_arg_regression%" == "1" powershell Write-Host "Skipped regression tests" -Foreground Yellow & goto :eof
-call scripts\call_regression_tests.cmd
-if errorlevel 1 powershell write-Host "Regression FAIL!" -Foreground Red & call :halt 21
-powershell write-Host "Regression SUCCESS!" -Foreground Green
-goto :eof
-
-
-:integration
-if not "%int_arg_integration%" == "1" powershell Write-Host "Skipped integration tests" -Foreground Yellow & goto :eof
-call scripts\call_integration_tests.cmd
-if errorlevel 1 powershell write-Host "integration FAIL!" -Foreground Red & call :halt 21
-powershell write-Host "integration SUCCESS!" -Foreground Green
-goto :eof
-
-
-:plugins
-if not "%int_arg_plugins%" == "1" powershell Write-Host "Skipped plugins tests" -Foreground Yellow & goto :eof
-call scripts\call_ap_tests.cmd
-if errorlevel 1 powershell write-Host "plugins FAIL!" -Foreground Red & call :halt 21
-powershell write-Host "plugins SUCCESS!" -Foreground Green
-goto :eof
-
-
-:: Sets the errorlevel and stops the batch immediately
-:halt
-call :__SetErrorLevel %1
-call :__ErrorExit 2> nul
-goto :eof
-
-:__ErrorExit
-rem Creates a syntax error, stops immediately
-()
-goto :eof
-
-:__SetErrorLevel
-exit /b %time:~-2%
-goto :eof
-
-:end
-exit /b 0
-goto :eof
-
-:Usage
-echo.
-echo.Usage:
-echo.
-echo.%~nx0 [arguments]
-echo.
-echo.Available arguments:
-echo. -?, -h, --help display help and exit
-echo. -A, --all run all possible tests
-echo. -C, --component component tests(marked with Component suffix )
-echo. -E, --ext extended component tests(marked with ComponentExt suffix )
-echo. -S, --simulation simulation tests(marked with _Simulation suffix )
-echo. -I, --integration integration tests
-echo. -D, --regression regression tests
-echo. -f, --plugins agent plugins test
-echo.
-echo.Examples:
-echo.
-echo %~nx0 --component
-echo %~nx0 -R -I
-echo %~nx0 -A
-GOTO :EOF
+@echo "This is a temporary wrapper script for run_tests.ps1."
+@pwsh ./run_tests.ps1 %*
diff --git a/agents/wnx/run_tests.ps1 b/agents/wnx/run_tests.ps1
new file mode 100644
index 00000000000..0d67a64c11b
--- /dev/null
+++ b/agents/wnx/run_tests.ps1
@@ -0,0 +1,392 @@
+# Executes all complicated tests of the Windows agent.
+# Params: regression, component, ext, simulation, integration, all
+#
+# CI must run regression, component, integration, all.
+# A dev machine must also run ext and simulation;
+# the latter tests may require additional packages which are not suitable for the CI VM.
+
+if ((get-host).version.major -lt 7) {
+ Write-Host "PowerShell version 7 or higher is required." -ForegroundColor Red
+    exit 1
+}
+
+
+$testComponent = $false
+$testExt = $false
+$testSimulation = $false
+$testIntegration = $false
+$testRegression = $false
+$testPlugins = $false
+$testBuild = $false
+$testAll = $false
+$testUnit = $false
+$repo_root = (get-item $pwd).parent.parent.FullName
+$cur_dir = $pwd
+$arte = "$repo_root/artefacts"
+
+function Write-Help() {
+ $x = Get-Item $PSCommandPath
+ $name = "powershell -File " + $x.BaseName + ".ps1"
+
+ Write-Host "Usage:"
+ Write-Host ""
+ Write-Host "$name [arguments]"
+ Write-Host ""
+ Write-Host "Available arguments:"
+ Write-Host " -?, -h, --help display help and exit"
+ Write-Host " -A, --all shortcut to -B -C -S -E -R -I -P"
+ Write-Host " -B, --build build"
+ Write-Host " -C, --component component testing"
+ Write-Host " -S, --simulation component testing"
+ Write-Host " -E, --ext ext component testing"
+ Write-Host " -R, --regression regression testing"
+ Write-Host " -I, --integration integration testing"
+ Write-Host " -P, --plugins plugins testing"
+ Write-Host " -U, --unit unit testing"
+ Write-Host ""
+ Write-Host "Examples:"
+ Write-Host ""
+ Write-Host "$name --component"
+ Write-Host "$name --build --integration"
+}
+
+
+if ($args.Length -eq 0) {
+ Write-Host "No arguments provided. Running with default flags." -ForegroundColor Yellow
+ $testComponent = $true
+}
+else {
+ for ($i = 0; $i -lt $args.Length; $i++) {
+ switch ($args[$i]) {
+ { $("-?", "-h", "--help") -contains "$_" } { Write-Help; return }
+ { $("-A", "--all") -contains $_ } { $testAll = $true }
+ { $("-B", "--build") -contains $_ } { $testBuild = $true }
+ { $("-C", "--component") -contains $_ } { $testComponent = $true }
+ { $("-E", "--ext") -contains $_ } { $testExt = $true }
+ { $("-S", "--simulation") -contains $_ } { $testSimulation = $true }
+ { $("-P", "--plugins") -contains $_ } { $testPlugins = $true }
+ { $("-I", "--integration") -contains $_ } { $testIntegration = $true }
+ { $("-R", "--regression") -contains $_ } { $testRegression = $true }
+ { $("-U", "--unit") -contains $_ } { $testUnit = $true }
+ }
+ }
+}
+
+if ($testExt -or $testIntegration -or $testComponent) {
+ $testBuild = $true
+}
+
+if ($testAll) {
+ $testComponent = $true
+ $testExt = $true
+ $testSimulation = $true
+ $testIntegration = $true
+ $testRegression = $true
+ $testPlugins = $true
+ $testBuild = $true
+ $testAll = $true
+ $testUnit = $true
+}
+
+function New-TemporaryDirectory() {
+ param(
+ [Parameter(
+ Mandatory = $True,
+ Position = 0
+ )]
+ [String]
+ $prefix
+ )
+ try {
+ $parent = [System.IO.Path]::GetTempPath()
+ $name = "$prefix" + [System.IO.Path]::GetRandomFileName()
+ $k = New-Item -ItemType Directory -Path (Join-Path $parent $name)
+ return $k.FullName
+ }
+ catch {
+ return ""
+ }
+}
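+# Usage sketch: $dir = New-TemporaryDirectory -prefix "checkmk_unit_" yields a
+# fresh directory under $env:TEMP (e.g. ...\Temp\checkmk_unit_abc123.tmp), or "" on failure.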
+
+
+function Test-Administrator {
+ [OutputType([bool])]
+ param()
+ process {
+ [Security.Principal.WindowsPrincipal]$user = [Security.Principal.WindowsIdentity]::GetCurrent();
+ return $user.IsInRole([Security.Principal.WindowsBuiltinRole]::Administrator);
+ }
+}
+
+
+function Invoke-PrepareTests($base) {
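+ # replicate the agent's on-disk layout: "root" holds install files, "data" the user dir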
+ $root = "$base\root"
+ $data = "$base\data"
+ $user_dir = $data
+ New-Item -ItemType Directory -Path "$root\plugins" -ErrorAction Stop > nul
+ New-Item -ItemType Directory -Path "$user_dir\bin" -ErrorAction Stop > nul
+
+ & xcopy "..\windows\plugins\*.*" "$root\plugins" "/D" "/Y" > nul
+ & xcopy ".\tests\files\ohm\cli\*.*" "$user_dir\bin" "/D" "/Y" > nul
+ & xcopy ".\install\resources\check_mk.yml" "$root" "/D" "/Y" > nul
+
+ & xcopy ".\test_files\config\*.yml" "$root" "/D" "/Y" > nul
+
+ & xcopy ".\test_files\config\*.cfg" "$user_dir" "/D" "/Y" > nul
+ & xcopy ".\test_files\config\*.test.ini" "$user_dir" "/D" "/Y" > nul
+ & xcopy ".\test_files\cap\*.test.cap" "$user_dir" "/D" "/Y" > nul
+ & xcopy ".\test_files\unit_test\*.ini" "$user_dir" "/D" "/Y" > nul
+ & xcopy ".\test_files\unit_test\*.dat" "$user_dir" "/D" "/Y" > nul
+ & xcopy ".\test_files\unit_test\*.state" "$user_dir" "/D" "/Y" > nul
+ & xcopy ".\test_files\config\*.yml" "$user_dir" "/D" "/Y" > nul
+
+}
+
+function Invoke-UnitTest([bool]$run, [String]$name, [String]$cmdline) {
+ if (!$run) {
+ Write-Host "Skipping test $name..." -Foreground Yellow
+ return
+ }
+
+ Write-Host "Running $name test..." -Foreground White
+ $results = "${name}_tests_results.zip"
+ $wnx_test_root = ""
+ $prefix = "checkmk_$name_"
+ try {
+ $wnx_test_root = New-TemporaryDirectory -prefix "$prefix"
+ if ($wnx_test_root -eq "") {
+ Write-Error "Failed to create temporary directory" -ErrorAction Stop
+ }
+ $env:WNX_TEST_ROOT = $wnx_test_root
+ Write-Host "Using temporary directory $wnx_test_root..." -Foreground White
+ Invoke-PrepareTests "$wnx_test_root\test"
+ New-Item -Path "$wnx_test_root\watest32.exe" -ItemType SymbolicLink -Value "$arte\watest32.exe" > nul
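+ # stop the WinRing0 driver service (presumably left over from OpenHardwareMonitor) so it cannot interfere with the run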
+ & net stop WinRing0_1_2_0
+ & "$wnx_test_root\watest32.exe" "--gtest_filter=$cmdline"
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "[-] $name test :$_" -ErrorAction Stop
+ }
+ Write-Host "Success $name test" -Foreground Green
+ }
+ finally {
+ try {
+ Remove-Item "$arte\$results" -ErrorAction SilentlyContinue
+ Compress-Archive -Path $wnx_test_root -DestinationPath "$arte\$results"
+ }
+ catch {
+ Write-Host "Failed to compress $wnx_test_root :$_" -Foreground Red
+ }
+ if ($wnx_test_root -like "*temp*$prefix*") {
+ Write-Host "Removing temporary directory $wnx_test_root..." -Foreground White
+ Remove-Item $wnx_test_root -Force -Recurse -ErrorAction SilentlyContinue
+ }
+ }
+
+}
+
+
+function Invoke-RegressionTest() {
+ if (!$testRegression) {
+ Write-Host "Skipping regression test..." -Foreground Yellow
+ return
+ }
+ if (-not (Test-Administrator)) {
+ Write-Error "Regression Testing must be executed as Administrator." -ErrorAction Stop
+ }
+
+ Write-Host "Running regression test..." -Foreground White
+ $results = "regression_tests_results.zip"
+ $wnx_test_root = ""
+ $work_dir = "$pwd"
+ $prefix = "checkmk_regression_"
+ try {
+ $wnx_test_root = New-TemporaryDirectory -prefix "$prefix"
+ if ($wnx_test_root -eq "") {
+ Write-Error "Failed to create temporary directory" -ErrorAction Stop
+ }
+ Write-Host "Using temporary directory $wnx_test_root..." -Foreground White
+ $plugins_dir = "$wnx_test_root/test/root/plugins"
+ $data_dir = "$wnx_test_root/test/data"
+ New-Item -ItemType Directory -Path $plugins_dir -ErrorAction Stop > nul
+ New-Item -ItemType Directory -Path $data_dir -ErrorAction Stop > nul
+ Remove-NetFirewallRule -DisplayName "AllowRegression" 2> nul
+ New-NetFirewallRule -DisplayName "AllowRegression" -Direction Inbound -Program "$wnx_test_root\check_mk_agent.exe" -RemoteAddress LocalSubnet -Action Allow >nul
+ Copy-Item $arte\check_mk_agent.exe $wnx_test_root\check_mk_agent.exe > nul
+ Copy-Item $arte\check_mk.yml $wnx_test_root\test\root\check_mk.yml > nul
+ & xcopy "..\windows\plugins\*.*" "$wnx_test_root\test\root\plugins" "/D" "/Y" > nul
+ $env:WNX_REGRESSION_BASE_DIR = "$wnx_test_root"
+ $env:WNX_INTEGRATION_BASE_DIR = ""
+ $env:arte = $arte
+ Set-Location ".\tests\regression"
+ py -3 -m pytest
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "[-] Regression test :$_" -ErrorAction Stop
+ }
+ Write-Host "Success regression test..." -Foreground Green
+ }
+ finally {
+ try {
+ Remove-Item "$arte\$results" -ErrorAction SilentlyContinue
+ Compress-Archive -Path $wnx_test_root -DestinationPath "$arte\$results"
+ }
+ catch {
+ Write-Host "Failed to compress $wnx_test_root :$_" -Foreground Red
+ }
+ Set-Location $work_dir -ErrorAction SilentlyContinue
+ Remove-NetFirewallRule -DisplayName "AllowRegression" >nul
+ if ($wnx_test_root -like "*temp*$prefix*") {
+ Write-Host "Removing temporary directory $wnx_test_root..." -Foreground White
+ Remove-Item $wnx_test_root -Force -Recurse -ErrorAction SilentlyContinue
+ }
+ }
+
+}
+
+function Invoke-IntegrationTest() {
+ if (!$testIntegration) {
+ Write-Host "Skipping integration test..." -Foreground Yellow
+ return
+ }
+ if (-not (Test-Administrator)) {
+ Write-Error "Integration Testing must be executed as Administrator." -ErrorAction Stop
+ }
+ $env:CHECKMK_GIT_DIR = $repo_root
+
+ Write-Host "Running integration test..." -Foreground White
+ $results = "integration_tests_results.zip"
+ $wnx_test_root = ""
+ $prefix = "checkmk_integration_"
+ try {
+ $wnx_test_root = New-TemporaryDirectory -prefix "$prefix"
+ if ($wnx_test_root -eq "") {
+ Write-Error "Failed to create temporary directory" -ErrorAction Stop
+ }
+ Write-Host "Using temporary directory $wnx_test_root..." -Foreground White
+ $root_dir = "$wnx_test_root\test\root"
+ $data_dir = "$wnx_test_root\test\data"
+
+ Write-Host "Prepare dirs..." -Foreground White
+ New-Item -ItemType Directory -Path $root_dir -ErrorAction Stop > nul
+ New-Item -ItemType Directory -Path "$data_dir\plugins" -ErrorAction Stop > nul
+ New-Item -ItemType Directory -Path "$data_dir\bin" -ErrorAction Stop > nul
+
+ Write-Host "Prepare firewall..." -Foreground White
+ Remove-NetFirewallRule -DisplayName "AllowIntegration1" 2> nul
+ New-NetFirewallRule -DisplayName "AllowIntegration1" -Direction Inbound -Program "$wnx_test_root\check_mk_agent.exe" -RemoteAddress LocalSubnet -Action Allow >nul
+ Remove-NetFirewallRule -DisplayName "AllowIntegration2" 2>nul
+ New-NetFirewallRule -DisplayName "AllowIntegration2" -Direction Inbound -Program "$data_dir\bin\cmk-agent-ctl.exe" -RemoteAddress LocalSubnet -Action Allow > nul
+
+ Write-Host "Copy exe..." -Foreground White
+ Copy-Item $arte\check_mk_agent.exe $wnx_test_root\check_mk_agent.exe > nul
+
+ Write-Host "Copy yml..." -Foreground White
+ Copy-Item $arte\check_mk.yml $wnx_test_root\test\root\check_mk.yml > nul
+ & xcopy "..\windows\plugins\*.*" "$wnx_test_root\test\root\plugins\" "/D" "/Y" > nul
+ $env:WNX_REGRESSION_BASE_DIR = ""
+ $env:WNX_INTEGRATION_BASE_DIR = "$wnx_test_root"
+ $env:arte = $arte
+
+ Write-Host "RUN INTEGRATION!" -Foreground White
+ py -3 -m pytest tests\integration\
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "[-] Integration test :$_" -ErrorAction Stop
+ }
+ Write-Host "Success integration test..." -Foreground Green
+ }
+ finally {
+ try {
+ Remove-Item "$arte\$results" -ErrorAction SilentlyContinue
+ Compress-Archive -Path $wnx_test_root -DestinationPath "$arte\$results"
+ }
+ catch {
+ Write-Host "Failed to compress $wnx_test_root :$_" -Foreground Red
+ }
+ Remove-NetFirewallRule -DisplayName "AllowIntegration1" 2>nul
+ Remove-NetFirewallRule -DisplayName "AllowIntegration2" 2>nul
+ if ($wnx_test_root -like "*temp*$prefix*") {
+ Write-Host "Removing temporary directory $wnx_test_root..." -Foreground White
+ Remove-Item $wnx_test_root -Force -Recurse -ErrorAction SilentlyContinue
+ }
+ }
+
+}
+
+
+function Invoke-Exe {
+ param(
+ [Parameter(
+ Mandatory = $True,
+ Position = 0
+ )]
+ [bool]
+ $run,
+ [Parameter(
+ Mandatory = $True,
+ Position = 1
+ )]
+ [string]
+ $name,
+ [Parameter(
+ Mandatory = $True,
+ Position = 2
+ )]
+ [string]
+ $exe,
+ [Parameter(
+ Mandatory = $True,
+ ValueFromRemainingArguments = $true,
+ Position = 3
+ )][string[]]
+ $listArgs
+ )
+ if ($run -ne $true) {
+ Write-Host "Skipping $name..." -Foreground Yellow
+ return
+ }
+ Write-Host "Running $name..." -Foreground White
+ & $exe $listArgs
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "[-] $name :$_" -ErrorAction Stop
+ }
+ Write-Host "Running $name..." -Foreground White
+}
+
+
+$result = 1
+try {
+ $mainStartTime = Get-Date
+ if ($testBuild) {
+ & pwsh ./run.ps1 --build
+ }
+
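+ # gtest filters: "unit" excludes simulation/component/flaky suites; the others select them explicitly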
+ Invoke-UnitTest -run $testUnit -name "unit" -cmdline "-*_Simulation:*Component:*ComponentExt:*Flaky"
+ Invoke-UnitTest -run $testComponent -name "component" -cmdline "*Component"
+ Invoke-UnitTest -run $testExt -name "ext" -cmdline "*ComponentExt"
+ Invoke-UnitTest -run $testSimulation -name "simulation" -cmdline "*_Simulation"
+
+ Invoke-RegressionTest
+ Invoke-IntegrationTest
+ try {
+ Set-Location $repo_root
+ $env:CHECKMK_GIT_DIR = $repo_root
+ Invoke-Exe -run $testPlugins -name "plugins" -exe "py" "-3" "-m" "pytest" "$cur_dir\tests\ap\test_mk_logwatch_win.py"
+ }
+ finally {
+ Set-Location $cur_dir
+ }
+
+ $endTime = Get-Date
+ $elapsedTime = $endTime - $mainStartTime
+ Write-Host "Elapsed time: $($elapsedTime.Hours):$($elapsedTime.Minutes):$($elapsedTime.Seconds)"
+ $result = 0
+}
+catch {
+ Write-Host "Error: " $_ -ForegroundColor Red
+ Write-Host "Trace stack: " -ForegroundColor Yellow
+ Write-Host $_.ScriptStackTrace -ForegroundColor Yellow
+}
+if ($result -eq 0) {
+ Write-Host "SUCCESS" -ForegroundColor Green
+}
+exit $result
diff --git a/agents/wnx/scripts/add_hash_line.ps1 b/agents/wnx/scripts/add_hash_line.ps1
deleted file mode 100644
index 20d1b3b71ab..00000000000
--- a/agents/wnx/scripts/add_hash_line.ps1
+++ /dev/null
@@ -1,16 +0,0 @@
-# Add hash SHA-256 to a file
-# args[0] file to be hashed
-# args[1] file for output
-# format of line
-#
-# The resulting file is need for installer
-#
-# 2023 (c) Checkmk GmbH
-#
-
-$file_to_hash = $args[0]
-$out_file = $args[1]
-
-$file_to_hash_name = Get-ChildItem -Path $file_to_hash | Select-Object Name -ExpandProperty Name
-Add-Content -Path $out_file -Value ($file_to_hash_name + " ") -NoNewLine
-Get-FileHash $file_to_hash -Algorithm SHA256 | Select-Object Hash -ExpandProperty Hash | Add-Content -Path $out_file
\ No newline at end of file
diff --git a/agents/wnx/scripts/attach.ps1 b/agents/wnx/scripts/attach.ps1
deleted file mode 100644
index 01f74ea4432..00000000000
--- a/agents/wnx/scripts/attach.ps1
+++ /dev/null
@@ -1,44 +0,0 @@
-param($usbip, $addr, $port)
-function Test-Administrator {
- [OutputType([bool])]
- param()
- process {
- [Security.Principal.WindowsPrincipal]$user = [Security.Principal.WindowsIdentity]::GetCurrent();
- return $user.IsInRole([Security.Principal.WindowsBuiltinRole]::Administrator);
- }
-}
-
-if (-not(Test-Path -Path $usbip -PathType Leaf)) {
- Write-Host "$usbip doesn't exist" -ForegroundColor Red
- exit 1
-}
-
-if (-not (Test-Administrator)) {
- Write-Host "This script must be executed as Administrator." -ForegroundColor Red
- exit 1
-}
-
-&$usbip port
-if ($LastExitCode -eq 3) {
- Write-Host "No chance"
- exit 1
-}
-
-&$usbip detach -p 00
-if ($LastExitCode -eq 0) {
- Write-Host "Should not happen"
-}
-
-$sleep = 10
-for ($i = 0; $i -lt 3; $i++) {
- Write-Host wait....
- &$usbip attach -r $addr -b $port
- if ($? ) {
- Write-Host "success!"
- exit 0
- }
- Write-Host "error $LastExitCode"
- Start-Sleep $sleep
-}
-
-exit 1
\ No newline at end of file
diff --git a/agents/wnx/scripts/attach_usb_token.cmd b/agents/wnx/scripts/attach_usb_token.cmd
deleted file mode 100644
index ccd74ea6ae6..00000000000
--- a/agents/wnx/scripts/attach_usb_token.cmd
+++ /dev/null
@@ -1,22 +0,0 @@
-@echo off
-::
-:: File to attach some usb token with YK
-
-if "%1" == "" powershell Write-Host "Invalid usbip" -ForegroundColor Red && goto usage
-if "%2" == "" powershell Write-Host "Invalid server" -ForegroundColor Red && goto usage
-if "%3" == "" powershell Write-Host "Invalid port" -ForegroundColor Red && goto usage
-if "%4" == "" powershell Write-Host "Invalid control script" -ForegroundColor Red && goto usage
-
-@powershell -ExecutionPolicy ByPass -File %4 %1 %2 %3
-IF %ERRORLEVEL% NEQ 0 powershell Write-Host "Failed" -ForegroundColor Red & exit /b 1
-powershell Start-Sleep -Seconds 5
-powershell Write-Host "Attached!" -ForegroundColor Green
-exit /b 0
-
-
-:usage
-powershell Write-Host "Usage:" -ForegroundColor DarkGreen
-powershell Write-Host "attach_usb_token.cmd usb_ip_path usb_server port script" -ForegroundColor DarkGreen
-powershell Write-Host "Example:" -ForegroundColor DarkGreen
-powershell Write-Host " attach_usb_token.cmd c:\common\usb_ip\usbip.exe yubi-usbserver.lan.checkmk.net 1-1.2 scripts\attach.ps1" -ForegroundColor DarkGreen
-:exit
diff --git a/agents/wnx/scripts/call_ap_tests.cmd b/agents/wnx/scripts/call_ap_tests.cmd
deleted file mode 100644
index 173d8fe92e3..00000000000
--- a/agents/wnx/scripts/call_ap_tests.cmd
+++ /dev/null
@@ -1,30 +0,0 @@
-@echo off
-::
-:: File to run Agent Plugins Integration Tests
-::
-:: local testing code below
-::set arte=c:\Users\sk\git\check_mk\artefacts
-::set WNX_INTEGRATION_BASE_DIR=c:\Users\sk\git\check_mk\agents\wnx\build\integration
-::set DEBUG_HOME_DIR=c:\Users\sk\git\check_mk\agents\wnx\build\integration\test\data
-::pytest -v -s tests/integration/test_check_mk_run.py
-
-if "%cur_dir%" == "" powershell Write-Host "cur_dir not defined" -Foreground Red & exit /b 1
-if "%arte%" == "" powershell Write-Host "arte not defined" -Foreground Red & exit /b 1
-if "%CHECKMK_GIT_DIR%" == "" powershell Write-Host "CHECKMK_GIT_DIR not defined" -Foreground Red & exit /b 1
-
-powershell Write-Host "Windows agent Plugin Tests are starting" -Foreground Cyan
-if "%CHECKMK_GIT_DIR%" == "" (
-powershell Write-Host "Test Failed: variable CHECKMK_GIT_DIR is not set" -Foreground Red
-exit /b 1
-)
-chdir "%CHECKMK_GIT_DIR%" || ( echo "can't change dir to root" && exit /b 1 )
-set WNX_DIR=%CHECKMK_GIT_DIR%\agents\wnx
-
-py -3 -m pytest %WNX_DIR%\tests\ap\test_mk_logwatch_win.py || set failed=1
-
-if "%failed%" == "1" (
-powershell Write-Host "Test Failed" -Foreground Red
-exit /b 1
-)
-powershell Write-Host "Test Success" -Foreground Green
-exit /b 0
diff --git a/agents/wnx/scripts/call_integration_tests.cmd b/agents/wnx/scripts/call_integration_tests.cmd
deleted file mode 100644
index e142f8af545..00000000000
--- a/agents/wnx/scripts/call_integration_tests.cmd
+++ /dev/null
@@ -1,64 +0,0 @@
-@echo off
-::
-:: File to run Integration Tests in the tests/integration folder
-:: Should be called after successful build with correct artifacts
-::
-:: local testing code below
-::set arte=c:\Users\sk\git\check_mk\artefacts
-::set WNX_INTEGRATION_BASE_DIR=c:\Users\sk\git\check_mk\agents\wnx\build\integration
-::set DEBUG_HOME_DIR=c:\Users\sk\git\check_mk\agents\wnx\build\integration\test\data
-::pytest -v -s tests/integration/test_check_mk_run.py
-
-if "%cur_dir%" == "" powershell Write-Host "cur_dir not defined" -Foreground Red & exit /b 1
-if "%arte%" == "" powershell Write-Host "arte not defined" -Foreground Red & exit /b 1
-if "%CHECKMK_GIT_DIR%" == "" powershell Write-Host "CHECKMK_GIT_DIR not defined" -Foreground Red & exit /b 1
-
-set WNX_REGRESSION_BASE_DIR=
-set WNX_INTEGRATION_BASE_DIR=%temp%\test_wnx_%random%
-set DEBUG_HOME_DIR=%WNX_INTEGRATION_BASE_DIR%\test\data
-set WNX_TEST_ROOT=%WNX_INTEGRATION_BASE_DIR%\test\root
-set results=integration_tests_results.zip
-
-powershell Write-Host "Windows agent Integration Tests are starting in %WNX_INTEGRATION_BASE_DIR%" -Foreground Cyan
-
-:: Firewall processing
-echo fw rule - AllowIntegration1
-powershell Remove-NetFirewallRule -DisplayName "AllowIntegration1" 2>nul
-powershell New-NetFirewallRule -DisplayName "AllowIntegration1" -Direction Inbound -Program %WNX_TEST_ROOT%\check_mk_agent.exe -RemoteAddress LocalSubnet -Action Allow >nul
-echo fw rule - AllowIntegration2
-powershell Remove-NetFirewallRule -DisplayName "AllowIntegration2" 2>nul
-powershell New-NetFirewallRule -DisplayName "AllowIntegration2" -Direction Inbound -Program %DEBUG_HOME_DIR%\bin\cmk-agent-ctl.exe -RemoteAddress LocalSubnet -Action Allow >nul
-
-:: TEST!
-py -3 -m pytest tests\integration\%* || set failed=1
-
-:: Firewall processing again
-echo fw rules deletion...
-powershell Remove-NetFirewallRule -DisplayName "AllowIntegration1" 2>nul
-powershell Remove-NetFirewallRule -DisplayName "AllowIntegration2" 2>nul
-
-call :zip_results
-if "%failed%" == "1" (
-powershell Write-Host "Integration Test Failed" -Foreground Red
-exit /b 1
-)
-powershell Write-Host "Integration Test Success" -Foreground Green
-exit /b 0
-
-:: NOT REACHABLE
-:zip_results
-echo backing up %arte%\%results% ...
-ren %arte%\%results% %results%.sav 2>nul
-echo switch to "%WNX_INTEGRATION_BASE_DIR%"
-dir "%WNX_INTEGRATION_BASE_DIR%"
-pushd "%WNX_INTEGRATION_BASE_DIR%" && ( call :zip_and_remove & popd )
-exit /b
-
-:zip_and_remove
-echo zipping results in
-cd
-7z a -r -y -tzip %arte%\%results% >nul
-echo cleaning...
-rmdir /s/q "%WNX_INTEGRATION_BASE_DIR%" 2>nul
-rmdir "%WNX_INTEGRATION_BASE_DIR%" 2>nul
-exit /b
diff --git a/agents/wnx/scripts/call_regression_tests.cmd b/agents/wnx/scripts/call_regression_tests.cmd
deleted file mode 100644
index dd758fb16c4..00000000000
--- a/agents/wnx/scripts/call_regression_tests.cmd
+++ /dev/null
@@ -1,51 +0,0 @@
-@echo off
-:: File to run Regression Tests in the tests/regression folder
-:: should be called after successful build with correct artifacts
-
-
-if "%cur_dir%" == "" powershell Write-Host "cur_dir not defined" -Foreground Red & exit /b 1
-if "%arte%" == "" powershell Write-Host "arte not defined" -Foreground Red & exit /b 1
-if "%CHECKMK_GIT_DIR%" == "" powershell Write-Host "CHECKMK_GIT_DIR not defined" -Foreground Red & exit /b 1
-
-set results=regression_tests_results.zip
-set WNX_TEST_ROOT=%temp%\test_i_%random%
-mkdir %WNX_TEST_ROOT%
-::net stop checkmkservice
-
-powershell Write-Host "Windows agent Regression Tests are starting in %WNX_TEST_ROOT%" -Foreground Cyan
-
-:: Prepare test folder for testing
-mkdir %WNX_TEST_ROOT%\test\root\plugins
-mkdir %WNX_TEST_ROOT%\test\data
-copy %arte%\check_mk_agent.exe %WNX_TEST_ROOT%\check_mk_agent.exe >nul
-copy %arte%\check_mk.yml %WNX_TEST_ROOT%\test\root\check_mk.yml >nul
-powershell Remove-NetFirewallRule -DisplayName "AllowRegression" 2>nul
-powershell New-NetFirewallRule -DisplayName "AllowRegression" -Direction Inbound -Program %WNX_TEST_ROOT%\check_mk_agent.exe -RemoteAddress LocalSubnet -Action Allow >nul
-xcopy ..\windows\plugins\*.* %WNX_TEST_ROOT%\test\root\plugins /D /Y> nul
-:: Testing
-cd tests\regression
-:: tests wait for this env variable
-set WNX_REGRESSION_BASE_DIR=%WNX_TEST_ROOT%
-set WNX_INTEGRATION_BASE_DIR=
-py -3 -m pytest %* || set failed=1
-powershell Remove-NetFirewallRule -DisplayName "AllowRegression" >nul
-
-call :zip_results
-cd %cur_dir%
-if "%failed%" == "1" (
-powershell Write-Host "Regression Test Failed" -Foreground Red
-exit /b 81
-)
-powershell Write-Host "Regression Test Success" -Foreground Green
-exit /b 0
-
-
-:zip_results
-del %arte%\%results% 2> nul
-pushd %WNX_TEST_ROOT% && ( call :zip_and_remove & popd )
-exit /b
-
-:zip_and_remove
-7z a -r -y -tzip %arte%\%results% >nul
-rmdir /s/q "%WNX_TEST_ROOT%" 2>nul
-exit /b
diff --git a/agents/wnx/scripts/cargo_build_robotmk.cmd b/agents/wnx/scripts/cargo_build_robotmk.cmd
index 41c6c6fc81a..2b8d3c0951a 100644
--- a/agents/wnx/scripts/cargo_build_robotmk.cmd
+++ b/agents/wnx/scripts/cargo_build_robotmk.cmd
@@ -29,9 +29,9 @@ set target=i686-pc-windows-msvc
set exe_name=robotmk_ext.exe
set exe=target\%target%\release\%exe_name%
rustup toolchain list
-rustup default 1.66.0
+rustup default 1.72.0
rustup target add %target%
-rustup update 1.66.0
+rustup update 1.72.0
:: Build
powershell Write-Host "Building Rust executables" -Foreground White
diff --git a/agents/wnx/scripts/compile_msi.cmd b/agents/wnx/scripts/deprecated/compile_msi.cmd
similarity index 100%
rename from agents/wnx/scripts/compile_msi.cmd
rename to agents/wnx/scripts/deprecated/compile_msi.cmd
diff --git a/agents/wnx/scripts/link_msi.cmd b/agents/wnx/scripts/deprecated/link_msi.cmd
similarity index 100%
rename from agents/wnx/scripts/link_msi.cmd
rename to agents/wnx/scripts/deprecated/link_msi.cmd
diff --git a/agents/wnx/scripts/readme.txt b/agents/wnx/scripts/deprecated/readme.txt
similarity index 100%
rename from agents/wnx/scripts/readme.txt
rename to agents/wnx/scripts/deprecated/readme.txt
diff --git a/agents/wnx/scripts/detach_usb_token.cmd b/agents/wnx/scripts/detach_usb_token.cmd
deleted file mode 100644
index 8f6191f556f..00000000000
--- a/agents/wnx/scripts/detach_usb_token.cmd
+++ /dev/null
@@ -1,19 +0,0 @@
-@echo off
-::
-:: File to detach some usb token with YK
-
-if "%1" == "" powershell Write-Host "Invalid usbip" -ForegroundColor Red && goto usage
-
-@%1 detach -p 00
-IF %ERRORLEVEL% NEQ 0 powershell Write-Host "Failed to detach" -ForegroundColor Yellow & exit /b 0
-powershell Start-Sleep -Seconds 2
-powershell Write-Host "Detached!" -ForegroundColor Green
-exit /b 0
-
-
-:usage
-powershell Write-Host "Usage:" -ForegroundColor DarkGreen
-powershell Write-Host "detach_usb_token.cmd usb_ip_path" -ForegroundColor DarkGreen
-powershell Write-Host "Example:" -ForegroundColor DarkGreen
-powershell Write-Host " detach_usb_token.cmd c:\common\usb_ip\usbip.exe" -ForegroundColor DarkGreen
-:exit
diff --git a/agents/wnx/scripts/os_setup/wc1.cmd b/agents/wnx/scripts/os_setup/wc1.cmd
index 4ed984b33bf..0d237f740f3 100644
--- a/agents/wnx/scripts/os_setup/wc1.cmd
+++ b/agents/wnx/scripts/os_setup/wc1.cmd
@@ -13,7 +13,7 @@ powershell Set-ItemProperty -Path "HKLM:\System\CurrentControlSet\Control\FileSy
rem python, version is latest
set pkg=python
-set version=--version 3.10.2
+set version=--version 3.12.1
call :process
rem make, version is latest
diff --git a/agents/wnx/scripts/sign_code.cmd b/agents/wnx/scripts/sign_code.cmd
index 69d1dbc5a90..97f51abc41b 100644
--- a/agents/wnx/scripts/sign_code.cmd
+++ b/agents/wnx/scripts/sign_code.cmd
@@ -1,6 +1,7 @@
@echo off
::
:: File to sign code using YK token
+::
if "%1" == "" powershell Write-Host "Invalid binary to sign" -ForegroundColor Red && goto usage
@@ -9,20 +10,12 @@ set pin=469673
set cert=7b97b15df65358623576584b7aafbe04d6668a0e
copy /Y %1 %1.%ext%
c:\common\scsigntool.exe -pin %pin% sign /sha1 %cert% /tr http://timestamp.sectigo.com /td sha256 /fd sha256 %1
-
-:: Create hash
-if "%2" == "" (
-echo Hashing is not required
-) else (
-echo Saving hash of %1 into %2
-powershell -File .\scripts\add_hash_line.ps1 %1 %2
-)
exit /b 0
:usage
powershell Write-Host "Usage:" -ForegroundColor DarkGreen
-powershell Write-Host "sign_code.cmd file [hash_file]" -ForegroundColor DarkGreen
+powershell Write-Host "sign_code.cmd file" -ForegroundColor DarkGreen
powershell Write-Host "Example:" -ForegroundColor DarkGreen
powershell Write-Host " sign_code.cmd check_mk_agent.exe check_mk_agent.exe.hash" -ForegroundColor DarkGreen
:exit
diff --git a/agents/wnx/scripts/unpack_packs.cmd b/agents/wnx/scripts/unpack_packs.cmd
deleted file mode 100644
index 439c6c26544..00000000000
--- a/agents/wnx/scripts/unpack_packs.cmd
+++ /dev/null
@@ -1,12 +0,0 @@
-@echo off
-rem check and prepare output folder for smooth deploy and testing
-if not "%VS_DEPLOY%" == "YES" goto end
-if not exist %REMOTE_MACHINE% echo Error, REMOTE_MACHINE not defined && goto unpack_all
-if not exist %LOCAL_IMAGES_EXE% echo Error, LOCAL_IMAGES_EXE not defined && goto unpack_all
-if not exist %LOCAL_IMAGES_PDB% echo Error, LOCAL_IMAGES_PDB not defined && goto unpack_all
-if not exist %REMOTE_MACHINE%\bin mkdir %REMOTE_MACHINE%\bin
-if not exist %REMOTE_MACHINE%\utils mkdir %REMOTE_MACHINE%\utils
-if not exist %REMOTE_MACHINE%\plugins mkdir %REMOTE_MACHINE%\plugins
-if not exist %REMOTE_MACHINE%\local mkdir %REMOTE_MACHINE%\local
-if not exist %REMOTE_MACHINE%\watest mkdir %REMOTE_MACHINE%\watest
-:end
diff --git a/agents/wnx/sign_windows_exe.cmd b/agents/wnx/sign_windows_exe.cmd
deleted file mode 100644
index 4db265c86b7..00000000000
--- a/agents/wnx/sign_windows_exe.cmd
+++ /dev/null
@@ -1,85 +0,0 @@
-:: Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-:: This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-:: conditions defined in the file COPYING, which is part of this source code package.
-
-:: Script to sign arbitrary windows exe with provided pfx file and password
-:: Usage: sign_windows_exe.cmd pfx_file password exe_file
-::
-:: pfx file must be obtained with the help of the Certificate provider(for example Comodo)
-::
-:: To obtain certificate:
-:: 1. Buy certificate
-:: 2. Identify company and self. This is long and complicated process.
-:: 3. Request Certificate: Supply CSR - use Internet Explorer to generate it
-:: 4. After successful verification you will get a mail with a link.
-:: 5. Use Internet Explorer to get install certificate using link from p.4.
-:: 6. Export obtained certificate with private key.
-:: 7. Deliver exported certificate(pfx file) and password to the CI team.
-:: 8. Use this script to sign exe.
-:: *) Read documentation carefully and do not hesitate to ping tech support.
-::
-
-@Echo Off & Setlocal DisableDelayedExpansion
-::: do not need this
-::: mode 170,40
-
-::: { Creates variable /AE = Ascii-27 escape code.
-::: - %/AE% can be used with and without DelayedExpansion.
- For /F %%a in ('echo prompt $E ^| cmd')do set "/AE=%%a"
-::: }
-
-(Set \n=^^^
-%=Newline DNR=%
-)
-::: / Color Print Macro -
-::: Usage: %Print%{RRR;GGG;BBB}text to output
-::: \n at the end of the string echo's a new line
-::: valid range for RGB values: 0 - 255
- Set Print=For %%n in (1 2)Do If %%n==2 (%\n%
- For /F "Delims=" %%G in ("!Args!")Do (%\n%
- For /F "Tokens=1 Delims={}" %%i in ("%%G")Do Set "Output=%/AE%[0m%/AE%[38;2;%%im!Args:{%%~i}=!"%\n%
- ^< Nul set /P "=!Output:\n=!%/AE%[0m"%\n%
- If "!Output:~-2!"=="\n" (Echo/^&Endlocal)Else (Endlocal)%\n%
- )%\n%
- )Else Setlocal EnableDelayedExpansion ^& Set Args=
-::: / Erase Macro -
-::: Usage: %Erase%{string of the length to be erased}
- Set Erase=For %%n in (1 2)Do If %%n==2 (%\n%
- For /F "Tokens=1 Delims={}" %%G in ("!Args!")Do (%\n%
- Set "Nul=!Args:{%%G}=%%G!"%\n%
- For /L %%# in (0 1 100) Do (If Not "!Nul:~%%#,1!"=="" ^< Nul set /P "=%/AE%[D%/AE%[K")%\n%
- )%\n%
- Endlocal%\n%
- )Else Setlocal EnableDelayedExpansion ^& Set Args=
-
-if "%3" == "" (
-%Print%{255;0;0}Invalid parameters\n
-%Print%{0;255;0}Usage:
-%Print%{255;255;255}sign_windows_exe.cmd pfx_file password exe_file\n
-exit /b 1
-)
-
-%Print%{255;255;255}Signing %3 using key %1\n
-set ext=raw
-set loc_1="C:\Program Files (x86)\Microsoft SDKs\ClickOnce\SignTool\signtool.exe"
-set loc_2="C:\Program Files (x86)\Windows Kits\10\App Certification Kit\signtool.exe"
-if exist %loc_1% (
-copy %3 %3.%ext%
-%loc_1% sign /tr http://timestamp.digicert.com /fd sha256 /td sha256 /f %1 /p %2 %3
-) else (
-if exist %loc_2% (
-copy %3 %3.%ext%
-%loc_2% sign /tr http://timestamp.digicert.com /fd sha256 /td sha256 /f %1 /p %2 %3
-) else (
-%Print%{255;0;0}Not found signtool.exe\n
-)
-)
-:: Create hash
-if "%4" == "" (
-%Print%{255;255;255}Hashing is not required\n
-) else (
-echo Saving hash of %3 into %4
-powershell -File .\scripts\add_hash_line.ps1 %3 %4
-)
-
-exit /b 0
\ No newline at end of file
diff --git a/agents/wnx/src/check_mk_service.vcxproj b/agents/wnx/src/check_mk_service.vcxproj
index 2b2dba7df51..cad9e6aba27 100644
--- a/agents/wnx/src/check_mk_service.vcxproj
+++ b/agents/wnx/src/check_mk_service.vcxproj
@@ -23,7 +23,7 @@
{FF2EBEE8-1220-462C-9F01-1E1163F949A3}
Win32Proj
checkmkservice
- fmt-9.0.0
+ fmt-10.2.1
yaml-cpp.9a362420
10.0
@@ -147,10 +147,12 @@
- echo "prebuild"
+
+
- pause 'aaaaaa'
+
+
$(SolutionDir)\check_mk.manifest %(AdditionalManifestFiles)
@@ -185,10 +187,12 @@
- echo "prebuild"
+
+
- pause 'aaaaaa'
+
+
$(SolutionDir)\check_mk.manifest %(AdditionalManifestFiles)
@@ -222,14 +226,16 @@
../build/engine\$(Platform)\$(Configuration)\engine.lib;userenv.lib;%(AdditionalDependencies)
- echo "prebuild"
+
+
- pause 'aaaaaa'
+
+
$(SolutionDir)\check_mk.manifest %(AdditionalManifestFiles)
@@ -263,14 +269,16 @@
../build/engine\$(Platform)\$(Configuration)\engine.lib;userenv.lib;%(AdditionalDependencies)
- echo "prebuild"
+
+
- pause 'aaaaaa'
+
+
$(SolutionDir)\check_mk.manifest %(AdditionalManifestFiles)
diff --git a/agents/wnx/src/common/wtools.cpp b/agents/wnx/src/common/wtools.cpp
index ea9a7efacf6..92a72af3eda 100644
--- a/agents/wnx/src/common/wtools.cpp
+++ b/agents/wnx/src/common/wtools.cpp
@@ -8,7 +8,6 @@
#include
#include
#include
-#include
#include
#include
@@ -31,9 +30,11 @@
#pragma comment(lib, "iphlpapi.lib")
namespace fs = std::filesystem;
+namespace rs = std::ranges;
using namespace std::chrono_literals;
namespace wtools {
+
bool ChangeAccessRights(
const wchar_t *object_name, // name of object
SE_OBJECT_TYPE object_type, // type of object
@@ -140,7 +141,7 @@ int KillProcessesByDir(const fs::path &dir) noexcept {
const auto pid = entry.th32ProcessID;
const auto exe = wtools::GetProcessPath(pid);
if (exe.length() < minimum_path_len) {
- return true; // skip short path
+ return ScanAction::advance;
}
fs::path p{exe};
@@ -151,7 +152,7 @@ int KillProcessesByDir(const fs::path &dir) noexcept {
KillProcess(pid, 99);
killed_count++;
}
- return true;
+ return ScanAction::advance;
});
return killed_count;
@@ -166,7 +167,7 @@ void KillProcessesByFullPath(const fs::path &path) noexcept {
XLOG::d.i("Killing process '{}'", exe);
KillProcess(pid, 99);
}
- return true;
+ return ScanAction::advance;
});
}
@@ -185,11 +186,11 @@ void KillProcessesByPathEndAndPid(const fs::path &path_end,
uint32_t need_pid) noexcept {
ScanProcessList([&](const PROCESSENTRY32W &entry) {
if (!IsSameProcess(entry, path_end, need_pid)) {
- return true;
+ return ScanAction::advance;
}
XLOG::d.i("Killing process '{}' with pid {}", path_end, need_pid);
KillProcess(need_pid, 99);
- return false;
+ return ScanAction::terminate;
});
}
@@ -198,10 +199,10 @@ bool FindProcessByPathEndAndPid(const fs::path &path_end,
bool found{false};
ScanProcessList([&](const PROCESSENTRY32W &entry) {
if (!IsSameProcess(entry, path_end, need_pid)) {
- return true;
+ return ScanAction::advance;
}
found = true;
- return false;
+ return ScanAction::terminate;
});
return found;
@@ -616,8 +617,8 @@ void ServiceController::setServiceStatus(DWORD current_state,
: check_point++;
const auto ret = ::SetServiceStatus(status_handle_, &status_);
- XLOG::l("Setting state {} result {}", current_state,
- ret != 0 ? 0 : GetLastError());
+ XLOG::l.i("Setting service state {} result {}", current_state,
+ ret != 0 ? 0 : GetLastError());
}
void ServiceController::initStatus(bool can_stop, bool can_shutdown,
@@ -2180,8 +2181,9 @@ std::string WmiPostProcess(const std::string &in, StatusColumn status_column,
/// returns false on system failure
// based on ToolHelp api family
// normally require elevation
-// if op returns false, scan will be stopped(this is only optimization)
-bool ScanProcessList(const std::function<bool(const PROCESSENTRY32 &)> &op) {
+// the scan stops early when action returns ScanAction::terminate (optimization only)
+bool ScanProcessList(
+ const std::function &action) {
auto *snapshot = CreateToolhelp32Snapshot(TH32CS_SNAPALL, NULL);
if (snapshot == nullptr) {
return false;
@@ -2189,16 +2191,17 @@ bool ScanProcessList(const std::function &op) {
ON_OUT_OF_SCOPE(::CloseHandle(snapshot));
- auto current_process_id = ::GetCurrentProcessId();
- // scan...
+ const auto current_process_id = ::GetCurrentProcessId();
PROCESSENTRY32 entry32 = {};
entry32.dwSize = sizeof entry32;
auto result = ::Process32First(snapshot, &entry32);
while (result != FALSE) {
- if (entry32.th32ProcessID != current_process_id && !op(entry32)) {
- return true; // break on false returned
+ if (entry32.th32ProcessID == current_process_id ||
+ action(entry32) == ScanAction::advance) {
+ result = ::Process32Next(snapshot, &entry32);
+ } else {
+ return true;
}
- result = ::Process32Next(snapshot, &entry32);
}
return true;
@@ -2210,14 +2213,13 @@ bool KillProcessFully(const std::wstring &process_name,
 std::vector<DWORD> processes_to_kill;
std::wstring name{process_name};
cma::tools::WideLower(name);
- ScanProcessList(
- [&processes_to_kill, name](const PROCESSENTRY32 &entry) -> bool {
- std::wstring incoming_name = entry.szExeFile;
- cma::tools::WideLower(incoming_name);
- if (name == incoming_name)
- processes_to_kill.push_back(entry.th32ProcessID);
- return true;
- });
+ ScanProcessList([&processes_to_kill, name](const PROCESSENTRY32 &entry) {
+ std::wstring incoming_name = entry.szExeFile;
+ cma::tools::WideLower(incoming_name);
+ if (name == incoming_name)
+ processes_to_kill.push_back(entry.th32ProcessID);
+ return ScanAction::advance;
+ });
for (auto proc_id : processes_to_kill) {
KillProcessTree(proc_id);
@@ -2238,7 +2240,7 @@ int FindProcess(std::wstring_view process_name) noexcept {
if (name == incoming_name) {
count++;
}
- return true;
+ return ScanAction::advance;
});
return count;
@@ -2316,8 +2318,8 @@ size_t GetCommitCharge(uint32_t pid) noexcept {
::OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ, FALSE, pid)};
if (!h) {
- XLOG::l("Can't open process with pid [{}], error [{}]", pid,
- ::GetLastError());
+ XLOG::t.i("Can't open process with pid [{}], error [{}]", pid,
+ ::GetLastError());
return 0;
}
@@ -2663,8 +2665,16 @@ std::wstring GenerateCmaUserNameInGroup(std::wstring_view group,
return {};
}
- return prefix.empty() ? std::wstring{}
- : std::wstring{prefix} + group.data();
+ auto group_name = std::wstring{group};
+ rs::replace(group_name, ' ', '_');
+ auto name =
+ prefix.empty() ? std::wstring{} : std::wstring{prefix} + group_name;
+ // Windows restricts user (SAM account) names to 20 characters
+ if (name.size() > 20) {
+ XLOG::l("User name '{}' is too long", ToUtf8(name));
+ name = name.substr(0, 20);
+ }
+ return name;
}
std::wstring GenerateCmaUserNameInGroup(std::wstring_view group) noexcept {
@@ -2685,29 +2695,41 @@ InternalUser CreateCmaUserInGroup(const std::wstring &group_name,
auto pwd = GenerateRandomString(12);
- uc::LdapControl primary_dc;
- const auto ret = primary_dc.userDel(name);
- XLOG::t(ret == uc::Status::success ? "delete success" : "delete fail");
+ const uc::LdapControl primary_dc;
const auto add_user_status = primary_dc.userAdd(name, pwd);
- if (add_user_status != uc::Status::success) {
- XLOG::l("Can't add user '{}'", ToUtf8(name));
- return {};
+ switch (add_user_status) {
+ case uc::Status::success:
+ break;
+ case uc::Status::exists:
+ XLOG::d.i("User '{}' already exists, updating credentials",
+ ToUtf8(name));
+ if (primary_dc.changeUserPassword(name, pwd) !=
+ uc::Status::success) {
+ XLOG::l("Failed to change password for user '{}'",
+ ToUtf8(name));
+ return {};
+ }
+ return {name, pwd};
+ case uc::Status::error:
+ case uc::Status::no_domain_service:
+ case uc::Status::absent:
+ XLOG::l("Can't add user '{}' status = {}", ToUtf8(name),
+ static_cast<int>(add_user_status));
+ return {};
}
- if (primary_dc.localGroupAddMembers(group_name, name) !=
+ if (primary_dc.localGroupAddMembers(group_name, name) ==
uc::Status::error) {
- return {name, pwd};
- }
+ XLOG::l("Can't add user '{}' to group_name '{}'", ToUtf8(name),
+ ToUtf8(group_name));
+ if (add_user_status == uc::Status::success) {
+ const auto del_ret = primary_dc.userDel(name);
+ XLOG::t("recover delete state {}", static_cast(del_ret));
+ }
- XLOG::l("Can't add user '{}' to group_name '{}'", ToUtf8(name),
- ToUtf8(group_name));
- if (add_user_status == uc::Status::success) {
- const auto primary_ret = primary_dc.userDel(name);
- XLOG::t(primary_ret == uc::Status::success ? "delete success"
- : "delete faid");
+ return {};
}
-
- return {};
+ return {name, pwd};
}
bool RemoveCmaUser(const std::wstring &user_name) noexcept {
@@ -2721,14 +2743,19 @@ void ProtectPathFromUserWrite(const fs::path &path,
// "programdata/checkmk" we must remove inherited write rights for
// Users in checkmk root data folder.
- constexpr std::wstring_view command_templates[] = {
- L"icacls \"{}\" /inheritance:d /c", // disable inheritance
- L"icacls \"{}\" /remove:g *S-1-5-32-545 /c", // remove all user rights
- L"icacls \"{}\" /grant:r *S-1-5-32-545:(OI)(CI)(RX) /c"}; // read/exec
+ // disable inheritance
+ commands.emplace_back(
+ fmt::format(L"icacls \"{}\" /inheritance:d /c", path.wstring()));
+ // remove all user rights
+ commands.emplace_back(fmt::format(
+ L"icacls \"{}\" /remove:g *S-1-5-32-545 /c", path.wstring()));
+ // read/exec
+ commands.emplace_back(
+ fmt::format(L"icacls \"{}\" /grant:r *S-1-5-32-545:(OI)(CI)(RX) /c",
+ path.wstring()));
- for (auto const t : command_templates) {
- auto cmd = fmt::format(std::wstring{t}, path.wstring());
- commands.emplace_back(cmd);
- }
XLOG::l.i("Protect path from User write '{}'", path);
}
@@ -2739,53 +2766,53 @@ void ProtectFileFromUserWrite(const fs::path &path,
// folder "programdata/checkmk" we must remove inherited write rights
// for Users in checkmk root data folder.
- constexpr std::wstring_view command_templates[] = {
- L"icacls \"{}\" /inheritance:d /c", // disable inheritance
- L"icacls \"{}\" /remove:g *S-1-5-32-545 /c", // remove all user
- // rights
- L"icacls \"{}\" /grant:r *S-1-5-32-545:(RX) /c"}; // read/exec
+ // disable inheritance
+ commands.emplace_back(
+ fmt::format(L"icacls \"{}\" /inheritance:d /c", path.wstring()));
+ // remove all user rights
+ commands.emplace_back(fmt::format(
+ L"icacls \"{}\" /remove:g *S-1-5-32-545 /c", path.wstring()));
+ // read/exec
+ commands.emplace_back(fmt::format(
+ L"icacls \"{}\" /grant:r *S-1-5-32-545:(RX) /c", path.wstring()));
- for (auto const t : command_templates) {
- auto cmd = fmt::format(t.data(), path.wstring());
- commands.emplace_back(cmd);
- }
XLOG::l.i("Protect file from User write '{}'", path);
}
void ProtectPathFromUserAccess(const fs::path &entry,
std::vector &commands) {
// CONTEXT: some files must be protected from the user fully
- constexpr std::wstring_view command_templates[] = {
- L"icacls \"{}\" /inheritance:d /c", // disable inheritance
- L"icacls \"{}\" /remove:g *S-1-5-32-545 /c" // remove all user
- // rights
- };
+ // disable inheritance
+ commands.emplace_back(
+ fmt::format(L"icacls \"{}\" /inheritance:d /c", entry.wstring()));
+ // remove all user rights
+ commands.emplace_back(fmt::format(
+ L"icacls \"{}\" /remove:g *S-1-5-32-545 /c", entry.wstring()));
- for (auto const t : command_templates) {
- auto cmd = fmt::format(t.data(), entry.wstring());
- commands.emplace_back(cmd);
- }
XLOG::l.i("Protect path from User access '{}'", entry);
}
namespace {
-fs::path MakeCmdFileInTemp(std::wstring_view name,
+fs::path MakeCmdFileInTemp(std::string_view sub_dir, std::wstring_view name,
const std::vector &commands) {
try {
auto pid = ::GetCurrentProcessId();
static int counter = 0;
counter++;
+ const auto dir = MakeSafeTempFolder(sub_dir);
+ if (dir.has_value()) {
+ auto tmp_file =
+ *dir / fmt::format(L"cmk_{}_{}_{}.cmd", name, pid, counter);
+ std::ofstream ofs(tmp_file, std::ios::trunc);
+ for (const auto &c : commands) {
+ ofs << ToUtf8(c) << "\n";
+ }
- std::error_code ec;
- auto temp_folder = fs::temp_directory_path(ec);
- auto tmp_file =
- temp_folder / fmt::format(L"cmk_{}_{}_{}.cmd", name, pid, counter);
- std::ofstream ofs(tmp_file, std::ios::trunc);
- for (const auto &c : commands) {
- ofs << ToUtf8(c) << "\n";
+ return tmp_file;
+ } else {
+ XLOG::l("Can't create file");
+ return {};
}
-
- return tmp_file;
} catch (const std::exception &e) {
XLOG::l("Exception creating file '{}'", e.what());
return {};
@@ -2793,6 +2820,185 @@ fs::path MakeCmdFileInTemp(std::wstring_view name,
}
} // namespace
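+// RAII wrappers around the Win32 security API (SID, ACL, security descriptor),
+// used by MakeSafeTempFolder below to build a directory with a restricted DACL.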
+class Sid {
+public:
+ enum class Type { admin, everyone };
+ Sid(const Sid &) = delete;
+
+ Sid(Sid &&rhs) {
+ sid_ = rhs.sid_;
+ type_ = rhs.type_;
+ rhs.sid_ = nullptr;
+ }
+
+ Sid &operator=(const Sid &) = delete;
+ Sid &operator=(Sid &&) = delete;
+ explicit Sid(Type type) : type_{type} {
+ XLOG::l.i("sid");
+ switch (type_) {
+ case Type::admin: {
+ SID_IDENTIFIER_AUTHORITY SIDAuthNT = SECURITY_NT_AUTHORITY;
+ AllocateAndInitializeSid(
+ &SIDAuthNT, 2, SECURITY_BUILTIN_DOMAIN_RID,
+ DOMAIN_ALIAS_RID_ADMINS, 0, 0, 0, 0, 0, 0, &sid_);
+ break;
+ }
+ case Type::everyone: {
+ SID_IDENTIFIER_AUTHORITY SIDAuthWorld =
+ SECURITY_WORLD_SID_AUTHORITY;
+ AllocateAndInitializeSid(&SIDAuthWorld, 1, SECURITY_WORLD_RID,
+ 0, 0, 0, 0, 0, 0, 0, &sid_);
+ break;
+ }
+ }
+ }
+ ~Sid() {
+ if (sid_ != nullptr) {
+ FreeSid(sid_);
+ }
+ }
+
+ PSID sid() const { return sid_; }
+ TRUSTEE_TYPE trusteeType() const {
+ switch (type_) {
+ case Type::admin:
+ return TRUSTEE_IS_GROUP;
+ case Type::everyone:
+ return TRUSTEE_IS_WELL_KNOWN_GROUP;
+ }
+ // unreachable
+ return TRUSTEE_IS_WELL_KNOWN_GROUP;
+ }
+
+private:
+ PSID sid_{nullptr};
+ Type type_;
+};
+
+class Acl {
+ class Store {
+ public:
+ Store(std::vector<std::pair<Sid::Type, DWORD>> input) {
+ for (const auto &[type, permission] : input) {
+ auto s = Sid{type};
+ sids_.emplace_back(std::move(s));
+ eas_.emplace_back(EXPLICIT_ACCESS{
+ .grfAccessPermissions = permission,
+ .grfAccessMode = SET_ACCESS,
+ .grfInheritance = NO_INHERITANCE,
+ .Trustee = {
+ .TrusteeForm = TRUSTEE_IS_SID,
+ .TrusteeType = sids_.back().trusteeType(),
+ .ptstrName = static_cast<LPWSTR>(sids_.back().sid()),
+ }});
+ }
+ }
+ std::vector &eas() { return eas_; }
+
+ private:
+ std::vector<EXPLICIT_ACCESS> eas_;
+ std::vector<Sid> sids_;
+ };
+
+public:
+ Acl(const std::vector> &input)
+ : store_{input} {
+ std::vector &eas = store_.eas();
+ if (SetEntriesInAcl(static_cast<ULONG>(eas.size()), eas.data(), nullptr,
+ &acl_) != ERROR_SUCCESS) {
+ return;
+ }
+ }
+ Acl(const Acl &) = delete;
+ Acl &operator=(const Acl &) = delete;
+
+ PACL acl() const { return acl_; }
+
+ ~Acl() {
+ if (acl_ != nullptr) {
+ LocalFree(acl_);
+ }
+ }
+
+private:
+ Store store_;
+ PACL acl_{nullptr};
+};
+
+class Sd {
+public:
+ Sd(const Acl &acl) {
+ sd_ = (PSECURITY_DESCRIPTOR)LocalAlloc(LPTR,
+ SECURITY_DESCRIPTOR_MIN_LENGTH);
+ if (sd_ == nullptr) {
+ return;
+ }
+ if (!InitializeSecurityDescriptor(sd_, SECURITY_DESCRIPTOR_REVISION)) {
+ return;
+ }
+ // Add the ACL to the security descriptor.
+ if (!SetSecurityDescriptorDacl(sd_,
+ TRUE, // bDaclPresent flag
+ acl.acl(),
+ FALSE)) // not a default DACL
+ {
+ XLOG::l("Failed to set acl");
+ return;
+ }
+ }
+ Sd(const Sd &) = delete;
+ Sd &operator=(const Sd &) = delete;
+
+ PSECURITY_DESCRIPTOR sd() const { return sd_; }
+ ~Sd() {
+ if (sd_ != nullptr) {
+ LocalFree(sd_);
+ }
+ }
+
+private:
+ PSECURITY_DESCRIPTOR sd_{nullptr};
+};
+
+class SecurityAttribute {
+public:
+ explicit SecurityAttribute(
+ const std::vector> &input)
+ : acl_{input}, sd_{acl_} {
+ sa_.nLength = sizeof(SECURITY_ATTRIBUTES);
+ sa_.lpSecurityDescriptor = sd_.sd();
+ sa_.bInheritHandle = FALSE;
+ }
+
+ SECURITY_ATTRIBUTES *securityAttributes() {
+ if (sa_.lpSecurityDescriptor == nullptr) {
+ return nullptr;
+ }
+ return &sa_;
+ }
+
+private:
+ Acl acl_;
+ Sd sd_;
+ SECURITY_ATTRIBUTES sa_;
+};
+
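+/// Creates %TEMP%/<sub_dir> with an explicit DACL: Everyone is granted no
+/// access rights while the Administrators group gets GENERIC_ALL.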
+std::optional<fs::path> MakeSafeTempFolder(std::string_view sub_dir) {
+ SecurityAttribute sa{
+ {{Sid::Type::everyone, 0}, {Sid::Type::admin, GENERIC_ALL}}};
+ std::error_code ec;
+ fs::remove_all(fs::temp_directory_path(ec) / sub_dir, ec);
+ auto temp_folder = fs::temp_directory_path(ec) / sub_dir;
+ const auto ret =
+ CreateDirectoryW(temp_folder.wstring().data(), sa.securityAttributes());
+ if (!ret) {
+ XLOG::l("Failed to create temp folder '{}' {}", temp_folder,
+ GetLastError());
+ return {};
+ }
+ return temp_folder;
+}
+
fs::path ExecuteCommands(std::wstring_view name,
const std::vector &commands,
ExecuteMode mode) {
@@ -2802,7 +3008,7 @@ fs::path ExecuteCommands(std::wstring_view name,
return {};
}
- auto to_exec = MakeCmdFileInTemp(name, commands);
+ auto to_exec = MakeCmdFileInTemp(safe_temp_sub_dir, name, commands);
if (!to_exec.empty()) {
auto pid = cma::tools::RunStdCommand(to_exec.wstring(),
mode == ExecuteMode::sync
@@ -3460,6 +3666,11 @@ InternalUser InternalUsersDb::obtainUser(std::wstring_view group) {
}
void InternalUsersDb::killAll() {
+ if (cma::GetModus() == cma::Modus::service) {
+ XLOG::d.i("service doesn't delete own users");
+ return;
+ }
+
std::lock_guard lk(users_lock_);
for (const auto &iu : users_ | std::views::values) {
RemoveCmaUser(iu.first);
@@ -3497,6 +3708,122 @@ inline std::string ToUtf8(const std::wstring_view src,
return str;
}
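+/// IP_ADAPTER_ADDRESSES reports an unknown link speed as the all-ones value;
+/// map it to std::nullopt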
+std::optional<uint64_t> _to_speed(uint64_t speed) {
+ return speed == 0xFFFF'FFFF'FFFF'FFFF ? std::nullopt : std::optional{speed};
+}
+
+AdapterInfo ToAdapterInfo(const IP_ADAPTER_ADDRESSES &a) {
+ const auto address = std::accumulate(
+ std::next(std::begin(a.PhysicalAddress)), std::end(a.PhysicalAddress),
+ fmt::format("{:02X}", a.PhysicalAddress[0]),
+ [](std::string_view a, BYTE b) -> std::string {
+ return fmt::format("{}:{:02X}", a, b);
+ });
+ return AdapterInfo{
+ .guid{a.AdapterName},
+ .friendly_name{a.FriendlyName},
+ .description{a.Description},
+ .if_type{a.IfType},
+ .receive_speed{_to_speed(a.ReceiveLinkSpeed)},
+ .transmit_speed{_to_speed(a.TransmitLinkSpeed)},
+ .oper_status{a.OperStatus},
+ .mac_address{address},
+ };
+}
+
+using AdapterInfoStore = std::unordered_map<std::wstring, AdapterInfo>;
+
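+/// The perf counter API mangles adapter names when forming instance names:
+/// '(' -> '[', ')' -> ']', and '\', '/', '#' -> '_'. Apply the same mapping so
+/// adapters can be looked up by their perf counter instance name.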
+std::wstring MangleNameForPerfCounter(std::wstring_view name) noexcept {
+ std::wstring output{name};
+ for (auto &c : output) {
+ switch (c) {
+ case L'(':
+ c = L'[';
+ break;
+ case L')':
+ c = L']';
+ break;
+ case L'\\':
+ case L'/':
+ case L'#':
+ c = L'_';
+ break;
+ }
+ }
+ return output;
+}
+
+AdapterInfoStore GetAdapterInfoStore() {
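+ // single GetAdaptersAddresses call with a generously sized fixed buffer
+ // instead of the usual retry-on-ERROR_BUFFER_OVERFLOW loop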
+ constexpr auto max_interfaces = 500;
+ const auto buffer =
+ std::make_unique<IP_ADAPTER_ADDRESSES[]>(max_interfaces);
+
+ AdapterInfoStore store;
+ ULONG length = max_interfaces * sizeof(IP_ADAPTER_ADDRESSES);
+
+ if (const auto error =
+ GetAdaptersAddresses(AF_UNSPEC, GAA_FLAG_INCLUDE_ALL_INTERFACES,
+ nullptr, buffer.get(), &length);
+ error != ERROR_SUCCESS) {
+ return store;
+ }
+
+ const auto head = buffer.get();
+ for (auto cur = head; cur; cur = cur->Next) {
+ store[MangleNameForPerfCounter(cur->Description)] = ToAdapterInfo(*cur);
+ }
+ return store;
+}
+
+namespace {
+/// return string vector with Name and Version
+/// on error empty vector
+std::vector<std::wstring> GetOsRawInfo() {
+ wtools::WmiWrapper wmi;
+ wmi.open();
+ wmi.connect(L"ROOT\\CIMV2");
+ if (!wmi.impersonate()) {
+ XLOG::l("Failed to impersonate");
+ }
+ auto [result, status] = wmi.queryTable({L"Name", L"Version"},
+ L"Win32_OperatingSystem", L"\t", 5);
+ if (status != WmiStatus::ok) {
+ XLOG::l("Failed to query Win32_OperatingSystem");
+ return {};
+ }
+ const auto rows = cma::tools::SplitString(result, L"\n");
+ if (rows.size() != 2) {
+ XLOG::l("Query Win32_OperatingSystem returns bad data {}",
+ wtools::ToUtf8(result));
+ return {};
+ }
+ auto values = cma::tools::SplitString(rows[1], L"\t");
+ if (values.size() != 2) {
+ XLOG::l("Query Win32_OperatingSystem returns bad data {}",
+ wtools::ToUtf8(result));
+ return {};
+ }
+ const auto name_and_dirs = cma::tools::SplitString(values[0], L"|");
+
+ // contains smth like:
+ // Microsoft Windows 10 Pro|C:\Windows|\Device\Harddisk0\Partition3
+ values[0] = name_and_dirs[0];
+
+ return values;
+}
+} // namespace
+
+std::optional<OsInfo> GetOsInfo() {
+ static auto os_info = GetOsRawInfo();
+ if (os_info.empty()) {
+ os_info = GetOsRawInfo();
+ }
+ if (os_info.empty()) {
+ return {};
+ }
+ return OsInfo{.name = os_info[0], .version = os_info[1]};
+}
+
} // namespace wtools
// verified code from the legacy client
diff --git a/agents/wnx/src/engine/cma_core.cpp b/agents/wnx/src/engine/cma_core.cpp
index 3848b8dc4cc..3327b4b12a7 100644
--- a/agents/wnx/src/engine/cma_core.cpp
+++ b/agents/wnx/src/engine/cma_core.cpp
@@ -1572,20 +1572,20 @@ void PickupAsync0data(int timeout, PluginMap &plugins, std::vector &out,
std::pair, int> RunAsyncPlugins(PluginMap &plugins,
bool start_immediately) {
- std::vector out;
+ std::vector result;
int count = 0;
for (auto &plugin : plugins | std::views::values) {
if (!plugin.async() || !provider::config::IsRunAsync(plugin)) {
continue;
}
- auto ret = plugin.getResultsAsync(start_immediately);
- if (!ret.empty()) {
+ auto data = plugin.getResultsAsync(start_immediately);
+ if (!data.empty()) {
++count;
}
- tools::AddVector(out, ret);
+ tools::AddVector(result, data);
}
- return {out, count};
+ return {result, count};
}
} // namespace cma
@@ -1597,7 +1597,7 @@ std::wstring LocatePs1Proxy() {
return L"";
}
- auto path_to_configure_and_exec =
+ const auto path_to_configure_and_exec =
fs::path{cfg::GetRootInstallDir()} / cfg::files::kConfigureAndExecPs1;
std::error_code ec;
return fs::exists(path_to_configure_and_exec, ec)
@@ -1605,16 +1605,15 @@ std::wstring LocatePs1Proxy() {
: L"";
}
-std::wstring MakePowershellWrapper() noexcept {
+std::wstring MakePowershellWrapper(const fs::path &script) noexcept {
try {
- auto powershell_exe = FindPowershellExe();
+ const auto powershell_exe = FindPowershellExe();
auto proxy = LocatePs1Proxy();
return powershell_exe +
fmt::format(
- L" -NoLogo -NoProfile -ExecutionPolicy Bypass -File{}",
- proxy) +
- L" \"{}\"";
+ L" -NoLogo -NoProfile -ExecutionPolicy Bypass -File{} \"{}\"",
+ proxy, script.wstring());
} catch (const std::exception &e) {
XLOG::l("Exception when finding powershell e:{}", e);
return L"";
diff --git a/agents/wnx/src/engine/engine.vcxproj b/agents/wnx/src/engine/engine.vcxproj
index 6d9a7f558ca..2e6b1c4620d 100644
--- a/agents/wnx/src/engine/engine.vcxproj
+++ b/agents/wnx/src/engine/engine.vcxproj
@@ -24,7 +24,7 @@
Win32Proj
engine
asio-1.24.0-patched
- fmt-9.0.0
+ fmt-10.2.1
simpleini-2af65fc
yaml-cpp.9a362420
10.0
@@ -155,9 +155,8 @@
true
- cd ..\..
-scripts\unpack_packs.cmd
-cd $(ProjectDir)
+
+
@@ -190,9 +189,8 @@ cd $(ProjectDir)
true
- cd ..\..
-scripts\unpack_packs.cmd
-cd $(ProjectDir)
+
+
@@ -605,7 +603,6 @@ cd $(ProjectDir)
-
diff --git a/agents/wnx/src/engine/engine.vcxproj.filters b/agents/wnx/src/engine/engine.vcxproj.filters
index 9241129d795..b6a0b2f9d06 100644
--- a/agents/wnx/src/engine/engine.vcxproj.filters
+++ b/agents/wnx/src/engine/engine.vcxproj.filters
@@ -532,9 +532,6 @@
-
- Source Files
-
data
diff --git a/agents/wnx/src/engine/modules.cpp b/agents/wnx/src/engine/modules.cpp
index 322707879dc..272505814dc 100644
--- a/agents/wnx/src/engine/modules.cpp
+++ b/agents/wnx/src/engine/modules.cpp
@@ -186,8 +186,9 @@ std::wstring Module::buildCommandLineForced(
if (bin().empty()) {
return {};
}
- auto actual_dir = fs::path{GetUserDir()} / dir();
- return fmt::format((actual_dir / exec()).wstring(), script.wstring());
+ const auto actual_dir = fs::path{GetUserDir()} / dir();
+ return fmt::format(fmt::runtime((actual_dir / exec()).wstring()),
+ script.wstring());
} catch (const std::exception &e) {
XLOG::d("can't build valid command line for '{}', exception is '{}'",
name(), e);
diff --git a/agents/wnx/src/engine/providers/agent_plugins.cpp b/agents/wnx/src/engine/providers/agent_plugins.cpp
index cfd28d1adcd..8d55bf80f0c 100644
--- a/agents/wnx/src/engine/providers/agent_plugins.cpp
+++ b/agents/wnx/src/engine/providers/agent_plugins.cpp
@@ -20,27 +20,36 @@ namespace cma::provider {
namespace {
enum class FileType { ps1, cmd, vbs, py, other };
-size_t GetLength(std::ifstream &ifs) {
- ifs.seekg(0, std::ios_base::end);
- const auto length = ifs.tellg();
- ifs.seekg(0, std::ios_base::beg);
- return static_cast<size_t>(length);
+std::optional GetLength(std::ifstream &ifs) {
+ try {
+ ifs.seekg(0, std::ios_base::end);
+ const auto length = ifs.tellg();
+ ifs.seekg(0, std::ios_base::beg);
+ return {static_cast<size_t>(length)};
+ } catch (const std::ios_base::failure &e) {
+ XLOG::d("Can't get length exception '{}'", e.what());
+ return {};
+ }
}
std::string ReadFileToString(const fs::path &file) {
std::string ret;
std::ifstream ifs(file, std::ifstream::in);
- if (ifs) {
- const auto length = GetLength(ifs);
- ret.resize(length);
- ifs.read(ret.data(), static_cast<std::streamsize>(length));
- if (ifs.good() || ifs.eof()) {
- return ret;
- }
- XLOG::d("Can't read '{}'", file.u8string());
- } else {
+ if (!ifs) {
XLOG::d("Can't open '{}'", file.u8string());
+ return {};
+ }
+ const auto length = GetLength(ifs);
+ if (!length.has_value()) {
+ return {};
+ }
+
+ ret.resize(*length);
+ ifs.read(ret.data(), static_cast<std::streamsize>(*length));
+ if (ifs.good() || ifs.eof()) {
+ return ret;
}
+ XLOG::d("Can't read '{}'", file.u8string());
return {};
}
@@ -63,8 +72,8 @@ std::string Marker(FileType file_type) {
std::string FindVersionInfo(const fs::path &file, FileType file_type) {
try {
- std::string ret = ReadFileToString(file);
- auto marker = Marker(file_type);
+ const std::string ret = ReadFileToString(file);
+ const auto marker = Marker(file_type);
if (marker.empty()) {
XLOG::t("This file type '{}' is not supported", file);
return {};
diff --git a/agents/wnx/src/engine/providers/check_mk.cpp b/agents/wnx/src/engine/providers/check_mk.cpp
index 51de6d3dff8..3143937fa41 100644
--- a/agents/wnx/src/engine/providers/check_mk.cpp
+++ b/agents/wnx/src/engine/providers/check_mk.cpp
@@ -20,6 +20,15 @@ using namespace std::string_literals;
namespace cma::provider {
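+/// Returns the local UTC offset formatted with std::put_time's "%z", e.g. "+0100".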
+std::string GetTimezoneOffset() {
+ const auto tm =
+ std::chrono::system_clock::to_time_t(std::chrono::system_clock::now());
+ const auto ret = std::put_time(std::localtime(&tm), "%z");
+ std::stringstream sss;
+ sss << ret;
+ return sss.str();
+}
+
// function to provide format compatibility for monitoring site
// probably, a bit to pedantic
std::string AddressToCheckMkString(std::string_view entry) {
@@ -63,16 +72,28 @@ std::string CheckMk::makeOnlyFrom() {
namespace {
std::string MakeInfo() {
- const std::pair<std::string, std::string> infos[] = {
- {"Version", CHECK_MK_VERSION},
- {"BuildDate", __DATE__},
- {"AgentOS", "windows"},
- {"Hostname", cfg::GetHostName()},
- {"Architecture", tgt::Is64bit() ? "64bit" : "32bit"},
+ const auto os = wtools::GetOsInfo();
+ const std::pair<std::string, std::optional<std::string>> infos[] = {
+ {"Version", {CHECK_MK_VERSION}},
+ {"BuildDate", {__DATE__}},
+ {"AgentOS", {"windows"}},
+ {"Hostname", {cfg::GetHostName()}},
+ {"Architecture", {tgt::Is64bit() ? "64bit" : "32bit"}},
+ {"OSName", os.has_value() ? std::optional{wtools::ToUtf8(os->name)}
+ : std::nullopt},
+ {"OSVersion", os.has_value()
+ ? std::optional{wtools::ToUtf8(os->version)}
+ : std::nullopt},
+ {"OSType", {"windows"}},
+ {"Time", {PrintIsoTime(std::chrono::system_clock::now())}},
};
std::string out;
for (const auto &info : infos) {
- out += fmt::format("{}: {}\n", info.first, info.second);
+ if (info.second.has_value()) {
+ out += fmt::format("{}: {}\n", info.first, info.second.value());
+ } else {
+ XLOG::l("Info '{}' is empty", info.first);
+ }
}
return out;
@@ -99,8 +120,23 @@ std::string MakeDirs() {
return out;
}
+std::tm ToLocalTime(std::chrono::time_point<std::chrono::system_clock> now) {
+ const std::time_t now_c = std::chrono::system_clock::to_time_t(now);
+ std::tm local_time;
+ auto _ = localtime_s(&now_c, &local_time);
+ return local_time;
+}
+
} // namespace
+std::string PrintIsoTime(
+ std::chrono::time_point<std::chrono::system_clock> now) {
+ auto lt = ToLocalTime(now);
+ return fmt::format("{:4}-{:02}-{:02}T{:02}:{:02}:{:02}{}",
+ lt.tm_year + 1900, lt.tm_mon + 1, lt.tm_mday, lt.tm_hour,
+ lt.tm_min, lt.tm_sec, GetTimezoneOffset());
+}
+
std::string CheckMk::makeBody() {
auto out = MakeInfo();
out += MakeDirs();
diff --git a/agents/wnx/src/engine/providers/p_perf_counters.cpp b/agents/wnx/src/engine/providers/p_perf_counters.cpp
index 7fa0ef7033a..907ffd7ccbc 100644
--- a/agents/wnx/src/engine/providers/p_perf_counters.cpp
+++ b/agents/wnx/src/engine/providers/p_perf_counters.cpp
@@ -7,6 +7,7 @@
#include "providers/p_perf_counters.h"
+#include <numeric>
#include
#include
@@ -15,6 +16,7 @@
#include "wnx/logger.h"
namespace rs = std::ranges;
+using namespace std::string_view_literals;
namespace cma::provider {
@@ -172,6 +174,53 @@ std::string MakeWinPerfNakedList(const PERF_OBJECT_TYPE *perf_object,
} // namespace details
+namespace {
+std::optional<wtools::AdapterInfo> FindAdapterInfo(
+ const wtools::AdapterInfoStore &store, const std::wstring &name) {
+ if (const auto search = store.find(name); search != store.end()) {
+ return {search->second};
+ }
+
+ XLOG::t("IF {} not found", wtools::ToUtf8(name));
+ for (auto &&adapter_info : store | rs::views::values) {
+ if (name == adapter_info.friendly_name) {
+ XLOG::t("IF {} found by friendly name", wtools::ToUtf8(name));
+ return {adapter_info};
+ }
+ }
+
+ return {};
+}
+} // namespace
+
+template <typename T>
+std::string AddRow(const std::vector &names,
+ const wtools::AdapterInfoStore &store,
+ const std::wstring_view counter_name,
+ std::function<T(const wtools::AdapterInfo &)> get_value,
+ T default_value) {
+ std::vector<std::wstring> values;
+ values.reserve(names.size() + 2);
+ values.emplace_back(counter_name);
+ for (auto &&name : names) {
+ const auto adapter_info = FindAdapterInfo(store, name);
+ if (adapter_info.has_value()) {
+ values.emplace_back(wtools::ConvertToUtf16(
+ fmt::format("{}", get_value(*adapter_info))));
+ } else {
+ const auto default_value_as_string =
+ wtools::ConvertToUtf16(fmt::format("{}", default_value));
+ values.emplace_back(default_value_as_string);
+ }
+ }
+ values.emplace_back(winperf::if_state_pseudo_counter_type);
+ return wtools::ToUtf8(
+ std::accumulate(std::next(values.begin()), values.end(), values[0],
+ [](const std::wstring &a, const std::wstring &b) {
+ return a + L' ' + b;
+ }));
+}
+
// builds a section
// empty string on error
// Also a good example of how to use our Perf API
@@ -203,6 +252,29 @@ std::string BuildWinPerfSection(std::wstring_view prefix,
// naked list
accu += details::MakeWinPerfNakedList(object, key_index);
+ if (name == winperf::if_section_name) {
+ const auto store = wtools::GetAdapterInfoStore();
+ if (store.empty()) {
+ XLOG::d("No adapters found");
+ }
+ const auto names = wtools::perf::GenerateInstanceNames(object);
+
+ accu += AddRow<int>(
+ names, store, winperf::if_state_pseudo_counter,
+ [](const wtools::AdapterInfo &info) -> int {
+ return static_cast<int>(info.oper_status);
+ },
+ static_cast<int>(IF_OPER_STATUS::IfOperStatusUp)) +
+ '\n';
+ accu += AddRow<std::string>(
+ names, store, winperf::if_mac_pseudo_counter,
+ [](const wtools::AdapterInfo &info) -> std::string {
+ return info.mac_address;
+ },
+ "0") +
+ '\n';
+ }
+
return accu;
}
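
The row assembly in AddRow reduces to a space-join over UTF-16 strings; a self-contained sketch of just that join, with the surrounding wtools conversions elided:

    #include <numeric>
    #include <string>
    #include <vector>

    // Joins e.g. {L"if_state", L"2", L"2", L"text"} into
    // L"if_state 2 2 text". The vector must be non-empty; AddRow
    // guarantees this by pushing the counter name first.
    std::wstring JoinWithSpaces(const std::vector<std::wstring> &values) {
        return std::accumulate(
            std::next(values.begin()), values.end(), values[0],
            [](const std::wstring &a, const std::wstring &b) {
                return a + L' ' + b;
            });
    }
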
diff --git a/agents/wnx/src/engine/service_processor.cpp b/agents/wnx/src/engine/service_processor.cpp
index c4b1df9d301..b6d244dd820 100644
--- a/agents/wnx/src/engine/service_processor.cpp
+++ b/agents/wnx/src/engine/service_processor.cpp
@@ -209,39 +209,35 @@ void ServiceProcessor::stopTestingMainThread() {
thread_.join();
}
-namespace {
-std::string FindWinPerfExe() {
- auto exe_name = cfg::groups::g_winperf.exe();
+std::string FindWinPerfExe(std::string_view exe_name) {
+ if (!tools::IsEqual(exe_name, "agent")) {
+ XLOG::d.i("Looking for agent '{}'", exe_name);
+ return std::string{exe_name};
+ }
- if (tools::IsEqual(exe_name, "agent")) {
- XLOG::t.i("Looking for default agent");
- const fs::path f{cfg::GetRootDir()};
- std::vector<fs::path> names{f / cfg::kDefaultAppFileName};
+ XLOG::t.i("Looking for default agent");
+ const fs::path f{cfg::GetRootDir()};
+ std::vector<fs::path> names{f / cfg::kDefaultAppFileName};
- if constexpr (tgt::Is64bit()) {
- names.emplace_back(f / "check_mk_service64.exe");
- }
+ if constexpr (tgt::Is64bit()) {
+ names.emplace_back(f / "check_mk_service64.exe");
+ }
- names.emplace_back(f / "check_mk_service32.exe");
+ names.emplace_back(f / "check_mk_service32.exe");
- exe_name.clear();
- for (const auto &name : names) {
- std::error_code ec;
- if (fs::exists(name, ec)) {
- XLOG::d.i("Using file '{}' for winperf", name);
- break;
- }
+ for (const auto &name : names) {
+ std::error_code ec;
+ if (fs::exists(name, ec)) {
+ XLOG::d.i("Using file '{}' for winperf", name);
+ return name.string();
}
- if (exe_name.empty()) {
- XLOG::l.crit("In folder '{}' not found binaries to exec winperf");
- return {};
- }
- } else {
- XLOG::d.i("Looking for agent '{}'", exe_name);
}
- return exe_name;
+
+ XLOG::l.crit("In folder '{}' not found binaries to exec winperf");
+ return {};
}
+namespace {
std::wstring GetWinPerfLogFile() {
return cfg::groups::g_winperf.isTrace()
? (fs::path{cfg::GetLogDir()} / "winperf.log").wstring()
@@ -257,7 +253,8 @@ void ServiceProcessor::kickWinPerf(AnswerId answer_id,
cmd_line = L"ip:" + wtools::ConvertToUtf16(ip_addr) + L" " + cmd_line;
}
- auto exe_name = wtools::ConvertToUtf16(FindWinPerfExe());
+ auto exe_name =
+ wtools::ConvertToUtf16(FindWinPerfExe(cfg::groups::g_winperf.exe()));
const auto timeout = cfg::groups::g_winperf.timeout();
auto prefix = wtools::ConvertToUtf16(cfg::groups::g_winperf.prefix());
@@ -549,9 +546,9 @@ bool FindProcessByPid(uint32_t pid) {
wtools::ScanProcessList([&found, pid](const PROCESSENTRY32 &entry) {
if (entry.th32ProcessID == pid) {
found = true;
- return false;
+ return wtools::ScanAction::terminate;
}
- return true;
+ return wtools::ScanAction::advance;
});
return found;
}
@@ -702,6 +699,24 @@ world::ExternalPort::IoParam AsIoParam(
: std::optional{},
};
}
+
+void PrepareTempFolder() {
+ try {
+ const auto path = wtools::MakeSafeTempFolder(wtools::safe_temp_sub_dir);
+ if (path.has_value()) {
+ for (const auto &entry :
+ std::filesystem::directory_iterator(*path)) {
+ fs::remove_all(entry.path());
+ }
+ XLOG::l.i("Temp folder: {}", path);
+ } else {
+ XLOG::l("Failed to create temp folder");
+ }
+
+ } catch (const std::exception &e) {
+ XLOG::l("Failed to create temp folder: {}", e.what());
+ }
+}
} // namespace
///
@@ -740,6 +755,7 @@ void ServiceProcessor::mainThread(world::ExternalPort *ex_port,
if (is_service) {
mc_.InstallDefault(cfg::modules::InstallMode::normal);
install::ClearPostInstallFlag();
+ PrepareTempFolder();
} else {
mc_.LoadDefault();
}
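
The ScanProcessList callback contract changes from bool to an explicit enum; a sketch of the new idiom, where the ScanAction enumerators are taken from the hunks above and the enum definition itself is assumed:

    // Stops at the first matching pid instead of scanning the whole list.
    bool HasProcess(uint32_t pid) {
        bool found = false;
        wtools::ScanProcessList([&](const PROCESSENTRY32 &entry) {
            if (entry.th32ProcessID == pid) {
                found = true;
                return wtools::ScanAction::terminate;  // stop the scan here
            }
            return wtools::ScanAction::advance;  // keep scanning
        });
        return found;
    }
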
diff --git a/agents/wnx/src/engine/upgrade.cpp b/agents/wnx/src/engine/upgrade.cpp
index 1e201cc46c7..12570d57d89 100644
--- a/agents/wnx/src/engine/upgrade.cpp
+++ b/agents/wnx/src/engine/upgrade.cpp
@@ -625,7 +625,7 @@ bool FindStopDeactivateLegacyAgent() {
}
auto path = FindLegacyAgent();
if (path.empty()) {
- XLOG::l.t("There is no legacy Check Mk agent installed");
+ XLOG::l.t("There is no legacy Checkmk agent installed");
return true;
}
@@ -702,7 +702,7 @@ bool FindActivateStartLegacyAgent(AddAction action) {
auto path = FindLegacyAgent();
if (path.empty()) {
- XLOG::l.t("There is no legacy Check Mk agent installed");
+ XLOG::l.t("There is no legacy Checkmk agent installed");
return true;
}
diff --git a/agents/wnx/src/engine/windows_service_api.cpp b/agents/wnx/src/engine/windows_service_api.cpp
index 1c3e1ccce52..e1fcedbb566 100644
--- a/agents/wnx/src/engine/windows_service_api.cpp
+++ b/agents/wnx/src/engine/windows_service_api.cpp
@@ -638,13 +638,13 @@ void ReportNoPythonModule(const std::vector<std::wstring> &params) {
int ExecCmkUpdateAgent(const std::vector<std::wstring> &params) {
ModifyStdio(true);
- fs::path plugins_dir{cma::cfg::GetUserPluginsDir()};
+ fs::path plugins_dir{cfg::GetUserPluginsDir()};
if (!fs::exists(plugins_dir)) {
ReportNoPluginDir(plugins_dir);
return 1;
}
- auto updater_file = plugins_dir / cma::cfg::files::kAgentUpdaterPython;
+ auto updater_file = plugins_dir / cfg::files::kAgentUpdaterPython;
if (!fs::exists(updater_file)) {
ReportNoUpdaterFile(updater_file, params);
return 1;
@@ -652,7 +652,7 @@ int ExecCmkUpdateAgent(const std::vector<std::wstring> &params) {
XLOG::d.i("'{}' will be used for updater", updater_file);
- cma::cfg::modules::ModuleCommander mc;
+ cfg::modules::ModuleCommander mc;
mc.LoadDefault();
auto command_to_run = mc.buildCommandLine(wtools::ToStr(updater_file));
if (command_to_run.empty()) {
@@ -662,14 +662,13 @@ int ExecCmkUpdateAgent(const std::vector ¶ms) {
for (auto &p : params) command_to_run += L" " + p;
- cma::cfg::SetupPluginEnvironment();
+ cfg::SetupPluginEnvironment();
ModifyStdio(false);
- auto proc_id =
- cma::tools::RunStdCommand(command_to_run, tools::WaitForEnd::yes);
+ auto proc_id = tools::RunStdCommand(command_to_run, tools::WaitForEnd::yes);
ModifyStdio(true);
- if (proc_id > 0) {
+ if (proc_id.has_value() && *proc_id > 0) {
XLOG::l.i("Agent Updater process [{}] started\n", proc_id);
return 0;
}
@@ -853,8 +852,8 @@ int ExecSkypeTest() {
XLOG::setup::ColoredOutputOnStdio(true);
ON_OUT_OF_SCOPE(XLOG::setup::DuplicateOnStdio(false););
XLOG::l.i("<<>>");
- cma::provider::SkypeProvider skype;
- auto result = skype.generateContent(cma::section::kUseEmbeddedName, true);
+ provider::SkypeProvider skype;
+ auto result = skype.generateContent(section::kUseEmbeddedName, true);
XLOG::l.i("*******************************************************");
if (!result.empty())
XLOG::l.i("{}", result);
@@ -938,14 +937,13 @@ class UdpServer {
}
// decoding
- auto [success, len] = crypt_.decode(
- data_ + cma::rt::kDataOffset, length - cma::rt::kDataOffset, true);
+ auto [success, len] = crypt_.decode(data_ + rt::kDataOffset,
+ length - rt::kDataOffset, true);
// printing
if (success) {
- data_[cma::rt::kDataOffset + len] = 0;
- XLOG::l.t("{}",
- std::string_view(data_ + cma::rt::kDataOffset, length));
+ data_[rt::kDataOffset + len] = 0;
+ XLOG::l.t("{}", std::string_view(data_ + rt::kDataOffset, length));
} else {
XLOG::l("Failed to decrypt data");
}
@@ -956,7 +954,7 @@ class UdpServer {
}
const std::string password_{kRtTestPassword};
- cma::encrypt::Commander crypt_{password_};
+ encrypt::Commander crypt_{password_};
asio::ip::udp::socket socket_;
asio::ip::udp::endpoint sender_endpoint_;
@@ -992,10 +990,10 @@ int ExecRealtimeTest(bool print) {
xlog::sendStringToStdio(
"Press any key to START testing Realtime Sections\n",
xlog::internal::Colors::green);
- cma::tools::GetKeyPress(); // blocking wait for key press
+ tools::GetKeyPress(); // blocking wait for key press
dev.connectFrom("127.0.0.1", kRtTestPort,
{"mem", "df", "winperf_processor"}, kRtTestPassword, 30);
- cma::tools::GetKeyPress(); // blocking wait for key press
+ tools::GetKeyPress(); // blocking wait for key press
dev.stop();
context.stop();
@@ -1122,7 +1120,7 @@ wtools::WinService::ErrorMode GetServiceErrorModeFromCfg(
// called once on start of the service
// also on reload of the config
bool ProcessServiceConfiguration(std::wstring_view service_name) {
- using namespace cma::cfg;
+ using namespace cfg;
wtools::WinService ws(service_name);
@@ -1177,9 +1175,9 @@ int ServiceAsService(std::wstring_view /*app_name*/,
return 0;
}
- cma::OnStartApp();
+ OnStartApp();
XLOG::l.i("service to run");
- ON_OUT_OF_SCOPE(cma::OnExit());
+ ON_OUT_OF_SCOPE(OnExit());
SelfConfigure();
@@ -1329,11 +1327,11 @@ SC_HANDLE SelfOpen() {
}
ON_OUT_OF_SCOPE(::CloseServiceHandle(manager_handle));
- auto *handle = ::OpenService(manager_handle, cma::srv::kServiceName,
- SERVICE_ALL_ACCESS);
+ auto *handle =
+ ::OpenService(manager_handle, srv::kServiceName, SERVICE_ALL_ACCESS);
if (handle == nullptr) {
XLOG::l.crit("Cannot open Service {}, error = {}",
- wtools::ToUtf8(cma::srv::kServiceName), ::GetLastError());
+ wtools::ToUtf8(srv::kServiceName), ::GetLastError());
}
return handle;
@@ -1343,7 +1341,7 @@ void SelfConfigure() {
auto *handle = SelfOpen();
ON_OUT_OF_SCOPE(::CloseServiceHandle(handle));
if (!IsServiceConfigured(handle)) {
- XLOG::l.i("Configure check mk service");
+ XLOG::l.i("Configure Checkmk service");
ConfigureServiceAsRestartable(handle);
}
}
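
RunStdCommand now reports the pid wrapped in std::optional, so call sites unwrap before logging; a minimal sketch of the pattern (the exact payload type of the optional is an assumption):

    // Returns true only when the child process was actually started.
    bool StartAndLog(const std::wstring &command) {
        const auto proc_id =
            tools::RunStdCommand(command, tools::WaitForEnd::yes);
        if (proc_id.has_value() && *proc_id > 0) {
            XLOG::l.i("process [{}] started", *proc_id);
            return true;
        }
        return false;
    }
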
diff --git a/agents/wnx/src/main/pch.h b/agents/wnx/src/main/pch.h
index 27ee00876bf..20758e15ce1 100644
--- a/agents/wnx/src/main/pch.h
+++ b/agents/wnx/src/main/pch.h
@@ -13,7 +13,8 @@
#define _CRT_SECURE_NO_WARNINGS // NOLINT
-#define _SILENCE_CXX17_STRSTREAM_DEPRECATION_WARNING // NOLINT
+#define _SILENCE_CXX17_STRSTREAM_DEPRECATION_WARNING // NOLINT
+#define _SILENCE_STDEXT_ARR_ITERS_DEPRECATION_WARNING // std format 9.0
#define NOMINMAX
#define WIN32_LEAN_AND_MEAN
diff --git a/agents/wnx/test_files/unit_test/agent_msi.failed.python.log b/agents/wnx/test_files/unit_test/agent_msi.failed.python.log
index 82259650683..0bb58f28690 100644
Binary files a/agents/wnx/test_files/unit_test/agent_msi.failed.python.log and b/agents/wnx/test_files/unit_test/agent_msi.failed.python.log differ
diff --git a/agents/wnx/tests/ap/test_mk_logwatch_win.py b/agents/wnx/tests/ap/test_mk_logwatch_win.py
index 8f811ef10b3..b7a37b2e430 100644
--- a/agents/wnx/tests/ap/test_mk_logwatch_win.py
+++ b/agents/wnx/tests/ap/test_mk_logwatch_win.py
@@ -487,7 +487,7 @@ def test_log_lines_iter_encoding(monkeypatch:pytest.MonkeyPatch, buff:bytes, enc
def test_log_lines_iter() -> None:
txt_file = lw.__file__.rstrip('c')
- with lw.LogLinesIter(txt_file, "utf-8" if os.name == "nt" else None) as log_iter:
+ with lw.LogLinesIter(txt_file, None) as log_iter:
log_iter.set_position(122)
assert log_iter.get_position() == 122
@@ -616,7 +616,7 @@ def isatty(self):
},
[
"[[[%s]]]\n" % __file__,
- "W ('W', re.compile(u'^[^u]*W.*I m\xe4tch \xf6nly mys\xe9lf \U0001f9da', re.UNICODE), [], []),\n"
+ "W ('W', re.compile('^[^u]*W.*I m\xe4tch \xf6nly mys\xe9lf \U0001f9da', re.UNICODE), [], []),\n"
],
),
(
@@ -657,7 +657,7 @@ def isatty(self):
},
[
"[[[%s]]]\n" % __file__,
- "C ('C', re.compile(u'\U0001f409', re.UNICODE), [], []),\n",
+ "C ('C', re.compile('\U0001f409', re.UNICODE), [], []),\n",
],
),
('locked door', [], {}, {}, ["[[[locked door:cannotopen]]]\n"]),
diff --git a/agents/wnx/tests/files/scripts/combine_utf.py b/agents/wnx/tests/files/scripts/combine_utf.py
index 2ebd6ad0196..85294deb45a 100644
--- a/agents/wnx/tests/files/scripts/combine_utf.py
+++ b/agents/wnx/tests/files/scripts/combine_utf.py
@@ -1,3 +1,8 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
import argparse
from typing import Final
diff --git a/agents/wnx/tests/integration/test_check_mk_run.py b/agents/wnx/tests/integration/test_check_mk_run.py
index 4352afa4152..17d64fb9038 100644
--- a/agents/wnx/tests/integration/test_check_mk_run.py
+++ b/agents/wnx/tests/integration/test_check_mk_run.py
@@ -79,7 +79,7 @@ def test_check_mk_base(
assert sections.count("<<<>>>") == 2
assert _INTERNAL_SECTIONS.issubset(
set(sections)
- ), f"Missing sections: {_INTERNAL_SECTIONS.difference((set(sections)))}"
+ ), f"Missing sections: {_INTERNAL_SECTIONS.difference(set(sections))}"
assert sections[-1] == "<<<systemtime>>>"
assert len(sections) == SECTION_COUNT
diff --git a/agents/wnx/tests/integration/test_python_module.py b/agents/wnx/tests/integration/test_python_module.py
index f03191503da..39124576da5 100644
--- a/agents/wnx/tests/integration/test_python_module.py
+++ b/agents/wnx/tests/integration/test_python_module.py
@@ -53,7 +53,7 @@ def test_python_module(
assert output.ret_code == 1
assert output.stdout.startswith("\r\n\tYou must install Agent Updater Python plugin")
copy_cmk_updater(
- git_dir / "enterprise" / "agents" / "plugins",
+ git_dir / "non-free" / "cmk-update-agent",
data_dir / "plugins",
)
output = run_agent(
diff --git a/agents/wnx/tests/integration/utils.py b/agents/wnx/tests/integration/utils.py
index 408ad7af7a0..ff6a718ad9f 100644
--- a/agents/wnx/tests/integration/utils.py
+++ b/agents/wnx/tests/integration/utils.py
@@ -14,17 +14,42 @@
from pathlib import Path
from typing import Any, Final, NamedTuple
-import telnetlib3 # type: ignore[import]
+import telnetlib3 # type: ignore[import-untyped]
import yaml
+# check_mk section, example of output
+# <<<check_mk>>>
+# Version: 2.3.0b1
+# BuildDate: Jan 5 2024
+# AgentOS: windows
+# OSName: Microsoft Windows 10 Pro
+# OSVersion: 10.0.19045
+# OSType: windows
+# Hostname: klapp-9999
+# Architecture: 64bit
+# Time: 2024-01-05T14:47:46+0100
+# WorkingDirectory: C:\Program Files (x86)\checkmk\service
+# ConfigFile: C:\Program Files (x86)\checkmk\service\check_mk.yml
+# LocalConfigFile: C:\ProgramData\checkmk\agent\check_mk.user.yml
+# AgentDirectory: C:\Program Files (x86)\checkmk\service
+# PluginsDirectory: C:\ProgramData\checkmk\agent\plugins
+# StateDirectory: C:\ProgramData\checkmk\agent\state
+# ConfigDirectory: C:\ProgramData\checkmk\agent\config
+# TempDirectory: C:\ProgramData\checkmk\agent\tmp
+# LogDirectory: C:\ProgramData\checkmk\agent\log
+# SpoolDirectory: C:\ProgramData\checkmk\agent\spool
+# LocalDirectory: C:\ProgramData\checkmk\agent\local
+# OnlyFrom:
+
+
YamlDict = dict[str, dict[str, Any]]
INTEGRATION_PORT: Final = 25998
AGENT_EXE_NAME: Final = "check_mk_agent.exe"
_HOST: Final = "localhost"
USER_YAML_CONFIG: Final = "check_mk.user.yml"
SECTION_COUNT: Final = 18
-ONLY_FROM_LINE: Final = 17
-CTL_STATUS_LINE: Final = 19
+ONLY_FROM_LINE: Final = 21
+CTL_STATUS_LINE: Final = ONLY_FROM_LINE + 2
PYTHON_CAB_NAME: Final = "python-3.cab"
CMK_UPDATER_PY: Final = "cmk_update_agent.py"
CMK_UPDATER_CHECKMK_PY: Final = "cmk_update_agent.checkmk.py"
diff --git a/agents/wnx/tests/regression/conftest.py b/agents/wnx/tests/regression/conftest.py
index 924c7730f60..80c285b265d 100644
--- a/agents/wnx/tests/regression/conftest.py
+++ b/agents/wnx/tests/regression/conftest.py
@@ -8,7 +8,7 @@
import time
import pytest
-import telnetlib3 # type: ignore[import]
+import telnetlib3 # type: ignore[import-untyped]
import yaml
from .local import DEFAULT_CONFIG, host, main_exe, port, run_agent, user_yaml_config
diff --git a/agents/wnx/tests/regression/test_section_check_mk.py b/agents/wnx/tests/regression/test_section_check_mk.py
index 4e76e02f1cd..284525f6e61 100644
--- a/agents/wnx/tests/regression/test_section_check_mk.py
+++ b/agents/wnx/tests/regression/test_section_check_mk.py
@@ -56,11 +56,15 @@ def testconfig_only_from_engine(request, testconfig_host):
# live example of valid output
_EXAMPLE = """
<<<check_mk>>>
-Version: 2.0.0i1
-BuildDate: Jun 7 2019
+Version: 2.3.0-2024.03.15
+BuildDate: Mar 15 2024
AgentOS: windows
-Hostname: SERG-DELL
-Architecture: 32bit
+Hostname: klapp-0336
+Architecture: 64bit
+OSName: Microsoft Windows 10 Pro
+OSVersion: 10.0.19045
+OSType: windows
+Time: 2024-03-19T13:42:18+0100
WorkingDirectory: c:\\dev\\shared
ConfigFile: c:\\dev\\shared\\check_mk.yml
LocalConfigFile: C:\\ProgramData\\checkmk\\agent\\check_mk.user.yml
@@ -95,16 +99,16 @@ def make_only_from_array(ipv4):
def expected_output_engine():
ipv4 = Globals.only_from.split() if Globals.only_from is not None else None
expected = [
- # Note: The first two lines are output with crash_debug = yes in 1.2.8
- # but no longer in 1.4.0:
- # r'<<>>\',
- # r'[[[Check_MK Agent]]]','
r"<<<%s>>>" % Globals.section,
r"Version: \d+\.\d+\.\d+([bi]\d+)?(p\d+)?",
r"BuildDate: [A-Z][a-z]{2} (\d{2}| \d) \d{4}",
r"AgentOS: windows",
r"Hostname: .+",
r"Architecture: \d{2}bit",
+ r"OSName: Microsoft .+",
+ r"OSVersion: 10.+",
+ r"OSType: windows",
+ r"Time: 20\d\d-\d\d-\d\dT\d\d:\d\d:\d\d[\+,-]\d\d\d\d",
r"WorkingDirectory: %s" % (re.escape(os.getcwd())),
r"ConfigFile: %s" % (re.escape(get_main_yaml_name(root_dir))),
r"LocalConfigFile: %s" % (re.escape(get_user_yaml_name(user_dir))),
@@ -116,18 +120,6 @@ def expected_output_engine():
r"LogDirectory: %s" % (re.escape(os.path.join(user_dir, "log"))),
r"SpoolDirectory: %s" % (re.escape(os.path.join(user_dir, "spool"))),
r"LocalDirectory: %s" % (re.escape(os.path.join(user_dir, "local"))),
- # r'ScriptStatistics: Plugin C:0 E:0 T:0 Local C:0 E:0 T:0',
- # Note: The following three lines are output with crash_debug = yes in
- # 1.2.8 but no longer in 1.4.0:
- # r'ConnectionLog: %s%s' %
- # (drive_letter,
- # re.escape(os.path.join(exec_dir, 'log', 'connection.log'))),
- # r'CrashLog: %s%s' %
- # (drive_letter,
- # re.escape(os.path.join(exec_dir, 'log', 'crash.log'))),
- # r'SuccessLog: %s%s' %
- # (drive_letter,
- # re.escape(os.path.join(exec_dir, 'log', 'success.log'))),
(
r"OnlyFrom: %s %s" % tuple(make_only_from_array(ipv4))
if Globals.only_from
diff --git a/agents/wnx/tests/regression/test_section_eventlog.py b/agents/wnx/tests/regression/test_section_eventlog.py
index d5e3370917c..8af6c2f4fb7 100644
--- a/agents/wnx/tests/regression/test_section_eventlog.py
+++ b/agents/wnx/tests/regression/test_section_eventlog.py
@@ -12,7 +12,7 @@
from itertools import chain, repeat
import pytest
-import win32evtlog # type: ignore[import] # pylint: disable=import-error
+import win32evtlog # type: ignore[import-not-found] # pylint: disable=import-error
from .local import assert_subprocess, host, local_test, user_dir
diff --git a/agents/wnx/tests/regression/test_section_winperf.py b/agents/wnx/tests/regression/test_section_winperf.py
index fbfa270eb40..6ba9e98d6c8 100644
--- a/agents/wnx/tests/regression/test_section_winperf.py
+++ b/agents/wnx/tests/regression/test_section_winperf.py
@@ -45,6 +45,7 @@ def expected_output_engine():
r"|\d+\.\d{2} \d+ \d+"
r"|\d+ instances\:( [^ ]+)+"
r"|\-?\d+( \d+)+ [\w\(\)]+"
+ r"|\d\d\d[2|6]( .+)+ text"
)
if not Globals.alone:
re_str += r"|" + re.escape(r"<<>>") + r"|\d+"
diff --git a/agents/wnx/watest/test-ohm.cpp b/agents/wnx/watest/test-ohm.cpp
index fd6af97dff2..df9381ab089 100644
--- a/agents/wnx/watest/test-ohm.cpp
+++ b/agents/wnx/watest/test-ohm.cpp
@@ -33,7 +33,7 @@ int CalcOhmCount() {
cma::provider::ohm::kExeModule)) {
count++;
}
- return true;
+ return wtools::ScanAction::advance;
});
return count;
}
diff --git a/agents/wnx/watest/test-plugin.cpp b/agents/wnx/watest/test-plugin.cpp
index eff48b7fcd7..adc947f5379 100644
--- a/agents/wnx/watest/test-plugin.cpp
+++ b/agents/wnx/watest/test-plugin.cpp
@@ -251,7 +251,7 @@ TEST(PluginTest, JobStartStopComponent) {
}
TEST(PluginTest, Extensions) {
- auto pshell = MakePowershellWrapper();
+ auto pshell = MakePowershellWrapper("a");
EXPECT_TRUE(pshell.find(L"powershell.exe") != std::wstring::npos);
auto p = ConstructCommandToExec(L"a.exe");
@@ -649,8 +649,7 @@ TEST(PluginTest, FilesAndFoldersComponent) {
auto files = cma::GatherAllFiles(pv);
auto yaml_units = cfg::GetArray(
cfg::groups::kLocal, cfg::vars::kPluginsExecution);
- const auto exe_units =
- cfg::LoadExeUnitsFromYaml(yaml_units);
+ const auto exe_units = cfg::LoadExeUnitsFromYaml(yaml_units);
// no local files
PluginMap pm;
UpdatePluginMap(nullptr, pm, ExecType::local, files, exe_units, true);
diff --git a/agents/wnx/watest/test-section_logwatchevent.cpp b/agents/wnx/watest/test-section_logwatchevent.cpp
index b9a3093fdad..3926eb9ae3f 100644
--- a/agents/wnx/watest/test-section_logwatchevent.cpp
+++ b/agents/wnx/watest/test-section_logwatchevent.cpp
@@ -193,9 +193,9 @@ TEST(LogWatchEventTest, DumpEventLog) {
auto start = steady_clock::now();
auto [_, out] = DumpEventLog(*ptr, state, lwl);
auto end = steady_clock::now();
- EXPECT_TRUE(
- std::chrono::duration_cast<std::chrono::seconds>(end - start)
- .count() < 2);
+ EXPECT_LE(std::chrono::duration_cast<std::chrono::seconds>(end - start)
+ .count(),
+ 3);
}
}
diff --git a/agents/wnx/watest/test-section_providers.cpp b/agents/wnx/watest/test-section_providers.cpp
index d0ff55d2452..4385333f02a 100644
--- a/agents/wnx/watest/test-section_providers.cpp
+++ b/agents/wnx/watest/test-section_providers.cpp
@@ -111,15 +111,16 @@ TEST(SectionProviders, SystemTime) {
class SectionProviderCheckMkFixture : public ::testing::Test {
public:
- static constexpr size_t core_lines_ = 19;
+ static constexpr size_t core_lines_ = 23;
static constexpr size_t full_lines_ = core_lines_ + 3;
static constexpr std::string_view names_[core_lines_ - 1] = {
- "Version", "BuildDate", "AgentOS",
- "Hostname", "Architecture", "WorkingDirectory",
- "ConfigFile", "LocalConfigFile", "AgentDirectory",
- "PluginsDirectory", "StateDirectory", "ConfigDirectory",
- "TempDirectory", "LogDirectory", "SpoolDirectory",
- "LocalDirectory", "OnlyFrom"};
+ "Version", "BuildDate", "AgentOS",
+ "Hostname", "Architecture", "OSName",
+ "OSVersion", "OSType", "Time",
+ "WorkingDirectory", "ConfigFile", "LocalConfigFile",
+ "AgentDirectory", "PluginsDirectory", "StateDirectory",
+ "ConfigDirectory", "TempDirectory", "LogDirectory",
+ "SpoolDirectory", "LocalDirectory", "OnlyFrom"};
static constexpr std::pair
only_from_cases_[] = {
diff --git a/agents/wnx/watest/test-service-processor.cpp b/agents/wnx/watest/test-service-processor.cpp
index 8fd46e1e93a..0f8488ea73b 100644
--- a/agents/wnx/watest/test-service-processor.cpp
+++ b/agents/wnx/watest/test-service-processor.cpp
@@ -371,4 +371,21 @@ TEST(ServiceProcessorTest, DirectCallWmi) {
provider::kSubSectionComputerSystem));
}
+TEST(ServiceProcessorTest, FindWinPerfDefault) {
+ const auto temp_fs = tst::TempCfgFs::Create();
+ ASSERT_TRUE(temp_fs->loadFactoryConfig());
+ tst::CreateTextFile(temp_fs->root() / "check-mk-service32.exe", "");
+ EXPECT_TRUE(FindWinPerfExe("agent").empty());
+ tst::CreateTextFile(temp_fs->root() / "check_mk_service32.exe", "");
+ EXPECT_EQ(FindWinPerfExe("agent"),
+ temp_fs->root() / "check_mk_service32.exe");
+ tst::CreateTextFile(temp_fs->root() / "check_mk_agent.exe", "");
+ EXPECT_EQ(FindWinPerfExe("agent"), temp_fs->root() / "check_mk_agent.exe");
+}
+
+TEST(ServiceProcessorTest, FindWinPerfCustom) {
+ EXPECT_TRUE(FindWinPerfExe("agent").empty());
+ EXPECT_EQ(FindWinPerfExe("agent.exe"), "agent.exe");
+}
+
} // namespace cma::srv
diff --git a/agents/wnx/watest/test-winperf.cpp b/agents/wnx/watest/test-winperf.cpp
index c0057e0cf05..96c24f9ec62 100644
--- a/agents/wnx/watest/test-winperf.cpp
+++ b/agents/wnx/watest/test-winperf.cpp
@@ -54,8 +54,8 @@ TEST(WinPerf, ValidateFabricConfig) {
ASSERT_NE(cfg_timeout, 1234567);
EXPECT_EQ(groups::g_winperf.timeout(), cfg_timeout);
- EXPECT_TRUE(wp_group[vars::kWinPerfFork].as<bool>(false));
- EXPECT_TRUE(groups::g_winperf.isFork());
+ EXPECT_FALSE(wp_group[vars::kWinPerfFork].as<bool>(true));
+ EXPECT_FALSE(groups::g_winperf.isFork());
EXPECT_FALSE(wp_group[vars::kWinPerfTrace].as<bool>(true));
EXPECT_FALSE(groups::g_winperf.isTrace());
@@ -156,6 +156,10 @@ TEST(WinPerf, InvalidCounter) {
EXPECT_TRUE(BuildWinPerfSection(L"winp", name, index).empty());
}
+bool IsMacLike(const std::string &s) {
+ return tools::SplitString(s, ":").size() == 8;
+}
+
TEST(WinPerf, IfCounter) {
const auto x = BuildWinPerfSection(L"winp", L"if", L"510");
const auto table = tools::SplitString(x, "\n");
@@ -165,6 +169,7 @@ TEST(WinPerf, IfCounter) {
EXPECT_EQ(table[0], "<<>>"s);
const auto stamp = tools::SplitString(table[1], " ");
ASSERT_EQ(stamp.size(), 3);
+ const auto names = tools::SplitString(table[2], " ");
// check stamp
const auto stamp_time = tools::ConvertToUint64(stamp[0], 12345678);
@@ -177,6 +182,29 @@ TEST(WinPerf, IfCounter) {
cfg::GetPerformanceFrequency());
// check at least one negative value is in
EXPECT_TRUE(rs::any_of(table, [](auto &l) { return l[0] == '-'; }));
+
+ // the state pseudo counter opens the second-to-last line
+ EXPECT_TRUE(table[table.size() - 2].starts_with(
+ wtools::ToUtf8(winperf::if_state_pseudo_counter)));
+
+ // ...and that line ends with the state type marker
+ EXPECT_TRUE(table[table.size() - 2].ends_with(
+ wtools::ToUtf8(winperf::if_state_pseudo_counter_type)));
+ // the MAC pseudo counter opens the last line
+ EXPECT_TRUE(table[table.size() - 1].starts_with(
+ wtools::ToUtf8(winperf::if_mac_pseudo_counter)));
+ auto pre_last_row = tools::SplitString(table[table.size() - 2], " ");
+ EXPECT_EQ(pre_last_row.size(), names.size());
+
+ // ...and the last line ends with the MAC type marker
+ EXPECT_TRUE(table[table.size() - 1].ends_with(
+ wtools::ToUtf8(winperf::if_mac_pseudo_counter_type)));
+ auto last_row = tools::SplitString(table[table.size() - 1], " ");
+ EXPECT_EQ(last_row.size(), names.size());
+ EXPECT_TRUE(rs::all_of(std::next(last_row.begin()),
+ std::prev(last_row.end()),
+ [](const std::string &e) { return IsMacLike(e); }))
+ << "Not all MACs found in: " << table[table.size() - 1];
}
TEST(WinPerf, TcpConnCounter) {
diff --git a/agents/wnx/watest/test-wtools.cpp b/agents/wnx/watest/test-wtools.cpp
index cfab469f180..644a1f5abd8 100644
--- a/agents/wnx/watest/test-wtools.cpp
+++ b/agents/wnx/watest/test-wtools.cpp
@@ -9,6 +9,7 @@
#include
#include
+#include
#include "common/wtools.h"
#include "common/wtools_user_control.h"
@@ -19,16 +20,17 @@
using namespace std::string_literals;
using namespace std::chrono_literals;
namespace fs = std::filesystem;
+namespace rs = std::ranges;
namespace {
// Internal description of assorted counter params.
// Should be valid for all windows versions
struct CounterParam {
const wchar_t *const name_; // usually number
- const uint32_t index_; // the same as name
- const uint32_t counters_count;
- const uint32_t instances_min_;
- const uint32_t instances_max_;
+ uint32_t index_; // the same as name
+ uint32_t counters_count;
+ uint32_t instances_min_;
+ uint32_t instances_max_;
};
constexpr CounterParam g_cpu_counter = {.name_ = L"238",
@@ -44,7 +46,7 @@ constexpr CounterParam g_disk_counter = {.name_ = L"234",
} // namespace
-namespace wtools { // to become friendly for cma::cfg classes
+namespace wtools {
class WtoolsKillProcFixture : public ::testing::Test {
protected:
@@ -52,12 +54,11 @@ class WtoolsKillProcFixture : public ::testing::Test {
static constexpr std::wstring_view nameToUse() { return L"kill_proc.exe"; }
static void KillTmpProcesses() {
- // kill process
ScanProcessList([](const PROCESSENTRY32 &entry) {
if (std::wstring{entry.szExeFile} == nameToUse()) {
KillProcess(entry.th32ProcessID, 99);
}
- return true; // continue scan
+ return ScanAction::advance;
});
}
@@ -84,12 +85,12 @@ class WtoolsKillProcFixture : public ::testing::Test {
std::wstring path;
ScanProcessList([&](const PROCESSENTRY32 &entry) {
if (std::wstring{entry.szExeFile} != nameToUse()) {
- return true; // continue scan
+ return ScanAction::advance;
}
path = GetProcessPath(entry.th32ProcessID);
pid = entry.th32ProcessID;
- return false;
+ return ScanAction::terminate;
});
return {path, pid};
@@ -224,7 +225,7 @@ class WtoolsKillProcessTreeFixture : public ::testing::Test {
names.back(), entry.th32ProcessID,
entry.th32ParentProcessID, ::GetCurrentProcessId());
}
- return true;
+ return ScanAction::advance;
});
EXPECT_TRUE(!names.empty());
for (auto &name : names) {
@@ -259,9 +260,9 @@ class WtoolsKillProcessTreeFixture : public ::testing::Test {
ScanProcessList([&](const PROCESSENTRY32 &entry) {
if (entry.th32ProcessID == pid) {
found = true;
- return false;
+ return ScanAction::terminate;
}
- return true;
+ return ScanAction::advance;
});
return found;
}
@@ -271,9 +272,9 @@ class WtoolsKillProcessTreeFixture : public ::testing::Test {
ScanProcessList([&](const PROCESSENTRY32 &entry) {
if (entry.th32ParentProcessID == pid) {
found = true;
- return false;
+ return ScanAction::terminate;
}
- return true;
+ return ScanAction::advance;
});
return found;
}
@@ -284,11 +285,11 @@ class WtoolsKillProcessTreeFixture : public ::testing::Test {
DWORD parent_process_id = 0;
ScanProcessList([&](const PROCESSENTRY32 &entry) {
if (entry.th32ProcessID != proc_id) {
- return true; // continue
+ return ScanAction::advance;
}
proc_name = entry.szExeFile;
parent_process_id = entry.th32ParentProcessID;
- return false; // found
+ return ScanAction::terminate;
});
return {proc_name, parent_process_id};
@@ -798,22 +799,67 @@ TEST(Wtools, GetServiceStatus) {
}
TEST(Wtools, InternalUsersDbIntegration) {
- const auto group_name = SidToName(L"S-1-5-32-545", SidTypeGroup);
+ // Power Users
+ const auto group = SidToName(L"S-1-5-32-547", SidTypeGroup);
+ auto group_name = group;
+ rs::replace(group_name, ' ', '_');
+
auto iu = std::make_unique<InternalUsersDb>();
- auto [name, pwd] = iu->obtainUser(group_name);
- if (!name.empty()) {
- EXPECT_EQ(name, L"cmk_TST_"s + group_name);
- EXPECT_EQ(iu->size(), 1U);
-
- auto [name_2, pwd_2] = iu->obtainUser(group_name);
- EXPECT_TRUE(!name_2.empty());
- EXPECT_EQ(name_2, L"cmk_TST_"s + group_name);
- EXPECT_EQ(name, name_2);
- EXPECT_EQ(iu->size(), 1U);
- iu.reset();
- const uc::LdapControl lc;
- ASSERT_EQ(lc.userDel(name), uc::Status::absent);
+ const auto nothing = iu->obtainUser(L"weird group");
+ EXPECT_TRUE(nothing.first.empty());
+
+ auto [name, pwd] = iu->obtainUser(group);
+ if (name.empty()) {
+ GTEST_SKIP() << "can't get user, admin rights?";
+ }
+
+ EXPECT_EQ(name, L"cmk_TST_"s + group_name);
+ EXPECT_EQ(iu->size(), 1U);
+
+ auto [name_2, pwd_2] = iu->obtainUser(group);
+ EXPECT_TRUE(!name_2.empty());
+ EXPECT_EQ(name_2, L"cmk_TST_"s + group_name);
+ EXPECT_EQ(name, name_2);
+ EXPECT_EQ(iu->size(), 1U);
+ iu.reset();
+ const uc::LdapControl lc;
+ ASSERT_EQ(lc.userDel(name), uc::Status::absent);
+}
+
+TEST(Wtools, MakeSafeFolderIntegration) {
+ const auto path = MakeSafeTempFolder("temp");
+ EXPECT_TRUE(fs::exists(*path));
+ fs::remove_all(*path);
+}
+
+TEST(Wtools, GetAdapterInfoStore) {
+ const auto store = GetAdapterInfoStore();
+ EXPECT_GE(store.size(), 1U);
+ std::unordered_set<IF_OPER_STATUS> types;
+ for (auto &&info : store | std::views::values) {
+ types.insert(info.oper_status);
}
+ EXPECT_TRUE(types.contains(IF_OPER_STATUS::IfOperStatusUp));
+ EXPECT_TRUE(types.contains(IF_OPER_STATUS::IfOperStatusDown));
+}
+
+TEST(Wtools, MangleNameForPerfCounter) {
+ EXPECT_EQ(MangleNameForPerfCounter(L"abc"), L"abc");
+ EXPECT_EQ(MangleNameForPerfCounter(L"/\\!@#$%^&**()__ `~'\""),
+ L"__!@_$%^&**[]__ `~'\"");
+}
+
+TEST(Wtools, OsInfo) {
+ const auto obtained = *GetOsInfo();
+ EXPECT_TRUE(obtained.name.starts_with(L"Microsoft Windows"));
+ EXPECT_TRUE(obtained.name.ends_with(L"Pro") || // local
+ obtained.name.ends_with(L"Standard")); // CI
+
+ const auto num_strings = cma::tools::SplitString(obtained.version, L".");
+ // 10.0.14559
+ EXPECT_GE(std::stoi(num_strings[0]), 10);
+ EXPECT_GE(std::stoi(num_strings[1]), 0);
+ EXPECT_GE(std::stoi(num_strings[2]), 20);
}
} // namespace wtools
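
The MangleNameForPerfCounter expectations above imply a small character rewrite; a sketch of rules consistent with that test, under the assumption that only these five characters are rewritten:

    #include <string>

    // '(' -> '[', ')' -> ']', and '/', '\', '#' -> '_';
    // everything else passes through unchanged.
    std::wstring MangleSketch(std::wstring s) {
        for (auto &c : s) {
            switch (c) {
                case L'(': c = L'['; break;
                case L')': c = L']'; break;
                case L'/':
                case L'\\':
                case L'#': c = L'_'; break;
                default: break;
            }
        }
        return s;
    }
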
diff --git a/agents/wnx/watest/test-wtools_runas.cpp b/agents/wnx/watest/test-wtools_runas.cpp
index 18496187732..8c0f578559d 100644
--- a/agents/wnx/watest/test-wtools_runas.cpp
+++ b/agents/wnx/watest/test-wtools_runas.cpp
@@ -5,13 +5,14 @@
#include
-#include "wnx/cma_core.h"
#include "common/wtools.h"
#include "common/wtools_runas.h"
#include "common/wtools_user_control.h"
#include "watest/test_tools.h"
+#include "wnx/cma_core.h"
using namespace std::chrono_literals;
using namespace std::string_literals;
+namespace fs = std::filesystem;
namespace wtools::runas {
@@ -63,51 +64,87 @@ TEST(WtoolsRunAs, NoUser_Component) {
EXPECT_EQ(cma::tools::win::GetEnv("USERNAME"s) + "\r\nmarker 1\r\n", data);
}
-// TODO(sk,au): Check why the test doesn't work on CI
-TEST(WtoolsRunAs, TestUser_ComponentExt) {
- wtools::uc::LdapControl lc;
- auto pwd = GenerateRandomString(12);
- std::wstring user = L"a1" + fmt::format(L"_{}", ::GetCurrentProcessId());
- auto status = lc.userAdd(user, pwd);
- if (status == uc::Status::exists) {
- status = lc.changeUserPassword(user, pwd);
+class WtoolsRunAsFixture : public ::testing::Test {
+public:
+ void SetUp() override {
+ user_ = L"a1" + fmt::format(L"_{}", ::GetCurrentProcessId());
+ pwd_ = GenerateRandomString(12);
+ }
+
+ void TearDown() override { std::ignore = lc_.userDel(user_); }
+
+ fs::path TempDir() const { return temp_dir_.in(); }
+ uc::Status DelUser(const std::wstring_view user) const {
+ return lc_.userDel(user);
+ }
+ uc::Status AddUser(const std::wstring_view user,
+ const std::wstring pwd) const {
+ return lc_.userAdd(user, pwd);
}
- if (status != uc::Status::success) {
+ std::wstring User() const { return user_; }
+ std::wstring Pwd() const { return pwd_; }
+ uc::Status ChangePwd() {
+ pwd_ = GenerateRandomString(12);
+ return lc_.changeUserPassword(User(), Pwd());
+ }
+
+private:
+ uc::LdapControl lc_;
+ std::wstring pwd_;
+ std::wstring user_;
+ tst::TempDirPair temp_dir_{"WtoolsRunAs"};
+};
+
+TEST_F(WtoolsRunAsFixture, TestUser_ComponentExt) {
+ std::ignore = DelUser(User()); // silently remove leftovers from earlier runs
+
+ if (AddUser(User(), Pwd()) != uc::Status::success) {
GTEST_SKIP() << "failed to set password, maybe not admin?";
}
+ const auto old_pwd = Pwd();
+ ASSERT_EQ(AddUser(User(), Pwd()), uc::Status::exists);
- auto temp_fs = tst::TempCfgFs::Create();
- ASSERT_TRUE(temp_fs->loadFactoryConfig());
- auto in = temp_fs->data();
- tst::CreateWorkFile(in / "runc.cmd",
+ const auto in = TempDir();
+ const auto batch_file = in / "runc.cmd";
+ tst::CreateWorkFile(batch_file,
"@powershell Start-Sleep -Milliseconds 150\n"
"@echo %USERNAME%\n"
"@powershell Start-Sleep -Milliseconds 150\n"
"@echo marker %1");
+ // new password
+ ASSERT_EQ(ChangePwd(), uc::Status::success);
+ const auto new_pwd = Pwd();
+ ASSERT_NE(old_pwd, new_pwd);
// Allow Users to use the file
// Must be done for testing. Plugin Engine must use own method to allow
// execution
- EXPECT_TRUE(wtools::ChangeAccessRights(in / "runc.cmd", user,
+ // VALIDATE ALSO MANUALLY: script is complicated
+ EXPECT_TRUE(wtools::ChangeAccessRights(batch_file, User(),
STANDARD_RIGHTS_ALL | GENERIC_ALL,
GRANT_ACCESS, OBJECT_INHERIT_ACE));
wtools::AppRunner ar;
- auto ret =
- ar.goExecAsJobAndUser(user, pwd, (in / "runc.cmd").wstring() + L" 1");
- ASSERT_TRUE(ret)
+ // wrong password
+ const auto fail =
+ ar.goExecAsJobAndUser(User(), old_pwd, batch_file.wstring() + L" 1");
+ ASSERT_FALSE(fail)
+ << "password must be invalid or expired or you have problems with Access rights";
+
+ // good password
+ const auto success =
+ ar.goExecAsJobAndUser(User(), new_pwd, batch_file.wstring() + L" 1");
+ ASSERT_TRUE(success)
<< "password is invalid or expired or you have problems with Access rights";
- auto b = WaitForExit(ar.processId());
+ const auto b = WaitForExit(ar.processId());
if (!b) {
XLOG::SendStringToStdio("Retry waiting for the process\n",
XLOG::Colors::yellow);
WaitForExit(ar.processId()); // we are starting waiter two times
}
ASSERT_TRUE(b);
- auto data = ReadFromHandle(ar.getStdioRead());
- EXPECT_EQ("a1"s + fmt::format("_{}", ::GetCurrentProcessId()) +
- "\r\nmarker 1\r\n",
- data);
+ const auto data = ReadFromHandle(ar.getStdioRead());
+ EXPECT_EQ(wtools::ToUtf8(User()) + "\r\nmarker 1\r\n", data);
}
} // namespace wtools::runas
diff --git a/agents/wnx/watest/test_check_mk.cpp b/agents/wnx/watest/test_check_mk.cpp
index 8d1d137243c..4b27566ed76 100644
--- a/agents/wnx/watest/test_check_mk.cpp
+++ b/agents/wnx/watest/test_check_mk.cpp
@@ -3,23 +3,36 @@
#include "pch.h"
-#include "wnx/cfg.h"
#include "providers/check_mk.h"
+#include "wnx/cfg.h"
namespace cma::provider {
-TEST(CheckMkHeader, Convert) {
- auto local_host = AddressToCheckMkString("127.0.0.1");
- EXPECT_EQ(local_host, "127.0.0.1");
- auto usual_addr = AddressToCheckMkString("10.1.2.3");
- EXPECT_EQ(usual_addr, "10.1.2.3");
+namespace {
+
+/// This function is used to calculate the current timezone offset
+std::string CalcCurrentOffset() {
+ int hours = 0;
+ _get_daylight(&hours);
+ long tz = 0;
+ _get_timezone(&tz);
+ return fmt::format("{:+05}", tz * -100 / 60 / 60 + hours * 100);
+}
+
+} // namespace
- auto ipv6_addr =
- AddressToCheckMkString("2001:0db8:85a3:0000:0000:8a2e:0370:7334");
- EXPECT_EQ(ipv6_addr, "2001:0db8:85a3:0000:0000:8a2e:0370:7334");
+TEST(CheckMkHeader, IsoTime) {
+ constexpr std::chrono::system_clock::time_point tp;
+ EXPECT_EQ(PrintIsoTime(tp),
+ fmt::format("1970-01-01T01:00:00{}", CalcCurrentOffset()));
+}
- auto a3 = AddressToCheckMkString("10.1.2.3/4");
- EXPECT_EQ(a3, "10.1.2.3/4");
+TEST(CheckMkHeader, Convert) {
+ EXPECT_EQ(AddressToCheckMkString("127.0.0.1"), "127.0.0.1");
+ EXPECT_EQ(AddressToCheckMkString("10.1.2.3"), "10.1.2.3");
+ EXPECT_EQ(AddressToCheckMkString("2001:0db8:85a3:0000:0000:8a2e:0370:7334"),
+ "2001:0db8:85a3:0000:0000:8a2e:0370:7334");
+ EXPECT_EQ(AddressToCheckMkString("10.1.2.3/4"), "10.1.2.3/4");
}
} // namespace cma::provider
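
The arithmetic in CalcCurrentOffset is terse; a worked sketch of the same formula, assuming a CET host (tz == -3600 seconds west of UTC, daylight rules present):

    #include <fmt/format.h>
    #include <string>

    // tz * -100 / 60 / 60 turns seconds-west-of-UTC into an hhmm-style
    // offset; _get_daylight()'s hour count shifts it by a full hour.
    std::string OffsetFor(long tz_seconds_west, int daylight_hours) {
        return fmt::format(
            "{:+05}", tz_seconds_west * -100 / 60 / 60 + daylight_hours * 100);
    }
    // OffsetFor(-3600, 0) == "+0100";  OffsetFor(-3600, 1) == "+0200".
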
diff --git a/agents/wnx/watest/test_tools.cpp b/agents/wnx/watest/test_tools.cpp
index f832e294072..c5ff8dd1549 100644
--- a/agents/wnx/watest/test_tools.cpp
+++ b/agents/wnx/watest/test_tools.cpp
@@ -31,14 +31,13 @@ namespace tst {
void AllowReadWriteAccess(const fs::path &path,
std::vector &commands) {
- const std::vector command_templates = {
- L"icacls \"{}\" /inheritance:d /c", // disable inheritance
- L"icacls \"{}\" /grant:r *S-1-5-32-545:(OI)(CI)(RX) /c"}; // read/exec
-
- for (const auto &t : command_templates) {
- auto cmd = fmt::format(t, path.wstring());
- commands.emplace_back(cmd);
- }
+ // disable inheritance
+ commands.emplace_back(
+ fmt::format(L"icacls \"{}\" /inheritance:d /c", path.wstring()));
+ // read/exec
+ commands.emplace_back(
+ fmt::format(L"icacls \"{}\" /grant:r *S-1-5-32-545:(OI)(CI)(RX) /c",
+ path.wstring()));
XLOG::l.i("Protect file from User write '{}'", path);
}
diff --git a/agents/wnx/watest/watest.vcxproj b/agents/wnx/watest/watest.vcxproj
index 8d5aab02f1f..f8620936c14 100644
--- a/agents/wnx/watest/watest.vcxproj
+++ b/agents/wnx/watest/watest.vcxproj
@@ -25,7 +25,7 @@
watest
10.0
asio-1.24.0-patched
- fmt-9.0.0
+ fmt-10.2.1
googletest-71140c3ca7-patched
yaml-cpp.9a362420
@@ -173,10 +173,8 @@
- @echo Installing test in "$(OutDirFullPath)"
-@cd $(SolutionDir)
-@prepare_to_tests.cmd $(TargetDir)test
-
+
+
@@ -214,10 +212,8 @@
- @echo Installing test in "$(OutDirFullPath)"
-@cd $(SolutionDir)
-@prepare_to_tests.cmd $(TargetDir)test
-
+
+
@@ -257,10 +253,8 @@
- @echo Installing test in "$(OutDirFullPath)"
-@cd $(SolutionDir)
-@prepare_to_tests.cmd $(TargetDir)test
-
+
+
@@ -300,10 +294,8 @@
- @echo Installing test in "$(OutDirFullPath)"
-@cd $(SolutionDir)
-@prepare_to_tests.cmd $(TargetDir)test
-
+
+
diff --git a/agents/z_os/check_mk_agent.zOS b/agents/z_os/check_mk_agent.zOS
index 38c6d956089..59e156f90dc 100644
--- a/agents/z_os/check_mk_agent.zOS
+++ b/agents/z_os/check_mk_agent.zOS
@@ -30,6 +30,8 @@ echo PluginsDirectory: $PLUGINSDIR
echo LocalDirectory: $LOCALDIR
echo SpoolDirectory: $SPOOLDIR
echo AgentDirectory: $MK_CONFDIR
+echo 'OSType: z/os'
+echo 'OSName: z/OS'
dir=$(pwd)
diff --git a/artifacts.make b/artifacts.make
index fe1c6710040..6f9c9bf704c 100644
--- a/artifacts.make
+++ b/artifacts.make
@@ -9,48 +9,39 @@ CHECK_MK_ANNOUNCE_FOLDER := $(REPO_PATH)/announce
CHECK_MK_ANNOUNCE_MD := $(CHECK_MK_ANNOUNCE_FOLDER)/announce-$(CHECK_MK_ANNOUNCE_VERSION).md
CHECK_MK_ANNOUNCE_TXT := $(CHECK_MK_ANNOUNCE_FOLDER)/announce-$(CHECK_MK_ANNOUNCE_VERSION).txt
-JAVASCRIPT_MINI := $(foreach jmini,main vue mobile side zxcvbn,$(REPO_PATH)/web/htdocs/js/$(jmini)_min.js)
-
-THEMES := facelift modern-dark
-THEME_CSS_FILES := $(addprefix $(REPO_PATH)/web/htdocs/themes/,$(addsuffix /theme.css,$(THEMES)))
-THEME_JSON_FILES := $(addprefix $(REPO_PATH)/web/htdocs/themes/,$(addsuffix /theme.json,$(THEMES)))
-THEME_IMAGE_DIRS := $(addprefix $(REPO_PATH)/web/htdocs/themes/,$(addsuffix /images,$(THEMES)))
-THEME_RESOURCES := $(THEME_CSS_FILES) $(THEME_JSON_FILES) $(THEME_IMAGE_DIRS)
-
# These artifacts are created independent of the distro the Checkmk package is
# built on either by an "upstream job" or while creating the source package
SOURCE_BUILT_LINUX_AGENTS := \
$(REPO_PATH)/agents/check-mk-agent-$(VERSION)-1.noarch.rpm \
$(REPO_PATH)/agents/check-mk-agent_$(VERSION)-1_all.deb \
- $(REPO_PATH)/agents/linux/check-sql \
+ $(REPO_PATH)/agents/linux/mk-sql \
$(REPO_PATH)/agents/linux/cmk-agent-ctl \
$(REPO_PATH)/agents/linux/cmk-agent-ctl.gz
ifeq ($(ENTERPRISE),yes)
SOURCE_BUILT_AGENT_UPDATER := \
- $(REPO_PATH)/enterprise/agents/plugins/cmk-update-agent \
- $(REPO_PATH)/enterprise/agents/plugins/cmk-update-agent-32
+ $(REPO_PATH)/non-free/cmk-update-agent/cmk-update-agent \
+ $(REPO_PATH)/non-free/cmk-update-agent/cmk-update-agent-32
else
SOURCE_BUILT_AGENT_UPDATER :=
endif
SOURCE_BUILT_OHM := \
$(REPO_PATH)/agents/windows/OpenHardwareMonitorCLI.exe \
$(REPO_PATH)/agents/windows/OpenHardwareMonitorLib.dll
-SOURCE_BUILT_EXT := $(REPO_PATH)/agents/windows/robotmk_ext.exe
-SOURCE_BUILT_CHECK_SQL := $(REPO_PATH)/agents/windows/check-sql.exe
+SOURCE_BUILT_EXT := $(REPO_PATH)/agents/windows/robotmk_ext.exe
+SOURCE_BUILT_MK_SQL := $(REPO_PATH)/agents/windows/mk-sql.exe
SOURCE_BUILT_WINDOWS := \
$(REPO_PATH)/agents/windows/check_mk_agent.msi \
$(REPO_PATH)/agents/windows/python-3.cab \
$(REPO_PATH)/agents/windows/unsign-msi.patch
SOURCE_BUILT_AGENTS := \
$(SOURCE_BUILT_LINUX_AGENTS) \
- $(SOURCE_BUILT_AGENT_UPDATER) \
$(SOURCE_BUILT_OHM) \
$(SOURCE_BUILT_EXT) \
- $(SOURCE_BUILT_CHECK_SQL) \
+ $(SOURCE_BUILT_MK_SQL) \
$(SOURCE_BUILT_WINDOWS)
ifeq ($(ENTERPRISE),yes)
-PROTO_PYTHON_OUT := $(REPO_PATH)/enterprise/cmc_proto
+PROTO_PYTHON_OUT := $(REPO_PATH)/non-free/cmc-protocols/cmc_proto
CMC_PROTO_MODULES := \
$(PROTO_PYTHON_OUT)/config/v1/types_pb2.py \
$(PROTO_PYTHON_OUT)/cycletime/v1/types_pb2.py \
diff --git a/bandit.yaml b/bandit.yaml
index 45bdf05913f..eda93a8a4a6 100644
--- a/bandit.yaml
+++ b/bandit.yaml
@@ -9,7 +9,6 @@
### using the -t/-s CLI options. Note that the same test ID should not appear
### in both 'tests' and 'skips', this would be nonsensical and is detected by
### Bandit at runtime.
-
# Available tests:
# B101 : assert_used
# B102 : exec_used
@@ -21,7 +20,9 @@
# B108 : hardcoded_tmp_directory
# B110 : try_except_pass
# B112 : try_except_continue
+# B113 : request_without_timeout
# B201 : flask_debug_true
+# B202 : tarfile_unsafe_members
# B301 : pickle
# B302 : marshal
# B303 : Use of insecure MD2, MD4, MD5, or SHA1 hash function.
@@ -80,6 +81,7 @@
# B609 : linux_commands_wildcard_injection
# B610 : django_extra_used
# B611 : django_rawsql_used
+# B612 : logging_config_insecure_listen
# B701 : jinja2_autoescape_false
# B702 : use_of_mako_templates
# B703 : django_mark_safe
@@ -95,7 +97,6 @@ skips:
# medium severity tests excluded for now; enable step-by-step
- B104 # hardcoded_bind_all_interfaces
- - B108 # hardcoded_tmp_directory
- B302 # marshal
- B608 # hardcoded_sql_expressions
@@ -197,6 +198,8 @@ ssl_with_bad_defaults:
- PROTOCOL_TLSv1
- SSLv3_METHOD
- TLSv1_METHOD
+ - PROTOCOL_TLSv1_1
+ - TLSv1_1_METHOD
ssl_with_bad_version:
bad_protocol_versions:
- PROTOCOL_SSLv2
@@ -206,6 +209,8 @@ ssl_with_bad_version:
- PROTOCOL_TLSv1
- SSLv3_METHOD
- TLSv1_METHOD
+ - PROTOCOL_TLSv1_1
+ - TLSv1_1_METHOD
start_process_with_a_shell:
no_shell:
- os.execl
diff --git a/bin/.f12 b/bin/.f12
index d2b5116bcb4..6f06f68a956 100755
--- a/bin/.f12
+++ b/bin/.f12
@@ -18,8 +18,10 @@
mkevent \
mkbackup \
cmk-update-config \
+ cmk-compute-api-spec \
cmk-update-license-usage \
cmk-passwd \
+ cmk-trigger-api-spec-job \
post-rename-site \
init-redis \
"$ROOT"/bin/
diff --git a/bin/cmk-compute-api-spec b/bin/cmk-compute-api-spec
new file mode 100755
index 00000000000..533ec470cfb
--- /dev/null
+++ b/bin/cmk-compute-api-spec
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+import sys
+
+from cmk.gui.openapi.spec.spec_generator import main
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/bin/cmk-trigger-api-spec-job b/bin/cmk-trigger-api-spec-job
new file mode 100755
index 00000000000..f81c90244ef
--- /dev/null
+++ b/bin/cmk-trigger-api-spec-job
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# Copyright (C) 2024 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+import sys
+from collections.abc import Sequence
+from logging import getLogger
+
+from cmk.gui.openapi.spec.spec_generator_job import trigger_spec_generation_in_background
+
+
+def main(args: Sequence[str]) -> int:
+ logger = getLogger("api-spec")
+ try:
+ trigger_spec_generation_in_background(user_id=None)
+ except Exception as e:
+ logger.error("ERROR: Failed to initialize background job for regenerating openapi spec")
+ logger.error(e)
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/bin/cmk-update-config b/bin/cmk-update-config
index a793dffacee..a28e22b4ad7 100755
--- a/bin/cmk-update-config
+++ b/bin/cmk-update-config
@@ -5,7 +5,7 @@
import sys
-from cmk.update_config.main import main
+from cmk.update_config.main import ensure_site_is_stopped, main
if __name__ == "__main__":
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:], ensure_site_is_stopped))
diff --git a/bin/cmk-validate-plugins b/bin/cmk-validate-plugins
new file mode 100755
index 00000000000..174c284ca71
--- /dev/null
+++ b/bin/cmk-validate-plugins
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+# Copyright (C) 2023 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+import sys
+
+from cmk.validate_plugins import main
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/bin/livedump b/bin/livedump
index 089290319b1..065db521492 100755
--- a/bin/livedump
+++ b/bin/livedump
@@ -14,7 +14,7 @@ from typing import NoReturn
from livestatus import SingleSiteConnection
# This will be substituted at 'make dist' time.
-__version__ = "2.3.0b1"
+__version__ = "2.4.0b1"
@dataclass(slots=True)
@@ -171,7 +171,7 @@ def connect(socket_path: str | None) -> SingleSiteConnection:
if socket_path is not None:
return SingleSiteConnection(socket_path)
if omd_root := getenv("OMD_ROOT"):
- return SingleSiteConnection(f"unix:{omd_root}/tmp/run/live")
+ return SingleSiteConnection(f"unix:{omd_root}/tmp/run/live") # nosec # BNS:13b2c8
bail_out("specify Livestatus socket or set OMD_ROOT")
diff --git a/bin/mkbackup b/bin/mkbackup
index 716122acd0d..96829d1ced3 100755
--- a/bin/mkbackup
+++ b/bin/mkbackup
@@ -173,7 +173,7 @@ def backup_state(job: Job) -> State:
def restore_state() -> State:
- path = Path("/tmp")
+ path = Path("/tmp") # nosec B108 # BNS:13b2c8
name = f"restore-{current_site_id()}.state"
return State(path / name)
diff --git a/bin/mkp b/bin/mkp
index 1e995a14f60..a35a51bcbdf 100755
--- a/bin/mkp
+++ b/bin/mkp
@@ -18,8 +18,6 @@ from cmk.discover_plugins import addons_plugins_local_path, plugins_local_path
from cmk.mkp_tool import (
cli,
make_post_package_change_actions,
- PackageOperationCallbacks,
- PackagePart,
PackageStore,
PathConfig,
reload_apache,
@@ -56,20 +54,9 @@ _SITE_CONTEXT: Final = cli.SiteContext(
local_dir=cmk.utils.paths.local_optional_packages_dir,
shipped_dir=cmk.utils.paths.optional_packages_dir,
),
- callbacks={
- PackagePart.EC_RULE_PACKS: PackageOperationCallbacks(
- install=ec.install_packaged_rule_packs,
- uninstall=ec.uninstall_packaged_rule_packs,
- release=ec.release_packaged_rule_packs,
- ),
- },
+ callbacks=ec.mkp_callbacks(),
post_package_change_actions=make_post_package_change_actions(
- ((PackagePart.GUI, PackagePart.WEB), reload_apache),
- ((PackagePart.GUI, PackagePart.WEB), invalidate_visuals_cache),
- (
- (PackagePart.GUI, PackagePart.WEB, PackagePart.EC_RULE_PACKS),
- request_index_rebuild,
- ),
+ on_any_change=(reload_apache, invalidate_visuals_cache, request_index_rebuild)
),
version=__version__,
parse_version=parse_check_mk_version,
diff --git a/buildscripts/docker_image_aliases/IMAGE_ALMALINUX_9/meta.yml b/buildscripts/docker_image_aliases/IMAGE_ALMALINUX_9/meta.yml
index afc28c426d8..531c752ad85 100644
--- a/buildscripts/docker_image_aliases/IMAGE_ALMALINUX_9/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_ALMALINUX_9/meta.yml
@@ -1,2 +1,2 @@
-source: almalinux:9
-tag: artifacts.lan.tribe29.com:4000/almalinux:9-image-alias-master-0f4a544d4d1
+source: almalinux:9.2
+tag: artifacts.lan.tribe29.com:4000/almalinux:9.2-image-alias-master-f2fdb270d85
diff --git a/buildscripts/docker_image_aliases/IMAGE_CENTOS_8/meta.yml b/buildscripts/docker_image_aliases/IMAGE_CENTOS_8/meta.yml
index 8737b2e833e..e0363a6fd36 100644
--- a/buildscripts/docker_image_aliases/IMAGE_CENTOS_8/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_CENTOS_8/meta.yml
@@ -1,2 +1,2 @@
source: centos:centos8
-tag: artifacts.lan.tribe29.com:4000/centos:centos8-image-alias-master-0f4a544d4d1
+tag: artifacts.lan.tribe29.com:4000/centos:centos8-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_10/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_10/Dockerfile
index c7188d5d0bc..7c194d2600b 100644
--- a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_10/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_10/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/debian@sha256:88a98482ebe4b8ef20104c844d74ac59a7241e8782c9ea3a1c1d47503dbbbddb
+FROM artifacts.lan.tribe29.com:4000/debian@sha256:255eec9d157d35e00a81a45f1e958fd19437d504139e8eb4ea6cc380ea741ed4
diff --git a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_10/meta.yml b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_10/meta.yml
index 3152163b32c..613bad5ed4f 100644
--- a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_10/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_10/meta.yml
@@ -1,2 +1,2 @@
source: debian:10
-tag: artifacts.lan.tribe29.com:4000/debian:10-image-alias-master-0f4a544d4d1
+tag: artifacts.lan.tribe29.com:4000/debian:10-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_11/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_11/Dockerfile
index 8ec54259796..2f588b86439 100644
--- a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_11/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_11/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/debian@sha256:3da0c9fb1282040a13e26074dc1712f656fc2fa6d8d2e264612e5c7266f17653
+FROM artifacts.lan.tribe29.com:4000/debian@sha256:38fb0f1618bfa65b0cf1dd279293ebc70ef94aab2f2dc49274a2efc6ee29880e
diff --git a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_11/meta.yml b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_11/meta.yml
index b3667223cf2..afb3c3e1941 100644
--- a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_11/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_11/meta.yml
@@ -1,2 +1,2 @@
source: debian:11
-tag: artifacts.lan.tribe29.com:4000/debian:11-image-alias-master-0f4a544d4d1
+tag: artifacts.lan.tribe29.com:4000/debian:11-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_12/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_12/Dockerfile
index 92b13f925cb..f8305ff8522 100644
--- a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_12/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_12/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/debian@sha256:60774985572749dc3c39147d43089d53e7ce17b844eebcf619d84467160217ab
+FROM artifacts.lan.tribe29.com:4000/debian@sha256:40f71cd223a60afc1bac2adf1b204bfabef29cdef725e74993f86098ff87f92f
diff --git a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_12/meta.yml b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_12/meta.yml
index 053df285af8..908d9cfb2c7 100644
--- a/buildscripts/docker_image_aliases/IMAGE_DEBIAN_12/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_DEBIAN_12/meta.yml
@@ -1,2 +1,2 @@
source: debian:12
-tag: artifacts.lan.tribe29.com:4000/debian:12-image-alias-master-38f62762277
+tag: artifacts.lan.tribe29.com:4000/debian:12-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/IMAGE_ORACLE_DB_23C/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_ORACLE_DB_23C/Dockerfile
new file mode 100644
index 00000000000..5f0fd95d71c
--- /dev/null
+++ b/buildscripts/docker_image_aliases/IMAGE_ORACLE_DB_23C/Dockerfile
@@ -0,0 +1 @@
+FROM artifacts.lan.tribe29.com:4000/free@sha256:3a47c3d891573cc348c41042a0acf80509a40fce3d8d0322fe4d65800397f910
diff --git a/buildscripts/docker_image_aliases/IMAGE_ORACLE_DB_23C/meta.yml b/buildscripts/docker_image_aliases/IMAGE_ORACLE_DB_23C/meta.yml
new file mode 100644
index 00000000000..154194f260b
--- /dev/null
+++ b/buildscripts/docker_image_aliases/IMAGE_ORACLE_DB_23C/meta.yml
@@ -0,0 +1,2 @@
+source: container-registry.oracle.com/database/free
+tag: artifacts.lan.tribe29.com:4000/free:latest-image-alias-master-f8f47b17ce4
diff --git a/buildscripts/docker_image_aliases/IMAGE_SLES_12SP5/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_SLES_12SP5/Dockerfile
index 63d9914c3c6..48a05b88930 100644
--- a/buildscripts/docker_image_aliases/IMAGE_SLES_12SP5/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_SLES_12SP5/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/sles12sp5@sha256:e74ac74e36b1e28ec33163a393306d05eb43883bb14a6b3afddb1cccf77dfc12
+FROM artifacts.lan.tribe29.com:4000/sles12sp5@sha256:02707be67d78bc3e4cd8811b9056b1efe15fdc3c206cd7765e73416ab1f9e09d
diff --git a/buildscripts/docker_image_aliases/IMAGE_SLES_12SP5/meta.yml b/buildscripts/docker_image_aliases/IMAGE_SLES_12SP5/meta.yml
index d829ebbd192..f85e6e6ba36 100644
--- a/buildscripts/docker_image_aliases/IMAGE_SLES_12SP5/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_SLES_12SP5/meta.yml
@@ -1,2 +1,2 @@
source: registry.suse.com/suse/sles12sp5
-tag: artifacts.lan.tribe29.com:4000/sles12sp5:latest-image-alias-master-0f4a544d4d1
+tag: artifacts.lan.tribe29.com:4000/sles12sp5:latest-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/IMAGE_SLES_15SP3/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_SLES_15SP3/Dockerfile
index dafa7ec858f..996578a40a2 100644
--- a/buildscripts/docker_image_aliases/IMAGE_SLES_15SP3/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_SLES_15SP3/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/sle15@sha256:513c7c527a7e0328a932ff57595d3b634a8fa03e81b68d0631a2cb5ea3675bf0
+FROM artifacts.lan.tribe29.com:4000/sle15@sha256:5a8b8540dcfc448bcff2a412e9f88f4996aef475d31baf7726e43b4d5855270a
diff --git a/buildscripts/docker_image_aliases/IMAGE_SLES_15SP3/meta.yml b/buildscripts/docker_image_aliases/IMAGE_SLES_15SP3/meta.yml
index 4cffe3ba42e..d320449cb1e 100644
--- a/buildscripts/docker_image_aliases/IMAGE_SLES_15SP3/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_SLES_15SP3/meta.yml
@@ -1,2 +1,2 @@
source: registry.suse.com/suse/sle15:15.3
-tag: artifacts.lan.tribe29.com:4000/sle15:15.3-image-alias-master-0f4a544d4d1
+tag: artifacts.lan.tribe29.com:4000/sle15:15.3-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/IMAGE_SLES_15SP4/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_SLES_15SP4/Dockerfile
index 5f2c7d6e9da..1d8bf633788 100644
--- a/buildscripts/docker_image_aliases/IMAGE_SLES_15SP4/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_SLES_15SP4/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/sle15@sha256:fca8efb3b0cb6add2c4792d1f4073441f2ba0aea82dcd20e05a60efd12dad375
+FROM artifacts.lan.tribe29.com:4000/sle15@sha256:9eef897a9dc6c208a757b250002b68908f88481e3e06a3a9ec293852f4ae463e
diff --git a/buildscripts/docker_image_aliases/IMAGE_SLES_15SP4/meta.yml b/buildscripts/docker_image_aliases/IMAGE_SLES_15SP4/meta.yml
index 31813c33eec..f437c47c931 100644
--- a/buildscripts/docker_image_aliases/IMAGE_SLES_15SP4/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_SLES_15SP4/meta.yml
@@ -1,2 +1,2 @@
source: registry.suse.com/suse/sle15:15.4
-tag: artifacts.lan.tribe29.com:4000/sle15:15.4-image-alias-master-0f4a544d4d1
+tag: artifacts.lan.tribe29.com:4000/sle15:15.4-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/IMAGE_TESTING/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_TESTING/Dockerfile
index 9dddc8c1b17..2f6f3e640cb 100644
--- a/buildscripts/docker_image_aliases/IMAGE_TESTING/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_TESTING/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/ubuntu-20.04@sha256:f6f23aa591b845c873a59639b4bf25ab66c66d2a3277da11a49d1331fa498dc1
+FROM artifacts.lan.tribe29.com:4000/ubuntu-20.04@sha256:0f19488657465562b349c89f7fb150793ff53c77e18e3694d00fdefe91a3edb3
diff --git a/buildscripts/docker_image_aliases/IMAGE_TESTING/meta.yml b/buildscripts/docker_image_aliases/IMAGE_TESTING/meta.yml
index 681e0a60225..f026be98ca1 100644
--- a/buildscripts/docker_image_aliases/IMAGE_TESTING/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_TESTING/meta.yml
@@ -1,2 +1,2 @@
source: artifacts.lan.tribe29.com:4000/ubuntu-20.04:master-latest
-tag: artifacts.lan.tribe29.com:4000/ubuntu-20.04:master-latest-image-alias-master-41d84e675e5
+tag: artifacts.lan.tribe29.com:4000/ubuntu-20.04:master-latest-image-alias-master-8c4083e9826
diff --git a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_20_04/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_20_04/Dockerfile
index e9e42a286f5..77fe1cc1be9 100644
--- a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_20_04/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_20_04/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/ubuntu@sha256:d3917e616eab568c824fa8bb601049271b6b5c6661b8a27df7b9359cfc8c13ca
+FROM artifacts.lan.tribe29.com:4000/ubuntu@sha256:f9f2f63b5645fd2d1b823793dd3b7bdea9d6765f236afcad829b9ac0ea4fb688
diff --git a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_20_04/meta.yml b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_20_04/meta.yml
index bccd380611e..588a0a88390 100644
--- a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_20_04/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_20_04/meta.yml
@@ -1,2 +1,2 @@
source: ubuntu:20.04
-tag: artifacts.lan.tribe29.com:4000/ubuntu:20.04-image-alias-master-68c64cee82d
+tag: artifacts.lan.tribe29.com:4000/ubuntu:20.04-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_22_04/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_22_04/Dockerfile
index 4e7ef73b2fb..3c558f29493 100644
--- a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_22_04/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_22_04/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/ubuntu@sha256:f29870aec43cb049f711f7b807626214c9a97e428f93dfcdf3ba23d2c51c2fa5
+FROM artifacts.lan.tribe29.com:4000/ubuntu@sha256:5e474741ce14bd13a5cb558faef253cb23b12b53e8ba00a41119ac0f724a78f9
diff --git a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_22_04/meta.yml b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_22_04/meta.yml
index a8a33c96e11..7540d79732a 100644
--- a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_22_04/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_22_04/meta.yml
@@ -1,2 +1,2 @@
source: ubuntu:22.04
-tag: artifacts.lan.tribe29.com:4000/ubuntu:22.04-image-alias-master-0f4a544d4d1
+tag: artifacts.lan.tribe29.com:4000/ubuntu:22.04-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_04/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_04/Dockerfile
deleted file mode 100644
index 7f353bdaf11..00000000000
--- a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_04/Dockerfile
+++ /dev/null
@@ -1 +0,0 @@
-FROM artifacts.lan.tribe29.com:4000/ubuntu@sha256:d28a9afbc67117a33ef0776bd0844762a91c4a0739aee2ab217e43466eb38400
diff --git a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_04/meta.yml b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_04/meta.yml
deleted file mode 100644
index 6e8a5a00a29..00000000000
--- a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_04/meta.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-source: ubuntu:23.04
-tag: artifacts.lan.tribe29.com:4000/ubuntu:23.04-image-alias-master-3a5fab1acc5
diff --git a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_10/Dockerfile b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_10/Dockerfile
index 66655d0a979..c1407ed24f1 100644
--- a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_10/Dockerfile
+++ b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_10/Dockerfile
@@ -1 +1 @@
-FROM artifacts.lan.tribe29.com:4000/ubuntu@sha256:0d9929ff691673c2cfe42d12674a0f3950dc1c0c666836d8accf4393e52ecd53
+FROM artifacts.lan.tribe29.com:4000/ubuntu@sha256:d4a91205bf856bfb3a3a3c5ff7786f27481fc9e2806257466135052f1cea6219
diff --git a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_10/meta.yml b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_10/meta.yml
index f394dc799e7..43b66ce5628 100644
--- a/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_10/meta.yml
+++ b/buildscripts/docker_image_aliases/IMAGE_UBUNTU_23_10/meta.yml
@@ -1,2 +1,2 @@
source: ubuntu:23.10
-tag: artifacts.lan.tribe29.com:4000/ubuntu:23.10-image-alias-master-3821332d4b2
+tag: artifacts.lan.tribe29.com:4000/ubuntu:23.10-image-alias-master-2795a997c8b
diff --git a/buildscripts/docker_image_aliases/resolve.py b/buildscripts/docker_image_aliases/resolve.py
index f2241b7e7c8..0434e9e96ec 100755
--- a/buildscripts/docker_image_aliases/resolve.py
+++ b/buildscripts/docker_image_aliases/resolve.py
@@ -3,7 +3,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-"""Maps a given Docker Image Alias name (e.g. IMAGE_TESTING) to an unambiguous
+"""Maps a given Docker Image Alias name (e.g. IMAGE_CMK_BASE) to an unambiguous
image ID, defined in correspondingly named folders containing Dockerfiles.
So the mapping is SCM tracked and thus branch specific and reproducible."""
@@ -46,7 +46,6 @@ def image_id(alias_name: str) -> str:
" and the image exists on the registry.",
file=sys.stderr,
)
- print("If IMAGE_TESTING got repinned recently, try a rebase!", file=sys.stderr)
print("INVALID_IMAGE_ID")
diff --git a/buildscripts/infrastructure/build-nodes/almalinux-9/Dockerfile b/buildscripts/infrastructure/build-nodes/almalinux-9/Dockerfile
index cb8e67338fb..7bafb3a40e2 100644
--- a/buildscripts/infrastructure/build-nodes/almalinux-9/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/almalinux-9/Dockerfile
@@ -1,9 +1,14 @@
-ARG IMAGE_ALMALINUX_9
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_ALMALINUX_9} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV LC_ALL=C.UTF-8 LANG=C.UTF-8 PATH="/opt/bin:${PATH}"
+ENV \
+ LC_ALL=C.UTF-8 \
+ LANG=C.UTF-8 \
+ PATH="/opt/bin:${PATH}"
RUN yum -y --enablerepo=crb makecache && yum repolist --enablerepo=crb \
&& yum -y --allowerasing --enablerepo=crb install \
@@ -83,7 +88,9 @@ RUN yum -y makecache \
xmlsec1-devel \
xmlsec1-openssl-devel \
&& yum clean all
+
# --nogpgcheck: Workaround for failing installation, not locally reproducable
+
RUN yum -y makecache \
&& yum -y --enablerepo=crb reinstall \
kernel-headers \
@@ -116,7 +123,7 @@ RUN if test -f /usr/lib/rpm/redhat/redhat-annobin-cc1; then \
&& touch /usr/lib/rpm/redhat/redhat-annobin-cc1; \
fi
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -127,33 +134,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-COPY scripts/* /opt/
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files needed by many scripts (they must be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
+
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python as it requires shared object files, see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -166,8 +215,14 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
+
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
RUN /opt/install-cmk-dependencies.sh
+
+COPY --from=scripts install-patchelf.sh /opt/
RUN /opt/install-patchelf.sh
# The /etc/fstab does not exist in the base image we use. A missing fstab prevents OMD from
@@ -175,17 +230,12 @@ RUN /opt/install-patchelf.sh
# simply solve this by pre-creating the empty file here.
RUN touch /etc/fstab
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
-
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
-
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/centos-8/Dockerfile b/buildscripts/infrastructure/build-nodes/centos-8/Dockerfile
index 77e8556f205..0ca8cb35e9e 100644
--- a/buildscripts/infrastructure/build-nodes/centos-8/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/centos-8/Dockerfile
@@ -1,9 +1,14 @@
-ARG IMAGE_CENTOS_8
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_CENTOS_8} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV LC_ALL=C.UTF-8 LANG=C.UTF-8 PATH="/opt/bin:${PATH}"
+ENV \
+ LC_ALL=C.UTF-8 \
+ LANG=C.UTF-8 \
+ PATH="/opt/bin:${PATH}"
# Centos-8 is EOL, therfor we switch to the vault repos
RUN sed -i 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/* \
@@ -88,7 +93,9 @@ RUN yum -y makecache \
xmlsec1-devel \
xmlsec1-openssl-devel \
&& yum clean all
+
# --nogpgcheck: Workaround for failing installation, not locally reproducable
+
RUN yum -y makecache \
&& yum -y --enablerepo=powertools reinstall \
kernel-headers \
@@ -121,7 +128,7 @@ RUN if test -f /usr/lib/rpm/redhat/redhat-annobin-cc1; then \
&& touch /usr/lib/rpm/redhat/redhat-annobin-cc1; \
fi
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -132,33 +139,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-COPY scripts/* /opt/
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files needed by many scripts (they must be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
+
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python as it requires shared object files, see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -171,8 +220,14 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
+
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
RUN /opt/install-cmk-dependencies.sh
+
+COPY --from=scripts install-patchelf.sh /opt/
RUN /opt/install-patchelf.sh
# The /etc/fstab does not exist in the base image we use. A missing fstab prevents OMD from
@@ -180,17 +235,12 @@ RUN /opt/install-patchelf.sh
# simply solve this by pre-creating the empty file here.
RUN touch /etc/fstab
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
-
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
-
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/debian-10/Dockerfile b/buildscripts/infrastructure/build-nodes/debian-10/Dockerfile
index b0b03f89f2b..06534cec4b5 100644
--- a/buildscripts/infrastructure/build-nodes/debian-10/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/debian-10/Dockerfile
@@ -1,12 +1,19 @@
-ARG IMAGE_DEBIAN_10
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_DEBIAN_10} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV DEBIAN_FRONTEND=noninteractive LC_ALL=C.UTF-8 LANG=C.UTF-8 PATH="/opt/bin:${PATH}"
+ENV \
+ DEBIAN_FRONTEND=noninteractive \
+ LC_ALL=C.UTF-8 \
+ LANG=C.UTF-8 \
+ PATH="/opt/bin:${PATH}"
+
+RUN apt-get update
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
git \
libenchant1c2a \
librrd-dev \
@@ -17,11 +24,9 @@ RUN apt-get update \
rrdtool \
strace \
sudo \
- vim \
- && rm -rf /var/lib/apt/lists/*
+ vim
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
apache2 \
apache2-dev \
build-essential \
@@ -67,10 +72,13 @@ RUN apt-get update \
smbclient \
texinfo \
tk-dev \
- uuid-dev \
- && rm -rf /var/lib/apt/lists/*
+ uuid-dev
+
+# remove the apt service because we don't need it and it causes problems, see
+# https://jira.lan.tribe29.com/browse/CMK-16607
+RUN rm /etc/cron.daily/apt-compat
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -81,33 +89,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files needed by many scripts (they must be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
-COPY scripts/* /opt/
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python as it requires shared object files, see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -120,21 +170,22 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
-RUN /opt/install-cmk-dependencies.sh
-RUN /opt/install-patchelf.sh
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
+RUN /opt/install-cmk-dependencies.sh
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
+COPY --from=scripts install-patchelf.sh /opt/
+RUN /opt/install-patchelf.sh
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/debian-11/Dockerfile b/buildscripts/infrastructure/build-nodes/debian-11/Dockerfile
index 7aeb2925201..a7a0cef7586 100644
--- a/buildscripts/infrastructure/build-nodes/debian-11/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/debian-11/Dockerfile
@@ -1,12 +1,19 @@
-ARG IMAGE_DEBIAN_11
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_DEBIAN_11} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV DEBIAN_FRONTEND=noninteractive LC_ALL=C.UTF-8 LANG=C.UTF-8 PATH="/opt/bin:${PATH}"
+ENV \
+ DEBIAN_FRONTEND=noninteractive \
+ LC_ALL=C.UTF-8 \
+ LANG=C.UTF-8 \
+ PATH="/opt/bin:${PATH}"
+
+RUN apt-get update
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
git \
librrd-dev \
libsasl2-dev \
@@ -16,11 +23,9 @@ RUN apt-get update \
rrdtool \
strace \
sudo \
- vim \
- && rm -rf /var/lib/apt/lists/*
+ vim
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
apache2 \
apache2-dev \
build-essential \
@@ -66,10 +71,13 @@ RUN apt-get update \
smbclient \
texinfo \
tk-dev \
- uuid-dev \
- && rm -rf /var/lib/apt/lists/*
+ uuid-dev
+
+# remove the apt service because we don't need it and it causes problems, see
+# https://jira.lan.tribe29.com/browse/CMK-16607
+RUN rm /etc/cron.daily/apt-compat
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -80,33 +88,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files needed by many scripts (they must be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
-COPY scripts/* /opt/
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python as it requires shared object files, see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -119,21 +169,22 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
-RUN /opt/install-cmk-dependencies.sh
-RUN /opt/install-patchelf.sh
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
+RUN /opt/install-cmk-dependencies.sh
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
+COPY --from=scripts install-patchelf.sh /opt/
+RUN /opt/install-patchelf.sh
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/debian-12/Dockerfile b/buildscripts/infrastructure/build-nodes/debian-12/Dockerfile
index da9672cd018..a893ff686b8 100644
--- a/buildscripts/infrastructure/build-nodes/debian-12/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/debian-12/Dockerfile
@@ -1,12 +1,19 @@
-ARG IMAGE_DEBIAN_12
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_DEBIAN_12} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV DEBIAN_FRONTEND=noninteractive LC_ALL=C.UTF-8 LANG=C.UTF-8 PATH="/opt/bin:${PATH}"
+ENV \
+ DEBIAN_FRONTEND=noninteractive \
+ LC_ALL=C.UTF-8 \
+ LANG=C.UTF-8 \
+ PATH="/opt/bin:${PATH}"
+
+RUN apt-get update
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
git \
librrd-dev \
libsasl2-dev \
@@ -16,11 +23,9 @@ RUN apt-get update \
rrdtool \
strace \
sudo \
- vim \
- && rm -rf /var/lib/apt/lists/*
+ vim
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
apache2 \
apache2-dev \
build-essential \
@@ -65,10 +70,13 @@ RUN apt-get update \
smbclient \
texinfo \
tk-dev \
- uuid-dev \
- && rm -rf /var/lib/apt/lists/*
+ uuid-dev
+
+# remove the apt service because we don't need it and it causes problems, see
+# https://jira.lan.tribe29.com/browse/CMK-16607
+RUN rm /etc/cron.daily/apt-compat
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -79,33 +87,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files needed by many scripts (they must be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
-COPY scripts/* /opt/
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python as it requires shared object files, see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -118,21 +168,22 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
-RUN /opt/install-cmk-dependencies.sh
-RUN /opt/install-patchelf.sh
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
+RUN /opt/install-cmk-dependencies.sh
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
+COPY --from=scripts install-patchelf.sh /opt/
+RUN /opt/install-patchelf.sh
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/laptops/Dockerfile b/buildscripts/infrastructure/build-nodes/laptops/Dockerfile
new file mode 100644
index 00000000000..5415b87bf0d
--- /dev/null
+++ b/buildscripts/infrastructure/build-nodes/laptops/Dockerfile
@@ -0,0 +1,38 @@
+ARG BASE_IMAGE
+# hadolint ignore=DL3006
+FROM ${BASE_IMAGE}
+
+SHELL ["/bin/bash", "-c"]
+ENV DEBIAN_FRONTEND=noninteractive LC_ALL=C.UTF-8 LANG=C.UTF-8
+
+ARG NEXUS_ARCHIVES_URL
+ARG NEXUS_USERNAME
+ARG NEXUS_PASSWORD
+ARG DISTRO
+ARG CI
+ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" CI="$CI"
+
+ARG ADDITIONAL_ARGS
+
+# used by install scripts
+COPY scripts/.bazelversion /opt/
+COPY scripts/defines.make /opt/
+COPY scripts/package_versions.bzl /opt/
+COPY scripts/static_variables.bzl /opt/
+COPY scripts/strip_binaries /opt/
+COPY scripts/*.mk /opt/
+COPY scripts/*.sh /opt/
+
+RUN /opt/install-development.sh --profile all ${ADDITIONAL_ARGS}
+
+# run the installation a second time to confirm an additional run does not fail
+# used by install scripts
+COPY scripts/.bazelversion /opt/
+COPY scripts/defines.make /opt/
+COPY scripts/package_versions.bzl /opt/
+COPY scripts/static_variables.bzl /opt/
+COPY scripts/strip_binaries /opt/
+COPY scripts/*.mk /opt/
+COPY scripts/*.sh /opt/
+RUN rm -rf /root/.pyenv
+RUN /opt/install-development.sh --profile all ${ADDITIONAL_ARGS}
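
This laptop image mainly smoke-tests install-development.sh (added below): the full installation runs twice to verify that a repeated run also succeeds. A build could be triggered roughly like this (the base image choice and a scripts/ folder prepared next to the Dockerfile are assumptions):

    docker build \
        --build-arg BASE_IMAGE=ubuntu:22.04 \
        --build-arg DISTRO=ubuntu-22.04 \
        --build-arg CI=1 \
        buildscripts/infrastructure/build-nodes/laptops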
diff --git a/buildscripts/infrastructure/build-nodes/scripts/build_lib.sh b/buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
index d3a67949f02..4726c1ba7fc 100644
--- a/buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
@@ -9,6 +9,33 @@ failure() {
exit 1
}
+# some style settings defined here
+txtRed=$'\e[41m'
+txtGreen=$'\e[32m'
+txtBlue=$'\e[34m'
+resetColor=$'\e[0m'
+
+print_red() {
+ printf "%s%s%s\n" "${txtRed}" "$1" "${resetColor}"
+}
+
+print_green() {
+ printf "%s%s%s\n" "${txtGreen}" "$1" "${resetColor}"
+}
+
+print_blue() {
+ printf "%s%s%s\n" "${txtBlue}" "$1" "${resetColor}"
+}
+
+print_debug() {
+ print_blue " $1"
+}
+
+get_desired_python_version() {
+ # to use "make print-PYTHON_VERISON" the git repo with "Makefile" and "artifacts.make" would be necessary at a known location
+ sed -n 's|^PYTHON_VERSION = \"\(\S*\)\"$|\1|p' "${1}"/package_versions.bzl
+}
+
_artifact_name() {
local DIR_NAME="$1"
local DISTRO="$2"
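
The new helpers are meant to be sourced, as the install scripts in this directory already do; for example (this assumes package_versions.bzl has been copied next to the sourcing script, which install-development.sh below arranges):

    SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
    # shellcheck source=buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
    . "${SCRIPT_DIR}/build_lib.sh"
    print_green "Desired Python version: $(get_desired_python_version "${SCRIPT_DIR}")"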
diff --git a/buildscripts/infrastructure/build-nodes/scripts/checksums b/buildscripts/infrastructure/build-nodes/scripts/checksums
new file mode 100755
index 00000000000..771071b841d
--- /dev/null
+++ b/buildscripts/infrastructure/build-nodes/scripts/checksums
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+# Output file details plus checksums for all files under the given directories.
+# Useful for finding differences between directory trees.
+
+print_details() {
+ if [ -d "$1" ]; then
+        stat --printf="%n %A %s\n" "$1"
+ else
+ echo "$(stat --printf="%n %A %s" "$1") $(sha1sum "$1" | cut -d' ' -f1)"
+ fi
+}
+export -f print_details
+
+for path in "$@"; do
+ find "${path}" -print0 |
+ xargs -0 -n 1 -P 10 -I {} bash -c 'print_details "$@"' _ {} |
+ sort
+done
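
Per the header comment, a typical use is to capture the listings of two trees and diff them; an illustrative session:

    ./checksums /opt >/tmp/before.txt
    ./checksums /other/opt >/tmp/after.txt
    diff /tmp/before.txt /tmp/after.txt  # lines differ on mode, size or SHA-1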
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-aws-cli.sh b/buildscripts/infrastructure/build-nodes/scripts/install-aws-cli.sh
index 8ca2ab527a2..cf305828d51 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-aws-cli.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-aws-cli.sh
@@ -12,7 +12,7 @@ case "$DISTRO" in
curl -s "https://awscli.amazonaws.com/awscli-exe-linux-x86_64-2.11.9.zip" -o "awscliv2.zip"
unzip -q awscliv2.zip
- ./aws/install
+ ./aws/install --update
rm -r aws awscliv2.zip
# Test the installation
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-bazel.sh b/buildscripts/infrastructure/build-nodes/scripts/install-bazel.sh
index 52d537d5654..463364312e8 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-bazel.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-bazel.sh
@@ -13,7 +13,7 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
. "${SCRIPT_DIR}/build_lib.sh"
DIR_NAME="bazel"
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
BAZEL_VERSION="$(<"${SCRIPT_DIR}"/.bazelversion)"
BAZEL_EXE_FILE="bazel-${BAZEL_VERSION}-linux-x86_64"
@@ -26,6 +26,6 @@ if [ "$1" != "link-only" ]; then
chmod +x "${BAZEL_EXE_FILE}"
fi
-ln -s "${TARGET_DIR}/${DIR_NAME}/${BAZEL_EXE_FILE}" "/usr/bin/bazel"
+ln -sf "${TARGET_DIR}/${DIR_NAME}/${BAZEL_EXE_FILE}" "/usr/bin/bazel"
test_package "bazel --version" "^bazel $BAZEL_VERSION$"
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-buildifier.sh b/buildscripts/infrastructure/build-nodes/scripts/install-buildifier.sh
index eaa75171639..7ff346716c3 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-buildifier.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-buildifier.sh
@@ -16,7 +16,7 @@ BUILDIFIER_VERSION="6.1.0"
install_package() {
echo "Installing buildifier@${BUILDIFIER_VERSION}"
# GO111MODULE=on is the default with Go 1.16
- GOPATH=/opt \
+ GOPATH="${TARGET_DIR:-/opt}" \
GO111MODULE=on \
go get github.com/bazelbuild/buildtools/buildifier@${BUILDIFIER_VERSION}
}
@@ -27,7 +27,7 @@ case "$DISTRO" in
test_package "go version" "go$GO_VERSION\."
;;
*)
- echo "ERROR: Unhandled DISTRO: $DISTRO - buildifier should only be available in IMAGE_TESTING!"
+ echo "ERROR: Unhandled DISTRO: $DISTRO - buildifier should only be available in reference image!"
exit 1
;;
esac
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-clang.sh b/buildscripts/infrastructure/build-nodes/scripts/install-clang.sh
index b07af356d44..0d1dc668876 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-clang.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-clang.sh
@@ -8,7 +8,7 @@
# This script will install the llvm toolchain on the different
# Debian and Ubuntu versions
-set -eux
+set -eu
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
# shellcheck source=buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
@@ -39,6 +39,7 @@ CLANG_VERSION_PATTERNS[14]="-14"
CLANG_VERSION_PATTERNS[15]="-15"
CLANG_VERSION_PATTERNS[16]="-16"
CLANG_VERSION_PATTERNS[17]="-17"
+CLANG_VERSION_PATTERNS[18]="-18"
if [ ! ${CLANG_VERSION_PATTERNS[$CLANG_VERSION]+_} ]; then
failure "This script does not support LLVM version $CLANG_VERSION"
@@ -64,12 +65,19 @@ case "$DIST_VERSION" in
Ubuntu_22.04) REPO_NAME="deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy$CLANG_VERSION_STRING main" ;;
Ubuntu_22.10) REPO_NAME="deb http://apt.llvm.org/kinetic/ llvm-toolchain-kinectic$CLANG_VERSION_STRING main" ;;
Ubuntu_23.04) REPO_NAME="deb http://apt.llvm.org/lunar/ llvm-toolchain-lunar$CLANG_VERSION_STRING main" ;;
+ Ubuntu_23.10) REPO_NAME="deb http://apt.llvm.org/mantic/ llvm-toolchain-mantic$CLANG_VERSION_STRING main" ;;
*) failure "Distribution '$DISTRO' in version '$VERSION' is not supported by this script (${DIST_VERSION})." >&2 ;;
esac
# install everything
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
-add-apt-repository "${REPO_NAME}"
+if [[ -e "/etc/apt/sources.list.d/clang.list" ]]; then
+ if ! grep -Fxq "${REPO_NAME}" /etc/apt/sources.list.d/clang.list; then
+ echo "${REPO_NAME}" >/etc/apt/sources.list.d/clang.list
+ fi
+else
+ echo "${REPO_NAME}" >>/etc/apt/sources.list.d/clang.list
+fi
apt-get update
apt-get install -y \
"clang-$CLANG_VERSION" \
@@ -79,7 +87,6 @@ apt-get install -y \
"lld-$CLANG_VERSION" \
"lldb-$CLANG_VERSION" \
"libclang-$CLANG_VERSION-dev"
-rm -rf /var/lib/apt/lists/*
# Workaround for https://github.com/llvm/llvm-project/issues/61550
if [ "$CLANG_VERSION" = 16 ]; then
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-cmake.sh b/buildscripts/infrastructure/build-nodes/scripts/install-cmake.sh
index b7c5287fe88..a7f8ddb1c15 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-cmake.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-cmake.sh
@@ -12,7 +12,7 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
CMAKE_VERSION=3.25.0
DIR_NAME=cmake-${CMAKE_VERSION}-Linux-x86_64
ARCHIVE_NAME=${DIR_NAME}.tar.gz
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
# Increase this to enforce a recreation of the build cache
BUILD_ID=2
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-cmk-dependencies.sh b/buildscripts/infrastructure/build-nodes/scripts/install-cmk-dependencies.sh
index eedc3c3948e..cdcdc370c63 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-cmk-dependencies.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-cmk-dependencies.sh
@@ -8,7 +8,7 @@
set -e -o pipefail
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
if [ "$DISTRO" = "cma-3" ] || [ "$DISTRO" = "cma-4" ]; then
# As there are no system tests for the appliance, an installation of CMK
# dependencies is not required
@@ -31,7 +31,6 @@ add_gpg_key() {
cleanup() {
rm -f "$TARGET_DIR"/needed-packages
- rm -rf /var/lib/apt/lists/*
}
extract_needed_packages
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-development.sh b/buildscripts/infrastructure/build-nodes/scripts/install-development.sh
new file mode 100755
index 00000000000..96b2aa96c9e
--- /dev/null
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-development.sh
@@ -0,0 +1,559 @@
+#!/bin/bash
+# Copyright (C) 2023 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+# To get this thing up and running the following tools need to be installed:
+# - git, to clone the repo and get this script
+# - make (optional), to run the script via "make setup", simply call "apt-get install make"
+#
+# How to use
+# ./buildscripts/infrastructure/build-nodes/scripts/install-development.sh \
+# --installpath $PWD/qwertz \
+# --profile cpp,python \
+# --dry
+set -e -o pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
+# SCRIPT_DIR="buildscripts/infrastructure/build-nodes/scripts"
+# shellcheck source=buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
+. "${SCRIPT_DIR}/build_lib.sh"
+
+IMPORTANT_MESSAGES=()
+
+trap perform_cleanup EXIT
+
+install_packages() {
+ echo "Install for: ${*}"
+
+ INSTALL_STR=""
+ for i in "${@}"; do
+ INSTALL_STR+="$i "
+ done
+
+ if [[ $DRY_RUN -gt 0 ]]; then
+ print_blue "This is a dry run"
+ print_blue "Would install these packages: '$INSTALL_STR'"
+ else
+ INSTALL_STR="apt-get install -y ${INSTALL_STR}"
+ print_debug "$INSTALL_STR"
+ apt-get update
+ ${INSTALL_STR}
+ rm -rf /var/lib/apt/lists/*
+ fi
+}
+
+install_basic_tools() {
+ print_green "Installing common basic tools ..."
+ local PACKAGES_TO_INSTALL=(
+ "binutils" # "strip" required to cleanup during strip_binaries
+ "curl" # curl is used to download artifacts from Nexus
+ "doxygen" # to be able to create docs in the unlikely event
+ "gawk" # TBC
+ "git" # git is used by install-[bazel, cmake, iwyu, patchelf, protobuf-cpp].sh
+ "gnupg" # "apt-key" used by install-docker
+ "lsb-release" # lsb is used by install-[clang, docker, packer, nodejs].sh
+ "make" # don't forget your towel when you're taveling :)
+ "sudo" # some make calls require sudo
+ "wget" # wget is used by install-[clang, packer, protobuf-cpp].sh
+ )
+ install_packages "${PACKAGES_TO_INSTALL[@]}"
+ print_green "Common basic tool installation done"
+}
+
+copy_files_around() {
+ print_green "Copy necessary files around and to ${INSTALL_PATH} ..."
+ # /opt would be used in install-cmk-dependencies.sh by default
+ mkdir -p "${INSTALL_PATH}"
+ DISTRO_NAME=$(lsb_release -is)
+ VERSION_NUMBER=$(lsb_release -sr)
+ cp omd/distros/UBUNTU_"$VERSION_NUMBER".mk "${INSTALL_PATH}"
+ # copy files to buildscripts/infrastructure/build-nodes/scripts
+ cp .bazelversion defines.make package_versions.bzl static_variables.bzl "${SCRIPT_DIR}"
+ print_green "Necessary file copying done"
+}
+
+perform_cleanup() {
+ print_green "Cleanup ..."
+ rm -f "${INSTALL_PATH}"/UBUNTU_"$VERSION_NUMBER".mk
+ rm -f "${SCRIPT_DIR}"/.bazelversion
+ rm -f "${SCRIPT_DIR}"/defines.make
+ rm -f "${SCRIPT_DIR}"/static_variables.bzl
+ rm -f "${SCRIPT_DIR}"/package_versions.bzl
+ rm -f "${SCRIPT_DIR}"/*.mk
+ print_green "Cleanup done"
+}
+
+setup_env_variables() {
+ print_green "Setup env variables ..."
+ DISTRO_NAME=$(lsb_release -is)
+ VERSION_NUMBER=$(lsb_release -sr)
+ BRANCH_NAME=$(get_version "$SCRIPT_DIR" BRANCH_NAME)
+ BRANCH_VERSION=$(get_version "$SCRIPT_DIR" BRANCH_VERSION)
+ CLANG_VERSION=$(get_version "$SCRIPT_DIR" CLANG_VERSION)
+ PIPENV_VERSION=$(get_version "$SCRIPT_DIR" PIPENV_VERSION)
+ VIRTUALENV_VERSION=$(get_version "$SCRIPT_DIR" VIRTUALENV_VERSION)
+ export DISTRO="${DISTRO_NAME,,}-${VERSION_NUMBER}"
+ # export NEXUS_ARCHIVES_URL here (as well) in case no creds have to be collected, e.g. CI build
+ export NEXUS_ARCHIVES_URL="https://artifacts.lan.tribe29.com/repository/archives/"
+ export BRANCH_NAME
+ export BRANCH_VERSION
+ export CLANG_VERSION
+ export PIPENV_VERSION
+ export VIRTUALENV_VERSION
+ export DISTRO_NAME
+ export VERSION_NUMBER
+ print_debug "DISTRO = ${DISTRO}"
+ print_debug "DISTRO_NAME = ${DISTRO_NAME}"
+ print_debug "VERSION_NUMBER = ${VERSION_NUMBER}"
+ print_debug "NEXUS_ARCHIVES_URL = ${NEXUS_ARCHIVES_URL}"
+ print_debug "BRANCH_NAME = ${BRANCH_NAME}"
+ print_debug "BRANCH_VERSION = ${BRANCH_VERSION}"
+ print_debug "CLANG_VERSION = ${CLANG_VERSION}"
+ print_debug "PIPENV_VERSION = ${PIPENV_VERSION}"
+ print_debug "VIRTUALENV_VERSION = ${VIRTUALENV_VERSION}"
+ print_green "Env variables setup done"
+}
+
+collect_user_input() {
+ print_green "Collect user input ... to get artifacts instead of building from scratch"
+ read -rp "Enter Nexus Username: " NEXUS_USERNAME
+ export NEXUS_USERNAME
+ read -rsp "Enter Nexus Password: " NEXUS_PASSWORD
+ export NEXUS_PASSWORD
+ echo
+ export NEXUS_ARCHIVES_URL="https://artifacts.lan.tribe29.com/repository/archives/"
+ print_debug "Please stand by while the connection to '${NEXUS_ARCHIVES_URL}' with the provided creds is tested ..."
+
+ if ! type curl >/dev/null 2>&1; then
+ install_packages curl
+ fi
+
+ # test for valid credentials
+    output=$(curl -sSL -u "${NEXUS_USERNAME}:${NEXUS_PASSWORD}" -X GET -G "${NEXUS_ARCHIVES_URL}")
+ if [ -n "$output" ]; then
+ print_green "Nexus login successfull"
+ else
+ print_red "Failed to login to Nexus"
+ read -rp "Retry entering correct Nexus Username and Password (y/n): " RETRY_LOGIN
+ echo # (optional) move to a new line
+ if [[ $RETRY_LOGIN =~ ^[Yy]$ ]]; then
+ collect_user_input
+ else
+ read -rp "Continuing without valid Nexus credentials? This might lead to building packages from scratch (y/n): " CONTINUE_INSTALLATION
+ echo # (optional) move to a new line
+ if [[ $CONTINUE_INSTALLATION =~ ^[Yy]$ ]]; then
+ print_blue "Alright, grab a coffee and stand by"
+ else
+ exit 0
+ fi
+ fi
+ fi
+
+ print_green "User input collection done"
+}
+
+strip_binaries() {
+ STRIP_PATH="${INSTALL_PATH}"
+ if [ $# -eq 1 ]; then
+ STRIP_PATH=$1
+ fi
+
+ if [[ -n ${CI} ]]; then
+ # CI build, located at /opt
+ /opt/strip_binaries "${STRIP_PATH}"
+ else
+ omd/strip_binaries "${STRIP_PATH}"
+ fi
+}
+
+install_for_python_dev() {
+ print_green "Installing everything for Python development ..."
+
+ local PACKAGES_TO_INSTALL=(
+ # https://github.com/pyenv/pyenv/wiki#suggested-build-environment
+ "build-essential"
+ "libssl-dev"
+ "zlib1g-dev"
+ "libbz2-dev"
+ "libreadline-dev"
+ "libsqlite3-dev"
+ "libncursesw5-dev"
+ "xz-utils"
+ "tk-dev"
+ "libxml2-dev"
+ "libxmlsec1-dev"
+ "libffi-dev"
+ "liblzma-dev"
+ )
+ install_packages "${PACKAGES_TO_INSTALL[@]}"
+
+ sudo -u "${SUDO_USER:-root}" \
+ TARGET_DIR="${INSTALL_PATH}" \
+ CI="${CI}" \
+ "${SCRIPT_DIR}"/install-pyenv.sh
+
+ if [[ -e "${SCRIPT_DIR}"/INSTALLED_BY_PYENV ]]; then
+        # marker file written by install-pyenv.sh; a cleaner handshake would be welcome
+ INSTALLED_BY_PYENV=1
+ rm "${SCRIPT_DIR}"/INSTALLED_BY_PYENV
+ print_debug "INSTALLED_BY_PYENV: $INSTALLED_BY_PYENV"
+ else
+ # not installed via pyenv, do it the oldschool way
+ print_blue "All right, Python will be installed as done in the CI to $TARGET_DIR"
+ install_python_and_teammates
+ "${SCRIPT_DIR}"/install-pipenv.sh
+ fi
+
+ print_green "Installation for Python development done"
+}
+
+install_python_and_teammates() {
+ export TARGET_DIR="${INSTALL_PATH}"
+ "${SCRIPT_DIR}"/install-openssl.sh
+ "${SCRIPT_DIR}"/install-python.sh
+
+ if [[ $STRIP_LATER -eq 1 ]]; then
+ print_blue "strip_binaries during Python setup"
+ strip_for_python
+ "${SCRIPT_DIR}"/install-python.sh link-only
+ fi
+}
+
+strip_for_python() {
+ # strip only the content of the latest created directory
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "Python-*" -print -quit | head -n 1)"
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "openssl-*" -print -quit | head -n 1)"
+}
+
+install_for_cpp_dev() {
+ print_green "Installing everything for CPP development ..."
+
+ local PACKAGES_TO_INSTALL=(
+ "python3-dev" # "libpython3.10.so.1.0" required by gdb, ouch
+ "pkg-config" # used by install-protobuf-cpp.sh
+ "bison" # to build binutils
+ "texinfo" # to build gdb
+ "tk-dev" # to build gdb
+ "libgmp-dev" # https://stackoverflow.com/questions/70380547/gmp-is-missing-while-configuring-building-gdb-from-source
+ "build-essential" # why not
+ # the following packages are copied from the old make setup step
+ "libjpeg-dev"
+ "libkrb5-dev"
+ "libldap2-dev"
+ "libmariadb-dev-compat"
+ "libpcap-dev"
+ "libpango1.0-dev"
+ "libpq-dev"
+ "libreadline-dev"
+ "librrd-dev"
+ "libsasl2-dev"
+ "libsqlite3-dev"
+ "libtool-bin"
+ "libxml2-dev"
+ "libxslt-dev"
+ "p7zip-full"
+ "zlib1g-dev"
+ )
+ install_packages "${PACKAGES_TO_INSTALL[@]}"
+
+ export TARGET_DIR="${INSTALL_PATH}"
+ # /usr/bin/gdb: error while loading shared libraries: libpython3.11.so.1.0:
+ # cannot open shared object file: No such file or directory
+ "${SCRIPT_DIR}"/install-gnu-toolchain.sh
+ "${SCRIPT_DIR}"/install-valgrind.sh
+ "${SCRIPT_DIR}"/install-cmake.sh
+ "${SCRIPT_DIR}"/install-clang.sh
+ "${SCRIPT_DIR}"/install-protobuf-cpp.sh
+ "${SCRIPT_DIR}"/install-freetds.sh
+
+ if [[ $STRIP_LATER -eq 1 ]]; then
+ print_blue "strip_binaries during CPP setup"
+ strip_for_cpp
+ "${SCRIPT_DIR}"/install-gnu-toolchain.sh link-only
+ "${SCRIPT_DIR}"/install-valgrind.sh link-only
+ "${SCRIPT_DIR}"/install-cmake.sh link-only
+ # no need to link aka install protobuf again
+ # "${SCRIPT_DIR}"/install-protobuf-cpp.sh --link-only
+ "${SCRIPT_DIR}"/install-freetds.sh link-only
+ fi
+
+ "${SCRIPT_DIR}"/install-patchelf.sh
+
+ print_green "Installation for CPP development done"
+}
+
+strip_for_cpp() {
+ # strip only the content of the latest created directory
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "gcc-*" -print -quit | head -n 1)"
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "iwyu-*" -print -quit | head -n 1)"
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "valgrind-*" -print -quit | head -n 1)"
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "cmake-*" -print -quit | head -n 1)"
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "protobuf-*" -print -quit | head -n 1)"
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "freetds-*" -print -quit | head -n 1)"
+}
+
+install_for_gdb() {
+ print_green "Installing everything for GDB ..."
+
+ # install GDB after Python as it requires shared object files, see CMK-15854
+ install_for_python_dev
+ # after here we're potentially root again, without knowledge of $HOME/.pyenv of a user
+
+    # source the potential default pyenv path, as the user calling this script has not sourced their bashrc at this point
+ potential_sudo_user_home=$(eval echo ~"${SUDO_USER:-root}")
+ if [[ -d "${potential_sudo_user_home}/.pyenv/bin" ]]; then
+ print_debug "Potential pyenv installation found at: ${potential_sudo_user_home}"
+ export PYENV_ROOT="${potential_sudo_user_home}/.pyenv"
+ export PATH="$PYENV_ROOT/bin:$PATH"
+ eval "$(pyenv init -)"
+ else
+ # maybe it has been installed without pyenv ...
+ export PATH="${TARGET_DIR}/bin:$PATH"
+ fi
+ test_package "python3 --version" "$(get_desired_python_version "${SCRIPT_DIR}")"
+
+ "${SCRIPT_DIR}"/install-gdb.sh
+
+ print_green "Installation for GDB with $(python3 --version) done"
+}
+
+install_cmk_package_dependencies() {
+ print_green "Installing everything for CMK development ..."
+
+ "${SCRIPT_DIR}"/install-cmk-dependencies.sh
+
+ print_green "Installation for CMK development done"
+}
+
+install_for_rust_dev() {
+ print_green "Installing everything for Rust development ..."
+
+ export TARGET_DIR="${INSTALL_PATH}"
+ "${SCRIPT_DIR}"/install-freetds.sh
+ "${SCRIPT_DIR}"/install-rust-cargo.sh
+
+ if [[ $STRIP_LATER -eq 1 ]]; then
+ print_blue "strip_binaries during Rust setup"
+ strip_for_rust
+ "${SCRIPT_DIR}"/install-rust-cargo.sh link-only
+ fi
+
+ print_green "Installation for Rust development done"
+
+ IMPORTANT_MESSAGES+=("Don't forget to call: export RUSTUP_HOME=${INSTALL_PATH}/rust/rustup")
+ print_red "${IMPORTANT_MESSAGES[${#IMPORTANT_MESSAGES[@]} - 1]}"
+}
+
+strip_for_rust() {
+ # strip only the content of the latest created directory
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "freetds-*" -print -quit | head -n 1)"
+ strip_binaries "$(find "${INSTALL_PATH}" -maxdepth 1 -type d -name "rust" -print -quit | head -n 1)"
+}
+
+install_for_frontend_dev() {
+ print_green "Installing everything for Frontend development ..."
+
+ "${SCRIPT_DIR}"/install-nodejs.sh
+
+ print_green "Installation for Frontend development done"
+}
+
+install_for_localize_dev() {
+ print_green "Installing everything for Localization development ..."
+
+ install_packages gettext
+
+ print_green "Installation for Localization development done"
+}
+
+POSITIONAL_ARGS=()
+PROFILE_ARGS=()
+INSTALL_PATH=/opt
+while [[ $# -gt 0 ]]; do
+ case $1 in
+ --dry)
+ DRY_RUN=1
+ shift # past argument
+ ;;
+ -p | --installpath)
+ INSTALL_PATH="$2"
+ shift # past argument
+ shift # past value
+ print_red "Installation might take longer, some tools need to be built from scratch"
+ print_red "Custom installation path is not yet supported"
+ exit 1
+ ;;
+ --profile)
+ INSTALL_PROFILE="$2"
+ IFS=',' read -ra PROFILE_ARGS <<<"$INSTALL_PROFILE"
+ shift # past argument
+ shift # past value
+ ;;
+ --* | -*)
+ echo "Unknown option $1"
+ exit 1
+ ;;
+ *)
+ POSITIONAL_ARGS+=("$1") # save positional arg
+ shift # past argument
+ ;;
+ esac
+done
+set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
+
+print_debug "SCRIPT_DIR = ${SCRIPT_DIR}"
+print_debug "INSTALL_PATH = ${INSTALL_PATH}"
+print_debug "INSTALL_PROFILE = ${INSTALL_PROFILE}"
+print_debug "DRY_RUN = ${DRY_RUN}"
+print_debug "POSITIONAL_ARGS = ${POSITIONAL_ARGS[*]}"
+print_debug "PROFILE_ARGS = ${PROFILE_ARGS[*]}"
+
+REQUIRES_NEXUS=0
+INSTALL_FOR_PYTHON=0
+INSTALL_FOR_CPP=0
+INSTALL_FOR_RUST=0
+INSTALL_FOR_FRONTEND=0
+INSTALL_FOR_LOCALIZE=0
+INSTALLED_BY_PYENV=0
+# strip only once: if "all" or multiple profiles are selected, do it at the end
+STRIP_LATER=0
+for PROFILE in "${PROFILE_ARGS[@]}"; do
+ case "$PROFILE" in
+ all)
+ ((REQUIRES_NEXUS += 1))
+ INSTALL_FOR_PYTHON=1
+ INSTALL_FOR_CPP=1
+ INSTALL_FOR_RUST=1
+ INSTALL_FOR_FRONTEND=1
+ INSTALL_FOR_LOCALIZE=1
+ ((STRIP_LATER += 5))
+ ;;
+ python)
+ ((REQUIRES_NEXUS += 1))
+ INSTALL_FOR_PYTHON=1
+ ((STRIP_LATER += 1))
+ ;;
+ cpp)
+ ((REQUIRES_NEXUS += 1))
+ INSTALL_FOR_CPP=1
+ ((STRIP_LATER += 1))
+ ;;
+ rust)
+ ((REQUIRES_NEXUS += 1))
+ INSTALL_FOR_RUST=1
+ ((STRIP_LATER += 1))
+ ;;
+ frontend)
+ INSTALL_FOR_FRONTEND=1
+ ;;
+ localize)
+ INSTALL_FOR_LOCALIZE=1
+ ;;
+ *)
+ print_red "Unknown installation profile $INSTALL_PROFILE"
+ print_debug "Choose from 'all', 'python', 'cpp', 'rust', 'frontend', 'localize'"
+ exit 1
+ ;;
+ esac
+done
+print_debug "INSTALL_FOR_PYTHON = ${INSTALL_FOR_PYTHON}"
+print_debug "INSTALL_FOR_CPP = ${INSTALL_FOR_CPP}"
+print_debug "INSTALL_FOR_RUST = ${INSTALL_FOR_RUST}"
+print_debug "INSTALL_FOR_FRONTEND = ${INSTALL_FOR_FRONTEND}"
+print_debug "INSTALL_FOR_LOCALIZE = ${INSTALL_FOR_LOCALIZE}"
+print_debug "REQUIRES_NEXUS = ${REQUIRES_NEXUS}"
+print_debug "STRIP_LATER = ${STRIP_LATER}"
+
+if [[ -n ${CI} ]]; then
+ print_debug "It is a CI build, don't ask for a password"
+ REQUIRES_NEXUS=0
+else
+ print_debug "No CI build, ask human for password if required"
+fi
+
+if [[ $REQUIRES_NEXUS -ge 1 ]]; then
+ collect_user_input
+fi
+
+install_basic_tools
+
+if [[ -z ${CI} ]]; then
+ # non CI build, Dockerfile is responsible for placing the files
+ copy_files_around
+fi
+
+setup_env_variables
+
+if [[ $INSTALL_FOR_CPP -eq 1 ]]; then
+ install_for_cpp_dev
+
+ # Python needs to be installed before GDB as "libpython3.10.so.1.0" is required
+ # "python3-dev" package might provide a different version than specified
+ install_for_gdb
+fi
+if [[ $INSTALL_FOR_PYTHON -eq 1 ]]; then
+ install_for_python_dev
+fi
+if [[ $INSTALL_FOR_RUST -eq 1 ]]; then
+ install_for_rust_dev
+fi
+if [[ $INSTALL_FOR_FRONTEND -eq 1 ]]; then
+ install_for_frontend_dev
+fi
+if [[ $INSTALL_FOR_LOCALIZE -eq 1 ]]; then
+ install_for_localize_dev
+fi
+
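+# Each nexus-backed profile increments STRIP_LATER ("all" adds 5), so a value
+# greater than 1 means several toolchains were installed and stripping plus
+# re-linking is done once here instead of once per profile.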
+if [[ $STRIP_LATER -gt 1 ]]; then
+ print_blue "strip_binaries finally"
+
+ if [[ $INSTALL_FOR_CPP -eq 1 ]]; then
+ print_debug "Link CPP things"
+ strip_for_cpp
+ "${SCRIPT_DIR}"/install-gnu-toolchain.sh link-only
+ "${SCRIPT_DIR}"/install-valgrind.sh link-only
+ "${SCRIPT_DIR}"/install-cmake.sh link-only
+ # no need to link (i.e. install) protobuf again
+ # "${SCRIPT_DIR}"/install-protobuf-cpp.sh link-only
+ "${SCRIPT_DIR}"/install-freetds.sh link-only
+ fi
+
+ if [[ $INSTALL_FOR_PYTHON -eq 1 && $INSTALLED_BY_PYENV -eq 0 ]]; then
+ print_debug "Link Python"
+ strip_for_python
+ "${SCRIPT_DIR}"/install-python.sh link-only
+ fi
+
+ if [[ $INSTALL_FOR_RUST -eq 1 ]]; then
+ print_debug "Link Rust"
+ strip_for_rust
+ "${SCRIPT_DIR}"/install-rust-cargo.sh link-only
+ fi
+fi
+
+# basic tools and env variables required to install docker are available at this point
+"${SCRIPT_DIR}"/install-docker.sh
+
+# CMK dependencies should always be installed
+install_cmk_package_dependencies
+
+if [[ $REQUIRES_NEXUS -gt 0 ]]; then
+ # not reached when only localize and/or frontend profiles are selected, as those don't require nexus interactions
+ # install Bazel for package building
+ export TARGET_DIR="${INSTALL_PATH}"
+ "${SCRIPT_DIR}"/install-bazel.sh
+fi
+
+# install_packages golang-go
+# "${SCRIPT_DIR}"/install-buildifier.sh
+
+perform_cleanup
+
+if [[ ${#IMPORTANT_MESSAGES[@]} -gt 0 ]]; then
+ for i in "${IMPORTANT_MESSAGES[@]}"; do
+ print_red "$i"
+ done
+fi
+
+exit 0
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-docker.sh b/buildscripts/infrastructure/build-nodes/scripts/install-docker.sh
index 400e05c6dce..b42f1c65f02 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-docker.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-docker.sh
@@ -11,11 +11,10 @@ case "$DISTRO" in
echo "Installing for Ubuntu"
# Install docker software
- curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
+ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
echo "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" >/etc/apt/sources.list.d/docker.list
apt-get update
apt-get install -y docker-ce
- rm -rf /var/lib/apt/lists/*
# Test the installation
docker --version || exit $?
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-freetds.sh b/buildscripts/infrastructure/build-nodes/scripts/install-freetds.sh
index 0b4bccc62cf..b7531e4969b 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-freetds.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-freetds.sh
@@ -12,7 +12,7 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
FREETDS_VERSION=0.95.95
DIR_NAME=freetds-${FREETDS_VERSION}
ARCHIVE_NAME=${DIR_NAME}.tgz
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
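+# The ":-" expansion keeps an externally exported TARGET_DIR (e.g. set by the setup script) and only falls back to /opt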
# Increase this to enforce a recreation of the build cache
BUILD_ID=1
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-gdb.sh b/buildscripts/infrastructure/build-nodes/scripts/install-gdb.sh
new file mode 100755
index 00000000000..6f59f40e80d
--- /dev/null
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-gdb.sh
@@ -0,0 +1,85 @@
+#!/bin/bash
+# Copyright (C) 2024 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+set -e -o pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
+# shellcheck source=buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
+. "${SCRIPT_DIR}/build_lib.sh"
+
+MIRROR_URL="https://ftp.gnu.org/gnu/"
+
+GCC_MAJOR=$(get_version "$SCRIPT_DIR" GCC_VERSION_MAJOR)
+GCC_MINOR=$(get_version "$SCRIPT_DIR" GCC_VERSION_MINOR)
+GCC_PATCHLEVEL=$(get_version "$SCRIPT_DIR" GCC_VERSION_PATCHLEVEL)
+GCC_VERSION="${GCC_MAJOR}.${GCC_MINOR}.${GCC_PATCHLEVEL}"
+
+GDB_VERSION="13.2"
+GDB_ARCHIVE_NAME="gdb-${GDB_VERSION}.tar.gz"
+GDB_URL="${MIRROR_URL}gdb/${GDB_ARCHIVE_NAME}"
+
+DIR_NAME=gcc-${GCC_VERSION}
+TARGET_DIR="${TARGET_DIR:-/opt}"
+PREFIX=${TARGET_DIR}/${DIR_NAME}
+BUILD_DIR="${TARGET_DIR}/src"
+
+# Increase this to enforce a recreation of the build cache
+BUILD_ID="${GDB_VERSION}-1"
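+# e.g. bumping "13.2-1" to "13.2-2" invalidates the cached build even though GDB_VERSION stays the same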
+
+download_sources() {
+ # Get the sources from nexus or upstream
+ mirrored_download "${GDB_ARCHIVE_NAME}" "${GDB_URL}"
+}
+
+build_gdb() {
+ log "Build gdb-${GDB_VERSION}"
+ cd "${BUILD_DIR}"
+ tar xzf gdb-${GDB_VERSION}.tar.gz
+ # remove potential older build directories
+ if [[ -d gdb-${GDB_VERSION}-build ]]; then
+ rm -rf "gdb-${GDB_VERSION}-build"
+ fi
+ mkdir gdb-${GDB_VERSION}-build
+ cd gdb-${GDB_VERSION}-build
+ ../gdb-${GDB_VERSION}/configure \
+ --prefix="${PREFIX}" \
+ CC="${PREFIX}/bin/gcc-${GCC_MAJOR}" \
+ CXX="${PREFIX}/bin/g++-${GCC_MAJOR}" \
+ "$(python -V 2>&1 | grep -q 'Python 2\.4\.' && echo "--with-python=no")"
+ make -j4
+ make install
+}
+
+set_symlinks() {
+ log "Set symlink"
+
+ # We should not mess with the files below /usr/bin. Instead we should only deploy to /opt/bin to
+ # prevent conflicts.
+ # Right now it seems binutils is installed by install-cmk-dependencies.sh which then overwrites
+ # our /usr/bin/as symlink. As an intermediate fix, we additionally install the link to /opt/bin.
+ # As a follow-up, we should move everything to /opt/bin - but that needs separate testing.
+ [ -d "${TARGET_DIR}/bin" ] || mkdir -p "${TARGET_DIR}/bin"
+ ln -sf "${PREFIX}/bin/"* "${TARGET_DIR}"/bin
+
+ ln -sf "${PREFIX}/bin/"* /usr/bin
+}
+
+build_package() {
+ mkdir -p "$TARGET_DIR/src"
+ cd "$TARGET_DIR/src"
+
+ download_sources
+ build_gdb
+
+ cd "$TARGET_DIR"
+ rm -rf "$TARGET_DIR/src"
+}
+
+if [ "$1" != "link-only" ]; then
+ cached_build "${TARGET_DIR}" "${DIR_NAME}" "${BUILD_ID}" "${DISTRO}" "${BRANCH_VERSION}"
+fi
+set_symlinks
+
+test_package "/usr/bin/gdb --version" "$GDB_VERSION"
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-gnu-toolchain.sh b/buildscripts/infrastructure/build-nodes/scripts/install-gnu-toolchain.sh
index 8f774631b73..b7e2dd5b9b2 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-gnu-toolchain.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-gnu-toolchain.sh
@@ -22,26 +22,18 @@ BINUTILS_VERSION="2.41"
BINUTILS_ARCHIVE_NAME="binutils-${BINUTILS_VERSION}.tar.gz"
BINUTILS_URL="${MIRROR_URL}binutils/${BINUTILS_ARCHIVE_NAME}"
-GDB_VERSION="13.2"
-GDB_ARCHIVE_NAME="gdb-${GDB_VERSION}.tar.gz"
-GDB_URL="${MIRROR_URL}gdb/${GDB_ARCHIVE_NAME}"
-
DIR_NAME=gcc-${GCC_VERSION}
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
PREFIX=${TARGET_DIR}/${DIR_NAME}
BUILD_DIR="${TARGET_DIR}/src"
# Increase this to enforce a recreation of the build cache
-# NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE NOTE
-# Only the GCC_VERSION is part of the cache key, so be sure to bump this, too,
-# e.g. when changing the binutils or gdb version!
-BUILD_ID=1
+BUILD_ID="${BINUTILS_VERSION}-1"
download_sources() {
# Get the sources from nexus or upstream
mirrored_download "${BINUTILS_ARCHIVE_NAME}" "${BINUTILS_URL}"
mirrored_download "${GCC_ARCHIVE_NAME}" "${GCC_URL}"
- mirrored_download "${GDB_ARCHIVE_NAME}" "${GDB_URL}"
# Some GCC dependency download optimization
local FILE_NAME="gcc-${GCC_VERSION}-with-prerequisites.tar.gz"
@@ -63,6 +55,10 @@ build_binutils() {
log "Build binutils-${BINUTILS_VERSION}"
cd "${BUILD_DIR}"
tar xzf binutils-${BINUTILS_VERSION}.tar.gz
+ # remove potential older build directories
+ if [[ -d binutils-${BINUTILS_VERSION}-build ]]; then
+ rm -rf "binutils-${BINUTILS_VERSION}-build"
+ fi
mkdir binutils-${BINUTILS_VERSION}-build
cd binutils-${BINUTILS_VERSION}-build
# HACK: Dispatching on the distro is not nice, we should really check the versions.
@@ -88,6 +84,10 @@ build_gcc() {
log "Build gcc-${GCC_VERSION}"
cd "${BUILD_DIR}"
tar xzf "gcc-${GCC_VERSION}-with-prerequisites.tar.gz"
+ # remove potential older build directories
+ if [[ -d gcc-${GCC_VERSION}-build ]]; then
+ rm -rf "gcc-${GCC_VERSION}-build"
+ fi
mkdir "gcc-${GCC_VERSION}-build"
cd "gcc-${GCC_VERSION}-build"
"../gcc-${GCC_VERSION}/configure" \
@@ -100,21 +100,6 @@ build_gcc() {
make install
}
-build_gdb() {
- log "Build gdb-${GDB_VERSION}"
- cd "${BUILD_DIR}"
- tar xzf gdb-${GDB_VERSION}.tar.gz
- mkdir gdb-${GDB_VERSION}-build
- cd gdb-${GDB_VERSION}-build
- ../gdb-${GDB_VERSION}/configure \
- --prefix="${PREFIX}" \
- CC="${PREFIX}/bin/gcc-${GCC_MAJOR}" \
- CXX="${PREFIX}/bin/g++-${GCC_MAJOR}" \
- "$(python -V 2>&1 | grep -q 'Python 2\.4\.' && echo "--with-python=no")"
- make -j4
- make install
-}
-
set_symlinks() {
log "Set symlink"
@@ -124,9 +109,9 @@ set_symlinks() {
# our /usr/bin/as symlink. As an intermediate fix, we additionally install the link to /opt/bin.
# As a follow-up, we should move everything to /opt/bin - but that needs separate testing.
[ -d "${TARGET_DIR}/bin" ] || mkdir -p "${TARGET_DIR}/bin"
- ln -sf "${PREFIX}/bin/"* ${TARGET_DIR}/bin
- ln -sf "${PREFIX}/bin/gcc-${GCC_MAJOR}" ${TARGET_DIR}/bin/gcc
- ln -sf "${PREFIX}/bin/g++-${GCC_MAJOR}" ${TARGET_DIR}/bin/g++
+ ln -sf "${PREFIX}/bin/"* "${TARGET_DIR}"/bin
+ ln -sf "${PREFIX}/bin/gcc-${GCC_MAJOR}" "${TARGET_DIR}"/bin/gcc
+ ln -sf "${PREFIX}/bin/g++-${GCC_MAJOR}" "${TARGET_DIR}"/bin/g++
# Save distro executables under [name]-orig. It is used by some build steps
# later that need to use the distro original compiler. For some platforms
@@ -150,7 +135,6 @@ build_package() {
download_sources
build_binutils
build_gcc
- build_gdb
cd "$TARGET_DIR"
rm -rf "$TARGET_DIR/src"
@@ -160,12 +144,12 @@ test_packages() {
for i in $(dpkg -L binutils | grep '/bin/'); do
this_version=$($i --version)
if [[ "$this_version" == *"Binutils)"* ]]; then
- echo "$this_version" | grep -q "$BINUTILS_VERSION" >/dev/null 2>&1 || (
- echo "Invalid version: $(i)"
+ echo "$this_version" | grep -q "${BINUTILS_VERSION}" >/dev/null 2>&1 || (
+ echo "Invalid version: ${i}: ${this_version}!=${BINUTILS_VERSION}"
exit 1
)
else
- echo "$i not of interest"
+ echo "${i} not of interest"
# e.g. /usr/bin/dwp would report "GNU dwp (GNU Binutils for Ubuntu) 2.34"
fi
done
@@ -178,4 +162,3 @@ set_symlinks
test_packages
test_package "/usr/bin/gcc --version" "$GCC_VERSION"
-test_package "/usr/bin/gdb --version" "$GDB_VERSION"
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-iwyu.sh b/buildscripts/infrastructure/build-nodes/scripts/install-iwyu.sh
index 108b27e513b..36c9e49057c 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-iwyu.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-iwyu.sh
@@ -11,7 +11,7 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
INSTALL_PREFIX=""
CLANG_VERSION=""
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
# option parsing ###############################################################
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-make-dist-deps.sh b/buildscripts/infrastructure/build-nodes/scripts/install-make-dist-deps.sh
index f6c832ea1e0..578ac0ea635 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-make-dist-deps.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-make-dist-deps.sh
@@ -20,7 +20,6 @@ case "$DISTRO" in
rpm \
alien \
dpkg-sig
- rm -rf /var/lib/apt/lists/*
# Test the installations
EXIT_STATUS=0
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-musl-tools.sh b/buildscripts/infrastructure/build-nodes/scripts/install-musl-tools.sh
index 7a14f6c5366..2ae29fc71cd 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-musl-tools.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-musl-tools.sh
@@ -12,7 +12,6 @@ case "$DISTRO" in
apt-get update
apt-get install -y musl-tools
- rm -rf /var/lib/apt/lists/*
# Test the installation
musl-gcc --version || exit $?
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-nodejs.sh b/buildscripts/infrastructure/build-nodes/scripts/install-nodejs.sh
index de0eab31cc0..705b66b990a 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-nodejs.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-nodejs.sh
@@ -22,11 +22,18 @@ install_package() {
echo "Installing nodejs"
mkdir -p /etc/apt/keyrings
- curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
- echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODEJS_VERSION.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
+ if [[ ! -e "/etc/apt/keyrings/nodesource.gpg" ]]; then
+ curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
+ fi
+ if [[ -e "/etc/apt/sources.list.d/nodesource.list" ]]; then
+ if ! grep -Fq "node_${NODEJS_VERSION}.x" /etc/apt/sources.list.d/nodesource.list; then
+ echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODEJS_VERSION.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
+ fi
+ else
+ echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODEJS_VERSION.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
+ fi
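+ # The checks above make re-runs idempotent: the keyring is fetched only once
+ # and the repo line is only (re)written when the configured NODEJS_VERSION is
+ # not yet present in nodesource.list.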
apt-get update
apt-get install -y nodejs
- rm -rf /var/lib/apt/lists/*
}
install_package
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-openssl.sh b/buildscripts/infrastructure/build-nodes/scripts/install-openssl.sh
index 14ac4fdd378..2d0d76fdc37 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-openssl.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-openssl.sh
@@ -9,10 +9,10 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
# shellcheck source=buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
. "${SCRIPT_DIR}/build_lib.sh"
-OPENSSL_VERSION=3.0.12
+OPENSSL_VERSION=3.0.13
DIR_NAME=openssl-${OPENSSL_VERSION}
ARCHIVE_NAME=${DIR_NAME}.tar.gz
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
TARGET="" # for x64, use the default target
# OpenSSL "config" seems to have problems with detecting 32bit architecture in some cases
@@ -30,7 +30,7 @@ build_package() {
# Now build the package
tar xf "${ARCHIVE_NAME}"
cd "${DIR_NAME}"
- ./config "${TARGET}" --libdir=lib --prefix="${TARGET_DIR}/${DIR_NAME}" enable-md2 -Wl,-rpath,/opt/"${DIR_NAME}"/lib
+ ./config "${TARGET}" --libdir=lib --prefix="${TARGET_DIR}/${DIR_NAME}" enable-md2 -Wl,-rpath,"${TARGET_DIR}/${DIR_NAME}"/lib
make -j6
make install
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-packer.sh b/buildscripts/infrastructure/build-nodes/scripts/install-packer.sh
index fd685005431..7d5c0c426c9 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-packer.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-packer.sh
@@ -9,11 +9,12 @@ case "$DISTRO" in
ubuntu-*)
# installable on all Ubuntu versions to be potentially usable by developers
echo "Installing for Ubuntu"
-
- wget -O- https://apt.releases.hashicorp.com/gpg | gpg --dearmor | sudo tee /usr/share/keyrings/hashicorp-archive-keyring.gpg
+ wget -O- https://apt.releases.hashicorp.com/gpg |
+ gpg --dearmor |
+ sudo tee /usr/share/keyrings/hashicorp-archive-keyring.gpg >/dev/null
echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list
- apt-get update && apt-get install -y packer
- rm -rf /var/lib/apt/lists/*
+ apt-get update
+ apt-get install -y packer
# Test the installation
packer --version || exit $?
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-pipenv.sh b/buildscripts/infrastructure/build-nodes/scripts/install-pipenv.sh
index e8e81ecaac1..d86eaae5ab1 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-pipenv.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-pipenv.sh
@@ -9,6 +9,8 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
# shellcheck source=buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
. "${SCRIPT_DIR}/build_lib.sh"
+TARGET_DIR="${TARGET_DIR:-/opt}"
+
# read optional command line argument
if [ "$#" -eq 1 ]; then
PYTHON_VERSION=$1
@@ -19,15 +21,20 @@ fi
PIPENV_VERSION=$(get_version "$SCRIPT_DIR" PIPENV_VERSION)
VIRTUALENV_VERSION=$(get_version "$SCRIPT_DIR" VIRTUALENV_VERSION)
-pip3 install \
+if type pip3 >/dev/null 2>&1; then
+ CMD="pip3"
+else
+ CMD="${TARGET_DIR}/Python-${PYTHON_VERSION}/bin/pip3"
+fi
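+# If pip3 is not on PATH, fall back to the pip of the freshly built interpreter,
+# e.g. /opt/Python-3.12.5/bin/pip3 (version hypothetical).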
+$CMD install \
pipenv=="$PIPENV_VERSION" \
virtualenv=="$VIRTUALENV_VERSION"
# link pipenv to /usr/bin to be in PATH. Fallback to /opt/bin if no permissions for writing to /usr/bin.
# /opt/bin does not work as default, because `make -C omd deb` requires it to be in /usr/bin.
# only /usr/bin does not work, because GitHub Actions do not have permissions to write there.
-PIPENV_PATH="/opt/Python-${PYTHON_VERSION}/bin/pipenv"
-ln -sf "${PIPENV_PATH}"* /usr/bin || ln -sf "${PIPENV_PATH}"* /opt/bin
+PIPENV_PATH="${TARGET_DIR}/Python-${PYTHON_VERSION}/bin/pipenv"
+ln -sf "${PIPENV_PATH}"* /usr/bin || ln -sf "${PIPENV_PATH}"* "${TARGET_DIR}"/bin
test_package "pipenv --version" "$PIPENV_VERSION$"
-test_package "pip3 freeze" "virtualenv"
+test_package "$CMD freeze" "virtualenv"
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-protobuf-cpp.sh b/buildscripts/infrastructure/build-nodes/scripts/install-protobuf-cpp.sh
index f6dd7f30a95..41f59d8590c 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-protobuf-cpp.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-protobuf-cpp.sh
@@ -18,7 +18,7 @@ ARCHIVE_NAME=${PACKAGE_NAME}-${PROTOBUF_VERSION}.tar.gz
BUILD_ID=2
INSTALL_PREFIX=
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
USE_BUILD_CACHE=1
VERIFY_INSTALL=1
@@ -56,7 +56,8 @@ install() {
fi
mkdir -p "${INSTALL_PREFIX}/usr/include"
- cp -prl "${TARGET_DIR}/${DIR_NAME}/include/"* "${INSTALL_PREFIX}/usr/include"
+ # CMK-15362, rerunning the installation would fail as the files exist already
+ cp -prlu "${TARGET_DIR}/${DIR_NAME}/include/"* "${INSTALL_PREFIX}/usr/include"
if [ -d "${INSTALL_PREFIX}/usr/lib64/pkgconfig" ]; then
PKGCONFIG_DIR=${INSTALL_PREFIX}/usr/lib64/pkgconfig
@@ -65,7 +66,8 @@ install() {
fi
mkdir -p "${PKGCONFIG_DIR}"
- cp -prl "${TARGET_DIR}/${DIR_NAME}/lib/pkgconfig/"*.pc "${PKGCONFIG_DIR}"
+ # CMK-15362, rerunning the installation would fail as the files exist already
+ cp -prlu "${TARGET_DIR}/${DIR_NAME}/lib/pkgconfig/"*.pc "${PKGCONFIG_DIR}"
}
verify_install() {
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-pyenv.sh b/buildscripts/infrastructure/build-nodes/scripts/install-pyenv.sh
new file mode 100755
index 00000000000..23138ebb98e
--- /dev/null
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-pyenv.sh
@@ -0,0 +1,98 @@
+#!/bin/bash
+# Copyright (C) 2024 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+set -e -o pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
+# SCRIPT_DIR="buildscripts/infrastructure/build-nodes/scripts"
+# shellcheck source=buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
+. "${SCRIPT_DIR}/build_lib.sh"
+
+TARGET_DIR="${TARGET_DIR:-/opt}"
+
+install() {
+ DESIRED_PYTHON_VERSION=$(get_desired_python_version "${SCRIPT_DIR}")
+ print_debug "Desired python version: ${DESIRED_PYTHON_VERSION}"
+
+ # pick up a potential pyenv installation at its default path, as the user calling this script has not sourced their bashrc file
+ if [[ -d "$HOME/.pyenv/bin" ]]; then
+ print_debug "Potential pyenv installation found"
+ # there is a potential pyenv installation available
+ export PYENV_ROOT="$HOME/.pyenv"
+ export PATH="$PYENV_ROOT/bin:$PATH"
+ eval "$(pyenv init -)"
+ fi
+
+ if type pyenv >/dev/null 2>&1; then
+ # show me a better way to communicate between scripts called by different users
+ echo "1" >>"${SCRIPT_DIR}"/INSTALLED_BY_PYENV
+
+ # Update available versions for pyenv
+ cd "$HOME"/.pyenv/plugins/python-build/../.. && git pull && cd -
+
+ pyenv update
+ pyenv install "${DESIRED_PYTHON_VERSION}" --skip-existing
+ pyenv global "${DESIRED_PYTHON_VERSION}" # make pip3 available
+ install_pipenv
+ else
+ print_blue "Team CI recommends to install pyenv for easy use. It is currently not yet installed."
+
+ if [[ -n ${CI} ]]; then
+ # CI build, don't ask
+ INSTALL_PYENV="y"
+ else
+ read -rp "Should pyenv be installed now? (y/n): " INSTALL_PYENV
+ echo # (optional) move to a new line
+ fi
+ if [[ $INSTALL_PYENV =~ ^[Yy]$ ]]; then
+ # show me a better way to communicate between scripts called by different users
+ echo "1" >>"${SCRIPT_DIR}"/INSTALLED_BY_PYENV
+ curl https://pyenv.run | bash
+
+ cat <<'EOF' >>~/.bashrc
+export PYENV_ROOT="$HOME/.pyenv"
+command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"
+eval "$(pyenv init -)"
+EOF
+
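+ # The three lines appended above mirror pyenv's recommended shell setup so
+ # that future interactive shells pick up pyenv automatically.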
+ if [[ -n ${CI} ]]; then
+ # CI build
+ export PYENV_ROOT="$HOME/.pyenv"
+ export PATH="$PYENV_ROOT/bin:$PATH"
+ eval "$(pyenv init -)"
+ else
+ # eval hack :P
+ # a shebang with "#!/bin/bash -i" would work as well
+ # https://askubuntu.com/questions/64387/cannot-successfully-source-bashrc-from-a-shell-script
+ eval "$(tail -n -3 ~/.bashrc)"
+ fi
+
+ pyenv install "${DESIRED_PYTHON_VERSION}"
+ pyenv global "${DESIRED_PYTHON_VERSION}" # make pip3 available
+ install_pipenv
+ fi
+ fi
+}
+
+install_pipenv() {
+ PIPENV_VERSION=$(get_version "$SCRIPT_DIR" PIPENV_VERSION)
+ VIRTUALENV_VERSION=$(get_version "$SCRIPT_DIR" VIRTUALENV_VERSION)
+
+ pip3 install \
+ pipenv=="$PIPENV_VERSION" \
+ virtualenv=="$VIRTUALENV_VERSION"
+
+ # link pipenv to /usr/bin to be in PATH. Fallback to /opt/bin if no permissions for writing to /usr/bin.
+ # /opt/bin does not work as default, because `make -C omd deb` requires it to be in /usr/bin.
+ # only /usr/bin does not work, because GitHub Actions do not have permissions to write there.
+ PIPENV_PATH=$(command -v pipenv)
+ print_debug "Creating symlink to /usr/bin or ${TARGET_DIR}/bin for OMD usage"
+ sudo ln -sf "${PIPENV_PATH}"* /usr/bin || sudo ln -sf "${PIPENV_PATH}"* "${TARGET_DIR}"/bin
+
+ test_package "pipenv --version" "$PIPENV_VERSION$"
+ test_package "pip3 freeze" "virtualenv"
+}
+
+install
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-python.sh b/buildscripts/infrastructure/build-nodes/scripts/install-python.sh
index 0e738b51a15..982f51ea7c4 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-python.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-python.sh
@@ -16,14 +16,14 @@ else
PYTHON_VERSION=$(get_version "$SCRIPT_DIR" PYTHON_VERSION)
fi
-OPENSSL_VERSION=3.0.12
-OPENSSL_PATH="/opt/openssl-${OPENSSL_VERSION}"
+TARGET_DIR="${TARGET_DIR:-/opt}"
+OPENSSL_VERSION=3.0.13
+OPENSSL_PATH="${TARGET_DIR}/openssl-${OPENSSL_VERSION}"
DIR_NAME=Python-${PYTHON_VERSION}
ARCHIVE_NAME=${DIR_NAME}.tgz
-TARGET_DIR="/opt"
-# Increase this to enforce a recreation of the build cache
-BUILD_ID=10
+# Increase the numeric suffix to enforce a recreation of the build cache
+BUILD_ID="openssl-${OPENSSL_VERSION}-11"
build_package() {
mkdir -p "$TARGET_DIR/src"
@@ -35,7 +35,9 @@ build_package() {
# Now build the package
tar xf "${ARCHIVE_NAME}"
cd "${DIR_NAME}"
- LD_LIBRARY_PATH="${OPENSSL_PATH}/lib" \
+ # Under sles12sp5, we need to pass the ncursesw include dir explicitly... no idea why
+ CPPFLAGS="-I/usr/include/ncursesw" \
+ LD_LIBRARY_PATH="${OPENSSL_PATH}/lib" \
LDFLAGS="-Wl,--rpath,${TARGET_DIR}/${DIR_NAME}/lib -Wl,--rpath,${OPENSSL_PATH}/lib -L${OPENSSL_PATH}/lib" \
./configure \
--prefix="${TARGET_DIR}/${DIR_NAME}" \
@@ -57,4 +59,4 @@ if [ "$1" != "link-only" ]; then
fi
set_bin_symlinks "${TARGET_DIR}" "${DIR_NAME}"
-test_package "/opt/bin/python3 --version" "Python $(get_version "$SCRIPT_DIR" PYTHON_VERSION)"
+test_package "${TARGET_DIR}/bin/python3 --version" "Python $(get_version "$SCRIPT_DIR" PYTHON_VERSION)"
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-rust-cargo.sh b/buildscripts/infrastructure/build-nodes/scripts/install-rust-cargo.sh
index 77d4dac23aa..36ccf41ec13 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-rust-cargo.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-rust-cargo.sh
@@ -14,12 +14,22 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
# define toolchain version explicitly
# 'stable' is allowed only for main(master) branch
-# https://github.com/rust-lang/rust/issues/112286 for the reason of pinning the version
-TOOLCHAIN_VERSION="1.72"
+DEFAULT_TOOLCHAIN_VERSION="1.75"
+# Some packages require specific toolchain versions.
+# These versions will be installed in addition to the default toolchain version.
+# List the versions separated by space, e.g. "1 2 3", and add a reason below.
+#
+# Reasons for added toolchains:
+# - 1.72: mk-sql is currently known to properly work with this version
+ADDITIONAL_TOOLCHAIN_VERSIONS="1.72"
-DEFAULT_TOOLCHAIN="${TOOLCHAIN_VERSION}-x86_64-unknown-linux-gnu"
+DEFAULT_TARGET="x86_64-unknown-linux-gnu"
+# List additional targets here, separated by space.
+# These targets will be installed for all toolchain versions.
+ADDITIONAL_TARGETS="x86_64-unknown-linux-musl"
+DEFAULT_TOOLCHAIN="${DEFAULT_TOOLCHAIN_VERSION}-${DEFAULT_TARGET}"
DIR_NAME="rust"
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
CARGO_HOME="$TARGET_DIR/$DIR_NAME/cargo"
export CARGO_HOME
@@ -27,7 +37,18 @@ RUSTUP_HOME="$TARGET_DIR/$DIR_NAME/rustup"
export RUSTUP_HOME
# Increase this to enforce a recreation of the build cache
-BUILD_ID="8-$TOOLCHAIN_VERSION"
+BUILD_ID="9-$DEFAULT_TOOLCHAIN_VERSION"
+# This adds all present toolchain versions to the build ID to make sure they are
+# included in the cached archive.
+for toolchain_version in $ADDITIONAL_TOOLCHAIN_VERSIONS; do
+ BUILD_ID="$BUILD_ID-$toolchain_version"
+done
+# This adds all present targets to the build ID to make sure they are included
+# in the cached archive.
+BUILD_ID="$BUILD_ID-$DEFAULT_TARGET"
+for target in $ADDITIONAL_TARGETS; do
+ BUILD_ID="$BUILD_ID-$target"
+done
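+# With the values above this yields the cache key
+# "9-1.75-1.72-x86_64-unknown-linux-gnu-x86_64-unknown-linux-musl".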
build_package() {
WORK_DIR=$(mktemp -d)
@@ -48,14 +69,37 @@ build_package() {
mirrored_download "rustup-init.sh" "https://sh.rustup.rs"
chmod +x rustup-init.sh
./rustup-init.sh -y --no-modify-path --default-toolchain "$DEFAULT_TOOLCHAIN"
+ "${CARGO_HOME}"/bin/rustup toolchain install $DEFAULT_TOOLCHAIN_VERSION $ADDITIONAL_TOOLCHAIN_VERSIONS
+
+ # Install additional targets for all versions
+ for target in $ADDITIONAL_TARGETS; do
+ "${CARGO_HOME}/bin/rustup" target add "${target}" --toolchain $DEFAULT_TOOLCHAIN_VERSION
+
+ for toolchain_version in $ADDITIONAL_TOOLCHAIN_VERSIONS; do
+ "${CARGO_HOME}/bin/rustup" target add "${target}" --toolchain "${toolchain_version}"
+ done
+ done
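+ # Net result: toolchains 1.75 and 1.72 are both available, each with the
+ # x86_64-unknown-linux-gnu and x86_64-unknown-linux-musl targets.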
+ "${CARGO_HOME}"/bin/rustup default $DEFAULT_TOOLCHAIN_VERSION
"${CARGO_HOME}"/bin/rustup update
- "${CARGO_HOME}"/bin/rustup target add x86_64-unknown-linux-musl
- "${CARGO_HOME}"/bin/rustup default $TOOLCHAIN_VERSION
# saves space
- rm -rf "$RUSTUP_HOME/toolchains/$DEFAULT_TOOLCHAIN/share/doc/"
- rm -rf "$RUSTUP_HOME/toolchains/$DEFAULT_TOOLCHAIN/share/man/"
- rm -rf "$RUSTUP_HOME/toolchains/$DEFAULT_TOOLCHAIN/share/zsh/"
+ remove_doc_dirs() {
+ echo "Removing rust documentation for $1"
+ rm -rf "$RUSTUP_HOME/toolchains/$1/share/doc/"
+ rm -rf "$RUSTUP_HOME/toolchains/$1/share/man/"
+ rm -rf "$RUSTUP_HOME/toolchains/$1/share/zsh/"
+ }
+
+ remove_doc_dirs "$DEFAULT_TOOLCHAIN"
+ for toolchain_version in $ADDITIONAL_TOOLCHAIN_VERSIONS; do
+ remove_doc_dirs "${toolchain_version}-${DEFAULT_TARGET}"
+ done
+ for target in $ADDITIONAL_TARGETS; do
+ remove_doc_dirs "${DEFAULT_TOOLCHAIN_VERSION}-${target}"
+ for toolchain_version in $ADDITIONAL_TOOLCHAIN_VERSIONS; do
+ remove_doc_dirs "${toolchain_version}-${target}"
+ done
+ done
}
if [ "$1" != "link-only" ]; then
@@ -63,4 +107,7 @@ if [ "$1" != "link-only" ]; then
fi
ln -sf "${CARGO_HOME}/bin/"* /usr/bin/
-test_package "rustc --version" "^rustc $TOOLCHAIN_VERSION\."
+test_package "rustc --version" "^rustc $DEFAULT_TOOLCHAIN_VERSION\."
+for toolchain_version in $ADDITIONAL_TOOLCHAIN_VERSIONS; do
+ test_package "$RUSTUP_HOME/toolchains/${toolchain_version}-${DEFAULT_TARGET}/bin/rustc --version" "^rustc $toolchain_version\."
+done
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-shellcheck.sh b/buildscripts/infrastructure/build-nodes/scripts/install-shellcheck.sh
index 5c9fe5ef7ac..fa8cbad5625 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-shellcheck.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-shellcheck.sh
@@ -12,7 +12,6 @@ case "$DISTRO" in
apt-get update
apt-get install -y shellcheck
- rm -rf /var/lib/apt/lists/*
# Test the installation
shellcheck --version || exit $?
diff --git a/buildscripts/infrastructure/build-nodes/scripts/install-valgrind.sh b/buildscripts/infrastructure/build-nodes/scripts/install-valgrind.sh
index bbb905e1180..00a7c44ddf0 100755
--- a/buildscripts/infrastructure/build-nodes/scripts/install-valgrind.sh
+++ b/buildscripts/infrastructure/build-nodes/scripts/install-valgrind.sh
@@ -9,7 +9,7 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
# shellcheck source=buildscripts/infrastructure/build-nodes/scripts/build_lib.sh
. "${SCRIPT_DIR}/build_lib.sh"
-TARGET_DIR="/opt"
+TARGET_DIR="${TARGET_DIR:-/opt}"
VALGRIND_VERSION=3.19.0
DIR_NAME=valgrind-${VALGRIND_VERSION}
@@ -42,4 +42,4 @@ if [ "$1" != "link-only" ]; then
fi
set_bin_symlinks "${TARGET_DIR}" "${DIR_NAME}"
-test_package "valgrind --version" "^valgrind-$VALGRIND_VERSION$"
+test_package "${TARGET_DIR}/bin/valgrind --version" "^valgrind-$VALGRIND_VERSION$"
diff --git a/buildscripts/infrastructure/build-nodes/sles-12sp5/Dockerfile b/buildscripts/infrastructure/build-nodes/sles-12sp5/Dockerfile
index 4804dba8229..40899e104af 100644
--- a/buildscripts/infrastructure/build-nodes/sles-12sp5/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/sles-12sp5/Dockerfile
@@ -1,9 +1,12 @@
-ARG IMAGE_SLES_12SP5
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_SLES_12SP5} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV PATH="/opt/bin:${PATH}"
+ENV \
+ PATH="/opt/bin:${PATH}"
ARG ARTIFACT_STORAGE
@@ -94,7 +97,7 @@ RUN zypper ref -s \
# That's why, we're going this direct way now.
ENV PATH="/usr/lib/mit/bin:${PATH}"
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -105,33 +108,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-COPY scripts/* /opt/
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files that are needed by lots of scripts (they have to be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
+
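+# Each install script is copied individually right before its RUN step so that
+# a change to a single script only invalidates the Docker layer cache from
+# that point onwards.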
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python, as GDB requires Python's shared object files; see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -144,8 +189,14 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
+
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
RUN /opt/install-cmk-dependencies.sh
+
+COPY --from=scripts install-patchelf.sh /opt/
RUN /opt/install-patchelf.sh
# The /etc/fstab does not exist in the base image we use. A missing fstab prevents OMD from
@@ -153,17 +204,12 @@ RUN /opt/install-patchelf.sh
# simply solve this by pre-creating the empty file here.
RUN touch /etc/fstab
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
-
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
-
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/sles-15sp3/Dockerfile b/buildscripts/infrastructure/build-nodes/sles-15sp3/Dockerfile
index 654b7118aa3..7f97b6ea936 100644
--- a/buildscripts/infrastructure/build-nodes/sles-15sp3/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/sles-15sp3/Dockerfile
@@ -1,13 +1,17 @@
-ARG IMAGE_SLES_15SP3
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_SLES_15SP3} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV PATH="/opt/bin:${PATH}"
+ENV \
+ PATH="/opt/bin:${PATH}"
ARG ARTIFACT_STORAGE
-RUN zypper -n --no-gpg-checks in --replacefiles \
+RUN zypper addrepo -G ${ARTIFACT_STORAGE}/repository/sles15sp3 sles15sp3 \
+ && zypper -n --no-gpg-checks in --replacefiles \
gawk \
&& zypper -n --no-gpg-checks in --replacefiles \
curl \
@@ -87,7 +91,7 @@ RUN zypper ref -s \
# That's why, we're going this direct way now.
ENV PATH="/usr/lib/mit/bin:${PATH}"
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -98,33 +102,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-COPY scripts/* /opt/
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files that are needed by lots of scripts (they have to be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
+
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python, as GDB requires Python's shared object files; see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -137,8 +183,14 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
+
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
RUN /opt/install-cmk-dependencies.sh
+
+COPY --from=scripts install-patchelf.sh /opt/
RUN /opt/install-patchelf.sh
# The /etc/fstab does not exist in the base image we use. A missing fstab prevents OMD from
@@ -146,17 +198,12 @@ RUN /opt/install-patchelf.sh
# simply solve this by pre-creating the empty file here.
RUN touch /etc/fstab
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
-
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
-
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/sles-15sp4/Dockerfile b/buildscripts/infrastructure/build-nodes/sles-15sp4/Dockerfile
index effbe398bd9..efbd4451394 100644
--- a/buildscripts/infrastructure/build-nodes/sles-15sp4/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/sles-15sp4/Dockerfile
@@ -1,9 +1,12 @@
-ARG IMAGE_SLES_15SP4
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_SLES_15SP4} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV PATH="/opt/bin:${PATH}"
+ENV \
+ PATH="/opt/bin:${PATH}"
ARG ARTIFACT_STORAGE
@@ -87,7 +90,7 @@ RUN zypper ref -s \
# That's why, we're going this direct way now.
ENV PATH="/usr/lib/mit/bin:${PATH}"
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -98,33 +101,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-COPY scripts/* /opt/
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files that are needed by lots of scripts (they have to be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
+
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python, as GDB requires Python's shared object files; see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -137,8 +182,14 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
+
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
RUN /opt/install-cmk-dependencies.sh
+
+COPY --from=scripts install-patchelf.sh /opt/
RUN /opt/install-patchelf.sh
# The /etc/fstab does not exist in the base image we use. A missing fstab prevents OMD from
@@ -146,17 +197,12 @@ RUN /opt/install-patchelf.sh
# simply solve this by pre-creating the empty file here.
RUN touch /etc/fstab
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
-
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
-
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/sles-15sp5/Dockerfile b/buildscripts/infrastructure/build-nodes/sles-15sp5/Dockerfile
index 41d622bbdd1..b99a4b3d1a4 100644
--- a/buildscripts/infrastructure/build-nodes/sles-15sp5/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/sles-15sp5/Dockerfile
@@ -1,9 +1,12 @@
-ARG IMAGE_SLES_15SP5
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_SLES_15SP5} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV PATH="/opt/bin:${PATH}"
+ENV \
+ PATH="/opt/bin:${PATH}"
ARG ARTIFACT_STORAGE
@@ -86,7 +89,7 @@ RUN zypper ref -s \
# That's why, we're going this direct way now.
ENV PATH="/usr/lib/mit/bin:${PATH}"
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -97,33 +100,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-COPY scripts/* /opt/
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files that are needed by lots of scripts (they have to be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
+
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python, as GDB requires Python's shared object files; see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -136,8 +181,14 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
+
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
RUN /opt/install-cmk-dependencies.sh
+
+COPY --from=scripts install-patchelf.sh /opt/
RUN /opt/install-patchelf.sh
# The /etc/fstab does not exist in the base image we use. A missing fstab prevents OMD from
@@ -145,17 +196,12 @@ RUN /opt/install-patchelf.sh
# simply solve this by pre-creating the empty file here.
RUN touch /etc/fstab
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
-
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
-
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/ubuntu-20.04/Dockerfile b/buildscripts/infrastructure/build-nodes/ubuntu-20.04/Dockerfile
index 518152ed2d3..4cf98e41d65 100644
--- a/buildscripts/infrastructure/build-nodes/ubuntu-20.04/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/ubuntu-20.04/Dockerfile
@@ -1,49 +1,47 @@
-ARG IMAGE_UBUNTU_20_04
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
# Create base image
-FROM ${IMAGE_UBUNTU_20_04} as base
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV DEBIAN_FRONTEND=noninteractive LC_ALL=C.UTF-8 LANG=C.UTF-8 PATH="/opt/bin:${PATH}"
+ENV \
+ DEBIAN_FRONTEND=noninteractive \
+ LC_ALL=C.UTF-8 \
+ LANG=C.UTF-8 \
+ PATH="/opt/bin:${PATH}"
+
+RUN apt-get update
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
dialog \
git \
graphviz \
- libenchant1c2a \
librrd-dev \
libsasl2-dev \
libxml2 \
make \
nullmailer \
- php7.4-common \
- python-dev \
python3-pip \
rrdtool \
strace \
sudo \
- vim \
- && rm -rf /var/lib/apt/lists/*
+ vim
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
apache2 \
apache2-dev \
autoconf \
build-essential \
bison \
curl \
- default-jdk \
devscripts \
dnsutils \
- dpatch \
flex \
gawk \
gettext \
git-buildpackage \
gtk-doc-tools \
- golang-go \
iputils-ping \
joe \
libc6-dbg \
@@ -71,7 +69,6 @@ RUN apt-get update \
libtool \
libxml2-dev \
libxmlsec1-dev \
- msitools \
openssh-client \
patch \
rpcbind \
@@ -80,9 +77,11 @@ RUN apt-get update \
software-properties-common \
texinfo \
tk-dev \
- uuid-dev \
- upx-ucl \
- && rm -rf /var/lib/apt/lists/*
+ uuid-dev
+
+# remove apt's daily cron job: we don't need it and it causes problems, see
+# https://jira.lan.tribe29.com/browse/CMK-16607
+RUN rm /etc/cron.daily/apt-compat
# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
@@ -95,51 +94,61 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-
-COPY scripts/.bazelversion /opt/
-COPY scripts/install-iwyu.sh /opt/
-COPY scripts/Check_MK-pubkey.gpg /opt/
-
-# used by lots of scripts (has to be copied to `scripts/` before)
-COPY scripts/package_versions.bzl /opt/
-COPY scripts/defines.make /opt/
-
-# used by install scripts
-COPY scripts/build_lib.sh /opt/
-
-COPY scripts/install-gnu-toolchain.sh /opt/
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files that are needed by lots of scripts (they have to be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
+
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
-COPY scripts/install-valgrind.sh /opt/
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
-COPY scripts/install-cmake.sh /opt/
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
-COPY scripts/install-protobuf-cpp.sh /opt/
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
-COPY scripts/install-openssl.sh /opt/
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
-COPY scripts/install-python.sh /opt/
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
-COPY scripts/install-freetds.sh /opt/
+# install GDB after Python, as GDB requires Python's shared object files; see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
-COPY scripts/install-rust-cargo.sh /opt/
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
-COPY scripts/strip_binaries /opt/
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
-COPY scripts/install-bazel.sh /opt/
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
### Actual Build Image ###
@@ -149,8 +158,11 @@ FROM base
COPY --from=builder /opt /opt
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -160,65 +172,25 @@ RUN /opt/install-protobuf-cpp.sh --link-only
RUN /opt/install-python.sh link-only
RUN /opt/install-freetds.sh link-only
RUN /opt/install-bazel.sh link-only
-
-# Exclusively for this distro at the moment, because it is used by several CI
-# jobs for special tasks (checking out git, "make dist", orchestrating other
-# containers, ...). Scripts should be usable with other Ubuntu versions.
-COPY scripts/install-docker.sh /opt/
-RUN /opt/install-docker.sh
-
-COPY scripts/install-make-dist-deps.sh /opt/
-RUN /opt/install-make-dist-deps.sh
-
-COPY scripts/install-nodejs.sh /opt/
-RUN /opt/install-nodejs.sh
-
-COPY scripts/install-clang.sh /opt/
-RUN /opt/install-clang.sh
-
-COPY scripts/install-shellcheck.sh /opt/
-RUN /opt/install-shellcheck.sh
-
-COPY scripts/install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh link-only
-COPY scripts/install-musl-tools.sh /opt/
-RUN /opt/install-musl-tools.sh
-
# Install non cached dependencies
-COPY scripts/install-pipenv.sh /opt/
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
-COPY scripts/UBUNTU_20.04.mk /opt/
-COPY scripts/install-cmk-dependencies.sh /opt/
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
RUN /opt/install-cmk-dependencies.sh
-COPY scripts/install-patchelf.sh /opt/
+COPY --from=scripts install-patchelf.sh /opt/
RUN /opt/install-patchelf.sh
-COPY scripts/install-packer.sh /opt/
-RUN /opt/install-packer.sh
-
-COPY scripts/install-aws-cli.sh /opt/
-RUN /opt/install-aws-cli.sh
-
-COPY scripts/install-buildifier.sh /opt/
-RUN /opt/install-buildifier.sh
-
-# save a massive 108kB of space
-RUN rm /opt/*.sh /opt/strip_binaries /opt/*.mk
-
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
-
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
-
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/ubuntu-22.04/Dockerfile b/buildscripts/infrastructure/build-nodes/ubuntu-22.04/Dockerfile
index cc6c1f1079e..a2cc1ab7479 100644
--- a/buildscripts/infrastructure/build-nodes/ubuntu-22.04/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/ubuntu-22.04/Dockerfile
@@ -1,12 +1,19 @@
-ARG IMAGE_UBUNTU_22_04
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_UBUNTU_22_04} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV DEBIAN_FRONTEND=noninteractive LC_ALL=C.UTF-8 LANG=C.UTF-8 PATH="/opt/bin:${PATH}"
+ENV \
+ DEBIAN_FRONTEND=noninteractive \
+ LC_ALL=C.UTF-8 \
+ LANG=C.UTF-8 \
+ PATH="/opt/bin:${PATH}"
+
+RUN apt-get update
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
dialog \
git \
graphviz \
@@ -21,11 +28,9 @@ RUN apt-get update \
rrdtool \
strace \
sudo \
- vim \
- && rm -rf /var/lib/apt/lists/*
+ vim
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get install -y \
apache2 \
apache2-dev \
autoconf \
@@ -75,10 +80,13 @@ RUN apt-get update \
software-properties-common \
texinfo \
tk-dev \
- uuid-dev \
- && rm -rf /var/lib/apt/lists/*
+ uuid-dev
+
+# remove apt's daily cron job: we don't need it and it causes problems, see
+# https://jira.lan.tribe29.com/browse/CMK-16607
+RUN rm /etc/cron.daily/apt-compat
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -89,33 +97,75 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files that lots of scripts need (they have to be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
-COPY scripts/* /opt/
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python as it requires shared object files, see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -128,21 +178,22 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
-RUN /opt/install-cmk-dependencies.sh
-RUN /opt/install-patchelf.sh
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
+RUN /opt/install-cmk-dependencies.sh
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
+COPY --from=scripts install-patchelf.sh /opt/
+RUN /opt/install-patchelf.sh
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/infrastructure/build-nodes/ubuntu-23.04/Dockerfile b/buildscripts/infrastructure/build-nodes/ubuntu-23.04/Dockerfile
deleted file mode 100644
index f96c6195ca1..00000000000
--- a/buildscripts/infrastructure/build-nodes/ubuntu-23.04/Dockerfile
+++ /dev/null
@@ -1,152 +0,0 @@
-ARG IMAGE_UBUNTU_23_04
-# hadolint ignore=DL3006
-FROM ${IMAGE_UBUNTU_23_04} as base
-
-SHELL ["/bin/bash", "-c"]
-ENV DEBIAN_FRONTEND=noninteractive LC_ALL=C.UTF-8 LANG=C.UTF-8 PATH="/opt/bin:${PATH}"
-
-RUN apt-get update \
- && apt-get install -y \
- dialog \
- gdebi \
- git \
- graphviz \
- librrd-dev \
- libsasl2-dev \
- libxml2 \
- make \
- nullmailer \
- php-common \
- python3-dev \
- python3-pip \
- rrdtool \
- strace \
- sudo \
- vim \
- && rm -rf /var/lib/apt/lists/*
-
-RUN apt-get update \
- && apt-get install -y \
- apache2 \
- apache2-dev \
- autoconf \
- build-essential \
- bison \
- curl \
- devscripts \
- dnsutils \
- flex \
- gawk \
- gettext \
- git-buildpackage \
- gtk-doc-tools \
- iputils-ping \
- joe \
- libc6-dbg \
- libcurl4-openssl-dev \
- libevent-dev \
- libffi-dev \
- libfreeradius-dev \
- libgd-dev \
- libglib2.0-dev \
- libgnutls28-dev \
- libgsf-1-dev \
- libkrb5-dev \
- libldap2-dev \
- libltdl-dev \
- libmcrypt-dev \
- libmysqlclient-dev \
- libncurses5-dev \
- libpango1.0-dev \
- libpcap-dev \
- libperl-dev \
- libpq-dev \
- libreadline-dev \
- libsqlite3-dev \
- libssl-dev \
- libtool \
- libxml2-dev \
- libxmlsec1-dev \
- openssh-client \
- patch \
- rpcbind \
- rsync \
- smbclient \
- software-properties-common \
- texinfo \
- tk-dev \
- uuid-dev \
- && rm -rf /var/lib/apt/lists/*
-
-# Install our standard tool chain for building
-# - gnu-toolchain is needed for compiling all the C++ stuff
-# - cmake is needed for e.g. building re2
-# - openssl is needed by Python 3.7+
-# - python is needed by our build / test chain
-FROM base as builder
-ARG NEXUS_ARCHIVES_URL
-ARG NEXUS_USERNAME
-ARG NEXUS_PASSWORD
-ARG DISTRO
-ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-COPY scripts/* /opt/
-RUN /opt/install-gnu-toolchain.sh
-RUN /opt/install-valgrind.sh
-RUN /opt/install-cmake.sh
-RUN /opt/install-protobuf-cpp.sh
-RUN /opt/install-openssl.sh
-RUN /opt/install-python.sh
-RUN /opt/install-freetds.sh
-RUN /opt/install-rust-cargo.sh
-
-# Now shrink all the binaries and libraries we produced to build a small image
-# in the next step
-RUN /opt/strip_binaries /opt
-
-# Run this AFTER strip_binaries!!
-RUN /opt/install-bazel.sh
-
-### Actual Image ###
-FROM base
-
-# Copy our standard tool chain for building
-COPY --from=builder /opt /opt
-ARG DISTRO
-ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-# Set symlinks
-RUN /opt/install-gnu-toolchain.sh link-only
-RUN /opt/install-valgrind.sh link-only
-RUN /opt/install-cmake.sh link-only
-RUN /opt/install-protobuf-cpp.sh --link-only
-RUN /opt/install-python.sh link-only
-RUN /opt/install-freetds.sh link-only
-RUN /opt/install-bazel.sh link-only
-RUN /opt/install-rust-cargo.sh link-only
-
-# Install non cached dependencies
-RUN /opt/install-pipenv.sh
-RUN /opt/install-cmk-dependencies.sh
-RUN /opt/install-patchelf.sh
-
-# Ubuntu 23.04 introduces a user group named "ubuntu" with the ID 1000
-# Jenkins is using this group ID everywhere, so lets move it
-RUN groupmod -g 3000 ubuntu
-
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
-
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
-
-ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
-
-ENV RUSTUP_HOME=/opt/rust/rustup
-LABEL \
- com.tribe29.image_type="build-image"
diff --git a/buildscripts/infrastructure/build-nodes/ubuntu-23.10/Dockerfile b/buildscripts/infrastructure/build-nodes/ubuntu-23.10/Dockerfile
index 20796ce7752..8ae7f07df9a 100644
--- a/buildscripts/infrastructure/build-nodes/ubuntu-23.10/Dockerfile
+++ b/buildscripts/infrastructure/build-nodes/ubuntu-23.10/Dockerfile
@@ -1,12 +1,19 @@
-ARG IMAGE_UBUNTU_23_10
+ARG DISTRO_IMAGE_BASE
+
# hadolint ignore=DL3006
-FROM ${IMAGE_UBUNTU_23_10} as base
+# Create base image
+FROM ${DISTRO_IMAGE_BASE} as base
SHELL ["/bin/bash", "-c"]
-ENV DEBIAN_FRONTEND=noninteractive LC_ALL=C.UTF-8 LANG=C.UTF-8 PATH="/opt/bin:${PATH}"
+ENV \
+ DEBIAN_FRONTEND=noninteractive \
+ LC_ALL=C.UTF-8 \
+ LANG=C.UTF-8 \
+ PATH="/opt/bin:${PATH}"
-RUN apt-get update \
- && apt-get install -y \
+RUN apt-get update
+
+RUN apt-get install -y \
dialog \
gdebi \
git \
@@ -22,8 +29,7 @@ RUN apt-get update \
rrdtool \
strace \
sudo \
- vim \
- && rm -rf /var/lib/apt/lists/*
+ vim
RUN apt-get update \
&& apt-get install -y \
@@ -75,10 +81,13 @@ RUN apt-get update \
software-properties-common \
texinfo \
tk-dev \
- uuid-dev \
- && rm -rf /var/lib/apt/lists/*
+ uuid-dev
+
+# remove the apt daily service: we don't need it and it causes problems, see
+# https://jira.lan.tribe29.com/browse/CMK-16607
+RUN rm /etc/cron.daily/apt-compat
-# Install our standard tool chain for building
+# Install our standard tool chain for building in a separate container
# - gnu-toolchain is needed for compiling all the C++ stuff
# - cmake is needed for e.g. building re2
# - openssl is needed by Python 3.7+
@@ -89,33 +98,74 @@ ARG NEXUS_USERNAME
ARG NEXUS_PASSWORD
ARG DISTRO
ARG BRANCH_VERSION
-ENV NEXUS_ARCHIVES_URL="$NEXUS_ARCHIVES_URL" NEXUS_USERNAME="$NEXUS_USERNAME" NEXUS_PASSWORD="$NEXUS_PASSWORD" DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
-
-COPY scripts/* /opt/
+ENV \
+ NEXUS_ARCHIVES_URL="${NEXUS_ARCHIVES_URL}" \
+ NEXUS_USERNAME="${NEXUS_USERNAME}" \
+ NEXUS_PASSWORD="${NEXUS_PASSWORD}" \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
+
+# Copy over files that lots of scripts need (they have to be copied into the build context beforehand)
+COPY \
+ .bazelversion \
+ package_versions.bzl \
+ static_variables.bzl \
+ defines.make \
+ /opt/
+COPY --from=scripts \
+ build_lib.sh \
+ Check_MK-pubkey.gpg \
+ /opt/
+
+COPY --from=scripts install-gnu-toolchain.sh /opt/
RUN /opt/install-gnu-toolchain.sh
+
+COPY --from=scripts install-valgrind.sh /opt/
RUN /opt/install-valgrind.sh
+
+COPY --from=scripts install-cmake.sh /opt/
RUN /opt/install-cmake.sh
+
+COPY --from=scripts install-protobuf-cpp.sh /opt/
RUN /opt/install-protobuf-cpp.sh
+
+COPY --from=scripts install-openssl.sh /opt/
RUN /opt/install-openssl.sh
+
+COPY --from=scripts install-python.sh /opt/
RUN /opt/install-python.sh
+
+# install GDB after Python as it requires shared object files, see CMK-15854
+COPY --from=scripts install-gdb.sh /opt/
+RUN /opt/install-gdb.sh
+
+COPY --from=scripts install-freetds.sh /opt/
RUN /opt/install-freetds.sh
+
+COPY --from=scripts install-rust-cargo.sh /opt/
RUN /opt/install-rust-cargo.sh
# Now shrink all the binaries and libraries we produced to build a small image
# in the next step
+COPY strip_binaries /opt/
RUN /opt/strip_binaries /opt
# Run this AFTER strip_binaries!!
+COPY --from=scripts install-bazel.sh /opt/
RUN /opt/install-bazel.sh
-### Actual Image ###
+### Actual Build Image ###
FROM base
# Copy our standard tool chain for building
COPY --from=builder /opt /opt
+
ARG DISTRO
+ARG DISTRO_MK_FILE
ARG BRANCH_VERSION
-ENV DISTRO="$DISTRO" BRANCH_VERSION="$BRANCH_VERSION"
+ENV \
+ DISTRO="${DISTRO}" \
+ BRANCH_VERSION="${BRANCH_VERSION}"
# Set symlinks
RUN /opt/install-gnu-toolchain.sh link-only
@@ -128,25 +178,26 @@ RUN /opt/install-bazel.sh link-only
RUN /opt/install-rust-cargo.sh link-only
# Install non cached dependencies
+COPY --from=scripts install-pipenv.sh /opt/
RUN /opt/install-pipenv.sh
+
+COPY --from=omd_distros "${DISTRO_MK_FILE}" /opt/
+COPY --from=scripts install-cmk-dependencies.sh /opt/
RUN /opt/install-cmk-dependencies.sh
+
+COPY --from=scripts install-patchelf.sh /opt/
RUN /opt/install-patchelf.sh
# Ubuntu 23.10 introduces a user group named "ubuntu" with the ID 1000
# Jenkins is using this group ID everywhere, so lets move it
RUN groupmod -g 3000 ubuntu
-# Ensure all our build containers have the jenkins user (with same uid/gid). The non privileged
-# jobs will be executed as this user in the container
-RUN groupadd -g 1000 jenkins \
- && useradd -m -u 1001 -g 1000 -s /bin/bash jenkins
-
-RUN mkdir -p /home/jenkins/.cache/ \
- && chown jenkins:jenkins /home/jenkins/.cache/
-
ARG VERS_TAG
-RUN echo $VERS_TAG > /version.txt
+RUN echo "${VERS_TAG}" > /version.txt
ENV RUSTUP_HOME=/opt/rust/rustup
LABEL \
- com.tribe29.image_type="build-image"
+ com.checkmk.image_type="build-image"
+
+COPY --from=dev_images entrypoint.sh /opt/
+ENTRYPOINT ["/opt/entrypoint.sh"]
diff --git a/buildscripts/scripts/assert-release-build-artifacts.groovy b/buildscripts/scripts/assert-release-build-artifacts.groovy
new file mode 100644
index 00000000000..babfdf5cb7a
--- /dev/null
+++ b/buildscripts/scripts/assert-release-build-artifacts.groovy
@@ -0,0 +1,35 @@
+#!groovy
+
+// file: assert-release-build-artifacts.groovy
+
+def main() {
+ stage("Assert release build artifacts") {
+ check_job_parameters([
+ "VERSION_TO_CHECK",
+ ])
+ inside_container(
+ set_docker_group_id: true,
+ mount_credentials: true,
+ priviliged: true,
+ ) {
+ withCredentials([
+ usernamePassword(
+ credentialsId: 'nexus',
+ passwordVariable: 'NEXUS_PASSWORD',
+ usernameVariable: 'NEXUS_USER')]) {
+ withEnv(["PYTHONUNBUFFERED=1"]) {
+ dir("${checkout_dir}") {
+ sh(script: """scripts/run-pipenv run \
+ buildscripts/scripts/get_distros.py \
+ --editions_file "${checkout_dir}/editions.yml" \
+ assert_build_artifacts \
+ --version "${VERSION_TO_CHECK}" \
+ """);
+ }
+ }
+ }
+ }
+ }
+}
+
+return this;
diff --git a/buildscripts/scripts/bazel_execution_log_parser.py b/buildscripts/scripts/bazel_execution_log_parser.py
new file mode 100755
index 00000000000..13fcd276686
--- /dev/null
+++ b/buildscripts/scripts/bazel_execution_log_parser.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+# Copyright (C) 2023 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+import argparse
+import json
+from pathlib import Path
+from pprint import pprint as pp
+from typing import Iterator, NamedTuple
+
+from pydantic import BaseModel
+
+
+class Summary(NamedTuple):
+ overallTargets: int
+ cacheHits: int
+ percentRemoteCacheHits: float
+ targetsWithMissedCache: list[str]
+ numberUncacheableTargets: int
+ numberRemotableTargets: int
+
+
+class ExecutionMetrics(BaseModel):
+ targetLabel: str
+ cacheHit: bool
+ cacheable: bool
+ remotable: bool
+
+
+def parse_arguments() -> argparse.Namespace:
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument(
+ "--execution_logs_root", default=Path(__file__).resolve().parent.parent.parent, type=Path
+ )
+
+ parser.add_argument(
+ "--bazel_log_file_pattern",
+ default="bazel_execution_log*.json",
+ )
+
+ parser.add_argument(
+ "--summary_file",
+ default=Path(__file__).resolve().parent.parent.parent / "bazel_statistics.json",
+ type=Path,
+ )
+
+ parser.add_argument(
+ "--cachehit_csv",
+ )
+
+ parser.add_argument(
+ "--distro",
+ default="unknown",
+ )
+ return parser.parse_args()
+
+
+def parse_execution_logs(log_files: list[Path]) -> Iterator[ExecutionMetrics]:
+ """
+ Parse the bazel execution logs.
+ We need to use raw_decode as the logs are not valid JSON, see:
+ https://github.com/bazelbuild/bazel/issues/14209
+ """
+ for log_file in log_files:
+ with open(log_file) as f:
+ data = f.read()
+
+ decoder = json.JSONDecoder()
+ d = data
+ while len(d):
+ (parsed_data, offset) = decoder.raw_decode(d)
+ d = d[offset:]
+ yield ExecutionMetrics(**parsed_data)
+
+
+def build_summary(parsed_logs: list[ExecutionMetrics]) -> Summary:
+ overall_targets = len(parsed_logs)
+ cache_hits = sum(1 for log in parsed_logs if log.cacheHit)
+
+ return Summary(
+ overallTargets=overall_targets,
+ cacheHits=cache_hits,
+ percentRemoteCacheHits=round(cache_hits / overall_targets * 100, 2),
+ targetsWithMissedCache=[log.targetLabel for log in parsed_logs if not log.cacheHit],
+ numberUncacheableTargets=sum(1 for log in parsed_logs if not log.cacheable),
+ numberRemotableTargets=sum(1 for log in parsed_logs if log.remotable),
+ )
+
+
+def write_statistics(summary: Summary, file_name: Path) -> None:
+ with open(file_name, "w") as f:
+ json.dump(summary._asdict(), f)
+
+
+def write_cachehit_csv(summary: Summary, file_path: Path, distro: str) -> None:
+ print(f"Writing cachehit csv to {file_path}")
+ with open(file_path, "w") as f:
+ f.write(f'"{distro}"\n')
+ f.write(f"{summary.percentRemoteCacheHits}\n")
+
+
+def main():
+ args = parse_arguments()
+
+ bazel_log_files = list(args.execution_logs_root.glob(args.bazel_log_file_pattern))
+ print("Analyzing the following log files: ")
+ pp(bazel_log_files)
+
+ summary = build_summary(list(parse_execution_logs(bazel_log_files)))
+ write_statistics(summary, args.summary_file)
+ pp(summary._asdict())
+
+ if args.cachehit_csv:
+ write_cachehit_csv(summary, args.cachehit_csv, args.distro)
+
+
+if __name__ == "__main__":
+ main()
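
The parser above walks a stream of concatenated JSON objects with JSONDecoder.raw_decode(), since bazel's execution log is not one valid JSON document (bazelbuild/bazel#14209). A minimal standalone sketch of the same technique, with explicit whitespace skipping between documents — raw_decode() itself raises on leading whitespace, so streams whose objects are newline-separated need this:

import json
from collections.abc import Iterator
from typing import Any


def iter_concatenated_json(data: str) -> Iterator[Any]:
    """Yield every JSON document from a concatenated stream."""
    decoder = json.JSONDecoder()
    pos = 0
    while pos < len(data):
        # skip separators (e.g. newlines) between documents
        while pos < len(data) and data[pos].isspace():
            pos += 1
        if pos >= len(data):
            break
        obj, pos = decoder.raw_decode(data, pos)
        yield obj


assert list(iter_concatenated_json('{"cacheHit": true}\n{"cacheHit": false}')) == [
    {"cacheHit": True},
    {"cacheHit": False},
]
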
diff --git a/buildscripts/scripts/build-announcement.groovy b/buildscripts/scripts/build-announcement.groovy
index c6334786400..385edeeaf30 100644
--- a/buildscripts/scripts/build-announcement.groovy
+++ b/buildscripts/scripts/build-announcement.groovy
@@ -5,7 +5,6 @@
// Builds a tar.gz which contains announcement text for publishing in the forum and on the mailing list.
// Artifacts will be consumed by bw-release.
-
def main() {
stage("Clean workspace") {
dir("${checkout_dir}") {
@@ -14,7 +13,7 @@ def main() {
}
}
stage("Build announcement") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
+ inside_container() {
dir("${checkout_dir}") {
def announce_file = sh(script: 'make print-CHECK_MK_ANNOUNCE_TAR_FILE', returnStdout: true).trim();
sh(script: "make announcement");
diff --git a/buildscripts/scripts/build-build-images.groovy b/buildscripts/scripts/build-build-images.groovy
index 11119276c9e..36591a7f609 100644
--- a/buildscripts/scripts/build-build-images.groovy
+++ b/buildscripts/scripts/build-build-images.groovy
@@ -16,6 +16,7 @@ def main() {
"PUBLISH_IMAGES",
"OVERRIDE_DISTROS",
"BUILD_IMAGE_WITHOUT_CACHE",
+ "CIPARAM_OVERRIDE_DOCKER_TAG_BUILD",
]);
check_environment_variables([
@@ -24,9 +25,10 @@ def main() {
]);
def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def distros = versioning.configured_or_overridden_distros("enterprise", OVERRIDE_DISTROS);
+ def all_distros = versioning.get_distros(override: "all")
+ def distros = versioning.get_distros(edition: "all", use_case: "all", override: OVERRIDE_DISTROS);
- def vers_tag = versioning.get_docker_tag(scm, checkout_dir);
+ def vers_tag = params.CIPARAM_OVERRIDE_DOCKER_TAG_BUILD ?: versioning.get_docker_tag(scm, checkout_dir);
def branch_name = versioning.safe_branch_name(scm);
def branch_version = versioning.get_branch_version(checkout_dir);
def publish_images = PUBLISH_IMAGES=='true'; // FIXME should be case sensitive
@@ -34,6 +36,7 @@ def main() {
print(
"""
|===== CONFIGURATION ===============================
+ |all_distros:....................(local) │${all_distros}│
|distros:........................(local) │${distros}│
|publish_images:.................(local) │${publish_images}│
|vers_tag:.......................(local) │${vers_tag}│
@@ -48,71 +51,132 @@ def main() {
|${distros}
|""".stripMargin());
+ stage("Prepare workspace") {
+ dir("${checkout_dir}") {
+ sh("""
+ rm -rf temp-build-context
+ mkdir temp-build-context
+ defines/dev-images/populate-build-context.sh temp-build-context
+ """);
+ }
+ }
withCredentials([
usernamePassword(
credentialsId: 'nexus',
- usernameVariable: 'USERNAME',
- passwordVariable: 'PASSWORD')
+ usernameVariable: 'NEXUS_USERNAME',
+ passwordVariable: 'NEXUS_PASSWORD')
]) {
- def alias_names = [:];
- def image_ids = [:];
- def dockerfiles = [:];
-
- dir("${checkout_dir}") {
- distros.each { distro ->
- dockerfiles[distro] = "${distro}/Dockerfile";
- alias_names[distro] = cmd_output(
- "grep 'ARG IMAGE_' " +
- "buildscripts/infrastructure/build-nodes/${dockerfiles[distro]}" +
- " | awk '{print \$2}'").replaceAll("[\r\n]+", "");
- image_ids[distro] = resolve_docker_image_alias(alias_names[distro]);
+ def distro_base_image_id = [:];
+ def real_distro_name = [:];
+
+ stage("Provide\nupstream images") {
+ dir("${checkout_dir}") {
+ distro_base_image_id = distros.collectEntries { distro -> [
+ (distro) : {
+ real_distro_name[distro] = cmd_output(
+ "basename \$(realpath buildscripts/infrastructure/build-nodes/${distro})");
+ resolve_docker_image_alias(
+ "IMAGE_${real_distro_name[distro].toUpperCase().replaceAll('\\.', '_').replaceAll('-', '_')}")
+ }()
+ ]};
}
- sh("""
- cp defines.make package_versions.bzl .bazelversion omd/strip_binaries \
- buildscripts/infrastructure/build-nodes/scripts
-
- cp omd/distros/*.mk buildscripts/infrastructure/build-nodes/scripts
- """);
}
- dir("${checkout_dir}/buildscripts/infrastructure/build-nodes") {
- // TODO: here it would be nice to iterate through all known distros
- // and use a conditional_stage(distro in distros) approach
- def stages = distros.collectEntries { distro ->
- [("${distro}") : {
- stage("Build ${distro}") {
- def DOCKER_ARGS = (
- " --build-arg ${alias_names[distro]}=${image_ids[distro]}" +
- " --build-arg DOCKER_REGISTRY='${docker_registry_no_http}'" +
- " --build-arg NEXUS_ARCHIVES_URL='$NEXUS_ARCHIVES_URL'" +
- " --build-arg DISTRO='$distro'" +
- " --build-arg NEXUS_USERNAME='$USERNAME'" +
- " --build-arg NEXUS_PASSWORD='$PASSWORD'" +
- " --build-arg ARTIFACT_STORAGE='$ARTIFACT_STORAGE'" +
- " --build-arg VERS_TAG='$vers_tag'" +
- " --build-arg BRANCH_VERSION='$branch_version'" +
- " -f ${dockerfiles[distro]} .");
-
- if (params.BUILD_IMAGE_WITHOUT_CACHE) {
- DOCKER_ARGS = "--no-cache " + DOCKER_ARGS;
- }
- docker.build("${distro}:${vers_tag}", DOCKER_ARGS);
- }}
- ]
- }
- def images = parallel(stages);
+ // TODO: here it would be nice to iterate through all known distros
+ // and use a conditional_stage(distro in distros) approach
+ def stages = all_distros.collectEntries { distro ->
+ [("${distro}") : {
+ conditional_stage("Build\n${distro}", distro in distros) {
+ def image_name = "${distro}:${vers_tag}";
+ def distro_mk_file_name = "${real_distro_name[distro].toUpperCase().replaceAll('-', '_')}.mk";
+ def docker_build_args = (""
+ + " --build-context scripts=buildscripts/infrastructure/build-nodes/scripts"
+ + " --build-context omd_distros=omd/distros"
+ + " --build-context dev_images=defines/dev-images"
+
+ + " --build-arg DISTRO_IMAGE_BASE='${distro_base_image_id[distro]}'"
+ + " --build-arg DISTRO_MK_FILE='${distro_mk_file_name}'"
+ + " --build-arg DISTRO='${distro}'"
+ + " --build-arg VERS_TAG='${vers_tag}'"
+ + " --build-arg BRANCH_VERSION='${branch_version}'"
+
+ + " --build-arg DOCKER_REGISTRY='${docker_registry_no_http}'"
+ + " --build-arg NEXUS_ARCHIVES_URL='${NEXUS_ARCHIVES_URL}'"
+ + " --build-arg NEXUS_USERNAME='${NEXUS_USERNAME}'"
+ + " --build-arg NEXUS_PASSWORD='${NEXUS_PASSWORD}'"
+ + " --build-arg ARTIFACT_STORAGE='${ARTIFACT_STORAGE}'"
+
+ + " -f 'buildscripts/infrastructure/build-nodes/${distro}/Dockerfile'"
+ + " temp-build-context"
+ );
+
+ if (params.BUILD_IMAGE_WITHOUT_CACHE) {
+ docker_build_args = "--no-cache " + docker_build_args;
+ }
+ dir("${checkout_dir}") {
+ docker.build(image_name, docker_build_args);
+ }
+ }
+ }
+ ]
+ }
+ def images = parallel(stages);
- conditional_stage('upload images', publish_images) {
- docker.withRegistry(DOCKER_REGISTRY, "nexus") {
- images.each { distro, image ->
+ conditional_stage('upload images', publish_images) {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
+ images.each { distro, image ->
+ if (image) {
image.push();
- image.push("${branch_name}-latest");
+ if (branch_name ==~ /master|\d\.\d\.\d/) {
+ image.push("${branch_name}-latest");
+ }
}
}
}
}
}
+
+ /// Build and use the reference image to check that it works at all
+ /// and to fill caches. Also run some tests to check that permissions
+ /// are fine and everything gets cleaned up.
+ stage("Use reference image") {
+ dir("${checkout_dir}") {
+ /// First check the bash script, since it yields more useful log output
+ /// in erroneous cases
+ show_duration("check reference image") {
+ sh("""
+ POPULATE_BUILD_CACHE=1 \
+ VERBOSE=1 \
+ PULL_BASE_IMAGE=1 \
+ ${checkout_dir}/scripts/run-in-docker.sh cat /etc/os-release
+ """);
+ }
+ /// also check the default way to use a container
+ inside_container() {
+ sh("""
+ echo Hello from reference image
+ cat /etc/os-release
+ echo \$USER
+ echo \$HOME
+ ls -alF \$HOME
+ ls -alF \$HOME/.cache
+ echo fcache > \$HOME/.cache/fcache
+ ls -alF ${checkout_dir}/shared_cargo_folder
+ echo fcargo > ${checkout_dir}/shared_cargo_folder/fcargo
+ """);
+ sh("git status");
+ }
+ /// also test the run-in-docker.sh script
+ sh("""
+ ${checkout_dir}/scripts/run-in-docker.sh cat /etc/os-release
+ """);
+ }
+ }
}
return this;
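
The "Provide upstream images" stage above derives the docker image alias from the resolved distro directory name (upper-case, dots and dashes to underscores), while the .mk file name replaces dashes only. A small Python mirror of the two Groovy transforms, using distro names from this change:

def image_alias(real_distro_name: str) -> str:
    # e.g. "ubuntu-22.04" -> "IMAGE_UBUNTU_22_04", the alias that
    # resolve_docker_image_alias() looks up
    return "IMAGE_" + real_distro_name.upper().replace(".", "_").replace("-", "_")


def distro_mk_file_name(real_distro_name: str) -> str:
    # e.g. "ubuntu-22.04" -> "UBUNTU_22.04.mk"; note the dot is kept
    return real_distro_name.upper().replace("-", "_") + ".mk"


assert image_alias("ubuntu-23.10") == "IMAGE_UBUNTU_23_10"
assert distro_mk_file_name("ubuntu-22.04") == "UBUNTU_22.04.mk"
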
diff --git a/buildscripts/scripts/build-cloud-images.groovy b/buildscripts/scripts/build-cloud-images.groovy
index bdb4d10a0d9..cfd2e55afe5 100644
--- a/buildscripts/scripts/build-cloud-images.groovy
+++ b/buildscripts/scripts/build-cloud-images.groovy
@@ -10,56 +10,83 @@
/// Jenkins artifacts: will be directly pushed into the cloud
/// Depends on: Ubuntu 22.04 package beeing available on download.checkmk.com (will be fetched by ansible collection)
+def build_cloud_images_names(version) {
+ def version_suffix = "${version}-build-${env.BUILD_NUMBER}";
+ return ["cmk-ami-https-${version_suffix}", "cmk-azure-${version_suffix}"]
+}
+
def main() {
check_job_parameters([
"EDITION",
"VERSION",
+ "BUILD_CLOUD_IMAGES",
+ "PUBLISH_IN_MARKETPLACE",
])
if (EDITION != 'cloud') {
- error "The AMI/Azure builds must currently *only* use the cloud edition."
+ error("The AMI/Azure builds must currently *only* use the cloud edition.");
}
def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy")
def branch_version = versioning.get_branch_version(checkout_dir);
def cmk_version = versioning.get_cmk_version(versioning.safe_branch_name(scm), branch_version, VERSION)
if (cmk_version != versioning.strip_rc_number_from_version(cmk_version)) {
- error "You may try to build a release candidate (${cmk_version}) for the cloud images but " +
+ error("You may try to build a release candidate (${cmk_version}) for the cloud images but " +
"this is currently not supported. During a release, we will build the cloud images when a package rc was " +
- "tested to be OK."
+ "tested to be OK.");
}
- shout("Building cloud images for version: ${cmk_version}")
+ shout("Building cloud images for version: ${cmk_version}");
- def version_suffix = "${cmk_version}-build-${env.BUILD_NUMBER}"
- def env_secret_map = build_env_secret_map(cmk_version, version_suffix)
+ def (ami_image_name, azure_image_name) =
+ build_cloud_images_names(cmk_version);
+ def env_secret_map = build_env_secret_map(cmk_version, ami_image_name, azure_image_name)
def cloud_targets = ["amazon-ebs", "azure-arm"]
+ def build_cloud_images = params.BUILD_CLOUD_IMAGES
+ def publish_cloud_images = params.PUBLISH_IN_MARKETPLACE
+ def packer_envvars = ['CHECKPOINT_DISABLE=1', "PACKER_CONFIG_DIR=${checkout_dir}/packer/.packer"]
currentBuild.description += (
"""
|Building the Cloud images
- |""".stripMargin())
+ |""".stripMargin());
stage('Cleanup') {
dir("${checkout_dir}") {
- sh("git clean -xdf")
+ sh("git clean -xdf");
}
}
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Packer init') {
- dir("${checkout_dir}/packer") {
+ // Build Phase
+ inside_container() {
+ smart_stage(
+ name: 'Packer init',
+ condition: build_cloud_images,
+ raiseOnError: true,
+ ) {
+ dir("${checkout_dir}/packer") {
+ // https://developer.hashicorp.com/packer/docs/configure#environment-variables-usable-for-packer
+ withEnv(packer_envvars) {
// This step cannot be done during building images as it needs the *.pkr.hcl scripts from the repo
- sh("packer init .")
+ sh("packer init .");
}
}
- parallel(create_stages(cloud_targets, env_secret_map));
+ parallel(create_build_stages(cloud_targets, env_secret_map, build_cloud_images, packer_envvars));
+ }
+ }
+
+ // Publish Phase
+ inside_container() {
+ dir("${checkout_dir}") {
+ // As we're using the same .venv for multiple cloud targets in parallel, we need to make sure the
+ // .venv is up-to-date before parallelisation. Otherwise one process may fail due to an invalid .venv.
+ sh("make .venv");
+ parallel(create_publish_stages(["aws": ami_image_name, "azure": azure_image_name], cmk_version, publish_cloud_images))
}
}
}
-def build_env_secret_map(cmk_version, version_suffix) {
+def build_env_secret_map(cmk_version, ami, azure) {
return [
"env" : [
// ~~~ COMMON ~~~
@@ -67,14 +94,14 @@ def build_env_secret_map(cmk_version, version_suffix) {
// ~~~ QUEMU ~~~
"PKR_VAR_qemu_output_dir_name=cmk",
// ~~~ AWS ~~~
- "PKR_VAR_aws_ami_name=cmk-ami-https-${version_suffix}",
+ "PKR_VAR_aws_ami_name=${ami}",
// ~~~ AZURE ~~~
"PKR_VAR_azure_resource_group=rg-packer-dev-weu",
"PKR_VAR_azure_build_resource_group_name=rg-packer-dev-weu",
"PKR_VAR_azure_virtual_network_resource_group_name=rg-spokes-network-weu",
"PKR_VAR_azure_virtual_network_name=vnet-spoke-packer-dev-weu",
"PKR_VAR_azure_virtual_network_subnet_name=snet-spoke-packer-dev-default-weu",
- "PKR_VAR_azure_image_name=cmk-azure-${version_suffix}"
+ "PKR_VAR_azure_image_name=${azure}"
],
"secrets": [
// ~~~ COMMON ~~~
@@ -101,16 +128,19 @@ def build_env_secret_map(cmk_version, version_suffix) {
credentialsId: 'azure_tenant_id',
variable: 'PKR_VAR_azure_tenant_id'),
],
- ]
-
+ ];
}
-def create_stages(cloud_targets, env_secret_map) {
+def create_build_stages(cloud_targets, env_secret_map, build_images, packer_envvars) {
return cloud_targets.collectEntries { target ->
- [("${target}"): {
- stage("Building target ${target}") {
+ [("Building target ${target}"): {
+ smart_stage(
+ name: "Building target ${target}",
+ condition: build_images,
+ raiseOnError: true,
+ ) {
withCredentials(env_secret_map["secrets"]) {
- withEnv(env_secret_map["env"]) {
+ withEnv(env_secret_map["env"] + packer_envvars) {
dir("${checkout_dir}/packer") {
sh("""
packer build -only="checkmk-ansible.${target}.builder" .;
@@ -124,4 +154,50 @@ def create_stages(cloud_targets, env_secret_map) {
}
}
-return this
+def create_publish_stages(targets_names, version, publish) {
+ return targets_names.collectEntries { target, name ->
+ [("Publish ${target} in marketplace"): {
+ smart_stage(
+ name: 'Publish in marketplace',
+ condition: publish,
+ raiseOnError: true
+ ) {
+ withEnv(["AWS_DEFAULT_REGION=us-east-1", "PYTHONUNBUFFERED=1", "AZURE_RESOURCE_GROUP=rg-packer-dev-weu"]) {
+ withCredentials([
+ string(
+ credentialsId: 'aws_publisher_secret_key',
+ variable: 'AWS_SECRET_ACCESS_KEY'),
+ string(
+ credentialsId: 'aws_publisher_access_key',
+ variable: 'AWS_ACCESS_KEY_ID'),
+ usernamePassword(
+ credentialsId: 'azure_client',
+ passwordVariable: 'AZURE_CLIENT_SECRET',
+ usernameVariable: 'AZURE_CLIENT_ID'),
+ string(
+ credentialsId: 'azure_subscription_id',
+ variable: 'SUBSCRIPTION_ID'),
+ string(
+ credentialsId: 'azure_tenant_id',
+ variable: 'AZURE_TENANT_ID'),
+ ]) {
+ // Uses global env variables from Jenkins:
+ // AWS_MARKETPLACE_SCANNER_ARN and AWS_AMI_IMAGE_PRODUCT_ID
+ sh("""
+ scripts/run-pipenv run buildscripts/scripts/publish_cloud_images.py \
+ --cloud-type ${target} --new-version ${version} \
+ --build-tag '${env.JOB_BASE_NAME}-${env.BUILD_NUMBER}' --image-name ${name} \
+ --marketplace-scanner-arn '${AWS_MARKETPLACE_SCANNER_ARN}' \
+ --product-id '${AWS_AMI_IMAGE_PRODUCT_ID}' \
+ --azure-subscription-id '${SUBSCRIPTION_ID}' \
+ --azure-resource-group '${AZURE_RESOURCE_GROUP}';
+ """)
+ }
+ }
+ }
+ }
+ ]
+ }
+}
+
+return this;
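
build_cloud_images_names() above ties the AMI and the Azure image to one shared version/build-number suffix, so the publish stages can reference exactly what the build stages produced. The same naming scheme in Python, with a hypothetical build number:

def build_cloud_images_names(version: str, build_number: int) -> list[str]:
    version_suffix = f"{version}-build-{build_number}"
    return [f"cmk-ami-https-{version_suffix}", f"cmk-azure-{version_suffix}"]


assert build_cloud_images_names("2.4.0", 17) == [
    "cmk-ami-https-2.4.0-build-17",
    "cmk-azure-2.4.0-build-17",
]
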
diff --git a/buildscripts/scripts/build-cmk-container.py b/buildscripts/scripts/build-cmk-container.py
index 3afadf27f1c..d022bb572ef 100644
--- a/buildscripts/scripts/build-cmk-container.py
+++ b/buildscripts/scripts/build-cmk-container.py
@@ -249,6 +249,9 @@ def docker_tag(
)
LOG.debug("Done")
+ image.reload()
+ LOG.debug(f"Final image tags: {image.tags}")
+
def docker_login(registry: str, docker_username: str, docker_passphrase: str) -> None:
"""Log into a registry"""
@@ -400,22 +403,34 @@ def build_tar_gz(
LOG.info(f"Creating Image-Tarball {tar_name} ...")
if "-rc" in version_tag:
- LOG.info(f"{version_tag} contains rc information, do a retagging before docker save.")
+ LOG.info(
+ f"{version_tag} contains rc information, do a retagging before docker save with {args.version}."
+ )
+
+ # image.tag() is required to make image.save() work properly.
+ # See docs of image.save(chunk_size=2097152, named=False):
+ # If set to True, the first tag in the tags list will be used to identify the image.
+ # Alternatively, any element of the tags list can be used as an argument to use that specific tag as the saved identifier.
image.tag(
repository=f"{docker_repo_name}/check-mk-{args.edition}",
tag=f"{args.version}",
)
-
+ # reload this object from the server and update attrs
+ image.reload()
+ LOG.debug(f"Image tags after re-tagging: {image.tags}")
+ this_tag = f"{docker_repo_name}/check-mk-{args.edition}:{args.version}"
with gzip.open(tar_name, "wb") as tar_ball:
- for chunk in image.save():
+ # image.save() only accepts elements of the image's tags list.
+ # New tags are appended to that list, so without the named keyword the "oldest" tag would be used.
+ for chunk in image.save(named=this_tag):
tar_ball.write(chunk)
-
- docker_client.images.remove(
- image=f"{docker_repo_name}/check-mk-{args.edition}:{args.version}"
+ LOG.debug(
+ f"Remove image {this_tag} now, it will be loaded from tar.gz at a later point again, see CMK-16498"
)
+ docker_client.images.remove(image=this_tag)
else:
with gzip.open(tar_name, "wb") as tar_ball:
- for chunk in image.save():
+ # "this_tag" is only bound in the rc branch above, so spell the tag out here
+ for chunk in image.save(named=f"{docker_repo_name}/check-mk-{args.edition}:{args.version}"):
tar_ball.write(chunk)
@@ -445,7 +460,7 @@ def build_image(
name=f"{args.source_path}/check-mk-{args.edition}-{args.version}{suffix}.tar.gz",
mode="r:gz",
) as tar:
- tar.extractall(tmp_path)
+ tar.extractall(tmp_path, filter="data")
LOG.info("Copy debian package ...")
run_cmd(cmd=["cp", f"{args.source_path}/{pkg_file}", docker_path])
diff --git a/buildscripts/scripts/build-cmk-distro-package.groovy b/buildscripts/scripts/build-cmk-distro-package.groovy
index cf2a50820d2..1d54a96d0b5 100644
--- a/buildscripts/scripts/build-cmk-distro-package.groovy
+++ b/buildscripts/scripts/build-cmk-distro-package.groovy
@@ -5,6 +5,7 @@
/// Builds a distribution package (.rpm, .dep, etc.) for a given edition/distribution
/// at a given git hash
+/* groovylint-disable MethodSize */
def main() {
check_job_parameters([
["EDITION", true],
@@ -22,15 +23,12 @@ def main() {
]);
def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def artifacts_helper = load("${checkout_dir}/buildscripts/scripts/utils/upload_artifacts.groovy");
- def docker_args = "${mount_reference_repo_dir}";
def omd_env_vars = [
"DEBFULLNAME='Checkmk Team'",
"DEBEMAIL='feedback@checkmk.com'",
] + (params.DISABLE_CACHE ? [
"NEXUS_BUILD_CACHE_URL=",
- "PYTHON_ENABLE_OPTIMIZATIONS=",
"BAZEL_CACHE_URL=",
"BAZEL_CACHE_USER=",
"BAZEL_CACHE_PASSWORD="] : []);
@@ -43,13 +41,21 @@ def main() {
def branch_name = "master";
def branch_version = versioning.get_branch_version(checkout_dir);
- //FIXME
+ // FIXME
def cmk_version_rc_aware = versioning.get_cmk_version(branch_name, branch_version, VERSION);
def cmk_version = versioning.strip_rc_number_from_version(cmk_version_rc_aware);
def docker_tag = versioning.select_docker_tag(branch_name, DOCKER_TAG_BUILD, DOCKER_TAG_BUILD);
+ /* groovylint-disable LineLength */
def container_name = "build-cmk-package-${distro}-${edition}-${cmd_output("git --git-dir=${checkout_dir}/.git log -n 1 --pretty=format:'%h'")}";
+ /* groovylint-enable LineLength */
+
+ def causes = currentBuild.getBuildCauses();
+ def triggered_by = "";
+ for (cause in causes) {
+ triggered_by += cause.upstreamProject + "/" + cause.upstreamBuild + "\n";
+ }
print(
"""
@@ -60,106 +66,105 @@ def main() {
|branch_name:.............. │${branch_name}│
|omd_env_vars:............. │${omd_env_vars}│
|docker_tag:............... │${docker_tag}│
- |docker_args:.............. │${docker_args}│
|checkout_dir:............. │${checkout_dir}│
|container_name:........... │${checkout_dir}│
+ |triggered_by:............. │${triggered_by}│
|===================================================
""".stripMargin());
stage("Prepare workspace") {
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside("${docker_args}") {
+ inside_container() {
+ dir("${checkout_dir}") {
+ sh("make buildclean");
+ sh("find . -name *.pth -delete");
+ versioning.configure_checkout_folder(edition, cmk_version);
+ }
- dir("${checkout_dir}") {
- sh("make buildclean");
+ // FIXME: should this be done by another job?
+ dir("${checkout_dir}") {
+ sh("make cmk-frontend frontend-vue");
+ }
- sh("find . -name *.pth -delete");
+ dir("${checkout_dir}") {
+ sh("make .venv");
+ }
- versioning.configure_checkout_folder(edition, cmk_version);
+ stage("Fetch agent binaries") {
+ // shout("Fetch agent binaries");
+
+ upstream_build(
+ relative_job_name: "builders/build-linux-agent-updater",
+ build_params: [
+ DISABLE_CACHE: DISABLE_CACHE,
+ VERSION: VERSION,
+ ],
+ // TODO: SPoT!!, see https://jira.lan.tribe29.com/browse/CMK-13857
+ dependency_paths: ["agents", "non-free/cmk-update-agent"],
+ dest: "artifacts/build-linux-agent-updater",
+ );
+ dir("${checkout_dir}/artifacts/build-linux-agent-updater") {
+ sh("find .");
+ sh("cp *.deb *.rpm ${checkout_dir}/agents/");
+ sh("mkdir -p ${checkout_dir}/agents/linux");
+ sh("cp cmk-agent-ctl* mk-sql ${checkout_dir}/agents/linux/");
+ if (edition != "raw") {
+ sh("cp cmk-update-agent* ${checkout_dir}/non-free/cmk-update-agent/");
+ }
}
- // FIXME: should this be done by another job?
- dir("${checkout_dir}") {
- sh("make .ran-webpack");
+ upstream_build(
+ relative_job_name: "winagt-build", // TODO: move to builders
+ build_params: [
+ DISABLE_CACHE: DISABLE_CACHE,
+ VERSION: VERSION,
+ ],
+ // TODO: SPoT!!, see https://jira.lan.tribe29.com/browse/CMK-13857
+ dependency_paths: [
+ "agents/wnx",
+ "agents/windows",
+ "packages/cmk-agent-ctl",
+ "packages/mk-sql"
+ ],
+ dest: "artifacts/winagt-build",
+ );
+ dir("${checkout_dir}/artifacts/winagt-build") {
+ sh("find .");
+ sh("mkdir -p ${checkout_dir}/agents/windows");
+ // TODO: SPoT!!
+ sh("""cp \
+ check_mk_agent-64.exe \
+ check_mk_agent.exe \
+ check_mk_agent.msi \
+ check_mk_agent_unsigned.msi \
+ check_mk.user.yml \
+ OpenHardwareMonitorLib.dll \
+ OpenHardwareMonitorCLI.exe \
+ mk-sql.exe \
+ robotmk_ext.exe \
+ windows_files_hashes.txt \
+ ${checkout_dir}/agents/windows/
+ """);
+ }
+ dir("${checkout_dir}/agents/windows") {
+ sh("""
+ ${checkout_dir}/buildscripts/scripts/create_unsign_msi_patch.sh \
+ check_mk_agent.msi check_mk_agent_unsigned.msi unsign-msi.patch
+ """);
}
- stage("Fetch agent binaries") {
- // shout("Fetch agent binaries");
-
- fetch_job_artifacts(
- relative_job_name: "builders/build-linux-agent-updater",
- params: [
- DISABLE_CACHE: DISABLE_CACHE,
- VERSION: VERSION,
- ],
- // TODO: SPoT!!, see https://jira.lan.tribe29.com/browse/CMK-13857
- dependency_paths: ["agents", "enterprise/agents/plugins"],
- dest: "artifacts/build-linux-agent-updater",
- );
- dir("${checkout_dir}/artifacts/build-linux-agent-updater") {
- sh("find .");
- sh("cp *.deb *.rpm ${checkout_dir}/agents/");
- sh("mkdir -p ${checkout_dir}/agents/linux");
- sh("cp cmk-agent-ctl* check-sql ${checkout_dir}/agents/linux/");
- if (edition != "raw") {
- sh("cp cmk-update-agent* ${checkout_dir}/enterprise/agents/plugins/");
- }
- }
-
- fetch_job_artifacts(
- relative_job_name: "winagt-build", // TODO: move to builders
- params: [
- DISABLE_CACHE: DISABLE_CACHE,
- VERSION: VERSION,
- ],
- // TODO: SPoT!!, see https://jira.lan.tribe29.com/browse/CMK-13857
- dependency_paths: [
- "agents/wnx",
- "agents/windows",
- "packages/cmk-agent-ctl",
- "packages/check-sql"
- ],
- dest: "artifacts/winagt-build",
- );
- dir("${checkout_dir}/artifacts/winagt-build") {
- sh("find .");
- sh("mkdir -p ${checkout_dir}/agents/windows");
- // TODO: SPoT!!
- sh("""cp \
- check_mk_agent-64.exe \
- check_mk_agent.exe \
- check_mk_agent.msi \
- check_mk_agent_unsigned.msi \
- check_mk.user.yml \
- OpenHardwareMonitorLib.dll \
- OpenHardwareMonitorCLI.exe \
- check-sql.exe \
- robotmk_ext.exe \
- windows_files_hashes.txt \
- ${checkout_dir}/agents/windows/
- """);
- }
- dir("${checkout_dir}/agents/windows") {
- sh("""
- ${checkout_dir}/buildscripts/scripts/create_unsign_msi_patch.sh \
- check_mk_agent.msi check_mk_agent_unsigned.msi unsign-msi.patch
- """);
- }
-
- fetch_job_artifacts(
- relative_job_name: "winagt-build-modules", // TODO: move to builders
- params: [
- DISABLE_CACHE: DISABLE_CACHE,
- VERSION: VERSION,
- ],
- // TODO: SPoT!!, see https://jira.lan.tribe29.com/browse/CMK-13857
- dependency_paths: ["agents/modules/windows"],
- dest: "artifacts/winagt-build-modules",
- );
- dir("${checkout_dir}/agents/windows") {
- sh("find ${checkout_dir}/artifacts/winagt-build-modules");
- sh("cp ${checkout_dir}/artifacts/winagt-build-modules/*.cab .");
- }
+ upstream_build(
+ relative_job_name: "winagt-build-modules", // TODO: move to builders
+ build_params: [
+ DISABLE_CACHE: DISABLE_CACHE,
+ VERSION: VERSION,
+ ],
+ // TODO: SPoT!!, see https://jira.lan.tribe29.com/browse/CMK-13857
+ dependency_paths: ["agents/modules/windows"],
+ dest: "artifacts/winagt-build-modules",
+ );
+ dir("${checkout_dir}/agents/windows") {
+ sh("find ${checkout_dir}/artifacts/winagt-build-modules");
+ sh("cp ${checkout_dir}/artifacts/winagt-build-modules/*.cab .");
}
}
}
@@ -174,12 +179,15 @@ def main() {
stage("Prepare environment") {
shout("Prepare environment");
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker.image("${distro}:${docker_tag}").inside(
- "--name ${container_name}" +
- " ${docker_args} " +
- "-v ${checkout_dir}:${checkout_dir} " +
- "--hostname ${distro}") {
+ lock(label: 'bzl_lock_' + env.NODE_NAME.split("\\.")[0].split("-")[-1], quantity: 1, resource: null) {
+ inside_container(
+ image: docker.image("${distro}:${docker_tag}"),
+ args: [
+ "--name ${container_name}",
+ " -v ${checkout_dir}:${checkout_dir}",
+ " --hostname ${distro}",
+ ],
+ ) {
sh("""
cd ${checkout_dir}
make .venv
@@ -193,7 +201,7 @@ def main() {
credentialsId: 'bazel-caching-credentials',
/// BAZEL_CACHE_URL must be set already, e.g. via Jenkins config
passwordVariable: 'BAZEL_CACHE_PASSWORD',
- usernameVariable: 'BAZEL_CACHE_USER')
+ usernameVariable: 'BAZEL_CACHE_USER'),
]) {
versioning.print_image_tag();
// Don't use withEnv, see
@@ -201,17 +209,6 @@ def main() {
stage("Build package") {
sh("""
cd ${checkout_dir}/omd
-
- # ps wauxw | grep bazel
- # bazel clean
- #strace \
- # --trace='fork,vfork,clone,clone3,execve,openat,write' \
- # -ttt \
- # -f --decode-pids='pidns' \
- # --columns=0 --abbrev='none' -s 65536 \
- # -o "make-omd.strace.log" \
- ps wauxw | grep bazel
-
${omd_env_vars.join(' ')} \
make ${distro_package_type(distro)}
""");
@@ -225,7 +222,8 @@ def main() {
dir("${checkout_dir}") {
setCustomBuildProperty(
key: "path_hashes",
- value: scm_directory_hashes(scm.extensions));
+ value: scm_directory_hashes(scm.extensions)
+ );
show_duration("archiveArtifacts") {
archiveArtifacts(
artifacts: "*.deb,*.rpm,*.cma",
@@ -235,4 +233,5 @@ def main() {
}
}
}
+
return this;
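
The bzl_lock label above is derived from the Jenkins node name — host part before the first dot, then the suffix after the last dash — so all builds on one node contend for a single Bazel lock. A Python mirror of the Groovy expression, with a hypothetical node name:

def bzl_lock_label(node_name: str) -> str:
    # Groovy: 'bzl_lock_' + env.NODE_NAME.split("\\.")[0].split("-")[-1]
    return "bzl_lock_" + node_name.split(".")[0].split("-")[-1]


assert bzl_lock_label("build-node-7.lan.example.com") == "bzl_lock_7"
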
diff --git a/buildscripts/scripts/build-cmk-image.groovy b/buildscripts/scripts/build-cmk-image.groovy
index 64ee6b160c7..a2b5857c1b7 100644
--- a/buildscripts/scripts/build-cmk-image.groovy
+++ b/buildscripts/scripts/build-cmk-image.groovy
@@ -8,7 +8,7 @@
/// Other artifacts: ???
/// Depends on: Jammy Ubuntu 22.04, see check_mk/docker_image/Dockerfile
-
+/* groovylint-disable MethodSize */
def main() {
check_job_parameters([
"EDITION",
@@ -40,13 +40,12 @@ def main() {
def branch_name = (VERSION == "daily") ? versioning.safe_branch_name(scm) : branch_version;
def cmk_version_rc_aware = versioning.get_cmk_version(branch_name, branch_version, VERSION);
def cmk_version = versioning.strip_rc_number_from_version(cmk_version_rc_aware);
- def source_dir = package_dir + "/" + cmk_version_rc_aware
- def docker_args = "--ulimit nofile=1024:1024 --group-add=${get_docker_group_id()} -v /var/run/docker.sock:/var/run/docker.sock";
+ def source_dir = package_dir + "/" + cmk_version_rc_aware;
def push_to_registry = PUSH_TO_REGISTRY=='true';
def build_image = PUSH_TO_REGISTRY_ONLY!='true';
- print(
+ print(
"""
|===== CONFIGURATION ===============================
|branch_name:......... │${branch_name}│
@@ -62,146 +61,148 @@ def main() {
|Building the CMK docker image
""".stripMargin());
-
shout("build image");
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
-
- docker_image_from_alias("IMAGE_TESTING").inside("${docker_args}") {
- withCredentials([
- usernamePassword(
- credentialsId: registry_credentials_id(EDITION),
- passwordVariable: 'DOCKER_PASSPHRASE',
- usernameVariable: 'DOCKER_USERNAME'),
- usernamePassword(
- credentialsId: 'nexus',
- passwordVariable: 'NEXUS_PASSWORD',
- usernameVariable: 'NEXUS_USERNAME')
- ]) {
- dir("${checkout_dir}") {
-
- conditional_stage('Prepare package directory', build_image) {
- cleanup_directory("${package_dir}");
+ inside_container(
+ args: [
+ "--env HOME=/home/jenkins",
+ ],
+ ulimit_nofile: 1024,
+ set_docker_group_id: true,
+ priviliged: true,
+ ) {
+ withCredentials([
+ usernamePassword(
+ credentialsId: registry_credentials_id(EDITION),
+ passwordVariable: 'DOCKER_PASSPHRASE',
+ usernameVariable: 'DOCKER_USERNAME'),
+ usernamePassword(
+ credentialsId: 'nexus',
+ passwordVariable: 'NEXUS_PASSWORD',
+ usernameVariable: 'NEXUS_USERNAME'),
+ ]) {
+ dir("${checkout_dir}") {
+ conditional_stage('Prepare package directory', build_image) {
+ cleanup_directory("${package_dir}");
+ }
+
+ conditional_stage('Build Image', build_image) {
+ on_dry_run_omit(LONG_RUNNING, "Download build sources") {
+ artifacts_helper.download_deb(
+ "${INTERNAL_DEPLOY_DEST}",
+ "${INTERNAL_DEPLOY_PORT}",
+ "${cmk_version_rc_aware}",
+ "${source_dir}",
+ "${EDITION}",
+ "jammy");
+ artifacts_helper.download_source_tar(
+ "${INTERNAL_DEPLOY_DEST}",
+ "${INTERNAL_DEPLOY_PORT}",
+ "${cmk_version_rc_aware}",
+ "${source_dir}",
+ "${EDITION}");
}
- conditional_stage('Build Image', build_image) {
- on_dry_run_omit(LONG_RUNNING, "Download build sources") {
- artifacts_helper.download_deb(
- "${INTERNAL_DEPLOY_DEST}",
- "${INTERNAL_DEPLOY_PORT}",
+ on_dry_run_omit(LONG_RUNNING, "Run build-cmk-container.sh") {
+ /// TODO: fix this:
+ /// build-cmk-container does not support the downloads dir
+ /// to have an arbitrary location, so we have to provide
+ /// `download` inside the checkout_dir
+ sh("""
+ scripts/run-pipenv run python \
+ buildscripts/scripts/build-cmk-container.py \
+ --branch=${branch_name} \
+ --edition=${EDITION} \
+ --version=${cmk_version} \
+ --source_path=${source_dir} \
+ --set_latest_tag=${SET_LATEST_TAG} \
+ --set_branch_latest_tag=${SET_BRANCH_LATEST_TAG} \
+ --no_cache=${BUILD_IMAGE_WITHOUT_CACHE} \
+ --image_cmk_base=${CUSTOM_CMK_BASE_IMAGE} \
+ --action=build \
+ -vvvv
+ """);
+ }
+
+ def filename = versioning.get_docker_artifact_name(EDITION, cmk_version);
+ on_dry_run_omit(LONG_RUNNING, "Upload ${filename}") {
+ stage("Upload ${filename}") {
+ artifacts_helper.upload_via_rsync(
+ "${package_dir}",
"${cmk_version_rc_aware}",
- "${source_dir}",
- "${EDITION}",
- "jammy");
- artifacts_helper.download_source_tar(
+ "${filename}",
"${INTERNAL_DEPLOY_DEST}",
"${INTERNAL_DEPLOY_PORT}",
- "${cmk_version_rc_aware}",
- "${source_dir}",
- "${EDITION}");
- }
-
- on_dry_run_omit(LONG_RUNNING, "Run build-cmk-container.sh") {
- /// TODO: fix this:
- /// build-cmk-container does not support the downloads dir
- /// to have an arbitrary location, so we have to provide
- /// `download` inside the checkout_dir
- sh("""
- scripts/run-pipenv run python \
- buildscripts/scripts/build-cmk-container.py \
- --branch=${branch_name} \
- --edition=${EDITION} \
- --version=${cmk_version} \
- --source_path=${source_dir} \
- --set_latest_tag=${SET_LATEST_TAG} \
- --set_branch_latest_tag=${SET_BRANCH_LATEST_TAG} \
- --no_cache=${BUILD_IMAGE_WITHOUT_CACHE} \
- --image_cmk_base=${CUSTOM_CMK_BASE_IMAGE} \
- --action=build \
- -vvvv
- """);
- }
-
- def filename = versioning.get_docker_artifact_name(EDITION, cmk_version);
- on_dry_run_omit(LONG_RUNNING, "Upload ${filename}") {
- stage("Upload ${filename}") {
- artifacts_helper.upload_via_rsync(
- "${package_dir}",
- "${cmk_version_rc_aware}",
- "${filename}",
- "${INTERNAL_DEPLOY_DEST}",
- "${INTERNAL_DEPLOY_PORT}",
- );
- }
- }
-
- if (branch_name.contains("sandbox") ) {
- print("Skip uploading ${filename} due to sandbox branch");
- } else if ("${EDITION}" == "saas"){
- print("Skip uploading ${filename} due to saas edition");
- } else {
- stage("Upload ${filename}") {
- artifacts_helper.upload_via_rsync(
- "${package_dir}",
- "${cmk_version_rc_aware}",
- "${filename}",
- "${WEB_DEPLOY_DEST}",
- "${WEB_DEPLOY_PORT}",
- );
- }
+ );
}
}
- conditional_stage("Load image", !build_image) {
- withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) {
- sh("""
- scripts/run-pipenv run python \
- buildscripts/scripts/build-cmk-container.py \
- --branch=${branch_name} \
- --edition=${EDITION} \
- --version=${cmk_version} \
- --version_rc_aware=${cmk_version_rc_aware} \
- --source_path=${source_dir} \
- --action=load \
- -vvvv
- """);
+ if (branch_name.contains("sandbox")) {
+ print("Skip uploading ${filename} due to sandbox branch");
+ } else if ("${EDITION}" == "saas") {
+ print("Skip uploading ${filename} due to saas edition");
+ } else {
+ stage("Upload ${filename}") {
+ artifacts_helper.upload_via_rsync(
+ "${package_dir}",
+ "${cmk_version_rc_aware}",
+ "${filename}",
+ "${WEB_DEPLOY_DEST}",
+ "${WEB_DEPLOY_PORT}",
+ );
}
}
+ }
- conditional_stage("Push images", push_to_registry) {
+ conditional_stage("Load image", !build_image) {
+ withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) {
sh("""
scripts/run-pipenv run python \
buildscripts/scripts/build-cmk-container.py \
--branch=${branch_name} \
--edition=${EDITION} \
--version=${cmk_version} \
+ --version_rc_aware=${cmk_version_rc_aware} \
--source_path=${source_dir} \
- --set_latest_tag=${SET_LATEST_TAG} \
- --set_branch_latest_tag=${SET_BRANCH_LATEST_TAG} \
- --no_cache=${BUILD_IMAGE_WITHOUT_CACHE} \
- --image_cmk_base=${CUSTOM_CMK_BASE_IMAGE} \
- --action=push \
+ --action=load \
-vvvv
""");
}
}
+
+ conditional_stage("Push images", push_to_registry) {
+ sh("""
+ scripts/run-pipenv run python \
+ buildscripts/scripts/build-cmk-container.py \
+ --branch=${branch_name} \
+ --edition=${EDITION} \
+ --version=${cmk_version} \
+ --source_path=${source_dir} \
+ --set_latest_tag=${SET_LATEST_TAG} \
+ --set_branch_latest_tag=${SET_BRANCH_LATEST_TAG} \
+ --no_cache=${BUILD_IMAGE_WITHOUT_CACHE} \
+ --image_cmk_base=${CUSTOM_CMK_BASE_IMAGE} \
+ --action=push \
+ -vvvv
+ """);
+ }
}
}
}
}
def registry_credentials_id(edition) {
- switch(edition) {
- case "raw":
- case "cloud":
- return "11fb3d5f-e44e-4f33-a651-274227cc48ab"
- case "enterprise":
- case "managed":
- return "registry.checkmk.com"
- case "saas":
- return "nexus"
- default:
- throw new Exception("Cannot provide registry credentials id for edition '${edition}'")
+ switch(edition) {
+ case "raw":
+ case "cloud":
+ return "11fb3d5f-e44e-4f33-a651-274227cc48ab";
+ case "enterprise":
+ case "managed":
+ return "registry.checkmk.com";
+ case "saas":
+ return "nexus";
+ default:
+ throw new Exception("Cannot provide registry credentials id for edition '${edition}'");
}
}
diff --git a/buildscripts/scripts/build-cmk-packages.groovy b/buildscripts/scripts/build-cmk-packages.groovy
index e99c3712c8e..d34cb4414db 100644
--- a/buildscripts/scripts/build-cmk-packages.groovy
+++ b/buildscripts/scripts/build-cmk-packages.groovy
@@ -12,6 +12,7 @@
/// different scenario: packages are built for a subset of distros only and
/// OMD package and Python optimizations are disabled.
+/* groovylint-disable MethodSize */
def main() {
check_job_parameters([
"EDITION",
@@ -21,6 +22,7 @@ def main() {
"DEPLOY_TO_WEBSITE_ONLY",
"DOCKER_TAG_BUILD",
"FAKE_WINDOWS_ARTIFACTS",
+ "USE_CASE",
]);
check_environment_variables([
@@ -41,9 +43,7 @@ def main() {
shout("configure");
- /// don't add $WORKSPACE based values here, since $docker_args is being
- /// used on different nodes
- def docker_args = "${mount_reference_repo_dir} --ulimit nofile=1024:1024";
+ def bazel_log_prefix = "bazel_log_"
def (jenkins_base_folder, use_case, omd_env_vars, upload_path_suffix) = (
env.JOB_BASE_NAME == "testbuild" ? [
@@ -51,16 +51,17 @@ def main() {
"testbuild",
/// Testbuilds: Do not use our build cache to ensure we catch build related
/// issues. And disable python optimizations to execute the build faster
- ["NEXUS_BUILD_CACHE_URL=", "PYTHON_ENABLE_OPTIMIZATIONS="],
+ ["NEXUS_BUILD_CACHE_URL="],
"testbuild/",
] : [
new File(new File(currentBuild.fullProjectName).parent).parent,
- VERSION == "daily" ? "daily" : "release",
+ VERSION == "daily" ? params.USE_CASE : "release",
[],
"",
]);
- def distros = versioning.configured_or_overridden_distros(edition, OVERRIDE_DISTROS, use_case);
+ def all_distros = versioning.get_distros(override: "all");
+ def distros = versioning.get_distros(edition: edition, use_case: use_case, override: OVERRIDE_DISTROS);
def deploy_to_website = !params.SKIP_DEPLOY_TO_WEBSITE && !jenkins_base_folder.startsWith("Testing");
@@ -83,6 +84,7 @@ def main() {
"""
|===== CONFIGURATION ===============================
|distros:.................. │${distros}│
+ |all_distros:.............. │${all_distros}│
|deploy_to_website:........ │${deploy_to_website}│
|branch_name:.............. │${branch_name}│
|cmk_version:.............. │${cmk_version}│
@@ -111,9 +113,9 @@ def main() {
if (params.DEPLOY_TO_WEBSITE_ONLY) {
// This stage is used only by bauwelt/bw-release in order to publish an already built release
stage('Deploying previously build version to website only') {
- docker_image_from_alias("IMAGE_TESTING").inside(docker_args) {
- artifacts_helper.deploy_to_website(cmk_version_rc_aware)
- artifacts_helper.cleanup_rc_candidates_of_version(cmk_version_rc_aware)
+ inside_container(ulimit_nofile: 1024) {
+ artifacts_helper.deploy_to_website(cmk_version_rc_aware);
+ artifacts_helper.cleanup_rc_candidates_of_version(cmk_version_rc_aware);
}
}
return;
@@ -123,7 +125,8 @@ def main() {
stage("Cleanup") {
cleanup_directory("${WORKSPACE}/versions");
cleanup_directory("${WORKSPACE}/agents");
- docker_image_from_alias("IMAGE_TESTING").inside(docker_args) {
+ sh("rm -rf ${WORKSPACE}/${bazel_log_prefix}*");
+ inside_container(ulimit_nofile: 1024) {
dir("${checkout_dir}") {
sh("make buildclean");
versioning.configure_checkout_folder(EDITION, cmk_version);
@@ -138,14 +141,15 @@ def main() {
/// https://review.lan.tribe29.com/c/check_mk/+/34634
/// Anyway this whole upload/download mayhem hopfully evaporates with
/// bazel..
- shout("pull packages");
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- distros.each { distro ->
- docker.image("${distro}:${docker_tag}").pull();
+ shout("pull build images");
+ stage("Pull build images") {
+ docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
+ distros.each { distro ->
+ docker.image("${distro}:${docker_tag}").pull();
+ }
}
}
-
shout("agents");
// TODO iterate over all agent variants and put the condition per edition
@@ -162,21 +166,22 @@ def main() {
selector: specific(get_valid_build_id(win_project_name)),
target: "agents",
fingerprintArtifacts: true
- )
+ );
copyArtifacts(
projectName: win_py_project_name,
selector: specific(get_valid_build_id(win_py_project_name)),
target: "agents",
fingerprintArtifacts: true
- )
+ );
} else {
/// must take place in $WORKSPACE since we need to
/// access $WORKSPACE/agents
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside(
- "${docker_args} --group-add=${docker_group_id} -v /var/run/docker.sock:/var/run/docker.sock") {
- build_linux_agent_updater(agent, EDITION, branch_version, docker_registry_no_http);
- }
+ inside_container(
+ set_docker_group_id: true,
+ ulimit_nofile: 1024,
+ priviliged: true,
+ ) {
+ build_linux_agent_updater(agent, EDITION, branch_version, docker_registry_no_http);
}
}
}
@@ -194,7 +199,7 @@ def main() {
}
shout("create_source_package");
- docker_image_from_alias("IMAGE_TESTING").inside("${docker_args} ${mount_reference_repo_dir}") {
+ inside_container(ulimit_nofile: 2048) {
// TODO creates stages
create_source_package(WORKSPACE, checkout_dir, cmk_version);
@@ -214,7 +219,7 @@ def main() {
cleanup_source_package(checkout_dir, FINAL_SOURCE_PACKAGE_PATH);
}
stage("Test source package") {
- test_package(FINAL_SOURCE_PACKAGE_PATH, "source", WORKSPACE, checkout_dir, cmk_version)
+ test_package(FINAL_SOURCE_PACKAGE_PATH, "source", WORKSPACE, checkout_dir, cmk_version);
}
stage("Upload source package") {
artifacts_helper.upload_via_rsync(
@@ -229,124 +234,206 @@ def main() {
}
shout("packages");
- def package_builds = distros.collectEntries { distro ->
+ def package_builds = all_distros.collectEntries { distro ->
[("distro ${distro}") : {
- // The following node call allocates a new workspace for each
- // DISTRO.
- //
- // Note: Do it inside the first node block to ensure all distro
- // workspaces start with a fresh one. Otherwise one of the node
- // calls would reuse the workspace of the source package step.
- //
- // The DISTRO workspaces will then be initialized with the contents
- // of the first workspace, which contains the prepared git repo.
-
- /// For now make sure, we're on the SAME node (but different WORKDIR)
- /// To make the builds run across different nodes we have to
- /// use `stash` to distribute the source
- node(env.NODE_NAME) {
- /// $WORKSPACE is different now - we must not use variables
- /// like $checkout_dir which are based on the parent
- /// workspace accidentally (and
- assert "${WORKSPACE}/checkout" != checkout_dir;
-
- def distro_dir = "${WORKSPACE}/checkout";
-
- lock(label: 'bzl_lock_' + env.NODE_NAME.split("\\.")[0].split("-")[-1], quantity: 1, resource : null) {
+ if (! (distro in distros)) {
+ conditional_stage("${distro} initialize workspace", false) {}
+ conditional_stage("${distro} build package", false) {}
+ conditional_stage("${distro} sign package", false) {}
+ conditional_stage("${distro} test package", false) {}
+ conditional_stage("${distro} copy package", false) {}
+ conditional_stage("${distro} upload package", false) {}
+ return;
+ }
+ // The following node call allocates a new workspace for each
+ // DISTRO.
+ //
+ // Note: Do it inside the first node block to ensure all distro
+ // workspaces start with a fresh one. Otherwise one of the node
+ // calls would reuse the workspace of the source package step.
+ //
+ // The DISTRO workspaces will then be initialized with the contents
+ // of the first workspace, which contains the prepared git repo.
+
+ /// For now make sure, we're on the SAME node (but different WORKDIR)
+ /// To make the builds run across different nodes we have to
+ /// use `stash` to distribute the source
+ node(env.NODE_NAME) {
+ /// $WORKSPACE is different now - we must not use variables
+ /// like $checkout_dir which are based on the parent
+ /// workspace accidentally (and
+ assert "${WORKSPACE}/checkout" != checkout_dir;
+
+ def distro_dir = "${WORKSPACE}/checkout";
+
+ lock(label: 'bzl_lock_' + env.NODE_NAME.split("\\.")[0].split("-")[-1], quantity: 1, resource : null) {
docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
// For the package build we need a higher ulimit
// * Bazel opens many files which can lead to crashes
// * See CMK-12159
- docker.image("${distro}:${docker_tag}").inside(
- "${mount_reference_repo_dir} --ulimit nofile=16384:32768 -v ${checkout_dir}:${checkout_dir}:ro --hostname ${distro}") {
+ inside_container(
+ image: docker.image("${distro}:${docker_tag}"),
+ args: [
+ "--ulimit nofile=16384:32768",
+ "-v ${checkout_dir}:${checkout_dir}:ro",
+ "--hostname ${distro}",
+ ],
+ init: true,
+ ) {
stage("${distro} initialize workspace") {
cleanup_directory("${WORKSPACE}/versions");
- sh("rm -rf ${distro_dir}")
- sh("rsync -a ${checkout_dir}/ ${distro_dir}/")
+ sh("rm -rf ${distro_dir}");
+ sh("rsync -a ${checkout_dir}/ ${distro_dir}/");
+ sh("rm -rf ${distro_dir}/bazel_execution_log*");
}
+
stage("${distro} build package") {
- withCredentials([usernamePassword(
+ withCredentials([
+ usernamePassword(
credentialsId: 'nexus',
passwordVariable: 'NEXUS_PASSWORD',
- usernameVariable: 'NEXUS_USERNAME')
- ]) {
- withCredentials([usernamePassword(
+ usernameVariable: 'NEXUS_USERNAME'),
+ usernamePassword(
credentialsId: 'bazel-caching-credentials',
/// BAZEL_CACHE_URL must be set already, e.g. via Jenkins config
passwordVariable: 'BAZEL_CACHE_PASSWORD',
- usernameVariable: 'BAZEL_CACHE_USER')
- ]) {
- versioning.print_image_tag();
- build_package(distro_package_type(distro), distro_dir, omd_env_vars);
- }
+ usernameVariable: 'BAZEL_CACHE_USER'),
+ ]) {
+ versioning.print_image_tag();
+ build_package(distro_package_type(distro), distro_dir, omd_env_vars);
}
- }
- }
- }
- docker_image_from_alias("IMAGE_TESTING").inside(
- "${docker_args} -v ${checkout_dir}:${checkout_dir}:ro") {
- def package_name = get_package_name(distro_dir, distro_package_type(distro), EDITION, cmk_version);
- def build_package_path = "${distro_dir}/${package_name}";
- def node_version_dir = "${WORKSPACE}/versions";
- def final_package_path = "${node_version_dir}/${cmk_version_rc_aware}/${package_name}";
+ sh("""echo ==== ${distro} =====
+ ps wauxw
+ """)
- stage("${distro} sign package") {
- sign_package(distro_dir, build_package_path);
+ try_parse_bazel_execution_log(distro, distro_dir, bazel_log_prefix);
}
+ }
+ }
+ }
+ inside_container(
+ args: [
+ "-v ${checkout_dir}:${checkout_dir}:ro",
+ ],
+ ulimit_nofile: 1024,
+ ) {
+ def package_name = get_package_name(distro_dir, distro_package_type(distro), EDITION, cmk_version);
+ def build_package_path = "${distro_dir}/${package_name}";
+ def node_version_dir = "${WORKSPACE}/versions";
+ def final_package_path = "${node_version_dir}/${cmk_version_rc_aware}/${package_name}";
+
+ stage("${distro} sign package") {
+ sign_package(distro_dir, build_package_path);
+ }
- stage("${distro} test package") {
- test_package(build_package_path, distro, WORKSPACE, distro_dir, cmk_version);
- }
+ stage("${distro} test package") {
+ test_package(build_package_path, distro, WORKSPACE, distro_dir, cmk_version);
+ }
- stage("${distro} copy package") {
- copy_package(build_package_path, distro, final_package_path);
- }
+ stage("${distro} copy package") {
+ copy_package(build_package_path, distro, final_package_path);
+ }
- stage("${distro} upload package") {
- artifacts_helper.upload_via_rsync(
- "${node_version_dir}",
- "${cmk_version_rc_aware}",
- "${package_name}",
- "${upload_path}",
- INTERNAL_DEPLOY_PORT,
- );
- }
- }
+ stage("${distro} upload package") {
+ artifacts_helper.upload_via_rsync(
+ "${node_version_dir}",
+ "${cmk_version_rc_aware}",
+ "${package_name}",
+ "${upload_path}",
+ INTERNAL_DEPLOY_PORT,
+ );
}
}
- }]
+ }
+ }]
}
parallel package_builds;
+ stage("Plot cache hits") {
+ try_plot_cache_hits(bazel_log_prefix, distros);
+ }
+
conditional_stage('Upload', !jenkins_base_folder.startsWith("Testing")) {
currentBuild.description += (
""" |${currentBuild.description}
|Download Artifacts
|""".stripMargin());
- def exclude_pattern = versioning.get_internal_distros_pattern()
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside("${docker_args} ${mount_reference_repo_dir}") {
- assert_no_dirty_files(checkout_dir);
- artifacts_helper.download_version_dir(
- upload_path,
- INTERNAL_DEPLOY_PORT,
- cmk_version_rc_aware,
- "${WORKSPACE}/versions/${cmk_version_rc_aware}",
- "*",
- "all packages",
- exclude_pattern,
- )
- artifacts_helper.upload_version_dir(
- "${WORKSPACE}/versions/${cmk_version_rc_aware}", WEB_DEPLOY_DEST, WEB_DEPLOY_PORT, EXCLUDE_PATTERN=exclude_pattern);
- if (deploy_to_website) {
- artifacts_helper.deploy_to_website(cmk_version_rc_aware);
- }
+ def exclude_pattern = versioning.get_internal_artifacts_pattern();
+ inside_container(ulimit_nofile: 1024) {
+ assert_no_dirty_files(checkout_dir);
+ artifacts_helper.download_version_dir(
+ upload_path,
+ INTERNAL_DEPLOY_PORT,
+ cmk_version_rc_aware,
+ "${WORKSPACE}/versions/${cmk_version_rc_aware}",
+ "*",
+ "all packages",
+ exclude_pattern,
+ );
+ artifacts_helper.upload_version_dir(
+ "${WORKSPACE}/versions/${cmk_version_rc_aware}", WEB_DEPLOY_DEST, WEB_DEPLOY_PORT, EXCLUDE_PATTERN=exclude_pattern);
+ if (deploy_to_website) {
+ artifacts_helper.deploy_to_website(cmk_version_rc_aware);
}
}
}
}
+def try_parse_bazel_execution_log(distro, distro_dir, bazel_log_prefix) {
+ try {
+ dir("${distro_dir}") {
+ def summary_file="${distro_dir}/${bazel_log_prefix}execution_summary_${distro}.json";
+ def cache_hits_file="${distro_dir}/${bazel_log_prefix}cache_hits_${distro}.csv";
+ sh("""scripts/run-pipenv run \
+ buildscripts/scripts/bazel_execution_log_parser.py \
+ --execution_logs_root "${distro_dir}" \
+ --bazel_log_file_pattern "bazel_execution_log*" \
+ --summary_file "${summary_file}" \
+ --cachehit_csv "${cache_hits_file}" \
+ --distro "${distro}"
+ """);
+ stash(name: "${bazel_log_prefix}${distro}", includes: "${bazel_log_prefix}*");
+ }
+ } catch (Exception e) {
+ print("Failed to parse bazel execution logs: ${e}");
+ }
+}
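+
+// Illustrative, not verified: bazel_execution_log_parser.py is expected to write
+// ${bazel_log_prefix}execution_summary_<distro>.json (aggregated stats) and
+// ${bazel_log_prefix}cache_hits_<distro>.csv (a single percentage series, e.g. a
+// "cache_hits" column with values like 87.3), which try_plot_cache_hits() below
+// unstashes and feeds into the plot step.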
+
+def try_plot_cache_hits(bazel_log_prefix, distros) {
+ try {
+ distros.each { distro ->
+ try {
+ print("Unstashing for distro ${distro}...")
+ unstash(name: "${bazel_log_prefix}${distro}")
+ }
+ catch (Exception e) {
+ print("No stash for ${distro}")
+ }
+ }
+
+ plot csvFileName: 'bazel_cache_hits.csv',
+ csvSeries:
+ distros.collect {[file: "${bazel_log_prefix}cache_hits_${it}.csv"]},
+ description: 'Bazel Remote Cache Analysis',
+ group: 'Bazel Cache',
+ numBuilds: '30',
+ propertiesSeries: [[file: '', label: '']],
+ style: 'line',
+ title: 'Cache hits',
+ yaxis: 'Cache hits in percent',
+ yaxisMaximum: '100',
+ yaxisMinimum: '0';
+
+ archiveArtifacts(
+ artifacts: "${bazel_log_prefix}*",
+ );
+ }
+ catch (Exception e) {
+ print("Failed to plot cache hits: ${e}");
+ }
+}
+
def get_agent_list(edition) {
return (edition == "raw" ?
["windows"] :
@@ -364,7 +451,7 @@ def build_linux_agent_updater(agent, edition, branch_version, registry) {
passwordVariable: 'NEXUS_PASSWORD',
usernameVariable: 'NEXUS_USERNAME')
]) {
- dir("${checkout_dir}/enterprise/agents/plugins") {
+ dir("${checkout_dir}/non-free/cmk-update-agent") {
def cmd = "BRANCH_VERSION=${branch_version} DOCKER_REGISTRY_NO_HTTP=${registry} ./make-agent-updater${suffix}";
on_dry_run_omit(LONG_RUNNING, "RUN ${cmd}") {
sh(cmd);
@@ -372,7 +459,7 @@ def build_linux_agent_updater(agent, edition, branch_version, registry) {
}
}
dir("${WORKSPACE}/agents") {
- def cmd = "cp ${checkout_dir}/enterprise/agents/plugins/cmk-update-agent${suffix} .";
+ def cmd = "cp ${checkout_dir}/non-free/cmk-update-agent/cmk-update-agent${suffix} .";
on_dry_run_omit(LONG_RUNNING, "RUN ${cmd}") {
sh(cmd);
}
@@ -400,15 +487,23 @@ def create_and_upload_bom(workspace, branch_version, version) {
credentialsId: '058f09c4-21c9-49ae-b72b-0b9d2f465da6',
url: 'ssh://jenkins@review.lan.tribe29.com:29418/dependencyscanner'
]
- ]
- ])
+ ],
+ ]);
scanner_image = docker.build("dependencyscanner", "--tag dependencyscanner .");
}
}
stage('Create BOM') {
on_dry_run_omit(LONG_RUNNING, "Create BOM") {
- scanner_image.inside("-v ${checkout_dir}:${checkout_dir}") {
- sh("python3 -m dependencyscanner --stage prod --outfile '${bom_path}' '${checkout_dir}'");
+ inside_container(
+ image: scanner_image,
+ args: ["-v ${checkout_dir}:${checkout_dir}"], // why?!
+ ) {
+ sh("""python3 -m dependencyscanner \
+ --stage prod \
+ --outfile '${bom_path}' \
+ --research_file researched_master.yml \
+ --license_cache license_cache_master.json \
+ '${checkout_dir}'""");
}
}
}
@@ -418,7 +513,13 @@ def create_and_upload_bom(workspace, branch_version, version) {
variable: 'DTRACK_API_KEY')]) {
withEnv(["DTRACK_URL=${DTRACK_URL}"]) {
on_dry_run_omit(LONG_RUNNING, "Upload BOM") {
- scanner_image.inside("-v ${checkout_dir}:${checkout_dir} --env DTRACK_URL,DTRACK_API_KEY") {
+ inside_container(
+ image: scanner_image,
+ args: [
+ "-v ${checkout_dir}:${checkout_dir}", // why?!
+ "--env DTRACK_URL,DTRACK_API_KEY",
+ ],
+ ) {
sh("""scripts/upload-bom \
--bom-path '${bom_path}' \
--project-name 'Checkmk ${branch_version}' \
@@ -442,7 +543,7 @@ def create_source_package(workspace, source_dir, cmk_version) {
}
stage("Vanilla agent sign package") {
- sign_package(source_dir, "${source_dir}/agents/check-mk-agent-${cmk_version}-1.noarch.rpm")
+ sign_package(source_dir, "${source_dir}/agents/check-mk-agent-${cmk_version}-1.noarch.rpm");
}
stage("Create source package") {
@@ -451,35 +552,47 @@ def create_source_package(workspace, source_dir, cmk_version) {
def unsigned_msi = "check_mk_agent_unsigned.msi";
def target_dir = "agents/windows";
def scripts_dir = "${checkout_dir}/buildscripts/scripts";
- def patch_script = "create_unsign_msi_patch.sh"
- def patch_file = "unsign-msi.patch"
- def ohm_files = "OpenHardwareMonitorLib.dll,OpenHardwareMonitorCLI.exe"
- def ext_files = "robotmk_ext.exe"
- def check_sql = "check-sql.exe"
- def hashes_file = "windows_files_hashes.txt"
- def artifacts = "check_mk_agent-64.exe,check_mk_agent.exe,${signed_msi},${unsigned_msi},check_mk.user.yml,python-3.cab,${ohm_files},${ext_files},${check_sql},${hashes_file}"
+ def patch_script = "create_unsign_msi_patch.sh";
+ def patch_file = "unsign-msi.patch";
+ def ohm_files = "OpenHardwareMonitorLib.dll,OpenHardwareMonitorCLI.exe";
+ def ext_files = "robotmk_ext.exe";
+ def mk_sql = "mk-sql.exe";
+ def hashes_file = "windows_files_hashes.txt";
+ def artifacts = [
+ "check_mk_agent-64.exe",
+ "check_mk_agent.exe",
+ "${signed_msi}",
+ "${unsigned_msi}",
+ "check_mk.user.yml",
+ "python-3.cab",
+ "${ohm_files}",
+ "${ext_files}",
+ "${mk_sql}",
+ "${hashes_file}",
+ ].join(",");
+
if (params.FAKE_WINDOWS_ARTIFACTS) {
- sh "mkdir -p ${agents_dir}"
+ sh("mkdir -p ${agents_dir}");
if(EDITION != 'raw') {
- sh "touch ${agents_dir}/cmk-update-agent"
- sh "touch ${agents_dir}/cmk-update-agent-32"
+ sh("touch ${agents_dir}/cmk-update-agent");
+ sh("touch ${agents_dir}/cmk-update-agent-32");
}
- sh "touch ${agents_dir}/{${artifacts}}"
+ sh("touch ${agents_dir}/{${artifacts}}");
}
dir("${checkout_dir}") {
if(EDITION != 'raw') {
- sh "cp ${agents_dir}/cmk-update-agent enterprise/agents/plugins/"
- sh "cp ${agents_dir}/cmk-update-agent-32 enterprise/agents/plugins/"
+ sh("cp ${agents_dir}/cmk-update-agent non-free/cmk-update-agent/");
+ sh("cp ${agents_dir}/cmk-update-agent-32 non-free/cmk-update-agent/");
}
- sh "cp ${agents_dir}/{${artifacts}} ${target_dir}"
- sh "${scripts_dir}/${patch_script} ${target_dir}/${signed_msi} ${target_dir}/${unsigned_msi} ${target_dir}/${patch_file}"
+ sh("cp ${agents_dir}/{${artifacts}} ${target_dir}");
+ sh("${scripts_dir}/${patch_script} ${target_dir}/${signed_msi} ${target_dir}/${unsigned_msi} ${target_dir}/${patch_file}");
withCredentials([
usernamePassword(
credentialsId: 'nexus',
passwordVariable: 'NEXUS_PASSWORD',
usernameVariable: 'NEXUS_USERNAME')
]) {
- sh 'make dist || cat /root/.npm/_logs/*-debug.log'
+ sh('make dist');
}
}
}
@@ -500,8 +613,8 @@ def cleanup_source_package(source_dir, package_path) {
def copy_source_package(package_path, archive_path) {
print("FN copy_source_package(package_path=${package_path}, archive_path=${archive_path})");
- sh "mkdir -p \$(dirname ${archive_path})"
- sh "cp ${package_path} ${archive_path}"
+ sh("mkdir -p \$(dirname ${archive_path})");
+ sh("cp ${package_path} ${archive_path}");
}
def build_package(package_type, build_dir, env) {
@@ -509,23 +622,29 @@ def build_package(package_type, build_dir, env) {
dir(build_dir) {
// TODO: THIS MUST GO AWAY ASAP
// Background:
- // * currently we're building protobuf during source packaging (make dist) in IMAGE_TESTING.
+ // * currently we're building protobuf during source packaging (make dist) in reference container.
// * then, we're simply rsyncing the whole workspace in the different distro workspaces (including the protoc)
// * as protobuf exists then in the intermediate_install, it will be used (and not obtained from a correct
// cache key, including DISTRO information...)
// * if we then build under an old distro, we get linker issues
// * so as long as we don't have the protobuf build bazelized, we need to manually clean it up here.
- sh("rm -fr omd/build/intermediate_install/protobuf*")
- sh("rm -fr omd/build/stamps/protobuf*")
+ sh("rm -fr omd/build/intermediate_install/protobuf*");
+ sh("rm -fr omd/build/stamps/protobuf*");
// used withEnv(env) before, but sadly Jenkins does not set 0 length environment variables
// see also: https://issues.jenkins.io/browse/JENKINS-43632
try {
- def env_str = env.join(" ")
+ def env_str = env.join(" ");
sh("${env_str} DEBFULLNAME='Checkmk Team' DEBEMAIL='feedback@checkmk.com' make -C omd ${package_type}");
} finally {
- sh("cd '${checkout_dir}/omd'; echo 'Maximum heap size:'; bazel info peak-heap-size; echo 'Server log:'; cat \$(bazel info server_log)");
+ sh("""
+ cd '${checkout_dir}/omd'
+ echo 'Maximum heap size:'
+ bazel info peak-heap-size
+ echo 'Server log:'
+ cat \$(bazel info server_log)
+ """);
}
}
}
@@ -566,19 +685,21 @@ def sign_package(source_dir, package_path) {
}
def test_package(package_path, name, workspace, source_dir, cmk_version) {
+ /* groovylint-disable LineLength */
print("FN test_package(package_path=${package_path}, name=${name}, workspace=${workspace}, source_dir=${source_dir}, cmk_version=${cmk_version})");
+ /* groovylint-enable LineLength */
try {
withEnv([
- "PACKAGE_PATH=${package_path}",
- "PYTEST_ADDOPTS='--junitxml=${workspace}/junit-${name}.xml'",
+ "PACKAGE_PATH=${package_path}",
+ "PYTEST_ADDOPTS='--junitxml=${workspace}/junit-${name}.xml'",
]) {
- sh("make -C '${source_dir}/tests' VERSION=${cmk_version} test-packaging")
+ sh("make -C '${source_dir}/tests' VERSION=${cmk_version} test-packaging");
}
} finally {
step([
$class: "JUnitResultArchiver",
testResults: "junit-${name}.xml",
- ])
+ ]);
}
}
@@ -605,7 +726,7 @@ def get_valid_build_id(jobName) {
calendar.setTime(lastBuild.getTime());
def lastBuildDay = calendar.get(Calendar.DAY_OF_YEAR);
- if (currentBuildVersion in ["daily", "git"] &&
+ if (currentBuildVersion == "daily" &&
lastBuildParameters.VERSION == currentBuildVersion &&
lastBuildDay == currentBuildDay &&
lastBuild.result.toString().equals("SUCCESS")
diff --git a/buildscripts/scripts/build-linux-agent-updater.groovy b/buildscripts/scripts/build-linux-agent-updater.groovy
index aa1b6c7428a..5bdfa625a96 100644
--- a/buildscripts/scripts/build-linux-agent-updater.groovy
+++ b/buildscripts/scripts/build-linux-agent-updater.groovy
@@ -18,7 +18,6 @@ def main() {
shout("configure");
- def docker_args = "${mount_reference_repo_dir}";
def branch_version = versioning.get_branch_version(checkout_dir);
// FIXME
@@ -44,62 +43,62 @@ def main() {
|===================================================
""".stripMargin());
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside(
- "${docker_args} --group-add=${docker_group_id} -v /var/run/docker.sock:/var/run/docker.sock") {
- // TODO: check why this doesn't work
- // docker_image_from_alias("IMAGE_TESTING").inside(docker_args) {
+ inside_container(
+ set_docker_group_id: true,
+ priviliged: true,
+ ) {
+ // TODO: check why this doesn't work
+ // docker_reference_image().inside(docker_args) {
- dir("${checkout_dir}") {
- sh("""
- make buildclean
- rm -rf ${WORKSPACE}/build
- """);
- versioning.set_version(cmk_version);
- }
+ dir("${checkout_dir}") {
+ sh("""
+ make buildclean
+ rm -rf ${WORKSPACE}/build
+ """);
+ versioning.set_version(cmk_version);
+ }
- def agent_builds = ["au-linux-64bit", "au-linux-32bit"].collectEntries { agent ->
- [("agent ${agent}") : {
- stage("Build Agent for ${agent}") {
- def suffix = agent == "au-linux-32bit" ? "-32" : "";
- withCredentials([
- usernamePassword(
- credentialsId: 'nexus',
- passwordVariable: 'NEXUS_PASSWORD',
- usernameVariable: 'NEXUS_USERNAME')
- ]) {
- dir("${checkout_dir}/enterprise/agents/plugins") {
- sh("""
- BRANCH_VERSION=${branch_version} \
- DOCKER_REGISTRY_NO_HTTP=${docker_registry_no_http} \
- ./make-agent-updater${suffix}
- """);
- }
- }
- dir("${WORKSPACE}/build") {
- sh("cp ${checkout_dir}/enterprise/agents/plugins/cmk-update-agent${suffix} .");
+ def agent_builds = ["au-linux-64bit", "au-linux-32bit"].collectEntries { agent ->
+ [("agent ${agent}") : {
+ stage("Build Agent for ${agent}") {
+ def suffix = agent == "au-linux-32bit" ? "-32" : "";
+ withCredentials([
+ usernamePassword(
+ credentialsId: 'nexus',
+ passwordVariable: 'NEXUS_PASSWORD',
+ usernameVariable: 'NEXUS_USERNAME')
+ ]) {
+ dir("${checkout_dir}/non-free/cmk-update-agent") {
+ sh("""
+ BRANCH_VERSION=${branch_version} \
+ DOCKER_REGISTRY_NO_HTTP=${docker_registry_no_http} \
+ ./make-agent-updater${suffix}
+ """);
}
}
- }]
- }
- parallel agent_builds;
-
- stage("Create and sign deb/rpm packages") {
- dir("${checkout_dir}/agents") {
- sh("make rpm");
- sh("make deb");
- }
- def package_name_rpm = cmd_output("find ${checkout_dir} -name *.rpm");
- def package_name_deb = cmd_output("find ${checkout_dir} -name *.deb");
- sign_package(checkout_dir, package_name_rpm)
- dir("${WORKSPACE}/build") {
- sh("""
- cp ${package_name_rpm} .
- cp ${package_name_deb} .
- cp ${checkout_dir}/agents/linux/cmk-agent-ctl* .
- cp ${checkout_dir}/agents/linux/check-sql* .
- """);
+ dir("${WORKSPACE}/build") {
+ sh("cp ${checkout_dir}/non-free/cmk-update-agent/cmk-update-agent${suffix} .");
+ }
}
+ }]
+ }
+ parallel agent_builds;
+
+ stage("Create and sign deb/rpm packages") {
+ dir("${checkout_dir}/agents") {
+ sh("make rpm");
+ sh("make deb");
+ }
+ def package_name_rpm = cmd_output("find ${checkout_dir} -name *.rpm");
+ def package_name_deb = cmd_output("find ${checkout_dir} -name *.deb");
+ sign_package(checkout_dir, package_name_rpm);
+ dir("${WORKSPACE}/build") {
+ sh("""
+ cp ${package_name_rpm} .
+ cp ${package_name_deb} .
+ cp ${checkout_dir}/agents/linux/cmk-agent-ctl* .
+ cp ${checkout_dir}/agents/linux/mk-sql* .
+ """);
}
}
}
@@ -109,7 +108,8 @@ def main() {
setCustomBuildProperty(
key: "path_hashes",
// TODO: this must go to some SPoT
- value: directory_hashes(["agents", "enterprise/agents/plugins"]));
+ value: directory_hashes(["agents", "non-free/cmk-update-agent"])
+ );
}
dir("${WORKSPACE}/build") {
show_duration("archiveArtifacts") {
diff --git a/buildscripts/scripts/collect_packages.py b/buildscripts/scripts/collect_packages.py
new file mode 100755
index 00000000000..f99aca3d647
--- /dev/null
+++ b/buildscripts/scripts/collect_packages.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+# Copyright (C) 2024 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+import argparse
+import json
+import sys
+from dataclasses import asdict, dataclass, field
+from pathlib import Path
+from typing import Iterator
+
+
+@dataclass
+class Package:
+ name: str
+ path: str
+ command_line: str
+ maintainers: list[str] = field(default_factory=list)
+ sec_vars: list[str] = field(default_factory=list)
+ dependencies: list[str] = field(default_factory=list)
+
+ def __post_init__(self):
+ # Fallback for now...
+ self.maintainers.append("team-ci@checkmk.com")
+
+
+def parse_arguments() -> argparse.Namespace:
+ parser = argparse.ArgumentParser()
+ parser.add_argument("packages_path")
+ return parser.parse_args()
+
+
+def parse_package(meta_file: Path, name: str) -> Package:
+ with open(meta_file) as f:
+ data = {**json.load(f), "name": name, "path": str(meta_file.parent)}
+ return Package(**data)
+
+
+def discover_packages(args: argparse.Namespace) -> Iterator[Package]:
+ for package_dir in Path(args.packages_path).iterdir():
+ if not package_dir.is_dir():
+ continue
+ meta_file = package_dir / "ci.json"
+ if not meta_file.exists():
+ sys.stderr.write(
+ f"Skipping {package_dir} as it does not contain a {meta_file} (yet). "
+ f"TODO: Add it to be discovered. \n"
+ )
+ continue
+ try:
+ package = parse_package(meta_file, package_dir.name)
+ except Exception as e:
+ sys.stderr.write(f"Skipping {package_dir} as it has invalid meta data: {e}\n")
+ continue
+ yield package
+
+
+def main():
+ args = parse_arguments()
+ print(json.dumps([asdict(p) for p in discover_packages(args)], indent=2))
+
+
+if __name__ == "__main__":
+ main()
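+
+# Illustrative only: a hypothetical ci.json that parse_package() above would accept.
+# "name" and "path" are injected from the package directory; every other key must
+# match a Package field, otherwise Package(**data) raises a TypeError:
+#
+# {
+#     "command_line": "make test",
+#     "maintainers": ["someone@checkmk.com"],
+#     "sec_vars": ["NEXUS_PASSWORD"],
+#     "dependencies": ["packages/cmk-frontend"]
+# }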
diff --git a/buildscripts/scripts/compile-all-werks.groovy b/buildscripts/scripts/compile-all-werks.groovy
index 6a01617bb49..eb0813e8822 100644
--- a/buildscripts/scripts/compile-all-werks.groovy
+++ b/buildscripts/scripts/compile-all-werks.groovy
@@ -3,19 +3,16 @@
/// file: compile-all-werks.groovy
def main() {
- def docker_args = "${mount_reference_repo_dir}";
-
def target_path = "/home/mkde/werks/all_werks_v2.json";
def targets_credentials = [
[env.WEB_STAGING, "web-staging"],
["checkmk.com", "checkmk-deploy"],
- ["customer.checkmk.com", "customer-deploy"]
+ ["customer.checkmk.com", "customer-deploy"],
];
print(
"""
|===== CONFIGURATION ===============================
- |docker_args:.............. │${docker_args}│
|checkout_dir:............. │${checkout_dir}│
|===================================================
""".stripMargin());
@@ -28,63 +25,63 @@ def main() {
}
stage("Compile werks") {
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside("${docker_args}") {
- dir("${checkout_dir}") {
- sh("""
- scripts/run-pipenv run echo build venv...
- scripts/run-pipenv run python3 -m cmk.utils.werks collect cmk ./ > cmk.json
- scripts/run-pipenv run python3 -m cmk.utils.werks collect cma ${WORKSPACE}/cma > cma.json
- scripts/run-pipenv run python3 -m cmk.utils.werks collect checkmk_kube_agent ${WORKSPACE}/checkmk_kube_agent > kube.json
+ inside_container() {
+ dir("${checkout_dir}") {
+ /* groovylint-disable LineLength */
+ sh("""
+ scripts/run-pipenv run echo build venv...
+ scripts/run-pipenv run python3 -m cmk.utils.werks collect cmk ./ > cmk.json
+ scripts/run-pipenv run python3 -m cmk.utils.werks collect cma ${WORKSPACE}/cma > cma.json
+ scripts/run-pipenv run python3 -m cmk.utils.werks collect checkmk_kube_agent ${WORKSPACE}/checkmk_kube_agent > kube.json
- # jq -s '.[0] * .[1] * .[2]' cma.json cmk.json kube.json > all_werks.json
- # no need to install jq!!!!!
- python3 -c 'import json, sys; print(json.dumps({k: v for f in sys.argv[1:] for k, v in json.load(open(f)).items()}, indent=4))' \
- cmk.json \
- cma.json \
- kube.json \
- > all_werks.json
- """);
+ # jq -s '.[0] * .[1] * .[2]' cma.json cmk.json kube.json > all_werks.json
+ # no need to install jq!!!!!
+ python3 -c 'import json, sys; print(json.dumps({k: v for f in sys.argv[1:] for k, v in json.load(open(f)).items()}, indent=4))' \
+ cmk.json \
+ cma.json \
+ kube.json \
+ > all_werks.json
+ """);
+ /* groovylint-enable LineLength */
- archiveArtifacts(
- artifacts: "all_werks.json",
- fingerprint: true,
- );
- }
+ archiveArtifacts(
+ artifacts: "all_werks.json",
+ fingerprint: true,
+ );
}
}
}
stage("Validate HTML") {
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside("${docker_args}") {
- dir("${checkout_dir}") {
- try {
- sh(script: """
- ./scripts/npm-ci
- echo 'werks' > validate-werks.html
- # still no need for jq!
- python3 -c 'import json; print("\\n".join(("\\n\\n<h1>{}</h1>\\n{}".format(key, value["description"]) for key, value in json.load(open("all_werks.json")).items())))' >> validate-werks.html
- echo '' >> validate-werks.html
- java \
- -jar node_modules/vnu-jar/build/dist/vnu.jar \
- --filterpattern 'The .tt. element is obsolete\\. Use CSS instead\\.' \
- --stdout \
- --format gnu \
- - < validate-werks.html \
- > validate-werks.error.txt
- """);
- } catch(Exception e) {
- archiveArtifacts(
- artifacts: "validate-werks.*, all_werks.json",
- fingerprint: true,
- );
- sh("""
- cat "validate-werks.error.txt"
- echo "Found invalid HTML. See errors above, compare the line numbers with validate-werks.html artifact."
- """);
- throw e;
- }
+ inside_container() {
+ dir("${checkout_dir}") {
+ try {
+ /* groovylint-disable LineLength */
+ sh(script: """
+ (cd packages/cmk-frontend && ./run --setup-environment)
+ echo 'werks' > validate-werks.html
+ # still no need for jq!
+ python3 -c 'import json; print("\\n".join(("\\n\\n<h1>{}</h1>\\n{}".format(key, value["description"]) for key, value in json.load(open("all_werks.json")).items())))' >> validate-werks.html
+ echo '' >> validate-werks.html
+ java \
+ -jar packages/cmk-frontend/node_modules/vnu-jar/build/dist/vnu.jar \
+ --filterpattern 'The .tt. element is obsolete\\. Use CSS instead\\.' \
+ --stdout \
+ --format gnu \
+ - < validate-werks.html \
+ > validate-werks.error.txt
+ """);
+ /* groovylint-enable LineLength */
+ } catch(Exception e) {
+ archiveArtifacts(
+ artifacts: "validate-werks.*, all_werks.json",
+ fingerprint: true,
+ );
+ sh("""
+ cat "validate-werks.error.txt"
+ echo "Found invalid HTML. See errors above, compare the line numbers with validate-werks.html artifact."
+ """);
+ throw e;
}
}
}
@@ -96,7 +93,10 @@ def main() {
stage("Update werks on ${target}") {
withCredentials([
- sshUserPrivateKey(credentialsId: credentials_id, keyFileVariable: 'keyfile', usernameVariable: 'user')
+ sshUserPrivateKey(
+ credentialsId: credentials_id,
+ keyFileVariable: 'keyfile',
+ usernameVariable: 'user')
]) {
sh("""
rsync --verbose \
diff --git a/buildscripts/scripts/ensure-workspace-integrity b/buildscripts/scripts/ensure-workspace-integrity
index 5a590ecb3f7..143103c3f49 100755
--- a/buildscripts/scripts/ensure-workspace-integrity
+++ b/buildscripts/scripts/ensure-workspace-integrity
@@ -3,39 +3,29 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-set -e -o pipefail
REPO_PATH="$(git rev-parse --show-toplevel)"
PREVIOUS_IMAGE_FILENAME="${REPO_PATH}/version_txt_from_previous_run"
-CURRENT_IMAGE_VERSION_FILENAME="/version.txt"
+CURRENT_IMAGE_IDENTIFICATION="$(lsb_release --codename --short)-$(cat /version.txt)"
log() {
echo "ensure-workspace-integrity: ${1}"
}
update_previous_used_image() {
- log "Updating ${PREVIOUS_IMAGE_FILENAME} from ${CURRENT_IMAGE_FILENAME}... "
- cat "${CURRENT_IMAGE_VERSION_FILENAME}" >"${PREVIOUS_IMAGE_FILENAME}"
+ log "Updating ${PREVIOUS_IMAGE_FILENAME} with ${CURRENT_IMAGE_IDENTIFICATION}... "
+ echo "${CURRENT_IMAGE_IDENTIFICATION}" >"${PREVIOUS_IMAGE_FILENAME}"
}
if ! test -f "${PREVIOUS_IMAGE_FILENAME}"; then
log "${PREVIOUS_IMAGE_FILENAME} does not exists (yet)"
- # We have no information regarding the previously used image in that workspace
- # * blow away other artifacts which may be incompatible now
- git clean -f -d -x
+ make -C "${REPO_PATH}" docker-context-clean
update_previous_used_image
exit 0
fi
-if cmp -s "${PREVIOUS_IMAGE_FILENAME}" ${CURRENT_IMAGE_VERSION_FILENAME}; then
- log "${PREVIOUS_IMAGE_FILENAME} and ${CURRENT_IMAGE_VERSION_FILENAME} are the same."
- # Clean the workspace with the following logic:
- # * don't clean .gitignore-d stuff (e.g. .venv or mypy cache) -> we want fast execution
- # * clean stuff which might got generated by previous runs -> they might create false failures
- git clean -f -d
-else
- log "${PREVIOUS_IMAGE_FILENAME} and ${CURRENT_IMAGE_VERSION_FILENAME} differ!"
- # IMAGE_TESTING changed in relation to the previous run in that workspace.
- # * blow away other artifacts which may be incompatible now
- git clean -f -d -x
+if ! echo "${CURRENT_IMAGE_IDENTIFICATION}" | diff "${PREVIOUS_IMAGE_FILENAME}" -; then
+ log "Previously used image in this WORKSPACE does not match current image (${CURRENT_IMAGE_IDENTIFICATION})!"
+ make -C "${REPO_PATH}" docker-context-clean
update_previous_used_image
+ exit 0
fi
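+
+# Illustrative: CURRENT_IMAGE_IDENTIFICATION combines the distro codename with the
+# image's /version.txt, e.g. "jammy-2024.03.01" (the exact version string depends
+# on how the image was built).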
diff --git a/buildscripts/scripts/generic-package-job.groovy b/buildscripts/scripts/generic-package-job.groovy
new file mode 100644
index 00000000000..213911dd300
--- /dev/null
+++ b/buildscripts/scripts/generic-package-job.groovy
@@ -0,0 +1,52 @@
+#!groovy
+
+/// file: generic-package-job.groovy
+
+def secret_list(secret_vars) {
+ return secret_vars ? secret_vars.split(',') : [];
+}
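+
+// Illustrative: secret_list("NEXUS_PASSWORD,BAZEL_CACHE_PASSWORD") yields
+// ["NEXUS_PASSWORD", "BAZEL_CACHE_PASSWORD"] (hypothetical credential IDs);
+// an empty SECRET_VARS yields [].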
+
+def validate_parameters() {
+ if (COMMAND_LINE == "") {
+ error("COMMAND_LINE parameter is empty - you need to specify a command to run.");
+ }
+}
+
+def main() {
+ check_job_parameters([
+ "PACKAGE_PATH",
+ "SECRET_VARS",
+ "COMMAND_LINE",
+ "DEPENDENCY_PATH_HASHES",
+ ]);
+
+ validate_parameters()
+
+ def helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
+ currentBuild.description = "Running ${PACKAGE_PATH}<br>${currentBuild.description}";
+
+
+ def output_file = PACKAGE_PATH.split("/")[-1] + ".log"
+ dir(checkout_dir) {
+ inside_container() {
+ withCredentials(secret_list(SECRET_VARS).collect { string(credentialsId: it, variable: it) }) {
+ helper.execute_test([
+ name : PACKAGE_PATH,
+ cmd : "cd ${PACKAGE_PATH}; ${COMMAND_LINE}",
+ output_file: output_file,
+ ]);
+ }
+ sh("mv ${PACKAGE_PATH}/${output_file} ${checkout_dir}");
+ }
+ archiveArtifacts(
+ artifacts: "${output_file}",
+ fingerprint: true,
+ );
+ setCustomBuildProperty(
+ key: "path_hashes",
+ value: directory_hashes(DEPENDENCY_PATH_HASHES.split(",").grep().collect {keyvalue -> keyvalue.split("=")[0]}),
+ );
+ }
+}
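+
+// Illustrative: DEPENDENCY_PATH_HASHES is expected as comma separated "path=hash"
+// pairs, e.g. "agents=abc123,non-free/cmk-update-agent=def456"; only the path
+// parts are re-hashed via directory_hashes() and stored as "path_hashes" above.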
+
+return this;
diff --git a/buildscripts/scripts/get_distros.py b/buildscripts/scripts/get_distros.py
index 1e6d8424622..2bb74abf504 100755
--- a/buildscripts/scripts/get_distros.py
+++ b/buildscripts/scripts/get_distros.py
@@ -3,59 +3,372 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
import argparse
-from collections.abc import Callable, Mapping
+import os
+import sys
+import urllib.parse
+from argparse import Namespace as Args
+from collections.abc import Callable, Iterable, Iterator, Sequence
+from contextlib import suppress
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import NamedTuple
+import docker # type: ignore
+import requests
import yaml
+sys.path.insert(0, Path(__file__).parent.parent.parent.as_posix())
-def print_internal_distros(arguments: argparse.Namespace, loaded_yaml: dict) -> None:
- distros = flatten_list(loaded_yaml["internal_distros"])
- if arguments.as_codename:
+from tests.testlib.utils import get_cmk_download_credentials
+from tests.testlib.version import ABCPackageManager, code_name
+
+from cmk.utils.version import Edition
+
+
+class DockerCredentials(NamedTuple):
+ username: str
+ password: str
+
+
+class Credentials(NamedTuple):
+ username: str
+ password: str
+
+
+class DockerImage(NamedTuple):
+ image_name: str
+ tag: str
+
+ def full_name(self) -> str:
+ return f"{self.image_name}:{self.tag}"
+
+
+@dataclass
+class Registry:
+ editions: Sequence[str]
+ url: str = field(init=False)
+ credentials: Credentials = field(init=False)
+ client: docker.DockerClient = field(init=False)
+ image_exists: Callable[[DockerImage, str], bool] = field(init=False)
+
+ def image_exists_docker_hub(self, image: DockerImage, _edition: str) -> bool:
+ sys.stdout.write(f"Test if {image.full_name()} is available...")
+ with suppress(docker.errors.NotFound):
+ self.client.images.get_registry_data(image.full_name())
+ sys.stdout.write(" OK\n")
+ return True
+ return False
+
+ def image_exists_and_can_be_pulled_enterprise(self, image: DockerImage, edition: str) -> bool:
+ if not self.image_exists_enterprise(image, edition):
+ return False
+
+ return self.image_can_be_pulled_enterprise(image, edition)
+
+ def image_exists_enterprise(self, image: DockerImage, edition: str) -> bool:
+ url = f"{self.url}/v2/{image.image_name}/tags/list"
+ sys.stdout.write(f"Test if {image.tag} can be found in {url}...")
+ exists = (
+ image.tag
+ in requests.get(
+ url,
+ auth=(self.credentials.username, self.credentials.password),
+ ).json()["tags"]
+ )
+ if not exists:
+ sys.stdout.write(" NO!\n")
+ return False
+ sys.stdout.write(" OK\n")
+ return True
+
+ def image_can_be_pulled_enterprise(self, image: DockerImage, edition: str) -> bool:
+ repository = f"{urllib.parse.urlparse(self.url).netloc}/{edition}/check-mk-{edition}"
+ sys.stdout.write(f"Test if {image.tag} can be pulled from {repository}...")
+
+ # Be sure we don't have the image locally... there is no force pull
+ with suppress(docker.errors.ImageNotFound):
+ self.client.images.remove(f"{repository}:{image.tag}")
+
+ try:
+ self.client.images.pull(
+ tag=image.tag,
+ repository=repository,
+ )
+ except docker.errors.APIError as e:
+ sys.stdout.write(f" NO! Error was: {e}\n")
+ return False
+
+ sys.stdout.write(" OK\n")
+ return True
+
+ def __post_init__(self):
+ self.client = docker.client.from_env()
+ self.credentials = get_credentials()
+ match self.editions:
+ case ["enterprise", "managed"]:
+ self.url = "https://registry.checkmk.com"
+ # Asking why we're also pulling? -> CMK-14567
+ self.image_exists = self.image_exists_and_can_be_pulled_enterprise
+ self.client.login(
+ registry=self.url,
+ username=self.credentials.username,
+ password=self.credentials.password,
+ )
+ case ["raw", "cloud"]:
+ self.url = "https://docker.io"
+ self.image_exists = self.image_exists_docker_hub
+ case ["saas"]:
+ self.url = "https://artifacts.lan.tribe29.com:4000"
+ self.image_exists = self.image_exists_enterprise
+ # For nexus, d-intern is not authorized
+ self.credentials = Credentials(
+ username=os.environ["NEXUS_USER"],
+ password=os.environ["NEXUS_PASSWORD"],
+ )
+ case _:
+ raise RuntimeError(f"Cannnot match editions to registry: {self.editions}")
+
+
+def get_credentials() -> Credentials:
+ return Credentials(*get_cmk_download_credentials())
+
+
+def hash_file(artifact_name: str) -> str:
+ return f"{artifact_name}.hash"
+
+
+def edition_to_registry(ed: str, registries: list[Registry]) -> Registry:
+ for r in registries:
+ if ed in r.editions:
+ return r
+ raise RuntimeError(f"Cannot determine registry for edition: {ed}!")
+
+
+def build_source_artifacts(args: Args, loaded_yaml: dict) -> Iterator[tuple[str, bool]]:
+ for edition in loaded_yaml["editions"]:
+ file_name = (
+ f"check-mk-{edition}-{args.version}.{Edition.from_long_edition(edition).short}.tar.gz"
+ )
+ internal_only = edition in loaded_yaml["internal_editions"]
+ yield file_name, internal_only
+ yield hash_file(file_name), internal_only
+
+
+def build_docker_artifacts(args: Args, loaded_yaml: dict) -> Iterator[tuple[str, bool]]:
+ for edition in loaded_yaml["editions"]:
+ file_name = f"check-mk-{edition}-docker-{args.version}.tar.gz"
+ internal_only = edition in loaded_yaml["internal_editions"]
+ yield file_name, internal_only
+ yield hash_file(file_name), internal_only
+
+
+def build_docker_image_name_and_registry(
+ args: Args, loaded_yaml: dict, registries: list[Registry]
+) -> Iterator[tuple[DockerImage, str, Registry]]:
+ def build_folder(ed: str) -> str:
+ # TODO: Merge with build-cmk-container.py
+ match ed:
+ case "raw" | "cloud":
+ return "checkmk/"
+ case "enterprise" | "managed":
+ return f"{ed}/"
+ case "saas":
+ return ""
+ case _:
+ raise RuntimeError(f"Unknown edition {ed}")
+
+ for edition in loaded_yaml["editions"]:
+ registry = edition_to_registry(edition, registries)
+ yield (
+ DockerImage(tag=args.version, image_name=f"{build_folder(edition)}check-mk-{edition}"),
+ edition,
+ registry,
+ )
+
+
+def build_package_artifacts(args: Args, loaded_yaml: dict) -> Iterator[tuple[str, bool]]:
+ for edition in loaded_yaml["editions"]:
+ for distro in flatten(loaded_yaml["editions"][edition]["release"]):
+ package_name = ABCPackageManager.factory(code_name(distro)).package_name(
+ Edition.from_long_edition(edition), version=args.version
+ )
+ internal_only = (
+ distro in loaded_yaml["internal_distros"]
+ or edition in loaded_yaml["internal_editions"]
+ )
+ yield package_name, internal_only
+ yield hash_file(package_name), internal_only
+
+
+def file_exists_on_download_server(filename: str, version: str, credentials: Credentials) -> bool:
+ url = f"https://download.checkmk.com/checkmk/{version}/{filename}"
+ sys.stdout.write(f"Checking for {url}...")
+ if (
+ requests.head(
+ f"https://download.checkmk.com/checkmk/{version}/{filename}",
+ auth=(credentials.username, credentials.password),
+ ).status_code
+ != 200
+ ):
+ sys.stdout.write(" MISSING\n")
+ return False
+ sys.stdout.write(" AVAILABLE\n")
+ return True
+
+
+def assert_presence_on_download_server(
+ args: Args, internal_only: bool, artifact_name: str, credentials: Credentials
+) -> None:
+ if file_exists_on_download_server(artifact_name, args.version, credentials) == internal_only:
+ raise RuntimeError(
+ f"{artifact_name} should {'not ' if internal_only else ''}"
+ "be available on download server!"
+ )
+
+
+def assert_build_artifacts(args: Args, loaded_yaml: dict) -> None:
+ credentials = get_credentials()
+ registries = [
+ Registry(
+ editions=["enterprise", "managed"],
+ ),
+ Registry(
+ editions=["raw", "cloud"],
+ ),
+ Registry(
+ editions=["saas"],
+ ),
+ ]
+ for artifact_name, internal_only in build_source_artifacts(args, loaded_yaml):
+ assert_presence_on_download_server(args, internal_only, artifact_name, credentials)
+
+ for artifact_name, internal_only in build_package_artifacts(args, loaded_yaml):
+ assert_presence_on_download_server(args, internal_only, artifact_name, credentials)
+
+ for artifact_name, internal_only in build_docker_artifacts(args, loaded_yaml):
+ assert_presence_on_download_server(args, internal_only, artifact_name, credentials)
+
+ for image_name, edition, registry in build_docker_image_name_and_registry(
+ args, loaded_yaml, registries
+ ):
+ if not registry.image_exists(image_name, edition):
+ raise RuntimeError(f"{image_name} not found!")
+
+ # cloud images
+ # TODO
+
+
+def print_internal_build_artifacts(args: Args, loaded_yaml: dict) -> None:
+ distros = flatten(loaded_yaml["internal_distros"])
+ editions = flatten(loaded_yaml["internal_editions"])
+
+ if args.as_codename:
if diff := distros - loaded_yaml["distro_to_codename"].keys():
raise Exception(
f"{args.editions_file} is missing the distro code for the following distros: "
f"{diff}. Please add the corresponding distro code."
)
distros = [loaded_yaml["distro_to_codename"][d] for d in distros]
- if arguments.as_rsync_exclude_pattern:
- print("{" + ",".join([f"'*{d}*'" for d in distros]) + "}")
+ if args.as_rsync_exclude_pattern:
+ print("{" + ",".join([f"'*{d}*'" for d in list(distros) + list(editions)]) + "}")
return
- print(" ".join(distros))
+ print(" ".join(sorted(set(distros).union(set(editions)))))
-def print_distros_for_use_case(arguments: argparse.Namespace, loaded_yaml: dict) -> None:
- print(" ".join(flatten_list(loaded_yaml["editions"][arguments.edition][arguments.use_case])))
+def distros_for_use_case(edition_distros: dict, edition: str, use_case: str) -> Iterable[str]:
+ return sorted(
+ set(
+ distro
+ for _edition, use_cases in edition_distros.items()
+ if edition in (_edition, "all")
+ for _use_case, distros in use_cases.items()
+ if use_case in (_use_case, "all")
+ for distro in flatten(distros)
+ )
+ )
-COMMANDS_TO_FUNCTION: Mapping[str, Callable[[argparse.Namespace, dict], None]] = {
- "internal_distros": print_internal_distros,
- "use_cases": print_distros_for_use_case,
-}
+def print_distros_for_use_case(args: argparse.Namespace, loaded_yaml: dict) -> None:
+ edition_distros = loaded_yaml["editions"]
+ edition = args.edition or "all"
+ use_case = args.use_case or "all"
+ print(" ".join(distros_for_use_case(edition_distros, edition, use_case)))
-def flatten_list(list_to_flatten: list[list[str] | str]) -> list[str]:
+def flatten(list_to_flatten: Iterable[Iterable[str] | str]) -> Iterable[str]:
# This is a workaround the fact that yaml cannot "extend" a predefined node which is a list:
# https://stackoverflow.com/questions/19502522/extend-an-array-in-yaml
- return [h for elem in list_to_flatten for h in (elem if isinstance(elem, list) else [elem])]
+ return [h for elem in list_to_flatten for h in ([elem] if isinstance(elem, str) else elem)]
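+
+# e.g. flatten(["debian-12", ["ubuntu-20.04", "ubuntu-22.04"]])
+# -> ["debian-12", "ubuntu-20.04", "ubuntu-22.04"]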
+
+def test_distro_lists():
+ with open(Path(__file__).parent.parent.parent / "editions.yml") as editions_file:
+ edition_distros = yaml.load(editions_file, Loader=yaml.FullLoader)["editions"]
+ # fmt: off
+ assert distros_for_use_case(edition_distros, "enterprise", "release") == [
+ "almalinux-9", "centos-8",
+ "cma-3", "cma-4",
+ "debian-10", "debian-11", "debian-12",
+ "sles-12sp5", "sles-15sp3", "sles-15sp4", "sles-15sp5",
+ "ubuntu-20.04", "ubuntu-22.04",
+ ]
+ assert distros_for_use_case(edition_distros, "enterprise", "daily") == [
+ "almalinux-9", "centos-8",
+ "cma-4",
+ "debian-12",
+ "sles-15sp5",
+ "ubuntu-20.04", "ubuntu-22.04", "ubuntu-23.10",
+ ]
+ assert distros_for_use_case(edition_distros, "all", "all") == [
+ "almalinux-9", "centos-8",
+ "cma-3", "cma-4",
+ "debian-10", "debian-11", "debian-12",
+ "sles-12sp5", "sles-15sp3", "sles-15sp4", "sles-15sp5",
+ "ubuntu-20.04", "ubuntu-22.04", "ubuntu-23.10",
+ ]
+ # fmt: on
-def parse_arguments() -> argparse.Namespace:
+
+def parse_arguments() -> Args:
parser = argparse.ArgumentParser()
parser.add_argument("--editions_file", required=True)
+
subparsers = parser.add_subparsers(required=True, dest="command")
+
+ use_cases = subparsers.add_parser("all", help="a help")
+ use_cases.set_defaults(func=print_distros_for_use_case)
+ use_cases.add_argument("--edition", default="all")
+ use_cases.add_argument("--use_case", default="all")
+
use_cases = subparsers.add_parser("use_cases", help="a help")
+ use_cases.set_defaults(func=print_distros_for_use_case)
use_cases.add_argument("--edition", required=True)
use_cases.add_argument("--use_case", required=True)
- internal_distros = subparsers.add_parser("internal_distros")
- internal_distros.add_argument("--as-codename", default=False, action="store_true")
- internal_distros.add_argument("--as-rsync-exclude-pattern", default=False, action="store_true")
+ internal_build_artifacts = subparsers.add_parser("internal_build_artifacts")
+ internal_build_artifacts.set_defaults(func=print_internal_build_artifacts)
+ internal_build_artifacts.add_argument("--as-codename", default=False, action="store_true")
+ internal_build_artifacts.add_argument(
+ "--as-rsync-exclude-pattern", default=False, action="store_true"
+ )
+
+ sub_assert_build_artifacts = subparsers.add_parser("assert_build_artifacts")
+ sub_assert_build_artifacts.set_defaults(func=assert_build_artifacts)
+ sub_assert_build_artifacts.add_argument("--version", required=True, default=False)
return parser.parse_args()
-args = parse_arguments()
-with open(args.editions_file) as editions_file:
- COMMANDS_TO_FUNCTION[args.command](args, yaml.load(editions_file, Loader=yaml.FullLoader))
+def main() -> None:
+ args = parse_arguments()
+ with open(args.editions_file) as editions_file:
+ args.func(args, yaml.load(editions_file, Loader=yaml.FullLoader))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/buildscripts/scripts/jenkins_job_entry.groovy b/buildscripts/scripts/jenkins_job_entry.groovy
index f08a330c033..5e18cbaae5b 100644
--- a/buildscripts/scripts/jenkins_job_entry.groovy
+++ b/buildscripts/scripts/jenkins_job_entry.groovy
@@ -41,5 +41,5 @@ def main(job_definition_file) {
}
}
}
-return this;
+return this;
diff --git a/buildscripts/scripts/pin_dependencies.py b/buildscripts/scripts/pin_dependencies.py
new file mode 100755
index 00000000000..0f21e1c816b
--- /dev/null
+++ b/buildscripts/scripts/pin_dependencies.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+# Copyright (C) 2023 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+import argparse
+import enum
+import json
+import re
+from typing import Iterator
+
+import pipfile # type: ignore
+
+
+def insert_pinned_version(p_to_v: dict[str, str], args: argparse.Namespace) -> None:
+ # In order to preserve comments, we need to go the manual path
+ with open(args.path_to_pipfile) as pipfile_read:
+ lines = pipfile_read.readlines()
+
+ for i, line in enumerate(lines):
+ if package_match := re.match(r"(^.*) = \"*\".*", line):
+ package_name = package_match.groups()[0]
+ if pinned_version := p_to_v.get(package_name):
+ lines[i] = line.replace('"*"', f'"{pinned_version}"')
+
+ with open(args.path_to_pipfile, "w") as pipfile_write:
+ pipfile_write.writelines(lines)
+
+
+class PackageType(enum.StrEnum):
+ # See also discussion in slack why we're also pinning dev-packages:
+ # https://tribe29.slack.com/archives/CGBE6U2PK/p1706523946468149
+ # and wiki
+ # https://wiki.lan.tribe29.com/books/how-to/page/creating-a-new-beta-branch#bkmrk-pin-dev-dependencies
+ default = "packages"
+ develop = "dev-packages"
+
+
+def find_unpinned_packages(data: dict) -> Iterator[str]:
+ yield from [p for p, v in data.items() if "*" in v]
+
+
+def get_version_from_pipfile_lock(package_name: str, package_type: str, pipfile_lock: dict) -> str:
+ return pipfile_lock[package_type][package_name]["version"]
+
+
+def parse_arguments() -> argparse.Namespace:
+ parser = argparse.ArgumentParser(
+ description="""
+ This script can be used to replace unpinned package versions with the version which is
+ currently locked in a Pipfile.lock. We usually do this when branching from master
+ into the next stable release in order to have reproducible builds.
+ """
+ )
+
+ parser.add_argument("--path_to_pipfile", required=True)
+ parser.add_argument("--path_to_pipfile_lock", required=True)
+ return parser.parse_args()
+
+
+if __name__ == "__main__":
+ args = parse_arguments()
+ loaded_pipfile = pipfile.Pipfile.load(args.path_to_pipfile)
+ with open(args.path_to_pipfile_lock) as pl:
+ loaded_pipfile_lock = json.load(pl)
+
+ package_to_version = {}
+
+ for p_type in PackageType:
+ for package in find_unpinned_packages(loaded_pipfile.data[p_type.name]):
+ version = get_version_from_pipfile_lock(
+ package,
+ package_type=p_type.name,
+ pipfile_lock=loaded_pipfile_lock,
+ )
+ package_to_version[package] = version
+ insert_pinned_version(package_to_version, args)
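+
+# Illustrative effect on a Pipfile entry (hypothetical package):
+#   before: requests = "*"
+#   after:  requests = "==2.31.0"  # specifier taken verbatim from Pipfile.lock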
diff --git a/buildscripts/scripts/publish_cloud_images.py b/buildscripts/scripts/publish_cloud_images.py
new file mode 100755
index 00000000000..384c3264ff6
--- /dev/null
+++ b/buildscripts/scripts/publish_cloud_images.py
@@ -0,0 +1,395 @@
+#!/usr/bin/env python3
+# Copyright (C) 2023 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+import abc
+import argparse
+import asyncio
+import enum
+import json
+import os
+import sys
+from typing import Final, Iterator
+
+import boto3 # type: ignore[import]
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.compute import ComputeManagementClient
+from azure.mgmt.compute.models import (
+ GalleryArtifactVersionSource,
+ GalleryImageVersion,
+ GalleryImageVersionPublishingProfile,
+ GalleryImageVersionStorageProfile,
+ GalleryOSDiskImage,
+ TargetRegion,
+)
+from azure.mgmt.resource import ResourceManagementClient
+from msrest.polling import LROPoller
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
+from cmk.utils.version import _PatchVersion, Version
+
+
+def parse_arguments() -> argparse.Namespace:
+ parser = argparse.ArgumentParser(
+ description="This script is used for publishing new versions "
+ "for our AWS and Azure cloud images."
+ )
+ parser.add_argument(
+ "--cloud-type",
+ help="Choose here the type of the cloud",
+ action="store",
+ required=True,
+ choices=CloudPublisher.CLOUD_TYPES,
+ )
+ parser.add_argument(
+ "--new-version",
+ help="The new version which will be used for the update",
+ action="store",
+ required=True,
+ )
+ parser.add_argument(
+ "--build-tag",
+ help="The jenkins build tag to pass to the change sets for later identification",
+ action="store",
+ required=True,
+ )
+ parser.add_argument(
+ "--image-name",
+ help="The name of the cloud image which can be found in the cloud",
+ action="store",
+ required=True,
+ )
+ parser.add_argument(
+ "--marketplace-scanner-arn",
+ help="The arn of an aws role which can access our ami images",
+ action="store",
+ required=True,
+ )
+ parser.add_argument(
+ "--product-id",
+ help="The product id of the product which should receive a new version",
+ action="store",
+ required=True,
+ )
+ parser.add_argument(
+ "--azure-subscription-id",
+ help="Azure's subscription id",
+ action="store",
+ required=True,
+ )
+ parser.add_argument(
+ "--azure-resource-group",
+ help="Azure's resource group",
+ action="store",
+ required=True,
+ )
+ return parser.parse_args()
+
+
+class CloudPublisher(abc.ABC):
+ # Currently, both AWS and Azure should long be finished after that time
+ SECONDS_TO_TIMEOUT_PUBLISH_PROCESS: Final = 3 * 60 * 60
+ CLOUD_TYPES: Final = ["aws", "azure"]
+ SECONDS_TO_WAIT_FOR_NEXT_STATUS: Final = 20
+
+ def __init__(self, version: Version, build_tag: str, image_name: str):
+ self.version = version
+ self.build_tag = build_tag
+ self.image_name = image_name
+
+ @abc.abstractmethod
+ async def publish(self):
+ ...
+
+ @staticmethod
+ def build_release_notes_url(version: str) -> str:
+ """
+ >>> CloudPublisher.build_release_notes_url("2.2.0p5")
+ 'https://forum.checkmk.com/t/release-checkmk-stable-release-2-2-0p5/'
+ """
+ return (
+ f"https://forum.checkmk.com/t/release-checkmk-stable-release-"
+ f"{version.replace('.','-')}/"
+ )
+
+
+class AWSPublisher(CloudPublisher):
+ ENTITY_TYPE_WITH_VERSION = "AmiProduct@1.0"
+ CATALOG = "AWSMarketplace"
+
+ def __init__(
+ self,
+ version: Version,
+ build_tag: str,
+ image_name: str,
+ marketplace_scanner_arn: str,
+ product_id: str,
+ ):
+ super().__init__(version, build_tag, image_name)
+ self.client_ec2 = boto3.client("ec2")
+ self.client_market = boto3.client("marketplace-catalog")
+ self.aws_marketplace_scanner_arn = marketplace_scanner_arn
+ self.production_id = product_id
+
+ class ChangeTypes(enum.StrEnum):
+ ADD_DELIVERY_OPTIONS = "AddDeliveryOptions" # for updating the version
+
+ async def publish(self) -> None:
+ image_id = self.get_ami_image_id()
+ update_details = {
+ "Version": {
+ "VersionTitle": str(self.version),
+ "ReleaseNotes": self.build_release_notes_url(str(self.version)),
+ },
+ "DeliveryOptions": [
+ {
+ "Details": {
+ "AmiDeliveryOptionDetails": {
+ "AmiSource": {
+ "AmiId": image_id,
+ # This role must be able to read our ami images, see:
+ # https://docs.aws.amazon.com/marketplace/latest/userguide/ami-single-ami-products.html#single-ami-marketplace-ami-access
+ "AccessRoleArn": self.aws_marketplace_scanner_arn,
+ "UserName": "ubuntu",
+ "OperatingSystemName": "UBUNTU",
+ # TODO: can we centralize this into editions.yml?
+ "OperatingSystemVersion": "22.04",
+ },
+ "UsageInstructions": "See the Checkmk manual for "
+ "detailed usage instructions: "
+ "https://docs.checkmk.com/latest/en/intro_gui.html",
+ "RecommendedInstanceType": "c6a.large",
+ "SecurityGroups": [
+ {
+ # ssh
+ "IpProtocol": "tcp",
+ "FromPort": 22,
+ "ToPort": 22,
+ "IpRanges": ["0.0.0.0/0"],
+ },
+ {
+ # https
+ "IpProtocol": "tcp",
+ "FromPort": 443,
+ "ToPort": 443,
+ "IpRanges": ["0.0.0.0/0"],
+ },
+ {
+ # agent registration
+ "IpProtocol": "tcp",
+ "FromPort": 8000,
+ "ToPort": 8000,
+ "IpRanges": ["0.0.0.0/0"],
+ },
+ ],
+ }
+ }
+ }
+ ],
+ }
+
+ print(f"Starting change set for ami image {image_id} and version {self.version}")
+ response = self.client_market.start_change_set(
+ Catalog=self.CATALOG,
+ ChangeSet=[
+ {
+ "ChangeType": self.ChangeTypes.ADD_DELIVERY_OPTIONS,
+ "Entity": {
+ "Type": self.ENTITY_TYPE_WITH_VERSION,
+ "Identifier": self.production_id,
+ },
+ "Details": json.dumps(update_details),
+ "ChangeName": "update",
+ },
+ ],
+ ChangeSetName=f"Add new version {self.version} by {self.build_tag}",
+ )
+ await asyncio.wait_for(
+ self.update_successful(response["ChangeSetId"]), self.SECONDS_TO_TIMEOUT_PUBLISH_PROCESS
+ )
+
+ def get_ami_image_id(self) -> str:
+ images = self.client_ec2.describe_images(
+ Filters=[
+ {
+ "Name": "name",
+ "Values": [self.image_name],
+ },
+ ],
+ )["Images"]
+ assert len(images) == 1, (
+ "Cannot identify the correct image to publish, " f"received the following: {images}"
+ )
+ return images[0]["ImageId"]
+
+ async def update_successful(self, change_set_id: str) -> None:
+ while True:
+ response = self.client_market.describe_change_set(
+ Catalog=self.CATALOG,
+ ChangeSetId=change_set_id,
+ )
+ status = response["Status"]
+ match status:
+ case "PREPARING" | "APPLYING":
+ print(
+ f"Got {status=}... "
+ f"sleeping for {self.SECONDS_TO_WAIT_FOR_NEXT_STATUS} seconds..."
+ )
+ await asyncio.sleep(self.SECONDS_TO_WAIT_FOR_NEXT_STATUS)
+ case "CANCELLED" | "FAILED":
+ raise RuntimeError(
+ f"The changeset {change_set_id} returned {status=}.\n"
+ f"The error was: {response['ChangeSet'][0]['ErrorDetailList']}"
+ )
+ case "SUCCEEDED":
+ return
+
+
+class AzurePublisher(CloudPublisher):
+ LOCATION = "westeurope"
+ STORAGE_ACCOUNT_TYPE = "Standard_LRS"
+ GALLERY_NAME = "Marketplace_Publishing_Gallery"
+
+ def __init__(
+ self,
+ version: Version,
+ build_tag: str,
+ image_name: str,
+ subscription_id: str,
+ resource_group: str,
+ ):
+ super().__init__(version, build_tag, image_name)
+ credentials = DefaultAzureCredential()
+ self.subscription_id = subscription_id
+ self.resource_group = resource_group
+ # Use e.g. Checkmk_Cloud_Edition_2.2b5 for testing
+ self.gallery_image_name = (
+ f"Checkmk-Cloud-Edition-{self.version.version.major}.{self.version.version.minor}"
+ )
+ self.compute_client = ComputeManagementClient(
+ credentials,
+ self.subscription_id,
+ )
+ self.resource_client = ResourceManagementClient(
+ credentials,
+ self.subscription_id,
+ )
+
+ def get_azure_image_id(self) -> Iterator[str]:
+ resource_list = self.resource_client.resources.list_by_resource_group(
+ self.resource_group,
+ filter=f"name eq '{self.image_name}'",
+ )
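+ # Yield the first match right away; on the next generator step we
+ # verify that the image name was unique and fail otherwise.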
+ yield next(resource_list).id
+ if another_match := next(resource_list, None):
+ raise RuntimeError(
+ f"Cannot identify a unique azure image by using {self.image_name=}. "
+ f"Found also: {another_match}"
+ )
+
+ @staticmethod
+ def azure_compatible_version(version: _PatchVersion) -> str:
+ """
+ Azure doesn't accept our versioning scheme, so patch releases map to plain X.Y.Z:
+ >>> AzurePublisher.azure_compatible_version(Version("2.2.0p5").version)
+ '2.2.5'
+ """
+ return f"{version.major}.{version.minor}.{version.patch}"
+
+ async def build_gallery_image(self):
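+ # list() drains the generator, so the uniqueness check in
+ # get_azure_image_id() runs before we pick the image id.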
+ image_id = list(self.get_azure_image_id())[0]
+ print(f"Creating new gallery image from {self.version=} by using {image_id=}")
+ assert isinstance(self.version.version, _PatchVersion)
+ self.update_successful(
+ self.compute_client.gallery_image_versions.begin_create_or_update(
+ resource_group_name=self.resource_group,
+ gallery_name=self.GALLERY_NAME,
+ gallery_image_name=self.gallery_image_name,
+ gallery_image_version_name=self.azure_compatible_version(self.version.version),
+ gallery_image_version=GalleryImageVersion(
+ location=self.LOCATION,
+ publishing_profile=GalleryImageVersionPublishingProfile(
+ target_regions=[
+ TargetRegion(name=self.LOCATION),
+ ],
+ storage_account_type=self.STORAGE_ACCOUNT_TYPE,
+ ),
+ storage_profile=GalleryImageVersionStorageProfile(
+ source=GalleryArtifactVersionSource(
+ id=image_id,
+ ),
+ os_disk_image=GalleryOSDiskImage(
+ # Taken from previous images
+ host_caching="ReadWrite",
+ ),
+ ),
+ ),
+ ),
+ )
+
+ async def publish(self):
+ """
+ Azure's update process has two steps:
+ * first, create a gallery image from the VM image that was pushed by Packer
+ * second, add the new gallery image as technical configuration to our marketplace offer
+ """
+
+ await asyncio.wait_for(
+ self.build_gallery_image(),
+ self.SECONDS_TO_TIMEOUT_PUBLISH_PROCESS,
+ )
+
+ # TODO: Implement step #2
+
+ def update_successful(self, poller: LROPoller) -> None:
+ while True:
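+ # Note: poller.result() blocks for up to SECONDS_TO_WAIT_FOR_NEXT_STATUS
+ # seconds per iteration, so the waiting happens inside that call.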
+ result = poller.result(self.SECONDS_TO_WAIT_FOR_NEXT_STATUS)
+ assert isinstance(result, GalleryImageVersion)
+ if provisioning_state := result.provisioning_state:
+ print(f"{provisioning_state=}")
+ match provisioning_state:
+ case "Succeeded":
+ return
+ case _:
+ raise RuntimeError(f"Poller returned {provisioning_state=}")
+ print(
+ f"Got no result yet... "
+ f"sleeping for {self.SECONDS_TO_WAIT_FOR_NEXT_STATUS} seconds..."
+ )
+
+
+def ensure_using_official_release(version: str) -> Version:
+ parsed_version = Version(version)
+ if not isinstance(parsed_version.version, _PatchVersion):
+ raise RuntimeError(
+ f"We only want to publish official patch releases, got {parsed_version} instead."
+ )
+ return parsed_version
+
+
+if __name__ == "__main__":
+ args = parse_arguments()
+
+ new_version = ensure_using_official_release(args.new_version)
+ match args.cloud_type:
+ case "aws":
+ asyncio.run(
+ AWSPublisher(
+ new_version,
+ args.build_tag,
+ args.image_name,
+ args.marketplace_scanner_arn,
+ args.product_id,
+ ).publish()
+ )
+ case "azure":
+ asyncio.run(
+ AzurePublisher(
+ new_version,
+ args.build_tag,
+ args.image_name,
+ args.azure_subscription_id,
+ args.azure_resource_group,
+ ).publish()
+ )
diff --git a/buildscripts/scripts/send-werks-to-mailing-lists.groovy b/buildscripts/scripts/send-werks-to-mailing-lists.groovy
index 631566ed965..cf1eccf18af 100644
--- a/buildscripts/scripts/send-werks-to-mailing-lists.groovy
+++ b/buildscripts/scripts/send-werks-to-mailing-lists.groovy
@@ -4,14 +4,14 @@
def validate_parameters(send_werk_mails, add_werk_git_notes) {
if (send_werk_mails && !add_werk_git_notes) {
- error "Sending the werk mails but not adding the git notes is dangerous: " +
- "We may re-send already published werks again."
+ error("Sending the werk mails but not adding the git notes is dangerous: " +
+ "We may re-send already published werks again.");
}
}
def build_cmd_options_from_params(send_werk_mails, add_werk_git_notes, assume_no_mails_sent_except, werks_mail_address) {
// We let the python code fetch the git notes (and not via JJB/groovy) as this may also push the notes.
- def cmd_line = "--do-fetch-git-notes"
+ def cmd_line = "--do-fetch-git-notes";
if (send_werk_mails) {
cmd_line += " --do-send-mail";
@@ -22,18 +22,18 @@ def build_cmd_options_from_params(send_werk_mails, add_werk_git_notes, assume_no
}
if (assume_no_mails_sent_except != "") {
- cmd_line += " --assume-no-notes-but ${assume_no_mails_sent_except}"
+ cmd_line += " --assume-no-notes-but ${assume_no_mails_sent_except}";
}
if (werks_mail_address != "") {
- cmd_line += " --mail ${werks_mail_address}"
+ cmd_line += " --mail ${werks_mail_address}";
}
- return cmd_line
+ return cmd_line;
}
def was_timer_triggered() {
- return currentBuild.rawBuild.getCauses()[0].toString().contains('TimerTriggerCause')
+ return currentBuild.rawBuild.getCauses()[0].toString().contains('TimerTriggerCause');
}
def main() {
@@ -45,11 +45,12 @@ def main() {
"WERKS_MAIL_ADDRESS",
]);
- def docker_args = "${mount_reference_repo_dir} " +
- "-h lists.checkmk.com " +
- "-v /etc/nullmailer:/etc/nullmailer:ro " +
- "-v /var/spool/nullmailer:/var/spool/nullmailer";
- def send_werk_mails_of_branches = params.SEND_WERK_MAILS_OF_BRANCHES.split(" ")
+ def docker_args = [
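+ // hostname and nullmailer mounts, so the container can hand mails
+ // over to the host's mail setup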
+ "-h lists.checkmk.com ",
+ "-v /etc/nullmailer:/etc/nullmailer:ro ",
+ "-v /var/spool/nullmailer:/var/spool/nullmailer",
+ ];
+ def send_werk_mails_of_branches = params.SEND_WERK_MAILS_OF_BRANCHES.split(" ");
def send_werk_mails = params.SEND_WERK_MAILS;
def add_werk_git_notes = params.ADD_WERK_GIT_NOTES;
def assume_no_mails_sent_except = params.ASSUME_NO_MAILS_SENT_EXCEPT;
@@ -81,21 +82,19 @@ def main() {
""".stripMargin());
stage("Send mails") {
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside("${docker_args}") {
- withCredentials([
- sshUserPrivateKey(credentialsId: "ssh-git-gerrit-jenkins", keyFileVariable: 'keyfile', usernameVariable: 'user')
- ]) {
- withEnv(["GIT_SSH_COMMAND=ssh -o \"StrictHostKeyChecking no\" -i ${keyfile} -l ${user}"]) {
- dir("${checkout_dir}") {
- send_werk_mails_of_branches.each{branch ->
- sh("""
- git config --add user.name ${user};
- git config --add user.email ${JENKINS_MAIL};
- scripts/run-pipenv run python3 -m cmk.utils.werks mail \
- . origin/${branch} werk_mail ${cmd_line};
- """);
- }
+ inside_container(args: docker_args) {
+ withCredentials([
+ sshUserPrivateKey(credentialsId: "ssh-git-gerrit-jenkins", keyFileVariable: 'keyfile', usernameVariable: 'user')
+ ]) {
+ withEnv(["GIT_SSH_COMMAND=ssh -o \"StrictHostKeyChecking no\" -i ${keyfile} -l ${user}"]) {
+ dir("${checkout_dir}") {
+ send_werk_mails_of_branches.each{branch ->
+ sh("""
+ git config --add user.name ${user};
+ git config --add user.email ${JENKINS_MAIL};
+ scripts/run-pipenv run python3 -m cmk.utils.werks mail \
+ . origin/${branch} werk_mail ${cmd_line};
+ """);
}
}
}
@@ -103,4 +102,5 @@ def main() {
}
}
}
+
return this;
diff --git a/buildscripts/scripts/stages.yml b/buildscripts/scripts/stages.yml
index a81ec731ced..88f332c03a3 100644
--- a/buildscripts/scripts/stages.yml
+++ b/buildscripts/scripts/stages.yml
@@ -18,14 +18,23 @@ VARIABLES:
SH: "git rev-parse ${PATCHSET_REVISION}^"
- NAME: CHANGED_FILES_REL
- SH: "git diff-tree --no-commit-id --name-only -r ${BASE_COMMIT_ID}..HEAD | sed -E 's#(enterprise|managed|cloud)/cmk/#cmk/#g' | sort"
+ SH: "git diff-tree --no-commit-id --name-only -r ${BASE_COMMIT_ID}..HEAD | sort"
- NAME: CHANGED_MAKEFILE
SH: "echo '${CHANGED_FILES_REL}' | grep '^Makefile' || true"
+ - NAME: CHANGED_REFERENCE_IMAGE
+ SH: "echo '${CHANGED_FILES_REL}' | grep '^defines/dev-images/reference/Dockerfile' || true"
+
- NAME: CHANGED_TESTS_MAKEFILE
SH: "echo '${CHANGED_FILES_REL}' | grep '^tests/Makefile' || true"
+ - NAME: CHANGED_OMDLIB_FILES
+ SH: "git diff-tree --no-commit-id --name-only -r ${BASE_COMMIT_ID}..HEAD | grep '^omd/packages/omd/omdlib/' || true"
+
+ - NAME: CHANGED_OMDLIB_TESTS
+ SH: "git diff-tree --no-commit-id --name-only -r ${BASE_COMMIT_ID}..HEAD | grep '^tests/unit/omdlib/' || true"
+
- NAME: CHANGED_PYTHON_FILES
SH: "git diff-tree --no-commit-id --name-only -r ${BASE_COMMIT_ID}..HEAD | xargs realpath | ${WORKSPACE}/scripts/find-python-files --filter"
@@ -35,6 +44,9 @@ VARIABLES:
- NAME: CHANGED_PYLINT_RC_FILE
SH: "echo '${CHANGED_FILES_REL}' | grep '^\\.pylintrc' || true"
+ - NAME: CHANGED_GROOVY_FILE
+ SH: "echo '${CHANGED_FILES_REL}' | grep '\\.groovy$' || true"
+
- NAME: CHANGED_PYPROJECT_TOML_FILE
SH: "echo '${CHANGED_FILES_REL}' | grep '^pyproject.toml' || true"
@@ -44,6 +56,9 @@ VARIABLES:
- NAME: CHANGED_PIPFILE_LOCK
SH: "echo '${CHANGED_FILES_REL}' | grep '^Pipfile.lock' || true"
+ - NAME: CHANGED_FIND_PYTHON_FILES_SCRIPT
+ SH: "echo '${CHANGED_FILES_REL}' | grep '^scripts/find-python-files' || true"
+
- NAME: CHANGED_RUN_BLACK_SCRIPT
SH: "echo '${CHANGED_FILES_REL}' | grep '^scripts/run-black' || true"
@@ -53,24 +68,9 @@ VARIABLES:
- NAME: CHANGED_RUN_PIPENV_SCRIPT
SH: "echo '${CHANGED_FILES_REL}' | grep '^scripts/run-pipenv' || true"
- - NAME: CHANGED_RUN_PRETTIER_SCRIPT
- SH: "echo '${CHANGED_FILES_REL}' | grep '^scripts/run-prettier' || true"
-
- - NAME: CHANGED_RUN_JS_LINT_SCRIPT
- SH: "echo '${CHANGED_FILES_REL}' | grep '^scripts/check-js-lint.sh' || true"
-
- - NAME: CHANGED_RUN_TYPESCRIPT_TYPES_SCRIPT
- SH: "echo '${CHANGED_FILES_REL}' | grep '^scripts/check-typescript-types.sh' || true"
-
- - NAME: CHANGED_TYPESCRIPT_CONFIG
- SH: "echo '${CHANGED_FILES_REL}' | grep '^tsconfig.*.json' || true"
-
- NAME: CHANGED_BAZEL_FILES
SH: "echo '${CHANGED_FILES_REL}' | grep -e '\\.bzl$' -e 'BUILD.*' || true"
- - NAME: CHANGED_WEBPACK_CONFIG
- SH: "echo '${CHANGED_FILES_REL}' | grep '^webpack.config.js' || true"
-
- NAME: CHANGED_AGENT_PLUGINS
SH: "echo '${CHANGED_FILES_REL}' | grep '^agents/plugins/' || true"
REPLACE_NEWLINES: true
@@ -80,19 +80,7 @@ VARIABLES:
REPLACE_NEWLINES: true
- NAME: CHANGED_CMC_PROTO_FILES
- SH: "echo '${CHANGED_FILES_REL}' | grep '^enterprise/protocols/checkmk/cmc' || true"
- REPLACE_NEWLINES: true
-
- - NAME: CHANGED_JS_FILES
- SH: "echo '${CHANGED_FILES_REL}' | grep '^web/htdocs/js/\\|\\.js$\\|\\.ts$' || true"
- REPLACE_NEWLINES: true
-
- - NAME: CHANGED_NPM_NODE_SETUP
- SH: "echo '${CHANGED_FILES_REL}' | grep -e 'package\\(-lock\\)\\?.json$' -e '^scripts/npm-ci$' || true"
- REPLACE_NEWLINES: true
-
- - NAME: CHANGED_CSS_FILES
- SH: "echo '${CHANGED_FILES_REL}' | grep 'web/htdocs/themes/' | sed '/\\.scss$/!d' || true"
+ SH: "echo '${CHANGED_FILES_REL}' | grep '^non-free/cmc-protocols' || true"
REPLACE_NEWLINES: true
- NAME: CHANGED_SW_DOC_FILES
@@ -103,6 +91,10 @@ VARIABLES:
SH: "echo '${CHANGED_FILES_REL}' | grep '.werks/' || true"
REPLACE_NEWLINES: true
+ - NAME: CHANGED_WERK_CODE_FILES
+ SH: "echo '${CHANGED_FILES_REL}' | grep '^packages/cmk-werks\\|^cmk/utils/werks' || true"
+ REPLACE_NEWLINES: true
+
- NAME: CHANGED_CHECKMAN_FILES
SH: "echo '${CHANGED_FILES_REL}' | grep 'checkman/' || true"
REPLACE_NEWLINES: true
@@ -116,9 +108,6 @@ VARIABLES:
- NAME: CHANGED_RE2_FILES
SH: "echo '${CHANGED_FILES_REL}' | grep '^third_party/re2' || true"
- - NAME: CHANGED_IMAGE_TESTING
- SH: "echo '${CHANGED_FILES_REL}' | grep 'buildscripts/docker_image_aliases/IMAGE_TESTING/' || true"
-
########## packages ########################################################
- NAME: CHANGED_CHECK_CERT_FILES
@@ -127,8 +116,8 @@ VARIABLES:
- NAME: CHANGED_CHECK_HTTP_FILES
SH: "echo '${CHANGED_FILES_REL}' | grep '^packages/check-http/' || true"
- - NAME: CHANGED_CHECK_SQL_FILES
- SH: "echo '${CHANGED_FILES_REL}' | grep '^packages/check-sql/' || true"
+ - NAME: CHANGED_MK_SQL_FILES
+ SH: "echo '${CHANGED_FILES_REL}' | grep '^packages/mk-sql/' || true"
- NAME: CHANGED_CMC_FILES
SH: "echo '${CHANGED_FILES_REL}' | grep '^packages/cmc/' || true"
@@ -169,19 +158,25 @@ VARIABLES:
- NAME: CHANGED_UNIXCAT_FILES
SH: "echo '${CHANGED_FILES_REL}' | grep '^packages/unixcat/' || true"
+ - NAME: CHANGED_FRONTEND_VUE_FILES
+ SH: "echo '${CHANGED_FILES_REL}' | grep '^packages/cmk-frontend-vue/' || true"
+
+ - NAME: CHANGED_CMK_FRONTEND_FILES
+ SH: "echo '${CHANGED_FILES_REL}' | grep '^packages/cmk-frontend/' || true"
+
STAGES:
- NAME: 'Python Typing'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_PYTHON_FILES,CHANGED_PIPFILE_LOCK,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT,CHANGED_RUN_MYPY_SCRIPT
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_FIND_PYTHON_FILES_SCRIPT,CHANGED_PYTHON_FILES,CHANGED_PIPFILE_LOCK,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT,CHANGED_RUN_MYPY_SCRIPT
DIR: 'tests'
ENV_VARS:
- MYPY_ADDOPTS: "--no-color-output --junit-xml ${RESULTS}/mypy.xml"
+ MYPY_ADDOPTS: "--no-color-output --junit-xml ${RESULTS}/mypy.xml --no-incremental"
COMMAND: 'make --quiet test-mypy-docker &> ${RESULTS}/python-typing.txt'
TEXT_ON_SKIP: 'No Python files changed'
RESULT_CHECK_TYPE: "MYPY"
RESULT_CHECK_FILE_PATTERN: "results/python-typing.txt"
- NAME: 'Python Format'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_PYTHON_FILES,CHANGED_PIPFILE_LOCK,CHANGED_PYPROJECT_TOML_FILE,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT,CHANGED_RUN_BLACK_SCRIPT
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_FIND_PYTHON_FILES_SCRIPT,CHANGED_PYTHON_FILES,CHANGED_PIPFILE_LOCK,CHANGED_PYPROJECT_TOML_FILE,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT,CHANGED_RUN_BLACK_SCRIPT
DIR: 'tests'
ENV_VARS:
PYTHON_FILES: "${CHANGED_PYTHON_FILES}"
@@ -191,14 +186,24 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/python-format.txt"
- NAME: 'Python Cyclic Dependencies'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_TESTS_MAKEFILE,CHANGED_PIPFILE_LOCK,CHANGED_CMK_WERKS_FILES,CHANGED_PYTHON_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_TESTS_MAKEFILE,CHANGED_PIPFILE_LOCK,CHANGED_CMK_WERKS_FILES,CHANGED_FIND_PYTHON_FILES_SCRIPT,CHANGED_PYTHON_FILES
DIR: 'tests'
COMMAND: 'make test-cycles-docker &> ${RESULTS}/py-import-cycles.txt'
TEXT_ON_SKIP: 'No Python files changed'
RESULT_CHECK_FILE_PATTERN: "results/py-import-cycles.txt"
+ - NAME: 'omdlib Unit Tests'
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_OMDLIB_FILES,CHANGED_OMDLIB_TESTS
+ DIR: 'tests'
+ ENV_VARS:
+ PYTEST_ADDOPTS: "--junitxml=${RESULTS}/python3-omdlib-junit.xml --color=no"
+ COMMAND: "../scripts/run-in-docker.sh make --quiet test-unit-omdlib"
+ TEXT_ON_SKIP: 'No omdlib files changed'
+ RESULT_CHECK_TYPE: "JUNIT"
+ RESULT_CHECK_FILE_PATTERN: "results/python3-omdlib-junit.xml"
+
- NAME: 'Python Unit Tests'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_PYTHON_FILES,CHANGED_PIPFILE_LOCK,CHANGED_PYPROJECT_TOML_FILE,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT,CHANGED_CHECKMAN_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_FIND_PYTHON_FILES_SCRIPT,CHANGED_PYTHON_FILES,CHANGED_PIPFILE_LOCK,CHANGED_PYPROJECT_TOML_FILE,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT,CHANGED_CHECKMAN_FILES
DIR: 'tests'
ENV_VARS:
PYTEST_ADDOPTS: "--junitxml=${RESULTS}/python3-unit-junit.xml --color=no"
@@ -208,7 +213,7 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/python3-unit-junit.xml"
- NAME: 'Python License Headers Test'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_PYTHON_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_FIND_PYTHON_FILES_SCRIPT,CHANGED_PYTHON_FILES
DIR: 'tests'
ENV_VARS:
PYTEST_ADDOPTS: "--junitxml=${RESULTS}/python3-license-headers-junit.xml --color=no"
@@ -219,7 +224,7 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/python3-license-headers-junit.xml"
- NAME: 'Python Pipfile Test'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_PIPFILE,CHANGED_PIPFILE_LOCK,CHANGED_PYTHON_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_PIPFILE,CHANGED_PIPFILE_LOCK,CHANGED_FIND_PYTHON_FILES_SCRIPT,CHANGED_PYTHON_FILES
DIR: 'tests'
ENV_VARS:
PYTEST_ADDOPTS: "--junitxml=${RESULTS}/python3-pipfile-junit.xml --color=no"
@@ -229,18 +234,23 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/python3-pipfile-junit.xml"
- NAME: 'Python File Content Test'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_FILES_REL
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_FILES_REL
DIR: 'tests'
ENV_VARS:
PYTEST_ADDOPTS: "--junitxml=${RESULTS}/python3-file-content-junit.xml --color=no"
- CHANGED_FILES_REL: "${CHANGED_FILES_REL}"
- COMMAND: "make test-file-content-docker"
+ # ${CHANGED_FILES_REL} can be huge, so we use a file to transport the list
+ # to the pytest command. This will be tidied up with CMK-17048
+ COMMAND: |
+ CHANGED_FILES_REL_FILE=$(mktemp)
+ echo "${CHANGED_FILES_REL}" > $CHANGED_FILES_REL_FILE
+ trap "rm -f $CHANGED_FILES_REL_FILE" EXIT
+ make test-file-content-docker
TEXT_ON_SKIP: 'No files changed'
RESULT_CHECK_TYPE: "JUNIT"
RESULT_CHECK_FILE_PATTERN: "results/python3-file-content-junit.xml"
- NAME: 'Python Werks Test'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_WERK_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_WERK_FILES
DIR: 'tests'
ENV_VARS:
PYTEST_ADDOPTS: "--junitxml=${RESULTS}/python3-werks-junit.xml --color=no"
@@ -250,7 +260,7 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/python3-werks-junit.xml"
- NAME: 'Python Werks validate'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_WERK_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_WERK_FILES
DIR: ''
ENV_VARS:
CHANGED_WERK_FILES: "${CHANGED_WERK_FILES}"
@@ -258,8 +268,23 @@ STAGES:
TEXT_ON_SKIP: 'No Werk files changed'
RESULT_CHECK_FILE_PATTERN: "results/werk_validate.txt"
+ - NAME: 'Python Werks Commands'
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_WERK_CODE_FILES
+ DIR: ''
+ COMMAND: |
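+ # Smoke-test the werk CLI subcommands (collect, announce, precompile,
+ # changelog, mail, list) against the current checkout: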
+ scripts/run-in-docker.sh scripts/run-pipenv run python3 -m cmk.utils.werks collect cmk . --substitute-branches $(git symbolic-ref --short HEAD):HEAD &> ${RESULTS}/werk_commands.txt
+ scripts/run-in-docker.sh scripts/run-pipenv run python3 -m cmk.utils.werks announce .werks $(make print-VERSION) --format md &>> ${RESULTS}/werk_commands.txt
+ scripts/run-in-docker.sh scripts/run-pipenv run python3 -m cmk.utils.werks announce .werks $(make print-VERSION) --format txt &>> ${RESULTS}/werk_commands.txt
+ scripts/run-in-docker.sh scripts/run-pipenv run python3 -m cmk.utils.werks precompile .werks precompiled.json &>> ${RESULTS}/werk_commands.txt
+ scripts/run-in-docker.sh scripts/run-pipenv run python3 -m cmk.utils.werks changelog CHANGELOG precompiled.json &>> ${RESULTS}/werk_commands.txt
+ # We don't have access to the fixup notes in CI; we might have to fetch them or temporarily disable this check:
+ scripts/run-in-docker.sh scripts/run-pipenv run python3 -m cmk.utils.werks mail . HEAD werk_mail --assume-no-notes-but=$(git log --before="$(date --date="4 weeks ago" --iso=seconds)" --format="%H" --max-count=1) &>> ${RESULTS}/werk_commands.txt
+ scripts/run-in-docker.sh scripts/run-pipenv run ./werk list &>> ${RESULTS}/werk_commands.txt
+ TEXT_ON_SKIP: 'No Werk code files changed'
+ RESULT_CHECK_FILE_PATTERN: "results/werk_commands.txt"
+
- NAME: 'Python Pylint'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_PYLINT_RC_FILE,CHANGED_PYTHON_FILES,CHANGED_PIPFILE_LOCK,CHANGED_PYPROJECT_TOML_FILE,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_PYLINT_RC_FILE,CHANGED_FIND_PYTHON_FILES_SCRIPT,CHANGED_PYTHON_FILES,CHANGED_PIPFILE_LOCK,CHANGED_PYPROJECT_TOML_FILE,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT
DIR: 'tests'
ENV_VARS:
PYLINT_ARGS: "--jobs=10 --output-format=parseable"
@@ -269,23 +294,23 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/pylint.txt"
- NAME: 'Python Bandit'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_PYTHON_FILES,CHANGED_CMK_WERKS_FILES,CHANGED_PIPFILE_LOCK,CHANGED_PYPROJECT_TOML_FILE,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_FIND_PYTHON_FILES_SCRIPT,CHANGED_PYTHON_FILES,CHANGED_CMK_WERKS_FILES,CHANGED_PIPFILE_LOCK,CHANGED_PYPROJECT_TOML_FILE,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PIPENV_SCRIPT
DIR: 'tests'
ENV_VARS:
- BANDIT_OUTPUT_ARGS: "-f xml -o ${RESULTS}/bandit_results.xml"
+ BANDIT_OUTPUT_ARGS: "-f xml -o ${RESULTS}/bandit_results.xml"
COMMAND: "make test-bandit-docker"
TEXT_ON_SKIP: 'No Python files changed'
RESULT_CHECK_TYPE: "JUNIT"
RESULT_CHECK_FILE_PATTERN: "results/bandit_results.xml"
- NAME: 'Shell Format'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_SHELL_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_SHELL_FILES
DIR: 'tests'
COMMAND: "make --quiet test-format-shell &> ${RESULTS}/shell-format.txt"
RESULT_CHECK_FILE_PATTERN: "results/shell-format.txt"
- NAME: 'Shellcheck'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_SHELL_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_SHELL_FILES
DIR: 'tests'
COMMAND: "make test-shellcheck-docker &> ${RESULTS}/shellcheck.txt"
TEXT_ON_SKIP: 'No Shell files changed'
@@ -293,54 +318,15 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/shellcheck.txt"
- NAME: 'Shell Unit Tests'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_SHELL_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_SHELL_FILES
DIR: 'tests'
COMMAND: "make test-unit-shell-docker &> ${RESULTS}/shell-unit.txt"
TEXT_ON_SKIP: 'No Shell files changed'
RESULT_CHECK_TYPE: "SHELLUNIT"
RESULT_CHECK_FILE_PATTERN: "results/shell-unit.txt"
- - NAME: 'JavaScript Format'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_JS_FILES,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PRETTIER_SCRIPT,CHANGED_NPM_NODE_SETUP
- DIR: 'tests'
- COMMAND: "make --quiet test-format-js-docker &> ${RESULTS}/js-prettier.txt"
- TEXT_ON_SKIP: 'No JavaScript files changed'
- RESULT_CHECK_TYPE: "TSJSFORMAT"
- RESULT_CHECK_FILE_PATTERN: "results/js-prettier.txt"
-
- - NAME: 'JavaScript Build'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_JS_FILES,CHANGED_TESTS_MAKEFILE,CHANGED_WEBPACK_CONFIG,CHANGED_NPM_NODE_SETUP
- DIR: 'tests'
- COMMAND: 'make test-build-js-docker &> ${RESULTS}/js-build.txt'
- TEXT_ON_SKIP: 'No relevant JS/TS files changed'
- RESULT_CHECK_TYPE: "TSJSBUILD"
- RESULT_CHECK_FILE_PATTERN: "results/js-build.txt"
-
- - NAME: 'JavaScript Lint'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_JS_FILES,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_JS_LINT_SCRIPT,CHANGED_NPM_NODE_SETUP
- DIR: 'tests'
- COMMAND: 'make test-lint-js-docker &> ${RESULTS}/js-lint.txt'
- TEXT_ON_SKIP: 'No relevant JS/TS files changed'
- RESULT_CHECK_FILE_PATTERN: "results/js-lint.txt"
-
- - NAME: 'TypeScript Type Checking'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_JS_FILES,CHANGED_TESTS_MAKEFILE,CHANGED_TYPESCRIPT_CONFIG,CHANGED_RUN_TYPESCRIPT_TYPES_SCRIPT,CHANGED_NPM_NODE_SETUP
- DIR: 'tests'
- COMMAND: 'make test-typescript-types-docker &> ${RESULTS}/js-types.txt'
- TEXT_ON_SKIP: 'No relevant JS/TS files changed'
- RESULT_CHECK_TYPE: "TSJSTYPES"
- RESULT_CHECK_FILE_PATTERN: "results/js-types.txt"
-
- - NAME: 'CSS Format'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CSS_FILES,CHANGED_TESTS_MAKEFILE,CHANGED_RUN_PRETTIER_SCRIPT,CHANGED_NPM_NODE_SETUP
- DIR: 'tests'
- COMMAND: "make --quiet test-format-css-docker &> ${RESULTS}/css-prettier.txt"
- TEXT_ON_SKIP: 'No CSS files changed'
- RESULT_CHECK_TYPE: "CSSFORMAT"
- RESULT_CHECK_FILE_PATTERN: "results/css-prettier.txt"
-
- NAME: 'Bazel Format'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_BAZEL_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_BAZEL_FILES
DIR: 'tests'
COMMAND: "make --quiet test-format-bazel-docker &> ${RESULTS}/bazel-prettier.txt"
TEXT_ON_SKIP: 'No Bazel files changed'
@@ -348,22 +334,32 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/bazel-prettier.txt"
- NAME: 'Bazel Lint'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_BAZEL_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_BAZEL_FILES
DIR: 'tests'
COMMAND: "make --quiet test-lint-bazel-docker &> ${RESULTS}/bazel-lint.txt"
TEXT_ON_SKIP: 'No Bazel files changed'
RESULT_CHECK_TYPE: "BAZELLINT"
RESULT_CHECK_FILE_PATTERN: "results/bazel-lint.txt"
+ - NAME: 'Groovy Lint'
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_GROOVY_FILE
+ DIR: 'tests'
+ ENV_VARS:
+ GROOVYLINT_OUTPUT_ARGS: "-o ${RESULTS}/groovy-lint.xml"
+ COMMAND: "make --quiet test-lint-groovy-docker"
+ TEXT_ON_SKIP: 'No Groovy files changed'
+ RESULT_CHECK_TYPE: "GROOVY"
+ RESULT_CHECK_FILE_PATTERN: "results/groovy-lint.xml"
+
- NAME: 'Agent Plugin Unit Tests'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_AGENT_PLUGINS,CHANGED_AGENT_PLUGINS_TESTS,CHANGED_TESTS_MAKEFILE
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_AGENT_PLUGINS,CHANGED_AGENT_PLUGINS_TESTS,CHANGED_TESTS_MAKEFILE
DIR: 'tests'
COMMAND: "make test-agent-plugin-docker > ${RESULTS}/agent-plugin-unit-junit.txt"
TEXT_ON_SKIP: 'No agent files changed'
RESULT_CHECK_FILE_PATTERN: "results/agent-plugin-unit-junit.txt"
- NAME: 'Software Documentation Generation'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_SW_DOC_FILES,CHANGED_MAKEFILE
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_SW_DOC_FILES,CHANGED_MAKEFILE
COMMAND: "make sw-documentation-docker &> ${RESULTS}/sw-documentation.txt"
TEXT_ON_SKIP: 'No SW Documentation files changed'
RESULT_CHECK_FILE_PATTERN: "results/sw-documentation.txt"
@@ -371,35 +367,26 @@ STAGES:
########## packages ########################################################
- NAME: 'Package check-cert'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CHECK_CERT_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CHECK_CERT_FILES
DIR: 'packages/check-cert'
- COMMAND: "../../scripts/run-in-docker.sh ./run --setup-environment --all &> ${RESULTS}/check-cert.txt"
+ COMMAND: "../../scripts/run-in-docker.sh ./run --setup-environment --all --features=vendored &> ${RESULTS}/check-cert.txt"
TEXT_ON_SKIP: 'No check-cert files changed'
RESULT_CHECK_FILE_PATTERN: "results/check-cert.txt"
- NAME: 'Package check-http'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CHECK_HTTP_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CHECK_HTTP_FILES
DIR: 'packages/check-http'
- COMMAND: "../../scripts/run-in-docker.sh ./run --setup-environment --all &> ${RESULTS}/check-http.txt"
+ COMMAND: "../../scripts/run-in-docker.sh ./run --setup-environment --all --features=reqwest/native-tls-vendored &> ${RESULTS}/check-http.txt"
TEXT_ON_SKIP: 'No check-http files changed'
RESULT_CHECK_FILE_PATTERN: "results/check-http.txt"
- - NAME: 'Package check-sql'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CHECK_SQL_FILES
- SEC_VAR_LIST:
- - 'CI_TEST_SQL_DB_ENDPOINT'
- DIR: 'packages/check-sql'
- COMMAND: "../../scripts/run-in-docker.sh ./run --setup-environment --all &> ${RESULTS}/check-sql.txt"
- TEXT_ON_SKIP: 'No check-sql files changed'
- RESULT_CHECK_FILE_PATTERN: "results/check-sql.txt"
-
# NOTE: This has to come *before* the neb & cmc packages, as they depend on
# livestatus. Their run scripts rebuild livestatus, "eating away" compiler
# invocations, leading to IWYU/clang-tidy/... not being called on enough
# files. The ordering is a bit ad hoc, perhaps we should keep the list of
# steps here in dependency order.
- NAME: 'Package livestatus'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_LIVESTATUS_FILES,CHANGED_ASIO_FILES,CHANGED_GOOGLETEST_FILES,CHANGED_RE2_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_LIVESTATUS_FILES,CHANGED_ASIO_FILES,CHANGED_GOOGLETEST_FILES,CHANGED_RE2_FILES
DIR: 'packages/livestatus'
COMMAND: "GCC_TOOLCHAIN=/opt/gcc-13.2.0 ../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/livestatus.txt"
TEXT_ON_SKIP: 'No Livestatus files changed'
@@ -407,7 +394,7 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/livestatus.txt"
- NAME: 'Package cmc'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CMC_FILES,CHANGED_CMC_PROTO_FILES,CHANGED_LIVESTATUS_FILES,CHANGED_ASIO_FILES,CHANGED_GOOGLETEST_FILES,CHANGED_RE2_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMC_FILES,CHANGED_CMC_PROTO_FILES,CHANGED_LIVESTATUS_FILES,CHANGED_ASIO_FILES,CHANGED_GOOGLETEST_FILES,CHANGED_RE2_FILES
DIR: 'packages/cmc'
COMMAND: "GCC_TOOLCHAIN=/opt/gcc-13.2.0 ../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmc.txt"
TEXT_ON_SKIP: 'No cmc files changed'
@@ -415,70 +402,86 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/cmc.txt"
- NAME: 'Package cmk-agent-based'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CMK_AGENT_BASED_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMK_AGENT_BASED_FILES
DIR: 'packages/cmk-agent-based'
COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-agent-based.txt"
TEXT_ON_SKIP: 'No cmk-agent-based files changed'
RESULT_CHECK_FILE_PATTERN: "results/cmk-agent-based.txt"
- NAME: 'Package cmk-agent-ctl'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CMK_AGENT_CTL_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMK_AGENT_CTL_FILES
DIR: 'packages/cmk-agent-ctl'
COMMAND: "../../scripts/run-in-docker.sh ./run --setup-environment --all &> ${RESULTS}/cmk-agent-ctl.txt"
TEXT_ON_SKIP: 'No cmk-agent-ctl files changed'
RESULT_CHECK_FILE_PATTERN: "results/cmk-agent-ctl.txt"
- NAME: 'Package cmk-agent-receiver'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CMK_AGENT_RECEIVER_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMK_AGENT_RECEIVER_FILES
DIR: 'packages/cmk-agent-receiver'
COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-agent-receiver.txt"
TEXT_ON_SKIP: 'No cmk-agent-receiver files changed'
RESULT_CHECK_FILE_PATTERN: "results/cmk-agent-receiver.txt"
- NAME: 'Package cmk-graphing'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CMK_GRAPHING_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMK_GRAPHING_FILES
DIR: 'packages/cmk-graphing'
COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-graphing.txt"
TEXT_ON_SKIP: 'No cmk-graphing files changed'
RESULT_CHECK_FILE_PATTERN: "results/cmk-graphing.txt"
- - NAME: 'Package cmk-mkp-tool'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CMK_MKP_TOOL_FILES
- DIR: 'packages/cmk-mkp-tool'
- COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-mkp-tool.txt"
- TEXT_ON_SKIP: 'No cmk-mkp-tool files changed'
- RESULT_CHECK_FILE_PATTERN: "results/cmk-mkp-tool.txt"
-
- NAME: 'Package cmk-livestatus-client'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CMK_LIVESTATUS_CLIENT_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMK_LIVESTATUS_CLIENT_FILES
DIR: 'packages/cmk-livestatus-client'
COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-livestatus-client.txt"
TEXT_ON_SKIP: 'No cmk-livestatus-client files changed'
RESULT_CHECK_FILE_PATTERN: "results/cmk-livestatus-client.txt"
+ - NAME: 'Package cmk-mkp-tool'
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMK_MKP_TOOL_FILES
+ DIR: 'packages/cmk-mkp-tool'
+ COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-mkp-tool.txt"
+ TEXT_ON_SKIP: 'No cmk-mkp-tool files changed'
+ RESULT_CHECK_FILE_PATTERN: "results/cmk-mkp-tool.txt"
+
- NAME: 'Package cmk-rulesets'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_RULESETS_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_RULESETS_FILES
DIR: 'packages/cmk-rulesets'
COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-rulesets.txt"
TEXT_ON_SKIP: 'No cmk-rulesets files changed'
RESULT_CHECK_FILE_PATTERN: "results/cmk-rulesets.txt"
- NAME: 'Package cmk-server-side-calls'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CMK_SERVER_SIDE_CALLS_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMK_SERVER_SIDE_CALLS_FILES
DIR: 'packages/cmk-server-side-calls'
COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-server-side-calls.txt"
TEXT_ON_SKIP: 'No cmk-server-side-calls files changed'
RESULT_CHECK_FILE_PATTERN: "results/cmk-server-side-calls.txt"
- NAME: 'Package cmk-werks'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_CMK_WERKS_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMK_WERKS_FILES
DIR: 'packages/cmk-werks'
COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-werks.txt"
TEXT_ON_SKIP: 'No cmk-werks package files changed'
RESULT_CHECK_FILE_PATTERN: "results/cmk-werks.txt"
+ - NAME: 'Package cmk-frontend-vue'
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_FRONTEND_VUE_FILES
+ DIR: 'packages/cmk-frontend-vue'
+ COMMAND: "../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/cmk-frontend-vue.txt"
+ TEXT_ON_SKIP: 'No cmk-frontend-vue files changed'
+ RESULT_CHECK_FILE_PATTERN: "results/cmk-frontend-vue.txt"
+
+ - NAME: 'Package mk-sql'
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_MK_SQL_FILES
+ SEC_VAR_LIST:
+ - 'CI_TEST_SQL_DB_ENDPOINT'
+ DIR: 'packages/mk-sql'
+ COMMAND: "../../scripts/run-in-docker.sh ./run --setup-environment --all &> ${RESULTS}/mk-sql.txt"
+ TEXT_ON_SKIP: 'No mk-sql files changed'
+ RESULT_CHECK_FILE_PATTERN: "results/mk-sql.txt"
+
- NAME: 'Package neb'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_NEB_FILES,CHANGED_LIVESTATUS_FILES,CHANGED_ASIO_FILES,CHANGED_GOOGLETEST_FILES,CHANGED_RE2_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_NEB_FILES,CHANGED_LIVESTATUS_FILES,CHANGED_ASIO_FILES,CHANGED_GOOGLETEST_FILES,CHANGED_RE2_FILES
DIR: 'packages/neb'
COMMAND: "GCC_TOOLCHAIN=/opt/gcc-13.2.0 ../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/neb.txt"
TEXT_ON_SKIP: 'No neb files changed'
@@ -486,9 +489,16 @@ STAGES:
RESULT_CHECK_FILE_PATTERN: "results/neb.txt"
- NAME: 'Package unixcat'
- ONLY_WHEN_NOT_EMPTY: CHANGED_IMAGE_TESTING,CHANGED_UNIXCAT_FILES
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_UNIXCAT_FILES
DIR: 'packages/unixcat'
COMMAND: "GCC_TOOLCHAIN=/opt/gcc-13.2.0 ../../scripts/run-in-docker.sh ./run --all &> ${RESULTS}/unixcat.txt"
TEXT_ON_SKIP: 'No unixcat files changed'
RESULT_CHECK_TYPE: "GCC"
RESULT_CHECK_FILE_PATTERN: "results/unixcat.txt"
+
+ - NAME: 'Package cmk-frontend'
+ ONLY_WHEN_NOT_EMPTY: CHANGED_REFERENCE_IMAGE,CHANGED_CMK_FRONTEND_FILES
+ DIR: 'packages/cmk-frontend'
+ COMMAND: "../../scripts/run-in-docker.sh ./run --setup-environment --all &> ${RESULTS}/cmk-frontend.txt"
+ TEXT_ON_SKIP: 'No cmk-frontend files changed'
+ RESULT_CHECK_FILE_PATTERN: "results/cmk-frontend.txt"
diff --git a/buildscripts/scripts/test-agent-controller.groovy b/buildscripts/scripts/test-agent-controller.groovy
deleted file mode 100644
index 054b05b55ea..00000000000
--- a/buildscripts/scripts/test-agent-controller.groovy
+++ /dev/null
@@ -1,14 +0,0 @@
-#!groovy
-
-/// file: test-agent-controller.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Compile & Test Agent Controller') {
- sh("packages/cmk-agent-ctl/run --setup-environment --clean --all");
- }
- }
- }
-}
-return this;
diff --git a/buildscripts/scripts/test-agent-plugin-unit.groovy b/buildscripts/scripts/test-agent-plugin-unit.groovy
index 528f3902706..2cefc5c4f62 100644
--- a/buildscripts/scripts/test-agent-plugin-unit.groovy
+++ b/buildscripts/scripts/test-agent-plugin-unit.groovy
@@ -6,34 +6,36 @@ def get_agent_plugin_python_versions(String git_dir=".") {
dir(git_dir) {
def versions = (cmd_output("make --no-print-directory --file=defines.make print-AGENT_PLUGIN_PYTHON_VERSIONS")
?: raise("Could not read AGENT_PLUGIN_PYTHON_VERSIONS from defines.make"));
- return versions.split(" ")
+ return versions.split(" ");
}
}
-
def main() {
def python_versions = get_agent_plugin_python_versions(checkout_dir);
- def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def docker_args = "-v /var/run/docker.sock:/var/run/docker.sock --group-add=${get_docker_group_id()}";
-
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside(docker_args) {
- dir("${checkout_dir}") {
- // pre-create virtual environments before parallel execution
- stage("prepare virtual environment") {
- sh("make .venv");
- }
- def test_builds = python_versions.collectEntries { python_version ->
- [(python_version) : {
- stage("Test for python${python_version}") {
+ inside_container(
+ set_docker_group_id: true,
+ priviliged: true,
+ ) {
+ dir("${checkout_dir}") {
+ // pre-create virtual environments before parallel execution
+ stage("prepare virtual environment") {
+ sh("make .venv");
+ }
+ def test_builds = python_versions.collectEntries { python_version ->
+ [(python_version) : {
+ stage("Test for python${python_version}") {
+ // Here we need the docker registry as we are using python:VERSION docker images
+ // which are stored on nexus.
+ docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
sh("make -C tests test-agent-plugin-unit-py${python_version}-docker");
}
- }]
- }
- parallel test_builds;
+ }
+ }]
}
+ parallel test_builds;
}
}
}
+
return this;
diff --git a/buildscripts/scripts/test-bazel-format.groovy b/buildscripts/scripts/test-bazel-format.groovy
index 71baa3f770c..4ad680c03c7 100644
--- a/buildscripts/scripts/test-bazel-format.groovy
+++ b/buildscripts/scripts/test-bazel-format.groovy
@@ -9,7 +9,7 @@ def main() {
test_jenkins_helper.execute_test([
name: "test-bazel-format",
cmd: "make -C tests test-format-bazel-docker",
- output_file: "bazel-prettier.txt"
+ output_file: "bazel-prettier.txt",
]);
test_jenkins_helper.analyse_issues("BAZELFORMAT", "bazel-prettier.txt");
diff --git a/buildscripts/scripts/test-bazel-lint.groovy b/buildscripts/scripts/test-bazel-lint.groovy
index fa1bed6dfba..46631ea2bd1 100644
--- a/buildscripts/scripts/test-bazel-lint.groovy
+++ b/buildscripts/scripts/test-bazel-lint.groovy
@@ -9,7 +9,7 @@ def main() {
test_jenkins_helper.execute_test([
name: "test-bazel-lint",
cmd: "make -C tests test-lint-bazel-docker",
- output_file: "bazel-lint.txt"
+ output_file: "bazel-lint.txt",
]);
test_jenkins_helper.analyse_issues("BAZELLINT", "bazel-lint.txt");
diff --git a/buildscripts/scripts/test-check-sql.groovy b/buildscripts/scripts/test-check-sql.groovy
deleted file mode 100644
index 4e9987f3e0f..00000000000
--- a/buildscripts/scripts/test-check-sql.groovy
+++ /dev/null
@@ -1,19 +0,0 @@
-#!groovy
-
-/// file: test-check-sql.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Compile & Test Check SQL') {
- withCredentials([string(
- credentialsId: "CI_TEST_SQL_DB_ENDPOINT",
- variable:"CI_TEST_SQL_DB_ENDPOINT"
- )]) {
- sh("packages/check-sql/run --setup-environment --clean --all");
- }
- }
- }
- }
-}
-return this;
diff --git a/buildscripts/scripts/test-cmc.groovy b/buildscripts/scripts/test-cmc.groovy
deleted file mode 100644
index cd2ce157f15..00000000000
--- a/buildscripts/scripts/test-cmc.groovy
+++ /dev/null
@@ -1,26 +0,0 @@
-#!groovy
-
-/// file: test-cmc.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Compile & Test CMC') {
- sh("GCC_TOOLCHAIN=/opt/gcc-13.2.0 packages/cmc/run --clean --all");
- }
- }
- stage("Analyse Issues") {
- publishIssues(
- issues: [scanForIssues( tool: gcc())],
- trendChartType: 'TOOLS_ONLY',
- qualityGates: [[
- threshold: 1,
- type: 'TOTAL',
- unstable: false,
- ]],
- );
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-composition.groovy b/buildscripts/scripts/test-composition.groovy
index 1480b024206..b2b2fcb969a 100644
--- a/buildscripts/scripts/test-composition.groovy
+++ b/buildscripts/scripts/test-composition.groovy
@@ -13,6 +13,7 @@ def main() {
"EDITION",
"VERSION",
"OVERRIDE_DISTROS",
+ "USE_CASE"
]);
check_environment_variables([
@@ -22,7 +23,7 @@ def main() {
def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
def testing_helper = load("${checkout_dir}/buildscripts/scripts/utils/integration.groovy");
- def distros = versioning.configured_or_overridden_distros(EDITION, OVERRIDE_DISTROS, "daily_tests");
+ def distros = versioning.get_distros(edition: EDITION, use_case: "daily_tests", override: OVERRIDE_DISTROS);
def branch_name = versioning.safe_branch_name(scm);
def branch_version = versioning.get_branch_version(checkout_dir);
@@ -44,19 +45,21 @@ def main() {
// TODO: don't run make test-composition-docker but use docker.inside() instead
stage('test cmk-docker integration') {
- testing_helper.run_make_targets(
- DOCKER_GROUP_ID: get_docker_group_id(),
- DISTRO_LIST: distros,
- EDITION: EDITION,
- VERSION: VERSION,
- DOCKER_TAG: versioning.select_docker_tag(
- branch_name,
- DOCKER_TAG,
- DOCKER_TAG), // FIXME was DOCKER_TAG_DEFAULT before
- MAKE_TARGET: "test-composition-docker",
- BRANCH: branch_name, // FIXME was BRANCH before
- cmk_version: versioning.get_cmk_version(branch_name, branch_version, VERSION),
- )
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
+ testing_helper.run_make_targets(
+ DOCKER_GROUP_ID: get_docker_group_id(),
+ DISTRO_LIST: distros,
+ EDITION: EDITION,
+ VERSION: VERSION,
+ DOCKER_TAG: versioning.select_docker_tag(
+ branch_name,
+ DOCKER_TAG,
+ DOCKER_TAG), // FIXME was DOCKER_TAG_DEFAULT before
+ MAKE_TARGET: "test-composition-docker",
+ BRANCH: branch_name, // FIXME was BRANCH before
+ cmk_version: versioning.get_cmk_version(branch_name, branch_version, VERSION),
+ );
+ }
}
}
diff --git a/buildscripts/scripts/test-css-format.groovy b/buildscripts/scripts/test-css-format.groovy
deleted file mode 100644
index aa7435ca9ee..00000000000
--- a/buildscripts/scripts/test-css-format.groovy
+++ /dev/null
@@ -1,19 +0,0 @@
-#!groovy
-
-/// file: test-css-format.groovy
-
-def main() {
- def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
-
- dir("${checkout_dir}") {
- test_jenkins_helper.execute_test([
- name: "test-javascript-lint",
- cmd: "make -C tests test-format-css-docker",
- output_file: "css-prettier.txt"
- ]);
-
- test_jenkins_helper.analyse_issues("CSSFORMAT", "css-prettier.txt");
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-extension-compatibility.groovy b/buildscripts/scripts/test-extension-compatibility.groovy
index b4078cd10d0..dd82fb430df 100644
--- a/buildscripts/scripts/test-extension-compatibility.groovy
+++ b/buildscripts/scripts/test-extension-compatibility.groovy
@@ -3,51 +3,130 @@
/// file: test-extension-compatibility.groovy
def main() {
- def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def testing_helper = load("${checkout_dir}/buildscripts/scripts/utils/integration.groovy");
- def branch_name = versioning.safe_branch_name(scm);
- def branch_version = versioning.get_branch_version(checkout_dir);
+ check_job_parameters([
+ ["EDITION", true], // the testees package long edition string (e.g. 'enterprise')
+ ["DISTRO", true], // the testees package distro string (e.g. 'ubuntu-22.04')
+ // "DOCKER_TAG_BUILD", // test base image tag (todo)
+ // "DISABLE_CACHE", // forwarded to package build job (todo)
+ ]);
check_environment_variables([
+ "DOCKER_REGISTRY",
"DOCKER_TAG",
]);
- stage("Check for extension actuality") {
- dir("${checkout_dir}") {
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- sh("""
- scripts/run-pipenv run \
- tests/extension_compatibility/output_popular_extensions.py > /tmp/extension_compatibility.txt
- diff -u --color \
- tests/extension_compatibility/current_extensions_under_test.txt \
- /tmp/extension_compatibility.txt \
- || { cat < tests/extension_compatibility/current_extensions_under_test.txt
+ def make_target = "test-extension-compatibility-docker";
+ currentBuild.description += (
+ """
+ |Run extension compatibility tests
+ |safe_branch_name: ${safe_branch_name}
+ |branch_version: ${branch_version}
+ |cmk_version: ${cmk_version}
+ |docker_tag: ${docker_tag}
+ |edition: ${edition}
+ |distro: ${distro}
+ |make_target: ${make_target}
+ """.stripMargin());
-HERE
-}
- """);
+ print(
+ """
+ |===== CONFIGURATION ===============================
+ |safe_branch_name:...... │${safe_branch_name}│
+ |branch_version:........ │${branch_version}│
+ |cmk_version:........... │${cmk_version}│
+ |docker_tag:............ │${docker_tag}│
+ |edition:............... │${edition}│
+ |distro:................ │${distro}│
+ |make_target:........... │${make_target}│
+ |===================================================
+ """.stripMargin());
+
+ // todo: add upstream project to description
+ // todo: add error to description
+ // todo: build progress mins?
+
+ stage("Prepare workspace") {
+ inside_container(
+ args: [
+ "--env HOME=/home/jenkins",
+ ],
+ set_docker_group_id: true,
+ ulimit_nofile: 1024,
+ mount_credentials: true,
+ priviliged: true,
+ ) {
+ dir("${checkout_dir}") {
+ // Clean up the test results directory before starting the test to prevent
+ // previous runs from affecting the current one.
+ sh("rm -rf ${WORKSPACE}/test-results");
+
+ // Initialize our virtual environment before parallelization
+ sh("make .venv");
+
+ stage("Fetch Checkmk package") {
+ upstream_build(
+ relative_job_name: "builders/build-cmk-distro-package",
+ build_params: [
+ /// currently CUSTOM_GIT_REF must match, but in the future
+ /// we should define dependency paths for build-cmk-distro-package
+ CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
+ EDITION: edition,
+ DISTRO: distro,
+ ],
+ dest: "package_download",
+ );
+ }
+ try {
+ stage("Run `make ${make_target}`") {
+ dir("${checkout_dir}/tests") {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
+ sh("""
+ RESULT_PATH='${WORKSPACE}/test-results/${distro}' \
+ EDITION='${edition}' \
+ DOCKER_TAG='${docker_tag}' \
+ VERSION="daily" \
+ DISTRO='${distro}' \
+ make ${make_target}
+ """);
+ }
+ }
+ }
+ } finally {
+ stage("Archive / process test reports") {
+ dir("${WORKSPACE}") {
+ show_duration("archiveArtifacts") {
+ archiveArtifacts("test-results/**");
+ }
+ xunit([Custom(
+ customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/junit.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )]);
+ }
+ }
}
}
}
}
-
- testing_helper.run_make_targets(
- DOCKER_GROUP_ID: get_docker_group_id(),
- DISTRO_LIST: ["ubuntu-20.04"],
- EDITION: "enterprise",
- VERSION: "git",
- DOCKER_TAG: versioning.select_docker_tag(
- branch_name,
- "",
- ""), // FIXME was DOCKER_TAG_DEFAULT before
- MAKE_TARGET: "test-extension-compatibility-docker",
- BRANCH: branch_name,
- cmk_version: versioning.get_cmk_version(branch_name, branch_version, "daily"),
- );
}
+
return this;
diff --git a/buildscripts/scripts/test-gerrit.groovy b/buildscripts/scripts/test-gerrit.groovy
index 4c3edc0d5b9..2d4dac4639a 100644
--- a/buildscripts/scripts/test-gerrit.groovy
+++ b/buildscripts/scripts/test-gerrit.groovy
@@ -7,7 +7,6 @@ def main() {
// no `def` - must be global
test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
def result_dir = "${checkout_dir}/results";
- def issues = [];
def time_job_started = new Date();
def time_stage_started = time_job_started;
@@ -38,8 +37,11 @@ def main() {
stage("Prepare workspace") {
dir("${checkout_dir}") {
- sh("scripts/run-in-docker.sh buildscripts/scripts/ensure-workspace-integrity")
- sh("rm -rf ${result_dir}; mkdir ${result_dir}")
+
+ inside_container() {
+ sh("buildscripts/scripts/ensure-workspace-integrity");
+ }
+ sh("rm -rf ${result_dir}; mkdir ${result_dir}");
/// Reason for the following try/catch block:
/// Jenkins will abort jobs (e.g. in case of a new patch set) with SIGKILL (at least this is what we think)
@@ -114,7 +116,7 @@ def main() {
pattern: "results/*junit.xml",
skipNoTestFiles: true,
stopProcessingIfError: true,
- )])
+ )]);
show_duration("archiveArtifacts") {
archiveArtifacts(allowEmptyArchive: true, artifacts: 'results/*');
@@ -124,4 +126,5 @@ def main() {
time_stage_started = test_gerrit_helper.log_stage_duration(time_stage_started);
}
}
+
return this;
diff --git a/buildscripts/scripts/test-github-actions.groovy b/buildscripts/scripts/test-github-actions.groovy
index 7b139fa28b4..42620e8d77a 100644
--- a/buildscripts/scripts/test-github-actions.groovy
+++ b/buildscripts/scripts/test-github-actions.groovy
@@ -4,23 +4,24 @@
def main() {
def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def docker_args = "--ulimit nofile=1024:1024 --init";
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside(docker_args) {
- dir("${checkout_dir}") {
- stage('Prepare checkout folder') {
- versioning.delete_non_cre_files();
- }
- targets = cmd_output(
- "grep target: .github/workflows/pr.yaml | cut -f2 -d':'"
- ).split("\n").collect({target -> target.trim()})
- targets.each({target ->
- stage(target) {
- sh("make -C tests ${target}");
- }
- })
+ inside_container(
+ ulimit_nofile: 1024,
+ init: true,
+ ) {
+ dir("${checkout_dir}") {
+ stage('Prepare checkout folder') {
+ versioning.delete_non_cre_files();
}
+ targets = cmd_output(
+ "grep target: .github/workflows/pr.yaml | cut -f2 -d':'"
+ ).split("\n").collect({target -> target.trim()})
+ targets.each({target ->
+ stage(target) {
+ sh("make -C tests ${target}");
+ }
+ })
}
}
}
+
return this;
diff --git a/buildscripts/scripts/test-groovy-lint.groovy b/buildscripts/scripts/test-groovy-lint.groovy
new file mode 100644
index 00000000000..6fd7aced54d
--- /dev/null
+++ b/buildscripts/scripts/test-groovy-lint.groovy
@@ -0,0 +1,18 @@
+#!groovy
+
+/// file: test-groovy-lint.groovy
+
+def main() {
+ def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
+
+ dir("${checkout_dir}") {
+ test_jenkins_helper.execute_test([
+ name: "test-groovy-lint",
+ cmd: "GROOVYLINT_OUTPUT_ARGS='-o groovy-lint.txt' make -C tests test-lint-groovy-docker",
+ ]);
+
+ test_jenkins_helper.analyse_issues("GROOVY", "groovy-lint.txt");
+ }
+}
+
+return this;
diff --git a/buildscripts/scripts/test-gui-crawl-f12less.groovy b/buildscripts/scripts/test-gui-crawl-f12less.groovy
index 2439e0f7240..b8ad3429b82 100644
--- a/buildscripts/scripts/test-gui-crawl-f12less.groovy
+++ b/buildscripts/scripts/test-gui-crawl-f12less.groovy
@@ -23,9 +23,9 @@ def main() {
def branch_version = versioning.get_branch_version(checkout_dir);
def cmk_version = versioning.get_cmk_version(safe_branch_name, branch_version, "daily");
def docker_tag = versioning.select_docker_tag(
- safe_branch_name, // 'branch'
- "", // 'build tag'
- "", // 'folder tag'
+ safe_branch_name, // 'branch'
+ "", // 'build tag'
+ "", // 'folder tag'
)
def distro = params.DISTRO;
def edition = params.EDITION;
@@ -62,40 +62,41 @@ def main() {
// todo: build progress mins?
stage("Prepare workspace") {
- docker.withRegistry(DOCKER_REGISTRY, "nexus") {
- docker_image_from_alias("IMAGE_TESTING").inside(
- "--group-add=${get_docker_group_id()} \
- --ulimit nofile=1024:1024 \
- --env HOME=/home/jenkins \
- ${mount_reference_repo_dir} \
- -v /home/jenkins/.cmk-credentials:/home/jenkins/.cmk-credentials:ro \
- -v /var/run/docker.sock:/var/run/docker.sock") {
-
- dir("${checkout_dir}") {
-
- // Cleanup test results directory before starting the test to prevent previous
- // runs somehow affecting the current run.
- sh("rm -rf ${WORKSPACE}/test-results");
-
- // Initialize our virtual environment before parallelization
- sh("make .venv");
-
- stage("Fetch Checkmk package") {
- fetch_job_artifacts(
- relative_job_name: "builders/build-cmk-distro-package",
- params: [
- /// currently CUSTOM_GIT_REF must match, but in the future
- /// we should define dependency paths for build-cmk-distro-package
- CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
- EDITION: edition,
- DISTRO: distro,
- ],
- dest: "package_download",
- );
- }
- try {
- stage("Run `make ${make_target}`") {
- dir("${checkout_dir}/tests") {
+ inside_container(
+ args: [
+ "--env HOME=/home/jenkins",
+ ],
+ set_docker_group_id: true,
+ ulimit_nofile: 1024,
+ mount_credentials: true,
+ priviliged: true,
+ ) {
+ dir("${checkout_dir}") {
+
+ // Clean up the test results directory before starting the test to prevent
+ // previous runs from affecting the current one.
+ sh("rm -rf ${WORKSPACE}/test-results");
+
+ // Initialize our virtual environment before parallelization
+ sh("make .venv");
+
+ stage("Fetch Checkmk package") {
+ upstream_build(
+ relative_job_name: "builders/build-cmk-distro-package",
+ build_params: [
+ /// currently CUSTOM_GIT_REF must match, but in the future
+ /// we should define dependency paths for build-cmk-distro-package
+ CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
+ EDITION: edition,
+ DISTRO: distro,
+ ],
+ dest: "package_download",
+ );
+ }
+ try {
+ stage("Run `make ${make_target}`") {
+ dir("${checkout_dir}/tests") {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
sh("""
RESULT_PATH='${WORKSPACE}/test-results/${distro}' \
EDITION='${edition}' \
@@ -106,34 +107,34 @@ def main() {
""");
}
}
- } finally {
- stage("Archive / process test reports") {
- dir("${WORKSPACE}") {
- show_duration("archiveArtifacts") {
- archiveArtifacts("test-results/**");
- }
- xunit([Custom(
- customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "**/junit.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )]);
+ }
+ } finally {
+ stage("Archive / process test reports") {
+ dir("${WORKSPACE}") {
+ show_duration("archiveArtifacts") {
+ archiveArtifacts("test-results/**");
}
+ xunit([Custom(
+ customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/junit.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )]);
}
- stage('archive crawler report') {
- dir("${WORKSPACE}") {
- xunit([
- JUnit(
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "**/crawl.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )
- ])
- }
+ }
+ stage('archive crawler report') {
+ dir("${WORKSPACE}") {
+ xunit([
+ JUnit(
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/crawl.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )
+ ]);
}
}
}
diff --git a/buildscripts/scripts/test-gui-crawl.groovy b/buildscripts/scripts/test-gui-crawl.groovy
deleted file mode 100644
index 80eedc5ea0e..00000000000
--- a/buildscripts/scripts/test-gui-crawl.groovy
+++ /dev/null
@@ -1,47 +0,0 @@
-#!groovy
-
-/// file: test-gui-crawl.groovy
-
-def main() {
- def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def testing_helper = load("${checkout_dir}/buildscripts/scripts/utils/integration.groovy");
- def branch_name = versioning.safe_branch_name(scm);
- def branch_version = versioning.get_branch_version(checkout_dir);
- def cmk_version = versioning.get_cmk_version(branch_name, branch_version, "daily");
- def docker_group_id = get_docker_group_id();
-
- check_environment_variables([
- "DOCKER_TAG",
- ]);
-
- try {
- testing_helper.run_make_targets(
- DOCKER_GROUP_ID: docker_group_id,
- DISTRO_LIST: ["ubuntu-20.04"],
- EDITION: "enterprise",
- VERSION: "git",
- DOCKER_TAG: versioning.select_docker_tag(
- branch_name,
- "",
- ""), // FIXME was DOCKER_TAG_DEFAULT before
- MAKE_TARGET: "test-gui-crawl-docker",
- BRANCH: branch_name,
- cmk_version: cmk_version,
- )
- } finally {
- stage('archive crawler report') {
- dir("${WORKSPACE}") {
- xunit([
- JUnit(
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "**/crawl.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )
- ])
- }
- }
- }
-}
-return this;
diff --git a/buildscripts/scripts/test-gui-e2e-f12less.groovy b/buildscripts/scripts/test-gui-e2e-f12less.groovy
index 6038913e93e..e3dea5a3c06 100644
--- a/buildscripts/scripts/test-gui-e2e-f12less.groovy
+++ b/buildscripts/scripts/test-gui-e2e-f12less.groovy
@@ -23,9 +23,9 @@ def main() {
def branch_version = versioning.get_branch_version(checkout_dir);
def cmk_version = versioning.get_cmk_version(safe_branch_name, branch_version, "daily");
def docker_tag = versioning.select_docker_tag(
- safe_branch_name, // 'branch'
- "", // 'build tag'
- "", // 'folder tag'
+ safe_branch_name, // 'branch'
+ "", // 'build tag'
+ "", // 'folder tag'
)
def distro = params.DISTRO;
def edition = params.EDITION;
@@ -62,40 +62,40 @@ def main() {
// todo: build progress mins?
stage("Prepare workspace") {
- docker.withRegistry(DOCKER_REGISTRY, "nexus") {
- docker_image_from_alias("IMAGE_TESTING").inside(
- "--group-add=${get_docker_group_id()} \
- --ulimit nofile=1024:1024 \
- --env HOME=/home/jenkins \
- ${mount_reference_repo_dir} \
- -v /home/jenkins/.cmk-credentials:/home/jenkins/.cmk-credentials:ro \
- -v /var/run/docker.sock:/var/run/docker.sock") {
-
- dir("${checkout_dir}") {
-
- // Cleanup test results directory before starting the test to prevent previous
- // runs somehow affecting the current run.
- sh("rm -rf ${WORKSPACE}/test-results");
-
- // Initialize our virtual environment before parallelization
- sh("make .venv");
-
- stage("Fetch Checkmk package") {
- fetch_job_artifacts(
- relative_job_name: "builders/build-cmk-distro-package",
- params: [
- /// currently CUSTOM_GIT_REF must match, but in the future
- /// we should define dependency paths for build-cmk-distro-package
- CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
- EDITION: edition,
- DISTRO: distro,
- ],
- dest: "package_download",
- );
- }
- try {
- stage("Run `make ${make_target}`") {
- dir("${checkout_dir}/tests") {
+ inside_container(
+ args: [
+ "--env HOME=/home/jenkins",
+ ],
+ set_docker_group_id: true,
+ ulimit_nofile: 1024,
+ mount_credentials: true,
+ priviliged: true,
+ ) {
+ dir("${checkout_dir}") {
+ // Cleanup test results directory before starting the test to prevent previous
+ // runs somehow affecting the current run.
+ sh("rm -rf ${WORKSPACE}/test-results");
+
+ // Initialize our virtual environment before parallelization
+ sh("make .venv");
+
+ stage("Fetch Checkmk package") {
+ upstream_build(
+ relative_job_name: "builders/build-cmk-distro-package",
+ build_params: [
+ /// currently CUSTOM_GIT_REF must match, but in the future
+ /// we should define dependency paths for build-cmk-distro-package
+ CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
+ EDITION: edition,
+ DISTRO: distro,
+ ],
+ dest: "package_download",
+ );
+ }
+ try {
+ stage("Run `make ${make_target}`") {
+ dir("${checkout_dir}/tests") {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
sh("""
RESULT_PATH='${WORKSPACE}/test-results/${distro}' \
EDITION='${edition}' \
@@ -107,21 +107,21 @@ def main() {
}
}
}
- finally {
- stage("Archive / process test reports") {
- dir("${WORKSPACE}") {
- show_duration("archiveArtifacts") {
- archiveArtifacts("test-results/**");
- }
- xunit([Custom(
- customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "**/junit.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )]);
+ }
+ finally {
+ stage("Archive / process test reports") {
+ dir("${WORKSPACE}") {
+ show_duration("archiveArtifacts") {
+ archiveArtifacts("test-results/**");
}
+ xunit([Custom(
+ customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/junit.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )]);
}
}
}
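
The inside_container helper is defined elsewhere in the repo and is not part of this diff; judging from the call sites, its keyword options replace the raw docker flags of the old docker_image_from_alias(...).inside(...) call roughly one-to-one. A sketch of that mapping, under that assumption:

    inside_container(
        args: ["--env HOME=/home/jenkins"],  // extra docker arguments, passed through
        set_docker_group_id: true,           // was: --group-add=${get_docker_group_id()}
        ulimit_nofile: 1024,                 // was: --ulimit nofile=1024:1024
        mount_credentials: true,             // was: -v /home/jenkins/.cmk-credentials:...:ro
        priviliged: true,                    // spelling matches the helper's parameter name
    ) {
        sh("make .venv");                    // body runs inside the testing container
    }

Note also that docker.withRegistry(DOCKER_REGISTRY, "nexus") no longer wraps the whole job; it moves down to the point where the make target actually pulls images.
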
diff --git a/buildscripts/scripts/test-gui-e2e.groovy b/buildscripts/scripts/test-gui-e2e.groovy
deleted file mode 100644
index 6b4adade06b..00000000000
--- a/buildscripts/scripts/test-gui-e2e.groovy
+++ /dev/null
@@ -1,29 +0,0 @@
-#!groovy
-
-/// file: test-gui-e2e.groovy
-
-def main() {
- def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def testing_helper = load("${checkout_dir}/buildscripts/scripts/utils/integration.groovy");
- def branch_name = versioning.safe_branch_name(scm);
- def branch_version = versioning.get_branch_version(checkout_dir);
-
- check_environment_variables([
- "DOCKER_TAG",
- ]);
-
- testing_helper.run_make_targets(
- DOCKER_GROUP_ID: get_docker_group_id(),
- DISTRO_LIST: ["ubuntu-20.04"],
- EDITION: "enterprise",
- VERSION: "git",
- DOCKER_TAG: versioning.select_docker_tag(
- branch_name,
- "",
- ""), // FIXME was DOCKER_TAG_DEFAULT before
- MAKE_TARGET: "test-gui-e2e-docker",
- BRANCH: branch_name,
- cmk_version: versioning.get_cmk_version(branch_name, branch_version, "daily"),
- );
-}
-return this;
diff --git a/buildscripts/scripts/test-integration-packages.groovy b/buildscripts/scripts/test-integration-packages.groovy
index 868c950ffdc..834a5477492 100644
--- a/buildscripts/scripts/test-integration-packages.groovy
+++ b/buildscripts/scripts/test-integration-packages.groovy
@@ -9,6 +9,7 @@ def main() {
"EDITION",
"VERSION",
"OVERRIDE_DISTROS",
+ "USE_CASE"
]);
check_environment_variables([
@@ -20,7 +21,7 @@ def main() {
def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
def testing_helper = load("${checkout_dir}/buildscripts/scripts/utils/integration.groovy");
- def distros = versioning.configured_or_overridden_distros(EDITION, OVERRIDE_DISTROS, "daily_tests");
+ def distros = versioning.get_distros(edition: EDITION, use_case: "daily_tests", override: OVERRIDE_DISTROS);
def safe_branch_name = versioning.safe_branch_name(scm);
def branch_version = versioning.get_branch_version(checkout_dir);
def cmk_version = versioning.get_cmk_version(safe_branch_name, branch_version, VERSION);
@@ -50,19 +51,21 @@ def main() {
""".stripMargin());
stage('test integration') { // TODO should not be needed
- // TODO: don't run make test-integration-docker but use docker.inside() instead
- testing_helper.run_make_targets(
- // Get the ID of the docker group from the node(!). This must not be
- // executed inside the container (as long as the IDs are different)
- DOCKER_GROUP_ID: get_docker_group_id(),
- DISTRO_LIST: distros,
- EDITION: EDITION,
- VERSION: VERSION,
- DOCKER_TAG: docker_tag,
- MAKE_TARGET: "test-integration-docker",
- BRANCH: versioning.branch_name(scm),
- cmk_version: cmk_version,
- )
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
+ // TODO: don't run make test-integration-docker but use docker.inside() instead
+ testing_helper.run_make_targets(
+ // Get the ID of the docker group from the node(!). This must not be
+ // executed inside the container (as long as the IDs are different)
+ DOCKER_GROUP_ID: get_docker_group_id(),
+ DISTRO_LIST: distros,
+ EDITION: EDITION,
+ VERSION: VERSION,
+ DOCKER_TAG: docker_tag,
+ MAKE_TARGET: "test-integration-docker",
+ BRANCH: versioning.branch_name(scm),
+ cmk_version: cmk_version,
+ );
+ }
}
}
diff --git a/buildscripts/scripts/test-integration-single-f12less.groovy b/buildscripts/scripts/test-integration-single-f12less.groovy
index 4733a51eaaf..68ac872df1e 100644
--- a/buildscripts/scripts/test-integration-single-f12less.groovy
+++ b/buildscripts/scripts/test-integration-single-f12less.groovy
@@ -22,10 +22,10 @@ def main() {
def branch_version = versioning.get_branch_version(checkout_dir);
def cmk_version = versioning.get_cmk_version(safe_branch_name, branch_version, "daily");
def docker_tag = versioning.select_docker_tag(
- safe_branch_name, // 'branch'
- "", // 'build tag'
- "", // 'folder tag'
- )
+ safe_branch_name, // 'branch'
+ "", // 'build tag'
+ "", // 'folder tag'
+ );
def distro = params.DISTRO;
def edition = params.EDITION;
@@ -61,40 +61,40 @@ def main() {
// todo: build progress mins?
stage("Prepare workspace") {
- docker.withRegistry(DOCKER_REGISTRY, "nexus") {
- docker_image_from_alias("IMAGE_TESTING").inside(
- "--group-add=${get_docker_group_id()} \
- --ulimit nofile=1024:1024 \
- --env HOME=/home/jenkins \
- ${mount_reference_repo_dir} \
- -v /home/jenkins/.cmk-credentials:/home/jenkins/.cmk-credentials:ro \
- -v /var/run/docker.sock:/var/run/docker.sock") {
-
- dir("${checkout_dir}") {
-
- // Cleanup test results directory before starting the test to prevent previous
- // runs somehow affecting the current run.
- sh("rm -rf ${WORKSPACE}/test-results");
-
- // Initialize our virtual environment before parallelization
- sh("make .venv");
-
- stage("Fetch Checkmk package") {
- fetch_job_artifacts(
- relative_job_name: "builders/build-cmk-distro-package",
- params: [
- /// currently CUSTOM_GIT_REF must match, but in the future
- /// we should define dependency paths for build-cmk-distro-package
- CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
- EDITION: edition,
- DISTRO: distro,
- ],
- dest: "package_download",
- );
- }
- try {
- stage("Run `make ${make_target}`") {
- dir("${checkout_dir}/tests") {
+ inside_container(
+ args: [
+ "--env HOME=/home/jenkins",
+ ],
+ set_docker_group_id: true,
+ ulimit_nofile: 1024,
+ mount_credentials: true,
+ priviliged: true,
+ ) {
+ dir("${checkout_dir}") {
+ // Cleanup test results directory before starting the test to prevent previous
+ // runs somehow affecting the current run.
+ sh("rm -rf ${WORKSPACE}/test-results");
+
+ // Initialize our virtual environment before parallelization
+ sh("make .venv");
+
+ stage("Fetch Checkmk package") {
+ upstream_build(
+ relative_job_name: "builders/build-cmk-distro-package",
+ build_params: [
+ /// currently CUSTOM_GIT_REF must match, but in the future
+ /// we should define dependency paths for build-cmk-distro-package
+ CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
+ EDITION: edition,
+ DISTRO: distro,
+ ],
+ dest: "package_download",
+ );
+ }
+ try {
+ stage("Run `make ${make_target}`") {
+ dir("${checkout_dir}/tests") {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
sh("""
RESULT_PATH='${WORKSPACE}/test-results/${distro}' \
EDITION='${edition}' \
@@ -105,21 +105,21 @@ def main() {
""");
}
}
- } finally {
- stage("Archive / process test reports") {
- dir("${WORKSPACE}") {
- show_duration("archiveArtifacts") {
- archiveArtifacts("test-results/**");
- }
- xunit([Custom(
- customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "**/junit.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )]);
+ }
+ } finally {
+ stage("Archive / process test reports") {
+ dir("${WORKSPACE}") {
+ show_duration("archiveArtifacts") {
+ archiveArtifacts("test-results/**");
}
+ xunit([Custom(
+ customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/junit.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )]);
}
}
}
diff --git a/buildscripts/scripts/test-integration-single.groovy b/buildscripts/scripts/test-integration-single.groovy
deleted file mode 100644
index 823d0d3903d..00000000000
--- a/buildscripts/scripts/test-integration-single.groovy
+++ /dev/null
@@ -1,52 +0,0 @@
-#!groovy
-
-/// file: test-integration-single.groovy
-
-def main() {
- check_environment_variables([
- "DOCKER_TAG",
- ]);
-
- def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def testing_helper = load("${checkout_dir}/buildscripts/scripts/utils/integration.groovy");
- def safe_branch_name = versioning.safe_branch_name(scm);
- def branch_version = versioning.get_branch_version(checkout_dir);
- def distros = ["ubuntu-20.04"];
- def cmk_version = versioning.get_cmk_version(safe_branch_name, branch_version, "daily");
-
- def docker_tag = versioning.select_docker_tag(
- safe_branch_name,
- "",
- "") // FIXME was DOCKER_TAG_DEFAULT before
-
- currentBuild.description += (
- """
- |Run integration tests for packages
- |safe_branch_name: ${safe_branch_name}
- |cmk_version: ${cmk_version}
- |distros: ${distros}
- """.stripMargin());
-
- print(
- """
- |===== CONFIGURATION ===============================
- |safe_branch_name:...... │${safe_branch_name}│
- |docker_tag:............ │${docker_tag}│
- |distros:............... │${distros}│
- |cmk_version:........... │${cmk_version}
- |===================================================
- """.stripMargin());
-
- testing_helper.run_make_targets(
- DOCKER_GROUP_ID: get_docker_group_id(),
- DISTRO_LIST: distros,
- EDITION: "enterprise",
- VERSION: "git", // ==run .f12 after installing Checkmk
- DOCKER_TAG: docker_tag,
- MAKE_TARGET: "test-integration-docker",
- BRANCH: safe_branch_name,
- cmk_version: cmk_version,
- );
-}
-
-return this;
diff --git a/buildscripts/scripts/test-javascript-build.groovy b/buildscripts/scripts/test-javascript-build.groovy
deleted file mode 100644
index 77938b9096e..00000000000
--- a/buildscripts/scripts/test-javascript-build.groovy
+++ /dev/null
@@ -1,19 +0,0 @@
-#!groovy
-
-/// file: test-javascript-build.groovy
-
-def main() {
- def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
-
- dir("${checkout_dir}") {
- test_jenkins_helper.execute_test([
- name: "test-javascript-build",
- cmd: "make -C tests test-build-js-docker",
- output_file: "js-build.txt"
- ]);
-
- test_jenkins_helper.analyse_issues("TSJSBUILD", "js-build.txt");
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-javascript-format.groovy b/buildscripts/scripts/test-javascript-format.groovy
deleted file mode 100644
index 82362d689b6..00000000000
--- a/buildscripts/scripts/test-javascript-format.groovy
+++ /dev/null
@@ -1,19 +0,0 @@
-#!groovy
-
-/// file: test-javascript-format.groovy
-
-def main() {
- def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
-
- dir("${checkout_dir}") {
- test_jenkins_helper.execute_test([
- name: "test-javascript-format",
- cmd: "make -C tests test-format-js-docker",
- output_file: "js-prettier.txt"
- ]);
-
- test_jenkins_helper.analyse_issues("TSJSFORMAT", "js-prettier.txt");
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-javascript-lint.groovy b/buildscripts/scripts/test-javascript-lint.groovy
deleted file mode 100644
index 21b1720017a..00000000000
--- a/buildscripts/scripts/test-javascript-lint.groovy
+++ /dev/null
@@ -1,22 +0,0 @@
-#!groovy
-
-/// file: test-javascript-lint.groovy
-
-def main() {
- def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
-
- dir("${checkout_dir}") {
- test_jenkins_helper.execute_test([
- name: "test-javascript-lint",
- cmd: """
- truncate -s-1 scripts/check-js-lint.sh
- echo " --format checkstyle > eslint.xml" >> scripts/check-js-lint.sh
- make -C tests test-lint-js-docker
- """,
- ]);
-
- test_jenkins_helper.analyse_issues("ESLINT", "eslint.xml");
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-laptop-setup.groovy b/buildscripts/scripts/test-laptop-setup.groovy
new file mode 100644
index 00000000000..55384023e12
--- /dev/null
+++ b/buildscripts/scripts/test-laptop-setup.groovy
@@ -0,0 +1,81 @@
+#!groovy
+
+/// file: test-laptop-setup.groovy
+
+/// Install required packages for Checkmk development
+
+/// Parameters / environment values:
+///
+/// Jenkins artifacts: ???
+/// Other artifacts: ???
+/// Depends on: image aliases for upstream OS images on Nexus, ???
+
+def main() {
+ check_environment_variables([
+ "NEXUS_ARCHIVES_URL",
+ ]);
+
+ def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
+
+ def branch_name = versioning.safe_branch_name(scm);
+ def branch_version = versioning.get_branch_version(checkout_dir);
+
+ print(
+ """
+ |===== CONFIGURATION ===============================
+ |branch_name:....................(local) │${branch_name}│
+ |branch_version:.................(local) │${branch_version}│
+ |===================================================
+ """.stripMargin());
+
+ withCredentials([
+ usernamePassword(
+ credentialsId: 'nexus',
+ usernameVariable: 'USERNAME',
+ passwordVariable: 'PASSWORD')
+ ]) {
+ def DOCKER_ARGS = (
+ " --no-cache" +
+ " --build-arg NEXUS_ARCHIVES_URL='$NEXUS_ARCHIVES_URL'" +
+ " --build-arg NEXUS_USERNAME='$USERNAME'" +
+ " --build-arg NEXUS_PASSWORD='$PASSWORD'" +
+ " --build-arg CI=1"
+ );
+ // no support for 20.04, sorry
+ // python2 would be required, and the system Python does not support typing in "strip_binaries"
+ def ubuntu_versions = ["22.04", "23.04"];
+
+ dir("${checkout_dir}") {
+ sh("""
+ cp \
+ .bazelversion \
+ defines.make \
+ omd/strip_binaries \
+ omd/distros/*.mk \
+ package_versions.bzl \
+ static_variables.bzl \
+ buildscripts/infrastructure/build-nodes/scripts
+ """);
+ }
+
+ dir("${checkout_dir}/buildscripts/infrastructure/build-nodes") {
+ def stages = ubuntu_versions.collectEntries { distro ->
+ [("${distro}") : {
+ stage("Build ${distro}") {
+ def THIS_DOCKER_ARGS = DOCKER_ARGS + (
+ " --build-arg DISTRO='ubuntu-${distro}'" +
+ " --build-arg BASE_IMAGE='ubuntu:${distro}'" +
+ " -f laptops/Dockerfile ."
+ );
+ print(THIS_DOCKER_ARGS);
+
+ docker.build("test-install-development:${branch_name}-latest", THIS_DOCKER_ARGS);
+ }
+ }];
+ }
+ parallel(stages);
+ }
+ }
+}
+
+return this;
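
The distro loop above builds one stage closure per Ubuntu version with collectEntries and hands the resulting map to parallel. Reduced to its core, with an echo standing in for the docker.build call:

    def ubuntu_versions = ["22.04", "23.04"];
    def stages = ubuntu_versions.collectEntries { distro ->
        [("${distro}"): {
            stage("Build ${distro}") {
                sh("echo building ubuntu-${distro}");  // stand-in for docker.build(...)
            }
        }]
    };
    parallel(stages);  // runs the "22.04" and "23.04" closures concurrently
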
diff --git a/buildscripts/scripts/test-livestatus.groovy b/buildscripts/scripts/test-livestatus.groovy
deleted file mode 100644
index 940e3f7da30..00000000000
--- a/buildscripts/scripts/test-livestatus.groovy
+++ /dev/null
@@ -1,26 +0,0 @@
-#!groovy
-
-/// file: test-livestatus.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Compile & Test Livestatus') {
- sh("GCC_TOOLCHAIN=/opt/gcc-13.2.0 packages/livestatus/run --clean --all");
- }
- }
- stage("Analyse Issues") {
- publishIssues(
- issues: [scanForIssues( tool: gcc())],
- trendChartType: 'TOOLS_ONLY',
- qualityGates: [[
- threshold: 1,
- type: 'TOTAL',
- unstable: false,
- ]],
- );
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-neb.groovy b/buildscripts/scripts/test-neb.groovy
deleted file mode 100644
index b86ea3de017..00000000000
--- a/buildscripts/scripts/test-neb.groovy
+++ /dev/null
@@ -1,26 +0,0 @@
-#!groovy
-
-/// file: test-neb.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Compile & Test NEB') {
- sh("GCC_TOOLCHAIN=/opt/gcc-13.2.0 packages/neb/run --clean --all");
- }
- }
- stage("Analyse Issues") {
- publishIssues(
- issues: [scanForIssues( tool: gcc())],
- trendChartType: 'TOOLS_ONLY',
- qualityGates: [[
- threshold: 1,
- type: 'TOTAL',
- unstable: false,
- ]],
- );
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-package-cmk-agent-based.groovy b/buildscripts/scripts/test-package-cmk-agent-based.groovy
deleted file mode 100644
index 16beeb80bec..00000000000
--- a/buildscripts/scripts/test-package-cmk-agent-based.groovy
+++ /dev/null
@@ -1,15 +0,0 @@
-#!groovy
-
-/// file: test-package-cmk-agent-based.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Test Package cmk-agent-based') {
- sh("packages/cmk-agent-based/run --clean --all");
- }
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-package-cmk-graphing.groovy b/buildscripts/scripts/test-package-cmk-graphing.groovy
deleted file mode 100644
index 780410e4b3c..00000000000
--- a/buildscripts/scripts/test-package-cmk-graphing.groovy
+++ /dev/null
@@ -1,15 +0,0 @@
-#!groovy
-
-/// file: test-package-cmk-graphing.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Test Package cmk-graphing') {
- sh("packages/cmk-graphing/run --clean --all");
- }
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-package-cmk-mkp-tool.groovy b/buildscripts/scripts/test-package-cmk-mkp-tool.groovy
deleted file mode 100644
index 21e4c294297..00000000000
--- a/buildscripts/scripts/test-package-cmk-mkp-tool.groovy
+++ /dev/null
@@ -1,15 +0,0 @@
-#!groovy
-
-/// file: test-package-cmk-mkp-tool.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Test Package cmk-mkp-tool') {
- sh("packages/cmk-mkp-tool/run --clean --all");
- }
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-package-cmk-rulesets.groovy b/buildscripts/scripts/test-package-cmk-rulesets.groovy
deleted file mode 100644
index cd03b39f50c..00000000000
--- a/buildscripts/scripts/test-package-cmk-rulesets.groovy
+++ /dev/null
@@ -1,15 +0,0 @@
-#!groovy
-
-/// file: test-package-cmk-rulesets.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Test Package cmk-rulesets') {
- sh("packages/cmk-rulesets/run --clean --all");
- }
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-package-cmk-server-side-calls.groovy b/buildscripts/scripts/test-package-cmk-server-side-calls.groovy
deleted file mode 100644
index b32d856d816..00000000000
--- a/buildscripts/scripts/test-package-cmk-server-side-calls.groovy
+++ /dev/null
@@ -1,15 +0,0 @@
-#!groovy
-
-/// file: test-package-cmk-server-side-calls.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Test Package cmk-server-side-calls') {
- sh("packages/cmk-server-side-calls/run --clean --all");
- }
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-plugins.groovy b/buildscripts/scripts/test-plugins.groovy
index 78e6985327c..5a84273fef5 100644
--- a/buildscripts/scripts/test-plugins.groovy
+++ b/buildscripts/scripts/test-plugins.groovy
@@ -3,23 +3,132 @@
/// file: test-plugins.groovy
def main() {
- def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def testing_helper = load("${checkout_dir}/buildscripts/scripts/utils/integration.groovy");
- def branch_version = versioning.get_branch_version(checkout_dir);
+ check_job_parameters([
+ ["EDITION", true], // the testees package long edition string (e.g. 'enterprise')
+ ["DISTRO", true], // the testees package distro string (e.g. 'ubuntu-22.04')
+ // "DOCKER_TAG_BUILD", // test base image tag (todo)
+ // "DISABLE_CACHE", // forwarded to package build job (todo)
+ ]);
check_environment_variables([
+ "DOCKER_REGISTRY",
"DOCKER_TAG",
]);
- testing_helper.run_make_targets(
- DOCKER_GROUP_ID: get_docker_group_id(),
- DISTRO_LIST: ["ubuntu-20.04"],
- EDITION: "enterprise",
- VERSION: "daily",
- DOCKER_TAG: "master-latest",
- MAKE_TARGET: "test-plugins-docker",
- BRANCH: "master",
- cmk_version: versioning.get_cmk_version("master", branch_version, "daily"),
- );
+ def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
+
+ //def safe_branch_name = versioning.safe_branch_name(scm); // todo: this returns rubbish if CUSTOM_GIT_REF is set
+ def safe_branch_name = "master";
+
+ def branch_version = versioning.get_branch_version(checkout_dir);
+ def cmk_version = versioning.get_cmk_version(safe_branch_name, branch_version, "daily");
+ def docker_tag = versioning.select_docker_tag(
+ safe_branch_name, // 'branch'
+ "", // 'build tag'
+ "", // 'folder tag'
+ )
+ def distro = params.DISTRO;
+ def edition = params.EDITION;
+
+ def make_target = "test-plugins-docker";
+
+ currentBuild.description += (
+ """
+ |Run plugin tests
+ |safe_branch_name: ${safe_branch_name}
+ |branch_version: ${branch_version}
+ |cmk_version: ${cmk_version}
+ |docker_tag: ${docker_tag}
+ |edition: ${edition}
+ |distro: ${distro}
+ |make_target: ${make_target}
+ """.stripMargin());
+
+ print(
+ """
+ |===== CONFIGURATION ===============================
+ |safe_branch_name:...... │${safe_branch_name}│
+ |branch_version:........ │${branch_version}│
+ |cmk_version:........... │${cmk_version}
+ |docker_tag:............ │${docker_tag}│
+ |edition:............... │${edition}│
+ |distro:................ │${distro}│
+ |make_target:........... │${make_target}│
+ |===================================================
+ """.stripMargin());
+
+ // todo: add upstream project to description
+ // todo: add error to description
+ // todo: build progress mins?
+
+ stage("Prepare workspace") {
+ inside_container(
+ args: [
+ "--env HOME=/home/jenkins",
+ ],
+ set_docker_group_id: true,
+ ulimit_nofile: 1024,
+ mount_credentials: true,
+ priviliged: true,
+ ) {
+
+ dir("${checkout_dir}") {
+ // Cleanup test results directory before starting the test to prevent previous
+ // runs somehow affecting the current run.
+ sh("rm -rf ${WORKSPACE}/test-results");
+
+ // Initialize our virtual environment before parallelization
+ sh("make .venv");
+
+ stage("Fetch Checkmk package") {
+ upstream_build(
+ relative_job_name: "builders/build-cmk-distro-package",
+ build_params: [
+ /// currently CUSTOM_GIT_REF must match, but in the future
+ /// we should define dependency paths for build-cmk-distro-package
+ CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
+ EDITION: edition,
+ DISTRO: distro,
+ ],
+ dest: "package_download",
+ );
+ }
+ try {
+ stage("Run `make ${make_target}`") {
+ dir("${checkout_dir}/tests") {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
+ sh("""
+ RESULT_PATH='${WORKSPACE}/test-results/${distro}' \
+ EDITION='${edition}' \
+ DOCKER_TAG='${docker_tag}' \
+ VERSION="daily" \
+ DISTRO='${distro}' \
+ make ${make_target}
+ """);
+ }
+ }
+ }
+ }
+ finally {
+ stage("Archive / process test reports") {
+ dir("${WORKSPACE}") {
+ show_duration("archiveArtifacts") {
+ archiveArtifacts("test-results/**");
+ }
+ xunit([Custom(
+ customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/junit.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )]);
+ }
+ }
+ }
+ }
+ }
+ }
}
+
return this;
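
upstream_build replaces the earlier fetch_job_artifacts; apart from params: being renamed to build_params:, the call shape stays the same. An isolated example, with illustrative values in place of params.EDITION / params.DISTRO:

    upstream_build(
        relative_job_name: "builders/build-cmk-distro-package",
        build_params: [
            CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),  // pin to the current commit
            EDITION: "enterprise",   // illustrative; the jobs pass params.EDITION
            DISTRO: "ubuntu-22.04",  // illustrative; the jobs pass params.DISTRO
        ],
        dest: "package_download",    // artifacts land here below the checkout
    );
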
diff --git a/buildscripts/scripts/test-python3-bandit.groovy b/buildscripts/scripts/test-python3-bandit.groovy
index 09e1a6a409e..08ea623532a 100644
--- a/buildscripts/scripts/test-python3-bandit.groovy
+++ b/buildscripts/scripts/test-python3-bandit.groovy
@@ -3,42 +3,44 @@
/// file: test-python3-bandit.groovy
def main() {
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside('--ulimit nofile=1024:1024 --init') {
- try {
- stage('run Bandit') {
- dir("${checkout_dir}") {
- sh("BANDIT_OUTPUT_ARGS=\"-f xml -o '$WORKSPACE/bandit_results.xml'\" make -C tests test-bandit");
- }
+ inside_container(
+ init: true,
+ ulimit_nofile: 1024,
+ ) {
+ try {
+ stage('run Bandit') {
+ dir("${checkout_dir}") {
+ sh("BANDIT_OUTPUT_ARGS=\"-f xml -o '$WORKSPACE/bandit_results.xml'\" make -C tests test-bandit");
}
- } finally {
- stage("Archive / process test reports") {
- show_duration("archiveArtifacts") {
- archiveArtifacts("bandit_results.xml");
- }
- xunit([Custom(
- customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/bandit-xunit.xsl",
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "bandit_results.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )]);
+ }
+ } finally {
+ stage("Archive / process test reports") {
+ show_duration("archiveArtifacts") {
+ archiveArtifacts("bandit_results.xml");
}
+ xunit([Custom(
+ customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/bandit-xunit.xsl",
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "bandit_results.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )]);
}
- stage('check nosec markers') {
- try {
- dir("${checkout_dir}") {
- sh("make -C tests test-bandit-nosec-markers");
- }
- } catch(Exception) {
- // Don't fail the job if un-annotated markers are found.
- // Security will have to take care of those later.
- // TODO: once we have a green baseline, mark unstable if new un-annotated markers have been added:
- // unstable("failed to validate nosec marker annotations");
+ }
+ stage('check nosec markers') {
+ try {
+ dir("${checkout_dir}") {
+ sh("make -C tests test-bandit-nosec-markers");
}
+ } catch(Exception) { // groovylint-disable EmptyCatchBlock
+ // Don't fail the job if un-annotated markers are found.
+ // Security will have to take care of those later.
+ // TODO: once we have a green baseline, mark unstable if new un-annotated markers have been added:
+ // unstable("failed to validate nosec marker annotations");
}
}
}
}
+
return this;
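
The TODO in the nosec-marker stage already names the follow-up: once a green baseline exists, the empty catch block becomes a call to Jenkins' built-in unstable step. That future version would look like:

    try {
        dir("${checkout_dir}") {
            sh("make -C tests test-bandit-nosec-markers");
        }
    } catch (Exception e) {
        unstable("failed to validate nosec marker annotations");  // mark, don't fail
    }
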
diff --git a/buildscripts/scripts/test-python3-code-quality.groovy b/buildscripts/scripts/test-python3-code-quality.groovy
index c971661d45d..77c9afb72be 100644
--- a/buildscripts/scripts/test-python3-code-quality.groovy
+++ b/buildscripts/scripts/test-python3-code-quality.groovy
@@ -3,15 +3,16 @@
/// file: test-python3-code-quality.groovy
def main() {
- def docker_args = "--ulimit nofile=1024:1024 --init";
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside(docker_args) {
- stage('test python3 code quality') {
- dir("${checkout_dir}") {
- sh("make -C tests test-code-quality");
- }
+ inside_container(
+ init: true,
+ ulimit_nofile: 1024,
+ ) {
+ stage('test python3 code quality') {
+ dir("${checkout_dir}") {
+ sh("make -C tests test-code-quality");
}
}
}
}
+
return this;
diff --git a/buildscripts/scripts/test-python3-pylint.groovy b/buildscripts/scripts/test-python3-pylint.groovy
index d38e02392d0..d8f3856d9dd 100644
--- a/buildscripts/scripts/test-python3-pylint.groovy
+++ b/buildscripts/scripts/test-python3-pylint.groovy
@@ -5,17 +5,15 @@
def main() {
def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside('--ulimit nofile=1024:1024 --init') {
- dir("${checkout_dir}") {
- test_jenkins_helper.execute_test([
- name: "test-pylint",
- cmd: "PYLINT_ARGS=--output-format=parseable make -C tests test-pylint",
- output_file: "pylint.txt"
- ]);
+ dir("${checkout_dir}") {
+ inside_container() {
+ test_jenkins_helper.execute_test([
+ name: "test-pylint",
+ cmd: "PYLINT_ARGS=--output-format=parseable make -C tests test-pylint",
+ output_file: "pylint.txt",
+ ]);
- test_jenkins_helper.analyse_issues("PYLINT", "pylint.txt");
- }
+ test_jenkins_helper.analyse_issues("PYLINT", "pylint.txt");
}
}
}
diff --git a/buildscripts/scripts/test-python3-typing.groovy b/buildscripts/scripts/test-python3-typing.groovy
index de5d37981f0..959f5e99d78 100644
--- a/buildscripts/scripts/test-python3-typing.groovy
+++ b/buildscripts/scripts/test-python3-typing.groovy
@@ -3,47 +3,15 @@
/// file: test-python3-typing.groovy
def main() {
- dir("${checkout_dir}") {
- stage("Execute Test") {
- // catch any error, set stage + build result to failure,
- // but continue in order to execute the publishIssues function
- catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
- sh("""
- MYPY_ADDOPTS='--cobertura-xml-report=$checkout_dir/mypy_reports --html-report=$checkout_dir/mypy_reports/html' \
- make -C tests test-mypy-docker
- """);
- }
- }
-
- stage("Archive reports") {
- show_duration("archiveArtifacts") {
- archiveArtifacts(artifacts: "mypy_reports/**");
- }
- }
+ def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
- stage("Analyse Issues") {
- publishIssues(
- issues:[scanForIssues(tool: clang())],
- trendChartType: 'TOOLS_ONLY',
- qualityGates: [[
- threshold: 1,
- type: 'TOTAL',
- unstable: false,
- ]]
- )
- }
+ dir("${checkout_dir}") {
+ test_jenkins_helper.execute_test([
+ name: "test-mypy-docker",
+ cmd: "MYPY_ADDOPTS='--no-color-output --junit-xml mypy.xml' make -C tests test-mypy-docker",
+ ]);
- stage("Publish coverage") {
- publishHTML([
- allowMissing: false,
- alwaysLinkToLastBuild: false,
- keepAll: true,
- reportDir: 'mypy_reports/html',
- reportFiles: 'index.html',
- reportName: 'Typing coverage',
- reportTitles: '',
- ])
- }
+ test_jenkins_helper.analyse_issues("MYPY", "mypy.xml");
}
}
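
analyse_issues("MYPY", "mypy.xml") lives in utils/test_helper.groovy, which this diff does not touch. By analogy with the publishIssues calls deleted elsewhere in this patch (e.g. in test-livestatus.groovy), its core is presumably something like the following, assuming the warnings-ng plugin's myPy tool:

    publishIssues(
        issues: [scanForIssues(tool: myPy(pattern: "mypy.xml"))],
        trendChartType: 'TOOLS_ONLY',
        qualityGates: [[
            threshold: 1,
            type: 'TOTAL',
            unstable: false,
        ]],
    );
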
diff --git a/buildscripts/scripts/test-python3-unit-resilience.groovy b/buildscripts/scripts/test-python3-unit-resilience.groovy
index 240cc25407d..4530f076c0b 100644
--- a/buildscripts/scripts/test-python3-unit-resilience.groovy
+++ b/buildscripts/scripts/test-python3-unit-resilience.groovy
@@ -4,26 +4,26 @@
def main() {
def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
- def docker_args = "--ulimit nofile=1024:1024 --init";
def relative_result_path = "results/junit-resilience.xml"
def result_path = "${checkout_dir}/${relative_result_path}";
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside(docker_args) {
- stage('run test-unit-resilience') {
- dir("${checkout_dir}") {
- try {
- withEnv([
- "PYTEST_ADDOPTS='--junitxml=${result_path}'",
- ]) {
- sh("make -C tests test-unit-resilience");
- }
- } catch(Exception e) {
- // We want to keep failed resilience builds in order to follow a process, see CMK-14487
- currentBuild.setKeepLog(true)
- throw e
- } finally {
- test_jenkins_helper.analyse_issues("JUNIT", relative_result_path);
+ inside_container(
+ init: true,
+ ulimit_nofile: 1024,
+ ) {
+ stage('run test-unit-resilience') {
+ dir("${checkout_dir}") {
+ try {
+ withEnv([
+ "PYTEST_ADDOPTS='--junitxml=${result_path}'",
+ ]) {
+ sh("make -C tests test-unit-resilience");
}
+ } catch(Exception e) {
+ // We want to keep failed resilience builds in order to follow a process, see CMK-14487
+ currentBuild.setKeepLog(true);
+ throw e;
+ } finally {
+ test_jenkins_helper.analyse_issues("JUNIT", relative_result_path);
}
}
}
diff --git a/buildscripts/scripts/test-python3-unit-slow.groovy b/buildscripts/scripts/test-python3-unit-slow.groovy
index 24575b67d51..ca664e04aa1 100644
--- a/buildscripts/scripts/test-python3-unit-slow.groovy
+++ b/buildscripts/scripts/test-python3-unit-slow.groovy
@@ -3,13 +3,13 @@
/// file: test-python3-unit-slow.groovy
def main() {
- def docker_args = "--ulimit nofile=1024:1024 --init";
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside(docker_args) {
- stage('run test-unit-slow') {
- dir("${checkout_dir}") {
- sh("make -C tests test-unit-slow");
- }
+ inside_container(
+ init: true,
+ ulimit_nofile: 1024,
+ ) {
+ stage('run test-unit-slow') {
+ dir("${checkout_dir}") {
+ sh("make -C tests test-unit-slow");
}
}
}
diff --git a/buildscripts/scripts/test-schemathesis-openapi-f12less.groovy b/buildscripts/scripts/test-schemathesis-openapi-f12less.groovy
index 5e9143378bc..5f4bbad6772 100644
--- a/buildscripts/scripts/test-schemathesis-openapi-f12less.groovy
+++ b/buildscripts/scripts/test-schemathesis-openapi-f12less.groovy
@@ -22,9 +22,9 @@ def main() {
def branch_version = versioning.get_branch_version(checkout_dir);
def cmk_version = versioning.get_cmk_version(safe_branch_name, branch_version, "daily");
def docker_tag = versioning.select_docker_tag(
- safe_branch_name, // 'branch'
- "", // 'build tag'
- "", // 'folder tag'
+ safe_branch_name, // 'branch'
+ "", // 'build tag'
+ "", // 'folder tag'
)
def distro = params.DISTRO;
def edition = params.EDITION;
@@ -61,40 +61,40 @@ def main() {
// todo: build progress mins?
stage("Prepare workspace") {
- docker.withRegistry(DOCKER_REGISTRY, "nexus") {
- docker_image_from_alias("IMAGE_TESTING").inside(
- "--group-add=${get_docker_group_id()} \
- --ulimit nofile=1024:1024 \
- --env HOME=/home/jenkins \
- ${mount_reference_repo_dir} \
- -v /home/jenkins/.cmk-credentials:/home/jenkins/.cmk-credentials:ro \
- -v /var/run/docker.sock:/var/run/docker.sock") {
-
- dir("${checkout_dir}") {
-
- // Cleanup test results directory before starting the test to prevent previous
- // runs somehow affecting the current run.
- sh("rm -rf ${WORKSPACE}/test-results");
-
- // Initialize our virtual environment before parallelization
- sh("make .venv");
-
- stage("Fetch Checkmk package") {
- fetch_job_artifacts(
- relative_job_name: "builders/build-cmk-distro-package",
- params: [
- /// currently CUSTOM_GIT_REF must match, but in the future
- /// we should define dependency paths for build-cmk-distro-package
- CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
- EDITION: edition,
- DISTRO: distro,
- ],
- dest: "package_download",
- );
- }
- try {
- stage("Run `make ${make_target}`") {
- dir("${checkout_dir}/tests") {
+ inside_container(
+ args: [
+ "--env HOME=/home/jenkins",
+ ],
+ set_docker_group_id: true,
+ ulimit_nofile: 1024,
+ mount_credentials: true,
+ priviliged: true,
+ ) {
+ dir("${checkout_dir}") {
+ // Cleanup test results directory before starting the test to prevent previous
+ // runs somehow affecting the current run.
+ sh("rm -rf ${WORKSPACE}/test-results");
+
+ // Initialize our virtual environment before parallelization
+ sh("make .venv");
+
+ stage("Fetch Checkmk package") {
+ upstream_build(
+ relative_job_name: "builders/build-cmk-distro-package",
+ build_params: [
+ /// currently CUSTOM_GIT_REF must match, but in the future
+ /// we should define dependency paths for build-cmk-distro-package
+ CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
+ EDITION: edition,
+ DISTRO: distro,
+ ],
+ dest: "package_download",
+ );
+ }
+ try {
+ stage("Run `make ${make_target}`") {
+ dir("${checkout_dir}/tests") {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
sh("""
RESULT_PATH='${WORKSPACE}/test-results/${distro}' \
EDITION='${edition}' \
@@ -105,21 +105,21 @@ def main() {
""");
}
}
- } finally {
- stage("Archive / process test reports") {
- dir("${WORKSPACE}") {
- show_duration("archiveArtifacts") {
- archiveArtifacts("test-results/**");
- }
- xunit([Custom(
- customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "**/junit.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )]);
+ }
+ } finally {
+ stage("Archive / process test reports") {
+ dir("${WORKSPACE}") {
+ show_duration("archiveArtifacts") {
+ archiveArtifacts("test-results/**");
}
+ xunit([Custom(
+ customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/junit.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )]);
}
}
}
diff --git a/buildscripts/scripts/test-schemathesis-openapi.groovy b/buildscripts/scripts/test-schemathesis-openapi.groovy
deleted file mode 100644
index 3fcbeb75fc2..00000000000
--- a/buildscripts/scripts/test-schemathesis-openapi.groovy
+++ /dev/null
@@ -1,29 +0,0 @@
-#!groovy
-
-/// file: test-schemathesis-openapi.groovy
-
-def main() {
- def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
- def testing_helper = load("${checkout_dir}/buildscripts/scripts/utils/integration.groovy");
- def branch_name = versioning.safe_branch_name(scm);
- def branch_version = versioning.get_branch_version(checkout_dir);
-
- check_environment_variables([
- "DOCKER_TAG",
- ]);
-
- testing_helper.run_make_targets(
- DOCKER_GROUP_ID: get_docker_group_id(),
- DISTRO_LIST: ["ubuntu-20.04"],
- EDITION: "enterprise",
- VERSION: "git",
- DOCKER_TAG: versioning.select_docker_tag(
- branch_name,
- "",
- ""), // FIXME was DOCKER_TAG_DEFAULT before
- MAKE_TARGET: "test-schemathesis-openapi-docker",
- BRANCH: branch_name,
- cmk_version: versioning.get_cmk_version(branch_name, branch_version, "daily"),
- );
-}
-return this;
diff --git a/buildscripts/scripts/test-shell-unit.groovy b/buildscripts/scripts/test-shell-unit.groovy
index 90440b5de5a..b69bee237bd 100644
--- a/buildscripts/scripts/test-shell-unit.groovy
+++ b/buildscripts/scripts/test-shell-unit.groovy
@@ -5,17 +5,18 @@
def main() {
def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside("--ulimit nofile=1024:1024 --init") {
- dir("${checkout_dir}") {
- test_jenkins_helper.execute_test([
- name: "test-shell-unit",
- cmd: "make -C tests test-unit-shell",
- output_file: "shell-unit.txt"
- ]);
+ inside_container(
+ init: true,
+ ulimit_nofile: 1024,
+ ) {
+ dir("${checkout_dir}") {
+ test_jenkins_helper.execute_test([
+ name: "test-shell-unit",
+ cmd: "make -C tests test-unit-shell",
+ output_file: "shell-unit.txt",
+ ]);
- test_jenkins_helper.analyse_issues("SHELLUNIT", "shell-unit.txt");
- }
+ test_jenkins_helper.analyse_issues("SHELLUNIT", "shell-unit.txt");
}
}
}
diff --git a/buildscripts/scripts/test-shellcheck_agents.groovy b/buildscripts/scripts/test-shellcheck_agents.groovy
index 83822294c5d..1c035194759 100644
--- a/buildscripts/scripts/test-shellcheck_agents.groovy
+++ b/buildscripts/scripts/test-shellcheck_agents.groovy
@@ -5,18 +5,19 @@
def main() {
def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside("--ulimit nofile=1024:1024 --init") {
- dir("${checkout_dir}") {
- test_jenkins_helper.execute_test([
- name: "test-shellcheck",
- // SHELLCHECK_OUTPUT_ARGS="-f gcc"
- cmd: "make -C tests test-shellcheck",
- output_file: "shellcheck.txt"
- ]);
+ inside_container(
+ init: true,
+ ulimit_nofile: 1024,
+ ) {
+ dir("${checkout_dir}") {
+ test_jenkins_helper.execute_test([
+ name: "test-shellcheck",
+ // SHELLCHECK_OUTPUT_ARGS="-f gcc"
+ cmd: "make -C tests test-shellcheck",
+ output_file: "shellcheck.txt",
+ ]);
- test_jenkins_helper.analyse_issues("SHELLCHECK", "shellcheck.txt");
- }
+ test_jenkins_helper.analyse_issues("SHELLCHECK", "shellcheck.txt");
}
}
}
diff --git a/buildscripts/scripts/test-typescript-types.groovy b/buildscripts/scripts/test-typescript-types.groovy
deleted file mode 100644
index f0f8df9b626..00000000000
--- a/buildscripts/scripts/test-typescript-types.groovy
+++ /dev/null
@@ -1,19 +0,0 @@
-#!groovy
-
-/// file: test-typescript-types.groovy
-
-def main() {
- def test_jenkins_helper = load("${checkout_dir}/buildscripts/scripts/utils/test_helper.groovy");
-
- dir("${checkout_dir}") {
- test_jenkins_helper.execute_test([
- name: "test-typescript-types",
- cmd: "make -C tests test-typescript-types-docker",
- output_file: "js-types.txt"
- ]);
-
- test_jenkins_helper.analyse_issues("TSJSTYPES", "js-types.txt");
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-unixcat.groovy b/buildscripts/scripts/test-unixcat.groovy
deleted file mode 100644
index 47ecb5ca7f1..00000000000
--- a/buildscripts/scripts/test-unixcat.groovy
+++ /dev/null
@@ -1,26 +0,0 @@
-#!groovy
-
-/// file: test-unixcat.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Compile & Test Unixcat') {
- sh("GCC_TOOLCHAIN=/opt/gcc-13.2.0 packages/unixcat/run --clean --all");
- }
- }
- stage("Analyse Issues") {
- publishIssues(
- issues: [scanForIssues( tool: gcc())],
- trendChartType: 'TOOLS_ONLY',
- qualityGates: [[
- threshold: 1,
- type: 'TOTAL',
- unstable: false,
- ]],
- );
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-unixcat.groovy b/buildscripts/scripts/test-unixcat.groovy
new file mode 120000
index 00000000000..c894e91ed2d
--- /dev/null
+++ b/buildscripts/scripts/test-unixcat.groovy
@@ -0,0 +1 @@
+test-package-unixcat.groovy
\ No newline at end of file
diff --git a/buildscripts/scripts/test-update.groovy b/buildscripts/scripts/test-update.groovy
index 1f50b73d02e..afeb35ac53b 100644
--- a/buildscripts/scripts/test-update.groovy
+++ b/buildscripts/scripts/test-update.groovy
@@ -7,9 +7,11 @@ def build_make_target(edition) {
def suffix = "-docker";
switch(edition) {
case 'enterprise':
- return prefix + "cee" + suffix
+ return prefix + "cee" + suffix;
case 'cloud':
- return prefix + "cce" + suffix
+ return prefix + "cce" + suffix;
+ case 'saas':
+ return prefix + "cse" + suffix;
default:
error("The update tests are not yet enabled for edition: " + edition);
}
@@ -20,23 +22,32 @@ def main() {
def testing_helper = load("${checkout_dir}/buildscripts/scripts/utils/integration.groovy");
def branch_version = versioning.get_branch_version(checkout_dir);
+ check_job_parameters([
+ ["OVERRIDE_DISTROS"],
+ ]);
+
check_environment_variables([
"DOCKER_TAG",
- "EDITION"
+ "EDITION",
]);
- def distros = versioning.configured_or_overridden_distros(EDITION, false);
+ def distros = versioning.get_distros(edition: EDITION, use_case: "daily_update_tests", override: OVERRIDE_DISTROS);
def make_target = build_make_target(EDITION);
- testing_helper.run_make_targets(
- DOCKER_GROUP_ID: get_docker_group_id(),
- DISTRO_LIST: distros,
- EDITION: EDITION,
- VERSION: "daily",
- DOCKER_TAG: "master-latest",
- MAKE_TARGET: make_target,
- BRANCH: "master",
- cmk_version: versioning.get_cmk_version("master", branch_version, "daily"),
- );
+ stage("Run `make ${make_target}`") {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
+ testing_helper.run_make_targets(
+ DOCKER_GROUP_ID: get_docker_group_id(),
+ DISTRO_LIST: distros,
+ EDITION: EDITION,
+ VERSION: "daily",
+ DOCKER_TAG: "master-latest",
+ MAKE_TARGET: make_target,
+ BRANCH: "master",
+ cmk_version: versioning.get_cmk_version("master", branch_version, "daily"),
+ );
+ }
+ }
}
+
return this;
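
With the new saas case, build_make_target covers three editions. Assuming the prefix defined just above this hunk is "test-update-", the mapping works out to:

    assert build_make_target("enterprise") == "test-update-cee-docker";
    assert build_make_target("cloud") == "test-update-cce-docker";
    assert build_make_target("saas") == "test-update-cse-docker";
    build_make_target("raw");  // error(): not yet enabled for this edition
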
diff --git a/buildscripts/scripts/test-werks-package.groovy b/buildscripts/scripts/test-werks-package.groovy
deleted file mode 100644
index 035cc08892e..00000000000
--- a/buildscripts/scripts/test-werks-package.groovy
+++ /dev/null
@@ -1,19 +0,0 @@
-#!groovy
-
-/// file: test-werks-package.groovy
-
-def main() {
- dir("${checkout_dir}") {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- stage('Test Package cmk-werks') {
- sh("packages/cmk-werks/run --clean --all");
- }
-
- stage('Validate .werks') {
- sh("scripts/run-pipenv run python -m cmk.werks.validate");
- }
- }
- }
-}
-
-return this;
diff --git a/buildscripts/scripts/test-xss-crawl.groovy b/buildscripts/scripts/test-xss-crawl.groovy
index f52b706fef1..df7c0246b9e 100644
--- a/buildscripts/scripts/test-xss-crawl.groovy
+++ b/buildscripts/scripts/test-xss-crawl.groovy
@@ -23,10 +23,10 @@ def main() {
def branch_version = versioning.get_branch_version(checkout_dir);
def cmk_version = versioning.get_cmk_version(safe_branch_name, branch_version, "daily");
def docker_tag = versioning.select_docker_tag(
- safe_branch_name, // 'branch'
- "", // 'build tag'
- "", // 'folder tag'
- )
+ safe_branch_name, // 'branch'
+ "", // 'build tag'
+ "", // 'folder tag'
+ );
def distro = params.DISTRO;
def edition = params.EDITION;
@@ -62,40 +62,40 @@ def main() {
// todo: build progress mins?
stage("Prepare workspace") {
- docker.withRegistry(DOCKER_REGISTRY, "nexus") {
- docker_image_from_alias("IMAGE_TESTING").inside(
- "--group-add=${get_docker_group_id()} \
- --ulimit nofile=1024:1024 \
- --env HOME=/home/jenkins \
- ${mount_reference_repo_dir} \
- -v /home/jenkins/.cmk-credentials:/home/jenkins/.cmk-credentials:ro \
- -v /var/run/docker.sock:/var/run/docker.sock") {
-
- dir("${checkout_dir}") {
-
- // Cleanup test results directory before starting the test to prevent previous
- // runs somehow affecting the current run.
- sh("rm -rf ${WORKSPACE}/test-results");
-
- // Initialize our virtual environment before parallelization
- sh("make .venv");
-
- stage("Fetch Checkmk package") {
- fetch_job_artifacts(
- relative_job_name: "builders/build-cmk-distro-package",
- params: [
- /// currently CUSTOM_GIT_REF must match, but in the future
- /// we should define dependency paths for build-cmk-distro-package
- CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
- EDITION: edition,
- DISTRO: distro,
- ],
- dest: "package_download",
- );
- }
- try {
- stage("Run `make ${make_target}`") {
- dir("${checkout_dir}/tests") {
+ inside_container(
+ args: [
+ "--env HOME=/home/jenkins",
+ ],
+ set_docker_group_id: true,
+ ulimit_nofile: 1024,
+ mount_credentials: true,
+ priviliged: true,
+ ) {
+ dir("${checkout_dir}") {
+ // Cleanup test results directory before starting the test to prevent previous
+ // runs somehow affecting the current run.
+ sh("rm -rf ${WORKSPACE}/test-results");
+
+ // Initialize our virtual environment before parallelization
+ sh("make .venv");
+
+ stage("Fetch Checkmk package") {
+ upstream_build(
+ relative_job_name: "builders/build-cmk-distro-package",
+ build_params: [
+ /// currently CUSTOM_GIT_REF must match, but in the future
+ /// we should define dependency paths for build-cmk-distro-package
+ CUSTOM_GIT_REF: cmd_output("git rev-parse HEAD"),
+ EDITION: edition,
+ DISTRO: distro,
+ ],
+ dest: "package_download",
+ );
+ }
+ try {
+ stage("Run `make ${make_target}`") {
+ dir("${checkout_dir}/tests") {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
sh("""
RESULT_PATH='${WORKSPACE}/test-results/${distro}' \
EDITION='${edition}' \
@@ -106,34 +106,34 @@ def main() {
""");
}
}
- } finally {
- stage("Archive / process test reports") {
- dir("${WORKSPACE}") {
- show_duration("archiveArtifacts") {
- archiveArtifacts("test-results/**");
- }
- xunit([Custom(
- customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "**/junit.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )]);
+ }
+ } finally {
+ stage("Archive / process test reports") {
+ dir("${WORKSPACE}") {
+ show_duration("archiveArtifacts") {
+ archiveArtifacts("test-results/**");
}
+ xunit([Custom(
+ customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/junit.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )]);
}
- stage('archive crawler report') {
- dir("${WORKSPACE}") {
- xunit([
- JUnit(
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "**/crawl.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )
- ])
- }
+ }
+ stage('archive crawler report') {
+ dir("${WORKSPACE}") {
+ xunit([
+ JUnit(
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/crawl.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )
+ ]);
}
}
}
diff --git a/buildscripts/scripts/trigger-cmk-build-chain.groovy b/buildscripts/scripts/trigger-cmk-build-chain.groovy
index 47f0ac5da9b..4ef595bbd09 100644
--- a/buildscripts/scripts/trigger-cmk-build-chain.groovy
+++ b/buildscripts/scripts/trigger-cmk-build-chain.groovy
@@ -8,6 +8,8 @@
/// Other artifacts: Those of child jobs
/// Depends on: Nothing
+import java.time.LocalDate
+
def main() {
/// make sure the listed parameters are set
@@ -26,6 +28,7 @@ def main() {
def edition = JOB_BASE_NAME.split("-")[-1];
def base_folder = "${currentBuild.fullProjectName.split('/')[0..-2].join('/')}/nightly-${edition}";
+ def use_case = LocalDate.now().getDayOfWeek().name() in ["SATURDAY", "SUNDAY"] ? "weekly" : "daily";
/// NOTE: this way ALL parameter are being passed through..
def job_parameters = [
@@ -44,11 +47,18 @@ def main() {
[$class: 'BooleanParameterValue', name: 'SET_BRANCH_LATEST_TAG', value: params.SET_BRANCH_LATEST_TAG],
[$class: 'BooleanParameterValue', name: 'PUSH_TO_REGISTRY', value: params.PUSH_TO_REGISTRY],
[$class: 'BooleanParameterValue', name: 'PUSH_TO_REGISTRY_ONLY', value: params.PUSH_TO_REGISTRY_ONLY],
+ [$class: 'BooleanParameterValue', name: 'BUILD_CLOUD_IMAGES', value: true],
+ [$class: 'StringParameterValue', name: 'CUSTOM_GIT_REF', value: params.CUSTOM_GIT_REF],
+ [$class: 'StringParameterValue', name: 'CIPARAM_OVERRIDE_BUILD_NODE', value: params.CIPARAM_OVERRIDE_BUILD_NODE],
+ [$class: 'StringParameterValue', name: 'CIPARAM_CLEANUP_WORKSPACE', value: params.CIPARAM_CLEANUP_WORKSPACE],
+ // PUBLISH_IN_MARKETPLACE will only be set during the release process (aka bw-release)
+ [$class: 'BooleanParameterValue', name: 'PUBLISH_IN_MARKETPLACE', value: false],
+ [$class: 'StringParameterValue', name: 'USE_CASE', value: use_case],
];
// TODO we should take this list from a single source of truth
assert edition in ["enterprise", "raw", "managed", "cloud", "saas"] : (
- "Do not know edition '${edition}' extracted from ${JOB_BASE_NAME}")
+ "Do not know edition '${edition}' extracted from ${JOB_BASE_NAME}");
def build_image = edition != "managed";
def build_cloud_images = edition == "cloud";
@@ -57,7 +67,7 @@ def main() {
def run_int_tests = true;
def run_comp_tests = !(edition in ["saas", "managed"]);
def run_image_tests = edition != "managed";
- def run_update_tests = (edition in ["enterprise", "cloud"]);
+ def run_update_tests = (edition in ["enterprise", "cloud", "saas"]);
print(
"""
@@ -70,6 +80,7 @@ def main() {
|run_int_tests:..........│${run_int_tests}│
|run_image_tests:....... │${run_image_tests}│
|run_update_tests:...... │${run_update_tests}│
+ |use_case:.............. │${use_case}│
|===================================================
""".stripMargin());
@@ -82,14 +93,14 @@ def main() {
success &= smart_stage(
name: "Build CMK IMAGE",
condition: build_image,
- raiseOnError: false) {
+ raiseOnError: false,) {
build(job: "${base_folder}/build-cmk-image", parameters: job_parameters);
}
success &= smart_stage(
name: "Build Cloud Images",
condition: build_cloud_images,
- raiseOnError: false) {
+ raiseOnError: false,) {
build(job: "${base_folder}/build-cmk-cloud-images", parameters: job_parameters);
}
@@ -98,36 +109,42 @@ def main() {
success &= smart_stage(
name: "Integration Test for Docker Container",
condition: run_image_tests,
- raiseOnError: false) {
+ raiseOnError: false,) {
build(job: "${base_folder}/test-integration-docker", parameters: job_parameters);
}
},
-
"Composition Test for Packages": {
success &= smart_stage(
name: "Composition Test for Packages",
condition: run_comp_tests,
- raiseOnError: false) {
+ raiseOnError: false,) {
build(job: "${base_folder}/test-composition", parameters: job_parameters);
}
- }
- ])
+ },
+ ]);
success &= smart_stage(
name: "Integration Test for Packages",
condition: run_int_tests,
- raiseOnError: false) {
+ raiseOnError: false,) {
build(job: "${base_folder}/test-integration-packages", parameters: job_parameters);
}
success &= smart_stage(
name: "Update Test",
condition: run_update_tests,
- raiseOnError: false) {
+ raiseOnError: false,) {
build(job: "${base_folder}/test-update", parameters: job_parameters);
}
- currentBuild.result = success ? "SUCCESS" : "FAILURE";
+ success &= smart_stage(
+ name: "Trigger Saas Gitlab jobs",
+ condition: success && edition == "saas",
+ raiseOnError: false,) {
+ build(job: "${base_folder}/trigger-saas-gitlab", parameters: job_parameters);
+ }
+ currentBuild.result = success ? "SUCCESS" : "FAILURE";
}
+
return this;
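
LocalDate.now().getDayOfWeek() returns a java.time.DayOfWeek enum constant, not a string, so the weekend check has to go through .name() (as above) or compare against the enum constants directly; matching the enum against bare strings would never succeed. Both spellings side by side:

    import java.time.DayOfWeek
    import java.time.LocalDate

    def today = LocalDate.now().getDayOfWeek();
    def use_case = today.name() in ["SATURDAY", "SUNDAY"] ? "weekly" : "daily";
    assert (today in [DayOfWeek.SATURDAY, DayOfWeek.SUNDAY]) == (use_case == "weekly");
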
diff --git a/buildscripts/scripts/trigger-packages.groovy b/buildscripts/scripts/trigger-packages.groovy
new file mode 100644
index 00000000000..9b75e69e87e
--- /dev/null
+++ b/buildscripts/scripts/trigger-packages.groovy
@@ -0,0 +1,71 @@
+#!groovy
+
+/// file: trigger-packages.groovy
+import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
+
+def build_stages(packages_file, force_build) {
+ def packages = load_json(packages_file);
+ def notify = load("${checkout_dir}/buildscripts/scripts/utils/notify.groovy");
+
+ inside_container() {
+ sh("make .venv")
+ parallel packages.collectEntries { p ->
+ [("${p.name}"): {
+ stage(p.name) {
+ catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
+ def job = upstream_build(
+ download: false,
+ relative_job_name: "builders/build-cmk-package",
+ force_build: force_build,
+ dependency_paths: [p.path] + p.dependencies,
+ build_params: [
+ "PACKAGE_PATH": p.path,
+ "SECRET_VARS": p.sec_vars.join(","),
+ "COMMAND_LINE": p.command_line,
+ ],
+ build_params_no_check: ["CUSTOM_GIT_REF": cmd_output("git rev-parse HEAD")],
+ )
+ if (!job.new_build.asBoolean()) {
+ Utils.markStageSkippedForConditional("${p.name}");
+ }
+ if (job.result != "SUCCESS") {
+ notify.notify_maintainer_of_package(p.maintainers, p.name, "${job.url}console");
+ throw new Exception("Job ${p.name} failed");
+ }
+ }
+ }
+ }
+ ]
+ }
+ }
+}
+
+def preparation(packages_file) {
+ stage("Preparation") {
+ inside_container() {
+ sh("rm -rf results; mkdir results")
+ sh("buildscripts/scripts/collect_packages.py packages > ${packages_file}");
+ }
+ }
+}
+
+def main() {
+ check_job_parameters([
+ "FORCE_BUILD",
+ ]);
+
+ dir("${checkout_dir}") {
+ def results_dir = "results";
+ def packages_file = "${results_dir}/packages_generated.json";
+
+ preparation(packages_file);
+
+ build_stages(packages_file, params.FORCE_BUILD);
+
+ show_duration("archiveArtifacts") {
+ archiveArtifacts(allowEmptyArchive: true, artifacts: 'results/*');
+ }
+ }
+}
+
+return this;
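
collect_packages.py is not part of this diff, but the fields read from each entry above (name, path, dependencies, sec_vars, command_line, maintainers) pin down the shape of packages_generated.json. One entry as a Groovy map, with illustrative values:

    def p = [
        name: "cmk-werks",                     // stage label and job identifier
        path: "packages/cmk-werks",            // PACKAGE_PATH and dependency root
        dependencies: [],                      // extra paths that trigger a rebuild
        sec_vars: [],                          // joined into SECRET_VARS
        command_line: "./run --clean --all",   // forwarded as COMMAND_LINE
        maintainers: ["someone@example.com"],  // notified when the build fails
    ];
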
diff --git a/buildscripts/scripts/trigger-saas-gitlab.groovy b/buildscripts/scripts/trigger-saas-gitlab.groovy
new file mode 100644
index 00000000000..ea75c85f9e0
--- /dev/null
+++ b/buildscripts/scripts/trigger-saas-gitlab.groovy
@@ -0,0 +1,49 @@
+#!groovy
+
+/// file: trigger-saas-gitlab.groovy
+
+/// Trigger job chains in the saas project on gitlab
+
+def main() {
+ check_job_parameters([
+ "VERSION",
+ ]);
+ check_environment_variables([
+ "GITLAB_URL",
+ ]);
+
+ def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
+
+ def safe_branch_name = versioning.safe_branch_name(scm);
+ def branch_version = versioning.get_branch_version(checkout_dir);
+ def cmk_version = versioning.get_cmk_version(safe_branch_name, branch_version, VERSION);
+
+ print(
+ """
+ |===== CONFIGURATION ===============================
+ |safe_branch_name:...... │${safe_branch_name}│
+ |cmk_version:........... │${cmk_version}
+ |===================================================
+ """.stripMargin());
+
+ print("Triggering GitLab job with Docker tag '${cmk_version}'");
+
+ // Jobs in saas use the version tag to read images and parse version information
+ withCredentials([
+ string(
+ credentialsId: "GITLAB_TRIGGER_TOKEN",
+ variable:"GITLAB_TRIGGER_TOKEN"),
+ ]) {
+ sh("""
+ curl -X POST \
+ --fail \
+ -F token=${GITLAB_TRIGGER_TOKEN} \
+ -F ref="main" \
+ -F variables[BUILD_CMK_TAG]="${cmk_version}" \
+ -F variables[BUILD_CSE]="true" \
+ ${GITLAB_URL}/api/v4/projects/3/trigger/pipeline
+ """);
+ }
+}
+
+return this;
diff --git a/buildscripts/scripts/update-architecture-documentation.groovy b/buildscripts/scripts/update-architecture-documentation.groovy
index 4df4fd07734..ad6ebb53545 100644
--- a/buildscripts/scripts/update-architecture-documentation.groovy
+++ b/buildscripts/scripts/update-architecture-documentation.groovy
@@ -5,16 +5,14 @@
def main() {
dir("${checkout_dir}") {
stage("Update") {
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside() {
- sh("make -C doc/documentation htmlhelp");
- }
+ inside_container() {
+ sh("make -C doc/documentation htmlhelp");
}
stage("Stash") {
stash(
name: "htmlhelp",
includes: "doc/documentation/_build/htmlhelp/**"
- )
+ );
}
}
diff --git a/buildscripts/scripts/utils/common.groovy b/buildscripts/scripts/utils/common.groovy
index 326ddd9c596..d6481e2068f 100644
--- a/buildscripts/scripts/utils/common.groovy
+++ b/buildscripts/scripts/utils/common.groovy
@@ -10,21 +10,19 @@ load_json = { json_file ->
(new groovy.json.JsonSlurperClassic()).parseText(cmd_stdout_result);
}
-
cleanup_directory = { directory ->
assert directory.startsWith(env.HOME);
sh("rm -rf '${directory}/'*");
sh("mkdir -p '${directory}'");
}
-
/// Run a block based on a global "dry run level"
/// Global level = "0" (or unset) means "run everything"
/// Global level "1" means "avoid dangerous side effects"
/// Global level "2" means "avoid dangerous side effects and long running stuff (like builds)"
LONG_RUNNING = 1;
GLOBAL_IMPACT = 2;
-on_dry_run_omit = {level, title, fn ->
+on_dry_run_omit = { level, title, fn ->
if (("${global_dry_run_level}" == "0" && level <= 2) ||
("${global_dry_run_level}" == "1" && level <= 1) ||
("${global_dry_run_level}" == "2" && level <= 0)) {
@@ -47,29 +45,29 @@ on_dry_run_omit = {level, title, fn ->
""".stripMargin());
}
-shout = {msg ->
+shout = { msg ->
sh("figlet -w 150 ${msg}");
}
-check_job_parameters = {param_list ->
+check_job_parameters = { param_list ->
print("""
||== REQUIRED JOB PARAMETERS ===============================================================
${param_list.collect({param_or_tuple ->
- def (param_name, must_be_nonempty) = (param_or_tuple instanceof java.util.ArrayList) ? param_or_tuple : [param_or_tuple, false];
- if (!params.containsKey(param_name)) {
- raise ("Expected job parameter ${param_name} not defined!");
- }
- def param_value = params[param_name];
- if (must_be_nonempty && (param_value instanceof java.lang.String) && !param_value) {
- raise ("Job parameter ${param_name} is expected to be nonempty!");
- }
- "|| ${param_name.padRight(32)} ${"(${param_value.getClass().name.tokenize('.').last()})".padRight(12)} = |${param_value}|"
- }).join("\n")}
+ def (param_name, must_be_nonempty) = (param_or_tuple instanceof java.util.ArrayList) ? param_or_tuple : [param_or_tuple, false];
+ if (!params.containsKey(param_name)) {
+ raise ("Expected job parameter ${param_name} not defined!");
+ }
+ def param_value = params[param_name];
+ if (must_be_nonempty && (param_value instanceof java.lang.String) && !param_value) {
+ raise ("Job parameter ${param_name} is expected to be nonempty!");
+ }
+ "|| ${param_name.padRight(32)} ${"(${param_value.getClass().name.tokenize('.').last()})".padRight(12)} = |${param_value}|"
+ }).join("\n")}
||==========================================================================================
""".stripMargin());
}
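+/// e.g. check_job_parameters(["VERSION", ["EDITION", true]]) - a plain name
+/// only has to be defined as a job parameter, a [name, true] tuple must also
+/// be nonempty (parameter names here are illustrative).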
-check_environment_variables = {param_list ->
+check_environment_variables = { param_list ->
println("""
||== USED ENVIRONMENT VARIABLES ============================================================
${param_list.collect({param ->
@@ -79,13 +77,13 @@ check_environment_variables = {param_list ->
""".stripMargin());
}
-assert_no_dirty_files = {repo_root ->
+assert_no_dirty_files = { repo_root ->
dir (repo_root) {
assert sh(script: "make -C tests/ test-find-dirty-files-in-git", returnStatus: true) == 0;
}
}
-provide_clone = {repo_name, credentials_id ->
+provide_clone = { repo_name, credentials_id ->
dir("${WORKSPACE}/${repo_name}") {
checkout([$class: "GitSCM",
userRemoteConfigs: [[
diff --git a/buildscripts/scripts/utils/docker_image_aliases_helper.groovy b/buildscripts/scripts/utils/docker_image_aliases_helper.groovy
index 6ce0ce17df7..94b470c9796 100644
--- a/buildscripts/scripts/utils/docker_image_aliases_helper.groovy
+++ b/buildscripts/scripts/utils/docker_image_aliases_helper.groovy
@@ -4,14 +4,87 @@
resolve_docker_image_alias = { alias_name ->
// same as docker.build("build-image:${env.BUILD_ID}",
- // "--pull ${WORKSPACE}/git/buildscripts/docker_image_aliases/IMAGE_TESTING")
+ // "--pull ${WORKSPACE}/git/buildscripts/docker_image_aliases/${alias_name}")
return cmd_output(
"${checkout_dir}/buildscripts/docker_image_aliases/resolve.py ${alias_name}"
).replaceAll("[\r\n]+", "");
}
-
docker_image_from_alias = { alias_name ->
return docker.image(resolve_docker_image_alias(alias_name));
}
+docker_reference_image = { ->
+ dir("${checkout_dir}") {
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
+ return docker.image(
+ cmd_output("VERBOSE=1 PULL_BASE_IMAGE=1 ${checkout_dir}/defines/dev-images/reference-image-id")
+ );
+ }
+ }
+}
+
+/// This function is the CI equivalent of scripts/run-in-docker.sh and should
+/// behave the same as closely as possible. So if you're editing one of us,
+/// please update the other one, too!
+inside_container = { Map arg1=[:], Closure arg2 ->
+ // Strangely, providing a default value for @args does not work as expected:
+ // if no value is provided, the body is taken as @args. In the _script
+ // console_, however, @arg1 will be [:] (the expected default) if no
+ // argument was provided. So we handle both cases here and set the default
+ // value for @args manually.
+ def (args, body) = arg2 == null ? [[:], arg1] : [arg1, arg2];
+
+ def container_shadow_workspace = "${WORKSPACE}/container_shadow_workspace_ci";
+ def reference_repo_dir = cmd_output("""
+ if [ -f ${checkout_dir}/.git/objects/info/alternates ]; then \
+ dirname \$(cat ${checkout_dir}/.git/objects/info/alternates);\
+ fi
+ """);
+
+ def image = args.image ?: docker_reference_image();
+ def privileged = args.get("priviliged", false).asBoolean();
+ def init = args.get("init", false).asBoolean();
+ def mount_reference_repo = args.get("mount_reference_repo", true).asBoolean();
+ def mount_credentials = args.get("mount_credentials", false).asBoolean();
+ def set_docker_group_id = args.get("set_docker_group_id", false).asBoolean();
+ def create_cache_folder = args.get("create_cache_folder", true).asBoolean();
+ def mount_host_user_files = args.get("mount_host_user_files", true).asBoolean();
+ def run_args = args.args == null ? [] : args.args;
+ def run_args_str = (
+ run_args
+ + (init ? ["--init"] : [])
+ + (set_docker_group_id ? ["--group-add=${get_docker_group_id()}"] : [])
+ + (args.ulimit_nofile ? ["--ulimit nofile=${args.ulimit_nofile}:${args.ulimit_nofile}"] : [])
+ + (privileged ? ["-v /var/run/docker.sock:/var/run/docker.sock"] : [])
+ + ["-v \"${container_shadow_workspace}/home:${env.HOME}\""]
+ + (mount_credentials ? ["-v ${env.HOME}/.cmk-credentials:${env.HOME}/.cmk-credentials"] : [])
+ + (mount_host_user_files ? ["-v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro"] : [])
+ + ((mount_reference_repo && reference_repo_dir) ? ["-v ${reference_repo_dir}:${reference_repo_dir}:ro"] : [])
+ + (create_cache_folder ? ["-v \"${container_shadow_workspace}/cache:${env.HOME}/.cache\""] : [])
+ ).join(" ");
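+ // run_args_str now holds one docker-run argument string, e.g. (values
+ // illustrative only): --init --group-add=999 -v "<workspace>/home:/home/jenkins"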
+ /// We have to make sure that both the source directory and (if applicable)
+ /// the target directory inside an already mounted parent directory
+ /// (here: /home/) exist, since otherwise they would be created with root
+ /// ownership by the poor Docker daemon.
+ sh("""
+ if [ -e "${container_shadow_workspace}/cache" ]; then
+ # Bazel creates files without write permission
+ chmod -R a+w ${container_shadow_workspace}/cache
+ fi
+ rm -rf ${container_shadow_workspace}
+ mkdir -p ${container_shadow_workspace}/home
+ mkdir -p ${container_shadow_workspace}/home/.cache
+ mkdir -p ${container_shadow_workspace}/cache
+ mkdir -p ${checkout_dir}/shared_cargo_folder
+ mkdir -p "${container_shadow_workspace}/home/\$(realpath -s --relative-to="${env.HOME}" "${checkout_dir}")"
+ mkdir -p "${container_shadow_workspace}/home/\$(realpath -s --relative-to="${env.HOME}" "${reference_repo_dir}")"
+ """);
+ println("inside_container(image=${image} docker_args: ${run_args_str})");
+ docker.withRegistry(DOCKER_REGISTRY, "nexus") {
+ image.inside(run_args_str) {
+ body();
+ }
+ }
+}
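+
+/// Usage sketch (the bodies are hypothetical; the named args mirror the keys
+/// read above, compare the call in utils/integration.groovy):
+///   inside_container() { sh("make .venv"); }
+///   inside_container(
+///       args: ["--ulimit nofile=1024:1024"],
+///       set_docker_group_id: true,
+///       mount_credentials: true,
+///   ) { sh("make test-integration"); }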
+
diff --git a/buildscripts/scripts/utils/gerrit_stages.groovy b/buildscripts/scripts/utils/gerrit_stages.groovy
index 861d1520fb7..dd3b571b2ec 100644
--- a/buildscripts/scripts/utils/gerrit_stages.groovy
+++ b/buildscripts/scripts/utils/gerrit_stages.groovy
@@ -5,7 +5,7 @@
import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
def log_stage_duration(last_stage_date) {
- def this_stage_date = new Date();
+ def this_stage_date = new Date(); // groovylint-disable NoJavaUtilDate
def duration = groovy.time.TimeCategory.minus(
this_stage_date,
last_stage_date,
@@ -37,8 +37,9 @@ def create_stage(Map args, time_stage_started) {
}
sh(script: "figlet -w 150 '${args.NAME}'", returnStatus: true);
- println("CMD: ${args.COMMAND}")
+ println("CMD: ${args.COMMAND}");
def cmd_status;
+
withCredentials(args.SEC_VAR_LIST.collect{string(credentialsId: it, variable: it)}) {
withEnv(args.ENV_VAR_LIST) {
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
@@ -49,7 +50,8 @@ def create_stage(Map args, time_stage_started) {
desc_add_status_row(
args.NAME,
duration, cmd_status==0 ? "success" : "failure",
- "${args.RESULT_CHECK_FILE_PATTERN}");
+ "${args.RESULT_CHECK_FILE_PATTERN}"
+ );
println("Check results: ${args.RESULT_CHECK_TYPE}");
if (args.RESULT_CHECK_TYPE) {
@@ -78,7 +80,7 @@ def desc_add_line(TEXT) {
}
def desc_add_table_head() {
- currentBuild.description += ""
+ currentBuild.description += "";
}
def desc_add_table_bottom() {
@@ -95,7 +97,7 @@ def desc_rm_table_bottom() {
}
def desc_add_row(ITEM_1, ITEM_2, ITEM_3, ITEM_4) {
- desc_rm_table_bottom()
+ desc_rm_table_bottom();
currentBuild.description += """
${ITEM_1} | ${ITEM_2} | ${ITEM_3} | ${ITEM_4} |
""";
@@ -105,7 +107,7 @@ def desc_add_row(ITEM_1, ITEM_2, ITEM_3, ITEM_4) {
def desc_add_status_row(STAGE, DURATION, status, PATTERN) {
desc_rm_table_bottom();
if (PATTERN != '' && PATTERN != '--') {
- PATTERN = "${PATTERN}"
+ PATTERN = "${PATTERN}";
}
currentBuild.description += """
${STAGE} |
diff --git a/buildscripts/scripts/utils/integration.groovy b/buildscripts/scripts/utils/integration.groovy
index 9f84820b6b9..36d4bbef6ed 100644
--- a/buildscripts/scripts/utils/integration.groovy
+++ b/buildscripts/scripts/utils/integration.groovy
@@ -18,112 +18,113 @@ def run_make_targets(Map args) {
||======================================================================
""".stripMargin());
- def DOCKER_BUILDS = [:]
- def download_dir = "downloaded_packages_for_integration_tests"
- // TODO: this should be done by the top level scripts
- docker.withRegistry(DOCKER_REGISTRY, 'nexus') {
- docker_image_from_alias("IMAGE_TESTING").inside(
- "--group-add=${args.DOCKER_GROUP_ID} \
- --ulimit nofile=1024:1024 \
- --env HOME=/home/jenkins \
- ${mount_reference_repo_dir} \
- -v /home/jenkins/.cmk-credentials:/home/jenkins/.cmk-credentials:ro \
- -v /var/run/docker.sock:/var/run/docker.sock") {
+ def DOCKER_BUILDS = [:];
+ def download_dir = "downloaded_packages_for_integration_tests";
- // TODO dir + set WORKSPACE is needed due to nested dependency
- dir("${checkout_dir}") {
- withEnv(["WORKSPACE=${WORKSPACE}"]) {
+ inside_container(
+ args: [
+ "--ulimit nofile=1024:1024",
+ "--env HOME=/home/jenkins",
+ ],
+ set_docker_group_id: true,
+ mount_credentials: true,
+ privileged: true,
+ ) {
+ // TODO dir + set WORKSPACE is needed due to nested dependency
+ dir("${checkout_dir}") {
+ withEnv(["WORKSPACE=${WORKSPACE}"]) {
+ // TODO or DO NOT REMOVE: this versioning load is needed in order for upload_artifacts to have
+ // versioning.groovy available.... holy moly
+ /* groovylint-disable-next-line UnusedVariable */
+ def versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
+ def artifacts_helper = load("${checkout_dir}/buildscripts/scripts/utils/upload_artifacts.groovy");
- // TODO or DO NOT REMOVE: this versioning load is needed in order for uplaod_artifacts to have
- // versioning.groovy available.... holy moly
- def versioning = load "${checkout_dir}/buildscripts/scripts/utils/versioning.groovy"
- def artifacts_helper = load "${checkout_dir}/buildscripts/scripts/utils/upload_artifacts.groovy"
-
- // TODO make independent from WORKSPACE
- sh("rm -rf \"${WORKSPACE}/${download_dir}\"")
- if (args.DISTRO_LIST == ["ubuntu-20.04"]) {
- artifacts_helper.download_deb(
- INTERNAL_DEPLOY_DEST,
- INTERNAL_DEPLOY_PORT,
- args.cmk_version,
- "${WORKSPACE}/${download_dir}/${args.cmk_version}",
- args.EDITION,
- "focal",
- );
- }
- else if (args.DISTRO_LIST == ["ubuntu-22.04"]) { // needed for saas nightly int tests
- artifacts_helper.download_deb(
- INTERNAL_DEPLOY_DEST,
- INTERNAL_DEPLOY_PORT,
- args.cmk_version,
- "${WORKSPACE}/${download_dir}/${args.cmk_version}",
- args.EDITION,
- "jammy",
- );
- }
- else if(args.DISTRO_LIST.size() == 1) {
- raise("Please add a case to download only the needed package for ${args.DISTRO_LIST}");
- }
- else {
- artifacts_helper.download_version_dir(
- INTERNAL_DEPLOY_DEST,
- INTERNAL_DEPLOY_PORT,
- args.cmk_version,
- "${WORKSPACE}/${download_dir}/${args.cmk_version}",
- );
- }
+ // TODO make independent from WORKSPACE
+ sh("rm -rf \"${WORKSPACE}/${download_dir}\"")
+ if (args.DISTRO_LIST == ["ubuntu-20.04"]) {
+ artifacts_helper.download_deb(
+ INTERNAL_DEPLOY_DEST,
+ INTERNAL_DEPLOY_PORT,
+ args.cmk_version,
+ "${WORKSPACE}/${download_dir}/${args.cmk_version}",
+ args.EDITION,
+ "focal",
+ );
+ }
+ else if (args.DISTRO_LIST == ["ubuntu-22.04"]) { // needed for saas nightly int tests
+ artifacts_helper.download_deb(
+ INTERNAL_DEPLOY_DEST,
+ INTERNAL_DEPLOY_PORT,
+ args.cmk_version,
+ "${WORKSPACE}/${download_dir}/${args.cmk_version}",
+ args.EDITION,
+ "jammy",
+ );
+ }
+ else if(args.DISTRO_LIST.size() == 1) {
+ raise("Please add a case to download only the needed package for ${args.DISTRO_LIST}");
+ }
+ else {
+ artifacts_helper.download_version_dir(
+ INTERNAL_DEPLOY_DEST,
+ INTERNAL_DEPLOY_PORT,
+ args.cmk_version,
+ "${WORKSPACE}/${download_dir}/${args.cmk_version}",
+ );
+ }
- // Cleanup test results directory before starting the test to prevent previous
- // runs somehow affecting the current run.
- sh("[ -d ${WORKSPACE}/test-results ] && rm -rf ${WORKSPACE}/test-results || true")
+ // Cleanup test results directory before starting the test to prevent previous
+ // runs somehow affecting the current run.
+ sh("[ -d ${WORKSPACE}/test-results ] && rm -rf ${WORKSPACE}/test-results || true");
- // Initialize our virtual environment before parallelization
- sh("make .venv")
+ // Initialize our virtual environment before parallelization
+ sh("make .venv");
- // Then execute the tests
+ // Then execute the tests
- // TODO: We still need here the VERSION/git semantic for the make targets:
- // * case VERSION="git" -> use daily build but patch it using f12
- // * case VERSION="2.2.0-2023.06.07" -> use daily build of date as-is
- try {
- args.DISTRO_LIST.each { DISTRO ->
- DOCKER_BUILDS[DISTRO] = {
- stage(DISTRO + ' test') {
- dir ('tests') {
- sh("""RESULT_PATH='${WORKSPACE}/test-results/${DISTRO}' \
- EDITION='${args.EDITION}' \
- DOCKER_TAG='${args.DOCKER_TAG}' \
- VERSION='${args.VERSION in ["git", "daily"] ? args.VERSION : args.cmk_version}' \
- DISTRO='$DISTRO' \
- BRANCH='${args.BRANCH}' \
- make ${args.MAKE_TARGET}""");
- }
+ // TODO: We still need the VERSION/git semantics for the make targets here:
+ // * case VERSION="2.2.0-2023.06.07" -> use daily build of date as-is
+ try {
+ /* groovylint-disable NestedBlockDepth */
+ args.DISTRO_LIST.each { DISTRO ->
+ DOCKER_BUILDS[DISTRO] = {
+ stage(DISTRO + ' test') {
+ dir ('tests') {
+ sh("""RESULT_PATH='${WORKSPACE}/test-results/${DISTRO}' \
+ EDITION='${args.EDITION}' \
+ DOCKER_TAG='${args.DOCKER_TAG}' \
+ VERSION='${args.VERSION == "daily" ? args.VERSION : args.cmk_version}' \
+ DISTRO='$DISTRO' \
+ BRANCH='${args.BRANCH}' \
+ make ${args.MAKE_TARGET}""");
}
}
}
- parallel DOCKER_BUILDS
- } finally {
- stage("Archive / process test reports") {
- dir(WORKSPACE) {
- show_duration("archiveArtifacts") {
- archiveArtifacts("test-results/**");
- }
- xunit([Custom(
- customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
- deleteOutputFiles: true,
- failIfNotNew: true,
- pattern: "**/junit.xml",
- skipNoTestFiles: false,
- stopProcessingIfError: true
- )])
+ }
+ /* groovylint-enable NestedBlockDepth */
+ parallel DOCKER_BUILDS;
+ } finally {
+ stage("Archive / process test reports") {
+ dir(WORKSPACE) {
+ show_duration("archiveArtifacts") {
+ archiveArtifacts("test-results/**");
}
+ xunit([Custom(
+ customXSL: "$JENKINS_HOME/userContent/xunit/JUnit/0.1/pytest-xunit.xsl",
+ deleteOutputFiles: true,
+ failIfNotNew: true,
+ pattern: "**/junit.xml",
+ skipNoTestFiles: false,
+ stopProcessingIfError: true
+ )]);
}
- /// remove downloaded packages since they consume dozens of GiB
- sh("""rm -rf "${WORKSPACE}/${download_dir}" """);
}
+ /// remove downloaded packages since they consume dozens of GiB
+ sh("""rm -rf "${WORKSPACE}/${download_dir}" """);
}
}
}
}
}
+
return this;
diff --git a/buildscripts/scripts/utils/notify.groovy b/buildscripts/scripts/utils/notify.groovy
index c64f2497fd6..89296bd679f 100644
--- a/buildscripts/scripts/utils/notify.groovy
+++ b/buildscripts/scripts/utils/notify.groovy
@@ -9,9 +9,9 @@ def get_author_email() {
// Bug: https://issues.jenkins-ci.org/browse/JENKINS-39838
return (
onWindows ?
- /// windows will replace %ae with ae..
- cmd_output('git log -1 --pretty=format:%%ae') :
- cmd_output('git log -1 --pretty=format:%ae'))
+ /// windows will replace %ae with ae..
+ cmd_output('git log -1 --pretty=format:%%ae') :
+ cmd_output('git log -1 --pretty=format:%ae'));
}
// Send a build failed message to jenkins
@@ -25,15 +25,33 @@ def slack_build_failed(error) {
|Error Message:
| ${error}
|""".stripMargin()),
- )
+ );
}
+def notify_maintainer_of_package(maintainers, package_name, build_url) {
+ try {
+ mail(
+ to: maintainers.join(","), // TODO: Add the committer
+ cc: maintainers.join(","),
+ bcc: "",
+ from: "\"CI\" <${JENKINS_MAIL}>",
+ replyTo: "${TEAM_CI_MAIL}",
+ subject: "[${package_name} failed]",
+ body: ("""
+ |The following package has failed - check the console log here:
+ | ${build_url}
+ |""".stripMargin()),
+ );
+ } catch (Exception exc) { // groovylint-disable CatchException
+ println("Could not sent mail to package owner - got ${exc}");
+ }
+}
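+
+/// e.g. notify_maintainer_of_package(["someone@checkmk.com"], "cmk-agent-ctl",
+/// "${job.url}console") - compare the call in trigger-packages.groovy; the
+/// address here is illustrative.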
def notify_error(error) {
// It seems the option "Allowed domains" is not working properly.
// See: https://ci.lan.tribe29.com/configure
// So ensure here we only notify internal addresses.
- def projectname = currentBuild.fullProjectName
+ def projectname = currentBuild.fullProjectName;
try {
def isChangeValidation = projectname.contains("cv");
def isTesting = projectname.contains("Testing");
@@ -58,7 +76,7 @@ def notify_error(error) {
"jonas.scharpf@checkmk.com",
];
currentBuild.changeSets.each { changeSet ->
- def culprits_emails = changeSet.items.collect {e -> e.authorEmail};
+ def culprits_emails = changeSet.items.collect { e -> e.authorEmail };
print(
"""
||==========================================================================================
@@ -77,26 +95,34 @@ def notify_error(error) {
});
/// Inform cloud devs if cloud burns
- if (projectname.contains("build-cmk-cloud-images")) {
- notify_emails += "max.linke@checkmk.com"
+ if (projectname.contains("build-cmk-cloud-images") || projectname.contains("saas")) {
+ notify_emails += "aws-saas-checkmk-dev@checkmk.com";
}
/// Inform nile devs if our extensions fail
if (projectname.contains("test-extension-compatibility")) {
- notify_emails.addAll(TEAM_NILE_MAIL.split(","))
+ notify_emails.addAll(TEAM_NILE_MAIL.split(","));
}
/// Inform werk workers if something's wrong with the werk jobs
if (projectname.startsWith("werks/")) {
- notify_emails += "benedikt.seidl@checkmk.com"
+ notify_emails += "benedikt.seidl@checkmk.com";
+ }
+
+ /// Inform QA if something's wrong with those jobs
+ if (projectname.contains("test-plugins") || projectname.contains("test-update")) {
+ notify_emails += "matteo.stifano@checkmk.com";
+ notify_emails += "rene.slowenski@checkmk.com";
}
/// fallback - for investigation
+ /* groovylint-disable DuplicateListLiteral */
notify_emails = notify_emails ?: [
"timotheus.bachinger@checkmk.com",
"frans.fuerst@checkmk.com",
"jonas.scharpf@checkmk.com",
];
+ /* groovylint-enable DuplicateListLiteral */
print("|| error-reporting: notify_emails ${notify_emails}");
@@ -118,9 +144,9 @@ def notify_error(error) {
|
|If you feel you got this mail by mistake, please reply and let's fix this together.
|""".stripMargin()),
- )
+ );
}
- } catch(Exception exc) {
+ } catch (Exception exc) { // groovylint-disable CatchException
print("Could not report error by mail - got ${exc}");
}
@@ -134,7 +160,8 @@ def notify_error(error) {
// teamDomain: , channel: build-notifications, color: danger,
// botUser: true, tokenCredentialId: , iconEmoji , username
//
- //ERROR: Slack notification failed with exception: java.lang.IllegalArgumentException: the token with the provided ID could not be found and no token was specified
+ //ERROR: Slack notification failed with exception:
+ //java.lang.IllegalArgumentException: the token with the provided ID could not be found and no token was specified
//
//slack_build_failed(error)
// after notifying everybody, the error needs to be thrown again
@@ -142,7 +169,9 @@ def notify_error(error) {
StackTraceUtils.sanitize(error);
print("ERROR: ${error.stackTrace.head()}: ${error}");
- currentBuild.description += "
The build failed due to an exception (at ${error.stackTrace.head()}):
${error}";
+ currentBuild.description += (
+ "
The build failed due to an exception (at ${error.stackTrace.head()}):" +
+ "
${error}");
throw error;
}
diff --git a/buildscripts/scripts/utils/str_mod.groovy b/buildscripts/scripts/utils/str_mod.groovy
deleted file mode 100644
index f15d7b37a31..00000000000
--- a/buildscripts/scripts/utils/str_mod.groovy
+++ /dev/null
@@ -1,15 +0,0 @@
-#!groovy
-
-/// file: str_mod.groovy
-
-// library for simple string modifications
-package lib
-
-// Strip the protocol from an URL
-// For example, the docker registry is sometimes needed with and without protocol
-def strip_protocol_from_url (URL) {
- def URL_STRIPPED = URL.split('://')[1]
- return URL_STRIPPED
-}
-
-return this
diff --git a/buildscripts/scripts/utils/test_helper.groovy b/buildscripts/scripts/utils/test_helper.groovy
index 64057e3914f..cd734909f21 100644
--- a/buildscripts/scripts/utils/test_helper.groovy
+++ b/buildscripts/scripts/utils/test_helper.groovy
@@ -9,7 +9,7 @@ def execute_test(Map config = [:]) {
def defaultDict = [
name: "",
cmd: "",
- output_file: ""
+ output_file: "",
] << config;
stage("Run ${defaultDict.name}") {
@@ -18,7 +18,7 @@ def execute_test(Map config = [:]) {
catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
def cmd = defaultDict.cmd;
if (defaultDict.output_file) {
- cmd += " 2>&1 | tee ${defaultDict.output_file}"
+ cmd += " 2>&1 | tee ${defaultDict.output_file}";
}
sh("""
set -o pipefail
@@ -31,6 +31,7 @@ def execute_test(Map config = [:]) {
// create issues parser
// in case 'as_stage' is set false, the parser list will be returned
// otherwise a publish issue stage is created
+/* groovylint-disable MethodSize, LineLength */
def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true) {
def issues = [];
def parserId = ''; // used for custom groovyScript parser
@@ -43,7 +44,7 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
name: 'Bazel Format', // Name shown on left side menu
regex: '(.*)\\s#\\s(reformat)$', // RegEx
mapping: 'return builder.setFileName(matcher.group(1)).setMessage(matcher.group(2)).buildOptional()', // Mapping script
- example: "omd/packages/freetds/freetds_http.bzl # reformat" // example log message
+ example: "omd/packages/freetds/freetds_http.bzl # reformat", // example log message
// | 1 | | 2 |
]);
issues.add(scanForIssues(
@@ -60,7 +61,7 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
name: 'Bazel Lint', // Name shown on left side menu
regex: '(.*):(\\d+):(\\d+):(.*)', // RegEx
mapping: 'return builder.setFileName(matcher.group(1)).setMessage(matcher.group(4)).setLineStart(Integer.parseInt(matcher.group(2))).setColumnStart(Integer.parseInt(matcher.group(3))).buildOptional()', // Mapping script
- example: "omd/packages/freetds/freetds_http.bzl:8:19: syntax error near build_file" // example log message
+ example: "omd/packages/freetds/freetds_http.bzl:8:19: syntax error near build_file", // example log message
// | 1 |2|3 | 4 |
]);
issues.add(scanForIssues(
@@ -77,14 +78,21 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
)
));
break;
- case "CSSFORMAT":
- parserId = 'css-format';
+ case "CODENARC":
+ issues.add(scanForIssues(
+ tool: codeNarc(
+ pattern: "${result_check_file_pattern}"
+ )
+ ));
+ break;
+ case "PRETTIER":
+ parserId = 'prettier';
update_custom_parser([
id: parserId, // ID
name: 'CSS Format', // Name shown on left side menu
- regex: '^\\[warn\\]\\s(.*\\.(?:sc|c)ss)$', // RegEx
+ regex: '^\\[warn\\]\\s(.*\\.(?:scss|css|ts|js))$', // RegEx
mapping: 'return builder.setFileName(matcher.group(1)).buildOptional()', // Mapping script
- example: "[warn] web/htdocs/themes/facelift/scss/_bi.scss" // example log message
+ example: "[warn] web/htdocs/themes/facelift/scss/_bi.scss", // example log message
// | | 1 |
]);
issues.add(scanForIssues(
@@ -108,6 +116,26 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
)
));
break;
+ case "GROOVY":
+ parserId = 'groovy-lint';
+ update_custom_parser([
+ id: parserId, // ID
+ name: 'Groovy Lint', // Name shown on left side menu
+ regex: '(.*\\.groovy$)\\n(\\s{2})(\\d+)(\\s+)(\\w+)(\\s{2,})(.*?)(?=\\s{2})(\\s{2})(\\w+)', // RegEx
+ mapping: 'return builder.setFileName(matcher.group(1)).setMessage(matcher.group(7)).setLineStart(Integer.parseInt(matcher.group(3))).setCategory(matcher.group(5)).setType(matcher.group(9)).buildOptional()', // Mapping script
+ example: """/home/jonasscharpf/git/check_mk/buildscripts/scripts/utils/upload_artifacts.groovy
+ 39 error The variable [versioning] in class None is not used UnusedVariable
+ 71 warning Map [credentialsId:Release_Key, variable:RELEASE_KEY] is duplicated. DuplicateMapLiteral""", // example log message
+ // | 1 |
+ // |2|3|4| 5 |6| 7 |8| 9 |
+ ]);
+ issues.add(scanForIssues(
+ tool: groovyScript(
+ parserId: parserId,
+ pattern: "${result_check_file_pattern}"
+ )
+ ));
+ break;
case "JUNIT":
issues.add(scanForIssues(
tool: junitParser(
@@ -138,7 +166,7 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
mapping: 'return builder.setFileName(matcher.group(2)).setCategory(matcher.group(8)).setMessage(matcher.group(9)).setLineStart(Integer.parseInt(matcher.group(4))).buildOptional()', // Mapping script
example: """In ./enterprise/skel/etc/init.d/dcd line 14:
. \"\$OMD_ROOT/.profile\"
- ^------------------^ SC1091 (info): Not following: ./.profile: openBinaryFile: does not exist (No such file or directory)""" // example log message
+ ^------------------^ SC1091 (info): Not following: ./.profile: openBinaryFile: does not exist (No such file or directory)""", // example log message
// |1 | 2 | 3 | 4 |
// | 5 |
// | 6 | 7 | 8 | 9 | 10 |
@@ -159,7 +187,7 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
mapping: 'return builder.setFileName(matcher.group(1)).setCategory(matcher.group(3)).setMessage(matcher.group(6)).buildOptional()', // Mapping script
example: """tests/unit-shell/agents/test_set_up_path.shshunit2:ERROR test_set_up_path_already_in_path() returned non-zero return code.
test_set_up_path_already_in_path
- ASSERT:expected: but was:""" // example log message
+ ASSERT:expected: but was:""", // example log message
// | 1 | 2 | 3 | 4 |
// | 5 |
// | 6 |
@@ -178,7 +206,7 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
name: 'TS/JS build', // Name shown on left side menu
regex: '(.*):\\s(.*):\\s(.*)\\s\\((\\d+):(\\d+)\\)', // RegEx
mapping: 'return builder.setFileName(matcher.group(2)).setCategory(matcher.group(1)).setMessage(matcher.group(3)).setLineStart(Integer.parseInt(matcher.group(4))).setColumnStart(Integer.parseInt(matcher.group(5))).buildOptional()', // Mapping script
- example: "SyntaxError: web/htdocs/js/modules/dashboard.ts: Missing semicolon. (65:30)" // example log message
+ example: "SyntaxError: web/htdocs/js/modules/dashboard.ts: Missing semicolon. (65:30)", // example log message
// | 1 | 2 | 3 | 4 |5 |
]);
issues.add(scanForIssues(
@@ -188,23 +216,6 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
)
));
break;
- case "TSJSFORMAT":
- parserId = 'js-format';
- update_custom_parser([
- id: parserId, // ID
- name: 'TS/JS Format', // Name shown on left side menu
- regex: '^\\[warn\\]\\s(.*\\.(?:j|t)s)$', // RegEx
- mapping: 'return builder.setFileName(matcher.group(2)).buildOptional()', // Mapping script
- example: "[warn] web/htdocs/js/modules/cbor_ext.js" // example log message
- // | 1 | 2 |
- ]);
- issues.add(scanForIssues(
- tool: groovyScript(
- parserId: parserId,
- pattern: "${result_check_file_pattern}"
- )
- ));
- break;
case "TSJSTYPES":
parserId = 'ts-types';
update_custom_parser([
@@ -212,7 +223,7 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
name: 'TS/JS types', // Name shown on left side menu
regex: '(.*\\..*(?:ts|js))\\((\\d+),(\\d+)\\):\\s(.*):\\s(.*)', // RegEx
mapping: 'return builder.setFileName(matcher.group(1)).setCategory(matcher.group(4)).setMessage(matcher.group(5)).setLineStart(Integer.parseInt(matcher.group(2))).setColumnStart(Integer.parseInt(matcher.group(3))).buildOptional()', // Mapping script
- example: "web/htdocs/js/modules/dashboard.js(65,37): error TS1005: ',' expected.s" // example log message
+ example: "web/htdocs/js/modules/dashboard.js(65,37): error TS1005: ',' expected.s", // example log message
// | 1 |2 | 3 | 4 | 5 |
]);
issues.add(scanForIssues(
@@ -234,6 +245,7 @@ def analyse_issues(result_check_type, result_check_file_pattern, as_stage=true)
return issues;
}
}
+/* groovylint-enable MethodSize, LineLength */
// publish issues stage based on given issue parser(s)
def analyse_issue_stages(issues) {
@@ -285,7 +297,7 @@ def update_custom_parser(Map config = [:]) {
}
else {
print("${defaultDict.id} undefined, adding parser");
- parser_config.setParsers(existing_parsers.plus(newParser));
+ parser_config.setParsers(existing_parsers.plus(newParser)); // groovylint-disable ExplicitCallToPlusMethod
}
}
diff --git a/buildscripts/scripts/utils/upload_artifacts.groovy b/buildscripts/scripts/utils/upload_artifacts.groovy
index 99091ea9e88..908861aa79c 100644
--- a/buildscripts/scripts/utils/upload_artifacts.groovy
+++ b/buildscripts/scripts/utils/upload_artifacts.groovy
@@ -5,22 +5,26 @@
// library for uploading packages
package lib
-hashfile_extension = ".hash"
-downloads_path = "/var/downloads/checkmk/"
+hashfile_extension = ".hash";
+downloads_path = "/var/downloads/checkmk/";
+tstbuilds_path = "/tstbuilds/";
versioning = load("${checkout_dir}/buildscripts/scripts/utils/versioning.groovy");
+/* groovylint-disable ParameterCount */
def download_deb(DOWNLOAD_SOURCE, PORT, CMK_VERSION, DOWNLOAD_DEST, EDITION, DISTRO) {
CMK_VERSION_RC_LESS = versioning.strip_rc_number_from_version(CMK_VERSION);
- def FILE_PATTERN = "check-mk-${EDITION}-${CMK_VERSION_RC_LESS}_0.${DISTRO}_amd64.deb"
- download_version_dir(DOWNLOAD_SOURCE, PORT, CMK_VERSION, DOWNLOAD_DEST, FILE_PATTERN, DISTRO)
+ def FILE_PATTERN = "check-mk-${EDITION}-${CMK_VERSION_RC_LESS}_0.${DISTRO}_amd64.deb";
+ download_version_dir(DOWNLOAD_SOURCE, PORT, CMK_VERSION, DOWNLOAD_DEST, FILE_PATTERN, DISTRO);
}
+/* groovylint-enable ParameterCount */
def download_source_tar(DOWNLOAD_SOURCE, PORT, CMK_VERSION, DOWNLOAD_DEST, EDITION) {
CMK_VERSION_RC_LESS = versioning.strip_rc_number_from_version(CMK_VERSION);
- def FILE_PATTERN = "check-mk-${EDITION}-${CMK_VERSION_RC_LESS}.*.tar.gz"
- download_version_dir(DOWNLOAD_SOURCE, PORT, CMK_VERSION, DOWNLOAD_DEST, FILE_PATTERN, 'source tar')
+ def FILE_PATTERN = "check-mk-${EDITION}-${CMK_VERSION_RC_LESS}.*.tar.gz";
+ download_version_dir(DOWNLOAD_SOURCE, PORT, CMK_VERSION, DOWNLOAD_DEST, FILE_PATTERN, 'source tar');
}
+/* groovylint-disable ParameterCount */
def download_version_dir(DOWNLOAD_SOURCE,
PORT,
CMK_VERSION,
@@ -42,17 +46,18 @@ def download_version_dir(DOWNLOAD_SOURCE,
""".stripMargin());
stage("Download from shared storage (${INFO})") {
withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) {
- sh("mkdir -p ${DOWNLOAD_DEST}")
- sh """
+ sh("mkdir -p ${DOWNLOAD_DEST}");
+ sh("""
rsync --recursive --links --perms --times --verbose \
--exclude=${EXCLUDE_PATTERN} \
-e "ssh -o StrictHostKeyChecking=no -i ${RELEASE_KEY} -p ${PORT}" \
${DOWNLOAD_SOURCE}/${CMK_VERSION}/${PATTERN} \
${DOWNLOAD_DEST}/
- """
+ """);
}
}
}
+/* groovylint-enable ParameterCount */
def upload_version_dir(SOURCE_PATH, UPLOAD_DEST, PORT, EXCLUDE_PATTERN="") {
println("""
@@ -64,14 +69,14 @@ def upload_version_dir(SOURCE_PATH, UPLOAD_DEST, PORT, EXCLUDE_PATTERN="") {
||==========================================================================================
""".stripMargin());
stage('Upload to download server') {
- withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) {
- sh """
+ withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) { // groovylint-disable DuplicateMapLiteral
+ sh("""
rsync -av \
-e "ssh -o StrictHostKeyChecking=no -i ${RELEASE_KEY} -p ${PORT}" \
--exclude=${EXCLUDE_PATTERN} \
${SOURCE_PATH} \
${UPLOAD_DEST}
- """
+ """);
}
}
}
@@ -87,8 +92,8 @@ def upload_via_rsync(archive_base, cmk_version, filename, upload_dest, upload_po
||======================================================================
""".stripMargin());
- create_hash(archive_base + "/" + cmk_version + "/" + filename)
- withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) {
+ create_hash(archive_base + "/" + cmk_version + "/" + filename);
+ withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) { // groovylint-disable DuplicateMapLiteral
sh("""
rsync -av --relative \
--exclude '*dbgsym*.deb' \
@@ -102,19 +107,27 @@ def upload_via_rsync(archive_base, cmk_version, filename, upload_dest, upload_po
}
def create_hash(FILE_PATH) {
- stage("Create file hash") {
+ sh("""
+ cd \$(dirname ${FILE_PATH});
+ sha256sum -- \$(basename ${FILE_PATH}) > "\$(basename ${FILE_PATH})${hashfile_extension}";
+ """);
+}
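+
+/// Sketch of the resulting layout (the path is illustrative):
+/// create_hash("${archive_base}/${cmk_version}/foo.deb") writes "foo.deb.hash"
+/// next to the file; it can be verified with "cd <dir> && sha256sum -c foo.deb.hash".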
+
+def execute_cmd_on_archive_server(cmd) {
+ withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) { // groovylint-disable DuplicateMapLiteral
sh("""
- cd \$(dirname ${FILE_PATH});
- sha256sum -- \$(basename ${FILE_PATH}) > "\$(basename ${FILE_PATH})${hashfile_extension}";
+ ssh -o StrictHostKeyChecking=no -i ${RELEASE_KEY} -p ${WEB_DEPLOY_PORT} ${WEB_DEPLOY_URL} "${cmd}"
""");
}
}
-def execute_cmd_on_archive_server(cmd) {
- withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) {
- sh """
- ssh -o StrictHostKeyChecking=no -i ${RELEASE_KEY} -p ${WEB_DEPLOY_PORT} ${WEB_DEPLOY_URL} "${cmd}"
- """
+def execute_cmd_on_tst_server(cmd) {
+ // INTERNAL_DEPLOY_DEST configured as "deploy@tstbuilds-artifacts.lan.tribe29.com:/tstbuilds/"
+ def internal_deploy_server = INTERNAL_DEPLOY_DEST.split(":")[0];
+ withCredentials([file(credentialsId: 'Release_Key', variable: 'RELEASE_KEY')]) { // groovylint-disable DuplicateMapLiteral
+ sh("""
+ ssh -o StrictHostKeyChecking=no -i ${RELEASE_KEY} -p ${INTERNAL_DEPLOY_PORT} ${internal_deploy_server} "${cmd}"
+ """);
}
}
@@ -122,21 +135,23 @@ def deploy_to_website(CMK_VERS) {
stage("Deploy to Website") {
// CMK_VERS can contain a rc information like v2.1.0p6-rc1.
// On the website, we only want to have official releases.
- def TARGET_VERSION = versioning.strip_rc_number_from_version(CMK_VERS)
- def SYMLINK_PATH = "/smb-share-customer/checkmk/" + TARGET_VERSION
+ def TARGET_VERSION = versioning.strip_rc_number_from_version(CMK_VERS);
+ def SYMLINK_PATH = "/smb-share-customer/checkmk/" + TARGET_VERSION;
// We also do not want to keep rc versions on the archive.
// So rename the folder in case we have a rc
if (TARGET_VERSION != CMK_VERS) {
- execute_cmd_on_archive_server("mv ${downloads_path}${CMK_VERS} ${downloads_path}${TARGET_VERSION};")
+ execute_cmd_on_archive_server("mv ${downloads_path}${CMK_VERS} ${downloads_path}${TARGET_VERSION};");
}
execute_cmd_on_archive_server("ln -sf --no-dereference ${downloads_path}${TARGET_VERSION} ${SYMLINK_PATH};");
}
}
def cleanup_rc_candidates_of_version(CMK_VERS) {
- def TARGET_VERSION = versioning.strip_rc_number_from_version(CMK_VERS)
- execute_cmd_on_archive_server("rm -rf ${downloads_path}${TARGET_VERSION}-rc*;")
+ def TARGET_VERSION = versioning.strip_rc_number_from_version(CMK_VERS);
+ execute_cmd_on_archive_server("rm -rf ${downloads_path}${TARGET_VERSION}-rc*;");
+ // cleaning up the tst server here would come too early, as "build-cmk-container" needs the rc candidates available;
+ // that cleanup is and will be done by bw-release
}
-return this
+return this;
diff --git a/buildscripts/scripts/utils/versioning.groovy b/buildscripts/scripts/utils/versioning.groovy
index 70d6fd5917e..d094d292eef 100644
--- a/buildscripts/scripts/utils/versioning.groovy
+++ b/buildscripts/scripts/utils/versioning.groovy
@@ -5,6 +5,7 @@
// library for calculation of version numbers
import groovy.transform.Field
+/* groovylint-disable DuplicateListLiteral */
@Field
def REPO_PATCH_RULES = [\
"raw": [\
@@ -21,9 +22,9 @@ def REPO_PATCH_RULES = [\
"saas", \
"cse", \
"cse.py", \
- "web/htdocs/themes/{facelift,modern-dark}/scss/{cme,cee,cce}"],\
+ "packages/cmk-frontend/src/themes/{facelift,modern-dark}/scss/{cme,cee,cce}"],\
"folders_to_be_created": [\
- "web/htdocs/themes/{facelift,modern-dark}/scss/{cme,cee,cce}"]], \
+ "packages/cmk-frontend/src/themes/{facelift,modern-dark}/scss/{cme,cee,cce}"]], \
"enterprise": [\
"paths_to_be_removed": [\
"managed", \
@@ -35,9 +36,9 @@ def REPO_PATCH_RULES = [\
"saas", \
"cse", \
"cse.py", \
- "web/htdocs/themes/{facelift,modern-dark}/scss/{cme,cce}"], \
+ "packages/cmk-frontend/src/themes/{facelift,modern-dark}/scss/{cme,cce}"], \
"folders_to_be_created": [\
- "web/htdocs/themes/{facelift,modern-dark}/scss/{cme,cce}"]], \
+ "packages/cmk-frontend/src/themes/{facelift,modern-dark}/scss/{cme,cce}"]], \
"managed": [\
"paths_to_be_removed": [\
"saas", \
@@ -52,18 +53,19 @@ def REPO_PATCH_RULES = [\
"saas", \
"cse", \
"cse.py", \
- "web/htdocs/themes/{facelift,modern-dark}/scss/cme"], \
+ "packages/cmk-frontend/src/themes/{facelift,modern-dark}/scss/cme"], \
"folders_to_be_created": [\
- "web/htdocs/themes/{facelift,modern-dark}/scss/cme"]], \
+ "packages/cmk-frontend/src/themes/{facelift,modern-dark}/scss/cme"]], \
"saas": [\
"paths_to_be_removed": [\
"managed", \
"cme", \
"cme.py", \
- "web/htdocs/themes/{facelift,modern-dark}/scss/cme"], \
+ "packages/cmk-frontend/src/themes/{facelift,modern-dark}/scss/cme"], \
"folders_to_be_created": [\
- "web/htdocs/themes/{facelift,modern-dark}/scss/cme"]], \
-]
+ "packages/cmk-frontend/src/themes/{facelift,modern-dark}/scss/cme"]], \
+];
+/* groovylint-enable DuplicateListLiteral */
def branch_name(scm) {
return env.GERRIT_BRANCH ?: scm.branches[0].name;
@@ -73,6 +75,7 @@ def safe_branch_name(scm) {
return branch_name(scm).replaceAll("/", "-");
}
+/* groovylint-disable DuplicateListLiteral */
def get_cmk_version(branch_name, branch_version, version) {
return (
// Experimental builds
@@ -82,37 +85,47 @@ def get_cmk_version(branch_name, branch_version, version) {
// else
"${version}");
}
+/* groovylint-enable DuplicateListLiteral */
-def configured_or_overridden_distros(edition, distro_list, use_case="daily") {
- if(distro_list) {
- return distro_list.trim().replaceAll(',', ' ').split(' ');
+def get_distros(Map args) {
+ def override_distros = args.override.trim() ?: "";
+
+ /// retrieve all available distros if provided distro-list is 'all',
+ /// respect provided arguments otherwise
+ def edition = override_distros == "all" ? "all" : args.edition.trim() ?: "all";
+ def use_case = override_distros == "all" ? "all" : args.use_case.trim() ?: "daily";
+
+ /// return requested list if provided
+ if(override_distros && override_distros != "all") {
+ return override_distros.replaceAll(',', ' ').split(' ').grep();
}
- docker_image_from_alias("IMAGE_TESTING").inside() {
+
+ /// read distros from edition.yml otherwise.
+ inside_container() {
dir("${checkout_dir}") {
- return sh(script: """scripts/run-pipenv run \
+ return cmd_output("""scripts/run-pipenv run \
buildscripts/scripts/get_distros.py \
--editions_file "${checkout_dir}/editions.yml" \
use_cases \
--edition "${edition}" \
--use_case "${use_case}"
- """, returnStdout: true).trim().split();
+ """).split().grep();
}
}
}
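+
+/// e.g. (values illustrative): get_distros(override: "", edition: "enterprise",
+/// use_case: "daily") reads the distros from editions.yml, while
+/// get_distros(override: "ubuntu-20.04,ubuntu-22.04", edition: "", use_case: "")
+/// just returns the given list.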
-def get_internal_distros_pattern() {
- docker_image_from_alias("IMAGE_TESTING").inside() {
+def get_internal_artifacts_pattern() {
+ inside_container() {
dir("${checkout_dir}") {
return sh(script: """scripts/run-pipenv run \
buildscripts/scripts/get_distros.py \
--editions_file "editions.yml" \
- internal_distros \
+ internal_build_artifacts \
--as-codename \
--as-rsync-exclude-pattern;
""", returnStdout: true).trim();
}
}
-
}
def get_branch_version(String git_dir=".") {
@@ -142,7 +155,7 @@ def get_docker_tag(scm, String git_dir=".") {
}
def get_docker_artifact_name(edition, cmk_version) {
- return "check-mk-${edition}-docker-${cmk_version}.tar.gz"
+ return "check-mk-${edition}-docker-${cmk_version}.tar.gz";
}
def select_docker_tag(BRANCH, BUILD_TAG, FOLDER_TAG) {
@@ -170,22 +183,11 @@ def patch_themes(EDITION) {
// Workaround since scss does not support conditional includes
THEME_LIST.each { THEME ->
sh """
- echo '@mixin graphs_cee {}' > web/htdocs/themes/${THEME}/scss/cee/_graphs_cee.scss
- echo '@mixin reporting {}' > web/htdocs/themes/${THEME}/scss/cee/_reporting.scss
- echo '@mixin ntop {}' > web/htdocs/themes/${THEME}/scss/cee/_ntop.scss
- echo '@mixin license_usage {}' > web/htdocs/themes/${THEME}/scss/cee/_license_usage.scss
- echo '@mixin managed {}' > web/htdocs/themes/${THEME}/scss/cme/_managed.scss
- """
- }
- break
- case 'cloud':
- case 'saas':
- case 'enterprise':
- case 'free':
- // Workaround since scss does not support conditional includes
- THEME_LIST.each { THEME ->
- sh """
- echo '@mixin managed {}' > web/htdocs/themes/${THEME}/scss/cme/_managed.scss
+ echo '@mixin graphs_cee {\n}' > packages/cmk-frontend/src/themes/${THEME}/scss/cee/_graphs_cee.scss
+ echo '@mixin reporting {\n}' > packages/cmk-frontend/src/themes/${THEME}/scss/cee/_reporting.scss
+ echo '@mixin ntop {\n}' > packages/cmk-frontend/src/themes/${THEME}/scss/cee/_ntop.scss
+ echo '@mixin license_usage {\n}' > packages/cmk-frontend/src/themes/${THEME}/scss/cee/_license_usage.scss
+ echo '@mixin robotmk {\n}' > packages/cmk-frontend/src/themes/${THEME}/scss/cee/_robotmk.scss
"""
}
break
@@ -194,7 +196,7 @@ def patch_themes(EDITION) {
def patch_demo(EDITION) {
if (EDITION == 'free') {
- sh '''sed -ri 's/^(FREE[[:space:]]*:?= *).*/\\1'"yes/" defines.make'''
+ sh('''sed -ri 's/^(FREE[[:space:]]*:?= *).*/\\1'"yes/" defines.make''');
}
}
@@ -203,7 +205,7 @@ def set_version(cmk_version) {
}
def configure_checkout_folder(edition, cmk_version) {
- assert edition in REPO_PATCH_RULES: "edition=${edition} not known"
+ assert edition in REPO_PATCH_RULES: "edition=${edition} not known";
patch_folders(edition);
patch_themes(edition);
patch_demo(edition);
@@ -229,13 +231,15 @@ def delete_non_cre_files() {
]
find_pattern = non_cre_paths.collect({p -> "-name ${p}"}).join(" -or ")
// Do not remove files in .git, .venv, .mypy_cache directories
- sh """bash -c \"find . \\
+ sh("""
+ bash -c \"find . \\
-not \\( -path ./.\\* -prune \\) \\
- \\( ${find_pattern} \\) -prune -print -exec rm -r {} \\;\""""
+ \\( ${find_pattern} \\) -prune -print -exec rm -r {} \\;\"
+ """);
}
def strip_rc_number_from_version(VERSION) {
- return VERSION.split("-rc")[0]
+ return VERSION.split("-rc")[0];
}
-return this
+return this;
diff --git a/buildscripts/scripts/utils/windows.groovy b/buildscripts/scripts/utils/windows.groovy
index 72d35a9749b..166efbc3716 100644
--- a/buildscripts/scripts/utils/windows.groovy
+++ b/buildscripts/scripts/utils/windows.groovy
@@ -3,7 +3,7 @@
/// file: windows.groovy
def build(Map args) {
- def jenkins_base_folder = new File(currentBuild.fullProjectName).parent;
+ def jenkins_base_folder = new File(currentBuild.fullProjectName).parent; // groovylint-disable JavaIoPackageAccess
def artifacts_dir = 'artefacts';
print("jenkins_base_folder: ${jenkins_base_folder}");
@@ -30,37 +30,63 @@ def build(Map args) {
def (subdir, command, artifacts) = (
(args.TARGET == "cached") ? [
- "agents/modules/windows",
- "call build_the_module.cmd cached ${args.CREDS} ${args.CACHE_URL}",
+ "agents/modules/windows",
+ "call build_the_module.cmd cached ${args.CREDS} ${args.CACHE_URL}",
"python-3.cab"] :
(args.TARGET == "agent_with_sign") ? [
"agents/wnx",
- "call run.cmd --all --sign tribe29.pfx ${args.PASSWORD}",
- "cmk-agent-ctl.exe,check_mk_agent-64.exe,check_mk_agent.exe,check_mk_agent.msi,check_mk_agent_unsigned.msi,check_mk.user.yml,check_mk.yml,watest32.exe,watest64.exe,unit_tests_results.zip,OpenHardwareMonitorLib.dll,OpenHardwareMonitorCLI.exe,robotmk_ext.exe,check-sql.exe,windows_files_hashes.txt"] :
+ // The deprecated_unused_param arguments have to be present or the script will fail.
+ "call run.cmd --all --sign deprecated_unused_param1 deprecated_unused_param2",
+ [
+ "cmk-agent-ctl.exe",
+ "check_mk_agent-64.exe",
+ "check_mk_agent.exe",
+ "check_mk_agent.msi",
+ "check_mk_agent_unsigned.msi",
+ "check_mk.user.yml",
+ "check_mk.yml",
+ "watest32.exe",
+ "watest64.exe",
+ "unit_tests_results.zip",
+ "OpenHardwareMonitorLib.dll",
+ "OpenHardwareMonitorCLI.exe",
+ "robotmk_ext.exe",
+ "mk-sql.exe",
+ "windows_files_hashes.txt",
+ ].join(",")] :
(args.TARGET == "agent_no_sign") ? [
- "agents/wnx",
- "call run.cmd --all",
- "cmk-agent-ctl.exe,check_mk_agent-64.exe,check_mk_agent.exe,check_mk_agent.msi,check_mk.user.yml,check_mk.yml,watest32.exe,watest64.exe"] :
+ "agents/wnx",
+ "call run.cmd --all",
+ [
+ "cmk-agent-ctl.exe",
+ "check_mk_agent-64.exe",
+ "check_mk_agent.exe",
+ "check_mk_agent.msi",
+ "check_mk.user.yml",
+ "check_mk.yml",
+ "watest32.exe",
+ "watest64.exe",
+ ].join(",")] :
(args.TARGET == "cmk_agent_ctl_no_sign") ? [
"packages/cmk-agent-ctl",
"call run.cmd --all",
- ""] :
- (args.TARGET == "check_sql_no_sign") ? [
- "packages/check-sql",
+ ""] :
+ (args.TARGET == "mk_sql_no_sign") ? [
+ "packages/mk-sql",
"call run.cmd --all",
- "check-sql.exe"] :
+ "mk-sql.exe"] :
(args.TARGET == "test_unit") ? [
- "agents/wnx",
- "call run.cmd --test",
- "unit_tests_results.zip"] :
+ "agents/wnx",
+ "call run.cmd --test",
+ "unit_tests_results.zip"] :
(args.TARGET == "test_integration") ? [
"agents/wnx",
"call run_tests.cmd --component --integration",
- "integration_tests_results.zip"] :
+ "integration_tests_results.zip"] :
raise("${args.TARGET} is not known!")
)
- timeout(time: 30, unit: 'MINUTES') {
+ timeout(time: 60, unit: 'MINUTES') {
dir(subdir) {
bat(command);
}
@@ -79,11 +105,11 @@ def build(Map args) {
stash(
name: args.STASH_NAME,
includes: artifacts
- )
+ );
}
}
}
}
}
-return this
+return this;
diff --git a/buildscripts/scripts/validate_changes.py b/buildscripts/scripts/validate_changes.py
index c4bd585d2c1..dcb6fa0357e 100755
--- a/buildscripts/scripts/validate_changes.py
+++ b/buildscripts/scripts/validate_changes.py
@@ -197,7 +197,14 @@ def finalize_stage(stage: StageInfo, env_vars: Vars, no_skip: bool) -> StageInfo
def run_shell_command(cmd: str, replace_newlines: bool) -> str:
"""Run a command and return preprocessed stdout"""
- stdout_str = subprocess.check_output(["sh", "-c", cmd], text=True).strip()
+ with subprocess.Popen(
+ ["sh"],
+ text=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ ) as proc_sh:
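+ # The command is handed to the shell on stdin (where it is read as a
+ # script) instead of being passed via "sh -c".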
+ stdout_str, _ = proc_sh.communicate(cmd)
+ stdout_str = stdout_str.strip()
return stdout_str.replace("\n", " ") if replace_newlines else stdout_str
diff --git a/buildscripts/scripts/winagt-build-check-sql.groovy b/buildscripts/scripts/winagt-build-check-sql.groovy
index 8fd5cbe5721..36b56fefce4 100644
--- a/buildscripts/scripts/winagt-build-check-sql.groovy
+++ b/buildscripts/scripts/winagt-build-check-sql.groovy
@@ -14,7 +14,7 @@ def main() {
dir("${checkout_dir}") {
stage("make setversion") {
- bat("make -C agents\\wnx NEW_VERSION='${cmk_version}' setversion")
+ bat("make -C agents\\wnx NEW_VERSION='${cmk_version}' setversion");
}
withCredentials([string(
@@ -22,8 +22,8 @@ def main() {
variable:"CI_TEST_SQL_DB_ENDPOINT"
)]) {
windows.build(
- TARGET: 'check_sql_no_sign',
- )
+ TARGET: 'mk_sql_no_sign',
+ );
}
}
}
diff --git a/buildscripts/scripts/winagt-build-cmk-agent-ctl.groovy b/buildscripts/scripts/winagt-build-cmk-agent-ctl.groovy
index 07c271af5d8..1efddd3f26d 100644
--- a/buildscripts/scripts/winagt-build-cmk-agent-ctl.groovy
+++ b/buildscripts/scripts/winagt-build-cmk-agent-ctl.groovy
@@ -14,12 +14,12 @@ def main() {
dir("${checkout_dir}") {
stage("make setversion") {
- bat("make -C agents\\wnx NEW_VERSION='${cmk_version}' setversion")
+ bat("make -C agents\\wnx NEW_VERSION='${cmk_version}' setversion");
}
windows.build(
TARGET: 'cmk_agent_ctl_no_sign',
- )
+ );
}
}
diff --git a/buildscripts/scripts/winagt-build-modules.groovy b/buildscripts/scripts/winagt-build-modules.groovy
index 06f79ce889b..fb2f22b2a83 100644
--- a/buildscripts/scripts/winagt-build-modules.groovy
+++ b/buildscripts/scripts/winagt-build-modules.groovy
@@ -15,12 +15,13 @@ def main() {
def branch_name = versioning.safe_branch_name(scm);
def branch_version = versioning.get_branch_version(checkout_dir);
def cmk_version_rc_aware = versioning.get_cmk_version(branch_name, branch_version, VERSION);
- def cmk_version = versioning.strip_rc_number_from_version(cmk_version_rc_aware)
+ def cmk_version = versioning.strip_rc_number_from_version(cmk_version_rc_aware);
dir("${checkout_dir}") {
setCustomBuildProperty(
key: "path_hashes",
- value: scm_directory_hashes(scm.extensions));
+ value: scm_directory_hashes(scm.extensions)
+ );
stage("make setversion") {
bat("make -C agents\\wnx NEW_VERSION='${cmk_version}' setversion");
@@ -35,7 +36,7 @@ def main() {
TARGET: 'cached',
CREDS: NEXUS_USERNAME+':'+NEXUS_PASSWORD,
CACHE_URL: 'https://artifacts.lan.tribe29.com/repository/omd-build-cache/'
- )
+ );
}
}
}
diff --git a/buildscripts/scripts/winagt-build.groovy b/buildscripts/scripts/winagt-build.groovy
index 7906d582af0..d69aefd68f6 100644
--- a/buildscripts/scripts/winagt-build.groovy
+++ b/buildscripts/scripts/winagt-build.groovy
@@ -10,13 +10,14 @@ def main() {
def branch_name = versioning.safe_branch_name(scm);
def branch_version = versioning.get_branch_version(checkout_dir);
- def cmk_vers_rc_aware = versioning.get_cmk_version(branch_name, branch_version, VERSION)
- def cmk_version = versioning.strip_rc_number_from_version(cmk_vers_rc_aware)
+ def cmk_vers_rc_aware = versioning.get_cmk_version(branch_name, branch_version, VERSION);
+ def cmk_version = versioning.strip_rc_number_from_version(cmk_vers_rc_aware);
dir("${checkout_dir}") {
setCustomBuildProperty(
key: "path_hashes",
- value: scm_directory_hashes(scm.extensions));
+ value: scm_directory_hashes(scm.extensions)
+ );
stage("make setversion") {
bat("make -C agents\\wnx NEW_VERSION='${cmk_version}' setversion")
@@ -29,17 +30,17 @@ def main() {
usernameVariable: ''),
string(
credentialsId: "CI_TEST_SQL_DB_ENDPOINT",
- variable:"CI_TEST_SQL_DB_ENDPOINT"
- )]) {
+ variable:"CI_TEST_SQL_DB_ENDPOINT"),
+ ]) {
windows.build(
TARGET: 'agent_with_sign',
PASSWORD: WIN_SIGN_PASSWORD,
- )
+ );
}
stage("detach") {
dir("agents\\wnx"){
- bat "run.cmd --detach"
+ bat("run.cmd --detach");
}
}
diff --git a/buildscripts/scripts/winagt-test-build.groovy b/buildscripts/scripts/winagt-test-build.groovy
index 53eedcee611..ecec7056c75 100644
--- a/buildscripts/scripts/winagt-test-build.groovy
+++ b/buildscripts/scripts/winagt-test-build.groovy
@@ -20,7 +20,7 @@ def main() {
)]) {
windows.build(
TARGET: 'agent_no_sign'
- )
+ );
}
}
}
diff --git a/buildscripts/scripts/winagt-test-integration.groovy b/buildscripts/scripts/winagt-test-integration.groovy
index 76f231ab225..56fdebed4bb 100644
--- a/buildscripts/scripts/winagt-test-integration.groovy
+++ b/buildscripts/scripts/winagt-test-integration.groovy
@@ -9,7 +9,7 @@ def main() {
dir("${checkout_dir}") {
windows.build(
TARGET: 'test_integration'
- )
+ );
}
}
}
diff --git a/buildscripts/scripts/winagt-test-unit.groovy b/buildscripts/scripts/winagt-test-unit.groovy
index c5b52abe82d..cdfa762b0a0 100644
--- a/buildscripts/scripts/winagt-test-unit.groovy
+++ b/buildscripts/scripts/winagt-test-unit.groovy
@@ -8,7 +8,7 @@ def main() {
dir("${checkout_dir}") {
windows.build(
TARGET: 'test_unit'
- )
+ );
}
}
diff --git a/cmk/.f12 b/cmk/.f12
index 489c707dfbe..c74b579af63 100755
--- a/cmk/.f12
+++ b/cmk/.f12
@@ -13,7 +13,7 @@
# Needed by {managed,cloud,saas}/cmk/.f12 to set the correct edition after .f12
if [ -z "$ORIG_EDITION" ]; then
- ORIG_EDITION=$(sudo su - "$SITE" -c "python3 -c 'import cmk.utils.version; print(cmk.utils.version.edition().short)'")
+ ORIG_EDITION=$("$ROOT/bin/python3" -c 'import cmk.utils.version; print(cmk.utils.version.edition().short)')
export ORIG_EDITION
fi
@@ -43,22 +43,28 @@
TARGET="${ROOT}/share/check_mk/checks/"
# NOTE: The -a flag is equivalent to -rlptgoD, but we do *not* want -t, as it
# would screw up our include caching mechanism!
-sudo rsync -rlpgoD --exclude="__pycache__" --delete-after "base/legacy_checks/" "${TARGET}"
-sudo find "${TARGET}" -type f | sed -e 'p;s/.py$//' | xargs -n2 mv
+rsync \
+ -rlD \
+ --exclude="__pycache__" \
+ --exclude=".mypy_cache" \
+ --delete-after \
+ "base/legacy_checks/" \
+ "${TARGET}"
+find "${TARGET}" -iname "*.py" -type f | sed -e 'p;s/.py$//' | xargs -n2 mv
echo "Copy files..."
-sudo rsync \
+rsync \
--exclude="*.pyc" \
--exclude="flycheck*" \
--exclude="base/legacy_checks" \
--exclude="__pycache__" \
+ --exclude ".mypy_cache" \
--delete-after \
- -aR \
+ -rlDR \
"${PYTHON3_PATHS[@]}" \
"$ROOT"/lib/python3/cmk/
# Transform required packages to namespace package
-sudo rm -f \
+rm -f \
"$ROOT/lib/python3/cmk/__init__.py" \
"$ROOT/lib/python3/cmk/active_checks/__init__.py" \
"$ROOT/lib/python3/cmk/plugins/collection/server_side_calls/__init__.py" \
@@ -94,7 +100,6 @@
"$ROOT/lib/python3/cmk/update_config/__init__.py" \
"$ROOT/lib/python3/cmk/update_config/plugins/__init__.py" \
"$ROOT/lib/python3/cmk/update_config/plugins/actions/__init__.py" \
- \
"$ROOT/lib/python3/cmk/cee/__init__.py" \
"$ROOT/lib/python3/cmk/cee/dcd/__init__.py" \
"$ROOT/lib/python3/cmk/cee/dcd/plugins/__init__.py" \
@@ -115,7 +120,6 @@
"$ROOT/lib/python3/cmk/gui/cee/plugins/__init__.py" \
"$ROOT/lib/python3/cmk/gui/cee/plugins/sla/__init__.py" \
"$ROOT/lib/python3/cmk/gui/cee/plugins/reporting/__init__.py" \
- \
"$ROOT/lib/python3/cmk/gui/cce/__init__.py" \
"$ROOT/lib/python3/cmk/gui/cce/plugins/__init__.py" \
"$ROOT/lib/python3/cmk/gui/cce/plugins/wato/__init__.py" \
@@ -124,18 +128,18 @@
"$ROOT/lib/python3/cmk/base/cce/plugins/agent_based/__init__.py"
# Restore site version after .f12
-sudo sed -i "s|^__version__.*$|__version__ = \"$CMK_VERSION\"|g" "$ROOT"/lib/python3/cmk/utils/version.py
+sed -i "s|^__version__.*$|__version__ = \"$CMK_VERSION\"|g" "$ROOT"/lib/python3/cmk/utils/version.py
# Make the target a CME version in case the version is already a CME version
# (The dev is working on CME files)
if [ "$ORIG_EDITION" = "cme" ]; then
echo "Make the target version a CME version..."
- sudo sed -i "/^def edition() -> Edition:/{n;n;s/.*/ return Edition.CME/}" "$ROOT/lib/python3/cmk/utils/version.py"
+ sed -i "/^def edition() -> Edition:/{n;n;s/.*/ return Edition.CME/}" "$ROOT/lib/python3/cmk/utils/version.py"
fi
if [ -z "$ONLY_COPY" ]; then
(
cd "$ROOT"/lib/python3/cmk
- sudo "$ROOT"/bin/python3 -m compileall -qq "${PYTHON3_PATHS[@]}"
+ "$ROOT"/bin/python3 -m compileall -qq "${PYTHON3_PATHS[@]}"
)
fi
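
Aside: the `sed -e 'p;s/.py$//' | xargs -n2 mv` pipeline above is a pairwise-rename idiom — sed prints each path twice, once verbatim and once with the `.py` suffix stripped, and `xargs -n2` feeds the pairs to `mv`. A rough Python sketch of the same effect (the site path is hypothetical):

```python
# Sketch: install legacy check files without their ".py" suffix, as the
# find | sed 'p;s/.py$//' | xargs -n2 mv pipeline above does.
from pathlib import Path

def strip_py_suffix(target: Path) -> None:
    for path in target.rglob("*.py"):  # find "${TARGET}" -iname "*.py" -type f
        path.rename(path.with_suffix(""))  # e.g. df.py -> df

strip_py_suffix(Path("/omd/sites/heute/share/check_mk/checks"))  # hypothetical site
```
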
diff --git a/cmk/active_checks/check_disk_smb.py b/cmk/active_checks/check_disk_smb.py
index 1642d1c7110..3e24fe5fd54 100755
--- a/cmk/active_checks/check_disk_smb.py
+++ b/cmk/active_checks/check_disk_smb.py
@@ -42,8 +42,7 @@ def __call__(
port: int | None = None,
ip_address: str | None = None,
configfile: str | None = None,
- ) -> ErrorResult | SMBShare:
- ...
+ ) -> ErrorResult | SMBShare: ...
def _output_check_result(
@@ -57,7 +56,7 @@ def _output_check_result(
def parse_arguments(argv: Sequence[str]) -> argparse.Namespace:
parser = argparse.ArgumentParser(
prog="check_disk_smb",
- description="""Check SMB Disk plugin for monitoring""",
+ description="""Check SMB Disk plug-in for monitoring""",
)
parser.add_argument(
diff --git a/cmk/active_checks/check_sftp.py b/cmk/active_checks/check_sftp.py
new file mode 100755
index 00000000000..0d8cf455066
--- /dev/null
+++ b/cmk/active_checks/check_sftp.py
@@ -0,0 +1,334 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+import getopt
+import os
+import sys
+import time
+from typing import NamedTuple, NoReturn, TypedDict
+
+import paramiko
+
+from cmk.utils.password_store import replace_passwords
+
+
+def usage() -> NoReturn:
+ sys.stderr.write(
+ """
+USAGE: check_sftp [OPTIONS] HOST
+
+OPTIONS:
+ --host HOST SFTP server address
+ --user USER Username for SFTP login
+ --secret SECRET Secret/password for SFTP login
+ --port PORT Alternative port number (default: 22)
+ --get-remote FILE Path to the file to pull from the SFTP server (e.g.
+ /tmp/testfile.txt)
+ --get-local PATH Path to store the pulled file locally (e.g. $OMD_ROOT/tmp/)
+ --put-local FILE Path to the file to push to the SFTP server. See above for example
+ --put-remote PATH Path to save the pushed file (e.g. /tmp/)
+ --get-timestamp PATH Path of the remote file whose timestamp is checked
+ --timeout SECONDS Set timeout for connection (default is 10 seconds)
+ --verbose Output some more detailed information
+ --look-for-keys Search for discoverable keys in the user's "~/.ssh" directory
+ -h, --help Show this help message and exit
+ """
+ )
+ sys.exit(1)
+
+
+def connection(
+ opt_host: str | None,
+ opt_user: str | None,
+ opt_pass: str | None,
+ opt_port: int,
+ opt_timeout: float,
+ opt_look_for_keys: bool,
+) -> paramiko.sftp_client.SFTPClient:
+
+ # The typing says that the connect method requires a hostname. Previously we passed None
+ # to it if the argument was not set; paramiko did not check for that but passed it on to
+ # socket.getaddrinfo, which assumes localhost.
+ # I suggest we just be explicit about the default value here.
+ if opt_host is None:
+ opt_host = "localhost"
+
+ client = paramiko.SSHClient()
+ client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # nosec B507
+ client.connect(
+ opt_host,
+ username=opt_user,
+ password=opt_pass,
+ port=opt_port,
+ timeout=opt_timeout,
+ look_for_keys=opt_look_for_keys,
+ )
+ return client.open_sftp()
+
+
+class PathDict(TypedDict, total=False):
+ put_filename: str
+ get_filename: str
+ local_get_path: str
+ local_put_path: str
+ remote_get_path: str
+ remote_put_path: str
+ timestamp_filename: str
+ timestamp_path: str
+
+
+def get_paths(
+ opt_put_local: str | None,
+ opt_get_local: str | None,
+ opt_put_remote: str | None,
+ opt_get_remote: str | None,
+ opt_timestamp: str | None,
+ omd_root: str | None,
+ working_dir: str,
+) -> PathDict:
+ paths: PathDict = {}
+ if opt_put_local:
+ put_filename = opt_put_local.split("/")[-1]
+ paths["put_filename"] = put_filename
+ paths["local_put_path"] = f"{omd_root}/{opt_put_local}"
+ if opt_put_remote:
+ paths["remote_put_path"] = f"{working_dir}/{opt_put_remote}/{put_filename}"
+ else:
+ paths["remote_put_path"] = f"{working_dir}/{put_filename}"
+
+ if opt_get_remote:
+ get_filename = opt_get_remote.split("/")[-1]
+ paths["get_filename"] = get_filename
+ paths["remote_get_path"] = f"{working_dir}/{opt_get_remote}"
+ if opt_get_local:
+ paths["local_get_path"] = f"{omd_root}/{opt_get_local}/{get_filename}"
+ else:
+ paths["local_get_path"] = f"{omd_root}/{get_filename}"
+
+ if opt_timestamp:
+ paths["timestamp_filename"] = opt_timestamp.split("/")[-1]
+ paths["timestamp_path"] = f"{working_dir}/{opt_timestamp}"
+
+ return paths
+
+
+def file_available(
+ opt_put_local: str,
+ opt_put_remote: str | None,
+ sftp: paramiko.sftp_client.SFTPClient,
+ working_dir: str,
+) -> bool:
+ filename = opt_put_local.split("/")[-1]
+ # mirror get_paths(): without --put-remote the file lands in the working directory
+ target_dir = f"{working_dir}/{opt_put_remote}" if opt_put_remote else working_dir
+ return filename in sftp.listdir(target_dir)
+
+
+def create_testfile(path: str) -> None:
+ if os.path.isfile(path):
+ return
+ with open(path, "w") as f:
+ f.write("This is a test by Check_MK\n")
+
+
+def put_file(sftp: paramiko.sftp_client.SFTPClient, source: str, destination: str) -> None:
+ sftp.put(source, destination)
+
+
+def get_file(sftp: paramiko.sftp_client.SFTPClient, source: str, destination: str) -> None:
+ sftp.get(source, destination)
+
+
+def get_timestamp(sftp: paramiko.sftp_client.SFTPClient, path: str) -> int | None:
+ return sftp.stat(path).st_mtime
+
+
+def output_check_result(s: str) -> None:
+ sys.stdout.write("%s\n" % s)
+
+
+class Args(NamedTuple):
+ host: None | str
+ user: None | str
+ pass_: None | str
+ port: int
+ get_remote: None | str
+ get_local: None | str
+ put_local: None | str
+ put_remote: None | str
+ timestamp: None | str
+ timeout: float
+ verbose: bool
+ look_for_keys: bool
+
+
+def parse_arguments(sys_args: None | list[str]) -> Args: # pylint: disable=too-many-branches
+ if sys_args is None:
+ sys_args = sys.argv[1:]
+
+ opt_host = None
+ opt_user = None
+ opt_pass = None
+ opt_port = 22
+ opt_get_remote = None
+ opt_get_local = None
+ opt_put_local = None
+ opt_put_remote = None
+ opt_timestamp = None
+ opt_timeout = 10.0
+ opt_verbose = False
+ opt_look_for_keys = False
+
+ short_options = "hv"
+ long_options = [
+ "host=",
+ "user=",
+ "secret=",
+ "port=",
+ "get-remote=",
+ "get-local=",
+ "put-local=",
+ "put-remote=",
+ "get-timestamp=",
+ "verbose",
+ "help",
+ "timeout=",
+ "look-for-keys",
+ ]
+
+ try:
+ opts, _args = getopt.getopt(sys_args, short_options, long_options)
+ except getopt.GetoptError as err:
+ sys.stderr.write("%s\n" % err)
+ sys.exit(1)
+
+ for opt, arg in opts:
+ if opt in ["-h", "--help"]:
+ usage()
+ elif opt in ["--host"]:
+ opt_host = arg
+ elif opt in ["--user"]:
+ opt_user = arg
+ elif opt in ["--secret"]:
+ opt_pass = arg
+ elif opt in ["--port"]:
+ opt_port = int(arg)
+ elif opt in ["--timeout"]:
+ opt_timeout = float(arg)
+ elif opt in ["--put-local"]:
+ opt_put_local = arg
+ elif opt in ["--put-remote"]:
+ opt_put_remote = arg
+ elif opt in ["--get-local"]:
+ opt_get_local = arg
+ elif opt in ["--get-remote"]:
+ opt_get_remote = arg
+ elif opt in ["--get-timestamp"]:
+ opt_timestamp = arg
+ elif opt in ["--look-for-keys"]:
+ opt_look_for_keys = True
+ elif opt in ["-v", "--verbose"]:
+ opt_verbose = True
+
+ return Args(
+ opt_host,
+ opt_user,
+ opt_pass,
+ opt_port,
+ opt_get_remote,
+ opt_get_local,
+ opt_put_local,
+ opt_put_remote,
+ opt_timestamp,
+ opt_timeout,
+ opt_verbose,
+ opt_look_for_keys,
+ )
+
+
+def run_check( # pylint: disable=too-many-branches
+ sys_args: None | list[str] = None,
+) -> tuple[int, str]:
+ args = parse_arguments(sys_args)
+
+ messages = []
+ overall_state = 0
+ try: # Establish connection
+ sftp = connection(
+ args.host, args.user, args.pass_, args.port, args.timeout, args.look_for_keys
+ )
+ messages.append("Login successful")
+ except Exception:
+ if args.verbose:
+ raise
+ return 2, "Connection failed!"
+
+ # Let's prepare for some other tests...
+ omd_root = os.getenv("OMD_ROOT")
+ if omd_root is None:
+ sys.stderr.write("This check must be executed from within a site\n")
+ sys.exit(1)
+
+ sftp.chdir(".")
+ working_dir = sftp.getcwd()
+ assert working_dir is not None # help mypy -- we just set it above, see getcwd() docs
+
+ paths = get_paths(
+ args.put_local,
+ args.get_local,
+ args.put_remote,
+ args.get_remote,
+ args.timestamp,
+ omd_root,
+ working_dir,
+ )
+
+ # .. and eventually execute them!
+ if args.put_local is not None:
+ try: # Put a file to the server
+ create_testfile(paths["local_put_path"])
+ testfile_already_present = file_available(
+ args.put_local, args.put_remote, sftp, working_dir
+ )
+
+ put_file(sftp, paths["local_put_path"], paths["remote_put_path"])
+ if not testfile_already_present:
+ sftp.remove(paths["remote_put_path"])
+
+ messages.append("Successfully put file to SFTP server")
+ except Exception:
+ if args.verbose:
+ raise
+ overall_state = max(overall_state, 2)
+ messages.append("Could not put file to SFTP server! (!!)")
+
+ if args.get_remote is not None:
+ try: # Get a file from the server
+ get_file(sftp, paths["remote_get_path"], paths["local_get_path"])
+ messages.append("Successfully got file from SFTP server")
+ except Exception:
+ if args.verbose:
+ raise
+ overall_state = max(overall_state, 2)
+ messages.append("Could not get file from SFTP server! (!!)")
+
+ if args.timestamp is not None:
+ try: # Get timestamp of a remote file
+ timestamp = get_timestamp(sftp, paths["timestamp_path"])
+ messages.append(
+ "Timestamp of {} is: {}".format(paths["timestamp_filename"], time.ctime(timestamp))
+ )
+ except Exception:
+ if args.verbose:
+ raise
+ overall_state = max(overall_state, 2)
+ messages.append("Could not get timestamp of file! (!!)")
+
+ return overall_state, ", ".join(messages)
+
+
+def main() -> int:
+ replace_passwords()
+ exitcode, info = run_check()
+ output_check_result(info)
+ return exitcode
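
To see how `get_paths` above composes the effective paths, here is a small usage sketch (assuming the module is importable as `cmk.active_checks.check_sftp`; the site root and remote working directory are made up):

```python
from cmk.active_checks.check_sftp import get_paths

paths = get_paths(
    opt_put_local="tmp/testfile.txt",
    opt_get_local=None,
    opt_put_remote="upload",
    opt_get_remote=None,
    opt_timestamp=None,
    omd_root="/omd/sites/heute",   # hypothetical $OMD_ROOT
    working_dir="/home/sftpuser",  # what sftp.getcwd() returned
)
assert paths["local_put_path"] == "/omd/sites/heute/tmp/testfile.txt"
assert paths["remote_put_path"] == "/home/sftpuser/upload/testfile.txt"
```
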
diff --git a/cmk/active_checks/check_traceroute.py b/cmk/active_checks/check_traceroute.py
index ea0627f916d..2735b20456b 100644
--- a/cmk/active_checks/check_traceroute.py
+++ b/cmk/active_checks/check_traceroute.py
@@ -78,8 +78,7 @@ def __call__(
use_dns: bool,
probe_method: ProbeMethod,
ip_address_family: IPAddressFamily,
- ) -> Route:
- ...
+ ) -> Route: ...
class ProbeMethod(enum.Enum):
@@ -237,11 +236,11 @@ def _check_route(
]
return (
- 2
- if any(missing_routers_crit + found_routers_crit)
- else 1
- if any(missing_routers_warn + found_routers_warn)
- else 0,
+ (
+ 2
+ if any(missing_routers_crit + found_routers_crit)
+ else 1 if any(missing_routers_warn + found_routers_warn) else 0
+ ),
f"%d hop{'' if route.n_hops == 1 else 's'}, missing routers: %s, bad routers: %s\n%s"
% (
route.n_hops,
diff --git a/cmk/automations/results.py b/cmk/automations/results.py
index 1369966ae28..3f0d3715dba 100644
--- a/cmk/automations/results.py
+++ b/cmk/automations/results.py
@@ -9,22 +9,19 @@
from ast import literal_eval
from collections.abc import Mapping, Sequence
from dataclasses import asdict, astuple, dataclass
-from typing import Any, TypeAlias, TypeVar
-
-from typing_extensions import TypedDict
+from typing import Any, TypedDict, TypeVar
from cmk.utils import version as cmk_version
from cmk.utils.agentdatatype import AgentRawData
from cmk.utils.check_utils import ParametersTypeAlias
from cmk.utils.config_warnings import ConfigurationWarnings
from cmk.utils.hostaddress import HostAddress, HostName
-from cmk.utils.labels import HostLabel, HostLabelValueDict, Labels
+from cmk.utils.labels import HostLabel, HostLabelValueDict, Labels, LabelSources
from cmk.utils.notify_types import NotifyAnalysisInfo, NotifyBulks
from cmk.utils.plugin_registry import Registry
-from cmk.utils.rulesets.ruleset_matcher import LabelSources, RulesetName
+from cmk.utils.rulesets.ruleset_matcher import RulesetName
from cmk.utils.servicename import Item, ServiceName
-from cmk.checkengine.checking import CheckPluginNameStr
from cmk.checkengine.discovery import CheckPreviewEntry
from cmk.checkengine.discovery import DiscoveryResult as SingleHostDiscoveryResult
from cmk.checkengine.legacy import LegacyCheckParameters
@@ -32,9 +29,6 @@
from cmk.checkengine.submitters import ServiceDetails, ServiceState
DiscoveredHostLabelsDict = dict[str, HostLabelValueDict]
-Gateway: TypeAlias = tuple[
- tuple[HostName | None, HostAddress, HostName | None] | None, str, int, str
-]
class ResultTypeRegistry(Registry[type["ABCAutomationResult"]]):
@@ -45,8 +39,7 @@ def plugin_name(self, instance: type[ABCAutomationResult]) -> str:
result_type_registry = ResultTypeRegistry()
-class SerializedResult(str):
- ...
+class SerializedResult(str): ...
_DeserializedType = TypeVar("_DeserializedType", bound="ABCAutomationResult")
@@ -69,8 +62,7 @@ def deserialize(
@staticmethod
@abstractmethod
- def automation_call() -> str:
- ...
+ def automation_call() -> str: ...
def _default_serialize(self) -> SerializedResult:
return SerializedResult(repr(astuple(self)))
@@ -126,26 +118,6 @@ class ServiceDiscoveryPreviewResult(ABCAutomationResult):
source_results: Mapping[str, tuple[int, str]]
def serialize(self, for_cmk_version: cmk_version.Version) -> SerializedResult:
- if for_cmk_version < cmk_version.Version.from_str(
- "2.1.0p27"
- ): # no source results, no labels by host
- return SerializedResult(repr(astuple(self)[:6]))
-
- if for_cmk_version < cmk_version.Version.from_str(
- "2.2.0b1"
- ): # labels by host, but no source results
- return self._serialize_as_dict()
-
- if for_cmk_version < cmk_version.Version.from_str(
- "2.2.0b2"
- ): # no source results, no labels by host
- return SerializedResult(repr(astuple(self)[:6]))
-
- if for_cmk_version < cmk_version.Version.from_str(
- "2.2.0b6"
- ): # source_results, no labels by host
- return SerializedResult(repr(astuple(self)[:6] + (self.source_results,)))
-
return self._serialize_as_dict()
def _serialize_as_dict(self) -> SerializedResult:
@@ -240,7 +212,7 @@ def automation_call() -> str:
SetAutochecksTable = dict[
- tuple[str, Item], tuple[ServiceName, LegacyCheckParameters, Labels, list[HostName]]
+ tuple[str, Item], tuple[ServiceName, Mapping[str, object], Labels, list[HostName]]
]
@@ -382,7 +354,7 @@ def automation_call() -> str:
@dataclass
class GetCheckInformationResult(ABCAutomationResult):
- plugin_infos: Mapping[CheckPluginNameStr, Mapping[str, Any]]
+ plugin_infos: Mapping[str, Mapping[str, Any]]
@staticmethod
def automation_call() -> str:
@@ -404,14 +376,43 @@ def automation_call() -> str:
result_type_registry.register(GetSectionInformationResult)
+@dataclass(frozen=True)
+class Gateway:
+ existing_gw_host_name: HostName | None
+ ip: HostAddress
+ dns_name: HostName | None
+
+
+@dataclass(frozen=True)
+class GatewayResult:
+ gateway: Gateway | None
+ state: str
+ ping_fails: int
+ message: str
+
+
@dataclass
class ScanParentsResult(ABCAutomationResult):
- gateways: Sequence[Gateway]
+ results: Sequence[GatewayResult]
@staticmethod
def automation_call() -> str:
return "scan-parents"
+ @classmethod
+ def deserialize(cls, serialized_result: SerializedResult) -> ScanParentsResult:
+ (serialized_results,) = literal_eval(serialized_result)
+ results = [
+ GatewayResult(
+ gateway=Gateway(*gw) if gw else None,
+ state=state,
+ ping_fails=ping_fails,
+ message=message,
+ )
+ for gw, state, ping_fails, message in serialized_results
+ ]
+ return cls(results=results)
+
result_type_registry.register(ScanParentsResult)
@@ -455,6 +456,17 @@ def automation_call() -> str:
result_type_registry.register(UpdateDNSCacheResult)
+@dataclass
+class UpdatePasswordsMergedFileResult(ABCAutomationResult):
+
+ @staticmethod
+ def automation_call() -> str:
+ return "update-passwords-merged-file"
+
+
+result_type_registry.register(UpdatePasswordsMergedFileResult)
+
+
@dataclass
class GetAgentOutputResult(ABCAutomationResult):
success: bool
@@ -491,6 +503,18 @@ def automation_call() -> str:
result_type_registry.register(NotificationAnalyseResult)
+@dataclass
+class NotificationTestResult(ABCAutomationResult):
+ result: NotifyAnalysisInfo | None
+
+ @staticmethod
+ def automation_call() -> str:
+ return "notification-test"
+
+
+result_type_registry.register(NotificationTestResult)
+
+
@dataclass
class NotificationGetBulksResult(ABCAutomationResult):
result: NotifyBulks
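
The hand-written `deserialize` above works because `_default_serialize` flattens nested dataclasses: `astuple` turns each `GatewayResult` (and its optional `Gateway`) into plain tuples whose `repr` survives `literal_eval`. A self-contained sketch of that round trip, with plain `str` standing in for `HostName`/`HostAddress`:

```python
from ast import literal_eval
from dataclasses import astuple, dataclass

@dataclass(frozen=True)
class Gateway:
    existing_gw_host_name: str | None
    ip: str
    dns_name: str | None

@dataclass(frozen=True)
class GatewayResult:
    gateway: Gateway | None
    state: str
    ping_fails: int
    message: str

result = GatewayResult(Gateway(None, "10.0.0.1", "gw1"), "gateway", 0, "ok")
serialized = repr(astuple(result))  # "((None, '10.0.0.1', 'gw1'), 'gateway', 0, 'ok')"
gw, state, ping_fails, message = literal_eval(serialized)
restored = GatewayResult(Gateway(*gw) if gw else None, state, ping_fails, message)
assert restored == result
```
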
diff --git a/cmk/base/.f12 b/cmk/base/.f12
index 1d8ae15c9f7..c07b8b57da7 100755
--- a/cmk/base/.f12
+++ b/cmk/base/.f12
@@ -1,18 +1,5 @@
#!/bin/bash
set -e
-SITE="${SITE:-$(until [ "$PWD" == / ]; do if [ -e .site ]; then
- cat .site
- break
-else cd ..; fi; done)}"
-SITE="${SITE:-$(omd sites --bare | head -n 1)}"
-ROOT="/omd/sites/${SITE}"
-
-echo "Updating compiled protobuf files..."
-make -C ../.. protobuf-files
-sudo rsync -rlpgoD --delete-after ../../enterprise/cmc_proto "$ROOT/lib/python3/"
-
-(
- cd ..
- bash .f12
-)
+(cd ../../non-free/cmc-protocols && bash .f12)
+(cd .. && bash .f12)
diff --git a/cmk/base/__init__.py b/cmk/base/__init__.py
index 67165df1c35..c0d5ce1ee83 100644
--- a/cmk/base/__init__.py
+++ b/cmk/base/__init__.py
@@ -12,6 +12,6 @@
# This folder is part of a namespace package, that can be shadowed/extended
# using the local/ hierarchy.
#
-# Do not change the following line, is is picked up by the build process:
+# Do not change the following line, it is picked up by the build process:
# check_mk.make: do-not-deploy
#
diff --git a/cmk/base/api/agent_based/checking_classes.py b/cmk/base/api/agent_based/checking_classes.py
deleted file mode 100644
index 89a19ba20f7..00000000000
--- a/cmk/base/api/agent_based/checking_classes.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-"""Classes used by the API for check plugins
-"""
-
-from __future__ import annotations
-
-from collections.abc import Callable
-from typing import NamedTuple
-
-from cmk.utils.check_utils import ParametersTypeAlias
-from cmk.utils.rulesets import RuleSetName
-
-from cmk.checkengine.checking import CheckPluginName
-from cmk.checkengine.sectionparser import ParsedSectionName
-
-from cmk.base.api.agent_based.plugin_classes import RuleSetTypeName
-
-from cmk.agent_based.v1.type_defs import CheckResult, DiscoveryResult
-
-CheckFunction = Callable[..., CheckResult]
-DiscoveryFunction = Callable[..., DiscoveryResult]
-
-
-class CheckPlugin(NamedTuple):
- name: CheckPluginName
- sections: list[ParsedSectionName]
- service_name: str
- discovery_function: DiscoveryFunction
- discovery_default_parameters: ParametersTypeAlias | None
- discovery_ruleset_name: RuleSetName | None
- discovery_ruleset_type: RuleSetTypeName
- check_function: CheckFunction
- check_default_parameters: ParametersTypeAlias | None
- check_ruleset_name: RuleSetName | None
- cluster_check_function: CheckFunction | None
- full_module: str | None # not available for auto migrated plugins.
diff --git a/cmk/base/api/agent_based/cluster_mode.py b/cmk/base/api/agent_based/cluster_mode.py
index 18b72f76562..41673fc4d64 100644
--- a/cmk/base/api/agent_based/cluster_mode.py
+++ b/cmk/base/api/agent_based/cluster_mode.py
@@ -2,7 +2,7 @@
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-"""Compute the cluster check function from the plugin and parameters."""
+"""Compute the cluster check function from the plug-in and parameters."""
from collections import defaultdict
from collections.abc import Callable, Iterable, Mapping, Sequence
@@ -30,8 +30,7 @@
class Selector(Protocol):
- def __call__(self, *a: State) -> State:
- ...
+ def __call__(self, *a: State) -> State: ...
def _unfit_for_clustering(**_kw: object) -> CheckResult:
diff --git a/cmk/base/api/agent_based/plugin_classes.py b/cmk/base/api/agent_based/plugin_classes.py
index 5d86302e1c5..b0f678f0a88 100644
--- a/cmk/base/api/agent_based/plugin_classes.py
+++ b/cmk/base/api/agent_based/plugin_classes.py
@@ -58,26 +58,21 @@ class AgentSectionPlugin(NamedTuple):
class _OIDSpecLike(Protocol):
@property
- def column(self) -> int | str:
- ...
+ def column(self) -> int | str: ...
@property
- def encoding(self) -> Literal["string", "binary"]:
- ...
+ def encoding(self) -> Literal["string", "binary"]: ...
@property
- def save_to_cache(self) -> bool:
- ...
+ def save_to_cache(self) -> bool: ...
class _SNMPTreeLike(Protocol):
@property
- def base(self) -> str:
- ...
+ def base(self) -> str: ...
@property
- def oids(self) -> Sequence[_OIDSpecLike]:
- ...
+ def oids(self) -> Sequence[_OIDSpecLike]: ...
class SNMPSectionPlugin(NamedTuple):
diff --git a/cmk/base/api/agent_based/register/_config.py b/cmk/base/api/agent_based/register/_config.py
index 3e51e0e905a..6528f922039 100644
--- a/cmk/base/api/agent_based/register/_config.py
+++ b/cmk/base/api/agent_based/register/_config.py
@@ -82,7 +82,7 @@ def get_check_plugin(plugin_name: CheckPluginName) -> CheckPlugin | None:
if plugin is not None or not plugin_name.is_management_name():
return plugin
- # create management board plugin on the fly:
+ # create management board plug-in on the fly:
non_mgmt_plugin = registered_check_plugins.get(plugin_name.create_basic_name())
if non_mgmt_plugin is not None:
mgmt_plugin = management_plugin_factory(non_mgmt_plugin)
@@ -103,7 +103,7 @@ def get_host_label_ruleset(ruleset_name: RuleSetName) -> Sequence[RuleSpec]:
def get_inventory_plugin(plugin_name: InventoryPluginName) -> InventoryPlugin | None:
- """Returns the registered inventory plugin"""
+ """Returns the registered inventory plug-in"""
return registered_inventory_plugins.get(plugin_name)
diff --git a/cmk/base/api/agent_based/register/_discover.py b/cmk/base/api/agent_based/register/_discover.py
index e5b66c62644..a5a972fea54 100644
--- a/cmk/base/api/agent_based/register/_discover.py
+++ b/cmk/base/api/agent_based/register/_discover.py
@@ -12,6 +12,7 @@
from cmk.agent_based.v2 import (
AgentSection,
CheckPlugin,
+ entry_point_prefixes,
InventoryPlugin,
SimpleSNMPSection,
SNMPSection,
@@ -41,20 +42,12 @@
def load_all_plugins() -> list[str]:
errors = []
for plugin, exception in load_plugins_with_exceptions("cmk.base.plugins.agent_based"):
- errors.append(f"Error in agent based plugin {plugin}: {exception}\n")
+ errors.append(f"Error in agent based plug-in {plugin}: {exception}\n")
if cmk.utils.debug.enabled():
raise exception
discovered_plugins: DiscoveredPlugins[_ABPlugins] = discover_plugins(
- PluginGroup.AGENT_BASED,
- {
- SimpleSNMPSection: "snmp_section_",
- SNMPSection: "snmp_section_",
- AgentSection: "agent_section_",
- CheckPlugin: "check_plugin_",
- InventoryPlugin: "inventory_plugin_",
- },
- raise_errors=cmk.utils.debug.enabled(),
+ PluginGroup.AGENT_BASED, entry_point_prefixes(), raise_errors=cmk.utils.debug.enabled()
)
errors.extend(f"Error in agent based plugin: {exc}" for exc in discovered_plugins.errors)
for loaded_plugin in discovered_plugins.plugins.items():
@@ -150,7 +143,7 @@ def register_check_plugin(check: CheckPlugin, location: PluginLocation) -> None:
# once we stop storing the plugins in a global variable, this
# special case can go.
return
- raise ValueError(f"duplicate check plugin definition: {plugin.name}")
+ raise ValueError(f"duplicate check plug-in definition: {plugin.name}")
add_check_plugin(plugin)
if plugin.discovery_ruleset_name is not None:
@@ -178,6 +171,6 @@ def register_inventory_plugin(inventory: InventoryPlugin, location: PluginLocati
# once we stop storing the plugins in a global variable, this
# special case can go.
return
- raise ValueError(f"duplicate inventory plugin definition: {plugin.name}")
+ raise ValueError(f"duplicate inventory plug-in definition: {plugin.name}")
add_inventory_plugin(plugin)
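
For context on `entry_point_prefixes()`: discovery remains purely name-based, so a v2 plug-in module only has to bind its objects to names carrying the prefix for their type (the prefixes visible in the removed mapping). A minimal sketch of such a module — the plug-in name `foo` is invented:

```python
from cmk.agent_based.v2 import (
    AgentSection,
    CheckPlugin,
    CheckResult,
    DiscoveryResult,
    Result,
    Service,
    State,
    StringTable,
)

def discover_foo(section: StringTable) -> DiscoveryResult:
    yield Service()

def check_foo(section: StringTable) -> CheckResult:
    yield Result(state=State.OK, summary="all good")

# The names carry the prefixes returned by entry_point_prefixes():
agent_section_foo = AgentSection(name="foo", parse_function=lambda string_table: string_table)
check_plugin_foo = CheckPlugin(
    name="foo",
    service_name="Foo",
    discovery_function=discover_foo,
    check_function=check_foo,
)
```
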
diff --git a/cmk/base/api/agent_based/register/check_plugins.py b/cmk/base/api/agent_based/register/check_plugins.py
index 9aba7b2f88b..878b6b9b55a 100644
--- a/cmk/base/api/agent_based/register/check_plugins.py
+++ b/cmk/base/api/agent_based/register/check_plugins.py
@@ -42,7 +42,7 @@ def _validate_service_name(plugin_name: CheckPluginName, service_name: str) -> N
raise ValueError(
"service name and description inconsistency: Please neither have your plugin's "
"name start with %r, nor the description with %r. In the rare case that you want to "
- "implement a check plugin explicitly designed for management boards (and nothing else),"
+ "implement a check plug-in explicitly designed for management boards (and nothing else),"
" you must do both of the above."
% (CheckPluginName.MANAGEMENT_PREFIX, MANAGEMENT_DESCR_PREFIX)
)
@@ -60,7 +60,6 @@ def _requires_item(service_name: str) -> bool:
def _filter_discovery(
generator: Callable[..., Generator[Any, None, None]],
requires_item: bool,
- validate_item: bool,
) -> DiscoveryFunction:
"""Only let Services through
@@ -72,7 +71,7 @@ def filtered_generator(*args, **kwargs):
for element in generator(*args, **kwargs):
if not isinstance(element, Service):
raise TypeError("unexpected type in discovery: %r" % type(element))
- if validate_item and requires_item is (element.item is None):
+ if requires_item is (element.item is None):
raise TypeError("unexpected type of item discovered: %r" % type(element.item))
yield element
@@ -168,7 +167,6 @@ def create_check_plugin(
check_ruleset_name: str | None = None,
cluster_check_function: Callable | None = None,
location: PluginLocation | None = None,
- validate_item: bool = True,
validate_kwargs: bool = True,
) -> CheckPlugin:
"""Return an CheckPlugin object after validating and converting the arguments one by one
@@ -198,7 +196,7 @@ def create_check_plugin(
cluster_check_function=cluster_check_function,
)
- disco_func = _filter_discovery(discovery_function, requires_item, validate_item)
+ disco_func = _filter_discovery(discovery_function, requires_item)
disco_ruleset_name = RuleSetName(discovery_ruleset_name) if discovery_ruleset_name else None
cluster_check_function = (
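
The dropped `validate_item` flag means the item rule in `_filter_discovery` now applies to every plug-in: a discovered `Service` must carry an item exactly when the service name contains `%s`. A tiny sketch of that invariant:

```python
# The invariant enforced above: requires_item must differ from (item is None).
def item_is_consistent(service_name: str, item: str | None) -> bool:
    requires_item = "%s" in service_name
    return requires_item is (item is not None)

assert item_is_consistent("Filesystem %s", "/home")    # item required and present
assert item_is_consistent("Memory", None)              # no item expected, none given
assert not item_is_consistent("Memory", "unexpected")  # would raise TypeError above
```
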
diff --git a/cmk/base/api/agent_based/register/check_plugins_legacy.py b/cmk/base/api/agent_based/register/check_plugins_legacy.py
index d08e5f260a8..d1cfb9e829c 100644
--- a/cmk/base/api/agent_based/register/check_plugins_legacy.py
+++ b/cmk/base/api/agent_based/register/check_plugins_legacy.py
@@ -2,6 +2,8 @@
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
+
+# pylint: disable=protected-access
"""Helper to register a new-style section based on config.check_info
"""
import copy
@@ -13,6 +15,7 @@
from typing import Any
from cmk.utils.check_utils import maincheckify, unwrap_parameters
+from cmk.utils.legacy_check_api import LegacyCheckDefinition
from cmk.checkengine.parameters import Parameters
@@ -22,8 +25,6 @@
from cmk.agent_based.v1 import IgnoreResults, Metric, Result, Service, State
from cmk.agent_based.v1.type_defs import CheckResult
-from .utils_legacy import LegacyCheckDefinition
-
def _create_discovery_function(check_info_element: LegacyCheckDefinition) -> Callable:
"""Create an API compliant discovery function"""
@@ -31,7 +32,7 @@ def _create_discovery_function(check_info_element: LegacyCheckDefinition) -> Cal
# 1) ensure we have the correct signature
# 2) ensure it is a generator of Service instances
def discovery_migration_wrapper(section: object) -> object:
- disco_func = check_info_element.get("discovery_function")
+ disco_func = check_info_element.discovery_function
if not callable(disco_func): # never discover:
return
@@ -89,17 +90,18 @@ def _normalize_check_function_return_value(subresults: object) -> list:
raise TypeError(f"expected None, Tuple or Iterable, got {subresults=}")
-def _create_check_function(name: str, check_info_element: LegacyCheckDefinition) -> Callable:
+def _create_check_function(
+ name: str, service_name: str, check_info_element: LegacyCheckDefinition
+) -> Callable:
"""Create an API compliant check function"""
- service_descr = check_info_element["service_name"]
- if not isinstance(service_descr, str):
- raise ValueError(f"[{name}]: invalid service description: {service_descr!r}")
+ if check_info_element.check_function is None:
+ raise ValueError(f"[{name}]: check function is missing")
# 1) ensure we have the correct signature
- requires_item = "%s" in service_descr
+ requires_item = "%s" in service_name
sig_function = _create_signature_check_function(
requires_item=requires_item,
- original_function=check_info_element["check_function"],
+ original_function=check_info_element.check_function,
)
# 2) unwrap parameters and ensure it is a generator of valid instances
@@ -189,7 +191,7 @@ def _get_float(raw_value: Any) -> float | None:
if not isinstance(raw_value, str):
return None
- # try to cut of units:
+ # try to cut off units:
for i in range(len(raw_value) - 1, 0, -1):
with suppress(TypeError, ValueError):
return float(raw_value[:i])
@@ -246,43 +248,33 @@ def create_check_plugin_from_legacy(
*,
validate_creation_kwargs: bool = True,
) -> CheckPlugin:
- # We only intend to deal with checks from our repo.
- # We know what we can and have to deal with.
- if (
- unexpected_keys := set(check_info_element)
- - LegacyCheckDefinition.__optional_keys__
- - LegacyCheckDefinition.__required_keys__
- ):
- raise ValueError(
- f"Unexpected key(s) in check_info[{check_plugin_name!r}]: {unexpected_keys!r}"
- )
+ if not isinstance(check_info_element.service_name, str):
+ # we don't make it here in the None case, but it would be
+ # handled gracefully
+ raise ValueError(check_info_element.service_name)
new_check_name = maincheckify(check_plugin_name)
sections = [check_plugin_name.split(".", 1)[0]]
if "." in check_plugin_name:
- assert sections == check_info_element["sections"]
+ assert sections == check_info_element.sections
discovery_function = _create_discovery_function(check_info_element)
check_function = _create_check_function(
check_plugin_name,
+ check_info_element.service_name,
check_info_element,
)
return create_check_plugin(
name=new_check_name,
sections=sections,
- service_name=check_info_element["service_name"],
+ service_name=check_info_element.service_name,
discovery_function=discovery_function,
discovery_default_parameters=None, # legacy madness!
discovery_ruleset_name=None,
check_function=check_function,
- check_default_parameters=check_info_element.get("check_default_parameters", {}),
- check_ruleset_name=check_info_element.get("check_ruleset_name"),
- # Legacy check plugins may return an item even if the service description
- # does not contain a '%s'. In this case the old check API assumes an implicit,
- # trailing '%s'. Therefore, we disable this validation for legacy check plugins.
- # Once all check plugins are migrated to the new API this flag can be removed.
- validate_item=False,
+ check_default_parameters=check_info_element.check_default_parameters or {},
+ check_ruleset_name=check_info_element.check_ruleset_name,
validate_kwargs=validate_creation_kwargs,
)
diff --git a/cmk/base/api/agent_based/register/export.py b/cmk/base/api/agent_based/register/export.py
index 55bbb6a25c9..d6c22823b5e 100644
--- a/cmk/base/api/agent_based/register/export.py
+++ b/cmk/base/api/agent_based/register/export.py
@@ -112,7 +112,7 @@ def agent_section(
host_label_ruleset_type: The ruleset type is either :class:`RuleSetType.ALL` or
:class:`RuleSetType.MERGED`.
- It describes whether this plugin needs the merged result of the
+ It describes whether this plug-in needs the merged result of the
effective rules, or every individual rule matching for the current host.
supersedes: A list of section names which are superseded by this section. If this
@@ -233,7 +233,7 @@ def snmp_section(
host_label_ruleset_type: The ruleset type is either :class:`RuleSetType.ALL` or
:class:`RuleSetType.MERGED`.
- It describes whether this plugin needs the merged result of the
+ It describes whether this plug-in needs the merged result of the
effective rules, or every individual rule matching for the current host.
supersedes: A list of section names which are superseded by this section. If this
@@ -256,30 +256,32 @@ def snmp_section(
return register_snmp_section(
# suppressions: we have to live with what the old API gives us. It will be validated.
- SNMPSection( # type:ignore[misc]
- name=name,
- detect=detect,
- fetch=fetch,
- parse_function=_noop_snmp_parse_function if parse_function is None else parse_function, # type: ignore[arg-type]
- parsed_section_name=parsed_section_name,
- host_label_function=host_label_function,
- host_label_default_parameters=host_label_default_parameters, # type: ignore[arg-type]
- host_label_ruleset_name=host_label_ruleset_name, # type: ignore[arg-type]
- host_label_ruleset_type=host_label_ruleset_type,
- supersedes=supersedes,
- )
- if isinstance(fetch, list)
- else SimpleSNMPSection( # type:ignore[misc]
- name=name,
- detect=detect,
- fetch=fetch,
- parse_function=_noop_snmp_parse_function if parse_function is None else parse_function, # type: ignore[arg-type]
- parsed_section_name=parsed_section_name,
- host_label_function=host_label_function,
- host_label_default_parameters=host_label_default_parameters, # type: ignore[arg-type]
- host_label_ruleset_name=host_label_ruleset_name, # type: ignore[arg-type]
- host_label_ruleset_type=host_label_ruleset_type,
- supersedes=supersedes,
+ (
+ SNMPSection( # type: ignore[misc]
+ name=name,
+ detect=detect,
+ fetch=fetch,
+ parse_function=_noop_snmp_parse_function if parse_function is None else parse_function, # type: ignore[arg-type]
+ parsed_section_name=parsed_section_name,
+ host_label_function=host_label_function,
+ host_label_default_parameters=host_label_default_parameters, # type: ignore[arg-type]
+ host_label_ruleset_name=host_label_ruleset_name, # type: ignore[arg-type]
+ host_label_ruleset_type=host_label_ruleset_type,
+ supersedes=supersedes,
+ )
+ if isinstance(fetch, list)
+ else SimpleSNMPSection( # type: ignore[misc]
+ name=name,
+ detect=detect,
+ fetch=fetch,
+ parse_function=_noop_snmp_parse_function if parse_function is None else parse_function, # type: ignore[arg-type]
+ parsed_section_name=parsed_section_name,
+ host_label_function=host_label_function,
+ host_label_default_parameters=host_label_default_parameters, # type: ignore[arg-type]
+ host_label_ruleset_name=host_label_ruleset_name, # type: ignore[arg-type]
+ host_label_ruleset_type=host_label_ruleset_type,
+ supersedes=supersedes,
+ )
),
get_validated_plugin_location(),
)
@@ -299,14 +301,14 @@ def check_plugin(
check_ruleset_name: str | None = None,
cluster_check_function: Callable | None = None,
) -> None:
- """Register a check plugin to checkmk.
+ """Register a check plug-in to checkmk.
Args:
name: The unique name of the check plugin. It must only contain the
characters 'A-Z', 'a-z', '0-9' and the underscore.
- sections: An optional list of section names that this plugin subscribes to.
+ sections: An optional list of section names that this plug-in subscribes to.
They correspond to the 'parsed_section_name' specified in
:meth:`agent_section` and :meth:`snmp_section`.
The corresponding sections are passed to the discovery and check
@@ -319,7 +321,7 @@ def check_plugin(
of "service_name".
discovery_function: The discovery_function. Arguments must be 'params' (if discovery
- parameters are defined) and 'section' (if the plugin subscribes
+ parameters are defined) and 'section' (if the plug-in subscribes
to a single section), or 'section_<name1>, section_<name2>' etc.
corresponding to the `sections`.
It is expected to be a generator of :class:`Service` instances.
@@ -331,13 +333,13 @@ def check_plugin(
discovery_ruleset_type: The ruleset type is either :class:`RuleSetType.ALL` or
:class:`RuleSetType.MERGED`.
- It describes whether this plugin needs the merged result of the
+ It describes whether this plug-in needs the merged result of the
effective rules, or every individual rule matching for the current
host.
check_function: The check_function. Arguments must be 'item' (if the service has an
item), 'params' (if check default parameters are defined) and
- 'section' (if the plugin subscribes to a single section), or
+ 'section' (if the plug-in subscribes to a single section), or
'section_<name1>, section_<name2>' etc. corresponding to the
`sections`.
@@ -379,14 +381,14 @@ def inventory_plugin(
inventory_default_parameters: _ParametersTypeAlias | None = None,
inventory_ruleset_name: str | None = None,
) -> None:
- """Register an inventory plugin to checkmk.
+ """Register an inventory plug-in to checkmk.
Args:
name: The unique name of the inventory plug-in. It must only contain the
characters 'A-Z', 'a-z', '0-9' and the underscore.
- sections: An optional list of section names that this plugin subscribes to.
+ sections: An optional list of section names that this plug-in subscribes to.
They correspond to the 'parsed_section_name' specified in
:meth:`agent_section` and :meth:`snmp_section`.
The corresponding sections are passed to the discovery and check
@@ -395,7 +397,7 @@ def inventory_plugin(
a name equal to the name of the inventory plugin.
inventory_function: The inventory_function. Arguments must be 'params' (if inventory
- parameters are defined) and 'section' (if the plugin subscribes
+ parameters are defined) and 'section' (if the plug-in subscribes
to a single section), or 'section_<name1>, section_<name2>' etc.
corresponding to the `sections`.
It is expected to be a generator of :class:`Attributes` or
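
Back in `snmp_section` above, the conditional expression picks the class from the shape of `fetch`: a single `SNMPTree` yields a `SimpleSNMPSection` whose parse function receives one table, a list of trees an `SNMPSection` whose parse function receives a list of tables. A hedged sketch of both call shapes, as they might appear in a plug-in module (OIDs and names are invented):

```python
from cmk.base.plugins.agent_based.agent_based_api.v1 import exists, register, SNMPTree

def parse_foo(string_table):  # one table: fetch was a single SNMPTree
    return string_table or None

register.snmp_section(
    name="foo",
    detect=exists(".1.3.6.1.4.1.424242.1.0"),  # invented OID
    fetch=SNMPTree(base=".1.3.6.1.4.1.424242.1", oids=["1", "2"]),
    parse_function=parse_foo,  # -> SimpleSNMPSection
)

def parse_foo_multi(string_table):  # list of tables: fetch was a list
    return string_table[0] or None

register.snmp_section(
    name="foo_multi",
    detect=exists(".1.3.6.1.4.1.424242.1.0"),
    fetch=[
        SNMPTree(base=".1.3.6.1.4.1.424242.1", oids=["1"]),
        SNMPTree(base=".1.3.6.1.4.1.424242.2", oids=["1"]),
    ],
    parse_function=parse_foo_multi,  # -> SNMPSection
)
```
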
diff --git a/cmk/base/api/agent_based/register/section_plugins.py b/cmk/base/api/agent_based/register/section_plugins.py
index a0171ebb634..9ffab9979d3 100644
--- a/cmk/base/api/agent_based/register/section_plugins.py
+++ b/cmk/base/api/agent_based/register/section_plugins.py
@@ -332,13 +332,13 @@ def validate_section_supersedes(all_supersedes: dict[SectionName, set[SectionNam
implicitly = transitively - explicitly
if name in implicitly:
raise ValueError(
- "Section plugin '%s' implicitly supersedes section(s) %s. "
+ "Section plug-in '%s' implicitly supersedes section(s) %s. "
"This leads to a cyclic superseding!"
% (name, ", ".join(f"'{n}'" for n in sorted(implicitly)))
)
if implicitly:
raise ValueError(
- "Section plugin '%s' implicitly supersedes section(s) %s. "
+ "Section plug-in '%s' implicitly supersedes section(s) %s. "
"You must add those to the supersedes keyword argument."
% (name, ", ".join(f"'{n}'" for n in sorted(implicitly)))
)
diff --git a/cmk/base/api/agent_based/register/section_plugins_legacy.py b/cmk/base/api/agent_based/register/section_plugins_legacy.py
index 3268406fcd3..ac05959874e 100644
--- a/cmk/base/api/agent_based/register/section_plugins_legacy.py
+++ b/cmk/base/api/agent_based/register/section_plugins_legacy.py
@@ -19,8 +19,8 @@
SNMPDetectSpecification,
SNMPSection,
SNMPTree,
+ StringTable,
)
-from cmk.agent_based.v2.type_defs import StringTable
def create_section_plugin_from_legacy(
diff --git a/cmk/base/api/agent_based/register/utils.py b/cmk/base/api/agent_based/register/utils.py
index b9ffc9ff378..6fea34a1f46 100644
--- a/cmk/base/api/agent_based/register/utils.py
+++ b/cmk/base/api/agent_based/register/utils.py
@@ -2,6 +2,8 @@
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
+
+# pylint: disable=protected-access
import inspect
import sys
from collections.abc import Callable, Mapping, Sequence
@@ -28,7 +30,7 @@
def get_validated_plugin_location() -> PluginLocation:
- """Find out which module registered the plugin and make sure its in the right place"""
+ """Find out which module registered the plug-in and make sure it's in the right place"""
# We used this before, but it was a performance killer. The method below is a lot faster.
# calling_from = inspect.stack()[2].filename
full_module_name = str(sys._getframe(2).f_globals["__name__"])
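
The `sys._getframe(2)` trick above reads the registering module's `__name__` straight off the call stack; unlike `inspect.stack()`, it does not materialize frame summaries for every frame. A self-contained sketch of the pattern (depth 1 here, because nothing sits between the caller and the lookup; the original uses depth 2 since one more helper is in between):

```python
import sys

def register_plugin() -> None:
    # frame 0: this function; frame 1: the code that called register_plugin().
    # f_globals belongs to the module that calling code lives in.
    calling_module = str(sys._getframe(1).f_globals["__name__"])
    print(f"plug-in registered from module {calling_module}")

register_plugin()  # prints "plug-in registered from module __main__" as a script
```
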
diff --git a/cmk/base/api/agent_based/register/utils_legacy.py b/cmk/base/api/agent_based/register/utils_legacy.py
deleted file mode 100644
index 126f91e8f8d..00000000000
--- a/cmk/base/api/agent_based/register/utils_legacy.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2023 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from collections.abc import Callable, Iterable, Mapping
-from typing import Any, NotRequired, TypedDict
-
-from cmk.agent_based.v2 import Service, SNMPDetectSpecification, SNMPTree
-
-_DiscoveredParameters = Mapping | tuple | str | None
-
-
-class LegacyCheckDefinition(TypedDict):
- detect: NotRequired[SNMPDetectSpecification]
- fetch: NotRequired[list[SNMPTree] | SNMPTree]
- sections: NotRequired[list[str]]
- check_function: NotRequired[Callable]
- discovery_function: NotRequired[
- Callable[..., None | Iterable[tuple[str | None, _DiscoveredParameters]] | Iterable[Service]]
- ]
- parse_function: NotRequired[Callable[[list], object]]
- check_ruleset_name: NotRequired[str]
- check_default_parameters: NotRequired[Mapping[str, Any]]
- service_name: NotRequired[str]
diff --git a/cmk/base/api/agent_based/value_store/_utils.py b/cmk/base/api/agent_based/value_store/_utils.py
index 3f58203aad4..cd0df2b4296 100644
--- a/cmk/base/api/agent_based/value_store/_utils.py
+++ b/cmk/base/api/agent_based/value_store/_utils.py
@@ -222,7 +222,7 @@ class _ValueStore(MutableMapping[_UserKey, Any]): # pylint: disable=too-many-an
"""Implements the mutable mapping that is exposed to the plugins
This class ensures that every service has its own name space in the
- persisted values, by adding the service ID (check plugin name and item) to
+ persisted values, by adding the service ID (check plug-in name and item) to
the user supplied keys.
"""
diff --git a/cmk/base/api/bakery/artifact_types.py b/cmk/base/api/bakery/artifact_types.py
index b0dce884718..5516fb54863 100644
--- a/cmk/base/api/bakery/artifact_types.py
+++ b/cmk/base/api/bakery/artifact_types.py
@@ -54,23 +54,23 @@ def _validate_lines(lines: Iterable[str]) -> None:
class Plugin:
"""File artifact that represents a Checkmk agent plugin
- The specified plugin file will be deployed to the Checkmk agent's plugin directory as
+ The specified plug-in file will be deployed to the Checkmk agent's plug-in directory as
a callable plugin.
Args:
base_os: The target operating system.
- source: Path of the plugin file, relative to the plugin source directory on the Checkmk site.
+ source: Path of the plug-in file, relative to the plug-in source directory on the Checkmk site.
This usually consists only of the filename.
- target: Target path, relative to the plugin directory within the agent's file tree
- on the target system. If omitted, the plugin will be deployed under it's
+ target: Target path, relative to the plug-in directory within the agent's file tree
+ on the target system. If omitted, the plug-in will be deployed under its
relative source path/filename.
- interval: Caching interval in seconds. The plugin will only be executed by the
+ interval: Caching interval in seconds. The plug-in will only be executed by the
agent after the caching interval is elapsed.
asynchronous: Relevant for Windows Agent. Don't wait for termination of the plugin's
process if True. An existing interval will always result in asynchronous execution.
- timeout: Relevant for Windows Agent. Maximum waiting time for a plugin to terminate.
+ timeout: Relevant for Windows Agent. Maximum waiting time for a plug-in to terminate.
retry_count: Relevant for Windows Agent. Maximum number of retried executions after a
- failed plugin execution.
+ failed plug-in execution.
"""
def __init__(
@@ -128,7 +128,7 @@ class SystemBinary:
base_os: The target operating system.
source: Path of the file, relative to the agent source directory on the Checkmk site.
target: Target path, relative to the binary directory on the target system. If omitted,
+ the plug-in will be deployed under its relative source path/filename.
+ the plug-in will be deployed under it's relative source path/filename.
"""
def __init__(self, *, base_os: OS, source: Path, target: Path | None = None) -> None:
@@ -158,7 +158,7 @@ class PluginConfig:
The resulting configuration file will be placed to the agent's config directory (by default,
'/etc/check_mk', configurable in WATO) and is meant to be read by the corresponding plugin.
Its content is unrestricted (apart from the fact that it must be passed as a list of 'str's),
- so it's up to the consuming plugin to process it correctly.
+ so it's up to the consuming plug-in to process it correctly.
Args:
base_os: The target operating system.
@@ -322,7 +322,7 @@ def _validate_windows_config_content(content: WindowsConfigContent) -> None:
class WindowsConfigEntry:
"""Config Entry for the Windows Agent yaml file (check_mk.install.yml)
- It's up to the consuming plugin to read the config entry correctly from the
+ It's up to the consuming plug-in to read the config entry correctly from the
yaml file. However, unlike the approach via PluginConfig, config entries described
here will be accessible consistently via the python yaml module.
diff --git a/cmk/base/api/bakery/register.py b/cmk/base/api/bakery/register.py
index 5d3b7d7cd8d..17b972e7ac9 100644
--- a/cmk/base/api/bakery/register.py
+++ b/cmk/base/api/bakery/register.py
@@ -28,10 +28,10 @@ def bakery_plugin(
) -> None:
"""Register a Bakery Plugin (Bakelet) to Checkmk
- This registration function accepts a plugin name (mandatory) and up to three
+ This registration function accepts a plug-in name (mandatory) and up to three
generator functions that may yield different types of artifacts.
The generator functions will be called with keyword-arguments 'conf' and/or 'aghash'
- while processing the bakery plugin (Callbacks), thus the specific call depends on the
+ while processing the bakery plug-in (Callbacks), thus the specific call depends on the
argument names of the provided functions.
For keyword-arg 'conf', the corresponding WATO configuration will be provided.
For keyword-arg 'aghash', the configuration hash of the resulting agent package
@@ -39,7 +39,7 @@ def bakery_plugin(
Unused arguments can be omitted in the function's signatures.
Args:
- name: The name of the agent plugin to be processed. It must be unique, and match
+ name: The name of the agent plug-in to be processed. It must be unique, and match
the name of the corresponding WATO rule. It may only contain ASCII
letters (A-Z, a-z), digits (0-9), and underscores (_).
files_function: Generator function that yields file artifacts.
@@ -67,7 +67,7 @@ def bakery_plugin(
def get_bakery_plugins() -> dict[str, BakeryPlugin]:
for plugin, exception in load_plugins_with_exceptions("cmk.base.cee.plugins.bakery"):
- console.error("Error in bakery plugin %s: %s\n", plugin, exception)
+ console.error(f"Error in bakery plug-in {plugin}: {exception}\n")
if cmk.utils.debug.enabled():
raise exception
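
Putting the registration contract above together, a minimal bakelet could look like this — a sketch only: the import path and the `FileGenerator` alias are assumptions, while `name`, `files_function`, and the `conf` keyword argument follow the docstring above:

```python
from pathlib import Path
from typing import Any

# assumed public import path of the bakery API
from cmk.base.plugins.bakery.bakery_api.v1 import FileGenerator, OS, Plugin, register

def get_example_plugin_files(conf: Any) -> FileGenerator:
    # 'conf' is the matching WATO rule value; yield one artifact per target OS.
    yield Plugin(base_os=OS.LINUX, source=Path("example_plugin.py"), interval=60)
    yield Plugin(base_os=OS.WINDOWS, source=Path("example_plugin.vbs"))

register.bakery_plugin(
    name="example_plugin",  # must match the name of the WATO rule
    files_function=get_example_plugin_files,
)
```
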
diff --git a/cmk/base/automations/__init__.py b/cmk/base/automations/__init__.py
index fbb60fad2ae..ef7c2999d52 100644
--- a/cmk/base/automations/__init__.py
+++ b/cmk/base/automations/__init__.py
@@ -104,8 +104,7 @@ class Automation(abc.ABC):
needs_config = False
@abc.abstractmethod
- def execute(self, args: list[str]) -> ABCAutomationResult:
- ...
+ def execute(self, args: list[str]) -> ABCAutomationResult: ...
#
diff --git a/cmk/base/automations/check_mk.py b/cmk/base/automations/check_mk.py
index 5353cacf1d3..c006583e034 100644
--- a/cmk/base/automations/check_mk.py
+++ b/cmk/base/automations/check_mk.py
@@ -9,6 +9,7 @@
import glob
import io
import itertools
+import json
import logging
import operator
import os
@@ -20,6 +21,7 @@
import time
from collections.abc import Container, Iterable, Mapping, Sequence
from contextlib import redirect_stderr, redirect_stdout, suppress
+from dataclasses import asdict
from itertools import islice
from pathlib import Path
from typing import Any
@@ -31,10 +33,12 @@
import cmk.utils.log as log
import cmk.utils.man_pages as man_pages
import cmk.utils.password_store
+import cmk.utils.paths
import cmk.utils.tty as tty
from cmk.utils.agentdatatype import AgentRawData
from cmk.utils.auto_queue import AutoQueue
from cmk.utils.caching import cache_manager
+from cmk.utils.config_path import LATEST_CONFIG
from cmk.utils.diagnostics import deserialize_cl_parameters, DiagnosticsCLParameters
from cmk.utils.encoding import ensure_str_with_fallback
from cmk.utils.everythingtype import EVERYTHING
@@ -64,6 +68,7 @@
var_dir,
)
from cmk.utils.sectionname import SectionName
+from cmk.utils.servicename import ServiceName
from cmk.utils.timeout import Timeout
from cmk.utils.timeperiod import timeperiod_active
from cmk.utils.version import edition_supports_nagvis
@@ -86,6 +91,7 @@
NotificationAnalyseResult,
NotificationGetBulksResult,
NotificationReplayResult,
+ NotificationTestResult,
ReloadResult,
RenameHostsResult,
RestartResult,
@@ -97,6 +103,7 @@
SetAutochecksTable,
UpdateDNSCacheResult,
UpdateHostLabelsResult,
+ UpdatePasswordsMergedFileResult,
)
from cmk.snmplib import (
@@ -106,14 +113,16 @@
oids_to_walk,
SNMPCredentials,
SNMPHostConfig,
+ SNMPVersion,
walk_for_export,
)
-from cmk.fetchers import FetcherType, get_raw_data, Mode, ProgramFetcher, TCPFetcher
+from cmk.fetchers import get_raw_data, Mode, ProgramFetcher, SNMPScanConfig, TCPFetcher, TLSConfig
+from cmk.fetchers.config import make_persisted_section_dir
from cmk.fetchers.filecache import FileCacheOptions, MaxAge
from cmk.fetchers.snmp import make_backend as make_snmp_backend
-from cmk.checkengine.checking import CheckPluginName, CheckPluginNameStr
+from cmk.checkengine.checking import CheckPluginName
from cmk.checkengine.discovery import (
AutocheckEntry,
AutocheckServiceWithNodes,
@@ -128,7 +137,8 @@
set_autochecks_of_cluster,
set_autochecks_of_real_hosts,
)
-from cmk.checkengine.fetcher import SourceType
+from cmk.checkengine.discovery._utils import DiscoveredItem
+from cmk.checkengine.fetcher import FetcherType, SourceType
from cmk.checkengine.parser import NO_SELECTION, parse_raw_data
from cmk.checkengine.submitters import ServiceDetails, ServiceState
from cmk.checkengine.summarize import summarize
@@ -156,13 +166,20 @@
HostLabelPluginMapper,
SectionPluginMapper,
)
-from cmk.base.config import ConfigCache
+from cmk.base.config import (
+ ConfigCache,
+ ConfiguredIPLookup,
+ handle_ip_lookup_failure,
+ lookup_mgmt_board_ip_address,
+ snmp_default_community,
+)
from cmk.base.core import CoreAction, do_restart
from cmk.base.core_factory import create_core
from cmk.base.diagnostics import DiagnosticsDump
from cmk.base.errorhandling import create_section_crash_dump
-from cmk.base.plugins.server_side_calls import load_active_checks
-from cmk.base.sources import make_parser
+from cmk.base.parent_scan import ScanConfig
+from cmk.base.server_side_calls import load_active_checks
+from cmk.base.sources import make_parser, SNMPFetcherConfig
from cmk.agent_based.v1.value_store import set_value_store_manager
from cmk.discover_plugins import discover_families, PluginGroup
@@ -217,11 +234,8 @@ class AutomationDiscovery(DiscoveryAutomation):
needs_config = True
needs_checks = True
- # Does discovery for a list of hosts. Possible values for mode:
- # "new" - find only new services (like -I)
- # "remove" - remove exceeding services
- # "fixall" - find new, remove exceeding
- # "refresh" - drop all services and reinventorize
+ # Does discovery for a list of hosts. For possible values see
+ # DiscoverySettings
# Hosts on the list that are offline (unmonitored) will
# be skipped.
def execute(self, args: list[str]) -> ServiceDiscoveryResult:
@@ -240,10 +254,19 @@ def execute(self, args: list[str]) -> ServiceDiscoveryResult:
if len(args) < 2:
raise MKAutomationError(
- "Need two arguments: new|remove|fixall|refresh|only-host-labels HOSTNAME"
+ "Need two arguments: DiscoveryMode|DiscoverySettings HOSTNAME"
+ )
+
+ # TODO 2.3 introduced a new format but has to be compatible for 2.2.
+ # Can be removed one day
+ if (discovery_settings := args[0]) in ["new", "remove", "fixall", "refresh"]:
+ settings = DiscoverySettings.from_discovery_mode(
+ DiscoveryMode.from_str(discovery_settings)
)
+ else:
+ # 2.3 format
+ settings = DiscoverySettings.from_json(discovery_settings)
- settings = DiscoverySettings.from_discovery_mode(DiscoveryMode.from_str(args[0]))
hostnames = [HostName(h) for h in islice(args, 1, None)]
config_cache = config.get_config_cache()
@@ -252,13 +275,17 @@ def execute(self, args: list[str]) -> ServiceDiscoveryResult:
results: dict[HostName, DiscoveryResult] = {}
parser = CMKParser(
- config_cache,
+ config_cache.parser_factory(),
+ checking_sections=lambda hostname: config_cache.make_checking_sections(
+ hostname, selected_sections=NO_SELECTION
+ ),
selected_sections=NO_SELECTION,
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.discovery"),
)
fetcher = CMKFetcher(
config_cache,
+ config_cache.fetcher_factory(),
file_cache_options=file_cache_options,
force_snmp_cache_refresh=force_snmp_cache_refresh,
mode=Mode.DISCOVERY,
@@ -266,6 +293,7 @@ def execute(self, args: list[str]) -> ServiceDiscoveryResult:
selected_sections=NO_SELECTION,
simulation_mode=config.simulation_mode,
snmp_backend_override=None,
+ password_store_file=cmk.utils.password_store.pending_password_store_path(),
)
for hostname in hostnames:
@@ -287,7 +315,7 @@ def section_error_handling(
results[hostname] = automation_discovery(
hostname,
is_cluster=hostname in config_cache.hosts_config.clusters,
- cluster_nodes=config_cache.nodes_of(hostname) or (),
+ cluster_nodes=config_cache.nodes(hostname),
active_hosts={
hn
for hn in itertools.chain(hosts_config.hosts, hosts_config.clusters)
@@ -297,8 +325,8 @@ def section_error_handling(
parser=parser,
fetcher=fetcher,
summarizer=CMKSummarizer(
- config_cache,
hostname,
+ config_cache.summary_config,
override_non_ok_state=None,
),
section_plugins=SectionPluginMapper(),
@@ -351,7 +379,10 @@ def execute(self, args: list[str]) -> ServiceDiscoveryPreviewResult:
config_cache = config.get_config_cache()
config_cache.ruleset_matcher.ruleset_optimizer.set_all_processed_hosts({host_name})
return _get_discovery_preview(
- host_name, not prevent_fetching, OnError.RAISE if raise_errors else OnError.WARN
+ host_name,
+ config_cache,
+ not prevent_fetching,
+ OnError.RAISE if raise_errors else OnError.WARN,
)
@@ -374,7 +405,10 @@ def execute(self, args: list[str]) -> ServiceDiscoveryPreviewResult:
) # ... or are you *absolutely* sure we always use *exactly* one of the directives :-)
return _get_discovery_preview(
- HostName(args[0]), perform_scan, OnError.RAISE if raise_errors else OnError.WARN
+ HostName(args[0]),
+ config.get_config_cache(),
+ perform_scan,
+ OnError.RAISE if raise_errors else OnError.WARN,
)
@@ -383,13 +417,19 @@ def execute(self, args: list[str]) -> ServiceDiscoveryPreviewResult:
# TODO: invert the 'perform_scan' logic -> 'prevent_fetching'
def _get_discovery_preview(
- host_name: HostName, perform_scan: bool, on_error: OnError
+ host_name: HostName,
+ config_cache: ConfigCache,
+ perform_scan: bool,
+ on_error: OnError,
) -> ServiceDiscoveryPreviewResult:
buf = io.StringIO()
+ ip_address_of = config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ )
with redirect_stdout(buf), redirect_stderr(buf):
log.setup_console_logging()
- check_preview = _execute_discovery(host_name, perform_scan, on_error)
+ check_preview = _execute_discovery(host_name, perform_scan, on_error, ip_address_of)
def make_discovered_host_labels(
labels: Sequence[HostLabel],
@@ -407,7 +447,8 @@ def make_discovered_host_labels(
return ServiceDiscoveryPreviewResult(
output=buf.getvalue(),
- check_table=check_preview.table,
+ # TODO: will be removed later when full information is returned to the frontend
+ check_table=check_preview.table[host_name],
host_labels=make_discovered_host_labels(check_preview.labels.present),
new_labels=make_discovered_host_labels(
[l for l in check_preview.labels.new if l.name not in changed_labels]
@@ -423,13 +464,16 @@ def make_discovered_host_labels(
)
-def active_check_preview_rows(
- config_cache: ConfigCache, host_name: HostName
+def _active_check_preview_rows(
+ config_cache: ConfigCache,
+ host_name: HostName,
+ ip_address_of: config.IPLookup,
) -> Sequence[CheckPreviewEntry]:
active_checks_ = config_cache.active_checks(host_name)
- host_attrs = config_cache.get_host_attributes(host_name)
+ host_attrs = config_cache.get_host_attributes(host_name, ip_address_of)
ignored_services = config.IgnoredServices(config_cache, host_name)
ruleset_matcher = config_cache.ruleset_matcher
+ translations = config.get_service_translations(ruleset_matcher, host_name)
def make_check_source(desc: str) -> str:
return "ignored_active" if desc in ignored_services else "active"
@@ -438,12 +482,24 @@ def make_output(desc: str) -> str:
pretty = make_check_source(desc).rsplit("_", maxsplit=1)[-1].title()
return f"WAITING - {pretty} check, cannot be done offline"
+ def make_final_service_name(sn: ServiceName) -> ServiceName:
+ return config.get_final_service_description(sn, translations)
+
+ host_macros = ConfigCache.get_host_macros_from_attributes(host_name, host_attrs)
+ resource_macros = config.get_resource_macros()
+ macros = {**host_macros, **resource_macros}
+ password_store_file = cmk.utils.password_store.pending_password_store_path()
active_check_config = server_side_calls.ActiveCheck(
load_active_checks()[1],
config.active_check_info,
host_name,
+ config.get_ssc_host_config(host_name, config_cache, macros, ip_address_of),
host_attrs,
- config.get_service_translations(ruleset_matcher, host_name),
+ config.http_proxies,
+ make_final_service_name,
+ config.use_new_descriptions_for,
+ cmk.utils.password_store.load(password_store_file),
+ password_store_file,
)
return list(
@@ -452,14 +508,17 @@ def make_output(desc: str) -> str:
check_source=make_check_source(active_service.description),
check_plugin_name=active_service.plugin_name,
ruleset_name=None,
+ discovery_ruleset_name=None,
item=active_service.description,
- discovered_parameters=None,
- effective_parameters=None,
+ old_discovered_parameters={},
+ new_discovered_parameters={},
+ effective_parameters={},
description=active_service.description,
state=None,
output=make_output(active_service.description),
metrics=[],
- labels={},
+ old_labels={},
+ new_labels={},
found_on_nodes=[host_name],
)
for active_service in active_check_config.get_active_service_descriptions(
@@ -473,6 +532,7 @@ def _execute_discovery(
host_name: HostName,
perform_scan: bool,
on_error: OnError,
+ ip_address_of: config.IPLookup,
) -> CheckPreview:
file_cache_options = FileCacheOptions(
use_outdated=not perform_scan, use_only_cache=not perform_scan
@@ -482,13 +542,17 @@ def _execute_discovery(
hosts_config = config.make_hosts_config()
ruleset_matcher = config_cache.ruleset_matcher
parser = CMKParser(
- config_cache,
+ config_cache.parser_factory(),
+ checking_sections=lambda hostname: config_cache.make_checking_sections(
+ hostname, selected_sections=NO_SELECTION
+ ),
selected_sections=NO_SELECTION,
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.discovery"),
)
fetcher = CMKFetcher(
config_cache,
+ config_cache.fetcher_factory(),
file_cache_options=file_cache_options,
force_snmp_cache_refresh=perform_scan,
mode=Mode.DISCOVERY,
@@ -496,18 +560,23 @@ def _execute_discovery(
selected_sections=NO_SELECTION,
simulation_mode=config.simulation_mode,
snmp_backend_override=None,
+ password_store_file=cmk.utils.password_store.pending_password_store_path(),
)
ip_address = (
None
if host_name in hosts_config.clusters
+ or ConfigCache.ip_stack_config(host_name) is ip_lookup.IPStackConfig.NO_IP
# We *must* do the lookup *before* calling `get_host_attributes()`
# because... I don't know... global variables I guess. In any case,
# doing it the other way around breaks one integration test.
else config.lookup_ip_address(config_cache, host_name)
)
- with plugin_contexts.current_host(host_name), set_value_store_manager(
- ValueStoreManager(host_name), store_changes=False
- ) as value_store_manager:
+ with (
+ plugin_contexts.current_host(host_name),
+ set_value_store_manager(
+ ValueStoreManager(host_name), store_changes=False
+ ) as value_store_manager,
+ ):
is_cluster = host_name in hosts_config.clusters
check_plugins = CheckPluginMapper(
config_cache,
@@ -519,12 +588,12 @@ def _execute_discovery(
host_name,
ip_address,
is_cluster=is_cluster,
- cluster_nodes=config_cache.nodes_of(host_name) or (),
+ cluster_nodes=config_cache.nodes(host_name),
parser=parser,
fetcher=fetcher,
summarizer=CMKSummarizer(
- config_cache,
host_name,
+ config_cache.summary_config,
override_non_ok_state=None,
),
section_plugins=SectionPluginMapper(),
@@ -555,11 +624,13 @@ def _execute_discovery(
on_error=on_error,
)
return CheckPreview(
- table=[
- *passive_check_preview.table,
- *active_check_preview_rows(config_cache, host_name),
- *config_cache.custom_check_preview_rows(host_name),
- ],
+ table={
+ host_name: [
+ *passive_check_preview.table[host_name],
+ *_active_check_preview_rows(config_cache, host_name, ip_address_of),
+ *config_cache.custom_check_preview_rows(host_name),
+ ]
+ },
labels=passive_check_preview.labels,
source_results=passive_check_preview.source_results,
kept_labels=passive_check_preview.kept_labels,
@@ -590,15 +661,22 @@ def _execute_autodiscovery() -> tuple[Mapping[HostName, DiscoveryResult], bool]:
config.load()
config_cache = config.get_config_cache()
+ ip_address_of = config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ )
ruleset_matcher = config_cache.ruleset_matcher
parser = CMKParser(
- config_cache,
+ config_cache.parser_factory(),
+ checking_sections=lambda hostname: config_cache.make_checking_sections(
+ hostname, selected_sections=NO_SELECTION
+ ),
selected_sections=NO_SELECTION,
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.discovery"),
)
fetcher = CMKFetcher(
config_cache,
+ config_cache.fetcher_factory(),
file_cache_options=file_cache_options,
force_snmp_cache_refresh=False,
mode=Mode.DISCOVERY,
@@ -606,11 +684,11 @@ def _execute_autodiscovery() -> tuple[Mapping[HostName, DiscoveryResult], bool]:
selected_sections=NO_SELECTION,
simulation_mode=config.simulation_mode,
snmp_backend_override=None,
+ password_store_file=cmk.utils.password_store.core_password_store_path(LATEST_CONFIG),
)
section_plugins = SectionPluginMapper()
host_label_plugins = HostLabelPluginMapper(ruleset_matcher=ruleset_matcher)
plugins = DiscoveryPluginMapper(ruleset_matcher=ruleset_matcher)
- get_service_description = functools.partial(config.service_description, ruleset_matcher)
on_error = OnError.IGNORE
hosts_config = config_cache.hosts_config
@@ -679,7 +757,7 @@ def section_error_handling(
discovery_result, activate_host = autodiscovery(
host_name,
is_cluster=host_name in config_cache.hosts_config.clusters,
- cluster_nodes=config_cache.nodes_of(host_name) or (),
+ cluster_nodes=config_cache.nodes(host_name),
active_hosts={
hn
for hn in itertools.chain(hosts_config.hosts, hosts_config.clusters)
@@ -689,7 +767,9 @@ def section_error_handling(
parser=parser,
fetcher=fetcher,
summarizer=CMKSummarizer(
- config_cache, host_name, override_non_ok_state=None
+ host_name,
+ config_cache.summary_config,
+ override_non_ok_state=None,
),
section_plugins=section_plugins,
section_error_handling=section_error_handling,
@@ -699,7 +779,7 @@ def section_error_handling(
ignore_plugin=config_cache.check_plugin_ignored,
get_effective_host=config_cache.effective_host,
get_service_description=(
- functools.partial(get_service_description, ruleset_matcher)
+ functools.partial(config.service_description, ruleset_matcher)
),
schedule_discovery_check=_schedule_discovery_check,
rediscovery_parameters=params.rediscovery,
@@ -734,6 +814,7 @@ def section_error_handling(
if config.monitoring_core == "cmc":
cmk.base.core.do_reload(
config_cache,
+ ip_address_of,
core,
locking_mode=config.restart_locking,
all_hosts=hosts_config.hosts,
@@ -746,6 +827,7 @@ def section_error_handling(
else:
cmk.base.core.do_restart(
config_cache,
+ ip_address_of,
core,
all_hosts=hosts_config.hosts,
locking_mode=config.restart_locking,
@@ -792,8 +874,11 @@ def execute(self, args: list[str]) -> SetAutochecksResult:
new_services = [
AutocheckServiceWithNodes(
- AutocheckEntry(
- CheckPluginName(raw_check_plugin_name), item, params, raw_service_labels
+ DiscoveredItem[AutocheckEntry](
+ previous=AutocheckEntry(
+ CheckPluginName(raw_check_plugin_name), item, params, raw_service_labels
+ ),
+ new=None,
),
found_on_nodes,
)
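
A minimal sketch (not part of the patch) of the structure the hunk above now builds, with illustrative plugin, item, and node names; `new=None` means the command only carries the previously stored state:

```python
entry = AutocheckServiceWithNodes(
    DiscoveredItem[AutocheckEntry](
        previous=AutocheckEntry(CheckPluginName("df"), "/", {}, {}),  # illustrative
        new=None,  # no freshly discovered counterpart
    ),
    [HostName("node1"), HostName("node2")],  # found_on_nodes
)
```
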
@@ -807,9 +892,11 @@ def execute(self, args: list[str]) -> SetAutochecksResult:
if hostname in config_cache.hosts_config.clusters:
set_autochecks_of_cluster(
- config_cache.nodes_of(hostname) or (),
+ config_cache.nodes(hostname),
hostname,
- new_services,
+ # TODO: get full node information and pass it to set_autochecks_of_cluster.
+ # Currently the set-autochecks command will still set cluster information.
+ {hostname: new_services},
config_cache.effective_host,
functools.partial(config.service_description, config_cache.ruleset_matcher),
)
@@ -871,9 +958,9 @@ def execute(self, args: list[str]) -> RenameHostsResult:
# including the current history files will be handled later when the core
# is stopped.
for oldname, newname in renamings:
- self._finished_history_files[
- (oldname, newname)
- ] = self._rename_host_in_core_history_archive(oldname, newname)
+ self._finished_history_files[(oldname, newname)] = (
+ self._rename_host_in_core_history_archive(oldname, newname)
+ )
if self._finished_history_files[(oldname, newname)]:
actions.append("history")
@@ -895,21 +982,23 @@ def execute(self, args: list[str]) -> RenameHostsResult:
# Start monitoring again
if core_was_running:
# force config generation to succeed. The core *must* start.
- # TODO: Can't we drop this hack since we have config warnings now?
- config.ignore_ip_lookup_failures()
# In this case the configuration is already locked by the caller of the automation.
# If that is on the local site, we can not lock the configuration again during baking!
# (If we are on a remote site now, locking *would* work, but we will not bake agents anyway.)
config_cache = config.get_config_cache()
hosts_config = config.make_hosts_config()
+ ip_address_of = config.ConfiguredIPLookup(
+ config_cache, error_handler=ip_lookup.CollectFailedHosts()
+ )
_execute_silently(
config_cache,
CoreAction.START,
+ ip_address_of,
hosts_config,
skip_config_locking_for_bakery=True,
)
- for hostname in config.failed_ip_lookups():
+ for hostname in ip_address_of.error_handler.failed_ip_lookups:
actions.append("dnsfail-" + hostname)
# Convert actions into a dictionary { "what" : count }
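
A hedged sketch of the two lookup flavors this patch uses in place of the removed global `ignore_ip_lookup_failures()` / `failed_ip_lookups()` pair (construction exactly as above; the reporting loop is illustrative):

```python
# Flavor 1: log-and-continue, used by most automations in this file.
lenient = config.ConfiguredIPLookup(
    config_cache, error_handler=config.handle_ip_lookup_failure
)

# Flavor 2: collect failures so the caller can report them afterwards,
# as the rename handling does for its "dnsfail-" actions.
collecting = config.ConfiguredIPLookup(
    config_cache, error_handler=ip_lookup.CollectFailedHosts()
)
for hostname in collecting.error_handler.failed_ip_lookups:
    print(f"dnsfail-{hostname}")  # illustrative reporting
```
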
@@ -1231,6 +1320,9 @@ def execute(self, args: list[str]) -> AnalyseServiceResult:
config_cache=config_cache,
host_name=host_name,
servicedesc=servicedesc,
+ ip_address_of=config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ ),
)
)
else AnalyseServiceResult(
@@ -1248,6 +1340,7 @@ def _get_service_info(
config_cache: ConfigCache,
host_name: HostName,
servicedesc: str,
+ ip_address_of: config.IPLookup,
) -> ServiceInfo:
# We just consider types of checks that are managed via WATO.
# We have the following possible types of services:
@@ -1256,6 +1349,14 @@ def _get_service_info(
# 3. classical checks
# 4. active checks
+ # Special case: cheap to check, so check this first:
+ if servicedesc == "Check_MK Discovery":
+ return {
+ "origin": "active",
+ "checktype": "check-mk-inventory",
+ "parameters": asdict(config_cache.discovery_check_parameters(host_name)),
+ }
+
# 1. Enforced services
for checkgroup_name, service in config_cache.enforced_services_table(host_name).values():
if service.description == servicedesc:
@@ -1288,13 +1389,23 @@ def _get_service_info(
return result
# 4. Active checks
- host_attrs = config_cache.get_host_attributes(host_name)
+ translations = config.get_service_translations(config_cache.ruleset_matcher, host_name)
+ host_attrs = config_cache.get_host_attributes(host_name, ip_address_of)
+ host_macros = ConfigCache.get_host_macros_from_attributes(host_name, host_attrs)
+ resource_macros = config.get_resource_macros()
+ macros = {**host_macros, **resource_macros}
+ password_store_file = cmk.utils.password_store.pending_password_store_path()
active_check_config = server_side_calls.ActiveCheck(
load_active_checks()[1],
config.active_check_info,
host_name,
+ config.get_ssc_host_config(host_name, config_cache, macros, ip_address_of),
host_attrs,
- translations=config.get_service_translations(config_cache.ruleset_matcher, host_name),
+ config.http_proxies,
+ lambda x: config.get_final_service_description(x, translations),
+ config.use_new_descriptions_for,
+ cmk.utils.password_store.load(password_store_file),
+ password_store_file,
)
active_checks = config_cache.active_checks(host_name)
@@ -1318,7 +1429,7 @@ def _get_service_info_from_autochecks(
services = (
[
service
- for node in config_cache.nodes_of(host_name) or []
+ for node in config_cache.nodes(host_name)
for service in config_cache.get_autochecks_of(node)
if host_name == config_cache.effective_host(node, service.description)
]
@@ -1335,7 +1446,7 @@ def _get_service_info_from_autochecks(
plugin = agent_based_register.get_check_plugin(service.check_plugin_name)
if plugin is None:
- # plugin can only be None if we looked for the "Unimplemented check..." description.
+ # plug-in can only be None if we looked for the "Unimplemented check..." description.
# In this case we can run into the 'not found' case below.
continue
@@ -1516,7 +1627,12 @@ def execute(self, args: list[str]) -> RestartResult:
nodes = None
config_cache = config.get_config_cache()
hosts_config = config.make_hosts_config()
- return _execute_silently(config_cache, self._mode(), hosts_config, hosts_to_update=nodes)
+ ip_address_of = config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ )
+ return _execute_silently(
+ config_cache, self._mode(), ip_address_of, hosts_config, hosts_to_update=nodes
+ )
def _check_plugins_have_changed(self) -> bool:
last_time = self._time_of_last_core_restart()
@@ -1570,6 +1686,7 @@ def execute(self, args: list[str]) -> ReloadResult:
def _execute_silently(
config_cache: ConfigCache,
action: CoreAction,
+ ip_address_of: config.IPLookup,
hosts_config: Hosts,
hosts_to_update: set[HostName] | None = None,
skip_config_locking_for_bakery: bool = False,
@@ -1579,6 +1696,7 @@ def _execute_silently(
try:
do_restart(
config_cache,
+ ip_address_of,
create_core(config.monitoring_core),
action=action,
all_hosts=hosts_config.hosts,
@@ -1655,7 +1773,7 @@ def execute(self, args: list[str]) -> GetCheckInformationResult:
discover_families(raise_errors=cmk.utils.debug.enabled()), PluginGroup.CHECKMAN.value
)
- plugin_infos: dict[CheckPluginNameStr, dict[str, Any]] = {}
+ plugin_infos: dict[str, dict[str, Any]] = {}
for plugin in agent_based_register.iter_all_check_plugins():
plugin_info = plugin_infos.setdefault(
str(plugin.name),
@@ -1743,16 +1861,27 @@ def execute(self, args: list[str]) -> ScanParentsResult:
HostName(config.monitoring_host) if config.monitoring_host is not None else None
)
+ def make_scan_config() -> Mapping[HostName, ScanConfig]:
+ return {
+ host: config_cache.make_parent_scan_config(host)
+ for host in itertools.chain(
+ hostnames,
+ hosts_config.hosts,
+ ([HostName(config.monitoring_host)] if config.monitoring_host else ()),
+ )
+ }
+
try:
- gateways = cmk.base.parent_scan.scan_parents_of(
- config_cache,
+ gateway_results = cmk.base.parent_scan.scan_parents_of(
+ make_scan_config(),
hosts_config,
monitoring_host,
hostnames,
silent=True,
settings=settings,
+ lookup_ip_address=functools.partial(config.lookup_ip_address, config_cache),
)
- return ScanParentsResult(gateways)
+ return ScanParentsResult(gateway_results)
except Exception as e:
raise MKAutomationError("%s" % e)
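
The parent scan no longer receives the whole config cache; it gets a per-host `ScanConfig` mapping plus a bare lookup callable. A sketch of that callable's contract (the host name is illustrative):

```python
import functools

# "host name -> IP address"; partial application hides the config cache.
lookup_ip_address = functools.partial(config.lookup_ip_address, config_cache)
gateway_ip = lookup_ip_address(HostName("core-switch-01"))  # hypothetical host
```
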
@@ -1807,6 +1936,8 @@ def execute( # pylint: disable=too-many-branches
file_cache_options = FileCacheOptions()
if not ipaddress:
+ if ConfigCache.ip_stack_config(host_name) is ip_lookup.IPStackConfig.NO_IP:
+ raise MKGeneralException("Host is configured as No-IP host: %s" % host_name)
try:
resolved_address = config.lookup_ip_address(config_cache, host_name)
except Exception:
@@ -1831,6 +1962,14 @@ def execute( # pylint: disable=too-many-branches
cmd=cmd,
tcp_connect_timeout=tcp_connect_timeout,
file_cache_options=file_cache_options,
+ # Passing `ip_address_of` is the result of a refactoring.
+ # We do pass an IP address as well, so I'm not quite sure why we need this.
+ # Feel free to investigate!
+ # Also: this class might write to the console. The de-serializer of the automation
+ # call will probably not be able to handle that; at best it will ignore it.
+ # We should fix this.
+ ip_address_of=config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ ),
)
)
@@ -1845,7 +1984,9 @@ def execute( # pylint: disable=too-many-branches
return DiagHostResult(
*self._execute_snmp(
test,
- config_cache.make_snmp_config(host_name, ipaddress, SourceType.HOST),
+ config_cache.make_snmp_config(
+ host_name, ipaddress, SourceType.HOST, backend_override=None
+ ),
host_name,
ipaddress,
snmp_community,
@@ -1898,15 +2039,41 @@ def _execute_agent(
cmd: str,
tcp_connect_timeout: float | None,
file_cache_options: FileCacheOptions,
+ ip_address_of: config.IPLookup,
) -> tuple[int, str]:
hosts_config = config_cache.hosts_config
check_interval = config_cache.check_mk_check_interval(host_name)
+ oid_cache_dir = Path(cmk.utils.paths.snmp_scan_cache_dir)
+ stored_walk_path = Path(cmk.utils.paths.snmpwalks_dir)
+ walk_cache_path = Path(cmk.utils.paths.var_dir) / "snmp_cache"
+ file_cache_path = Path(cmk.utils.paths.data_source_cache_dir)
+ tcp_cache_path = Path(cmk.utils.paths.tcp_cache_dir)
+ tls_config = TLSConfig(
+ cas_dir=Path(cmk.utils.paths.agent_cas_dir),
+ ca_store=Path(cmk.utils.paths.agent_cert_store),
+ site_crt=Path(cmk.utils.paths.site_cert_file),
+ )
+
state, output = 0, ""
+ pending_passwords_file = cmk.utils.password_store.pending_password_store_path()
+ passwords = cmk.utils.password_store.load(pending_passwords_file)
+ snmp_scan_config = SNMPScanConfig(
+ on_error=OnError.RAISE,
+ missing_sys_description=config_cache.missing_sys_description(host_name),
+ oid_cache_dir=oid_cache_dir,
+ )
for source in sources.make_sources(
host_name,
ipaddress,
- ConfigCache.address_family(host_name),
- config_cache=config_cache,
+ ConfigCache.ip_stack_config(host_name),
+ fetcher_factory=config_cache.fetcher_factory(),
+ snmp_fetcher_config=SNMPFetcherConfig(
+ scan_config=snmp_scan_config,
+ selected_sections=NO_SELECTION,
+ backend_override=None,
+ stored_walk_path=stored_walk_path,
+ walk_cache_path=walk_cache_path,
+ ),
is_cluster=host_name in hosts_config.clusters,
simulation_mode=config.simulation_mode,
file_cache_options=file_cache_options,
@@ -1915,7 +2082,26 @@ def _execute_agent(
discovery=1.5 * check_interval,
inventory=1.5 * check_interval,
),
- snmp_backend_override=None,
+ snmp_backend=config_cache.get_snmp_backend(host_name),
+ file_cache_path=file_cache_path,
+ tcp_cache_path=tcp_cache_path,
+ tls_config=tls_config,
+ computed_datasources=config_cache.computed_datasources(host_name),
+ datasource_programs=config_cache.datasource_programs(host_name),
+ tag_list=config_cache.tag_list(host_name),
+ management_ip=lookup_mgmt_board_ip_address(config_cache, host_name),
+ management_protocol=config_cache.management_protocol(host_name),
+ special_agent_command_lines=config_cache.special_agent_command_lines(
+ host_name,
+ ipaddress,
+ password_store_file=pending_passwords_file,
+ passwords=passwords,
+ ip_address_of=ConfiguredIPLookup(
+ config_cache, error_handler=handle_ip_lookup_failure
+ ),
+ ),
+ agent_connection_mode=config_cache.agent_connection_mode(host_name),
+ check_mk_check_interval=config_cache.check_mk_check_interval(host_name),
):
source_info = source.source_info()
if source_info.fetcher_type is FetcherType.SNMP:
@@ -1925,7 +2111,9 @@ def _execute_agent(
if source_info.fetcher_type is FetcherType.PROGRAM and cmd:
assert isinstance(fetcher, ProgramFetcher)
fetcher = ProgramFetcher(
- cmdline=config_cache.translate_commandline(host_name, ipaddress, cmd),
+ cmdline=config_cache.translate_commandline(
+ host_name, ipaddress, cmd, ip_address_of
+ ),
stdin=fetcher.stdin,
is_cmc=fetcher.is_cmc,
)
@@ -1940,6 +2128,7 @@ def _execute_agent(
host_name=fetcher.host_name,
encryption_handling=fetcher.encryption_handling,
pre_shared_secret=fetcher.pre_shared_secret,
+ tls_config=tls_config,
)
raw_data = get_raw_data(
@@ -2009,10 +2198,9 @@ def _execute_snmp( # type: ignore[no-untyped-def] # pylint: disable=too-many-b
# ('authNoPriv', 'md5', '11111111', '22222222')
# ('authPriv', 'md5', '11111111', '22222222', 'DES', '33333333')
- credentials: SNMPCredentials = snmp_config.credentials
-
- # Insert preconfigured communitiy
if test == "snmpv3":
+ credentials: SNMPCredentials = snmp_config.credentials
+
if snmpv3_use:
snmpv3_credentials = [snmpv3_use]
if snmpv3_use in ["authNoPriv", "authPriv"]:
@@ -2038,8 +2226,12 @@ def _execute_snmp( # type: ignore[no-untyped-def] # pylint: disable=too-many-b
snmpv3_credentials.extend([snmpv3_privacy_proto, snmpv3_privacy_password])
credentials = tuple(snmpv3_credentials)
- elif snmp_community:
- credentials = snmp_community
+ else:
+ credentials = snmp_community or (
+ snmp_config.credentials
+ if isinstance(snmp_config.credentials, str)
+ else snmp_default_community
+ )
# Determine SNMPv2/v3 community
if hostname not in config.explicit_snmp_communities:
@@ -2051,18 +2243,21 @@ def _execute_snmp( # type: ignore[no-untyped-def] # pylint: disable=too-many-b
credentials = cred
# SNMP versions
- if test in ["snmpv2", "snmpv3"]:
- is_bulkwalk_host = True
- is_snmpv2or3_without_bulkwalk_host = False
- elif test == "snmpv2_nobulk":
- is_bulkwalk_host = False
- is_snmpv2or3_without_bulkwalk_host = True
- elif test == "snmpv1":
- is_bulkwalk_host = False
- is_snmpv2or3_without_bulkwalk_host = False
-
- else:
- return 1, "SNMP command not implemented"
+ match test:
+ case "snmpv1":
+ snmp_version = SNMPVersion.V1
+ bulkwalk_enabled = False # not implemented in v1 anyway
+ case "snmpv2":
+ snmp_version = SNMPVersion.V2C
+ bulkwalk_enabled = True
+ case "snmpv2_nobulk":
+ snmp_version = SNMPVersion.V2C
+ bulkwalk_enabled = False
+ case "snmpv3":
+ snmp_version = SNMPVersion.V3
+ bulkwalk_enabled = True
+ case other:
+ return 1, f"SNMP command {other!r} not implemented"
# TODO: What about SNMP management boards?
# TODO: `get_snmp_table()` with some cache handling
@@ -2074,8 +2269,8 @@ def _execute_snmp( # type: ignore[no-untyped-def] # pylint: disable=too-many-b
ipaddress=ipaddress,
credentials=credentials,
port=snmp_config.port,
- is_bulkwalk_host=is_bulkwalk_host,
- is_snmpv2or3_without_bulkwalk_host=is_snmpv2or3_without_bulkwalk_host,
+ snmp_version=snmp_version,
+ bulkwalk_enabled=bulkwalk_enabled,
bulk_walk_size_of=snmp_config.bulk_walk_size_of,
timing={
"timeout": snmp_timeout,
@@ -2087,6 +2282,7 @@ def _execute_snmp( # type: ignore[no-untyped-def] # pylint: disable=too-many-b
snmp_backend=snmp_config.snmp_backend,
)
+ stored_walk_path = Path(cmk.utils.paths.snmpwalks_dir)
data = get_snmp_table(
section_name=None,
tree=BackendSNMPTree(
@@ -2094,7 +2290,7 @@ def _execute_snmp( # type: ignore[no-untyped-def] # pylint: disable=too-many-b
oids=[BackendOIDSpec(c, "string", False) for c in "1456"],
),
walk_cache={},
- backend=make_snmp_backend(snmp_config, log.logger),
+ backend=make_snmp_backend(snmp_config, log.logger, stored_walk_path=stored_walk_path),
)
if data:
@@ -2119,8 +2315,9 @@ def execute(self, args: list[str]) -> ActiveCheckResult:
config_cache = config.get_config_cache()
config_cache.ruleset_matcher.ruleset_optimizer.set_all_processed_hosts({host_name})
- with redirect_stdout(open(os.devnull, "w")):
- host_attrs = config_cache.get_host_attributes(host_name)
+ ip_address_of = config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ )
if plugin == "custom":
for entry in config_cache.custom_checks(host_name):
@@ -2128,7 +2325,10 @@ def execute(self, args: list[str]) -> ActiveCheckResult:
continue
command_line = self._replace_macros(
- host_name, entry["service_description"], entry.get("command_line", "")
+ host_name,
+ entry["service_description"],
+ entry.get("command_line", ""),
+ ip_address_of,
)
if command_line:
cmd = core_config.autodetect_plugin(command_line)
@@ -2139,16 +2339,28 @@ def execute(self, args: list[str]) -> ActiveCheckResult:
"Passive check - cannot be executed",
)
+ with redirect_stdout(open(os.devnull, "w")):
+ # TODO: we're redirecting stdout to /dev/null here, so we might want to create
+ # a version of ip_address_of that does not write to stdout in the first place.
+ # Also, I don't think we ever revisit the collected errors.
+ host_attrs = config_cache.get_host_attributes(host_name, ip_address_of)
+
host_macros = ConfigCache.get_host_macros_from_attributes(host_name, host_attrs)
- resource_macros = self._get_resouce_macros()
+ resource_macros = config.get_resource_macros()
+ translations = config.get_service_translations(config_cache.ruleset_matcher, host_name)
+ macros = {**host_macros, **resource_macros}
+ password_store_file = cmk.utils.password_store.pending_password_store_path()
active_check_config = server_side_calls.ActiveCheck(
load_active_checks()[1],
config.active_check_info,
host_name,
+ config.get_ssc_host_config(host_name, config_cache, macros, ip_address_of),
host_attrs,
- translations=config.get_service_translations(config_cache.ruleset_matcher, host_name),
- macros={**host_macros, **resource_macros},
- stored_passwords=cmk.utils.password_store.load(),
+ config.http_proxies,
+ lambda x: config.get_final_service_description(x, translations),
+ config.use_new_descriptions_for,
+ cmk.utils.password_store.load(password_store_file),
+ password_store_file,
)
active_check = dict(config_cache.active_checks(host_name)).get(plugin, [])
@@ -2168,33 +2380,25 @@ def execute(self, args: list[str]) -> ActiveCheckResult:
"Failed to compute check result",
)
- def _get_resouce_macros(self) -> Mapping[str, str]:
- macros = {}
- try:
- for line in (omd_root / "etc/nagios/resource.cfg").open():
- line = line.strip()
- if not line or line[0] == "#":
- continue
- varname, value = line.split("=", 1)
- macros[varname] = value
- except Exception:
- if cmk.utils.debug.enabled():
- raise
- return macros
-
# Simulate replacing some of the more important macros of host and service. We
# cannot use dynamic macros, of course. Note: this will not work
# without OMD, since we do not know the value of $USER1$ and $USER2$
# here. We could read the Nagios resource.cfg file, but we do not
# know for sure the place of that either.
- def _replace_macros(self, hostname: HostName, service_desc: str, commandline: str) -> str:
+ def _replace_macros(
+ self,
+ hostname: HostName,
+ service_desc: str,
+ commandline: str,
+ ip_address_of: config.IPLookup,
+ ) -> str:
config_cache = config.get_config_cache()
macros = ConfigCache.get_host_macros_from_attributes(
- hostname, config_cache.get_host_attributes(hostname)
+ hostname, config_cache.get_host_attributes(hostname, ip_address_of)
)
service_attrs = core_config.get_service_attributes(hostname, service_desc, config_cache)
macros.update(ConfigCache.get_service_macros_from_attributes(service_attrs))
- macros.update(self._get_resouce_macros())
+ macros.update(config.get_resource_macros())
return replace_macros_in_str(commandline, {k: f"{v}" for k, v in macros.items()})
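
For illustration, everything above funnels into `replace_macros_in_str`. A sketch with hypothetical macro values (real ones come from host/service attributes and `config.get_resource_macros()`):

```python
macros = {"$HOSTNAME$": "web-01", "$HOSTADDRESS$": "10.0.0.5"}  # hypothetical
command = replace_macros_in_str(
    "check_icmp -H $HOSTADDRESS$", {k: f"{v}" for k, v in macros.items()}
)
# -> "check_icmp -H 10.0.0.5"
```
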
@@ -2230,6 +2434,22 @@ def _execute_check_plugin(self, commandline: str) -> tuple[ServiceState, Service
automations.register(AutomationActiveCheck())
+class AutomationUpdatePasswordsMergedFile(Automation):
+ cmd = "update-passwords-merged-file"
+ needs_config = True
+ needs_checks = False
+
+ def execute(self, args: list[str]) -> UpdatePasswordsMergedFileResult:
+ cmk.utils.password_store.save(
+ config.get_config_cache().collect_passwords(),
+ cmk.utils.password_store.pending_password_store_path(),
+ )
+ return UpdatePasswordsMergedFileResult()
+
+
+automations.register(AutomationUpdatePasswordsMergedFile())
+
+
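
A sketch of the round trip the new automation enables (the secret is illustrative): the automation writes all collected passwords to the *pending* store, and fetch-time code reads them back via the `password_store_file=...` parameters threaded through this patch.

```python
import cmk.utils.password_store as password_store

path = password_store.pending_password_store_path()
password_store.save({"my_secret_id": "hunter2"}, path)  # what the automation does
assert password_store.load(path)["my_secret_id"] == "hunter2"  # what readers do
```
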
class AutomationUpdateDNSCache(Automation):
cmd = "update-dns-cache"
needs_config = True
@@ -2275,14 +2495,45 @@ def execute(self, args: list[str]) -> GetAgentOutputResult:
info = b""
try:
- ipaddress = config.lookup_ip_address(config_cache, hostname)
+ ip_stack_config = ConfigCache.ip_stack_config(hostname)
+ ipaddress = (
+ None
+ if ip_stack_config is ip_lookup.IPStackConfig.NO_IP
+ else config.lookup_ip_address(config_cache, hostname)
+ )
check_interval = config_cache.check_mk_check_interval(hostname)
+ stored_walk_path = Path(cmk.utils.paths.snmpwalks_dir)
+ walk_cache_path = Path(cmk.utils.paths.var_dir) / "snmp_cache"
+ section_cache_path = Path(var_dir)
+ file_cache_path = Path(cmk.utils.paths.data_source_cache_dir)
+ tcp_cache_path = Path(cmk.utils.paths.tcp_cache_dir)
+ tls_config = TLSConfig(
+ cas_dir=Path(cmk.utils.paths.agent_cas_dir),
+ ca_store=Path(cmk.utils.paths.agent_cert_store),
+ site_crt=Path(cmk.utils.paths.site_cert_file),
+ )
+ snmp_scan_config = SNMPScanConfig(
+ on_error=OnError.RAISE,
+ oid_cache_dir=Path(cmk.utils.paths.snmp_scan_cache_dir),
+ missing_sys_description=config_cache.missing_sys_description(hostname),
+ )
+
if ty == "agent":
+ core_password_store_file = cmk.utils.password_store.core_password_store_path(
+ LATEST_CONFIG
+ )
for source in sources.make_sources(
hostname,
ipaddress,
- ConfigCache.address_family(hostname),
- config_cache=config_cache,
+ ip_stack_config,
+ fetcher_factory=config_cache.fetcher_factory(),
+ snmp_fetcher_config=SNMPFetcherConfig(
+ scan_config=snmp_scan_config,
+ selected_sections=NO_SELECTION,
+ backend_override=None,
+ stored_walk_path=stored_walk_path,
+ walk_cache_path=walk_cache_path,
+ ),
is_cluster=hostname in hosts_config.clusters,
simulation_mode=config.simulation_mode,
file_cache_options=file_cache_options,
@@ -2291,7 +2542,26 @@ def execute(self, args: list[str]) -> GetAgentOutputResult:
discovery=1.5 * check_interval,
inventory=1.5 * check_interval,
),
- snmp_backend_override=None,
+ snmp_backend=config_cache.get_snmp_backend(hostname),
+ file_cache_path=file_cache_path,
+ tcp_cache_path=tcp_cache_path,
+ tls_config=tls_config,
+ computed_datasources=config_cache.computed_datasources(hostname),
+ datasource_programs=config_cache.datasource_programs(hostname),
+ tag_list=config_cache.tag_list(hostname),
+ management_ip=lookup_mgmt_board_ip_address(config_cache, hostname),
+ management_protocol=config_cache.management_protocol(hostname),
+ special_agent_command_lines=config_cache.special_agent_command_lines(
+ hostname,
+ ipaddress,
+ password_store_file=core_password_store_file,
+ passwords=cmk.utils.password_store.load(core_password_store_file),
+ ip_address_of=ConfiguredIPLookup(
+ config_cache, error_handler=handle_ip_lookup_failure
+ ),
+ ),
+ agent_connection_mode=config_cache.agent_connection_mode(hostname),
+ check_mk_check_interval=config_cache.check_mk_check_interval(hostname),
):
source_info = source.source_info()
if source_info.fetcher_type is FetcherType.SNMP:
@@ -2306,11 +2576,18 @@ def execute(self, args: list[str]) -> GetAgentOutputResult:
)
host_sections = parse_raw_data(
make_parser(
- config_cache,
- source_info,
+ config_cache.parser_factory(),
+ source_info.hostname,
+ source_info.fetcher_type,
checking_sections=config_cache.make_checking_sections(
hostname, selected_sections=NO_SELECTION
),
+ persisted_section_dir=make_persisted_section_dir(
+ source_info.hostname,
+ fetcher_type=source_info.fetcher_type,
+ ident=source_info.ident,
+ section_cache_path=section_cache_path,
+ ),
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.checking"),
),
@@ -2321,11 +2598,7 @@ def execute(self, args: list[str]) -> GetAgentOutputResult:
hostname,
ipaddress,
host_sections,
- exit_spec=config_cache.exit_code_spec(hostname, source_info.ident),
- time_settings=config_cache.get_piggybacked_hosts_time_settings(
- piggybacked_hostname=hostname,
- ),
- is_piggyback=config_cache.is_piggyback_host(hostname),
+ config=config_cache.summary_config(hostname, source_info.ident),
fetcher_type=source_info.fetcher_type,
)
if any(r.state != 0 for r in source_results):
@@ -2337,8 +2610,12 @@ def execute(self, args: list[str]) -> GetAgentOutputResult:
else:
if not ipaddress:
raise MKGeneralException("Failed to gather IP address of %s" % hostname)
- snmp_config = config_cache.make_snmp_config(hostname, ipaddress, SourceType.HOST)
- backend = make_snmp_backend(snmp_config, log.logger, use_cache=False)
+ snmp_config = config_cache.make_snmp_config(
+ hostname, ipaddress, SourceType.HOST, backend_override=None
+ )
+ backend = make_snmp_backend(
+ snmp_config, log.logger, use_cache=False, stored_walk_path=stored_walk_path
+ )
lines = []
for walk_oid in oids_to_walk():
@@ -2375,8 +2652,19 @@ class AutomationNotificationReplay(Automation):
needs_checks = True # TODO: Can we change this?
def execute(self, args: list[str]) -> NotificationReplayResult:
+ def ensure_nagios(msg: str) -> None:
+ if config.is_cmc():
+ raise RuntimeError(msg)
+
nr = args[0]
- notify.notification_replay_backlog(int(nr))
+ notify.notification_replay_backlog(
+ lambda hostname, plugin: config.get_config_cache().notification_plugin_parameters(
+ hostname, plugin
+ ),
+ config.get_http_proxy,
+ ensure_nagios,
+ int(nr),
+ )
return NotificationReplayResult()
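
The notify entry points now receive their collaborators explicitly instead of reading global state: a parameter lookup, a proxy resolver, and a guard that refuses to run under the CMC. A sketch of the contract (the backlog index is illustrative):

```python
def ensure_nagios(msg: str) -> None:
    # Replay/analyse/test are Nagios-only code paths; bail out on the CMC.
    if config.is_cmc():
        raise RuntimeError(msg)

def parameters(hostname, plugin):
    return config.get_config_cache().notification_plugin_parameters(hostname, plugin)

notify.notification_replay_backlog(parameters, config.get_http_proxy, ensure_nagios, 0)
```
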
@@ -2389,13 +2677,54 @@ class AutomationNotificationAnalyse(Automation):
needs_checks = True # TODO: Can we change this?
def execute(self, args: list[str]) -> NotificationAnalyseResult:
+ def ensure_nagios(msg: str) -> None:
+ if config.is_cmc():
+ raise RuntimeError(msg)
+
nr = args[0]
- return NotificationAnalyseResult(notify.notification_analyse_backlog(int(nr)))
+ return NotificationAnalyseResult(
+ notify.notification_analyse_backlog(
+ lambda hostname, plugin: config.get_config_cache().notification_plugin_parameters(
+ hostname, plugin
+ ),
+ config.get_http_proxy,
+ ensure_nagios,
+ int(nr),
+ )
+ )
automations.register(AutomationNotificationAnalyse())
+class AutomationNotificationTest(Automation):
+ cmd = "notification-test"
+ needs_config = True
+ needs_checks = True # TODO: Can we change this?
+
+ def execute(self, args: list[str]) -> NotificationTestResult:
+ def ensure_nagios(msg: str) -> None:
+ if config.is_cmc():
+ raise RuntimeError(msg)
+
+ context = json.loads(args[0])
+ dispatch = args[1]
+ return NotificationTestResult(
+ notify.notification_test(
+ context,
+ lambda hostname, plugin: config.get_config_cache().notification_plugin_parameters(
+ hostname, plugin
+ ),
+ config.get_http_proxy,
+ ensure_nagios,
+ dispatch=dispatch == "True",
+ )
+ )
+
+
+automations.register(AutomationNotificationTest())
+
+
class AutomationGetBulks(Automation):
cmd = "notification-get-bulks"
needs_config = False
diff --git a/cmk/base/check_api.py b/cmk/base/check_api.py
index 3b57776df55..920526f2488 100644
--- a/cmk/base/check_api.py
+++ b/cmk/base/check_api.py
@@ -15,35 +15,29 @@
"""
-import socket
-import time
-from collections.abc import Callable, Mapping
-from typing import Any, Generator, Literal
-
-import cmk.utils.debug as _debug
+from collections.abc import Callable, Generator
+from typing import Any
# These imports are not meant for use in the API. So we prefix the names
# with an underscore. These names will be skipped when loading into the
# check context.
-from cmk.utils.exceptions import MKGeneralException
-from cmk.utils.hostaddress import HostName
from cmk.utils.http_proxy_config import HTTPProxyConfig
+
+# pylint: disable=unused-import
+from cmk.utils.legacy_check_api import LegacyCheckDefinition as LegacyCheckDefinition
from cmk.utils.metrics import MetricName
from cmk.utils.regex import regex as regex # pylint: disable=unused-import
+# pylint: disable=unused-import
from cmk.checkengine.checkresults import state_markers as state_markers
from cmk.checkengine.submitters import ServiceDetails, ServiceState
-import cmk.base.config as _config
-
-# pylint: disable=unused-import
-from cmk.base.api.agent_based.register.utils_legacy import (
- LegacyCheckDefinition as LegacyCheckDefinition,
-)
+from cmk.base.config import CheckContext as _CheckContext
+from cmk.base.config import get_http_proxy as _get_http_proxy
from cmk.base.plugin_contexts import host_name as host_name # pylint: disable=unused-import
from cmk.base.plugin_contexts import service_description # pylint: disable=unused-import
-from cmk.agent_based.v1 import render as _render
+from cmk.agent_based import v1 as _v1
# pylint: enable=unused-import
@@ -62,18 +56,10 @@
# to ease migration:
-DiscoveryResult = Generator[tuple[str | None, Mapping[str, object]], None, None]
CheckResult = Generator[tuple[int, str] | tuple[int, str, list[_MetricTuple]], None, None]
-# to ease migration:
-def Service(
- *, item: str | None = None, parameters: Mapping[str, object] | None = None
-) -> tuple[str | None, Mapping[str, object]]:
- return item, parameters or {}
-
-
-def get_check_api_context() -> _config.CheckContext:
+def get_check_api_context() -> _CheckContext:
"""This is called from cmk.base code to get the Check API things. Don't
use this from checks."""
return {k: v for k, v in globals().items() if not k.startswith("_")}
@@ -118,40 +104,6 @@ def savefloat(f: Any) -> float:
return 0.0
-# These functions were used in some specific checks until 1.6. Don't add it to
-# the future check API. It's kept here for compatibility reasons for now.
-def is_ipv6_primary(hostname: HostName) -> bool:
- return _config.get_config_cache().default_address_family(hostname) is socket.AF_INET6
-
-
-def get_age_human_readable(seconds: float) -> str:
- return _render.timespan(seconds) if seconds >= 0 else f"-{_render.timespan(-seconds)}"
-
-
-def get_bytes_human_readable(
- bytes_: int,
- base: Literal[1000, 1024] = 1024,
- precision: object = None, # for legacy compatibility
- unit: str = "B",
-) -> str:
- if not (
- renderer := {
- 1000: _render.disksize,
- 1024: _render.bytes,
- }.get(int(base))
- ):
- raise ValueError(f"Unsupported value for 'base' in get_bytes_human_readable: {base=}")
- return renderer(bytes_)[:-1] + unit
-
-
-def get_timestamp_human_readable(timestamp: float) -> str:
- """Format a time stamp for humans in "%Y-%m-%d %H:%M:%S" format.
- In case None is given or timestamp is 0, it returns "never"."""
- if timestamp:
- return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timestamp)))
- return "never"
-
-
def _normalize_levels(levels: Levels) -> Levels:
if len(levels) == 2: # upper warn and crit
warn_upper, crit_upper = levels[0], levels[1]
@@ -165,49 +117,39 @@ def _normalize_levels(levels: Levels) -> Levels:
def _do_check_levels(
- value: int | float, levels: Levels, human_readable_func: Callable, unit_info: str
+ value: int | float, levels: Levels, human_readable_func: Callable
) -> tuple[ServiceState, ServiceDetails]:
warn_upper, crit_upper, warn_lower, crit_lower = _normalize_levels(levels)
# Critical cases
if crit_upper is not None and value >= crit_upper:
- return 2, _levelsinfo_ty("at", warn_upper, crit_upper, human_readable_func, unit_info)
+ return 2, _levelsinfo_ty("at", warn_upper, crit_upper, human_readable_func)
if crit_lower is not None and value < crit_lower:
- return 2, _levelsinfo_ty("below", warn_lower, crit_lower, human_readable_func, unit_info)
+ return 2, _levelsinfo_ty("below", warn_lower, crit_lower, human_readable_func)
# Warning cases
if warn_upper is not None and value >= warn_upper:
- return 1, _levelsinfo_ty("at", warn_upper, crit_upper, human_readable_func, unit_info)
+ return 1, _levelsinfo_ty("at", warn_upper, crit_upper, human_readable_func)
if warn_lower is not None and value < warn_lower:
- return 1, _levelsinfo_ty("below", warn_lower, crit_lower, human_readable_func, unit_info)
+ return 1, _levelsinfo_ty("below", warn_lower, crit_lower, human_readable_func)
return 0, ""
-def _levelsinfo_ty(
- ty: str, warn: Warn, crit: Crit, human_readable_func: Callable, unit_info: str
-) -> str:
- warn_str = "never" if warn is None else f"{human_readable_func(warn)}{unit_info}"
- crit_str = "never" if crit is None else f"{human_readable_func(crit)}{unit_info}"
+def _levelsinfo_ty(ty: str, warn: Warn, crit: Crit, human_readable_func: Callable) -> str:
+ warn_str = "never" if warn is None else f"{human_readable_func(warn)}"
+ crit_str = "never" if crit is None else f"{human_readable_func(crit)}"
return f" (warn/crit {ty} {warn_str}/{crit_str})"
def _build_perfdata(
dsname: None | MetricName,
value: int | float,
- scale_value: Callable,
levels: Levels,
boundaries: tuple | None,
- ref_value: None | int | float = None,
) -> list:
if not dsname:
return []
-
- perf_list = [dsname, value, levels[0], levels[1]]
- if isinstance(boundaries, tuple) and len(boundaries) == 2:
- perf_list.extend([scale_value(v) for v in boundaries])
- perfdata = [tuple(perf_list)]
- if ref_value:
- perfdata.append(("predict_" + dsname, ref_value))
- return perfdata
+ used_boundaries = boundaries if isinstance(boundaries, tuple) and len(boundaries) == 2 else ()
+ return [(dsname, value, levels[0], levels[1], *used_boundaries)]
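
With `scale_value` and `ref_value` gone, the perfdata tuple shape is now fixed. Expected behaviour of the simplified helper (values illustrative):

```python
# Four-tuple levels: (warn_upper, crit_upper, warn_lower, crit_lower).
_build_perfdata("util", 87.5, (80.0, 90.0, None, None), (0.0, 100.0))
# -> [("util", 87.5, 80.0, 90.0, 0.0, 100.0)]
_build_perfdata("util", 87.5, (80.0, 90.0, None, None), None)
# -> [("util", 87.5, 80.0, 90.0)]   # boundaries dropped unless a 2-tuple
```
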
def check_levels( # pylint: disable=too-many-branches
@@ -215,12 +157,9 @@ def check_levels( # pylint: disable=too-many-branches
dsname: None | MetricName,
params: Any,
unit: str = "",
- factor: int | float = 1.0,
- scale: int | float = 1.0,
- statemarkers: bool = False,
human_readable_func: Callable | None = None,
infoname: str | None = None,
- boundaries: tuple | None = None,
+ boundaries: tuple[float | None, float | None] | None = None,
) -> ServiceCheckResult:
"""Generic function for checking a value against levels
@@ -240,20 +179,13 @@ def check_levels( # pylint: disable=too-many-branches
Dict containing "upper" or "levels_upper_min" as key -> upper level checking.
Dict containing "lower" and "upper"/"levels_upper_min" as key ->
lower and upper level checking.
- unit: unit to be displayed in the plugin output.
+ unit: unit to be displayed in the plug-in output.
Be aware: if a (builtin) human_readable_func is stated which already
provides a unit info, then this unit is not necessary. An additional
unit info is useful if a rate is calculated, eg.
unit="/s",
human_readable_func=get_bytes_human_readable,
results in 'X B/s'.
- factor: the levels are multiplied with this factor before applying
- them to the value. This is being used for the CPU load check
- currently. The levels here are "per CPU", so the number of
- CPUs is used as factor.
- scale: Scale of the levels in relation to "value" and the value in the RRDs.
- For example if the levels are specified in GB and the RRD store KB, then
- the scale is 1024*1024.
human_readable_func: Single argument function to present in a human readable fashion
the value. Builtin human_readable-functions already provide a unit:
- get_percent_human_readable
@@ -273,69 +205,50 @@ def check_levels( # pylint: disable=too-many-branches
else:
unit_info = ""
- def default_human_readable_func(x: float) -> str:
- return "%.2f" % (x / scale)
-
if human_readable_func is None:
- human_readable_func = default_human_readable_func
-
- def scale_value(v: None | int | float) -> None | int | float:
- if v is None:
- return None
- return v * factor * scale
- infotext = f"{human_readable_func(value)}{unit_info}"
- if infoname:
- infotext = f"{infoname}: {infotext}"
+ def render_func(x: float) -> str:
+ return f"{x:.2f}{unit_info}"
- # {}, (), None, (None, None), (None, None, None, None) -> do not check any levels
- if not params or set(params) <= {None}:
- # always add warn/crit, because the call-site may not know it passed None,
- # and therefore expect a quadruple.
- perf = _build_perfdata(dsname, value, scale_value, (None, None), boundaries)
- return 0, infotext, perf
+ else:
- # Pair of numbers -> static levels
- if isinstance(params, tuple):
- levels = tuple(scale_value(v) for v in _normalize_levels(params))
- ref_value = None
+ def render_func(x: float) -> str:
+ return f"{human_readable_func(x)}{unit_info}"
- # Dictionary -> predictive levels
- else:
+ if params and isinstance(params, dict):
if not dsname:
raise TypeError("Metric name is empty/None")
+ result, *metrics = _v1.check_levels_predictive(
+ value,
+ levels=params,
+ metric_name=dsname,
+ render_func=render_func,
+ label=infoname,
+ boundaries=boundaries,
+ )
+ assert isinstance(result, _v1.Result)
+ return (
+ int(result.state),
+ result.summary,
+ [
+ (m.name, m.value, *m.levels, *m.boundaries)
+ for m in metrics
+ if isinstance(m, _v1.Metric)
+ ],
+ )
+
+ infotext = f"{render_func(value)}"
+ if infoname:
+ infotext = f"{infoname}: {infotext}"
- try:
- ref_value, levels = params["__get_predictive_levels__"](
- dsname,
- levels_factor=factor * scale,
- )
- if ref_value:
- predictive_levels_msg = "predicted reference: %s" % human_readable_func(ref_value)
- else:
- predictive_levels_msg = "no reference for prediction yet"
-
- except MKGeneralException as e:
- ref_value = None
- levels = (None, None, None, None)
- predictive_levels_msg = "no reference for prediction (%s)" % e
-
- except Exception as e:
- if _debug.enabled():
- raise
- return 3, "%s" % e, []
-
- if predictive_levels_msg:
- infotext += " (%s)" % predictive_levels_msg
-
- state, levelstext = _do_check_levels(value, levels, human_readable_func, unit_info)
- infotext += levelstext
- if statemarkers:
- infotext += state_markers[state]
-
- perfdata = _build_perfdata(dsname, value, scale_value, levels, boundaries, ref_value)
+ # normalize {}, (), None, (None, None), (None, None, None, None)
+ if not params or set(params) <= {None}:
+ levels: Levels = (None, None, None, None)
+ else:
+ levels = _normalize_levels(params)
- return state, infotext, perfdata
+ state, levelstext = _do_check_levels(value, levels, render_func)
+ return state, infotext + levelstext, _build_perfdata(dsname, value, levels, boundaries)
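
A hedged sketch of calling the slimmed-down legacy `check_levels` after this change (static 2-tuple levels; all values illustrative):

```python
state, infotext, perfdata = check_levels(
    87.5,                        # measured value
    "util",                      # metric name for perfdata
    (80.0, 90.0),                # upper warn/crit levels
    human_readable_func=lambda v: f"{v:.1f}%",
    infoname="Usage",
)
# state == 1
# infotext == "Usage: 87.5% (warn/crit at 80.0%/90.0%)"
# perfdata == [("util", 87.5, 80.0, 90.0)]
```
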
def passwordstore_get_cmdline(fmt: str, pw: tuple | str) -> str | tuple[str, str, str]:
@@ -355,7 +268,7 @@ def get_http_proxy(http_proxy: tuple[str, str]) -> HTTPProxyConfig:
Intended to receive a value configured by the user using the HTTPProxyReference valuespec.
"""
- return _config.get_http_proxy(http_proxy)
+ return _get_http_proxy(http_proxy)
# NOTE: Currently this is not really needed, it is just here to keep any start
diff --git a/cmk/base/check_legacy_includes/__init__.py b/cmk/base/check_legacy_includes/__init__.py
index 67165df1c35..c0d5ce1ee83 100644
--- a/cmk/base/check_legacy_includes/__init__.py
+++ b/cmk/base/check_legacy_includes/__init__.py
@@ -12,6 +12,6 @@
# This folder is part of a namespace package, that can be shadowed/extended
# using the local/ hierarchy.
#
-# Do not change the following line, is is picked up by the build process:
+# Do not change the following line, it is picked up by the build process:
# check_mk.make: do-not-deploy
#
diff --git a/cmk/base/check_legacy_includes/acme.py b/cmk/base/check_legacy_includes/acme.py
index 12a8786e454..2be24ed27da 100644
--- a/cmk/base/check_legacy_includes/acme.py
+++ b/cmk/base/check_legacy_includes/acme.py
@@ -2,8 +2,10 @@
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
+from collections.abc import Mapping
+from typing import Final
-acme_environment_states = {
+ACME_ENVIRONMENT_STATES: Final[Mapping[str, tuple[int, str]]] = {
"1": (0, "initial"),
"2": (0, "normal"),
"3": (1, "minor"),
diff --git a/cmk/base/check_legacy_includes/alcatel.py b/cmk/base/check_legacy_includes/alcatel.py
deleted file mode 100644
index e3cad218d66..00000000000
--- a/cmk/base/check_legacy_includes/alcatel.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from collections.abc import Iterable
-
-ALCATEL_TEMP_CHECK_DEFAULT_PARAMETERS = {
- "levels": (45.0, 50.0),
-}
-
-DiscoveryResult = (
- Iterable[tuple[None, str | None]]
- | Iterable[tuple[str, str | None]]
- | Iterable[tuple[str, dict]]
-)
-
-
-def inventory_alcatel_cpu(info):
- yield None, {}
-
-
-def check_alcatel_cpu(_no_item, _no_params, info):
- cpu_perc = int(info[0][0])
- warn, crit = (90.0, 95.0)
- status = 0
- levelstext = ""
- if cpu_perc >= crit:
- status = 2
- elif cpu_perc >= warn:
- status = 1
- if status:
- levelstext = f" (warn/crit at {warn:.1f}%/{crit:.1f}%)"
- perfdata = [("util", cpu_perc, warn, crit, 0, 100)]
- return status, "total: %.1f%%" % cpu_perc + levelstext, perfdata
-
-
-def inventory_alcatel_fans(info) -> DiscoveryResult: # type: ignore[no-untyped-def]
- for nr, _value in enumerate(info, 1):
- yield str(nr), None
-
-
-def check_alcatel_fans(item, _no_params, info):
- fan_states = {
- 0: "has no status",
- 1: "not running",
- 2: "running",
- }
- try:
- line = info[int(item) - 1]
- fan_state = int(line[0])
- except (ValueError, IndexError):
- return None
-
- state = 0 if fan_state == 2 else 2
- return state, "Fan " + fan_states.get(fan_state, "unknown (%s)" % fan_state)
-
-
-def inventory_alcatel_temp(info) -> DiscoveryResult: # type: ignore[no-untyped-def]
- with_slot = len(info) != 1
- for index, row in enumerate(info):
- for oid, name in enumerate(["Board", "CPU"]):
- if row[oid] != "0":
- if with_slot:
- yield f"Slot {index + 1} {name}", {}
- else:
- yield name, {}
diff --git a/cmk/base/check_legacy_includes/apc_ats.py b/cmk/base/check_legacy_includes/apc_ats.py
new file mode 100644
index 00000000000..5a6f5a85524
--- /dev/null
+++ b/cmk/base/check_legacy_includes/apc_ats.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python3
+# Copyright (C) 2024 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+import enum
+from collections.abc import Sequence
+from contextlib import suppress
+from typing import NamedTuple
+
+from cmk.agent_based.v2 import any_of, equals
+
+
+class CommunictionStatus(enum.Enum):
+ NeverDiscovered = 1
+ Established = 2
+ Lost = 3
+
+
+class RedunandancyStatus(enum.Enum):
+ Lost = 1
+ Redundant = 2
+
+
+class Source(enum.Enum):
+ A = 1
+ B = 2
+
+
+class OverCurrentStatus(enum.Enum):
+ Exceeded = 1
+ OK = 2
+
+
+class PowerSupplyStatus(enum.Enum):
+ Failure = 1
+ OK = 2
+
+
+class PowerSource(NamedTuple):
+ name: str
+ status: PowerSupplyStatus
+
+
+class Status(NamedTuple):
+ com_status: CommunictionStatus | None
+ selected_source: Source | None
+ redundancy: RedunandancyStatus | None
+ overcurrent: OverCurrentStatus | None
+ powersources: Sequence[PowerSource]
+
+ @classmethod
+ def from_raw(cls, line):
+ com_state, source, redunancy, overcurrent, *powersources = list(map(_parse_int, line))
+ return cls(
+ com_status=CommunictionStatus(com_state),
+ selected_source=Source(source),
+ redundancy=RedunandancyStatus(redunancy),
+ overcurrent=OverCurrentStatus(overcurrent),
+ powersources=cls.parse_powersources(powersources),
+ )
+
+ @staticmethod
+ def parse_powersources(raw: list[int | None]) -> Sequence[PowerSource]:
+ return [
+ PowerSource(name=voltage, status=PowerSupplyStatus(value))
+ for voltage, value in zip(["5V", "24V", "3.3V", "1.0V"], raw)
+ if value is not None
+ ]
+
+
+def _parse_int(value: str) -> int | None:
+ with suppress(ValueError):
+ return int(value)
+ return None
+
+
+DETECT = any_of(
+ equals(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.318.1.3.11"),
+ equals(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.318.1.3.32"),
+)
diff --git a/cmk/base/check_legacy_includes/arbor.py b/cmk/base/check_legacy_includes/arbor.py
deleted file mode 100644
index 983fe17088b..00000000000
--- a/cmk/base/check_legacy_includes/arbor.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.base.check_api import check_levels
-
-# .
-# .--Disk Usage----------------------------------------------------------.
-# | ____ _ _ _ _ |
-# | | _ \(_)___| | __ | | | |___ __ _ __ _ ___ |
-# | | | | | / __| |/ / | | | / __|/ _` |/ _` |/ _ \ |
-# | | |_| | \__ \ < | |_| \__ \ (_| | (_| | __/ |
-# | |____/|_|___/_|\_\ \___/|___/\__,_|\__, |\___| |
-# | |___/ |
-# '----------------------------------------------------------------------'
-
-
-def inventory_arbor_disk_usage(parsed):
- if "disk" in parsed:
- return [("/", {})]
- return []
-
-
-def check_arbor_disk_usage(_no_item, params, parsed):
- usage = int(parsed["disk"])
- yield check_levels(usage, None, params["levels"], infoname="Disk usage")
- yield 0, "", [("disk_utilization", float(usage) / 100.0)]
-
-
-# .
-# .--Host Fault----------------------------------------------------------.
-# | _ _ _ _____ _ _ |
-# | | | | | ___ ___| |_ | ___|_ _ _ _| | |_ |
-# | | |_| |/ _ \/ __| __| | |_ / _` | | | | | __| |
-# | | _ | (_) \__ \ |_ | _| (_| | |_| | | |_ |
-# | |_| |_|\___/|___/\__| |_| \__,_|\__,_|_|\__| |
-# | |
-# '----------------------------------------------------------------------'
-
-
-def inventory_arbor_host_fault(parsed):
- if "host_fault" in parsed:
- return [(None, None)]
- return []
-
-
-def check_arbor_host_fault(_no_item, _no_params, parsed):
- status = 0
- if parsed["host_fault"] != "No Fault":
- status = 2
- return status, parsed["host_fault"]
-
-
-# .
-# .--Drop Rate-----------------------------------------------------------.
-# | ____ ____ _ |
-# | | _ \ _ __ ___ _ __ | _ \ __ _| |_ ___ |
-# | | | | | '__/ _ \| '_ \ | |_) / _` | __/ _ \ |
-# | | |_| | | | (_) | |_) | | _ < (_| | || __/ |
-# | |____/|_| \___/| .__/ |_| \_\__,_|\__\___| |
-# | |_| |
-# '----------------------------------------------------------------------'
-
-
-def inventory_arbor_drop_rate(parsed):
- if "drop_rate" in parsed:
- return [("Overrun", {})]
- return []
-
-
-def check_arbor_drop_rate(_no_item, params, parsed):
- drop_rate = int(parsed["drop_rate"])
- lower_levels = params.get("levels_lower") or (None, None)
- upper_levels = params.get("levels") or (None, None)
- yield check_levels(
- drop_rate,
- "if_in_pkts",
- upper_levels + lower_levels,
- human_readable_func=lambda x: "%.1f pps",
- )
-
-
-# .
diff --git a/cmk/base/check_legacy_includes/aws.py b/cmk/base/check_legacy_includes/aws.py
index e2ef7ead658..f056abc41e4 100644
--- a/cmk/base/check_legacy_includes/aws.py
+++ b/cmk/base/check_legacy_includes/aws.py
@@ -3,8 +3,8 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-import functools
-from collections.abc import Callable
+from collections.abc import Callable, Mapping
+from typing import TypeVar
from cmk.base.check_api import check_levels, CheckResult
from cmk.base.plugins.agent_based.agent_based_api.v1 import IgnoreResultsError, render
@@ -70,7 +70,7 @@ def check_aws_limits(aws_service, params, parsed_region_data):
- levels: use plain resource_key
- performance data: aws_%s_%s % AWS resource, resource_key
"""
- long_output = []
+ long_output: list[tuple[int, str]] = []
levels_reached = set()
max_state = 0
perfdata = []
@@ -103,7 +103,6 @@ def check_aws_limits(aws_service, params, parsed_region_data):
None,
(warn, crit),
human_readable_func=render.percent,
- statemarkers=True,
infoname="Usage",
)
@@ -111,15 +110,16 @@ def check_aws_limits(aws_service, params, parsed_region_data):
if state:
levels_reached.add(resource_title)
infotext += f", {extrainfo}"
- long_output.append(infotext)
+ long_output.append((state, infotext))
if levels_reached:
yield max_state, "Levels reached: %s" % ", ".join(sorted(levels_reached)), perfdata
else:
yield 0, "No levels reached", perfdata
- if long_output:
- yield 0, "\n%s" % "\n".join(sorted(long_output))
+ # use `notice` upon migration!
+ for state, details in sorted(long_output, key=lambda x: x[1]):
+ yield state, f"\n{details}"
def aws_get_float_human_readable(value: float, unit: str = "") -> str:
@@ -210,22 +210,11 @@ def check_aws_metrics(
raise IgnoreResultsError("Currently no data from AWS")
-def aws_get_parsed_item_data(check_function: Callable) -> Callable:
- """
- Modified version of get_parsed_item_data which lets services go stale instead of UNKN if the
- item is not found.
- """
+_Data = TypeVar("_Data")
- @functools.wraps(check_function)
- def wrapped_check_function(item, params, parsed):
- if not isinstance(parsed, dict):
- return (
- 3,
- "Wrong usage of decorator function 'aws_get_parsed_item_data': parsed is "
- "not a dict",
- )
- if item not in parsed:
- raise IgnoreResultsError("Currently no data from AWS")
- return check_function(item, params, parsed[item])
-
- return wrapped_check_function
+
+def get_data_or_go_stale(item: str, section: Mapping[str, _Data]) -> _Data:
+ try:
+ return section[item]
+ except KeyError:
+ raise IgnoreResultsError("Currently no data from AWS")
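
Call sites migrate from the removed decorator to an explicit lookup at the top of the check function. A hedged sketch (the check body is illustrative):

```python
# Before: @aws_get_parsed_item_data wrapped the check function.
# After: the check fetches its item itself and goes stale on a miss.
def check_aws_example(item, params, parsed):
    data = get_data_or_go_stale(item, parsed)  # IgnoreResultsError if absent
    yield 0, "Resources: %d" % len(data)       # illustrative check logic
```
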
diff --git a/cmk/base/check_legacy_includes/azure.py b/cmk/base/check_legacy_includes/azure.py
index 56ace6e8397..0fe460afa74 100644
--- a/cmk/base/check_legacy_includes/azure.py
+++ b/cmk/base/check_legacy_includes/azure.py
@@ -7,23 +7,26 @@
import functools
import time
+from collections.abc import Callable, Mapping
+from typing import TypeVar
-from cmk.base.check_api import check_levels, get_bytes_human_readable
+from cmk.base.check_api import check_levels, CheckResult
from cmk.base.plugins.agent_based.agent_based_api.v1 import (
get_rate,
get_value_store,
IgnoreResultsError,
- render,
)
+from cmk.agent_based.v2 import render, Service
from cmk.plugins.lib.azure import AZURE_AGENT_SEPARATOR as AZURE_AGENT_SEPARATOR
+from cmk.plugins.lib.azure import get_service_labels_from_resource_tags
from cmk.plugins.lib.azure import iter_resource_attributes as iter_resource_attributes
from cmk.plugins.lib.azure import parse_resources as parse_resources
_AZURE_METRIC_FMT = {
"count": lambda n: "%d" % n,
"percent": render.percent,
- "bytes": get_bytes_human_readable,
+ "bytes": render.bytes,
"bytes_per_second": render.iobandwidth,
"seconds": lambda s: "%.2f s" % s,
"milli_seconds": lambda ms: "%d ms" % (ms * 1000),
@@ -31,20 +34,13 @@
}
-def get_data_or_go_stale(check_function):
- """Variant of get_parsed_item_data that raises MKCounterWrapped
- if data is not found.
- """
+_Data = TypeVar("_Data")
- @functools.wraps(check_function)
- def wrapped_check_function(item, params, parsed):
- if not isinstance(parsed, dict):
- return 3, "Wrong usage of decorator: parsed is not a dict"
- if item not in parsed or not parsed[item]:
- raise IgnoreResultsError("Data not present at the moment")
- return check_function(item, params, parsed[item])
- return wrapped_check_function
+def get_data_or_go_stale(item: str, section: Mapping[str, _Data]) -> _Data:
+ if resource := section.get(item):
+ return resource
+ raise IgnoreResultsError("Data not present at the moment")
def check_azure_metric( # pylint: disable=too-many-locals
@@ -82,7 +78,7 @@ def check_azure_metric( # pylint: disable=too-many-locals
cmk_key,
(levels or (None, None)) + (levels_lower or (None, None)),
infoname=display_name,
- human_readable_func=_AZURE_METRIC_FMT.get(unit, str), # type: ignore[arg-type]
+ human_readable_func=_AZURE_METRIC_FMT.get(unit, lambda x: f"{x}"),
boundaries=(0, None),
)
@@ -106,7 +102,9 @@ def discovery_function(parsed):
for name, resource in parsed.items():
metr = resource.metrics
if set(desired_metrics) & set(metr):
- yield name, {}
+ yield Service(
+ item=name, labels=get_service_labels_from_resource_tags(resource.tags)
+ )
return discovery_function
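Note a subtle difference between the two new get_data_or_go_stale helpers, mirroring the decorators they replace: the AWS version raises only when the key is missing, while this Azure version checks truthiness, so a present-but-empty entry also goes stale. A sketch (section contents are hypothetical):

    section = {"resource-a": {}, "resource-b": {"metrics": ["Percentage CPU"]}}
    get_data_or_go_stale("resource-a", section)
    # AWS variant: returns {}
    # Azure variant: raises IgnoreResultsError, because {} is falsy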
diff --git a/cmk/base/check_legacy_includes/check_mail.py b/cmk/base/check_legacy_includes/check_mail.py
index 78c8aa3f5ed..47f26656b75 100644
--- a/cmk/base/check_legacy_includes/check_mail.py
+++ b/cmk/base/check_legacy_includes/check_mail.py
@@ -8,6 +8,7 @@
from cmk.base.check_api import passwordstore_get_cmdline
+# Note: this is already migrated in cmk.plugins.email.server_side_calls.commons!
def general_check_mail_args_from_params(
check_ident: str, params: dict[str, Any]
) -> list[str | tuple[str, str, str]]:
diff --git a/cmk/base/check_legacy_includes/cmctc.py b/cmk/base/check_legacy_includes/cmctc.py
deleted file mode 100644
index e498b5c5d96..00000000000
--- a/cmk/base/check_legacy_includes/cmctc.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-def cmctc_translate_status(status):
-    # 4: ok, 7: warning, 8: too low, 9: too high
-    return {4: 0, 7: 1, 8: 1, 9: 2}.get(status, 3)
-
-
-def cmctc_translate_status_text(status):
- return {
- 1: "notAvail",
- 2: "lost",
- 3: "changed",
- 4: "ok",
- 5: "off",
- 6: "on",
- 7: "warning",
- 8: "tooLow",
- 9: "tooHigh",
- }.get(status, "UNKNOWN")
diff --git a/cmk/base/check_legacy_includes/cpu_util.py b/cmk/base/check_legacy_includes/cpu_util.py
index 9a4095ea55b..b911b0526b2 100644
--- a/cmk/base/check_legacy_includes/cpu_util.py
+++ b/cmk/base/check_legacy_includes/cpu_util.py
@@ -5,15 +5,15 @@
import time
-from cmk.base.check_api import check_levels, get_age_human_readable
+from cmk.base.check_api import check_levels
from cmk.base.plugins.agent_based.agent_based_api.v1 import (
get_average,
get_value_store,
IgnoreResultsError,
- render,
)
import cmk.plugins.lib.cpu_util as cpu_util
+from cmk.agent_based.v2 import render
# Common file for all (modern) checks that check CPU utilization (not load!)
@@ -288,7 +288,7 @@ def cpu_util_time(this_time, core, perc, threshold, warn_core, crit_core):
high_load_duration,
"%s_is_under_high_load_for" % core, # Not used
(warn_core, crit_core),
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname="%s is under high load for" % core,
)
if timestamp == 0:
diff --git a/cmk/base/check_legacy_includes/dell_compellent.py b/cmk/base/check_legacy_includes/dell_compellent.py
index 64c6a6984b2..f38ce218f63 100644
--- a/cmk/base/check_legacy_includes/dell_compellent.py
+++ b/cmk/base/check_legacy_includes/dell_compellent.py
@@ -5,9 +5,7 @@
from collections.abc import Mapping
-from cmk.base.check_api import DiscoveryResult, Service
-
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, StringTable
def discover(string_table: StringTable) -> DiscoveryResult:
diff --git a/cmk/base/check_legacy_includes/df.py b/cmk/base/check_legacy_includes/df.py
index 23069a5aa8d..4b68a8b3d09 100644
--- a/cmk/base/check_legacy_includes/df.py
+++ b/cmk/base/check_legacy_includes/df.py
@@ -5,10 +5,7 @@
# pylint: disable=chained-comparison,unused-import
-from cmk.base.check_api import get_bytes_human_readable
-from cmk.base.plugins.agent_based.agent_based_api.v1 import render
-
-from cmk.agent_based.v1 import Metric, Result, State
+from cmk.agent_based.v2 import Metric, render, Result, State
from cmk.plugins.lib.df import check_filesystem_levels, check_inodes
from cmk.plugins.lib.df import FILESYSTEM_DEFAULT_LEVELS as FILESYSTEM_DEFAULT_LEVELS # noqa: F401
from cmk.plugins.lib.df import FILESYSTEM_DEFAULT_PARAMS as FILESYSTEM_DEFAULT_PARAMS
@@ -169,7 +166,7 @@ def df_check_filesystem_single_coroutine( # pylint: disable=too-many-branches
if show_reserved:
reserved_perc_hr = render.percent(100.0 * reserved_mb / size_mb)
- reserved_hr = get_bytes_human_readable(reserved_mb * 1024**2)
+ reserved_hr = render.disksize(reserved_mb * 1024**2)
infotext.append(
"additionally reserved for root: %s" % reserved_hr #
if subtract_reserved
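One detail worth knowing about this renderer swap: render.disksize formats with 1000-based SI units, while render.bytes (used for the memory checks below) keeps the 1024-based IEC units that get_bytes_human_readable produced by default, so displayed disk sizes can shift slightly. Roughly:

    render.bytes(1048576)     # ~ "1.00 MiB" (base 1024)
    render.disksize(1048576)  # ~ "1.05 MB"  (base 1000)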
diff --git a/cmk/base/check_legacy_includes/dhcp_pools.py b/cmk/base/check_legacy_includes/dhcp_pools.py
index 907ba27094e..897d26cbb76 100644
--- a/cmk/base/check_legacy_includes/dhcp_pools.py
+++ b/cmk/base/check_legacy_includes/dhcp_pools.py
@@ -3,7 +3,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-from collections.abc import Mapping
+from collections.abc import Iterable, Mapping
from cmk.base.plugins.agent_based.agent_based_api.v1 import Metric, Result
@@ -16,9 +16,13 @@
# }
-def check_dhcp_pools_levels( # type: ignore[no-untyped-def]
- free, used, pending, size, params: Mapping[str, tuple[float, float]]
-):
+def check_dhcp_pools_levels(
+ free: float | None,
+ used: float | None,
+ pending: float | None,
+ size: float,
+ params: Mapping[str, tuple[float, float]],
+) -> Iterable[tuple[int, str, list]]:
for new_api_object in dhcp_pools.check_dhcp_pools_levels(free, used, pending, size, params):
if isinstance(new_api_object, Result):
yield int(new_api_object.state), new_api_object.summary, []
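The wrapper adapts new-API objects back to the legacy tuple protocol: each Result becomes a (state, summary, perfdata) tuple with empty perfdata. Presumably the lines just below this hunk handle Metric objects symmetrically; a sketch of that branch (an assumption, since it lies outside the hunk):

    elif isinstance(new_api_object, Metric):
        yield 0, "", [(new_api_object.name, new_api_object.value)]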
diff --git a/cmk/base/check_legacy_includes/didactum.py b/cmk/base/check_legacy_includes/didactum.py
index 29b0e465942..48a4bfe2247 100644
--- a/cmk/base/check_legacy_includes/didactum.py
+++ b/cmk/base/check_legacy_includes/didactum.py
@@ -5,7 +5,7 @@
from collections.abc import Mapping
-from cmk.base.check_api import DiscoveryResult, Service
+from cmk.agent_based.v2 import DiscoveryResult, Service
from .elphase import check_elphase
from .humidity import check_humidity
@@ -71,7 +71,7 @@ def parse_didactum_sensors(info):
return parsed
-def inventory_didactum_sensors(
+def discover_didactum_sensors(
parsed: Mapping[str, Mapping[str, Mapping[str, str]]], what: str
) -> DiscoveryResult:
yield from (
diff --git a/cmk/base/check_legacy_includes/diskstat.py b/cmk/base/check_legacy_includes/diskstat.py
deleted file mode 100644
index 78885910682..00000000000
--- a/cmk/base/check_legacy_includes/diskstat.py
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# pylint: disable=consider-using-in
-# pylint: disable=no-else-continue
-# pylint: disable=no-else-return
-
-from collections.abc import Mapping, MutableSequence, Sequence
-from typing import Any
-
-from cmk.base.check_api import check_levels
-from cmk.base.plugins.agent_based.agent_based_api.v1 import (
- get_average,
- get_rate,
- get_value_store,
- render,
-)
-
-
-def check_diskstat_line( # pylint: disable=too-many-branches
- this_time: float,
- item: str,
- params: Mapping[str, Any],
- line: Sequence[Any],
- mode: str = "sectors",
-) -> tuple[int, str, MutableSequence[Any],]:
- average_range = params.get("average")
- if average_range == 0:
- average_range = None # disable averaging when 0 is set
-
- value_store = get_value_store()
-
- perfdata: MutableSequence[Any] = []
- infos: MutableSequence[str] = []
- status: int = 0
- node = line[0]
- if node is not None and node != "":
- infos.append("Node %s" % node)
-
- for what, ctr in [("read", line[2]), ("write", line[3])]:
- if node:
- countername = f"diskstat.{node}.{item}.{what}"
- else:
- countername = f"diskstat.{item}.{what}"
-
- # unpack levels now, need also for perfdata
- levels = params.get(what)
- if isinstance(levels, tuple):
- warn, crit = levels
- else:
- warn, crit = None, None
-
- per_sec = get_rate(get_value_store(), countername, this_time, int(ctr), raise_overflow=True)
- if mode == "sectors":
- # compute IO rate in bytes/sec
- bytes_per_sec = per_sec * 512
- elif mode == "bytes":
- bytes_per_sec = per_sec
-
- dsname = what
-
- # compute average of the rate over ___ minutes
- if average_range is not None:
- perfdata.append((dsname, bytes_per_sec, warn, crit))
- bytes_per_sec = get_average(
- value_store, f"{countername}.avg", this_time, bytes_per_sec, average_range
- )
- dsname += ".avg"
-
- # check levels
- state, text, extraperf = check_levels(
- bytes_per_sec,
- dsname,
- levels,
- scale=1048576,
- statemarkers=True,
- human_readable_func=render.iobandwidth,
- infoname=what,
- )
- if text:
- infos.append(text)
- status = max(state, status)
- perfdata += extraperf
-
- # Add performance data for averaged IO
- if average_range is not None:
- perfdata = [perfdata[0], perfdata[2], perfdata[1], perfdata[3]]
-
- # Process IOs when available
- ios_per_sec = None
- if len(line) >= 6 and line[4] >= 0 and line[5] > 0:
- reads, writes = map(int, line[4:6])
- if "read_ios" in params:
- warn, crit = params["read_ios"]
- if reads >= crit:
- infos.append("Read operations: %d (!!)" % (reads))
- status = 2
- elif reads >= warn:
- infos.append("Read operations: %d (!)" % (reads))
- status = max(status, 1)
- else:
- warn, crit = None, None
- if "write_ios" in params:
- warn, crit = params["write_ios"]
- if writes >= crit:
- infos.append("Write operations: %d (!!)" % (writes))
- status = 2
- elif writes >= warn:
- infos.append("Write operations: %d (!)" % (writes))
- status = max(status, 1)
- else:
- warn, crit = None, None
- ios = reads + writes
- ios_per_sec = get_rate(
- get_value_store(), countername + ".ios", this_time, ios, raise_overflow=True
- )
- infos.append("IOs: %.2f/sec" % ios_per_sec)
-
- if params.get("latency_perfdata"):
- perfdata.append(("ios", ios_per_sec))
-
- # Do Latency computation if this information is available:
- if len(line) >= 7 and line[6] >= 0:
- timems = int(line[6])
- timems_per_sec = get_rate(
- get_value_store(), countername + ".time", this_time, timems, raise_overflow=True
- )
- if not ios_per_sec:
- latency = 0.0
- else:
- latency = timems_per_sec / ios_per_sec # fixed: true-division
- infos.append("Latency: %.2fms" % latency)
- if "latency" in params:
- warn, crit = params["latency"]
- if latency >= crit:
- status = 2
- infos[-1] += "(!!)"
- elif latency >= warn:
- status = max(status, 1)
- infos[-1] += "(!)"
- else:
- warn, crit = None, None
-
- if params.get("latency_perfdata"):
- perfdata.append(("latency", latency, warn, crit))
-
- # Queue Lengths (currently only Windows). Windows uses counters here.
- # I have not understood, why....
- if len(line) >= 9:
- for what, ctr in [("read", line[7]), ("write", line[8])]:
- countername = f"diskstat.{item}.ql.{what}"
- levels = params.get(what + "_ql")
- if levels:
- warn, crit = levels
- else:
- warn, crit = None, None
-
- qlx = get_rate(get_value_store(), countername, this_time, int(ctr), raise_overflow=True)
- ql = qlx / 10000000.0
- infos.append(what.title() + " Queue: %.2f" % ql)
-
- # check levels
- if levels is not None:
- if ql >= crit:
- status = 2
- infos[-1] += "(!!)"
- elif ql >= warn:
- status = max(status, 1)
- infos[-1] += "(!)"
-
- if params.get("ql_perfdata"):
- perfdata.append((what + "_ql", ql))
-
- return (status, ", ".join(infos), perfdata)
diff --git a/cmk/base/check_legacy_includes/filerdisks.py b/cmk/base/check_legacy_includes/filerdisks.py
index 08b746e989d..ec699d73f10 100644
--- a/cmk/base/check_legacy_includes/filerdisks.py
+++ b/cmk/base/check_legacy_includes/filerdisks.py
@@ -3,7 +3,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_bytes_human_readable
+from cmk.agent_based.v2 import render
# disks = [
# { "state" : "failed",
@@ -33,7 +33,7 @@ def check_filer_disks(disks, params): # pylint: disable=too-many-branches
if disk["state"] == what:
state[what].append(disk)
- yield 0, "Total raw capacity: %s" % get_bytes_human_readable(total_capacity), [
+ yield 0, "Total raw capacity: %s" % render.disksize(total_capacity), [
("total_disk_capacity", total_capacity)
]
# TODO: Is a prefailed disk unavailable?
diff --git a/cmk/base/check_legacy_includes/firewall_if.py b/cmk/base/check_legacy_includes/firewall_if.py
deleted file mode 100644
index fa07187ec44..00000000000
--- a/cmk/base/check_legacy_includes/firewall_if.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-import time
-from typing import Any
-
-from cmk.base.check_api import check_levels
-from cmk.base.plugins.agent_based.agent_based_api.v1 import get_average, get_rate, get_value_store
-
-
-def check_firewall_if(item, params, data):
- infotext_names = {
- "ip4_in_blocked": "Incoming IPv4 packets blocked: ",
- }
-
- this_time = time.time()
- value_store = get_value_store()
-
- for what, counter in data.items():
- rate = get_rate(
- get_value_store(),
- what,
- this_time,
- counter,
- raise_overflow=True,
- )
-
- if params.get("averaging"):
- backlog_minutes = params["averaging"]
- avgrate = get_average(
- value_store, f"firewall_if-{what}.{item}", this_time, rate, backlog_minutes
- )
- check_against = avgrate
- else:
- check_against = rate
-
- status, infotext, extraperf = check_levels(
- check_against,
- what,
- params.get(what),
- human_readable_func=lambda x: "%.2f pkts/s" % x,
- infoname=infotext_names[what],
- )
-
- perfdata: list[Any]
- perfdata = [(what, rate)] + extraperf[:1]
-
- yield status, infotext, perfdata
diff --git a/cmk/base/check_legacy_includes/fortigate_cpu.py b/cmk/base/check_legacy_includes/fortigate_cpu.py
deleted file mode 100644
index b08a87fa491..00000000000
--- a/cmk/base/check_legacy_includes/fortigate_cpu.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.base.check_api import DiscoveryResult, Service
-
-from cmk.agent_based.v2.type_defs import StringTable
-
-from .cpu_util import check_cpu_util
-
-
-def inventory_fortigate_cpu(string_table: StringTable) -> DiscoveryResult:
- yield Service()
-
-
-def check_fortigate_cpu(item, params, info):
- num_cpus = 0
- util = 0
- for line in info:
- util += int(line[0])
- num_cpus += 1
- if num_cpus == 0:
- return None
-
- util = float(util) / num_cpus # type: ignore[assignment]
-
- state, infotext, perfdata = next(check_cpu_util(util, params))
- infotext += " at %d CPUs" % num_cpus
-
- return state, infotext, perfdata
diff --git a/cmk/base/check_legacy_includes/fsc_sc2.py b/cmk/base/check_legacy_includes/fsc_sc2.py
deleted file mode 100644
index 2885fcf0793..00000000000
--- a/cmk/base/check_legacy_includes/fsc_sc2.py
+++ /dev/null
@@ -1,451 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from .fan import check_fan
-from .temperature import check_temperature
-
-# .--CPU-----------------------------------------------------------------.
-# | ____ ____ _ _ |
-# | / ___| _ \| | | | |
-# | | | | |_) | | | | |
-# | | |___| __/| |_| | |
-# | \____|_| \___/ |
-# | |
-# '----------------------------------------------------------------------'
-
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.3.1.1 "CPU1"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.3.1.2 "CPU2"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.4.1.1 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.4.1.2 2
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.5.1.1 "Intel(R) Xeon(R) CPU E5-2620 v2 @ 2.10GHz"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.5.1.2 ""
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.8.1.1 2100
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.8.1.2 0
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.13.1.1 6
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.13.1.2 0
-
-
-def inventory_fsc_sc2_cpu_status(info):
- for line in info:
- if line[1] != "2":
- yield line[0], None
-
-
-def check_fsc_sc2_cpu_status(item, _no_params, info):
- def get_cpu_status(status):
- return {
- "1": (3, "unknown"),
- "2": (3, "not-present"),
- "3": (0, "ok"),
- "4": (0, "disabled"),
- "5": (2, "error"),
- "6": (2, "failed"),
- "7": (1, "missing-termination"),
- "8": (1, "prefailure-warning"),
- }.get(status, (3, "unknown"))
-
- for designation, status, model, speed, cores in info:
- if designation == item:
- status_state, status_txt = get_cpu_status(status)
- return status_state, f"Status is {status_txt}, {model}, {cores} cores @ {speed} MHz"
-
-
-# .
-# .--memory--------------------------------------------------------------.
-# | |
-# | _ __ ___ ___ _ __ ___ ___ _ __ _ _ |
-# | | '_ ` _ \ / _ \ '_ ` _ \ / _ \| '__| | | | |
-# | | | | | | | __/ | | | | | (_) | | | |_| | |
-# | |_| |_| |_|\___|_| |_| |_|\___/|_| \__, | |
-# | |___/ |
-# '----------------------------------------------------------------------'
-
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.1 "DIMM-1A"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.2 "DIMM-2A"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.3 "DIMM-3A"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.4 "DIMM-1B"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.5 "DIMM-2B"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.6 "DIMM-3B"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.7 "DIMM-1C"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.8 "DIMM-2C"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.9 "DIMM-3C"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.1 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.2 2
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.3 2
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.4 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.5 2
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.6 2
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.7 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.8 2
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.9 2
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.1 4096
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.2 -1
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.3 -1
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.4 4096
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.5 -1
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.6 -1
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.7 4096
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.8 -1
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.9 -1
-
-
-def inventory_fsc_sc2_mem_status(info):
- for line in info:
- if line[1] != "2":
- yield line[0], None
-
-
-def check_fsc_sc2_mem_status(item, _no_params, info):
- def get_mem_status(status):
- return {
- "1": (3, "unknown"),
- "2": (3, "not-present"),
- "3": (0, "ok"),
- "4": (0, "disabled"),
- "5": (2, "error"),
- "6": (2, "failed"),
- "7": (1, "prefailure-predicted"),
- "11": (0, "hidden"),
- }.get(status, (3, "unknown"))
-
- for designation, status, capacity in info:
- if designation == item:
- status_state, status_txt = get_mem_status(status)
- return status_state, f"Status is {status_txt}, Size {capacity} MB"
-
-
-# .
-# .--fans----------------------------------------------------------------.
-# | __ |
-# | / _| __ _ _ __ ___ |
-# | | |_ / _` | '_ \/ __| |
-# | | _| (_| | | | \__ \ |
-# | |_| \__,_|_| |_|___/ |
-# | |
-# '----------------------------------------------------------------------'
-
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.1 "FAN1 SYS"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.2 "FAN2 SYS"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.3 "FAN3 SYS"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.4 "FAN4 SYS"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.5 "FAN5 SYS"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.6 "FAN PSU1"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.7 "FAN PSU2"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.1 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.2 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.3 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.4 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.5 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.6 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.7 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.1 5820
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.2 6000
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.3 6000
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.4 6000
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.5 6120
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.6 2400
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.7 2400
-
-FAN_FSC_SC2_CHECK_DEFAULT_PARAMETERS = {
- "lower": (1500, 2000),
-}
-
-
-def inventory_fsc_sc2_fans(info):
- for line in info:
- if line[1] not in ["8"]:
- yield line[0], {}
-
-
-def check_fsc_sc2_fans(item, params, info):
- status_map = {
- "1": (3, "Status is unknown"),
- "2": (0, "Status is disabled"),
- "3": (0, "Status is ok"),
- "4": (2, "Status is failed"),
- "5": (1, "Status is prefailure-predicted"),
- "6": (1, "Status is redundant-fan-failed"),
- "7": (3, "Status is not-manageable"),
- "8": (0, "Status is not-present"),
- }
-
- if isinstance(params, tuple):
- params = {"lower": params}
-
- for designation, status, rpm in info:
- if designation == item:
- yield status_map.get(status, (3, "Status is unknown"))
- if rpm:
- yield check_fan(int(rpm), params)
- else:
- yield 0, "Device did not deliver RPM values"
-
-
-# .
-# .--power---------------------------------------------------------------.
-# | |
-# | _ __ _____ _____ _ __ |
-# | | '_ \ / _ \ \ /\ / / _ \ '__| |
-# | | |_) | (_) \ V V / __/ | |
-# | | .__/ \___/ \_/\_/ \___|_| |
-# | |_| |
-# '----------------------------------------------------------------------'
-
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.3.1 "CPU1 Power"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.3.2 "CPU2 Power"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.4.1 "HDD Power"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.7.1 "System Power"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.10.1 "PSU1 Power"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.10.2 "PSU2 Power"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.224.1 "Total Power"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.224.2 "Total Power Out"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.3.1 5
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.3.2 0
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.4.1 8
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.7.1 50
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.10.1 52
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.10.2 40
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.224.1 92
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.224.2 68
-
-
-def parse_fsc_sc2_power_consumption(info):
- parsed: dict = {}
- for designation, value in info:
- # sometimes the device does not return a value
- if not value:
- parsed.setdefault(
- designation, {"device_state": (3, "Error on device while reading value")}
- )
- else:
- parsed.setdefault(designation, {"power": int(value)})
- return parsed
-
-
-# .
-# .--info----------------------------------------------------------------.
-# | _ __ |
-# | (_)_ __ / _| ___ |
-# | | | '_ \| |_ / _ \ |
-# | | | | | | _| (_) | |
-# | |_|_| |_|_| \___/ |
-# | |
-# '----------------------------------------------------------------------'
-
-# .1.3.6.1.4.1.231.2.10.2.2.10.2.3.1.5.1 "PRIMERGY RX300 S8"
-# .1.3.6.1.4.1.231.2.10.2.2.10.2.3.1.7.1 "--"
-# .1.3.6.1.4.1.231.2.10.2.2.10.4.1.1.11.1 "V4.6.5.4 R1.6.0 for D2939-B1x"
-
-
-def inventory_fsc_sc2_info(info):
- if info:
- return [(None, None)]
- return []
-
-
-def check_fsc_sc2_info(_no_item, _no_params, info):
- if info:
- return (
- 0,
- f"Model: {info[0][0]}, Serial Number: {info[0][1]}, BIOS Version: {info[0][2]}",
- )
- return None
-
-
-# .
-# .--temperature---------------------------------------------------------.
-# | _ _ |
-# | | |_ ___ _ __ ___ _ __ ___ _ __ __ _| |_ _ _ _ __ ___ |
-# | | __/ _ \ '_ ` _ \| '_ \ / _ \ '__/ _` | __| | | | '__/ _ \ |
-# | | || __/ | | | | | |_) | __/ | | (_| | |_| |_| | | | __/ |
-# | \__\___|_| |_| |_| .__/ \___|_| \__,_|\__|\__,_|_| \___| |
-# | |_| |
-# '----------------------------------------------------------------------'
-
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.1 "Ambient"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.2 "Systemboard 1"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.3 "Systemboard 2"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.4 "CPU1"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.5 "CPU2"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.6 "MEM A"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.7 "MEM B"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.8 "MEM C"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.9 "MEM D"
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.1 8
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.2 8
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.3 8
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.4 8
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.5 2
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.6 8
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.7 8
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.8 8
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.9 8
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.1 26
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.2 27
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.3 33
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.4 27
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.5 0
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.6 28
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.7 28
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.8 27
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.9 27
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.1 37
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.2 75
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.3 75
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.4 77
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.5 89
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.6 78
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.7 78
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.8 78
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.9 78
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.1 42
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.2 80
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.3 80
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.4 81
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.5 93
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.6 82
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.7 82
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.8 82
-# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.9 82
-
-
-def inventory_fsc_sc2_temp(info):
- for line in info:
- if line[1] != "2":
- yield line[0], {}
-
-
-def check_fsc_sc2_temp(item, params, info):
- temp_status = {
- "1": (3, "unknown"),
- "2": (0, "not-available"),
- "3": (0, "ok"),
- "4": (2, "sensor-failed"),
- "5": (2, "failed"),
- "6": (1, "temperature-warning-toohot"),
- "7": (2, "temperature-critical-toohot"),
- "8": (0, "temperature-normal"),
- "9": (1, "temperature-warning"),
- }
-
- for designation, status, temp, dev_warn, dev_crit in info:
- if designation == item:
- if not temp:
- return 3, "Did not receive temperature data"
-
- dev_status, dev_status_name = temp_status.get(status, (3, "unknown"))
-
- if not dev_warn or not dev_crit:
- return 3, "Did not receive device levels"
-
- dev_levels = int(dev_warn), int(dev_crit)
-
- return check_temperature(
- int(temp),
- params,
- "temp_{}".format(item.replace(" ", "_")),
- "c",
- dev_levels,
- None,
- dev_status,
- dev_status_name,
- )
- return None
-
-
-# .
-# .--voltage-------------------------------------------------------------.
-# | _ _ |
-# | __ _____ | | |_ __ _ __ _ ___ |
-# | \ \ / / _ \| | __/ _` |/ _` |/ _ \ |
-# | \ V / (_) | | || (_| | (_| | __/ |
-# | \_/ \___/|_|\__\__,_|\__, |\___| |
-# | |___/ |
-# '----------------------------------------------------------------------'
-
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.1 "BATT 3.0V"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.2 "STBY 12V"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.3 "STBY 5V"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.4 "STBY 3.3V"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.5 "LAN 1.8V STBY"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.6 "iRMC 1.5V STBY"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.7 "LAN 1.0V STBY"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.8 "MAIN 12V"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.9 "MAIN 5V"
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.1 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.2 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.3 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.4 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.5 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.6 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.7 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.8 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.9 3
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.1 3270
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.2 11880
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.3 5100
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.4 3350
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.5 1800
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.6 1460
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.7 980
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.8 12160
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.9 4980
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.1 2010
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.2 11280
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.3 4630
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.4 3020
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.5 1670
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.6 1390
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.7 930
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.8 11310
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.9 4630
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.1 3500
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.2 12960
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.3 5420
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.4 3570
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.5 1930
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.6 1610
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.7 1080
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.8 12900
-# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.9 5420
-
-
-def parse_fsc_sc2_voltage(info):
- # dev_state:
- # sc2VoltageStatus OBJECT-TYPE
- # SYNTAX INTEGER
- # {
- # unknown(1),
- # not-available(2),
- # ok(3),
- # too-low(4),
- # too-high(5),
- # sensor-failed(6)
- # }
- # ACCESS read-only
- # STATUS mandatory
- # DESCRIPTION "Voltage status"
- # ::= { sc2Voltages 4 }
-
- parsed: dict = {}
- for designation, dev_state, value, min_value, max_value in info:
- if dev_state == "2":
- continue
- try:
- value = float(value) / 1000.0
- min_value = float(min_value) / 1000.0
- max_value = float(max_value) / 1000.0
- except ValueError:
- state_info = 3, "Could not get all values"
- parsed.setdefault(designation, {"device_state": state_info})
- else:
- state_info = value
- if value < min_value:
-                state_info = value, (2, "too low, falls below %.2f V" % min_value) # type: ignore[assignment]
- elif value >= max_value:
- state_info = value, (2, "too high, exceeds %.2f V" % max_value) # type: ignore[assignment]
- parsed.setdefault(designation, {"voltage": state_info})
- return parsed
diff --git a/cmk/base/check_legacy_includes/graylog.py b/cmk/base/check_legacy_includes/graylog.py
index 60257800f2e..d4cfd4057c1 100644
--- a/cmk/base/check_legacy_includes/graylog.py
+++ b/cmk/base/check_legacy_includes/graylog.py
@@ -8,9 +8,10 @@
import json as json_module
import time
-from cmk.base.check_api import check_levels, get_age_human_readable
+from cmk.base.check_api import check_levels
from cmk.base.plugins.agent_based.agent_based_api.v1 import get_average, get_rate, get_value_store
+from cmk.agent_based.v2 import render
from cmk.plugins.lib import graylog
json = json_module
@@ -57,7 +58,7 @@ def handle_graylog_messages(messages, params):
avg_rate,
avg_key,
msgs_avg_levels_upper + msgs_avg_levels_lower,
- infoname="Average number of messages (%s)" % get_age_human_readable(avg * 60),
+ infoname="Average number of messages (%s)" % render.timespan(avg * 60),
)
diff_key = "msgs_diff"
@@ -73,7 +74,7 @@ def handle_graylog_messages(messages, params):
diff_levels_upper + diff_levels_lower,
human_readable_func=int,
infoname="Total number of messages since last check (within %s)"
- % get_age_human_readable(timespan),
+ % render.timespan(timespan),
)
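Both replaced call sites only swap the human-readable formatter: render.timespan renders a number of seconds much like the removed get_age_human_readable did, e.g. (exact wording may vary between versions):

    render.timespan(150)  # e.g. "2 minutes 30 seconds"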
diff --git a/cmk/base/check_legacy_includes/hitachi_hus.py b/cmk/base/check_legacy_includes/hitachi_hus.py
deleted file mode 100644
index 3322b1ac0f0..00000000000
--- a/cmk/base/check_legacy_includes/hitachi_hus.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# For Hitachi Unified Storage (HUS) devices which support the USPMIB
-# These devices have two units: Disk Controller (DKC) and Disk Unit (DKU)
-
-# Example output from DKC:
-# .1.3.6.1.4.1.116.5.11.4.1.1.6.1.1 470849
-# .1.3.6.1.4.1.116.5.11.4.1.1.6.1.2 1
-# .1.3.6.1.4.1.116.5.11.4.1.1.6.1.3 1
-# .1.3.6.1.4.1.116.5.11.4.1.1.6.1.4 1
-# .1.3.6.1.4.1.116.5.11.4.1.1.6.1.5 1
-# .1.3.6.1.4.1.116.5.11.4.1.1.6.1.6 5
-# .1.3.6.1.4.1.116.5.11.4.1.1.6.1.7 1
-# .1.3.6.1.4.1.116.5.11.4.1.1.6.1.8 1
-# .1.3.6.1.4.1.116.5.11.4.1.1.6.1.9 1
-
-# Example output from DKU:
-# .1.3.6.1.4.1.116.5.11.4.1.1.7.1.1 470849
-# .1.3.6.1.4.1.116.5.11.4.1.1.7.1.2 1
-# .1.3.6.1.4.1.116.5.11.4.1.1.7.1.3 4
-# .1.3.6.1.4.1.116.5.11.4.1.1.7.1.4 3
-# .1.3.6.1.4.1.116.5.11.4.1.1.7.1.5 1
-
-
-def inventory_hitachi_hus(info):
- for line in info:
- # set dkuRaidListIndexSerialNumber as item
- yield line[0], None
-
-
-def check_hitachi_hus(item, _no_params, info):
- # Maps for hitachi hus components
- hus_map_states = {
- "0": (3, "unknown"),
- "1": (0, "no error"),
- "2": (2, "acute"),
- "3": (2, "serious"),
- "4": (1, "moderate"),
- "5": (1, "service"),
- }
-
- ok_states = []
- warn_states = []
- crit_states = []
- unknown_states = []
-
-    # Make sure the right component list is used for this unit type
- if len(info[0]) == 5:
- component = [
- "Power Supply",
- "Fan",
- "Environment",
- "Drive",
- ]
- else:
- component = [
- "Processor",
- "Internal Bus",
- "Cache",
- "Shared Memory",
- "Power Supply",
- "Battery",
- "Fan",
- "Environment",
- ]
-
- # Check the state of the components and add the output to the state lists
- for line in info:
- if line[0] != item:
- continue
-
- for what, device_state in zip(component, line[1:]):
- state, state_readable = hus_map_states[device_state]
- if state == 0:
- ok_states.append(f"{what}: {state_readable}")
- if state == 1:
- warn_states.append(f"{what}: {state_readable}")
- if state == 2:
- crit_states.append(f"{what}: {state_readable}")
- if state == 3:
- unknown_states.append(f"{what}: {state_readable}")
-
- for state, states, text in [
- (0, ok_states, "OK"),
- (3, unknown_states, "UNKNOWN"),
- (1, warn_states, "WARN"),
- (2, crit_states, "CRIT"),
- ]:
- if states:
- yield state, "{}: {}".format(text, ", ".join(states))
diff --git a/cmk/base/check_legacy_includes/hp_msa.py b/cmk/base/check_legacy_includes/hp_msa.py
deleted file mode 100644
index 4710fea1a0c..00000000000
--- a/cmk/base/check_legacy_includes/hp_msa.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-import cmk.plugins.lib.hp_msa as hp_msa
-
-# TODO
-# Use 'status-numeric' instead of 'status' field regardless of language.
-# For the state mapping, see: https://support.hpe.com/hpsc/doc/public/display?docId=emr_na-a00017709en_us
-
-hp_msa_state_map = {
- "Up": (0, "up"),
- "OK": (0, "OK"),
- "Warning": (1, "warning"),
- "Degraded": (1, "degraded"),
- "Error": (2, "error"),
- "Not Present": (2, "not present"),
- "Fault": (2, "fault"),
- "Unknown": (3, "unknown"),
-}
-
-parse_hp_msa = hp_msa.parse_hp_msa
-
-
-def inventory_hp_msa_health(parsed):
- return [(key, None) for key in parsed]
-
-
-def check_hp_msa_health(item, _no_params, parsed):
- if item in parsed:
- infotexts = []
- health_state, health_state_readable = hp_msa_state_map[parsed[item]["health"]]
- health_info = "Status: %s" % health_state_readable
- if health_state and parsed[item].get("health-reason", ""):
- health_info += " (%s)" % parsed[item]["health-reason"]
-
- infotexts.append(health_info)
-
- # extra info of volumes
- if parsed[item]["item_type"] == "volumes":
- volume_info = parsed[item].get("container-name", "")
- if volume_info:
- if parsed[item].get("raidtype", ""):
- volume_info += " (%s)" % parsed[item]["raidtype"]
- infotexts.append("container name: %s" % volume_info)
-
- # extra info of disks
- elif parsed[item]["item_type"] == "drives":
- for disk_info in ["serial-number", "vendor", "model", "description", "size"]:
- if parsed[item].get(disk_info, ""):
- infotexts.append(
- "%s: %s"
- % (
- disk_info.replace("-", " "),
- parsed[item][disk_info].replace("GB", " GB"),
- )
- )
-
- if parsed[item].get("rpm", ""):
- infotexts.append("speed: %s RPM" % (parsed[item]["rpm"]))
-
- return health_state, ", ".join(infotexts)
- return None
diff --git a/cmk/base/check_legacy_includes/hp_proliant.py b/cmk/base/check_legacy_includes/hp_proliant.py
index ed36eb479e8..2e044cd297d 100644
--- a/cmk/base/check_legacy_includes/hp_proliant.py
+++ b/cmk/base/check_legacy_includes/hp_proliant.py
@@ -3,17 +3,6 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-# pylint: disable=consider-using-in
-
-from .temperature import check_temperature
-
-hp_proliant_status_map = {
- 1: "unknown",
- 2: "ok",
- 3: "degraded",
- 4: "failed",
- 5: "disabled",
-}
hp_proliant_status2nagios_map = {
"unknown": 3,
@@ -24,27 +13,6 @@
"disabled": 1,
}
-hp_proliant_locale = {
- 1: "other",
- 2: "unknown",
- 3: "system",
- 4: "systemBoard",
- 5: "ioBoard",
- 6: "cpu",
- 7: "memory",
- 8: "storage",
- 9: "removableMedia",
- 10: "powerSupply",
- 11: "ambient",
- 12: "chassis",
- 13: "bridgeCard",
- 14: "managementBoard",
- 15: "backplane",
- 16: "networkSlot",
- 17: "bladeSlot",
- 18: "virtual",
-}
-
def sanitize_item(item: str) -> str:
r"""Sanitize null byte in item
@@ -55,221 +23,3 @@ def sanitize_item(item: str) -> str:
As of Checkmk 2.3, this should in fact no longer be necessary.
"""
return item.replace("\x00", r"\x00")
-
-
-# .--da cntlr------------------------------------------------------------.
-# | _ _ _ |
-# | __| | __ _ ___ _ __ | |_| |_ __ |
-# | / _` |/ _` | / __| '_ \| __| | '__| |
-# | | (_| | (_| | | (__| | | | |_| | | |
-# | \__,_|\__,_| \___|_| |_|\__|_|_| |
-# | |
-# '----------------------------------------------------------------------'
-
-hp_proliant_da_cntlr_cond_map = {
- "1": (3, "other"),
- "2": (0, "ok"),
- "3": (1, "degraded"),
- "4": (2, "failed"),
-}
-
-hp_proliant_da_cntlr_role_map = {
- "1": "other",
- "2": "notDuplexed",
- "3": "active",
- "4": "backup",
-}
-
-hp_proliant_da_cntlr_state_map = {
- "1": (3, "other"),
- "2": (0, "ok"),
- "3": (2, "generalFailure"),
- "4": (2, "cableProblem"),
- "5": (2, "poweredOff"),
-}
-
-
-def inventory_hp_proliant_da_cntlr(info):
- if info:
- return [(line[0], None) for line in info]
- return []
-
-
-def check_hp_proliant_da_cntlr(item, params, info):
- for line in info:
- index, model, slot, cond, role, b_status, b_cond, serial = line
- if index == item:
- sum_state = 0
- output = []
-
- for val, label, map_ in [
- (cond, "Condition", hp_proliant_da_cntlr_cond_map),
- (b_cond, "Board-Condition", hp_proliant_da_cntlr_cond_map),
- (b_status, "Board-Status", hp_proliant_da_cntlr_state_map),
- ]:
- this_state = map_[val][0]
- state_txt = ""
- if this_state == 1:
- state_txt = " (!)"
- elif this_state == 2:
- state_txt = " (!!)"
- sum_state = max(sum_state, this_state)
- output.append(f"{label}: {map_[val][1]}{state_txt}")
-
- output.append(
- "(Role: {}, Model: {}, Slot: {}, Serial: {})".format(
- hp_proliant_da_cntlr_role_map.get(role, "unknown"), model, slot, serial
- )
- )
-
- return (sum_state, ", ".join(output))
- return (3, "Controller not found in snmp data")
-
-
-# .
-# .--cpu-----------------------------------------------------------------.
-# | |
-# | ___ _ __ _ _ |
-# | / __| '_ \| | | | |
-# | | (__| |_) | |_| | |
-# | \___| .__/ \__,_| |
-# | |_| |
-# '----------------------------------------------------------------------'
-
-hp_proliant_cpu_status_map = {1: "unknown", 2: "ok", 3: "degraded", 4: "failed", 5: "disabled"}
-hp_proliant_cpu_status2nagios_map = {
- "unknown": 3,
- "ok": 0,
- "degraded": 2,
- "failed": 2,
- "disabled": 1,
-}
-
-
-def inventory_hp_proliant_cpu(info):
- yield from ((sanitize_item(line[0]), {}) for line in info)
-
-
-def check_hp_proliant_cpu(item, params, info):
- for line in info:
- if sanitize_item(line[0]) == item:
- index, slot, name, status = line
- snmp_status = hp_proliant_cpu_status_map[int(status)]
- status = hp_proliant_cpu_status2nagios_map[snmp_status]
-
- return (
- status,
- f'CPU{index} "{name}" in slot {slot} is in state "{snmp_status}"',
- )
- return (3, "item not found in snmp data")
-
-
-# .
-# .--fans----------------------------------------------------------------.
-# | __ |
-# | / _| __ _ _ __ ___ |
-# | | |_ / _` | '_ \/ __| |
-# | | _| (_| | | | \__ \ |
-# | |_| \__,_|_| |_|___/ |
-# | |
-# '----------------------------------------------------------------------'
-
-hp_proliant_fans_status_map = {1: "other", 2: "ok", 3: "degraded", 4: "failed"}
-hp_proliant_speed_map = {1: "other", 2: "normal", 3: "high"}
-hp_proliant_fans_locale = {
- 1: "other",
- 2: "unknown",
- 3: "system",
- 4: "systemBoard",
- 5: "ioBoard",
- 6: "cpu",
- 7: "memory",
- 8: "storage",
- 9: "removableMedia",
- 10: "powerSupply",
- 11: "ambient",
- 12: "chassis",
- 13: "bridgeCard",
-}
-
-
-def inventory_hp_proliant_fans(info):
- for line in [l for l in info if l[2] == "3"]:
- label = hp_proliant_fans_locale.get(int(line[1]), "other")
- yield sanitize_item(f"{line[0]} ({label})"), {}
-
-
-def check_hp_proliant_fans(item, params, info):
- for line in info:
- label = "other"
- if len(line) > 1 and int(line[1]) in hp_proliant_fans_locale:
- label = hp_proliant_fans_locale[int(line[1])]
-
- if sanitize_item(f"{line[0]} ({label})") == item:
- index, _name, _present, speed, status, currentSpeed = line
- snmp_status = hp_proliant_fans_status_map[int(status)]
- status = hp_proliant_status2nagios_map[snmp_status]
-
- detailOutput = ""
- perfdata = []
- if currentSpeed != "":
- detailOutput = ", RPM: %s" % currentSpeed
- perfdata = [("temp", int(currentSpeed))]
-
- return (
- status,
- f'FAN Sensor {index} "{label}", Speed is {hp_proliant_speed_map[int(speed)]}, State is {snmp_status}{detailOutput}',
- perfdata,
- )
- return (3, "item not found in snmp data")
-
-
-# .
-# .--temperature---------------------------------------------------------.
-# | _ _ |
-# | | |_ ___ _ __ ___ _ __ ___ _ __ __ _| |_ _ _ _ __ ___ |
-# | | __/ _ \ '_ ` _ \| '_ \ / _ \ '__/ _` | __| | | | '__/ _ \ |
-# | | || __/ | | | | | |_) | __/ | | (_| | |_| |_| | | | __/ |
-# | \__\___|_| |_| |_| .__/ \___|_| \__,_|\__|\__,_|_| \___| |
-# | |_| |
-# '----------------------------------------------------------------------'
-
-
-def format_hp_proliant_name(line):
- return f"{line[0]} ({hp_proliant_locale[int(line[1])]})"
-
-
-def inventory_hp_proliant_temp(info):
- for line in info:
- if line[-1] != "1":
- # other(1): Temperature could not be determined
- yield format_hp_proliant_name(line), {}
-
-
-def check_hp_proliant_temp(item, params, info):
- for line in info:
- if format_hp_proliant_name(line) == item:
- value, threshold, status = line[2:]
-
- # This case means no threshold available and
- # the devices' web interface displays "N/A"
- if threshold == "-99" or threshold == "0":
- devlevels = None
- else:
- threshold = float(threshold)
- devlevels = (threshold, threshold)
-
- snmp_status = hp_proliant_status_map[int(status)]
-
- return check_temperature(
- float(value),
- params,
- "hp_proliant_temp_%s" % item,
- dev_levels=devlevels,
- dev_status=hp_proliant_status2nagios_map[snmp_status],
- dev_status_name="Unit: %s" % snmp_status,
- )
- return 3, "item not found in snmp data"
-
-
-# .
diff --git a/cmk/base/check_legacy_includes/hwg.py b/cmk/base/check_legacy_includes/hwg.py
index fd082fff17f..58295ad9a81 100644
--- a/cmk/base/check_legacy_includes/hwg.py
+++ b/cmk/base/check_legacy_includes/hwg.py
@@ -3,8 +3,6 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-from .humidity import check_humidity
-from .temperature import check_temperature
map_units = {"1": "c", "2": "f", "3": "k", "4": "%"}
@@ -17,18 +15,6 @@
"5": "alarm high",
}
-map_readable_states = {
- "invalid": 3,
- "normal": 0,
- "out of range low": 2,
- "out of range high": 2,
- "alarm low": 2,
- "alarm high": 2,
-}
-
-HWG_HUMIDITY_DEFAULTLEVELS = {"levels": (60.0, 70.0)}
-HWG_TEMP_DEFAULTLEVELS = {"levels": (30.0, 35.0)}
-
def parse_hwg(info):
parsed: dict[str, dict] = {}
@@ -65,47 +51,3 @@ def parse_hwg(info):
)
return parsed
-
-
-def inventory_hwg_humidity(parsed):
- for index, attrs in parsed.items():
- if attrs.get("humidity"):
- yield index, {}
-
-
-def check_hwg_humidity(item, params, parsed):
- if not (data := parsed.get(item)):
- return
-
- status, infotext, perfdata = check_humidity(data["humidity"], params)
- infotext += " (Description: {}, Status: {})".format(data["descr"], data["dev_status_name"])
- yield status, infotext, perfdata
-
-
-def inventory_hwg_temp(parsed):
- for index, attrs in parsed.items():
- if attrs.get("temperature") and attrs["dev_status_name"] not in ["invalid", ""]:
- yield index, {}
-
-
-def check_hwg_temp(item, params, parsed):
- if not (data := parsed.get(item)):
- return
- state = map_readable_states.get(data["dev_status_name"], 3)
- state_readable = data["dev_status_name"]
- temp = data["temperature"]
- if temp is None:
- yield state, "Status: %s" % state_readable
- return
-
- state, infotext, perfdata = check_temperature(
- temp,
- params,
- "hwg_temp_%s" % item,
- dev_unit=data["dev_unit"],
- dev_status=state,
- dev_status_name=state_readable,
- )
-
- infotext += " (Description: {}, Status: {})".format(data["descr"], data["dev_status_name"])
- yield state, "%s" % infotext, perfdata
diff --git a/cmk/base/check_legacy_includes/infoblox.py b/cmk/base/check_legacy_includes/infoblox.py
index 40c4dea06cb..d9b217947bc 100644
--- a/cmk/base/check_legacy_includes/infoblox.py
+++ b/cmk/base/check_legacy_includes/infoblox.py
@@ -3,18 +3,12 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-from typing import Any
-
-
-def inventory_infoblox_statistics(info):
- return [(None, None)]
-
def check_infoblox_statistics(ty, stats):
- texts: dict[Any, Any] = {}
+ texts: dict[str, list[str]] = {}
perfdata = []
for what, what_val, what_textfield, what_info in stats:
- texts.setdefault(what_textfield, [])
+ texts.setdefault(str(what_textfield), [])
texts[what_textfield].append("%d %s" % (what_val, what_info))
perfdata.append((f"{ty}_{what}", what_val))
diff --git a/cmk/base/check_legacy_includes/juniper_mem.py b/cmk/base/check_legacy_includes/juniper_mem.py
deleted file mode 100644
index 42e4294c420..00000000000
--- a/cmk/base/check_legacy_includes/juniper_mem.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from collections.abc import Iterator, Mapping
-from dataclasses import dataclass
-from typing import Any
-
-from cmk.base.check_legacy_includes.mem import check_memory_element
-
-
-@dataclass(frozen=True)
-class Section:
- used: int
- total: int
-
-
-def discover_juniper_mem_generic(section: Section) -> Iterator[tuple[None, dict]]:
- yield None, {}
-
-
-def check_juniper_mem_generic(
- _no_item: None,
- params: Mapping[str, Any],
- section: Section,
-) -> tuple[int, str, list]:
- return check_memory_element(
- label="Used",
- used=section.used,
- total=section.total,
- levels=params["levels"],
- metric_name="mem_used",
- )
diff --git a/cmk/base/check_legacy_includes/license.py b/cmk/base/check_legacy_includes/license.py
index 55f5411a908..4de4c180f34 100644
--- a/cmk/base/check_legacy_includes/license.py
+++ b/cmk/base/check_legacy_includes/license.py
@@ -6,8 +6,8 @@
def license_check_levels(total, in_use, params):
if params is False:
- warn = None
- crit = None
+ warn: float | None = None
+ crit: float | None = None
elif not params:
warn = total
crit = total
@@ -24,14 +24,13 @@ def license_check_levels(total, in_use, params):
else:
infotext = "used %d licenses, but you have only %d" % (in_use, total)
- if crit is not None and in_use >= crit:
- status = 2
- elif warn is not None and in_use >= warn:
- status = 1
- else:
- status = 0
-
- if status:
- infotext += " (warn/crit at %d/%d)" % (warn, crit) # type: ignore[str-format]
+ status = 0
+ if warn is not None and crit is not None:
+ if in_use >= crit:
+ status = 2
+ elif in_use >= warn:
+ status = 1
+ if status:
+ infotext += f" (warn/crit at {int(warn)}/{int(crit)})"
return status, infotext, perfdata
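A quick sanity check of the reworked control flow (assuming params is an absolute (warn, crit) tuple, handled in the branch elided above):

    status, infotext, _ = license_check_levels(total=100, in_use=96, params=(90, 95))
    # in_use >= crit, so status == 2 and infotext ends with "(warn/crit at 90/95)"

    status, _, _ = license_check_levels(total=100, in_use=100, params=False)
    # params is False keeps warn/crit as None, so status stays 0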
diff --git a/cmk/base/check_legacy_includes/liebert.py b/cmk/base/check_legacy_includes/liebert.py
deleted file mode 100644
index 384d3e60a25..00000000000
--- a/cmk/base/check_legacy_includes/liebert.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.plugins.lib.liebert import temperature_to_celsius
-
-
-def check_temp_unit(output):
- return temperature_to_celsius(float(output[0]), output[1])
diff --git a/cmk/base/check_legacy_includes/mcafee_gateway.py b/cmk/base/check_legacy_includes/mcafee_gateway.py
deleted file mode 100644
index 845dda0d8a3..00000000000
--- a/cmk/base/check_legacy_includes/mcafee_gateway.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-def inventory_mcafee_gateway_generic(info):
- return [(None, {})]
diff --git a/cmk/base/check_legacy_includes/mem.py b/cmk/base/check_legacy_includes/mem.py
index d91bc5d5c8f..72a73dc8818 100644
--- a/cmk/base/check_legacy_includes/mem.py
+++ b/cmk/base/check_legacy_includes/mem.py
@@ -5,9 +5,7 @@
import collections
-from cmk.base.check_api import get_bytes_human_readable
-from cmk.base.plugins.agent_based.agent_based_api.v1 import render
-
+from cmk.agent_based.v2 import render
from cmk.plugins.lib.memory import compute_state
from cmk.plugins.lib.memory import normalize_levels as normalize_mem_levels
@@ -47,8 +45,8 @@ def check_memory_element(
label,
render.percent(100.0 * show_value / total),
show_text,
- get_bytes_human_readable(show_value, base=1024),
- get_bytes_human_readable(total, base=1024),
+ render.bytes(show_value),
+ render.bytes(total),
(" %s" % label_total).rstrip(),
)
diff --git a/cmk/base/check_legacy_includes/nullmailer_mailq.py b/cmk/base/check_legacy_includes/nullmailer_mailq.py
index 32a82b746f1..d1205ec3c73 100644
--- a/cmk/base/check_legacy_includes/nullmailer_mailq.py
+++ b/cmk/base/check_legacy_includes/nullmailer_mailq.py
@@ -6,7 +6,9 @@
import typing
from collections.abc import Iterable
-from cmk.base.check_api import check_levels, get_bytes_human_readable
+from cmk.base.check_api import check_levels
+
+from cmk.agent_based.v2 import render
NULLMAILER_MAILQ_DEFAULT_LEVELS = {
"deferred": (10, 20),
@@ -43,7 +45,7 @@ def check_single_queue(queue: Queue, levels_length: tuple[int, int]) -> Iterable
queue.size,
"size" if make_metric else None,
None,
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="Size",
)
diff --git a/cmk/base/check_legacy_includes/pandacom_temp.py b/cmk/base/check_legacy_includes/pandacom_temp.py
deleted file mode 100644
index 1065b1f371b..00000000000
--- a/cmk/base/check_legacy_includes/pandacom_temp.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from .temperature import check_temperature
-
-# suggested by customer
-PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS = {"levels": (35.0, 40.0)}
-
-
-def inventory_pandacom_module_temp(info):
- return [(line[0], {}) for line in info]
-
-
-def check_pandacom_module_temp(item, params, info):
- for slot, temp_str, warn_str, crit_str in info:
- if slot == item:
- return check_temperature(
- int(temp_str),
- params,
- "pandacom_%s" % item,
- dev_levels=(int(warn_str), int(crit_str)),
- )
- return None
diff --git a/cmk/base/check_legacy_includes/perle.py b/cmk/base/check_legacy_includes/perle.py
index c436ffba638..0c59e315112 100644
--- a/cmk/base/check_legacy_includes/perle.py
+++ b/cmk/base/check_legacy_includes/perle.py
@@ -12,109 +12,3 @@ def perle_check_alarms(alarms_str):
alarminfo += " (User intervention is needed to resolve the outstanding alarms)"
return state, f"Alarms: {alarms_str}{alarminfo}"
-
-
-# .--modules-------------------------------------------------------------.
-# | _ _ |
-# | _ __ ___ ___ __| |_ _| | ___ ___ |
-# | | '_ ` _ \ / _ \ / _` | | | | |/ _ \/ __| |
-# | | | | | | | (_) | (_| | |_| | | __/\__ \ |
-# | |_| |_| |_|\___/ \__,_|\__,_|_|\___||___/ |
-# | |
-# '----------------------------------------------------------------------'
-
-
-def inventory_perle_cm_modules(info):
- inventory = []
- for (
- _name,
- _led,
- index,
- _fiber_lprf,
- _fiber_link,
- _fiber_conn,
- _fiber_speed,
- _cooper_lprf,
- _copper_link,
- _copper_conn,
- _copper_speed,
- ) in info:
- inventory.append((index, None))
- return inventory
-
-
-def check_perle_cm_modules(item, _no_params, info):
- mappings = {
- "speed": {
- "0": "10 Mbs",
- "1": "100 Mbps",
- "2": "1000 Mbps",
- },
- "power_led": {
- "0": (2, "no power"),
- "1": (0, "power to the module"),
- "2": (0, "loopback enabled"),
- },
- "fiber_lprf": {
- "0": (0, "ok"),
- "1": (2, "offline"),
- "2": (2, "link fault"),
- "3": (2, "auto neg error"),
- # available for cm1110 modules
- "99": (2, "not applicable"),
- },
- "fiber_link": {
- "0": (1, "down"),
- "1": (0, "up"),
- },
- "fiber_connector": {
- "0": "sc",
- "1": "lc",
- "2": "st",
- "3": "sfp",
- "5": "fc",
- "6": "mtrj",
- },
- "copper_lprf": {
- "0": (0, "ok"),
- "1": (2, "remote fault"),
- },
- "copper_link": {
- "0": (1, "down"),
- "1": (0, "ok"),
- },
- "copper_connector": {
- "0": "rj45",
- },
- }
-
- for (
- _name,
- power_led,
- index,
- fiber_lprf,
- fiber_link,
- fiber_connector,
- fiber_speed,
- cooper_lprf,
- copper_link,
- copper_connector,
- copper_speed,
- ) in info:
- if item == index:
- state, state_readable = mappings["power_led"][power_led] # type: ignore[index]
- yield state, "Power status: %s" % state_readable
-
- for what, lprf, link, speed, connector in [
- ("Fiber", fiber_lprf, fiber_link, fiber_speed, fiber_connector),
- ("Copper", cooper_lprf, copper_link, copper_speed, copper_connector),
- ]:
- yield 0, "{} Speed: {}".format(what, mappings["speed"][speed]) # type: ignore[index]
-
- for what_state, what_key in [(lprf, "LPRF"), (link, "Link")]:
- state, state_readable = mappings[f"{what.lower()}_{what_key.lower()}"][ # type: ignore[index]
- what_state
- ]
- yield state, f"{what_key}: {state_readable}"
-
- yield 0, "Connector: %s" % mappings["%s_connector" % what.lower()][connector] # type: ignore[index]
diff --git a/cmk/base/check_legacy_includes/quanta.py b/cmk/base/check_legacy_includes/quanta.py
index 99d2f709ba3..cea5cae8086 100644
--- a/cmk/base/check_legacy_includes/quanta.py
+++ b/cmk/base/check_legacy_includes/quanta.py
@@ -54,7 +54,7 @@ def _validate_levels(
def parse_quanta(info: Sequence[Sequence[Sequence[str]]]) -> MutableMapping[str, Item]:
- parsed: MutableMapping[str, Item] = {}
+ parsed: dict[str, Item] = {}
for (
dev_index,
dev_status,
diff --git a/cmk/base/check_legacy_includes/size_trend.py b/cmk/base/check_legacy_includes/size_trend.py
index aaa44e9c2a3..c4e115adbbd 100644
--- a/cmk/base/check_legacy_includes/size_trend.py
+++ b/cmk/base/check_legacy_includes/size_trend.py
@@ -8,7 +8,6 @@
import time
from collections.abc import Callable
-from cmk.base.check_api import get_bytes_human_readable
from cmk.base.plugins.agent_based.agent_based_api.v1 import (
get_average,
get_rate,
@@ -141,7 +140,7 @@ def size_trend( # type: ignore[no-untyped-def] # pylint: disable=too-many-branc
sign = "+" if trend > 0 else ""
infotext += ", trend: {}{} / {:g} hours".format(
sign,
- get_bytes_human_readable(trend * MB),
+ render.disksize(trend * MB),
range_hours,
)
@@ -158,8 +157,8 @@ def size_trend( # type: ignore[no-untyped-def] # pylint: disable=too-many-branc
problems.append(
"growing too fast (warn/crit at %s/%s per %.1f h)(!"
% (
- get_bytes_human_readable(wa),
- get_bytes_human_readable(cr),
+ render.disksize(wa),
+ render.disksize(cr),
range_hours,
)
)
@@ -173,7 +172,7 @@ def size_trend( # type: ignore[no-untyped-def] # pylint: disable=too-many-branc
trend * MB,
levels.get("trend_shrinking_bytes"),
range_hours,
- get_bytes_human_readable,
+ render.disksize,
)
if tmp_state > 0:
state = max(state, tmp_state)
diff --git a/cmk/base/check_legacy_includes/temperature.py b/cmk/base/check_legacy_includes/temperature.py
index b71f7e86cbd..c6e9806fcbd 100644
--- a/cmk/base/check_legacy_includes/temperature.py
+++ b/cmk/base/check_legacy_includes/temperature.py
@@ -6,21 +6,18 @@
# pylint: disable=unused-import
import time
-from typing import AnyStr
+from collections.abc import Generator, Mapping, Sequence
+from typing import AnyStr, NotRequired, TypedDict
from cmk.base.check_api import check_levels, state_markers
-from cmk.base.plugins.agent_based.agent_based_api.v1 import (
- get_average,
- get_rate,
- get_value_store,
- IgnoreResultsError,
-)
+from cmk.agent_based.v2 import get_average, get_rate, get_value_store, IgnoreResultsError, State
from cmk.plugins.lib.temperature import _migrate_params
from cmk.plugins.lib.temperature import fahrenheit_to_celsius as fahrenheit_to_celsius
from cmk.plugins.lib.temperature import render_temp as render_temp
from cmk.plugins.lib.temperature import StatusType as StatusType
from cmk.plugins.lib.temperature import temp_unitsym as temp_unitsym
+from cmk.plugins.lib.temperature import TempParamDict
from cmk.plugins.lib.temperature import TempParamType as TempParamType
from cmk.plugins.lib.temperature import to_celsius as to_celsius
from cmk.plugins.lib.temperature import TwoLevelsType as TwoLevelsType
@@ -34,14 +31,22 @@
# Generic Check Type. Can be used elsewhere too.
CheckType = tuple[StatusType, AnyStr, PerfDataType]
+
+class CheckTempKwargs(TypedDict):
+ dev_unit: NotRequired[str]
+ dev_levels: NotRequired[TwoLevelsType | None]
+ dev_levels_lower: NotRequired[TwoLevelsType | None]
+ dev_status: NotRequired[StatusType | None]
+ dev_status_name: NotRequired[str]
+
+
#################################################################################################
#
# NOTE
# !! PLEASE READ !!
#
-# check_temperature_list has NOT been migrated to the new check API yet.
-#
-# check_temperature_trend and check_temperature have been migrated to the new check API.
+# check_temperature_trend, check_temperature and check_temperature_list have been
+# migrated to the new check API.
# The functions below must be decommissioned (i.e. deleted) once all checks using
# the check_temperature function have been migrated.
#
@@ -460,59 +465,30 @@ def check_temperature( # pylint: disable=too-many-branches
# and kwargs a dict of keyword arguments for check_temperature
-def check_temperature_list(sensorlist, params, unique_name):
- params = _migrate_params(params)
-
+def check_temperature_list(
+ sensorlist: Sequence[tuple[str, Number, CheckTempKwargs]],
+ params: TempParamDict,
+) -> Generator[tuple[int, str, list[tuple[str, Number]]], None, None]:
output_unit = params.get("output_unit", "c")
- def worststate(a, b):
- if a != 3 and b != 3:
- return max(a, b)
- if a != 2 and b != 2:
- return 3
- return 2
-
- if sensorlist == []:
- return None
+ if not sensorlist:
+ return
sensor_count = len(sensorlist)
- tempsum = 0
- tempmax = sensorlist[0][1]
- tempmin = sensorlist[0][1]
- status = 0
- detailtext = ""
- for entry in sensorlist:
- if len(entry) == 2:
- sub_item, temp = entry
- kwargs = {}
- else:
- sub_item, temp, kwargs = entry
- if not isinstance(temp, (float, int)):
- temp = float(temp)
-
- tempsum += temp
- tempmax = max(tempmax, temp)
- tempmin = min(tempmin, temp)
- sub_status, sub_infotext, _sub_perfdata = check_temperature(temp, params, None, **kwargs)
- status = worststate(status, sub_status)
- if status != 0:
- detailtext += sub_item + ": " + sub_infotext + state_markers[sub_status] + ", "
- if detailtext:
- detailtext = " " + detailtext[:-2] # Drop trailing ", ", add space to join with summary
+ yield 0, f"Sensors: {sensor_count}", []
unitsym = temp_unitsym[output_unit]
- tempavg = tempsum / float(sensor_count)
- summarytext = "%d Sensors; Highest: %s %s, Average: %s %s, Lowest: %s %s" % (
- sensor_count,
- render_temp(tempmax, output_unit),
- unitsym,
- render_temp(tempavg, output_unit),
- unitsym,
- render_temp(tempmin, output_unit),
- unitsym,
- )
- infotext = summarytext + detailtext
- perfdata = [("temp", tempmax)]
+ tempmax = max(temp for _item, temp, _kwargs in sensorlist)
+ yield 0, f"Highest: {render_temp(tempmax, output_unit)} {unitsym}", [("temp", tempmax)]
+ tempavg = sum(temp for _item, temp, _kwargs in sensorlist) / float(sensor_count)
+ yield 0, f"Average: {render_temp(tempavg, output_unit)} {unitsym}", []
+ tempmin = min(temp for _item, temp, _kwargs in sensorlist)
+ yield 0, f"Lowest: {render_temp(tempmin, output_unit)} {unitsym}", []
+
+ for sub_item, temp, kwargs in sensorlist:
+ sub_status, sub_infotext, _sub_perfdata = check_temperature(temp, params, None, **kwargs)
+ if sub_status != 0:
+ yield sub_status, f"{sub_item}: {sub_infotext}", []
if "trend_compute" in params and "period" in params["trend_compute"]:
usr_warn, usr_crit = params.get("levels") or (None, None)
@@ -525,10 +501,7 @@ def worststate(a, b):
)
trend_status, trend_infotext = check_temperature_trend(
- tempavg, params["trend_compute"], output_unit, crit, crit_lower, unique_name
+ tempavg, params["trend_compute"], output_unit, crit, crit_lower, ""
)
- status = max(status, trend_status)
if trend_infotext:
- infotext += ", " + trend_infotext
-
- return status, infotext, perfdata
+ yield trend_status, trend_infotext, []
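
With this migration, check_temperature_list no longer returns a single aggregated
(status, infotext, perfdata) triple: it yields the sensor count, the highest/average/
lowest summaries, one subresult per non-OK sensor, and optionally a trend subresult.
Note that every entry must now be a 3-tuple; the old 2-element fallback is gone. A
caller sketch under the new signature (sensor data hypothetical):

    # hypothetical sensor tuples: (sub_item, temperature, CheckTempKwargs)
    SENSORS = [
        ("Slot 1", 41.0, {}),
        ("Slot 2", 38.5, {"dev_levels": (45.0, 50.0)}),
    ]

    def check_my_module_temp(item, params, parsed):
        # the helper is a generator now; legacy checks can simply re-yield
        yield from check_temperature_list(SENSORS, params)
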
diff --git a/cmk/base/check_legacy_includes/wmi.py b/cmk/base/check_legacy_includes/wmi.py
index bc8ac714d47..43c710ee3bc 100644
--- a/cmk/base/check_legacy_includes/wmi.py
+++ b/cmk/base/check_legacy_includes/wmi.py
@@ -6,15 +6,9 @@
from collections.abc import Callable, Iterable, Mapping
from math import ceil
-from cmk.base.check_api import check_levels, CheckResult, get_age_human_readable
-from cmk.base.plugins.agent_based.agent_based_api.v1 import (
- get_rate,
- get_value_store,
- IgnoreResultsError,
- render,
-)
-from cmk.base.plugins.agent_based.agent_based_api.v1.type_defs import StringTable
+from cmk.base.check_api import check_levels, CheckResult
+from cmk.agent_based.v2 import get_rate, get_value_store, IgnoreResultsError, render, StringTable
from cmk.plugins.lib.wmi import get_wmi_time
from cmk.plugins.lib.wmi import parse_wmi_table as parse_wmi_table_migrated
from cmk.plugins.lib.wmi import required_tables_missing, WMISection, WMITable
@@ -166,7 +160,7 @@ def inventory_wmi_table_total( # type: ignore[no-untyped-def]
# To make WATO rules simpler, levels are allowed to be passed as tuples if the level
# specifies the upper limit
-def get_levels_quadruple(params):
+def get_levels_quadruple(params: tuple | dict[str, tuple] | None) -> tuple | None:
if params is None:
return (None, None, None, None)
if isinstance(params, tuple):
@@ -310,13 +304,13 @@ def wmi_calculate_raw_average_time(
return measure_per_sec / base_per_sec # fixed: true-division
-def wmi_yield_raw_average( # type: ignore[no-untyped-def]
+def wmi_yield_raw_average(
table: WMITable,
row: str | int,
column: str,
infoname: str | None,
perfvar: str | None,
- levels=None,
+ levels: tuple | dict[str, tuple] | None = None,
perfscale: float = 1.0,
) -> CheckResult:
try:
@@ -329,17 +323,17 @@ def wmi_yield_raw_average( # type: ignore[no-untyped-def]
perfvar,
get_levels_quadruple(levels),
infoname=infoname,
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.time_offset,
)
-def wmi_yield_raw_average_timer( # type: ignore[no-untyped-def]
+def wmi_yield_raw_average_timer(
table: WMITable,
row: str | int,
column: str,
infoname: str | None,
perfvar: str | None,
- levels=None,
+ levels: tuple | dict[str, tuple] | None = None,
) -> CheckResult:
assert table.frequency
try:
@@ -362,13 +356,13 @@ def wmi_yield_raw_average_timer( # type: ignore[no-untyped-def]
)
-def wmi_yield_raw_fraction( # type: ignore[no-untyped-def]
+def wmi_yield_raw_fraction(
table: WMITable,
row: str | int,
column: str,
infoname: str | None,
perfvar: str | None,
- levels=None,
+ levels: tuple | dict[str, tuple] | None = None,
) -> CheckResult:
try:
average = wmi_calculate_raw_average(table, row, column, 100)
@@ -383,6 +377,3 @@ def wmi_yield_raw_fraction( # type: ignore[no-untyped-def]
human_readable_func=render.percent,
boundaries=(0, 100),
)
-
-
-# .
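
The annotation added to get_levels_quadruple documents the three accepted shapes. Only
the None branch is visible in this hunk; based on the comment above the function, the
tuple branch presumably fills the upper levels only, so the second line below is an
assumption, not confirmed by the patch:

    assert get_levels_quadruple(None) == (None, None, None, None)
    # assumed: get_levels_quadruple((80, 90)) == (80, 90, None, None)
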
diff --git a/cmk/base/checkers.py b/cmk/base/checkers.py
index 0b35716fd85..cb86055f910 100644
--- a/cmk/base/checkers.py
+++ b/cmk/base/checkers.py
@@ -10,23 +10,27 @@
import functools
import itertools
import logging
+import time
from collections.abc import Callable, Container, Iterable, Iterator, Mapping, Sequence
from functools import partial
-from typing import Final, TypeVar
+from pathlib import Path
+from typing import Final, Literal
import livestatus
import cmk.utils.debug
+import cmk.utils.paths
import cmk.utils.resulttype as result
import cmk.utils.tty as tty
+from cmk.utils import password_store
from cmk.utils.agentdatatype import AgentRawData
from cmk.utils.check_utils import unwrap_parameters, wrap_parameters
from cmk.utils.cpu_tracking import CPUTracker, Snapshot
from cmk.utils.exceptions import MKTimeout, OnError
from cmk.utils.hostaddress import HostAddress, HostName
from cmk.utils.log import console
-from cmk.utils.piggyback import PiggybackTimeSettings
-from cmk.utils.prediction import PredictionParameters, PredictionUpdater
+from cmk.utils.misc import pnp_cleanup
+from cmk.utils.prediction import make_updated_predictions, PredictionStore
from cmk.utils.rulesets.ruleset_matcher import RulesetMatcher
from cmk.utils.sectionname import SectionMap, SectionName
from cmk.utils.servicename import ServiceName
@@ -34,7 +38,8 @@
from cmk.snmplib import SNMPBackendEnum, SNMPRawData
-from cmk.fetchers import Fetcher, get_raw_data, Mode
+from cmk.fetchers import Fetcher, get_raw_data, Mode, SNMPScanConfig, TLSConfig
+from cmk.fetchers.config import make_persisted_section_dir
from cmk.fetchers.filecache import FileCache, FileCacheOptions, MaxAge
from cmk.checkengine.checking import (
@@ -48,9 +53,10 @@
MetricTuple,
ServiceCheckResult,
state_markers,
+ SubmittableServiceCheckResult,
+ UnsubmittableServiceCheckResult,
)
from cmk.checkengine.discovery import AutocheckEntry, DiscoveryPlugin, HostLabelPlugin
-from cmk.checkengine.exitspec import ExitSpec
from cmk.checkengine.fetcher import HostKey, SourceInfo, SourceType
from cmk.checkengine.inventory import InventoryPlugin, InventoryPluginName
from cmk.checkengine.legacy import LegacyCheckParameters
@@ -63,19 +69,38 @@
get_section_kwargs,
)
from cmk.checkengine.submitters import ServiceState
-from cmk.checkengine.summarize import summarize
+from cmk.checkengine.summarize import summarize, SummaryConfig
import cmk.base.api.agent_based.register as agent_based_register
import cmk.base.api.agent_based.register._config as _api
-import cmk.base.config as config
from cmk.base import plugin_contexts
from cmk.base.api.agent_based import cluster_mode, value_store
from cmk.base.api.agent_based.plugin_classes import CheckPlugin as CheckPluginAPI
from cmk.base.api.agent_based.value_store import ValueStoreManager
-from cmk.base.config import ConfigCache
+from cmk.base.config import (
+ ConfigCache,
+ ConfiguredIPLookup,
+ get_plugin_parameters,
+ handle_ip_lookup_failure,
+ lookup_ip_address,
+ lookup_mgmt_board_ip_address,
+)
from cmk.base.errorhandling import create_check_crash_dump
-from cmk.base.sources import make_parser, make_sources, Source
+from cmk.base.ip_lookup import IPStackConfig
+from cmk.base.sources import (
+ FetcherFactory,
+ make_parser,
+ make_sources,
+ ParserFactory,
+ SNMPFetcherConfig,
+ Source,
+)
+from cmk.agent_based.prediction_backend import (
+ InjectedParameters,
+ lookup_predictive_levels,
+ PredictionParameters,
+)
from cmk.agent_based.v1 import IgnoreResults, IgnoreResultsError, Metric
from cmk.agent_based.v1 import Result as CheckFunctionResult
from cmk.agent_based.v1 import State
@@ -95,8 +120,14 @@
def _fetch_all(
sources: Iterable[Source], *, simulation: bool, file_cache_options: FileCacheOptions, mode: Mode
-) -> Sequence[tuple[SourceInfo, result.Result[AgentRawData | SNMPRawData, Exception], Snapshot,]]:
- console.verbose("%s+%s %s\n", tty.yellow, tty.normal, "Fetching data".upper())
+) -> Sequence[
+ tuple[
+ SourceInfo,
+ result.Result[AgentRawData | SNMPRawData, Exception],
+ Snapshot,
+ ]
+]:
+ console.verbose(f"{tty.yellow}+{tty.normal} FETCHING DATA\n")
return [
_do_fetch(
source.source_info(),
@@ -114,8 +145,12 @@ def _do_fetch(
fetcher: Fetcher,
*,
mode: Mode,
-) -> tuple[SourceInfo, result.Result[AgentRawData | SNMPRawData, Exception], Snapshot,]:
- console.vverbose(f" Source: {source_info}\n")
+) -> tuple[
+ SourceInfo,
+ result.Result[AgentRawData | SNMPRawData, Exception],
+ Snapshot,
+]:
+ console.debug(f" Source: {source_info}\n")
with CPUTracker() as tracker:
raw_data = get_raw_data(file_cache, fetcher, mode)
return source_info, raw_data, tracker.duration
@@ -124,14 +159,16 @@ def _do_fetch(
class CMKParser:
def __init__(
self,
- config_cache: ConfigCache,
+ factory: ParserFactory,
*,
+ checking_sections: Callable[[HostName], Iterable[SectionName]],
selected_sections: SectionNameCollection,
keep_outdated: bool,
logger: logging.Logger,
) -> None:
- self.config_cache: Final = config_cache
+ self.factory: Final = factory
self.selected_sections: Final = selected_sections
+ self.checking_sections: Final = checking_sections
self.keep_outdated: Final = keep_outdated
self.logger: Final = logger
@@ -145,17 +182,23 @@ def __call__(
],
) -> Sequence[tuple[SourceInfo, result.Result[HostSections, Exception]]]:
"""Parse fetched data."""
- console.vverbose("%s+%s %s\n", tty.yellow, tty.normal, "Parse fetcher results".upper())
+ console.debug(f"{tty.yellow}+{tty.normal} PARSE FETCHER RESULTS\n")
output: list[tuple[SourceInfo, result.Result[HostSections, Exception]]] = []
+ section_cache_path = Path(cmk.utils.paths.var_dir)
# Special agents can produce data for the same check_plugin_name on the same host; in this case
# the section lines need to be extended
for source, raw_data in fetched:
source_result = parse_raw_data(
make_parser(
- self.config_cache,
- source,
- checking_sections=self.config_cache.make_checking_sections(
- source.hostname, selected_sections=NO_SELECTION
+ self.factory,
+ source.hostname,
+ source.fetcher_type,
+ checking_sections=self.checking_sections(source.hostname),
+ persisted_section_dir=make_persisted_section_dir(
+ source.hostname,
+ fetcher_type=source.fetcher_type,
+ ident=source.ident,
+ section_cache_path=section_cache_path,
),
keep_outdated=self.keep_outdated,
logger=self.logger,
@@ -170,13 +213,13 @@ def __call__(
class CMKSummarizer:
def __init__(
self,
- config_cache: ConfigCache,
host_name: HostName,
+ summary_config: Callable[[HostName, str], SummaryConfig],
*,
override_non_ok_state: ServiceState | None = None,
) -> None:
- self.config_cache: Final = config_cache
self.host_name: Final = host_name
+ self.summary_config: Final = summary_config
self.override_non_ok_state: Final = override_non_ok_state
def __call__(
@@ -187,12 +230,8 @@ def __call__(
_summarize_host_sections(
host_sections,
source,
+ self.summary_config(source.hostname, source.ident),
override_non_ok_state=self.override_non_ok_state,
- exit_spec=self.config_cache.exit_code_spec(source.hostname, source.ident),
- time_settings=self.config_cache.get_piggybacked_hosts_time_settings(
- piggybacked_hostname=source.hostname
- ),
- is_piggyback=self.config_cache.is_piggyback_host(source.hostname),
)
for source, host_sections in host_sections
]
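
CMKSummarizer now receives the summary settings as a callable instead of digging them
out of the ConfigCache itself. A construction sketch; make_summary_config is a
hypothetical factory, any Callable[[HostName, str], SummaryConfig] works:

    summarizer = CMKSummarizer(
        host_name,
        make_summary_config,  # hypothetical: returns a SummaryConfig per source
        override_non_ok_state=None,
    )
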
@@ -201,18 +240,18 @@ def __call__(
def _summarize_host_sections(
host_sections: result.Result[HostSections, Exception],
source: SourceInfo,
+ config: SummaryConfig,
*,
override_non_ok_state: ServiceState | None = None,
- exit_spec: ExitSpec,
- time_settings: PiggybackTimeSettings,
- is_piggyback: bool,
) -> ActiveCheckResult:
return ActiveCheckResult.from_subresults(
*(
ActiveCheckResult(
- s.state
- if (s.state == 0 or override_non_ok_state is None)
- else override_non_ok_state,
+ (
+ s.state
+ if (s.state == 0 or override_non_ok_state is None)
+ else override_non_ok_state
+ ),
f"[{source.ident}] {s.summary}" if idx == 0 else s.summary,
s.details,
s.metrics,
@@ -222,9 +261,7 @@ def _summarize_host_sections(
source.hostname,
source.ipaddress,
host_sections,
- exit_spec=exit_spec,
- time_settings=time_settings,
- is_piggyback=is_piggyback,
+ config,
fetcher_type=source.fetcher_type,
)
)
@@ -236,69 +273,140 @@ class CMKFetcher:
def __init__(
self,
config_cache: ConfigCache,
+ factory: FetcherFactory,
*,
# alphabetically sorted
file_cache_options: FileCacheOptions,
force_snmp_cache_refresh: bool,
mode: Mode,
on_error: OnError,
+ password_store_file: Path,
selected_sections: SectionNameCollection,
simulation_mode: bool,
max_cachefile_age: MaxAge | None = None,
snmp_backend_override: SNMPBackendEnum | None,
) -> None:
self.config_cache: Final = config_cache
+ self.factory: Final = factory
self.file_cache_options: Final = file_cache_options
self.force_snmp_cache_refresh: Final = force_snmp_cache_refresh
self.mode: Final = mode
self.on_error: Final = on_error
+ self.password_store_file: Final = password_store_file
self.selected_sections: Final = selected_sections
self.simulation_mode: Final = simulation_mode
self.max_cachefile_age: Final = max_cachefile_age
self.snmp_backend_override: Final = snmp_backend_override
- def __call__(
- self, host_name: HostName, *, ip_address: HostAddress | None
- ) -> Sequence[
+ def __call__(self, host_name: HostName, *, ip_address: HostAddress | None) -> Sequence[
tuple[
SourceInfo,
result.Result[AgentRawData | SNMPRawData, Exception],
Snapshot,
]
]:
- nodes = self.config_cache.nodes_of(host_name)
hosts_config = self.config_cache.hosts_config
- if nodes is None:
+ is_cluster = host_name in hosts_config.clusters
+ if not is_cluster:
# In case of keepalive we always have an ipaddress (can be 0.0.0.0 or :: when
# address is unknown). When called as non keepalive ipaddress may be None or
# is already an address (2nd argument)
hosts = [
- (host_name, ip_address or config.lookup_ip_address(self.config_cache, host_name))
+ (
+ host_name,
+ (ip_stack_config := ConfigCache.ip_stack_config(host_name)),
+ ip_address
+ or (
+ None
+ if ip_stack_config is IPStackConfig.NO_IP
+ else lookup_ip_address(self.config_cache, host_name)
+ ),
+ )
]
else:
- hosts = [(node, config.lookup_ip_address(self.config_cache, node)) for node in nodes]
+ hosts = [
+ (
+ node,
+ (ip_stack_config := ConfigCache.ip_stack_config(node)),
+ (
+ None
+ if ip_stack_config is IPStackConfig.NO_IP
+ else lookup_ip_address(self.config_cache, node)
+ ),
+ )
+ for node in self.config_cache.nodes(host_name)
+ ]
+ stored_walk_path = Path(cmk.utils.paths.snmpwalks_dir)
+ walk_cache_path = Path(cmk.utils.paths.var_dir) / "snmp_cache"
+ file_cache_path = Path(cmk.utils.paths.data_source_cache_dir)
+ tcp_cache_path = Path(cmk.utils.paths.tcp_cache_dir)
+ tls_config = TLSConfig(
+ cas_dir=Path(cmk.utils.paths.agent_cas_dir),
+ ca_store=Path(cmk.utils.paths.agent_cert_store),
+ site_crt=Path(cmk.utils.paths.site_cert_file),
+ )
+ passwords = password_store.load(self.password_store_file)
return _fetch_all(
itertools.chain.from_iterable(
make_sources(
- host_name_,
- ip_address_,
- ConfigCache.address_family(host_name_),
- config_cache=self.config_cache,
- is_cluster=host_name in hosts_config.clusters,
+ current_host_name,
+ current_ip_address,
+ current_ip_stack_config,
+ fetcher_factory=self.factory,
+ snmp_fetcher_config=SNMPFetcherConfig(
+ scan_config=SNMPScanConfig(
+ missing_sys_description=self.config_cache.missing_sys_description(
+ current_host_name
+ ),
+ on_error=self.on_error if not is_cluster else OnError.RAISE,
+ oid_cache_dir=Path(cmk.utils.paths.snmp_scan_cache_dir),
+ ),
+ selected_sections=(
+ self.selected_sections if not is_cluster else NO_SELECTION
+ ),
+ backend_override=self.snmp_backend_override,
+ stored_walk_path=stored_walk_path,
+ walk_cache_path=walk_cache_path,
+ ),
+ is_cluster=current_host_name in hosts_config.clusters,
force_snmp_cache_refresh=(
- self.force_snmp_cache_refresh if nodes is None else False
+ self.force_snmp_cache_refresh if not is_cluster else False
),
- selected_sections=self.selected_sections if nodes is None else NO_SELECTION,
- on_scan_error=self.on_error if nodes is None else OnError.RAISE,
simulation_mode=self.simulation_mode,
file_cache_options=self.file_cache_options,
file_cache_max_age=(
self.max_cachefile_age or self.config_cache.max_cachefile_age(host_name)
),
- snmp_backend_override=self.snmp_backend_override,
+ snmp_backend=self.config_cache.get_snmp_backend(current_host_name),
+ file_cache_path=file_cache_path,
+ tcp_cache_path=tcp_cache_path,
+ tls_config=tls_config,
+ computed_datasources=self.config_cache.computed_datasources(current_host_name),
+ datasource_programs=self.config_cache.datasource_programs(current_host_name),
+ tag_list=self.config_cache.tag_list(current_host_name),
+ management_ip=lookup_mgmt_board_ip_address(
+ self.config_cache,
+ current_host_name,
+ ),
+ management_protocol=self.config_cache.management_protocol(current_host_name),
+ special_agent_command_lines=self.config_cache.special_agent_command_lines(
+ current_host_name,
+ current_ip_address,
+ passwords,
+ self.password_store_file,
+ ip_address_of=ConfiguredIPLookup(
+ self.config_cache, error_handler=handle_ip_lookup_failure
+ ),
+ ),
+ agent_connection_mode=self.config_cache.agent_connection_mode(
+ current_host_name
+ ),
+ check_mk_check_interval=self.config_cache.check_mk_check_interval(
+ current_host_name
+ ),
)
- for host_name_, ip_address_ in hosts
+ for current_host_name, current_ip_stack_config, current_ip_address in hosts
),
simulation=self.simulation_mode,
file_cache_options=self.file_cache_options,
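
The comprehensions above use an assignment expression inside the host tuple so that a
single ip_stack_config lookup feeds both the tuple field and the address computation.
The pattern in isolation; stack_config_of and resolve are stand-ins for the ConfigCache
calls:

    hosts = [
        (
            name,
            (cfg := stack_config_of(name)),
            None if cfg is IPStackConfig.NO_IP else resolve(name),
        )
        for name in node_names
    ]
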
@@ -340,7 +448,7 @@ def __getitem__(self, __key: SectionName) -> HostLabelPlugin:
return HostLabelPlugin(
function=plugin.host_label_function,
parameters=partial(
- config.get_plugin_parameters,
+ get_plugin_parameters,
matcher=self.ruleset_matcher,
default_parameters=plugin.host_label_default_parameters,
ruleset_name=plugin.host_label_ruleset_name,
@@ -394,10 +502,14 @@ def check_function(
self.value_store_manager,
clusters=self.clusters,
)
+ # Watch out. The CMC has to agree on the path.
+ prediction_store = PredictionStore(
+ cmk.utils.paths.predictions_dir / host_name / pnp_cleanup(service.description)
+ )
return get_aggregated_result(
host_name,
host_name in self.clusters,
- cluster_nodes=self.config_cache.nodes_of(host_name) or (),
+ cluster_nodes=self.config_cache.nodes(host_name),
providers=providers,
service=service,
plugin=plugin,
@@ -405,6 +517,29 @@ def check_function(
rtc_package=self.rtc_package,
get_effective_host=self.config_cache.effective_host,
snmp_backend=self.config_cache.get_snmp_backend(host_name),
+ # In the past, the creation of predictions (and the livestatus query this
+ # requires) was performed inside the check plugin's context.
+ # We should consider moving this side effect even further up the stack.
+ injected_p=InjectedParameters(
+ meta_file_path_template=prediction_store.meta_file_path_template,
+ predictions=make_updated_predictions(
+ prediction_store,
+ partial(
+ livestatus.get_rrd_data,
+ livestatus.LocalConnection(),
+ host_name,
+ service.description,
+ ),
+ time.time(),
+ ),
+ ),
+ # Most of the following are only needed for individual plugins, actually.
+ # Once we have all of them in this place, we might want to consider how
+ # to optimize these computations.
+ only_from=self.config_cache.only_from(host_name),
+ service_level=self.config_cache.effective_service_level(
+ host_name, service.description
+ ),
)
return CheckPlugin(
@@ -412,6 +547,7 @@ def check_function(
function=check_function,
default_parameters=plugin.check_default_parameters,
ruleset_name=plugin.check_ruleset_name,
+ discovery_ruleset_name=plugin.discovery_ruleset_name,
)
def __iter__(self) -> Iterator[CheckPluginName]:
@@ -444,29 +580,46 @@ def _get_check_function(
@functools.wraps(check_function)
def __check_function(*args: object, **kw: object) -> ServiceCheckResult:
- with plugin_contexts.current_service(
- str(service.check_plugin_name), service.description
- ), value_store_manager.namespace(service.id()):
+ with (
+ plugin_contexts.current_service(str(service.check_plugin_name), service.description),
+ value_store_manager.namespace(service.id()),
+ ):
return _aggregate_results(consume_check_results(check_function(*args, **kw)))
return __check_function
def _aggregate_results(
- subresults: tuple[Sequence[MetricTuple], Sequence[CheckFunctionResult]]
+ subresults: tuple[Sequence[IgnoreResults], Sequence[MetricTuple], Sequence[CheckFunctionResult]]
) -> ServiceCheckResult:
# Impedance matching part of `get_check_function()`.
- perfdata, results = subresults
- needs_marker = len(results) > 1
- summaries: list[str] = []
+ ignore_results, metrics, results = subresults
+
+ if not ignore_results and not results: # Check returned nothing
+ return SubmittableServiceCheckResult.item_not_found()
+
+ state = int(State.worst(*(r.state for r in results))) if results else 0
+ output = _aggregate_texts(ignore_results, results)
+
+ return (
+ UnsubmittableServiceCheckResult(state, output, metrics)
+ if ignore_results
+ else SubmittableServiceCheckResult(state, output, metrics)
+ )
+
+
+def _aggregate_texts(
+ ignore_results: Sequence[IgnoreResults],
+ results: Sequence[CheckFunctionResult],
+) -> str:
+ summaries = [t for e in ignore_results if (t := str(e))]
details: list[str] = []
- status = State.OK
+ needs_marker = len(results) > 1
def _add_state_marker(result_str: str, state_marker: str) -> str:
return result_str if state_marker in result_str else result_str + state_marker
for result_ in results:
- status = State.worst(status, result_.state)
state_marker = state_markers[int(result_.state)] if needs_marker else ""
if result_.summary:
summaries.append(
@@ -482,17 +635,12 @@ def _add_state_marker(result_str: str, state_marker: str) -> str:
)
)
- # Empty list? Check returned nothing
- if not details:
- return ServiceCheckResult.item_not_found()
-
if not summaries:
count = len(details)
summaries.append(
"Everything looks OK - %d detail%s available" % (count, "" if count == 1 else "s")
)
- all_text = [", ".join(summaries)] + details
- return ServiceCheckResult(int(status), "\n".join(all_text).strip(), perfdata)
+ return "\n".join([", ".join(summaries)] + details)
def consume_check_results(
@@ -500,27 +648,26 @@ def consume_check_results(
# creating invalid output.
# Typing this as `CheckResult` will make linters complain about unreachable code.
subresults: Iterable[object],
-) -> tuple[Sequence[MetricTuple], Sequence[CheckFunctionResult]]:
+) -> tuple[Sequence[IgnoreResults], Sequence[MetricTuple], Sequence[CheckFunctionResult]]:
"""Impedance matching between the Check API and the Check Engine."""
ignore_results: list[IgnoreResults] = []
results: list[CheckFunctionResult] = []
perfdata: list[MetricTuple] = []
- for subr in subresults:
- if isinstance(subr, IgnoreResults):
- ignore_results.append(subr)
- elif isinstance(subr, Metric):
- perfdata.append((subr.name, subr.value) + subr.levels + subr.boundaries)
- elif isinstance(subr, CheckFunctionResult):
- results.append(subr)
- else:
- raise TypeError(subr)
-
- # Consume *all* check results, and *then* raise, if we encountered
- # an IgnoreResults instance.
- if ignore_results:
- raise IgnoreResultsError(str(ignore_results[-1]))
-
- return perfdata, results
+ try:
+ for subr in subresults:
+ match subr:
+ case IgnoreResults():
+ ignore_results.append(subr)
+ case Metric():
+ perfdata.append((subr.name, subr.value) + subr.levels + subr.boundaries)
+ case CheckFunctionResult():
+ results.append(subr)
+ case _:
+ raise TypeError(subr)
+ except IgnoreResultsError as exc:
+ return [IgnoreResults(str(exc))], perfdata, results
+
+ return ignore_results, perfdata, results
def _get_monitoring_data_kwargs(
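
Behavioural note on the rewrite above: an IgnoreResultsError raised while the check
function is being consumed no longer propagates; it is folded into the returned triple,
and any subresults consumed before the error are kept. A hedged illustration:

    def _flaky_check():
        yield CheckFunctionResult(state=State.OK, summary="first subresult")
        raise IgnoreResultsError("counters not initialized yet")

    ignore, perfdata, results = consume_check_results(_flaky_check())
    # ignore holds IgnoreResults("counters not initialized yet"),
    # results still holds the subresult yielded before the error
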
@@ -533,7 +680,7 @@ def _get_monitoring_data_kwargs(
*,
cluster_nodes: Sequence[HostName],
get_effective_host: Callable[[HostName, ServiceName], HostName],
-) -> tuple[Mapping[str, object], ServiceCheckResult]:
+) -> tuple[Mapping[str, object], UnsubmittableServiceCheckResult]:
# Mapping[str, object] stands for either
# * Mapping[HostName, Mapping[str, ParsedSectionContent | None]] for clusters, or
# * Mapping[str, ParsedSectionContent | None] otherwise.
@@ -558,7 +705,7 @@ def _get_monitoring_data_kwargs(
nodes,
sections,
),
- ServiceCheckResult.cluster_received_no_data([nk.hostname for nk in nodes]),
+ UnsubmittableServiceCheckResult.cluster_received_no_data([nk.hostname for nk in nodes]),
)
return (
@@ -567,7 +714,7 @@ def _get_monitoring_data_kwargs(
HostKey(host_name, source_type),
sections,
),
- ServiceCheckResult.received_no_data(),
+ UnsubmittableServiceCheckResult.received_no_data(),
)
@@ -598,9 +745,12 @@ def get_aggregated_result(
plugin: CheckPluginAPI,
check_function: Callable[..., ServiceCheckResult],
*,
+ injected_p: InjectedParameters,
rtc_package: AgentRawData | None,
get_effective_host: Callable[[HostName, ServiceName], HostName],
snmp_backend: SNMPBackendEnum,
+ only_from: None | str | list[str],
+ service_level: int,
) -> AggregatedResult:
# Mostly API-specific error-handling around the check function.
#
@@ -652,7 +802,6 @@ def get_aggregated_result(
if not section_kws: # no data found
return AggregatedResult(
service=service,
- submit=False,
data_received=False,
result=error_result,
cache_info=None,
@@ -664,28 +813,19 @@ def get_aggregated_result(
if plugin.check_default_parameters is None
else {
"params": _final_read_only_check_parameters(
- host_name, service.description, service.parameters
+ service.parameters, injected_p, only_from, service_level
)
}
)
try:
check_result = check_function(**item_kw, **params_kw, **section_kws)
- except IgnoreResultsError as e:
- msg = str(e) or "No service summary available"
- return AggregatedResult(
- service=service,
- submit=False,
- data_received=True,
- result=ServiceCheckResult(output=msg),
- cache_info=None,
- )
except MKTimeout:
raise
except Exception:
if cmk.utils.debug.enabled():
raise
- check_result = ServiceCheckResult(
+ check_result = SubmittableServiceCheckResult(
3,
create_check_crash_dump(
host_name,
@@ -711,7 +851,6 @@ def __iter(
return AggregatedResult(
service=service,
- submit=True,
data_received=True,
result=check_result,
cache_info=get_cache_info(
@@ -725,9 +864,10 @@ def __iter(
def _final_read_only_check_parameters(
- host_name: HostName,
- service_name: ServiceName,
entries: TimespecificParameters | LegacyCheckParameters,
+ injected_p: InjectedParameters,
+ only_from: None | str | list[str],
+ service_level: int,
) -> Parameters:
params = (
entries.evaluate(timeperiod_active)
@@ -740,63 +880,97 @@ def _final_read_only_check_parameters(
# For auto-migrated plugins expecting tuples, they will be
# unwrapped by a decorator of the original check_function.
wrap_parameters(
- _inject_prediction_callback_recursively(
- host_name,
- service_name,
- params,
- )
- if _contains_predictive_levels(params)
- else params,
+ (
+ postprocess_configuration(params, injected_p, only_from, service_level)
+ if _needs_postprocessing(params)
+ else params
+ ),
)
)
-def _contains_predictive_levels(params: LegacyCheckParameters) -> bool:
- if isinstance(params, (list, tuple)):
- return any(_contains_predictive_levels(p) for p in params)
+def _needs_postprocessing(params: object) -> bool:
+ match params:
+ case tuple(("cmk_postprocessed", str(), _)):
+ return True
+ case tuple() | list():
+ return any(_needs_postprocessing(p) for p in params)
+ case {"__injected__": _}: # legacy "valuespec" case.
+ return True
+ case {**mapping}:
+ return any(_needs_postprocessing(p) for p in mapping.values())
+ return False
- if isinstance(params, dict):
- return "__get_predictive_levels__" in params or any(
- _contains_predictive_levels(p) for p in params.values()
- )
- return False
+def postprocess_configuration(
+ params: LegacyCheckParameters | Mapping[str, object],
+ injected_p: InjectedParameters,
+ only_from: None | str | list[str],
+ service_level: int,
+) -> LegacyCheckParameters | Mapping[str, object]:
+ """Postprocess configuration parameters.
+ Parameters consisting of a 3-tuple with the first element being
+ "cmk_postprocessed" and the second one one of several known string constants
+ are postprocessed.
-_TParams = TypeVar("_TParams", object, list[object], tuple[object, ...], dict[str, object])
+ This currently supports two ways to handle predictive levels.
+ The "__injected__" case is legacy, the other case is the new one.
+ Once the legacy case is removed, this can be simplified.
-def _inject_prediction_callback_recursively(
- host_name: HostName, service_name: ServiceName, params: _TParams
-) -> _TParams:
+ Hopefully we can move this out of this scope entirely someday (and get
+ rid of the recursion).
+ """
match params:
+ case tuple(("cmk_postprocessed", "predictive_levels", value)):
+ return _postprocess_predictive_levels(value, injected_p)
+ case tuple(("cmk_postprocessed", "only_from", _)):
+ return only_from
+ case tuple(("cmk_postprocessed", "service_level", _)):
+ return service_level
case tuple():
- return tuple(_inject_prediction_callback_iterable(host_name, service_name, params))
+ return tuple(
+ postprocess_configuration(v, injected_p, only_from, service_level) for v in params
+ )
case list():
- return list(_inject_prediction_callback_iterable(host_name, service_name, params))
- case dict():
- if "__get_predictive_levels__" in params:
- params["__get_predictive_levels__"] = PredictionUpdater(
- host_name,
- service_name,
- PredictionParameters.model_validate(params),
- partial(livestatus.get_rrd_data, livestatus.LocalConnection()),
- ).get_predictive_levels
- return params
- return dict(
- zip(
- params.keys(),
- _inject_prediction_callback_iterable(host_name, service_name, params.values()),
- )
+ return list(
+ postprocess_configuration(v, injected_p, only_from, service_level) for v in params
)
-
+ case dict(): # check for legacy predictive levels :-(
+ return {
+ k: (
+ injected_p.model_dump()
+ if k == "__injected__"
+ else postprocess_configuration(v, injected_p, only_from, service_level)
+ )
+ for k, v in params.items()
+ }
return params
-def _inject_prediction_callback_iterable(
- host_name: HostName, service_name: ServiceName, params: Iterable[_TParams]
-) -> Iterable[_TParams]:
- return (_inject_prediction_callback_recursively(host_name, service_name, v) for v in params)
+def _postprocess_predictive_levels(
+ params: dict, injected_p: InjectedParameters
+) -> tuple[Literal["predictive"], tuple[str, float | None, tuple[float, float] | None]]:
+ match params:
+ case {
+ "__reference_metric__": str(metric),
+ "__direction__": "upper" | "lower" as direction,
+ **raw_prediction_params,
+ }:
+ return (
+ "predictive",
+ (
+ metric,
+ *lookup_predictive_levels(
+ metric,
+ direction,
+ PredictionParameters.model_validate(raw_prediction_params),
+ injected_p,
+ ),
+ ),
+ )
+ raise ValueError(f"Invalid predictive levels: {params!r}")
class DiscoveryPluginMapper(Mapping[CheckPluginName, DiscoveryPlugin]):
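
The "cmk_postprocessed" markers handled above are plain 3-tuples whose middle element
selects the replacement; the third element is ignored for only_from and service_level.
A minimal sketch, with injected_p assumed to be a valid InjectedParameters instance:

    params = {
        "connections": ("cmk_postprocessed", "only_from", None),
        "level": ("cmk_postprocessed", "service_level", None),
    }
    postprocess_configuration(params, injected_p, ["10.0.0.0/8"], 20)
    # -> {"connections": ["10.0.0.0/8"], "level": 20}
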
@@ -831,7 +1005,7 @@ def __discovery_function(
service_name=plugin.service_name,
function=__discovery_function,
parameters=partial(
- config.get_plugin_parameters,
+ get_plugin_parameters,
matcher=self.ruleset_matcher,
default_parameters=plugin.discovery_default_parameters,
ruleset_name=plugin.discovery_ruleset_name,
diff --git a/cmk/base/config.py b/cmk/base/config.py
index 666a0fbd352..c2e6aadb1d4 100644
--- a/cmk/base/config.py
+++ b/cmk/base/config.py
@@ -21,19 +21,22 @@
import socket
import struct
import sys
-from collections.abc import (
- Callable,
- Container,
- Iterable,
- Iterator,
- Mapping,
- MutableMapping,
- Sequence,
-)
+from collections.abc import Callable, Container, Iterable, Iterator, Mapping, Sequence
from enum import Enum
from importlib.util import MAGIC_NUMBER as _MAGIC_NUMBER
from pathlib import Path
-from typing import Any, AnyStr, assert_never, cast, Final, Literal, NamedTuple, overload, TypedDict
+from typing import (
+ Any,
+ AnyStr,
+ assert_never,
+ Final,
+ Generic,
+ Literal,
+ NamedTuple,
+ overload,
+ TypeAlias,
+ TypeVar,
+)
import cmk.utils
import cmk.utils.check_utils
@@ -53,28 +56,18 @@
import cmk.utils.version as cmk_version
from cmk.utils.agent_registration import connection_mode_from_host_config, HostAgentConnectionMode
from cmk.utils.caching import cache_manager
-from cmk.utils.check_utils import (
- maincheckify,
- ParametersTypeAlias,
- section_name_of,
- unwrap_parameters,
-)
+from cmk.utils.check_utils import maincheckify, ParametersTypeAlias, section_name_of
from cmk.utils.config_path import ConfigPath
-from cmk.utils.exceptions import MKGeneralException, MKIPAddressLookupError, MKTerminate, OnError
+from cmk.utils.exceptions import MKGeneralException, MKIPAddressLookupError, MKTerminate
from cmk.utils.hostaddress import HostAddress, HostName, Hosts
from cmk.utils.http_proxy_config import http_proxy_config_from_user_setting, HTTPProxyConfig
-from cmk.utils.labels import Labels
+from cmk.utils.labels import Labels, LabelSources
+from cmk.utils.legacy_check_api import LegacyCheckDefinition
from cmk.utils.log import console
from cmk.utils.macros import replace_macros_in_str
from cmk.utils.regex import regex
from cmk.utils.rulesets import RuleSetName
-from cmk.utils.rulesets.ruleset_matcher import (
- LabelManager,
- LabelSources,
- RulesetMatcher,
- RulesetName,
- RuleSpec,
-)
+from cmk.utils.rulesets.ruleset_matcher import LabelManager, RulesetMatcher, RulesetName, RuleSpec
from cmk.utils.sectionname import SectionName
from cmk.utils.servicename import Item, ServiceName
from cmk.utils.site import omd_site
@@ -92,47 +85,40 @@
SNMPContextConfig,
SNMPCredentials,
SNMPHostConfig,
+ SNMPRawDataElem,
SNMPTiming,
+ SNMPVersion,
)
from cmk.fetchers import (
- FetcherType,
IPMICredentials,
IPMIFetcher,
PiggybackFetcher,
+ ProgramFetcher,
SNMPFetcher,
SNMPSectionMeta,
TCPEncryptionHandling,
TCPFetcher,
+ TLSConfig,
)
-from cmk.fetchers.cache import SectionStore
from cmk.fetchers.config import make_persisted_section_dir
from cmk.fetchers.filecache import MaxAge
-from cmk.checkengine.checking import (
- CheckPluginName,
- CheckPluginNameStr,
- ConfiguredService,
- ServiceID,
-)
-from cmk.checkengine.discovery import (
- AutocheckEntry,
- AutochecksManager,
- CheckPreviewEntry,
- DiscoveryCheckParameters,
- DiscoveryPlugin,
-)
+from cmk.checkengine.checking import CheckPluginName, ConfiguredService, ServiceID
+from cmk.checkengine.discovery import AutochecksManager, CheckPreviewEntry, DiscoveryCheckParameters
from cmk.checkengine.exitspec import ExitSpec
-from cmk.checkengine.fetcher import SourceType
+from cmk.checkengine.fetcher import FetcherType, SourceType
from cmk.checkengine.inventory import HWSWInventoryParameters, InventoryPlugin
-from cmk.checkengine.legacy import LegacyCheckParameters
from cmk.checkengine.parameters import Parameters, TimespecificParameters, TimespecificParameterSet
from cmk.checkengine.parser import (
AgentParser,
AgentRawDataSectionElem,
NO_SELECTION,
SectionNameCollection,
+ SectionStore,
+ SNMPParser,
)
+from cmk.checkengine.summarize import SummaryConfig
import cmk.base.api.agent_based.register as agent_based_register
import cmk.base.default_config as default_config
@@ -143,10 +129,20 @@
from cmk.base.api.agent_based.register.section_plugins_legacy import (
create_section_plugin_from_legacy,
)
-from cmk.base.api.agent_based.register.utils_legacy import LegacyCheckDefinition
from cmk.base.default_config import * # pylint: disable=wildcard-import,unused-wildcard-import
-from cmk.base.ip_lookup import AddressFamily
-from cmk.base.plugins.server_side_calls import load_active_checks
+from cmk.base.ip_lookup import IPStackConfig
+from cmk.base.parent_scan import ScanConfig as ParentScanConfig
+from cmk.base.server_side_calls import load_special_agents, SpecialAgent, SpecialAgentCommandLine
+from cmk.base.sources import SNMPFetcherConfig
+
+from cmk.server_side_calls import v1 as server_side_calls_api
+from cmk.server_side_calls_backend.config_processing import PreprocessingResult
+
+try:
+ from cmk.base.cee.rrd import RRDObjectConfig
+except ModuleNotFoundError:
+ # Non-existing edition layering...
+ RRDObjectConfig: TypeAlias = object # type: ignore[no-redef]
# TODO: Prefix helper functions with "_".
@@ -173,6 +169,11 @@
CheckCommandArguments = Iterable[int | float | str | tuple[str, str, str]]
+LegacySSCConfigModel = object
+
+SSCRules = Sequence[tuple[str, Sequence[Mapping[str, object] | LegacySSCConfigModel]]]
+
+
class FilterMode(enum.Enum):
NONE = enum.auto()
INCLUDE_CLUSTERED = enum.auto()
@@ -233,7 +234,11 @@ def _aggregate_check_table_services(
if host_name in config_cache.hosts_config.clusters:
yield from (s for s in _get_clustered_services(config_cache, host_name) if sfilter.keep(s))
- yield from (s for s in _get_enforced_services(config_cache, host_name) if sfilter.keep(s))
+ yield from (
+ svc
+ for _, svc in config_cache.enforced_services_table(host_name).values()
+ if sfilter.keep(svc)
+ )
# NOTE: as far as I can see, we only have two cases with the filter mode.
# Either we compute services to check, or we compute services for fetching.
@@ -313,15 +318,6 @@ def is_mine(self, service: ConfiguredService) -> bool:
)
-def _get_enforced_services(
- config_cache: ConfigCache, host_name: HostName
-) -> list[ConfiguredService]:
- return [
- service
- for _ruleset_name, service in config_cache.enforced_services_table(host_name).values()
- ]
-
-
def _get_services_from_cluster_nodes(
config_cache: ConfigCache, node_name: HostName
) -> Iterable[ConfiguredService]:
@@ -333,11 +329,11 @@ def _get_clustered_services(
config_cache: ConfigCache,
cluster_name: HostName,
) -> Iterable[ConfiguredService]:
- for node in config_cache.nodes_of(cluster_name) or []:
+ for node in config_cache.nodes(cluster_name):
node_checks: list[ConfiguredService] = []
if not config_cache.is_ping_host(cluster_name):
node_checks += config_cache.get_autochecks_of(node)
- node_checks.extend(_get_enforced_services(config_cache, node))
+ node_checks.extend(svc for _, svc in config_cache.enforced_services_table(node).values())
yield from (
service
@@ -346,19 +342,10 @@ def _get_clustered_services(
)
-class ClusterCacheInfo(NamedTuple):
- clusters_of: dict[HostName, list[HostName]]
- nodes_of: dict[HostName, list[HostName]]
-
-
-class RRDConfig(TypedDict):
- """RRDConfig
- This typing might not be complete or even wrong, feel free to improve"""
-
- cfs: Iterable[Literal["MIN", "MAX", "AVERAGE"]] # conceptually a Set[Literal[...]]
- rras: list[tuple[float, int, int]]
- step: int
- format: Literal["pnp_multiple", "cmc_single"]
+@dataclasses.dataclass(frozen=True, kw_only=True)
+class ClusterCacheInfo:
+ clusters_of: Mapping[HostName, Sequence[HostName]]
+ nodes_of: Mapping[HostName, Sequence[HostName]]
CheckContext = dict[str, Any]
@@ -393,35 +380,45 @@ class _NestedExitSpec(ExitSpec, total=False):
individual: dict[str, ExitSpec]
-_ignore_ip_lookup_failures = False
-_failed_ip_lookups: list[HostName] = []
+IPLookup = Callable[
+ [HostName, Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6]],
+ HostAddress | None,
+]
+_TErrHandler = TypeVar("_TErrHandler", bound=Callable[[HostName, Exception], None])
-def ip_address_of(
- config_cache: ConfigCache, host_name: HostName, family: socket.AddressFamily | AddressFamily
-) -> HostAddress | None:
- try:
- return lookup_ip_address(config_cache, host_name, family=family)
- except Exception as e:
- if host_name in config_cache.hosts_config.clusters:
- return HostAddress("")
- _failed_ip_lookups.append(host_name)
- if not _ignore_ip_lookup_failures:
- config_warnings.warn(
- "Cannot lookup IP address of '%s' (%s). "
- "The host will not be monitored correctly." % (host_name, e)
- )
- return ip_lookup.fallback_ip_for(family)
+class ConfiguredIPLookup(Generic[_TErrHandler]):
+ def __init__(self, config_cache: ConfigCache, *, error_handler: _TErrHandler) -> None:
+ self._config_cache = config_cache
+ self.error_handler: Final[_TErrHandler] = error_handler
+ def __call__(
+ self,
+ host_name: HostName,
+ family: Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6],
+ ) -> HostAddress | None:
+ try:
+ return lookup_ip_address(self._config_cache, host_name, family=family)
+ except Exception as e:
+ if host_name in self._config_cache.hosts_config.clusters:
+ return HostAddress("")
+ self.error_handler(host_name, e)
-def ignore_ip_lookup_failures() -> None:
- global _ignore_ip_lookup_failures
- _ignore_ip_lookup_failures = True
+ return ip_lookup.fallback_ip_for(family)
-def failed_ip_lookups() -> list[HostName]:
- return _failed_ip_lookups
+# This is an ongoing refactoring. Hopefully this will be untangled soon.
+def handle_ip_lookup_failure(host_name: HostName, exc: Exception) -> None:
+ """Error handler for IP lookup failures.
+
+ * collects error messages for failed lookups in a global variable
+ * writes error messages to the console
+ """
+ config_warnings.warn(
+ f"Cannot lookup IP address of '{host_name}' ({exc}). "
+ "The host will not be monitored correctly."
+ )
def get_variable_names() -> list[str]:
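
ConfiguredIPLookup separates the address lookup from the failure policy: the caller
chooses the handler. A short usage sketch based on the signatures above:

    lookup = ConfiguredIPLookup(config_cache, error_handler=handle_ip_lookup_failure)
    address = lookup(HostName("myhost"), socket.AddressFamily.AF_INET)
    # on a lookup failure the handler warns and a fallback address is returned
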
@@ -490,7 +487,7 @@ def load(
)
):
# TODO: Raise an exception
- console.error("Error in configuration: duplicate hosts: %s\n", ", ".join(duplicates))
+ console.error(f"Error in configuration: duplicate hosts: {', '.join(duplicates)}\n")
sys.exit(3)
@@ -582,7 +579,9 @@ def __setitem__(self, cluster_name: Any, value: Any) -> Any:
def _load_config_file(file_to_load: Path, into_dict: dict[str, Any]) -> None:
- exec(file_to_load.read_text(), into_dict, into_dict) # nosec B102 # BNS:aee528
+ exec(
+ compile(file_to_load.read_text(), file_to_load, "exec"), into_dict, into_dict
+ ) # nosec B102 # BNS:aee528
def _load_config(with_conf_d: bool, exclude_parents_mk: bool) -> set[str]:
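
Wrapping the source in compile() before exec() is purely a diagnostics improvement:
tracebacks and SyntaxErrors from the executed config file then cite the real path
instead of "<string>". The same pattern in plain Python, mirroring the hunk above:

    namespace: dict[str, object] = {}
    source = file_to_load.read_text()
    exec(compile(source, file_to_load, "exec"), namespace, namespace)
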
@@ -648,7 +647,7 @@ def _load_config(with_conf_d: bool, exclude_parents_mk: bool) -> set[str]:
if cmk.utils.debug.enabled():
raise
if sys.stderr.isatty():
- console.error("Cannot read in configuration file %s: %s\n", path, e)
+ console.error(f"Cannot read in configuration file {path}: {e}\n")
sys.exit(1)
# Cleanup global helper vars
@@ -664,7 +663,7 @@ def _load_config(with_conf_d: bool, exclude_parents_mk: bool) -> set[str]:
def _transform_plugin_names_from_160_to_170(global_dict: dict[str, Any]) -> None:
- # Pre 1.7.0 check plugin names may have dots or dashes (one case) in them.
+ # Pre 1.7.0 check plug-in names may have dots or dashes (one case) in them.
# Now they don't, and we have to translate all variables that may use them:
if "service_descriptions" in global_dict:
global_dict["service_descriptions"] = {
@@ -757,7 +756,7 @@ def __init__(self, config_cache: ConfigCache) -> None:
self._config_cache = config_cache
def generate(self) -> Mapping[str, Any]:
- helper_config: MutableMapping[str, Any] = {}
+ helper_config: dict[str, Any] = {}
# These functions purpose is to filter out hosts which are monitored on different sites
hosts_config = self._config_cache.hosts_config
@@ -929,7 +928,7 @@ def strip_tags(tagged_hostlist: Iterable[str]) -> Sequence[HostName]:
def _get_shadow_hosts() -> ShadowHosts:
try:
# Only available with CEE
- return shadow_hosts # type: ignore[name-defined]
+ return shadow_hosts # type: ignore[name-defined,unused-ignore]
except NameError:
return {}
@@ -1042,7 +1041,9 @@ def _get_old_cmciii_temp_description(item: Item) -> tuple[ServiceName, None]:
"netscaler_mem": lambda item: ("Memory used", item),
"nullmailer_mailq": lambda item: ("Nullmailer Queue", None),
"nvidia_temp": lambda item: ("Temperature NVIDIA %s", item),
- "postfix_mailq": lambda item: ("Postfix Queue", None),
+ "postfix_mailq": lambda item: (
+ ("Postfix Queue", None) if item == "default" else ("Postfix Queue %s", item)
+ ),
"ps": lambda item: ("proc_%s", item),
"qmail_stats": lambda item: ("Qmail Queue", None),
"raritan_emx": lambda item: ("Rack %s", item),
@@ -1110,44 +1111,18 @@ def service_description(
return f"Unimplemented check {check_plugin_name} / {item}"
return f"Unimplemented check {check_plugin_name}"
- def __discovery_function(
- check_plugin_name: CheckPluginName, *args: object, **kw: object
- ) -> Iterable[AutocheckEntry]:
- # Deal with impededance mismatch between check API and check engine.
- yield from (
- AutocheckEntry(
- check_plugin_name=check_plugin_name,
- item=service.item,
- parameters=unwrap_parameters(service.parameters),
- service_labels={label.name: label.value for label in service.labels},
- )
- for service in check_plugin.discovery_function(*args, **kw)
- )
-
- plugin = DiscoveryPlugin(
- sections=check_plugin.sections,
- service_name=check_plugin.service_name,
- function=__discovery_function,
- parameters=functools.partial(
- get_plugin_parameters,
- matcher=matcher,
- default_parameters=check_plugin.discovery_default_parameters,
- ruleset_name=check_plugin.discovery_ruleset_name,
- ruleset_type=check_plugin.discovery_ruleset_type,
- rules_getter_function=agent_based_register.get_host_label_ruleset,
- ),
- )
-
return get_final_service_description(
_format_item_with_template(
- *_get_service_description_template_and_item(check_plugin_name, plugin, item)
+ *_get_service_description_template_and_item(
+ check_plugin_name, check_plugin.service_name, item
+ )
),
get_service_translations(matcher, hostname),
)
def _get_service_description_template_and_item(
- plugin_name: CheckPluginName, plugin: DiscoveryPlugin, item: Item
+ plugin_name: CheckPluginName, service_name_template: str, item: Item
) -> tuple[ServiceName, Item]:
plugin_name_str = str(plugin_name)
@@ -1157,7 +1132,7 @@ def _get_service_description_template_and_item(
old_descr = _old_service_descriptions.get(plugin_name_str)
if old_descr is None or plugin_name_str in use_new_descriptions_for:
- return plugin.service_name, item
+ return service_name_template, item
return old_descr(item)
@@ -1254,7 +1229,7 @@ def service_depends_on(
def is_cmc() -> bool:
- """Whether or not the site is currently configured to use the Microcore."""
+ """Whether or not the site is currently configured to use the Micro Core."""
return monitoring_core == "cmc"
@@ -1304,30 +1279,6 @@ def get_http_proxy(http_proxy: tuple[str, str]) -> HTTPProxyConfig:
)
-# .
-# .--Host matching-------------------------------------------------------.
-# | _ _ _ _ _ _ |
-# | | | | | ___ ___| |_ _ __ ___ __ _| |_ ___| |__ (_)_ __ __ _ |
-# | | |_| |/ _ \/ __| __| | '_ ` _ \ / _` | __/ __| '_ \| | '_ \ / _` | |
-# | | _ | (_) \__ \ |_ | | | | | | (_| | || (__| | | | | | | | (_| | |
-# | |_| |_|\___/|___/\__| |_| |_| |_|\__,_|\__\___|_| |_|_|_| |_|\__, | |
-# | |___/ |
-# +----------------------------------------------------------------------+
-# | Code for calculating the host condition matching of rules |
-# '----------------------------------------------------------------------'
-
-hosttags_match_taglist = tuple_rulesets.hosttags_match_taglist
-
-
-# Slow variant of checking wether a service is matched by a list
-# of regexes - used e.g. by cmk --notify
-def in_extraconf_servicelist(service_patterns: list[str], service: str) -> bool:
- if optimized_pattern := tuple_rulesets.convert_pattern_list(service_patterns):
- return optimized_pattern.match(service) is not None
-
- return False
-
-
# .
# .--Constants-----------------------------------------------------------.
# | ____ _ _ |
@@ -1348,16 +1299,16 @@ def in_extraconf_servicelist(service_patterns: list[str], service: str) -> bool:
ALL_SERVICES = tuple_rulesets.ALL_SERVICES
NEGATE = tuple_rulesets.NEGATE
-# TODO: Cleanup access to check_info[] -> replace it by different function calls
-# like for example check_exists(...)
# BE AWARE: sync these global data structures with
# _initialize_data_structures()
-# TODO: Refactor this.
-
# The following data structures will be filled by the checks
# all known checks
-check_info: dict[str, LegacyCheckDefinition] = {}
+check_info: dict[object, object] = (
+ {}
+) # want: dict[str, LegacyCheckDefinition], but don't trust the plugins!
+# for the Nagios config: keep track of which plug-in lives where
+legacy_check_plugin_files: dict[str, str] = {}
# Lookup for legacy names
legacy_check_plugin_names: dict[CheckPluginName, str] = {}
# optional functions for parameter precompilation
@@ -1401,17 +1352,13 @@ def load_all_plugins(
errors.extend(load_checks(get_check_api_context, filelist))
- # Load new active checks.
- # These are just loaded here, because there currently is no other place
- # that will report the errors. Maybe a `cmk --validate-plugins` would be nice.
- more_errors, _plugins = load_active_checks()
-
- return [*errors, *more_errors]
+ return errors
def _initialize_data_structures() -> None:
"""Initialize some data structures which are populated while loading the checks"""
check_info.clear()
+ legacy_check_plugin_files.clear()
legacy_check_plugin_names.clear()
precompile_params.clear()
factory_settings.clear()
@@ -1422,18 +1369,19 @@ def _initialize_data_structures() -> None:
def _get_plugin_paths(*dirs: str) -> list[str]:
filelist: list[str] = []
for directory in dirs:
- filelist += _plugin_pathnames_in_directory(directory)
+ filelist += plugin_pathnames_in_directory(directory)
return filelist
# NOTE: The given file names should better be absolute, otherwise
# we depend on the current working directory, which is a bad idea,
# especially in tests.
-def load_checks( # pylint: disable=too-many-branches
+def load_checks(
get_check_api_context: GetCheckApiContext,
filelist: list[str],
) -> list[str]:
loaded_files: set[str] = set()
+ ignored_plugins_errors = []
did_compile = False
for f in filelist:
@@ -1447,6 +1395,10 @@ def load_checks( # pylint: disable=too-many-branches
try:
check_context = new_check_context(get_check_api_context)
+ # Make a copy of the known plug-in names; we need to track them for nagios config generation
+ known_checks = {str(k) for k in check_info}
+ known_agents = {str(k) for k in special_agent_info}
+
did_compile |= load_precompiled_plugin(f, check_context)
loaded_files.add(file_name)
@@ -1455,16 +1407,37 @@ def load_checks( # pylint: disable=too-many-branches
raise
except Exception as e:
- console.error("Error in plugin file %s: %s\n", f, e)
+ ignored_plugins_errors.append(
+ f"Ignoring outdated plug-in file {f}: {e} -- this API is deprecated!\n"
+ )
if cmk.utils.debug.enabled():
raise
continue
- legacy_check_plugin_names.update({CheckPluginName(maincheckify(n)): n for n in check_info})
+ for plugin_name in {str(k) for k in check_info}.difference(known_checks) | {
+ str(k) for k in special_agent_info
+ }.difference(known_agents):
+ legacy_check_plugin_files[plugin_name] = f
+
+ # Now just drop everything we don't like; this is not a supported API anymore.
+ # Users affected by this will see a CRIT in their "Analyse Configuration" page.
+ sane_check_info = {}
+ for k, v in check_info.items():
+ if isinstance(k, str) and isinstance(v, LegacyCheckDefinition):
+ sane_check_info[k] = v
+ continue
+ ignored_plugins_errors.append(
+ f"Ignoring outdated plug-in {k!r}: Format no longer supported"
+ " -- this API is deprecated!\n"
+ )
- return _extract_agent_and_snmp_sections() + _extract_check_plugins(
- validate_creation_kwargs=did_compile
- )
+ legacy_check_plugin_names.update({CheckPluginName(maincheckify(n)): n for n in sane_check_info})
+
+ return [
+ *ignored_plugins_errors,
+ *_extract_agent_and_snmp_sections(sane_check_info),
+ *_extract_check_plugins(sane_check_info, validate_creation_kwargs=did_compile),
+ ]
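
The hunk above narrows the untrusted check_info mapping at runtime before anything else consumes it. A minimal, self-contained sketch of that validate-and-narrow pattern follows; the names (SaneDefinition, raw_registry) are illustrative stand-ins, not part of the patch:

    from dataclasses import dataclass


    @dataclass
    class SaneDefinition:
        """Stand-in for LegacyCheckDefinition; illustrative only."""
        service_name: str | None = None


    # Plug-ins may have written arbitrary objects into the shared dict.
    raw_registry: dict[object, object] = {
        "cpu.loads": SaneDefinition("CPU load"),
        42: "bogus entry written by an outdated plug-in",
    }

    sane: dict[str, SaneDefinition] = {}
    errors: list[str] = []
    for key, value in raw_registry.items():
        if isinstance(key, str) and isinstance(value, SaneDefinition):
            sane[key] = value  # keep only entries we can actually handle
            continue
        errors.append(f"Ignoring outdated plug-in {key!r}: format no longer supported")

    assert list(sane) == ["cpu.loads"]
    assert len(errors) == 1
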
# Constructs a new check context dictionary. It contains the whole check API.
@@ -1482,7 +1455,7 @@ def new_check_context(get_check_api_context: GetCheckApiContext) -> CheckContext
return context
-def _plugin_pathnames_in_directory(path: str) -> list[str]:
+def plugin_pathnames_in_directory(path: str) -> list[str]:
if path and os.path.exists(path):
return sorted(
[
@@ -1513,7 +1486,7 @@ def from_file(cls, path: str) -> _PYCHeader:
def load_precompiled_plugin(path: str, check_context: CheckContext) -> bool:
- """Loads the given check or check include plugin into the given
+ """Loads the given check or check include plug-in into the given
check context.
To improve loading speed the files are not read directly. The files are
@@ -1530,7 +1503,7 @@ def load_precompiled_plugin(path: str, check_context: CheckContext) -> bool:
do_compile = not _is_plugin_precompiled(path, precompiled_path)
if do_compile:
- console.vverbose(f"Precompile {path} to {precompiled_path}\n")
+ console.debug(f"Precompile {path} to {precompiled_path}\n")
store.makedirs(os.path.dirname(precompiled_path))
py_compile.compile(path, precompiled_path, doraise=True)
# The original file is from the version so the calculated mode is world readable...
@@ -1570,12 +1543,14 @@ def _precompiled_plugin_path(path: str) -> str:
AUTO_MIGRATION_ERR_MSG = (
- "Failed to auto-migrate legacy plugin to %s: %s\n"
+ "Failed to auto-migrate legacy plug-in to %s: %s\n"
"Please refer to Werk 10601 for more information.\n"
)
-def _extract_agent_and_snmp_sections() -> list[str]:
+def _extract_agent_and_snmp_sections(
+ legacy_checks: Mapping[str, LegacyCheckDefinition]
+) -> list[str]:
"""Here comes the next layer of converting-to-"new"-api.
For the new check-API in cmk/base/api/agent_based, we use the accumulated information
@@ -1583,31 +1558,32 @@ def _extract_agent_and_snmp_sections() -> list[str]:
"""
errors = []
# start with the "main"-checks, the ones without '.' in their names:
- main_checks = [(name, cinfo) for name, cinfo in check_info.items() if "." not in name]
+ main_checks = [(name, cinfo) for name, cinfo in legacy_checks.items() if "." not in name]
for section_name, check_info_element in main_checks:
if agent_based_register.is_registered_section_plugin(SectionName(section_name)):
continue
try:
+ assert (parse_function := check_info_element.parse_function) is not None
agent_based_register.add_section_plugin(
create_section_plugin_from_legacy(
name=section_name,
- parse_function=check_info_element["parse_function"],
- fetch=check_info_element.get("fetch"),
- detect=check_info_element.get("detect"),
+ parse_function=parse_function,
+ fetch=check_info_element.fetch,
+ detect=check_info_element.detect,
)
)
except (NotImplementedError, KeyError, AssertionError, ValueError) as exc:
# NOTE: missing section plugins may lead to missing data for a check plugin
- # *or* to more obscure errors, when a check/inventory plugin will be
+ # *or* to more obscure errors, when a check/inventory plug-in will be
# passed un-parsed data unexpectedly.
if cmk.utils.debug.enabled():
raise MKGeneralException(exc) from exc
errors.append(AUTO_MIGRATION_ERR_MSG % ("section", section_name))
if cmk.utils.debug.enabled():
- subchecks = (name for name in check_info if "." in name)
+ subchecks = (name for name in legacy_checks if "." in name)
for subcheck in subchecks:
assert agent_based_register.is_registered_section_plugin(
SectionName(section_name_of(subcheck))
@@ -1616,16 +1592,18 @@ def _extract_agent_and_snmp_sections() -> list[str]:
return errors
-def _extract_check_plugins(*, validate_creation_kwargs: bool) -> list[str]:
+def _extract_check_plugins(
+ legacy_checks: Mapping[str, LegacyCheckDefinition], *, validate_creation_kwargs: bool
+) -> list[str]:
"""Here comes the next layer of converting-to-"new"-api.
For the new check-API in cmk/base/api/agent_based, we use the accumulated information
in check_info to create API compliant check plugins.
"""
errors = []
- for check_plugin_name, check_info_dict in sorted(check_info.items()):
+ for check_plugin_name, check_info_element in sorted(legacy_checks.items()):
# skip pure section declarations:
- if check_info_dict.get("service_name") is None:
+ if check_info_element.service_name is None:
continue
try:
present_plugin = agent_based_register.get_check_plugin(
@@ -1637,13 +1615,13 @@ def _extract_check_plugins(*, validate_creation_kwargs: bool) -> list[str]:
# implemented here instead of the agent based register so that new API code does not
# need to include any handling of legacy cases
raise ValueError(
- f"Legacy check plugin still exists for check plugin {check_plugin_name}. "
+ f"Legacy check plug-in still exists for check plug-in {check_plugin_name}. "
"Please remove legacy plugin."
)
agent_based_register.add_check_plugin(
create_check_plugin_from_legacy(
check_plugin_name,
- check_info_dict,
+ check_info_element,
validate_creation_kwargs=validate_creation_kwargs,
)
)
@@ -1652,7 +1630,7 @@ def _extract_check_plugins(*, validate_creation_kwargs: bool) -> list[str]:
# will be silently dropped on most (all?) occasions.
if cmk.utils.debug.enabled():
raise MKGeneralException(exc) from exc
- errors.append(AUTO_MIGRATION_ERR_MSG % ("check plugin", check_plugin_name))
+ errors.append(AUTO_MIGRATION_ERR_MSG % ("check plug-in", check_plugin_name))
return errors
@@ -1675,7 +1653,7 @@ def compute_check_parameters(
host: HostName,
plugin_name: CheckPluginName,
item: Item,
- params: LegacyCheckParameters,
+ params: Mapping[str, object],
configured_parameters: TimespecificParameters | None = None,
) -> TimespecificParameters:
"""Compute parameters for a check honoring factory settings,
@@ -1690,51 +1668,11 @@ def compute_check_parameters(
matcher, host, plugin_name, check_plugin.check_ruleset_name, item
)
- return _update_with_configured_check_parameters(
- _update_with_default_check_parameters(check_plugin.check_default_parameters, params),
- configured_parameters,
- )
-
-
-def _update_with_default_check_parameters(
- check_default_parameters: ParametersTypeAlias | None,
- params: LegacyCheckParameters,
-) -> LegacyCheckParameters:
- if check_default_parameters is None:
- return params
-
- # Handle case where parameter is None but the type of the
- # default value is a dictionary. This is for example the
- # case if a check type has gotten parameters in a new version
- # but inventory of the old version left None as a parameter.
- # Also from now on we support that the inventory simply puts
- # None as a parameter. We convert that to an empty dictionary
- # that will be updated with the factory settings and default
- # levels, if possible.
- if params is None:
- params = {}
-
- if not isinstance(params, dict):
- # if discovered params is not updateable, it wins
- return params
-
- default_params = unwrap_parameters(check_default_parameters)
- if not isinstance(default_params, dict):
- # if default params are not updatetable, discovered params win
- return params
-
- # Merge params from inventory onto default parameters (if params is not updateable, it wins):
- return {**default_params, **params}
-
-
-def _update_with_configured_check_parameters(
- params: LegacyCheckParameters,
- configured_parameters: TimespecificParameters,
-) -> TimespecificParameters:
return TimespecificParameters(
[
*configured_parameters.entries,
TimespecificParameterSet.from_parameters(params),
+ TimespecificParameterSet.from_parameters(check_plugin.check_default_parameters or {}),
]
)
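
The rewrite above drops the explicit dict-merging helpers in favor of an ordered TimespecificParameters list. Assuming earlier entries take precedence, which is what the removed merge logic implemented (configured beats discovered beats plug-in defaults), the effective merge behaves like this minimal sketch with plain dicts:

    configured = {"levels": (80.0, 90.0)}
    discovered = {"levels": (70.0, 85.0), "average": 3}
    defaults = {"levels": (60.0, 80.0), "average": 1, "unit": "%"}

    effective: dict[str, object] = {}
    for layer in (configured, discovered, defaults):  # highest priority first
        for key, value in layer.items():
            effective.setdefault(key, value)  # keep the strongest layer's value

    assert effective == {"levels": (80.0, 90.0), "average": 3, "unit": "%"}
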
@@ -1760,7 +1698,7 @@ def _get_configured_parameters(
return TimespecificParameters(
[
# parameters configured via checkgroup_parameters
- TimespecificParameterSet.from_parameters(cast(LegacyCheckParameters, p))
+ TimespecificParameterSet.from_parameters(p)
for p in _get_checkgroup_parameters(matcher, host, str(ruleset_name), item, descr)
]
+ extra
@@ -1773,7 +1711,7 @@ def _get_checkgroup_parameters(
checkgroup: RulesetName,
item: Item,
descr: ServiceName,
-) -> Sequence[object]:
+) -> Sequence[Mapping[str, object]]:
rules = checkgroup_parameters.get(checkgroup)
if rules is None:
return []
@@ -1823,28 +1761,20 @@ def lookup_ip_address(
config_cache: ConfigCache,
host_name: HostName | HostAddress,
*,
- family: socket.AddressFamily | AddressFamily | None = None,
+ family: Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6] | None = None,
) -> HostAddress | None:
- if ConfigCache.address_family(host_name) is AddressFamily.NO_IP:
- # TODO(ml): [IPv6] Silently override the `family` parameter. Where
- # that is necessary, the callers are highly unlikely to handle IPv6
- # and DUAL_STACK correctly.
- return None
if family is None:
family = config_cache.default_address_family(host_name)
- if isinstance(family, socket.AddressFamily):
- family = AddressFamily.from_socket(family)
return ip_lookup.lookup_ip_address(
host_name=host_name,
family=family,
- # TODO(ml): [IPv6] What about dual stack?
- configured_ip_address=(ipaddresses if AddressFamily.IPv4 in family else ipv6addresses).get(
- host_name
- ),
+ configured_ip_address=(
+ ipaddresses if family is socket.AddressFamily.AF_INET else ipv6addresses
+ ).get(host_name),
simulation_mode=simulation_mode,
is_snmp_usewalk_host=(
config_cache.get_snmp_backend(host_name) is SNMPBackendEnum.STORED_WALK
- and config_cache.is_snmp_host(host_name)
+ and config_cache.computed_datasources(host_name).is_snmp
),
override_dns=HostAddress(fake_dns) if fake_dns is not None else None,
is_dyndns_host=config_cache.is_dyndns_host(host_name),
@@ -1852,6 +1782,70 @@ def lookup_ip_address(
)
+def _get_ssc_ip_family(
+ ip_family: Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6]
+) -> server_side_calls_api.IPAddressFamily:
+ match ip_family:
+ case socket.AddressFamily.AF_INET:
+ return server_side_calls_api.IPAddressFamily.IPV4
+ case socket.AddressFamily.AF_INET6:
+ return server_side_calls_api.IPAddressFamily.IPV6
+ case other:
+ assert_never(other)
+
+
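
The match/assert_never combination in _get_ssc_ip_family above is an exhaustiveness check: once every member is handled, the capture variable narrows to Never, so adding a member later fails type checking instead of slipping through at runtime. A standalone sketch, assuming Python 3.11's typing.assert_never and an invented enum:

    from enum import Enum
    from typing import assert_never


    class Proto(Enum):  # illustrative enum, not from the patch
        V4 = 4
        V6 = 6


    def label(p: Proto) -> str:
        match p:
            case Proto.V4:
                return "ipv4"
            case Proto.V6:
                return "ipv6"
            case other:
                assert_never(other)  # unreachable while all members are handled


    assert label(Proto.V6) == "ipv6"
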
+def get_resource_macros() -> Mapping[str, str]:
+ macros = {}
+ try:
+ for line in (cmk.utils.paths.omd_root / "etc/nagios/resource.cfg").open():
+ line = line.strip()
+ if not line or line[0] == "#":
+ continue
+ varname, value = line.split("=", 1)
+ macros[varname] = value
+ except Exception:
+ if cmk.utils.debug.enabled():
+ raise
+ return macros
+
+
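
get_resource_macros above expects resource.cfg lines of the form NAME=VALUE, skips blank lines and '#' comments, and splits only on the first '='. A quick sketch of that parsing against invented file content:

    sample = """\
    # Nagios resource file
    $USER1$=/omd/sites/mysite/lib/nagios/plugins
    $USER2$=key=value-with-equals
    """

    macros = {}
    for line in sample.splitlines():
        line = line.strip()
        if not line or line[0] == "#":
            continue
        varname, value = line.split("=", 1)  # split on the first '=' only
        macros[varname] = value

    assert macros["$USER2$"] == "key=value-with-equals"
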
+def get_ssc_host_config(
+ host_name: HostName,
+ config_cache: ConfigCache,
+ macros: Mapping[str, object],
+ ip_address_of: IPLookup,
+) -> server_side_calls_api.HostConfig:
+ """Translates our internal config into the HostConfig exposed to and expected by server_side_calls plugins."""
+ primary_family = config_cache.default_address_family(host_name)
+ ip_stack_config = config_cache.ip_stack_config(host_name)
+ additional_addresses_ipv4, additional_addresses_ipv6 = config_cache.additional_ipaddresses(
+ host_name
+ )
+
+ return server_side_calls_api.HostConfig(
+ name=host_name,
+ alias=config_cache.alias(host_name),
+ ipv4_config=(
+ server_side_calls_api.IPv4Config(
+ address=ip_address_of(host_name, socket.AddressFamily.AF_INET),
+ additional_addresses=additional_addresses_ipv4,
+ )
+ if ip_lookup.IPStackConfig.IPv4 in ip_stack_config
+ else None
+ ),
+ ipv6_config=(
+ server_side_calls_api.IPv6Config(
+ address=ip_address_of(host_name, socket.AddressFamily.AF_INET6),
+ additional_addresses=additional_addresses_ipv6,
+ )
+ if ip_lookup.IPStackConfig.IPv6 in ip_stack_config
+ else None
+ ),
+ primary_family=_get_ssc_ip_family(primary_family),
+ macros={k: str(v) for k, v in macros.items()},
+ )
+
+
# .
# .--Configuration Cache-------------------------------------------------.
# | ____ __ _ _ _ |
@@ -1894,32 +1888,30 @@ def __init__(self) -> None:
self.__explicit_host_attributes: dict[HostName, dict[str, str]] = {}
self.__computed_datasources: dict[HostName | HostAddress, ComputedDataSources] = {}
self.__discovery_check_parameters: dict[HostName, DiscoveryCheckParameters] = {}
- self.__active_checks: dict[HostName, list[tuple[str, Sequence[Any]]]] = {}
- self.__special_agents: dict[HostName, Sequence[tuple[str, Mapping[str, object]]]] = {}
+ self.__active_checks: dict[HostName, SSCRules] = {}
+ self.__special_agents: dict[HostName, SSCRules] = {}
self.__hostgroups: dict[HostName, Sequence[str]] = {}
self.__contactgroups: dict[HostName, Sequence[ContactgroupName]] = {}
self.__explicit_check_command: dict[HostName, HostCheckCommand] = {}
self.__snmp_fetch_interval: dict[tuple[HostName, SectionName], int | None] = {}
- self.__disabled_snmp_sections: dict[HostName, frozenset[SectionName]] = {}
self.__labels: dict[HostName, Labels] = {}
self.__label_sources: dict[HostName, LabelSources] = {}
- self.__notification_plugin_parameters: dict[
- tuple[HostName, CheckPluginNameStr], Mapping[str, object]
- ] = {}
+ self.__notification_plugin_parameters: dict[tuple[HostName, str], Mapping[str, object]] = {}
+ self.__snmp_backend: dict[HostName, SNMPBackendEnum] = {}
self.initialize()
def initialize(self) -> ConfigCache:
self.invalidate_host_config()
self._check_table_cache = cache_manager.obtain_cache("check_tables")
- self._cache_section_name_of: dict[CheckPluginNameStr, str] = {}
+ self._cache_section_name_of: dict[str, str] = {}
self._host_paths: dict[HostName, str] = ConfigCache._get_host_paths(host_paths)
self._hosttags: dict[HostName, Sequence[TagID]] = {}
self._autochecks_manager = AutochecksManager()
self._clusters_of_cache: dict[HostName, list[HostName]] = {}
- self._nodes_of_cache: dict[HostName, list[HostName]] = {}
+ self._nodes_cache: dict[HostName, list[HostName]] = {}
self._effective_host_cache: dict[tuple[HostName, ServiceName, tuple | None], HostName] = {}
self._check_mk_check_interval: dict[HostName, float] = {}
@@ -1939,110 +1931,43 @@ def initialize(self) -> ConfigCache:
self._discovered_labels_of_service,
),
clusters_of=self._clusters_of_cache,
- nodes_of=self._nodes_of_cache,
- all_configured_hosts=list(
- set(
- itertools.chain(
- self.hosts_config.hosts,
- self.hosts_config.clusters,
- self.hosts_config.shadow_hosts,
- )
- )
- ),
+ nodes_of=self._nodes_cache,
+ all_configured_hosts=list(set(self.hosts_config)),
)
self.ruleset_matcher.ruleset_optimizer.set_all_processed_hosts(
- set(
+ {
hn
for hn in set(self.hosts_config.hosts).union(self.hosts_config.clusters)
if self.is_active(hn) and self.is_online(hn)
- )
+ }
)
return self
- def make_ipmi_fetcher(self, host_name: HostName, ip_address: HostAddress) -> IPMIFetcher:
- ipmi_credentials = self.management_credentials(host_name, "ipmi")
- return IPMIFetcher(
- address=ip_address,
- username=ipmi_credentials.get("username"),
- password=ipmi_credentials.get("password"),
- )
+ def fetcher_factory(self) -> FetcherFactory:
+ return FetcherFactory(self, self.ruleset_matcher)
- def make_program_commandline(self, host_name: HostName, ip_address: HostAddress | None) -> str:
- """
- raise: LookupError if no datasource is configured.
- """
- return self.translate_commandline(
- host_name,
- ip_address,
- self.ruleset_matcher.get_host_values(host_name, datasource_programs)[0],
- )
+ def parser_factory(self) -> ParserFactory:
+ return ParserFactory(self, self.ruleset_matcher)
- def make_piggyback_fetcher(
- self, host_name: HostName, ip_address: HostAddress | None
- ) -> PiggybackFetcher:
- return PiggybackFetcher(
- hostname=host_name,
- address=ip_address,
+ def summary_config(self, host_name: HostName, source_id: str) -> SummaryConfig:
+ return SummaryConfig(
+ exit_spec=self.exit_code_spec(host_name, source_id),
time_settings=self.get_piggybacked_hosts_time_settings(piggybacked_hostname=host_name),
+ is_piggyback_host=self.is_piggyback_host(host_name),
)
- def make_snmp_fetcher(
- self,
- host_name: HostName,
- ip_address: HostAddress,
- *,
- on_scan_error: OnError,
- selected_sections: SectionNameCollection,
- snmp_config: SNMPHostConfig,
- ) -> SNMPFetcher:
- return SNMPFetcher(
- sections=self._make_snmp_sections(
- host_name,
- checking_sections=self.make_checking_sections(
- host_name, selected_sections=selected_sections
- ),
- ),
- on_error=on_scan_error,
- missing_sys_description=self._missing_sys_description(host_name),
- do_status_data_inventory=self.hwsw_inventory_parameters(
- host_name
- ).status_data_inventory,
- section_store_path=make_persisted_section_dir(
- host_name, fetcher_type=FetcherType.SNMP, ident="snmp"
- ),
- snmp_config=snmp_config,
+ def make_parent_scan_config(self, host_name: HostName) -> ParentScanConfig:
+ return ParentScanConfig(
+ active=self.is_active(host_name),
+ online=self.is_online(host_name),
+ ip_stack_config=ConfigCache.ip_stack_config(host_name),
+ parents=self.parents(host_name),
)
- def make_tcp_fetcher(self, host_name: HostName, ip_address: HostAddress) -> TCPFetcher:
- return TCPFetcher(
- host_name=host_name,
- address=(ip_address, self._agent_port(host_name)),
- family=self.default_address_family(host_name),
- timeout=self._tcp_connect_timeout(host_name),
- encryption_handling=self._encryption_handling(host_name),
- pre_shared_secret=self._symmetric_agent_encryption(host_name),
- )
-
- def make_agent_parser(
- self,
- host_name: HostName,
- section_store: SectionStore[Sequence[AgentRawDataSectionElem]],
- *,
- keep_outdated: bool,
- logger: logging.Logger,
- ) -> AgentParser:
- return AgentParser(
- host_name,
- section_store,
- keep_outdated=keep_outdated,
- check_interval=self.check_mk_check_interval(host_name),
- translation=get_piggyback_translations(self.ruleset_matcher, host_name),
- encoding_fallback=fallback_agent_output_encoding,
- simulation=agent_simulator, # name mismatch
- logger=logger,
- )
+ def datasource_programs(self, host_name: HostName) -> Sequence[str]:
+ return self.ruleset_matcher.get_host_values(host_name, datasource_programs)
def _discovered_labels_of_service(
self,
@@ -2066,8 +1991,8 @@ def get_tag_to_group_map() -> Mapping[TagID, TagGroupID]:
def ip_lookup_config(self, host_name: HostName) -> ip_lookup.IPLookupConfig:
return ip_lookup.IPLookupConfig(
hostname=host_name,
- address_family=ConfigCache.address_family(host_name),
- is_snmp_host=self.is_snmp_host(host_name),
+ ip_stack_config=ConfigCache.ip_stack_config(host_name),
+ is_snmp_host=self.computed_datasources(host_name).is_snmp,
snmp_backend=self.get_snmp_backend(host_name),
default_address_family=self.default_address_family(host_name),
management_address=self.management_address(host_name),
@@ -2075,7 +2000,12 @@ def ip_lookup_config(self, host_name: HostName) -> ip_lookup.IPLookupConfig:
)
def make_snmp_config(
- self, host_name: HostName, ip_address: HostAddress, source_type: SourceType
+ self,
+ host_name: HostName,
+ ip_address: HostAddress,
+ source_type: SourceType,
+ *,
+ backend_override: SNMPBackendEnum | None,
) -> SNMPHostConfig:
with contextlib.suppress(KeyError):
return self.__snmp_config[(host_name, ip_address, source_type)]
@@ -2091,28 +2021,47 @@ def _timeout_policy(
case _:
assert_never(policy)
- return self.__snmp_config.setdefault(
+ def _snmp_version(v2_enabled: bool, credentials: SNMPCredentials) -> SNMPVersion:
+ """Guess SNMP version from credentials :-("""
+ if isinstance(credentials, tuple):
+ return SNMPVersion.V3
+ if v2_enabled:
+ return SNMPVersion.V2C
+ return SNMPVersion.V1
+
+ credentials = (
+ self._snmp_credentials(host_name)
+ if source_type is SourceType.HOST
+ else self.management_credentials(host_name, "snmp")
+ )
+
+ snmp_config = self.__snmp_config.setdefault(
(host_name, ip_address, source_type),
SNMPHostConfig(
is_ipv6_primary=self.default_address_family(host_name) is socket.AF_INET6,
hostname=host_name,
ipaddress=ip_address,
- credentials=(
- self._snmp_credentials(host_name)
- if source_type is SourceType.HOST
- else self.management_credentials(host_name, "snmp")
- ),
+ credentials=credentials,
port=self._snmp_port(host_name),
- is_bulkwalk_host=(
+ snmp_version=_snmp_version(
self.ruleset_matcher.get_host_bool_value(
host_name,
- bulkwalk_hosts
- if source_type is SourceType.HOST
- else management_bulkwalk_hosts,
- )
+ # This is the ruleset "Enable SNMPv2c"
+ # (which enables SNMP version 2, implying the *possibility* of using bulkwalk).
+ # Very poor naming of the variable.
+ (
+ bulkwalk_hosts
+ if source_type is SourceType.HOST
+ else management_bulkwalk_hosts
+ ),
+ ),
+ credentials,
),
- is_snmpv2or3_without_bulkwalk_host=self.ruleset_matcher.get_host_bool_value(
- host_name, snmpv2c_hosts
+ bulkwalk_enabled=not self.ruleset_matcher.get_host_bool_value(
+ host_name,
+ # This is the ruleset "Disable bulk walks".
+ # Very poor naming of the variable.
+ snmpv2c_hosts,
),
bulk_walk_size_of=self._bulk_walk_size(host_name),
timing=self._snmp_timing(host_name),
@@ -2136,6 +2085,9 @@ def _timeout_policy(
snmp_backend=self.get_snmp_backend(host_name),
),
)
+ if backend_override:
+ return dataclasses.replace(snmp_config, snmp_backend=backend_override)
+ return snmp_config
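
The backend_override handling above uses dataclasses.replace() so the cached SNMPHostConfig stays untouched and only the returned copy carries the override. A minimal sketch of that copy-on-override pattern, with illustrative stand-in types:

    import dataclasses
    from enum import Enum


    class Backend(Enum):  # stand-in for SNMPBackendEnum
        CLASSIC = "classic"
        STORED_WALK = "stored-walk"


    @dataclasses.dataclass(frozen=True)
    class SnmpConfig:  # stand-in for SNMPHostConfig
        hostname: str
        backend: Backend


    cached = SnmpConfig("switch01", Backend.CLASSIC)
    overridden = dataclasses.replace(cached, backend=Backend.STORED_WALK)

    assert cached.backend is Backend.CLASSIC  # the cached entry is unchanged
    assert overridden.backend is Backend.STORED_WALK
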
def make_checking_sections(
self, hostname: HostName, *, selected_sections: SectionNameCollection
@@ -2173,10 +2125,10 @@ def invalidate_host_config(self) -> None:
self.__contactgroups.clear()
self.__explicit_check_command.clear()
self.__snmp_fetch_interval.clear()
- self.__disabled_snmp_sections.clear()
self.__labels.clear()
self.__label_sources.clear()
self.__notification_plugin_parameters.clear()
+ self.__snmp_backend.clear()
@staticmethod
def _get_host_paths(config_host_paths: dict[HostName, str]) -> dict[HostName, str]:
@@ -2254,14 +2206,15 @@ def configured_services(self, hostname: HostName) -> Sequence[ConfiguredService]
return resolved
- def enforced_services_table(
- self, hostname: HostName
- ) -> Mapping[ServiceID, tuple[RulesetName, ConfiguredService],]:
+ def enforced_services_table(self, hostname: HostName) -> Mapping[
+ ServiceID,
+ tuple[RulesetName, ConfiguredService],
+ ]:
"""Return a table of enforced services
Note: We need to reverse the order of the enforced services.
Users assume that earlier rules have precedence over later ones.
- Important if there are two rules for a host with the same combination of plugin name
+ Important if there are two rules for a host with the same combination of plug-in name
and item.
"""
with contextlib.suppress(KeyError):
@@ -2319,20 +2272,20 @@ def get_hwsw_inventory_parameters() -> HWSWInventoryParameters:
if host_name in self.hosts_config.clusters:
return HWSWInventoryParameters.from_raw({})
- # TODO: Use dict(self.active_checks).get("cmk_inv", [])?
- rules = active_checks.get("cmk_inv")
- if rules is None:
- return HWSWInventoryParameters.from_raw({})
-
# 'get_host_values' is already cached thus we can
# use it after every check cycle.
- entries = self.ruleset_matcher.get_host_values(host_name, rules)
-
- if not entries:
+ if not (
+ entries := self.ruleset_matcher.get_host_values(
+ host_name, active_checks.get("cmk_inv") or ()
+ )
+ ):
return HWSWInventoryParameters.from_raw({}) # No matching rule -> disable
# Convert legacy rules to current dict format (just like the valuespec)
- return HWSWInventoryParameters.from_raw({} if entries[0] is None else entries[0])
+ # we can only have None or a dict here, but mypy doesn't know that
+ return HWSWInventoryParameters.from_raw(
+ entries[0] if isinstance(entries[0], dict) else {}
+ )
with contextlib.suppress(KeyError):
return self.__hwsw_inventory_parameters[host_name]
@@ -2359,14 +2312,12 @@ def management_address(self, host_name: HostName) -> HostAddress | None:
@overload
def management_credentials(
self, host_name: HostName, protocol: Literal["snmp"]
- ) -> SNMPCredentials:
- ...
+ ) -> SNMPCredentials: ...
@overload
def management_credentials(
self, host_name: HostName, protocol: Literal["ipmi"]
- ) -> IPMICredentials:
- ...
+ ) -> IPMICredentials: ...
def management_credentials(
self, host_name: HostName, protocol: Literal["snmp", "ipmi"]
@@ -2474,12 +2425,6 @@ def computed_datasources(self, host_name: HostName | HostAddress) -> ComputedDat
host_name, cmk.utils.tags.compute_datasources(ConfigCache.tags(host_name))
)
- def is_tcp_host(self, host_name: HostName) -> bool:
- return self.computed_datasources(host_name).is_tcp
-
- def is_snmp_host(self, host_name: HostName | HostAddress) -> bool:
- return self.computed_datasources(host_name).is_snmp
-
def is_piggyback_host(self, host_name: HostName) -> bool:
def get_is_piggyback_host() -> bool:
tag_groups: Final = ConfigCache.tags(host_name)
@@ -2489,10 +2434,8 @@ def get_is_piggyback_host() -> bool:
return False
# for clusters with an auto-piggyback tag check if nodes have piggyback data
- if (
- host_name in self.hosts_config.clusters
- and (nodes := self.nodes_of(host_name)) is not None
- ):
+ nodes = self.nodes(host_name)
+ if nodes and host_name in self.hosts_config.clusters:
return any(self._has_piggyback_data(node) for node in nodes)
# Legacy automatic detection
@@ -2504,13 +2447,17 @@ def get_is_piggyback_host() -> bool:
return self.__is_piggyback_host.setdefault(host_name, get_is_piggyback_host())
def is_ping_host(self, host_name: HostName) -> bool:
+ cds = self.computed_datasources(host_name)
return not (
- self.is_snmp_host(host_name)
- or self.is_tcp_host(host_name)
+ cds.is_snmp
+ or cds.is_tcp
or self.is_piggyback_host(host_name)
or self.has_management_board(host_name)
)
+ def is_tcp(self, host_name: HostName) -> bool:
+ return self.computed_datasources(host_name).is_tcp
+
def _is_only_host(self, host_name: HostName) -> bool:
if only_hosts is None:
return True
@@ -2539,12 +2486,6 @@ def is_active(self, host_name: HostName) -> bool:
def is_dyndns_host(self, host_name: HostName | HostAddress) -> bool:
return self.ruleset_matcher.get_host_bool_value(host_name, dyndns_hosts)
- def is_all_agents_host(self, host_name: HostName) -> bool:
- return self.computed_datasources(host_name).is_all_agents_host
-
- def is_all_special_agents_host(self, host_name: HostName) -> bool:
- return self.computed_datasources(host_name).is_all_special_agents_host
-
def discovery_check_parameters(self, host_name: HostName) -> DiscoveryCheckParameters:
"""Compute the parameters for the discovery check for a host"""
@@ -2554,6 +2495,8 @@ def discovery_check_parameters(self, host_name: HostName) -> DiscoveryCheckParam
severity_new_services=int(inventory_check_severity),
severity_vanished_services=0,
severity_new_host_labels=1,
+ severity_changed_service_labels=0,
+ severity_changed_service_params=0,
# TODO: defaults are currently all over the place :-(
rediscovery={},
)
@@ -2577,6 +2520,13 @@ def make_discovery_check_parameters() -> DiscoveryCheckParameters:
check_interval=int(check_interval),
severity_new_services=int(entry["severity_unmonitored"]),
severity_vanished_services=int(entry["severity_vanished"]),
+ # TODO: should be changed via Transform & update-action of the periodic discovery rule
+ severity_changed_service_labels=int(
+ entry.get("severity_changed_service_labels", 0)
+ ),
+ severity_changed_service_params=int(
+ entry.get("severity_changed_service_params", 0)
+ ),
severity_new_host_labels=int(entry.get("severity_new_host_label", 1)),
rediscovery=entry.get("inventory_rediscovery", {}),
)
@@ -2603,15 +2553,15 @@ def custom_checks(self, host_name: HostName) -> Sequence[dict[Any, Any]]:
"""Return the free form configured custom checks without formalization"""
return self.ruleset_matcher.get_host_values(host_name, custom_checks)
- def active_checks(self, host_name: HostName) -> list[tuple[str, Sequence[Any]]]:
+ def active_checks(self, host_name: HostName) -> SSCRules:
"""Returns the list of active checks configured for this host
These are configured using the active check formalization of WATO
where the whole parameter set is configured using valuespecs.
"""
- def make_active_checks() -> list[tuple[str, Sequence[Any]]]:
- configured_checks: list[tuple[str, Sequence[Any]]] = []
+ def make_active_checks() -> SSCRules:
+ configured_checks: list[tuple[str, Sequence[object]]] = []
for plugin_name, ruleset in sorted(active_checks.items(), key=lambda x: x[0]):
# Skip Check_MK HW/SW Inventory for all ping hosts, even when the
# user has enabled the inventory for ping only hosts
@@ -2648,23 +2598,26 @@ def make_output(desc: str) -> str:
check_source=make_check_source(entry["service_description"]),
check_plugin_name="custom",
ruleset_name=None,
+ discovery_ruleset_name=None,
item=entry["service_description"],
- discovered_parameters=None,
- effective_parameters=None,
+ new_discovered_parameters={},
+ old_discovered_parameters={},
+ effective_parameters={},
description=entry["service_description"],
state=None,
output=make_output(entry["service_description"]),
metrics=[],
- labels={},
+ old_labels={},
+ new_labels={},
found_on_nodes=[host_name],
)
for entry in custom_checks_
}.values()
)
- def special_agents(self, host_name: HostName) -> Sequence[tuple[str, Mapping[str, object]]]:
- def special_agents_impl() -> Sequence[tuple[str, Mapping[str, object]]]:
- matched: list[tuple[str, Mapping[str, object]]] = []
+ def special_agents(self, host_name: HostName) -> SSCRules:
+ def special_agents_impl() -> SSCRules:
+ matched: list[tuple[str, Sequence[Mapping[str, object] | LegacySSCConfigModel]]] = []
# Prior to 1.5.0 it was not defined in which order the special agent
# rules overwrite each other. When multiple special agents were configured
# for a single host a "random" one was picked (depending on the iteration
@@ -2674,7 +2627,9 @@ def special_agents_impl() -> Sequence[tuple[str, Mapping[str, object]]]:
for agentname, ruleset in sorted(special_agents.items()):
params = self.ruleset_matcher.get_host_values(host_name, ruleset)
if params:
- matched.append((agentname, params[0]))
+ # This ruleset has match type "first", so pick the first entry.
+ # However, nest it in a list to get a consistent return type.
+ matched.append((agentname, [params[0]]))
return matched
with contextlib.suppress(KeyError):
@@ -2682,6 +2637,75 @@ def special_agents_impl() -> Sequence[tuple[str, Mapping[str, object]]]:
return self.__special_agents.setdefault(host_name, special_agents_impl())
+ def special_agent_command_lines(
+ self,
+ host_name: HostName,
+ ip_address: HostAddress | None,
+ passwords: Mapping[str, str],
+ password_store_file: Path,
+ ip_address_of: IPLookup,
+ ) -> Iterable[tuple[str, SpecialAgentCommandLine]]:
+ for agentname, params_seq in self.special_agents(host_name):
+ for params in params_seq:
+ host_attrs = self.get_host_attributes(host_name, ip_address_of)
+ macros = {
+ "": ip_address or "",
+ "": host_name,
+ **self.get_host_macros_from_attributes(host_name, host_attrs),
+ }
+ special_agent = SpecialAgent(
+ load_special_agents()[1],
+ special_agent_info,
+ host_name,
+ ip_address,
+ get_ssc_host_config(host_name, self, macros, ip_address_of),
+ host_attrs,
+ http_proxies,
+ passwords,
+ password_store_file,
+ )
+ for agent_data in special_agent.iter_special_agent_commands(agentname, params):
+ yield agentname, agent_data
+
+ def collect_passwords(self) -> Mapping[str, str]:
+ # Consider making the hosts an argument; sometimes we only need one.
+ all_active_hosts = {
+ hn
+ for hn in itertools.chain(self.hosts_config.hosts, self.hosts_config.clusters)
+ if self.is_active(hn) and self.is_online(hn)
+ }
+
+ def _filter_newstyle_ssc_rule(
+ unfiltered: Sequence[Mapping[str, object] | LegacySSCConfigModel]
+ ) -> Sequence[Mapping[str, object]]:
+ return [
+ r for r in unfiltered if isinstance(r, dict) and all(isinstance(k, str) for k in r)
+ ]
+
+ def _compose_filtered_ssc_rules(
+ rules: SSCRules,
+ ) -> Sequence[tuple[str, Sequence[Mapping[str, object]]]]:
+ return [(name, _filter_newstyle_ssc_rule(unfiltered)) for name, unfiltered in rules]
+
+ def _gather_secrets_from(
+ rules_function: Callable[[HostName], SSCRules]
+ ) -> Mapping[str, str]:
+ return {
+ id_: secret
+ for host in all_active_hosts
+ for id_, secret in (
+ PreprocessingResult.from_config(
+ _compose_filtered_ssc_rules(rules_function(host))
+ )
+ ).ad_hoc_secrets.items()
+ }
+
+ return {
+ **password_store.load(password_store.password_store_path()),
+ **_gather_secrets_from(self.active_checks),
+ **_gather_secrets_from(self.special_agents),
+ }
+
def hostgroups(self, host_name: HostName) -> Sequence[str]:
"""Returns the list of hostgroups of this host
@@ -2757,11 +2781,11 @@ def host_check_command(
explicit_command = self.explicit_check_command(host_name)
if explicit_command is not None:
return explicit_command
- if ConfigCache.address_family(host_name) is AddressFamily.NO_IP:
+ if ConfigCache.ip_stack_config(host_name) is IPStackConfig.NO_IP:
return "ok"
return default_host_check_command
- def _missing_sys_description(self, host_name: HostName) -> bool:
+ def missing_sys_description(self, host_name: HostName) -> bool:
return self.ruleset_matcher.get_host_bool_value(host_name, snmp_without_sys_descr)
def snmp_fetch_interval(self, host_name: HostName, section_name: SectionName) -> int | None:
@@ -2795,45 +2819,6 @@ def snmp_fetch_interval_impl() -> int | None:
(host_name, section_name), snmp_fetch_interval_impl()
)
- def disabled_snmp_sections(self, host_name: HostName) -> frozenset[SectionName]:
- def disabled_snmp_sections_impl() -> frozenset[SectionName]:
- """Return a set of disabled snmp sections"""
- rules = self.ruleset_matcher.get_host_values(host_name, snmp_exclude_sections)
- merged_section_settings = {"if64adm": True}
- for rule in reversed(rules):
- for section in rule.get("sections_enabled", ()):
- merged_section_settings[section] = False
- for section in rule.get("sections_disabled", ()):
- merged_section_settings[section] = True
-
- return frozenset(
- SectionName(name)
- for name, is_disabled in merged_section_settings.items()
- if is_disabled
- )
-
- with contextlib.suppress(KeyError):
- return self.__disabled_snmp_sections[host_name]
-
- return self.__disabled_snmp_sections.setdefault(host_name, disabled_snmp_sections_impl())
-
- def _make_snmp_sections(
- self,
- host_name: HostName,
- *,
- checking_sections: frozenset[SectionName],
- ) -> dict[SectionName, SNMPSectionMeta]:
- disabled_sections = self.disabled_snmp_sections(host_name)
- return {
- name: SNMPSectionMeta(
- checking=name in checking_sections,
- disabled=name in disabled_sections,
- redetect=name in checking_sections and agent_based_register.needs_redetection(name),
- fetch_interval=self.snmp_fetch_interval(host_name, name),
- )
- for name in (checking_sections | disabled_sections)
- }
-
def _collect_hosttags(self, tag_to_group_map: Mapping[TagID, TagGroupID]) -> None:
"""Calculate the effective tags for all configured hosts
@@ -2934,7 +2919,9 @@ def checkmk_check_parameters(self, host_name: HostName) -> CheckmkCheckParameter
return CheckmkCheckParameters(enabled=not self.is_ping_host(host_name))
def notification_plugin_parameters(
- self, host_name: HostName, plugin_name: CheckPluginNameStr
+ self,
+ host_name: HostName,
+ plugin_name: str,
) -> Mapping[str, object]:
def _impl() -> Mapping[str, object]:
default: Sequence[RuleSpec[Mapping[str, object]]] = []
@@ -2965,9 +2952,11 @@ def label_sources(self, host_name: HostName) -> LabelSources:
def max_cachefile_age(self, hostname: HostName) -> MaxAge:
check_interval = self.check_mk_check_interval(hostname)
return MaxAge(
- checking=check_max_cachefile_age
- if self.nodes_of(hostname) is None
- else cluster_max_cachefile_age,
+ checking=(
+ cluster_max_cachefile_age
+ if hostname in self.hosts_config.clusters
+ else check_max_cachefile_age
+ ),
discovery=1.5 * check_interval,
inventory=1.5 * check_interval,
)
@@ -3019,6 +3008,18 @@ def service_level(self, hostname: HostName) -> int | None:
entries = self.ruleset_matcher.get_host_values(hostname, host_service_levels)
return entries[0] if entries else None
+ def effective_service_level(
+ self,
+ host: HostName,
+ service_name: ServiceName,
+ ) -> int:
+ """Get the service level that applies to the current service."""
+ service_level = self.service_level_of_service(host, service_name)
+ if service_level is not None:
+ return service_level
+
+ return self.service_level(host) or 0
+
def _snmp_credentials(self, host_name: HostName | HostAddress) -> SNMPCredentials:
"""Determine SNMP credentials for a specific host
@@ -3049,6 +3050,14 @@ def _is_inline_backend_supported() -> bool:
return "netsnmp" in sys.modules and cmk_version.edition() is not cmk_version.Edition.CRE
def get_snmp_backend(self, host_name: HostName | HostAddress) -> SNMPBackendEnum:
+ if result := self.__snmp_backend.get(host_name):
+ return result
+
+ computed_backend = self._get_snmp_backend(host_name)
+ self.__snmp_backend[host_name] = computed_backend
+ return computed_backend
+
+ def _get_snmp_backend(self, host_name: HostName | HostAddress) -> SNMPBackendEnum:
if self.ruleset_matcher.get_host_bool_value(host_name, usewalk_hosts):
return SNMPBackendEnum.STORED_WALK
@@ -3065,14 +3074,12 @@ def get_snmp_backend(self, host_name: HostName | HostAddress) -> SNMPBackendEnum
return SNMPBackendEnum.CLASSIC
raise MKGeneralException(f"Bad Host SNMP Backend configuration: {host_backend}")
- # TODO(sk): remove this when netsnmp is fixed
- # NOTE: Force usage of CLASSIC with SNMP-v1 to prevent memory leak in the netsnmp
- if self._is_host_snmp_v1(host_name):
- return SNMPBackendEnum.CLASSIC
-
if with_inline_snmp and snmp_backend_default == "inline":
return SNMPBackendEnum.INLINE
-
+ if snmp_backend_default == "classic":
+ return SNMPBackendEnum.CLASSIC
+ # Note: in the analogous case above we raise on bad values, but here
+ # we fall back to CLASSIC; I am not sure if this difference is intentional.
return SNMPBackendEnum.CLASSIC
def snmp_credentials_of_version(
@@ -3125,19 +3132,19 @@ def check_mk_check_interval(self, hostname: HostName) -> float:
return self._check_mk_check_interval[hostname]
@staticmethod
- def address_family(host_name: HostName | HostAddress) -> AddressFamily:
+ def ip_stack_config(host_name: HostName | HostAddress) -> IPStackConfig:
# TODO(ml): [IPv6] clarify tag_groups vs tag_groups["address_family"]
tag_groups = ConfigCache.tags(host_name)
if (
TagGroupID("no-ip") in tag_groups
or TagID("no-ip") == tag_groups[TagGroupID("address_family")]
):
- return AddressFamily.NO_IP
+ return IPStackConfig.NO_IP
if (
TagGroupID("ip-v4v6") in tag_groups
or TagID("ip-v4v6") == tag_groups[TagGroupID("address_family")]
):
- return AddressFamily.DUAL_STACK
+ return IPStackConfig.DUAL_STACK
if (
TagGroupID("ip-v6") in tag_groups
or TagID("ip-v6") == tag_groups[TagGroupID("address_family")]
@@ -3145,28 +3152,34 @@ def address_family(host_name: HostName | HostAddress) -> AddressFamily:
TagGroupID("ip-v4") in tag_groups
or TagID("ip-v4") == tag_groups[TagGroupID("address_family")]
):
- return AddressFamily.DUAL_STACK
+ return IPStackConfig.DUAL_STACK
if (
TagGroupID("ip-v6") in tag_groups
or TagGroupID("ip-v6-only") in tag_groups
or tag_groups[TagGroupID("address_family")] in {TagID("ip-v6"), TagID("ip-v6-only")}
):
- return AddressFamily.IPv6
- return AddressFamily.IPv4
+ return IPStackConfig.IPv6
+ return IPStackConfig.IPv4
- def default_address_family(self, hostname: HostName | HostAddress) -> socket.AddressFamily:
+ def default_address_family(
+ self, hostname: HostName | HostAddress
+ ) -> Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6]:
def primary_ip_address_family_of() -> socket.AddressFamily:
rules = self.ruleset_matcher.get_host_values(hostname, primary_address_family)
- return socket.AF_INET6 if rules and rules[0] == "ipv6" else socket.AF_INET
+ return (
+ socket.AddressFamily.AF_INET6
+ if rules and rules[0] == "ipv6"
+ else socket.AddressFamily.AF_INET
+ )
def is_ipv6_primary() -> bool:
# Whether or not the given host is configured to be monitored primarily via IPv6
- return ConfigCache.address_family(hostname) is AddressFamily.IPv6 or (
- ConfigCache.address_family(hostname) is AddressFamily.DUAL_STACK
+ return ConfigCache.ip_stack_config(hostname) is IPStackConfig.IPv6 or (
+ ConfigCache.ip_stack_config(hostname) is IPStackConfig.DUAL_STACK
and primary_ip_address_family_of() is socket.AF_INET6
)
- return socket.AF_INET6 if is_ipv6_primary() else socket.AF_INET
+ return socket.AddressFamily.AF_INET6 if is_ipv6_primary() else socket.AddressFamily.AF_INET
def _has_piggyback_data(self, host_name: HostName) -> bool:
time_settings: list[tuple[str | None, str, int]] = self._piggybacked_host_files(host_name)
@@ -3176,7 +3189,10 @@ def _has_piggyback_data(self, host_name: HostName) -> bool:
return True
return make_persisted_section_dir(
- fetcher_type=FetcherType.PIGGYBACK, host_name=host_name, ident="piggyback"
+ fetcher_type=FetcherType.PIGGYBACK,
+ host_name=host_name,
+ ident="piggyback",
+ section_cache_path=Path(cmk.utils.paths.var_dir),
).exists()
def _piggybacked_host_files(self, host_name: HostName) -> list[tuple[str | None, str, int]]:
@@ -3283,7 +3299,7 @@ def icons_and_actions_of_service(
hostname: HostName,
description: ServiceName,
check_plugin_name: CheckPluginName | None,
- params: LegacyCheckParameters | TimespecificParameters,
+ params: TimespecificParameters | None,
) -> list[str]:
actions = set(
self.ruleset_matcher.service_extra_conf(
@@ -3291,6 +3307,12 @@ def icons_and_actions_of_service(
)
)
+ # Note: according to the typing, the rest of this function can
+ # never do anything, which means the 'icon' parameters of the
+ # 'ps' and 'services' rulesets have no effect.
+ # This seems to have last worked in 2.0.0.
+ # CMK-16562
+
# Some WATO rules might register icons on their own
if check_plugin_name:
plugin = agent_based_register.get_check_plugin(check_plugin_name)
@@ -3386,7 +3408,7 @@ def get_autochecks_of(self, hostname: HostName) -> Sequence[ConfiguredService]:
self.effective_host,
)
- def section_name_of(self, section: CheckPluginNameStr) -> str:
+ def section_name_of(self, section: str) -> str:
try:
return self._cache_section_name_of[section]
except KeyError:
@@ -3401,7 +3423,11 @@ def _get_tag_attributes(
) -> ObjectAttributes:
return {f"__{prefix}_{k}": str(v) for k, v in collection.items()}
- def get_host_attributes(self, hostname: HostName) -> ObjectAttributes:
+ def get_host_attributes(
+ self,
+ hostname: HostName,
+ ip_address_of: IPLookup,
+ ) -> ObjectAttributes:
def _set_addresses(
attrs: ObjectAttributes,
addresses: list[HostAddress] | None,
@@ -3430,20 +3456,20 @@ def _set_addresses(
if "alias" not in attrs:
attrs["alias"] = self.alias(hostname)
- family = ConfigCache.address_family(hostname)
+ ip_stack_config = ConfigCache.ip_stack_config(hostname)
# Now lookup configured IP addresses
v4address: str | None = None
- if AddressFamily.IPv4 in family:
- v4address = ip_address_of(self, hostname, socket.AF_INET)
+ if IPStackConfig.IPv4 in ip_stack_config:
+ v4address = ip_address_of(hostname, socket.AddressFamily.AF_INET)
if v4address is None:
v4address = ""
attrs["_ADDRESS_4"] = v4address
v6address: str | None = None
- if AddressFamily.IPv6 in family:
- v6address = ip_address_of(self, hostname, socket.AF_INET6)
+ if IPStackConfig.IPv6 in ip_stack_config:
+ v6address = ip_address_of(hostname, socket.AddressFamily.AF_INET6)
if v6address is None:
v6address = ""
attrs["_ADDRESS_6"] = v6address
@@ -3472,7 +3498,7 @@ def _set_addresses(
if cmk_version.edition() is cmk_version.Edition.CME:
attrs[
"_CUSTOMER"
- ] = current_customer # type: ignore[name-defined] # pylint: disable=undefined-variable
+ ] = current_customer # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
return attrs
@@ -3480,27 +3506,31 @@ def get_cluster_attributes(
self,
hostname: HostName,
nodes: Sequence[HostName],
+ ip_address_of: IPLookup,
) -> dict:
sorted_nodes = sorted(nodes)
attrs = {
"_NODENAMES": " ".join(sorted_nodes),
}
+ ip_stack_config = ConfigCache.ip_stack_config(hostname)
node_ips_4 = []
- if AddressFamily.IPv4 in ConfigCache.address_family(hostname):
- family = socket.AF_INET
+ if IPStackConfig.IPv4 in ip_stack_config:
+ family: Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6] = (
+ socket.AddressFamily.AF_INET
+ )
for h in sorted_nodes:
- addr = ip_address_of(self, h, family)
+ addr = ip_address_of(h, family)
if addr is not None:
node_ips_4.append(addr)
else:
node_ips_4.append(ip_lookup.fallback_ip_for(family))
node_ips_6 = []
- if AddressFamily.IPv6 in ConfigCache.address_family(hostname):
- family = socket.AF_INET6
+ if IPStackConfig.IPv6 in ip_stack_config:
+ family = socket.AddressFamily.AF_INET6
for h in sorted_nodes:
- addr = ip_address_of(self, h, family)
+ addr = ip_address_of(h, family)
if addr is not None:
node_ips_6.append(addr)
else:
@@ -3516,9 +3546,9 @@ def get_cluster_attributes(
return attrs
def get_cluster_nodes_for_config(self, host_name: HostName) -> Sequence[HostName]:
- nodes = self.nodes_of(host_name)
- if nodes is None:
- return []
+ nodes = self.nodes(host_name)
+ if not nodes:
+ return ()
self._verify_cluster_address_family(host_name, nodes)
self._verify_cluster_datasource(host_name, nodes)
@@ -3636,9 +3666,10 @@ def translate_commandline(
host_name: HostName,
ip_address: HostAddress | None,
template: str,
+ ip_address_of: IPLookup,
) -> str:
def _translate_host_macros(cmd: str) -> str:
- attrs = self.get_host_attributes(host_name)
+ attrs = self.get_host_attributes(host_name, ip_address_of)
if host_name in self.hosts_config.clusters:
# TODO(ml): What is the difference between this and `self.parents()`?
parents_list = self.get_cluster_nodes_for_config(host_name)
@@ -3647,6 +3678,7 @@ def _translate_host_macros(cmd: str) -> str:
self.get_cluster_attributes(
host_name,
parents_list,
+ ip_address_of,
)
)
@@ -3689,25 +3721,24 @@ def _setup_clusters_nodes_cache(self) -> None:
clustername = HostName(cluster.split("|", 1)[0])
for name in hosts:
self._clusters_of_cache.setdefault(name, []).append(clustername)
- self._nodes_of_cache[clustername] = hosts
+ self._nodes_cache[clustername] = hosts
def get_cluster_cache_info(self) -> ClusterCacheInfo:
- return ClusterCacheInfo(self._clusters_of_cache, self._nodes_of_cache)
+ return ClusterCacheInfo(clusters_of=self._clusters_of_cache, nodes_of=self._nodes_cache)
- def clusters_of(self, hostname: HostName) -> list[HostName]:
+ def clusters_of(self, hostname: HostName) -> Sequence[HostName]:
"""Returns names of cluster hosts the host is a node of"""
- return self._clusters_of_cache.get(hostname, [])
+ return self._clusters_of_cache.get(hostname, ())
- # TODO: cleanup None case
- def nodes_of(self, hostname: HostName) -> Sequence[HostName] | None:
- """Returns the nodes of a cluster. Returns None if no match."""
- return self._nodes_of_cache.get(hostname)
+ def nodes(self, hostname: HostName) -> Sequence[HostName]:
+ """Returns the nodes of a cluster. Returns () if no match."""
+ return self._nodes_cache.get(hostname, ())
def effective_host(
self,
node_name: HostName,
servicedesc: str,
- part_of_clusters: list[HostName] | None = None,
+ part_of_clusters: Sequence[HostName] = (),
) -> HostName:
"""Compute the effective host (node or cluster) of a service
@@ -3732,7 +3763,7 @@ def _effective_host(
self,
node_name: HostName,
servicedesc: str,
- part_of_clusters: list[HostName] | None = None,
+ part_of_clusters: Sequence[HostName],
) -> HostName:
if part_of_clusters:
the_clusters = part_of_clusters
@@ -3752,12 +3783,11 @@ def _effective_host(
# 1. New style: explicitly assigned services
for cluster, conf in clustered_services_of.items():
- nodes = self.nodes_of(cluster)
- if not nodes:
+ if cluster not in self.hosts_config.clusters:
raise MKGeneralException(
f"Invalid entry clustered_services_of['{cluster}']: {cluster} is not a cluster."
)
- if node_name in nodes and self.ruleset_matcher.get_service_bool_value(
+ if node_name in self.nodes(cluster) and self.ruleset_matcher.get_service_bool_value(
node_name, servicedesc, conf
):
return cluster
@@ -3821,33 +3851,6 @@ def service_discovery_name() -> ServiceName:
return "Check_MK Discovery"
return "Check_MK inventory"
- def _agent_port(self, host_name: HostName) -> int:
- ports = self.ruleset_matcher.get_host_values(host_name, agent_ports)
- return ports[0] if ports else agent_port
-
- def _tcp_connect_timeout(self, host_name: HostName) -> float:
- timeouts = self.ruleset_matcher.get_host_values(host_name, tcp_connect_timeouts)
- return timeouts[0] if timeouts else tcp_connect_timeout
-
- def _encryption_handling(self, host_name: HostName) -> TCPEncryptionHandling:
- if not (settings := self.ruleset_matcher.get_host_values(host_name, encryption_handling)):
- return TCPEncryptionHandling.ANY_AND_PLAIN
- match settings[0]["accept"]:
- case "tls_encrypted_only":
- return TCPEncryptionHandling.TLS_ENCRYPTED_ONLY
- case "any_encrypted":
- return TCPEncryptionHandling.ANY_ENCRYPTED
- case "any_and_plain":
- return TCPEncryptionHandling.ANY_AND_PLAIN
- raise ValueError("Unknown setting: %r" % settings[0])
-
- def _symmetric_agent_encryption(self, host_name: HostName) -> str | None:
- return (
- settings[0]
- if (settings := self.ruleset_matcher.get_host_values(host_name, agent_encryption))
- else None
- )
-
def agent_exclude_sections(self, host_name: HostName) -> dict[str, str]:
settings = self.ruleset_matcher.get_host_values(host_name, agent_exclude_sections)
return settings[0] if settings else {}
@@ -3948,7 +3951,7 @@ class _Matchtype(Enum):
}
-def _boil_down_agent_rules(
+def boil_down_agent_rules(
*, defaults: Mapping[str, Any], rulesets: Mapping[str, Any]
) -> Mapping[str, Any]:
boiled_down = {**defaults}
@@ -3959,7 +3962,16 @@ def _boil_down_agent_rules(
if not entries:
continue
- match_type = _BAKERY_PLUGINS_WITH_SPECIAL_MATCHTYPES.get(varname, _Matchtype.FIRST)
+ if (
+ len(entries) > 0
+ and isinstance(first_entry := entries[0], dict)
+ and (cmk_match_type := first_entry.get("cmk-match-type", None)) is not None
+ ):
+ # new Ruleset API will use merge as default match_type
+ match_type = _Matchtype(cmk_match_type)
+ else:
+ match_type = _BAKERY_PLUGINS_WITH_SPECIAL_MATCHTYPES.get(varname, _Matchtype.FIRST)
+
if match_type is _Matchtype.FIRST:
boiled_down[varname] = entries[0]
elif match_type is _Matchtype.LIST:
@@ -3969,7 +3981,12 @@ def _boil_down_agent_rules(
# Compare #14868
boiled_down[varname] = {
**defaults.get(varname, {}),
- **{k: v for entry in entries[::-1] for k, v in entry.items()},
+ **{
+ k: v
+ for entry in entries[::-1]
+ for k, v in entry.items()
+ if k != "cmk-match-type"
+ },
}
elif match_type is _Matchtype.ALL:
boiled_down[varname] = entries
@@ -3979,9 +3996,233 @@ def _boil_down_agent_rules(
return boiled_down
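
The new cmk-match-type branch above lets a rule set carry its match type inline. For the "dict" case, entries are ordered most specific first, merged over the defaults with the most specific entry winning, and the marker key stripped from the result. A worked example of that merge, with invented rule values:

    defaults = {"interval": 60}
    entries = [
        {"cmk-match-type": "dict", "interval": 30},  # most specific rule first
        {"cmk-match-type": "dict", "timeout": 10},   # more general rule
    ]

    merged = {
        **defaults,
        **{
            k: v
            for entry in entries[::-1]  # apply most specific last, so it wins
            for k, v in entry.items()
            if k != "cmk-match-type"
        },
    }

    assert merged == {"interval": 30, "timeout": 10}
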
+class ParserFactory:
+ # TODO: better and clearer separation between ConfigCache and this class.
+ def __init__(self, config_cache: ConfigCache, ruleset_matcher_: RulesetMatcher) -> None:
+ self._config_cache: Final = config_cache
+ self._ruleset_matcher: Final = ruleset_matcher_
+
+ def make_agent_parser(
+ self,
+ host_name: HostName,
+ section_store: SectionStore[Sequence[AgentRawDataSectionElem]],
+ *,
+ keep_outdated: bool,
+ logger: logging.Logger,
+ ) -> AgentParser:
+ return AgentParser(
+ host_name,
+ section_store,
+ keep_outdated=keep_outdated,
+ check_interval=self._config_cache.check_mk_check_interval(host_name),
+ translation=get_piggyback_translations(self._ruleset_matcher, host_name),
+ encoding_fallback=fallback_agent_output_encoding,
+ logger=logger,
+ )
+
+ def make_snmp_parser(
+ self,
+ host_name: HostName,
+ section_store: SectionStore[SNMPRawDataElem],
+ *,
+ keep_outdated: bool,
+ logger: logging.Logger,
+ checking_sections: Iterable[SectionName],
+ ) -> SNMPParser:
+ return SNMPParser(
+ host_name,
+ section_store,
+ check_intervals={
+ section_name: self._config_cache.snmp_fetch_interval(host_name, section_name)
+ for section_name in checking_sections
+ },
+ keep_outdated=keep_outdated,
+ logger=logger,
+ )
+
+
+class FetcherFactory:
+ # TODO: better and clearer separation between ConfigCache and this class.
+ def __init__(self, config_cache: ConfigCache, ruleset_matcher_: RulesetMatcher) -> None:
+ self._config_cache: Final = config_cache
+ self._ruleset_matcher: Final = ruleset_matcher_
+ self.__disabled_snmp_sections: dict[HostName, frozenset[SectionName]] = {}
+
+ def clear(self) -> None:
+ self.__disabled_snmp_sections.clear()
+
+ def _disabled_snmp_sections(self, host_name: HostName) -> frozenset[SectionName]:
+ def disabled_snmp_sections_impl() -> frozenset[SectionName]:
+ """Return a set of disabled snmp sections"""
+ rules = self._ruleset_matcher.get_host_values(host_name, snmp_exclude_sections)
+ merged_section_settings = {"if64adm": True}
+ for rule in reversed(rules):
+ for section in rule.get("sections_enabled", ()):
+ merged_section_settings[section] = False
+ for section in rule.get("sections_disabled", ()):
+ merged_section_settings[section] = True
+
+ return frozenset(
+ SectionName(name)
+ for name, is_disabled in merged_section_settings.items()
+ if is_disabled
+ )
+
+ with contextlib.suppress(KeyError):
+ return self.__disabled_snmp_sections[host_name]
+
+ return self.__disabled_snmp_sections.setdefault(host_name, disabled_snmp_sections_impl())
+
+ def _make_snmp_sections(
+ self,
+ host_name: HostName,
+ *,
+ checking_sections: frozenset[SectionName],
+ ) -> dict[SectionName, SNMPSectionMeta]:
+ disabled_sections = self._disabled_snmp_sections(host_name)
+ return {
+ name: SNMPSectionMeta(
+ checking=name in checking_sections,
+ disabled=name in disabled_sections,
+ redetect=name in checking_sections and agent_based_register.needs_redetection(name),
+ fetch_interval=self._config_cache.snmp_fetch_interval(host_name, name),
+ )
+ for name in (checking_sections | disabled_sections)
+ }
+
+ def make_snmp_fetcher(
+ self,
+ host_name: HostName,
+ ip_address: HostAddress,
+ *,
+ source_type: SourceType,
+ fetcher_config: SNMPFetcherConfig,
+ ) -> SNMPFetcher:
+ snmp_config = self._config_cache.make_snmp_config(
+ host_name,
+ ip_address,
+ source_type,
+ backend_override=fetcher_config.backend_override,
+ )
+ return SNMPFetcher(
+ sections=self._make_snmp_sections(
+ host_name,
+ checking_sections=self._config_cache.make_checking_sections(
+ host_name, selected_sections=fetcher_config.selected_sections
+ ),
+ ),
+ scan_config=fetcher_config.scan_config,
+ do_status_data_inventory=self._config_cache.hwsw_inventory_parameters(
+ host_name
+ ).status_data_inventory,
+ section_store_path=make_persisted_section_dir(
+ host_name,
+ fetcher_type=FetcherType.SNMP,
+ ident="snmp",
+ section_cache_path=Path(cmk.utils.paths.var_dir),
+ ),
+ snmp_config=snmp_config,
+ stored_walk_path=fetcher_config.stored_walk_path,
+ walk_cache_path=fetcher_config.walk_cache_path,
+ )
+
+ def _agent_port(self, host_name: HostName) -> int:
+ ports = self._ruleset_matcher.get_host_values(host_name, agent_ports)
+ return ports[0] if ports else agent_port
+
+ def _tcp_connect_timeout(self, host_name: HostName) -> float:
+ timeouts = self._ruleset_matcher.get_host_values(host_name, tcp_connect_timeouts)
+ return timeouts[0] if timeouts else tcp_connect_timeout
+
+ def _encryption_handling(self, host_name: HostName) -> TCPEncryptionHandling:
+ if not (settings := self._ruleset_matcher.get_host_values(host_name, encryption_handling)):
+ return TCPEncryptionHandling.ANY_AND_PLAIN
+ match settings[0]["accept"]:
+ case "tls_encrypted_only":
+ return TCPEncryptionHandling.TLS_ENCRYPTED_ONLY
+ case "any_encrypted":
+ return TCPEncryptionHandling.ANY_ENCRYPTED
+ case "any_and_plain":
+ return TCPEncryptionHandling.ANY_AND_PLAIN
+ raise ValueError("Unknown setting: %r" % settings[0])
+
+ def _symmetric_agent_encryption(self, host_name: HostName) -> str | None:
+ return (
+ settings[0]
+ if (settings := self._ruleset_matcher.get_host_values(host_name, agent_encryption))
+ else None
+ )
+
+ def make_tcp_fetcher(
+ self,
+ host_name: HostName,
+ ip_address: HostAddress,
+ *,
+ tls_config: TLSConfig,
+ ) -> TCPFetcher:
+ return TCPFetcher(
+ host_name=host_name,
+ address=(ip_address, self._agent_port(host_name)),
+ family=self._config_cache.default_address_family(host_name),
+ timeout=self._tcp_connect_timeout(host_name),
+ encryption_handling=self._encryption_handling(host_name),
+ pre_shared_secret=self._symmetric_agent_encryption(host_name),
+ tls_config=tls_config,
+ )
+
+ def make_ipmi_fetcher(self, host_name: HostName, ip_address: HostAddress) -> IPMIFetcher:
+ ipmi_credentials = self._config_cache.management_credentials(host_name, "ipmi")
+ return IPMIFetcher(
+ address=ip_address,
+ username=ipmi_credentials.get("username"),
+ password=ipmi_credentials.get("password"),
+ )
+
+ def _make_program_commandline(
+ self,
+ host_name: HostName,
+ ip_address: HostAddress | None,
+ ip_address_of: IPLookup,
+ program: str,
+ ) -> str:
+ return self._config_cache.translate_commandline(
+ host_name, ip_address, program, ip_address_of
+ )
+
+ def make_program_fetcher(
+ self,
+ host_name: HostName,
+ ip_address: HostAddress | None,
+ *,
+ program: str,
+ stdin: str | None,
+ ) -> ProgramFetcher:
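+ # Expand host macros in the command line, delegating failed IP
+ # lookups to handle_ip_lookup_failure.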
+ cmdline = self._make_program_commandline(
+ host_name,
+ ip_address,
+ ConfiguredIPLookup(self._config_cache, error_handler=handle_ip_lookup_failure),
+ program,
+ )
+ return ProgramFetcher(cmdline=cmdline, stdin=stdin, is_cmc=is_cmc())
+
+ def make_special_agent_fetcher(self, *, cmdline: str, stdin: str | None) -> ProgramFetcher:
+ return ProgramFetcher(cmdline=cmdline, stdin=stdin, is_cmc=is_cmc())
+
+ def make_piggyback_fetcher(
+ self, host_name: HostName, ip_address: HostAddress | None
+ ) -> PiggybackFetcher:
+ return PiggybackFetcher(
+ hostname=host_name,
+ address=ip_address,
+ time_settings=self._config_cache.get_piggybacked_hosts_time_settings(
+ piggybacked_hostname=host_name
+ ),
+ )
+
+
class CEEConfigCache(ConfigCache):
def __init__(self) -> None:
- self.__rrd_config: dict[HostName, RRDConfig | None] = {}
+ self.__rrd_config: dict[HostName, RRDObjectConfig | None] = {}
self.__recuring_downtimes: dict[HostName, Sequence[RecurringDowntime]] = {}
self.__flap_settings: dict[HostName, tuple[float, float, float]] = {}
self.__log_long_output: dict[HostName, bool] = {}
@@ -4004,8 +4245,11 @@ def invalidate_host_config(self) -> None:
self.__rtc_secret.clear()
self.__agent_config.clear()
- def rrd_config(self, host_name: HostName) -> RRDConfig | None:
- def _rrd_config() -> RRDConfig | None:
+ def cmc_log_rrdcreation(self) -> Literal["terse", "full"] | None:
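+ # The name resolves to the module-level config variable, not to
+ # this method.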
+ return cmc_log_rrdcreation
+
+ def rrd_config(self, host_name: HostName) -> RRDObjectConfig | None:
+ def _rrd_config() -> RRDObjectConfig | None:
entries = self.ruleset_matcher.get_host_values(host_name, cmc_host_rrd_config)
return entries[0] if entries else None
@@ -4018,7 +4262,7 @@ def recurring_downtimes(self, host_name: HostName) -> Sequence[RecurringDowntime
def _impl() -> Sequence[RecurringDowntime]:
return self.ruleset_matcher.get_host_values(
host_name,
- host_recurring_downtimes, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ host_recurring_downtimes, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
with contextlib.suppress(KeyError):
@@ -4030,10 +4274,10 @@ def flap_settings(self, host_name: HostName) -> tuple[float, float, float]:
def _impl() -> tuple[float, float, float]:
values = self.ruleset_matcher.get_host_values(
host_name,
- cmc_host_flap_settings, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ cmc_host_flap_settings, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
return (
- values[0] if values else cmc_flap_settings # type: ignore[name-defined] # pylint: disable=undefined-variable
+ values[0] if values else cmc_flap_settings # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
with contextlib.suppress(KeyError):
@@ -4045,7 +4289,7 @@ def log_long_output(self, host_name: HostName) -> bool:
def _impl() -> bool:
entries = self.ruleset_matcher.get_host_values(
host_name,
- cmc_host_long_output_in_monitoring_history, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ cmc_host_long_output_in_monitoring_history, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
return entries[0] if entries else False
@@ -4058,7 +4302,7 @@ def state_translation(self, host_name: HostName) -> dict:
def _impl() -> dict:
entries = self.ruleset_matcher.get_host_values(
host_name,
- host_state_translation, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ host_state_translation, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
spec: dict[object, object] = {}
@@ -4076,7 +4320,7 @@ def _impl() -> dict:
settings = {"timeout": 2.5}
settings |= self.ruleset_matcher.get_host_merged_dict(
host_name,
- cmc_smartping_settings, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ cmc_smartping_settings, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
return settings
@@ -4119,10 +4363,14 @@ def _impl() -> str | None:
return self.__rtc_secret.setdefault(host_name, _impl())
+ @staticmethod
+ def cmc_real_time_checks() -> object:
+ return cmc_real_time_checks # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
+
def agent_config(self, host_name: HostName, default: Mapping[str, Any]) -> Mapping[str, Any]:
def _impl() -> Mapping[str, Any]:
return {
- **_boil_down_agent_rules(
+ **boil_down_agent_rules(
defaults=default,
rulesets=self.matched_agent_config_entries(host_name),
),
@@ -4136,7 +4384,7 @@ def _impl() -> Mapping[str, Any]:
def rrd_config_of_service(
self, hostname: HostName, description: ServiceName
- ) -> RRDConfig | None:
+ ) -> RRDObjectConfig | None:
out = self.ruleset_matcher.service_extra_conf(hostname, description, cmc_service_rrd_config)
return out[0] if out else None
@@ -4146,7 +4394,7 @@ def recurring_downtimes_of_service(
return self.ruleset_matcher.service_extra_conf(
hostname,
description,
- service_recurring_downtimes, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ service_recurring_downtimes, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
def flap_settings_of_service(
@@ -4155,15 +4403,15 @@ def flap_settings_of_service(
out = self.ruleset_matcher.service_extra_conf(
hostname,
description,
- cmc_service_flap_settings, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ cmc_service_flap_settings, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
- return out[0] if out else cmc_flap_settings # type: ignore[name-defined] # pylint: disable=undefined-variable
+ return out[0] if out else cmc_flap_settings # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
def log_long_output_of_service(self, hostname: HostName, description: ServiceName) -> bool:
out = self.ruleset_matcher.service_extra_conf(
hostname,
description,
- cmc_service_long_output_in_monitoring_history, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ cmc_service_long_output_in_monitoring_history, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
return out[0] if out else False
@@ -4171,7 +4419,7 @@ def state_translation_of_service(self, hostname: HostName, description: ServiceN
entries = self.ruleset_matcher.service_extra_conf(
hostname,
description,
- service_state_translation, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ service_state_translation, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
spec: dict = {}
@@ -4184,9 +4432,9 @@ def check_timeout_of_service(self, hostname: HostName, description: ServiceName)
out = self.ruleset_matcher.service_extra_conf(
hostname,
description,
- cmc_service_check_timeout, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ cmc_service_check_timeout, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
- return out[0] if out else cmc_check_timeout # type: ignore[name-defined] # pylint: disable=undefined-variable
+ return out[0] if out else cmc_check_timeout # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
def graphite_metrics_of(
self,
@@ -4200,7 +4448,7 @@ def graphite_metrics_of(
iter(
self.ruleset_matcher.get_host_values(
hostname,
- cmc_graphite_host_metrics, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ cmc_graphite_host_metrics, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
),
default,
@@ -4209,7 +4457,7 @@ def graphite_metrics_of(
out = self.ruleset_matcher.service_extra_conf(
hostname,
description,
- cmc_graphite_service_metrics, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ cmc_graphite_service_metrics, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
return out[0] if out else default
@@ -4226,7 +4474,7 @@ def influxdb_metrics_of_service(
out = self.ruleset_matcher.service_extra_conf(
hostname,
description,
- cmc_influxdb_service_metrics, # type: ignore[name-defined] # pylint: disable=undefined-variable
+ cmc_influxdb_service_metrics, # type: ignore[name-defined,unused-ignore] # pylint: disable=undefined-variable
)
return out[0] if out else default
@@ -4243,7 +4491,7 @@ def generic_agent_config_entries(
yield from (
(
match_path,
- _boil_down_agent_rules(
+ boil_down_agent_rules(
defaults=defaults,
rulesets={
varname: CEEConfigCache._get_values_for_generic_agent(ruleset, match_path)
diff --git a/cmk/base/core.py b/cmk/base/core.py
index 05c01b306a4..69c6444d120 100644
--- a/cmk/base/core.py
+++ b/cmk/base/core.py
@@ -22,7 +22,7 @@
import cmk.base.core_config as core_config
import cmk.base.nagios_utils
import cmk.base.obsolete_output as out
-from cmk.base.config import ConfigCache
+from cmk.base.config import ConfigCache, IPLookup
from cmk.base.core_config import MonitoringCore
# suppress "Cannot find module" error from mypy
@@ -52,6 +52,7 @@ class CoreAction(enum.Enum):
def do_reload(
config_cache: ConfigCache,
+ ip_address_of: IPLookup,
core: MonitoringCore,
*,
all_hosts: Iterable[HostName],
@@ -61,6 +62,7 @@ def do_reload(
) -> None:
do_restart(
config_cache,
+ ip_address_of,
core,
action=CoreAction.RELOAD,
all_hosts=all_hosts,
@@ -72,6 +74,7 @@ def do_reload(
def do_restart(
config_cache: ConfigCache,
+ ip_address_of: IPLookup,
core: MonitoringCore,
*,
all_hosts: Iterable[HostName],
@@ -86,6 +89,7 @@ def do_restart(
core_config.do_create_config(
core=core,
config_cache=config_cache,
+ ip_address_of=ip_address_of,
all_hosts=all_hosts,
hosts_to_update=hosts_to_update,
duplicates=duplicates,
diff --git a/cmk/base/core_config.py b/cmk/base/core_config.py
index ccda951ef89..bb51da22dd3 100644
--- a/cmk/base/core_config.py
+++ b/cmk/base/core_config.py
@@ -3,8 +3,9 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
+# pylint: disable=protected-access
+
import abc
-import dataclasses
import os
import shutil
import socket
@@ -22,11 +23,10 @@
from cmk.utils.exceptions import MKGeneralException
from cmk.utils.hostaddress import HostAddress, HostName
from cmk.utils.labels import Labels
-from cmk.utils.licensing.handler import LicenseState, LicensingHandler
-from cmk.utils.licensing.helper import get_licensed_state_file_path, write_licensed_state
-from cmk.utils.paths import core_helper_config_dir
+from cmk.utils.licensing.handler import LicensingHandler
+from cmk.utils.licensing.helper import get_licensed_state_file_path
from cmk.utils.servicename import Item, ServiceName
-from cmk.utils.store import load_object_from_file, lock_checkmk_configuration, save_object_to_file
+from cmk.utils.store import lock_checkmk_configuration
from cmk.checkengine.checking import CheckPluginName, ConfiguredService, ServiceID
from cmk.checkengine.parameters import TimespecificParameters
@@ -41,12 +41,6 @@
CoreCommand = str
-@dataclasses.dataclass(frozen=True)
-class CollectedHostLabels:
- host_labels: Labels
- service_labels: dict[ServiceName, Labels]
-
-
class MonitoringCore(abc.ABC):
def __init__(self, licensing_handler_type: type[LicensingHandler]):
self._licensing_handler_type = licensing_handler_type
@@ -65,25 +59,28 @@ def create_config(
self,
config_path: VersionedConfigPath,
config_cache: ConfigCache,
+ ip_address_of: config.IPLookup,
+ passwords: Mapping[str, str],
hosts_to_update: set[HostName] | None = None,
) -> None:
licensing_handler = self._licensing_handler_type.make()
- self._persist_licensed_state(licensing_handler.state)
- self._create_config(config_path, config_cache, licensing_handler, hosts_to_update)
+ licensing_handler.persist_licensed_state(get_licensed_state_file_path())
+ self._create_config(
+ config_path, config_cache, ip_address_of, licensing_handler, passwords, hosts_to_update
+ )
@abc.abstractmethod
def _create_config(
self,
config_path: VersionedConfigPath,
config_cache: ConfigCache,
+ ip_address_of: config.IPLookup,
licensing_handler: LicensingHandler,
+ passwords: Mapping[str, str],
hosts_to_update: set[HostName] | None = None,
) -> None:
raise NotImplementedError
- def _persist_licensed_state(self, license_state: LicenseState) -> None:
- write_licensed_state(get_licensed_state_file_path(), license_state)
-
ActiveServiceID = tuple[str, Item] # TODO: I hope the str someday (tm) becomes "CheckPluginName",
AbstractServiceID = ActiveServiceID | ServiceID
@@ -99,8 +96,8 @@ def duplicate_service_warning(
) -> None:
return config_warnings.warn(
"ERROR: Duplicate service description (%s check) '%s' for host '%s'!\n"
- " - 1st occurrence: check plugin / item: %s / %r\n"
- " - 2nd occurrence: check plugin / item: %s / %r\n"
+ " - 1st occurrence: check plug-in / item: %s / %r\n"
+ " - 2nd occurrence: check plug-in / item: %s / %r\n"
% (checktype, description, host_name, *first_occurrence, *second_occurrence)
)
@@ -248,6 +245,7 @@ def check_icmp_arguments_of(
def do_create_config(
core: MonitoringCore,
config_cache: ConfigCache,
+ ip_address_of: config.IPLookup,
all_hosts: Iterable[HostName],
hosts_to_update: set[HostName] | None = None,
*,
@@ -263,7 +261,11 @@ def do_create_config(
try:
_create_core_config(
- core, config_cache, hosts_to_update=hosts_to_update, duplicates=duplicates
+ core,
+ config_cache,
+ ip_address_of,
+ hosts_to_update=hosts_to_update,
+ duplicates=duplicates,
)
except Exception as e:
if cmk.utils.debug.enabled():
@@ -297,9 +299,9 @@ def _bake_on_restart(
agent_bakery.bake_agents(
target_configs,
- bake_revision_mode=BakeRevisionMode.INACTIVE
- if config.apply_bake_revision
- else BakeRevisionMode.DISABLED,
+ bake_revision_mode=(
+ BakeRevisionMode.INACTIVE if config.apply_bake_revision else BakeRevisionMode.DISABLED
+ ),
logging_level=config.agent_bakery_logging,
call_site="config creation",
)
@@ -350,6 +352,7 @@ def _backup_objects_file(core: MonitoringCore) -> Iterator[None]:
def _create_core_config(
core: MonitoringCore,
config_cache: ConfigCache,
+ ip_address_of: config.IPLookup,
hosts_to_update: set[HostName] | None = None,
*,
duplicates: Sequence[HostName],
@@ -359,11 +362,23 @@ def _create_core_config(
_verify_non_duplicate_hosts(duplicates)
_verify_non_deprecated_checkgroups()
+ # recompute and save passwords to ensure consistency:
+ passwords = config_cache.collect_passwords()
+ cmk.utils.password_store.save(passwords, cmk.utils.password_store.pending_password_store_path())
+
config_path = next(VersionedConfigPath.current())
with config_path.create(is_cmc=core.is_cmc()), _backup_objects_file(core):
- core.create_config(config_path, config_cache, hosts_to_update=hosts_to_update)
+ core.create_config(
+ config_path,
+ config_cache,
+ ip_address_of,
+ hosts_to_update=hosts_to_update,
+ passwords=passwords,
+ )
- cmk.utils.password_store.save_for_helpers(config_path)
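+ # Additionally persist the passwords into the password store of this
+ # config generation, so the core helpers read the same state.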
+ cmk.utils.password_store.save(
+ passwords, cmk.utils.password_store.core_password_store_path(config_path)
+ )
def _verify_non_deprecated_checkgroups() -> None:
@@ -498,45 +513,5 @@ def _extra_service_attributes(
return attrs
-def write_notify_host_file(
- config_path: VersionedConfigPath,
- labels_per_host: Mapping[HostName, CollectedHostLabels],
-) -> None:
- notify_labels_path: Path = _get_host_file_path(config_path)
- for host, labels in labels_per_host.items():
- host_path = notify_labels_path / host
- save_object_to_file(
- host_path,
- dataclasses.asdict(
- CollectedHostLabels(
- host_labels=labels.host_labels,
- service_labels={k: v for k, v in labels.service_labels.items() if v.values()},
- )
- ),
- )
-
-
-def read_notify_host_file(
- host_name: HostName,
-) -> CollectedHostLabels:
- host_file_path: Path = _get_host_file_path(host_name=host_name)
- return CollectedHostLabels(
- **load_object_from_file(
- path=host_file_path,
- default={"host_labels": {}, "service_labels": {}},
- )
- )
-
-
-def _get_host_file_path(
- config_path: VersionedConfigPath | None = None,
- host_name: HostName | None = None,
-) -> Path:
- root_path = Path(config_path) if config_path else core_helper_config_dir / Path("latest")
- if host_name:
- return root_path / "notify" / "labels" / host_name
- return root_path / "notify" / "labels"
-
-
def get_labels_from_attributes(key_value_pairs: list[tuple[str, str]]) -> Labels:
return {key[8:]: value for key, value in key_value_pairs if key.startswith("__LABEL_")}
diff --git a/cmk/base/core_factory.py b/cmk/base/core_factory.py
index f55d1e6bd60..a202fac3e86 100644
--- a/cmk/base/core_factory.py
+++ b/cmk/base/core_factory.py
@@ -12,7 +12,7 @@ def get_licensing_handler_type() -> type[LicensingHandler]:
if edition() is Edition.CRE:
from cmk.utils.licensing.registry import get_available_licensing_handler_type
else:
- from cmk.utils.cee.licensing.registry import ( # type: ignore # pylint: disable=no-name-in-module,import-error
+ from cmk.utils.cee.licensing.registry import ( # type: ignore[import,unused-ignore,no-redef] # pylint: disable=no-name-in-module,import-error
get_available_licensing_handler_type,
)
return get_available_licensing_handler_type()
diff --git a/cmk/base/core_nagios.py b/cmk/base/core_nagios.py
deleted file mode 100644
index ee89498ab0f..00000000000
--- a/cmk/base/core_nagios.py
+++ /dev/null
@@ -1,1512 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-"""Code for support of Nagios (and compatible) cores"""
-
-import base64
-import itertools
-import os
-import py_compile
-import socket
-import sys
-from collections import Counter
-from collections.abc import Mapping
-from io import StringIO
-from pathlib import Path
-from typing import Any, cast, IO, Literal
-
-import cmk.utils.config_path
-import cmk.utils.config_warnings as config_warnings
-import cmk.utils.password_store
-import cmk.utils.paths
-import cmk.utils.store as store
-import cmk.utils.tty as tty
-from cmk.utils.check_utils import section_name_of
-from cmk.utils.config_path import VersionedConfigPath
-from cmk.utils.exceptions import MKGeneralException
-from cmk.utils.hostaddress import HostAddress, HostName, Hosts
-from cmk.utils.labels import Labels
-from cmk.utils.licensing.handler import LicensingHandler
-from cmk.utils.log import console
-from cmk.utils.macros import replace_macros_in_str
-from cmk.utils.servicename import ServiceName
-from cmk.utils.store.host_storage import ContactgroupName
-from cmk.utils.timeperiod import TimeperiodName
-
-from cmk.checkengine.checking import CheckPluginName, CheckPluginNameStr
-from cmk.checkengine.inventory import InventoryPluginName
-
-import cmk.base.api.agent_based.register as agent_based_register
-import cmk.base.config as config
-import cmk.base.core_config as core_config
-import cmk.base.ip_lookup as ip_lookup
-import cmk.base.obsolete_output as out
-import cmk.base.server_side_calls as server_side_calls
-import cmk.base.utils
-from cmk.base.config import ConfigCache, HostgroupName, ObjectAttributes, ServicegroupName
-from cmk.base.core_config import (
- AbstractServiceID,
- CollectedHostLabels,
- CoreCommand,
- CoreCommandName,
- get_labels_from_attributes,
- write_notify_host_file,
-)
-from cmk.base.ip_lookup import AddressFamily
-from cmk.base.plugins.server_side_calls import load_active_checks
-
-from cmk.discover_plugins import PluginLocation
-
-ObjectSpec = dict[str, Any]
-
-
-class NagiosCore(core_config.MonitoringCore):
- @classmethod
- def name(cls) -> Literal["nagios"]:
- return "nagios"
-
- @staticmethod
- def is_cmc() -> Literal[False]:
- return False
-
- def _create_config(
- self,
- config_path: VersionedConfigPath,
- config_cache: ConfigCache,
- licensing_handler: LicensingHandler,
- hosts_to_update: set[HostName] | None = None,
- ) -> None:
- self._create_core_config(config_path, licensing_handler)
- self._precompile_hostchecks(config_path)
-
- def _create_core_config(
- self,
- config_path: VersionedConfigPath,
- licensing_handler: LicensingHandler,
- ) -> None:
- """Tries to create a new Checkmk object configuration file for the Nagios core
-
- During create_config() exceptions may be raised which are caused by configuration issues.
- Don't produce a half written object file. Simply throw away everything and keep the old file.
-
- The user can then start the site with the old configuration and fix the configuration issue
- while the monitoring is running.
- """
-
- config_buffer = StringIO()
- create_config(
- config_buffer,
- config_path,
- hostnames=None,
- licensing_handler=licensing_handler,
- )
-
- store.save_text_to_file(cmk.utils.paths.nagios_objects_file, config_buffer.getvalue())
-
- def _precompile_hostchecks(self, config_path: VersionedConfigPath) -> None:
- out.output("Precompiling host checks...")
- _precompile_hostchecks(config_path)
- out.output(tty.ok + "\n")
-
-
-# .--Create config-------------------------------------------------------.
-# | ____ _ __ _ |
-# | / ___|_ __ ___ __ _| |_ ___ ___ ___ _ __ / _(_) __ _ |
-# | | | | '__/ _ \/ _` | __/ _ \ / __/ _ \| '_ \| |_| |/ _` | |
-# | | |___| | | __/ (_| | || __/ | (_| (_) | | | | _| | (_| | |
-# | \____|_| \___|\__,_|\__\___| \___\___/|_| |_|_| |_|\__, | |
-# | |___/ |
-# +----------------------------------------------------------------------+
-# | Create a configuration file for Nagios core with hosts + services |
-# '----------------------------------------------------------------------'
-
-
-class NagiosConfig:
- def __init__(self, outfile: IO[str], hostnames: list[HostName] | None) -> None:
- super().__init__()
- self._outfile = outfile
- self.hostnames = hostnames
-
- self.hostgroups_to_define: set[HostgroupName] = set()
- self.servicegroups_to_define: set[ServicegroupName] = set()
- self.contactgroups_to_define: set[ContactgroupName] = set()
- self.checknames_to_define: set[CheckPluginName] = set()
- self.active_checks_to_define: set[CheckPluginNameStr] = set()
- self.custom_commands_to_define: set[CoreCommandName] = set()
- self.hostcheck_commands_to_define: list[tuple[CoreCommand, str]] = []
-
- def write(self, x: str) -> None:
- # TODO: Something seems to be mixed up in our call sites...
- self._outfile.write(x)
-
-
-def _validate_licensing(
- hosts: Hosts, licensing_handler: LicensingHandler, licensing_counter: Counter
-) -> None:
- if block_effect := licensing_handler.effect_core(
- licensing_counter["services"], len(hosts.shadow_hosts)
- ).block:
- raise MKGeneralException(block_effect.message_raw)
-
-
-def create_config(
- outfile: IO[str],
- config_path: VersionedConfigPath,
- hostnames: list[HostName] | None,
- licensing_handler: LicensingHandler,
-) -> None:
- if config.host_notification_periods:
- config_warnings.warn(
- "host_notification_periods is not longer supported. Please use extra_host_conf['notification_period'] instead."
- )
-
- if config.service_notification_periods:
- config_warnings.warn(
- "service_notification_periods is not longer supported. Please use extra_service_conf['notification_period'] instead."
- )
-
- # Map service_period to _SERVICE_PERIOD. This field does not exist in Nagios.
- # The CMC has this field natively.
- if "service_period" in config.extra_host_conf:
- config.extra_host_conf["_SERVICE_PERIOD"] = config.extra_host_conf["service_period"]
- del config.extra_host_conf["service_period"]
- if "service_period" in config.extra_service_conf:
- config.extra_service_conf["_SERVICE_PERIOD"] = config.extra_service_conf["service_period"]
- del config.extra_service_conf["service_period"]
-
- config_cache = config.get_config_cache()
-
- if hostnames is None:
- hosts_config = config_cache.hosts_config
- hostnames = sorted(
- {
- hn
- for hn in itertools.chain(hosts_config.hosts, hosts_config.clusters)
- if config_cache.is_active(hn) and config_cache.is_online(hn)
- }
- )
- else:
- hostnames = sorted(hostnames)
-
- cfg = NagiosConfig(outfile, hostnames)
-
- _output_conf_header(cfg)
-
- stored_passwords = cmk.utils.password_store.load()
-
- licensing_counter = Counter("services")
- all_host_labels: dict[HostName, CollectedHostLabels] = {}
- for hostname in hostnames:
- all_host_labels[hostname] = _create_nagios_config_host(
- cfg, config_cache, hostname, stored_passwords, licensing_counter
- )
-
- _validate_licensing(config_cache.hosts_config, licensing_handler, licensing_counter)
-
- write_notify_host_file(config_path, all_host_labels)
-
- _create_nagios_config_contacts(cfg, hostnames)
- _create_nagios_config_hostgroups(cfg)
- _create_nagios_config_servicegroups(cfg)
- _create_nagios_config_contactgroups(cfg)
- _create_nagios_config_commands(cfg)
- _create_nagios_config_timeperiods(cfg)
-
- if config.extra_nagios_conf:
- cfg.write("\n# extra_nagios_conf\n\n")
- cfg.write(config.extra_nagios_conf)
-
-
-def _output_conf_header(cfg: NagiosConfig) -> None:
- cfg.write(
- """#
-# Created by Check_MK. Do not edit.
-#
-
-"""
- )
-
-
-def _create_nagios_config_host(
- cfg: NagiosConfig,
- config_cache: ConfigCache,
- hostname: HostName,
- stored_passwords: Mapping[str, str],
- license_counter: Counter,
-) -> CollectedHostLabels:
- cfg.write("\n# ----------------------------------------------------\n")
- cfg.write("# %s\n" % hostname)
- cfg.write("# ----------------------------------------------------\n")
-
- host_attrs = config_cache.get_host_attributes(hostname)
- if config.generate_hostconf:
- host_spec = _create_nagios_host_spec(cfg, config_cache, hostname, host_attrs)
- cfg.write(_format_nagios_object("host", host_spec))
-
- return CollectedHostLabels(
- host_labels=get_labels_from_attributes(list(host_attrs.items())),
- service_labels=_create_nagios_servicedefs(
- cfg, config_cache, hostname, host_attrs, stored_passwords, license_counter
- ),
- )
-
-
-def _create_nagios_host_spec( # pylint: disable=too-many-branches
- cfg: NagiosConfig, config_cache: ConfigCache, hostname: HostName, attrs: ObjectAttributes
-) -> ObjectSpec:
- ip = attrs["address"]
-
- if hostname in config_cache.hosts_config.clusters:
- nodes = config_cache.get_cluster_nodes_for_config(hostname)
- attrs.update(config_cache.get_cluster_attributes(hostname, nodes))
-
- # _
- # / |
- # | |
- # | |
- # |_| 1. normal, physical hosts
-
- host_spec = {
- "host_name": hostname,
- "use": (
- config.cluster_template
- if hostname in config_cache.hosts_config.clusters
- else config.host_template
- ),
- "address": (
- ip if ip else ip_lookup.fallback_ip_for(config_cache.default_address_family(hostname))
- ),
- "alias": attrs["alias"],
- }
-
- # Add custom macros
- for key, value in attrs.items():
- if key[0] == "_":
- host_spec[key] = value
-
- def host_check_via_service_status(service: ServiceName) -> CoreCommand:
- command = "check-mk-host-custom-%d" % (len(cfg.hostcheck_commands_to_define) + 1)
- service_with_hostname = replace_macros_in_str(
- service,
- {"$HOSTNAME$": hostname},
- )
- cfg.hostcheck_commands_to_define.append(
- (
- command,
- 'echo "$SERVICEOUTPUT:%s:%s$" && exit $SERVICESTATEID:%s:%s$'
- % (
- hostname,
- service_with_hostname,
- hostname,
- service_with_hostname,
- ),
- ),
- )
- return command
-
- def host_check_via_custom_check(
- command_name: CoreCommandName, command: CoreCommand
- ) -> CoreCommand:
- cfg.custom_commands_to_define.add(command_name)
- return command
-
- # Host check command might differ from default
- command = core_config.host_check_command(
- config_cache,
- hostname,
- ip,
- hostname in config_cache.hosts_config.clusters,
- "ping",
- host_check_via_service_status,
- host_check_via_custom_check,
- )
- if command:
- host_spec["check_command"] = command
-
- hostgroups = config_cache.hostgroups(hostname)
- if config.define_hostgroups or hostgroups == [config.default_host_group]:
- cfg.hostgroups_to_define.update(hostgroups)
- host_spec["hostgroups"] = ",".join(hostgroups)
-
- # Contact groups
- contactgroups = config_cache.contactgroups(hostname)
- if contactgroups:
- host_spec["contact_groups"] = ",".join(contactgroups)
- cfg.contactgroups_to_define.update(contactgroups)
-
- if hostname not in config_cache.hosts_config.clusters:
- # Parents for non-clusters
-
- # Get parents explicitly defined for host/folder via extra_host_conf["parents"]. Only honor
- # the ruleset "parents" in case no explicit parents are set
- if not attrs.get("parents", []):
- parents_list = config_cache.parents(hostname)
- if parents_list:
- host_spec["parents"] = ",".join(parents_list)
-
- elif hostname in config_cache.hosts_config.clusters:
- # Special handling of clusters
- host_spec["parents"] = ",".join(nodes)
-
- # Custom configuration last -> user may override all other values
- # TODO: Find a generic mechanism for CMC and Nagios
- for key, value in config_cache.extra_host_attributes(hostname).items():
- if key == "cmk_agent_connection":
- continue
- if hostname in config_cache.hosts_config.clusters and key == "parents":
- continue
- host_spec[key] = value
-
- return host_spec
-
-
-def transform_active_service_command(
- cfg: NagiosConfig, service_data: server_side_calls.ActiveServiceData
-) -> str:
- if config.simulation_mode:
- cfg.custom_commands_to_define.add("check-mk-simulation")
- return "check-mk-simulation!echo 'Simulation mode - cannot execute real check'"
-
- if service_data.command == "check-mk-custom":
- cfg.custom_commands_to_define.add("check-mk-custom")
- return f"{service_data.command}!{service_data.command_line}"
-
- return service_data.command_display
-
-
-def _create_nagios_servicedefs( # pylint: disable=too-many-branches
- cfg: NagiosConfig,
- config_cache: ConfigCache,
- hostname: HostName,
- host_attrs: ObjectAttributes,
- stored_passwords: Mapping[str, str],
- license_counter: Counter,
-) -> dict[ServiceName, Labels]:
- check_mk_attrs = core_config.get_service_attributes(hostname, "Check_MK", config_cache)
-
- # _____
- # |___ /
- # |_ \
- # ___) |
- # |____/ 3. Services
-
- def do_omit_service(hostname: HostName, description: ServiceName) -> bool:
- if config_cache.service_ignored(hostname, description):
- return True
- if hostname != config_cache.effective_host(hostname, description):
- return True
- return False
-
- def get_dependencies(hostname: HostName, servicedesc: ServiceName) -> str:
- result = ""
- for dep in config.service_depends_on(config_cache, hostname, servicedesc):
- result += _format_nagios_object(
- "servicedependency",
- {
- "use": config.service_dependency_template,
- "host_name": hostname,
- "service_description": dep,
- "dependent_host_name": hostname,
- "dependent_service_description": servicedesc,
- },
- )
-
- return result
-
- host_check_table = config_cache.check_table(hostname)
- have_at_least_one_service = False
- used_descriptions: dict[ServiceName, AbstractServiceID] = {}
- service_labels: dict[ServiceName, Labels] = {}
- for service in sorted(host_check_table.values(), key=lambda s: s.sort_key()):
- if not service.description:
- config_warnings.warn(
- f"Skipping invalid service with empty description (plugin: {service.check_plugin_name}) on host {hostname}"
- )
- continue
-
- if service.description in used_descriptions:
- core_config.duplicate_service_warning(
- checktype="auto",
- description=service.description,
- host_name=hostname,
- first_occurrence=used_descriptions[service.description],
- second_occurrence=service.id(),
- )
- continue
- used_descriptions[service.description] = service.id()
-
- # Services Dependencies for autochecks
- cfg.write(get_dependencies(hostname, service.description))
-
- service_spec = {
- "use": config.passive_service_template_perf,
- "host_name": hostname,
- "service_description": service.description,
- "check_command": "check_mk-%s" % service.check_plugin_name,
- }
-
- passive_service_attributes = core_config.get_cmk_passive_service_attributes(
- config_cache, hostname, service, check_mk_attrs
- )
-
- service_labels[service.description] = {
- label.name: label.value for label in service.service_labels.values()
- } | dict(get_labels_from_attributes(list(passive_service_attributes.items())))
-
- service_spec.update(passive_service_attributes)
-
- service_spec.update(
- _extra_service_conf_of(cfg, config_cache, hostname, service.description)
- )
-
- cfg.write(_format_nagios_object("service", service_spec))
- license_counter["services"] += 1
-
- cfg.checknames_to_define.add(service.check_plugin_name)
- have_at_least_one_service = True
-
- # Active check for Check_MK
- if config_cache.checkmk_check_parameters(hostname).enabled:
- service_spec = {
- "use": config.active_service_template,
- "host_name": hostname,
- "service_description": "Check_MK",
- }
- service_spec.update(check_mk_attrs)
- service_spec.update(_extra_service_conf_of(cfg, config_cache, hostname, "Check_MK"))
- cfg.write(_format_nagios_object("service", service_spec))
- license_counter["services"] += 1
-
- # legacy checks via active_checks
- active_services = []
-
- active_check_config = server_side_calls.ActiveCheck(
- load_active_checks()[1],
- config.active_check_info,
- hostname,
- host_attrs,
- stored_passwords=stored_passwords,
- translations=config.get_service_translations(config_cache.ruleset_matcher, hostname),
- escape_func=lambda a: a.replace("\\", "\\\\").replace("!", "\\!"),
- )
-
- active_checks = config_cache.active_checks(hostname)
- actchecks = [name for name, params in active_checks if params]
- for service_data in active_check_config.get_active_service_data(active_checks):
- if do_omit_service(hostname, service_data.description):
- continue
-
- if (existing_plugin := used_descriptions.get(service_data.description)) is not None:
- core_config.duplicate_service_warning(
- checktype="active",
- description=service_data.description,
- host_name=hostname,
- first_occurrence=existing_plugin,
- second_occurrence=(f"active({service_data.plugin_name})", None),
- )
- continue
-
- used_descriptions[service_data.description] = (
- f"active({service_data.plugin_name})",
- service_data.description,
- )
-
- service_spec = {
- "use": "check_mk_perf,check_mk_default",
- "host_name": hostname,
- "service_description": service_data.description,
- "check_command": transform_active_service_command(cfg, service_data),
- "active_checks_enabled": str(1),
- }
- service_spec.update(
- core_config.get_service_attributes(hostname, service_data.description, config_cache)
- )
- service_spec.update(
- _extra_service_conf_of(cfg, config_cache, hostname, service_data.description)
- )
-
- cfg.active_checks_to_define.add(service_data.plugin_name)
- active_services.append(service_spec)
-
- if actchecks:
- cfg.write("\n\n# Active checks\n")
-
- for service_spec in active_services:
- cfg.write(_format_nagios_object("service", service_spec))
- license_counter["services"] += 1
-
- # write service dependencies for active checks
- cfg.write(get_dependencies(hostname, service_spec["service_description"]))
-
- # Legacy checks via custom_checks
- custchecks = config_cache.custom_checks(hostname)
- translations = config.get_service_translations(
- config_cache.ruleset_matcher,
- hostname,
- )
- if custchecks:
- cfg.write("\n\n# Custom checks\n")
- for entry in custchecks:
- # entries are dicts with the following keys:
- # "service_description" Service description to use
- # "command_line" (optional) Unix command line for executing the check
- # If this is missing, we create a passive check
- # "command_name" (optional) Name of Monitoring command to define. If missing,
- # we use "check-mk-custom"
- description = config.get_final_service_description(
- entry["service_description"], translations
- )
- command_name = entry.get("command_name", "check-mk-custom")
- command_line = entry.get("command_line", "")
-
- if not description:
- config_warnings.warn(
- "Skipping invalid service with empty description on host %s" % hostname
- )
- continue
-
- if command_line:
- command_line = (
- core_config.autodetect_plugin(command_line)
- .replace("\\", "\\\\")
- .replace("!", "\\!")
- )
-
- if "freshness" in entry:
- freshness = {
- "check_freshness": 1,
- "freshness_threshold": 60 * entry["freshness"]["interval"],
- }
- command_line = "echo %s && exit %d" % (
- _quote_nagios_string(entry["freshness"]["output"]),
- entry["freshness"]["state"],
- )
- else:
- freshness = {}
-
- cfg.custom_commands_to_define.add(command_name)
-
- if description in used_descriptions:
- cn, _ = used_descriptions[description]
- # If we have the same active check again with the same description,
- # then we do not regard this as an error, but simply ignore the
- # second one.
- if cn == "custom(%s)" % command_name:
- continue
-
- core_config.duplicate_service_warning(
- checktype="custom",
- description=description,
- host_name=hostname,
- first_occurrence=used_descriptions[description],
- second_occurrence=("custom(%s)" % command_name, description),
- )
- continue
-
- used_descriptions[description] = ("custom(%s)" % command_name, description)
-
- command = f"{command_name}!{command_line}"
-
- service_spec = {
- "use": "check_mk_perf,check_mk_default",
- "host_name": hostname,
- "service_description": description,
- "check_command": _simulate_command(cfg, command),
- "active_checks_enabled": str(1 if (command_line and not freshness) else 0),
- }
- service_spec.update(freshness)
- service_spec.update(
- core_config.get_service_attributes(hostname, description, config_cache)
- )
- service_spec.update(_extra_service_conf_of(cfg, config_cache, hostname, description))
- cfg.write(_format_nagios_object("service", service_spec))
- license_counter["services"] += 1
-
- # write service dependencies for custom checks
- cfg.write(get_dependencies(hostname, description))
-
- service_discovery_name = ConfigCache.service_discovery_name()
-
- # Inventory checks - if user has configured them.
- if not (disco_params := config_cache.discovery_check_parameters(hostname)).commandline_only:
- service_spec = {
- "use": config.inventory_check_template,
- "host_name": hostname,
- "service_description": service_discovery_name,
- }
- service_spec.update(
- core_config.get_service_attributes(hostname, service_discovery_name, config_cache)
- )
-
- service_spec.update(
- _extra_service_conf_of(cfg, config_cache, hostname, service_discovery_name)
- )
-
- service_spec.update(
- {
- "check_interval": str(disco_params.check_interval),
- "retry_interval": str(disco_params.check_interval),
- }
- )
-
- cfg.write(_format_nagios_object("service", service_spec))
- license_counter["services"] += 1
-
- if have_at_least_one_service:
- cfg.write(
- _format_nagios_object(
- "servicedependency",
- {
- "use": config.service_dependency_template,
- "host_name": hostname,
- "service_description": "Check_MK",
- "dependent_host_name": hostname,
- "dependent_service_description": service_discovery_name,
- },
- )
- )
-
- # No check_mk service, no legacy service -> create PING service
- if not have_at_least_one_service and not actchecks and not custchecks:
- _add_ping_service(
- cfg,
- config_cache,
- hostname,
- host_attrs["address"],
- config_cache.default_address_family(hostname),
- "PING",
- host_attrs.get("_NODEIPS"),
- license_counter,
- )
-
- if ConfigCache.address_family(hostname) is AddressFamily.DUAL_STACK:
- if config_cache.default_address_family(hostname) is socket.AF_INET6:
- if "PING IPv4" not in used_descriptions:
- _add_ping_service(
- cfg,
- config_cache,
- hostname,
- host_attrs["_ADDRESS_4"],
- socket.AF_INET,
- "PING IPv4",
- host_attrs.get("_NODEIPS_4"),
- license_counter,
- )
- else:
- if "PING IPv6" not in used_descriptions:
- _add_ping_service(
- cfg,
- config_cache,
- hostname,
- host_attrs["_ADDRESS_6"],
- socket.AF_INET6,
- "PING IPv6",
- host_attrs.get("_NODEIPS_6"),
- license_counter,
- )
-
- return service_labels
-
-
-def _add_ping_service(
- cfg: NagiosConfig,
- config_cache: ConfigCache,
- host_name: HostName,
- ipaddress: HostAddress,
- family: socket.AddressFamily,
- descr: ServiceName,
- node_ips: str | None,
- licensing_counter: Counter,
-) -> None:
- arguments = core_config.check_icmp_arguments_of(config_cache, host_name, family=family)
-
- ping_command = "check-mk-ping"
- if host_name in config_cache.hosts_config.clusters:
- assert node_ips is not None
- arguments += " -m 1 " + node_ips
- else:
- arguments += " " + ipaddress
-
- service_spec = {
- "use": config.pingonly_template,
- "host_name": host_name,
- "service_description": descr,
- "check_command": f"{ping_command}!{arguments}",
- }
- service_spec.update(core_config.get_service_attributes(host_name, descr, config_cache))
- service_spec.update(_extra_service_conf_of(cfg, config_cache, host_name, descr))
- cfg.write(_format_nagios_object("service", service_spec))
- licensing_counter["services"] += 1
-
-
-def _format_nagios_object(object_type: str, object_spec: ObjectSpec) -> str:
- cfg = ["define %s {" % object_type]
- for key, val in sorted(object_spec.items(), key=lambda x: x[0]):
- # Use a base16 encoding for names and values of tags, labels and label
- # sources to work around the syntactic restrictions in Nagios' object
- # configuration files.
- if key[0] == "_": # quick pre-check: custom variable?
- for prefix in ("__TAG_", "__LABEL_", "__LABELSOURCE_"):
- if key.startswith(prefix):
- key = prefix + _b16encode(key[len(prefix) :])
- val = _b16encode(val)
- cfg.append(" %-29s %s" % (key, val))
- cfg.append("}")
-
- return "\n".join(cfg) + "\n\n"
-
-
-def _b16encode(b: str) -> str:
- return (base64.b16encode(b.encode())).decode()
-
-
-def _simulate_command(cfg: NagiosConfig, command: CoreCommand) -> CoreCommand:
- if config.simulation_mode:
- cfg.custom_commands_to_define.add("check-mk-simulation")
- return "check-mk-simulation!echo 'Simulation mode - cannot execute real check'"
- return command
-
-
-def _create_nagios_config_hostgroups(cfg: NagiosConfig) -> None:
- if config.define_hostgroups:
- cfg.write("\n# ------------------------------------------------------------\n")
- cfg.write("# Host groups (controlled by define_hostgroups)\n")
- cfg.write("# ------------------------------------------------------------\n")
- for hg in sorted(cfg.hostgroups_to_define):
- cfg.write(
- _format_nagios_object(
- "hostgroup",
- {
- "hostgroup_name": hg,
- "alias": config.define_hostgroups.get(hg, hg),
- },
- )
- )
-
- # No creation of host groups but we need to define default host group
- elif config.default_host_group in cfg.hostgroups_to_define:
- cfg.write(
- _format_nagios_object(
- "hostgroup",
- {
- "hostgroup_name": config.default_host_group,
- "alias": "Check_MK default hostgroup",
- },
- )
- )
-
-
-def _create_nagios_config_servicegroups(cfg: NagiosConfig) -> None:
- if not config.define_servicegroups:
- return
- cfg.write("\n# ------------------------------------------------------------\n")
- cfg.write("# Service groups (controlled by define_servicegroups)\n")
- cfg.write("# ------------------------------------------------------------\n")
- for sg in sorted(cfg.servicegroups_to_define):
- cfg.write(
- _format_nagios_object(
- "servicegroup",
- {
- "servicegroup_name": sg,
- "alias": config.define_servicegroups.get(sg, sg),
- },
- )
- )
-
-
-def _create_nagios_config_contactgroups(cfg: NagiosConfig) -> None:
- if not cfg.contactgroups_to_define:
- return
- cfg.write("\n# ------------------------------------------------------------\n")
- cfg.write("# Contact groups (controlled by define_contactgroups)\n")
- cfg.write("# ------------------------------------------------------------\n\n")
- for name in sorted(cfg.contactgroups_to_define):
- contactgroup_spec = {
- "contactgroup_name": name,
- "alias": config.define_contactgroups.get(name, name),
- }
- if members := config.contactgroup_members.get(name):
- contactgroup_spec["members"] = ",".join(members)
- cfg.write(_format_nagios_object("contactgroup", contactgroup_spec))
-
-
-def _create_nagios_config_commands(cfg: NagiosConfig) -> None:
- if config.generate_dummy_commands:
- cfg.write("\n# ------------------------------------------------------------\n")
- cfg.write("# Dummy check commands and active check commands\n")
- cfg.write("# ------------------------------------------------------------\n\n")
- for checkname in cfg.checknames_to_define:
- cfg.write(
- _format_nagios_object(
- "command",
- {
- "command_name": "check_mk-%s" % checkname,
- "command_line": config.dummy_check_commandline,
- },
- )
- )
-
- # active_checks
- for acttype in cfg.active_checks_to_define:
- command_line = (
- act_info["command_line"]
- if (act_info := config.active_check_info.get(acttype)) is not None
- else f"check_{acttype} $ARG1$"
- )
- cfg.write(
- _format_nagios_object(
- "command",
- {
- "command_name": "check_mk_active-%s" % acttype,
- "command_line": core_config.autodetect_plugin(command_line),
- },
- )
- )
-
- # custom_checks
- for command_name in cfg.custom_commands_to_define:
- cfg.write(
- _format_nagios_object(
- "command",
- {
- "command_name": command_name,
- "command_line": "$ARG1$",
- },
- )
- )
-
- # custom host checks
- for command_name, command_line in cfg.hostcheck_commands_to_define:
- cfg.write(
- _format_nagios_object(
- "command",
- {
- "command_name": command_name,
- "command_line": command_line,
- },
- )
- )
-
-
-def _create_nagios_config_timeperiods(cfg: NagiosConfig) -> None:
- if len(config.timeperiods) > 0:
- cfg.write("\n# ------------------------------------------------------------\n")
- cfg.write("# Timeperiod definitions (controlled by variable 'timeperiods')\n")
- cfg.write("# ------------------------------------------------------------\n\n")
- tpnames = sorted(config.timeperiods)
- for name in tpnames:
- tp = config.timeperiods[name]
- timeperiod_spec = {
- "timeperiod_name": name,
- }
-
- if "alias" in tp:
- alias = tp["alias"]
- assert isinstance(alias, str)
- timeperiod_spec["alias"] = alias
-
- for key, value in tp.items():
- if key not in ["alias", "exclude"]:
- # TODO: We should *really* improve TimeperiodSpec: We have no way to use assert
- # below to distinguish between a list of TimeperiodNames for "exclude" and the
- # list of tuples for the time ranges.
- times = ",".join(
- (f"{fr}-{to}") for (fr, to) in cast(list[tuple[str, str]], value)
- )
- if times:
- timeperiod_spec[key] = times
-
- if "exclude" in tp:
- timeperiod_spec["exclude"] = ",".join(cast(list[TimeperiodName], tp["exclude"]))
-
- cfg.write(_format_nagios_object("timeperiod", timeperiod_spec))
-
-
-def _create_nagios_config_contacts(cfg: NagiosConfig, hostnames: list[HostName]) -> None:
- if config.contacts:
- cfg.write("\n# ------------------------------------------------------------\n")
- cfg.write("# Contact definitions (controlled by variable 'contacts')\n")
- cfg.write("# ------------------------------------------------------------\n\n")
- for cname, contact in sorted(config.contacts.items()):
- # Create contact groups in nagios, even when they are empty. This is needed
- # for RBN to work correctly when using contactgroups as recipients which are
- # not assigned to any host
- cfg.contactgroups_to_define.update(contact.get("contactgroups", []))
- # If the contact is in no contact group or all of the contact groups
- # of the contact have neither hosts nor services assigned - in other
- # words if the contact is not assigned to any host or service, then
- # we do not create this contact in Nagios. It's useless and will produce
- # warnings.
- cgrs = [
- cgr
- for cgr in contact.get("contactgroups", [])
- if cgr in cfg.contactgroups_to_define
- ]
- if not cgrs:
- continue
-
- contact_spec: ObjectSpec = {
- "contact_name": cname,
- }
-
- if "alias" in contact:
- contact_spec["alias"] = contact["alias"]
-
- if "email" in contact:
- contact_spec["email"] = contact["email"]
-
- if "pager" in contact:
- contact_spec["pager"] = contact["pager"]
-
- for what in ["host", "service"]:
- if what == "host":
- no: str = contact.get("host_notification_options", "")
- elif what == "service":
- no = contact.get("service_notification_options", "")
- else:
- raise ValueError()
-
- if not no:
- contact_spec["%s_notifications_enabled" % what] = 0
- no = "n"
-
- contact_spec.update(
- {
- "%s_notification_options" % what: ",".join(no),
- "%s_notification_period" % what: contact.get("notification_period", "24X7"),
- "%s_notification_commands"
- % what: contact.get("%s_notification_commands" % what, "check-mk-notify"),
- }
- )
-
- # Add custom macros
- contact_spec.update({key: val for key, val in contact.items() if key.startswith("_")})
-
- contact_spec["contactgroups"] = ", ".join(cgrs)
- cfg.write(_format_nagios_object("contact", contact_spec))
-
- if hostnames:
- cfg.contactgroups_to_define.add("check-mk-notify")
- cfg.write("# Needed for rule based notifications\n")
- cfg.write(
- _format_nagios_object(
- "contact",
- {
- "contact_name": "check-mk-notify",
- "alias": "Contact for rule based notifications",
- "host_notification_options": "d,u,r,f,s",
- "service_notification_options": "u,c,w,r,f,s",
- "host_notification_period": "24X7",
- "service_notification_period": "24X7",
- "host_notification_commands": "check-mk-notify",
- "service_notification_commands": "check-mk-notify",
- "contactgroups": "check-mk-notify",
- },
- )
- )
-
-
-def _quote_nagios_string(s: str) -> str:
- """Quote string for use in a nagios command execution. Please note that also
- quoting for ! and backslash for Nagios itself takes place here."""
- return "'" + s.replace("\\", "\\\\").replace("'", "'\"'\"'").replace("!", "\\!") + "'"
-
-
-def _extra_service_conf_of(
- cfg: NagiosConfig, config_cache: ConfigCache, hostname: HostName, description: ServiceName
-) -> ObjectSpec:
- """Collect all extra configuration data for a service"""
- service_spec: ObjectSpec = {}
-
- # Add contact groups to the config only if the user has defined them.
- # Otherwise inherit the contact groups from the host.
- # "check-mk-notify" is always returned for rulebased notifications and
- # the Nagios core and not defined by the user.
- sercgr = config_cache.contactgroups_of_service(hostname, description)
- if sercgr != ["check-mk-notify"]:
- service_spec["contact_groups"] = ",".join(sercgr)
- cfg.contactgroups_to_define.update(sercgr)
-
- sergr = config_cache.servicegroups_of_service(hostname, description)
- if sergr:
- service_spec["service_groups"] = ",".join(sergr)
- if config.define_servicegroups:
- cfg.servicegroups_to_define.update(sergr)
-
- return service_spec
-
-
-# .
-# .--Precompile----------------------------------------------------------.
-# | ____ _ _ |
-# | | _ \ _ __ ___ ___ ___ _ __ ___ _ __ (_) | ___ |
-# | | |_) | '__/ _ \/ __/ _ \| '_ ` _ \| '_ \| | |/ _ \ |
-# | | __/| | | __/ (_| (_) | | | | | | |_) | | | __/ |
-# | |_| |_| \___|\___\___/|_| |_| |_| .__/|_|_|\___| |
-# | |_| |
-# +----------------------------------------------------------------------+
-# | Precompiling creates one dedicated Python file per host, which just |
-# | contains that code and information that is needed for executing all |
-# | checks of that host. Also static data that cannot change during the |
-# | normal monitoring process is being precomputed and hard coded. This |
-# | all saves substantial CPU resources as opposed to running Checkmk |
-# | in adhoc mode (about 75%). |
-# '----------------------------------------------------------------------'
-
-
-def _find_check_plugins(checktype: CheckPluginNameStr) -> list[str]:
- """Find files to be included in precompile host check for a certain
- check (for example df or mem.used).
-
- In case of checks with a period (subchecks) we might have to include both "mem" and "mem.used".
- The subcheck *may* be implemented in a separate file."""
- if "." in checktype:
- candidates = [section_name_of(checktype), checktype]
- else:
- candidates = [checktype]
-
- paths = []
- for candidate in candidates:
- local_file_path = cmk.utils.paths.local_checks_dir / candidate
- if local_file_path.exists():
- paths.append(str(local_file_path))
- continue
-
- filename = cmk.utils.paths.checks_dir + "/" + candidate
- if os.path.exists(filename):
- paths.append(filename)
-
- return paths
-
-
-class HostCheckStore:
- """Caring about persistence of the precompiled host check files"""
-
- @staticmethod
- def host_check_file_path(config_path: VersionedConfigPath, hostname: HostName) -> Path:
- return Path(config_path) / "host_checks" / hostname
-
- @staticmethod
- def host_check_source_file_path(config_path: VersionedConfigPath, hostname: HostName) -> Path:
- # TODO: Use append_suffix(".py") once we are on Python 3.10
- path = HostCheckStore.host_check_file_path(config_path, hostname)
- return path.with_suffix(path.suffix + ".py")
-
- def write(self, config_path: VersionedConfigPath, hostname: HostName, host_check: str) -> None:
- compiled_filename = self.host_check_file_path(config_path, hostname)
- source_filename = self.host_check_source_file_path(config_path, hostname)
-
- store.makedirs(compiled_filename.parent)
-
- store.save_text_to_file(source_filename, host_check)
-
- # compile python (either now or delayed - see host_check code for delay_precompile handling)
- if config.delay_precompile:
- compiled_filename.symlink_to(hostname + ".py")
- else:
- py_compile.compile(
- file=str(source_filename),
- cfile=str(compiled_filename),
- dfile=str(compiled_filename),
- doraise=True,
- )
- os.chmod(compiled_filename, 0o750) # nosec B103 # BNS:c29b0e
-
- console.verbose(" ==> %s.\n", compiled_filename, stream=sys.stderr)
-
-
-def _precompile_hostchecks(config_path: VersionedConfigPath) -> None:
- console.verbose("Creating precompiled host check config...\n")
- config_cache = config.get_config_cache()
- hosts_config = config_cache.hosts_config
-
- config.save_packed_config(config_path, config_cache)
-
- console.verbose("Precompiling host checks...\n")
-
- host_check_store = HostCheckStore()
- for hostname in {
- # Inconsistent with `create_config` above.
- hn
- for hn in itertools.chain(hosts_config.hosts, hosts_config.clusters)
- if config_cache.is_active(hn) and config_cache.is_online(hn)
- }:
- try:
- console.verbose(
- "%s%s%-16s%s:",
- tty.bold,
- tty.blue,
- hostname,
- tty.normal,
- stream=sys.stderr,
- )
- host_check = _dump_precompiled_hostcheck(
- config_cache,
- config_path,
- hostname,
- )
- if host_check is None:
- console.verbose("(no Checkmk checks)\n")
- continue
-
- host_check_store.write(config_path, hostname, host_check)
- except Exception as e:
- if cmk.utils.debug.enabled():
- raise
- console.error(f"Error precompiling checks for host {hostname}: {e}\n")
- sys.exit(5)
-
-
-def _dump_precompiled_hostcheck( # pylint: disable=too-many-branches
- config_cache: ConfigCache,
- config_path: VersionedConfigPath,
- hostname: HostName,
- *,
- verify_site_python: bool = True,
-) -> str | None:
- (
- needed_legacy_check_plugin_names,
- needed_agent_based_check_plugin_names,
- needed_agent_based_inventory_plugin_names,
- ) = _get_needed_plugin_names(config_cache, hostname)
-
- if hostname in config_cache.hosts_config.clusters:
- nodes = config_cache.nodes_of(hostname)
- if nodes is None:
- raise TypeError()
-
- for node in nodes:
- (
- node_needed_legacy_check_plugin_names,
- node_needed_agent_based_check_plugin_names,
- node_needed_agent_based_inventory_plugin_names,
- ) = _get_needed_plugin_names(config_cache, node)
- needed_legacy_check_plugin_names.update(node_needed_legacy_check_plugin_names)
- needed_agent_based_check_plugin_names.update(node_needed_agent_based_check_plugin_names)
- needed_agent_based_inventory_plugin_names.update(
- node_needed_agent_based_inventory_plugin_names
- )
-
- needed_legacy_check_plugin_names.update(
- _get_required_legacy_check_sections(
- needed_agent_based_check_plugin_names,
- needed_agent_based_inventory_plugin_names,
- )
- )
-
- if not any(
- (
- needed_legacy_check_plugin_names,
- needed_agent_based_check_plugin_names,
- needed_agent_based_inventory_plugin_names,
- )
- ):
- return None
-
- output = StringIO()
- output.write("#!/usr/bin/env python3\n")
- output.write("# encoding: utf-8\n\n")
-
- output.write("import logging\n")
- output.write("import sys\n\n")
-
- if verify_site_python:
- output.write("if not sys.executable.startswith('/omd'):\n")
- output.write(' sys.stdout.write("ERROR: Only executable with sites python\\n")\n')
- output.write(" sys.exit(2)\n\n")
-
- # Self-compile: replace symlink with precompiled python-code, if
- # we are run for the first time
- if config.delay_precompile:
- output.write(
- """
-import os
-if os.path.islink(%(dst)r):
- import py_compile
- os.remove(%(dst)r)
- py_compile.compile(%(src)r, %(dst)r, %(dst)r, True)
- os.chmod(%(dst)r, 0o755)
-
-"""
- % {
- "src": str(HostCheckStore.host_check_source_file_path(config_path, hostname)),
- "dst": str(HostCheckStore.host_check_file_path(config_path, hostname)),
- }
- )
-
- # Remove precompiled directory from sys.path. Leaving it in the path
- # makes problems when host names (name of precompiled files) are equal
- # to python module names like "random"
- output.write("sys.path.pop(0)\n")
-
- output.write("import cmk.utils.log\n")
- output.write("import cmk.utils.debug\n")
- output.write("from cmk.utils.exceptions import MKTerminate\n")
- output.write("from cmk.utils.config_path import LATEST_CONFIG\n")
- output.write("\n")
- output.write("import cmk.base.utils\n")
- output.write("import cmk.base.config as config\n")
- output.write("from cmk.discover_plugins import PluginLocation\n")
- output.write("from cmk.utils.log import console\n")
- output.write("from cmk.base.api.agent_based.register import register_plugin_by_type\n")
- output.write("import cmk.base.check_api as check_api\n")
- output.write("import cmk.base.ip_lookup as ip_lookup\n") # is this still needed?
- output.write("from cmk.checkengine.submitters import get_submitter\n")
- output.write("\n")
-
- locations = _get_needed_agent_based_locations(
- needed_agent_based_check_plugin_names,
- needed_agent_based_inventory_plugin_names,
- )
- for module in {l.module for l in locations}:
- output.write("import %s\n" % module)
- console.verbose(" %s%s%s", tty.green, module, tty.normal, stream=sys.stderr)
- for location in (l for l in locations if l.name is not None):
- output.write(f"register_plugin_by_type({location!r}, {location.module}.{location.name})\n")
-
- # Register default Checkmk signal handler
- output.write("cmk.base.utils.register_sigint_handler()\n")
-
- # initialize global variables
- output.write(
- """
-# very simple commandline parsing: only -v (once or twice) and -d are supported
-
-cmk.utils.log.setup_console_logging()
-logger = logging.getLogger("cmk.base")
-
-# TODO: This is not really good parsing, because it not cares about syntax like e.g. "-nv".
-# The later regular argument parsing is handling this correctly. Try to clean this up.
-cmk.utils.log.logger.setLevel(cmk.utils.log.verbosity_to_log_level(len([ a for a in sys.argv if a in [ "-v", "--verbose"] ])))
-
-if '-d' in sys.argv:
- cmk.utils.debug.enable()
-
-"""
- )
-
- file_list = sorted(_get_legacy_check_file_names_to_load(needed_legacy_check_plugin_names))
- formatted_file_list = (
- "\n %s,\n" % ",\n ".join("%r" % n for n in file_list) if file_list else ""
- )
- output.write(
- "config.load_checks(check_api.get_check_api_context, [%s])\n" % formatted_file_list
- )
-
- for check_plugin_name in sorted(needed_legacy_check_plugin_names):
- console.verbose(" %s%s%s", tty.green, check_plugin_name, tty.normal, stream=sys.stderr)
-
- output.write("config.load_packed_config(LATEST_CONFIG)\n")
-
- # IP addresses
- (
- needed_ipaddresses,
- needed_ipv6addresses,
- ) = (
- {},
- {},
- )
- if hostname in config_cache.hosts_config.clusters:
- nodes = config_cache.nodes_of(hostname)
- if nodes is None:
- raise TypeError()
-
- for node in nodes:
- if AddressFamily.IPv4 in ConfigCache.address_family(node):
- needed_ipaddresses[node] = config.lookup_ip_address(
- config_cache, node, family=socket.AF_INET
- )
-
- if AddressFamily.IPv6 in ConfigCache.address_family(node):
- needed_ipv6addresses[node] = config.lookup_ip_address(
- config_cache, node, family=socket.AF_INET6
- )
-
- try:
- if AddressFamily.IPv4 in ConfigCache.address_family(hostname):
- needed_ipaddresses[hostname] = config.lookup_ip_address(
- config_cache, hostname, family=socket.AF_INET
- )
- except Exception:
- pass
-
- try:
- if AddressFamily.IPv6 in ConfigCache.address_family(hostname):
- needed_ipv6addresses[hostname] = config.lookup_ip_address(
- config_cache, hostname, family=socket.AF_INET6
- )
- except Exception:
- pass
- else:
- if AddressFamily.IPv4 in ConfigCache.address_family(hostname):
- needed_ipaddresses[hostname] = config.lookup_ip_address(
- config_cache, hostname, family=socket.AF_INET
- )
-
- if AddressFamily.IPv6 in ConfigCache.address_family(hostname):
- needed_ipv6addresses[hostname] = config.lookup_ip_address(
- config_cache, hostname, family=socket.AF_INET6
- )
-
- output.write("config.ipaddresses = %r\n\n" % needed_ipaddresses)
- output.write("config.ipv6addresses = %r\n\n" % needed_ipv6addresses)
- output.write("try:\n")
- output.write(" # mode_check is `mode --check hostname`\n")
- output.write(" from cmk.base.modes.check_mk import mode_check\n")
- output.write(" sys.exit(\n")
- output.write(" mode_check(\n")
- output.write(" get_submitter,\n")
- output.write(" {},\n")
- output.write(f" [{hostname!r}],\n")
- output.write(" active_check_handler=lambda *args: None,\n")
- output.write(" keepalive=False,\n")
- output.write(" )\n")
- output.write(" )\n")
- output.write("except MKTerminate:\n")
- output.write(" out.output('\\n', stream=sys.stderr)\n")
- output.write(" sys.exit(1)\n")
- output.write("except SystemExit as e:\n")
- output.write(" sys.exit(e.code)\n")
- output.write("except Exception as e:\n")
- output.write(" import traceback, pprint\n")
-
- # status output message
- output.write(
- ' sys.stdout.write("UNKNOWN - Exception in precompiled check: %s (details in long output)\\n" % e)\n'
- )
-
- # generate traceback for long output
- output.write(' sys.stdout.write("Traceback: %s\\n" % traceback.format_exc())\n')
-
- output.write("\n")
- output.write(" sys.exit(3)\n")
-
- return output.getvalue()
-
-
-def _get_needed_plugin_names(
- config_cache: ConfigCache, host_name: HostName
-) -> tuple[set[CheckPluginNameStr], set[CheckPluginName], set[InventoryPluginName]]:
- needed_legacy_check_plugin_names = {
- f"agent_{name}" for name, _p in config_cache.special_agents(host_name)
- }
-
- # Collect the needed check plugin names using the host check table.
- # Even auto-migrated checks must be on the list of needed *agent based* plugins:
- # In those cases, the module attribute will be `None`, so nothing will
- # be imported; BUT: we need it in the list, because it must be considered
- # when determining the needed *section* plugins.
- # This matters in cases where the section is migrated, but the check
- # plugins are not.
- needed_agent_based_check_plugin_names = config_cache.check_table(
- host_name,
- filter_mode=config.FilterMode.INCLUDE_CLUSTERED,
- skip_ignored=False,
- ).needed_check_names()
-
- legacy_names = (_resolve_legacy_plugin_name(pn) for pn in needed_agent_based_check_plugin_names)
- needed_legacy_check_plugin_names.update(ln for ln in legacy_names if ln is not None)
-
- # Inventory plugins get passed parsed data these days.
- # Load the required sections, or inventory plugins will crash upon unparsed data.
- needed_agent_based_inventory_plugin_names: set[InventoryPluginName] = set()
- if config_cache.hwsw_inventory_parameters(host_name).status_data_inventory:
- for inventory_plugin in agent_based_register.iter_all_inventory_plugins():
- needed_agent_based_inventory_plugin_names.add(inventory_plugin.name)
- for parsed_section_name in inventory_plugin.sections:
- # check if we must add the legacy check plugin:
- legacy_check_name = config.legacy_check_plugin_names.get(
- CheckPluginName(str(parsed_section_name))
- )
- if legacy_check_name is not None:
- needed_legacy_check_plugin_names.add(legacy_check_name)
-
- return (
- needed_legacy_check_plugin_names,
- needed_agent_based_check_plugin_names,
- needed_agent_based_inventory_plugin_names,
- )
-
-
-def _resolve_legacy_plugin_name(check_plugin_name: CheckPluginName) -> CheckPluginNameStr | None:
- legacy_name = config.legacy_check_plugin_names.get(check_plugin_name)
- if legacy_name:
- return legacy_name
-
- if not check_plugin_name.is_management_name():
- return None
-
-    # See if we must include a legacy plugin from which we derived the given one:
- # A management plugin *could have been* created on the fly, from a 'regular' legacy
- # check plugin. In this case, we must load that.
- plugin = agent_based_register.get_check_plugin(check_plugin_name)
- if not plugin or plugin.location is not None:
- # it does *not* result from a legacy plugin, if module is not None
- return None
-
- # just try to get the legacy name of the 'regular' plugin:
- return config.legacy_check_plugin_names.get(check_plugin_name.create_basic_name())
-
-
-def _get_legacy_check_file_names_to_load(
- needed_check_plugin_names: set[CheckPluginNameStr],
-) -> list[str]:
- # check info table
- # We need to include all those plugins that are referenced in the host's
- # check table.
- filenames: list[str] = []
- for check_plugin_name in needed_check_plugin_names:
- # Now add check file(s) itself
- paths = _find_check_plugins(check_plugin_name)
- if not paths:
- raise MKGeneralException(f"Cannot find check file needed for {check_plugin_name}")
-
- for path in paths:
- if path not in filenames:
- filenames.append(path)
-
- return filenames
-
-
-def _get_needed_agent_based_locations(
- check_plugin_names: set[CheckPluginName],
- inventory_plugin_names: set[InventoryPluginName],
-) -> list[PluginLocation]:
- modules = {
- plugin.location
- for plugin in [agent_based_register.get_check_plugin(p) for p in check_plugin_names]
- if plugin is not None and plugin.location is not None
- }
- modules.update(
- plugin.location
- for plugin in [agent_based_register.get_inventory_plugin(p) for p in inventory_plugin_names]
- if plugin is not None and plugin.location is not None
- )
- modules.update(
- section.location
- for section in agent_based_register.get_relevant_raw_sections(
- check_plugin_names=check_plugin_names,
- inventory_plugin_names=inventory_plugin_names,
- ).values()
- if section.location is not None
- )
-
- return sorted(modules, key=lambda l: (l.module, l.name or ""))
-
-
-def _get_required_legacy_check_sections(
- check_plugin_names: set[CheckPluginName],
- inventory_plugin_names: set[InventoryPluginName],
-) -> set[str]:
- """
- new style plugin may have a dependency to a legacy check
- """
- required_legacy_check_sections = set()
- for section in agent_based_register.get_relevant_raw_sections(
- check_plugin_names=check_plugin_names,
- inventory_plugin_names=inventory_plugin_names,
- ).values():
- if section.location is None:
- required_legacy_check_sections.add(str(section.name))
- return required_legacy_check_sections
diff --git a/cmk/base/core_nagios/__init__.py b/cmk/base/core_nagios/__init__.py
new file mode 100644
index 00000000000..a5fabe5417b
--- /dev/null
+++ b/cmk/base/core_nagios/__init__.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+"""Code for support of Nagios (and compatible) cores"""
+
+# I am not saying this should be the API, but that's the status quo.
+from ._create_config import (
+ create_config,
+ create_nagios_config_commands,
+ create_nagios_host_spec,
+ create_nagios_servicedefs,
+ format_nagios_object,
+ NagiosConfig,
+ NagiosCore,
+)
+from ._host_check_config import HostCheckConfig
+from ._precompile_host_checks import dump_precompiled_hostcheck, HostCheckStore
+
+__all__ = [
+ "format_nagios_object",
+ "create_config",
+ "create_nagios_config_commands",
+ "create_nagios_host_spec",
+ "create_nagios_servicedefs",
+ "NagiosConfig",
+ "NagiosCore",
+ "dump_precompiled_hostcheck",
+ "HostCheckConfig",
+ "HostCheckStore",
+]
diff --git a/cmk/base/core_nagios/_create_config.py b/cmk/base/core_nagios/_create_config.py
new file mode 100644
index 00000000000..98f1f499c47
--- /dev/null
+++ b/cmk/base/core_nagios/_create_config.py
@@ -0,0 +1,1034 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+"""Code for support of Nagios (and compatible) cores"""
+
+import base64
+import itertools
+import socket
+from collections import Counter
+from collections.abc import Mapping, Sequence
+from io import StringIO
+from typing import Any, cast, IO, Literal
+
+import cmk.utils.config_path
+import cmk.utils.paths
+from cmk.utils import config_warnings, password_store, store, tty
+from cmk.utils.config_path import LATEST_CONFIG, VersionedConfigPath
+from cmk.utils.exceptions import MKGeneralException
+from cmk.utils.hostaddress import HostAddress, HostName, Hosts
+from cmk.utils.labels import CollectedHostLabels, Labels
+from cmk.utils.licensing.handler import LicensingHandler
+from cmk.utils.macros import replace_macros_in_str
+from cmk.utils.notify import write_notify_host_file
+from cmk.utils.servicename import MAX_SERVICE_NAME_LEN, ServiceName
+from cmk.utils.store.host_storage import ContactgroupName
+from cmk.utils.timeperiod import TimeperiodName
+
+from cmk.checkengine.checking import CheckPluginName
+
+import cmk.base.config as config
+import cmk.base.core_config as core_config
+import cmk.base.ip_lookup as ip_lookup
+import cmk.base.obsolete_output as out
+import cmk.base.server_side_calls as server_side_calls
+import cmk.base.utils
+from cmk.base.config import ConfigCache, HostgroupName, ObjectAttributes, ServicegroupName
+from cmk.base.core_config import (
+ AbstractServiceID,
+ CoreCommand,
+ CoreCommandName,
+ get_labels_from_attributes,
+)
+from cmk.base.ip_lookup import IPStackConfig
+
+from ._precompile_host_checks import precompile_hostchecks
+
+ObjectSpec = dict[str, Any]
+
+
+class NagiosCore(core_config.MonitoringCore):
+ @classmethod
+ def name(cls) -> Literal["nagios"]:
+ return "nagios"
+
+ @staticmethod
+ def is_cmc() -> Literal[False]:
+ return False
+
+ def _create_config(
+ self,
+ config_path: VersionedConfigPath,
+ config_cache: ConfigCache,
+ ip_address_of: config.IPLookup,
+ licensing_handler: LicensingHandler,
+ passwords: Mapping[str, str],
+ hosts_to_update: set[HostName] | None = None,
+ ) -> None:
+ self._config_cache = config_cache
+ self._create_core_config(config_path, licensing_handler, passwords)
+ self._precompile_hostchecks(config_path)
+
+ def _create_core_config(
+ self,
+ config_path: VersionedConfigPath,
+ licensing_handler: LicensingHandler,
+ passwords: Mapping[str, str],
+ ) -> None:
+ """Tries to create a new Checkmk object configuration file for the Nagios core
+
+        During create_config(), exceptions caused by configuration issues may be raised.
+        Don't produce a half-written object file. Simply throw everything away and keep the old file.
+
+ The user can then start the site with the old configuration and fix the configuration issue
+ while the monitoring is running.
+ """
+
+ config_buffer = StringIO()
+ hosts_config = self._config_cache.hosts_config
+ create_config(
+ config_buffer,
+ config_path,
+ self._config_cache,
+ hostnames=sorted(
+ {
+ hn
+ for hn in itertools.chain(hosts_config.hosts, hosts_config.clusters)
+ if self._config_cache.is_active(hn) and self._config_cache.is_online(hn)
+ }
+ ),
+ licensing_handler=licensing_handler,
+ passwords=passwords,
+ )
+
+ store.save_text_to_file(cmk.utils.paths.nagios_objects_file, config_buffer.getvalue())
+
+ def _precompile_hostchecks(self, config_path: VersionedConfigPath) -> None:
+ out.output("Precompiling host checks...")
+ precompile_hostchecks(config_path, self._config_cache)
+ out.output(tty.ok + "\n")
+
+
+# .--Create config-------------------------------------------------------.
+# | ____ _ __ _ |
+# | / ___|_ __ ___ __ _| |_ ___ ___ ___ _ __ / _(_) __ _ |
+# | | | | '__/ _ \/ _` | __/ _ \ / __/ _ \| '_ \| |_| |/ _` | |
+# | | |___| | | __/ (_| | || __/ | (_| (_) | | | | _| | (_| | |
+# | \____|_| \___|\__,_|\__\___| \___\___/|_| |_|_| |_|\__, | |
+# | |___/ |
+# +----------------------------------------------------------------------+
+# | Create a configuration file for Nagios core with hosts + services |
+# '----------------------------------------------------------------------'
+
+
+class NagiosConfig:
+ def __init__(self, outfile: IO[str], hostnames: Sequence[HostName] | None) -> None:
+ super().__init__()
+ self._outfile = outfile
+ self.hostnames = hostnames
+
+ self.hostgroups_to_define: set[HostgroupName] = set()
+ self.servicegroups_to_define: set[ServicegroupName] = set()
+ self.contactgroups_to_define: set[ContactgroupName] = set()
+ self.checknames_to_define: set[CheckPluginName] = set()
+ self.active_checks_to_define: dict[str, str] = {}
+ self.custom_commands_to_define: set[CoreCommandName] = set()
+ self.hostcheck_commands_to_define: list[tuple[CoreCommand, str]] = []
+
+ def write(self, x: str) -> None:
+ # TODO: Something seems to be mixed up in our call sites...
+ self._outfile.write(x)
+
+
+def _validate_licensing(
+ hosts: Hosts, licensing_handler: LicensingHandler, licensing_counter: Counter
+) -> None:
+ if block_effect := licensing_handler.effect_core(
+ licensing_counter["services"], len(hosts.shadow_hosts)
+ ).block:
+ raise MKGeneralException(block_effect.message_raw)
+
+
+def create_config(
+ outfile: IO[str],
+ config_path: VersionedConfigPath,
+ config_cache: ConfigCache,
+ hostnames: Sequence[HostName],
+ licensing_handler: LicensingHandler,
+ passwords: Mapping[str, str],
+) -> None:
+ cfg = NagiosConfig(outfile, hostnames)
+
+ _output_conf_header(cfg)
+
+    licensing_counter: Counter = Counter()
+ all_host_labels: dict[HostName, CollectedHostLabels] = {}
+ for hostname in hostnames:
+ all_host_labels[hostname] = _create_nagios_config_host(
+ cfg, config_cache, hostname, passwords, licensing_counter
+ )
+
+ _validate_licensing(config_cache.hosts_config, licensing_handler, licensing_counter)
+
+ write_notify_host_file(config_path, all_host_labels)
+
+ _create_nagios_config_contacts(cfg, hostnames)
+ _create_nagios_config_hostgroups(cfg)
+ _create_nagios_config_servicegroups(cfg)
+ _create_nagios_config_contactgroups(cfg)
+ create_nagios_config_commands(cfg)
+ _create_nagios_config_timeperiods(cfg)
+
+ if config.extra_nagios_conf:
+ cfg.write("\n# extra_nagios_conf\n\n")
+ cfg.write(config.extra_nagios_conf)
+
+
+def _output_conf_header(cfg: NagiosConfig) -> None:
+ cfg.write(
+ """#
+# Created by Check_MK. Do not edit.
+#
+
+"""
+ )
+
+
+def _create_nagios_config_host(
+ cfg: NagiosConfig,
+ config_cache: ConfigCache,
+ hostname: HostName,
+ stored_passwords: Mapping[str, str],
+ license_counter: Counter,
+) -> CollectedHostLabels:
+ cfg.write("\n# ----------------------------------------------------\n")
+ cfg.write("# %s\n" % hostname)
+ cfg.write("# ----------------------------------------------------\n")
+
+ ip_address_of = config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ )
+
+ host_attrs = config_cache.get_host_attributes(hostname, ip_address_of)
+ if config.generate_hostconf:
+ host_spec = create_nagios_host_spec(cfg, config_cache, hostname, host_attrs, ip_address_of)
+ cfg.write(format_nagios_object("host", host_spec))
+
+ return CollectedHostLabels(
+ host_labels=get_labels_from_attributes(list(host_attrs.items())),
+ service_labels=create_nagios_servicedefs(
+ cfg,
+ config_cache,
+ hostname,
+ host_attrs,
+ stored_passwords,
+ license_counter,
+ ip_address_of,
+ ),
+ )
+
+
+def create_nagios_host_spec( # pylint: disable=too-many-branches
+ cfg: NagiosConfig,
+ config_cache: ConfigCache,
+ hostname: HostName,
+ attrs: ObjectAttributes,
+ ip_address_of: config.IPLookup,
+) -> ObjectSpec:
+ ip = attrs["address"]
+
+ if hostname in config_cache.hosts_config.clusters:
+ nodes = config_cache.get_cluster_nodes_for_config(hostname)
+ attrs.update(config_cache.get_cluster_attributes(hostname, nodes, ip_address_of))
+
+ # _
+ # / |
+ # | |
+ # | |
+ # |_| 1. normal, physical hosts
+
+ host_spec = {
+ "host_name": hostname,
+ "use": (
+ config.cluster_template
+ if hostname in config_cache.hosts_config.clusters
+ else config.host_template
+ ),
+ "address": (
+ ip if ip else ip_lookup.fallback_ip_for(config_cache.default_address_family(hostname))
+ ),
+ "alias": attrs["alias"],
+ }
+
+ # Add custom macros
+ for key, value in attrs.items():
+ if key[0] == "_":
+ host_spec[key] = value
+
+ def host_check_via_service_status(service: ServiceName) -> CoreCommand:
+ command = "check-mk-host-custom-%d" % (len(cfg.hostcheck_commands_to_define) + 1)
+ service_with_hostname = replace_macros_in_str(
+ service,
+ {"$HOSTNAME$": hostname},
+ )
+ cfg.hostcheck_commands_to_define.append(
+ (
+ command,
+ 'echo "$SERVICEOUTPUT:%s:%s$" && exit $SERVICESTATEID:%s:%s$'
+ % (
+ hostname,
+ service_with_hostname,
+ hostname,
+ service_with_hostname,
+ ),
+ ),
+ )
+ return command
+
+ def host_check_via_custom_check(
+ command_name: CoreCommandName, command: CoreCommand
+ ) -> CoreCommand:
+ cfg.custom_commands_to_define.add(command_name)
+ return command
+
+ # Host check command might differ from default
+ command = core_config.host_check_command(
+ config_cache,
+ hostname,
+ ip,
+ hostname in config_cache.hosts_config.clusters,
+ "ping",
+ host_check_via_service_status,
+ host_check_via_custom_check,
+ )
+ if command:
+ host_spec["check_command"] = command
+
+ hostgroups = config_cache.hostgroups(hostname)
+ if config.define_hostgroups or hostgroups == [config.default_host_group]:
+ cfg.hostgroups_to_define.update(hostgroups)
+ host_spec["hostgroups"] = ",".join(hostgroups)
+
+ # Contact groups
+ contactgroups = config_cache.contactgroups(hostname)
+ if contactgroups:
+ host_spec["contact_groups"] = ",".join(contactgroups)
+ cfg.contactgroups_to_define.update(contactgroups)
+
+ if hostname not in config_cache.hosts_config.clusters:
+ # Parents for non-clusters
+
+ # Get parents explicitly defined for host/folder via extra_host_conf["parents"]. Only honor
+ # the ruleset "parents" in case no explicit parents are set
+ if not attrs.get("parents", []):
+ parents_list = config_cache.parents(hostname)
+ if parents_list:
+ host_spec["parents"] = ",".join(parents_list)
+
+ elif hostname in config_cache.hosts_config.clusters:
+ # Special handling of clusters
+ host_spec["parents"] = ",".join(nodes)
+
+ # Custom configuration last -> user may override all other values
+ # TODO: Find a generic mechanism for CMC and Nagios
+ for key, value in config_cache.extra_host_attributes(hostname).items():
+ if key == "cmk_agent_connection":
+ continue
+ if hostname in config_cache.hosts_config.clusters and key == "parents":
+ continue
+ host_spec[key] = value
+
+ return host_spec
+
+
+def transform_active_service_command(
+ cfg: NagiosConfig, service_data: server_side_calls.ActiveServiceData
+) -> str:
+ if config.simulation_mode:
+ cfg.custom_commands_to_define.add("check-mk-simulation")
+ return "check-mk-simulation!echo 'Simulation mode - cannot execute real check'"
+
+ if service_data.command == "check-mk-custom":
+ cfg.custom_commands_to_define.add("check-mk-custom")
+ return f"{service_data.command}!{service_data.command_line}"
+
+ return service_data.command_display
+
+
+def create_nagios_servicedefs( # pylint: disable=too-many-branches
+ cfg: NagiosConfig,
+ config_cache: ConfigCache,
+ hostname: HostName,
+ host_attrs: ObjectAttributes,
+ stored_passwords: Mapping[str, str],
+ license_counter: Counter,
+ ip_address_of: config.IPLookup,
+) -> dict[ServiceName, Labels]:
+ check_mk_attrs = core_config.get_service_attributes(hostname, "Check_MK", config_cache)
+
+ # _____
+ # |___ /
+ # |_ \
+ # ___) |
+ # |____/ 3. Services
+
+ def do_omit_service(hostname: HostName, description: ServiceName) -> bool:
+ if config_cache.service_ignored(hostname, description):
+ return True
+ if hostname != config_cache.effective_host(hostname, description):
+ return True
+ return False
+
+ def get_dependencies(hostname: HostName, servicedesc: ServiceName) -> str:
+ result = ""
+ for dep in config.service_depends_on(config_cache, hostname, servicedesc):
+ result += format_nagios_object(
+ "servicedependency",
+ {
+ "use": config.service_dependency_template,
+ "host_name": hostname,
+ "service_description": dep,
+ "dependent_host_name": hostname,
+ "dependent_service_description": servicedesc,
+ },
+ )
+
+ return result
+
+ host_check_table = config_cache.check_table(hostname)
+ have_at_least_one_service = False
+ used_descriptions: dict[ServiceName, AbstractServiceID] = {}
+ service_labels: dict[ServiceName, Labels] = {}
+ for service in sorted(host_check_table.values(), key=lambda s: s.sort_key()):
+ if not service.description:
+ config_warnings.warn(
+ f"Skipping invalid service with empty description (plugin: {service.check_plugin_name}) on host {hostname}"
+ )
+ continue
+
+ if len(service.description) > MAX_SERVICE_NAME_LEN:
+ config_warnings.warn(
+ f"Skipping invalid service exceeding the name length limit of {MAX_SERVICE_NAME_LEN} "
+ f"(plugin: {service.check_plugin_name}) on host: {hostname}, Service: {service.description}"
+ )
+ continue
+
+ if service.description in used_descriptions:
+ core_config.duplicate_service_warning(
+ checktype="auto",
+ description=service.description,
+ host_name=hostname,
+ first_occurrence=used_descriptions[service.description],
+ second_occurrence=service.id(),
+ )
+ continue
+ used_descriptions[service.description] = service.id()
+
+ # Services Dependencies for autochecks
+ cfg.write(get_dependencies(hostname, service.description))
+
+ service_spec = {
+ "use": config.passive_service_template_perf,
+ "host_name": hostname,
+ "service_description": service.description,
+ "check_command": "check_mk-%s" % service.check_plugin_name,
+ }
+
+ passive_service_attributes = core_config.get_cmk_passive_service_attributes(
+ config_cache, hostname, service, check_mk_attrs
+ )
+
+ service_labels[service.description] = {
+ label.name: label.value for label in service.service_labels.values()
+ } | dict(get_labels_from_attributes(list(passive_service_attributes.items())))
+
+ service_spec.update(passive_service_attributes)
+
+ service_spec.update(
+ _extra_service_conf_of(cfg, config_cache, hostname, service.description)
+ )
+
+ cfg.write(format_nagios_object("service", service_spec))
+ license_counter["services"] += 1
+
+ cfg.checknames_to_define.add(service.check_plugin_name)
+ have_at_least_one_service = True
+
+ # Active check for Check_MK
+ if config_cache.checkmk_check_parameters(hostname).enabled:
+ service_spec = {
+ "use": config.active_service_template,
+ "host_name": hostname,
+ "service_description": "Check_MK",
+ }
+ service_spec.update(check_mk_attrs)
+ service_spec.update(_extra_service_conf_of(cfg, config_cache, hostname, "Check_MK"))
+ cfg.write(format_nagios_object("service", service_spec))
+ license_counter["services"] += 1
+
+ # legacy checks via active_checks
+ active_services = []
+
+ translations = config.get_service_translations(config_cache.ruleset_matcher, hostname)
+ host_macros = ConfigCache.get_host_macros_from_attributes(hostname, host_attrs)
+ resource_macros = config.get_resource_macros()
+ macros = {**host_macros, **resource_macros}
+ active_check_config = server_side_calls.ActiveCheck(
+ server_side_calls.load_active_checks()[1],
+ config.active_check_info,
+ hostname,
+ config.get_ssc_host_config(hostname, config_cache, macros, ip_address_of),
+ host_attrs,
+ config.http_proxies,
+ lambda x: config.get_final_service_description(x, translations),
+ config.use_new_descriptions_for,
+ stored_passwords,
+ password_store.core_password_store_path(LATEST_CONFIG),
+ escape_func=lambda a: a.replace("\\", "\\\\").replace("!", "\\!"),
+ )
+
+ active_checks = config_cache.active_checks(hostname)
+ actchecks = [name for name, params in active_checks if params]
+ for service_data in active_check_config.get_active_service_data(active_checks):
+ if do_omit_service(hostname, service_data.description):
+ continue
+
+ if (existing_plugin := used_descriptions.get(service_data.description)) is not None:
+ core_config.duplicate_service_warning(
+ checktype="active",
+ description=service_data.description,
+ host_name=hostname,
+ first_occurrence=existing_plugin,
+ second_occurrence=(f"active({service_data.plugin_name})", None),
+ )
+ continue
+
+ used_descriptions[service_data.description] = (
+ f"active({service_data.plugin_name})",
+ service_data.description,
+ )
+
+ service_spec = {
+ "use": "check_mk_perf,check_mk_default",
+ "host_name": hostname,
+ "service_description": service_data.description,
+ "check_command": transform_active_service_command(cfg, service_data),
+ "active_checks_enabled": str(1),
+ }
+ service_spec.update(
+ core_config.get_service_attributes(hostname, service_data.description, config_cache)
+ )
+ service_spec.update(
+ _extra_service_conf_of(cfg, config_cache, hostname, service_data.description)
+ )
+
+ cfg.active_checks_to_define[service_data.plugin_name] = service_data.detected_executable
+ active_services.append(service_spec)
+
+ if actchecks:
+ cfg.write("\n\n# Active checks\n")
+
+ for service_spec in active_services:
+ cfg.write(format_nagios_object("service", service_spec))
+ license_counter["services"] += 1
+
+ # write service dependencies for active checks
+ cfg.write(get_dependencies(hostname, service_spec["service_description"]))
+
+ # Legacy checks via custom_checks
+ custchecks = config_cache.custom_checks(hostname)
+ translations = config.get_service_translations(
+ config_cache.ruleset_matcher,
+ hostname,
+ )
+ if custchecks:
+ cfg.write("\n\n# Custom checks\n")
+ for entry in custchecks:
+ # entries are dicts with the following keys:
+ # "service_description" Service description to use
+ # "command_line" (optional) Unix command line for executing the check
+ # If this is missing, we create a passive check
+ # "command_name" (optional) Name of Monitoring command to define. If missing,
+ # we use "check-mk-custom"
+ description = config.get_final_service_description(
+ entry["service_description"], translations
+ )
+ command_name = entry.get("command_name", "check-mk-custom")
+ command_line = entry.get("command_line", "")
+
+ if not description:
+ config_warnings.warn(
+ "Skipping invalid service with empty description on host %s" % hostname
+ )
+ continue
+
+ if command_line:
+ command_line = (
+ core_config.autodetect_plugin(command_line)
+ .replace("\\", "\\\\")
+ .replace("!", "\\!")
+ )
+
+ if "freshness" in entry:
+ freshness = {
+ "check_freshness": 1,
+ "freshness_threshold": 60 * entry["freshness"]["interval"],
+ }
+ command_line = "echo %s && exit %d" % (
+ _quote_nagios_string(entry["freshness"]["output"]),
+ entry["freshness"]["state"],
+ )
+ else:
+ freshness = {}
+
+ cfg.custom_commands_to_define.add(command_name)
+
+ if description in used_descriptions:
+ cn, _ = used_descriptions[description]
+ # If we have the same active check again with the same description,
+ # then we do not regard this as an error, but simply ignore the
+ # second one.
+ if cn == "custom(%s)" % command_name:
+ continue
+
+ core_config.duplicate_service_warning(
+ checktype="custom",
+ description=description,
+ host_name=hostname,
+ first_occurrence=used_descriptions[description],
+ second_occurrence=("custom(%s)" % command_name, description),
+ )
+ continue
+
+ used_descriptions[description] = ("custom(%s)" % command_name, description)
+
+ command = f"{command_name}!{command_line}"
+
+ service_spec = {
+ "use": "check_mk_perf,check_mk_default",
+ "host_name": hostname,
+ "service_description": description,
+ "check_command": _simulate_command(cfg, command),
+ "active_checks_enabled": str(1 if (command_line and not freshness) else 0),
+ }
+ service_spec.update(freshness)
+ service_spec.update(
+ core_config.get_service_attributes(hostname, description, config_cache)
+ )
+ service_spec.update(_extra_service_conf_of(cfg, config_cache, hostname, description))
+ cfg.write(format_nagios_object("service", service_spec))
+ license_counter["services"] += 1
+
+ # write service dependencies for custom checks
+ cfg.write(get_dependencies(hostname, description))
+
+ service_discovery_name = ConfigCache.service_discovery_name()
+
+ # Inventory checks - if user has configured them.
+ if not (disco_params := config_cache.discovery_check_parameters(hostname)).commandline_only:
+ service_spec = {
+ "use": config.inventory_check_template,
+ "host_name": hostname,
+ "service_description": service_discovery_name,
+ }
+ service_spec.update(
+ core_config.get_service_attributes(hostname, service_discovery_name, config_cache)
+ )
+
+ service_spec.update(
+ _extra_service_conf_of(cfg, config_cache, hostname, service_discovery_name)
+ )
+
+ service_spec.update(
+ {
+ "check_interval": str(disco_params.check_interval),
+ "retry_interval": str(disco_params.check_interval),
+ }
+ )
+
+ cfg.write(format_nagios_object("service", service_spec))
+ license_counter["services"] += 1
+
+ if have_at_least_one_service:
+ cfg.write(
+ format_nagios_object(
+ "servicedependency",
+ {
+ "use": config.service_dependency_template,
+ "host_name": hostname,
+ "service_description": "Check_MK",
+ "dependent_host_name": hostname,
+ "dependent_service_description": service_discovery_name,
+ },
+ )
+ )
+
+ # No check_mk service, no legacy service -> create PING service
+ if not have_at_least_one_service and not actchecks and not custchecks:
+ _add_ping_service(
+ cfg,
+ config_cache,
+ hostname,
+ host_attrs["address"],
+ config_cache.default_address_family(hostname),
+ "PING",
+ host_attrs.get("_NODEIPS"),
+ license_counter,
+ )
+
+ if ConfigCache.ip_stack_config(hostname) is IPStackConfig.DUAL_STACK:
+ if config_cache.default_address_family(hostname) is socket.AF_INET6:
+ if "PING IPv4" not in used_descriptions:
+ _add_ping_service(
+ cfg,
+ config_cache,
+ hostname,
+ host_attrs["_ADDRESS_4"],
+ socket.AF_INET,
+ "PING IPv4",
+ host_attrs.get("_NODEIPS_4"),
+ license_counter,
+ )
+ else:
+ if "PING IPv6" not in used_descriptions:
+ _add_ping_service(
+ cfg,
+ config_cache,
+ hostname,
+ host_attrs["_ADDRESS_6"],
+ socket.AF_INET6,
+ "PING IPv6",
+ host_attrs.get("_NODEIPS_6"),
+ license_counter,
+ )
+
+ return service_labels
+
+
+def _add_ping_service(
+ cfg: NagiosConfig,
+ config_cache: ConfigCache,
+ host_name: HostName,
+ ipaddress: HostAddress,
+ family: socket.AddressFamily,
+ descr: ServiceName,
+ node_ips: str | None,
+ licensing_counter: Counter,
+) -> None:
+ arguments = core_config.check_icmp_arguments_of(config_cache, host_name, family=family)
+
+ ping_command = "check-mk-ping"
+ if host_name in config_cache.hosts_config.clusters:
+ assert node_ips is not None
+ arguments += " -m 1 " + node_ips
+ else:
+ arguments += " " + ipaddress
+
+ service_spec = {
+ "use": config.pingonly_template,
+ "host_name": host_name,
+ "service_description": descr,
+ "check_command": f"{ping_command}!{arguments}",
+ }
+ service_spec.update(core_config.get_service_attributes(host_name, descr, config_cache))
+ service_spec.update(_extra_service_conf_of(cfg, config_cache, host_name, descr))
+ cfg.write(format_nagios_object("service", service_spec))
+ licensing_counter["services"] += 1
+
+
+def format_nagios_object(object_type: str, object_spec: ObjectSpec) -> str:
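+    """Render a single Nagios object definition block.
+
+    A minimal doctest-style sketch with a hypothetical host name:
+
+    >>> print(format_nagios_object("host", {"host_name": "web01"}).rstrip())
+    define host {
+      host_name                     web01
+    }
+    """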
+ cfg = ["define %s {" % object_type]
+ for key, val in sorted(object_spec.items(), key=lambda x: x[0]):
+ # Use a base16 encoding for names and values of tags, labels and label
+ # sources to work around the syntactic restrictions in Nagios' object
+ # configuration files.
+ if key[0] == "_": # quick pre-check: custom variable?
+ for prefix in ("__TAG_", "__LABEL_", "__LABELSOURCE_"):
+ if key.startswith(prefix):
+ key = prefix + _b16encode(key[len(prefix) :])
+ val = _b16encode(val)
+ cfg.append(" %-29s %s" % (key, val))
+ cfg.append("}")
+
+ return "\n".join(cfg) + "\n\n"
+
+
+def _b16encode(b: str) -> str:
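+    """Base16-encode a string (used for tag/label custom variables).
+
+    >>> _b16encode("prod")
+    '70726F64'
+    """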
+ return (base64.b16encode(b.encode())).decode()
+
+
+def _simulate_command(cfg: NagiosConfig, command: CoreCommand) -> CoreCommand:
+ if config.simulation_mode:
+ cfg.custom_commands_to_define.add("check-mk-simulation")
+ return "check-mk-simulation!echo 'Simulation mode - cannot execute real check'"
+ return command
+
+
+def _create_nagios_config_hostgroups(cfg: NagiosConfig) -> None:
+ if config.define_hostgroups:
+ cfg.write("\n# ------------------------------------------------------------\n")
+ cfg.write("# Host groups (controlled by define_hostgroups)\n")
+ cfg.write("# ------------------------------------------------------------\n")
+ for hg in sorted(cfg.hostgroups_to_define):
+ cfg.write(
+ format_nagios_object(
+ "hostgroup",
+ {
+ "hostgroup_name": hg,
+ "alias": config.define_hostgroups.get(hg, hg),
+ },
+ )
+ )
+
+ # No creation of host groups but we need to define default host group
+ elif config.default_host_group in cfg.hostgroups_to_define:
+ cfg.write(
+ format_nagios_object(
+ "hostgroup",
+ {
+ "hostgroup_name": config.default_host_group,
+ "alias": "Check_MK default hostgroup",
+ },
+ )
+ )
+
+
+def _create_nagios_config_servicegroups(cfg: NagiosConfig) -> None:
+ if not config.define_servicegroups:
+ return
+ cfg.write("\n# ------------------------------------------------------------\n")
+ cfg.write("# Service groups (controlled by define_servicegroups)\n")
+ cfg.write("# ------------------------------------------------------------\n")
+ for sg in sorted(cfg.servicegroups_to_define):
+ cfg.write(
+ format_nagios_object(
+ "servicegroup",
+ {
+ "servicegroup_name": sg,
+ "alias": config.define_servicegroups.get(sg, sg),
+ },
+ )
+ )
+
+
+def _create_nagios_config_contactgroups(cfg: NagiosConfig) -> None:
+ if not cfg.contactgroups_to_define:
+ return
+ cfg.write("\n# ------------------------------------------------------------\n")
+ cfg.write("# Contact groups (controlled by define_contactgroups)\n")
+ cfg.write("# ------------------------------------------------------------\n\n")
+ for name in sorted(cfg.contactgroups_to_define):
+ contactgroup_spec = {
+ "contactgroup_name": name,
+ "alias": config.define_contactgroups.get(name, name),
+ }
+ if members := config.contactgroup_members.get(name):
+ contactgroup_spec["members"] = ",".join(members)
+ cfg.write(format_nagios_object("contactgroup", contactgroup_spec))
+
+
+def create_nagios_config_commands(cfg: NagiosConfig) -> None:
+ if config.generate_dummy_commands:
+ cfg.write("\n# ------------------------------------------------------------\n")
+ cfg.write("# Dummy check commands and active check commands\n")
+ cfg.write("# ------------------------------------------------------------\n\n")
+ for checkname in cfg.checknames_to_define:
+ cfg.write(
+ format_nagios_object(
+ "command",
+ {
+ "command_name": "check_mk-%s" % checkname,
+ "command_line": config.dummy_check_commandline,
+ },
+ )
+ )
+
+ # active_checks
+ for acttype, detected_executable in cfg.active_checks_to_define.items():
+ command_line = (
+ core_config.autodetect_plugin(act_info["command_line"])
+ if (act_info := config.active_check_info.get(acttype)) is not None
+ else f"{detected_executable} $ARG1$"
+ )
+ cfg.write(
+ format_nagios_object(
+ "command",
+ {
+ "command_name": f"check_mk_active-{acttype}",
+ "command_line": command_line,
+ },
+ )
+ )
+
+ # custom_checks
+ for command_name in cfg.custom_commands_to_define:
+ cfg.write(
+ format_nagios_object(
+ "command",
+ {
+ "command_name": command_name,
+ "command_line": "$ARG1$",
+ },
+ )
+ )
+
+ # custom host checks
+ for command_name, command_line in cfg.hostcheck_commands_to_define:
+ cfg.write(
+ format_nagios_object(
+ "command",
+ {
+ "command_name": command_name,
+ "command_line": command_line,
+ },
+ )
+ )
+
+
+def _create_nagios_config_timeperiods(cfg: NagiosConfig) -> None:
+ if len(config.timeperiods) > 0:
+ cfg.write("\n# ------------------------------------------------------------\n")
+ cfg.write("# Timeperiod definitions (controlled by variable 'timeperiods')\n")
+ cfg.write("# ------------------------------------------------------------\n\n")
+ tpnames = sorted(config.timeperiods)
+ for name in tpnames:
+ tp = config.timeperiods[name]
+ timeperiod_spec = {
+ "timeperiod_name": name,
+ }
+
+ if "alias" in tp:
+ alias = tp["alias"]
+ assert isinstance(alias, str)
+ timeperiod_spec["alias"] = alias
+
+ for key, value in tp.items():
+ if key not in ["alias", "exclude"]:
+ # TODO: We should *really* improve TimeperiodSpec: We have no way to use assert
+ # below to distinguish between a list of TimeperiodNames for "exclude" and the
+ # list of tuples for the time ranges.
+ times = ",".join(
+ (f"{fr}-{to}") for (fr, to) in cast(list[tuple[str, str]], value)
+ )
+ if times:
+ timeperiod_spec[key] = times
+
+ if "exclude" in tp:
+ timeperiod_spec["exclude"] = ",".join(cast(list[TimeperiodName], tp["exclude"]))
+
+ cfg.write(format_nagios_object("timeperiod", timeperiod_spec))
+
+
+def _create_nagios_config_contacts(cfg: NagiosConfig, hostnames: Sequence[HostName]) -> None:
+ if config.contacts:
+ cfg.write("\n# ------------------------------------------------------------\n")
+ cfg.write("# Contact definitions (controlled by variable 'contacts')\n")
+ cfg.write("# ------------------------------------------------------------\n\n")
+ for cname, contact in sorted(config.contacts.items()):
+ # Create contact groups in nagios, even when they are empty. This is needed
+ # for RBN to work correctly when using contactgroups as recipients which are
+ # not assigned to any host
+ cfg.contactgroups_to_define.update(contact.get("contactgroups", []))
+ # If the contact is in no contact group or all of the contact groups
+ # of the contact have neither hosts nor services assigned - in other
+ # words if the contact is not assigned to any host or service, then
+ # we do not create this contact in Nagios. It's useless and will produce
+ # warnings.
+ cgrs = [
+ cgr
+ for cgr in contact.get("contactgroups", [])
+ if cgr in cfg.contactgroups_to_define
+ ]
+ if not cgrs:
+ continue
+
+ contact_spec: ObjectSpec = {
+ "contact_name": cname,
+ }
+
+ if "alias" in contact:
+ contact_spec["alias"] = contact["alias"]
+
+ if "email" in contact:
+ contact_spec["email"] = contact["email"]
+
+ if "pager" in contact:
+ contact_spec["pager"] = contact["pager"]
+
+ for what in ["host", "service"]:
+ if what == "host":
+ no: str = contact.get("host_notification_options", "")
+ elif what == "service":
+ no = contact.get("service_notification_options", "")
+ else:
+ raise ValueError()
+
+ if not no:
+ contact_spec["%s_notifications_enabled" % what] = 0
+ no = "n"
+
+ contact_spec.update(
+ {
+ "%s_notification_options" % what: ",".join(no),
+ "%s_notification_period" % what: contact.get("notification_period", "24X7"),
+ "%s_notification_commands"
+ % what: contact.get("%s_notification_commands" % what, "check-mk-notify"),
+ }
+ )
+
+ # Add custom macros
+ contact_spec.update({key: val for key, val in contact.items() if key.startswith("_")})
+
+ contact_spec["contactgroups"] = ", ".join(cgrs)
+ cfg.write(format_nagios_object("contact", contact_spec))
+
+ if hostnames:
+ cfg.contactgroups_to_define.add("check-mk-notify")
+ cfg.write("# Needed for rule based notifications\n")
+ cfg.write(
+ format_nagios_object(
+ "contact",
+ {
+ "contact_name": "check-mk-notify",
+ "alias": "Contact for rule based notifications",
+ "host_notification_options": "d,u,r,f,s",
+ "service_notification_options": "u,c,w,r,f,s",
+ "host_notification_period": "24X7",
+ "service_notification_period": "24X7",
+ "host_notification_commands": "check-mk-notify",
+ "service_notification_commands": "check-mk-notify",
+ "contactgroups": "check-mk-notify",
+ },
+ )
+ )
+
+
+def _quote_nagios_string(s: str) -> str:
+ """Quote string for use in a nagios command execution. Please note that also
+ quoting for ! and backslash for Nagios itself takes place here."""
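+
+    A small doctest-style sketch with a hypothetical status message:
+
+    >>> _quote_nagios_string("OK!")
+    "'OK\\!'"
+    """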
+ return "'" + s.replace("\\", "\\\\").replace("'", "'\"'\"'").replace("!", "\\!") + "'"
+
+
+def _extra_service_conf_of(
+ cfg: NagiosConfig, config_cache: ConfigCache, hostname: HostName, description: ServiceName
+) -> ObjectSpec:
+ """Collect all extra configuration data for a service"""
+ service_spec: ObjectSpec = {}
+
+ # Add contact groups to the config only if the user has defined them.
+ # Otherwise inherit the contact groups from the host.
+ # "check-mk-notify" is always returned for rulebased notifications and
+ # the Nagios core and not defined by the user.
+ sercgr = config_cache.contactgroups_of_service(hostname, description)
+ if sercgr != ["check-mk-notify"]:
+ service_spec["contact_groups"] = ",".join(sercgr)
+ cfg.contactgroups_to_define.update(sercgr)
+
+ sergr = config_cache.servicegroups_of_service(hostname, description)
+ if sergr:
+ service_spec["service_groups"] = ",".join(sergr)
+ if config.define_servicegroups:
+ cfg.servicegroups_to_define.update(sergr)
+
+ return service_spec
diff --git a/cmk/base/core_nagios/_host_check_config.py b/cmk/base/core_nagios/_host_check_config.py
new file mode 100644
index 00000000000..9265a9d9b63
--- /dev/null
+++ b/cmk/base/core_nagios/_host_check_config.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+from dataclasses import dataclass
+
+from cmk.utils.hostaddress import HostAddress, HostName
+
+from cmk.discover_plugins import PluginLocation
+
+
+@dataclass(frozen=True, kw_only=True)
+class HostCheckConfig:
+ delay_precompile: bool
+ src: str
+ dst: str
+ verify_site_python: bool
+ locations: list[PluginLocation]
+ checks_to_load: list[str]
+ ipaddresses: dict[HostName, HostAddress]
+ ipv6addresses: dict[HostName, HostAddress]
+ hostname: HostName
diff --git a/cmk/base/core_nagios/_host_check_template.py b/cmk/base/core_nagios/_host_check_template.py
new file mode 100644
index 00000000000..8457cb4b0bd
--- /dev/null
+++ b/cmk/base/core_nagios/_host_check_template.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+# Copyright (C) 2024 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+
+import sys
+from importlib import import_module
+
+import cmk.utils.debug
+import cmk.utils.log
+from cmk.utils.config_path import LATEST_CONFIG
+from cmk.utils.exceptions import MKTerminate
+from cmk.utils.hostaddress import HostAddress, HostName
+
+from cmk.checkengine.submitters import get_submitter
+
+import cmk.base.check_api as check_api
+import cmk.base.config as config
+import cmk.base.obsolete_output as out
+import cmk.base.utils
+from cmk.base.api.agent_based.register import register_plugin_by_type
+from cmk.base.core_nagios import HostCheckConfig
+from cmk.base.modes.check_mk import mode_check
+
+from cmk.discover_plugins import PluginLocation
+
+# This will be replaced by the config generation when the template is instantiated.
+CONFIG = HostCheckConfig(
+ delay_precompile=False,
+ src="",
+ dst="",
+ verify_site_python=False,
+ locations=[PluginLocation("dummy.callsite.of.plugin.location")],
+ checks_to_load=[],
+ ipaddresses={HostName("somehost"): HostAddress("::")},
+ ipv6addresses={},
+ hostname=HostName("somehost"),
+)
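+# The assignment's right-hand side above (from " = HostCheckConfig(" through the
+# closing parenthesis) is matched by _INSTANTIATION_PATTERN in
+# _precompile_host_checks.py and replaced with the repr() of the real,
+# host-specific configuration.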
+
+
+def _self_compile(src: str, dst: str) -> None:
+ """replace symlink with precompiled python-code, if we are run for the first time"""
+ import os
+
+ if not os.path.islink(dst):
+ return
+
+ import py_compile
+
+ os.remove(dst)
+ py_compile.compile(src, dst, dst, True)
+ os.chmod(dst, 0o700)
+
+
+def _simple_arg_parsing(executable: str, *opts: str) -> tuple[int, bool]:
+ """Very basic argument parsing
+
+ It seems this is all we needed in the last decade.
+
+ >>> _simple_arg_parsing("/foo", "-vv", "-v", "-d")
+ (3, True)
+ """
+ if not set(opts).issubset({"-v", "-vv", "-d"}):
+ sys.stderr.write(f"usage: {executable} [-v | -vv] [-d]")
+ raise SystemExit(3)
+
+ j_opts = "".join(opts)
+ return j_opts.count("v"), "d" in j_opts
+
+
+def main() -> int:
+ loglevel, debug = _simple_arg_parsing(*sys.argv)
+
+ if CONFIG.verify_site_python and not sys.executable.startswith("/omd"):
+ sys.stdout.write("ERROR: Only executable with sites python\\n")
+ return 2
+
+ if CONFIG.delay_precompile:
+ _self_compile(CONFIG.src, CONFIG.dst)
+
+ for location in CONFIG.locations:
+ module = import_module(location.module)
+ if location.name is not None:
+ register_plugin_by_type(location, getattr(module, location.name))
+
+ cmk.base.utils.register_sigint_handler()
+ cmk.utils.log.setup_console_logging()
+
+ cmk.utils.log.logger.setLevel(cmk.utils.log.verbosity_to_log_level(loglevel))
+ if debug:
+ cmk.utils.debug.enable()
+
+ config.load_checks(check_api.get_check_api_context, CONFIG.checks_to_load)
+
+ config.load_packed_config(LATEST_CONFIG)
+
+ config.ipaddresses = CONFIG.ipaddresses
+ config.ipv6addresses = CONFIG.ipv6addresses
+
+ try:
+ return mode_check(
+ get_submitter,
+ {},
+ [CONFIG.hostname],
+ active_check_handler=lambda *args: None,
+ keepalive=False,
+ precompiled_host_check=True,
+ )
+ except MKTerminate:
+ out.output("\n", stream=sys.stderr)
+ return 1
+ except Exception as e:
+ import traceback
+
+ sys.stdout.write(
+ # status output message
+ f"UNKNOWN - Exception in precompiled check: {e} (details in long output)\n"
+ # generate traceback for long output
+ f"Traceback: {traceback.format_exc()}\n"
+ )
+ return 3
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/cmk/base/core_nagios/_precompile_host_checks.py b/cmk/base/core_nagios/_precompile_host_checks.py
new file mode 100644
index 00000000000..a5c7ea6d708
--- /dev/null
+++ b/cmk/base/core_nagios/_precompile_host_checks.py
@@ -0,0 +1,383 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+"""
+Precompiling creates one dedicated Python file per host, which contains
+only the code and information needed to execute all checks of that host.
+Static data that cannot change during the normal monitoring process is
+also precomputed and hard-coded. This saves substantial CPU resources
+compared to running Checkmk in ad-hoc mode (about 75%).
+"""
+
+import itertools
+import os
+import py_compile
+import re
+import socket
+import sys
+from pathlib import Path
+
+import cmk.utils.config_path
+import cmk.utils.password_store
+import cmk.utils.paths
+import cmk.utils.store as store
+import cmk.utils.tty as tty
+from cmk.utils.check_utils import section_name_of
+from cmk.utils.config_path import VersionedConfigPath
+from cmk.utils.hostaddress import HostAddress, HostName
+from cmk.utils.log import console
+
+from cmk.checkengine.checking import CheckPluginName
+from cmk.checkengine.inventory import InventoryPluginName
+
+import cmk.base.api.agent_based.register as agent_based_register
+import cmk.base.config as config
+import cmk.base.server_side_calls as server_side_calls
+import cmk.base.utils
+from cmk.base.config import ConfigCache
+from cmk.base.ip_lookup import IPStackConfig
+
+from cmk.discover_plugins import PluginLocation
+
+from ._host_check_config import HostCheckConfig
+
+_TEMPLATE_FILE = Path(__file__).parent / "_host_check_template.py"
+
+_INSTANTIATION_PATTERN = re.compile(
+ f" = {HostCheckConfig.__name__}\\(.*?\n\\)",
+ re.DOTALL,
+)
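+# A sketch of the substitution mechanism on hypothetical template text:
+#
+#   >>> text = "CONFIG = HostCheckConfig(\n    delay_precompile=False,\n)"
+#   >>> _INSTANTIATION_PATTERN.search(text).group(0)
+#   ' = HostCheckConfig(\n    delay_precompile=False,\n)'
+#
+# dump_precompiled_hostcheck() below replaces this match with the repr() of
+# the real HostCheckConfig instance.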
+
+
+class HostCheckStore:
+ """Caring about persistence of the precompiled host check files"""
+
+ @staticmethod
+ def host_check_file_path(config_path: VersionedConfigPath, hostname: HostName) -> Path:
+ return Path(config_path) / "host_checks" / hostname
+
+ @staticmethod
+ def host_check_source_file_path(config_path: VersionedConfigPath, hostname: HostName) -> Path:
+ # TODO: Use append_suffix(".py") once we are on Python 3.10
+ path = HostCheckStore.host_check_file_path(config_path, hostname)
+ return path.with_suffix(path.suffix + ".py")
+
+ def write(self, config_path: VersionedConfigPath, hostname: HostName, host_check: str) -> None:
+ compiled_filename = self.host_check_file_path(config_path, hostname)
+ source_filename = self.host_check_source_file_path(config_path, hostname)
+
+ store.makedirs(compiled_filename.parent)
+
+ store.save_text_to_file(source_filename, host_check)
+
+ # compile python (either now or delayed - see host_check code for delay_precompile handling)
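+        # When delayed, the compiled path is only a symlink pointing at the
+        # source ("<hostname>.py"); the generated host check replaces it with
+        # real byte code on its first execution (see _self_compile() in the
+        # template).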
+ if config.delay_precompile:
+ compiled_filename.symlink_to(hostname + ".py")
+ else:
+ py_compile.compile(
+ file=str(source_filename),
+ cfile=str(compiled_filename),
+ dfile=str(compiled_filename),
+ doraise=True,
+ )
+ os.chmod(compiled_filename, 0o750) # nosec B103 # BNS:c29b0e
+
+ console.verbose(f" ==> {compiled_filename}.\n", stream=sys.stderr)
+
+
+def precompile_hostchecks(config_path: VersionedConfigPath, config_cache: ConfigCache) -> None:
+ console.verbose("Creating precompiled host check config...\n")
+ hosts_config = config_cache.hosts_config
+
+ config.save_packed_config(config_path, config_cache)
+
+ console.verbose("Precompiling host checks...\n")
+
+ host_check_store = HostCheckStore()
+ for hostname in {
+ # Inconsistent with `create_config` above.
+ hn
+ for hn in itertools.chain(hosts_config.hosts, hosts_config.clusters)
+ if config_cache.is_active(hn) and config_cache.is_online(hn)
+ }:
+ try:
+ console.verbose(f"{tty.bold}{tty.blue}{hostname:<16}{tty.normal}:", stream=sys.stderr)
+ host_check = dump_precompiled_hostcheck(
+ config_cache,
+ config_path,
+ hostname,
+ )
+ if host_check is None:
+ console.verbose("(no Checkmk checks)\n")
+ continue
+
+ host_check_store.write(config_path, hostname, host_check)
+ except Exception as e:
+ if cmk.utils.debug.enabled():
+ raise
+ console.error(f"Error precompiling checks for host {hostname}: {e}\n")
+ sys.exit(5)
+
+
+def dump_precompiled_hostcheck( # pylint: disable=too-many-branches
+ config_cache: ConfigCache,
+ config_path: VersionedConfigPath,
+ hostname: HostName,
+ *,
+ verify_site_python: bool = True,
+) -> str | None:
+ (
+ needed_legacy_check_plugin_names,
+ needed_agent_based_check_plugin_names,
+ needed_agent_based_inventory_plugin_names,
+ ) = _get_needed_plugin_names(config_cache, hostname)
+
+ if hostname in config_cache.hosts_config.clusters:
+ assert config_cache.nodes(hostname)
+ for node in config_cache.nodes(hostname):
+ (
+ node_needed_legacy_check_plugin_names,
+ node_needed_agent_based_check_plugin_names,
+ node_needed_agent_based_inventory_plugin_names,
+ ) = _get_needed_plugin_names(config_cache, node)
+ needed_legacy_check_plugin_names.update(node_needed_legacy_check_plugin_names)
+ needed_agent_based_check_plugin_names.update(node_needed_agent_based_check_plugin_names)
+ needed_agent_based_inventory_plugin_names.update(
+ node_needed_agent_based_inventory_plugin_names
+ )
+
+ needed_legacy_check_plugin_names.update(
+ _get_required_legacy_check_sections(
+ needed_agent_based_check_plugin_names,
+ needed_agent_based_inventory_plugin_names,
+ )
+ )
+
+ if not any(
+ (
+ needed_legacy_check_plugin_names,
+ needed_agent_based_check_plugin_names,
+ needed_agent_based_inventory_plugin_names,
+ )
+ ):
+ return None
+
+ locations = _get_needed_agent_based_locations(
+ needed_agent_based_check_plugin_names,
+ needed_agent_based_inventory_plugin_names,
+ )
+
+ checks_to_load = sorted(_get_legacy_check_file_names_to_load(needed_legacy_check_plugin_names))
+
+ for check_plugin_name in sorted(needed_legacy_check_plugin_names):
+ console.verbose(f" {tty.green}{check_plugin_name}{tty.normal}", stream=sys.stderr)
+
+ # IP addresses
+ # FIXME:
+ # What we construct here does not have the type required to assign it to
+ # `config.ipaddresses` and `config.ipv6addresses` later.
+ # But maybe it is in fact not a problem, because `config.lookup_ip_address` happens to never
+ # return `None` the way we call it here.
+ ip_stack_config = ConfigCache.ip_stack_config(hostname)
+ needed_ipaddresses: dict[HostName, HostAddress | None] = {}
+ needed_ipv6addresses: dict[HostName, HostAddress | None] = {}
+ if hostname in config_cache.hosts_config.clusters:
+ assert config_cache.nodes(hostname)
+ for node in config_cache.nodes(hostname):
+ node_ip_stack_config = ConfigCache.ip_stack_config(node)
+ if IPStackConfig.IPv4 in node_ip_stack_config:
+ needed_ipaddresses[node] = config.lookup_ip_address(
+ config_cache, node, family=socket.AddressFamily.AF_INET
+ )
+
+ if IPStackConfig.IPv6 in node_ip_stack_config:
+ needed_ipv6addresses[node] = config.lookup_ip_address(
+ config_cache, node, family=socket.AddressFamily.AF_INET6
+ )
+
+ try:
+ if IPStackConfig.IPv4 in ip_stack_config:
+ needed_ipaddresses[hostname] = config.lookup_ip_address(
+ config_cache, hostname, family=socket.AddressFamily.AF_INET
+ )
+ except Exception:
+ pass
+
+ try:
+ if IPStackConfig.IPv6 in ip_stack_config:
+ needed_ipv6addresses[hostname] = config.lookup_ip_address(
+ config_cache, hostname, family=socket.AddressFamily.AF_INET6
+ )
+ except Exception:
+ pass
+ else:
+ if IPStackConfig.IPv4 in ip_stack_config:
+ needed_ipaddresses[hostname] = config.lookup_ip_address(
+ config_cache, hostname, family=socket.AddressFamily.AF_INET
+ )
+
+ if IPStackConfig.IPv6 in ip_stack_config:
+ needed_ipv6addresses[hostname] = config.lookup_ip_address(
+ config_cache, hostname, family=socket.AddressFamily.AF_INET6
+ )
+
+ # assign the values here, just to let the type checker do its job
+ host_check_config = HostCheckConfig(
+ delay_precompile=bool(config.delay_precompile),
+ src=str(HostCheckStore.host_check_source_file_path(config_path, hostname)),
+ dst=str(HostCheckStore.host_check_file_path(config_path, hostname)),
+ verify_site_python=verify_site_python,
+ locations=locations,
+ checks_to_load=checks_to_load,
+ ipaddresses=needed_ipaddresses, # type: ignore[arg-type] # see FIXME above.
+ ipv6addresses=needed_ipv6addresses, # type: ignore[arg-type] # see FIXME above.
+ hostname=hostname,
+ )
+
+ template = _TEMPLATE_FILE.read_text()
+ if (m_placeholder := _INSTANTIATION_PATTERN.search(template)) is None:
+ raise ValueError(f"broken template at: {_TEMPLATE_FILE})")
+
+ return template.replace(
+ m_placeholder.group(0),
+ f" = {host_check_config!r}",
+ )
+
+
+def _get_needed_plugin_names(
+ config_cache: ConfigCache, host_name: HostName
+) -> tuple[set[str], set[CheckPluginName], set[InventoryPluginName]]:
+ ssc_api_special_agents = {p.name for p in server_side_calls.load_special_agents()[1].values()}
+ needed_legacy_check_plugin_names = {
+ f"agent_{name}"
+ for name, _p in config_cache.special_agents(host_name)
+ if name not in ssc_api_special_agents
+ }
+
+ # Collect the needed check plug-in names using the host check table.
+ # Even auto-migrated checks must be on the list of needed *agent-based* plug-ins:
+ # in those cases the module attribute is `None`, so nothing is imported, but the
+ # plug-in must still be considered when determining the needed *section* plug-ins.
+ # This matters when the section is migrated but the check plug-ins are not.
+ needed_agent_based_check_plugin_names = config_cache.check_table(
+ host_name,
+ filter_mode=config.FilterMode.INCLUDE_CLUSTERED,
+ skip_ignored=False,
+ ).needed_check_names()
+
+ legacy_names = (_resolve_legacy_plugin_name(pn) for pn in needed_agent_based_check_plugin_names)
+ needed_legacy_check_plugin_names.update(ln for ln in legacy_names if ln is not None)
+
+ # Inventory plugins get passed parsed data these days.
+ # Load the required sections, or inventory plugins will crash upon unparsed data.
+ needed_agent_based_inventory_plugin_names: set[InventoryPluginName] = set()
+ if config_cache.hwsw_inventory_parameters(host_name).status_data_inventory:
+ for inventory_plugin in agent_based_register.iter_all_inventory_plugins():
+ needed_agent_based_inventory_plugin_names.add(inventory_plugin.name)
+ for parsed_section_name in inventory_plugin.sections:
+ # check if we must add the legacy check plugin:
+ legacy_check_name = config.legacy_check_plugin_names.get(
+ CheckPluginName(str(parsed_section_name))
+ )
+ if legacy_check_name is not None:
+ needed_legacy_check_plugin_names.add(legacy_check_name)
+
+ return (
+ needed_legacy_check_plugin_names,
+ needed_agent_based_check_plugin_names,
+ needed_agent_based_inventory_plugin_names,
+ )
+
+
+def _resolve_legacy_plugin_name(check_plugin_name: CheckPluginName) -> str | None:
+ legacy_name = config.legacy_check_plugin_names.get(check_plugin_name)
+ if legacy_name:
+ return legacy_name
+
+ if not check_plugin_name.is_management_name():
+ return None
+
+ # See if we must include a legacy plug-in from which we derived the given one:
+ # A management plug-in *could have been* created on the fly from a 'regular' legacy
+ # check plug-in. In that case, we must load that one as well.
+ plugin = agent_based_register.get_check_plugin(check_plugin_name)
+ if not plugin or plugin.location is not None:
+ # it does *not* result from a legacy plug-in if the location is not None
+ return None
+
+ # just try to get the legacy name of the 'regular' plugin:
+ return config.legacy_check_plugin_names.get(check_plugin_name.create_basic_name())
+
+
+def _get_legacy_check_file_names_to_load(
+ needed_check_plugin_names: set[str],
+) -> set[str]:
+ # We need to include all plug-ins that are referenced in the host's check table.
+ return {
+ filename
+ for check_plugin_name in needed_check_plugin_names
+ for filename in _find_check_plugins(check_plugin_name)
+ }
+
+
+def _find_check_plugins(checktype: str) -> set[str]:
+ """Find files to be included in precompile host check for a certain
+ check (for example df or mem.used).
+
+ In case of checks with a period (subchecks) we might have to include both "mem" and "mem.used".
+ The subcheck *may* be implemented in a separate file."""
+ return {
+ filename
+ for candidate in (section_name_of(checktype), checktype)
+ # in case there is no "main check" anymore, the lookup fails -> skip.
+ if (filename := config.legacy_check_plugin_files.get(candidate)) is not None
+ }
+
+
+def _get_needed_agent_based_locations(
+ check_plugin_names: set[CheckPluginName],
+ inventory_plugin_names: set[InventoryPluginName],
+) -> list[PluginLocation]:
+ modules = {
+ plugin.location
+ for plugin in [agent_based_register.get_check_plugin(p) for p in check_plugin_names]
+ if plugin is not None and plugin.location is not None
+ }
+ modules.update(
+ plugin.location
+ for plugin in [agent_based_register.get_inventory_plugin(p) for p in inventory_plugin_names]
+ if plugin is not None and plugin.location is not None
+ )
+ modules.update(
+ section.location
+ for section in agent_based_register.get_relevant_raw_sections(
+ check_plugin_names=check_plugin_names,
+ inventory_plugin_names=inventory_plugin_names,
+ ).values()
+ if section.location is not None
+ )
+
+ return sorted(modules, key=lambda l: (l.module, l.name or ""))
+
+
+def _get_required_legacy_check_sections(
+ check_plugin_names: set[CheckPluginName],
+ inventory_plugin_names: set[InventoryPluginName],
+) -> set[str]:
+ """
+ A new-style plug-in may have a dependency on a legacy check.
+ """
+ required_legacy_check_sections = set()
+ for section in agent_based_register.get_relevant_raw_sections(
+ check_plugin_names=check_plugin_names,
+ inventory_plugin_names=inventory_plugin_names,
+ ).values():
+ if section.location is None:
+ required_legacy_check_sections.add(str(section.name))
+ return required_legacy_check_sections
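
The compile-or-symlink step in `HostCheckStore.write` above is easy to miss. Below is a minimal, self-contained sketch of that mechanism using only the standard library; the `delay_precompile` flag mirrors the config option in the diff, while the paths, function name, and example host are illustrative assumptions, not Checkmk API.

```python
# Sketch: write a host check source file, then either byte-compile it now or
# defer compilation by symlinking the target name to the .py source.
import os
import py_compile
from pathlib import Path

def store_host_check(base: Path, hostname: str, source: str, *, delay_precompile: bool) -> Path:
    source_file = base / f"{hostname}.py"
    compiled_file = base / hostname
    base.mkdir(parents=True, exist_ok=True)
    source_file.write_text(source)

    if delay_precompile:
        # Defer compilation: the interpreter compiles the source on first use.
        compiled_file.symlink_to(hostname + ".py")
    else:
        # `dfile` controls the file name shown in tracebacks of the compiled code.
        py_compile.compile(
            file=str(source_file),
            cfile=str(compiled_file),
            dfile=str(compiled_file),
            doraise=True,
        )
        os.chmod(compiled_file, 0o750)
    return compiled_file

if __name__ == "__main__":
    import tempfile
    with tempfile.TemporaryDirectory() as tmp:
        print(store_host_check(Path(tmp), "myhost", "print('hello')\n", delay_precompile=True))
```

Symlinking the compiled name to the source is what makes delayed precompilation work: byte-compilation happens lazily on the target system the first time the host check runs.
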
diff --git a/cmk/base/default_config/__init__.py b/cmk/base/default_config/__init__.py
index 1a0007a939b..fd0e26bb877 100644
--- a/cmk/base/default_config/__init__.py
+++ b/cmk/base/default_config/__init__.py
@@ -3,7 +3,7 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-# Load plugin names into this module to have a single set of default settings
+# Load plug-in names into this module to have a single set of default settings
# pylint: disable=wildcard-import,unused-wildcard-import
from .base import * # noqa: F401 F403
diff --git a/cmk/base/default_config/base.py b/cmk/base/default_config/base.py
index 321d640cb8d..9c006c0aae9 100644
--- a/cmk/base/default_config/base.py
+++ b/cmk/base/default_config/base.py
@@ -22,7 +22,6 @@
from cmk.fetchers import IPMICredentials
-from cmk.checkengine.checking import CheckPluginNameStr
from cmk.checkengine.discovery import RediscoveryParameters
from cmk.checkengine.exitspec import ExitSpec
@@ -64,7 +63,6 @@
service_description_translation: list[RuleSpec[TranslationOptionsSpec]] = []
simulation_mode = False
fake_dns: str | None = None
-agent_simulator = False
perfdata_format: Literal["pnp", "standard"] = "pnp"
check_mk_perfdata_with_times = True
# TODO: Remove these options?
@@ -150,6 +148,8 @@ class _RRDConfig(TypedDict):
class _PeriodicDiscovery(TypedDict):
severity_unmonitored: SupportsInt
severity_vanished: SupportsInt
+ severity_changed_service_labels: SupportsInt
+ severity_changed_service_params: SupportsInt
severity_new_host_label: SupportsInt
check_interval: SupportsInt
inventory_rediscovery: RediscoveryParameters
@@ -182,13 +182,21 @@ class _PeriodicDiscovery(TypedDict):
}
static_checks: dict[str, list[RuleSpec[list[object]]]] = {}
check_parameters: list[RuleSpec[Any]] = []
-checkgroup_parameters: dict[str, list[RuleSpec[object]]] = {}
+checkgroup_parameters: dict[str, list[RuleSpec[Mapping[str, object]]]] = {}
# for HW/SW-Inventory
inv_parameters: dict[str, list[RuleSpec[Mapping[str, object]]]] = {}
+
+
# WATO variant for fully formalized checks
-active_checks: dict[str, list[RuleSpec[Mapping[str, object]]]] = {}
+# WATO's active check configurations are required to be Mapping[str, object] by the new ruleset API.
+# However, we still have legacy rulesets, which can be of any (basic Python) type.
+active_checks: dict[str, list[RuleSpec[object]]] = {}
# WATO variant for datasource_programs
-special_agents: dict[str, list[RuleSpec[Mapping[str, object]]]] = {}
+# WATO's special agent configurations are required to be Mapping[str, object] by the new ruleset API.
+# However, we still have legacy rulesets, which can be of any (basic Python) type.
+special_agents: dict[str, list[RuleSpec[object]]] = {}
+
+
# WATO variant for free-form custom checks without formalization
custom_checks: list[RuleSpec[dict[Any, Any]]] = []
all_hosts: list = []
@@ -302,7 +310,7 @@ class _NestedExitSpec(ExitSpec, total=False):
snmp_exclude_sections: list[RuleSpec[Mapping[str, Sequence[str]]]] = []
# Rulesets for parameters of notification scripts
notification_parameters: dict[str, list[RuleSpec[Mapping[str, object]]]] = {}
-use_new_descriptions_for: list[CheckPluginNameStr] = []
+use_new_descriptions_for: list[str] = []
# Custom user icons / actions to be configured
host_icons_and_actions: list[RuleSpec[str]] = []
# Custom user icons / actions to be configured
@@ -312,7 +320,7 @@ class _NestedExitSpec(ExitSpec, total=False):
# Assign tags to services
service_tag_rules: list[RuleSpec[Sequence[tuple[str, str]]]] = []
-# Rulesets for agent bakery
+# Rulesets for Agent Bakery
agent_config: dict[str, list[RuleSpec[Any]]] = {}
agent_bakery_logging: int | None = None
bake_agents_on_restart = False
diff --git a/cmk/base/diagnostics.py b/cmk/base/diagnostics.py
index e0f3f4fe32f..9cb749cfd22 100644
--- a/cmk/base/diagnostics.py
+++ b/cmk/base/diagnostics.py
@@ -18,6 +18,7 @@
import urllib.parse
import uuid
from collections.abc import Iterator, Mapping
+from contextlib import suppress
from datetime import datetime
from functools import cache
from pathlib import Path
@@ -47,6 +48,7 @@
get_checkmk_log_files_map,
OPT_CHECKMK_CONFIG_FILES,
OPT_CHECKMK_CORE_FILES,
+ OPT_CHECKMK_CRASH_REPORTS,
OPT_CHECKMK_LICENSING_FILES,
OPT_CHECKMK_LOG_FILES,
OPT_CHECKMK_OVERVIEW,
@@ -59,7 +61,7 @@
from cmk.utils.licensing.usage import deserialize_dump
from cmk.utils.log import console, section
from cmk.utils.site import omd_site
-from cmk.utils.structured_data import load_tree, SDRawTree
+from cmk.utils.structured_data import load_tree, SDNodeName, SDRawTree
from cmk.utils.user import UserId
if cmk_version.edition() in [
@@ -68,7 +70,7 @@
cmk_version.Edition.CCE,
cmk_version.Edition.CSE,
]:
- from cmk.base.cee.diagnostics import ( # type: ignore[import] # pylint: disable=no-name-in-module,import-error
+ from cmk.base.cee.diagnostics import ( # type: ignore[import,unused-ignore] # pylint: disable=no-name-in-module,import-error
cmc_specific_attrs,
)
else:
@@ -86,9 +88,9 @@ def create_diagnostics_dump(parameters: DiagnosticsOptionalParameters | None) ->
section.section_step("Creating diagnostics dump", verbose=False)
if dump.tarfile_created:
- console.info("%s\n", _format_filepath(dump.tarfile_path))
+ console.info(f"{_format_filepath(dump.tarfile_path)}\n")
else:
- console.info("%s%s\n", _GAP, "No dump")
+ console.info(f"{_GAP}No dump\n")
# .--format helper-------------------------------------------------------.
@@ -180,6 +182,9 @@ def _get_optional_elements(
if parameters.get(OPT_CHECKMK_OVERVIEW):
optional_elements.append(CheckmkOverviewDiagnosticsElement())
+ if parameters.get(OPT_CHECKMK_CRASH_REPORTS):
+ optional_elements.append(CrashDumpsDiagnosticsElement())
+
rel_checkmk_config_files = parameters.get(OPT_CHECKMK_CONFIG_FILES)
if rel_checkmk_config_files:
optional_elements.append(CheckmkConfigFilesDiagnosticsElement(rel_checkmk_config_files))
@@ -213,13 +218,14 @@ def create(self) -> None:
def _create_dump_folder(self) -> None:
section.section_step("Create dump folder")
- console.verbose("%s\n", _format_filepath(self.dump_folder))
+ console.verbose(f"{_format_filepath(self.dump_folder)}\n")
self.dump_folder.mkdir(parents=True, exist_ok=True)
def _create_tarfile(self) -> None:
- with tarfile.open(name=self.tarfile_path, mode="w:gz") as tar, tempfile.TemporaryDirectory(
- dir=self.dump_folder
- ) as tmp_dump_folder:
+ with (
+ tarfile.open(name=self.tarfile_path, mode="w:gz") as tar,
+ tempfile.TemporaryDirectory(dir=self.dump_folder) as tmp_dump_folder,
+ ):
for filepath in self._get_filepaths(Path(tmp_dump_folder)):
rel_path = str(filepath).replace(str(tmp_dump_folder), "")
tar.add(str(filepath), arcname=rel_path)
@@ -230,19 +236,19 @@ def _get_filepaths(self, tmp_dump_folder: Path) -> list[Path]:
filepaths = []
for element in self.elements:
- console.info("%s\n", _format_title(element.title))
- console.info("%s\n", _format_description(element.description))
+ console.info(f"{_format_title(element.title)}\n")
+ console.info(f"{_format_description(element.description)}\n")
try:
for filepath in element.add_or_get_files(tmp_dump_folder):
filepaths.append(filepath)
except DiagnosticsElementError as e:
- console.info("%s\n", _format_error(str(e)))
+ console.info(f"{_format_error(str(e))}\n")
continue
except Exception:
- console.info("%s\n", _format_error(traceback.format_exc()))
+ console.info(f"{_format_error(traceback.format_exc())}\n")
continue
return filepaths
@@ -261,7 +267,7 @@ def _cleanup_dump_folder(self) -> None:
"Cleanup dump folder", add_info="keep last %d dumps" % self._keep_num_dumps
)
for _mtime, filepath in dumps:
- console.verbose("%s\n", _format_filepath(filepath))
+ console.verbose(f"{_format_filepath(filepath)}\n")
self._remove_file(filepath)
def _remove_file(self, filepath: Path) -> None:
@@ -742,11 +748,11 @@ def title(self) -> str:
@property
def description(self) -> str:
return _(
- "Checkmk Agent, Number, version and edition of sites, Cluster host; "
- "Number of hosts, services, CMK Helper, Live Helper, "
- "Helper usage; State of daemons: Apache, Core, Crontag, "
+ "Checkmk Agent, Number, version and edition of sites, cluster host; "
+ "number of hosts, services, CMK Helper, Live Helper, "
+ "Helper usage; state of daemons: Apache, Core, Crontab, "
"DCD, Liveproxyd, MKEventd, MKNotifyd, RRDCached "
- "(Agent plugin mk_inventory needs to be installed)"
+ "(Agent plug-in mk_inventory needs to be installed)"
)
def _collect_infos(self) -> SDRawTree:
@@ -761,7 +767,11 @@ def _collect_infos(self) -> SDRawTree:
"No HW/SW inventory tree of '%s' found" % checkmk_server_name
)
- if not (node := tree.get_tree(("software", "applications", "check_mk"))):
+ if not (
+ node := tree.get_tree(
+ (SDNodeName("software"), SDNodeName("applications"), SDNodeName("check_mk"))
+ )
+ ):
raise DiagnosticsElementError(
"No HW/SW inventory node 'Software > Applications > Checkmk'"
)
@@ -810,6 +820,14 @@ def _copy_and_decrypt(self, rel_filepath: Path, tmp_dump_folder: Path) -> Path |
with Path(filepath).open("rb") as source:
json_data = json.dumps(deserialize_dump(source.read()), sort_keys=True, indent=4)
store.save_text_to_file(tmp_filepath, json_data)
+ # We 'encrypt' only licensing-related files at the moment, so there is currently
+ # no need to sanitize encrypted files.
+ elif str(rel_filepath) == "multisite.d/sites.mk":
+ sites = store.load_from_mk_file(filepath, "sites", {})
+ for detail in sites.values():
+ with suppress(KeyError):
+ detail["secret"] = "redacted"
+ store.save_to_mk_file(tmp_filepath, "sites", sites)
else:
shutil.copy(str(filepath), str(tmp_filepath))
@@ -979,13 +997,46 @@ def _get_response(
return requests.post( # nosec B113 # BNS:773085
url,
- data={
- "_username": "automation",
- "_secret": automation_secret,
- },
+ auth=("automation", automation_secret),
)
+class CrashDumpsDiagnosticsElement(ABCDiagnosticsElement):
+ @property
+ def ident(self) -> str:
+ return "crashdumps"
+
+ @property
+ def title(self) -> str:
+ return _("The latest crash dumps of each type")
+
+ @property
+ def description(self) -> str:
+ return _("Returns the latest crash dumps of each type as found in var/checkmk/crashes")
+
+ def add_or_get_files(self, tmp_dump_folder: Path) -> DiagnosticsElementFilepaths:
+ for category in cmk.utils.paths.crash_dir.glob("*"):
+ tmpdir = tmp_dump_folder.joinpath("var/check_mk/crashes/%s" % category.name)
+ tmpdir.mkdir(parents=True, exist_ok=True)
+
+ sorted_dumps = sorted(category.glob("*"), key=lambda path: int(path.stat().st_mtime))
+
+ if sorted_dumps:
+ # Determine the latest file of that category
+ dumpfile_path = sorted_dumps[-1]
+
+ # Pack the dump into a .tar.gz, so it can easily be uploaded
+ # to https://crash.checkmk.com/
+ tarfile_path = tmpdir.joinpath(dumpfile_path.name).with_suffix(".tar.gz")
+
+ with tarfile.open(name=tarfile_path, mode="w:gz") as tar:
+ for file in dumpfile_path.iterdir():
+ rel_path = str(file).replace(str(dumpfile_path) + "/", "")
+ tar.add(str(file), arcname=rel_path)
+
+ yield tarfile_path
+
+
class CMCDumpDiagnosticsElement(ABCDiagnosticsElement):
@property
def ident(self) -> str:
@@ -998,7 +1049,7 @@ def title(self) -> str:
@property
def description(self) -> str:
return _(
- "Configuration, status, and status history data of the CMC (Checkmk Microcore); "
+ "Configuration, status, and status history data of the CMC (Checkmk Micro Core); "
"cmcdump output of the status and config."
)
@@ -1020,7 +1071,7 @@ def add_or_get_files(self, tmp_dump_folder: Path) -> DiagnosticsElementFilepaths
)
except subprocess.CalledProcessError as e:
- console.info("%s\n", _format_error(str(e)))
+ console.info(f"{_format_error(str(e))}\n")
continue
filepath = tmpdir.joinpath(f"{self.ident}{suffix}")
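
The new `CrashDumpsDiagnosticsElement` above selects the newest dump per crash category and packs it for upload. Here is a standalone sketch of that selection-and-packing pattern with only the standard library; the directory layout and function name are assumptions for illustration.

```python
# Sketch: for each category below a crashes directory, pick the most recent
# dump directory (by mtime) and pack its contents into a .tar.gz archive.
import tarfile
from collections.abc import Iterator
from pathlib import Path

def pack_latest_crash_dumps(crash_dir: Path, out_dir: Path) -> Iterator[Path]:
    for category in crash_dir.glob("*"):
        dumps = sorted(category.glob("*"), key=lambda p: p.stat().st_mtime)
        if not dumps:
            continue
        latest = dumps[-1]  # newest dump of this category
        target_dir = out_dir / category.name
        target_dir.mkdir(parents=True, exist_ok=True)
        tar_path = (target_dir / latest.name).with_suffix(".tar.gz")
        with tarfile.open(name=tar_path, mode="w:gz") as tar:
            for file in latest.iterdir():
                # Store paths relative to the dump directory inside the archive.
                tar.add(file, arcname=file.name)
        yield tar_path
```

Packing each dump as a single .tar.gz keeps the result directly uploadable, which matches the intent stated in the diff's comment about https://crash.checkmk.com/.
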
diff --git a/cmk/base/dump_host.py b/cmk/base/dump_host.py
index 163d5f670dc..d36e8fe7697 100644
--- a/cmk/base/dump_host.py
+++ b/cmk/base/dump_host.py
@@ -5,30 +5,49 @@
import socket
import time
+from pathlib import Path
+from typing import Literal
+import cmk.utils.password_store
+import cmk.utils.paths
import cmk.utils.render
import cmk.utils.tty as tty
+from cmk.utils.exceptions import OnError
from cmk.utils.hostaddress import HostAddress, HostName, Hosts
from cmk.utils.paths import tmp_dir
+from cmk.utils.tags import ComputedDataSources
from cmk.utils.timeperiod import timeperiod_active
-from cmk.snmplib import SNMPBackendEnum
-
-from cmk.fetchers import IPMIFetcher, PiggybackFetcher, ProgramFetcher, SNMPFetcher, TCPFetcher
+from cmk.snmplib import SNMPBackendEnum, SNMPVersion
+
+from cmk.fetchers import (
+ IPMIFetcher,
+ PiggybackFetcher,
+ ProgramFetcher,
+ SNMPFetcher,
+ SNMPScanConfig,
+ TCPFetcher,
+ TLSConfig,
+)
from cmk.fetchers.filecache import FileCacheOptions, MaxAge
from cmk.checkengine.fetcher import SourceType
-from cmk.checkengine.legacy import LegacyCheckParameters
from cmk.checkengine.parameters import TimespecificParameters
+from cmk.checkengine.parser import NO_SELECTION
-import cmk.base.config as config
import cmk.base.core
import cmk.base.ip_lookup as ip_lookup
import cmk.base.obsolete_output as out
import cmk.base.sources as sources
-from cmk.base.config import ConfigCache
-from cmk.base.ip_lookup import AddressFamily
-from cmk.base.sources import Source
+from cmk.base.config import (
+ ConfigCache,
+ ConfiguredIPLookup,
+ handle_ip_lookup_failure,
+ lookup_ip_address,
+ lookup_mgmt_board_ip_address,
+)
+from cmk.base.ip_lookup import IPStackConfig
+from cmk.base.sources import SNMPFetcherConfig, Source
def dump_source(source: Source) -> str: # pylint: disable=too-many-branches
@@ -58,20 +77,20 @@ def dump_source(source: Source) -> str: # pylint: disable=too-many-branches
if snmp_config.snmp_backend is SNMPBackendEnum.STORED_WALK:
return "SNMP (use stored walk)"
- if snmp_config.is_snmpv3_host:
+ if snmp_config.snmp_version is SNMPVersion.V3:
credentials_text = "Credentials: '%s'" % ", ".join(snmp_config.credentials)
else:
credentials_text = "Community: %r" % snmp_config.credentials
- if snmp_config.is_snmpv3_host or snmp_config.is_bulkwalk_host:
- bulk = "yes"
- else:
- bulk = "no"
+ bulk = "yes" if snmp_config.use_bulkwalk else "no"
- return "%s (%s, Bulk walk: %s, Port: %d, Backend: %s)" % (
- "SNMP"
- if source.source_info().source_type is SourceType.HOST
- else "Management board - SNMP",
+ return "%s%s (%s, Bulkwalk: %s, Port: %d, Backend: %s)" % (
+ (
+ "SNMP"
+ if source.source_info().source_type is SourceType.HOST
+ else "Management board - SNMP"
+ ),
+ snmp_config.snmp_version.name.lower(),
credentials_text,
bulk,
snmp_config.port,
@@ -85,40 +104,51 @@ def dump_source(source: Source) -> str: # pylint: disable=too-many-branches
return type(fetcher).__name__
-def _agent_description(config_cache: ConfigCache, host_name: HostName) -> str:
- if config_cache.is_all_agents_host(host_name):
+def _agent_description(cds: ComputedDataSources) -> str:
+ if cds.is_all_agents_host:
return "Normal Checkmk agent, all configured special agents"
- if config_cache.is_all_special_agents_host(host_name):
+ if cds.is_all_special_agents_host:
return "No Checkmk agent, all configured special agents"
- if config_cache.is_tcp_host(host_name):
+ if cds.is_tcp:
return "Normal Checkmk agent, or special agent if configured"
return "No agent"
-def dump_host(config_cache: ConfigCache, hostname: HostName) -> None:
+def dump_host(
+ config_cache: ConfigCache,
+ hostname: HostName,
+ *,
+ simulation_mode: bool,
+) -> None:
# pylint: disable=too-many-branches
out.output("\n")
hosts_config = config_cache.hosts_config
if hostname in hosts_config.clusters:
- nodes = config_cache.nodes_of(hostname)
- if nodes is None:
- raise RuntimeError()
+ assert config_cache.nodes(hostname)
color = tty.bgmagenta
- add_txt = " (cluster of " + (", ".join(nodes)) + ")"
+ add_txt = " (cluster of " + (", ".join(config_cache.nodes(hostname))) + ")"
else:
color = tty.bgblue
add_txt = ""
out.output("%s%s%s%-78s %s\n" % (color, tty.bold, tty.white, hostname + add_txt, tty.normal))
- ipaddress = _ip_address_for_dump_host(
- config_cache, hosts_config, hostname, family=config_cache.default_address_family(hostname)
+ ip_stack_config = ConfigCache.ip_stack_config(hostname)
+ ipaddress = (
+ None
+ if ip_stack_config is IPStackConfig.NO_IP
+ else _ip_address_for_dump_host(
+ config_cache,
+ hosts_config,
+ hostname,
+ family=config_cache.default_address_family(hostname),
+ )
)
addresses: str | None = ""
- if ConfigCache.address_family(hostname) is not AddressFamily.DUAL_STACK:
+ if ip_stack_config is not IPStackConfig.DUAL_STACK:
addresses = ipaddress
else:
try:
@@ -155,11 +185,10 @@ def dump_host(config_cache: ConfigCache, hostname: HostName) -> None:
out.output(tty.yellow + "Labels: " + tty.normal + ", ".join(labels) + "\n")
if hostname in hosts_config.clusters:
- parents_list = config_cache.nodes_of(hostname)
- if parents_list is None:
- raise RuntimeError()
+ parents_list = config_cache.nodes(hostname)
else:
parents_list = config_cache.parents(hostname)
+
if parents_list:
out.output(
tty.yellow + "Parents: " + tty.normal + ", ".join(parents_list) + "\n"
@@ -179,18 +208,60 @@ def dump_host(config_cache: ConfigCache, hostname: HostName) -> None:
+ "\n"
)
+ oid_cache_dir = Path(cmk.utils.paths.snmp_scan_cache_dir)
+ stored_walk_path = Path(cmk.utils.paths.snmpwalks_dir)
+ walk_cache_path = Path(cmk.utils.paths.var_dir) / "snmp_cache"
+ file_cache_path = Path(cmk.utils.paths.data_source_cache_dir)
+ tcp_cache_path = Path(cmk.utils.paths.tcp_cache_dir)
+ tls_config = TLSConfig(
+ cas_dir=Path(cmk.utils.paths.agent_cas_dir),
+ ca_store=Path(cmk.utils.paths.agent_cert_store),
+ site_crt=Path(cmk.utils.paths.site_cert_file),
+ )
+ used_password_store = cmk.utils.password_store.pending_password_store_path()
+ passwords = cmk.utils.password_store.load(used_password_store)
agenttypes = [
dump_source(source)
for source in sources.make_sources(
hostname,
ipaddress,
- ConfigCache.address_family(hostname),
+ ConfigCache.ip_stack_config(hostname),
+ fetcher_factory=config_cache.fetcher_factory(),
+ snmp_fetcher_config=SNMPFetcherConfig(
+ scan_config=SNMPScanConfig(
+ on_error=OnError.RAISE,
+ missing_sys_description=config_cache.missing_sys_description(hostname),
+ oid_cache_dir=oid_cache_dir,
+ ),
+ selected_sections=NO_SELECTION,
+ backend_override=None,
+ stored_walk_path=stored_walk_path,
+ walk_cache_path=walk_cache_path,
+ ),
is_cluster=hostname in hosts_config.clusters,
file_cache_options=FileCacheOptions(),
- config_cache=config_cache,
- simulation_mode=config.simulation_mode,
+ simulation_mode=simulation_mode,
file_cache_max_age=MaxAge.zero(),
- snmp_backend_override=None,
+ snmp_backend=config_cache.get_snmp_backend(hostname),
+ file_cache_path=file_cache_path,
+ tcp_cache_path=tcp_cache_path,
+ tls_config=tls_config,
+ computed_datasources=config_cache.computed_datasources(hostname),
+ datasource_programs=config_cache.datasource_programs(hostname),
+ tag_list=config_cache.tag_list(hostname),
+ management_ip=lookup_mgmt_board_ip_address(config_cache, hostname),
+ management_protocol=config_cache.management_protocol(hostname),
+ special_agent_command_lines=config_cache.special_agent_command_lines(
+ hostname,
+ ipaddress,
+ password_store_file=used_password_store,
+ passwords=passwords,
+ ip_address_of=ConfiguredIPLookup(
+ config_cache, error_handler=handle_ip_lookup_failure
+ ),
+ ),
+ agent_connection_mode=config_cache.agent_connection_mode(hostname),
+ check_mk_check_interval=config_cache.check_mk_check_interval(hostname),
)
]
@@ -198,7 +269,7 @@ def dump_host(config_cache: ConfigCache, hostname: HostName) -> None:
agenttypes.append("PING only")
out.output(tty.yellow + "Agent mode: " + tty.normal)
- out.output(_agent_description(config_cache, hostname) + "\n")
+ out.output(_agent_description(config_cache.computed_datasources(hostname)) + "\n")
out.output(tty.yellow + "Type of agent: " + tty.normal)
if len(agenttypes) == 1:
@@ -227,15 +298,14 @@ def dump_host(config_cache: ConfigCache, hostname: HostName) -> None:
tty.print_table(headers, colors, table_data, " ")
-def _evaluate_params(params: LegacyCheckParameters | TimespecificParameters) -> str:
- if not isinstance(params, TimespecificParameters):
- return repr(params)
-
- if params.is_constant():
- return repr(params.evaluate(timeperiod_active))
- return "Timespecific parameters at {}: {!r}".format(
- cmk.utils.render.date_and_time(time.time()),
- params.evaluate(timeperiod_active),
+def _evaluate_params(params: TimespecificParameters) -> str:
+ return (
+ repr(params.evaluate(timeperiod_active))
+ if params.is_constant()
+ else "Timespecific parameters at {}: {!r}".format(
+ cmk.utils.render.date_and_time(time.time()),
+ params.evaluate(timeperiod_active),
+ )
)
@@ -244,10 +314,10 @@ def _ip_address_for_dump_host(
hosts_config: Hosts,
host_name: HostName,
*,
- family: socket.AddressFamily,
+ family: Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6],
) -> HostAddress | None:
try:
- return config.lookup_ip_address(config_cache, host_name, family=family)
+ return lookup_ip_address(config_cache, host_name, family=family)
except Exception:
return (
HostAddress("")
diff --git a/cmk/base/errorhandling/_crash.py b/cmk/base/errorhandling/_crash.py
index c66c6a30e41..032418b94aa 100644
--- a/cmk/base/errorhandling/_crash.py
+++ b/cmk/base/errorhandling/_crash.py
@@ -15,12 +15,13 @@
import cmk.utils.paths
from cmk.utils.agentdatatype import AgentRawData
from cmk.utils.hostaddress import HostName
+from cmk.utils.piggyback import get_source_hostnames
from cmk.utils.sectionname import SectionName
from cmk.utils.servicename import ServiceName
from cmk.snmplib import SNMPBackendEnum
-from cmk.checkengine.checking import CheckPluginName, CheckPluginNameStr
+from cmk.checkengine.checking import CheckPluginName
CrashReportStore = crash_reporting.CrashReportStore
@@ -62,7 +63,7 @@ def create_check_crash_dump(
host_name: HostName,
service_name: ServiceName,
*,
- plugin_name: CheckPluginNameStr | CheckPluginName,
+ plugin_name: str | CheckPluginName,
plugin_kwargs: Mapping[str, Any],
is_cluster: bool,
is_enforced: bool,
@@ -90,9 +91,9 @@ def create_check_crash_dump(
},
type_specific_attributes={
"snmp_info": _read_snmp_info(host_name),
- "agent_output": _read_agent_output(host_name)
- if rtc_package is None
- else rtc_package,
+ "agent_output": (
+ _read_agent_output(host_name) if rtc_package is None else rtc_package
+ ),
},
)
CrashReportStore().save(crash)
@@ -144,7 +145,7 @@ def type() -> Literal["check"]:
def _read_snmp_info(hostname: str) -> bytes | None:
- cache_path = Path(cmk.utils.paths.data_source_cache_dir, "snmp", hostname)
+ cache_path = Path(cmk.utils.paths.snmpwalks_dir, hostname)
try:
with cache_path.open(mode="rb") as f:
return f.read()
@@ -153,11 +154,20 @@ def _read_snmp_info(hostname: str) -> bytes | None:
return None
-def _read_agent_output(hostname: str) -> AgentRawData | None:
+def _read_agent_output(hostname: HostName) -> AgentRawData | None:
cache_path = Path(cmk.utils.paths.tcp_cache_dir, hostname)
- try:
- with cache_path.open(mode="rb") as f:
- return AgentRawData(f.read())
- except OSError:
- pass
+ piggyback_cache_path = Path(cmk.utils.paths.piggyback_dir, hostname)
+ cache_paths = [cache_path] + [
+ piggyback_cache_path / source_hostname for source_hostname in get_source_hostnames(hostname)
+ ]
+ agent_outputs = []
+ for cache_path in cache_paths:
+ try:
+ with cache_path.open(mode="rb") as f:
+ agent_outputs.append(f.read())
+ except OSError:
+ pass
+
+ if agent_outputs:
+ return AgentRawData(b"\n".join(agent_outputs))
return None
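
The rewritten `_read_agent_output` above merges the regular TCP cache with any piggyback files for the host. A self-contained sketch of that merge, assuming a simplified cache layout (the real code discovers piggyback sources via `get_source_hostnames`):

```python
# Sketch: read the host's TCP cache plus all piggyback files and join
# whatever could be read; return None if nothing was found.
from pathlib import Path

def read_agent_output(tcp_cache: Path, piggyback_dir: Path, hostname: str) -> bytes | None:
    candidates = [tcp_cache / hostname] + sorted((piggyback_dir / hostname).glob("*"))
    outputs = []
    for path in candidates:
        try:
            outputs.append(path.read_bytes())
        except OSError:
            # Missing caches are expected; just skip them.
            pass
    return b"\n".join(outputs) if outputs else None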
diff --git a/cmk/base/errorhandling/_handler.py b/cmk/base/errorhandling/_handler.py
index 92ff194d47f..4109c7226a4 100644
--- a/cmk/base/errorhandling/_handler.py
+++ b/cmk/base/errorhandling/_handler.py
@@ -21,7 +21,6 @@
from cmk.snmplib import SNMPBackendEnum
-from cmk.checkengine.checking import CheckPluginNameStr
from cmk.checkengine.checkresults import ActiveCheckResult
from cmk.checkengine.exitspec import ExitSpec
from cmk.checkengine.submitters import ServiceState
@@ -36,7 +35,7 @@ def __init__(
*,
host_name: HostName,
service_name: ServiceName,
- plugin_name: CheckPluginNameStr,
+ plugin_name: str,
is_cluster: bool,
snmp_backend: SNMPBackendEnum,
keepalive: bool,
@@ -83,7 +82,7 @@ def _handle_failure(
*,
host_name: HostName,
service_name: ServiceName,
- plugin_name: CheckPluginNameStr,
+ plugin_name: str,
is_cluster: bool,
snmp_backend: SNMPBackendEnum,
keepalive: bool,
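
The signatures visible in this hunk belong to the check-result error handler, which pairs with `create_check_crash_dump` from `_crash.py` above. A loose sketch of that general pattern, under the assumption that the handler wraps a check callable; the names below are illustrative, not the module's API:

```python
# Sketch: run a check, and on an unexpected exception write a crash dump and
# return an UNKNOWN-style result instead of propagating the crash.
from collections.abc import Callable

def run_with_error_handling(
    check: Callable[[], tuple[int, str]],
    *,
    plugin_name: str,
    write_crash_dump: Callable[[str, BaseException], None],
) -> tuple[int, str]:
    try:
        return check()
    except Exception as exc:
        write_crash_dump(plugin_name, exc)  # stand-in for create_check_crash_dump
        return 3, f"check plug-in {plugin_name!r} crashed: {exc}"

state, summary = run_with_error_handling(
    lambda: (0, "OK - everything fine"),
    plugin_name="example",
    write_crash_dump=lambda name, exc: None,
)
print(state, summary)
```
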
diff --git a/cmk/base/events.py b/cmk/base/events.py
index 4c137f911ab..1f5a185d83e 100644
--- a/cmk/base/events.py
+++ b/cmk/base/events.py
@@ -13,7 +13,7 @@
import sys
import time
from collections.abc import Callable, Iterable, Mapping
-from typing import Any
+from typing import Any, cast
from urllib.parse import quote, urlencode
import livestatus
@@ -21,22 +21,23 @@
import cmk.utils.daemon
import cmk.utils.debug
from cmk.utils.hostaddress import HostName
-from cmk.utils.notify_types import EventContext, EventRule
+from cmk.utils.http_proxy_config import HTTPProxyConfig
+from cmk.utils.notify import read_notify_host_file
+from cmk.utils.notify_types import EnrichedEventContext, EventContext, EventRule
from cmk.utils.regex import regex
+from cmk.utils.rulesets.tuple_rulesets import hosttags_match_taglist, in_extraconf_servicelist
from cmk.utils.servicename import ServiceName
from cmk.utils.site import omd_site
from cmk.utils.tags import TagID
from cmk.utils.timeperiod import check_timeperiod, cleanup_timeperiod_caches
import cmk.base.config as config
-import cmk.base.core
-from cmk.base.core_config import read_notify_host_file
ContactList = list # TODO Improve this
# We actually want to use Matcher for all our matchers, but mypy is too dumb to
# use that for function types, see https://github.com/python/mypy/issues/1641.
-Matcher = Callable[[EventRule, EventContext], str | None]
+Matcher = Callable[[EventRule, EventContext, bool], str | None]
logger = logging.getLogger("cmk.base.events")
@@ -46,12 +47,13 @@ def _send_reply_ready() -> None:
sys.stdout.flush()
-def event_keepalive( # pylint: disable=too-many-branches
- event_function: Callable,
- call_every_loop: Callable | None = None,
+def event_keepalive(
+ event_function: Callable[[EventContext], object],
+ call_every_loop: Callable[[], object] | None = None,
loop_interval: int | None = None,
- shutdown_function: Callable | None = None,
+ shutdown_function: Callable[[], object] | None = None,
) -> None:
+ # pylint: disable=too-many-branches
last_config_timestamp = config_timestamp()
# Send signal that we are ready to receive the next event, but
@@ -249,17 +251,20 @@ def livestatus_fetch_contacts(host: HostName, service: ServiceName | None) -> Co
return None # We must allow notifications without Livestatus access
-def add_rulebased_macros(raw_context: EventContext, contacts_needed: bool) -> None:
+def add_rulebased_macros(
+ raw_context: EventContext,
+ ensure_nagios: Callable[[str], object],
+ contacts_needed: bool,
+) -> None:
# For the rule based notifications we need the list of contacts
# an object has. The CMC does send this in the macro "CONTACTS"
if "CONTACTS" not in raw_context and contacts_needed:
- # Ensure that we don't reach this when the Microcore is enabled. Triggering this logic
- # with the Microcore might result in dead locks.
- if config.is_cmc():
- raise RuntimeError(
- "Missing 'CONTACTS' in raw notification context. It should always "
- "be available when using the Microcore."
- )
+ # Ensure that we don't reach this when the Micro Core is enabled. Triggering this logic
+ # with the Micro Core might result in dead locks.
+ ensure_nagios(
+ "Missing 'CONTACTS' in raw notification context. It should always "
+ "be available when using the Micro Core."
+ )
contact_list = livestatus_fetch_contacts(
raw_context["HOSTNAME"], raw_context.get("SERVICEDESC")
@@ -277,105 +282,107 @@ def add_rulebased_macros(raw_context: EventContext, contacts_needed: bool) -> No
raw_context["CONTACTNAME"] = "check-mk-notify"
-def complete_raw_context( # pylint: disable=too-many-branches
+def complete_raw_context(
raw_context: EventContext,
+ ensure_nagios: Callable[[str], object],
with_dump: bool,
contacts_needed: bool,
-) -> None:
+) -> EnrichedEventContext:
+ # pylint: disable=too-many-branches
"""Extend the raw notification context
This ensures that all raw contexts processed in the notification code has specific variables
set. Add a few further helper variables that are useful in notification and alert plugins.
"""
-
raw_keys = list(raw_context)
# If a remote site has send the spool file to the central site and the user
# uses "Analyze ruleset", the key "OMD_SITE" is already present. So there is
# no need to enrich the raw_context again. This also avoids overwriting
# of sitespecific values.
+ enriched_context = cast(EnrichedEventContext, raw_context.copy())
if "OMD_SITE" in raw_context:
- return
+ return enriched_context
try:
- raw_context["OMD_SITE"] = omd_site()
+ enriched_context["OMD_SITE"] = omd_site()
- raw_context["WHAT"] = "SERVICE" if raw_context.get("SERVICEDESC") else "HOST"
+ enriched_context["WHAT"] = "SERVICE" if enriched_context.get("SERVICEDESC") else "HOST"
- raw_context.setdefault("MONITORING_HOST", socket.gethostname())
- raw_context.setdefault("OMD_ROOT", str(cmk.utils.paths.omd_root))
+ enriched_context.setdefault("MONITORING_HOST", socket.gethostname())
+ enriched_context.setdefault("OMD_ROOT", str(cmk.utils.paths.omd_root))
# The Checkmk Micro Core sends the MICROTIME and no other time stamps. We add
# a few Nagios-like variants in order to be compatible
- if "MICROTIME" in raw_context:
- microtime = int(raw_context["MICROTIME"])
+ if "MICROTIME" in enriched_context:
+ microtime = int(enriched_context["MICROTIME"])
timestamp = float(microtime) / 1000000.0
broken = time.localtime(timestamp)
- raw_context["DATE"] = time.strftime("%Y-%m-%d", broken)
- raw_context["SHORTDATETIME"] = time.strftime("%Y-%m-%d %H:%M:%S", broken)
- raw_context["LONGDATETIME"] = time.strftime("%a %b %d %H:%M:%S %Z %Y", broken)
- elif "MICROTIME" not in raw_context:
+ enriched_context["DATE"] = time.strftime("%Y-%m-%d", broken)
+ enriched_context["SHORTDATETIME"] = time.strftime("%Y-%m-%d %H:%M:%S", broken)
+ enriched_context["LONGDATETIME"] = time.strftime("%a %b %d %H:%M:%S %Z %Y", broken)
+ elif "MICROTIME" not in enriched_context:
# In case the microtime is not provided, e.g. when using Nagios, then set it here
# from the current time. We could look for "LONGDATETIME" and calculate the timestamp
# from that one, but we try to keep this simple here.
- raw_context["MICROTIME"] = "%d" % (time.time() * 1000000)
+ enriched_context["MICROTIME"] = "%d" % (time.time() * 1000000)
- raw_context["HOSTURL"] = "/check_mk/index.py?start_url=view.py?%s" % quote(
+ enriched_context["HOSTURL"] = "/check_mk/index.py?start_url=view.py?%s" % quote(
urlencode(
[
("view_name", "hoststatus"),
- ("host", raw_context["HOSTNAME"]),
- ("site", raw_context["OMD_SITE"]),
+ ("host", enriched_context["HOSTNAME"]),
+ ("site", enriched_context["OMD_SITE"]),
]
)
)
- if raw_context["WHAT"] == "SERVICE":
- raw_context["SERVICEURL"] = "/check_mk/index.py?start_url=view.py?%s" % quote(
+ if enriched_context["WHAT"] == "SERVICE":
+ enriched_context["SERVICEURL"] = "/check_mk/index.py?start_url=view.py?%s" % quote(
urlencode(
[
("view_name", "service"),
- ("host", raw_context["HOSTNAME"]),
- ("service", raw_context["SERVICEDESC"]),
- ("site", raw_context["OMD_SITE"]),
+ ("host", enriched_context["HOSTNAME"]),
+ ("service", enriched_context["SERVICEDESC"]),
+ ("site", enriched_context["OMD_SITE"]),
]
)
)
# Relative Timestamps for several macros
- if (value := raw_context.get("LASTHOSTSTATECHANGE")) is not None:
- raw_context["LASTHOSTSTATECHANGE_REL"] = get_readable_rel_date(value)
- if (value := raw_context.get("LASTSERVICESTATECHANGE")) is not None:
- raw_context["LASTSERVICESTATECHANGE_REL"] = get_readable_rel_date(value)
- if (value := raw_context.get("LASTHOSTUP")) is not None:
- raw_context["LASTHOSTUP_REL"] = get_readable_rel_date(value)
- if (value := raw_context.get("LASTSERVICEOK")) is not None:
- raw_context["LASTSERVICEOK_REL"] = get_readable_rel_date(value)
+ if (value := enriched_context.get("LASTHOSTSTATECHANGE")) is not None:
+ enriched_context["LASTHOSTSTATECHANGE_REL"] = get_readable_rel_date(value)
+ if (value := enriched_context.get("LASTSERVICESTATECHANGE")) is not None:
+ enriched_context["LASTSERVICESTATECHANGE_REL"] = get_readable_rel_date(value)
+ if (value := enriched_context.get("LASTHOSTUP")) is not None:
+ enriched_context["LASTHOSTUP_REL"] = get_readable_rel_date(value)
+ if (value := enriched_context.get("LASTSERVICEOK")) is not None:
+ enriched_context["LASTSERVICEOK_REL"] = get_readable_rel_date(value)
- add_rulebased_macros(raw_context, contacts_needed)
+ add_rulebased_macros(enriched_context, ensure_nagios, contacts_needed)
# For custom notifications the number is set to 0 by the core (Nagios and CMC). We force at least
# number 1 here, so that rules with conditions on numbers do not fail (the minimum is 1 here)
- if raw_context.get("HOSTNOTIFICATIONNUMBER") == "0":
+ if enriched_context.get("HOSTNOTIFICATIONNUMBER") == "0":
if with_dump:
logger.info("Setting HOSTNOTIFICATIONNUMBER for notification from '0' to '1'")
- raw_context["HOSTNOTIFICATIONNUMBER"] = "1"
- if raw_context.get("SERVICENOTIFICATIONNUMBER") == "0":
+ enriched_context["HOSTNOTIFICATIONNUMBER"] = "1"
+ if enriched_context.get("SERVICENOTIFICATIONNUMBER") == "0":
if with_dump:
logger.info("Setting SERVICENOTIFICATIONNUMBER for notification from '0' to '1'")
- raw_context["SERVICENOTIFICATIONNUMBER"] = "1"
+ enriched_context["SERVICENOTIFICATIONNUMBER"] = "1"
# Add the previous hard state. This is necessary for notification rules that depend on certain transitions,
# like OK -> WARN (but not CRIT -> WARN). The CMC sends PREVIOUSHOSTHARDSTATE and PREVIOUSSERVICEHARDSTATE.
# Nagios does not have this information and we try to deduct this.
- if "PREVIOUSHOSTHARDSTATE" not in raw_context and "LASTHOSTSTATE" in raw_context:
- prev_state = raw_context["LASTHOSTSTATE"]
+ if "PREVIOUSHOSTHARDSTATE" not in enriched_context and "LASTHOSTSTATE" in enriched_context:
+ prev_state = enriched_context["LASTHOSTSTATE"]
# When the attempts are > 1 then the last state could be identical with
# the current one, e.g. both critical. In that case we assume the
# previous hard state to be OK.
- if prev_state == raw_context["HOSTSTATE"]:
+ if prev_state == enriched_context["HOSTSTATE"]:
prev_state = "UP"
- elif "HOSTATTEMPT" not in raw_context or (
- "HOSTATTEMPT" in raw_context and raw_context["HOSTATTEMPT"] != "1"
+ elif "HOSTATTEMPT" not in enriched_context or (
+ "HOSTATTEMPT" in enriched_context and enriched_context["HOSTATTEMPT"] != "1"
):
# Here We do not know. The transition might be OK -> WARN -> CRIT and
# the initial OK is completely lost. We use the artificial state "?"
@@ -383,36 +390,39 @@ def complete_raw_context( # pylint: disable=too-many-branches
# notification is being sent out. But when the new state is UP, then
# we know that the previous state was a hard state (otherwise there
# would not have been any notification)
- if raw_context["HOSTSTATE"] != "UP":
+ if enriched_context["HOSTSTATE"] != "UP":
prev_state = "?"
logger.info("Previous host hard state not known. Allowing all states.")
- raw_context["PREVIOUSHOSTHARDSTATE"] = prev_state
+ enriched_context["PREVIOUSHOSTHARDSTATE"] = prev_state
# Same for services
- if raw_context["WHAT"] == "SERVICE" and "PREVIOUSSERVICEHARDSTATE" not in raw_context:
- prev_state = raw_context["LASTSERVICESTATE"]
- if prev_state == raw_context["SERVICESTATE"]:
+ if (
+ enriched_context["WHAT"] == "SERVICE"
+ and "PREVIOUSSERVICEHARDSTATE" not in enriched_context
+ ):
+ prev_state = enriched_context["LASTSERVICESTATE"]
+ if prev_state == enriched_context["SERVICESTATE"]:
prev_state = "OK"
- elif "SERVICEATTEMPT" not in raw_context or (
- "SERVICEATTEMPT" in raw_context and raw_context["SERVICEATTEMPT"] != "1"
+ elif "SERVICEATTEMPT" not in enriched_context or (
+ "SERVICEATTEMPT" in enriched_context and enriched_context["SERVICEATTEMPT"] != "1"
):
if raw_context["SERVICESTATE"] != "OK":
prev_state = "?"
logger.info("Previous service hard state not known. Allowing all states.")
- raw_context["PREVIOUSSERVICEHARDSTATE"] = prev_state
+ enriched_context["PREVIOUSSERVICEHARDSTATE"] = prev_state
# Add short variants for state names (at most 4 characters)
- for ctx_key, ctx_value in list(raw_context.items()):
+ for ctx_key, ctx_value in list(enriched_context.items()):
assert isinstance(ctx_value, str)
if ctx_key.endswith("STATE"):
# dynamical keys are bad...
- raw_context[ctx_key[:-5] + "SHORTSTATE"] = ctx_value[:4] # type: ignore[literal-required]
+ enriched_context[ctx_key[:-5] + "SHORTSTATE"] = ctx_value[:4] # type: ignore[literal-required]
- if raw_context["WHAT"] == "SERVICE":
- raw_context["SERVICEFORURL"] = quote(raw_context["SERVICEDESC"])
- raw_context["HOSTFORURL"] = quote(raw_context["HOSTNAME"])
+ if enriched_context["WHAT"] == "SERVICE":
+ enriched_context["SERVICEFORURL"] = quote(enriched_context["SERVICEDESC"])
+ enriched_context["HOSTFORURL"] = quote(enriched_context["HOSTNAME"])
- _update_raw_context_with_labels(raw_context)
+ _update_enriched_context_with_labels(enriched_context)
except Exception as e:
logger.info("Error on completing raw context: %s", e)
@@ -422,23 +432,25 @@ def complete_raw_context( # pylint: disable=too-many-branches
sorted(
[
f" {key}={value}"
- for key, value in raw_context.items()
+ for key, value in enriched_context.items()
if key not in raw_keys
]
)
)
logger.info("Computed variables:\n%s", log_context)
+ return enriched_context
+
-def _update_raw_context_with_labels(raw_context: EventContext) -> None:
- labels = read_notify_host_file(raw_context["HOSTNAME"])
+def _update_enriched_context_with_labels(enriched_context: EnrichedEventContext) -> None:
+ labels = read_notify_host_file(enriched_context["HOSTNAME"])
for k, v in labels.host_labels.items():
# Dynamically added keys...
- raw_context["HOSTLABEL_" + k] = v # type: ignore[literal-required]
- if raw_context["WHAT"] == "SERVICE":
- for k, v in labels.service_labels.get(raw_context["SERVICEDESC"], {}).items():
+ enriched_context["HOSTLABEL_" + k] = v # type: ignore[literal-required]
+ if enriched_context["WHAT"] == "SERVICE":
+ for k, v in labels.service_labels.get(enriched_context["SERVICEDESC"], {}).items():
# Dynamically added keys...
- raw_context["SERVICELABEL_" + k] = v # type: ignore[literal-required]
+ enriched_context["SERVICELABEL_" + k] = v # type: ignore[literal-required]
# TODO: Use cmk.utils.render.*?
@@ -459,16 +471,19 @@ def get_readable_rel_date(timestamp: Any) -> str:
# While the rest of the world increasingly embraces lambdas and folds, the
# Python world moves backwards in time. :-P So let's introduce this helper...
def apply_matchers(
- matchers: Iterable[Matcher], rule: EventRule, context: EventContext
+ matchers: Iterable[Matcher],
+ rule: EventRule,
+ context: EnrichedEventContext | EventContext,
+ analyse: bool,
) -> str | None:
for matcher in matchers:
- result = matcher(rule, context)
+ result = matcher(rule, context, analyse)
if result is not None:
return result
return None
-def event_match_rule(rule: EventRule, context: EventContext) -> str | None:
+def event_match_rule(rule: EventRule, context: EventContext, analyse: bool = False) -> str | None:
return apply_matchers(
[
event_match_site,
@@ -492,10 +507,15 @@ def event_match_rule(rule: EventRule, context: EventContext) -> str | None:
],
rule,
context,
+ analyse,
)
-def event_match_site(rule: EventRule, context: EventContext) -> str | None:
+def event_match_site(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_site" not in rule:
return None
@@ -512,7 +532,11 @@ def event_match_site(rule: EventRule, context: EventContext) -> str | None:
return None
-def event_match_folder(rule: EventRule, context: EventContext) -> str | None:
+def event_match_folder(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_folder" in rule:
mustfolder = rule["match_folder"]
mustpath = mustfolder.split("/")
@@ -539,11 +563,15 @@ def event_match_folder(rule: EventRule, context: EventContext) -> str | None:
return None
-def event_match_hosttags(rule: EventRule, context: EventContext) -> str | None:
+def event_match_hosttags(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
required = rule.get("match_hosttags")
if required:
tags = [TagID(ident) for ident in context.get("HOSTTAGS", "").split()]
- if not config.hosttags_match_taglist(tags, (TagID(_) for _ in required)):
+ if not hosttags_match_taglist(tags, (TagID(_) for _ in required)):
return "The host's tags {} do not match the required tags {}".format(
"|".join(tags),
"|".join(required),
@@ -551,11 +579,19 @@ def event_match_hosttags(rule: EventRule, context: EventContext) -> str | None:
return None
-def event_match_servicegroups_fixed(rule: EventRule, context: EventContext) -> str | None:
+def event_match_servicegroups_fixed(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
return _event_match_servicegroups(rule, context, is_regex=False)
-def event_match_servicegroups_regex(rule: EventRule, context: EventContext) -> str | None:
+def event_match_servicegroups_regex(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
return _event_match_servicegroups(rule, context, is_regex=True)
@@ -628,11 +664,19 @@ def _event_match_servicegroups( # pylint: disable=too-many-branches
return None
-def event_match_exclude_servicegroups_fixed(rule: EventRule, context: EventContext) -> str | None:
+def event_match_exclude_servicegroups_fixed(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
return _event_match_exclude_servicegroups(rule, context, is_regex=False)
-def event_match_exclude_servicegroups_regex(rule: EventRule, context: EventContext) -> str | None:
+def event_match_exclude_servicegroups_regex(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
return _event_match_exclude_servicegroups(rule, context, is_regex=True)
@@ -674,7 +718,11 @@ def _event_match_exclude_servicegroups(
return None
-def event_match_contacts(rule: EventRule, context: EventContext) -> str | None:
+def event_match_contacts(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_contacts" not in rule:
return None
@@ -694,7 +742,11 @@ def event_match_contacts(rule: EventRule, context: EventContext) -> str | None:
)
-def event_match_contactgroups(rule: EventRule, context: EventContext) -> str | None:
+def event_match_contactgroups(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
required_groups = rule.get("match_contactgroups")
if required_groups is None:
return None
@@ -721,7 +773,7 @@ def event_match_contactgroups(rule: EventRule, context: EventContext) -> str | N
)
-def event_match_hostgroups(rule: EventRule, context: EventContext) -> str | None:
+def event_match_hostgroups(rule: EventRule, context: EventContext, _analyse: bool) -> str | None:
required_groups = rule.get("match_hostgroups")
if required_groups is not None:
hgn = context.get("HOSTGROUPNAMES")
@@ -746,7 +798,11 @@ def event_match_hostgroups(rule: EventRule, context: EventContext) -> str | None
return None
-def event_match_hosts(rule: EventRule, context: EventContext) -> str | None:
+def event_match_hosts(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_hosts" in rule:
hostlist = rule["match_hosts"]
if context["HOSTNAME"] not in hostlist:
@@ -757,19 +813,27 @@ def event_match_hosts(rule: EventRule, context: EventContext) -> str | None:
return None
-def event_match_exclude_hosts(rule: EventRule, context: EventContext) -> str | None:
+def event_match_exclude_hosts(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if context["HOSTNAME"] in rule.get("match_exclude_hosts", []):
return "The host's name '%s' is on the list of excluded hosts" % context["HOSTNAME"]
return None
-def event_match_services(rule: EventRule, context: EventContext) -> str | None:
+def event_match_services(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_services" in rule:
if context["WHAT"] != "SERVICE":
return "The rule specifies a list of services, but this is a host notification."
servicelist = rule["match_services"]
service = context["SERVICEDESC"]
- if not config.in_extraconf_servicelist(servicelist, service):
+ if not in_extraconf_servicelist(servicelist, service):
return (
"The service's description '%s' does not match by the list of "
"allowed services (%s)" % (service, ", ".join(servicelist))
@@ -777,12 +841,16 @@ def event_match_services(rule: EventRule, context: EventContext) -> str | None:
return None
-def event_match_exclude_services(rule: EventRule, context: EventContext) -> str | None:
+def event_match_exclude_services(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if context["WHAT"] != "SERVICE":
return None
excludelist = rule.get("match_exclude_services", [])
service = context["SERVICEDESC"]
- if config.in_extraconf_servicelist(excludelist, service):
+ if in_extraconf_servicelist(excludelist, service):
return (
"The service's description '%s' matches the list of excluded services"
% context["SERVICEDESC"]
@@ -790,7 +858,11 @@ def event_match_exclude_services(rule: EventRule, context: EventContext) -> str
return None
-def event_match_plugin_output(rule: EventRule, context: EventContext) -> str | None:
+def event_match_plugin_output(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_plugin_output" in rule:
r = regex(rule["match_plugin_output"])
@@ -799,14 +871,18 @@ def event_match_plugin_output(rule: EventRule, context: EventContext) -> str | N
else:
output = context["HOSTOUTPUT"]
if not r.search(output):
- return "The expression '{}' cannot be found in the plugin output '{}'".format(
+ return "The expression '{}' cannot be found in the plug-in output '{}'".format(
rule["match_plugin_output"],
output,
)
return None
-def event_match_checktype(rule: EventRule, context: EventContext) -> str | None:
+def event_match_checktype(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_checktype" in rule:
if context["WHAT"] != "SERVICE":
return "The rule specifies a list of Check_MK plugins, but this is a host notification."
@@ -816,14 +892,23 @@ def event_match_checktype(rule: EventRule, context: EventContext) -> str | None:
plugin = command[9:]
allowed = rule["match_checktype"]
if plugin not in allowed:
- return "The Check_MK plugin '{}' is not on the list of allowed plugins ({})".format(
+ return "The Check_MK plug-in '{}' is not on the list of allowed plugins ({})".format(
plugin,
", ".join(allowed),
)
return None
-def event_match_timeperiod(rule: EventRule, _context: EventContext) -> str | None:
+def event_match_timeperiod(
+ rule: EventRule,
+ _context: EventContext,
+ analyse: bool,
+) -> str | None:
+ # Don't check the time period when analysing notifications; in that case
+ # this is done within notify.rbn_match_timeperiod.
+ if analyse:
+ return None
+
if "match_timeperiod" in rule:
timeperiod = rule["match_timeperiod"]
if timeperiod != "24X7" and not check_timeperiod(timeperiod):
@@ -831,7 +916,11 @@ def event_match_timeperiod(rule: EventRule, _context: EventContext) -> str | Non
return None
-def event_match_servicelevel(rule: EventRule, context: EventContext) -> str | None:
+def event_match_servicelevel(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_sl" in rule:
from_sl, to_sl = rule["match_sl"]
if context["WHAT"] == "SERVICE" and context.get("SVC_SL", "").isdigit():
@@ -860,15 +949,18 @@ def add_context_to_environment(
# PARAMETER_LVL1_1_VALUE = 42
# PARAMETER_LVL1_2_VALUE = 13
def add_to_event_context(
- context: EventContext | dict[str, str], prefix: str, param: object
+ context: EventContext | dict[str, str],
+ prefix: str,
+ param: object,
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
) -> None:
if isinstance(param, (list, tuple)):
if all(isinstance(p, str) for p in param):
# TODO: Why on earth do we have these arbitrary differences? Can we unify this?
suffix, separator = ("S", " ") if isinstance(param, list) else ("", "\t")
- add_to_event_context(context, prefix + suffix, separator.join(param))
+ add_to_event_context(context, prefix + suffix, separator.join(param), get_http_proxy)
for nr, value in enumerate(param, start=1):
- add_to_event_context(context, f"{prefix}_{nr}", value)
+ add_to_event_context(context, f"{prefix}_{nr}", value, get_http_proxy)
elif isinstance(param, dict): # NOTE: We only handle Dict[str, Any].
for key, value in param.items():
varname = f"{prefix}_{key.upper()}"
@@ -876,8 +968,8 @@ def add_to_event_context(
# Compatibility for 1.5 pushover explicitly configured proxy URL format
if isinstance(value, str):
value = ("url", value)
- value = config.get_http_proxy(value).serialize()
- add_to_event_context(context, varname, value)
+ value = get_http_proxy(value).serialize()
+ add_to_event_context(context, varname, value, get_http_proxy)
elif isinstance(param, (str, int, float)): # NOTE: bool is a subclass of int!
# Dynamically added keys...
context[prefix] = str(param) # type: ignore[literal-required]
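The hunk above threads a get_http_proxy callable through every recursive call instead of reading the global config module; the flattening semantics themselves are unchanged. A minimal standalone sketch of those semantics, with the proxy handling stubbed out and parameter names that are purely illustrative:

    # Sketch of the flattening logic in add_to_event_context (proxy handling omitted).
    def flatten(context, prefix, param):
        if isinstance(param, (list, tuple)):
            if all(isinstance(p, str) for p in param):
                # Lists join with " " and gain an "S" suffix; tuples join with "\t".
                suffix, sep = ("S", " ") if isinstance(param, list) else ("", "\t")
                flatten(context, prefix + suffix, sep.join(param))
            for nr, value in enumerate(param, start=1):
                flatten(context, f"{prefix}_{nr}", value)
        elif isinstance(param, dict):
            for key, value in param.items():
                flatten(context, f"{prefix}_{key.upper()}", value)
        elif isinstance(param, (str, int, float)):
            context[prefix] = str(param)

    ctx = {}
    flatten(ctx, "PARAMETER", {"value": 42, "tags": ["a", "b"]})
    assert ctx == {
        "PARAMETER_VALUE": "42",
        "PARAMETER_TAGSS": "a b",  # joined form: prefix + "S"
        "PARAMETER_TAGS_1": "a",
        "PARAMETER_TAGS_2": "b",
    }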
diff --git a/cmk/base/export.py b/cmk/base/export.py
index 560e82ec01d..f198ea2805c 100644
--- a/cmk/base/export.py
+++ b/cmk/base/export.py
@@ -2,6 +2,8 @@
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
+
+# pylint: disable=protected-access
"""
Utility module for common code between the CMK base and other parts
of Check_MK. The GUI is e.g. accessing this module for gathering things
@@ -14,7 +16,7 @@
from cmk.utils.rulesets.ruleset_matcher import RulesetMatcher, RulesetMatchObject
from cmk.utils.servicename import Item, ServiceName
-from cmk.checkengine.checking import CheckPluginName, CheckPluginNameStr
+from cmk.checkengine.checking import CheckPluginName
import cmk.base.config as config
@@ -34,9 +36,7 @@ def reset_config() -> None:
_config_loaded = False
-def service_description(
- hostname: HostName, check_plugin_name: CheckPluginNameStr, item: Item
-) -> str:
+def service_description(hostname: HostName, check_plugin_name: str, item: Item) -> str:
return config.service_description(
get_ruleset_matcher(), hostname, CheckPluginName(check_plugin_name), item
)
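For callers, the change to cmk.base.export is typing only: the plug-in name is now a plain str and gets wrapped into CheckPluginName inside the function. A hedged usage sketch (host and plug-in names are illustrative, and a loaded Checkmk config is required for this to actually resolve):

    from cmk.base import export

    # Previously the argument had to be typed as CheckPluginNameStr;
    # now any plain str is accepted and validated internally.
    descr = export.service_description("myhost", "df", "/")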
diff --git a/cmk/base/ip_lookup.py b/cmk/base/ip_lookup.py
index 560f528e2c0..85f890c6272 100644
--- a/cmk/base/ip_lookup.py
+++ b/cmk/base/ip_lookup.py
@@ -10,7 +10,7 @@
from collections.abc import Iterable, Iterator, Mapping, MutableMapping, Sequence
from contextlib import contextmanager
from pathlib import Path
-from typing import Any, NamedTuple
+from typing import Any, assert_never, Literal, NamedTuple
import cmk.utils.debug
import cmk.utils.paths
@@ -30,26 +30,16 @@
@enum.unique
-class AddressFamily(enum.IntFlag):
+class IPStackConfig(enum.IntFlag):
NO_IP = enum.auto()
IPv4 = enum.auto()
IPv6 = enum.auto()
DUAL_STACK = IPv4 | IPv6
- @classmethod
- def from_socket(cls, /, af: socket.AddressFamily) -> AddressFamily:
- match af:
- case socket.AF_INET:
- return cls.IPv4
- case socket.AF_INET6:
- return cls.IPv6
- case _:
- raise ValueError(af)
-
class IPLookupConfig(NamedTuple):
hostname: HostName
- address_family: AddressFamily
+ ip_stack_config: IPStackConfig
is_snmp_host: bool
snmp_backend: SNMPBackendEnum
default_address_family: socket.AddressFamily
@@ -57,21 +47,28 @@ class IPLookupConfig(NamedTuple):
is_dyndns_host: bool
-def fallback_ip_for(family: socket.AddressFamily | AddressFamily) -> HostAddress:
- if isinstance(family, socket.AddressFamily):
- family = AddressFamily.from_socket(family)
+def fallback_ip_for(
+ family: Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6]
+) -> HostAddress:
match family:
- case AddressFamily.IPv4:
+ case socket.AddressFamily.AF_INET:
return HostAddress("0.0.0.0")
- case AddressFamily.IPv6:
- return HostAddress("::")
- case _:
- # TODO(ml): [IPv6] This ignores `default_address_family()`
- # and falls back to IPv6, where IPv4 is the default almost
- # everywhere else. Using "0.0.0.0" or "::" only makes sense
- # for a server anyway, so the users of this function are
- # most likely misconfigured.
+ case socket.AddressFamily.AF_INET6:
return HostAddress("::")
+ case other:
+ assert_never(other)
+
+
+def _local_ip_for(
+ family: Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6]
+) -> HostAddress:
+ match family:
+ case socket.AddressFamily.AF_INET:
+ return HostAddress("127.0.0.1")
+ case socket.AddressFamily.AF_INET6:
+ return HostAddress("::1")
+ case other:
+ assert_never(other)
def enforce_fake_dns(address: HostAddress) -> None:
@@ -93,7 +90,7 @@ def enforce_localhost() -> None:
def lookup_ip_address(
*,
host_name: HostName | HostAddress,
- family: AddressFamily | socket.AddressFamily,
+ family: Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6],
configured_ip_address: HostAddress | None,
simulation_mode: bool,
is_snmp_usewalk_host: bool,
@@ -109,12 +106,9 @@ def lookup_ip_address(
if override_dns:
return override_dns
- if isinstance(family, socket.AddressFamily):
- family = AddressFamily.from_socket(family)
-
    # Honor simulation mode and usewalk hosts. Never contact the network.
if simulation_mode or _enforce_localhost or is_snmp_usewalk_host:
- return HostAddress("::1") if AddressFamily.IPv6 in family else HostAddress("127.0.0.1")
+ return _local_ip_for(family)
# check if IP address is hard coded by the user
if configured_ip_address:
@@ -125,15 +119,9 @@ def lookup_ip_address(
if is_dyndns_host:
return host_name
- if family is AddressFamily.NO_IP:
- return None
-
return cached_dns_lookup(
host_name,
- # NO_IP handled in guard.
- # TODO(ml): [IPv6] Default to IPv4 for DUAL_STACK. Why doesn't this
- # obey `default_address_family()` or handle both addresses in that case?
- family=socket.AF_INET if AddressFamily.IPv4 in family else socket.AF_INET6,
+ family=family,
force_file_cache_renewal=force_file_cache_renewal,
)
@@ -157,9 +145,9 @@ def cached_dns_lookup(
2) inner layer: see _file_cached_dns_lookup
"""
- cache: dict[
- tuple[HostName | HostAddress, socket.AddressFamily], HostAddress | None
- ] = cache_manager.obtain_cache("cached_dns_lookup")
+ cache: dict[tuple[HostName | HostAddress, socket.AddressFamily], HostAddress | None] = (
+ cache_manager.obtain_cache("cached_dns_lookup")
+ )
cache_id = hostname, family
# Address has already been resolved in prior call to this function?
@@ -256,9 +244,11 @@ def deserialize(self, raw: bytes) -> Mapping[IPLookupCacheId, HostAddress]:
assert isinstance(loaded_object, dict)
return {
- (HostName(k), socket.AF_INET) # old pre IPv6 style
- if isinstance(k, str)
- else (HostName(k[0]), {4: socket.AF_INET, 6: socket.AF_INET6}[k[1]]): HostAddress(v)
+ (
+ (HostName(k), socket.AF_INET) # old pre IPv6 style
+ if isinstance(k, str)
+ else (HostName(k[0]), {4: socket.AF_INET, 6: socket.AF_INET6}[k[1]])
+ ): HostAddress(v)
for k, v in loaded_object.items()
}
@@ -420,9 +410,29 @@ def update_dns_cache(
def _annotate_family(
ip_lookup_configs: Iterable[IPLookupConfig],
-) -> Iterable[tuple[HostName, IPLookupConfig, socket.AddressFamily]]:
+) -> Iterable[
+ tuple[
+ HostName,
+ IPLookupConfig,
+ Literal[socket.AddressFamily.AF_INET, socket.AddressFamily.AF_INET6],
+ ]
+]:
for host_config in ip_lookup_configs:
- if AddressFamily.IPv4 in host_config.address_family:
- yield host_config.hostname, host_config, socket.AF_INET
- if AddressFamily.IPv6 in host_config.address_family:
- yield host_config.hostname, host_config, socket.AF_INET6
+ if IPStackConfig.IPv4 in host_config.ip_stack_config:
+ yield host_config.hostname, host_config, socket.AddressFamily.AF_INET
+ if IPStackConfig.IPv6 in host_config.ip_stack_config:
+ yield host_config.hostname, host_config, socket.AddressFamily.AF_INET6
+
+
+class CollectFailedHosts:
+ """Collects hosts for which IP lookup fails"""
+
+ def __init__(self) -> None:
+ self._failed_ip_lookups: dict[HostName, Exception] = {}
+
+ @property
+ def failed_ip_lookups(self) -> Mapping[HostName, Exception]:
+ return self._failed_ip_lookups
+
+ def __call__(self, host_name: HostName, exc: Exception) -> None:
+ self._failed_ip_lookups[host_name] = exc
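Two things land in ip_lookup.py: AddressFamily is renamed to IPStackConfig with identical flag semantics, and CollectFailedHosts packages the error-callback pattern used when updating the DNS cache. A standalone sketch of both, with the flag values written out explicitly and an illustrative host name and exception:

    import enum
    import socket

    class IPStackConfig(enum.IntFlag):
        # Explicit values mirroring the renamed enum above.
        NO_IP = 1
        IPv4 = 2
        IPv6 = 4
        DUAL_STACK = IPv4 | IPv6

    def annotate_family(cfg):
        # Same membership tests as _annotate_family: dual-stack yields both families.
        if IPStackConfig.IPv4 in cfg:
            yield socket.AddressFamily.AF_INET
        if IPStackConfig.IPv6 in cfg:
            yield socket.AddressFamily.AF_INET6

    assert list(annotate_family(IPStackConfig.DUAL_STACK)) == [
        socket.AddressFamily.AF_INET,
        socket.AddressFamily.AF_INET6,
    ]

    class CollectFailedHosts:
        # Mirrors the callback class added above.
        def __init__(self):
            self.failed_ip_lookups = {}
        def __call__(self, host_name, exc):
            self.failed_ip_lookups[host_name] = exc

    on_error = CollectFailedHosts()
    on_error("db-host", OSError("DNS lookup failed"))
    assert "db-host" in on_error.failed_ip_lookups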
diff --git a/cmk/base/legacy_checks/3ware_disks.py b/cmk/base/legacy_checks/3ware_disks.py
index 2d38bd8a007..2f4649fd457 100644
--- a/cmk/base/legacy_checks/3ware_disks.py
+++ b/cmk/base/legacy_checks/3ware_disks.py
@@ -35,7 +35,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_3ware_disks(info):
diff --git a/cmk/base/legacy_checks/3ware_info.py b/cmk/base/legacy_checks/3ware_info.py
index 91fc9a5f5d8..e925f08a765 100644
--- a/cmk/base/legacy_checks/3ware_info.py
+++ b/cmk/base/legacy_checks/3ware_info.py
@@ -26,7 +26,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_3ware_info(info):
diff --git a/cmk/base/legacy_checks/3ware_units.py b/cmk/base/legacy_checks/3ware_units.py
index 7266ba9b011..edcb6d68883 100644
--- a/cmk/base/legacy_checks/3ware_units.py
+++ b/cmk/base/legacy_checks/3ware_units.py
@@ -24,7 +24,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_3ware_units(info):
diff --git a/cmk/base/legacy_checks/__init__.py b/cmk/base/legacy_checks/__init__.py
index 67165df1c35..c0d5ce1ee83 100644
--- a/cmk/base/legacy_checks/__init__.py
+++ b/cmk/base/legacy_checks/__init__.py
@@ -12,6 +12,6 @@
# This folder is part of a namespace package, that can be shadowed/extended
# using the local/ hierarchy.
#
-# Do not change the following line, is is picked up by the build process:
+# Do not change the following line, it is picked up by the build process:
# check_mk.make: do-not-deploy
#
diff --git a/cmk/base/legacy_checks/acme_agent_sessions.py b/cmk/base/legacy_checks/acme_agent_sessions.py
index b7f0ae34831..69bf3621ef1 100644
--- a/cmk/base/legacy_checks/acme_agent_sessions.py
+++ b/cmk/base/legacy_checks/acme_agent_sessions.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.acme import DETECT_ACME
diff --git a/cmk/base/legacy_checks/acme_certificates.py b/cmk/base/legacy_checks/acme_certificates.py
index 47f31667141..db0beba9010 100644
--- a/cmk/base/legacy_checks/acme_certificates.py
+++ b/cmk/base/legacy_checks/acme_certificates.py
@@ -6,11 +6,10 @@
import time
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.acme import DETECT_ACME
# .1.3.6.1.4.1.9148.3.9.1.10.1.3.65.1 rootca
@@ -34,10 +33,10 @@ def check_acme_certificates(item, params, info):
state = 0
time_diff = expire_time - now
- if expire_time <= now:
- age_info = "%s ago" % get_age_human_readable(abs(time_diff))
+ if time_diff < 0:
+ age_info = "%s ago" % render.timespan(-time_diff)
else:
- age_info = "%s to go" % get_age_human_readable(time_diff)
+ age_info = "%s to go" % render.timespan(time_diff)
infotext = f"Expire: {expire} ({age_info})"
@@ -48,8 +47,8 @@ def check_acme_certificates(item, params, info):
state = 1
if state:
infotext += " (warn/crit below {}/{})".format(
- get_age_human_readable(warn),
- get_age_human_readable(crit),
+ render.timespan(warn),
+ render.timespan(crit),
)
else:
state = 2
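The sign handling changes subtly here: the old code branched on expire_time <= now, the new one on time_diff < 0 and negates the value before rendering, since render.timespan expects a non-negative number of seconds. A sketch of the branch with a stand-in formatter (the exact render.timespan output format is not reproduced here):

    def describe_age(time_diff, fmt=lambda s: "%d s" % s):
        # A negative diff means the certificate has already expired.
        if time_diff < 0:
            return "%s ago" % fmt(-time_diff)
        return "%s to go" % fmt(time_diff)

    assert describe_age(-90) == "90 s ago"
    assert describe_age(3600) == "3600 s to go"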
diff --git a/cmk/base/legacy_checks/acme_fan.py b/cmk/base/legacy_checks/acme_fan.py
index f556c16f47e..1996e6cfaa1 100644
--- a/cmk/base/legacy_checks/acme_fan.py
+++ b/cmk/base/legacy_checks/acme_fan.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.acme import acme_environment_states
+from cmk.base.check_legacy_includes.acme import ACME_ENVIRONMENT_STATES
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.acme import DETECT_ACME
# .1.3.6.1.4.1.9148.3.3.1.4.1.1.3.1 MAIN FAN1 --> ACMEPACKET-ENVMON-MIB::apEnvMonFanStatusDescr.1
@@ -32,7 +31,7 @@ def inventory_acme_fan(info):
def check_acme_fan(item, params, info):
for descr, value_str, state in info:
if item == descr:
- dev_state, dev_state_readable = acme_environment_states[state]
+ dev_state, dev_state_readable = ACME_ENVIRONMENT_STATES[state]
return dev_state, f"Status: {dev_state_readable}, Speed: {value_str}%"
return None
diff --git a/cmk/base/legacy_checks/acme_powersupply.py b/cmk/base/legacy_checks/acme_powersupply.py
index d676c7ced2b..d13f374f74d 100644
--- a/cmk/base/legacy_checks/acme_powersupply.py
+++ b/cmk/base/legacy_checks/acme_powersupply.py
@@ -5,11 +5,10 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.acme import acme_environment_states
+from cmk.base.check_legacy_includes.acme import ACME_ENVIRONMENT_STATES
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.acme import DETECT_ACME
# .1.3.6.1.4.1.9148.3.3.1.5.1.1.3.1 Power Supply A --> ACMEPACKET-ENVMON-MIB::apEnvMonPowerSupplyStatusDescr.1
@@ -25,7 +24,7 @@ def inventory_acme_powersupply(info):
def check_acme_powersupply(item, _no_params, info):
for descr, state in info:
if item == descr:
- dev_state, dev_state_readable = acme_environment_states[state]
+ dev_state, dev_state_readable = ACME_ENVIRONMENT_STATES[state]
return dev_state, "Status: %s" % dev_state_readable
return None
diff --git a/cmk/base/legacy_checks/acme_realm.py b/cmk/base/legacy_checks/acme_realm.py
index 7fdba016be5..5ad325e00cd 100644
--- a/cmk/base/legacy_checks/acme_realm.py
+++ b/cmk/base/legacy_checks/acme_realm.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.acme import DETECT_ACME
diff --git a/cmk/base/legacy_checks/acme_sbc.py b/cmk/base/legacy_checks/acme_sbc.py
index 4c4d68b270e..bb05cd615a4 100644
--- a/cmk/base/legacy_checks/acme_sbc.py
+++ b/cmk/base/legacy_checks/acme_sbc.py
@@ -36,12 +36,17 @@
# Dec 8 11:43:00.227: Standby to BecomingActive, active peer xxx has timed out, no arp reply from active in 250ms
# Mar 16 10:13:33.248: Active to RelinquishingActive
+from collections.abc import Mapping
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import StringTable
-def acme_sbc_parse_function(string_table):
+Section = tuple[Mapping[str, str], Mapping[str, str]]
+
+
+def acme_sbc_parse_function(string_table: StringTable) -> Section:
states = {}
settings = {}
for line in string_table:
diff --git a/cmk/base/legacy_checks/acme_sbc_snmp.py b/cmk/base/legacy_checks/acme_sbc_snmp.py
index c620f244d0e..15108c04c45 100644
--- a/cmk/base/legacy_checks/acme_sbc_snmp.py
+++ b/cmk/base/legacy_checks/acme_sbc_snmp.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.acme import DETECT_ACME
# comNET GmbH, Fabian Binder
diff --git a/cmk/base/legacy_checks/acme_temp.py b/cmk/base/legacy_checks/acme_temp.py
index 9520cf8fa72..778ccb0be23 100644
--- a/cmk/base/legacy_checks/acme_temp.py
+++ b/cmk/base/legacy_checks/acme_temp.py
@@ -5,12 +5,11 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.acme import acme_environment_states
+from cmk.base.check_legacy_includes.acme import ACME_ENVIRONMENT_STATES
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.acme import DETECT_ACME
# .1.3.6.1.4.1.9148.3.3.1.3.1.1.2.1 0 --> ACMEPACKET-ENVMON-MIB::apEnvMonTemperatureStatusType.1
@@ -46,7 +45,7 @@ def inventory_acme_temp(info):
def check_acme_temp(item, params, info):
for descr, value_str, state in info:
if item == descr:
- dev_state, dev_state_readable = acme_environment_states[state]
+ dev_state, dev_state_readable = ACME_ENVIRONMENT_STATES[state]
return check_temperature(
float(value_str),
params,
diff --git a/cmk/base/legacy_checks/acme_voltage.py b/cmk/base/legacy_checks/acme_voltage.py
index c70c5540df2..9ea03dbcd73 100644
--- a/cmk/base/legacy_checks/acme_voltage.py
+++ b/cmk/base/legacy_checks/acme_voltage.py
@@ -5,12 +5,11 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.acme import acme_environment_states
+from cmk.base.check_legacy_includes.acme import ACME_ENVIRONMENT_STATES
from cmk.base.check_legacy_includes.elphase import check_elphase
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.acme import DETECT_ACME
# .1.3.6.1.4.1.9148.3.3.1.2.1.1.3.1 MAIN 1.20V --> ACMEPACKET-ENVMON-MIB::apEnvMonVoltageStatusDescr.1
@@ -67,7 +66,7 @@ def check_acme_voltage(item, params, info):
return check_elphase(
descr,
params,
- {descr: {"voltage": (float(value_str) / 1000.0, acme_environment_states[state])}},
+ {descr: {"voltage": (float(value_str) / 1000.0, ACME_ENVIRONMENT_STATES[state])}},
)
return None
diff --git a/cmk/base/legacy_checks/ad_replication.py b/cmk/base/legacy_checks/ad_replication.py
index 37285e5bbc3..734a965193f 100644
--- a/cmk/base/legacy_checks/ad_replication.py
+++ b/cmk/base/legacy_checks/ad_replication.py
@@ -19,8 +19,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
def _get_relative_date_human_readable(timestamp: float) -> str:
diff --git a/cmk/base/legacy_checks/adva_fsp_current.py b/cmk/base/legacy_checks/adva_fsp_current.py
index 88082cc2c78..ce5d75b66d1 100644
--- a/cmk/base/legacy_checks/adva_fsp_current.py
+++ b/cmk/base/legacy_checks/adva_fsp_current.py
@@ -13,8 +13,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
def inventory_adva_fsp_current(info):
diff --git a/cmk/base/legacy_checks/adva_fsp_temp.py b/cmk/base/legacy_checks/adva_fsp_temp.py
index cf39b4f2b9d..863c125360e 100644
--- a/cmk/base/legacy_checks/adva_fsp_temp.py
+++ b/cmk/base/legacy_checks/adva_fsp_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
# this is currently here only to prevent error messages when upgrading
diff --git a/cmk/base/legacy_checks/agent_3par.py b/cmk/base/legacy_checks/agent_3par.py
deleted file mode 100644
index 51e46f627ad..00000000000
--- a/cmk/base/legacy_checks/agent_3par.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from collections.abc import Mapping, Sequence
-from typing import Any
-
-from cmk.base.check_api import passwordstore_get_cmdline
-from cmk.base.config import special_agent_info
-
-
-def agent_3par_arguments(
- params: Mapping[str, Any], hostname: str, ipaddress: str | None
-) -> Sequence[str | tuple[str, str, str]]:
- args = [
- "--user",
- params["user"],
- "--password",
- passwordstore_get_cmdline("%s", params["password"]),
- "--port",
- params["port"],
- ]
- if not params.get("verify_cert", False):
- args.append("--no-cert-check")
-
- if "values" in params:
- args += ["--values", ",".join(params["values"])]
-
- args.append(ipaddress or hostname)
-
- return args
-
-
-special_agent_info["3par"] = agent_3par_arguments
diff --git a/cmk/base/legacy_checks/agent_acme_sbc.py b/cmk/base/legacy_checks/agent_acme_sbc.py
index e5dc7c7a7b1..53a19b2cae8 100644
--- a/cmk/base/legacy_checks/agent_acme_sbc.py
+++ b/cmk/base/legacy_checks/agent_acme_sbc.py
@@ -5,13 +5,12 @@
from collections.abc import Mapping, Sequence
-from typing import Any
from cmk.base.config import special_agent_info
def agent_acme_sbc(
- params: Mapping[str, Any], hostname: str, ipaddress: str | None
+ params: Mapping[str, object], hostname: str, ipaddress: str | None
) -> Sequence[str]:
return [hostname]
diff --git a/cmk/base/legacy_checks/agent_alertmanager.py b/cmk/base/legacy_checks/agent_alertmanager.py
index d838f5432e4..18e93e5a3b7 100644
--- a/cmk/base/legacy_checks/agent_alertmanager.py
+++ b/cmk/base/legacy_checks/agent_alertmanager.py
@@ -5,27 +5,14 @@
from collections.abc import Mapping, Sequence
-from typing import Any, NamedTuple
from cmk.base.config import special_agent_info
-# NOTE: This code is temporarily duplicated from cmk/base/config.py to resolve
-# a layering violation.
-# This will be resovled with CMK-3812.
-# DO NOT USE THIS!!!
-
-
-class SpecialAgentConfiguration(NamedTuple):
- args: Sequence[str]
- # None makes the stdin of subprocess /dev/null
- stdin: str | None
-
def agent_alertmanager_arguments(
- params: Mapping[str, Any], hostname: str, ipaddress: str | None
-) -> SpecialAgentConfiguration:
- alertmanager_params = {**params, "host_address": ipaddress, "host_name": hostname}
- return SpecialAgentConfiguration([], repr(alertmanager_params))
+ params: Mapping[str, object], _hostname: str, _ipaddress: str | None
+) -> Sequence[str]:
+ return ["--config", repr(params)]
special_agent_info["alertmanager"] = agent_alertmanager_arguments
diff --git a/cmk/base/legacy_checks/agent_aws.py b/cmk/base/legacy_checks/agent_aws.py
new file mode 100644
index 00000000000..4b464c6785b
--- /dev/null
+++ b/cmk/base/legacy_checks/agent_aws.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+from collections.abc import Mapping, Sequence
+from typing import Any
+
+from cmk.base.check_api import passwordstore_get_cmdline
+from cmk.base.config import special_agent_info
+
+
+def _get_tag_options(tag_values, prefix):
+ options = []
+ for key, values in tag_values:
+ options.append("--%s-tag-key" % prefix)
+ options.append(key)
+ options.append("--%s-tag-values" % prefix)
+ options += values
+ return options
+
+
+def _get_services_config(services):
+ # '--services': {
+ # 's3': {'selection': ('tags', [('KEY', ['VAL1', 'VAL2'])])},
+ # 'ec2': {'selection': 'all'},
+ # 'ebs': {'selection': ('names', ['ebs1', 'ebs2'])},
+ # }
+ service_args = []
+ for service_name, service_config in services.items():
+ if service_config is None:
+ continue
+ if service_config.get("limits"):
+ service_args += ["--%s-limits" % service_name]
+ selection = service_config.get("selection")
+ if not isinstance(selection, tuple):
+ # Here: value of selection is 'all' which means there's no
+ # restriction (names or tags) to the instances of a specific
+ # AWS service. The commandline option already includes this
+ # service '--services SERVICE1 SERVICE2 ...' (see below).
+ continue
+ selection_type, selection_values = selection
+ if not selection_values:
+ continue
+ if selection_type == "names":
+ service_args.append("--%s-names" % service_name)
+ service_args += selection_values
+ elif selection_type == "tags":
+ service_args += _get_tag_options(selection_values, service_name)
+ return service_args
+
+
+def _proxy_args(details: Mapping[str, Any]) -> Sequence[Any]:
+ proxy_args = ["--proxy-host", details["proxy_host"]]
+ if proxy_port := details.get("proxy_port"):
+ proxy_args += ["--proxy-port", str(proxy_port)]
+ if (proxy_user := details.get("proxy_user")) and (proxy_pwd := details.get("proxy_password")):
+ proxy_args += [
+ "--proxy-user",
+ proxy_user,
+ "--proxy-password",
+ passwordstore_get_cmdline("%s", proxy_pwd),
+ ]
+ return proxy_args
+
+
+def agent_aws_arguments( # pylint: disable=too-many-branches
+ params: Mapping[str, Any], hostname: str, ipaddress: str | None
+) -> Sequence[Any]:
+ args = [
+ "--access-key-id",
+ params["access_key_id"],
+ "--secret-access-key",
+ passwordstore_get_cmdline("%s", params["secret_access_key"]),
+ *(_proxy_args(params["proxy_details"]) if "proxy_details" in params else []),
+ ]
+ global_service_region = params.get("access", {}).get("global_service_region")
+ if global_service_region is not None:
+ args += ["--global-service-region", global_service_region]
+ role_arn_id = params.get("access", {}).get("role_arn_id")
+ if role_arn_id:
+ args += ["--assume-role"]
+ if role_arn_id[0]:
+ args += ["--role-arn", role_arn_id[0]]
+ if role_arn_id[1]:
+ args += ["--external-id", role_arn_id[1]]
+ regions = params.get("regions")
+ if regions:
+ args.append("--regions")
+ args += regions
+ global_services = params.get("global_services", {})
+ if global_services:
+ args.append("--global-services")
+ # We need to sort the inner services-as-a-dict-params
+ # in order to create reliable tests
+ args += sorted(global_services)
+ args += _get_services_config(global_services)
+ services = params.get("services", {})
+ # for backwards compatibility
+ if "cloudwatch" in services:
+ services["cloudwatch_alarms"] = services["cloudwatch"]
+ del services["cloudwatch"]
+ if services:
+ args.append("--services")
+ # We need to sort the inner services-as-a-dict-params
+ # in order to create reliable tests
+ args += sorted(services)
+ args += _get_services_config(services)
+ if "requests" in services.get("s3", {}):
+ args += ["--s3-requests"]
+ alarms = services.get("cloudwatch_alarms", {}).get("alarms")
+ if alarms:
+        # {'alarms': 'all'} is handled by not specifying any extra names
+ args += ["--cloudwatch-alarms"]
+ if isinstance(alarms, tuple):
+ args += alarms[1]
+ if "cloudfront" in services.get("wafv2", {}):
+ args += ["--wafv2-cloudfront"]
+ if "cloudfront" in global_services:
+ cloudfront_host_assignment = global_services["cloudfront"]["host_assignment"]
+ args += ["--cloudfront-host-assignment", cloudfront_host_assignment]
+ # '--overall-tags': [('KEY_1', ['VAL_1', 'VAL_2']), ...)],
+ args += _get_tag_options(params.get("overall_tags", []), "overall")
+ args += [
+ "--hostname",
+ hostname,
+ ]
+ args.extend(("--piggyback-naming-convention", params["piggyback_naming_convention"]))
+ return args
+
+
+special_agent_info["aws"] = agent_aws_arguments
diff --git a/cmk/base/legacy_checks/agent_azure.py b/cmk/base/legacy_checks/agent_azure.py
new file mode 100644
index 00000000000..846017eb848
--- /dev/null
+++ b/cmk/base/legacy_checks/agent_azure.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+
+from collections.abc import Mapping, Sequence
+from typing import Any
+
+from cmk.base.check_api import get_http_proxy, passwordstore_get_cmdline
+from cmk.base.config import special_agent_info
+
+
+def agent_azure_arguments( # pylint: disable=too-many-branches
+ params: Mapping[str, Any],
+ hostname: str,
+ ipaddress: str | None,
+) -> Sequence[Any]:
+ args = [
+ "--tenant",
+ params["tenant"],
+ "--client",
+ params["client"],
+ "--secret",
+ passwordstore_get_cmdline("%s", params["secret"]),
+ ]
+
+ keys = ("authority", "subscription", "piggyback_vms", "sequential")
+
+ for key in (k for k in keys if k in params):
+ option = "--%s" % key
+ value = params[key]
+ if isinstance(value, bool):
+ if value:
+ args.append(option)
+ else:
+ args += [option, value]
+
+ if proxy_settings := params.get("proxy"):
+ args += ["--proxy", get_http_proxy(proxy_settings).serialize()]
+
+ if "services" in params:
+ args += ["--services", *params["services"]]
+
+ config = params["config"]
+
+ explicit = config.get("explicit", [])
+ if explicit:
+ args.append("--explicit-config")
+ for group_dict in explicit:
+ group_name = group_dict["group_name"]
+ args.append("group=%s" % group_name)
+
+ group_resources = group_dict.get("resources")
+ if group_resources:
+ args.append("resources=%s" % ",".join(group_resources))
+
+ tag_based = config.get("tag_based", [])
+ for tag, requirement in tag_based:
+ if requirement == "exists":
+ args += ["--require-tag", tag]
+ elif isinstance(requirement, tuple) and requirement[0] == "value":
+ args += ["--require-tag-value", tag, requirement[1]]
+
+ return args
+
+
+special_agent_info["azure"] = agent_azure_arguments
diff --git a/cmk/base/legacy_checks/agent_cisco_prime.py b/cmk/base/legacy_checks/agent_cisco_prime.py
deleted file mode 100644
index 904d85afaa4..00000000000
--- a/cmk/base/legacy_checks/agent_cisco_prime.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from collections.abc import Mapping, Sequence
-from typing import Any
-
-from cmk.base.check_api import passwordstore_get_cmdline
-from cmk.base.config import special_agent_info
-
-
-def agent_cisco_prime_arguments(
- params: Mapping[str, Any], hostname: str, ipaddress: str | None
-) -> Sequence[str | tuple[str, str, str]]:
- param_host = params.get("host")
- if param_host == "host_name":
- host = hostname
- elif param_host == "ip_address":
- if ipaddress is None:
- raise ValueError(f"IP address for host '{hostname}' is not set")
- host = ipaddress
- elif isinstance(param_host, tuple) and param_host[0] == "custom":
- host = param_host[1]["host"]
- else:
- # behaviour previous to host configuration
- host = ipaddress or hostname
-
- basic_auth = params.get("basicauth")
- return [
- str(elem) # non-str get ignored silently - so turn all elements into `str`
- for chunk in (
- ("--hostname", host),
- ("-u", "{}:{}".format(basic_auth[0], passwordstore_get_cmdline("%s", basic_auth[1]))) #
- if basic_auth
- else (),
- ("--port", params["port"]) if "port" in params else (), #
- ("--no-tls",) if params.get("no-tls") else (), #
- ("--no-cert-check",) if params.get("no-cert-check") else (), #
- ("--timeout", params["timeout"]) if "timeout" in params else (), #
- )
- for elem in chunk
- ]
-
-
-special_agent_info["cisco_prime"] = agent_cisco_prime_arguments
diff --git a/cmk/base/legacy_checks/agent_fritzbox.py b/cmk/base/legacy_checks/agent_fritzbox.py
deleted file mode 100644
index e88e4224817..00000000000
--- a/cmk/base/legacy_checks/agent_fritzbox.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# {
-# 'timeout': 10,
-# }
-
-
-from collections.abc import Mapping, Sequence
-from typing import Any
-
-from cmk.base.config import special_agent_info
-
-
-def agent_fritzbox_arguments(
- params: Mapping[str, Any], hostname: str, ipaddress: str | None
-) -> Sequence[str]:
- args = []
-
- if "timeout" in params:
- args += ["--timeout", "%d" % params["timeout"]]
-
- args.append(ipaddress or hostname)
- return args
-
-
-special_agent_info["fritzbox"] = agent_fritzbox_arguments
diff --git a/cmk/base/legacy_checks/agent_kube.py b/cmk/base/legacy_checks/agent_kube.py
new file mode 100644
index 00000000000..e5dcb347d49
--- /dev/null
+++ b/cmk/base/legacy_checks/agent_kube.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python3
+# Copyright (C) 2022 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+
+# mypy: disable-error-code="arg-type"
+
+from collections.abc import Mapping, Sequence
+from typing import Any, Literal
+
+from cmk.base.check_api import get_http_proxy, passwordstore_get_cmdline
+from cmk.base.config import special_agent_info
+
+
+def _timeouts(timeouts: Mapping[str, int], arg_prefix: str) -> Sequence[str]:
+ args = []
+ if (connect := timeouts.get("connect")) is not None:
+ args.append(f"--{arg_prefix}-connect-timeout")
+ args.append(str(connect))
+ if (read := timeouts.get("read")) is not None:
+ args.append(f"--{arg_prefix}-read-timeout")
+ args.append(str(read))
+ return args
+
+
+def _usage_endpoint(
+ params: Mapping[str, object], prefix: Literal["prometheus", "cluster-collector"]
+) -> list[str]:
+ args = [
+ f"--{prefix}-endpoint",
+ str(params["endpoint_v2"]),
+ "--usage-proxy",
+ get_http_proxy(params.get("proxy", ("environment", "environment"))).serialize(),
+ ]
+ if params.get("verify-cert"):
+ args.append("--usage-verify-cert")
+ if timeouts := params.get("timeout"):
+ args.extend(_timeouts(timeouts, "usage")) # type: ignore[arg-type]
+ return args
+
+
+def agent_kube_arguments( # pylint: disable=too-many-branches
+ params: Mapping[str, Any], hostname: str, ipaddress: str | None
+) -> Sequence[str | tuple[str, str, str]]:
+ args = ["--cluster", params["cluster-name"]]
+ args.extend(["--kubernetes-cluster-hostname", hostname])
+ args.extend(["--token", passwordstore_get_cmdline("%s", params["token"])])
+
+ args.append("--monitored-objects")
+ args.extend(params["monitored-objects"])
+
+ if "namespaces" in params:
+ namespace_filter_option, filter_patterns = params["namespaces"]
+ for namespace_pattern in filter_patterns:
+ args.append(f"--{namespace_filter_option}")
+ args.append(namespace_pattern)
+
+ if "cluster-resource-aggregation" in params:
+ if params["cluster-resource-aggregation"] == "cluster-aggregation-include-all-nodes":
+ args.append("--cluster-aggregation-include-all-nodes")
+ else:
+ args.append("--cluster-aggregation-exclude-node-roles")
+ args.extend(params["cluster-resource-aggregation"][1])
+ else:
+ args.extend(["--cluster-aggregation-exclude-node-roles", "control-plane", "infra"])
+
+ if (host_labels_param := params.get("import-annotations")) is not None:
+ if host_labels_param == "include-annotations-as-host-labels":
+ args.append("--include-annotations-as-host-labels")
+ else:
+ args.append("--include-matching-annotations-as-host-labels")
+ args.append(host_labels_param[1])
+
+ api_params = params["kubernetes-api-server"]
+ args.extend(["--api-server-endpoint", api_params["endpoint_v2"]])
+ if api_params.get("verify-cert"):
+ args.append("--verify-cert-api")
+ args.extend(
+ [
+ "--api-server-proxy",
+ get_http_proxy(api_params.get("proxy", ("environment", "environment"))).serialize(),
+ ]
+ )
+ if api_timeouts := api_params.get("timeout"):
+ args.extend(_timeouts(api_timeouts, "k8s-api"))
+
+ if (endpoint_params := params.get("usage_endpoint")) is not None:
+ return args + _usage_endpoint(endpoint_params[1], endpoint_params[0])
+
+ return args
+
+
+special_agent_info["kube"] = agent_kube_arguments
diff --git a/cmk/base/legacy_checks/agent_prism.py b/cmk/base/legacy_checks/agent_prism.py
deleted file mode 100644
index 7be513a7e3e..00000000000
--- a/cmk/base/legacy_checks/agent_prism.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-# mypy: disable-error-code="list-item"
-
-from collections.abc import Mapping, Sequence
-from typing import Any
-
-from cmk.base.check_api import passwordstore_get_cmdline
-from cmk.base.config import special_agent_info
-
-
-def agent_prism_arguments(
- params: Mapping[str, Any], hostname: str, ipaddress: str | None
-) -> Sequence[str]:
- return [
- "--server",
- ipaddress or hostname,
- "--username",
- "%s" % params["username"],
- "--password",
- passwordstore_get_cmdline("%s", params["password"]),
- *(["--port", "%s" % params["port"]] if "port" in params else []),
- ]
-
-
-special_agent_info["prism"] = agent_prism_arguments
diff --git a/cmk/base/legacy_checks/agent_prometheus.py b/cmk/base/legacy_checks/agent_prometheus.py
new file mode 100644
index 00000000000..f0756935ffd
--- /dev/null
+++ b/cmk/base/legacy_checks/agent_prometheus.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+
+from collections.abc import Mapping, Sequence
+
+from cmk.base.config import special_agent_info
+
+
+def agent_prometheus_arguments(
+ params: Mapping[str, object], hostname: str, ipaddress: str | None
+) -> Sequence[str]:
+ prometheus_params = {**params, "host_address": ipaddress, "host_name": hostname}
+ return ["--config", repr(prometheus_params)]
+
+
+special_agent_info["prometheus"] = agent_prometheus_arguments
diff --git a/cmk/base/legacy_checks/agent_vsphere.py b/cmk/base/legacy_checks/agent_vsphere.py
new file mode 100644
index 00000000000..f7915baa817
--- /dev/null
+++ b/cmk/base/legacy_checks/agent_vsphere.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+# {
+# 'tcp_port': 443,
+# 'secret': 'wef',
+# 'infos': ['hostsystem', 'virtualmachine'],
+# 'user': 'wefwef'
+# }
+
+
+# mypy: disable-error-code="list-item"
+
+from collections.abc import Mapping, Sequence
+from typing import Any
+
+from cmk.base.check_api import passwordstore_get_cmdline
+from cmk.base.config import special_agent_info
+
+
+def agent_vsphere_arguments( # pylint: disable=too-many-branches
+ params: Mapping[str, Any], hostname: str, ipaddress: str | None
+) -> Sequence[str | tuple[str, str, str]]:
+ args = []
+ if "tcp_port" in params:
+ args += ["-p", "%d" % params["tcp_port"]]
+
+ args += ["-u", params["user"]]
+ args += [passwordstore_get_cmdline("-s=%s", params["secret"])]
+ args += ["-i", ",".join(params["infos"])]
+
+ # True: Queried host is a host system
+ # False: Queried host is the vCenter
+ if params["direct"]:
+ args += ["--direct", "--hostname", hostname]
+
+ if params.get("skip_placeholder_vms", True):
+ args.append("-P")
+
+ if "spaces" in params:
+ args += ["--spaces", params["spaces"]]
+
+ if "timeout" in params:
+ args += ["--timeout", params["timeout"]]
+
+ if v_display := params.get("vm_pwr_display"):
+ args += ["--vm_pwr_display", v_display]
+
+ if vm_piggyname := params.get("vm_piggyname"):
+ args += ["--vm_piggyname", vm_piggyname]
+
+ if h_display := params.get("host_pwr_display"):
+ args += ["--host_pwr_display", h_display]
+
+ if params.get("snapshots_on_host", False):
+ args += ["--snapshots-on-host"]
+
+ cert_verify = params.get("ssl", True)
+ if cert_verify is False:
+ args += ["--no-cert-check"]
+ elif cert_verify is True:
+ args += ["--cert-server-name", hostname]
+ else:
+ args += ["--cert-server-name", cert_verify]
+
+ args.append(ipaddress or hostname)
+
+ return args
+
+
+special_agent_info["vsphere"] = agent_vsphere_arguments
diff --git a/cmk/base/legacy_checks/aironet_clients.py b/cmk/base/legacy_checks/aironet_clients.py
index 644662f61ab..8c3136a6d25 100644
--- a/cmk/base/legacy_checks/aironet_clients.py
+++ b/cmk/base/legacy_checks/aironet_clients.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, equals, SNMPTree, StringTable
aironet_default_strength_levels = (-25, -20)
aironet_default_quality_levels = (40, 35)
diff --git a/cmk/base/legacy_checks/aironet_errors.py b/cmk/base/legacy_checks/aironet_errors.py
index 377951d5c68..798296e9ab4 100644
--- a/cmk/base/legacy_checks/aironet_errors.py
+++ b/cmk/base/legacy_checks/aironet_errors.py
@@ -9,8 +9,15 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, get_rate, get_value_store, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ any_of,
+ equals,
+ get_rate,
+ get_value_store,
+ OIDEnd,
+ SNMPTree,
+ StringTable,
+)
def inventory_aironet_errors(info):
diff --git a/cmk/base/legacy_checks/aix_multipath.py b/cmk/base/legacy_checks/aix_multipath.py
index b656a92db5b..7b56dd8ad26 100644
--- a/cmk/base/legacy_checks/aix_multipath.py
+++ b/cmk/base/legacy_checks/aix_multipath.py
@@ -16,7 +16,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_aix_multipath(info):
diff --git a/cmk/base/legacy_checks/aix_sap_processlist.py b/cmk/base/legacy_checks/aix_sap_processlist.py
index 4d589665469..147f053058d 100644
--- a/cmk/base/legacy_checks/aix_sap_processlist.py
+++ b/cmk/base/legacy_checks/aix_sap_processlist.py
@@ -37,9 +37,11 @@
import re
import time
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_aix_sap_processlist(string_table):
instance, description = None, None
@@ -90,9 +92,7 @@ def check_aix_sap_processlist(item, _no_params, parsed):
start = time.strftime("%c", start_time)
elapsed = time.time() - time.mktime(start_time)
perfdata = [("runtime", elapsed)]
- infotexts.append(
- f"Start Time: {start}, Elapsed Time: {get_age_human_readable(elapsed)}"
- )
+ infotexts.append(f"Start Time: {start}, Elapsed Time: {render.timespan(elapsed)}")
if status == "GREEN":
state = 0
diff --git a/cmk/base/legacy_checks/akcp_daisy_temp.py b/cmk/base/legacy_checks/akcp_daisy_temp.py
index f41ea7e84ac..012da556ddf 100644
--- a/cmk/base/legacy_checks/akcp_daisy_temp.py
+++ b/cmk/base/legacy_checks/akcp_daisy_temp.py
@@ -11,8 +11,16 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, equals, exists, not_exists, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ all_of,
+ any_of,
+ equals,
+ exists,
+ not_exists,
+ OIDEnd,
+ SNMPTree,
+ StringTable,
+)
def inventory_akcp_daisy_temp(info):
diff --git a/cmk/base/legacy_checks/alcatel_cpu.py b/cmk/base/legacy_checks/alcatel_cpu.py
index 62ced7b2bea..1ec9f1c66c9 100644
--- a/cmk/base/legacy_checks/alcatel_cpu.py
+++ b/cmk/base/legacy_checks/alcatel_cpu.py
@@ -5,18 +5,35 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.alcatel import check_alcatel_cpu, inventory_alcatel_cpu
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.alcatel import DETECT_ALCATEL
+from cmk.agent_based.v2 import SNMPTree, StringTable
+from cmk.plugins.lib.alcatel import DETECT_ALCATEL, DETECT_ALCATEL_AOS7
def parse_alcatel_cpu(string_table: StringTable) -> StringTable | None:
return string_table or None
+def inventory_alcatel_cpu(section):
+ yield None, {}
+
+
+def check_alcatel_cpu(_no_item, _no_params, section):
+ cpu_perc = int(section[0][0])
+ warn, crit = (90.0, 95.0)
+ status = 0
+ levelstext = ""
+ if cpu_perc >= crit:
+ status = 2
+ elif cpu_perc >= warn:
+ status = 1
+ if status:
+ levelstext = f" (warn/crit at {warn:.1f}%/{crit:.1f}%)"
+ perfdata = [("util", cpu_perc, warn, crit, 0, 100)]
+ return status, "total: %.1f%%" % cpu_perc + levelstext, perfdata
+
+
check_info["alcatel_cpu"] = LegacyCheckDefinition(
parse_function=parse_alcatel_cpu,
detect=DETECT_ALCATEL,
@@ -28,3 +45,16 @@ def parse_alcatel_cpu(string_table: StringTable) -> StringTable | None:
discovery_function=inventory_alcatel_cpu,
check_function=check_alcatel_cpu,
)
+
+
+check_info["alcatel_cpu_aos7"] = LegacyCheckDefinition(
+ parse_function=parse_alcatel_cpu,
+ detect=DETECT_ALCATEL_AOS7,
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.6486.801.1.2.1.16.1.1.1.1.1",
+ oids=["15"],
+ ),
+ service_name="CPU utilization",
+ discovery_function=inventory_alcatel_cpu,
+ check_function=check_alcatel_cpu,
+)
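The check logic formerly shared via check_legacy_includes.alcatel is now inlined above and reused by both check definitions. Exercised with a fake SNMP section (the value is illustrative):

    def check_alcatel_cpu(_no_item, _no_params, section):
        cpu_perc = int(section[0][0])
        warn, crit = (90.0, 95.0)
        status = 0
        levelstext = ""
        if cpu_perc >= crit:
            status = 2
        elif cpu_perc >= warn:
            status = 1
        if status:
            levelstext = f" (warn/crit at {warn:.1f}%/{crit:.1f}%)"
        perfdata = [("util", cpu_perc, warn, crit, 0, 100)]
        return status, "total: %.1f%%" % cpu_perc + levelstext, perfdata

    state, text, _perf = check_alcatel_cpu(None, None, [["92"]])
    assert state == 1 and "warn/crit" in text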
diff --git a/cmk/base/legacy_checks/alcatel_cpu_aos7.py b/cmk/base/legacy_checks/alcatel_cpu_aos7.py
deleted file mode 100644
index d116f627bfe..00000000000
--- a/cmk/base/legacy_checks/alcatel_cpu_aos7.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.alcatel import check_alcatel_cpu, inventory_alcatel_cpu
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.alcatel import DETECT_ALCATEL_AOS7
-
-
-def parse_alcatel_cpu_aos7(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
-check_info["alcatel_cpu_aos7"] = LegacyCheckDefinition(
- parse_function=parse_alcatel_cpu_aos7,
- detect=DETECT_ALCATEL_AOS7,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.6486.801.1.2.1.16.1.1.1.1.1",
- oids=["15"],
- ),
- service_name="CPU utilization",
- discovery_function=inventory_alcatel_cpu,
- check_function=check_alcatel_cpu,
-)
diff --git a/cmk/base/legacy_checks/alcatel_fans.py b/cmk/base/legacy_checks/alcatel_fans.py
index 9d816b6b321..a1334f4ded7 100644
--- a/cmk/base/legacy_checks/alcatel_fans.py
+++ b/cmk/base/legacy_checks/alcatel_fans.py
@@ -4,18 +4,50 @@
# conditions defined in the file COPYING, which is part of this source code package.
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.alcatel import check_alcatel_fans, inventory_alcatel_fans
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.alcatel import DETECT_ALCATEL
+from cmk.agent_based.v2 import SNMPTree, StringTable
+from cmk.plugins.lib.alcatel import DETECT_ALCATEL, DETECT_ALCATEL_AOS7
def parse_alcatel_fans(string_table: StringTable) -> StringTable:
return string_table
+def discover_alcatel_fans(info):
+ for nr, _value in enumerate(info, 1):
+ yield str(nr), None
+
+
+def check_alcatel_fans(item, _no_params, info):
+ fan_states = {
+ 0: "has no status",
+ 1: "not running",
+ 2: "running",
+ }
+ try:
+ line = info[int(item) - 1]
+ fan_state = int(line[0])
+ except (ValueError, IndexError):
+ return None
+
+ state = 0 if fan_state == 2 else 2
+ return state, "Fan " + fan_states.get(fan_state, "unknown (%s)" % fan_state)
+
+
+check_info["alcatel_fans_aos7"] = LegacyCheckDefinition(
+ parse_function=parse_alcatel_fans,
+ detect=DETECT_ALCATEL_AOS7,
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.6486.801.1.1.1.3.1.1.11.1",
+ oids=["2"],
+ ),
+ service_name="Fan %s",
+ discovery_function=discover_alcatel_fans,
+ check_function=check_alcatel_fans,
+)
+
+
check_info["alcatel_fans"] = LegacyCheckDefinition(
parse_function=parse_alcatel_fans,
detect=DETECT_ALCATEL,
@@ -24,6 +56,6 @@ def parse_alcatel_fans(string_table: StringTable) -> StringTable:
oids=["2"],
),
service_name="Fan %s",
- discovery_function=inventory_alcatel_fans,
+ discovery_function=discover_alcatel_fans,
check_function=check_alcatel_fans,
)
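The merged fan check, lightly condensed and exercised with a fake section (fan state 2 means running; anything else is CRIT):

    def check_alcatel_fans(item, _no_params, info):
        fan_states = {0: "has no status", 1: "not running", 2: "running"}
        try:
            fan_state = int(info[int(item) - 1][0])
        except (ValueError, IndexError):
            return None
        state = 0 if fan_state == 2 else 2
        return state, "Fan " + fan_states.get(fan_state, "unknown (%s)" % fan_state)

    assert check_alcatel_fans("1", None, [["2"], ["1"]]) == (0, "Fan running")
    assert check_alcatel_fans("2", None, [["2"], ["1"]]) == (2, "Fan not running")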
diff --git a/cmk/base/legacy_checks/alcatel_fans_aos7.py b/cmk/base/legacy_checks/alcatel_fans_aos7.py
deleted file mode 100644
index 3a920507cc0..00000000000
--- a/cmk/base/legacy_checks/alcatel_fans_aos7.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.alcatel import check_alcatel_fans, inventory_alcatel_fans
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.alcatel import DETECT_ALCATEL_AOS7
-
-
-def parse_alcatel_fans_aos7(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["alcatel_fans_aos7"] = LegacyCheckDefinition(
- parse_function=parse_alcatel_fans_aos7,
- detect=DETECT_ALCATEL_AOS7,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.6486.801.1.1.1.3.1.1.11.1",
- oids=["2"],
- ),
- service_name="Fan %s",
- discovery_function=inventory_alcatel_fans,
- check_function=check_alcatel_fans,
-)
diff --git a/cmk/base/legacy_checks/alcatel_power.py b/cmk/base/legacy_checks/alcatel_power.py
index 4c2cd10dd71..6d724c11131 100644
--- a/cmk/base/legacy_checks/alcatel_power.py
+++ b/cmk/base/legacy_checks/alcatel_power.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.alcatel import DETECT_ALCATEL
alcatel_power_operstate_map = {
diff --git a/cmk/base/legacy_checks/alcatel_temp.py b/cmk/base/legacy_checks/alcatel_temp.py
index e466fbcc569..e5e8eec9704 100644
--- a/cmk/base/legacy_checks/alcatel_temp.py
+++ b/cmk/base/legacy_checks/alcatel_temp.py
@@ -4,18 +4,28 @@
# conditions defined in the file COPYING, which is part of this source code package.
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.alcatel import (
- ALCATEL_TEMP_CHECK_DEFAULT_PARAMETERS,
- inventory_alcatel_temp,
-)
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.alcatel import DETECT_ALCATEL
+def parse_alcatel_temp(string_table: StringTable) -> StringTable:
+ return string_table
+
+
+def discover_alcatel_temp(info):
+ with_slot = len(info) != 1
+ for index, row in enumerate(info):
+ for oid, name in enumerate(["Board", "CPU"]):
+ if row[oid] != "0":
+ if with_slot:
+ yield f"Slot {index + 1} {name}", {}
+ else:
+ yield name, {}
+
+
def check_alcatel_temp(item, params, info):
if len(info) == 1:
slot_index = 0
@@ -33,10 +43,6 @@ def check_alcatel_temp(item, params, info):
return check_temperature(temp_celsius, params, "alcatel_temp_%s" % item)
-def parse_alcatel_temp(string_table: StringTable) -> StringTable:
- return string_table
-
-
check_info["alcatel_temp"] = LegacyCheckDefinition(
parse_function=parse_alcatel_temp,
detect=DETECT_ALCATEL,
@@ -45,8 +51,10 @@ def parse_alcatel_temp(string_table: StringTable) -> StringTable:
oids=["4", "5"],
),
service_name="Temperature %s",
- discovery_function=inventory_alcatel_temp,
+ discovery_function=discover_alcatel_temp,
check_function=check_alcatel_temp,
check_ruleset_name="temperature",
- check_default_parameters=ALCATEL_TEMP_CHECK_DEFAULT_PARAMETERS,
+ check_default_parameters={
+ "levels": (45.0, 50.0),
+ },
)
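discover_alcatel_temp only prefixes service names with a slot when more than one row is present. The function from above, exercised with fake rows (temperature values illustrative; a column of "0" means the sensor is absent):

    def discover_alcatel_temp(info):
        with_slot = len(info) != 1
        for index, row in enumerate(info):
            for oid, name in enumerate(["Board", "CPU"]):
                if row[oid] != "0":
                    if with_slot:
                        yield f"Slot {index + 1} {name}", {}
                    else:
                        yield name, {}

    assert list(discover_alcatel_temp([["41", "48"]])) == [("Board", {}), ("CPU", {})]
    assert list(discover_alcatel_temp([["41", "0"], ["0", "48"]])) == [
        ("Slot 1 Board", {}),
        ("Slot 2 CPU", {}),
    ]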
diff --git a/cmk/base/legacy_checks/alcatel_temp_aos7.py b/cmk/base/legacy_checks/alcatel_temp_aos7.py
index 7316e4f71e8..01dc9fd7b68 100644
--- a/cmk/base/legacy_checks/alcatel_temp_aos7.py
+++ b/cmk/base/legacy_checks/alcatel_temp_aos7.py
@@ -4,7 +4,6 @@
# conditions defined in the file COPYING, which is part of this source code package.
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.alcatel import ALCATEL_TEMP_CHECK_DEFAULT_PARAMETERS
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
@@ -84,5 +83,7 @@ def discover_alcatel_temp_aos7(section):
discovery_function=discover_alcatel_temp_aos7,
check_function=check_alcatel_aos7_temp,
check_ruleset_name="temperature",
- check_default_parameters=ALCATEL_TEMP_CHECK_DEFAULT_PARAMETERS,
+ check_default_parameters={
+ "levels": (45.0, 50.0),
+ },
)
diff --git a/cmk/base/legacy_checks/alcatel_timetra_chassis.py b/cmk/base/legacy_checks/alcatel_timetra_chassis.py
index 079c9582fbf..233df040728 100644
--- a/cmk/base/legacy_checks/alcatel_timetra_chassis.py
+++ b/cmk/base/legacy_checks/alcatel_timetra_chassis.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_alcatel_timetra_chassis(info):
diff --git a/cmk/base/legacy_checks/alcatel_timetra_cpu.py b/cmk/base/legacy_checks/alcatel_timetra_cpu.py
index a5e5f45c982..4a8eddd3bd8 100644
--- a/cmk/base/legacy_checks/alcatel_timetra_cpu.py
+++ b/cmk/base/legacy_checks/alcatel_timetra_cpu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_alcatel_timetra_cpu(info):
diff --git a/cmk/base/legacy_checks/apc_ats_output.py b/cmk/base/legacy_checks/apc_ats_output.py
index 7c741bb650c..c80768b5804 100644
--- a/cmk/base/legacy_checks/apc_ats_output.py
+++ b/cmk/base/legacy_checks/apc_ats_output.py
@@ -2,14 +2,13 @@
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-
-
-# mypy: disable-error-code="var-annotated"
-
from cmk.base.check_api import check_levels, LegacyCheckDefinition
+from cmk.base.check_legacy_includes.apc_ats import DETECT
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, SNMPTree
+from cmk.agent_based.v2 import SNMPTree
+
+# mypy: disable-error-code="var-annotated"
def parse_apc_ats_output(string_table):
@@ -68,10 +67,7 @@ def check_apc_ats_output(item, params, parsed):
check_info["apc_ats_output"] = LegacyCheckDefinition(
- detect=any_of(
- equals(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.318.1.3.11"),
- equals(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.318.1.3.32"),
- ),
+ detect=DETECT,
fetch=SNMPTree(
base=".1.3.6.1.4.1.318.1.1.8.5.4.3.1",
oids=["1", "3", "4", "10", "13"],
@@ -83,6 +79,6 @@ def check_apc_ats_output(item, params, parsed):
check_ruleset_name="apc_ats_output",
check_default_parameters={
"output_voltage_max": (240, 250),
- "load_perc_max": (85, 95),
+ "load_perc_max": (85.0, 95.0),
},
)
diff --git a/cmk/base/legacy_checks/apc_ats_status.py b/cmk/base/legacy_checks/apc_ats_status.py
index 356b941bc9c..10e5d26b56b 100644
--- a/cmk/base/legacy_checks/apc_ats_status.py
+++ b/cmk/base/legacy_checks/apc_ats_status.py
@@ -2,41 +2,57 @@
# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition, saveint
+from collections.abc import Iterable
+from typing import Any
+
+from cmk.base.check_api import LegacyCheckDefinition
+from cmk.base.check_legacy_includes.apc_ats import (
+ CommunictionStatus,
+ DETECT,
+ OverCurrentStatus,
+ PowerSupplyStatus,
+ RedunandancyStatus,
+ Source,
+ Status,
+)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
-def inventory_apc_ats_status(info):
+def parse_apc_ats_status(info: StringTable) -> Status | None:
if len(info) == 1:
- yield None, {"power_source": int(info[0][1])}
+ return Status.from_raw(info[0])
+ return None
+
+
+def inventory_apc_ats_status(parsed: Status) -> Iterable[tuple[None, dict]]:
+ if parsed and parsed.selected_source:
+ yield None, {"power_source": parsed.selected_source.value}
-def check_apc_ats_status(_no_item, params, info):
+def check_apc_ats_status(_no_item: Any, params: dict, parsed: Status) -> Iterable:
source = params["power_source"]
- comstatus, selected_source, redundancy, overcurrent, ps5, ps24 = map(saveint, info[0])
state = 0
messages = []
# current source of power
- sources = {1: "A", 2: "B"}
- if source != selected_source:
+ source_parsed = Source(source)
+ if source_parsed != parsed.selected_source:
state = 2
+ assert parsed.selected_source is not None
messages.append(
- f"Power source Changed from {sources[source]} to {sources[selected_source]}(!!)"
+ "Power source Changed from %s to %s(!!)"
+ % (source_parsed.name, parsed.selected_source.name)
)
else:
- messages.append("Power source %s selected" % sources[source])
+ messages.append("Power source %s selected" % source_parsed.name)
# current communication status of the Automatic Transfer Switch.
- if comstatus == 1:
+ if parsed.com_status == CommunictionStatus.NeverDiscovered:
state = max(1, state)
messages.append("Communication Status: never Discovered(!)")
- elif comstatus == 3:
+ elif parsed.com_status == CommunictionStatus.Lost:
state = 2
messages.append("Communication Status: lost(!!)")
@@ -44,7 +60,7 @@ def check_apc_ats_status(_no_item, params, info):
# Lost(1) indicates that the ATS is unable to switch over to the alternate power source
# if the current source fails. Redundant(2) indicates that the ATS will switch
# over to the alternate power source if the current source fails.
- if redundancy != 2:
+ if parsed.redundancy == RedunandancyStatus.Lost:
state = 2
messages.append("redundancy lost(!!)")
else:
@@ -54,33 +70,26 @@ def check_apc_ats_status(_no_item, params, info):
# exceeded the output current threshold and will not allow a switch
# over to the alternate power source if the current source fails.
# atsCurrentOK(2) indicates that the output current is below the output current threshold.
- if overcurrent == 1:
+ if parsed.overcurrent == OverCurrentStatus.Exceeded:
state = 2
- messages.append("exceedet ouput current threshold(!!)")
+ messages.append("exceeded output current threshold(!!)")
- # 5Volt power supply
- if ps5 != 2:
- state = 2
- messages.append("5V power supply failed(!!)")
-
- # 24Volt power supply
- if ps24 != 2:
- state = 2
- messages.append("24V power suppy failed(!!)")
+ for powersource in parsed.powersources:
+ if powersource is None:
+ continue
+ if powersource.status != PowerSupplyStatus.OK:
+ state = 2
+ messages.append(f"{powersource.name} power supply failed(!!)")
return state, ", ".join(messages)
-def parse_apc_ats_status(string_table: StringTable) -> StringTable:
- return string_table
-
-
check_info["apc_ats_status"] = LegacyCheckDefinition(
parse_function=parse_apc_ats_status,
- detect=contains(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.318.1.3.11"),
+ detect=DETECT,
fetch=SNMPTree(
base=".1.3.6.1.4.1.318.1.1.8.5.1",
- oids=["1.0", "2.0", "3.0", "4.0", "5.0", "6.0"],
+ oids=["1.0", "2.0", "3.0", "4.0", "5.0", "6.0", "17.0", "18.0"],
),
service_name="ATS Status",
discovery_function=inventory_apc_ats_status,
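
The apc_ats_status rewrite swaps positional saveint indexing for a parsed Status object with enum-valued fields, so the check compares names instead of magic numbers. A rough, illustrative sketch of the shape such a parse takes; the real Status, Source and related enums live in cmk.base.check_legacy_includes.apc_ats, and only the attributes used above are assumed here:

from dataclasses import dataclass
from enum import Enum

class Source(Enum):
    A = 1
    B = 2

@dataclass(frozen=True)
class StatusSketch:
    selected_source: Source | None

    @classmethod
    def from_raw(cls, row: list[str]) -> "StatusSketch":
        # Column 2 of the SNMP row is the selected source (1 = A, 2 = B),
        # matching the old int(info[0][1]) in the removed inventory function.
        raw = int(row[1])
        return cls(selected_source=Source(raw) if raw in (1, 2) else None)
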
diff --git a/cmk/base/legacy_checks/apc_humidity.py b/cmk/base/legacy_checks/apc_humidity.py
index aa16183553f..1c15e576940 100644
--- a/cmk/base/legacy_checks/apc_humidity.py
+++ b/cmk/base/legacy_checks/apc_humidity.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.humidity import check_humidity
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.apc import DETECT
@@ -42,7 +41,7 @@ def parse_apc_humidity(string_table: StringTable) -> StringTable:
check_function=check_apc_humidity,
check_ruleset_name="humidity",
check_default_parameters={
- "levels": (40, 35),
- "levels_lower": (60, 65),
+ "levels": (40.0, 35.0),
+ "levels_lower": (60.0, 65.0),
},
)
diff --git a/cmk/base/legacy_checks/apc_inputs.py b/cmk/base/legacy_checks/apc_inputs.py
index f5e956cff1c..af219c818a7 100644
--- a/cmk/base/legacy_checks/apc_inputs.py
+++ b/cmk/base/legacy_checks/apc_inputs.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.apc import DETECT
diff --git a/cmk/base/legacy_checks/apc_inrow_airflow.py b/cmk/base/legacy_checks/apc_inrow_airflow.py
index 098eba549e6..aa08bbaea64 100644
--- a/cmk/base/legacy_checks/apc_inrow_airflow.py
+++ b/cmk/base/legacy_checks/apc_inrow_airflow.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.apc import DETECT
@@ -45,7 +44,7 @@ def check_apc_inrow_airflow(_no_item, params, info):
state = 1
message = "too high"
- perf = [("flow", flow, warn, crit)]
+ perf = [("airflow", flow, warn, crit)]
return state, f"Current: {flow:.0f} l/s {message}", perf
diff --git a/cmk/base/legacy_checks/apc_inrow_fanspeed.py b/cmk/base/legacy_checks/apc_inrow_fanspeed.py
index dc9850e4529..022b45e5abb 100644
--- a/cmk/base/legacy_checks/apc_inrow_fanspeed.py
+++ b/cmk/base/legacy_checks/apc_inrow_fanspeed.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, savefloat
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.apc import DETECT
@@ -20,7 +19,7 @@ def inventory_apc_inrow_fanspeed(info):
def check_apc_inrow_fanspeed(_no_item, _no_params, info):
value = savefloat(info[0][0]) / 10
- return 0, "Current: %.2f%%" % value, [("fanspeed", value)]
+ return 0, "Current: %.2f%%" % value, [("fan_perc", value)]
def parse_apc_inrow_fanspeed(string_table: StringTable) -> StringTable:
diff --git a/cmk/base/legacy_checks/apc_mod_pdu_modules.py b/cmk/base/legacy_checks/apc_mod_pdu_modules.py
index 34d487ad7b5..347684c026a 100644
--- a/cmk/base/legacy_checks/apc_mod_pdu_modules.py
+++ b/cmk/base/legacy_checks/apc_mod_pdu_modules.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, savefloat, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
def inventory_apc_mod_pdu_modules(info):
@@ -25,11 +24,11 @@ def check_apc_mod_pdu_modules(item, _no_params, info):
for name, status, current_power in info:
if name == item:
status = saveint(status)
- # As per the device's MIB, the values are measured in tenths of KW
+ # As per the device's MIB, the values are measured in tenths of kW
current_power = savefloat(current_power) / 10
- message = f"Status {apc_states.get(status, 6)}, current: {current_power:.2f}kw "
+ message = f"Status {apc_states.get(status, 6)}, current: {current_power:.2f} kW "
- perf = [("current_power", current_power)]
+ perf = [("power", current_power * 1000)]
if status == 2:
return 1, message, perf
if status in [3, 6]:
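
The metric rename above also changes the unit: the MIB reports tenths of kW, the old current_power metric carried kW, and the new power metric carries watts, hence the extra factor of 1000. Worked through:

raw = 125                          # device value, tenths of kW (per the MIB)
current_power = raw / 10           # 12.5 kW, as rendered in the status message
perf_value = current_power * 1000  # 12500 W for the renamed "power" metric
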
diff --git a/cmk/base/legacy_checks/apc_netbotz_other_sensors.py b/cmk/base/legacy_checks/apc_netbotz_other_sensors.py
deleted file mode 100644
index 4ddc8b9752b..00000000000
--- a/cmk/base/legacy_checks/apc_netbotz_other_sensors.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# Max. eigth sensors
-# .1.3.6.1.4.1.5528.100.4.2.10.1.4.399845582 Wasserstand_FG1
-# .1.3.6.1.4.1.5528.100.4.2.10.1.4.3502248167 Ethernet Link Status
-# .1.3.6.1.4.1.5528.100.4.2.10.1.4.3823829717 A-Link Bus Power
-# .1.3.6.1.4.1.5528.100.4.2.10.1.3.399845582 0
-# .1.3.6.1.4.1.5528.100.4.2.10.1.3.3502248167 0
-# .1.3.6.1.4.1.5528.100.4.2.10.1.3.3823829717 0
-# .1.3.6.1.4.1.5528.100.4.2.10.1.7.399845582 No Leak
-# .1.3.6.1.4.1.5528.100.4.2.10.1.7.3502248167 Up
-# .1.3.6.1.4.1.5528.100.4.2.10.1.7.3823829717 OK
-
-
-# MIB: The sensor reading shown as a string (or empty string
-# if it is not plugged into a port).
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def inventory_apc_netbotz_other_sensors(info):
- for _sensor_label, _error_state, state_readable in info:
- if state_readable != "":
- return [(None, None)]
- return []
-
-
-def check_apc_netbotz_other_sensors(_no_item, _no_params, info):
- count_ok_sensors = 0
- for sensor_label, error_state, state_readable in info:
- if state_readable != "":
- if state_readable != "OK":
- state_readable = state_readable.lower()
-
- if error_state == "0":
- count_ok_sensors += 1
- else:
- yield 2, f"{sensor_label}: {state_readable}"
-
- if count_ok_sensors > 0:
- yield 0, "%d sensors are OK" % count_ok_sensors
-
-
-def parse_apc_netbotz_other_sensors(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["apc_netbotz_other_sensors"] = LegacyCheckDefinition(
- parse_function=parse_apc_netbotz_other_sensors,
- detect=startswith(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.5528.100.20.10"),
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.5528.100.4.2.10.1",
- oids=["4", "3", "7"],
- ),
- service_name="Numeric sensors summary",
- discovery_function=inventory_apc_netbotz_other_sensors,
- check_function=check_apc_netbotz_other_sensors,
-)
diff --git a/cmk/base/legacy_checks/apc_netbotz_sensors.py b/cmk/base/legacy_checks/apc_netbotz_sensors.py
deleted file mode 100644
index ca519f64eab..00000000000
--- a/cmk/base/legacy_checks/apc_netbotz_sensors.py
+++ /dev/null
@@ -1,240 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-# mypy: disable-error-code="var-annotated"
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.humidity import check_humidity
-from cmk.base.check_legacy_includes.temperature import check_temperature
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree, startswith
-
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.636159851 nbAlinkEnc_0_4_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.882181375 nbAlinkEnc_2_1_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.1619732064 nbAlinkEnc_0_2_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.1665932156 nbAlinkEnc_1_4_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.1751899818 nbAlinkEnc_2_2_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.1857547767 nbAlinkEnc_1_5_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.2370211927 nbAlinkEnc_1_6_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.2618588815 nbAlinkEnc_2_3_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.2628357572 nbAlinkEnc_0_1_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3031356659 nbAlinkEnc_0_5_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3056253200 nbAlinkEnc_0_6_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3103062985 nbAlinkEnc_2_4_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3328914949 nbAlinkEnc_1_3_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3406802758 nbAlinkEnc_0_3_TEMP
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.636159851 252
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.882181375 222
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.1619732064 222
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.1665932156 216
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.1751899818 245
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.1857547767 234
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.2370211927 240
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.2618588815 220
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.2628357572 229
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3031356659 0
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3056253200 0
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3103062985 215
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3328914949 234
-# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3406802758 238
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.636159851 25.200000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.882181375 22.200000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.1619732064 22.200000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.1665932156 21.600000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.1751899818 24.500000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.1857547767 23.400000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.2370211927 24.000000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.2618588815 22.000000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.2628357572 22.900000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3031356659
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3056253200
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3103062985 21.500000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3328914949 23.400000
-# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3406802758 23.800000
-
-# .1.3.6.1.4.1.5528.100.4.1.2.1.1.421607638 nbAlinkEnc_1_5_HUMI
-# .1.3.6.1.4.1.5528.100.4.1.2.1.1.581338442 nbAlinkEnc_1_3_HUMI
-# .1.3.6.1.4.1.5528.100.4.1.2.1.1.1121716336 nbAlinkEnc_0_6_HUMI
-# .1.3.6.1.4.1.5528.100.4.1.2.1.1.3273299739 nbAlinkEnc_0_3_HUMI
-# .1.3.6.1.4.1.5528.100.4.1.2.1.1.4181308384 nbAlinkEnc_0_5_HUMI
-# .1.3.6.1.4.1.5528.100.4.1.2.1.2.421607638 370
-# .1.3.6.1.4.1.5528.100.4.1.2.1.2.581338442 320
-# .1.3.6.1.4.1.5528.100.4.1.2.1.2.1121716336 0
-# .1.3.6.1.4.1.5528.100.4.1.2.1.2.3273299739 320
-# .1.3.6.1.4.1.5528.100.4.1.2.1.2.4181308384 0
-# .1.3.6.1.4.1.5528.100.4.1.2.1.7.421607638 37.000000
-# .1.3.6.1.4.1.5528.100.4.1.2.1.7.581338442 32.000000
-# .1.3.6.1.4.1.5528.100.4.1.2.1.7.1121716336
-# .1.3.6.1.4.1.5528.100.4.1.2.1.7.3273299739 32.000000
-# .1.3.6.1.4.1.5528.100.4.1.2.1.7.4181308384
-
-# .1.3.6.1.4.1.5528.100.4.1.3.1.1.1000015730 nbAlinkEnc_0_5_DWPT
-# .1.3.6.1.4.1.5528.100.4.1.3.1.1.1490079962 nbAlinkEnc_0_3_DWPT
-# .1.3.6.1.4.1.5528.100.4.1.3.1.1.2228353183 nbAlinkEnc_0_6_DWPT
-# .1.3.6.1.4.1.5528.100.4.1.3.1.1.2428087247 nbAlinkEnc_1_3_DWPT
-# .1.3.6.1.4.1.5528.100.4.1.3.1.1.3329736831 nbAlinkEnc_1_5_DWPT
-# .1.3.6.1.4.1.5528.100.4.1.3.1.2.1000015730 0
-# .1.3.6.1.4.1.5528.100.4.1.3.1.2.1490079962 61
-# .1.3.6.1.4.1.5528.100.4.1.3.1.2.2228353183 0
-# .1.3.6.1.4.1.5528.100.4.1.3.1.2.2428087247 57
-# .1.3.6.1.4.1.5528.100.4.1.3.1.2.3329736831 78
-# .1.3.6.1.4.1.5528.100.4.1.3.1.7.1000015730
-# .1.3.6.1.4.1.5528.100.4.1.3.1.7.1490079962 6.100000
-# .1.3.6.1.4.1.5528.100.4.1.3.1.7.2228353183
-# .1.3.6.1.4.1.5528.100.4.1.3.1.7.2428087247 5.700000
-# .1.3.6.1.4.1.5528.100.4.1.3.1.7.3329736831 7.800000
-
-# .--temperature---------------------------------------------------------.
-# | _ _ |
-# | | |_ ___ _ __ ___ _ __ ___ _ __ __ _| |_ _ _ _ __ ___ |
-# | | __/ _ \ '_ ` _ \| '_ \ / _ \ '__/ _` | __| | | | '__/ _ \ |
-# | | || __/ | | | | | |_) | __/ | | (_| | |_| |_| | | | __/ |
-# | \__\___|_| |_| |_| .__/ \___|_| \__,_|\__|\__,_|_| \___| |
-# | |_| |
-# +----------------------------------------------------------------------+
-# | main check |
-# '----------------------------------------------------------------------'
-
-# Suggested by customer
-
-
-def parse_apc_netbotz_sensors(string_table):
- parsed = {}
- for item_type, block in zip(("temp", "humidity", "dewpoint"), string_table):
- for item_name, reading_str, label, plugged_in_state in block:
- if not plugged_in_state:
- continue
- parsed.setdefault(item_type, {}).setdefault(
- item_name, {"reading": float(reading_str) / 10, "label": label}
- )
- return parsed
-
-
-def inventory_apc_netbotz_sensors_temp(parsed, what):
- return [(item, {}) for item in parsed.get(what, [])]
-
-
-def check_apc_netbotz_sensors_temp(item, params, parsed, what):
- if item in parsed.get(what, []):
- data = parsed[what][item]
- state, infotext, perf = check_temperature(
- data["reading"], params, f"apc_netbotz_sensors_{what}_{item}"
- )
- return state, "[{}] {}".format(data["label"], infotext), perf
- return None
-
-
-_OIDS = [
- "1", # NETBOTZV2-MIB::*SensorId
- "2", # NETBOTZV2-MIB::*SensorValue
- "4", # NETBOTZV2-MIB::*SensorLabel
- "7", # NETBOTZV2-MIB::*ValueStr; empty if sensor is not plugged in
-]
-
-
-def discover_apc_netbotz_sensors(parsed):
- return inventory_apc_netbotz_sensors_temp(parsed, "temp")
-
-
-def check_apc_netbotz_sensors(item, params, parsed):
- return check_apc_netbotz_sensors_temp(item, params, parsed, "temp")
-
-
-check_info["apc_netbotz_sensors"] = LegacyCheckDefinition(
- detect=startswith(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.5528.100.20.10"),
- fetch=[
- SNMPTree(
- base=".1.3.6.1.4.1.5528.100.4.1.1.1",
- oids=["1", "2", "4", "7"],
- ),
- SNMPTree(
- base=".1.3.6.1.4.1.5528.100.4.1.2.1",
- oids=["1", "2", "4", "7"],
- ),
- SNMPTree(
- base=".1.3.6.1.4.1.5528.100.4.1.3.1",
- oids=["1", "2", "4", "7"],
- ),
- ],
- parse_function=parse_apc_netbotz_sensors,
- service_name="Temperature %s",
- discovery_function=discover_apc_netbotz_sensors,
- check_function=check_apc_netbotz_sensors,
- check_ruleset_name="temperature",
- check_default_parameters={
- "levels": (30.0, 35.0),
- "levels_lower": (25.0, 20.0),
- },
-)
-
-
-def discover_apc_netbotz_sensors_dewpoint(parsed):
- return inventory_apc_netbotz_sensors_temp(parsed, "dewpoint")
-
-
-def check_apc_netbotz_sensors_dewpoint(item, params, info):
- return check_apc_netbotz_sensors_temp(item, params, info, "dewpoint")
-
-
-# .
-# .--dewpoint------------------------------------------------------------.
-# | _ _ _ |
-# | __| | _____ ___ __ ___ (_)_ __ | |_ |
-# | / _` |/ _ \ \ /\ / / '_ \ / _ \| | '_ \| __| |
-# | | (_| | __/\ V V /| |_) | (_) | | | | | |_ |
-# | \__,_|\___| \_/\_/ | .__/ \___/|_|_| |_|\__| |
-# | |_| |
-# '----------------------------------------------------------------------'
-
-# Suggested by customer
-
-check_info["apc_netbotz_sensors.dewpoint"] = LegacyCheckDefinition(
- service_name="Dew point %s",
- sections=["apc_netbotz_sensors"],
- discovery_function=discover_apc_netbotz_sensors_dewpoint,
- check_function=check_apc_netbotz_sensors_dewpoint,
- check_ruleset_name="temperature",
- check_default_parameters={
- "levels": (18.0, 25.0),
- "levels_lower": (-4.0, -6.0),
- },
-)
-
-# .
-# .--humidity------------------------------------------------------------.
-# | _ _ _ _ _ |
-# | | |__ _ _ _ __ ___ (_) __| (_) |_ _ _ |
-# | | '_ \| | | | '_ ` _ \| |/ _` | | __| | | | |
-# | | | | | |_| | | | | | | | (_| | | |_| |_| | |
-# | |_| |_|\__,_|_| |_| |_|_|\__,_|_|\__|\__, | |
-# | |___/ |
-# '----------------------------------------------------------------------'
-
-
-def inventory_apc_netbotz_sensors_humidity(parsed):
- return [(item, {}) for item in parsed.get("humidity", [])]
-
-
-def check_apc_netbotz_sensors_humidity(item, params, parsed):
- if item in parsed.get("humidity", []):
- data = parsed["humidity"][item]
- state, infotext, perf = check_humidity(data["reading"], params)
- return state, "[{}] {}".format(data["label"], infotext), perf
- return None
-
-
-check_info["apc_netbotz_sensors.humidity"] = LegacyCheckDefinition(
- service_name="Humidity %s",
- sections=["apc_netbotz_sensors"],
- discovery_function=inventory_apc_netbotz_sensors_humidity,
- check_function=check_apc_netbotz_sensors_humidity,
- check_ruleset_name="humidity",
- check_default_parameters={
- "levels": (60.0, 65.0),
- "levels_lower": (35.0, 30.0),
- },
-)
diff --git a/cmk/base/legacy_checks/apc_sts_source.py b/cmk/base/legacy_checks/apc_sts_source.py
index ae6c826ca66..77f0cb93179 100644
--- a/cmk/base/legacy_checks/apc_sts_source.py
+++ b/cmk/base/legacy_checks/apc_sts_source.py
@@ -10,8 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_apc_sts_source(info):
diff --git a/cmk/base/legacy_checks/apc_symmetra.py b/cmk/base/legacy_checks/apc_symmetra.py
index 61d315d6c47..56fed0d68e1 100644
--- a/cmk/base/legacy_checks/apc_symmetra.py
+++ b/cmk/base/legacy_checks/apc_symmetra.py
@@ -8,12 +8,12 @@
import time
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.elphase import check_elphase
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
+from cmk.agent_based.v2 import render, SNMPTree
from cmk.plugins.lib.apc import DETECT
# .1.3.6.1.4.1.318.1.1.1.2.1.1.0 2
@@ -249,7 +249,7 @@ def check_apc_symmetra(_no_item, params, parsed): # pylint: disable=too-many-br
if battery_time_remain:
battery_time_remain = float(battery_time_remain) / 100.0
- battery_time_remain_readable = get_age_human_readable(battery_time_remain)
+ battery_time_remain_readable = render.timespan(battery_time_remain)
state = 0
levelstxt = ""
battery_time_warn, battery_time_crit = None, None
@@ -272,8 +272,8 @@ def check_apc_symmetra(_no_item, params, parsed): # pylint: disable=too-many-br
if state:
levelstxt = " (warn/crit below {}/{})".format(
- get_age_human_readable(battery_time_warn),
- get_age_human_readable(battery_time_crit),
+ render.timespan(battery_time_warn),
+ render.timespan(battery_time_crit),
)
yield state, f"Time remaining: {battery_time_remain_readable}{levelstxt}", perfdata
@@ -309,7 +309,7 @@ def check_apc_symmetra(_no_item, params, parsed): # pylint: disable=too-many-br
check_function=check_apc_symmetra,
check_ruleset_name="apc_symentra",
check_default_parameters={
- "capacity": (95, 80),
+ "capacity": (95.0, 80.0),
"calibration_state": 0,
"battery_replace_state": 1,
},
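
Here get_age_human_readable from check_api is replaced by render.timespan from the v2 API; both take a duration in seconds and return a human-readable string. A quick usage sketch (the exact output format is the v2 renderer's; roughly "1 minute 30 seconds" for 90 s is an assumption):

from cmk.agent_based.v2 import render

battery_time_remain = 5400.0  # seconds
print(f"Time remaining: {render.timespan(battery_time_remain)}")
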
diff --git a/cmk/base/legacy_checks/apc_symmetra_ext_temp.py b/cmk/base/legacy_checks/apc_symmetra_ext_temp.py
index 5727f64c795..4fb0313d219 100644
--- a/cmk/base/legacy_checks/apc_symmetra_ext_temp.py
+++ b/cmk/base/legacy_checks/apc_symmetra_ext_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.apc import DETECT
diff --git a/cmk/base/legacy_checks/apc_symmetra_test.py b/cmk/base/legacy_checks/apc_symmetra_test.py
index 41d48d4bf3c..6feeff5aed6 100644
--- a/cmk/base/legacy_checks/apc_symmetra_test.py
+++ b/cmk/base/legacy_checks/apc_symmetra_test.py
@@ -36,8 +36,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.apc import DETECT
diff --git a/cmk/base/legacy_checks/appdynamics_memory.py b/cmk/base/legacy_checks/appdynamics_memory.py
index c6e1575b9a6..37657d00453 100644
--- a/cmk/base/legacy_checks/appdynamics_memory.py
+++ b/cmk/base/legacy_checks/appdynamics_memory.py
@@ -10,10 +10,10 @@
# mypy: disable-error-code="arg-type,list-item"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
def inventory_appdynamics_memory(info):
@@ -84,14 +84,14 @@ def check_appdynamics_memory(item, params, info): # pylint: disable=too-many-br
if max_available > 0:
perfdata = [("mem_%s" % mem_type, used, warn, crit, 0, max_available)]
yield state, "Used: {} of {} ({:.2f}%){}".format(
- get_bytes_human_readable(used),
- get_bytes_human_readable(max_available),
+ render.bytes(used),
+ render.bytes(max_available),
used_percent,
levels_label,
), perfdata
else:
perfdata = [("mem_%s" % mem_type, used)]
- yield state, "Used: %s" % get_bytes_human_readable(used), perfdata
+ yield state, "Used: %s" % render.bytes(used), perfdata
if max_available > 0:
perfdata = [
@@ -99,7 +99,7 @@ def check_appdynamics_memory(item, params, info): # pylint: disable=too-many-br
]
else:
perfdata = [("mem_%s_committed" % mem_type, committed)]
- yield 0, "Committed: %s" % get_bytes_human_readable(committed), perfdata
+ yield 0, "Committed: %s" % render.bytes(committed), perfdata
def parse_appdynamics_memory(string_table: StringTable) -> StringTable:
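
The byte-formatting migration follows the same pattern: render.bytes replaces get_bytes_human_readable here, while render.disksize appears further down (aws_glacier) where disk-size formatting is wanted; that the two differ in IEC versus SI units is an assumption, not shown in this diff. A minimal sketch:

from cmk.agent_based.v2 import render

used = 512 * 1024**2
print("Used: %s" % render.bytes(used))  # picks a fitting unit, e.g. "512 MiB"
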
diff --git a/cmk/base/legacy_checks/appdynamics_sessions.py b/cmk/base/legacy_checks/appdynamics_sessions.py
index 0506593a7b6..ca4e057b071 100644
--- a/cmk/base/legacy_checks/appdynamics_sessions.py
+++ b/cmk/base/legacy_checks/appdynamics_sessions.py
@@ -14,8 +14,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, StringTable
def inventory_appdynamics_sessions(info):
diff --git a/cmk/base/legacy_checks/appdynamics_web_container.py b/cmk/base/legacy_checks/appdynamics_web_container.py
index a61afce5699..25f18795828 100644
--- a/cmk/base/legacy_checks/appdynamics_web_container.py
+++ b/cmk/base/legacy_checks/appdynamics_web_container.py
@@ -17,8 +17,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, render, StringTable
Section = Mapping[str, Mapping[str, int]]
diff --git a/cmk/base/legacy_checks/arbor_peakflow_sp.py b/cmk/base/legacy_checks/arbor_peakflow_sp.py
deleted file mode 100644
index b7df6fcd196..00000000000
--- a/cmk/base/legacy_checks/arbor_peakflow_sp.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.arbor import check_arbor_disk_usage, inventory_arbor_disk_usage
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.arbor import DETECT_PEAKFLOW_SP
-from cmk.plugins.lib.df import FILESYSTEM_DEFAULT_PARAMS
-
-# .1.3.6.1.4.1.9694.1.4.2.1.1.0 796 --> PEAKFLOW-SP-MIB::deviceCpuLoadAvg1min.0
-# .1.3.6.1.4.1.9694.1.4.2.1.2.0 742 --> PEAKFLOW-SP-MIB::deviceCpuLoadAvg5min.0
-# .1.3.6.1.4.1.9694.1.4.2.1.3.0 742 --> PEAKFLOW-SP-MIB::deviceCpuLoadAvg15min.0
-# .1.3.6.1.4.1.9694.1.4.2.1.4.0 0 --> PEAKFLOW-SP-MIB::deviceDiskUsage.0
-# .1.3.6.1.4.1.9694.1.4.2.1.5.0 32864948 --> PEAKFLOW-SP-MIB::devicePhysicalMemory.0
-# .1.3.6.1.4.1.9694.1.4.2.1.6.0 4793660 --> PEAKFLOW-SP-MIB::devicePhysicalMemoryInUse.0
-# .1.3.6.1.4.1.9694.1.4.2.1.7.0 15 --> PEAKFLOW-SP-MIB::devicePhysicalMemoryUsage.0
-# .1.3.6.1.4.1.9694.1.4.2.1.8.0 4892156 --> PEAKFLOW-SP-MIB::deviceSwapSpace.0
-# .1.3.6.1.4.1.9694.1.4.2.1.9.0 0 --> PEAKFLOW-SP-MIB::deviceSwapSpaceInUse.0
-# .1.3.6.1.4.1.9694.1.4.2.1.10.0 0 --> PEAKFLOW-SP-MIB::deviceSwapSpaceUsage.0
-# .1.3.6.1.4.1.9694.1.4.2.1.11.0 0 --> PEAKFLOW-SP-MIB::deviceTotalFlows.0
-# .1.3.6.1.4.1.9694.1.4.2.1.12.0 0 --> PEAKFLOW-SP-MIB::deviceTotalFlowsHC.0
-
-
-def parse_peakflow_sp(string_table):
- if not string_table:
- return None
- valid = string_table[0]
- res = {"disk": valid[0]}
- if valid[1]:
- # this value appears to be optional
- res["flows"] = valid[1]
-
- return res
-
-
-check_info["arbor_peakflow_sp"] = LegacyCheckDefinition(
- detect=DETECT_PEAKFLOW_SP,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.9694.1.4.2.1",
- oids=["4.0", "12.0"],
- ),
- parse_function=parse_peakflow_sp,
-)
-
-check_info["arbor_peakflow_sp.disk_usage"] = LegacyCheckDefinition(
- service_name="Disk Usage %s",
- sections=["arbor_peakflow_sp"],
- discovery_function=inventory_arbor_disk_usage,
- check_function=check_arbor_disk_usage,
- check_ruleset_name="filesystem",
- check_default_parameters=FILESYSTEM_DEFAULT_PARAMS,
-)
-
-
-def inventory_arbor_peakflow_sp_flows(parsed):
- if "flows" in parsed:
- return [(None, None)]
- return []
-
-
-def check_arbor_peakflow_sp_flows(_no_item, params, parsed):
- flows = int(parsed["flows"])
- return 0, "%d flows" % flows, [("flows", flows)]
-
-
-check_info["arbor_peakflow_sp.flows"] = LegacyCheckDefinition(
- service_name="Flow Count",
- sections=["arbor_peakflow_sp"],
- discovery_function=inventory_arbor_peakflow_sp_flows,
- check_function=check_arbor_peakflow_sp_flows,
-)
diff --git a/cmk/base/legacy_checks/arbor_peakflow_tms.py b/cmk/base/legacy_checks/arbor_peakflow_tms.py
deleted file mode 100644
index 4df502fe83c..00000000000
--- a/cmk/base/legacy_checks/arbor_peakflow_tms.py
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.arbor import (
- check_arbor_disk_usage,
- check_arbor_host_fault,
- inventory_arbor_disk_usage,
- inventory_arbor_host_fault,
-)
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.arbor import DETECT_PEAKFLOW_TMS
-from cmk.plugins.lib.df import FILESYSTEM_DEFAULT_PARAMS
-
-# .1.3.6.1.4.1.9694.1.5.2.1.0 No Fault --> PEAKFLOW-TMS-MIB::tmsHostFault.0
-# .1.3.6.1.4.1.9694.1.5.2.2.0 101420100 --> PEAKFLOW-TMS-MIB::tmsHostUpTime.0
-# .1.3.6.1.4.1.9694.1.5.2.3.0 46 --> PEAKFLOW-TMS-MIB::deviceCpuLoadAvg1min.0
-# .1.3.6.1.4.1.9694.1.5.2.4.0 64 --> PEAKFLOW-TMS-MIB::deviceCpuLoadAvg5min.0
-# .1.3.6.1.4.1.9694.1.5.2.5.0 67 --> PEAKFLOW-TMS-MIB::deviceCpuLoadAvg15min.0
-# .1.3.6.1.4.1.9694.1.5.2.6.0 6 --> PEAKFLOW-TMS-MIB::deviceDiskUsage.0
-# .1.3.6.1.4.1.9694.1.5.2.7.0 4 --> PEAKFLOW-TMS-MIB::devicePhysicalMemoryUsage.0
-# .1.3.6.1.4.1.9694.1.5.2.8.0 0 --> PEAKFLOW-TMS-MIB::deviceSwapSpaceUsage.0
-
-
-def parse_peakflow_tms(string_table):
- if not all(string_table):
- return None
- health = string_table[0][0]
- updates = string_table[1][0]
- return {
- "disk": health[0],
- "host_fault": health[1],
- "update": {"Device": updates[0], "Mitigation": updates[1]},
- }
-
-
-check_info["arbor_peakflow_tms"] = LegacyCheckDefinition(
- detect=DETECT_PEAKFLOW_TMS,
- fetch=[
- SNMPTree(
- base=".1.3.6.1.4.1.9694.1.5.2",
- oids=["6.0", "1.0"],
- ),
- SNMPTree(
- base=".1.3.6.1.4.1.9694.1.5.5",
- oids=["1.2.0", "2.1.0"],
- ),
- ],
- parse_function=parse_peakflow_tms,
-)
-
-check_info["arbor_peakflow_tms.disk_usage"] = LegacyCheckDefinition(
- service_name="Disk Usage %s",
- sections=["arbor_peakflow_tms"],
- discovery_function=inventory_arbor_disk_usage,
- check_function=check_arbor_disk_usage,
- check_ruleset_name="filesystem",
- check_default_parameters=FILESYSTEM_DEFAULT_PARAMS,
-)
-
-check_info["arbor_peakflow_tms.host_fault"] = LegacyCheckDefinition(
- service_name="Host Fault",
- sections=["arbor_peakflow_tms"],
- discovery_function=inventory_arbor_host_fault,
- check_function=check_arbor_host_fault,
-)
-
-
-def inventory_peakflow_tms_updates(parsed):
- for name in parsed["update"]:
- yield name, None
-
-
-def check_peakflow_tms_updates(item, _no_params, parsed):
- if item in parsed["update"]:
- return 0, parsed["update"][item]
- return None
-
-
-check_info["arbor_peakflow_tms.updates"] = LegacyCheckDefinition(
- service_name="Config Update %s",
- sections=["arbor_peakflow_tms"],
- discovery_function=inventory_peakflow_tms_updates,
- check_function=check_peakflow_tms_updates,
-)
diff --git a/cmk/base/legacy_checks/arbor_pravail.py b/cmk/base/legacy_checks/arbor_pravail.py
deleted file mode 100644
index a12552333c3..00000000000
--- a/cmk/base/legacy_checks/arbor_pravail.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.arbor import (
- check_arbor_disk_usage,
- check_arbor_drop_rate,
- check_arbor_host_fault,
- inventory_arbor_disk_usage,
- inventory_arbor_drop_rate,
- inventory_arbor_host_fault,
-)
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.arbor import DETECT_PRAVAIL
-from cmk.plugins.lib.df import FILESYSTEM_DEFAULT_PARAMS
-
-# .1.3.6.1.4.1.9694.1.6.2.3.0 2070 --> PRAVAIL-MIB::deviceCpuLoadAvg1min.0
-# .1.3.6.1.4.1.9694.1.6.2.4.0 2059 --> PRAVAIL-MIB::deviceCpuLoadAvg5min.0
-# .1.3.6.1.4.1.9694.1.6.2.5.0 2059 --> PRAVAIL-MIB::deviceCpuLoadAvg15min.0
-# .1.3.6.1.4.1.9694.1.6.2.6.0 8 --> PRAVAIL-MIB::deviceDiskUsage.0
-# .1.3.6.1.4.1.9694.1.6.2.7.0 49 --> PRAVAIL-MIB::devicePhysicalMemoryUsage.0
-# .1.3.6.1.4.1.9694.1.6.2.8.0 0 --> PRAVAIL-MIB::deviceSwapSpaceUsage.0
-# .1.3.6.1.4.1.9694.1.6.2.39.0 43 --> PRAVAIL-MIB::pravailOverrunDropRatePps.0
-
-
-def parse_pravail(string_table):
- if not string_table:
- return None
- # peakflow SP and TMS have the same string_table in different oid ranges
- valid = string_table[0]
- return {
- "disk": valid[0],
- "host_fault": valid[1],
- "drop_rate": valid[2],
- }
-
-
-check_info["arbor_pravail"] = LegacyCheckDefinition(
- detect=DETECT_PRAVAIL,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.9694.1.6.2",
- oids=["6.0", "1.0", "39.0"],
- ),
- parse_function=parse_pravail,
-)
-
-check_info["arbor_pravail.disk_usage"] = LegacyCheckDefinition(
- service_name="Disk Usage %s",
- sections=["arbor_pravail"],
- discovery_function=inventory_arbor_disk_usage,
- check_function=check_arbor_disk_usage,
- check_ruleset_name="filesystem",
- check_default_parameters=FILESYSTEM_DEFAULT_PARAMS,
-)
-
-check_info["arbor_pravail.host_fault"] = LegacyCheckDefinition(
- service_name="Host Fault",
- sections=["arbor_pravail"],
- discovery_function=inventory_arbor_host_fault,
- check_function=check_arbor_host_fault,
-)
-
-check_info["arbor_pravail.drop_rate"] = LegacyCheckDefinition(
- service_name="%s drop rate",
- sections=["arbor_pravail"],
- discovery_function=inventory_arbor_drop_rate,
- check_function=check_arbor_drop_rate,
- check_ruleset_name="generic_rate",
-)
diff --git a/cmk/base/legacy_checks/arc_raid_status.py b/cmk/base/legacy_checks/arc_raid_status.py
index f62176ba674..97872afb87f 100644
--- a/cmk/base/legacy_checks/arc_raid_status.py
+++ b/cmk/base/legacy_checks/arc_raid_status.py
@@ -11,7 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_arc_raid_status(info):
diff --git a/cmk/base/legacy_checks/arcserve_backup.py b/cmk/base/legacy_checks/arcserve_backup.py
index 06d6454e7f9..53e432c3f51 100644
--- a/cmk/base/legacy_checks/arcserve_backup.py
+++ b/cmk/base/legacy_checks/arcserve_backup.py
@@ -58,9 +58,11 @@
# mypy: disable-error-code="var-annotated"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_arcserve_backup(info):
unit_factor = {"kb": 1024, "mb": 1024**2, "gb": 1024**3, "tb": 1024**4}
@@ -128,7 +130,7 @@ def check_arcserve_backup(item, _no_params, section): # pylint: disable=too-man
if message != "":
message += ", "
size = section[item]["size"]
- message += "Size: %s" % get_bytes_human_readable(section[item]["size"])
+ message += "Size: %s" % render.bytes(section[item]["size"])
else:
size = 0
perfdata.append(("size", size))
diff --git a/cmk/base/legacy_checks/arris_cmts_cpu.py b/cmk/base/legacy_checks/arris_cmts_cpu.py
index 7de42c3ec3b..697d1d5d0a2 100644
--- a/cmk/base/legacy_checks/arris_cmts_cpu.py
+++ b/cmk/base/legacy_checks/arris_cmts_cpu.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, OIDEnd, SNMPTree, StringTable
def inventory_arris_cmts_cpu(info):
diff --git a/cmk/base/legacy_checks/arris_cmts_mem.py b/cmk/base/legacy_checks/arris_cmts_mem.py
index c5fa1a4be7d..ac39eb8f3e8 100644
--- a/cmk/base/legacy_checks/arris_cmts_mem.py
+++ b/cmk/base/legacy_checks/arris_cmts_mem.py
@@ -43,7 +43,7 @@ def check_arris_cmts_mem(item, params, parsed):
data["mem_used"],
data["mem_total"],
(mode, (warn, crit)),
- metric_name="memused",
+ metric_name="mem_used",
)
diff --git a/cmk/base/legacy_checks/arris_cmts_temp.py b/cmk/base/legacy_checks/arris_cmts_temp.py
index 2e0e48e21f5..92def4eb76d 100644
--- a/cmk/base/legacy_checks/arris_cmts_temp.py
+++ b/cmk/base/legacy_checks/arris_cmts_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
def inventory_arris_cmts_temp(info):
diff --git a/cmk/base/legacy_checks/artec_documents.py b/cmk/base/legacy_checks/artec_documents.py
index 2d3da2f3be4..5707569693a 100644
--- a/cmk/base/legacy_checks/artec_documents.py
+++ b/cmk/base/legacy_checks/artec_documents.py
@@ -9,8 +9,15 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, equals, get_rate, get_value_store, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ all_of,
+ contains,
+ equals,
+ get_rate,
+ get_value_store,
+ SNMPTree,
+ StringTable,
+)
# .1.3.6.1.4.1.31560.0.0.3.1.3.1.48 Amount Documents Count --> ARTEC-MIB::artecDocumentsName.1.48
# .1.3.6.1.4.1.31560.0.0.3.1.3.1.49 Replicate Count --> ARTEC-MIB::artecDocumentsName.1.49
diff --git a/cmk/base/legacy_checks/artec_temp.py b/cmk/base/legacy_checks/artec_temp.py
index 3cb5f87d515..ff4f5889dd0 100644
--- a/cmk/base/legacy_checks/artec_temp.py
+++ b/cmk/base/legacy_checks/artec_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, equals, SNMPTree, StringTable
# .1.3.6.1.4.1.31560.3.1.1.1.48 33 --> ARTEC-MIB::hddTemperature
diff --git a/cmk/base/legacy_checks/aruba_aps.py b/cmk/base/legacy_checks/aruba_aps.py
index 81d31a48617..e557a2ae940 100644
--- a/cmk/base/legacy_checks/aruba_aps.py
+++ b/cmk/base/legacy_checks/aruba_aps.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_aruba_aps(info):
diff --git a/cmk/base/legacy_checks/aruba_clients.py b/cmk/base/legacy_checks/aruba_clients.py
index e2d9603a0d1..b0e80803062 100644
--- a/cmk/base/legacy_checks/aruba_clients.py
+++ b/cmk/base/legacy_checks/aruba_clients.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_aruba_clients(info):
diff --git a/cmk/base/legacy_checks/atto_fibrebridge_fcport.py b/cmk/base/legacy_checks/atto_fibrebridge_fcport.py
index 045fbab23da..e54085fd16a 100644
--- a/cmk/base/legacy_checks/atto_fibrebridge_fcport.py
+++ b/cmk/base/legacy_checks/atto_fibrebridge_fcport.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, OIDEnd, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, OIDEnd, SNMPTree, startswith, StringTable
def inventory_atto_fibrebridge_fcport(info):
diff --git a/cmk/base/legacy_checks/avaya_45xx_cpu.py b/cmk/base/legacy_checks/avaya_45xx_cpu.py
index b41c654f022..96d4a425036 100644
--- a/cmk/base/legacy_checks/avaya_45xx_cpu.py
+++ b/cmk/base/legacy_checks/avaya_45xx_cpu.py
@@ -10,8 +10,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_avaya_45xx_cpu(info):
diff --git a/cmk/base/legacy_checks/avaya_45xx_fan.py b/cmk/base/legacy_checks/avaya_45xx_fan.py
index f5ae6fec2f9..769916f937f 100644
--- a/cmk/base/legacy_checks/avaya_45xx_fan.py
+++ b/cmk/base/legacy_checks/avaya_45xx_fan.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_avaya_45xx_fan(info):
diff --git a/cmk/base/legacy_checks/avaya_45xx_temp.py b/cmk/base/legacy_checks/avaya_45xx_temp.py
index 23448200eda..e7e3322fdf3 100644
--- a/cmk/base/legacy_checks/avaya_45xx_temp.py
+++ b/cmk/base/legacy_checks/avaya_45xx_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_avaya_45xx_temp(info):
diff --git a/cmk/base/legacy_checks/avaya_88xx_cpu.py b/cmk/base/legacy_checks/avaya_88xx_cpu.py
index 2de46f7837c..fb4e05215f4 100644
--- a/cmk/base/legacy_checks/avaya_88xx_cpu.py
+++ b/cmk/base/legacy_checks/avaya_88xx_cpu.py
@@ -10,8 +10,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.avaya import DETECT_AVAYA
diff --git a/cmk/base/legacy_checks/avaya_chassis_card.py b/cmk/base/legacy_checks/avaya_chassis_card.py
index 502f577921d..4775fb48d4d 100644
--- a/cmk/base/legacy_checks/avaya_chassis_card.py
+++ b/cmk/base/legacy_checks/avaya_chassis_card.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.avaya import DETECT_AVAYA
avaya_chassis_card_operstatus_codes = {
diff --git a/cmk/base/legacy_checks/avaya_chassis_ps.py b/cmk/base/legacy_checks/avaya_chassis_ps.py
index b18b8ec7fcf..8c4460cd89c 100644
--- a/cmk/base/legacy_checks/avaya_chassis_ps.py
+++ b/cmk/base/legacy_checks/avaya_chassis_ps.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.avaya import DETECT_AVAYA
avaya_chassis_ps_status_codes = {
diff --git a/cmk/base/legacy_checks/avaya_chassis_temp.py b/cmk/base/legacy_checks/avaya_chassis_temp.py
index 049359398a2..d33917d0f27 100644
--- a/cmk/base/legacy_checks/avaya_chassis_temp.py
+++ b/cmk/base/legacy_checks/avaya_chassis_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.avaya import DETECT_AVAYA
diff --git a/cmk/base/legacy_checks/aws_dynamodb_summary.py b/cmk/base/legacy_checks/aws_dynamodb_summary.py
index 00bf8b256ce..45f45fdbb71 100644
--- a/cmk/base/legacy_checks/aws_dynamodb_summary.py
+++ b/cmk/base/legacy_checks/aws_dynamodb_summary.py
@@ -6,10 +6,11 @@
from collections.abc import Iterable
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.aws import AWSRegions
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
from cmk.plugins.aws.lib import GenericAWSSection, parse_aws
@@ -43,7 +44,7 @@ def check_aws_dynamodb_summary(item, params, parsed):
% (
table_name,
table["ItemCount"],
- get_bytes_human_readable(table["TableSizeBytes"]),
+ render.bytes(table["TableSizeBytes"]),
table["TableStatus"],
)
)
diff --git a/cmk/base/legacy_checks/aws_dynamodb_table.py b/cmk/base/legacy_checks/aws_dynamodb_table.py
index a1eaa291102..a7ad9c77af2 100644
--- a/cmk/base/legacy_checks/aws_dynamodb_table.py
+++ b/cmk/base/legacy_checks/aws_dynamodb_table.py
@@ -4,7 +4,7 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.aws import (
aws_get_float_human_readable,
inventory_aws_generic_single,
@@ -162,7 +162,7 @@ def check_aws_dynamodb_latency(item, params, parsed):
metric_name,
levels,
infoname=f"{statistic} latency {operation}",
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
)
if go_stale:
@@ -180,7 +180,16 @@ def discover_aws_dynamodb_table_read_capacity(p):
check_function=check_aws_dynamodb_read_capacity,
check_ruleset_name="aws_dynamodb_capacity",
check_default_parameters={
- "levels_%s" % op: {"levels_average": {"levels_upper": (80, 90)}} for op in ["read", "write"]
+ "levels_read": {
+ "levels_average": {
+ "levels_upper": (80.0, 90.0),
+ },
+ },
+ "levels_write": {
+ "levels_average": {
+ "levels_upper": (80.0, 90.0),
+ },
+ },
},
)
@@ -196,7 +205,16 @@ def discover_aws_dynamodb_table_write_capacity(p):
check_function=check_aws_dynamodb_write_capacity,
check_ruleset_name="aws_dynamodb_capacity",
check_default_parameters={
- "levels_%s" % op: {"levels_average": {"levels_upper": (80, 90)}} for op in ["read", "write"]
+ "levels_read": {
+ "levels_average": {
+ "levels_upper": (80.0, 90.0),
+ },
+ },
+ "levels_write": {
+ "levels_average": {
+ "levels_upper": (80.0, 90.0),
+ },
+ },
},
)
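
The expanded default parameters are the old dict comprehension spelled out, plus the int-to-float level change, which makes the per-operation defaults greppable. Equivalence check as a sketch:

old = {"levels_%s" % op: {"levels_average": {"levels_upper": (80, 90)}} for op in ["read", "write"]}
new = {
    "levels_read": {"levels_average": {"levels_upper": (80.0, 90.0)}},
    "levels_write": {"levels_average": {"levels_upper": (80.0, 90.0)}},
}
assert old.keys() == new.keys()
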
diff --git a/cmk/base/legacy_checks/aws_ec2_summary.py b/cmk/base/legacy_checks/aws_ec2_summary.py
index 809f78de22d..a663eb5282d 100644
--- a/cmk/base/legacy_checks/aws_ec2_summary.py
+++ b/cmk/base/legacy_checks/aws_ec2_summary.py
@@ -20,7 +20,7 @@ def discover_aws_ec2_summary(section: GenericAWSSection) -> Iterable[tuple[None,
def check_aws_ec2_summary(item, params, parsed):
- instances_by_state = {}
+ instances_by_state: dict[str, list] = {}
long_output = []
for instance in parsed:
instance_private_dns_name = instance["PrivateDnsName"]
@@ -29,9 +29,7 @@ def check_aws_ec2_summary(item, params, parsed):
instances_by_state.setdefault(instance_state, []).append(instance_id)
long_output.append(f"[{instance_id}] {instance_private_dns_name}: {instance_state}")
- yield 0, "Instances: %s" % sum(
- len(v) for v in instances_by_state.values() # type: ignore[misc] # expected bool?!
- )
+ yield 0, "Instances: %s" % sum(len(v) for v in instances_by_state.values())
for instance_state, instances in instances_by_state.items():
yield 0, f"{instance_state}: {len(instances)}"
diff --git a/cmk/base/legacy_checks/aws_elb.py b/cmk/base/legacy_checks/aws_elb.py
index f0f28ebc4b4..e4caf0943d6 100644
--- a/cmk/base/legacy_checks/aws_elb.py
+++ b/cmk/base/legacy_checks/aws_elb.py
@@ -4,7 +4,7 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.aws import (
aws_get_counts_rate_human_readable,
check_aws_http_errors,
@@ -122,7 +122,7 @@ def check_aws_elb_latency(item, params, parsed):
"metric_val": parsed.get("Latency"),
"metric_name": "aws_load_balancer_latency",
"levels": params.get("levels_latency"),
- "human_readable_func": get_age_human_readable,
+ "human_readable_func": render.timespan,
}
]
)
diff --git a/cmk/base/legacy_checks/aws_elbv2_application_target_groups_http.py b/cmk/base/legacy_checks/aws_elbv2_application_target_groups_http.py
index 4b907076fd1..4e453dc4a25 100644
--- a/cmk/base/legacy_checks/aws_elbv2_application_target_groups_http.py
+++ b/cmk/base/legacy_checks/aws_elbv2_application_target_groups_http.py
@@ -5,7 +5,7 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.aws import aws_get_parsed_item_data, check_aws_http_errors
+from cmk.base.check_legacy_includes.aws import check_aws_http_errors, get_data_or_go_stale
from cmk.base.config import check_info
from cmk.plugins.aws.lib import extract_aws_metrics_by_labels, parse_aws
@@ -29,8 +29,8 @@ def discover_aws_application_elb_target_groups_http(section):
yield from ((item, {}) for item, data in section.items() if "RequestCount" in data)
-@aws_get_parsed_item_data
-def check_aws_application_elb_target_groups_http(item, params, data):
+def check_aws_application_elb_target_groups_http(item, params, section):
+ data = get_data_or_go_stale(item, section)
return check_aws_http_errors(
params.get("levels_http", {}),
data,
diff --git a/cmk/base/legacy_checks/aws_elbv2_application_target_groups_lambda.py b/cmk/base/legacy_checks/aws_elbv2_application_target_groups_lambda.py
index f4ebf852391..8d606f9790b 100644
--- a/cmk/base/legacy_checks/aws_elbv2_application_target_groups_lambda.py
+++ b/cmk/base/legacy_checks/aws_elbv2_application_target_groups_lambda.py
@@ -6,9 +6,9 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.aws import (
- aws_get_parsed_item_data,
check_aws_error_rate,
check_aws_request_rate,
+ get_data_or_go_stale,
)
from cmk.base.config import check_info
@@ -27,8 +27,8 @@ def discover_aws_elbv2_target_groups_lambda(section):
yield from ((item, {}) for item, data in section.items() if "RequestCount" in data)
-@aws_get_parsed_item_data
-def check_aws_application_elb_target_groups_lambda(item, params, data):
+def check_aws_application_elb_target_groups_lambda(item, params, section):
+ data = get_data_or_go_stale(item, section)
request_rate = data.get("RequestCount")
if request_rate is None:
raise IgnoreResultsError("Currently no data from AWS")
diff --git a/cmk/base/legacy_checks/aws_elbv2_network.py b/cmk/base/legacy_checks/aws_elbv2_network.py
index f3b913dfd90..f49846edf00 100644
--- a/cmk/base/legacy_checks/aws_elbv2_network.py
+++ b/cmk/base/legacy_checks/aws_elbv2_network.py
@@ -176,7 +176,7 @@ def discover_aws_elbv2_network_connections(p):
# yield check_levels(perc,
# 'aws_overall_hosts_health_perc',
# params.get('levels_overall_hosts_health_perc'),
-# human_readable_func=get_percent_human_readable,
+# human_readable_func=render.percent,
# infoname="Proportion of healthy hosts")
#
#
diff --git a/cmk/base/legacy_checks/aws_glacier.py b/cmk/base/legacy_checks/aws_glacier.py
index 980ad62c658..e67675861dd 100644
--- a/cmk/base/legacy_checks/aws_glacier.py
+++ b/cmk/base/legacy_checks/aws_glacier.py
@@ -6,10 +6,12 @@
from collections.abc import Iterable, Mapping
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.aws import parse_aws
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
Section = Mapping[str, Mapping]
@@ -50,7 +52,7 @@ def check_aws_glacier_archives(item, params, parsed):
vault_size,
"aws_glacier_vault_size",
params.get("vault_size_levels", (None, None)),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.disksize,
infoname="Vault size",
)
@@ -108,14 +110,14 @@ def check_aws_glacier_summary(item, params, parsed):
sum_size,
"aws_glacier_total_vault_size",
params.get("vault_size_levels", (None, None)),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.disksize,
infoname="Total size",
)
if largest_vault:
yield 0, "Largest vault: {} ({})".format(
largest_vault,
- get_bytes_human_readable(largest_vault_size),
+ render.disksize(largest_vault_size),
), [("aws_glacier_largest_vault_size", largest_vault_size)]
diff --git a/cmk/base/legacy_checks/aws_rds_limits.py b/cmk/base/legacy_checks/aws_rds_limits.py
index 6726795bc58..0dfd3ea4a24 100644
--- a/cmk/base/legacy_checks/aws_rds_limits.py
+++ b/cmk/base/legacy_checks/aws_rds_limits.py
@@ -6,10 +6,12 @@
from collections.abc import Callable
from typing import Any
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.aws import AWSLimitsByRegion, check_aws_limits, parse_aws
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_aws_rds_limits(string_table):
limits_by_region: AWSLimitsByRegion = {}
@@ -21,7 +23,7 @@ def parse_aws_rds_limits(string_table):
factor = 1024**4 / 1000.0
limit = limit * factor
amount = amount * factor
- human_readable_f: Callable[[Any], str] | type[int] = get_bytes_human_readable
+ human_readable_f: Callable[[Any], str] | type[int] = render.bytes
else:
human_readable_f = int
limits_by_region.setdefault(region, []).append(
diff --git a/cmk/base/legacy_checks/aws_s3.py b/cmk/base/legacy_checks/aws_s3.py
index d135f1ab40c..bc423c6a1ec 100644
--- a/cmk/base/legacy_checks/aws_s3.py
+++ b/cmk/base/legacy_checks/aws_s3.py
@@ -6,10 +6,12 @@
from collections.abc import Iterable, Mapping
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.aws import inventory_aws_generic, parse_aws
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
Section = Mapping[str, Mapping]
@@ -54,13 +56,13 @@ def check_aws_s3_objects(item, params, parsed):
bucket_sizes = metrics["BucketSizeBytes"]
storage_infos = []
for storage_type, value in bucket_sizes.items():
- storage_infos.append(f"{storage_type}: {get_bytes_human_readable(value)}")
+ storage_infos.append(f"{storage_type}: {render.bytes(value)}")
sum_size = sum(bucket_sizes.values())
yield check_levels(
sum_size,
"aws_bucket_size",
params.get("bucket_size_levels", (None, None)),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="Bucket size",
)
if storage_infos:
@@ -122,14 +124,14 @@ def check_aws_s3_summary(item, params, parsed):
sum_size,
"aws_bucket_size",
params.get("bucket_size_levels", (None, None)),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="Total size",
)
if largest_bucket:
yield 0, "Largest bucket: {} ({})".format(
largest_bucket,
- get_bytes_human_readable(largest_bucket_size),
+ render.bytes(largest_bucket_size),
), [("aws_largest_bucket_size", largest_bucket_size)]
diff --git a/cmk/base/legacy_checks/aws_s3_requests.py b/cmk/base/legacy_checks/aws_s3_requests.py
index 39cb698137e..dca4a9611da 100644
--- a/cmk/base/legacy_checks/aws_s3_requests.py
+++ b/cmk/base/legacy_checks/aws_s3_requests.py
@@ -4,13 +4,13 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.aws import (
aws_get_bytes_rate_human_readable,
aws_get_counts_rate_human_readable,
- aws_get_parsed_item_data,
check_aws_http_errors,
check_aws_metrics,
+ get_data_or_go_stale,
inventory_aws_generic,
)
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
@@ -54,8 +54,8 @@ def parse_aws_s3(string_table):
# '----------------------------------------------------------------------'
-@aws_get_parsed_item_data
-def check_aws_s3_requests(item, params, metrics):
+def check_aws_s3_requests(item, params, section):
+ metrics = get_data_or_go_stale(item, section)
all_requests_rate = metrics.get("AllRequests")
if all_requests_rate is None:
raise IgnoreResultsError("Currently no data from AWS")
@@ -113,8 +113,8 @@ def discover_aws_s3_requests(p):
# '----------------------------------------------------------------------'
-@aws_get_parsed_item_data
-def check_aws_s3_http_errors(item, params, metrics):
+def check_aws_s3_http_errors(item, params, section):
+ metrics = get_data_or_go_stale(item, section)
return check_aws_http_errors(
params.get("levels_load_balancers", params),
metrics,
@@ -147,8 +147,8 @@ def discover_aws_s3_requests_http_errors(p):
# '----------------------------------------------------------------------'
-@aws_get_parsed_item_data
-def check_aws_s3_latency(item, params, metrics):
+def check_aws_s3_latency(item, params, section):
+ metrics = get_data_or_go_stale(item, section)
metric_infos = []
for key, title, perf_key in [
("TotalRequestLatency", "Total request latency", "aws_request_latency"),
@@ -171,7 +171,7 @@ def check_aws_s3_latency(item, params, metrics):
"metric_name": perf_key,
"levels": levels,
"info_name": title,
- "human_readable_func": get_age_human_readable,
+ "human_readable_func": render.time_offset,
}
)
@@ -201,8 +201,8 @@ def discover_aws_s3_requests_latency(p):
# '----------------------------------------------------------------------'
-@aws_get_parsed_item_data
-def check_aws_s3_traffic_stats(item, params, metrics):
+def check_aws_s3_traffic_stats(item, params, section):
+ metrics = get_data_or_go_stale(item, section)
return check_aws_metrics(
[
{
@@ -241,8 +241,8 @@ def discover_aws_s3_requests_traffic_stats(p):
# '----------------------------------------------------------------------'
-@aws_get_parsed_item_data
-def check_aws_s3_select_object(item, params, metrics):
+def check_aws_s3_select_object(item, params, section):
+ metrics = get_data_or_go_stale(item, section)
return check_aws_metrics(
[
{
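The decorator-to-helper migration above follows one pattern throughout: the check function now receives the whole section and looks its item up explicitly. A minimal sketch of that pattern, assuming the helper simply returns the item's data and signals staleness when the item is missing (stand-in exception class, not the real import):

    from collections.abc import Mapping

    class IgnoreResultsError(RuntimeError):
        """Stand-in for cmk.agent_based.v2.IgnoreResultsError."""

    def get_data_or_go_stale_sketch(item: str, section: Mapping):
        # Raising instead of returning keeps the service stale rather than
        # reporting a misleading state while AWS delivers no data.
        try:
            return section[item]
        except KeyError:
            raise IgnoreResultsError("Currently no data from AWS") from None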
diff --git a/cmk/base/legacy_checks/azure_ad.py b/cmk/base/legacy_checks/azure_ad.py
index 392ec1c12eb..e47c979e961 100644
--- a/cmk/base/legacy_checks/azure_ad.py
+++ b/cmk/base/legacy_checks/azure_ad.py
@@ -10,10 +10,12 @@
import time
from calendar import timegm
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.azure import AZURE_AGENT_SEPARATOR
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_azure_ad(string_table):
parsed = {}
@@ -124,7 +126,7 @@ def check_azure_sync(item, params, parsed):
time_delta,
None,
params.get("age"),
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname="Time since last synchronization",
)
diff --git a/cmk/base/legacy_checks/azure_databases.py b/cmk/base/legacy_checks/azure_databases.py
index 8af71612713..163664e2f0b 100644
--- a/cmk/base/legacy_checks/azure_databases.py
+++ b/cmk/base/legacy_checks/azure_databases.py
@@ -4,7 +4,7 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.azure import (
check_azure_metric,
discover_azure_by_metrics,
@@ -15,11 +15,14 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render, Service
+from cmk.plugins.lib.azure import get_service_labels_from_resource_tags
+
# https://www.unigma.com/2016/07/11/best-practices-for-monitoring-microsoft-azure/
-@get_data_or_go_stale
-def check_azure_databases_storage(_item, params, resource):
+def check_azure_databases_storage(item, params, section):
+ resource = get_data_or_go_stale(item, section)
cmk_key = "storage_percent"
levels = params.get("%s_levels" % cmk_key)
mcheck = check_azure_metric(
@@ -29,7 +32,7 @@ def check_azure_databases_storage(_item, params, resource):
state, text, perf = mcheck
abs_storage_metric = resource.metrics.get("average_storage")
if abs_storage_metric is not None:
- text += " (%s)" % get_bytes_human_readable(abs_storage_metric.value)
+ text += " (%s)" % render.bytes(abs_storage_metric.value)
yield state, text, perf
@@ -47,8 +50,8 @@ def check_azure_databases_storage(_item, params, resource):
)
-@get_data_or_go_stale
-def check_azure_databases_deadlock(_item, params, resource):
+def check_azure_databases_deadlock(item, params, section):
+ resource = get_data_or_go_stale(item, section)
cmk_key = "deadlocks"
levels = params.get("%s_levels" % cmk_key)
mcheck = check_azure_metric(resource, "average_deadlock", cmk_key, "Deadlocks", levels=levels)
@@ -70,8 +73,8 @@ def check_azure_databases_deadlock(_item, params, resource):
)
-@get_data_or_go_stale
-def check_azure_databases_cpu(_item, params, resource):
+def check_azure_databases_cpu(item, params, section):
+ resource = get_data_or_go_stale(item, section)
metrics = resource.metrics
cpu_percent = metrics.get("average_cpu_percent")
@@ -96,8 +99,8 @@ def check_azure_databases_cpu(_item, params, resource):
)
-@get_data_or_go_stale
-def check_azure_databases_dtu(_item, params, resource):
+def check_azure_databases_dtu(item, params, section):
+ resource = get_data_or_go_stale(item, section)
cmk_key = "dtu_percent"
levels = params.get("%s_levels" % cmk_key)
mcheck = check_azure_metric(
@@ -131,8 +134,8 @@ def check_azure_databases_dtu(_item, params, resource):
)
-@get_data_or_go_stale
-def check_azure_databases_connections(_item, params, resource):
+def check_azure_databases_connections(item, params, section):
+ resource = get_data_or_go_stale(item, section)
for key, cmk_key, displ, use_rate in _AZURE_CONNECTIONS_METRICS:
levels = params.get("%s_levels" % cmk_key)
mcheck = check_azure_metric(resource, key, cmk_key, displ, levels=levels, use_rate=use_rate)
@@ -156,14 +159,20 @@ def check_azure_databases_connections(_item, params, resource):
)
-@get_data_or_go_stale
-def check_azure_databases(_item, _no_params, resource):
+def check_azure_databases(item, _no_params, section):
+ resource = get_data_or_go_stale(item, section)
for k, v in iter_resource_attributes(resource):
yield 0, f"{k}: {v}"
def discover_azure_databases(section):
- yield from ((item, {}) for item in section)
+ yield from (
+ Service(
+ item=item,
+ labels=get_service_labels_from_resource_tags(resource.tags),
+ )
+ for item, resource in section.items()
+ )
check_info["azure_databases"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/azure_sites.py b/cmk/base/legacy_checks/azure_sites.py
index 3c8aff2e145..b806683bd4d 100644
--- a/cmk/base/legacy_checks/azure_sites.py
+++ b/cmk/base/legacy_checks/azure_sites.py
@@ -13,6 +13,9 @@
)
from cmk.base.config import check_info
+from cmk.agent_based.v2 import Service
+from cmk.plugins.lib.azure import get_service_labels_from_resource_tags
+
_AZURE_SITES_METRICS = ( # metric_key, cmk_key, display_name, use_rate_flag
("total_CpuTime", "cpu_time_percent", "CPU time", True),
("total_AverageResponseTime", "avg_response_time", "Average response time", False),
@@ -20,8 +23,8 @@
)
-@get_data_or_go_stale
-def check_azure_sites(_item, params, resource):
+def check_azure_sites(item, params, section):
+ resource = get_data_or_go_stale(item, section)
for key, cmk_key, displ, use_rate in _AZURE_SITES_METRICS:
levels = params.get("%s_levels" % cmk_key, (None, None))
mcheck = check_azure_metric(resource, key, cmk_key, displ, levels=levels, use_rate=use_rate)
@@ -33,7 +36,10 @@ def check_azure_sites(_item, params, resource):
def discover_azure_sites(section):
- yield from ((item, {}) for item in section)
+ yield from (
+ Service(item=item, labels=get_service_labels_from_resource_tags(resource.tags))
+ for item, resource in section.items()
+ )
check_info["azure_sites"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/azure_storageaccounts.py b/cmk/base/legacy_checks/azure_storageaccounts.py
index 3e13727a4de..37139a2b5bb 100644
--- a/cmk/base/legacy_checks/azure_storageaccounts.py
+++ b/cmk/base/legacy_checks/azure_storageaccounts.py
@@ -16,9 +16,12 @@
)
from cmk.base.config import check_info
+from cmk.agent_based.v2 import Service
+from cmk.plugins.lib.azure import get_service_labels_from_resource_tags
-@get_data_or_go_stale
-def check_azure_storageaccounts(_item, params, resource):
+
+def check_azure_storageaccounts(item, params, section):
+ resource = get_data_or_go_stale(item, section)
iter_attrs = iter_resource_attributes(resource, include_keys=("kind", "location"))
# kind first
try:
@@ -38,7 +41,10 @@ def check_azure_storageaccounts(_item, params, resource):
def discover_azure_storageaccounts(section):
- yield from ((item, {}) for item in section)
+ yield from (
+ Service(item=item, labels=get_service_labels_from_resource_tags(resource.tags))
+ for item, resource in section.items()
+ )
check_info["azure_storageaccounts"] = LegacyCheckDefinition(
@@ -47,7 +53,7 @@ def discover_azure_storageaccounts(section):
discovery_function=discover_azure_storageaccounts,
check_function=check_azure_storageaccounts,
check_ruleset_name="azure_storageaccounts",
- check_default_parameters={}
+ check_default_parameters={},
# metrics description:
# https://docs.microsoft.com/en-US/azure/monitoring-and-diagnostics/monitoring-supported-metrics#microsoftstoragestorageaccounts
# 'ingress_levels': tuple [B]
@@ -65,8 +71,8 @@ def discover_azure_storageaccounts(section):
)
-@get_data_or_go_stale
-def check_azure_storageaccounts_flow(_item, params, resource):
+def check_azure_storageaccounts_flow(item, params, section):
+ resource = get_data_or_go_stale(item, section)
for metric_key in ("total_Ingress", "total_Egress", "total_Transactions"):
cmk_key = metric_key[6:].lower()
displ = cmk_key.title()
@@ -84,7 +90,7 @@ def check_azure_storageaccounts_flow(_item, params, resource):
),
check_function=check_azure_storageaccounts_flow,
check_ruleset_name="azure_storageaccounts",
- check_default_parameters={}
+ check_default_parameters={},
# metrics description:
# https://docs.microsoft.com/en-US/azure/monitoring-and-diagnostics/monitoring-supported-metrics#microsoftstoragestorageaccounts
# 'ingress_levels': tuple [B]
@@ -102,8 +108,8 @@ def check_azure_storageaccounts_flow(_item, params, resource):
)
-@get_data_or_go_stale
-def check_azure_storageaccounts_performance(_item, params, resource):
+def check_azure_storageaccounts_performance(item, params, section):
+ resource = get_data_or_go_stale(item, section)
for key, cmk_key, displ in (
("average_SuccessServerLatency", "server_latency", "Success server latency"),
("average_SuccessE2ELatency", "e2e_latency", "End-to-end server latency"),
@@ -123,7 +129,7 @@ def check_azure_storageaccounts_performance(_item, params, resource):
),
check_function=check_azure_storageaccounts_performance,
check_ruleset_name="azure_storageaccounts",
- check_default_parameters={}
+ check_default_parameters={},
# metrics description:
# https://docs.microsoft.com/en-US/azure/monitoring-and-diagnostics/monitoring-supported-metrics#microsoftstoragestorageaccounts
# 'ingress_levels': tuple [B]
diff --git a/cmk/base/legacy_checks/azure_usagedetails.py b/cmk/base/legacy_checks/azure_usagedetails.py
index 165eead2d85..beb47fc2a69 100644
--- a/cmk/base/legacy_checks/azure_usagedetails.py
+++ b/cmk/base/legacy_checks/azure_usagedetails.py
@@ -37,8 +37,8 @@ def parse_azure_usagedetails(string_table):
return parsed
-@get_data_or_go_stale
-def check_azure_usagedetails(_no_item, params, data):
+def check_azure_usagedetails(item, params, section):
+ data = get_data_or_go_stale(item, section)
for currency, amount in list(data.get("costs", {}).items()):
levels = params.get("levels")
yield check_levels(amount, "service_costs_%s" % currency.lower(), levels, currency)
diff --git a/cmk/base/legacy_checks/barracuda_mail_latency.py b/cmk/base/legacy_checks/barracuda_mail_latency.py
index 156d9e48982..8b8c5a28e42 100644
--- a/cmk/base/legacy_checks/barracuda_mail_latency.py
+++ b/cmk/base/legacy_checks/barracuda_mail_latency.py
@@ -6,11 +6,10 @@
# .1.3.6.1.4.1.20632.2.5 2
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.barracuda import DETECT_BARRACUDA
@@ -23,7 +22,7 @@ def check_barracuda_mail_latency(_no_item, params, info):
int(info[0][0]),
"mail_latency",
params["levels"],
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname="Average",
)
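This and the following files swap the StringTable import from the cmk.agent_based.v2.type_defs submodule to the top-level cmk.agent_based.v2 namespace. The type itself is just the raw table of strings handed to a parse function; a minimal sketch, assuming the alias is unchanged by the move:

    StringTable = list[list[str]]  # as re-exported by cmk.agent_based.v2

    example_table: StringTable = [["1", "24", "SLOT #0: TEMP #1"]]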
diff --git a/cmk/base/legacy_checks/barracuda_mailqueues.py b/cmk/base/legacy_checks/barracuda_mailqueues.py
index d8ca701fdf6..98a3bab0967 100644
--- a/cmk/base/legacy_checks/barracuda_mailqueues.py
+++ b/cmk/base/legacy_checks/barracuda_mailqueues.py
@@ -13,13 +13,12 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.barracuda import DETECT_BARRACUDA
-def inventory_barracuda_mailqueues(info):
- return [("", {})]
+def discover_barracuda_mailqueues(info):
+ yield None, {}
def check_barracuda_mailqueues(_no_item, params, info):
@@ -57,13 +56,13 @@ def parse_barracuda_mailqueues(string_table: StringTable) -> StringTable | None:
base=".1.3.6.1.4.1.20632.2",
oids=["2", "3", "4", "60"],
),
- service_name="Mail Queue %s",
+ service_name="Mail Queue",
# The Barracuda spam firewall does not respond or returns a timeout error
# when executing 'snmpwalk' on whole tables. But we can work around this by
# specifying all needed OIDs; then we can use 'snmpget' and 'snmpwalk' on these single OIDs.
- discovery_function=inventory_barracuda_mailqueues,
+ discovery_function=discover_barracuda_mailqueues,
check_function=check_barracuda_mailqueues,
- check_ruleset_name="mail_queue_length",
+ check_ruleset_name="mail_queue_length_single",
check_default_parameters={
"deferred": (80, 100),
"active": (80, 100),
diff --git a/cmk/base/legacy_checks/barracuda_system_cpu_util.py b/cmk/base/legacy_checks/barracuda_system_cpu_util.py
index 5bb95923cf3..6b62f4778a9 100644
--- a/cmk/base/legacy_checks/barracuda_system_cpu_util.py
+++ b/cmk/base/legacy_checks/barracuda_system_cpu_util.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.barracuda import DETECT_BARRACUDA
# .1.3.6.1.4.1.20632.2.13 3
diff --git a/cmk/base/legacy_checks/bdt_tape_info.py b/cmk/base/legacy_checks/bdt_tape_info.py
index afa577a4cc1..010166dad48 100644
--- a/cmk/base/legacy_checks/bdt_tape_info.py
+++ b/cmk/base/legacy_checks/bdt_tape_info.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_bdt_tape_info(info):
diff --git a/cmk/base/legacy_checks/bdt_tape_status.py b/cmk/base/legacy_checks/bdt_tape_status.py
index f7c8f837063..ca0ad1eb234 100644
--- a/cmk/base/legacy_checks/bdt_tape_status.py
+++ b/cmk/base/legacy_checks/bdt_tape_status.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_bdt_tape_status(info):
diff --git a/cmk/base/legacy_checks/bdtms_tape_info.py b/cmk/base/legacy_checks/bdtms_tape_info.py
index 4b59a06eaa0..3a722fbee47 100644
--- a/cmk/base/legacy_checks/bdtms_tape_info.py
+++ b/cmk/base/legacy_checks/bdtms_tape_info.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_bdtms_tape_info(info):
diff --git a/cmk/base/legacy_checks/bdtms_tape_module.py b/cmk/base/legacy_checks/bdtms_tape_module.py
index 4b668254ad9..aad781064e6 100644
--- a/cmk/base/legacy_checks/bdtms_tape_module.py
+++ b/cmk/base/legacy_checks/bdtms_tape_module.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, OIDEnd, SNMPTree, StringTable
def inventory_bdtms_tape_module(info):
diff --git a/cmk/base/legacy_checks/bdtms_tape_status.py b/cmk/base/legacy_checks/bdtms_tape_status.py
index 3c7243139cd..48ee427a4fe 100644
--- a/cmk/base/legacy_checks/bdtms_tape_status.py
+++ b/cmk/base/legacy_checks/bdtms_tape_status.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_bdtms_tape_info(info):
diff --git a/cmk/base/legacy_checks/bintec_brrp_status.py b/cmk/base/legacy_checks/bintec_brrp_status.py
index d7c9023c848..37b054329ac 100644
--- a/cmk/base/legacy_checks/bintec_brrp_status.py
+++ b/cmk/base/legacy_checks/bintec_brrp_status.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, startswith, StringTable
def bintec_brrp_status_compose_item(brrp_id):
diff --git a/cmk/base/legacy_checks/bintec_cpu.py b/cmk/base/legacy_checks/bintec_cpu.py
index 360b8d69401..d47706d9712 100644
--- a/cmk/base/legacy_checks/bintec_cpu.py
+++ b/cmk/base/legacy_checks/bintec_cpu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
# These OIDs do not deliver the LOAD, as one might assume, but the
# UTILIZATION, since only the utilization of the CPU is taken into account.
@@ -37,7 +36,7 @@ def check_bintec_cpu(_no_item, params, info):
yield from check_cpu_util(util, params)
-# Migration NOTE: Create a separate section, but a common check plugin for
+# Migration NOTE: Create a separate section, but a common check plug-in for
# tplink_cpu, hr_cpu, cisco_nexus_cpu, bintec_cpu, winperf_processor,
# lxc_container_cpu, docker_container_cpu.
# Migration via cmk/update_config.py!
diff --git a/cmk/base/legacy_checks/bintec_info.py b/cmk/base/legacy_checks/bintec_info.py
index ba66f70aba2..113d7bef132 100644
--- a/cmk/base/legacy_checks/bintec_info.py
+++ b/cmk/base/legacy_checks/bintec_info.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, equals, SNMPTree, StringTable
def inventory_bintec_info(info):
diff --git a/cmk/base/legacy_checks/bintec_sensors.py b/cmk/base/legacy_checks/bintec_sensors.py
index 25c33b1ad9a..1efbce9d8a9 100644
--- a/cmk/base/legacy_checks/bintec_sensors.py
+++ b/cmk/base/legacy_checks/bintec_sensors.py
@@ -11,8 +11,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def parse_bintec_sensors(string_table: StringTable) -> StringTable:
diff --git a/cmk/base/legacy_checks/blade_blades.py b/cmk/base/legacy_checks/blade_blades.py
index 5921251691e..3b0babf8dc5 100644
--- a/cmk/base/legacy_checks/blade_blades.py
+++ b/cmk/base/legacy_checks/blade_blades.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.blade import DETECT_BLADE
# .1.3.6.1.4.1.2.3.51.2.22.1.5.1.1.2.1 1
diff --git a/cmk/base/legacy_checks/blade_blowers.py b/cmk/base/legacy_checks/blade_blowers.py
index d29084352b1..2e4d718ecda 100644
--- a/cmk/base/legacy_checks/blade_blowers.py
+++ b/cmk/base/legacy_checks/blade_blowers.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.blade import DETECT_BLADE
# The BLADE-MIB is somewhat goofy regarding the blower
diff --git a/cmk/base/legacy_checks/blade_bx_blades.py b/cmk/base/legacy_checks/blade_bx_blades.py
index 062ee0a5ab8..684e9aae761 100644
--- a/cmk/base/legacy_checks/blade_bx_blades.py
+++ b/cmk/base/legacy_checks/blade_bx_blades.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.blade import DETECT_BLADE_BX
diff --git a/cmk/base/legacy_checks/blade_bx_powerfan.py b/cmk/base/legacy_checks/blade_bx_powerfan.py
index 56b3bd97915..1e2016f2e1e 100644
--- a/cmk/base/legacy_checks/blade_bx_powerfan.py
+++ b/cmk/base/legacy_checks/blade_bx_powerfan.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.blade import DETECT_BLADE_BX
blade_bx_status = {
@@ -97,7 +96,7 @@ def parse_blade_bx_powerfan(string_table: StringTable) -> StringTable:
check_function=check_blade_bx_powerfan,
check_ruleset_name="hw_fans_perc",
check_default_parameters={
- "levels_lower": (20, 10),
- "levels": (80, 90),
+ "levels_lower": (20.0, 10.0),
+ "levels": (80.0, 90.0),
},
)
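The default levels change from int to float here, presumably to match the float-typed percentage levels of the "hw_fans_perc" ruleset. A tiny self-contained illustration of the upper-threshold logic with such levels:

    value = 75.0
    warn, crit = (80.0, 90.0)
    state = 2 if value >= crit else 1 if value >= warn else 0
    assert state == 0  # 75.0% is below both thresholds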
diff --git a/cmk/base/legacy_checks/blade_bx_powermod.py b/cmk/base/legacy_checks/blade_bx_powermod.py
index 3f361e3c602..056f3003883 100644
--- a/cmk/base/legacy_checks/blade_bx_powermod.py
+++ b/cmk/base/legacy_checks/blade_bx_powermod.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.blade import DETECT_BLADE_BX
diff --git a/cmk/base/legacy_checks/blade_bx_temp.py b/cmk/base/legacy_checks/blade_bx_temp.py
index fbe389fccc9..9b420cf5524 100644
--- a/cmk/base/legacy_checks/blade_bx_temp.py
+++ b/cmk/base/legacy_checks/blade_bx_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.blade import DETECT_BLADE_BX
diff --git a/cmk/base/legacy_checks/blade_health.py b/cmk/base/legacy_checks/blade_health.py
index 58a265bb01a..cbb1c6da698 100644
--- a/cmk/base/legacy_checks/blade_health.py
+++ b/cmk/base/legacy_checks/blade_health.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.blade import DETECT_BLADE
# Example excerpt from SNMP data:
diff --git a/cmk/base/legacy_checks/blade_mediatray.py b/cmk/base/legacy_checks/blade_mediatray.py
index fb9a6cbcb80..d2e20e50f69 100644
--- a/cmk/base/legacy_checks/blade_mediatray.py
+++ b/cmk/base/legacy_checks/blade_mediatray.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.blade import DETECT_BLADE
diff --git a/cmk/base/legacy_checks/blade_powerfan.py b/cmk/base/legacy_checks/blade_powerfan.py
index f7ded03fce4..31f6de6c550 100644
--- a/cmk/base/legacy_checks/blade_powerfan.py
+++ b/cmk/base/legacy_checks/blade_powerfan.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, contains, SNMPTree, StringTable
def inventory_blade_powerfan(info):
diff --git a/cmk/base/legacy_checks/blade_powermod.py b/cmk/base/legacy_checks/blade_powermod.py
index 5f767cee0d5..721d7b8be96 100644
--- a/cmk/base/legacy_checks/blade_powermod.py
+++ b/cmk/base/legacy_checks/blade_powermod.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, contains, SNMPTree, StringTable
def inventory_blade_powermod(info):
diff --git a/cmk/base/legacy_checks/bluecat_command_server.py b/cmk/base/legacy_checks/bluecat_command_server.py
index 516aafcda0d..1cda77b0dcd 100644
--- a/cmk/base/legacy_checks/bluecat_command_server.py
+++ b/cmk/base/legacy_checks/bluecat_command_server.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.bluecat import DETECT_BLUECAT
diff --git a/cmk/base/legacy_checks/bluecat_dns_queries.py b/cmk/base/legacy_checks/bluecat_dns_queries.py
index 09708b34909..ea60e46500b 100644
--- a/cmk/base/legacy_checks/bluecat_dns_queries.py
+++ b/cmk/base/legacy_checks/bluecat_dns_queries.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree, StringTable
from cmk.plugins.lib.bluecat import DETECT_BLUECAT
diff --git a/cmk/base/legacy_checks/bluecat_ha.py b/cmk/base/legacy_checks/bluecat_ha.py
index 7eccdbc0eae..fc9972b7db9 100644
--- a/cmk/base/legacy_checks/bluecat_ha.py
+++ b/cmk/base/legacy_checks/bluecat_ha.py
@@ -4,15 +4,14 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.bluecat import DETECT_BLUECAT
-def inventory_bluecat_ha(section: StringTable) -> DiscoveryResult:
+def discover_bluecat_ha(section: StringTable) -> DiscoveryResult:
# Only add if device is not in standalone mode
if section and section[0][0] != "1":
yield Service()
@@ -51,7 +50,7 @@ def parse_bluecat_ha(string_table: StringTable) -> StringTable:
oids=["1"],
),
service_name="HA State",
- discovery_function=inventory_bluecat_ha,
+ discovery_function=discover_bluecat_ha,
check_function=check_bluecat_ha,
check_ruleset_name="bluecat_ha",
check_default_parameters={
diff --git a/cmk/base/legacy_checks/bluecat_ntp.py b/cmk/base/legacy_checks/bluecat_ntp.py
index 034decc8880..1e7f3798ea4 100644
--- a/cmk/base/legacy_checks/bluecat_ntp.py
+++ b/cmk/base/legacy_checks/bluecat_ntp.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_bluecat_ntp(info):
diff --git a/cmk/base/legacy_checks/bluecat_threads.py b/cmk/base/legacy_checks/bluecat_threads.py
index c8931a4f54b..e21a8816ee6 100644
--- a/cmk/base/legacy_checks/bluecat_threads.py
+++ b/cmk/base/legacy_checks/bluecat_threads.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
threads_default_levels = {"levels": ("levels", (2000, 4000))}
diff --git a/cmk/base/legacy_checks/bluecoat_diskcpu.py b/cmk/base/legacy_checks/bluecoat_diskcpu.py
index b4d9965dcb7..b6502dbc3e8 100644
--- a/cmk/base/legacy_checks/bluecoat_diskcpu.py
+++ b/cmk/base/legacy_checks/bluecoat_diskcpu.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_bluecoat_diskcpu(info):
diff --git a/cmk/base/legacy_checks/bluenet_meter.py b/cmk/base/legacy_checks/bluenet_meter.py
index 62ad91c11a6..5126c8bf286 100644
--- a/cmk/base/legacy_checks/bluenet_meter.py
+++ b/cmk/base/legacy_checks/bluenet_meter.py
@@ -10,7 +10,7 @@
from cmk.base.check_legacy_includes.elphase import check_elphase
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
+from cmk.agent_based.v2 import SNMPTree, startswith
def parse_bluenet_meter(string_table):
@@ -31,7 +31,7 @@ def discover_bluenet_meter(section):
check_info["bluenet_meter"] = LegacyCheckDefinition(
- detect=equals(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.21695.1"),
+ detect=startswith(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.21695.1"),
fetch=SNMPTree(
base=".1.3.6.1.4.1.21695.1.10.7.2.1",
oids=["1", "5", "7", "8", "9"],
diff --git a/cmk/base/legacy_checks/bluenet_sensor.py b/cmk/base/legacy_checks/bluenet_sensor.py
index 0102e9f6bfc..2547afe211d 100644
--- a/cmk/base/legacy_checks/bluenet_sensor.py
+++ b/cmk/base/legacy_checks/bluenet_sensor.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
# .--Temperature---------------------------------------------------------.
# | _____ _ |
@@ -53,7 +52,7 @@ def parse_bluenet_sensor(string_table: StringTable) -> StringTable:
check_info["bluenet_sensor"] = LegacyCheckDefinition(
parse_function=parse_bluenet_sensor,
- detect=equals(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.21695.1"),
+ detect=startswith(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.21695.1"),
fetch=SNMPTree(
base=".1.3.6.1.4.1.21695.1.10.7.3.1",
oids=["1", "2", "4", "5"],
diff --git a/cmk/base/legacy_checks/brocade.py b/cmk/base/legacy_checks/brocade.py
index 0f7ffc85e2f..a255ca3ebd5 100644
--- a/cmk/base/legacy_checks/brocade.py
+++ b/cmk/base/legacy_checks/brocade.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, equals, SNMPTree, startswith, StringTable
# Example output from agent:
# [['1', '24', 'SLOT #0: TEMP #1'],
diff --git a/cmk/base/legacy_checks/brocade_info.py b/cmk/base/legacy_checks/brocade_info.py
index 5240ccf6603..0b6a5759f33 100644
--- a/cmk/base/legacy_checks/brocade_info.py
+++ b/cmk/base/legacy_checks/brocade_info.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree, startswith, StringTable
def inventory_brocade_info(info):
diff --git a/cmk/base/legacy_checks/brocade_mlx.py b/cmk/base/legacy_checks/brocade_mlx.py
index 15a80580b1d..89a8de1d2ed 100644
--- a/cmk/base/legacy_checks/brocade_mlx.py
+++ b/cmk/base/legacy_checks/brocade_mlx.py
@@ -13,8 +13,7 @@
from cmk.base.check_legacy_includes.mem import check_memory_element
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.brocade import DETECT_MLX
# TODO refactoring: use parse-function
diff --git a/cmk/base/legacy_checks/brocade_mlx_fan.py b/cmk/base/legacy_checks/brocade_mlx_fan.py
index 1b420e606cb..ade7aeebfde 100644
--- a/cmk/base/legacy_checks/brocade_mlx_fan.py
+++ b/cmk/base/legacy_checks/brocade_mlx_fan.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.brocade import DETECT_MLX
diff --git a/cmk/base/legacy_checks/brocade_tm.py b/cmk/base/legacy_checks/brocade_tm.py
index f3576629ebb..f764ba569c9 100644
--- a/cmk/base/legacy_checks/brocade_tm.py
+++ b/cmk/base/legacy_checks/brocade_tm.py
@@ -15,8 +15,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree, StringTable
from cmk.plugins.lib.brocade import DETECT_MLX
diff --git a/cmk/base/legacy_checks/brocade_vdx_status.py b/cmk/base/legacy_checks/brocade_vdx_status.py
index 3900d59dd4a..51477433916 100644
--- a/cmk/base/legacy_checks/brocade_vdx_status.py
+++ b/cmk/base/legacy_checks/brocade_vdx_status.py
@@ -11,8 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree, startswith, StringTable
def inventory_brocade_vdx_status(info):
diff --git a/cmk/base/legacy_checks/bvip_fans.py b/cmk/base/legacy_checks/bvip_fans.py
index 2ea835ce11b..141e38aee1a 100644
--- a/cmk/base/legacy_checks/bvip_fans.py
+++ b/cmk/base/legacy_checks/bvip_fans.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fan import check_fan
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.bvip import DETECT_BVIP
diff --git a/cmk/base/legacy_checks/bvip_info.py b/cmk/base/legacy_checks/bvip_info.py
index 22dc201a5ab..45b7a974d3c 100644
--- a/cmk/base/legacy_checks/bvip_info.py
+++ b/cmk/base/legacy_checks/bvip_info.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.bvip import DETECT_BVIP
diff --git a/cmk/base/legacy_checks/bvip_link.py b/cmk/base/legacy_checks/bvip_link.py
index 8e0775032d6..784bc450c64 100644
--- a/cmk/base/legacy_checks/bvip_link.py
+++ b/cmk/base/legacy_checks/bvip_link.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.bvip import DETECT_BVIP
diff --git a/cmk/base/legacy_checks/bvip_poe.py b/cmk/base/legacy_checks/bvip_poe.py
index 14744ae67cd..2d854fc3f26 100644
--- a/cmk/base/legacy_checks/bvip_poe.py
+++ b/cmk/base/legacy_checks/bvip_poe.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.bvip import DETECT_BVIP
diff --git a/cmk/base/legacy_checks/bvip_temp.py b/cmk/base/legacy_checks/bvip_temp.py
index 26c89d2c206..32553b3132d 100644
--- a/cmk/base/legacy_checks/bvip_temp.py
+++ b/cmk/base/legacy_checks/bvip_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.bvip import DETECT_BVIP
diff --git a/cmk/base/legacy_checks/bvip_util.py b/cmk/base/legacy_checks/bvip_util.py
index 6e38c17d18e..937ff3e8516 100644
--- a/cmk/base/legacy_checks/bvip_util.py
+++ b/cmk/base/legacy_checks/bvip_util.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.bvip import DETECT_BVIP
diff --git a/cmk/base/legacy_checks/bvip_video_alerts.py b/cmk/base/legacy_checks/bvip_video_alerts.py
index c2d7300c1d2..0b3fbfbb606 100644
--- a/cmk/base/legacy_checks/bvip_video_alerts.py
+++ b/cmk/base/legacy_checks/bvip_video_alerts.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.bvip import DETECT_BVIP
diff --git a/cmk/base/legacy_checks/carel_uniflair_cooling.py b/cmk/base/legacy_checks/carel_uniflair_cooling.py
index 2d4fde2a08f..ab6c8f8b446 100644
--- a/cmk/base/legacy_checks/carel_uniflair_cooling.py
+++ b/cmk/base/legacy_checks/carel_uniflair_cooling.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.detection import DETECT_NEVER
# snmp_scan_function
diff --git a/cmk/base/legacy_checks/casa_cpu_mem.py b/cmk/base/legacy_checks/casa_cpu_mem.py
index f92c7662f13..98a94f50f0f 100644
--- a/cmk/base/legacy_checks/casa_cpu_mem.py
+++ b/cmk/base/legacy_checks/casa_cpu_mem.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.mem import check_memory_element
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.casa import DETECT_CASA
Section = Mapping[str, Mapping[str, object]]
diff --git a/cmk/base/legacy_checks/casa_cpu_util.py b/cmk/base/legacy_checks/casa_cpu_util.py
index 170aebbd6db..d0021cde3d0 100644
--- a/cmk/base/legacy_checks/casa_cpu_util.py
+++ b/cmk/base/legacy_checks/casa_cpu_util.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.casa import DETECT_CASA
diff --git a/cmk/base/legacy_checks/casa_fan.py b/cmk/base/legacy_checks/casa_fan.py
index 8cd925017eb..841cceafad7 100644
--- a/cmk/base/legacy_checks/casa_fan.py
+++ b/cmk/base/legacy_checks/casa_fan.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.casa import DETECT_CASA
diff --git a/cmk/base/legacy_checks/casa_power.py b/cmk/base/legacy_checks/casa_power.py
index 10e1a87c3a3..64d721c22e1 100644
--- a/cmk/base/legacy_checks/casa_power.py
+++ b/cmk/base/legacy_checks/casa_power.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.casa import DETECT_CASA
diff --git a/cmk/base/legacy_checks/cbl_airlaser.py b/cmk/base/legacy_checks/cbl_airlaser.py
index 151382e94b5..fd1245c76d8 100644
--- a/cmk/base/legacy_checks/cbl_airlaser.py
+++ b/cmk/base/legacy_checks/cbl_airlaser.py
@@ -90,9 +90,11 @@ def parse_cbl_airlaser(string_table):
return string_table[0], {
hwclass: {
sensor: (
- airlaser_status_names[int(data[hwclass][offset][0])]
- if "Status" in sensor
- else saveint(data[hwclass][offset][0]),
+ (
+ airlaser_status_names[int(data[hwclass][offset][0])]
+ if "Status" in sensor
+ else saveint(data[hwclass][offset][0])
+ ),
sub_oid,
offset,
)
diff --git a/cmk/base/legacy_checks/check_bi_aggr.py b/cmk/base/legacy_checks/check_bi_aggr.py
new file mode 100644
index 00000000000..d34f0e0933d
--- /dev/null
+++ b/cmk/base/legacy_checks/check_bi_aggr.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+from cmk.base.check_api import passwordstore_get_cmdline
+from cmk.base.config import active_check_info
+
+
+def check_bi_aggr_arguments(params):
+ # Convert legacy format
+ if isinstance(params, tuple):
+ converted_params = {}
+ converted_params["base_url"] = params[0]
+ converted_params["aggregation_name"] = params[1]
+ converted_params["credentials"] = ("configured", (params[2], params[3]))
+ converted_params["optional"] = params[4]
+ params = converted_params
+ args = ["-b", params["base_url"], "-a", params["aggregation_name"]]
+ if params["credentials"] == "automation":
+ args.append("--use-automation-user")
+ else:
+ # configured
+ args += [
+ "-u",
+ params["credentials"][1][0],
+ "-s",
+ passwordstore_get_cmdline("%s", params["credentials"][1][1]),
+ ]
+ opt_params = params["optional"]
+ if "auth_mode" in opt_params:
+ args += ["-m", opt_params["auth_mode"]]
+ if "timeout" in opt_params:
+ args += ["-t", opt_params["timeout"]]
+ if opt_params.get("in_downtime"):
+ args += ["--in-downtime", opt_params["in_downtime"]]
+ if opt_params.get("acknowledged"):
+ args += ["--acknowledged", opt_params["acknowledged"]]
+ if opt_params.get("track_downtimes"):
+ args += ["-r", "-n", "$HOSTNAME$"]
+ return args
+
+
+active_check_info["bi_aggr"] = {
+ "command_line": "check_bi_aggr $ARG1$",
+ "argument_function": check_bi_aggr_arguments,
+ "service_description": lambda params: (
+ "Aggr %s" % params[1] if isinstance(params, tuple) else params["aggregation_name"]
+ ),
+ "has_perfdata": True,
+}
diff --git a/cmk/base/legacy_checks/check_disk_smb.py b/cmk/base/legacy_checks/check_disk_smb.py
new file mode 100644
index 00000000000..5e0a4e8bb59
--- /dev/null
+++ b/cmk/base/legacy_checks/check_disk_smb.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+
+from cmk.base.check_api import passwordstore_get_cmdline
+from cmk.base.config import active_check_info
+
+
+def check_disk_smb_arguments(params):
+ args = [
+ params["share"],
+ "-H",
+ "$HOSTADDRESS$" if params["host"] == "use_parent_host" else params["host"][1],
+ ]
+
+ warn, crit = params["levels"]
+ args += ["--levels", warn, crit]
+
+ if "workgroup" in params:
+ args += ["-W", params["workgroup"]]
+
+ if "port" in params:
+ args += ["-P", params["port"]]
+
+ if "auth" in params:
+ username, password = params["auth"]
+ args += [
+ "-u",
+ username,
+ "-p",
+ passwordstore_get_cmdline("%s", password),
+ ]
+
+ if "ip_address" in params:
+ args += ["-a", params["ip_address"]]
+
+ return args
+
+
+active_check_info["disk_smb"] = {
+ "command_line": "check_disk_smb $ARG1$",
+ "argument_function": check_disk_smb_arguments,
+ "service_description": lambda params: "SMB Share " + params["share"].replace("$", ""),
+}
diff --git a/cmk/base/legacy_checks/check_ldap.py b/cmk/base/legacy_checks/check_ldap.py
deleted file mode 100644
index 06b9c6b6524..00000000000
--- a/cmk/base/legacy_checks/check_ldap.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import passwordstore_get_cmdline
-from cmk.base.config import active_check_info
-
-
-def check_ldap_arguments(params):
- _name, basedn, settings = params
- args = []
-
- if "hostname" in settings:
- args += ["-H", settings["hostname"]]
- else:
- args += ["-H", "$HOSTADDRESS$"]
-
- args += ["-b", basedn]
-
- if "response_time" in settings:
- warn, crit = settings["response_time"]
- args += ["-w", "%f" % (warn / 1000.0), "-c", "%f" % (crit / 1000.0)]
-
- if "timeout" in settings:
- args += ["-t", settings["timeout"]]
-
- if "attribute" in settings:
- args += ["-a", settings["attribute"]]
-
- if "authentication" in settings:
- binddn, password = settings["authentication"]
- args += ["-D", binddn, "-P", passwordstore_get_cmdline("%s", password)]
-
- if "port" in settings:
- args += ["-p", settings["port"]]
-
- if "version" in settings:
- args += {
- "v2": ["-2"],
- "v3": ["-3"],
- "v3tls": ["-3", "-T"],
- }[settings["version"]]
-
- if settings.get("ssl"):
- args.append("--ssl")
-
- return args
-
-
-def check_ldap_desc(params):
- if params[0].startswith("^"):
- return params[0][1:]
- return "LDAP %s" % params[0]
-
-
-active_check_info["ldap"] = {
- "command_line": "check_ldap $ARG1$",
- "argument_function": check_ldap_arguments,
- "service_description": check_ldap_desc,
-}
diff --git a/cmk/base/legacy_checks/check_mail.py b/cmk/base/legacy_checks/check_mail.py
new file mode 100644
index 00000000000..22bbfc5fd5c
--- /dev/null
+++ b/cmk/base/legacy_checks/check_mail.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+
+from cmk.base.check_legacy_includes.check_mail import general_check_mail_args_from_params
+from cmk.base.config import active_check_info
+
+CHECK_IDENT = "check_mail"
+
+
+def check_mail_arguments(params):
+ """
+ >>> for a in check_mail_arguments({
+ ... 'service_description': 'Email',
+ ... 'fetch': ('IMAP', {
+ ... 'server': 'imap.gmx.de',
+ ... 'auth': ('basic', ('me@gmx.de', ('password', 'p4ssw0rd'))),
+ ... 'connection': {'disable_tls': True, 'port': 123}}),
+ ... 'forward': {'facility': 2, 'application': None, 'host': 'me.too@checkmk.com',
+ ... 'cleanup': True}}):
+ ... print(a)
+ --fetch-protocol=IMAP
+ --fetch-server=imap.gmx.de
+ --fetch-port=123
+ --fetch-username=me@gmx.de
+ --fetch-password=p4ssw0rd
+ --forward-ec
+ --forward-facility=2
+ --forward-host=me.too@checkmk.com
+ --cleanup=delete
+ """
+ args: list[str | tuple[str, str, str]] = general_check_mail_args_from_params(
+ CHECK_IDENT, params
+ )
+
+ if "forward" in params:
+ forward = params["forward"]
+ args.append("--forward-ec")
+
+ if forward.get("method"):
+ args.append(f"--forward-method={forward['method']}")
+
+ if forward.get("match_subject"):
+ args.append(f"--match-subject={forward['match_subject']}")
+
+ # int - can be 0
+ if "facility" in forward:
+ args.append(f"--forward-facility={forward['facility']}")
+
+ if forward.get("host"):
+ args.append(f"--forward-host={forward['host']}")
+
+ if forward.get("application"):
+ args.append(f"--forward-app={forward['application']}")
+
+ # int - can be 0
+ if "body_limit" in forward:
+ args.append(f"--body-limit={forward['body_limit']}")
+
+ if isinstance(forward.get("cleanup"), bool): # can never be False
+ args.append("--cleanup=delete")
+
+ elif isinstance(forward.get("cleanup"), str):
+ move_to_subfolder = forward["cleanup"]
+ args.append(f"--cleanup={move_to_subfolder}")
+
+ return args
+
+
+if __name__ == "__main__":
+ # Please keep these lines - they make TDD easy and have no effect on normal test runs.
+ # Just run this file from your IDE and dive into the code.
+ import doctest
+
+ assert not doctest.testmod().failed
+else:
+ active_check_info["mail"] = {
+ "command_line": f"{CHECK_IDENT} $ARG1$",
+ "argument_function": check_mail_arguments,
+ "service_description": lambda params: params["service_description"],
+ }
diff --git a/cmk/base/legacy_checks/check_mail_loop.py b/cmk/base/legacy_checks/check_mail_loop.py
new file mode 100644
index 00000000000..b3a656c618f
--- /dev/null
+++ b/cmk/base/legacy_checks/check_mail_loop.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+
+from cmk.base.check_api import host_name, passwordstore_get_cmdline
+from cmk.base.check_legacy_includes.check_mail import general_check_mail_args_from_params
+from cmk.base.config import active_check_info
+
+CHECK_IDENT = "check_mail_loop"
+
+
+def check_mail_loop_arguments(params): # pylint: disable=too-many-branches
+ """
+ >>> from cmk.base.plugin_contexts import current_host
+ >>> with current_host("hurz"):
+ ... for a in check_mail_loop_arguments(
+ ... {
+ ... "item": "MailLoop_imap",
+ ... "subject": "Some subject",
+ ... "send": (
+ ... "SMTP",
+ ... {
+ ... "server": "smtp.gmx.de",
+ ... "auth": ("me@gmx.de", ("password", "p4ssw0rd")),
+ ... "connection": {"tls": True, "port": 42},
+ ... },
+ ... ),
+ ... "fetch": (
+ ... "IMAP",
+ ... {
+ ... "server": "imap.gmx.de",
+ ... "auth": ("basic", ("me@gmx.de", ("password", "p4ssw0rd"))),
+ ... "connection": {"disable_tls": False, "port": 123},
+ ... },
+ ... ),
+ ... "mail_from": "me_from@gmx.de",
+ ... "mail_to": "me_to@gmx.de",
+ ... "connect_timeout": 23,
+ ... "duration": (93780, 183840),
+ ... }
+ ... ):
+ ... print(a)
+ --fetch-protocol=IMAP
+ --fetch-server=imap.gmx.de
+ --fetch-tls
+ --fetch-port=123
+ --fetch-username=me@gmx.de
+ --fetch-password=p4ssw0rd
+ --connect-timeout=23
+ --send-protocol=SMTP
+ --send-server=smtp.gmx.de
+ --send-port=42
+ --send-tls
+ --send-username=me@gmx.de
+ --send-password=p4ssw0rd
+ --mail-from=me_from@gmx.de
+ --mail-to=me_to@gmx.de
+ --status-suffix=hurz-MailLoop_imap
+ --warning=93780
+ --critical=183840
+ --subject=Some subject
+ """
+ args: list[str | tuple[str, str, str]] = general_check_mail_args_from_params(
+ CHECK_IDENT, params
+ )
+
+ try:
+ send_protocol, send_params = params["send"]
+ connection_params = send_params["connection"]
+ except KeyError as exc:
+ raise ValueError(
+ f"{params['item']} --- Params for check_mail_loop are faulty (missing {exc}), did you update the config?"
+ )
+
+ args.append(f"--send-protocol={send_protocol}")
+ args.append(f"--send-server={send_params.get('server', '$HOSTADDRESS$')}")
+ if (port := connection_params.get("port")) is not None:
+ args.append(f"--send-port={port}")
+
+ if send_protocol == "SMTP":
+ if connection_params.get("tls"):
+ args.append("--send-tls")
+
+ if auth_params := send_params.get("auth"):
+ username, password = auth_params
+ args.append(f"--send-username={username}")
+ args.append(passwordstore_get_cmdline("--send-password=%s", password))
+ elif send_protocol == "EWS":
+ if not connection_params.get("disable_tls"):
+ args.append("--send-tls")
+
+ if connection_params.get("disable_cert_validation"):
+ args.append("--send-disable-cert-validation")
+
+ auth_type, auth_data = send_params.get("auth")
+ if auth_type == "basic":
+ username, password = auth_data
+ args += [
+ f"--send-username={username}",
+ passwordstore_get_cmdline("--send-password=%s", password),
+ ]
+ else:
+ client_id, client_secret, tenant_id = auth_data
+ args += [
+ f"--send-client-id={client_id}",
+ passwordstore_get_cmdline("--send-client-secret=%s", client_secret),
+ f"--send-tenant-id={tenant_id}",
+ ]
+
+ args.append(f"--send-email-address={send_params.get('email_address')}")
+ else:
+ raise NotImplementedError(f"Sending mails is not implemented for {send_protocol}")
+
+ args.append(f"--mail-from={params['mail_from']}")
+ args.append(f"--mail-to={params['mail_to']}")
+
+ if "delete_messages" in params:
+ args.append("--delete-messages")
+
+ args.append(f"--status-suffix={host_name()}-{params['item']}")
+
+ if "duration" in params:
+ warning, critical = params["duration"]
+ args.append(f"--warning={warning}")
+ args.append(f"--critical={critical}")
+
+ if "subject" in params:
+ args.append(f"--subject={params['subject']}")
+
+ return args
+
+
+active_check_info["mail_loop"] = {
+ "command_line": f"{CHECK_IDENT} $ARG1$",
+ "argument_function": check_mail_loop_arguments,
+ "service_description": lambda params: f"Mail Loop {params['item']}",
+}
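
The doctest above covers the SMTP send branch only; the EWS send branch is new and has no coverage. A minimal sketch of how it can be driven, with the module path inferred from the file location and all parameter values invented:

    from cmk.base.plugin_contexts import current_host
    from cmk.base.legacy_checks.check_mail_loop import check_mail_loop_arguments

    params = {
        "item": "MailLoop_ews",
        "send": (
            "EWS",
            {
                "server": "ews.example.com",
                "connection": {"disable_tls": False, "disable_cert_validation": True},
                "auth": ("basic", ("me@example.com", ("password", "p4ssw0rd"))),
                "email_address": "me@example.com",
            },
        ),
        "fetch": (
            "IMAP",
            {
                "server": "imap.example.com",
                "auth": ("basic", ("me@example.com", ("password", "p4ssw0rd"))),
                "connection": {"disable_tls": False, "port": 143},
            },
        ),
        "mail_from": "from@example.com",
        "mail_to": "to@example.com",
    }

    with current_host("hurz"):
        # Expect --send-tls (TLS not disabled), --send-disable-cert-validation,
        # the basic credentials and --send-email-address on the send side.
        for arg in check_mail_loop_arguments(params):
            print(arg)
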
diff --git a/cmk/base/legacy_checks/check_mailboxes.py b/cmk/base/legacy_checks/check_mailboxes.py
new file mode 100644
index 00000000000..eea66c4c17f
--- /dev/null
+++ b/cmk/base/legacy_checks/check_mailboxes.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+"""This is the command line generator for the `check_mailboxes` active check.
+It defines active_check_info["mailboxes"]
+
+"""
+
+from cmk.base.check_legacy_includes.check_mail import general_check_mail_args_from_params
+from cmk.base.config import active_check_info
+
+CHECK_IDENT = "check_mailboxes"
+
+
+def check_mailboxes_arguments(params):
+ """
+ >>> for l in check_mailboxes_arguments({ # v2.1.0 / EWS
+ ... 'service_description': 'SD',
+ ... 'fetch': ('EWS', {
+    ...         'server': 'srv',
+ ... 'auth': ('basic', ('usr', 'pw')),
+ ... 'email_address': 'usr@srv.com',
+ ... 'connection': {'disable_tls': True, 'disable_cert_validation': False, 'port': 123}}),
+ ... 'age': (1, 2), 'age_newest': (3, 4), 'count': (5, 6),
+ ... 'mailboxes': ['abc', 'def']}):
+ ... print(l)
+ --fetch-protocol=EWS
+ --fetch-server=srv
+ --fetch-port=123
+ --fetch-username=usr
+ --fetch-password=pw
+ --fetch-email-address=usr@srv.com
+ --warn-age-oldest=1
+ --crit-age-oldest=2
+ --warn-age-newest=3
+ --crit-age-newest=4
+ --warn-count=5
+ --crit-count=6
+ --mailbox=abc
+ --mailbox=def
+ """
+ args: list[str | tuple[str, str, str]] = general_check_mail_args_from_params(
+ CHECK_IDENT, params
+ )
+
+ if "retrieve_max" in params:
+ args.append(f"--retrieve-max={params['retrieve_max']}")
+
+ if "age" in params:
+ warn, crit = params["age"]
+ args += [f"--warn-age-oldest={warn}", f"--crit-age-oldest={crit}"]
+
+ if "age_newest" in params:
+ warn, crit = params["age_newest"]
+ args += [f"--warn-age-newest={warn}", f"--crit-age-newest={crit}"]
+
+ if "count" in params:
+ warn, crit = params["count"]
+ args += [f"--warn-count={warn}", f"--crit-count={crit}"]
+
+ for mb in params.get("mailboxes", []):
+ args.append(f"--mailbox={mb}")
+
+ return args
+
+
+active_check_info["mailboxes"] = { # pylint: disable=undefined-variable
+ "command_line": f"{CHECK_IDENT} $ARG1$",
+ "argument_function": check_mailboxes_arguments,
+ "service_description": lambda params: params["service_description"],
+}
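
Both new files route passwords through passwordstore_get_cmdline, which is why args is annotated as list[str | tuple[str, str, str]]. A short sketch of the two value shapes, assuming the usual Checkmk behavior (only the explicit-password case is pinned down by the doctests above; the store case is an assumption):

    from cmk.base.check_api import passwordstore_get_cmdline

    # Explicitly configured password: resolved immediately into a plain string,
    # matching the "--fetch-password=p4ssw0rd" lines in the doctests.
    print(passwordstore_get_cmdline("--fetch-password=%s", ("password", "p4ssw0rd")))

    # Password-store reference: resolution is deferred to command execution time,
    # so a tuple comes back instead of a string -- the tuple[str, str, str] half
    # of the annotation. ("my_password_id" is a made-up store entry.)
    print(passwordstore_get_cmdline("--fetch-password=%s", ("store", "my_password_id")))
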
diff --git a/cmk/base/legacy_checks/check_mkevents.py b/cmk/base/legacy_checks/check_mkevents.py
deleted file mode 100644
index 7907102f72b..00000000000
--- a/cmk/base/legacy_checks/check_mkevents.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.config import active_check_info
-
-
-def check_mkevents_arguments(params):
- args = []
- if "remote" in params:
- remote = params["remote"]
- if isinstance(remote, tuple):
- ipaddress, port = remote
- args += ["-H", "%s:%d" % (ipaddress, port)]
- elif remote:
- args += ["-s", remote]
-
- if params.get("ignore_acknowledged"):
- args.append("-a")
-
- if (last_log := params.get("show_last_log")) is not None:
- match last_log:
- case "summary":
- args.append("-l")
- case "details":
- args.append("-L")
-
- hostspec = params.get("hostspec", "$HOSTADDRESS$")
- if isinstance(hostspec, list):
- hostspec = "/".join(hostspec)
- args.append(hostspec)
-
- if "application" in params:
- args.append(params["application"])
-
- return args
-
-
-def check_mkevents_description(params):
- item = params.get("item", params.get("application"))
- if item:
- return "Events %s" % item
- return "Events"
-
-
-active_check_info["mkevents"] = {
- "command_line": "check_mkevents $ARG1$",
- "argument_function": check_mkevents_arguments,
- "service_description": check_mkevents_description,
-}
diff --git a/cmk/base/legacy_checks/check_notify_count.py b/cmk/base/legacy_checks/check_notify_count.py
deleted file mode 100644
index ec059c62f07..00000000000
--- a/cmk/base/legacy_checks/check_notify_count.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.config import active_check_info
-
-
-def check_notify_count_arguments(params):
- _description, interval, settings = params
- args = ["-r", interval]
-
- if "num_per_contact" in settings:
- warn, crit = settings["num_per_contact"]
- args += ["-w", warn]
- args += ["-c", crit]
-
- return args
-
-
-active_check_info["notify_count"] = {
- "command_line": "check_notify_count $ARG1$",
- "argument_function": check_notify_count_arguments,
- "service_description": lambda params: "Notify %s" % params[0],
-}
diff --git a/cmk/base/legacy_checks/check_sftp.py b/cmk/base/legacy_checks/check_sftp.py
index b83b31587ed..655482c5de9 100644
--- a/cmk/base/legacy_checks/check_sftp.py
+++ b/cmk/base/legacy_checks/check_sftp.py
@@ -11,42 +11,37 @@
def check_sftp_arguments(params):
- args = []
- host, user, secret, settings = params
+ args = [
+ "--host=%s" % params["host"],
+ "--user=%s" % params["user"],
+ passwordstore_get_cmdline("--secret=%s", params["secret"]),
+ ]
- args.append("--host=%s" % host)
- args.append("--user=%s" % user)
- args.append(passwordstore_get_cmdline("--secret=%s", secret))
+ if "port" in params:
+ args.append("--port=%s" % params["port"])
- if "port" in settings:
- args.append("--port=%s" % settings["port"])
+ if "timeout" in params:
+ args.append("--timeout=%s" % params["timeout"])
- if "timeout" in settings:
- args.append("--timeout=%s" % settings["timeout"])
+ if "timestamp" in params:
+ args.append("--get-timestamp=%s" % params["timestamp"])
- if "timestamp" in settings:
- args.append("--get-timestamp=%s" % settings["timestamp"])
+ if "put" in params:
+ args.append("--put-local=%s" % params["put"]["local"])
+ args.append("--put-remote=%s" % params["put"]["remote"])
- if "put" in settings:
- local, remote = settings["put"]
- args.append("--put-local=%s" % local)
- args.append("--put-remote=%s" % remote)
+ if "get" in params:
+ args.append("--get-remote=%s" % params["get"]["remote"])
+ args.append("--get-local=%s" % params["get"]["local"])
- if "get" in settings:
- remote, local = settings["get"]
- args.append("--get-remote=%s" % remote)
- args.append("--get-local=%s" % local)
-
- if settings.get("look_for_keys", False):
+ if params.get("look_for_keys", False):
args.append("--look-for-keys")
return args
def check_sftp_desc(params):
- if "description" in params[3]:
- return params[3]["description"]
- return "SFTP %s" % params[0]
+ return params.get("description") or f"SFTP {params['host']}"
active_check_info["sftp"] = {
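
The rewrite above flattens check_sftp's old positional params, (host, user, secret, settings), into one dict whose former settings keys live at the top level; "put" and "get" also change from (local, remote) / (remote, local) tuples to small dicts. For comparison, with invented values:

    # Old tuple form consumed by the removed code:
    old_params = (
        "sftp.example.com",              # host
        "monitor",                       # user
        ("password", "s3cr3t"),          # secret
        {"port": 22, "timeout": 10, "put": ("/tmp/f", "upload/f")},
    )

    # New flat dict form consumed by the rewritten code:
    new_params = {
        "host": "sftp.example.com",
        "user": "monitor",
        "secret": ("password", "s3cr3t"),
        "port": 22,
        "timeout": 10,
        "put": {"local": "/tmp/f", "remote": "upload/f"},
    }
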
diff --git a/cmk/base/legacy_checks/check_sql.py b/cmk/base/legacy_checks/check_sql.py
deleted file mode 100644
index 36da22e0699..00000000000
--- a/cmk/base/legacy_checks/check_sql.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-# mypy: disable-error-code="arg-type"
-
-from cmk.base.check_api import passwordstore_get_cmdline
-from cmk.base.config import active_check_info
-
-
-def check_sql_arguments(params): # pylint: disable=too-many-branches
- args = []
-
- if "host" in params:
- args += ["--hostname=%s" % params["host"]]
- else:
- args += ["--hostname=$HOSTADDRESS$"]
-
- args.append("--dbms=%s" % params["dbms"])
- args.append("--name=%s" % params["name"])
- args.append("--user=%s" % params["user"])
- args.append(passwordstore_get_cmdline("--password=%s", params["password"]))
-
- if "port" in params:
- args.append("--port=%s" % params["port"])
-
- if "procedure" in params:
- if "procedure" in params and "useprocs" in params["procedure"]:
- args.append("--procedure")
- if "input" in params["procedure"]:
- args.append("--inputvars=%s" % params["procedure"]["input"])
-
- if "levels" in params:
- upper = params["levels"]
- else:
- upper = "", ""
-
- if "levels_low" in params:
- lower = params["levels_low"]
- else:
- lower = "", ""
-
- if "perfdata" in params:
- if (metrics := params.get("perfdata")) is not None:
- args.append(f"--metrics={metrics}")
-
- if "levels" in params or "levels_low" in params:
- warn_low, crit_low = lower
- warn_high, crit_high = upper
- args.append(f"-w{warn_low}:{warn_high}")
- args.append(f"-c{crit_low}:{crit_high}")
-
- if "text" in params:
- args.append("--text=%s" % params["text"])
-
- if isinstance(params["sql"], tuple):
- sql_tmp = params["sql"][-1]
- else:
- sql_tmp = params["sql"]
-
- args.append("%s" % sql_tmp.replace("\n", r"\n").replace(";", r"\;"))
-
- return args
-
-
-active_check_info["sql"] = {
- "command_line": "check_sql $ARG1$",
- "argument_function": check_sql_arguments,
- "service_description": lambda args: args["description"],
-}
diff --git a/cmk/base/legacy_checks/check_tcp.py b/cmk/base/legacy_checks/check_tcp.py
deleted file mode 100644
index 5683ddc4c46..00000000000
--- a/cmk/base/legacy_checks/check_tcp.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-# mypy: disable-error-code="list-item"
-
-from cmk.base.config import active_check_info
-
-
-def check_tcp_arguments(params): # pylint: disable=too-many-branches
- port, settings = params
- args = []
-
- args += ["-p", str(port)]
-
- if "response_time" in settings:
- warn, crit = settings["response_time"]
- args += ["-w", "%f" % (warn / 1000.0)]
- args += ["-c", "%f" % (crit / 1000.0)]
-
- if "timeout" in settings:
- args += ["-t", settings["timeout"]]
-
- if "refuse_state" in settings:
- args += ["-r", settings["refuse_state"]]
-
- if settings.get("escape_send_string"):
- args.append("--escape")
-
- if "send_string" in settings:
- args += ["-s", settings["send_string"]]
-
- if "expect" in settings:
- for s in settings["expect"]:
- args += ["-e", s]
-
- if settings.get("expect_all"):
- args.append("-A")
-
- if settings.get("jail"):
- args.append("--jail")
-
- if "mismatch_state" in settings:
- args += ["-M", settings["mismatch_state"]]
-
- if "delay" in settings:
- args += ["-d", settings["delay"]]
-
- if "maxbytes" in settings:
- args += ["-m", settings["maxbytes"]]
-
- if settings.get("ssl"):
- args.append("--ssl")
-
- if "cert_days" in settings:
- # legacy behavior
- if isinstance(settings["cert_days"], int):
- args += ["-D", settings["cert_days"]]
- else:
- warn, crit = settings["cert_days"]
- args += ["-D", "%d,%d" % (warn, crit)]
-
- if "quit_string" in settings:
- args += ["-q", settings["quit_string"]]
-
- if "hostname" in settings:
- args += ["-H", settings["hostname"]]
- else:
- args += ["-H", "$HOSTADDRESS$"]
-
- return args
-
-
-active_check_info["tcp"] = {
- "command_line": "check_tcp $ARG1$",
- "argument_function": check_tcp_arguments,
- "service_description": lambda args: args[1].get("svc_description", "TCP Port %d" % args[0]),
-}
diff --git a/cmk/base/legacy_checks/check_traceroute.py b/cmk/base/legacy_checks/check_traceroute.py
deleted file mode 100644
index cb0ac56b4f6..00000000000
--- a/cmk/base/legacy_checks/check_traceroute.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from collections.abc import Mapping
-from typing import Any
-
-from cmk.utils.hostaddress import HostName # pylint: disable=cmk-module-layer-violation
-
-from cmk.base.check_api import host_name, is_ipv6_primary
-from cmk.base.config import active_check_info
-
-
-def check_traceroute_arguments(params: Mapping[str, Any]) -> list[str]:
- args = ["$HOSTADDRESS$"]
-
- if params["dns"]:
- args.append("--use_dns")
-
- return [
- *args,
- f"--probe_method={params['method'] or 'udp'}",
- f"--ip_address_family={params['address_family'] or ('ipv6' if is_ipv6_primary(HostName(host_name())) else 'ipv4')}",
- "--routers_missing_warn",
- *(router for router, state in params["routers"] if state == "W"),
- "--routers_missing_crit",
- *(router for router, state in params["routers"] if state == "C"),
- "--routers_found_warn",
- *(router for router, state in params["routers"] if state == "w"),
- "--routers_found_crit",
- *(router for router, state in params["routers"] if state == "c"),
- ]
-
-
-active_check_info["traceroute"] = {
- "command_line": "check_traceroute $ARG1$",
- "argument_function": check_traceroute_arguments,
- "service_description": lambda params: "Routing",
-}
diff --git a/cmk/base/legacy_checks/check_uniserv.py b/cmk/base/legacy_checks/check_uniserv.py
deleted file mode 100644
index a3b7cbfccf5..00000000000
--- a/cmk/base/legacy_checks/check_uniserv.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.config import active_check_info
-
-
-def check_uniserv_arguments(params):
- args = ["$HOSTADDRESS$", params["port"], params["service"]]
-
- if isinstance(params["job"], tuple):
- job = params["job"][0]
- else:
- job = params["job"]
- if job == "version":
- args.append("VERSION")
- else:
- address = params["job"][1]
- args.append("ADDRESS")
- args.append(address["street"])
- args.append(address["street_no"])
- args.append(address["city"])
- args.append(address["search_regex"])
-
- return args
-
-
-def check_uniserv_desc(params):
- job = params["job"]
- if isinstance(job, tuple):
- job = job[0]
-
- if job == "version":
- return "Uniserv %s Version" % params["service"]
- return "Uniserv {} Address {} ".format(params["service"], params["job"][1]["city"])
-
-
-active_check_info["uniserv"] = {
- "command_line": "check_uniserv $ARG1$",
- "argument_function": check_uniserv_arguments,
- "service_description": check_uniserv_desc,
-}
diff --git a/cmk/base/legacy_checks/checkpoint_fan.py b/cmk/base/legacy_checks/checkpoint_fan.py
index d81827a4b7c..c0a4156c225 100644
--- a/cmk/base/legacy_checks/checkpoint_fan.py
+++ b/cmk/base/legacy_checks/checkpoint_fan.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.checkpoint import checkpoint_sensorstatus_to_nagios
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
diff --git a/cmk/base/legacy_checks/checkpoint_firewall.py b/cmk/base/legacy_checks/checkpoint_firewall.py
index 3f3680b95fe..d5ed0fee47b 100644
--- a/cmk/base/legacy_checks/checkpoint_firewall.py
+++ b/cmk/base/legacy_checks/checkpoint_firewall.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
diff --git a/cmk/base/legacy_checks/checkpoint_ha_problems.py b/cmk/base/legacy_checks/checkpoint_ha_problems.py
index 7895c54309b..bba8fb89ce0 100644
--- a/cmk/base/legacy_checks/checkpoint_ha_problems.py
+++ b/cmk/base/legacy_checks/checkpoint_ha_problems.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
diff --git a/cmk/base/legacy_checks/checkpoint_ha_status.py b/cmk/base/legacy_checks/checkpoint_ha_status.py
index d6a96785e6c..581169fc6b7 100644
--- a/cmk/base/legacy_checks/checkpoint_ha_status.py
+++ b/cmk/base/legacy_checks/checkpoint_ha_status.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
# .1.3.6.1.4.1.2620.1.5.2.0 1
@@ -21,7 +20,7 @@
# .1.3.6.1.4.1.2620.1.5.103.0
-def inventory_checkpoint_ha_status(section: StringTable) -> DiscoveryResult:
+def discover_checkpoint_ha_status(section: StringTable) -> DiscoveryResult:
if not section:
return
installed, _major, _minor, _started, _state, _block_state, _stat_code, _stat_long = section[0]
@@ -70,6 +69,6 @@ def parse_checkpoint_ha_status(string_table: StringTable) -> StringTable:
oids=["2", "3", "4", "5", "6", "7", "101", "103"],
),
service_name="HA Status",
- discovery_function=inventory_checkpoint_ha_status,
+ discovery_function=discover_checkpoint_ha_status,
check_function=check_checkpoint_ha_status,
)
diff --git a/cmk/base/legacy_checks/checkpoint_memory.py b/cmk/base/legacy_checks/checkpoint_memory.py
index 90a873b9486..c228e423650 100644
--- a/cmk/base/legacy_checks/checkpoint_memory.py
+++ b/cmk/base/legacy_checks/checkpoint_memory.py
@@ -8,16 +8,9 @@
from cmk.base.check_legacy_includes.mem import check_memory_element
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
-# FIXME
-# The WATO group 'memory_simple' needs an item and the service_description should
-# have a '%s'. At the moment the current item 'System' and 'Memory' without '%s'
-# works but is not consistent. This will be fixed in the future.
-# If we change this we loose history and parameter sets have to be adapted.
-
# comNET GmbH, Fabian Binder
# .1.3.6.1.4.1.2620.1.6.7.4.3.0 8101654528 --> CHECKPOINT-MIB::memTotalReal
@@ -26,8 +19,7 @@
def inventory_checkpoint_memory(info):
if info and len(info[0]) > 1:
- return [("System", {})]
- return []
+ yield None, {}
def check_checkpoint_memory(item, params, info):
@@ -55,9 +47,9 @@ def parse_checkpoint_memory(string_table: StringTable) -> StringTable:
base=".1.3.6.1.4.1.2620.1.6.7.4",
oids=["3", "4"],
),
- service_name="Memory",
+ service_name="Memory System",
discovery_function=inventory_checkpoint_memory,
check_function=check_checkpoint_memory,
- check_ruleset_name="memory_simple",
+ check_ruleset_name="memory_simple_single",
check_default_parameters={"levels": ("perc_used", (80.0, 90.0))},
)
diff --git a/cmk/base/legacy_checks/checkpoint_powersupply.py b/cmk/base/legacy_checks/checkpoint_powersupply.py
index cd24bd0688f..2ad3dd292c1 100644
--- a/cmk/base/legacy_checks/checkpoint_powersupply.py
+++ b/cmk/base/legacy_checks/checkpoint_powersupply.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
diff --git a/cmk/base/legacy_checks/checkpoint_svn_status.py b/cmk/base/legacy_checks/checkpoint_svn_status.py
index 0d391a32217..5d20b943978 100644
--- a/cmk/base/legacy_checks/checkpoint_svn_status.py
+++ b/cmk/base/legacy_checks/checkpoint_svn_status.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
diff --git a/cmk/base/legacy_checks/checkpoint_temp.py b/cmk/base/legacy_checks/checkpoint_temp.py
index c3d5d0096b1..9d47b0784f3 100644
--- a/cmk/base/legacy_checks/checkpoint_temp.py
+++ b/cmk/base/legacy_checks/checkpoint_temp.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
diff --git a/cmk/base/legacy_checks/checkpoint_tunnels.py b/cmk/base/legacy_checks/checkpoint_tunnels.py
index 8ac42813d72..c5771890392 100644
--- a/cmk/base/legacy_checks/checkpoint_tunnels.py
+++ b/cmk/base/legacy_checks/checkpoint_tunnels.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
tunnel_states = {
diff --git a/cmk/base/legacy_checks/checkpoint_voltage.py b/cmk/base/legacy_checks/checkpoint_voltage.py
index a632af2485f..915688d65f4 100644
--- a/cmk/base/legacy_checks/checkpoint_voltage.py
+++ b/cmk/base/legacy_checks/checkpoint_voltage.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.checkpoint import checkpoint_sensorstatus_to_nagios
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.checkpoint import DETECT
diff --git a/cmk/base/legacy_checks/cisco_ace_rserver.py b/cmk/base/legacy_checks/cisco_ace_rserver.py
index 997e588193b..bbe91395959 100644
--- a/cmk/base/legacy_checks/cisco_ace_rserver.py
+++ b/cmk/base/legacy_checks/cisco_ace_rserver.py
@@ -10,8 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, OIDBytes, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, OIDBytes, SNMPTree, StringTable
def parse_framework_mib_inet_address(ip_address_type, ip_address):
diff --git a/cmk/base/legacy_checks/cisco_asa_connections.py b/cmk/base/legacy_checks/cisco_asa_connections.py
index ff0e016996b..3816922a0c3 100644
--- a/cmk/base/legacy_checks/cisco_asa_connections.py
+++ b/cmk/base/legacy_checks/cisco_asa_connections.py
@@ -14,8 +14,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, contains, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, contains, SNMPTree, startswith, StringTable
def inventory_cisco_asa_connections(info):
diff --git a/cmk/base/legacy_checks/cisco_cpu.py b/cmk/base/legacy_checks/cisco_cpu.py
index 777fa949aaa..7242182f866 100644
--- a/cmk/base/legacy_checks/cisco_cpu.py
+++ b/cmk/base/legacy_checks/cisco_cpu.py
@@ -20,8 +20,8 @@
not_exists,
render,
SNMPTree,
+ StringTable,
)
-from cmk.agent_based.v2.type_defs import StringTable
def inventory_cisco_cpu(info):
diff --git a/cmk/base/legacy_checks/cisco_fan.py b/cmk/base/legacy_checks/cisco_fan.py
index dc5b63af3d2..7bf85ed59ef 100644
--- a/cmk/base/legacy_checks/cisco_fan.py
+++ b/cmk/base/legacy_checks/cisco_fan.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.cisco import DETECT_CISCO
from cmk.plugins.lib.cisco_sensor_item import cisco_sensor_item
diff --git a/cmk/base/legacy_checks/cisco_hsrp.py b/cmk/base/legacy_checks/cisco_hsrp.py
index 7b9fa1f1e17..8370ac6efa5 100644
--- a/cmk/base/legacy_checks/cisco_hsrp.py
+++ b/cmk/base/legacy_checks/cisco_hsrp.py
@@ -53,8 +53,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, exists, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, exists, OIDEnd, SNMPTree, StringTable
hsrp_states = {1: "initial", 2: "learn", 3: "listen", 4: "speak", 5: "standby", 6: "active"}
diff --git a/cmk/base/legacy_checks/cisco_nexus_cpu.py b/cmk/base/legacy_checks/cisco_nexus_cpu.py
index ef3651ccbcb..a756484c8d7 100644
--- a/cmk/base/legacy_checks/cisco_nexus_cpu.py
+++ b/cmk/base/legacy_checks/cisco_nexus_cpu.py
@@ -4,17 +4,24 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ all_of,
+ contains,
+ DiscoveryResult,
+ exists,
+ Service,
+ SNMPTree,
+ StringTable,
+)
# .1.3.6.1.4.1.9.9.305.1.1.1.0 1 --> CISCO-SYSTEM-EXT-MIB::cseSysCPUUtilization.0
-def inventory_cisco_nexus_cpu(section: StringTable) -> DiscoveryResult:
+def discover_cisco_nexus_cpu(section: StringTable) -> DiscoveryResult:
if section and section[0][0]:
yield Service()
@@ -23,7 +30,7 @@ def check_cisco_nexus_cpu(_no_item, params, info):
return check_cpu_util(float(info[0][0]), params)
-# Migration NOTE: Create a separate section, but a common check plugin for
+# Migration NOTE: Create a separate section, but a common check plug-in for
# tplink_cpu, hr_cpu, cisco_nexus_cpu, bintec_cpu, winperf_processor,
# lxc_container_cpu, docker_container_cpu.
# Migration via cmk/update_config.py!
@@ -43,7 +50,7 @@ def parse_cisco_nexus_cpu(string_table: StringTable) -> StringTable:
oids=["0"],
),
service_name="CPU utilization",
- discovery_function=inventory_cisco_nexus_cpu,
+ discovery_function=discover_cisco_nexus_cpu,
check_function=check_cisco_nexus_cpu,
check_ruleset_name="cpu_utilization_os",
check_default_parameters={
diff --git a/cmk/base/legacy_checks/cisco_oldcpu.py b/cmk/base/legacy_checks/cisco_oldcpu.py
index 14003e7dcab..18d298bdaab 100644
--- a/cmk/base/legacy_checks/cisco_oldcpu.py
+++ b/cmk/base/legacy_checks/cisco_oldcpu.py
@@ -4,17 +4,24 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ all_of,
+ DiscoveryResult,
+ exists,
+ Service,
+ SNMPTree,
+ startswith,
+ StringTable,
+)
# .1.3.6.1.4.1.9.2.1.57.0 13 --> OLD-CISCO-CPU-MIB::avgBusy1.0
-def inventory_cisco_oldcpu(section: StringTable) -> DiscoveryResult:
+def discover_cisco_oldcpu(section: StringTable) -> DiscoveryResult:
if section and section[0][0]:
yield Service()
@@ -39,7 +46,7 @@ def parse_cisco_oldcpu(string_table: StringTable) -> StringTable:
oids=["57"],
),
service_name="CPU utilization",
- discovery_function=inventory_cisco_oldcpu,
+ discovery_function=discover_cisco_oldcpu,
check_function=check_cisco_oldcpu,
check_ruleset_name="cpu_utilization",
check_default_parameters={"util": (80.0, 90.0)},
diff --git a/cmk/base/legacy_checks/cisco_power.py b/cmk/base/legacy_checks/cisco_power.py
index cb73697a957..e94885a813d 100644
--- a/cmk/base/legacy_checks/cisco_power.py
+++ b/cmk/base/legacy_checks/cisco_power.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.cisco import DETECT_CISCO
from cmk.plugins.lib.cisco_sensor_item import cisco_sensor_item
diff --git a/cmk/base/legacy_checks/cisco_prime_wifi_access_points.py b/cmk/base/legacy_checks/cisco_prime_wifi_access_points.py
index 8b23edfe0f4..95806817603 100644
--- a/cmk/base/legacy_checks/cisco_prime_wifi_access_points.py
+++ b/cmk/base/legacy_checks/cisco_prime_wifi_access_points.py
@@ -16,8 +16,7 @@
from cmk.base.check_legacy_includes.cisco_prime import parse_cisco_prime
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
Section = Mapping
@@ -55,6 +54,6 @@ def check_cisco_prime_wifi_access_points(item, params, parsed):
check_function=check_cisco_prime_wifi_access_points,
check_ruleset_name="cisco_prime_wifi_access_points",
check_default_parameters={
- "levels": (20, 40),
+ "levels": (20.0, 40.0),
},
)
diff --git a/cmk/base/legacy_checks/cisco_prime_wifi_connections.py b/cmk/base/legacy_checks/cisco_prime_wifi_connections.py
index 604bf7f5db8..1cc072f39c9 100644
--- a/cmk/base/legacy_checks/cisco_prime_wifi_connections.py
+++ b/cmk/base/legacy_checks/cisco_prime_wifi_connections.py
@@ -17,7 +17,7 @@
from cmk.base.check_legacy_includes.cisco_prime import parse_cisco_prime
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
Section = Mapping
diff --git a/cmk/base/legacy_checks/cisco_redundancy.py b/cmk/base/legacy_checks/cisco_redundancy.py
index 3519333a9d2..0d6e65990e4 100644
--- a/cmk/base/legacy_checks/cisco_redundancy.py
+++ b/cmk/base/legacy_checks/cisco_redundancy.py
@@ -14,8 +14,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree, StringTable
def inventory_cisco_redundancy(info):
@@ -62,6 +61,8 @@ def check_cisco_redundancy(_no_item, params, info):
"5": "user forced",
"6": "active unit failed",
"7": "active unit removed",
+ "8": "active lost gateway connectivity",
+ "9": "RMI port went down on active",
},
}
@@ -110,7 +111,14 @@ def parse_cisco_redundancy(string_table: StringTable) -> StringTable:
detect=all_of(contains(".1.3.6.1.2.1.1.1.0", "cisco"), exists(".1.3.6.1.4.1.9.9.176.1.1.*")),
fetch=SNMPTree(
base=".1.3.6.1.4.1.9.9.176.1.1",
- oids=["1", "2", "3", "4", "6", "8"],
+ oids=[
+ "1", # cRFStatusUnitId
+ "2", # cRFStatusUnitState
+ "3", # cRFStatusPeerUnitId
+ "4", # cRFStatusPeerUnitState
+ "6", # cRFStatusDuplexMode
+ "8", # cRFStatusLastSwactReasonCode
+ ],
),
service_name="Redundancy Framework Status",
discovery_function=inventory_cisco_redundancy,
diff --git a/cmk/base/legacy_checks/cisco_srst_call_legs.py b/cmk/base/legacy_checks/cisco_srst_call_legs.py
index c6067e821f2..483c04f477c 100644
--- a/cmk/base/legacy_checks/cisco_srst_call_legs.py
+++ b/cmk/base/legacy_checks/cisco_srst_call_legs.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, equals, SNMPTree, StringTable
def inventory_cisco_srst_call_legs(info):
diff --git a/cmk/base/legacy_checks/cisco_srst_phones.py b/cmk/base/legacy_checks/cisco_srst_phones.py
index 438df64a281..915bcb67c02 100644
--- a/cmk/base/legacy_checks/cisco_srst_phones.py
+++ b/cmk/base/legacy_checks/cisco_srst_phones.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, equals, SNMPTree, StringTable
def inventory_cisco_srst_phones(info):
diff --git a/cmk/base/legacy_checks/cisco_srst_state.py b/cmk/base/legacy_checks/cisco_srst_state.py
index 9f0be6795ac..0c85efe28ca 100644
--- a/cmk/base/legacy_checks/cisco_srst_state.py
+++ b/cmk/base/legacy_checks/cisco_srst_state.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.uptime import check_uptime_seconds
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, equals, SNMPTree, StringTable
# .1.3.6.1.4.1.9.9.441.1.3.1 CISCO-SRST-MIB::csrstState (1: active, 2: inactive)
# .1.3.6.1.4.1.9.9.441.1.3.4 CISCO-SRST-MIB::csrstTotalUpTime
diff --git a/cmk/base/legacy_checks/cisco_stackpower.py b/cmk/base/legacy_checks/cisco_stackpower.py
index 23637eac1f3..e2a7ecc8c9e 100644
--- a/cmk/base/legacy_checks/cisco_stackpower.py
+++ b/cmk/base/legacy_checks/cisco_stackpower.py
@@ -44,8 +44,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, startswith, StringTable
def inventory_cisco_stackpower(info):
diff --git a/cmk/base/legacy_checks/cisco_sys_mem.py b/cmk/base/legacy_checks/cisco_sys_mem.py
index a43fceffb87..f3d41647510 100644
--- a/cmk/base/legacy_checks/cisco_sys_mem.py
+++ b/cmk/base/legacy_checks/cisco_sys_mem.py
@@ -10,8 +10,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, startswith, StringTable
def inventory_cisco_sys_mem(info):
diff --git a/cmk/base/legacy_checks/cisco_temp.py b/cmk/base/legacy_checks/cisco_temp.py
index 1ed7674d021..417ab8c56de 100644
--- a/cmk/base/legacy_checks/cisco_temp.py
+++ b/cmk/base/legacy_checks/cisco_temp.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, not_exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, not_exists, SNMPTree, StringTable
def inventory_cisco_temp(info):
diff --git a/cmk/base/legacy_checks/cisco_ucs_cpu.py b/cmk/base/legacy_checks/cisco_ucs_cpu.py
index 2a49fc9e6c3..cf9918d1964 100644
--- a/cmk/base/legacy_checks/cisco_ucs_cpu.py
+++ b/cmk/base/legacy_checks/cisco_ucs_cpu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cisco_ucs import DETECT, map_operability, map_presence
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
# comNET GmbH, Fabian Binder - 2018-05-08
diff --git a/cmk/base/legacy_checks/cisco_ucs_hdd.py b/cmk/base/legacy_checks/cisco_ucs_hdd.py
index 50a331a3637..46c242654ba 100644
--- a/cmk/base/legacy_checks/cisco_ucs_hdd.py
+++ b/cmk/base/legacy_checks/cisco_ucs_hdd.py
@@ -59,9 +59,13 @@ def check_cisco_ucs_hdd(item: str, _no_params, section: Section):
return
yield (
- 0,
- f"Status: {hdd.operability} (hot spare)",
- ) if hdd.drive_status in _HOT_SPARE_VALUES else (hdd.state, f"Status: {hdd.operability}")
+ (
+ 0,
+ f"Status: {hdd.operability} (hot spare)",
+ )
+ if hdd.drive_status in _HOT_SPARE_VALUES
+ else (hdd.state, f"Status: {hdd.operability}")
+ )
yield 0, f"Size: {render.disksize(hdd.size)}"
yield 0, f"Model: {hdd.model}"
yield 0, f"Vendor: {hdd.vendor}"
diff --git a/cmk/base/legacy_checks/cisco_ucs_lun.py b/cmk/base/legacy_checks/cisco_ucs_lun.py
index 5d4e35fb771..4ec050779fd 100644
--- a/cmk/base/legacy_checks/cisco_ucs_lun.py
+++ b/cmk/base/legacy_checks/cisco_ucs_lun.py
@@ -4,12 +4,11 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.cisco_ucs import DETECT, map_operability
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
# comNET GmbH, Fabian Binder - 2018-05-07
@@ -40,8 +39,8 @@ def check_cisco_ucs_lun(_no_item, _no_params, info):
state, state_readable = map_operability.get(status, (3, "Unknown, status code %s" % status))
mode_state, mode_state_readable = map_luntype.get(mode, (3, "Unknown, status code %s" % mode))
# size is returned in MB
- # on migration: check whether to use render.size (MB) or render.bytes (MiB)
- size_readable = get_bytes_human_readable(int(size or "0") * 1024 * 1024)
+    # ^- or possibly MiB; the device MIB leaves the unit ambiguous
+ size_readable = render.bytes(int(size or "0") * 1024 * 1024)
yield state, "Status: %s" % state_readable
yield 0, "Size: %s" % size_readable
yield mode_state, "Mode: %s" % mode_state_readable
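
On the MB-vs-MiB question in the comment above: multiplying by 1024 * 1024 treats the reported figure as MiB. If the device actually reports decimal megabytes, the rendered size overstates by about 4.9% (a worked example, assuming render.bytes formats IEC units):

    reported = 1000                    # device value, documented as "MB"
    shown = reported * 1024 * 1024     # 1_048_576_000 bytes -> rendered as 1000 MiB
    meant = reported * 1000 * 1000     # 1_000_000_000 bytes if decimal MB was intended
    print(f"overstatement: {shown / meant - 1:.1%}")  # overstatement: 4.9%
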
diff --git a/cmk/base/legacy_checks/cisco_ucs_mem_total.py b/cmk/base/legacy_checks/cisco_ucs_mem_total.py
index 69976198009..cb577428abc 100644
--- a/cmk/base/legacy_checks/cisco_ucs_mem_total.py
+++ b/cmk/base/legacy_checks/cisco_ucs_mem_total.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cisco_ucs import DETECT
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
# comNET GmbH, Fabian Binder - 2018-05-30
diff --git a/cmk/base/legacy_checks/cisco_ucs_psu.py b/cmk/base/legacy_checks/cisco_ucs_psu.py
index 41b493487f7..c9a649e12f4 100644
--- a/cmk/base/legacy_checks/cisco_ucs_psu.py
+++ b/cmk/base/legacy_checks/cisco_ucs_psu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cisco_ucs import DETECT, map_operability
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
# comNET GmbH, Fabian Binder - 2018-05-07
diff --git a/cmk/base/legacy_checks/cisco_ucs_system.py b/cmk/base/legacy_checks/cisco_ucs_system.py
index 4fc211e4c05..f1a61453e1a 100644
--- a/cmk/base/legacy_checks/cisco_ucs_system.py
+++ b/cmk/base/legacy_checks/cisco_ucs_system.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cisco_ucs import DETECT, map_operability
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
# comNET GmbH, Fabian Binder - 2018-05-07
diff --git a/cmk/base/legacy_checks/cisco_ucs_temp_mem.py b/cmk/base/legacy_checks/cisco_ucs_temp_mem.py
index d4b4adf5cc1..16840c3b3a6 100644
--- a/cmk/base/legacy_checks/cisco_ucs_temp_mem.py
+++ b/cmk/base/legacy_checks/cisco_ucs_temp_mem.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
# comNET GmbH, Fabian Binder - 2018-05-30
diff --git a/cmk/base/legacy_checks/cisco_vss.py b/cmk/base/legacy_checks/cisco_vss.py
index 46ba30fbbf3..0ece755b985 100644
--- a/cmk/base/legacy_checks/cisco_vss.py
+++ b/cmk/base/legacy_checks/cisco_vss.py
@@ -53,8 +53,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, contains, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, contains, exists, SNMPTree, StringTable
cisco_vss_role_names = {
"1": "standalone",
diff --git a/cmk/base/legacy_checks/citrix_serverload.py b/cmk/base/legacy_checks/citrix_serverload.py
index c3d661bea8e..48e47df6f01 100644
--- a/cmk/base/legacy_checks/citrix_serverload.py
+++ b/cmk/base/legacy_checks/citrix_serverload.py
@@ -10,8 +10,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
def inventory_citrix_serverload(info):
diff --git a/cmk/base/legacy_checks/citrix_sessions.py b/cmk/base/legacy_checks/citrix_sessions.py
index 918a830831b..9b7cece551e 100644
--- a/cmk/base/legacy_checks/citrix_sessions.py
+++ b/cmk/base/legacy_checks/citrix_sessions.py
@@ -14,7 +14,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
citrix_sessions_default_levels = {
"total": (60, 65),
diff --git a/cmk/base/legacy_checks/climaveneta_alarm.py b/cmk/base/legacy_checks/climaveneta_alarm.py
index 7bc9cd41b33..3129b0858d9 100644
--- a/cmk/base/legacy_checks/climaveneta_alarm.py
+++ b/cmk/base/legacy_checks/climaveneta_alarm.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, OIDEnd, SNMPTree, StringTable
climaveneta_alarms = {
# 20 : "Global (general)",
diff --git a/cmk/base/legacy_checks/climaveneta_fan.py b/cmk/base/legacy_checks/climaveneta_fan.py
index 92d8b7b3e45..432893bc8ab 100644
--- a/cmk/base/legacy_checks/climaveneta_fan.py
+++ b/cmk/base/legacy_checks/climaveneta_fan.py
@@ -4,15 +4,14 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.fan import check_fan
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, equals, Service, SNMPTree, StringTable
-def inventory_climaveneta_fan(section: StringTable) -> DiscoveryResult:
+def discover_climaveneta_fan(section: StringTable) -> DiscoveryResult:
if section and len(section[0]) == 2:
yield Service(item="1")
yield Service(item="2")
@@ -35,7 +34,7 @@ def parse_climaveneta_fan(string_table: StringTable) -> StringTable:
oids=["42", "43"],
),
service_name="Fan %s",
- discovery_function=inventory_climaveneta_fan,
+ discovery_function=discover_climaveneta_fan,
check_function=check_climaveneta_fan,
check_ruleset_name="hw_fans",
check_default_parameters={
diff --git a/cmk/base/legacy_checks/climaveneta_temp.py b/cmk/base/legacy_checks/climaveneta_temp.py
index e744f99f65f..53165642cb2 100644
--- a/cmk/base/legacy_checks/climaveneta_temp.py
+++ b/cmk/base/legacy_checks/climaveneta_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, OIDEnd, SNMPTree, StringTable
climaveneta_sensors = {
1: "Room",
diff --git a/cmk/base/legacy_checks/cmc_temp.py b/cmk/base/legacy_checks/cmc_temp.py
index 034f5a3cf79..83a7240d67a 100644
--- a/cmk/base/legacy_checks/cmc_temp.py
+++ b/cmk/base/legacy_checks/cmc_temp.py
@@ -10,15 +10,15 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
# [[[u'26', u'26']], [[u'45', u'15', u'45', u'15']]]
def inventory_cmc_temp(info):
# There are always two sensors
- return [("1", {}), ("2", {})]
+ yield "1", {}
+ yield "2", {}
def check_cmc_temp(item, params, info):
diff --git a/cmk/base/legacy_checks/cmciii_lcp_fans.py b/cmk/base/legacy_checks/cmciii_lcp_fans.py
index 03989065734..4e82f7dbd0b 100644
--- a/cmk/base/legacy_checks/cmciii_lcp_fans.py
+++ b/cmk/base/legacy_checks/cmciii_lcp_fans.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.cmciii import DETECT_CMCIII_LCP
diff --git a/cmk/base/legacy_checks/cmciii_lcp_water.py b/cmk/base/legacy_checks/cmciii_lcp_water.py
index 9436a8a3e7b..133faf61b2d 100644
--- a/cmk/base/legacy_checks/cmciii_lcp_water.py
+++ b/cmk/base/legacy_checks/cmciii_lcp_water.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.cmciii import DETECT_CMCIII_LCP
# Note: The CMCIII checks for Water IN/OUT and similar stuff are
diff --git a/cmk/base/legacy_checks/cmctc.py b/cmk/base/legacy_checks/cmctc.py
index 8dcb0dabc69..23f292597bb 100644
--- a/cmk/base/legacy_checks/cmctc.py
+++ b/cmk/base/legacy_checks/cmctc.py
@@ -7,13 +7,11 @@
from typing import NamedTuple
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.cmctc import cmctc_translate_status, cmctc_translate_status_text
-from cmk.base.check_legacy_includes.temperature import check_temperature
+from cmk.base.check_legacy_includes.temperature import check_temperature, TempParamType
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.cmctc import DETECT_CMCTC
+from cmk.agent_based.v2 import SNMPTree, StringTable
+from cmk.plugins.lib.cmctc import cmctc_translate_status, cmctc_translate_status_text, DETECT_CMCTC
# Table columns:
# 0: index
@@ -57,7 +55,7 @@ def inventory_cmctc_temp(section: Section) -> Iterable[tuple[str, dict]]:
yield from ((item, {}) for item in section)
-def check_cmctc_temp(item, params, section):
+def check_cmctc_temp(item: str, params: TempParamType, section: Section) -> Iterable:
if (sensor := section.get(item)) is None:
return
@@ -66,7 +64,7 @@ def check_cmctc_temp(item, params, section):
params,
"cmctc_temp_%s" % item,
dev_levels=sensor.levels,
- dev_levels_lower=sensor.level_lower,
+ dev_levels_lower=sensor.levels_lower,
dev_status=cmctc_translate_status(sensor.status),
dev_status_name="Unit: %s" % cmctc_translate_status_text(sensor.status),
)
@@ -85,7 +83,6 @@ def check_cmctc_temp(item, params, section):
"5.2.1.6",
"5.2.1.7",
"5.2.1.8",
- "7.2.1.2",
],
),
SNMPTree(
@@ -98,7 +95,6 @@ def check_cmctc_temp(item, params, section):
"5.2.1.6",
"5.2.1.7",
"5.2.1.8",
- "7.2.1.2",
],
),
SNMPTree(
@@ -111,7 +107,6 @@ def check_cmctc_temp(item, params, section):
"5.2.1.6",
"5.2.1.7",
"5.2.1.8",
- "7.2.1.2",
],
),
SNMPTree(
@@ -124,7 +119,6 @@ def check_cmctc_temp(item, params, section):
"5.2.1.6",
"5.2.1.7",
"5.2.1.8",
- "7.2.1.2",
],
),
],
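
The level_lower -> levels_lower fix above matches the property names of the Sensor tuple defined in the cmctc_lcp module deleted below; assuming cmctc.py's own Sensor mirrors it, the two properties behave like this (a trimmed, self-contained copy of that deleted definition):

    from typing import NamedTuple

    class Sensor(NamedTuple):
        status: str
        reading: float
        high: float
        low: float
        warn: float

        def has_levels(self) -> bool:
            return {self.low, self.warn, self.high} != {0.0} and self.low < self.high

        @property
        def levels(self) -> tuple[float, float] | None:
            return (self.warn, self.high) if self.has_levels() else None

        @property
        def levels_lower(self) -> tuple[float, float] | None:
            return (self.low, float("-inf")) if self.has_levels() else None

    s = Sensor(status="4", reading=23.0, high=45.0, low=15.0, warn=40.0)
    print(s.levels)        # (40.0, 45.0) -> passed as dev_levels
    print(s.levels_lower)  # (15.0, -inf) -> passed as dev_levels_lower
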
diff --git a/cmk/base/legacy_checks/cmctc_config.py b/cmk/base/legacy_checks/cmctc_config.py
index 392c8a9316d..03dd98766ae 100644
--- a/cmk/base/legacy_checks/cmctc_config.py
+++ b/cmk/base/legacy_checks/cmctc_config.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.cmctc import DETECT_CMCTC
# .1.3.6.1.4.1.2606.4.3.1.1.0 1
diff --git a/cmk/base/legacy_checks/cmctc_lcp.py b/cmk/base/legacy_checks/cmctc_lcp.py
deleted file mode 100644
index 35b71b2e810..00000000000
--- a/cmk/base/legacy_checks/cmctc_lcp.py
+++ /dev/null
@@ -1,391 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from collections.abc import Iterable, Mapping
-from typing import Any, NamedTuple
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.cmctc import cmctc_translate_status, cmctc_translate_status_text
-from cmk.base.check_legacy_includes.temperature import check_temperature, TempParamType
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.cmctc import DETECT_CMCTC
-
-
-class Sensor(NamedTuple):
- status: str
- reading: float
- high: float
- low: float
- warn: float
- description: str
- type_: str
-
- def has_levels(self) -> bool:
- return {self.low, self.warn, self.high} != {0.0} and self.low < self.high
-
- @property
- def levels(self) -> tuple[float, float] | None:
- return (self.warn, self.high) if self.has_levels() else None
-
- @property
- def levels_lower(self) -> tuple[float, float] | None:
- return (self.low, float("-inf")) if self.has_levels() else None
-
-
-Section = Mapping[str, Sensor]
-
-
-_CMCTC_LCP_SENSORS = {
- "4": (None, "access"),
- "12": (None, "humidity"),
- # User Sensors
- "13": ("normally open", "user"),
- "14": ("normally closed", "user"),
- # Leakage
- "23": (None, "flow"),
- "30": (None, "current"),
- "31": (None, "status"),
- "32": (None, "position"),
- # Blower
- "40": ("1", "blower"),
- "41": ("2", "blower"),
- "42": ("3", "blower"),
- "43": ("4", "blower"),
- "44": ("5", "blower"),
- "45": ("6", "blower"),
- "46": ("7", "blower"),
- "47": ("8", "blower"),
- # Server in/out
- "48": ("Server in 1", "temp"),
- "49": ("Server out 1", "temp"),
- "50": ("Server in 2", "temp"),
- "51": ("Server out 2", "temp"),
- "52": ("Server in 3", "temp"),
- "53": ("Server out 3", "temp"),
- "54": ("Server in 4", "temp"),
- "55": ("Server out 4", "temp"),
- # Overview Server
- "56": ("Overview Server in", "temp"),
- "57": ("Overview Server out", "temp"),
- # Water
- "58": ("Water in", "temp"),
- "59": ("Water out", "temp"),
- "60": (None, "flow"),
- # Other stuff
- "61": (None, "blowergrade"),
- "62": (None, "regulator"),
-}
-
-
-_TREES = [
- "3", # cmcTcUnit1OutputTable
- "4", # cmcTcUnit2OutputTable
- "5", # cmcTcUnit3OutputTable
- "6", # cmcTcUnit4OutputTable
-]
-
-
-def parse_cmctc_lcp(string_table: list[StringTable]) -> Section:
- return {
- f"{sensor_spec[0]} - {tree}.{index}"
- if sensor_spec[0]
- else index: Sensor(
- status=status,
- reading=float(reading),
- high=float(high),
- low=float(low),
- warn=float(warn),
- description=description,
- type_=sensor_spec[1],
- )
- for tree, block in zip(_TREES, string_table)
- for index, typeid, status, reading, high, low, warn, description in block
- if (sensor_spec := _CMCTC_LCP_SENSORS.get(typeid))
- }
-
-
-def inventory_cmctc_lcp(section: Section, sensortype: str) -> Iterable[tuple[str, dict]]:
- yield from ((item, {}) for item, sensor in section.items() if sensor.type_ == sensortype)
-
-
-def check_cmctc_lcp(
- item: str, params: Any, section: Section, sensortype: str
-) -> Iterable[tuple[int, str, list]]:
- map_sensor_state = {
- "1": (3, "not available"),
- "2": (2, "lost"),
- "3": (1, "changed"),
- "4": (0, "ok"),
- "5": (2, "off"),
- "6": (0, "on"),
- "7": (1, "warning"),
- "8": (2, "too low"),
- "9": (2, "too high"),
- "10": (2, "error"),
- }
-
- map_unit = {
- "access": "",
- "current": " A",
- "status": "",
- "position": "",
- "temp": " °C",
- "blower": " RPM",
- "blowergrade": "",
- "humidity": "%",
- "flow": " l/min",
- "regulator": "%",
- "user": "",
- }
-
- if (sensor := section.get(item)) is None:
- return
-
- unit = map_unit[sensor.type_]
- infotext = ""
- if sensor.description:
- infotext += "[%s] " % sensor.description
- state, extra_info = map_sensor_state[sensor.status]
- yield state, "%s%d%s" % (infotext, int(sensor.reading), unit), []
-
- extra_state = 0
- if params:
- warn, crit = params
- perfdata = [(sensortype, sensor.reading, warn, crit)]
- if sensor.reading >= crit:
- extra_state = 2
- elif sensor.reading >= warn:
- extra_state = 1
-
- if extra_state:
- extra_info += " (warn/crit at %d/%d%s)" % (warn, crit, unit)
- else:
- perfdata = [(sensortype, sensor.reading, None, None)]
- if sensor.has_levels():
- if sensor.reading >= sensor.high or sensor.reading <= sensor.low:
- extra_state = 2
- extra_info += " (device lower/upper crit at {}/{}{})".format(
- sensor.low,
- sensor.high,
- unit,
- )
-
- yield extra_state, extra_info, perfdata
-
-
-def inventory_cmctc_lcp_temp(section: Section) -> Iterable[tuple[str, dict]]:
- yield from inventory_cmctc_lcp(section, "temp")
-
-
-def check_cmctc_lcp_temp(
- item: str, params: TempParamType, section: Section
-) -> Iterable[tuple[int, str, list]]:
- if (sensor := section.get(item)) is None:
- return
-
- status = int(sensor.status)
- yield check_temperature(
- sensor.reading,
- params,
- "cmctc_lcp_temp_%s" % item,
- dev_levels=sensor.levels,
- dev_levels_lower=sensor.levels_lower,
- dev_status=cmctc_translate_status(status),
- dev_status_name="Unit: %s" % cmctc_translate_status_text(status),
- )
-
-
-check_info["cmctc_lcp"] = LegacyCheckDefinition(
- detect=DETECT_CMCTC,
- fetch=[
- SNMPTree(
- base=f".1.3.6.1.4.1.2606.4.2.{idx}",
- oids=[
- "5.2.1.1",
- "5.2.1.2",
- "5.2.1.4",
- "5.2.1.5",
- "5.2.1.6",
- "5.2.1.7",
- "5.2.1.8",
- "7.2.1.2",
- ],
- )
- for idx in _TREES
- ],
- parse_function=parse_cmctc_lcp,
-)
-
-
-def discover_cmctc_lcp_access(info):
- return inventory_cmctc_lcp(info, "access")
-
-
-def check_cmctc_lcp_access(item, params, info):
- return check_cmctc_lcp(item, params, info, "access")
-
-
-check_info["cmctc_lcp.access"] = LegacyCheckDefinition(
- service_name="Access %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_access,
- check_function=check_cmctc_lcp_access,
-)
-
-
-def discover_cmctc_lcp_blower(info):
- return inventory_cmctc_lcp(info, "blower")
-
-
-def check_cmctc_lcp_blower(item, params, info):
- return check_cmctc_lcp(item, params, info, "blower")
-
-
-check_info["cmctc_lcp.blower"] = LegacyCheckDefinition(
- service_name="Blower %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_blower,
- check_function=check_cmctc_lcp_blower,
-)
-
-
-def discover_cmctc_lcp_blowergrade(info):
- return inventory_cmctc_lcp(info, "blowergrade")
-
-
-def check_cmctc_lcp_blowergrade(item, params, info):
- return check_cmctc_lcp(item, params, info, "blowergrade")
-
-
-check_info["cmctc_lcp.blowergrade"] = LegacyCheckDefinition(
- service_name="Blower Grade %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_blowergrade,
- check_function=check_cmctc_lcp_blowergrade,
-)
-
-
-def discover_cmctc_lcp_current(info):
- return inventory_cmctc_lcp(info, "current")
-
-
-def check_cmctc_lcp_current(item, params, info):
- return check_cmctc_lcp(item, params, info, "current")
-
-
-check_info["cmctc_lcp.current"] = LegacyCheckDefinition(
- service_name="Current %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_current,
- check_function=check_cmctc_lcp_current,
-)
-
-
-def discover_cmctc_lcp_flow(info):
- return inventory_cmctc_lcp(info, "flow")
-
-
-def check_cmctc_lcp_flow(item, params, info):
- return check_cmctc_lcp(item, params, info, "flow")
-
-
-check_info["cmctc_lcp.flow"] = LegacyCheckDefinition(
- service_name="Waterflow %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_flow,
- check_function=check_cmctc_lcp_flow,
-)
-
-
-def discover_cmctc_lcp_humidity(info):
- return inventory_cmctc_lcp(info, "humidity")
-
-
-def check_cmctc_lcp_humidity(item, params, info):
- return check_cmctc_lcp(item, params, info, "humidity")
-
-
-check_info["cmctc_lcp.humidity"] = LegacyCheckDefinition(
- service_name="Humidity %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_humidity,
- check_function=check_cmctc_lcp_humidity,
-)
-
-
-def discover_cmctc_lcp_position(info):
- return inventory_cmctc_lcp(info, "position")
-
-
-def check_cmctc_lcp_position(item, params, info):
- return check_cmctc_lcp(item, params, info, "position")
-
-
-check_info["cmctc_lcp.position"] = LegacyCheckDefinition(
- service_name="Position %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_position,
- check_function=check_cmctc_lcp_position,
-)
-
-
-def discover_cmctc_lcp_regulator(info):
- return inventory_cmctc_lcp(info, "regulator")
-
-
-def check_cmctc_lcp_regulator(item, params, info):
- return check_cmctc_lcp(item, params, info, "regulator")
-
-
-check_info["cmctc_lcp.regulator"] = LegacyCheckDefinition(
- service_name="Regulator %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_regulator,
- check_function=check_cmctc_lcp_regulator,
-)
-
-
-def discover_cmctc_lcp_status(info):
- return inventory_cmctc_lcp(info, "status")
-
-
-def check_cmctc_lcp_status(item, params, info):
- return check_cmctc_lcp(item, params, info, "status")
-
-
-check_info["cmctc_lcp.status"] = LegacyCheckDefinition(
- service_name="Status %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_status,
- check_function=check_cmctc_lcp_status,
-)
-
-
-def discover_cmctc_lcp_user(info):
- return inventory_cmctc_lcp(info, "user")
-
-
-def check_cmctc_lcp_user(item, params, info):
- return check_cmctc_lcp(item, params, info, "user")
-
-
-check_info["cmctc_lcp.user"] = LegacyCheckDefinition(
- service_name="User Sensor %s",
- sections=["cmctc_lcp"],
- discovery_function=discover_cmctc_lcp_user,
- check_function=check_cmctc_lcp_user,
-)
-
-# temperature check is standardised
-check_info["cmctc_lcp.temp"] = LegacyCheckDefinition(
- service_name="Temperature %s",
- sections=["cmctc_lcp"],
- discovery_function=inventory_cmctc_lcp_temp,
- check_function=check_cmctc_lcp_temp,
- check_ruleset_name="temperature",
-)
diff --git a/cmk/base/legacy_checks/cmctc_psm_m.py b/cmk/base/legacy_checks/cmctc_psm_m.py
index 79989bc92da..270f846eb70 100644
--- a/cmk/base/legacy_checks/cmctc_psm_m.py
+++ b/cmk/base/legacy_checks/cmctc_psm_m.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.cmctc import DETECT_CMCTC
# Table columns:
diff --git a/cmk/base/legacy_checks/cmctc_state.py b/cmk/base/legacy_checks/cmctc_state.py
index cfc60f14bc1..85150e28c27 100644
--- a/cmk/base/legacy_checks/cmctc_state.py
+++ b/cmk/base/legacy_checks/cmctc_state.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.cmctc import DETECT_CMCTC
# .1.3.6.1.4.1.2606.4.2.1.0 2
diff --git a/cmk/base/legacy_checks/couchbase_buckets_mem.py b/cmk/base/legacy_checks/couchbase_buckets_mem.py
index e073ceaa7ec..a2db9eaf8ea 100644
--- a/cmk/base/legacy_checks/couchbase_buckets_mem.py
+++ b/cmk/base/legacy_checks/couchbase_buckets_mem.py
@@ -6,10 +6,11 @@
from collections.abc import Iterable
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.mem import check_memory_element
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
from cmk.plugins.lib.couchbase import parse_couchbase_lines, Section
DiscoveryResult = Iterable[tuple[str, dict]]
@@ -44,7 +45,7 @@ def check_couchbase_bucket_mem(item, params, parsed):
"mem_low_wat",
None,
infoname="Low watermark",
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
high_watermark = data.get("ep_mem_high_wat")
@@ -54,7 +55,7 @@ def check_couchbase_bucket_mem(item, params, parsed):
"mem_high_wat",
None,
infoname="High watermark",
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
diff --git a/cmk/base/legacy_checks/couchbase_buckets_vbuckets.py b/cmk/base/legacy_checks/couchbase_buckets_vbuckets.py
index e6584279c88..5d1acffc4b5 100644
--- a/cmk/base/legacy_checks/couchbase_buckets_vbuckets.py
+++ b/cmk/base/legacy_checks/couchbase_buckets_vbuckets.py
@@ -6,7 +6,7 @@
from collections.abc import Iterable
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
from cmk.agent_based.v2 import render
@@ -41,7 +41,7 @@ def check_couchbase_buckets_vbuckets(item, params, parsed):
"item_memory",
params.get("item_memory"),
infoname="Item memory",
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
pending_vbuckets = data.get("vb_pending_num")
@@ -75,7 +75,7 @@ def check_couchbase_buckets_vbuckets_replica(item, params, parsed):
"item_memory",
params.get("item_memory"),
infoname="Item memory",
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
diff --git a/cmk/base/legacy_checks/couchbase_nodes_size.py b/cmk/base/legacy_checks/couchbase_nodes_size.py
index 2b014c10ef4..8669736fd96 100644
--- a/cmk/base/legacy_checks/couchbase_nodes_size.py
+++ b/cmk/base/legacy_checks/couchbase_nodes_size.py
@@ -4,9 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
from cmk.plugins.lib.couchbase import parse_couchbase_lines
@@ -30,7 +31,7 @@ def check_couchbase_nodes_size(item, params, parsed):
on_disk,
"size_on_disk",
params.get("size_on_disk"),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="Size on disk",
)
@@ -40,7 +41,7 @@ def check_couchbase_nodes_size(item, params, parsed):
size,
"data_size",
params.get("size"),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="Data size",
)
diff --git a/cmk/base/legacy_checks/cpsecure_sessions.py b/cmk/base/legacy_checks/cpsecure_sessions.py
index cd83e63571c..a3c3f5aa189 100644
--- a/cmk/base/legacy_checks/cpsecure_sessions.py
+++ b/cmk/base/legacy_checks/cpsecure_sessions.py
@@ -15,8 +15,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
def inventory_cpsecure_sessions(info):
diff --git a/cmk/base/legacy_checks/cups_queues.py b/cmk/base/legacy_checks/cups_queues.py
index 6a66ab6aeac..87b3925806a 100644
--- a/cmk/base/legacy_checks/cups_queues.py
+++ b/cmk/base/legacy_checks/cups_queues.py
@@ -28,8 +28,7 @@
import time
from collections.abc import Mapping
-
-from typing_extensions import TypedDict
+from typing import TypedDict
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
diff --git a/cmk/base/legacy_checks/datapower_cpu.py b/cmk/base/legacy_checks/datapower_cpu.py
index d8eb734d498..3e00592db4f 100644
--- a/cmk/base/legacy_checks/datapower_cpu.py
+++ b/cmk/base/legacy_checks/datapower_cpu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.datapower import DETECT
diff --git a/cmk/base/legacy_checks/datapower_fs.py b/cmk/base/legacy_checks/datapower_fs.py
index 460d0614e5a..6097992bec9 100644
--- a/cmk/base/legacy_checks/datapower_fs.py
+++ b/cmk/base/legacy_checks/datapower_fs.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.df import df_check_filesystem_list, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.datapower import DETECT
diff --git a/cmk/base/legacy_checks/datapower_ldrive.py b/cmk/base/legacy_checks/datapower_ldrive.py
index bd4790d051d..e359201ab17 100644
--- a/cmk/base/legacy_checks/datapower_ldrive.py
+++ b/cmk/base/legacy_checks/datapower_ldrive.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.datapower import DETECT
diff --git a/cmk/base/legacy_checks/datapower_mem.py b/cmk/base/legacy_checks/datapower_mem.py
index f5bd634c6ed..843c31d52ca 100644
--- a/cmk/base/legacy_checks/datapower_mem.py
+++ b/cmk/base/legacy_checks/datapower_mem.py
@@ -8,22 +8,13 @@
from cmk.base.check_legacy_includes.mem import check_memory_element
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.datapower import DETECT
-# FIXME
-# The WATO group 'memory_simple' needs an item and the service_description should
-# have a '%s'. At the moment the current empty item '' and 'Memory' without '%s'
-# works but is not consistent. This will be fixed in the future.
-# If we change this we lose history and parameter sets have to be adapted.
-
def inventory_datapower_mem(info):
- # TODO: Cleanup empty string and change manpage
if info:
- return [("", {})]
- return []
+ yield None, {}
def check_datapower_mem(item, params, info):
@@ -31,7 +22,11 @@ def check_datapower_mem(item, params, info):
mem_used_bytes = int(info[0][1]) * 1024
return check_memory_element(
- "Usage", mem_used_bytes, mem_total_bytes, params.get("levels"), metric_name="mem_used"
+ "Usage",
+ mem_used_bytes,
+ mem_total_bytes,
+ params.get("levels"),
+ metric_name="mem_used",
)
@@ -49,6 +44,6 @@ def parse_datapower_mem(string_table: StringTable) -> StringTable:
service_name="Memory",
discovery_function=inventory_datapower_mem,
check_function=check_datapower_mem,
- check_ruleset_name="memory_simple",
+ check_ruleset_name="memory_simple_single",
check_default_parameters={"levels": ("perc_used", (80.0, 90.0))},
)
diff --git a/cmk/base/legacy_checks/datapower_pdrive.py b/cmk/base/legacy_checks/datapower_pdrive.py
index 6cef295cb9a..b39f9832662 100644
--- a/cmk/base/legacy_checks/datapower_pdrive.py
+++ b/cmk/base/legacy_checks/datapower_pdrive.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.datapower import DETECT
diff --git a/cmk/base/legacy_checks/datapower_raid_bat.py b/cmk/base/legacy_checks/datapower_raid_bat.py
index 15f26681ccc..3a6753fe227 100644
--- a/cmk/base/legacy_checks/datapower_raid_bat.py
+++ b/cmk/base/legacy_checks/datapower_raid_bat.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.datapower import DETECT
diff --git a/cmk/base/legacy_checks/datapower_temp.py b/cmk/base/legacy_checks/datapower_temp.py
index fd0a2e7b369..134bf551814 100644
--- a/cmk/base/legacy_checks/datapower_temp.py
+++ b/cmk/base/legacy_checks/datapower_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.datapower import DETECT
#
diff --git a/cmk/base/legacy_checks/db2_backup.py b/cmk/base/legacy_checks/db2_backup.py
index 599f97d5ab7..8653875ed36 100644
--- a/cmk/base/legacy_checks/db2_backup.py
+++ b/cmk/base/legacy_checks/db2_backup.py
@@ -6,11 +6,11 @@
import time
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.db2 import parse_db2_dbs
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
+from cmk.agent_based.v2 import IgnoreResultsError, render
# <<<db2_backup>>>
# [[[db2taddm:CMDBS1]]]
@@ -41,7 +41,7 @@ def check_db2_backup(item, params, parsed):
age,
None,
params["levels"],
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname="Time since last backup",
)
diff --git a/cmk/base/legacy_checks/db2_mem.py b/cmk/base/legacy_checks/db2_mem.py
index f04d386d22b..32fd31801d8 100644
--- a/cmk/base/legacy_checks/db2_mem.py
+++ b/cmk/base/legacy_checks/db2_mem.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError, render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import IgnoreResultsError, render, StringTable
def inventory_db2_mem(info):
@@ -44,7 +43,12 @@ def check_db2_mem(item, params, info): # pylint: disable=too-many-branches
perc_free = (limit - usage) / limit * 100.0
yield 0, f"Max {render.bytes(limit)}"
yield check_levels(
- usage, "mem", None, human_readable_func=render.bytes, infoname="Used", boundaries=(0, limit)
+ usage,
+ "mem_used",
+ None,
+ human_readable_func=render.bytes,
+ infoname="Used",
+ boundaries=(0, limit),
)
yield check_levels(
perc_free,
diff --git a/cmk/base/legacy_checks/db2_tablespaces.py b/cmk/base/legacy_checks/db2_tablespaces.py
index e7a8eba6926..8cc2de8a1a5 100644
--- a/cmk/base/legacy_checks/db2_tablespaces.py
+++ b/cmk/base/legacy_checks/db2_tablespaces.py
@@ -6,7 +6,7 @@
# mypy: disable-error-code="arg-type"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.db2 import parse_db2_dbs
from cmk.base.config import check_info
@@ -72,7 +72,7 @@ def check_db2_tablespaces(item, params, parsed):
warn, crit, levels_text, as_perc = db_get_tablespace_levels_in_bytes(usable, params)
- infotext = f"{get_bytes_human_readable(used)} of {get_bytes_human_readable(usable)} used"
+ infotext = f"{render.disksize(used)} of {render.disksize(usable)} used"
perfdata = [
("tablespace_size", usable, max(0, total - (warn or 0)), max(0, total - (crit or 0))),
("tablespace_used", used),
@@ -93,7 +93,7 @@ def check_db2_tablespaces(item, params, parsed):
if as_perc:
value_str = render.percent(perc_free)
else:
- value_str = get_bytes_human_readable(abs_free)
+ value_str = render.disksize(abs_free)
infotext = f"only {value_str} left {levels_text}"
yield state, infotext
diff --git a/cmk/base/legacy_checks/db2_version.py b/cmk/base/legacy_checks/db2_version.py
index 1032b5b4286..af4d57371a8 100644
--- a/cmk/base/legacy_checks/db2_version.py
+++ b/cmk/base/legacy_checks/db2_version.py
@@ -10,7 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_db2_version(info):
diff --git a/cmk/base/legacy_checks/decru_cpu.py b/cmk/base/legacy_checks/decru_cpu.py
index 785131da658..1027609abb0 100644
--- a/cmk/base/legacy_checks/decru_cpu.py
+++ b/cmk/base/legacy_checks/decru_cpu.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.decru import DETECT_DECRU
diff --git a/cmk/base/legacy_checks/decru_fans.py b/cmk/base/legacy_checks/decru_fans.py
index c17f53b81ee..21f5d0d2c31 100644
--- a/cmk/base/legacy_checks/decru_fans.py
+++ b/cmk/base/legacy_checks/decru_fans.py
@@ -3,19 +3,12 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
-from typing import Mapping
-
-from cmk.base.check_api import (
- check_levels,
- CheckResult,
- DiscoveryResult,
- LegacyCheckDefinition,
- Service,
-)
+from collections.abc import Mapping
+
+from cmk.base.check_api import check_levels, CheckResult, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.decru import DETECT_DECRU
diff --git a/cmk/base/legacy_checks/decru_perf.py b/cmk/base/legacy_checks/decru_perf.py
index abf52ce81e3..e37685633a8 100644
--- a/cmk/base/legacy_checks/decru_perf.py
+++ b/cmk/base/legacy_checks/decru_perf.py
@@ -5,17 +5,10 @@
from collections.abc import Mapping
-from cmk.base.check_api import (
- check_levels,
- CheckResult,
- DiscoveryResult,
- LegacyCheckDefinition,
- Service,
-)
+from cmk.base.check_api import check_levels, CheckResult, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.decru import DETECT_DECRU
diff --git a/cmk/base/legacy_checks/decru_power.py b/cmk/base/legacy_checks/decru_power.py
index 927a04a2a68..95de4d6415f 100644
--- a/cmk/base/legacy_checks/decru_power.py
+++ b/cmk/base/legacy_checks/decru_power.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.decru import DETECT_DECRU
diff --git a/cmk/base/legacy_checks/decru_temps.py b/cmk/base/legacy_checks/decru_temps.py
index 3bc6e0e5546..2b9208ee305 100644
--- a/cmk/base/legacy_checks/decru_temps.py
+++ b/cmk/base/legacy_checks/decru_temps.py
@@ -7,8 +7,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature, fahrenheit_to_celsius
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.decru import DETECT_DECRU
diff --git a/cmk/base/legacy_checks/dell_chassis_fans.py b/cmk/base/legacy_checks/dell_chassis_fans.py
index f6fd7a5d6a5..399b276b797 100644
--- a/cmk/base/legacy_checks/dell_chassis_fans.py
+++ b/cmk/base/legacy_checks/dell_chassis_fans.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_CHASSIS
diff --git a/cmk/base/legacy_checks/dell_chassis_io.py b/cmk/base/legacy_checks/dell_chassis_io.py
index 071630e20bc..ba76314c79b 100644
--- a/cmk/base/legacy_checks/dell_chassis_io.py
+++ b/cmk/base/legacy_checks/dell_chassis_io.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_CHASSIS
diff --git a/cmk/base/legacy_checks/dell_chassis_kvm.py b/cmk/base/legacy_checks/dell_chassis_kvm.py
index d609e1b74c2..b3f856f689d 100644
--- a/cmk/base/legacy_checks/dell_chassis_kvm.py
+++ b/cmk/base/legacy_checks/dell_chassis_kvm.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_CHASSIS
diff --git a/cmk/base/legacy_checks/dell_chassis_power.py b/cmk/base/legacy_checks/dell_chassis_power.py
index 2462d44951d..decb0448109 100644
--- a/cmk/base/legacy_checks/dell_chassis_power.py
+++ b/cmk/base/legacy_checks/dell_chassis_power.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, savefloat
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_CHASSIS
diff --git a/cmk/base/legacy_checks/dell_chassis_powersupplies.py b/cmk/base/legacy_checks/dell_chassis_powersupplies.py
index 27b5931b28d..4b717f4eb7a 100644
--- a/cmk/base/legacy_checks/dell_chassis_powersupplies.py
+++ b/cmk/base/legacy_checks/dell_chassis_powersupplies.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition, savefloat
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_CHASSIS
diff --git a/cmk/base/legacy_checks/dell_chassis_slots.py b/cmk/base/legacy_checks/dell_chassis_slots.py
index 3cb599b840a..f3afb094923 100644
--- a/cmk/base/legacy_checks/dell_chassis_slots.py
+++ b/cmk/base/legacy_checks/dell_chassis_slots.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_CHASSIS
diff --git a/cmk/base/legacy_checks/dell_chassis_status.py b/cmk/base/legacy_checks/dell_chassis_status.py
index 692a696aabe..6ef5333ad19 100644
--- a/cmk/base/legacy_checks/dell_chassis_status.py
+++ b/cmk/base/legacy_checks/dell_chassis_status.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_CHASSIS
diff --git a/cmk/base/legacy_checks/dell_chassis_temp.py b/cmk/base/legacy_checks/dell_chassis_temp.py
index 6b291a0219c..69a6ef16f12 100644
--- a/cmk/base/legacy_checks/dell_chassis_temp.py
+++ b/cmk/base/legacy_checks/dell_chassis_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_CHASSIS
diff --git a/cmk/base/legacy_checks/dell_compellent_controller.py b/cmk/base/legacy_checks/dell_compellent_controller.py
index e2143b66c25..302c8e6865f 100644
--- a/cmk/base/legacy_checks/dell_compellent_controller.py
+++ b/cmk/base/legacy_checks/dell_compellent_controller.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes import dell_compellent
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_DELL_COMPELLENT
# example output
diff --git a/cmk/base/legacy_checks/dell_compellent_enclosure.py b/cmk/base/legacy_checks/dell_compellent_enclosure.py
index a4e7f008896..2b7790f0c73 100644
--- a/cmk/base/legacy_checks/dell_compellent_enclosure.py
+++ b/cmk/base/legacy_checks/dell_compellent_enclosure.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes import dell_compellent
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_DELL_COMPELLENT
# example output
diff --git a/cmk/base/legacy_checks/dell_compellent_folder.py b/cmk/base/legacy_checks/dell_compellent_folder.py
index 2a42085cc6f..37e3e2ce725 100644
--- a/cmk/base/legacy_checks/dell_compellent_folder.py
+++ b/cmk/base/legacy_checks/dell_compellent_folder.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.df import df_check_filesystem_list, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_DELL_COMPELLENT
diff --git a/cmk/base/legacy_checks/dell_eql_storage.py b/cmk/base/legacy_checks/dell_eql_storage.py
index 1643a5aab82..5cd4bf59a80 100644
--- a/cmk/base/legacy_checks/dell_eql_storage.py
+++ b/cmk/base/legacy_checks/dell_eql_storage.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, contains, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, contains, render, SNMPTree, startswith, StringTable
def inventory_dell_eql_storage(info):
@@ -77,10 +76,10 @@ def check_dell_eql_storage(item, _no_params, info):
("fs_free", total_bytes - used_bytes),
]
yield 0, "Used: {}/{} (Snapshots: {}, Replication: {})".format(
- get_bytes_human_readable(used_bytes),
- get_bytes_human_readable(total_bytes),
- get_bytes_human_readable(snap_bytes),
- get_bytes_human_readable(repl_bytes),
+ render.disksize(used_bytes),
+ render.disksize(total_bytes),
+ render.disksize(snap_bytes),
+ render.disksize(repl_bytes),
), perfdata
diff --git a/cmk/base/legacy_checks/dell_idrac_disks.py b/cmk/base/legacy_checks/dell_idrac_disks.py
index 804da6c55cd..9b691494828 100644
--- a/cmk/base/legacy_checks/dell_idrac_disks.py
+++ b/cmk/base/legacy_checks/dell_idrac_disks.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_IDRAC_POWEREDGE
# .1.3.6.1.4.1.674.10892.5.5.1.20.130.4.1.2.1 Physical Disk 0:1:0 --> IDRAC-MIB::physicalDiskName.1
@@ -82,7 +81,7 @@ def check_dell_idrac_disks(item, _no_params, info):
if disk_name == item:
yield 0, "[{}] Size: {}".format(
display_name,
- get_bytes_human_readable(int(capacity_MB) * 1024 * 1024),
+ render.disksize(int(capacity_MB) * 1024 * 1024),
)
for what, what_key, what_text in [
diff --git a/cmk/base/legacy_checks/dell_idrac_fans.py b/cmk/base/legacy_checks/dell_idrac_fans.py
index 7a76e431238..f4083a2a573 100644
--- a/cmk/base/legacy_checks/dell_idrac_fans.py
+++ b/cmk/base/legacy_checks/dell_idrac_fans.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fan import check_fan
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
DELL_IDRAC_FANS_STATE_MAP = {
"1": (3, "OTHER"),
diff --git a/cmk/base/legacy_checks/dell_idrac_power.py b/cmk/base/legacy_checks/dell_idrac_power.py
index 6ea16e017c1..5aa2425f558 100644
--- a/cmk/base/legacy_checks/dell_idrac_power.py
+++ b/cmk/base/legacy_checks/dell_idrac_power.py
@@ -11,8 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_dell_idrac_power(info):
diff --git a/cmk/base/legacy_checks/dell_idrac_raid.py b/cmk/base/legacy_checks/dell_idrac_raid.py
index e368e72090e..3e8242c8c15 100644
--- a/cmk/base/legacy_checks/dell_idrac_raid.py
+++ b/cmk/base/legacy_checks/dell_idrac_raid.py
@@ -11,8 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_dell_idrac_raid(info):
diff --git a/cmk/base/legacy_checks/dell_idrac_virtdisks.py b/cmk/base/legacy_checks/dell_idrac_virtdisks.py
index dd70df9de05..e3ca34757a7 100644
--- a/cmk/base/legacy_checks/dell_idrac_virtdisks.py
+++ b/cmk/base/legacy_checks/dell_idrac_virtdisks.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_IDRAC_POWEREDGE
# .1.3.6.1.4.1.674.10892.5.5.1.20.140.1.1.2.1 System --> IDRAC-MIB::virtualDiskName.1
diff --git a/cmk/base/legacy_checks/dell_om_disks.py b/cmk/base/legacy_checks/dell_om_disks.py
index 164ddbb7ba8..c6dc526448d 100644
--- a/cmk/base/legacy_checks/dell_om_disks.py
+++ b/cmk/base/legacy_checks/dell_om_disks.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition, saveint
+from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_OPENMANAGE
@@ -76,7 +75,7 @@ def check_dell_om_disks(item, _no_params, info):
smart = saveint(smart)
mt = saveint(mt)
size = saveint(sizeMB) * 1024 * 1024
- msg = [f"{name} ({pid}, {get_bytes_human_readable(size)})"]
+ msg = [f"{name} ({pid}, {render.bytes(size)})"]
label = ""
if smart == 2:
dstate = 34
diff --git a/cmk/base/legacy_checks/dell_om_esmlog.py b/cmk/base/legacy_checks/dell_om_esmlog.py
index fc2fa060418..cb712720055 100644
--- a/cmk/base/legacy_checks/dell_om_esmlog.py
+++ b/cmk/base/legacy_checks/dell_om_esmlog.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_OPENMANAGE
diff --git a/cmk/base/legacy_checks/dell_om_fans.py b/cmk/base/legacy_checks/dell_om_fans.py
index 6dc4b114cec..7ac09bb5a85 100644
--- a/cmk/base/legacy_checks/dell_om_fans.py
+++ b/cmk/base/legacy_checks/dell_om_fans.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fan import check_fan
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_OPENMANAGE
diff --git a/cmk/base/legacy_checks/dell_om_mem.py b/cmk/base/legacy_checks/dell_om_mem.py
index 95eafd48b7e..afa7f6fbde3 100644
--- a/cmk/base/legacy_checks/dell_om_mem.py
+++ b/cmk/base/legacy_checks/dell_om_mem.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_OPENMANAGE
@@ -51,7 +50,7 @@ def check_dell_om_mem(item, _no_params, info):
yield 1, failure_modes[bitmask]
bitmask *= 2
- yield 0, "Size: %s" % get_bytes_human_readable(int(size) * 1024)
+ yield 0, "Size: %s" % render.bytes(int(size) * 1024)
def parse_dell_om_mem(string_table: StringTable) -> StringTable:
diff --git a/cmk/base/legacy_checks/dell_om_power.py b/cmk/base/legacy_checks/dell_om_power.py
index c213837e3f8..4c27e394f35 100644
--- a/cmk/base/legacy_checks/dell_om_power.py
+++ b/cmk/base/legacy_checks/dell_om_power.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_OPENMANAGE
# example output
diff --git a/cmk/base/legacy_checks/dell_om_processors.py b/cmk/base/legacy_checks/dell_om_processors.py
index 2a5d7c47812..24b09353a5a 100644
--- a/cmk/base/legacy_checks/dell_om_processors.py
+++ b/cmk/base/legacy_checks/dell_om_processors.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_OPENMANAGE
diff --git a/cmk/base/legacy_checks/dell_om_sensors.py b/cmk/base/legacy_checks/dell_om_sensors.py
index c705f2f03c2..cf1f2f6d872 100644
--- a/cmk/base/legacy_checks/dell_om_sensors.py
+++ b/cmk/base/legacy_checks/dell_om_sensors.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_OPENMANAGE
diff --git a/cmk/base/legacy_checks/dell_powerconnect_cpu.py b/cmk/base/legacy_checks/dell_powerconnect_cpu.py
index f1e237d9523..92c6fa98141 100644
--- a/cmk/base/legacy_checks/dell_powerconnect_cpu.py
+++ b/cmk/base/legacy_checks/dell_powerconnect_cpu.py
@@ -19,8 +19,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, IgnoreResultsError, render, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, IgnoreResultsError, render, SNMPTree, StringTable
# Inventory of Dell PowerConnect CPU details.
diff --git a/cmk/base/legacy_checks/dell_powerconnect_fans.py b/cmk/base/legacy_checks/dell_powerconnect_fans.py
index e7e35b600f1..673bf3d9302 100644
--- a/cmk/base/legacy_checks/dell_powerconnect_fans.py
+++ b/cmk/base/legacy_checks/dell_powerconnect_fans.py
@@ -32,8 +32,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, contains, SNMPTree, StringTable
dell_powerconnect_fans_status_map = {
"1": "normal",
diff --git a/cmk/base/legacy_checks/dell_powerconnect_psu.py b/cmk/base/legacy_checks/dell_powerconnect_psu.py
index 3484e93e4c9..7f3c7902d1b 100644
--- a/cmk/base/legacy_checks/dell_powerconnect_psu.py
+++ b/cmk/base/legacy_checks/dell_powerconnect_psu.py
@@ -37,8 +37,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, contains, SNMPTree, StringTable
dell_powerconnect_psu_status_map = {
"1": "normal",
diff --git a/cmk/base/legacy_checks/dell_poweredge_amperage.py b/cmk/base/legacy_checks/dell_poweredge_amperage.py
index b82ce985400..2d9e59dbf18 100644
--- a/cmk/base/legacy_checks/dell_poweredge_amperage.py
+++ b/cmk/base/legacy_checks/dell_poweredge_amperage.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.dell_poweredge import check_dell_poweredge_amperage
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_IDRAC_POWEREDGE
diff --git a/cmk/base/legacy_checks/dell_poweredge_cpu.py b/cmk/base/legacy_checks/dell_poweredge_cpu.py
index 1469221f701..0a5bf4cdebb 100644
--- a/cmk/base/legacy_checks/dell_poweredge_cpu.py
+++ b/cmk/base/legacy_checks/dell_poweredge_cpu.py
@@ -10,8 +10,7 @@
from cmk.base.check_legacy_includes.dell_poweredge import check_dell_poweredge_cpu
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_IDRAC_POWEREDGE
diff --git a/cmk/base/legacy_checks/dell_poweredge_mem.py b/cmk/base/legacy_checks/dell_poweredge_mem.py
index 18e86395450..20d32d47f2f 100644
--- a/cmk/base/legacy_checks/dell_poweredge_mem.py
+++ b/cmk/base/legacy_checks/dell_poweredge_mem.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.dell_poweredge import check_dell_poweredge_mem
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_IDRAC_POWEREDGE
diff --git a/cmk/base/legacy_checks/dell_poweredge_netdev.py b/cmk/base/legacy_checks/dell_poweredge_netdev.py
index 16c4a8862d9..04a4b1ece85 100644
--- a/cmk/base/legacy_checks/dell_poweredge_netdev.py
+++ b/cmk/base/legacy_checks/dell_poweredge_netdev.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.dell_poweredge import check_dell_poweredge_netdev
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_IDRAC_POWEREDGE
diff --git a/cmk/base/legacy_checks/dell_poweredge_pci.py b/cmk/base/legacy_checks/dell_poweredge_pci.py
index 5d0eb172ea3..d7cdc82bc0d 100644
--- a/cmk/base/legacy_checks/dell_poweredge_pci.py
+++ b/cmk/base/legacy_checks/dell_poweredge_pci.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.dell_poweredge import check_dell_poweredge_pci
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_IDRAC_POWEREDGE
diff --git a/cmk/base/legacy_checks/dell_poweredge_status.py b/cmk/base/legacy_checks/dell_poweredge_status.py
index 1489651010a..4095b276198 100644
--- a/cmk/base/legacy_checks/dell_poweredge_status.py
+++ b/cmk/base/legacy_checks/dell_poweredge_status.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.dell_poweredge import check_dell_poweredge_status
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_IDRAC_POWEREDGE
diff --git a/cmk/base/legacy_checks/dell_poweredge_temp.py b/cmk/base/legacy_checks/dell_poweredge_temp.py
index 8a7af48d949..835b9d8a321 100644
--- a/cmk/base/legacy_checks/dell_poweredge_temp.py
+++ b/cmk/base/legacy_checks/dell_poweredge_temp.py
@@ -11,8 +11,7 @@
)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.dell import DETECT_IDRAC_POWEREDGE
diff --git a/cmk/base/legacy_checks/didactum_can_sensors_analog.py b/cmk/base/legacy_checks/didactum_can_sensors_analog.py
index 42c2e15cafb..b67864b9353 100644
--- a/cmk/base/legacy_checks/didactum_can_sensors_analog.py
+++ b/cmk/base/legacy_checks/didactum_can_sensors_analog.py
@@ -8,7 +8,7 @@
check_didactum_sensors_humidity,
check_didactum_sensors_temp,
check_didactum_sensors_voltage,
- inventory_didactum_sensors,
+ discover_didactum_sensors,
parse_didactum_sensors,
)
from cmk.base.config import check_info
@@ -33,7 +33,7 @@
def inventory_didactum_can_sensors_analog_temp(parsed):
- return inventory_didactum_sensors(parsed, "temperature")
+ return discover_didactum_sensors(parsed, "temperature")
check_info["didactum_can_sensors_analog"] = LegacyCheckDefinition(
@@ -61,7 +61,7 @@ def inventory_didactum_can_sensors_analog_temp(parsed):
def inventory_didactum_can_sensors_analog_humid(parsed):
- return inventory_didactum_sensors(parsed, "humidity")
+ return discover_didactum_sensors(parsed, "humidity")
check_info["didactum_can_sensors_analog.humidity"] = LegacyCheckDefinition(
@@ -84,7 +84,7 @@ def inventory_didactum_can_sensors_analog_humid(parsed):
def inventory_didactum_can_sensors_analog_volt(parsed):
- return inventory_didactum_sensors(parsed, "voltage")
+ return discover_didactum_sensors(parsed, "voltage")
check_info["didactum_can_sensors_analog.voltage"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/didactum_sensors_analog.py b/cmk/base/legacy_checks/didactum_sensors_analog.py
index a4903be4b9e..0c8a6f81321 100644
--- a/cmk/base/legacy_checks/didactum_sensors_analog.py
+++ b/cmk/base/legacy_checks/didactum_sensors_analog.py
@@ -8,7 +8,7 @@
check_didactum_sensors_humidity,
check_didactum_sensors_temp,
check_didactum_sensors_voltage,
- inventory_didactum_sensors,
+ discover_didactum_sensors,
parse_didactum_sensors,
)
from cmk.base.config import check_info
@@ -58,7 +58,7 @@
def inventory_didactum_sensors_analog_temp(parsed):
- return inventory_didactum_sensors(parsed, "temperature")
+ return discover_didactum_sensors(parsed, "temperature")
check_info["didactum_sensors_analog"] = LegacyCheckDefinition(
@@ -86,7 +86,7 @@ def inventory_didactum_sensors_analog_temp(parsed):
def inventory_didactum_sensors_analog_humid(parsed):
- return inventory_didactum_sensors(parsed, "humidity")
+ return discover_didactum_sensors(parsed, "humidity")
check_info["didactum_sensors_analog.humidity"] = LegacyCheckDefinition(
@@ -109,7 +109,7 @@ def inventory_didactum_sensors_analog_humid(parsed):
def inventory_didactum_sensors_analog_volt(parsed):
- return inventory_didactum_sensors(parsed, "voltage")
+ return discover_didactum_sensors(parsed, "voltage")
check_info["didactum_sensors_analog.voltage"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/didactum_sensors_discrete.py b/cmk/base/legacy_checks/didactum_sensors_discrete.py
index 44358b59057..241cdc0ed08 100644
--- a/cmk/base/legacy_checks/didactum_sensors_discrete.py
+++ b/cmk/base/legacy_checks/didactum_sensors_discrete.py
@@ -6,7 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.didactum import (
- inventory_didactum_sensors,
+ discover_didactum_sensors,
parse_didactum_sensors,
)
from cmk.base.config import check_info
@@ -33,8 +33,8 @@
def inventory_didactum_sensors_discrete_dry(parsed):
- yield from inventory_didactum_sensors(parsed, "dry")
- yield from inventory_didactum_sensors(parsed, "smoke")
+ yield from discover_didactum_sensors(parsed, "dry")
+ yield from discover_didactum_sensors(parsed, "smoke")
def check_didactum_sensors_discrete_dry(item, params, parsed):
diff --git a/cmk/base/legacy_checks/didactum_sensors_outlet.py b/cmk/base/legacy_checks/didactum_sensors_outlet.py
index c52a052b407..d2d823f1bab 100644
--- a/cmk/base/legacy_checks/didactum_sensors_outlet.py
+++ b/cmk/base/legacy_checks/didactum_sensors_outlet.py
@@ -6,7 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.didactum import (
- inventory_didactum_sensors,
+ discover_didactum_sensors,
parse_didactum_sensors,
)
from cmk.base.config import check_info
@@ -16,7 +16,7 @@
def inventory_didactum_sensors_outlet_relay(parsed):
- return inventory_didactum_sensors(parsed, "relay")
+ return discover_didactum_sensors(parsed, "relay")
def check_didactum_sensors_outlet_relay(item, params, parsed):
diff --git a/cmk/base/legacy_checks/dmi_sysinfo.py b/cmk/base/legacy_checks/dmi_sysinfo.py
index 6e9b531ca5d..e7a64c501ad 100644
--- a/cmk/base/legacy_checks/dmi_sysinfo.py
+++ b/cmk/base/legacy_checks/dmi_sysinfo.py
@@ -10,7 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_dmi_sysinfo(info):
diff --git a/cmk/base/legacy_checks/dmraid.py b/cmk/base/legacy_checks/dmraid.py
index b452d9948eb..f0a7cc37a1c 100644
--- a/cmk/base/legacy_checks/dmraid.py
+++ b/cmk/base/legacy_checks/dmraid.py
@@ -17,7 +17,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_dmraid_ldisks(info):
diff --git a/cmk/base/legacy_checks/docker_node_disk_usage.py b/cmk/base/legacy_checks/docker_node_disk_usage.py
index f90b1fc714c..9f20098bf01 100644
--- a/cmk/base/legacy_checks/docker_node_disk_usage.py
+++ b/cmk/base/legacy_checks/docker_node_disk_usage.py
@@ -6,10 +6,12 @@
# mypy: disable-error-code="arg-type"
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import IgnoreResultsError
import cmk.plugins.lib.docker as docker
+from cmk.agent_based.v2 import render
def parse_docker_node_disk_usage(string_table):
@@ -18,11 +20,15 @@ def parse_docker_node_disk_usage(string_table):
def check_docker_node_disk_usage(item, params, parsed):
+ if not parsed:
+ # The section error is reported by the "Docker node info" service
+ raise IgnoreResultsError("Disk usage missing")
+
if not (data := parsed.get(item)):
return
for key, human_readable_func in (
- ("size", get_bytes_human_readable),
- ("reclaimable", get_bytes_human_readable),
+ ("size", render.bytes),
+ ("reclaimable", render.bytes),
("count", lambda x: x),
("active", lambda x: x),
):
diff --git a/cmk/base/legacy_checks/docker_node_info.py b/cmk/base/legacy_checks/docker_node_info.py
index 15ffef1c634..36ae995155d 100644
--- a/cmk/base/legacy_checks/docker_node_info.py
+++ b/cmk/base/legacy_checks/docker_node_info.py
@@ -8,6 +8,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import IgnoreResultsError
from cmk.plugins.lib.docker import NodeInfoSection
@@ -20,9 +21,9 @@ def discover_docker_node_info(section: NodeInfoSection) -> Iterable[tuple[None,
def check_docker_node_info(_no_item, _no_params, parsed):
if "Name" in parsed:
yield 0, "Daemon running on host %s" % parsed["Name"]
- for state, key in enumerate(("Warning", "Critical", "Unknown"), 1):
- if key in parsed:
- yield state, parsed[key]
+ for state, key in [(2, "Critical"), (3, "Unknown")]:
+ for msg in parsed.get(key, ()):
+ yield state, msg
check_info["docker_node_info"] = LegacyCheckDefinition(
@@ -33,6 +34,10 @@ def check_docker_node_info(_no_item, _no_params, parsed):
def check_docker_node_containers(_no_item, params, parsed):
+ if list(parsed.keys()) == ["Unknown"]:
+ # The section error is reported by the "Docker node info" service
+ raise IgnoreResultsError("Container statistics missing")
+
for title, key, levels_prefix in (
("containers", "Containers", ""),
("running", "ContainersRunning", "running_"),
diff --git a/cmk/base/legacy_checks/docsis_channels_downstream.py b/cmk/base/legacy_checks/docsis_channels_downstream.py
index 254f75e2d07..5af4f0e01ff 100644
--- a/cmk/base/legacy_checks/docsis_channels_downstream.py
+++ b/cmk/base/legacy_checks/docsis_channels_downstream.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, equals, SNMPTree, StringTable
def inventory_docsis_channels_downstream(info):
diff --git a/cmk/base/legacy_checks/docsis_cm_status.py b/cmk/base/legacy_checks/docsis_cm_status.py
index bcae66a532b..52d628ce3d5 100644
--- a/cmk/base/legacy_checks/docsis_cm_status.py
+++ b/cmk/base/legacy_checks/docsis_cm_status.py
@@ -25,8 +25,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, equals, OIDEnd, SNMPTree, StringTable
def inventory_docsis_cm_status(info):
diff --git a/cmk/base/legacy_checks/domino_info.py b/cmk/base/legacy_checks/domino_info.py
index c0db2cafb94..39643069eb3 100644
--- a/cmk/base/legacy_checks/domino_info.py
+++ b/cmk/base/legacy_checks/domino_info.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.domino import DETECT
# Example SNMP walk:
diff --git a/cmk/base/legacy_checks/domino_transactions.py b/cmk/base/legacy_checks/domino_transactions.py
index a56d771c931..63dccc170a9 100644
--- a/cmk/base/legacy_checks/domino_transactions.py
+++ b/cmk/base/legacy_checks/domino_transactions.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.domino import DETECT
diff --git a/cmk/base/legacy_checks/domino_users.py b/cmk/base/legacy_checks/domino_users.py
index 934c496ab3c..1cbc6ea1d4c 100644
--- a/cmk/base/legacy_checks/domino_users.py
+++ b/cmk/base/legacy_checks/domino_users.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.domino import DETECT
diff --git a/cmk/base/legacy_checks/drbd.py b/cmk/base/legacy_checks/drbd.py
index 2fb675e2a67..89f72726297 100644
--- a/cmk/base/legacy_checks/drbd.py
+++ b/cmk/base/legacy_checks/drbd.py
@@ -130,8 +130,7 @@
from cmk.base.check_api import LegacyCheckDefinition, state_markers
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, StringTable
_drbd_block_start_match = re.compile("^[0-9]+:")
diff --git a/cmk/base/legacy_checks/elasticsearch_nodes.py b/cmk/base/legacy_checks/elasticsearch_nodes.py
index 671f30be359..5a47d97d294 100644
--- a/cmk/base/legacy_checks/elasticsearch_nodes.py
+++ b/cmk/base/legacy_checks/elasticsearch_nodes.py
@@ -19,7 +19,7 @@
# mypy: disable-error-code="var-annotated,arg-type"
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
from cmk.agent_based.v2 import render
@@ -57,7 +57,7 @@ def check_elasticsearch_nodes(item, params, parsed):
for data_key, params_key, hr_func in [
("cpu_percent", "cpu_levels", render.percent),
("cpu_total_in_millis", "cpu_total_in_millis", int),
- ("mem_total_virtual_in_bytes", "mem_total_virtual_in_bytes", get_bytes_human_readable),
+ ("mem_total_virtual_in_bytes", "mem_total_virtual_in_bytes", render.bytes),
("open_file_descriptors", "open_file_descriptors", int),
("max_file_descriptors", "max_file_descriptors", int),
]:
diff --git a/cmk/base/legacy_checks/eltek_fans.py b/cmk/base/legacy_checks/eltek_fans.py
index b87e1de1b9c..dfd699f407a 100644
--- a/cmk/base/legacy_checks/eltek_fans.py
+++ b/cmk/base/legacy_checks/eltek_fans.py
@@ -53,8 +53,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.eltek import DETECT_ELTEK
@@ -112,6 +111,6 @@ def parse_eltek_fans(string_table: StringTable) -> StringTable:
check_function=check_eltek_fans,
check_ruleset_name="hw_fans_perc",
check_default_parameters={
- "levels": (99, 100),
+ "levels": (99.0, 100.0),
},
)
diff --git a/cmk/base/legacy_checks/eltek_outdoor_temp.py b/cmk/base/legacy_checks/eltek_outdoor_temp.py
index eb2adfa6aba..364c863fd1a 100644
--- a/cmk/base/legacy_checks/eltek_outdoor_temp.py
+++ b/cmk/base/legacy_checks/eltek_outdoor_temp.py
@@ -10,8 +10,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.eltek import DETECT_ELTEK
# .1.3.6.1.4.1.12148.9.1.17.3.1.1.0 1 --> ELTEK-DISTRIBUTED-MIB::ioUnitID.0
diff --git a/cmk/base/legacy_checks/eltek_systemstatus.py b/cmk/base/legacy_checks/eltek_systemstatus.py
index b58e56eab20..e5720d1df0a 100644
--- a/cmk/base/legacy_checks/eltek_systemstatus.py
+++ b/cmk/base/legacy_checks/eltek_systemstatus.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.eltek import DETECT_ELTEK
diff --git a/cmk/base/legacy_checks/emc_datadomain_disks.py b/cmk/base/legacy_checks/emc_datadomain_disks.py
index 623360a4e63..2d7dd241fcd 100644
--- a/cmk/base/legacy_checks/emc_datadomain_disks.py
+++ b/cmk/base/legacy_checks/emc_datadomain_disks.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_DATADOMAIN
diff --git a/cmk/base/legacy_checks/emc_datadomain_fans.py b/cmk/base/legacy_checks/emc_datadomain_fans.py
index acdde62765e..dab91fc45ad 100644
--- a/cmk/base/legacy_checks/emc_datadomain_fans.py
+++ b/cmk/base/legacy_checks/emc_datadomain_fans.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_DATADOMAIN
diff --git a/cmk/base/legacy_checks/emc_datadomain_fs.py b/cmk/base/legacy_checks/emc_datadomain_fs.py
index 08f5eea3545..756990bce6a 100644
--- a/cmk/base/legacy_checks/emc_datadomain_fs.py
+++ b/cmk/base/legacy_checks/emc_datadomain_fs.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.df import df_check_filesystem_list, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.df import EXCLUDED_MOUNTPOINTS
from cmk.plugins.lib.emc import DETECT_DATADOMAIN
diff --git a/cmk/base/legacy_checks/emc_datadomain_mtree.py b/cmk/base/legacy_checks/emc_datadomain_mtree.py
index 92355ab1746..0251f4c7f4d 100644
--- a/cmk/base/legacy_checks/emc_datadomain_mtree.py
+++ b/cmk/base/legacy_checks/emc_datadomain_mtree.py
@@ -4,10 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
+from cmk.agent_based.v2 import render, SNMPTree
from cmk.plugins.lib.emc import DETECT_DATADOMAIN
@@ -36,7 +36,7 @@ def check_emc_datadomain_mtree(item, params, parsed):
yield (
params.get(dev_state_str, 3),
"Status: {}, Precompiled: {}".format(
- dev_state_str, get_bytes_human_readable(mtree_data["precompiled"])
+ dev_state_str, render.bytes(mtree_data["precompiled"])
),
[("precompiled", mtree_data["precompiled"])],
)
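A pattern repeated throughout this diff replaces the legacy get_bytes_human_readable / get_age_human_readable helpers from cmk.base.check_api with the render module of the v2 API. A minimal sketch of the replacements used here, assuming an environment where cmk.agent_based.v2 is importable; the printed forms are approximate:

    from cmk.agent_based.v2 import render

    # render.bytes uses IEC units (base 1024); render.disksize uses SI units
    # (base 1000), standing in for get_bytes_human_readable(..., base=1000.0).
    print(render.bytes(1536 * 1024))   # e.g. "1.50 MiB"
    print(render.disksize(2_000_000))  # e.g. "2.00 MB"
    print(render.timespan(93784))      # e.g. "1 day 2 hours"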
diff --git a/cmk/base/legacy_checks/emc_datadomain_nvbat.py b/cmk/base/legacy_checks/emc_datadomain_nvbat.py
index 81f10bd3c66..a111b438aa3 100644
--- a/cmk/base/legacy_checks/emc_datadomain_nvbat.py
+++ b/cmk/base/legacy_checks/emc_datadomain_nvbat.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_DATADOMAIN
@@ -34,7 +33,7 @@ def check_emc_datadomain_nvbat(item, _no_params, info):
dev_state_str = state_table.get(dev_state, ("Unknown", 3))[0]
dev_state_rc = state_table.get(dev_state, ("Unknown", 3))[1]
infotext = f"Status {dev_state_str} Charge Level {dev_charge}%"
- perfdata = [("charge", dev_charge + "%")]
+ perfdata = [("battery_capacity", dev_charge + "%")]
return dev_state_rc, infotext, perfdata
return None
diff --git a/cmk/base/legacy_checks/emc_datadomain_power.py b/cmk/base/legacy_checks/emc_datadomain_power.py
index 5e11e800dcf..1917b881a19 100644
--- a/cmk/base/legacy_checks/emc_datadomain_power.py
+++ b/cmk/base/legacy_checks/emc_datadomain_power.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_DATADOMAIN
diff --git a/cmk/base/legacy_checks/emc_datadomain_temps.py b/cmk/base/legacy_checks/emc_datadomain_temps.py
index ae1e55acf9c..c9eda6dffd9 100644
--- a/cmk/base/legacy_checks/emc_datadomain_temps.py
+++ b/cmk/base/legacy_checks/emc_datadomain_temps.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_DATADOMAIN
diff --git a/cmk/base/legacy_checks/emc_isilon.py b/cmk/base/legacy_checks/emc_isilon.py
index c08785e3c75..fe74c016a69 100644
--- a/cmk/base/legacy_checks/emc_isilon.py
+++ b/cmk/base/legacy_checks/emc_isilon.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_ISILON
diff --git a/cmk/base/legacy_checks/emc_isilon_cpu.py b/cmk/base/legacy_checks/emc_isilon_cpu.py
index 24c410e9ce2..ca24bbdab87 100644
--- a/cmk/base/legacy_checks/emc_isilon_cpu.py
+++ b/cmk/base/legacy_checks/emc_isilon_cpu.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_ISILON
diff --git a/cmk/base/legacy_checks/emc_isilon_diskstatus.py b/cmk/base/legacy_checks/emc_isilon_diskstatus.py
index 961dd7f1801..396a08b987c 100644
--- a/cmk/base/legacy_checks/emc_isilon_diskstatus.py
+++ b/cmk/base/legacy_checks/emc_isilon_diskstatus.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_ISILON
diff --git a/cmk/base/legacy_checks/emc_isilon_fans.py b/cmk/base/legacy_checks/emc_isilon_fans.py
index 83965ed5ca3..136c7458d56 100644
--- a/cmk/base/legacy_checks/emc_isilon_fans.py
+++ b/cmk/base/legacy_checks/emc_isilon_fans.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fan import check_fan
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_ISILON
diff --git a/cmk/base/legacy_checks/emc_isilon_power.py b/cmk/base/legacy_checks/emc_isilon_power.py
index 01f797df6ac..7aa44c3f267 100644
--- a/cmk/base/legacy_checks/emc_isilon_power.py
+++ b/cmk/base/legacy_checks/emc_isilon_power.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_ISILON
diff --git a/cmk/base/legacy_checks/emc_isilon_temp.py b/cmk/base/legacy_checks/emc_isilon_temp.py
index 51d8d654558..2ed6b9ee8ab 100644
--- a/cmk/base/legacy_checks/emc_isilon_temp.py
+++ b/cmk/base/legacy_checks/emc_isilon_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_ISILON
diff --git a/cmk/base/legacy_checks/emc_vplex_cpu.py b/cmk/base/legacy_checks/emc_vplex_cpu.py
index 8fdb0bad68d..f8567f3d6f9 100644
--- a/cmk/base/legacy_checks/emc_vplex_cpu.py
+++ b/cmk/base/legacy_checks/emc_vplex_cpu.py
@@ -5,12 +5,11 @@
from collections.abc import Mapping
-from cmk.base.check_api import CheckResult, DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import CheckResult, LegacyCheckDefinition
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.emc import DETECT_VPLEX
diff --git a/cmk/base/legacy_checks/emcvnx_info.py b/cmk/base/legacy_checks/emcvnx_info.py
index 70a835856ad..5a0113a176a 100644
--- a/cmk/base/legacy_checks/emcvnx_info.py
+++ b/cmk/base/legacy_checks/emcvnx_info.py
@@ -53,18 +53,18 @@
def parse_emcvnx_info(string_table):
parsed = {
- "string_table": [],
+ "info": [],
"storage": [],
"link": [],
"config": [],
"io": [],
}
key_to_subcheck = {
- "System Fault LED": "string_table",
- "Server IP Address": "string_table",
- "System Date": "string_table",
- "System Time": "string_table",
- "Serial Number For The SP": "string_table",
+ "System Fault LED": "info",
+ "Server IP Address": "info",
+ "System Date": "info",
+ "System Time": "info",
+ "Serial Number For The SP": "info",
"Storage Processor": "storage",
"Storage Processor Network Name": "storage",
"Storage Processor IP Address": "storage",
diff --git a/cmk/base/legacy_checks/emcvnx_storage_pools.py b/cmk/base/legacy_checks/emcvnx_storage_pools.py
index 4f1a4753b2d..4022fe59753 100644
--- a/cmk/base/legacy_checks/emcvnx_storage_pools.py
+++ b/cmk/base/legacy_checks/emcvnx_storage_pools.py
@@ -6,12 +6,7 @@
# mypy: disable-error-code="var-annotated,arg-type"
-from cmk.base.check_api import (
- check_levels,
- get_age_human_readable,
- get_bytes_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
from cmk.agent_based.v2 import render
@@ -100,9 +95,9 @@ def check_emcvnx_storage_pools(item, params, parsed):
) % (
state,
status,
- get_bytes_human_readable(user_capacity),
- get_bytes_human_readable(consumed_capacity),
- get_bytes_human_readable(avail_capacity),
+ render.bytes(user_capacity),
+ render.bytes(consumed_capacity),
+ render.bytes(avail_capacity),
)
state = 0
@@ -115,8 +110,8 @@ def check_emcvnx_storage_pools(item, params, parsed):
state = 1
if state:
infotext += " (warn/crit at {}/{})".format(
- get_bytes_human_readable(perc_full_warn),
- get_bytes_human_readable(perc_full_crit),
+ render.bytes(perc_full_warn),
+ render.bytes(perc_full_crit),
)
yield state, infotext
@@ -125,8 +120,8 @@ def check_emcvnx_storage_pools(item, params, parsed):
+ "Total subscribed capacity: %s"
) % (
render.percent(percent_subscribed),
- get_bytes_human_readable(over_subscribed),
- get_bytes_human_readable(total_subscribed_capacity),
+ render.bytes(over_subscribed),
+ render.bytes(total_subscribed_capacity),
), [
("emcvnx_consumed_capacity", consumed_capacity),
("emcvnx_avail_capacity", avail_capacity),
@@ -202,7 +197,7 @@ def check_emcvnx_storage_pools_tiering(item, params, parsed):
"emcvnx_move_%s" % short_dir,
None,
infoname="Move %s" % short_dir,
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
move_completed_raw = data.get("Data Movement Completed (GBs)")
@@ -213,7 +208,7 @@ def check_emcvnx_storage_pools_tiering(item, params, parsed):
"emcvnx_move_completed",
None,
infoname="Movement completed",
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
time_to_complete = data.get("Estimated Time to Complete")
@@ -225,7 +220,7 @@ def check_emcvnx_storage_pools_tiering(item, params, parsed):
"emcvnx_time_to_complete",
params["time_to_complete"],
infoname="Age",
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
)
@@ -268,7 +263,7 @@ def check_emcvnx_storage_pools_tieringtypes(item, params, parsed):
None,
None,
infoname="User capacity",
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
consumed_capacity_raw = data.get("%s_Consumed Capacity (GBs)" % tier_name)
@@ -279,7 +274,7 @@ def check_emcvnx_storage_pools_tieringtypes(item, params, parsed):
"emcvnx_consumed_capacity",
None,
infoname="Consumed capacity",
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
avail_capacity_raw = data.get("%s_Available Capacity (GBs)" % tier_name)
@@ -290,7 +285,7 @@ def check_emcvnx_storage_pools_tieringtypes(item, params, parsed):
"emcvnx_avail_capacity",
None,
infoname="Available capacity",
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
percent_subscribed_raw = data.get("%s_Percent Subscribed" % tier_name)
@@ -314,7 +309,7 @@ def check_emcvnx_storage_pools_tieringtypes(item, params, parsed):
"emcvnx_targeted_%s" % short_dir,
None,
infoname="Move %s" % short_dir,
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
@@ -336,9 +331,7 @@ def check_emcvnx_storage_pools_tieringtypes(item, params, parsed):
# '----------------------------------------------------------------------'
-def _emcvnx_get_text_perf(
- data, key, perfname, format_func=get_bytes_human_readable, factor=1024**3
-):
+def _emcvnx_get_text_perf(data, key, perfname, format_func=render.bytes, factor=1024**3):
field = data.get(key, "unknown")
try:
value = float(field) * factor
diff --git a/cmk/base/legacy_checks/emcvnx_writecache.py b/cmk/base/legacy_checks/emcvnx_writecache.py
index a3c51450af3..156c3d27bbd 100644
--- a/cmk/base/legacy_checks/emcvnx_writecache.py
+++ b/cmk/base/legacy_checks/emcvnx_writecache.py
@@ -11,7 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_emcvnx_writecache(info):
diff --git a/cmk/base/legacy_checks/emerson_stat.py b/cmk/base/legacy_checks/emerson_stat.py
index 63af177f7a1..042b5a9cff3 100644
--- a/cmk/base/legacy_checks/emerson_stat.py
+++ b/cmk/base/legacy_checks/emerson_stat.py
@@ -32,14 +32,13 @@
# from a customer, it is named "Emerson Energy Systems (EES) Power MIB"
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, startswith, StringTable
-def inventory_emerson_stat(string_table: StringTable) -> DiscoveryResult:
+def discover_emerson_stat(string_table: StringTable) -> DiscoveryResult:
if string_table:
yield Service()
@@ -84,6 +83,6 @@ def parse_emerson_stat(string_table: StringTable) -> StringTable:
oids=["0"],
),
service_name="Status",
- discovery_function=inventory_emerson_stat,
+ discovery_function=discover_emerson_stat,
check_function=check_emerson_stat,
)
diff --git a/cmk/base/legacy_checks/emerson_temp.py b/cmk/base/legacy_checks/emerson_temp.py
index ceafc5c1862..cf00e560087 100644
--- a/cmk/base/legacy_checks/emerson_temp.py
+++ b/cmk/base/legacy_checks/emerson_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
#
# during inventory we are looking for all temperatures available,
diff --git a/cmk/base/legacy_checks/enterasys_cpu_util.py b/cmk/base/legacy_checks/enterasys_cpu_util.py
index 0df8bc3c61a..d8aecf5ece9 100644
--- a/cmk/base/legacy_checks/enterasys_cpu_util.py
+++ b/cmk/base/legacy_checks/enterasys_cpu_util.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.enterasys import DETECT_ENTERASYS
diff --git a/cmk/base/legacy_checks/enterasys_fans.py b/cmk/base/legacy_checks/enterasys_fans.py
index 6ea6ed6d940..ec60fe73d3c 100644
--- a/cmk/base/legacy_checks/enterasys_fans.py
+++ b/cmk/base/legacy_checks/enterasys_fans.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.enterasys import DETECT_ENTERASYS
diff --git a/cmk/base/legacy_checks/enterasys_lsnat.py b/cmk/base/legacy_checks/enterasys_lsnat.py
index 46180826ce6..767c4cc6435 100644
--- a/cmk/base/legacy_checks/enterasys_lsnat.py
+++ b/cmk/base/legacy_checks/enterasys_lsnat.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint, state_markers
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, exists, SNMPTree, startswith, StringTable
def inventory_enterasys_lsnat(info):
diff --git a/cmk/base/legacy_checks/enterasys_powersupply.py b/cmk/base/legacy_checks/enterasys_powersupply.py
index e5ab9624520..adbed7c3f8e 100644
--- a/cmk/base/legacy_checks/enterasys_powersupply.py
+++ b/cmk/base/legacy_checks/enterasys_powersupply.py
@@ -15,8 +15,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.enterasys import DETECT_ENTERASYS
diff --git a/cmk/base/legacy_checks/enterasys_temp.py b/cmk/base/legacy_checks/enterasys_temp.py
index 156e3079e87..96c60630be5 100644
--- a/cmk/base/legacy_checks/enterasys_temp.py
+++ b/cmk/base/legacy_checks/enterasys_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.enterasys import DETECT_ENTERASYS
diff --git a/cmk/base/legacy_checks/entersekt.py b/cmk/base/legacy_checks/entersekt.py
index 703e36cd503..f181bde82b3 100644
--- a/cmk/base/legacy_checks/entersekt.py
+++ b/cmk/base/legacy_checks/entersekt.py
@@ -19,8 +19,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree, StringTable
def inventory_entersekt(info):
diff --git a/cmk/base/legacy_checks/epson_beamer_lamp.py b/cmk/base/legacy_checks/epson_beamer_lamp.py
index dbd770b5278..778b243637f 100644
--- a/cmk/base/legacy_checks/epson_beamer_lamp.py
+++ b/cmk/base/legacy_checks/epson_beamer_lamp.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_epson_beamer_lamp(info):
diff --git a/cmk/base/legacy_checks/esx_vsphere_counters.py b/cmk/base/legacy_checks/esx_vsphere_counters.py
index d7425fc1eed..2fff360f80c 100644
--- a/cmk/base/legacy_checks/esx_vsphere_counters.py
+++ b/cmk/base/legacy_checks/esx_vsphere_counters.py
@@ -6,11 +6,11 @@
# mypy: disable-error-code="arg-type"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.uptime import check_uptime_seconds
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
+from cmk.agent_based.v2 import IgnoreResultsError, render
# Example output:
# <<<esx_vsphere_counters>>>
@@ -109,7 +109,7 @@ def check_esx_vsphere_counters_swap(item, params, parsed):
for key in ("in", "out", "used"):
if SWAP.get(key):
- value = get_bytes_human_readable(float(SWAP[key]))
+ value = render.bytes(float(SWAP[key]))
else:
value = "not available"
diff --git a/cmk/base/legacy_checks/esx_vsphere_datastores.py b/cmk/base/legacy_checks/esx_vsphere_datastores.py
index 1eb9e16ee9d..69147f18c58 100644
--- a/cmk/base/legacy_checks/esx_vsphere_datastores.py
+++ b/cmk/base/legacy_checks/esx_vsphere_datastores.py
@@ -6,7 +6,7 @@
# mypy: disable-error-code="var-annotated"
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.df import df_check_filesystem_single, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
@@ -64,7 +64,7 @@ def check_esx_vsphere_datastores(item, params, parsed):
uncommitted_bytes = data.get("uncommitted")
if uncommitted_bytes is None:
return
- text_uncommitted = "Uncommitted: %s" % get_bytes_human_readable(uncommitted_bytes)
+ text_uncommitted = "Uncommitted: %s" % render.bytes(uncommitted_bytes)
yield 0, text_uncommitted, [("uncommitted", uncommitted_bytes / mib)] # fixed: true-division
used_bytes = size_bytes - avail_bytes
diff --git a/cmk/base/legacy_checks/esx_vsphere_sensors.py b/cmk/base/legacy_checks/esx_vsphere_sensors.py
index 439634d7b86..6d1cf824a9d 100644
--- a/cmk/base/legacy_checks/esx_vsphere_sensors.py
+++ b/cmk/base/legacy_checks/esx_vsphere_sensors.py
@@ -17,7 +17,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_esx_vsphere_sensors(info):
diff --git a/cmk/base/legacy_checks/f5_bigip_apm.py b/cmk/base/legacy_checks/f5_bigip_apm.py
index 1bb4e605e2b..7d1b9f99f8d 100644
--- a/cmk/base/legacy_checks/f5_bigip_apm.py
+++ b/cmk/base/legacy_checks/f5_bigip_apm.py
@@ -4,15 +4,14 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.f5_bigip import DETECT
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
-def inventory_f5_bigip_apm(section: StringTable) -> DiscoveryResult:
+def discover_f5_bigip_apm(section: StringTable) -> DiscoveryResult:
if section and section[0][0]:
yield Service()
@@ -35,6 +34,6 @@ def parse_f5_bigip_apm(string_table: StringTable) -> StringTable:
oids=["0"],
),
service_name="SSL/VPN Connections",
- discovery_function=inventory_f5_bigip_apm,
+ discovery_function=discover_f5_bigip_apm,
check_function=check_f5_bigip_apm,
)
diff --git a/cmk/base/legacy_checks/f5_bigip_chassis_temp.py b/cmk/base/legacy_checks/f5_bigip_chassis_temp.py
index bb1e8570d87..b13cdc27466 100644
--- a/cmk/base/legacy_checks/f5_bigip_chassis_temp.py
+++ b/cmk/base/legacy_checks/f5_bigip_chassis_temp.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
def inventory_f5_bigip_chassis_temp(info):
diff --git a/cmk/base/legacy_checks/f5_bigip_conns.py b/cmk/base/legacy_checks/f5_bigip_conns.py
index d09f04c8c21..256ec08d192 100644
--- a/cmk/base/legacy_checks/f5_bigip_conns.py
+++ b/cmk/base/legacy_checks/f5_bigip_conns.py
@@ -12,8 +12,7 @@
from cmk.base.check_legacy_includes.f5_bigip import DETECT, get_conn_rate_params
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree, StringTable
def inventory_f5_bigip_conns(info):
diff --git a/cmk/base/legacy_checks/f5_bigip_cpu_temp.py b/cmk/base/legacy_checks/f5_bigip_cpu_temp.py
index 3d2370620eb..f384329b26b 100644
--- a/cmk/base/legacy_checks/f5_bigip_cpu_temp.py
+++ b/cmk/base/legacy_checks/f5_bigip_cpu_temp.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
def inventory_f5_bigip_cpu_temp(info):
diff --git a/cmk/base/legacy_checks/f5_bigip_interfaces.py b/cmk/base/legacy_checks/f5_bigip_interfaces.py
index 8aa326589dc..3599f4de3c9 100644
--- a/cmk/base/legacy_checks/f5_bigip_interfaces.py
+++ b/cmk/base/legacy_checks/f5_bigip_interfaces.py
@@ -11,8 +11,15 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, get_rate, get_value_store, render, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ any_of,
+ equals,
+ get_rate,
+ get_value_store,
+ render,
+ SNMPTree,
+ StringTable,
+)
# .1.3.6.1.4.1.3375.2.1.2.4.4.3.1.1. index for ifname
# .1.3.6.1.4.1.3375.2.1.2.4.1.2.1.17. index for ifstate
diff --git a/cmk/base/legacy_checks/f5_bigip_mem.py b/cmk/base/legacy_checks/f5_bigip_mem.py
index cb21d30aa63..d5f3c50ff83 100644
--- a/cmk/base/legacy_checks/f5_bigip_mem.py
+++ b/cmk/base/legacy_checks/f5_bigip_mem.py
@@ -10,12 +10,6 @@
from cmk.agent_based.v2 import contains, SNMPTree
-# FIXME
-# The WATO group 'memory_simple' needs an item and the service_description should
-# have a '%s'. At the moment the current item 'total'/'TMM' and 'Memory' without '%s'
-# works but is not consistent. This will be fixed in the future.
-# If we change this we loose history and parameter sets have to be adapted.
-
# Example output:
# Overall memory
# .1.3.6.1.4.1.3375.2.1.7.1.1.0 8396496896 sysHostMemoryTotal
@@ -47,13 +41,15 @@ def parse_f5_bigip_mem(string_table):
def discover_f5_bigip_mem(parsed):
if "total" in parsed:
yield "total", {}
+ if parsed.get("TMM", (0, 0))[0] > 0:
+ yield "TMM", {}
-def check_f5_bigip_mem(_item, params, parsed):
- if "total" not in parsed:
+def check_f5_bigip_mem(item, params, parsed):
+ if item not in parsed:
return None
- mem_total, mem_used = parsed["total"]
+ mem_total, mem_used = parsed[item]
return check_memory_element(
"Usage",
mem_used,
@@ -70,38 +66,9 @@ def check_f5_bigip_mem(_item, params, parsed):
oids=["7.1.1", "7.1.2", "1.2.1.143", "1.2.1.144"],
),
parse_function=parse_f5_bigip_mem,
- service_name="Memory",
+ service_name="Memory %s",
discovery_function=discover_f5_bigip_mem,
check_function=check_f5_bigip_mem,
check_ruleset_name="memory_simple",
check_default_parameters={"levels": ("perc_used", (80.0, 90.0))},
)
-
-
-def discover_f5_bigip_mem_tmm(parsed):
- if parsed.get("TMM", (0, 0))[0] > 0:
- yield "TMM", {}
-
-
-def check_f5_bigip_mem_tmm(_item, params, parsed):
- if "TMM" not in parsed:
- return None
-
- mem_total, mem_used = parsed["TMM"]
- return check_memory_element(
- "Usage",
- mem_used,
- mem_total,
- params.get("levels"),
- metric_name="mem_used",
- )
-
-
-check_info["f5_bigip_mem.tmm"] = LegacyCheckDefinition(
- service_name="Memory",
- sections=["f5_bigip_mem"],
- discovery_function=discover_f5_bigip_mem_tmm,
- check_function=check_f5_bigip_mem_tmm,
- check_ruleset_name="memory_simple",
- check_default_parameters={"levels": ("perc_used", (80.0, 90.0))},
-)
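The two hunks above fold the former f5_bigip_mem.tmm subcheck into the main check: discovery now yields the items total and TMM, the service name gains a %s placeholder, and the check function indexes parsed by item. A self-contained sketch of that flow, with a fabricated parsed dict and the memory math reduced to a simple ratio:

    parsed = {"total": (8_396_496_896, 4_198_248_448), "TMM": (6_442_450_944, 644_245_094)}

    def discover_f5_bigip_mem(parsed):
        if "total" in parsed:
            yield "total", {}
        if parsed.get("TMM", (0, 0))[0] > 0:
            yield "TMM", {}

    for item, _params in discover_f5_bigip_mem(parsed):
        mem_total, mem_used = parsed[item]
        # check_memory_element does the real levels handling; this is just the ratio
        print("Memory %s: %.1f%% used" % (item, 100.0 * mem_used / mem_total))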
diff --git a/cmk/base/legacy_checks/f5_bigip_psu.py b/cmk/base/legacy_checks/f5_bigip_psu.py
index 347112ec403..ff08ed7263b 100644
--- a/cmk/base/legacy_checks/f5_bigip_psu.py
+++ b/cmk/base/legacy_checks/f5_bigip_psu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.f5_bigip import DETECT
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
# Agent / MIB output
# SysChassisPowerSupplyEntry ::=
diff --git a/cmk/base/legacy_checks/f5_bigip_snat.py b/cmk/base/legacy_checks/f5_bigip_snat.py
index 0d5c458a313..032e4d8c313 100644
--- a/cmk/base/legacy_checks/f5_bigip_snat.py
+++ b/cmk/base/legacy_checks/f5_bigip_snat.py
@@ -8,11 +8,11 @@
import time
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.f5_bigip import DETECT
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree
+from cmk.agent_based.v2 import get_rate, get_value_store, render, SNMPTree
def parse_f5_bigip_snat(string_table):
@@ -100,9 +100,7 @@ def check_f5_bigip_snat(item, params, parsed):
value,
param_var,
levels,
- human_readable_func=lambda x, p=param_var: get_bytes_human_readable(x, base=1000.0)
- if "octets" in p
- else str(x),
+ human_readable_func=render.disksize if "octets" in param_var else str,
infoname=map_paramvar_to_text[param_var.rstrip("_lower")],
)
if state:
diff --git a/cmk/base/legacy_checks/fast_lta_headunit.py b/cmk/base/legacy_checks/fast_lta_headunit.py
index 948fbfdfc9d..d0ee62cd20a 100644
--- a/cmk/base/legacy_checks/fast_lta_headunit.py
+++ b/cmk/base/legacy_checks/fast_lta_headunit.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, exists, SNMPTree, startswith, StringTable
def parse_fast_lta_headunit(string_table: Sequence[StringTable]) -> Sequence[StringTable]:
diff --git a/cmk/base/legacy_checks/fast_lta_silent_cubes.py b/cmk/base/legacy_checks/fast_lta_silent_cubes.py
index 8f5c02c97ab..b1b77663c59 100644
--- a/cmk/base/legacy_checks/fast_lta_silent_cubes.py
+++ b/cmk/base/legacy_checks/fast_lta_silent_cubes.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.df import df_check_filesystem_list, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, exists, SNMPTree, startswith, StringTable
def inventory_fast_lta_silent_cubes_status(info):
diff --git a/cmk/base/legacy_checks/fc_port.py b/cmk/base/legacy_checks/fc_port.py
index 5d4e7d30c2f..e5a116c523a 100644
--- a/cmk/base/legacy_checks/fc_port.py
+++ b/cmk/base/legacy_checks/fc_port.py
@@ -22,8 +22,8 @@
render,
SNMPTree,
startswith,
+ StringTable,
)
-from cmk.agent_based.v2.type_defs import StringTable
# Taken from connUnitPortState
# user selected state of the port hardware
diff --git a/cmk/base/legacy_checks/filehandler.py b/cmk/base/legacy_checks/filehandler.py
index e50a87f5539..f05935e5bf9 100644
--- a/cmk/base/legacy_checks/filehandler.py
+++ b/cmk/base/legacy_checks/filehandler.py
@@ -18,7 +18,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_filehandler(info):
diff --git a/cmk/base/legacy_checks/filestats.py b/cmk/base/legacy_checks/filestats.py
index 64670a46b87..a4819e84f89 100644
--- a/cmk/base/legacy_checks/filestats.py
+++ b/cmk/base/legacy_checks/filestats.py
@@ -9,15 +9,11 @@
import ast
import re
-from cmk.base.check_api import (
- check_levels,
- get_age_human_readable,
- get_bytes_human_readable,
- LegacyCheckDefinition,
- state_markers,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition, state_markers
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
# params = {
# "mincount": (tuple, integer),
# "maxcount": -"-,
@@ -112,8 +108,8 @@ def check_filestats_extremes(files, params, show_files=False):
return []
long_output = {}
for key, hr_function, minlabel, maxlabel in (
- ("size", get_bytes_human_readable, "smallest", "largest"),
- ("age", get_age_human_readable, "newest", "oldest"),
+ ("size", render.disksize, "smallest", "largest"),
+ ("age", render.timespan, "newest", "oldest"),
):
files_with_metric = [f for f in files if f.get(key) is not None]
if not files_with_metric:
@@ -148,8 +144,8 @@ def check_filestats_extremes(files, params, show_files=False):
break
if efile["path"] not in long_output:
text = "Age: {}, Size: {}{}".format(
- get_age_human_readable(efile["age"]),
- get_bytes_human_readable(efile["size"]),
+ render.timespan(efile["age"]),
+ render.disksize(efile["size"]),
state_markers[state],
)
long_output[efile["path"]] = text
@@ -167,8 +163,8 @@ def check_filestats_extremes(files, params, show_files=False):
break
if efile["path"] not in long_output:
text = "Age: {}, Size: {}{}".format(
- get_age_human_readable(efile["age"]),
- get_bytes_human_readable(efile["size"]),
+ render.timespan(efile["age"]),
+ render.disksize(efile["size"]),
state_markers[state],
)
long_output[efile["path"]] = text
@@ -265,14 +261,14 @@ def check_filestats_single(item, params, parsed):
return
_output_variety, reported_lines = data
if len(reported_lines) != 1:
- yield 1, "Received multiple filestats per single file service. Please check agent plugin configuration (mk_filestats). For example, if there are multiple non-utf-8 filenames, then they may be mapped to the same file service."
+ yield 1, "Received multiple filestats per single file service. Please check agent plug-in configuration (mk_filestats). For example, if there are multiple non-utf-8 filenames, then they may be mapped to the same file service."
single_stat = [i for i in reported_lines if i.get("type") == "file"][0]
if single_stat.get("size") is None and single_stat.get("age") is None:
yield 0, f'Status: {single_stat.get("stat_status")}'
return
- for key, hr_function in (("size", get_bytes_human_readable), ("age", get_age_human_readable)):
+ for key, hr_function in (("size", render.disksize), ("age", render.timespan)):
if (value := single_stat.get(key)) is None:
continue
diff --git a/cmk/base/legacy_checks/fireeye_active_vms.py b/cmk/base/legacy_checks/fireeye_active_vms.py
index 323acc0156c..55dd552267e 100644
--- a/cmk/base/legacy_checks/fireeye_active_vms.py
+++ b/cmk/base/legacy_checks/fireeye_active_vms.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib import fireeye
diff --git a/cmk/base/legacy_checks/fireeye_bypass.py b/cmk/base/legacy_checks/fireeye_bypass.py
index c11673dc51d..0de590a589e 100644
--- a/cmk/base/legacy_checks/fireeye_bypass.py
+++ b/cmk/base/legacy_checks/fireeye_bypass.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
# .1.3.6.1.4.1.25597.13.1.41.0 0
@@ -16,7 +15,7 @@
# .1.3.6.1.4.1.25597.13.1.43.0 0
-def inventory_bypass(section: StringTable) -> DiscoveryResult:
+def discover_bypass(section: StringTable) -> DiscoveryResult:
if section:
value = int(section[0][0])
yield Service(parameters={"value": value})
@@ -42,6 +41,6 @@ def parse_fireeye_bypass(string_table: StringTable) -> StringTable:
oids=["41"],
),
service_name="Bypass Mail Rate",
- discovery_function=inventory_bypass,
+ discovery_function=discover_bypass,
check_function=check_fireeye_bypass,
)
diff --git a/cmk/base/legacy_checks/fireeye_content.py b/cmk/base/legacy_checks/fireeye_content.py
index 1764de97d86..63b6dff81ca 100644
--- a/cmk/base/legacy_checks/fireeye_content.py
+++ b/cmk/base/legacy_checks/fireeye_content.py
@@ -8,10 +8,10 @@
import time
from collections.abc import Iterable
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
+from cmk.agent_based.v2 import render, SNMPTree
from cmk.plugins.lib.fireeye import DETECT
# .1.3.6.1.4.1.25597.11.5.1.5.0 456.180 --> FE-FIREEYE-MIB::feSecurityContentVersion.0
@@ -63,7 +63,7 @@ def check_fireeye_content(_no_item, params, parsed):
time.time() - parsed.update_time_seconds,
None,
params.get("update_time_levels"),
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname="Age",
)
diff --git a/cmk/base/legacy_checks/fireeye_fans.py b/cmk/base/legacy_checks/fireeye_fans.py
index 6e2ef13b510..28e5ae1c083 100644
--- a/cmk/base/legacy_checks/fireeye_fans.py
+++ b/cmk/base/legacy_checks/fireeye_fans.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fireeye import check_fireeye_states, inventory_fireeye_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
# .1.3.6.1.4.1.25597.11.4.1.3.1.1.1 1 --> FE-FIREEYE-MIB::feFanIndex.1
diff --git a/cmk/base/legacy_checks/fireeye_lic_active.py b/cmk/base/legacy_checks/fireeye_lic_active.py
index 4bdfbeccb06..bbbf8e03b5b 100644
--- a/cmk/base/legacy_checks/fireeye_lic_active.py
+++ b/cmk/base/legacy_checks/fireeye_lic_active.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fireeye import inventory_fireeye_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
# .1.3.6.1.4.1.25597.11.5.1.10.0 1
diff --git a/cmk/base/legacy_checks/fireeye_lic_expiration.py b/cmk/base/legacy_checks/fireeye_lic_expiration.py
index 1a40435f770..d3a4685132b 100644
--- a/cmk/base/legacy_checks/fireeye_lic_expiration.py
+++ b/cmk/base/legacy_checks/fireeye_lic_expiration.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
diff --git a/cmk/base/legacy_checks/fireeye_mail.py b/cmk/base/legacy_checks/fireeye_mail.py
index 79e6c2bb1c0..27c88100070 100644
--- a/cmk/base/legacy_checks/fireeye_mail.py
+++ b/cmk/base/legacy_checks/fireeye_mail.py
@@ -10,8 +10,7 @@
from cmk.base.check_legacy_includes.fireeye import inventory_fireeye_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_average, get_rate, get_value_store, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_average, get_rate, get_value_store, SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
diff --git a/cmk/base/legacy_checks/fireeye_powersupplies.py b/cmk/base/legacy_checks/fireeye_powersupplies.py
index e53e9876dfe..11f1b580ef4 100644
--- a/cmk/base/legacy_checks/fireeye_powersupplies.py
+++ b/cmk/base/legacy_checks/fireeye_powersupplies.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fireeye import check_fireeye_states, inventory_fireeye_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
# .1.3.6.1.4.1.25597.11.3.1.1.0 Good --> FE-FIREEYE-MIB::fePowerSupplyOverallStatus.0
diff --git a/cmk/base/legacy_checks/fireeye_quarantine.py b/cmk/base/legacy_checks/fireeye_quarantine.py
index a338603fbc0..8ddced99ffd 100644
--- a/cmk/base/legacy_checks/fireeye_quarantine.py
+++ b/cmk/base/legacy_checks/fireeye_quarantine.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
# .1.3.6.1.4.1.25597.13.1.40.0 1
diff --git a/cmk/base/legacy_checks/fireeye_smtp_conn.py b/cmk/base/legacy_checks/fireeye_smtp_conn.py
index f9a2dd40e34..7cdfe3a84cf 100644
--- a/cmk/base/legacy_checks/fireeye_smtp_conn.py
+++ b/cmk/base/legacy_checks/fireeye_smtp_conn.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fireeye import inventory_fireeye_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
# .1.3.6.1.4.1.25597.13.1.46.0 8
diff --git a/cmk/base/legacy_checks/fireeye_sys_image.py b/cmk/base/legacy_checks/fireeye_sys_image.py
index 449e9e30c1f..96bc6142e53 100644
--- a/cmk/base/legacy_checks/fireeye_sys_image.py
+++ b/cmk/base/legacy_checks/fireeye_sys_image.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fireeye import inventory_fireeye_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
# .1.3.6.1.4.1.25597.11.5.1.1.0 eMPS (eMPS) 7.6.5.442663 --> FE-FIREEYE-MIB::feInstalledSystemImage.0
diff --git a/cmk/base/legacy_checks/fireeye_temp.py b/cmk/base/legacy_checks/fireeye_temp.py
index bf4b28e8638..7ce0535a69e 100644
--- a/cmk/base/legacy_checks/fireeye_temp.py
+++ b/cmk/base/legacy_checks/fireeye_temp.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.fireeye import DETECT
# .1.3.6.1.4.1.25597.11.1.1.4.0 32 --> FE-FIREEYE-MIB::feTemperatureValue.0
diff --git a/cmk/base/legacy_checks/fortigate_cpu.py b/cmk/base/legacy_checks/fortigate_cpu.py
index 54d75c69685..0f15b078e3b 100644
--- a/cmk/base/legacy_checks/fortigate_cpu.py
+++ b/cmk/base/legacy_checks/fortigate_cpu.py
@@ -5,20 +5,59 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.fortigate_cpu import (
- check_fortigate_cpu,
- inventory_fortigate_cpu,
-)
+from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, exists, not_exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ all_of,
+ contains,
+ DiscoveryResult,
+ exists,
+ not_exists,
+ Service,
+ SNMPTree,
+ StringTable,
+)
def parse_fortigate_cpu(string_table: StringTable) -> StringTable | None:
return string_table or None
+def discover_fortigate_cpu(string_table: StringTable) -> DiscoveryResult:
+ yield Service()
+
+
+def check_fortigate_cpu(item, params, info):
+ if (num_cpus := len(info)) == 0:
+ return None
+
+ util = sum(float(raw_util) for raw_util, *_rest in info) / num_cpus
+
+ state, infotext, perfdata = next(check_cpu_util(util, params))
+ infotext += " at %d CPUs" % num_cpus
+
+ return state, infotext, perfdata
+
+
+check_info["fortigate_cpu_base"] = LegacyCheckDefinition(
+ parse_function=parse_fortigate_cpu,
+ detect=all_of(
+ contains(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.12356.101.1"),
+ exists(".1.3.6.1.4.1.12356.101.4.1.3.0"),
+ ),
+ # uses MIB FORTINET-FORTIGATE-MIB
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.12356.101.4.1",
+ oids=["3"],
+ ),
+ service_name="CPU utilization",
+ discovery_function=discover_fortigate_cpu,
+ check_function=check_fortigate_cpu,
+ check_ruleset_name="cpu_utilization",
+ check_default_parameters={"util": (80.0, 90.0)},
+)
+
check_info["fortigate_cpu"] = LegacyCheckDefinition(
parse_function=parse_fortigate_cpu,
detect=all_of(
@@ -32,7 +71,7 @@ def parse_fortigate_cpu(string_table: StringTable) -> StringTable | None:
oids=["8"],
),
service_name="CPU utilization",
- discovery_function=inventory_fortigate_cpu,
+ discovery_function=discover_fortigate_cpu,
check_function=check_fortigate_cpu,
check_ruleset_name="cpu_utilization",
check_default_parameters={"util": (80.0, 90.0)},
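check_fortigate_cpu above averages the per-core utilization values before applying the usual cpu_util levels. The arithmetic in isolation, with fabricated SNMP rows:

    info = [["12"], ["30"], ["18"]]  # one utilization value per CPU
    num_cpus = len(info)
    util = sum(float(raw_util) for raw_util, *_rest in info) / num_cpus
    print("%.1f%% at %d CPUs" % (util, num_cpus))  # 20.0% at 3 CPUs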
diff --git a/cmk/base/legacy_checks/fortigate_cpu_base.py b/cmk/base/legacy_checks/fortigate_cpu_base.py
deleted file mode 100644
index 7e455e98690..00000000000
--- a/cmk/base/legacy_checks/fortigate_cpu_base.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.fortigate_cpu import (
- check_fortigate_cpu,
- inventory_fortigate_cpu,
-)
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def parse_fortigate_cpu_base(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
-check_info["fortigate_cpu_base"] = LegacyCheckDefinition(
- parse_function=parse_fortigate_cpu_base,
- detect=all_of(
- contains(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.12356.101.1"),
- exists(".1.3.6.1.4.1.12356.101.4.1.3.0"),
- ),
- # uses mib FORTINET-FORTIGATE-MIB,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.12356.101.4.1",
- oids=["3"],
- ),
- service_name="CPU utilization",
- discovery_function=inventory_fortigate_cpu,
- check_function=check_fortigate_cpu,
- check_ruleset_name="cpu_utilization",
- check_default_parameters={"util": (80.0, 90.0)},
-)
diff --git a/cmk/base/legacy_checks/fortigate_ipsecvpn.py b/cmk/base/legacy_checks/fortigate_ipsecvpn.py
index 5dfacec8eed..3ccf33dc769 100644
--- a/cmk/base/legacy_checks/fortigate_ipsecvpn.py
+++ b/cmk/base/legacy_checks/fortigate_ipsecvpn.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.fortinet import DETECT_FORTIGATE
diff --git a/cmk/base/legacy_checks/fortigate_sessions.py b/cmk/base/legacy_checks/fortigate_sessions.py
index 562e86d1502..9cf530ce5e2 100644
--- a/cmk/base/legacy_checks/fortigate_sessions.py
+++ b/cmk/base/legacy_checks/fortigate_sessions.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, exists, not_exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, exists, not_exists, SNMPTree, StringTable
def inventory_fortigate_sessions(info):
diff --git a/cmk/base/legacy_checks/fortigate_sessions_base.py b/cmk/base/legacy_checks/fortigate_sessions_base.py
index 04bfd66c95e..b8280b188d3 100644
--- a/cmk/base/legacy_checks/fortigate_sessions_base.py
+++ b/cmk/base/legacy_checks/fortigate_sessions_base.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree, StringTable
def inventory_fortigate_sessions_base(info):
diff --git a/cmk/base/legacy_checks/fortigate_signatures.py b/cmk/base/legacy_checks/fortigate_signatures.py
deleted file mode 100644
index 95a5cc6b52f..00000000000
--- a/cmk/base/legacy_checks/fortigate_signatures.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# .1.3.6.1.4.1.12356.101.4.2.1.0 27.00768(2015-09-01 15:10)
-# .1.3.6.1.4.1.12356.101.4.2.2.0 6.00689(2015-09-01 00:15)
-
-# signature ages (defaults are 1/2 days)
-
-import time
-
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition, regex
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.fortinet import DETECT_FORTIGATE
-
-
-def parse_fortigate_signatures(string_table):
- if not string_table:
- return None
-
- def parse_version(ver):
- # sample: 27.00768(2015-09-01 15:10)
- ver_regex = regex(r"([0-9.]*)\(([0-9-: ]*)\)")
- match = ver_regex.match(ver)
- if match is None:
- return None, None
- # what timezone is this in?
- t = time.strptime(match.group(2), "%Y-%m-%d %H:%M")
- ts = time.mktime(t)
- return match.group(1), time.time() - ts
-
- parsed = []
- for (key, title), value in zip(
- [
- ("av_age", "AV"),
- ("ips_age", "IPS"),
- ("av_ext_age", "AV extended"),
- ("ips_ext_age", "IPS extended"),
- ],
- string_table[0],
- ):
- version, age = parse_version(value)
- parsed.append((key, title, version, age))
- return parsed
-
-
-def inventory_fortigate_signatures(parsed):
- if parsed:
- return [(None, {})]
- return []
-
-
-def check_fortigate_signatures(_no_item, params, parsed):
- for key, title, version, age in parsed:
- if age is None:
- continue
- infotext = f"[{version}] {title} age: {get_age_human_readable(age)}"
- state = 0
- levels = params.get(key)
- if levels is not None:
- warn, crit = levels
- if crit is not None and age >= crit:
- state = 2
- elif warn is not None and age >= warn:
- state = 1
- if state:
- infotext += " (warn/crit at {}/{})".format(
- get_age_human_readable(warn),
- get_age_human_readable(crit),
- )
- yield state, infotext
-
-
-check_info["fortigate_signatures"] = LegacyCheckDefinition(
- detect=DETECT_FORTIGATE,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.12356.101.4.2",
- oids=["1", "2", "3", "4"],
- ),
- parse_function=parse_fortigate_signatures,
- service_name="Signatures",
- discovery_function=inventory_fortigate_signatures,
- check_function=check_fortigate_signatures,
- check_ruleset_name="fortinet_signatures",
- check_default_parameters={
- "av_age": (86400, 172800),
- "ips_age": (86400, 172800),
- },
-)
diff --git a/cmk/base/legacy_checks/fortinet_controller_aps.py b/cmk/base/legacy_checks/fortinet_controller_aps.py
index 48c4f7fb265..4abbfc5b8df 100644
--- a/cmk/base/legacy_checks/fortinet_controller_aps.py
+++ b/cmk/base/legacy_checks/fortinet_controller_aps.py
@@ -17,10 +17,10 @@
# mypy: disable-error-code="var-annotated"
-from cmk.base.check_api import get_timestamp_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
+from cmk.agent_based.v2 import contains, render, SNMPTree
def parse_fortinet_controller_aps(string_table):
@@ -109,7 +109,7 @@ def check_fortinet_controller_aps(item, params, parsed):
uptime = data["uptime"]
if uptime:
- yield 0, "Up since %s" % get_timestamp_human_readable(uptime), [("uptime", uptime)]
+ yield 0, "Up since %s" % render.datetime(uptime), [("uptime", uptime)]
location = data.get("location")
if location:
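render.datetime, used above in place of get_timestamp_human_readable, formats an epoch timestamp in the local timezone. A one-line sketch, assuming cmk.agent_based.v2 is importable; the exact string depends on the host's timezone:

    from cmk.agent_based.v2 import render

    print(render.datetime(1693526400.0))  # e.g. "Sep 01 2023 02:00:00" (local time)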
diff --git a/cmk/base/legacy_checks/fortisandbox_cpu_util.py b/cmk/base/legacy_checks/fortisandbox_cpu_util.py
index 68642302e88..9fca0c443b7 100644
--- a/cmk/base/legacy_checks/fortisandbox_cpu_util.py
+++ b/cmk/base/legacy_checks/fortisandbox_cpu_util.py
@@ -10,8 +10,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.fortinet import DETECT_FORTISANDBOX
# Nikolas Hagemann, comNET GmbH - nikolas.hagemann@comnetgmbh.com
diff --git a/cmk/base/legacy_checks/fsc_ipmi_mem_status.py b/cmk/base/legacy_checks/fsc_ipmi_mem_status.py
index 26fd947e618..f2174eba64b 100644
--- a/cmk/base/legacy_checks/fsc_ipmi_mem_status.py
+++ b/cmk/base/legacy_checks/fsc_ipmi_mem_status.py
@@ -22,7 +22,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
fsc_ipmi_mem_status_levels = [
# Status Code, Label
@@ -48,7 +48,7 @@ def inventory_fsc_ipmi_mem_status(info):
def check_fsc_ipmi_mem_status(name, _no_params, info):
for line in info:
if line[0] == "E":
- return (3, "Error in agent plugin output (%s)" % " ".join(line[1:]))
+ return (3, "Error in agent plug-in output (%s)" % " ".join(line[1:]))
if line[1] == name:
return fsc_ipmi_mem_status_levels[int(line[2])]
diff --git a/cmk/base/legacy_checks/fsc_sc2_cpu_status.py b/cmk/base/legacy_checks/fsc_sc2_cpu_status.py
index 909067608c0..1673a20931a 100644
--- a/cmk/base/legacy_checks/fsc_sc2_cpu_status.py
+++ b/cmk/base/legacy_checks/fsc_sc2_cpu_status.py
@@ -6,20 +6,52 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.fsc import DETECT_FSC_SC2
-from cmk.base.check_legacy_includes.fsc_sc2 import (
- check_fsc_sc2_cpu_status,
- inventory_fsc_sc2_cpu_status,
-)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
def parse_fsc_sc2_cpu_status(string_table: StringTable) -> StringTable:
return string_table
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.3.1.1 "CPU1"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.3.1.2 "CPU2"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.4.1.1 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.4.1.2 2
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.5.1.1 "Intel(R) Xeon(R) CPU E5-2620 v2 @ 2.10GHz"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.5.1.2 ""
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.8.1.1 2100
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.8.1.2 0
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.13.1.1 6
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.4.1.13.1.2 0
+
+
+def inventory_fsc_sc2_cpu_status(info):
+ for line in info:
+ if line[1] != "2":
+ yield line[0], None
+
+
+def check_fsc_sc2_cpu_status(item, _no_params, info):
+ def get_cpu_status(status):
+ return {
+ "1": (3, "unknown"),
+ "2": (3, "not-present"),
+ "3": (0, "ok"),
+ "4": (0, "disabled"),
+ "5": (2, "error"),
+ "6": (2, "failed"),
+ "7": (1, "missing-termination"),
+ "8": (1, "prefailure-warning"),
+ }.get(status, (3, "unknown"))
+
+ for designation, status, model, speed, cores in info:
+ if designation == item:
+ status_state, status_txt = get_cpu_status(status)
+ return status_state, f"Status is {status_txt}, {model}, {cores} cores @ {speed} MHz"
+
+
check_info["fsc_sc2_cpu_status"] = LegacyCheckDefinition(
parse_function=parse_fsc_sc2_cpu_status,
detect=DETECT_FSC_SC2,
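The previously included inventory/check pair for fsc_sc2_cpu_status is now inlined above. A toy run of the status mapping against one fabricated SNMP row (mapping shortened to three entries):

    def get_cpu_status(status):
        return {"3": (0, "ok"), "5": (2, "error"), "6": (2, "failed")}.get(status, (3, "unknown"))

    designation, status, model, speed, cores = "CPU1", "3", "Intel(R) Xeon(R)", "2100", "6"
    state, txt = get_cpu_status(status)
    print(state, "Status is %s, %s, %s cores @ %s MHz" % (txt, model, cores, speed))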
diff --git a/cmk/base/legacy_checks/fsc_sc2_fans.py b/cmk/base/legacy_checks/fsc_sc2_fans.py
index eddfc79c97a..8fc7f88bcd6 100644
--- a/cmk/base/legacy_checks/fsc_sc2_fans.py
+++ b/cmk/base/legacy_checks/fsc_sc2_fans.py
@@ -5,22 +5,70 @@
from cmk.base.check_api import LegacyCheckDefinition
+from cmk.base.check_legacy_includes.fan import check_fan
from cmk.base.check_legacy_includes.fsc import DETECT_FSC_SC2
-from cmk.base.check_legacy_includes.fsc_sc2 import (
- check_fsc_sc2_fans,
- FAN_FSC_SC2_CHECK_DEFAULT_PARAMETERS,
- inventory_fsc_sc2_fans,
-)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
def parse_fsc_sc2_fans(string_table: StringTable) -> StringTable:
return string_table
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.1 "FAN1 SYS"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.2 "FAN2 SYS"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.3 "FAN3 SYS"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.4 "FAN4 SYS"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.5 "FAN5 SYS"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.6 "FAN PSU1"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.3.1.7 "FAN PSU2"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.1 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.2 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.3 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.4 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.5 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.6 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.5.1.7 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.1 5820
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.2 6000
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.3 6000
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.4 6000
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.5 6120
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.6 2400
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.2.1.6.1.7 2400
+
+
+def inventory_fsc_sc2_fans(info):
+ for line in info:
+        if line[1] != "8":
+ yield line[0], {}
+
+
+def check_fsc_sc2_fans(item, params, info):
+ status_map = {
+ "1": (3, "Status is unknown"),
+ "2": (0, "Status is disabled"),
+ "3": (0, "Status is ok"),
+ "4": (2, "Status is failed"),
+ "5": (1, "Status is prefailure-predicted"),
+ "6": (1, "Status is redundant-fan-failed"),
+ "7": (3, "Status is not-manageable"),
+ "8": (0, "Status is not-present"),
+ }
+
+ if isinstance(params, tuple):
+ params = {"lower": params}
+
+ for designation, status, rpm in info:
+ if designation == item:
+ yield status_map.get(status, (3, "Status is unknown"))
+ if rpm:
+ yield check_fan(int(rpm), params)
+ else:
+ yield 0, "Device did not deliver RPM values"
+
+
check_info["fsc_sc2_fans"] = LegacyCheckDefinition(
parse_function=parse_fsc_sc2_fans,
detect=DETECT_FSC_SC2,
@@ -32,5 +80,7 @@ def parse_fsc_sc2_fans(string_table: StringTable) -> StringTable:
discovery_function=inventory_fsc_sc2_fans,
check_function=check_fsc_sc2_fans,
check_ruleset_name="hw_fans",
- check_default_parameters=FAN_FSC_SC2_CHECK_DEFAULT_PARAMETERS,
+ check_default_parameters={
+ "lower": (1500, 2000),
+ },
)
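
The isinstance shim above migrates pre-dict rule values on the fly. A hedged sketch of that normalization, with an illustrative legacy tuple:

legacy_params = (1500, 2000)  # old format: (warn_lower, crit_lower) in RPM
params = {"lower": legacy_params} if isinstance(legacy_params, tuple) else legacy_params
assert params == {"lower": (1500, 2000)}  # the shape check_fan expects
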
diff --git a/cmk/base/legacy_checks/fsc_sc2_info.py b/cmk/base/legacy_checks/fsc_sc2_info.py
index 3a1b9c803da..8885a7b53e3 100644
--- a/cmk/base/legacy_checks/fsc_sc2_info.py
+++ b/cmk/base/legacy_checks/fsc_sc2_info.py
@@ -6,17 +6,34 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.fsc import DETECT_FSC_SC2
-from cmk.base.check_legacy_includes.fsc_sc2 import check_fsc_sc2_info, inventory_fsc_sc2_info
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
+
+# .1.3.6.1.4.1.231.2.10.2.2.10.2.3.1.5.1 "PRIMERGY RX300 S8"
+# .1.3.6.1.4.1.231.2.10.2.2.10.2.3.1.7.1 "--"
+# .1.3.6.1.4.1.231.2.10.2.2.10.4.1.1.11.1 "V4.6.5.4 R1.6.0 for D2939-B1x"
def parse_fsc_sc2_info(string_table: StringTable) -> StringTable:
return string_table
+def inventory_fsc_sc2_info(info):
+ if info:
+ return [(None, None)]
+ return []
+
+
+def check_fsc_sc2_info(_no_item, _no_params, info):
+ if info:
+ return (
+ 0,
+ f"Model: {info[0][0]}, Serial Number: {info[0][1]}, BIOS Version: {info[0][2]}",
+ )
+ return None
+
+
check_info["fsc_sc2_info"] = LegacyCheckDefinition(
parse_function=parse_fsc_sc2_info,
detect=DETECT_FSC_SC2,
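
For reference, the [(None, None)] idiom above discovers exactly one item-less service whenever the SNMP table is non-empty — a sketch assuming inventory_fsc_sc2_info from the hunk above:

assert inventory_fsc_sc2_info([["PRIMERGY RX300 S8", "--", "V4.6.5.4 R1.6.0"]]) == [(None, None)]
assert inventory_fsc_sc2_info([]) == []
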
diff --git a/cmk/base/legacy_checks/fsc_sc2_mem_status.py b/cmk/base/legacy_checks/fsc_sc2_mem_status.py
index cf10a6abbb9..576a2d85302 100644
--- a/cmk/base/legacy_checks/fsc_sc2_mem_status.py
+++ b/cmk/base/legacy_checks/fsc_sc2_mem_status.py
@@ -6,20 +6,69 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.fsc import DETECT_FSC_SC2
-from cmk.base.check_legacy_includes.fsc_sc2 import (
- check_fsc_sc2_mem_status,
- inventory_fsc_sc2_mem_status,
-)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
def parse_fsc_sc2_mem_status(string_table: StringTable) -> StringTable:
return string_table
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.1 "DIMM-1A"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.2 "DIMM-2A"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.3 "DIMM-3A"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.4 "DIMM-1B"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.5 "DIMM-2B"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.6 "DIMM-3B"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.7 "DIMM-1C"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.8 "DIMM-2C"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.3.1.9 "DIMM-3C"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.1 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.2 2
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.3 2
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.4 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.5 2
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.6 2
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.7 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.8 2
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.4.1.9 2
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.1 4096
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.2 -1
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.3 -1
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.4 4096
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.5 -1
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.6 -1
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.7 4096
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.8 -1
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.5.1.6.1.9 -1
+
+
+def inventory_fsc_sc2_mem_status(info):
+ for line in info:
+ if line[1] != "2":
+ yield line[0], None
+
+
+def check_fsc_sc2_mem_status(item, _no_params, info):
+ def get_mem_status(status):
+ return {
+ "1": (3, "unknown"),
+ "2": (3, "not-present"),
+ "3": (0, "ok"),
+ "4": (0, "disabled"),
+ "5": (2, "error"),
+ "6": (2, "failed"),
+ "7": (1, "prefailure-predicted"),
+ "11": (0, "hidden"),
+ }.get(status, (3, "unknown"))
+
+ for designation, status, capacity in info:
+ if designation == item:
+ status_state, status_txt = get_mem_status(status)
+ return status_state, f"Status is {status_txt}, Size {capacity} MB"
+
+
check_info["fsc_sc2_mem_status"] = LegacyCheckDefinition(
parse_function=parse_fsc_sc2_mem_status,
detect=DETECT_FSC_SC2,
diff --git a/cmk/base/legacy_checks/fsc_sc2_power_consumption.py b/cmk/base/legacy_checks/fsc_sc2_power_consumption.py
index a63f05f82b6..c5421e424cf 100644
--- a/cmk/base/legacy_checks/fsc_sc2_power_consumption.py
+++ b/cmk/base/legacy_checks/fsc_sc2_power_consumption.py
@@ -7,11 +7,40 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.elphase import check_elphase
from cmk.base.check_legacy_includes.fsc import DETECT_FSC_SC2
-from cmk.base.check_legacy_includes.fsc_sc2 import parse_fsc_sc2_power_consumption
from cmk.base.config import check_info
from cmk.agent_based.v2 import SNMPTree
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.3.1 "CPU1 Power"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.3.2 "CPU2 Power"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.4.1 "HDD Power"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.7.1 "System Power"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.10.1 "PSU1 Power"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.10.2 "PSU2 Power"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.224.1 "Total Power"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.4.1.224.2 "Total Power Out"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.3.1 5
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.3.2 0
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.4.1 8
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.7.1 50
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.10.1 52
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.10.2 40
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.224.1 92
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.7.1.5.1.224.2 68
+
+
+def parse_fsc_sc2_power_consumption(info):
+    parsed: dict[str, dict[str, int | tuple[int, str]]] = {}
+ for designation, value in info:
+ # sometimes the device does not return a value
+ if not value:
+ parsed.setdefault(
+ designation, {"device_state": (3, "Error on device while reading value")}
+ )
+ else:
+ parsed.setdefault(designation, {"power": int(value)})
+ return parsed
+
def discover_fsc_sc2_power_consumption(section):
yield from ((item, {}) for item in section)
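
A sketch of the parse result, assuming parse_fsc_sc2_power_consumption from the hunk above; rows with an empty reading become a device_state entry for check_elphase, everything else a power entry (values from the walk excerpt):

section = parse_fsc_sc2_power_consumption([["CPU1 Power", "5"], ["CPU2 Power", ""]])
assert section["CPU1 Power"] == {"power": 5}
assert section["CPU2 Power"] == {"device_state": (3, "Error on device while reading value")}
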
diff --git a/cmk/base/legacy_checks/fsc_sc2_psu.py b/cmk/base/legacy_checks/fsc_sc2_psu.py
index 72c597ede94..b41d90dca11 100644
--- a/cmk/base/legacy_checks/fsc_sc2_psu.py
+++ b/cmk/base/legacy_checks/fsc_sc2_psu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fsc import DETECT_FSC_SC2
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
# .1.3.6.1.4.1.231.2.10.2.2.10.6.2.1.3.1.1 "PSU1"
# .1.3.6.1.4.1.231.2.10.2.2.10.6.2.1.3.1.2 "PSU2"
diff --git a/cmk/base/legacy_checks/fsc_sc2_temp.py b/cmk/base/legacy_checks/fsc_sc2_temp.py
index f30d1ad6155..ec459c094b9 100644
--- a/cmk/base/legacy_checks/fsc_sc2_temp.py
+++ b/cmk/base/legacy_checks/fsc_sc2_temp.py
@@ -6,17 +6,106 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.fsc import DETECT_FSC_SC2
-from cmk.base.check_legacy_includes.fsc_sc2 import check_fsc_sc2_temp, inventory_fsc_sc2_temp
+from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
+
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.1 "Ambient"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.2 "Systemboard 1"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.3 "Systemboard 2"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.4 "CPU1"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.5 "CPU2"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.6 "MEM A"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.7 "MEM B"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.8 "MEM C"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.3.1.9 "MEM D"
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.1 8
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.2 8
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.3 8
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.4 8
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.5 2
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.6 8
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.7 8
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.8 8
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.5.1.9 8
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.1 26
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.2 27
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.3 33
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.4 27
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.5 0
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.6 28
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.7 28
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.8 27
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.6.1.9 27
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.1 37
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.2 75
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.3 75
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.4 77
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.5 89
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.6 78
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.7 78
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.8 78
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.7.1.9 78
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.1 42
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.2 80
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.3 80
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.4 81
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.5 93
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.6 82
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.7 82
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.8 82
+# .1.3.6.1.4.1.231.2.10.2.2.10.5.1.1.8.1.9 82
def parse_fsc_sc2_temp(string_table: StringTable) -> StringTable:
return string_table
+def inventory_fsc_sc2_temp(info):
+ for line in info:
+ if line[1] != "2":
+ yield line[0], {}
+
+
+def check_fsc_sc2_temp(item, params, info):
+ temp_status = {
+ "1": (3, "unknown"),
+ "2": (0, "not-available"),
+ "3": (0, "ok"),
+ "4": (2, "sensor-failed"),
+ "5": (2, "failed"),
+ "6": (1, "temperature-warning-toohot"),
+ "7": (2, "temperature-critical-toohot"),
+ "8": (0, "temperature-normal"),
+ "9": (1, "temperature-warning"),
+ }
+
+ for designation, status, temp, dev_warn, dev_crit in info:
+ if designation == item:
+ if not temp:
+ return 3, "Did not receive temperature data"
+
+ dev_status, dev_status_name = temp_status.get(status, (3, "unknown"))
+
+ if not dev_warn or not dev_crit:
+ return 3, "Did not receive device levels"
+
+ dev_levels = int(dev_warn), int(dev_crit)
+
+ return check_temperature(
+ int(temp),
+ params,
+ "temp_{}".format(item.replace(" ", "_")),
+ "c",
+ dev_levels,
+ None,
+ dev_status,
+ dev_status_name,
+ )
+ return None
+
+
check_info["fsc_sc2_temp"] = LegacyCheckDefinition(
parse_function=parse_fsc_sc2_temp,
detect=DETECT_FSC_SC2,
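
A hedged sketch of the temperature data flow, assuming check_fsc_sc2_temp from the hunk above is in scope; the row is the "Ambient" sensor from the walk excerpt (columns: designation, status, temperature, device warn, device crit):

row = ["Ambient", "8", "26", "37", "42"]
result = check_fsc_sc2_temp("Ambient", {}, [row])
# check_temperature is handed dev_levels=(37, 42) and the device status
# "temperature-normal", so user-configured params can still override both.
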
diff --git a/cmk/base/legacy_checks/fsc_sc2_voltage.py b/cmk/base/legacy_checks/fsc_sc2_voltage.py
index f61be43925a..0a9ee599aec 100644
--- a/cmk/base/legacy_checks/fsc_sc2_voltage.py
+++ b/cmk/base/legacy_checks/fsc_sc2_voltage.py
@@ -3,14 +3,100 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
+from collections.abc import Mapping
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.elphase import check_elphase
from cmk.base.check_legacy_includes.fsc import DETECT_FSC_SC2
-from cmk.base.check_legacy_includes.fsc_sc2 import parse_fsc_sc2_voltage
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
+from cmk.agent_based.v2 import SNMPTree, StringTable
+
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.1 "BATT 3.0V"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.2 "STBY 12V"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.3 "STBY 5V"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.4 "STBY 3.3V"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.5 "LAN 1.8V STBY"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.6 "iRMC 1.5V STBY"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.7 "LAN 1.0V STBY"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.8 "MAIN 12V"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.3.1.9 "MAIN 5V"
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.1 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.2 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.3 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.4 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.5 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.6 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.7 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.8 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.4.1.9 3
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.1 3270
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.2 11880
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.3 5100
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.4 3350
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.5 1800
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.6 1460
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.7 980
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.8 12160
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.5.1.9 4980
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.1 2010
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.2 11280
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.3 4630
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.4 3020
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.5 1670
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.6 1390
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.7 930
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.8 11310
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.7.1.9 4630
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.1 3500
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.2 12960
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.3 5420
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.4 3570
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.5 1930
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.6 1610
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.7 1080
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.8 12900
+# .1.3.6.1.4.1.231.2.10.2.2.10.6.3.1.8.1.9 5420
+
+Section = Mapping[str, Mapping[str, float | tuple[int, str] | tuple[float, tuple[int, str]]]]
+
+
+def parse_fsc_sc2_voltage(string_table: StringTable) -> Section:
+ # dev_state:
+ # sc2VoltageStatus OBJECT-TYPE
+ # SYNTAX INTEGER
+ # {
+ # unknown(1),
+ # not-available(2),
+ # ok(3),
+ # too-low(4),
+ # too-high(5),
+ # sensor-failed(6)
+ # }
+ # ACCESS read-only
+ # STATUS mandatory
+ # DESCRIPTION "Voltage status"
+ # ::= { sc2Voltages 4 }
+
+ parsed: dict[str, dict[str, float | tuple[int, str] | tuple[float, tuple[int, str]]]] = {}
+ for designation, dev_state, r_value, r_min_value, r_max_value in string_table:
+ if dev_state == "2":
+ continue
+ try:
+ value = float(r_value) / 1000.0
+ min_value = float(r_min_value) / 1000.0
+ max_value = float(r_max_value) / 1000.0
+ except ValueError:
+ parsed.setdefault(designation, {"device_state": (3, "Could not get all values")})
+ continue
+
+ state_info: float | tuple[float, tuple[int, str]] = value
+ if value < min_value:
+ state_info = value, (2, "too low, deceeds %.2f V" % min_value)
+ elif value >= max_value:
+ state_info = value, (2, "too high, exceeds %.2f V" % max_value)
+ parsed.setdefault(designation, {"voltage": state_info})
+ return parsed
def discover_fsc_sc2_voltage(section):
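
A sketch of the voltage parse result, assuming parse_fsc_sc2_voltage from the hunk above; the row is the first sensor from the walk excerpt (readings in millivolts; dev_state "2" rows are skipped entirely):

section = parse_fsc_sc2_voltage([["BATT 3.0V", "3", "3270", "2010", "3500"]])
assert section == {"BATT 3.0V": {"voltage": 3.27}}
# A reading at or above the maximum would instead carry an embedded state:
# {"voltage": (3.5, (2, "too high, exceeds 3.50 V"))}
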
diff --git a/cmk/base/legacy_checks/fsc_subsystems.py b/cmk/base/legacy_checks/fsc_subsystems.py
index 78ee3d50c92..89dc3490877 100644
--- a/cmk/base/legacy_checks/fsc_subsystems.py
+++ b/cmk/base/legacy_checks/fsc_subsystems.py
@@ -4,14 +4,22 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ all_of,
+ any_of,
+ DiscoveryResult,
+ exists,
+ Service,
+ SNMPTree,
+ startswith,
+ StringTable,
+)
-def inventory_fsc_subsystems(string_table: StringTable) -> DiscoveryResult:
+def discover_fsc_subsystems(string_table: StringTable) -> DiscoveryResult:
yield from (Service(item=line[0]) for line in string_table if int(line[1]) > 0)
@@ -52,6 +60,6 @@ def parse_fsc_subsystems(string_table: StringTable) -> StringTable:
oids=["2", "3"],
),
service_name="FSC %s",
- discovery_function=inventory_fsc_subsystems,
+ discovery_function=discover_fsc_subsystems,
check_function=check_fsc_subsystems,
)
diff --git a/cmk/base/legacy_checks/fsc_temp.py b/cmk/base/legacy_checks/fsc_temp.py
index df310c5dd0e..3c37d0b1728 100644
--- a/cmk/base/legacy_checks/fsc_temp.py
+++ b/cmk/base/legacy_checks/fsc_temp.py
@@ -4,12 +4,21 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, exists, not_exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ all_of,
+ any_of,
+ DiscoveryResult,
+ exists,
+ not_exists,
+ Service,
+ SNMPTree,
+ startswith,
+ StringTable,
+)
# We fetch the following columns from SNMP:
# 13: name of the temperature sensor (used as item)
@@ -18,7 +27,7 @@
# 8: critical level
-def inventory_fsc_temp(string_table: StringTable) -> DiscoveryResult:
+def discover_fsc_temp(string_table: StringTable) -> DiscoveryResult:
# Ignore non-connected sensors
yield from (Service(item=line[0]) for line in string_table if int(line[1]) < 500)
@@ -58,7 +67,7 @@ def parse_fsc_temp(string_table: StringTable) -> StringTable:
oids=["13", "11", "6", "8"],
),
service_name="Temperature %s",
- discovery_function=inventory_fsc_temp,
+ discovery_function=discover_fsc_temp,
check_function=check_fsc_temp,
check_ruleset_name="temperature",
)
diff --git a/cmk/base/legacy_checks/genua_carp.py b/cmk/base/legacy_checks/genua_carp.py
index 6fb8f77e4b3..f4974476c62 100644
--- a/cmk/base/legacy_checks/genua_carp.py
+++ b/cmk/base/legacy_checks/genua_carp.py
@@ -8,8 +8,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.genua import DETECT_GENUA
# Example Agent Output:
diff --git a/cmk/base/legacy_checks/genua_fan.py b/cmk/base/legacy_checks/genua_fan.py
index 6acc3489d0c..d378dbae193 100644
--- a/cmk/base/legacy_checks/genua_fan.py
+++ b/cmk/base/legacy_checks/genua_fan.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.fan import check_fan
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.genua import DETECT_GENUA
diff --git a/cmk/base/legacy_checks/genua_pfstate.py b/cmk/base/legacy_checks/genua_pfstate.py
index 0205151b36d..a0cef353ff0 100644
--- a/cmk/base/legacy_checks/genua_pfstate.py
+++ b/cmk/base/legacy_checks/genua_pfstate.py
@@ -5,17 +5,10 @@
from collections.abc import Sequence
-from cmk.base.check_api import (
- check_levels,
- DiscoveryResult,
- LegacyCheckDefinition,
- saveint,
- Service,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.genua import DETECT_GENUA
# Example Agent Output:
@@ -25,7 +18,7 @@
# .1.3.6.1.4.1.3717.2.1.1.6.3 = INTEGER: 1
-def inventory_genua_pfstate(string_table: StringTable) -> DiscoveryResult:
+def discover_genua_pfstate(string_table: StringTable) -> DiscoveryResult:
    # remove empty elements due to the alternative enterprise ID in snmp_info
string_table = [_f for _f in string_table if _f]
@@ -90,7 +83,7 @@ def parse_genua_pfstate(string_table: Sequence[StringTable]) -> Sequence[StringT
),
],
service_name="Paketfilter Status",
- discovery_function=inventory_genua_pfstate,
+ discovery_function=discover_genua_pfstate,
check_function=check_genua_pfstate,
check_ruleset_name="pf_used_states",
check_default_parameters={"used": None},
diff --git a/cmk/base/legacy_checks/genua_state_correlation.py b/cmk/base/legacy_checks/genua_state_correlation.py
index d7ded8c84d3..ca9c9e0cc6c 100644
--- a/cmk/base/legacy_checks/genua_state_correlation.py
+++ b/cmk/base/legacy_checks/genua_state_correlation.py
@@ -8,8 +8,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.genua import DETECT_GENUA
# Example Agent Output:
diff --git a/cmk/base/legacy_checks/genua_vpn.py b/cmk/base/legacy_checks/genua_vpn.py
index 9190eab647d..53e3bcd820c 100644
--- a/cmk/base/legacy_checks/genua_vpn.py
+++ b/cmk/base/legacy_checks/genua_vpn.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.genua import DETECT_GENUA
# .1.3.6.1.4.1.3717.2.1.3.1.1.1 1
diff --git a/cmk/base/legacy_checks/graylog_cluster_stats.py b/cmk/base/legacy_checks/graylog_cluster_stats.py
index f4ab971f27c..1bb8d9571b0 100644
--- a/cmk/base/legacy_checks/graylog_cluster_stats.py
+++ b/cmk/base/legacy_checks/graylog_cluster_stats.py
@@ -8,9 +8,10 @@
from collections.abc import Iterable
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
from cmk.plugins.lib.graylog import deserialize_and_merge_json, GraylogSection
# <<<graylog_cluster_stats>>>
@@ -162,9 +163,9 @@ def check_graylog_cluster_stats_elastic( # pylint: disable=too-many-branches
if indice_data:
for section, info, hr_func in [
("index_count", "Index count", int),
- ("store_size", "Store size", get_bytes_human_readable),
- ("id_cache_size", "ID cache size", get_bytes_human_readable),
- ("field_data_size", "Field data size", get_bytes_human_readable),
+ ("store_size", "Store size", render.bytes),
+ ("id_cache_size", "ID cache size", render.bytes),
+ ("field_data_size", "Field data size", render.bytes),
]:
indice_value = indice_data.get(section)
if indice_value is None:
@@ -224,18 +225,18 @@ def check_graylog_cluster_stats_mongodb(_no_item, params, parsed):
"storage_size",
"Allocated storage",
"mongodb_collection_storage_size",
- get_bytes_human_readable,
+ render.bytes,
),
- ("index_size", "Total size", "indexes_size", get_bytes_human_readable),
+ ("index_size", "Total size", "indexes_size", render.bytes),
(
"data_size",
"Total size of uncompressed data",
"mongodb_collection_size",
- get_bytes_human_readable,
+ render.bytes,
),
- ("file_size", "Total data files size", "file_size", get_bytes_human_readable),
- ("ns_size_mb", "Total namespace size", "namespace_size", get_bytes_human_readable),
- ("avg_obj_size", "Average document size", "avg_doc_size", get_bytes_human_readable),
+ ("file_size", "Total data files size", "file_size", render.bytes),
+ ("ns_size_mb", "Total namespace size", "namespace_size", render.bytes),
+ ("avg_obj_size", "Average document size", "avg_doc_size", render.bytes),
("num_extents", "Number of extents", "num_extents", int),
("collections", "Number of collections", "num_collections", int),
("objects", "Number of objects", "num_objects", int),
diff --git a/cmk/base/legacy_checks/graylog_cluster_traffic.py b/cmk/base/legacy_checks/graylog_cluster_traffic.py
index 065e423db45..e3b517e33e3 100644
--- a/cmk/base/legacy_checks/graylog_cluster_traffic.py
+++ b/cmk/base/legacy_checks/graylog_cluster_traffic.py
@@ -8,14 +8,10 @@
import time
from collections.abc import Iterable
-from cmk.base.check_api import (
- check_levels,
- get_bytes_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
from cmk.plugins.lib.graylog import deserialize_and_merge_json, GraylogSection
# <<<graylog_cluster_traffic>>>
@@ -55,14 +51,14 @@ def check_graylog_cluster_traffic(_no_item, params, parsed):
"graylog_%s" % key,
params.get(key),
infoname=infotext,
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
)
last_updated = parsed.get("to")
if last_updated is not None:
local_timestamp = calendar.timegm(time.strptime(last_updated, "%Y-%m-%dT%H:%M:%S.%fZ"))
- yield 0, "Last updated: %s" % get_timestamp_human_readable(local_timestamp)
+ yield 0, "Last updated: %s" % render.datetime(local_timestamp)
check_info["graylog_cluster_traffic"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/graylog_jvm.py b/cmk/base/legacy_checks/graylog_jvm.py
index 0dd6d52b26f..0d575ae2106 100644
--- a/cmk/base/legacy_checks/graylog_jvm.py
+++ b/cmk/base/legacy_checks/graylog_jvm.py
@@ -6,9 +6,10 @@
from collections.abc import Iterable
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
from cmk.plugins.lib.graylog import deserialize_and_merge_json, GraylogSection
# <<<graylog_jvm>>>
@@ -40,7 +41,7 @@ def check_graylog_jvm(_no_item, params, parsed):
mem_data,
metric_name,
params.get(key),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="%s heap space" % key.title(),
)
if not has_mem_data:
diff --git a/cmk/base/legacy_checks/graylog_license.py b/cmk/base/legacy_checks/graylog_license.py
index 11851ed3b1c..a045c34d665 100644
--- a/cmk/base/legacy_checks/graylog_license.py
+++ b/cmk/base/legacy_checks/graylog_license.py
@@ -6,18 +6,15 @@
import time
-from cmk.base.check_api import (
- check_levels,
- get_age_human_readable,
- get_bytes_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.graylog import (
handle_iso_utc_to_localtimestamp,
parse_graylog_agent_data,
)
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
# <<<graylog_license>>>
# {"status": [{"violated": true,"expired": false,"expiration_upcoming":
# false,"expired_since": "PT0S","expires_in": "PT550H17.849S","trial":
@@ -75,7 +72,7 @@ def check_graylog_license(_no_item, params, parsed):
traffic_limit = license_data.get("license", {}).get("enterprise", {}).get("traffic_limit")
if traffic_limit is not None:
- yield 0, "Traffic limit: %s" % get_bytes_human_readable(traffic_limit)
+ yield 0, "Traffic limit: %s" % render.bytes(traffic_limit)
expires = license_data.get("license", {}).get("expiration_date")
if expires is not None:
@@ -86,7 +83,7 @@ def check_graylog_license(_no_item, params, parsed):
time_to_expiration,
None,
(None, None, warn, crit),
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.time_offset,
infoname="Expires in",
)
diff --git a/cmk/base/legacy_checks/graylog_sidecars.py b/cmk/base/legacy_checks/graylog_sidecars.py
index ac889772d2d..9a8523e469e 100644
--- a/cmk/base/legacy_checks/graylog_sidecars.py
+++ b/cmk/base/legacy_checks/graylog_sidecars.py
@@ -8,15 +8,12 @@
import time
-from cmk.base.check_api import (
- check_levels,
- get_age_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.graylog import handle_iso_utc_to_localtimestamp, json
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
# <<<graylog_sidecars>>>
# {"sort": "node_name", "pagination": {"count": 1, "per_page": 50, "total": 1,
# "page": 1}, "sidecars": [{"co llectors": null, "node_name": "testserver",
@@ -85,13 +82,13 @@ def check_graylog_sidecars(item, params, parsed): # pylint: disable=too-many-br
local_timestamp = handle_iso_utc_to_localtimestamp(last_seen)
age = time.time() - local_timestamp
- yield 0, "Last seen: %s" % get_timestamp_human_readable(local_timestamp)
+ yield 0, "Last seen: %s" % render.datetime(local_timestamp)
yield check_levels(
age,
None,
params.get("last_seen"),
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname="Before",
)
diff --git a/cmk/base/legacy_checks/graylog_sources.py b/cmk/base/legacy_checks/graylog_sources.py
index 48bc1583a0f..a967bc9bcfe 100644
--- a/cmk/base/legacy_checks/graylog_sources.py
+++ b/cmk/base/legacy_checks/graylog_sources.py
@@ -11,11 +11,11 @@
from dataclasses import dataclass
from typing import Any
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.graylog import handle_graylog_messages
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
# <<<graylog_sources>>>
# {"sources": {"172.18.0.1": {"messages": 457, "has_since": false}}}
@@ -74,8 +74,7 @@ def _handle_graylog_sources_messages(item_data: SourceInfo, params: Mapping[str,
item_data.num_messages_in_timespan,
"graylog_diff",
params.get("msgs_diff_upper", (None, None)) + params.get("msgs_diff_lower", (None, None)),
- infoname="Total number of messages in the last "
- + get_age_human_readable(item_data.timespan),
+ infoname=f"Total number of messages in the last {render.timespan(item_data.timespan)}",
human_readable_func=int,
)
diff --git a/cmk/base/legacy_checks/gude_humidity.py b/cmk/base/legacy_checks/gude_humidity.py
index 87542230404..8eb52ffa4ec 100644
--- a/cmk/base/legacy_checks/gude_humidity.py
+++ b/cmk/base/legacy_checks/gude_humidity.py
@@ -6,12 +6,19 @@
from collections.abc import Iterable, Mapping
from itertools import chain
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.humidity import check_humidity
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, OIDEnd, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ any_of,
+ DiscoveryResult,
+ OIDEnd,
+ Service,
+ SNMPTree,
+ startswith,
+ StringTable,
+)
# 19:1100, 38:822X
# .1.3.6.1.4.1.28507.**.1.6.1.1.3.1 498 --> GUDEADS-EPC****-MIB::epc****HygroSensor.1
@@ -27,7 +34,7 @@ def parse_gude_humidity(string_table: list[StringTable]) -> Section:
}
-def inventory_gude_humidity(section: Section) -> DiscoveryResult:
+def discover_gude_humidity(section: Section) -> DiscoveryResult:
yield from (Service(item=name) for name, reading in section.items() if reading != -999.9)
@@ -55,7 +62,7 @@ def check_gude_humidity(
],
parse_function=parse_gude_humidity,
service_name="Humidity %s",
- discovery_function=inventory_gude_humidity,
+ discovery_function=discover_gude_humidity,
check_function=check_gude_humidity,
check_ruleset_name="humidity",
check_default_parameters={
diff --git a/cmk/base/legacy_checks/gude_temp.py b/cmk/base/legacy_checks/gude_temp.py
index ca1f95ac909..36b6e519ed7 100644
--- a/cmk/base/legacy_checks/gude_temp.py
+++ b/cmk/base/legacy_checks/gude_temp.py
@@ -10,8 +10,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature, TempParamType
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, OIDEnd, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, OIDEnd, SNMPTree, startswith, StringTable
# 19:1100, 38:822X
# .1.3.6.1.4.1.28507.**.1.6.1.1.2.1 225 --> GUDEADS-EPC****-MIB::epc****TempSensor.1
diff --git a/cmk/base/legacy_checks/h3c_lanswitch_cpu.py b/cmk/base/legacy_checks/h3c_lanswitch_cpu.py
index 580b31be8fd..f41d05e4fa7 100644
--- a/cmk/base/legacy_checks/h3c_lanswitch_cpu.py
+++ b/cmk/base/legacy_checks/h3c_lanswitch_cpu.py
@@ -20,8 +20,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, OIDEnd, SNMPTree, StringTable
def h3c_lanswitch_cpu_genitem(item):
diff --git a/cmk/base/legacy_checks/heartbeat_nodes.py b/cmk/base/legacy_checks/heartbeat_nodes.py
index fc606497a47..65fb1f5a9a0 100644
--- a/cmk/base/legacy_checks/heartbeat_nodes.py
+++ b/cmk/base/legacy_checks/heartbeat_nodes.py
@@ -16,7 +16,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_heartbeat_nodes(info):
diff --git a/cmk/base/legacy_checks/hitachi_hnas_bossock.py b/cmk/base/legacy_checks/hitachi_hnas_bossock.py
index 27e90ab0af9..38cc85b29df 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_bossock.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_bossock.py
@@ -8,8 +8,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
DiscoveryResult = Iterable[tuple[str, Mapping]]
diff --git a/cmk/base/legacy_checks/hitachi_hnas_cifs.py b/cmk/base/legacy_checks/hitachi_hnas_cifs.py
index 00a6e1534af..2b757810def 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_cifs.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_cifs.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
diff --git a/cmk/base/legacy_checks/hitachi_hnas_cpu.py b/cmk/base/legacy_checks/hitachi_hnas_cpu.py
index 3e9fa179f86..043d606f1f6 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_cpu.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_cpu.py
@@ -4,15 +4,14 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
-def inventory_hitachi_hnas_cpu(string_table: StringTable) -> DiscoveryResult:
+def discover_hitachi_hnas_cpu(string_table: StringTable) -> DiscoveryResult:
for id_, _util in string_table:
yield Service(item=id_)
@@ -46,7 +45,7 @@ def parse_hitachi_hnas_cpu(string_table: StringTable) -> StringTable:
oids=["1", "3"],
),
service_name="CPU utilization PNode %s",
- discovery_function=inventory_hitachi_hnas_cpu,
+ discovery_function=discover_hitachi_hnas_cpu,
check_function=check_hitachi_hnas_cpu,
check_ruleset_name="cpu_utilization_multiitem",
check_default_parameters={"levels": (80.0, 90.0)},
diff --git a/cmk/base/legacy_checks/hitachi_hnas_fan.py b/cmk/base/legacy_checks/hitachi_hnas_fan.py
index 16cec404a2e..b75709e47ae 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_fan.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_fan.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
diff --git a/cmk/base/legacy_checks/hitachi_hnas_fpga.py b/cmk/base/legacy_checks/hitachi_hnas_fpga.py
index f93423c573d..d0407f7e47b 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_fpga.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_fpga.py
@@ -4,15 +4,14 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
-def inventory_hitachi_hnas_fpga(string_table: StringTable) -> DiscoveryResult:
+def discover_hitachi_hnas_fpga(string_table: StringTable) -> DiscoveryResult:
for clusternode, id_, name, _util in string_table:
yield Service(item=clusternode + "." + id_ + " " + name)
@@ -50,7 +49,7 @@ def parse_hitachi_hnas_fpga(string_table: StringTable) -> StringTable:
oids=["1", "2", "3", "4"],
),
service_name="FPGA %s",
- discovery_function=inventory_hitachi_hnas_fpga,
+ discovery_function=discover_hitachi_hnas_fpga,
check_function=check_hitachi_hnas_fpga,
check_ruleset_name="fpga_utilization",
check_default_parameters={"levels": (80.0, 90.0)},
diff --git a/cmk/base/legacy_checks/hitachi_hnas_pnode.py b/cmk/base/legacy_checks/hitachi_hnas_pnode.py
index 184ff2f2ba4..fe0088014e9 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_pnode.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_pnode.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
diff --git a/cmk/base/legacy_checks/hitachi_hnas_psu.py b/cmk/base/legacy_checks/hitachi_hnas_psu.py
index 7b302261c32..0e7ec1d7195 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_psu.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_psu.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
diff --git a/cmk/base/legacy_checks/hitachi_hnas_quorumdevice.py b/cmk/base/legacy_checks/hitachi_hnas_quorumdevice.py
index 78cac26b938..5fad973ac03 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_quorumdevice.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_quorumdevice.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
diff --git a/cmk/base/legacy_checks/hitachi_hnas_temp.py b/cmk/base/legacy_checks/hitachi_hnas_temp.py
index 2adaa929d30..57d0a7b96fd 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_temp.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
diff --git a/cmk/base/legacy_checks/hitachi_hnas_vnode.py b/cmk/base/legacy_checks/hitachi_hnas_vnode.py
index 9f7eb7301d8..c403e24b5ce 100644
--- a/cmk/base/legacy_checks/hitachi_hnas_vnode.py
+++ b/cmk/base/legacy_checks/hitachi_hnas_vnode.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hitachi_hnas import DETECT
diff --git a/cmk/base/legacy_checks/hitachi_hus_dkc.py b/cmk/base/legacy_checks/hitachi_hus_dkc.py
deleted file mode 100644
index fedca7a4778..00000000000
--- a/cmk/base/legacy_checks/hitachi_hus_dkc.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hitachi_hus import check_hitachi_hus, inventory_hitachi_hus
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import any_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def parse_hitachi_hus_dkc(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["hitachi_hus_dkc"] = LegacyCheckDefinition(
- parse_function=parse_hitachi_hus_dkc,
- detect=any_of(
- contains(".1.3.6.1.2.1.1.1.0", "hm700"),
- contains(".1.3.6.1.2.1.1.1.0", "hm800"),
- contains(".1.3.6.1.2.1.1.1.0", "hm850"),
- ),
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.116.5.11.4.1.1.6.1",
- oids=["1", "2", "3", "4", "5", "6", "7", "8", "9"],
- ),
- service_name="HUS DKC Chassis %s",
- discovery_function=inventory_hitachi_hus,
- check_function=check_hitachi_hus,
-)
diff --git a/cmk/base/legacy_checks/hitachi_hus_dku.py b/cmk/base/legacy_checks/hitachi_hus_dku.py
deleted file mode 100644
index 88a6c4d7e95..00000000000
--- a/cmk/base/legacy_checks/hitachi_hus_dku.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hitachi_hus import check_hitachi_hus, inventory_hitachi_hus
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import any_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def parse_hitachi_hus_dku(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["hitachi_hus_dku"] = LegacyCheckDefinition(
- parse_function=parse_hitachi_hus_dku,
- detect=any_of(
- contains(".1.3.6.1.2.1.1.1.0", "hm700"),
- contains(".1.3.6.1.2.1.1.1.0", "hm800"),
- contains(".1.3.6.1.2.1.1.1.0", "hm850"),
- ),
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.116.5.11.4.1.1.7.1",
- oids=["1", "2", "3", "4", "5"],
- ),
- service_name="HUS DKU Chassis %s",
- discovery_function=inventory_hitachi_hus,
- check_function=check_hitachi_hus,
-)
diff --git a/cmk/base/legacy_checks/hitachi_hus_status.py b/cmk/base/legacy_checks/hitachi_hus_status.py
index bbcf1bbca3b..e6682b70bcc 100644
--- a/cmk/base/legacy_checks/hitachi_hus_status.py
+++ b/cmk/base/legacy_checks/hitachi_hus_status.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_hitachi_hus_status(info):
diff --git a/cmk/base/legacy_checks/hivemanager_devices.py b/cmk/base/legacy_checks/hivemanager_devices.py
index e1e435e8398..3856fc880c0 100644
--- a/cmk/base/legacy_checks/hivemanager_devices.py
+++ b/cmk/base/legacy_checks/hivemanager_devices.py
@@ -10,10 +10,12 @@
# BBSA-WIFI-LSN-Hald-F2-1|24|Cleared|True|57 Days, 3 Hrs 24 Mins 22 Secs
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
+
+TOKEN_MULTIPLIER = (1, 60, 3600, 86400, 31536000)
def inventory_hivemanager_devices(info):
@@ -54,22 +56,17 @@ def check_hivemanager_devices(item, params, info): # pylint: disable=too-many-b
yield 0, infotext, perfdata
# Uptime
- state = 0
- warn, crit = 0, 0
- infotext = ""
- uptime_secs = 0
- if infos["upTime"] != "down":
- token_multiplier = [1, 60, 3600, 86400, 31536000]
- for idx, entry in enumerate(map(int, infos["upTime"].split()[-2::-2])):
- uptime_secs += token_multiplier[idx] * entry
- infotext = "Uptime: %s" % get_age_human_readable(uptime_secs)
- if "max_uptime" in params:
- warn, crit = params["max_uptime"]
- if uptime_secs >= crit:
- state = 2
- elif uptime_secs >= warn:
- state = 1
- yield state, infotext, [("uptime", uptime_secs, warn, crit)]
+ if (raw_uptime := infos["upTime"]) != "down":
+ yield check_levels(
+ sum(
+ factor * int(token)
+ for factor, token in zip(TOKEN_MULTIPLIER, raw_uptime.split()[-2::-2])
+ ),
+ "uptime",
+ params.get("max_uptime"),
+ human_readable_func=render.timespan,
+ infoname="Uptime",
+ )
# Additional Information
additional_informations = [
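
The slicing in the rewritten uptime handling is easy to misread: split() yields alternating value/unit tokens, and [-2::-2] walks the values right to left so they line up with TOKEN_MULTIPLIER (seconds, minutes, hours, days, years). A self-contained sketch with the uptime string from the sample output above:

TOKEN_MULTIPLIER = (1, 60, 3600, 86400, 31536000)
raw_uptime = "57 Days, 3 Hrs 24 Mins 22 Secs"
tokens = raw_uptime.split()[-2::-2]  # ["22", "24", "3", "57"]
uptime = sum(factor * int(token) for factor, token in zip(TOKEN_MULTIPLIER, tokens))
assert uptime == 57 * 86400 + 3 * 3600 + 24 * 60 + 22
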
diff --git a/cmk/base/legacy_checks/hp_blade.py b/cmk/base/legacy_checks/hp_blade.py
index 278a44e9c62..5c09838b078 100644
--- a/cmk/base/legacy_checks/hp_blade.py
+++ b/cmk/base/legacy_checks/hp_blade.py
@@ -16,8 +16,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hp import DETECT_HP_BLADE
# GENERAL MAPS:
diff --git a/cmk/base/legacy_checks/hp_blade_blades.py b/cmk/base/legacy_checks/hp_blade_blades.py
index c3980e9fa77..97ccf740b14 100644
--- a/cmk/base/legacy_checks/hp_blade_blades.py
+++ b/cmk/base/legacy_checks/hp_blade_blades.py
@@ -25,8 +25,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hp import DETECT_HP_BLADE
# GENERAL MAPS:
diff --git a/cmk/base/legacy_checks/hp_blade_fan.py b/cmk/base/legacy_checks/hp_blade_fan.py
index 814c58f5ad6..c6f60edcdf3 100644
--- a/cmk/base/legacy_checks/hp_blade_fan.py
+++ b/cmk/base/legacy_checks/hp_blade_fan.py
@@ -16,8 +16,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hp import DETECT_HP_BLADE
# GENERAL MAPS:
diff --git a/cmk/base/legacy_checks/hp_blade_manager.py b/cmk/base/legacy_checks/hp_blade_manager.py
index 5123aabaea5..a464581aa2a 100644
--- a/cmk/base/legacy_checks/hp_blade_manager.py
+++ b/cmk/base/legacy_checks/hp_blade_manager.py
@@ -16,11 +16,10 @@
from collections.abc import Mapping
-from cmk.base.check_api import CheckResult, DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import CheckResult, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.hp import DETECT_HP_BLADE
# GENERAL MAPS:
@@ -37,7 +36,7 @@
}
-def inventory_hp_blade_manager(string_table: StringTable) -> DiscoveryResult:
+def discover_hp_blade_manager(string_table: StringTable) -> DiscoveryResult:
    # FIXME: Check whether this condition is implemented correctly, or whether this is yet another wrongly implemented value
# => if hp_blade_present_map[int(line[1])] == 'present'
yield from (Service(item=line[0], parameters={"role": line[3]}) for line in string_table)
@@ -81,7 +80,7 @@ def parse_hp_blade_manager(string_table: StringTable) -> StringTable:
oids=["3", "10", "12", "9", "8"],
),
service_name="Manager %s",
- discovery_function=inventory_hp_blade_manager,
+ discovery_function=discover_hp_blade_manager,
check_function=check_hp_blade_manager,
check_default_parameters={},
)
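
Discovery functions may attach parameters to each service, as done above to pin the manager role found at discovery time. A minimal sketch (the role value is illustrative, not from a real walk):

from cmk.agent_based.v2 import Service

svc = Service(item="1", parameters={"role": "Standby"})
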
diff --git a/cmk/base/legacy_checks/hp_blade_psu.py b/cmk/base/legacy_checks/hp_blade_psu.py
index b15360eba15..e72e44a7c3c 100644
--- a/cmk/base/legacy_checks/hp_blade_psu.py
+++ b/cmk/base/legacy_checks/hp_blade_psu.py
@@ -19,8 +19,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hp import DETECT_HP_BLADE
# GENERAL MAPS:
diff --git a/cmk/base/legacy_checks/hp_eml_sum.py b/cmk/base/legacy_checks/hp_eml_sum.py
index be077cbdf48..0edf8837afc 100644
--- a/cmk/base/legacy_checks/hp_eml_sum.py
+++ b/cmk/base/legacy_checks/hp_eml_sum.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
hp_eml_sum_map = {
# snmp_value: (nagios_status, txt)
diff --git a/cmk/base/legacy_checks/hp_mcs_system.py b/cmk/base/legacy_checks/hp_mcs_system.py
index cf8e7676240..ddb0ff6201b 100644
--- a/cmk/base/legacy_checks/hp_mcs_system.py
+++ b/cmk/base/legacy_checks/hp_mcs_system.py
@@ -4,14 +4,13 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDBytes, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, OIDBytes, Service, SNMPTree, startswith, StringTable
-def inventory_hp_mcs_system(section: StringTable) -> DiscoveryResult:
+def discover_hp_mcs_system(section: StringTable) -> DiscoveryResult:
if not section:
return
yield Service(item=section[0][0])
@@ -45,6 +44,6 @@ def parse_hp_mcs_system(string_table: StringTable) -> StringTable:
oids=["2.2.4.2", OIDBytes("11.2.10.1"), "11.2.10.3"],
),
service_name="%s",
- discovery_function=inventory_hp_mcs_system,
+ discovery_function=discover_hp_mcs_system,
check_function=check_hp_mcs_system,
)
diff --git a/cmk/base/legacy_checks/hp_msa_controller.py b/cmk/base/legacy_checks/hp_msa_controller.py
deleted file mode 100644
index 3fe11eee4e8..00000000000
--- a/cmk/base/legacy_checks/hp_msa_controller.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
-from cmk.base.check_legacy_includes.hp_msa import parse_hp_msa
-from cmk.base.config import check_info
-
-# <<<hp_msa_controller>>>
-# controller-statistics 1 durable-id controller_A
-# controller-statistics 1 cpu-load 3
-# controller-statistics 1 power-on-time 7855017
-# controller-statistics 1 write-cache-used 24
-# controller-statistics 1 bytes-per-second 6434.3KB
-# controller-statistics 1 bytes-per-second-numeric 6434304
-# controller-statistics 1 iops 184
-# controller-statistics 1 number-of-reads 67423711
-# controller-statistics 1 read-cache-hits 86626091
-# controller-statistics 1 read-cache-misses 172382632
-# controller-statistics 1 number-of-writes 500652138
-# controller-statistics 1 write-cache-hits 281297065
-# controller-statistics 1 write-cache-misses 1063951139
-# controller-statistics 1 data-read 7711.4GB
-# controller-statistics 1 data-read-numeric 7711480795648
-# controller-statistics 1 data-written 40.8TB
-# controller-statistics 1 data-written-numeric 40830379518976
-# controller-statistics 1 num-forwarded-cmds 1
-# controller-statistics 1 reset-time 2015-05-22 13:54:37
-# controller-statistics 1 reset-time-numeric 1432302877
-# controller-statistics 1 start-sample-time 2015-08-21 11:51:52
-# controller-statistics 1 start-sample-time-numeric 1440157912
-# controller-statistics 1 stop-sample-time 2015-08-21 11:51:57
-# controller-statistics 1 stop-sample-time-numeric 1440157917
-# controller-statistics 1 total-power-on-hours 2636.59
-
-# .--controller cpu------------------------------------------------------.
-# | _ _ _ |
-# | ___ ___ _ __ | |_ _ __ ___ | | | ___ _ __ ___ _ __ _ _ |
-# | / __/ _ \| '_ \| __| '__/ _ \| | |/ _ \ '__| / __| '_ \| | | | |
-# | | (_| (_) | | | | |_| | | (_) | | | __/ | | (__| |_) | |_| | |
-# | \___\___/|_| |_|\__|_| \___/|_|_|\___|_| \___| .__/ \__,_| |
-# | |_| |
-# +----------------------------------------------------------------------+
-# | main check |
-# '----------------------------------------------------------------------'
-
-
-def inventory_hp_msa_controller_cpu(parsed):
- for key in parsed:
- yield key, {}
-
-
-def check_hp_msa_controller_cpu(item, params, parsed):
- if item in parsed:
- # hp msa 2040 reference guide:
- # cpu-load: percentage of time the CPU is busy, from 0-100
- return check_cpu_util(float(parsed[item]["cpu-load"]), params)
- return None
-
-
-check_info["hp_msa_controller"] = LegacyCheckDefinition(
- parse_function=parse_hp_msa,
- service_name="CPU Utilization %s",
- discovery_function=inventory_hp_msa_controller_cpu,
- check_function=check_hp_msa_controller_cpu,
- check_ruleset_name="cpu_utilization_multiitem",
- check_default_parameters={
- "levels": (80.0, 90.0),
- },
-)
diff --git a/cmk/base/legacy_checks/hp_msa_disk.py b/cmk/base/legacy_checks/hp_msa_disk.py
deleted file mode 100644
index 70a15920b5a..00000000000
--- a/cmk/base/legacy_checks/hp_msa_disk.py
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hp_msa import (
- check_hp_msa_health,
- inventory_hp_msa_health,
- parse_hp_msa,
-)
-from cmk.base.check_legacy_includes.temperature import check_temperature_list
-from cmk.base.config import check_info
-
-# drives 1 durable-id disk_01.01
-# drives 1 enclosure-id 1
-# drives 1 drawer-id 255
-# drives 1 slot 1
-# drives 1 location 1.1
-# drives 1 port 0
-# drives 1 scsi-id 0
-# drives 1 blocks 1172123568
-# drives 1 serial-number W7GB13NV
-# drives 1 vendor HP
-# drives 1 model EG0600FBVFP
-# drives 1 revision HPDC
-# drives 1 secondary-channel 0
-# drives 1 container-index 0
-# drives 1 member-index 0
-# drives 1 description SAS
-# drives 1 description-numeric 4
-# drives 1 architecture HDD
-# drives 1 architecture-numeric 1
-# drives 1 interface SAS
-# drives 1 interface-numeric 0
-# drives 1 single-ported Disabled
-# drives 1 single-ported-numeric 0
-# drives 1 type SAS
-# drives 1 type-numeric 4
-# drives 1 usage LINEAR POOL
-# drives 1 usage-numeric 1
-# drives 1 job-running
-# drives 1 job-running-numeric 0
-# drives 1 state LINEAR POOL
-# drives 1 current-job-completion
-# drives 1 blink 0
-# drives 1 locator-led Off
-# drives 1 locator-led-numeric 0
-# drives 1 speed 0
-# drives 1 smart Enabled
-# drives 1 smart-numeric 1
-# drives 1 dual-port 1
-# drives 1 error 0
-# drives 1 fc-p1-channel 0
-# drives 1 fc-p1-device-id 0
-# drives 1 fc-p1-node-wwn 5000CCA07014111C
-# drives 1 fc-p1-port-wwn 0000000000000000
-# drives 1 fc-p1-unit-number 0
-# drives 1 fc-p2-channel 0
-# drives 1 fc-p2-device-id 0
-# drives 1 fc-p2-node-wwn
-# drives 1 fc-p2-port-wwn
-# drives 1 fc-p2-unit-number 0
-# drives 1 drive-down-code 0
-# drives 1 owner A
-# drives 1 owner-numeric 1
-# drives 1 index 0
-# drives 1 rpm 10
-# drives 1 size 600.1GB
-# drives 1 size-numeric 1172123568
-# drives 1 sector-format 512n
-# drives 1 sector-format-numeric 0
-# drives 1 transfer-rate 6.0
-# drives 1 transfer-rate-numeric 3
-# drives 1 attributes
-# drives 1 attributes-numeric 2
-# drives 1 enclosure-wwn 500C0FF01E82BB3C
-# drives 1 recon-state N/A
-# drives 1 recon-state-numeric 0
-# drives 1 copyback-state N/A
-# drives 1 copyback-state-numeric 0
-# drives 1 virtual-disk-serial 00c0ff1ec44a00001e23415500000000
-# drives 1 disk-group IMSAKO2B1_U1_B01-04
-# drives 1 storage-pool-name IMSAKO2B1_U1_B01-04
-# drives 1 storage-tier N/A
-# drives 1 storage-tier-numeric 0
-# drives 1 ssd-life-left N/A
-# drives 1 ssd-life-left-numeric 255
-# drives 1 led-status-numeric 1
-# drives 1 disk-dsd-count 0
-# drives 1 spun-down 0
-# drives 1 number-of-ios 0
-# drives 1 total-data-transferred 0B
-# drives 1 total-data-transferred-numeric 0
-# drives 1 avg-rsp-time 0
-# drives 1 fde-state Not FDE Capable
-# drives 1 fde-state-numeric 1
-# drives 1 lock-key-id 00000000
-# drives 1 import-lock-key-id 00000000
-# drives 1 fde-config-time N/A
-# drives 1 fde-config-time-numeric 0
-# drives 1 pi-formatted Unsupported
-# drives 1 pi-formatted-numeric 4
-# drives 1 power-on-hours 2663
-# drives 1 health OK
-# drives 1 health-numeric 0
-# drives 1 health-reason
-# drives 1 health-recommendation
-# disk-statistics 1 durable-id disk_01.01
-# disk-statistics 1 serial-number W7GB13NV
-# disk-statistics 1 bytes-per-second 771.0KB
-# disk-statistics 1 bytes-per-second-numeric 771072
-# disk-statistics 1 iops 13
-# disk-statistics 1 number-of-reads 49797666
-# disk-statistics 1 number-of-writes 20095262
-# disk-statistics 1 data-read 50.6TB
-# disk-statistics 1 data-read-numeric 50656968970752
-# disk-statistics 1 data-written 2800.2GB
-# disk-statistics 1 data-written-numeric 2800282933760
-# disk-statistics 1 queue-depth 0
-# disk-statistics 1 reset-time 2015-05-22 13:55:39
-# disk-statistics 1 reset-time-numeric 1432302939
-# disk-statistics 1 start-sample-time 2015-08-18 10:37:02
-# disk-statistics 1 start-sample-time-numeric 1439894222
-# disk-statistics 1 stop-sample-time 2015-08-18 11:09:27
-# disk-statistics 1 stop-sample-time-numeric 1439896167
-# disk-statistics 1 smart-count-1 0
-# disk-statistics 1 io-timeout-count-1 0
-# disk-statistics 1 no-response-count-1 0
-# disk-statistics 1 spinup-retry-count-1 0
-# disk-statistics 1 number-of-media-errors-1 0
-# disk-statistics 1 number-of-nonmedia-errors-1 6
-# disk-statistics 1 number-of-block-reassigns-1 0
-# disk-statistics 1 number-of-bad-blocks-1 0
-# disk-statistics 1 smart-count-2 0
-# disk-statistics 1 io-timeout-count-2 0
-# disk-statistics 1 no-response-count-2 0
-# disk-statistics 1 spinup-retry-count-2 0
-# disk-statistics 1 number-of-media-errors-2 0
-# disk-statistics 1 number-of-nonmedia-errors-2 1
-# disk-statistics 1 number-of-block-reassigns-2 0
-# disk-statistics 1 number-of-bad-blocks-2 0
-
-# .--health--------------------------------------------------------------.
-# | _ _ _ _ |
-# | | |__ ___ __ _| | |_| |__ |
-# | | '_ \ / _ \/ _` | | __| '_ \ |
-# | | | | | __/ (_| | | |_| | | | |
-# | |_| |_|\___|\__,_|_|\__|_| |_| |
-# | |
-# +----------------------------------------------------------------------+
-# | main check |
-# '----------------------------------------------------------------------'
-
-check_info["hp_msa_disk"] = LegacyCheckDefinition(
- parse_function=parse_hp_msa,
- service_name="Disk Health %s",
- discovery_function=inventory_hp_msa_health,
- check_function=check_hp_msa_health,
-)
-
-
-# .
-# .--temperature---------------------------------------------------------.
-# | _ _ |
-# | | |_ ___ _ __ ___ _ __ ___ _ __ __ _| |_ _ _ _ __ ___ |
-# | | __/ _ \ '_ ` _ \| '_ \ / _ \ '__/ _` | __| | | | '__/ _ \ |
-# | | || __/ | | | | | |_) | __/ | | (_| | |_| |_| | | | __/ |
-# | \__\___|_| |_| |_| .__/ \___|_| \__,_|\__|\__,_|_| \___| |
-# | |_| |
-# '----------------------------------------------------------------------'
-
-
-def inventory_hp_msa_disk_temp(parsed):
- return [("Disks", {})]
-
-
-def check_hp_msa_disk_temp(item, params, parsed):
- disks = []
- for key, values in parsed.items():
- disks.append((key, float(values["temperature-numeric"])))
-
- return check_temperature_list(disks, params, "hp_msa_disk_temp_%s" % item)
-
-
-check_info["hp_msa_disk.temp"] = LegacyCheckDefinition(
- service_name="Temperature %s",
- sections=["hp_msa_disk"],
- discovery_function=inventory_hp_msa_disk_temp,
- check_function=check_hp_msa_disk_temp,
- check_ruleset_name="temperature",
- check_default_parameters={
- "levels": (40.0, 45.0), # just an assumption
- },
-)
-
-# .
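
# NOTE: check_temperature_list (from cmk.base.check_legacy_includes.temperature)
# folds all disks into a single "Temperature Disks" service. A toy
# re-implementation of the worst-state-wins aggregation it performs, under the
# simplifying assumption of plain upper levels:

def worst_temperature_state(disks, warn, crit):
    # disks: list of (item name, temperature in °C); upper levels only.
    state = 0
    details = []
    for name, temp in disks:
        if temp >= crit:
            state = 2
            mark = " (!!)"
        elif temp >= warn:
            state = max(state, 1)
            mark = " (!)"
        else:
            mark = ""
        details.append(f"{name}: {temp:.1f} °C{mark}")
    return state, ", ".join(details)


print(worst_temperature_state([("Disk 1.1", 36.0), ("Disk 1.2", 42.5)], 40.0, 45.0))
# -> (1, 'Disk 1.1: 36.0 °C, Disk 1.2: 42.5 °C (!)')
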
diff --git a/cmk/base/legacy_checks/hp_msa_fan.py b/cmk/base/legacy_checks/hp_msa_fan.py
deleted file mode 100644
index ad80d8cb1da..00000000000
--- a/cmk/base/legacy_checks/hp_msa_fan.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hp_msa import parse_hp_msa
-from cmk.base.config import check_info
-
-# <<<hp_msa_fan>>>
-# fan 1 durable-id fan_1.1
-# fan 1 name Fan Loc:left-PSU 1
-# fan 1 location Enclosure 1 - Left
-# fan 1 status Up
-# fan 1 status-numeric 0
-# fan 1 speed 3760
-# fan 1 position Left
-# fan 1 position-numeric 0
-# fan 1 serial-number
-# fan 1 fw-revision
-# fan 1 hw-revision
-# fan 1 health OK
-# fan 1 health-numeric 0
-# fan 1 health-reason
-# fan 1 health-recommendation
-# fan 2 durable-id fan_1.2
-# fan 2 name Fan Loc:right-PSU 2
-# fan 2 location Enclosure 1 - Right
-# fan 2 status Up
-# fan 2 status-numeric 0
-# fan 2 speed 3880
-# fan 2 position Right
-# fan 2 position-numeric 1
-# fan 2 serial-number
-# fan 2 fw-revision
-# fan 2 hw-revision
-# fan 2 health OK
-# fan 2 health-numeric 0
-# fan 2 health-reason
-# fan 2 health-recommendation
-
-hp_msa_state_numeric_map = {
- "0": (0, "up"),
- "1": (2, "error"),
- "2": (1, "off"),
- "3": (3, "missing"),
-}
-
-hp_msa_health_state_numeric_map = {
- "0": (0, "OK"),
- "1": (1, "degraded"),
- "2": (2, "fault"),
- "3": (2, "N/A"),
- "4": (3, "unknown"),
-}
-
-
-def inventory_hp_msa_fan(parsed):
- for item in parsed:
- yield item, None
-
-
-def check_hp_msa_fan(item, params, parsed):
- if item in parsed:
- fan_speed = int(parsed[item]["speed"])
- fan_state, fan_state_readable = hp_msa_state_numeric_map[parsed[item]["status-numeric"]]
- fan_health_state, fan_health_state_readable = hp_msa_health_state_numeric_map[
- parsed[item]["health-numeric"]
- ]
- fan_health_reason = parsed[item].get("health-reason", "")
-
- yield fan_state, f"Status: {fan_state_readable}, speed: {fan_speed} RPM"
-
- if fan_health_state and fan_health_reason:
- yield fan_health_state, "health: {} ({})".format(
- fan_health_state_readable,
- fan_health_reason,
- )
-
-
-check_info["hp_msa_fan"] = LegacyCheckDefinition(
- parse_function=parse_hp_msa,
- service_name="Fan %s",
- discovery_function=inventory_hp_msa_fan,
- check_function=check_hp_msa_fan,
-)
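
# NOTE: a quick, self-contained usage example for the status map in the deleted
# fan check, fed with the values from the quoted agent output:

hp_msa_state_numeric_map = {
    "0": (0, "up"),
    "1": (2, "error"),
    "2": (1, "off"),
    "3": (3, "missing"),
}

fan = {"speed": "3760", "status-numeric": "0"}  # values of "fan 1" above
state, readable = hp_msa_state_numeric_map[fan["status-numeric"]]
print(state, f"Status: {readable}, speed: {int(fan['speed'])} RPM")
# -> 0 Status: up, speed: 3760 RPM
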
diff --git a/cmk/base/legacy_checks/hp_msa_psu.py b/cmk/base/legacy_checks/hp_msa_psu.py
deleted file mode 100644
index 0a5c8359e63..00000000000
--- a/cmk/base/legacy_checks/hp_msa_psu.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hp_msa import (
- check_hp_msa_health,
- inventory_hp_msa_health,
- parse_hp_msa,
-)
-from cmk.base.check_legacy_includes.temperature import check_temperature
-from cmk.base.config import check_info
-
-# <<<hp_msa_psu>>>
-# power-supplies 1 durable-id psu_1.1
-# power-supplies 1 enclosure-id 1
-# power-supplies 1 serial-number 7CE451T700
-# power-supplies 1 description FRU,Pwr Sply,595W,AC,2U,LC,HP
-# power-supplies 1 name PSU 1, Left
-# power-supplies 1 revision D1
-# power-supplies 1 model 592267-002
-# power-supplies 1 vendor 0x
-# power-supplies 1 location Enclosure 1 - Left
-# power-supplies 1 position Left
-# power-supplies 1 position-numeric 0
-# power-supplies 1 part-number 592267-002
-# power-supplies 1 dash-level
-# power-supplies 1 fru-shortname AC Power Supply
-# power-supplies 1 mfg-date 2014-10-29 16:57:47
-# power-supplies 1 mfg-date-numeric 1414601867
-# power-supplies 1 mfg-location Zhongshan,Guangdong,CN
-# power-supplies 1 mfg-vendor-id 0x
-# power-supplies 1 configuration-serialnumber 7CE451T700
-# power-supplies 1 dc12v 1195
-# power-supplies 1 dc5v 508
-# power-supplies 1 dc33v 336
-# power-supplies 1 dc12i 548
-# power-supplies 1 dc5i 489
-# power-supplies 1 dctemp 34
-# power-supplies 1 health OK
-# power-supplies 1 health-numeric 0
-# power-supplies 1 health-reason
-# power-supplies 1 health-recommendation
-# power-supplies 1 status Up
-# power-supplies 1 status-numeric 0
-
-# .--health--------------------------------------------------------------.
-# | _ _ _ _ |
-# | | |__ ___ __ _| | |_| |__ |
-# | | '_ \ / _ \/ _` | | __| '_ \ |
-# | | | | | __/ (_| | | |_| | | | |
-# | |_| |_|\___|\__,_|_|\__|_| |_| |
-# | |
-# +----------------------------------------------------------------------+
-# | main check |
-# '----------------------------------------------------------------------'
-
-check_info["hp_msa_psu"] = LegacyCheckDefinition(
- parse_function=parse_hp_msa,
- service_name="Power Supply Health %s",
- discovery_function=inventory_hp_msa_health,
- check_function=check_hp_msa_health,
-)
-
-# .
-# .--voltage-------------------------------------------------------------.
-# | _ _ |
-# | __ _____ | | |_ __ _ __ _ ___ |
-# | \ \ / / _ \| | __/ _` |/ _` |/ _ \ |
-# | \ V / (_) | | || (_| | (_| | __/ |
-# | \_/ \___/|_|\__\__,_|\__, |\___| |
-# | |___/ |
-# '----------------------------------------------------------------------'
-
-# Just an assumption
-
-
-def inventory_hp_msa_psu(parsed):
- """detect if PSU info is invalid
-
- Some fields where deprecated for HP MSA 1050/2050.
- If the PSU is freezing and has no voltage we assume
- that means data is not valid
- """
- indicators = ("dc12v", "dc5v", "dc33v", "dc12i", "dc5i", "dctemp")
- for item, data in parsed.items():
- if any(data.get(i) != "0" for i in indicators):
- yield item, {}
-
-
-def check_hp_msa_psu(item, params, parsed):
- if not (data := parsed.get(item)):
- return
- for psu_type, psu_type_readable, levels_type in [
- ("dc12v", "12 V", "levels_12v_"),
- ("dc5v", "5 V", "levels_5v_"),
- ("dc33v", "3.3 V", "levels_33v_"),
- ]:
- psu_voltage = float(data[psu_type]) / 100
- levels = params[levels_type + "upper"] + params[levels_type + "lower"]
- yield check_levels(psu_voltage, None, levels, unit="V", infoname=psu_type_readable)
-
-
-check_info["hp_msa_psu.sensor"] = LegacyCheckDefinition(
- service_name="Power Supply Voltage %s",
- sections=["hp_msa_psu"],
- discovery_function=inventory_hp_msa_psu,
- check_function=check_hp_msa_psu,
- check_ruleset_name="hp_msa_psu_voltage",
- check_default_parameters={
- "levels_33v_lower": (3.25, 3.20),
- "levels_33v_upper": (3.4, 3.45),
- "levels_5v_lower": (4.9, 4.8),
- "levels_5v_upper": (5.1, 5.2),
- "levels_12v_lower": (11.9, 11.8),
- "levels_12v_upper": (12.1, 12.2),
- },
-)
-
-# .
-# .--temperature---------------------------------------------------------.
-# | _ _ |
-# | | |_ ___ _ __ ___ _ __ ___ _ __ __ _| |_ _ _ _ __ ___ |
-# | | __/ _ \ '_ ` _ \| '_ \ / _ \ '__/ _` | __| | | | '__/ _ \ |
-# | | || __/ | | | | | |_) | __/ | | (_| | |_| |_| | | | __/ |
-# | \__\___|_| |_| |_| .__/ \___|_| \__,_|\__|\__,_|_| \___| |
-# | |_| |
-# +----------------------------------------------------------------------+
-
-
-def check_hp_msa_psu_temp(item, params, parsed):
- if not (data := parsed.get(item)):
- return
- yield check_temperature(float(data["dctemp"]), params, "hp_msa_psu_temp_%s" % item)
-
-
-check_info["hp_msa_psu.temp"] = LegacyCheckDefinition(
- service_name="Temperature Power Supply %s",
- sections=["hp_msa_psu"],
- discovery_function=inventory_hp_msa_psu,
- check_function=check_hp_msa_psu_temp,
- check_ruleset_name="temperature",
- check_default_parameters={
- "levels": (40.0, 45.0), # Just assumed
- },
-)
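
# NOTE on check_hp_msa_psu: the legacy check_levels helper accepts a
# four-element tuple (warn_upper, crit_upper, warn_lower, crit_lower), which is
# why the upper and lower default pairs are simply concatenated. A
# self-contained re-implementation of that evaluation (my own sketch, not the
# Checkmk helper):

def evaluate_levels(value, levels):
    warn_upper, crit_upper, warn_lower, crit_lower = levels
    if value >= crit_upper or value <= crit_lower:
        return 2
    if value >= warn_upper or value <= warn_lower:
        return 1
    return 0


params = {"levels_12v_upper": (12.1, 12.2), "levels_12v_lower": (11.9, 11.8)}
levels = params["levels_12v_upper"] + params["levels_12v_lower"]  # (12.1, 12.2, 11.9, 11.8)
print(evaluate_levels(1195 / 100, levels))  # raw dc12v of 1195 -> 11.95 V -> 0 (OK)
print(evaluate_levels(12.15, levels))       # above warn 12.1, below crit 12.2 -> 1
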
diff --git a/cmk/base/legacy_checks/hp_procurve_cpu.py b/cmk/base/legacy_checks/hp_procurve_cpu.py
index 148b6676ef7..ab4d5f1c026 100644
--- a/cmk/base/legacy_checks/hp_procurve_cpu.py
+++ b/cmk/base/legacy_checks/hp_procurve_cpu.py
@@ -4,12 +4,11 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, contains, DiscoveryResult, Service, SNMPTree, StringTable
def inventory_hp_procurve_cpu(string_table: StringTable) -> DiscoveryResult:
diff --git a/cmk/base/legacy_checks/hp_procurve_mem.py b/cmk/base/legacy_checks/hp_procurve_mem.py
index 8309403d6fc..af94751ae4e 100644
--- a/cmk/base/legacy_checks/hp_procurve_mem.py
+++ b/cmk/base/legacy_checks/hp_procurve_mem.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.mem import check_memory_element
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, contains, SNMPTree, StringTable
# FIXME
# The WATO group 'memory_simple' needs an item and the service_description should
@@ -25,13 +24,12 @@
# hpLocalMemAllocBytes 1.3.6.1.4.1.11.2.14.11.5.1.1.2.1.1.1.7
-def inventory_hp_procurve_mem(info):
+def discover_hp_procurve_mem(info):
if len(info) == 1 and int(info[0][0]) >= 0:
- return [("", {})]
- return []
+ yield None, {}
-def check_hp_procurve_mem(item, params, info):
+def check_hp_procurve_mem(_no_item, params, info):
if len(info) != 1:
return None
@@ -62,8 +60,8 @@ def parse_hp_procurve_mem(string_table: StringTable) -> StringTable:
oids=["5", "7"],
),
service_name="Memory",
- discovery_function=inventory_hp_procurve_mem,
+ discovery_function=discover_hp_procurve_mem,
check_function=check_hp_procurve_mem,
- check_ruleset_name="memory_simple",
+ check_ruleset_name="memory_simple_single",
check_default_parameters={"levels": ("perc_used", (80.0, 90.0))},
)
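
# NOTE: the discovery rewrite above follows the newer convention: a generator
# that yields (None, {}) for a single item-less service instead of returning
# [("", {})]. A simplified sketch of the assumed consumer semantics, with
# made-up memory values:

def discover(info):
    if len(info) == 1 and int(info[0][0]) >= 0:
        yield None, {}  # one service, no item, default check parameters


# An item of None produces the bare service name "Memory"; a string item
# would be formatted into it, e.g. "Memory %s" % item.
print(list(discover([["1907843072", "743980289"]])))  # -> [(None, {})]
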
diff --git a/cmk/base/legacy_checks/hp_procurve_sensors.py b/cmk/base/legacy_checks/hp_procurve_sensors.py
index 50211540ac4..2e6e6245bc4 100644
--- a/cmk/base/legacy_checks/hp_procurve_sensors.py
+++ b/cmk/base/legacy_checks/hp_procurve_sensors.py
@@ -48,8 +48,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, contains, SNMPTree, StringTable
hp_procurve_status_map = {
"1": "unknown",
diff --git a/cmk/base/legacy_checks/hp_procurve_temp.py b/cmk/base/legacy_checks/hp_procurve_temp.py
index 6ea4c3fe413..2fdc90d0f62 100644
--- a/cmk/base/legacy_checks/hp_procurve_temp.py
+++ b/cmk/base/legacy_checks/hp_procurve_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
# .1.3.6.1.4.1.11.2.14.11.1.2.8.1.1.2.0 Sys-1 # system name
# .1.3.6.1.4.1.11.2.14.11.1.2.8.1.1.3.0 21C # current temperature
diff --git a/cmk/base/legacy_checks/hp_proliant.py b/cmk/base/legacy_checks/hp_proliant.py
index da675370dd6..3cbb9febe76 100644
--- a/cmk/base/legacy_checks/hp_proliant.py
+++ b/cmk/base/legacy_checks/hp_proliant.py
@@ -11,8 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, contains, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, contains, exists, SNMPTree, StringTable
def inventory_proliant_general(info):
diff --git a/cmk/base/legacy_checks/hp_proliant_cpu.py b/cmk/base/legacy_checks/hp_proliant_cpu.py
index a09e425cc94..7231ed8f082 100644
--- a/cmk/base/legacy_checks/hp_proliant_cpu.py
+++ b/cmk/base/legacy_checks/hp_proliant_cpu.py
@@ -5,14 +5,10 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hp_proliant import (
- check_hp_proliant_cpu,
- inventory_hp_proliant_cpu,
-)
+from cmk.base.check_legacy_includes.hp_proliant import sanitize_item
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hp_proliant import DETECT
@@ -20,6 +16,34 @@ def parse_hp_proliant_cpu(string_table: StringTable) -> StringTable:
return string_table
+hp_proliant_cpu_status_map = {1: "unknown", 2: "ok", 3: "degraded", 4: "failed", 5: "disabled"}
+hp_proliant_cpu_status2nagios_map = {
+ "unknown": 3,
+ "ok": 0,
+ "degraded": 2,
+ "failed": 2,
+ "disabled": 1,
+}
+
+
+def inventory_hp_proliant_cpu(info):
+ yield from ((sanitize_item(line[0]), {}) for line in info)
+
+
+def check_hp_proliant_cpu(item, params, info):
+ for line in info:
+ if sanitize_item(line[0]) == item:
+ index, slot, name, status = line
+ snmp_status = hp_proliant_cpu_status_map[int(status)]
+ status = hp_proliant_cpu_status2nagios_map[snmp_status]
+
+ return (
+ status,
+ f'CPU{index} "{name}" in slot {slot} is in state "{snmp_status}"',
+ )
+ return (3, "item not found in snmp data")
+
+
check_info["hp_proliant_cpu"] = LegacyCheckDefinition(
parse_function=parse_hp_proliant_cpu,
detect=DETECT,
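
# NOTE: a runnable sketch exercising the inlined CPU check with a made-up SNMP
# row; sanitize_item is assumed here to be a plain string cleanup (the real
# helper lives in cmk.base.check_legacy_includes.hp_proliant):

hp_proliant_cpu_status_map = {1: "unknown", 2: "ok", 3: "degraded", 4: "failed", 5: "disabled"}
hp_proliant_cpu_status2nagios_map = {"unknown": 3, "ok": 0, "degraded": 2, "failed": 2, "disabled": 1}


def sanitize_item(item):  # assumption: identity apart from whitespace
    return item.strip()


def check(item, info):
    for index, slot, name, status in info:
        if sanitize_item(index) == item:
            snmp_status = hp_proliant_cpu_status_map[int(status)]
            return (
                hp_proliant_cpu_status2nagios_map[snmp_status],
                f'CPU{index} "{name}" in slot {slot} is in state "{snmp_status}"',
            )
    return (3, "item not found in snmp data")


print(check("0", [["0", "0", "Intel Xeon", "2"]]))
# -> (0, 'CPU0 "Intel Xeon" in slot 0 is in state "ok"')
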
diff --git a/cmk/base/legacy_checks/hp_proliant_da_cntlr.py b/cmk/base/legacy_checks/hp_proliant_da_cntlr.py
index 41f3247fe6c..86b0a40d9b7 100644
--- a/cmk/base/legacy_checks/hp_proliant_da_cntlr.py
+++ b/cmk/base/legacy_checks/hp_proliant_da_cntlr.py
@@ -5,21 +5,76 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hp_proliant import (
- check_hp_proliant_da_cntlr,
- inventory_hp_proliant_da_cntlr,
-)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hp_proliant import DETECT
+hp_proliant_da_cntlr_cond_map = {
+ "1": (3, "other"),
+ "2": (0, "ok"),
+ "3": (1, "degraded"),
+ "4": (2, "failed"),
+}
+
+hp_proliant_da_cntlr_role_map = {
+ "1": "other",
+ "2": "notDuplexed",
+ "3": "active",
+ "4": "backup",
+}
+
+
+hp_proliant_da_cntlr_state_map = {
+ "1": (3, "other"),
+ "2": (0, "ok"),
+ "3": (2, "generalFailure"),
+ "4": (2, "cableProblem"),
+ "5": (2, "poweredOff"),
+}
+
def parse_hp_proliant_da_cntlr(string_table: StringTable) -> StringTable:
return string_table
+def inventory_hp_proliant_da_cntlr(info):
+ if info:
+ return [(line[0], None) for line in info]
+ return []
+
+
+def check_hp_proliant_da_cntlr(item, params, info):
+ for line in info:
+ index, model, slot, cond, role, b_status, b_cond, serial = line
+ if index == item:
+ sum_state = 0
+ output = []
+
+ for val, label, map_ in [
+ (cond, "Condition", hp_proliant_da_cntlr_cond_map),
+ (b_cond, "Board-Condition", hp_proliant_da_cntlr_cond_map),
+ (b_status, "Board-Status", hp_proliant_da_cntlr_state_map),
+ ]:
+ this_state = map_[val][0]
+ state_txt = ""
+ if this_state == 1:
+ state_txt = " (!)"
+ elif this_state == 2:
+ state_txt = " (!!)"
+ sum_state = max(sum_state, this_state)
+ output.append(f"{label}: {map_[val][1]}{state_txt}")
+
+ output.append(
+ "(Role: {}, Model: {}, Slot: {}, Serial: {})".format(
+ hp_proliant_da_cntlr_role_map.get(role, "unknown"), model, slot, serial
+ )
+ )
+
+ return (sum_state, ", ".join(output))
+ return (3, "Controller not found in snmp data")
+
+
check_info["hp_proliant_da_cntlr"] = LegacyCheckDefinition(
parse_function=parse_hp_proliant_da_cntlr,
detect=DETECT,
diff --git a/cmk/base/legacy_checks/hp_proliant_fans.py b/cmk/base/legacy_checks/hp_proliant_fans.py
index e17d75616c5..30b3c95712a 100644
--- a/cmk/base/legacy_checks/hp_proliant_fans.py
+++ b/cmk/base/legacy_checks/hp_proliant_fans.py
@@ -5,21 +5,66 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hp_proliant import (
- check_hp_proliant_fans,
- inventory_hp_proliant_fans,
-)
+from cmk.base.check_legacy_includes.hp_proliant import hp_proliant_status2nagios_map, sanitize_item
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hp_proliant import DETECT
+hp_proliant_fans_status_map = {1: "other", 2: "ok", 3: "degraded", 4: "failed"}
+hp_proliant_speed_map = {1: "other", 2: "normal", 3: "high"}
+hp_proliant_fans_locale = {
+ 1: "other",
+ 2: "unknown",
+ 3: "system",
+ 4: "systemBoard",
+ 5: "ioBoard",
+ 6: "cpu",
+ 7: "memory",
+ 8: "storage",
+ 9: "removableMedia",
+ 10: "powerSupply",
+ 11: "ambient",
+ 12: "chassis",
+ 13: "bridgeCard",
+}
+
def parse_hp_proliant_fans(string_table: StringTable) -> StringTable:
return string_table
+def inventory_hp_proliant_fans(info):
+ for line in [l for l in info if l[2] == "3"]:
+ label = hp_proliant_fans_locale.get(int(line[1]), "other")
+ yield sanitize_item(f"{line[0]} ({label})"), {}
+
+
+def check_hp_proliant_fans(item, params, info):
+ for line in info:
+ label = "other"
+ if len(line) > 1 and int(line[1]) in hp_proliant_fans_locale:
+ label = hp_proliant_fans_locale[int(line[1])]
+
+ if sanitize_item(f"{line[0]} ({label})") == item:
+ index, _name, _present, speed, status, currentSpeed = line
+ snmp_status = hp_proliant_fans_status_map[int(status)]
+ status = hp_proliant_status2nagios_map[snmp_status]
+
+ detailOutput = ""
+ perfdata = []
+ if currentSpeed != "":
+ detailOutput = ", RPM: %s" % currentSpeed
+ perfdata = [("temp", int(currentSpeed))]
+
+ return (
+ status,
+ f'FAN Sensor {index} "{label}", Speed is {hp_proliant_speed_map[int(speed)]}, State is {snmp_status}{detailOutput}',
+ perfdata,
+ )
+ return (3, "item not found in snmp data")
+
+
check_info["hp_proliant_fans"] = LegacyCheckDefinition(
parse_function=parse_hp_proliant_fans,
detect=DETECT,
diff --git a/cmk/base/legacy_checks/hp_proliant_raid.py b/cmk/base/legacy_checks/hp_proliant_raid.py
index 57f560c0b91..9115494f112 100644
--- a/cmk/base/legacy_checks/hp_proliant_raid.py
+++ b/cmk/base/legacy_checks/hp_proliant_raid.py
@@ -6,7 +6,7 @@
import typing
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.hp_proliant import sanitize_item
from cmk.base.config import check_info
@@ -68,7 +68,7 @@ def check_hp_proliant_raid(item, _no_params, parsed):
state, state_readable = map_states.get(raid_stats.status, (3, "unknown"))
yield state, f"Status: {state_readable}"
- yield 0, f"Logical volume size: {get_bytes_human_readable(raid_stats.size_bytes)}"
+ yield 0, f"Logical volume size: {render.bytes(raid_stats.size_bytes)}"
# From CPQIDA-MIB:
# This value is the percent complete of the rebuild.
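
# NOTE: get_bytes_human_readable is replaced by render.bytes from
# cmk.agent_based.v2 throughout this changeset. Roughly what that rendering
# does, as a standalone approximation (the library's exact rounding and unit
# choice may differ):

def bytes_approx(n: float) -> str:
    # Binary steps with short IEC units, two decimals.
    for unit in ("B", "KiB", "MiB", "GiB"):
        if abs(n) < 1024:
            return f"{n:.2f} {unit}"
        n /= 1024
    return f"{n:.2f} TiB"


print(bytes_approx(34359738368))  # -> '32.00 GiB'
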
diff --git a/cmk/base/legacy_checks/hp_proliant_temp.py b/cmk/base/legacy_checks/hp_proliant_temp.py
index ac5f551e050..be293183bcc 100644
--- a/cmk/base/legacy_checks/hp_proliant_temp.py
+++ b/cmk/base/legacy_checks/hp_proliant_temp.py
@@ -5,21 +5,84 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hp_proliant import (
- check_hp_proliant_temp,
- inventory_hp_proliant_temp,
-)
+from cmk.base.check_legacy_includes.hp_proliant import hp_proliant_status2nagios_map
+from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.hp_proliant import DETECT
+hp_proliant_locale = {
+ 1: "other",
+ 2: "unknown",
+ 3: "system",
+ 4: "systemBoard",
+ 5: "ioBoard",
+ 6: "cpu",
+ 7: "memory",
+ 8: "storage",
+ 9: "removableMedia",
+ 10: "powerSupply",
+ 11: "ambient",
+ 12: "chassis",
+ 13: "bridgeCard",
+ 14: "managementBoard",
+ 15: "backplane",
+ 16: "networkSlot",
+ 17: "bladeSlot",
+ 18: "virtual",
+}
+
+hp_proliant_status_map = {
+ 1: "unknown",
+ 2: "ok",
+ 3: "degraded",
+ 4: "failed",
+ 5: "disabled",
+}
+
def parse_hp_proliant_temp(string_table: StringTable) -> StringTable:
return string_table
+def format_hp_proliant_name(line):
+ return f"{line[0]} ({hp_proliant_locale[int(line[1])]})"
+
+
+def inventory_hp_proliant_temp(info):
+ for line in info:
+ if line[-1] != "1":
+ # other(1): Temperature could not be determined
+ yield format_hp_proliant_name(line), {}
+
+
+def check_hp_proliant_temp(item, params, info):
+ for line in info:
+ if format_hp_proliant_name(line) == item:
+ value, threshold, status = line[2:]
+
+ # This case means no threshold available and
+ # the devices' web interface displays "N/A"
+ if threshold in ("-99", "0"):
+ devlevels = None
+ else:
+ threshold = float(threshold)
+ devlevels = (threshold, threshold)
+
+ snmp_status = hp_proliant_status_map[int(status)]
+
+ return check_temperature(
+ float(value),
+ params,
+ "hp_proliant_temp_%s" % item,
+ dev_levels=devlevels,
+ dev_status=hp_proliant_status2nagios_map[snmp_status],
+ dev_status_name="Unit: %s" % snmp_status,
+ )
+ return 3, "item not found in snmp data"
+
+
check_info["hp_proliant_temp"] = LegacyCheckDefinition(
parse_function=parse_hp_proliant_temp,
detect=DETECT,
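
# NOTE: the inlined temperature check forwards the device's own threshold as
# dev_levels and treats "-99" and "0" as "no threshold" (the device's web
# interface shows N/A in that case). A compact sketch of just that decision:

def device_levels(threshold: str):
    # "-99" and "0" mean the sensor reports no usable threshold.
    if threshold in ("-99", "0"):
        return None
    t = float(threshold)
    return (t, t)  # device limit used as both warn and crit


print(device_levels("42"))   # -> (42.0, 42.0)
print(device_levels("-99"))  # -> None
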
diff --git a/cmk/base/legacy_checks/hp_sts_drvbox.py b/cmk/base/legacy_checks/hp_sts_drvbox.py
index 4d3b23636db..70ad4b5a400 100644
--- a/cmk/base/legacy_checks/hp_sts_drvbox.py
+++ b/cmk/base/legacy_checks/hp_sts_drvbox.py
@@ -11,8 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
hp_sts_drvbox_type_map = {
"1": "other",
diff --git a/cmk/base/legacy_checks/hp_webmgmt_status.py b/cmk/base/legacy_checks/hp_webmgmt_status.py
index e57934a0fe9..282137b0ca3 100644
--- a/cmk/base/legacy_checks/hp_webmgmt_status.py
+++ b/cmk/base/legacy_checks/hp_webmgmt_status.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, exists, SNMPTree, startswith, StringTable
def inventory_hp_webmgmt_status(info):
diff --git a/cmk/base/legacy_checks/hpux_lvm.py b/cmk/base/legacy_checks/hpux_lvm.py
deleted file mode 100644
index 05053a07219..00000000000
--- a/cmk/base/legacy_checks/hpux_lvm.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# <<<hpux_lvm>>>
-# vg_name=/dev/vg00:vg_write_access=read,write:vg_status=available:max_lv=255:\
-# cur_lv=8:open_lv=8:max_pv=16:cur_pv=4:act_pv=4:max_pe_per_pv=4384:vgda=8:pe_size=16:\
-# total_pe=17388:alloc_pe=13920:free_pe=3468:total_pvg=0:total_spare_pvs=0:total_spare_pvs_in_use=0:vg_version=1.0.0
-# lv_name=/dev/vg00/lvol1:lv_status=available,syncd:lv_size=1792:current_le=112:allocated_pe=224:used_pv=2
-# lv_name=/dev/vg00/lvol2:lv_status=available,syncd:lv_size=32768:current_le=2048:allocated_pe=4096:used_pv=2
-# lv_name=/dev/vg00/lvol3:lv_status=available,syncd:lv_size=2048:current_le=128:allocated_pe=256:used_pv=2
-# lv_name=/dev/vg00/lvol4:lv_status=available,syncd:lv_size=32768:current_le=2048:allocated_pe=4096:used_pv=2
-# lv_name=/dev/vg00/lvol5:lv_status=available,syncd:lv_size=12288:current_le=768:allocated_pe=1536:used_pv=2
-# lv_name=/dev/vg00/lvol6:lv_status=available,syncd:lv_size=5120:current_le=320:allocated_pe=640:used_pv=2
-# lv_name=/dev/vg00/lvol7:lv_status=available,syncd:lv_size=12288:current_le=768:allocated_pe=1536:used_pv=2
-# lv_name=/dev/vg00/lvol8:lv_status=available,syncd:lv_size=12288:current_le=768:allocated_pe=1536:used_pv=3
-# pv_name=/dev/disk/disk7_p2:pv_status=available:total_pe=4319:free_pe=0:autoswitch=On:proactive_polling=On
-# pv_name=/dev/disk/disk9:pv_status=available:total_pe=4375:free_pe=1734:autoswitch=On:proactive_polling=On
-# pv_name=/dev/disk/disk11_p2:pv_status=available:total_pe=4319:free_pe=175:autoswitch=On:proactive_polling=On
-# pv_name=/dev/disk/disk10:pv_status=available:total_pe=4375:free_pe=1559:autoswitch=On:proactive_polling=On
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def inventory_hpux_lvm(info):
- inventory = []
- for line in info:
- if line[0].startswith("lv_name="):
- lv_name = line[0].split("=")[1]
- inventory.append((lv_name, None))
- return inventory
-
-
-def check_hpux_lvm(item, params, info):
- for line in info:
- if line[0].startswith("vg_name"):
- vg_name = line[0].split("=")[1]
- elif line[0].startswith("lv_name"):
- lv_name = line[0].split("=")[1]
- if lv_name == item:
- status = line[1].split("=")[1]
- infotext = f"status is {status} (VG = {vg_name})"
- if status == "available,syncd":
- return (0, infotext)
- return (2, infotext)
-
- return (3, "no such volume found")
-
-
-def parse_hpux_lvm(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["hpux_lvm"] = LegacyCheckDefinition(
- parse_function=parse_hpux_lvm,
- service_name="Logical Volume %s",
- discovery_function=inventory_hpux_lvm,
- check_function=check_hpux_lvm,
-)
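
# NOTE: for reference, the deleted hpux_lvm check expects a colon-separated
# agent section, so each row arrives pre-split into key=value tokens. A
# self-contained sketch of extracting the fields the check used, with the
# sample line from the comment above:

line = "lv_name=/dev/vg00/lvol1:lv_status=available,syncd:lv_size=1792"
fields = dict(token.split("=", 1) for token in line.split(":"))
print(fields["lv_name"], fields["lv_status"])
# -> /dev/vg00/lvol1 available,syncd
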
diff --git a/cmk/base/legacy_checks/hpux_serviceguard.py b/cmk/base/legacy_checks/hpux_serviceguard.py
index a7166c897d0..4abe7fcbff7 100644
--- a/cmk/base/legacy_checks/hpux_serviceguard.py
+++ b/cmk/base/legacy_checks/hpux_serviceguard.py
@@ -24,7 +24,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_hpux_serviceguard(info):
diff --git a/cmk/base/legacy_checks/hpux_snmp_cs.py b/cmk/base/legacy_checks/hpux_snmp_cs.py
index 6276d661214..ccc2d8e2c33 100644
--- a/cmk/base/legacy_checks/hpux_snmp_cs.py
+++ b/cmk/base/legacy_checks/hpux_snmp_cs.py
@@ -44,8 +44,8 @@
OIDEnd,
SNMPTree,
startswith,
+ StringTable,
)
-from cmk.agent_based.v2.type_defs import StringTable
def inventory_hpux_snmp_cpu(info):
diff --git a/cmk/base/legacy_checks/hr_cpu.py b/cmk/base/legacy_checks/hr_cpu.py
index 49e78746262..694169bda80 100644
--- a/cmk/base/legacy_checks/hr_cpu.py
+++ b/cmk/base/legacy_checks/hr_cpu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib import ucd_hr_detection
# .1.3.6.1.2.1.25.3.3.1.2.768 1 --> HOST-RESOURCES-MIB::hrProcessorLoad.768
@@ -37,7 +36,7 @@ def check_hr_cpu(_no_item, params, info):
return check_cpu_util(util, params, cores=cores)
-# Migration NOTE: Create a separate section, but a common check plugin for
+# Migration NOTE: Create a separate section, but a common check plug-in for
# tplink_cpu, hr_cpu, cisco_nexus_cpu, bintec_cpu, winperf_processor,
# lxc_container_cpu, docker_container_cpu.
# Migration via cmk/update_config.py!
diff --git a/cmk/base/legacy_checks/huawei_osn_fan.py b/cmk/base/legacy_checks/huawei_osn_fan.py
index 6a831ad06f7..6cb7307e4e2 100644
--- a/cmk/base/legacy_checks/huawei_osn_fan.py
+++ b/cmk/base/legacy_checks/huawei_osn_fan.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.huawei import DETECT_HUAWEI_OSN
diff --git a/cmk/base/legacy_checks/huawei_osn_laser.py b/cmk/base/legacy_checks/huawei_osn_laser.py
index cec70c3b13e..c87a8cabf3f 100644
--- a/cmk/base/legacy_checks/huawei_osn_laser.py
+++ b/cmk/base/legacy_checks/huawei_osn_laser.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.huawei import DETECT_HUAWEI_OSN
# The dBm should not get too low. So we check only for lower levels
diff --git a/cmk/base/legacy_checks/huawei_osn_power.py b/cmk/base/legacy_checks/huawei_osn_power.py
index 6b22fcc1b4a..b865a976a86 100644
--- a/cmk/base/legacy_checks/huawei_osn_power.py
+++ b/cmk/base/legacy_checks/huawei_osn_power.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.huawei import DETECT_HUAWEI_OSN
# The typical OSN power unit delivers 750 W max
diff --git a/cmk/base/legacy_checks/huawei_osn_temp.py b/cmk/base/legacy_checks/huawei_osn_temp.py
index 29b595e097b..e59b2f2c79f 100644
--- a/cmk/base/legacy_checks/huawei_osn_temp.py
+++ b/cmk/base/legacy_checks/huawei_osn_temp.py
@@ -7,8 +7,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.huawei import DETECT_HUAWEI_OSN
# The laser should not get hotter than 70°C
diff --git a/cmk/base/legacy_checks/huawei_switch_psu.py b/cmk/base/legacy_checks/huawei_switch_psu.py
index 901683c0559..a243090639b 100644
--- a/cmk/base/legacy_checks/huawei_switch_psu.py
+++ b/cmk/base/legacy_checks/huawei_switch_psu.py
@@ -12,8 +12,7 @@
)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.huawei import DETECT_HUAWEI_SWITCH
huawei_switch_hw_oper_state_map = {
diff --git a/cmk/base/legacy_checks/huawei_switch_temp.py b/cmk/base/legacy_checks/huawei_switch_temp.py
index 5a1e0a395ae..a64e61300bf 100644
--- a/cmk/base/legacy_checks/huawei_switch_temp.py
+++ b/cmk/base/legacy_checks/huawei_switch_temp.py
@@ -13,8 +13,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature, TempParamType
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.huawei import DETECT_HUAWEI_SWITCH
diff --git a/cmk/base/legacy_checks/hwg_humidity.py b/cmk/base/legacy_checks/hwg_humidity.py
index c98b5d6f8bb..7b670823889 100644
--- a/cmk/base/legacy_checks/hwg_humidity.py
+++ b/cmk/base/legacy_checks/hwg_humidity.py
@@ -5,16 +5,30 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hwg import (
- check_hwg_humidity,
- HWG_HUMIDITY_DEFAULTLEVELS,
- inventory_hwg_humidity,
- parse_hwg,
-)
+from cmk.base.check_legacy_includes.humidity import check_humidity
+from cmk.base.check_legacy_includes.hwg import parse_hwg
from cmk.base.config import check_info
from cmk.agent_based.v2 import contains, SNMPTree
+HWG_HUMIDITY_DEFAULTLEVELS = {"levels": (60.0, 70.0)}
+
+
+def inventory_hwg_humidity(parsed):
+ for index, attrs in parsed.items():
+ if attrs.get("humidity"):
+ yield index, {}
+
+
+def check_hwg_humidity(item, params, parsed):
+ if not (data := parsed.get(item)):
+ return
+
+ status, infotext, perfdata = check_humidity(data["humidity"], params)
+ infotext += " (Description: {}, Status: {})".format(data["descr"], data["dev_status_name"])
+ yield status, infotext, perfdata
+
+
check_info["hwg_humidity"] = LegacyCheckDefinition(
detect=contains(".1.3.6.1.2.1.1.1.0", "hwg"),
fetch=SNMPTree(
@@ -28,3 +42,12 @@
check_ruleset_name="humidity",
check_default_parameters=HWG_HUMIDITY_DEFAULTLEVELS,
)
+
+check_info["hwg_ste2.humidity"] = LegacyCheckDefinition(
+ service_name="Humidity %s",
+ sections=["hwg_ste2"],
+ discovery_function=inventory_hwg_humidity,
+ check_function=check_hwg_humidity,
+ check_ruleset_name="humidity",
+ check_default_parameters=HWG_HUMIDITY_DEFAULTLEVELS,
+)
diff --git a/cmk/base/legacy_checks/hwg_ste2.py b/cmk/base/legacy_checks/hwg_ste2.py
deleted file mode 100644
index 685a86e861e..00000000000
--- a/cmk/base/legacy_checks/hwg_ste2.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hwg import (
- check_hwg_humidity,
- check_hwg_temp,
- HWG_HUMIDITY_DEFAULTLEVELS,
- HWG_TEMP_DEFAULTLEVELS,
- inventory_hwg_humidity,
- inventory_hwg_temp,
- parse_hwg,
-)
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import contains, SNMPTree
-
-check_info["hwg_ste2"] = LegacyCheckDefinition(
- detect=contains(".1.3.6.1.2.1.1.1.0", "STE2"),
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.21796.4.9.3.1",
- oids=["1", "2", "3", "4", "7"],
- ),
- parse_function=parse_hwg,
- service_name="Temperature %s",
- discovery_function=inventory_hwg_temp,
- check_function=check_hwg_temp,
- check_ruleset_name="temperature",
- check_default_parameters=HWG_TEMP_DEFAULTLEVELS,
-)
-
-
-check_info["hwg_ste2.humidity"] = LegacyCheckDefinition(
- service_name="Humidity %s",
- sections=["hwg_ste2"],
- discovery_function=inventory_hwg_humidity,
- check_function=check_hwg_humidity,
- check_ruleset_name="humidity",
- check_default_parameters=HWG_HUMIDITY_DEFAULTLEVELS,
-)
diff --git a/cmk/base/legacy_checks/hwg_temp.py b/cmk/base/legacy_checks/hwg_temp.py
index deadf56d10e..fa4c23c84fe 100644
--- a/cmk/base/legacy_checks/hwg_temp.py
+++ b/cmk/base/legacy_checks/hwg_temp.py
@@ -5,16 +5,53 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.hwg import (
- check_hwg_temp,
- HWG_TEMP_DEFAULTLEVELS,
- inventory_hwg_temp,
- parse_hwg,
-)
+from cmk.base.check_legacy_includes.hwg import parse_hwg
+from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
from cmk.agent_based.v2 import contains, SNMPTree
+HWG_TEMP_DEFAULTLEVELS = {"levels": (30.0, 35.0)}
+
+READABLE_STATES = {
+ "invalid": 3,
+ "normal": 0,
+ "out of range low": 2,
+ "out of range high": 2,
+ "alarm low": 2,
+ "alarm high": 2,
+}
+
+
+def inventory_hwg_temp(parsed):
+ for index, attrs in parsed.items():
+ if attrs.get("temperature") and attrs["dev_status_name"] not in ["invalid", ""]:
+ yield index, {}
+
+
+def check_hwg_temp(item, params, parsed):
+ if not (data := parsed.get(item)):
+ return
+ state = READABLE_STATES.get(data["dev_status_name"], 3)
+ state_readable = data["dev_status_name"]
+ temp = data["temperature"]
+ if temp is None:
+ yield state, "Status: %s" % state_readable
+ return
+
+ state, infotext, perfdata = check_temperature(
+ temp,
+ params,
+ "hwg_temp_%s" % item,
+ dev_unit=data["dev_unit"],
+ dev_status=state,
+ dev_status_name=state_readable,
+ )
+
+ infotext += " (Description: {}, Status: {})".format(data["descr"], data["dev_status_name"])
+    yield state, infotext, perfdata
+
+
check_info["hwg_temp"] = LegacyCheckDefinition(
detect=contains(".1.3.6.1.2.1.1.1.0", "hwg"),
fetch=SNMPTree(
@@ -28,3 +65,17 @@
check_ruleset_name="temperature",
check_default_parameters=HWG_TEMP_DEFAULTLEVELS,
)
+
+check_info["hwg_ste2"] = LegacyCheckDefinition(
+ detect=contains(".1.3.6.1.2.1.1.1.0", "STE2"),
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.21796.4.9.3.1",
+ oids=["1", "2", "3", "4", "7"],
+ ),
+ parse_function=parse_hwg,
+ service_name="Temperature %s",
+ discovery_function=inventory_hwg_temp,
+ check_function=check_hwg_temp,
+ check_ruleset_name="temperature",
+ check_default_parameters=HWG_TEMP_DEFAULTLEVELS,
+)
diff --git a/cmk/base/legacy_checks/hyperv_checkpoints.py b/cmk/base/legacy_checks/hyperv_checkpoints.py
index ef0ef640e3a..41a7d6aa9a7 100644
--- a/cmk/base/legacy_checks/hyperv_checkpoints.py
+++ b/cmk/base/legacy_checks/hyperv_checkpoints.py
@@ -9,10 +9,10 @@
# c85ae17b-1a6c-4a34-949a-a1b9385ef67a 2040
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
def inventory_hyperv_checkpoints(info):
@@ -26,32 +26,25 @@ def check_hyperv_checkpoints(item, params, info):
continue
snapshots.append((line[0], int(line[1])))
- if snapshots:
- yield 0, "%s checkpoints" % len(snapshots)
-
- # We assume that the last snapshot is the last line
- # of the agent output
- for title, key, snapshot in [
- ("Oldest", "age_oldest", max(snapshots, key=lambda x: x[1])),
- ("Last", "age", snapshots[-1]),
- ]:
- name, age = snapshot
- infotext = f"{title}: {get_age_human_readable(age)} ({name})"
- warn, crit = params.get(key, (None, None))
- if crit is not None and age >= crit:
- state = 2
- elif warn is not None and age >= warn:
- state = 1
- else:
- state = 0
- if state:
- infotext += " (warn/crit at {}/{})".format(
- get_age_human_readable(warn),
- get_age_human_readable(crit),
- )
- yield state, infotext, [(key, age, warn, crit)]
- else:
- yield 0, "No Checkpoints found"
+ yield 0, "%s checkpoints" % len(snapshots)
+
+ if not snapshots:
+ return
+
+ # We assume that the last snapshot is the last line
+ # of the agent output
+ for title, key, snapshot in [
+ ("Oldest", "age_oldest", max(snapshots, key=lambda x: x[1])),
+ ("Last", "age", snapshots[-1]),
+ ]:
+ name, age = snapshot
+ yield check_levels(
+ age,
+ key,
+ params.get(key),
+ human_readable_func=render.timespan,
+ infoname=f"{title} ({name})",
+ )
def parse_hyperv_checkpoints(string_table: StringTable) -> StringTable:
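
# NOTE: the rewrite keeps the old selection logic and only swaps the
# hand-rolled threshold code for check_levels with render.timespan. The two
# reported snapshots are picked like this (ages in seconds; the "Last"
# heuristic assumes the agent prints snapshots in order, and all ids except
# the first are hypothetical):

snapshots = [
    ("c85ae17b-1a6c-4a34-949a-a1b9385ef67a", 2040),
    ("11111111-hypothetical-id-aaaa", 86400),
    ("22222222-hypothetical-id-bbbb", 900),
]
oldest = max(snapshots, key=lambda s: s[1])  # largest age wins
last = snapshots[-1]                         # assumed newest = last agent line
print(oldest)  # -> ('11111111-hypothetical-id-aaaa', 86400)
print(last)    # -> ('22222222-hypothetical-id-bbbb', 900)
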
diff --git a/cmk/base/legacy_checks/hyperv_vms.py b/cmk/base/legacy_checks/hyperv_vms.py
index 4336931866d..a111a0c59d3 100644
--- a/cmk/base/legacy_checks/hyperv_vms.py
+++ b/cmk/base/legacy_checks/hyperv_vms.py
@@ -24,7 +24,7 @@
# z4065084 Running 1.10:28:39 Operating normally
# z4133235 Running 1.03:38:23 Operating normally
-# A Version with a plugin that uses tab as seperator and quotes the strings:
+# A version with a plug-in that uses tab as separator and quotes the strings:
# <<<hyperv_vms>>>
# "Name" "State" "Uptime" "Status"
# "z4058013" "Running" "06:05:16" "Operating normally"
diff --git a/cmk/base/legacy_checks/ibm_imm_fan.py b/cmk/base/legacy_checks/ibm_imm_fan.py
index 54f9eccdf33..5dfd1b02cad 100644
--- a/cmk/base/legacy_checks/ibm_imm_fan.py
+++ b/cmk/base/legacy_checks/ibm_imm_fan.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ibm import DETECT_IBM_IMM
@@ -69,6 +68,6 @@ def parse_ibm_imm_fan(string_table: StringTable) -> StringTable:
check_function=check_ibm_imm_fan,
check_ruleset_name="hw_fans_perc",
check_default_parameters={
- "levels_lower": (28, 25), # Just a guess. Please give feedback.
+ "levels_lower": (28.0, 25.0), # Just a guess. Please give feedback.
},
)
diff --git a/cmk/base/legacy_checks/ibm_imm_health.py b/cmk/base/legacy_checks/ibm_imm_health.py
index 707f9691877..3ad2ef5cc76 100644
--- a/cmk/base/legacy_checks/ibm_imm_health.py
+++ b/cmk/base/legacy_checks/ibm_imm_health.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ibm import DETECT_IBM_IMM
diff --git a/cmk/base/legacy_checks/ibm_imm_voltage.py b/cmk/base/legacy_checks/ibm_imm_voltage.py
index 68b43d23358..d3ff436f551 100644
--- a/cmk/base/legacy_checks/ibm_imm_voltage.py
+++ b/cmk/base/legacy_checks/ibm_imm_voltage.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ibm import DETECT_IBM_IMM
diff --git a/cmk/base/legacy_checks/ibm_mq_queues.py b/cmk/base/legacy_checks/ibm_mq_queues.py
index bb61efb21a8..b79f103acf8 100644
--- a/cmk/base/legacy_checks/ibm_mq_queues.py
+++ b/cmk/base/legacy_checks/ibm_mq_queues.py
@@ -8,10 +8,12 @@
import dateutil.parser
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition, regex
+from cmk.base.check_api import check_levels, LegacyCheckDefinition, regex
from cmk.base.check_legacy_includes.ibm_mq import is_ibm_mq_service_vanished
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
# <<<ibm_mq_queues>>>
# QMNAME(MY.TEST) STATUS(RUNNING)
# 5724-H72 (C) Copyright IBM Corp. 1994, 2015.
@@ -143,7 +145,7 @@ def ibm_mq_msg_age(msg_age, params):
int(msg_age),
"msgage",
params.get("msgage"),
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname=label,
)
@@ -161,7 +163,7 @@ def ibm_mq_last_age(mq_date, mq_time, agent_timestamp, label, key, params):
input_time = dateutil.parser.parse(mq_datetime, default=agent_timestamp)
age = (agent_timestamp - input_time).total_seconds()
return check_levels(
- age, None, params.get(key), human_readable_func=get_age_human_readable, infoname=label
+ age, None, params.get(key), human_readable_func=render.timespan, infoname=label
)
@@ -171,9 +173,7 @@ def ibm_mq_procs(cnt, label, levels_key, metric, params):
if wato:
levels += wato.get("upper", (None, None))
levels += wato.get("lower", (None, None))
- return check_levels(
- int(cnt), metric, levels, factor=1, scale=1, human_readable_func=int, infoname=label
- )
+ return check_levels(int(cnt), metric, levels, human_readable_func=int, infoname=label)
def ibm_mq_get_qtime(qtime, label, key):
@@ -182,7 +182,7 @@ def ibm_mq_get_qtime(qtime, label, key):
info_value = "n/a"
else:
time_in_seconds = int(qtime) / 1000000
- info_value = get_age_human_readable(time_in_seconds)
+ info_value = render.timespan(time_in_seconds)
infotext = f"{label}: {info_value}"
perfdata = [(key, time_in_seconds, None, None)]
return (0, infotext, perfdata)
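
# NOTE: one unit detail behind ibm_mq_get_qtime: IBM MQ reports QTIME in
# microseconds, hence the division before rendering. For example:

qtime = "2500000"                # microseconds, as delivered by IBM MQ
seconds = int(qtime) / 1000000   # -> 2.5
print(seconds)
# render.timespan(2.5) then turns this into a human-readable age string.
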
diff --git a/cmk/base/legacy_checks/ibm_rsa_health.py b/cmk/base/legacy_checks/ibm_rsa_health.py
index 5ce7013fd90..9980611d4d7 100644
--- a/cmk/base/legacy_checks/ibm_rsa_health.py
+++ b/cmk/base/legacy_checks/ibm_rsa_health.py
@@ -19,8 +19,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_ibm_rsa_health(info):
diff --git a/cmk/base/legacy_checks/ibm_storage_ts.py b/cmk/base/legacy_checks/ibm_storage_ts.py
index 3b28295541f..50fd3f33af8 100644
--- a/cmk/base/legacy_checks/ibm_storage_ts.py
+++ b/cmk/base/legacy_checks/ibm_storage_ts.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
def inventory_ibm_storage_ts(info):
diff --git a/cmk/base/legacy_checks/ibm_svc_eventlog.py b/cmk/base/legacy_checks/ibm_svc_eventlog.py
index 6e4002378f8..a4ba8e9ad99 100644
--- a/cmk/base/legacy_checks/ibm_svc_eventlog.py
+++ b/cmk/base/legacy_checks/ibm_svc_eventlog.py
@@ -18,7 +18,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_ibm_svc_eventlog(info):
diff --git a/cmk/base/legacy_checks/ibm_svc_nodestats.py b/cmk/base/legacy_checks/ibm_svc_nodestats.py
index 232b5a13c25..9493dca56d8 100644
--- a/cmk/base/legacy_checks/ibm_svc_nodestats.py
+++ b/cmk/base/legacy_checks/ibm_svc_nodestats.py
@@ -6,12 +6,12 @@
# mypy: disable-error-code="var-annotated"
-from cmk.base.check_api import LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.check_legacy_includes.ibm_svc import parse_ibm_svc_with_header
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render
+from cmk.agent_based.v2 import render, Service
# newer Firmware versions may return decimal values, not just integer
# <<>>
diff --git a/cmk/base/legacy_checks/ibm_svc_system.py b/cmk/base/legacy_checks/ibm_svc_system.py
index e3cbb65e756..75068be99f7 100644
--- a/cmk/base/legacy_checks/ibm_svc_system.py
+++ b/cmk/base/legacy_checks/ibm_svc_system.py
@@ -78,7 +78,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_ibm_svc_system(info):
diff --git a/cmk/base/legacy_checks/ibm_tl_changer_devices.py b/cmk/base/legacy_checks/ibm_tl_changer_devices.py
index 173801a0165..b5ff1b2352c 100644
--- a/cmk/base/legacy_checks/ibm_tl_changer_devices.py
+++ b/cmk/base/legacy_checks/ibm_tl_changer_devices.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.ibm_tape_library import ibm_tape_library_get_device_state
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
# .1.3.6.1.4.1.14851.3.1.11.2.1.4.1 Logical_Library: 1 --> SNIA-SML-MIB::changerDevice-ElementName.1
# .1.3.6.1.4.1.14851.3.1.11.2.1.4.2 Logical_Library: LTO6 --> SNIA-SML-MIB::changerDevice-ElementName.2
diff --git a/cmk/base/legacy_checks/infoblox_dhcp_stats.py b/cmk/base/legacy_checks/infoblox_dhcp_stats.py
index 18a62acfd17..e2d722f7e4d 100644
--- a/cmk/base/legacy_checks/infoblox_dhcp_stats.py
+++ b/cmk/base/legacy_checks/infoblox_dhcp_stats.py
@@ -5,14 +5,10 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.infoblox import (
- check_infoblox_statistics,
- inventory_infoblox_statistics,
-)
+from cmk.base.check_legacy_includes.infoblox import check_infoblox_statistics
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.infoblox import DETECT_INFOBLOX
# .1.3.6.1.4.1.7779.3.1.1.4.1.3.1.0 0 --> IB-DHCPONE-MIB::ibDhcpTotalNoOfDiscovers.0
@@ -26,6 +22,10 @@
# .1.3.6.1.4.1.7779.3.1.1.4.1.3.9.0 0 --> IB-DHCPONE-MIB::ibDhcpTotalNoOfOthers.0
+def inventory_infoblox_statistics(info):
+ return [(None, None)]
+
+
def check_infoblox_dhcp_stats(_no_item, _no_params, info):
discovers, requests, releases, offers, acks, nacks, declines, informs, others = map(
int, info[0]
diff --git a/cmk/base/legacy_checks/infoblox_dns_stats.py b/cmk/base/legacy_checks/infoblox_dns_stats.py
index 129209c215d..c9345b1dc80 100644
--- a/cmk/base/legacy_checks/infoblox_dns_stats.py
+++ b/cmk/base/legacy_checks/infoblox_dns_stats.py
@@ -5,17 +5,17 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.infoblox import (
- check_infoblox_statistics,
- inventory_infoblox_statistics,
-)
+from cmk.base.check_legacy_includes.infoblox import check_infoblox_statistics
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.infoblox import DETECT_INFOBLOX
+def inventory_infoblox_statistics(info):
+ return [(None, None)]
+
+
def check_infoblox_dns_stats(_no_item, _no_params, info):
successes, referrals, nxrrset, nxdomain, recursion, failures = map(int, info[0])
diff --git a/cmk/base/legacy_checks/infoblox_grid_status.py b/cmk/base/legacy_checks/infoblox_grid_status.py
index f0694735691..6830d315ffb 100644
--- a/cmk/base/legacy_checks/infoblox_grid_status.py
+++ b/cmk/base/legacy_checks/infoblox_grid_status.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.infoblox import DETECT_INFOBLOX
# .1.3.6.1.4.1.7779.3.1.1.2.1.15.0 X.X.X.X --> IB-PLATFORMONE-MIB::ibGridMasterVIP.0
diff --git a/cmk/base/legacy_checks/infoblox_replication_status.py b/cmk/base/legacy_checks/infoblox_replication_status.py
index 94f94b3d3b6..54410d2f433 100644
--- a/cmk/base/legacy_checks/infoblox_replication_status.py
+++ b/cmk/base/legacy_checks/infoblox_replication_status.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.infoblox import DETECT_INFOBLOX
# .1.3.6.1.4.1.7779.3.1.1.2.1.2.1.1.X.X.X.X.X X.X.X.X --> IB-PLATFORMONE-MIB::ibNodeIPAddress."11.112.133.14"
diff --git a/cmk/base/legacy_checks/infoblox_temp.py b/cmk/base/legacy_checks/infoblox_temp.py
deleted file mode 100644
index c94eb0d328d..00000000000
--- a/cmk/base/legacy_checks/infoblox_temp.py
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-# mypy: disable-error-code="var-annotated"
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.temperature import check_temperature
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.plugins.lib.infoblox import DETECT_INFOBLOX
-
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.2.39 1 --> IB-PLATFORMONE-MIB::ibNodeServiceStatus.cpu1-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.2.40 5 --> IB-PLATFORMONE-MIB::ibNodeServiceStatus.cpu2-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.2.41 1 --> IB-PLATFORMONE-MIB::ibNodeServiceStatus.sys-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.3.39 CPU_TEMP: +36.00 C --> IB-PLATFORMONE-MIB::ibNodeServiceDesc.cpu1-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.3.40 No temperature information available. --> IB-PLATFORMONE-MIB::ibNodeServiceDesc.cpu2-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.3.41 SYS_TEMP: +34.00 C --> IB-PLATFORMONE-MIB::ibNodeServiceDesc.sys-temp
-
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.2.39 5 --> IB-PLATFORMONE-MIB::ibNodeServiceStatus.cpu1-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.2.40 5 --> IB-PLATFORMONE-MIB::ibNodeServiceStatus.cpu2-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.2.41 5 --> IB-PLATFORMONE-MIB::ibNodeServiceStatus.sys-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.3.39 --> IB-PLATFORMONE-MIB::ibNodeServiceDesc.cpu1-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.3.40 --> IB-PLATFORMONE-MIB::ibNodeServiceDesc.cpu2-temp
-# .1.3.6.1.4.1.7779.3.1.1.2.1.10.1.3.41 --> IB-PLATFORMONE-MIB::ibNodeServiceDesc.sys-temp
-
-# Suggested by customer
-
-
-def parse_infoblox_temp(string_table):
- if not all(string_table):
- return None
- map_states = {
- "1": (0, "working"),
- "2": (1, "warning"),
- "3": (2, "failed"),
- "4": (1, "inactive"),
- "5": (3, "unknown"),
- }
-
- parsed = {}
- # Just for a better handling
- for index, state, descr in list(
- zip(["", "1", "2", ""], string_table[0][0], string_table[1][0])
- )[1:]:
- if ":" not in descr:
- continue
-
- name, val_str = descr.split(":", 1)
- r_val, unit = val_str.split()
- val = float(r_val)
-
- what_name = f"{name} {index}"
- parsed.setdefault(
- what_name.strip(),
- {
- "state": map_states[state],
- "reading": val,
- "unit": unit.lower(),
- },
- )
-
- return parsed
-
-
-def inventory_infoblox_temp(parsed):
- yield from ((name, {}) for name in parsed)
-
-
-def check_infoblox_temp(item, params, parsed):
- if not (sensor := parsed.get(item)):
- return None
-
- devstate, devstatename = sensor["state"]
- return check_temperature(
- sensor["reading"],
- params,
- "infoblox_cpu_temp_%s" % item,
- dev_status=devstate,
- dev_status_name=devstatename,
- dev_unit=sensor["unit"],
- )
-
-
-check_info["infoblox_temp"] = LegacyCheckDefinition(
- detect=DETECT_INFOBLOX,
- fetch=[
- SNMPTree(
- base=".1.3.6.1.4.1.7779.3.1.1.2.1.10.1.2",
- oids=[OIDEnd(), "39", "40", "41"],
- ),
- SNMPTree(
- base=".1.3.6.1.4.1.7779.3.1.1.2.1.10.1.3",
- oids=[OIDEnd(), "39", "40", "41"],
- ),
- ],
- parse_function=parse_infoblox_temp,
- service_name="Temperature %s",
- discovery_function=inventory_infoblox_temp,
- check_function=check_infoblox_temp,
- check_ruleset_name="temperature",
- check_default_parameters={
- "levels": (40.0, 50.0),
- },
-)
diff --git a/cmk/base/legacy_checks/informix_dbspaces.py b/cmk/base/legacy_checks/informix_dbspaces.py
index 3340d33cf02..7549aa2ef11 100644
--- a/cmk/base/legacy_checks/informix_dbspaces.py
+++ b/cmk/base/legacy_checks/informix_dbspaces.py
@@ -6,9 +6,11 @@
# mypy: disable-error-code="var-annotated"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_informix_dbspaces(string_table):
parsed = {}
@@ -52,8 +54,8 @@ def check_informix_dbspaces(item, params, parsed):
used = size - free
infotext = "Data files: {}, Size: {}, Used: {}".format(
len(datafiles),
- get_bytes_human_readable(size),
- get_bytes_human_readable(used),
+ render.disksize(size),
+ render.disksize(used),
)
state = 0
if "levels" in params:
@@ -64,8 +66,8 @@ def check_informix_dbspaces(item, params, parsed):
state = 1
if state:
infotext += " (warn/crit at {}/{})".format(
- get_bytes_human_readable(warn),
- get_bytes_human_readable(crit),
+ render.disksize(warn),
+ render.disksize(crit),
)
yield state, infotext, [("tablespace_size", size), ("tablespace_used", used)]
diff --git a/cmk/base/legacy_checks/informix_logusage.py b/cmk/base/legacy_checks/informix_logusage.py
index da7107d53fd..d0cb47cd19f 100644
--- a/cmk/base/legacy_checks/informix_logusage.py
+++ b/cmk/base/legacy_checks/informix_logusage.py
@@ -6,9 +6,11 @@
# mypy: disable-error-code="var-annotated"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_informix_logusage(string_table):
parsed = {}
@@ -54,8 +56,8 @@ def check_informix_logusage(item, params, parsed):
infotext = "Files: {}, Size: {}, Used: {}".format(
logfiles,
- get_bytes_human_readable(size),
- get_bytes_human_readable(used),
+ render.bytes(size),
+ render.bytes(used),
)
state = 0
if "levels" in params:
@@ -66,8 +68,8 @@ def check_informix_logusage(item, params, parsed):
state = 1
if state:
infotext += " (warn/crit at {}/{})".format(
- get_bytes_human_readable(warn),
- get_bytes_human_readable(crit),
+ render.bytes(warn),
+ render.bytes(crit),
)
yield state, infotext, [
diff --git a/cmk/base/legacy_checks/informix_tabextents.py b/cmk/base/legacy_checks/informix_tabextents.py
index ecbcf8b1c27..9a4a800d51b 100644
--- a/cmk/base/legacy_checks/informix_tabextents.py
+++ b/cmk/base/legacy_checks/informix_tabextents.py
@@ -40,9 +40,7 @@ def check_informix_tabextents(item, params, parsed):
max_extents = -1
long_output = []
for entry in parsed[item]:
- extents = int(entry["extents"])
- if extents >= max_extents:
- max_extents = extents
+ max_extents = max(max_extents, int(entry["extents"]))
long_output.append(
"[{}/{}] Extents: {}, Rows: {}".format(
entry["db"], entry["tab"], entry["extents"], entry["nrows"]
diff --git a/cmk/base/legacy_checks/innovaphone_channels.py b/cmk/base/legacy_checks/innovaphone_channels.py
index a126f908bc3..c11613f6155 100644
--- a/cmk/base/legacy_checks/innovaphone_channels.py
+++ b/cmk/base/legacy_checks/innovaphone_channels.py
@@ -4,14 +4,14 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.innovaphone import check_innovaphone
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, StringTable
-def inventory_innovaphone_channels(string_table: StringTable) -> DiscoveryResult:
+def discover_innovaphone_channels(string_table: StringTable) -> DiscoveryResult:
yield from (Service(item=x[0]) for x in string_table if x[1] == "Up" and x[2] == "Up")
@@ -37,7 +37,7 @@ def parse_innovaphone_channels(string_table: StringTable) -> StringTable:
check_info["innovaphone_channels"] = LegacyCheckDefinition(
parse_function=parse_innovaphone_channels,
service_name="Channel %s",
- discovery_function=inventory_innovaphone_channels,
+ discovery_function=discover_innovaphone_channels,
check_function=check_innovaphone_channels,
check_default_parameters={
"levels": (75.0, 80.0),
diff --git a/cmk/base/legacy_checks/innovaphone_cpu.py b/cmk/base/legacy_checks/innovaphone_cpu.py
index 1556618b587..8108b54d01c 100644
--- a/cmk/base/legacy_checks/innovaphone_cpu.py
+++ b/cmk/base/legacy_checks/innovaphone_cpu.py
@@ -8,7 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_innovaphone_cpu(info):
diff --git a/cmk/base/legacy_checks/innovaphone_licenses.py b/cmk/base/legacy_checks/innovaphone_licenses.py
index 2b8d00d012d..46e505c1430 100644
--- a/cmk/base/legacy_checks/innovaphone_licenses.py
+++ b/cmk/base/legacy_checks/innovaphone_licenses.py
@@ -4,13 +4,13 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, savefloat, Service
+from cmk.base.check_api import LegacyCheckDefinition, savefloat
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, StringTable
-def inventory_innovaphone_licenses(string_table: StringTable) -> DiscoveryResult:
+def discover_innovaphone_licenses(string_table: StringTable) -> DiscoveryResult:
if string_table:
yield Service()
@@ -38,7 +38,7 @@ def parse_innovaphone_licenses(string_table: StringTable) -> StringTable:
check_info["innovaphone_licenses"] = LegacyCheckDefinition(
parse_function=parse_innovaphone_licenses,
service_name="Licenses",
- discovery_function=inventory_innovaphone_licenses,
+ discovery_function=discover_innovaphone_licenses,
check_function=check_innovaphone_licenses,
check_default_parameters={
"levels": (90.0, 95.0),
diff --git a/cmk/base/legacy_checks/innovaphone_mem.py b/cmk/base/legacy_checks/innovaphone_mem.py
index eac99dfb098..6d1eeb4a1ba 100644
--- a/cmk/base/legacy_checks/innovaphone_mem.py
+++ b/cmk/base/legacy_checks/innovaphone_mem.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
def inventory_innovaphone_mem(info):
@@ -18,7 +17,7 @@ def inventory_innovaphone_mem(info):
def check_innovaphone_mem(_no_item, params, info):
yield check_levels(
int(info[0][1]),
- "usage",
+ "mem_used_percent",
params["levels"],
human_readable_func=render.percent,
infoname="Current",
diff --git a/cmk/base/legacy_checks/innovaphone_priports_l2.py b/cmk/base/legacy_checks/innovaphone_priports_l2.py
index 7ba8e3314c6..b321a5ef51c 100644
--- a/cmk/base/legacy_checks/innovaphone_priports_l2.py
+++ b/cmk/base/legacy_checks/innovaphone_priports_l2.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
def inventory_innovaphone_priports_l2(info):
diff --git a/cmk/base/legacy_checks/innovaphone_temp.py b/cmk/base/legacy_checks/innovaphone_temp.py
index fda8593454d..c4280b09d13 100644
--- a/cmk/base/legacy_checks/innovaphone_temp.py
+++ b/cmk/base/legacy_checks/innovaphone_temp.py
@@ -8,7 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_innovaphone_temp(info):
diff --git a/cmk/base/legacy_checks/intel_true_scale_chassis_temp.py b/cmk/base/legacy_checks/intel_true_scale_chassis_temp.py
index 6f86fece646..e0a27ef24a2 100644
--- a/cmk/base/legacy_checks/intel_true_scale_chassis_temp.py
+++ b/cmk/base/legacy_checks/intel_true_scale_chassis_temp.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.intel import DETECT_INTEL_TRUE_SCALE
# .1.3.6.1.4.1.10222.2.1.5.1.0 1 --> ICS-CHASSIS-MIB::icsChassisTemperatureStatus.0
diff --git a/cmk/base/legacy_checks/intel_true_scale_fans.py b/cmk/base/legacy_checks/intel_true_scale_fans.py
index 2fad0edf004..28811ba9e63 100644
--- a/cmk/base/legacy_checks/intel_true_scale_fans.py
+++ b/cmk/base/legacy_checks/intel_true_scale_fans.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.intel import DETECT_INTEL_TRUE_SCALE
# .1.3.6.1.4.1.10222.2.1.6.5.1.2.6.1 Fan 201 --> ICS-CHASSIS-MIB::icsChassisFanDescription.6.1
diff --git a/cmk/base/legacy_checks/intel_true_scale_sensors_temp.py b/cmk/base/legacy_checks/intel_true_scale_sensors_temp.py
index f5f9fb7602b..a396869a337 100644
--- a/cmk/base/legacy_checks/intel_true_scale_sensors_temp.py
+++ b/cmk/base/legacy_checks/intel_true_scale_sensors_temp.py
@@ -127,9 +127,7 @@ def inventory_intel_true_scale_sensors_temp(parsed):
def check_intel_true_scale_sensors_temp(item, params, parsed):
if item in parsed:
- yield check_temperature_list(
- parsed[item]["temp"], params, "intel_true_scale_sensors_temp_%s" % item
- )
+ yield from check_temperature_list(parsed[item]["temp"], params)
check_info["intel_true_scale_sensors_temp"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/ipr400_in_voltage.py b/cmk/base/legacy_checks/ipr400_in_voltage.py
index 1e21d9c96c9..fe2702bfaf7 100644
--- a/cmk/base/legacy_checks/ipr400_in_voltage.py
+++ b/cmk/base/legacy_checks/ipr400_in_voltage.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_ipr400_in_voltage(info):
diff --git a/cmk/base/legacy_checks/ipr400_temp.py b/cmk/base/legacy_checks/ipr400_temp.py
index db8e70df864..30ab2f173ab 100644
--- a/cmk/base/legacy_checks/ipr400_temp.py
+++ b/cmk/base/legacy_checks/ipr400_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_ipr400_temp(info):
diff --git a/cmk/base/legacy_checks/ispro_sensors_digital.py b/cmk/base/legacy_checks/ispro_sensors_digital.py
index 5c190192aaa..63a3b24ed91 100644
--- a/cmk/base/legacy_checks/ispro_sensors_digital.py
+++ b/cmk/base/legacy_checks/ispro_sensors_digital.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ispro import DETECT_ISPRO_SENSORS
# .1.3.6.1.4.1.19011.1.3.2.1.3.1.3.1.2.1 "Water Sensor-R" --> ISPRO-MIB::isDeviceMonitorDigitalInName
diff --git a/cmk/base/legacy_checks/ispro_sensors_humid.py b/cmk/base/legacy_checks/ispro_sensors_humid.py
index fdb2e0ba1d8..7e602f66bcf 100644
--- a/cmk/base/legacy_checks/ispro_sensors_humid.py
+++ b/cmk/base/legacy_checks/ispro_sensors_humid.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.ispro import ispro_sensors_alarm_states
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ispro import DETECT_ISPRO_SENSORS
# .1.3.6.1.4.1.19011.1.3.2.1.3.1.2.1.2.1 "Humidity-R" --> ISPRO-MIB::isDeviceMonitorHumidityName
diff --git a/cmk/base/legacy_checks/ispro_sensors_temp.py b/cmk/base/legacy_checks/ispro_sensors_temp.py
index 6c0f3ae32c1..fd6818a5a1f 100644
--- a/cmk/base/legacy_checks/ispro_sensors_temp.py
+++ b/cmk/base/legacy_checks/ispro_sensors_temp.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ispro import DETECT_ISPRO_SENSORS
# .1.3.6.1.4.1.19011.1.3.2.1.3.1.1.1.2.1 "Temperature-R" --> ISPRO-MIB::isDeviceMonitorTemperatureName
diff --git a/cmk/base/legacy_checks/jar_signature.py b/cmk/base/legacy_checks/jar_signature.py
index 743d88798df..47ee5f4a69e 100644
--- a/cmk/base/legacy_checks/jar_signature.py
+++ b/cmk/base/legacy_checks/jar_signature.py
@@ -27,10 +27,10 @@
import time
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
def inventory_jar_signature(info):
@@ -89,14 +89,14 @@ def check_jar_signature(item, _no_params, info): # pylint: disable=too-many-bra
if expired_since >= 0:
status_text = "Certificate expired on {} ({} ago) ".format(
expiry_date_text,
- get_age_human_readable(expired_since),
+ render.timespan(expired_since),
)
state = 2
else:
status_text = "Certificate will expire on {} (in {})".format(
expiry_date_text,
- get_age_human_readable(-expired_since),
+ render.timespan(-expired_since),
)
if -expired_since <= crit:
state = 2
@@ -104,8 +104,8 @@ def check_jar_signature(item, _no_params, info): # pylint: disable=too-many-bra
state = 1
if state:
status_text += " (warn/crit below {}/{})".format(
- get_age_human_readable(warn),
- get_age_human_readable(crit),
+ render.timespan(warn),
+ render.timespan(crit),
)
return state, status_text
diff --git a/cmk/base/legacy_checks/jenkins_instance.py b/cmk/base/legacy_checks/jenkins_instance.py
deleted file mode 100644
index 9c75f121496..00000000000
--- a/cmk/base/legacy_checks/jenkins_instance.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# <<>>
-# {"quietingDown": false, "nodeDescription": "the master Jenkins node",
-# "numExecutors": 0, "mode": "NORMAL", "_class": "hudson.model.Hudson",
-# "useSecurity": true}
-
-
-import json
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-MAP_INSTANCE_STATE = {
- True: "yes",
- False: "no",
- "NORMAL": "normal",
- "EXCLUSIVE": "exclusive",
- None: "N/A",
-}
-
-
-def parse_jenkins_instance(string_table):
- parsed = {}
-
- for line in string_table:
- parsed.update(json.loads(line[0]))
-
- return parsed
-
-
-def inventory_jenkins_instance(parsed):
- yield None, {}
-
-
-def check_jenkins_instance(_no_item, _no_params, parsed):
- if not parsed:
- return
-
- inst_desc = parsed.get("nodeDescription")
- if inst_desc is not None:
- yield 0, "Description: %s" % inst_desc.title()
-
- for key, value, infotext in [
- ("quietingDown", False, "Quieting Down"),
- ("useSecurity", True, "Security used"),
- ]:
- state = 0
- parsed_data = parsed.get(key)
-
- if parsed_data is not None and parsed_data != value:
- state = 1
- elif parsed_data is None:
- state = 3
-
- yield state, f"{infotext}: {MAP_INSTANCE_STATE[parsed_data]}"
-
-
-check_info["jenkins_instance"] = LegacyCheckDefinition(
- parse_function=parse_jenkins_instance,
- service_name="Jenkins Instance",
- discovery_function=inventory_jenkins_instance,
- check_function=check_jenkins_instance,
-)
diff --git a/cmk/base/legacy_checks/jenkins_queue.py b/cmk/base/legacy_checks/jenkins_queue.py
deleted file mode 100644
index f15ceb5ace2..00000000000
--- a/cmk/base/legacy_checks/jenkins_queue.py
+++ /dev/null
@@ -1,162 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# <<>>
-# [[u'[{"task": {"color": "blue_anime", "_class":
-# "org.jenkinsci.plugins.workflow.job.WorkflowJob", "name": "testbuild"},
-# "inQueueSince": 1566823138742, "why": "Build #471 is already in progress
-# (ETA: 38 min)", "stuck": false, "_class": "hudson.model.Queue$BlockedItem",
-# "buildableStartMilliseconds": 1566823144626, "id": 174032, "blocked":
-# true}]']]
-
-
-import json
-import time
-
-from cmk.base.check_api import (
- check_levels,
- get_age_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
- state_markers,
-)
-from cmk.base.config import check_info
-
-MAP_QUEUE_STATES = {
- True: "yes",
- False: "no",
-}
-
-
-def parse_jenkins_queue(string_table):
- parsed = []
-
- for line in string_table:
- parsed.extend(json.loads(line[0]))
-
- return parsed
-
-
-def inventory_jenkins_queue(parsed):
- yield None, {}
-
-
-def check_jenkins_queue(_no_item, params, parsed):
- if not parsed:
- yield 0, "Queue length: 0 Tasks"
- return
-
- long_output = []
-
- yield check_levels(
- len(parsed),
- "queue",
- params.get("queue_length"),
- human_readable_func=int,
- unit="Tasks",
- infoname="Queue length",
- )
-
- levels = params["in_queue_since"]
- stuck_tasks = 0
- blocked_tasks = 0
- pending_tasks = 0
-
- for task in sorted(
- parsed, key=lambda k: (k["stuck"], k["blocked"], -k["inQueueSince"]), reverse=True
- ):
- now = time.time()
- len_state = 0
- timestamp_in_queue = task["inQueueSince"] / 1000
- since = now - timestamp_in_queue
-
- len_state, _since_infotext, _perf_data = check_levels(since, None, levels)
-
- stuck_state = 0
- if task["stuck"]:
- stuck_tasks += 1
- stuck_state = params["stuck"]
-
- blocked_state = 0
- if task["blocked"]:
- blocked_tasks += 1
- blocked_state = params["blocked"]
-
- long_output_str = "ID: %d, Stuck: %s%s, Blocked: %s%s" % (
- task["id"],
- MAP_QUEUE_STATES[task["stuck"]],
- state_markers[stuck_state],
- MAP_QUEUE_STATES[task["blocked"]],
- state_markers[blocked_state],
- )
-
- pending_state = 0
- # pending can be missing
- task_pending = task.get("pending")
- if task_pending or task_pending is False:
- if task_pending:
- pending_tasks += 1
- pending_state = params["pending"]
- long_output_str += ", Pending: {}{}".format(
- MAP_QUEUE_STATES[task_pending],
- state_markers[pending_state],
- )
-
- long_output_str += ", In queue since: {} ({})".format(
- get_age_human_readable(since),
- get_timestamp_human_readable(timestamp_in_queue),
- )
-
- if len_state:
- long_output_str += " (warn/crit at {}/{}){}".format(
- get_age_human_readable(levels[0]),
- get_age_human_readable(levels[1]),
- state_markers[len_state],
- )
-
- state = max([len_state, stuck_state, blocked_state, pending_state])
-
- long_output_str += ", Why kept: %s" % task["why"]
-
- long_output.append((state, long_output_str))
-
- max_state = max(state for state, _infotext in long_output)
-
- for key, value, infotext in [
- (stuck_tasks, "stuck_tasks", "Stuck"),
- (blocked_tasks, "blocked_tasks", "Blocked"),
- (pending_tasks, "pending_tasks", "Pending"),
- ]:
- jenkins_value = "jenkins_%s" % value
-
- yield check_levels(
- key,
- jenkins_value,
- params.get(jenkins_value),
- human_readable_func=int,
- infoname=infotext,
- )
-
- if long_output:
- yield max_state, "See long output for further information"
-
- for state, line in long_output:
- yield 0, "\n%s" % line
-
-
-check_info["jenkins_queue"] = LegacyCheckDefinition(
- parse_function=parse_jenkins_queue,
- service_name="Jenkins Queue",
- discovery_function=inventory_jenkins_queue,
- check_function=check_jenkins_queue,
- check_ruleset_name="jenkins_queue",
- check_default_parameters={
- "in_queue_since": (3600, 7200),
- "stuck": 2,
- "blocked": 0,
- "pending": 0,
- "jenkins_stuck_tasks": (1, 2),
- },
-)
diff --git a/cmk/base/legacy_checks/jira_custom_svc.py b/cmk/base/legacy_checks/jira_custom_svc.py
index 4daeefad987..739f2c7e06a 100644
--- a/cmk/base/legacy_checks/jira_custom_svc.py
+++ b/cmk/base/legacy_checks/jira_custom_svc.py
@@ -14,10 +14,10 @@
import json
import time
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_value_store
+from cmk.agent_based.v2 import get_value_store, render
def parse_jira_custom_svc(string_table):
@@ -91,7 +91,7 @@ def check_jira_custom_svc(item, params, parsed):
diff,
"jira_diff",
diff_levels_upper + diff_levels_lower,
- infoname="Difference last %s" % get_age_human_readable(timespan),
+ infoname="Difference last %s" % render.time_offset(timespan),
)
diff --git a/cmk/base/legacy_checks/jolokia_generic.py b/cmk/base/legacy_checks/jolokia_generic.py
index b8cc7283cb4..8d62518867c 100644
--- a/cmk/base/legacy_checks/jolokia_generic.py
+++ b/cmk/base/legacy_checks/jolokia_generic.py
@@ -12,8 +12,7 @@
from cmk.base.check_legacy_includes.jolokia import jolokia_basic_split
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, StringTable
Section = Mapping[str, Mapping[str, float | str]]
diff --git a/cmk/base/legacy_checks/jolokia_info.py b/cmk/base/legacy_checks/jolokia_info.py
index a9649fa7651..c2de89ea3c0 100644
--- a/cmk/base/legacy_checks/jolokia_info.py
+++ b/cmk/base/legacy_checks/jolokia_info.py
@@ -27,7 +27,7 @@ def check_jolokia_info(item, _no_params, parsed):
return
if line[0] == "ERROR" or len(line) < 3:
- yield 2, " ".join(line) or "Unknown error in plugin"
+ yield 2, " ".join(line) or "Unknown error in plug-in"
return
product = line[0]
diff --git a/cmk/base/legacy_checks/jolokia_jvm_memory.py b/cmk/base/legacy_checks/jolokia_jvm_memory.py
index a1b86dc8ea3..5a367c81d6e 100644
--- a/cmk/base/legacy_checks/jolokia_jvm_memory.py
+++ b/cmk/base/legacy_checks/jolokia_jvm_memory.py
@@ -6,7 +6,7 @@
# mypy: disable-error-code="var-annotated"
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.jolokia import (
jolokia_mbean_attribute,
parse_jolokia_json_output,
@@ -34,7 +34,7 @@ def _jolokia_check_abs_and_perc(mem_type, value, value_max, params):
perf_name,
params.get("abs_%s" % mem_type),
infoname=mem_type.title(),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
boundaries=(None, value_max),
)
@@ -142,11 +142,11 @@ def check_jolokia_jvm_memory_pools(item, params, parsed):
init = usage.get("init")
if init is not None:
- yield 0, "Initially: %s" % get_bytes_human_readable(init)
+ yield 0, "Initially: %s" % render.bytes(init)
committed = usage.get("committed")
if committed is not None:
- yield 0, "Committed: %s" % get_bytes_human_readable(committed)
+ yield 0, "Committed: %s" % render.bytes(committed)
check_info["jolokia_jvm_memory.pools"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/jolokia_metrics.py b/cmk/base/legacy_checks/jolokia_metrics.py
index 8519beaacd0..e23773d55b4 100644
--- a/cmk/base/legacy_checks/jolokia_metrics.py
+++ b/cmk/base/legacy_checks/jolokia_metrics.py
@@ -15,8 +15,13 @@
)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, GetRateError, IgnoreResultsError
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ get_rate,
+ get_value_store,
+ GetRateError,
+ IgnoreResultsError,
+ StringTable,
+)
# Example output from agent:
# <<>>
@@ -247,26 +252,37 @@ def check_jolokia_metrics_bea_queue(item, params, info):
)
-# FIXME: This check could work with any JVM
-# It has no levels
-# A candidate for 1.2.1 overhaul
-def check_jolokia_metrics_bea_requests(item, _no_params, info):
- app = jolokia_metrics_app(info, item.split())
- if not app:
+def check_request_count(item, info, value_store):
+ """
+ "CompletedRequestCount" and "requestCount" are specifically queried by our agent,
+ (see the constant QUERY_SPECS_SPECIFIC_LEGACY).
+
+ CompletedRequestCount -> weblogic of BEA system; it is the total number of requests
+ (https://docs.oracle.com/middleware/1213/wls/WLMBR/core/index.html)
+
+ requestCount -> tomcat servers; it is per second
+ (https://docs.tibco.com/pub/sftm/6.0.0/doc/html/GUID-5738EB01-D159-4D0D-9F3B-22663B2D6756.html)
+ """
+
+ if not (app := jolokia_metrics_app(info, item.split())):
return
- for nk in ["CompletedRequestCount", "requestCount"]:
- if nk in app:
- requests = int(app[nk])
- rate = get_rate(
- get_value_store(),
- "j4p.bea.requests.%s" % item,
- time.time(),
- requests,
- raise_overflow=True,
- )
- yield 0, "%.2f requests/sec" % rate, [("rate", rate)]
- return
+ if (completed_request_count := app.get("CompletedRequestCount")) is not None:
+ rate = get_rate(
+ value_store,
+ "j4p.bea.requests.%s" % item,
+ time.time(),
+ int(completed_request_count),
+ raise_overflow=True,
+ )
+ yield 0, "%.2f requests/sec" % rate, [("rate", rate)]
+
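+ # requestCount is already a rate (per second, see docstring above), so it is
+ # reported directly instead of being derived from a counter via get_rate.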
+ elif (request_count := app.get("requestCount")) is not None:
+ yield 0, "%.2f requests/sec" % int(request_count), [("rate", int(request_count))]
+
+
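+# Thin wrapper keeping the legacy check signature; the value store is passed
+# in explicitly so check_request_count can be exercised with any value store.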
+def check_jolokia_metrics_bea_requests(item, _no_params, info):
+ yield from check_request_count(item, info, get_value_store())
def check_jolokia_metrics_bea_threads(item, _no_params, info):
diff --git a/cmk/base/legacy_checks/juniper_alarm.py b/cmk/base/legacy_checks/juniper_alarm.py
index bf8d67f3669..4ed48796579 100644
--- a/cmk/base/legacy_checks/juniper_alarm.py
+++ b/cmk/base/legacy_checks/juniper_alarm.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.juniper import DETECT_JUNIPER
diff --git a/cmk/base/legacy_checks/juniper_mem.py b/cmk/base/legacy_checks/juniper_mem.py
index a6d44768648..82f3ac9bae6 100644
--- a/cmk/base/legacy_checks/juniper_mem.py
+++ b/cmk/base/legacy_checks/juniper_mem.py
@@ -8,8 +8,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.juniper import DETECT_JUNIPER
# .1.3.6.1.4.1.2636.3.1.13.1.5.9.1.0.0 Routing Engine 0 --> JUNIPER-MIB::jnxOperatingDescr.9.1.0.0
diff --git a/cmk/base/legacy_checks/juniper_mem_screenos_trpz.py b/cmk/base/legacy_checks/juniper_mem_screenos_trpz.py
new file mode 100644
index 00000000000..7b552d3d395
--- /dev/null
+++ b/cmk/base/legacy_checks/juniper_mem_screenos_trpz.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+from collections.abc import Iterator, Mapping
+from dataclasses import dataclass
+from typing import Any
+
+from cmk.base.check_api import LegacyCheckDefinition
+from cmk.base.check_legacy_includes.mem import check_memory_element
+from cmk.base.config import check_info
+
+from cmk.agent_based.v2 import SNMPTree, StringTable
+from cmk.plugins.lib.juniper import DETECT_JUNIPER_SCREENOS, DETECT_JUNIPER_TRPZ
+
+
+@dataclass(frozen=True)
+class Section:
+ used: int
+ total: int
+
+
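+# The device reports used and total memory in kilobytes; parse to bytes (* 1024).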
+def parse_juniper_trpz_mem(string_table: StringTable) -> Section | None:
+ return (
+ Section(int(string_table[0][0]) * 1024, int(string_table[0][1]) * 1024)
+ if string_table
+ else None
+ )
+
+
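+# Both devices are monitored as a single "Memory" service without an item.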
+def discover_juniper_mem_generic(section: Section) -> Iterator[tuple[None, dict]]:
+ yield None, {}
+
+
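+# Shared check for both devices: levels come from check_default_parameters below,
+# e.g. ("perc_used", (80.0, 90.0)), and are evaluated by check_memory_element.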
+def check_juniper_mem_generic(
+ _no_item: None,
+ params: Mapping[str, Any],
+ section: Section,
+) -> tuple[int, str, list]:
+ return check_memory_element(
+ label="Used",
+ used=section.used,
+ total=section.total,
+ levels=params["levels"],
+ metric_name="mem_used",
+ )
+
+
+check_info["juniper_trpz_mem"] = LegacyCheckDefinition(
+ detect=DETECT_JUNIPER_TRPZ,
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.14525.4.8.1.1",
+ oids=["12.1", "6"],
+ ),
+ service_name="Memory",
+ parse_function=parse_juniper_trpz_mem,
+ discovery_function=discover_juniper_mem_generic,
+ check_function=check_juniper_mem_generic,
+ check_ruleset_name="juniper_mem",
+ check_default_parameters={
+ "levels": ("perc_used", (80.0, 90.0)),
+ },
+)
+
+
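+# ScreenOS exposes used and free memory separately; total is computed as used + free.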
+def parse_juniper_screenos_mem(string_table):
+ if not string_table:
+ return None
+ used = int(string_table[0][0])
+ free = int(string_table[0][1])
+ return Section(used, used + free)
+
+
+check_info["juniper_screenos_mem"] = LegacyCheckDefinition(
+ detect=DETECT_JUNIPER_SCREENOS,
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.3224.16.2",
+ oids=["1.0", "2.0"],
+ ),
+ parse_function=parse_juniper_screenos_mem,
+ service_name="Memory",
+ discovery_function=discover_juniper_mem_generic,
+ check_function=check_juniper_mem_generic,
+ check_ruleset_name="juniper_mem",
+ check_default_parameters={
+ "levels": ("perc_used", (80.0, 90.0)),
+ },
+)
diff --git a/cmk/base/legacy_checks/juniper_screenos_cpu.py b/cmk/base/legacy_checks/juniper_screenos_cpu.py
index 4d5ce290d07..05e2501980b 100644
--- a/cmk/base/legacy_checks/juniper_screenos_cpu.py
+++ b/cmk/base/legacy_checks/juniper_screenos_cpu.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.juniper import DETECT_JUNIPER_SCREENOS
diff --git a/cmk/base/legacy_checks/juniper_screenos_fan.py b/cmk/base/legacy_checks/juniper_screenos_fan.py
index 43d8db34196..d03deb2ca94 100644
--- a/cmk/base/legacy_checks/juniper_screenos_fan.py
+++ b/cmk/base/legacy_checks/juniper_screenos_fan.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.juniper import DETECT_JUNIPER_SCREENOS
diff --git a/cmk/base/legacy_checks/juniper_screenos_mem.py b/cmk/base/legacy_checks/juniper_screenos_mem.py
deleted file mode 100644
index 882aba9cb0a..00000000000
--- a/cmk/base/legacy_checks/juniper_screenos_mem.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.juniper_mem import (
- check_juniper_mem_generic,
- discover_juniper_mem_generic,
- Section,
-)
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.juniper import DETECT_JUNIPER_SCREENOS
-
-
-def parse_juniper_screenos_mem(string_table):
- if not string_table:
- return None
- used = int(string_table[0][0])
- free = int(string_table[0][1])
- return Section(used, used + free)
-
-
-check_info["juniper_screenos_mem"] = LegacyCheckDefinition(
- detect=DETECT_JUNIPER_SCREENOS,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.3224.16.2",
- oids=["1.0", "2.0"],
- ),
- parse_function=parse_juniper_screenos_mem,
- service_name="Memory",
- discovery_function=discover_juniper_mem_generic,
- check_function=check_juniper_mem_generic,
- check_ruleset_name="juniper_mem",
- check_default_parameters={
- "levels": ("perc_used", (80.0, 90.0)),
- },
-)
diff --git a/cmk/base/legacy_checks/juniper_screenos_temp.py b/cmk/base/legacy_checks/juniper_screenos_temp.py
index 9401ea21607..413c625e336 100644
--- a/cmk/base/legacy_checks/juniper_screenos_temp.py
+++ b/cmk/base/legacy_checks/juniper_screenos_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.juniper import DETECT_JUNIPER_SCREENOS
diff --git a/cmk/base/legacy_checks/juniper_screenos_vpn.py b/cmk/base/legacy_checks/juniper_screenos_vpn.py
index 064e1211e3a..5d1b728586b 100644
--- a/cmk/base/legacy_checks/juniper_screenos_vpn.py
+++ b/cmk/base/legacy_checks/juniper_screenos_vpn.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.juniper import DETECT_JUNIPER_SCREENOS
diff --git a/cmk/base/legacy_checks/juniper_trpz_cpu_util.py b/cmk/base/legacy_checks/juniper_trpz_cpu_util.py
deleted file mode 100644
index 1ed0201cf48..00000000000
--- a/cmk/base/legacy_checks/juniper_trpz_cpu_util.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition, savefloat
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.juniper import DETECT_JUNIPER_TRPZ
-
-
-def inventory_juniper_trpz_cpu_util(info):
- yield None, {}
-
-
-def check_juniper_trpz_cpu_util(_no_item, params, info):
- utilc, util1, util5 = map(savefloat, info[0])
-
- warn, crit = params.get("util", (None, None)) if isinstance(params, dict) else params
-
- label1, label5 = "", ""
- state = 0
-
- if util1 >= crit:
- state = 2
- label1 = "(!!)"
- elif util1 >= warn:
- state = 1
- label1 = "(!)"
-
- if util5 >= crit:
- state = 2
- label5 = "(!!)"
- elif util5 >= warn:
- state = max(state, 1)
- label5 = "(!)"
-
- perf = [
- ("util1", util1, warn, crit),
- ("util5", util5, warn, crit),
- ("utilc", utilc),
- ]
-
- message = "%d%% current, %d%% 1min%s, %d%% 5min%s" % (utilc, util1, label1, util5, label5)
-
- return state, message, perf
-
-
-def parse_juniper_trpz_cpu_util(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
-check_info["juniper_trpz_cpu_util"] = LegacyCheckDefinition(
- parse_function=parse_juniper_trpz_cpu_util,
- detect=DETECT_JUNIPER_TRPZ,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.14525.4.8.1.1.11",
- oids=["1", "2", "3"],
- ),
- service_name="CPU utilization",
- discovery_function=inventory_juniper_trpz_cpu_util,
- check_function=check_juniper_trpz_cpu_util,
- check_ruleset_name="cpu_utilization",
- check_default_parameters={"util": (80.0, 90.0)},
-)
diff --git a/cmk/base/legacy_checks/juniper_trpz_flash.py b/cmk/base/legacy_checks/juniper_trpz_flash.py
index a6823941625..227cd17dc18 100644
--- a/cmk/base/legacy_checks/juniper_trpz_flash.py
+++ b/cmk/base/legacy_checks/juniper_trpz_flash.py
@@ -6,11 +6,10 @@
# mypy: disable-error-code="arg-type"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition, savefloat
+from cmk.base.check_api import LegacyCheckDefinition, savefloat
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.juniper import DETECT_JUNIPER_TRPZ
@@ -21,7 +20,7 @@ def inventory_juniper_trpz_flash(info):
def check_juniper_trpz_flash(_no_item, params, info):
warn, crit = params["levels"]
used, total = map(savefloat, info[0])
- message = f"Used: {get_bytes_human_readable(used)} of {get_bytes_human_readable(total)} "
+ message = f"Used: {render.bytes(used)} of {render.bytes(total)} "
perc_used = (used / total) * 100 # fixed: true-division
if isinstance(crit, float):
a_warn = (warn / 100.0) * total
@@ -35,8 +34,8 @@ def check_juniper_trpz_flash(_no_item, params, info):
else:
perf = [("used", used, warn, crit, 0, total)]
levels = "Levels Warn/Crit are ({}, {})".format(
- get_bytes_human_readable(warn),
- get_bytes_human_readable(crit),
+ render.bytes(warn),
+ render.bytes(crit),
)
if used > crit:
return 2, message + levels, perf
diff --git a/cmk/base/legacy_checks/juniper_trpz_info.py b/cmk/base/legacy_checks/juniper_trpz_info.py
index 1f9adcf4fe2..707287ac859 100644
--- a/cmk/base/legacy_checks/juniper_trpz_info.py
+++ b/cmk/base/legacy_checks/juniper_trpz_info.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.juniper import DETECT_JUNIPER_TRPZ
diff --git a/cmk/base/legacy_checks/juniper_trpz_mem.py b/cmk/base/legacy_checks/juniper_trpz_mem.py
deleted file mode 100644
index 73606c956a8..00000000000
--- a/cmk/base/legacy_checks/juniper_trpz_mem.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.juniper_mem import (
- check_juniper_mem_generic,
- discover_juniper_mem_generic,
- Section,
-)
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.juniper import DETECT_JUNIPER_TRPZ
-
-
-def parse_juniper_trpz_mem(string_table: StringTable) -> Section | None:
- return (
- Section(int(string_table[0][0]) * 1024, int(string_table[0][1]) * 1024)
- if string_table
- else None
- )
-
-
-check_info["juniper_trpz_mem"] = LegacyCheckDefinition(
- detect=DETECT_JUNIPER_TRPZ,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.14525.4.8.1.1",
- oids=["12.1", "6"],
- ),
- service_name="Memory",
- parse_function=parse_juniper_trpz_mem,
- discovery_function=discover_juniper_mem_generic,
- check_function=check_juniper_mem_generic,
- check_ruleset_name="juniper_mem",
- check_default_parameters={
- "levels": ("perc_used", (80.0, 90.0)),
- },
-)
diff --git a/cmk/base/legacy_checks/juniper_trpz_power.py b/cmk/base/legacy_checks/juniper_trpz_power.py
index efd59de41f3..a8d97ad9d6f 100644
--- a/cmk/base/legacy_checks/juniper_trpz_power.py
+++ b/cmk/base/legacy_checks/juniper_trpz_power.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.juniper import DETECT_JUNIPER_TRPZ
diff --git a/cmk/base/legacy_checks/keepalived.py b/cmk/base/legacy_checks/keepalived.py
index f740f682b84..d4b295da131 100644
--- a/cmk/base/legacy_checks/keepalived.py
+++ b/cmk/base/legacy_checks/keepalived.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, exists, SNMPTree, StringTable
def hex2ip(hexstr):
diff --git a/cmk/base/legacy_checks/kemp_loadmaster_ha.py b/cmk/base/legacy_checks/kemp_loadmaster_ha.py
index 0c55ad686ea..0929c86794f 100644
--- a/cmk/base/legacy_checks/kemp_loadmaster_ha.py
+++ b/cmk/base/legacy_checks/kemp_loadmaster_ha.py
@@ -10,8 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree, StringTable
def inventory_kemp_loadmaster_ha(info):
diff --git a/cmk/base/legacy_checks/kentix_co.py b/cmk/base/legacy_checks/kentix_co.py
index e6176638d53..a08812ad00a 100644
--- a/cmk/base/legacy_checks/kentix_co.py
+++ b/cmk/base/legacy_checks/kentix_co.py
@@ -13,8 +13,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.kentix import DETECT_KENTIX
diff --git a/cmk/base/legacy_checks/kentix_motion.py b/cmk/base/legacy_checks/kentix_motion.py
index 9b5b94224e9..07674b7b69e 100644
--- a/cmk/base/legacy_checks/kentix_motion.py
+++ b/cmk/base/legacy_checks/kentix_motion.py
@@ -18,8 +18,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.kentix import DETECT_KENTIX
diff --git a/cmk/base/legacy_checks/kentix_temp.py b/cmk/base/legacy_checks/kentix_temp.py
index 72bcb0dd6c3..87efb95be47 100644
--- a/cmk/base/legacy_checks/kentix_temp.py
+++ b/cmk/base/legacy_checks/kentix_temp.py
@@ -10,8 +10,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.kentix import DETECT_KENTIX
from cmk.plugins.lib.temperature import TempParamType
diff --git a/cmk/base/legacy_checks/knuerr_rms_humidity.py b/cmk/base/legacy_checks/knuerr_rms_humidity.py
index b7da0a56f60..7ff7c18ebec 100644
--- a/cmk/base/legacy_checks/knuerr_rms_humidity.py
+++ b/cmk/base/legacy_checks/knuerr_rms_humidity.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.humidity import check_humidity
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.knuerr import DETECT_KNUERR
diff --git a/cmk/base/legacy_checks/knuerr_rms_temp.py b/cmk/base/legacy_checks/knuerr_rms_temp.py
index 57d9db39699..54fbafb8c06 100644
--- a/cmk/base/legacy_checks/knuerr_rms_temp.py
+++ b/cmk/base/legacy_checks/knuerr_rms_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.knuerr import DETECT_KNUERR
diff --git a/cmk/base/legacy_checks/knuerr_sensors.py b/cmk/base/legacy_checks/knuerr_sensors.py
index 2198be3bcc4..44af8652ef3 100644
--- a/cmk/base/legacy_checks/knuerr_sensors.py
+++ b/cmk/base/legacy_checks/knuerr_sensors.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.knuerr import DETECT_KNUERR
diff --git a/cmk/base/legacy_checks/lgp_info.py b/cmk/base/legacy_checks/lgp_info.py
index aa43ded9f38..3fb037a8cac 100644
--- a/cmk/base/legacy_checks/lgp_info.py
+++ b/cmk/base/legacy_checks/lgp_info.py
@@ -18,8 +18,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.lgp import DETECT_LGP
lgp_info_devices = {
diff --git a/cmk/base/legacy_checks/lgp_pdu_aux.py b/cmk/base/legacy_checks/lgp_pdu_aux.py
index 96441b5bff1..d0f8f83dfa0 100644
--- a/cmk/base/legacy_checks/lgp_pdu_aux.py
+++ b/cmk/base/legacy_checks/lgp_pdu_aux.py
@@ -39,8 +39,7 @@
from cmk.base.check_api import LegacyCheckDefinition, savefloat, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.lgp import DETECT_LGP
lgp_pdu_aux_types = {
diff --git a/cmk/base/legacy_checks/lgp_pdu_info.py b/cmk/base/legacy_checks/lgp_pdu_info.py
index d064da5f9b1..a6bfc27452b 100644
--- a/cmk/base/legacy_checks/lgp_pdu_info.py
+++ b/cmk/base/legacy_checks/lgp_pdu_info.py
@@ -16,8 +16,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.lgp import DETECT_LGP
diff --git a/cmk/base/legacy_checks/libelle_business_shadow.py b/cmk/base/legacy_checks/libelle_business_shadow.py
index ea6f7def35c..f1ced3997ec 100644
--- a/cmk/base/legacy_checks/libelle_business_shadow.py
+++ b/cmk/base/legacy_checks/libelle_business_shadow.py
@@ -10,7 +10,7 @@
from cmk.base.check_legacy_includes.df import df_check_filesystem_list, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
# .--Example output from agent-------------------------------------------.
# <<>>
diff --git a/cmk/base/legacy_checks/liebert_bat_temp.py b/cmk/base/legacy_checks/liebert_bat_temp.py
deleted file mode 100644
index a22b859a52b..00000000000
--- a/cmk/base/legacy_checks/liebert_bat_temp.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.temperature import check_temperature
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.lgp import DETECT_LGP
-
-
-def parse_liebert_bat_temp(string_table):
- try:
- return {"Battery": int(string_table[0][0])}
- except (ValueError, IndexError):
- return {}
-
-
-def discover_liebert_bat_temp(section):
- yield from ((key, {}) for key in section)
-
-
-def check_liebert_bat_temp(item, params, parsed):
- if not (data := parsed.get(item)):
- return
- yield check_temperature(data, params, "liebert_bat_temp_%s" % item)
-
-
-check_info["liebert_bat_temp"] = LegacyCheckDefinition(
- detect=DETECT_LGP,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.4.1.3.3.1.3",
- oids=["1"],
- ),
- parse_function=parse_liebert_bat_temp,
- service_name="Temperature %s",
- discovery_function=discover_liebert_bat_temp,
- check_function=check_liebert_bat_temp,
- check_ruleset_name="temperature",
- check_default_parameters={"levels": (40.0, 50.0)},
-)
diff --git a/cmk/base/legacy_checks/liebert_chilled_water.py b/cmk/base/legacy_checks/liebert_chilled_water.py
deleted file mode 100644
index a6e214351d3..00000000000
--- a/cmk/base/legacy_checks/liebert_chilled_water.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_str_without_unit
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.100.4626 Supply Chilled Water Over Temp
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.100.4626 Inactive Event
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.100.4703 Chilled Water Control Valve Failure
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.100.4703 Inactive Event
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.100.4980 Supply Chilled Water Loss of Flow
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.100.4980 Inactive Event
-
-
-def inventory_liebert_chilled_water(parsed):
- for key in parsed:
- if key:
- yield (key, {})
-
-
-def check_liebert_chilled_water(item, _params, parsed):
- for key, value in parsed.items():
- if item == key and value.lower() == "inactive event":
- yield 0, "Normal"
- elif item == key:
- yield 2, "%s" % value
-
-
-check_info["liebert_chilled_water"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=[
- "10.1.2.100.4626",
- "20.1.2.100.4626",
- "10.1.2.100.4703",
- "20.1.2.100.4703",
- "10.1.2.100.4980",
- "20.1.2.100.4980",
- ],
- ),
- parse_function=parse_liebert_str_without_unit,
- service_name="%s",
- discovery_function=inventory_liebert_chilled_water,
- check_function=check_liebert_chilled_water,
-)
diff --git a/cmk/base/legacy_checks/liebert_chiller_status.py b/cmk/base/legacy_checks/liebert_chiller_status.py
deleted file mode 100644
index 8b7bd207d28..00000000000
--- a/cmk/base/legacy_checks/liebert_chiller_status.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def inventory_liebert_chiller_status(info):
- return [(None, None)]
-
-
-def check_liebert_chiller_status(_no_item, _no_params, info):
- status = info[0][0]
- if status not in ["5", "7"]:
- return 2, "Device is in a non OK state"
- return 0, "Device is in a OK state"
-
-
-def parse_liebert_chiller_status(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
-check_info["liebert_chiller_status"] = LegacyCheckDefinition(
- parse_function=parse_liebert_chiller_status,
- detect=startswith(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.476.1.42.4.3.20"),
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.4.3.20.1.1.20",
- oids=["2"],
- ),
- service_name="Chiller status",
- discovery_function=inventory_liebert_chiller_status,
- check_function=check_liebert_chiller_status,
-)
diff --git a/cmk/base/legacy_checks/liebert_compressor.py b/cmk/base/legacy_checks/liebert_compressor.py
deleted file mode 100644
index 17463fbe58b..00000000000
--- a/cmk/base/legacy_checks/liebert_compressor.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_float
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5266.1 Compressor Head Pressure
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5266.2 Compressor Head Pressure
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5266.3 Compressor Head Pressure
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5266.4 Compressor Head Pressure
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5266.1 5.9
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5266.2 Unavailable
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5266.3 6.1
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5266.4 0.0
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5266.1 bar
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5266.2 bar
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5266.3 bar
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5266.4 bar
-
-
-def check_liebert_compressor(item, params, parsed):
- if not (data := parsed.get(item)):
- return
- yield check_levels(data[0], None, params["levels"], unit=data[1], infoname="Head pressure")
-
-
-def discover_liebert_compressor(section):
- yield from ((item, {}) for item in section)
-
-
-check_info["liebert_compressor"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=["10.1.2.1.5266", "20.1.2.1.5266", "30.1.2.1.5266"],
- ),
- parse_function=parse_liebert_float,
- service_name="%s",
- discovery_function=discover_liebert_compressor,
- check_function=check_liebert_compressor,
- check_default_parameters={
- "levels": (8, 12),
- },
-)
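
A note for readers of the example outputs above and below: all of these Liebert checks read the same three-column MIB layout, in which the `.10` subtree carries the parameter name, `.20` the value and `.30` the unit, one row per device index. The following standalone sketch shows how such rows become a `{name: (value, unit)}` mapping; `parse_rows` is a hypothetical stand-in, not the real `parse_liebert_float`, which additionally disambiguates duplicate names per index.

def parse_rows(rows: list[list[str]]) -> dict[str, tuple[float, str]]:
    parsed = {}
    for name, value, unit in rows:
        try:
            parsed[name] = (float(value), unit)
        except ValueError:
            # non-numeric readings such as "Unavailable" are dropped
            continue
    return parsed

rows = [
    ["Compressor Head Pressure", "5.9", "bar"],
    ["Compressor Head Pressure", "Unavailable", "bar"],
]
assert parse_rows(rows) == {"Compressor Head Pressure": (5.9, "bar")}
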
diff --git a/cmk/base/legacy_checks/liebert_cooling.py b/cmk/base/legacy_checks/liebert_cooling.py
deleted file mode 100644
index 098abb30c5b..00000000000
--- a/cmk/base/legacy_checks/liebert_cooling.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_float
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5078 Cooling Capacity (Primary)
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5078 0
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5078 %
-
-
-def check_liebert_cooling(item, params, parsed):
- if not (data := parsed.get(item)):
- return
- yield check_levels(
- data[0],
- "capacity_perc",
- (params.get("max_capacity", (None, None)) + params.get("min_capacity", (None, None))),
- unit=data[1],
- )
-
-
-def discover_liebert_cooling(section):
- yield from ((item, {}) for item in section)
-
-
-check_info["liebert_cooling"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=["10.1.2.1.5078", "20.1.2.1.5078", "30.1.2.1.5078"],
- ),
- parse_function=parse_liebert_float,
- service_name="%s",
- discovery_function=discover_liebert_cooling,
- check_function=check_liebert_cooling,
- check_ruleset_name="liebert_cooling",
- check_default_parameters={"min_capacity": (90, 80)},
-)
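
The `max_capacity + min_capacity` tuple concatenation above builds the legacy four-tuple levels form `(warn_upper, crit_upper, warn_lower, crit_lower)`; the same idiom recurs in liebert_cooling_position, liebert_fans and mem_vmalloc below. A plain-Python stand-in for how such a tuple is evaluated (`evaluate` is a hypothetical sketch, not cmk's `check_levels`, whose exact boundary semantics may differ):

def build_levels(params: dict) -> tuple:
    return params.get("max_capacity", (None, None)) + params.get("min_capacity", (None, None))

def evaluate(value: float, levels: tuple) -> int:
    warn_upper, crit_upper, warn_lower, crit_lower = levels
    if crit_upper is not None and value >= crit_upper:
        return 2  # CRIT
    if crit_lower is not None and value < crit_lower:
        return 2
    if warn_upper is not None and value >= warn_upper:
        return 1  # WARN
    if warn_lower is not None and value < warn_lower:
        return 1
    return 0  # OK

levels = build_levels({"min_capacity": (90, 80)})  # the check's default
assert levels == (None, None, 90, 80)
assert evaluate(95.0, levels) == 0  # above the lower warn level
assert evaluate(85.0, levels) == 1  # below 90
assert evaluate(75.0, levels) == 2  # below 80
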
diff --git a/cmk/base/legacy_checks/liebert_cooling_position.py b/cmk/base/legacy_checks/liebert_cooling_position.py
deleted file mode 100644
index 73163fa3ea8..00000000000
--- a/cmk/base/legacy_checks/liebert_cooling_position.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_float
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5303 Free Cooling Valve Open Position
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5303 0
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5303 %
-
-
-def discover_liebert_cooling_position(section):
- yield from ((item, {}) for item in section if item.startswith("Free Cooling"))
-
-
-def check_liebert_cooling_position(item, params, parsed):
- if not (data := parsed.get(item)):
- return
-
- yield check_levels(
- data[0],
- "capacity_perc",
- (params.get("max_capacity", (None, None)) + params.get("min_capacity", (None, None))),
- unit=data[1],
- )
-
-
-check_info["liebert_cooling_position"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=["10.1.2.1.5303", "20.1.2.1.5303", "30.1.2.1.5303"],
- ),
- parse_function=parse_liebert_float,
- service_name="%s",
- discovery_function=discover_liebert_cooling_position,
- check_function=check_liebert_cooling_position,
- check_ruleset_name="liebert_cooling_position",
- check_default_parameters={
- "min_capacity": (90, 80),
- },
-)
diff --git a/cmk/base/legacy_checks/liebert_cooling_status.py b/cmk/base/legacy_checks/liebert_cooling_status.py
deleted file mode 100644
index 4fc1353cf9b..00000000000
--- a/cmk/base/legacy_checks/liebert_cooling_status.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from collections.abc import Iterable
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_str_without_unit
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5302 Free Cooling Status
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5302 off
-
-
-def discover_liebert_cooling_status(section: dict) -> Iterable[tuple[str, dict]]:
- yield from ((item, {}) for item in section)
-
-
-def check_liebert_cooling_status(
- item: str, _no_params: object, section: dict
-) -> Iterable[tuple[int, str]]:
- if not (data := section.get(item)):
- return
- yield 0, data
-
-
-check_info["liebert_cooling_status"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=["10.1.2.1.5302", "20.1.2.1.5302"],
- ),
- parse_function=parse_liebert_str_without_unit,
- service_name="%s",
- discovery_function=discover_liebert_cooling_status,
- check_function=check_liebert_cooling_status,
-)
diff --git a/cmk/base/legacy_checks/liebert_fans.py b/cmk/base/legacy_checks/liebert_fans.py
deleted file mode 100644
index a20ff126c99..00000000000
--- a/cmk/base/legacy_checks/liebert_fans.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_float
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5077 Fan Speed
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5077 0
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5077 %
-
-
-def check_liebert_fans(item, params, parsed):
- if not (data := parsed.get(item)):
- return
- levels = params["levels"] + params.get("levels_lower", (None, None))
- yield check_levels(data[0], "filehandler_perc", levels, unit=data[1])
-
-
-def discover_liebert_fans(section):
- yield from ((item, {}) for item in section)
-
-
-check_info["liebert_fans"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=["10.1.2.1.5077", "20.1.2.1.5077", "30.1.2.1.5077"],
- ),
- parse_function=parse_liebert_float,
- service_name="%s",
- discovery_function=discover_liebert_fans,
- check_function=check_liebert_fans,
- check_ruleset_name="hw_fans_perc",
- check_default_parameters={
- "levels": (80, 90),
- },
-)
diff --git a/cmk/base/legacy_checks/liebert_fans_condenser.py b/cmk/base/legacy_checks/liebert_fans_condenser.py
deleted file mode 100644
index c161e91d09a..00000000000
--- a/cmk/base/legacy_checks/liebert_fans_condenser.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_float
-
-
-def check_liebert_fans_condenser(item, params, parsed):
- if not (data := parsed.get(item)):
- return
- levels = params["levels"] + params.get("levels_lower", (None, None))
- yield check_levels(data[0], "filehandler_perc", levels, unit=data[1])
-
-
-def discover_liebert_fans_condenser(section):
- yield from ((item, {}) for item in section)
-
-
-check_info["liebert_fans_condenser"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=["10.1.2.1.5276", "20.1.2.1.5276", "30.1.2.1.5276"],
- ),
- parse_function=parse_liebert_float,
- service_name="%s",
- discovery_function=discover_liebert_fans_condenser,
- check_function=check_liebert_fans_condenser,
- check_ruleset_name="hw_fans_perc",
- check_default_parameters={
- "levels": (80, 90),
- },
-)
diff --git a/cmk/base/legacy_checks/liebert_maintenance.py b/cmk/base/legacy_checks/liebert_maintenance.py
deleted file mode 100644
index 47f0b44b12d..00000000000
--- a/cmk/base/legacy_checks/liebert_maintenance.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-# mypy: disable-error-code="arg-type"
-
-import time
-
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_int_without_unit
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.4868 Calculated Next Maintenance Month
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.4868 5
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.4869 Calculated Next Maintenance Year
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.4869 2017
-
-
-def inventory_liebert_maintenance(parsed):
- return [(None, {})]
-
-
-def check_liebert_maintenance(_no_item, params, parsed):
- month, year = None, None
- for key, value in parsed.items():
- if "month" in key.lower():
- month = value
- elif "year" in key.lower():
- year = value
-
- if None in (month, year):
- return
-
- yield 0, f"Next maintenance: {month}/{year}"
-
- time_left_seconds = time.mktime((year, month, 0, 0, 0, 0, 0, 0, 0)) - time.time()
-
- warn_days, crit_days = params["levels"]
- levels = (None, None, warn_days * 86400, crit_days * 86400)
- yield check_levels(time_left_seconds, None, levels, human_readable_func=get_age_human_readable)
-
-
-check_info["liebert_maintenance"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=["10.1.2.1.4868", "20.1.2.1.4868", "10.1.2.1.4869", "20.1.2.1.4869"],
- ),
- parse_function=parse_liebert_int_without_unit,
- service_name="Maintenance",
- discovery_function=inventory_liebert_maintenance,
- check_function=check_liebert_maintenance,
- check_default_parameters={"levels": (10, 5)}, # Remaining days until next maintenance,
-)
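
The deadline arithmetic in the deleted maintenance check relies on a `time.mktime` detail worth spelling out: a `tm_mday` of 0 normalizes to the last day of the previous month. A standalone sketch under that assumption (the helper name is illustrative):

import time

def seconds_until_maintenance(year: int, month: int, now: float) -> float:
    # tm_mday=0 normalizes to the last day of the *previous* month,
    # so (2017, 5, 0, ...) is 2017-04-30 00:00 local time.
    deadline = time.mktime((year, month, 0, 0, 0, 0, 0, 0, 0))
    return deadline - now

warn_days, crit_days = 10, 5  # the deleted check's defaults
left = seconds_until_maintenance(
    2017, 5, now=time.mktime((2017, 4, 27, 12, 0, 0, 0, 0, 0))
)
assert 2 * 86400 < left < 4 * 86400  # about 2.5 days before April 30
assert left < crit_days * 86400 < warn_days * 86400  # 2.5 days left: CRIT
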
diff --git a/cmk/base/legacy_checks/liebert_pump.py b/cmk/base/legacy_checks/liebert_pump.py
deleted file mode 100644
index 9312f45009b..00000000000
--- a/cmk/base/legacy_checks/liebert_pump.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_float
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5298.1 Pump Hours
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5298.2 Pump Hours
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5298.1 3423
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5298.2 1
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5298.1 hr
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5298.2 hr
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5299.1 Pump Hours Threshold
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5299.2 Pump Hours Threshold
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5299.1 32000
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5299.2 32000
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5299.1 hr
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5299.2 hr
-
-
-def discover_liebert_pump(section):
- yield from ((item, {}) for item in section if "threshold" not in item.lower())
-
-
-def check_liebert_pump(item, _no_params, parsed):
- data = parsed.get(item)
- if data is None:
- return
-
- # TODO: this should be done in the parse function, per OID end.
- for key, (value, _unit) in parsed.items():
- if "Threshold" in key and key.replace(" Threshold", "") == item:
- crit = value
-
- yield check_levels(data[0], None, (crit, crit), unit=data[1])
-
-
-check_info["liebert_pump"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=[
- "10.1.2.1.5298",
- "20.1.2.1.5298",
- "30.1.2.1.5298",
- "10.1.2.1.5299",
- "20.1.2.1.5299",
- "30.1.2.1.5299",
- ],
- ),
- parse_function=parse_liebert_float,
- service_name="%s",
- discovery_function=discover_liebert_pump,
- check_function=check_liebert_pump,
-)
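
The TODO in the deleted pump check points at a real weak spot: `crit` stays unbound if no matching threshold row exists. A hypothetical sketch of the pairing it asks for, done once at parse time; names and structure are illustrative, not the shipped `parse_liebert_float`:

def pair_with_thresholds(
    section: dict[str, tuple[float, str]]
) -> dict[str, tuple[float, str, float | None]]:
    paired = {}
    for key, (value, unit) in section.items():
        if "Threshold" in key:
            continue
        crit = next(
            (
                thr_value
                for thr_key, (thr_value, _thr_unit) in section.items()
                if "Threshold" in thr_key and thr_key.replace(" Threshold", "") == key
            ),
            None,  # unlike the deleted check, never leaves crit unbound
        )
        paired[key] = (value, unit, crit)
    return paired

section = {
    "Pump Hours": (3423.0, "hr"),
    "Pump Hours Threshold": (32000.0, "hr"),
    "Pump Hours 2": (1.0, "hr"),
    "Pump Hours Threshold 2": (32000.0, "hr"),
}
assert pair_with_thresholds(section)["Pump Hours"] == (3423.0, "hr", 32000.0)
assert pair_with_thresholds(section)["Pump Hours 2"] == (1.0, "hr", 32000.0)
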
diff --git a/cmk/base/legacy_checks/liebert_reheating.py b/cmk/base/legacy_checks/liebert_reheating.py
deleted file mode 100644
index c9a88e84301..00000000000
--- a/cmk/base/legacy_checks/liebert_reheating.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_float
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5080 Reheat Utilization
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5080 0
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5080 %
-
-
-def inventory_liebert_reheating(parsed):
- if any("Reheat" in key for key in parsed):
- yield None, {}
-
-
-def check_liebert_reheating(_no_item, params, parsed):
- for key, (value, unit) in parsed.items():
- if "Reheat" not in key:
- continue
- yield check_levels(value, "filehandler_perc", params["levels"], unit=unit)
-
-
-check_info["liebert_reheating"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=["10.1.2.1.5080", "20.1.2.1.5080", "30.1.2.1.5080"],
- ),
- parse_function=parse_liebert_float,
- service_name="Reheating Utilization",
- discovery_function=inventory_liebert_reheating,
- check_function=check_liebert_reheating,
- check_default_parameters={
- "levels": (80, 90),
- },
-)
diff --git a/cmk/base/legacy_checks/liebert_temp_general.py b/cmk/base/legacy_checks/liebert_temp_general.py
deleted file mode 100644
index b2125717ee1..00000000000
--- a/cmk/base/legacy_checks/liebert_temp_general.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.liebert import check_temp_unit
-from cmk.base.check_legacy_includes.temperature import check_temperature
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.plugins.lib.liebert import DETECT_LIEBERT, parse_liebert_float
-
-# example output
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5282 Actual Supply Fluid Temp Set Point
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5282 17.7
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5282 deg C
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5288 Return Fluid Temperature
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5288 4.3
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5288 deg C
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.4643 Supply Fluid Temperature
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.4643 11.1
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.4643 deg C
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5517 Condenser Inlet Water Temperature
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5517 Unavailable
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5517 deg C
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.10.1.2.1.5518 Condenser Outlet Water Temperature
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.20.1.2.1.5518 Unavailable
-# .1.3.6.1.4.1.476.1.42.3.9.20.1.30.1.2.1.5518 deg C
-
-
-def check_liebert_temp_general(item, params, parsed):
- if not (data := parsed.get(item)):
- return
- value = check_temp_unit(data)
- yield check_temperature(value, params, "check_liebert_fluid_temp.%s" % item)
-
-
-def discover_liebert_temp_general(section):
- yield from ((item, {}) for item in section)
-
-
-check_info["liebert_temp_general"] = LegacyCheckDefinition(
- detect=DETECT_LIEBERT,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.476.1.42.3.9.20.1",
- oids=[
- "10.1.2.2.5282",
- "20.1.2.2.5282",
- "30.1.2.2.5282",
- "10.1.2.2.5288",
- "20.1.2.2.5288",
- "30.1.2.2.5288",
- "10.1.2.2.4643",
- "20.1.2.2.4643",
- "30.1.2.2.4643",
- "10.1.2.2.5517",
- "20.1.2.2.5517",
- "30.1.2.2.5517",
- "10.1.2.2.5518",
- "20.1.2.2.5518",
- "30.1.2.2.5518",
- "10.1.2.1.5519",
- "20.1.2.1.5519",
- "30.1.2.1.5519",
- ],
- ),
- parse_function=parse_liebert_float,
- service_name="%s",
- discovery_function=discover_liebert_temp_general,
- check_function=check_liebert_temp_general,
- check_ruleset_name="temperature",
-)
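
A hypothetical sketch of the unit handling that `check_temp_unit` (imported above) presumably performs: Liebert devices report `(value, "deg C"/"deg F")` pairs, and the temperature logic wants Celsius. The real helper's behavior is not shown in this diff, so treat this purely as an illustration:

def to_celsius(value: float, unit: str) -> float:
    if unit.lower().endswith("f"):
        return (value - 32.0) * 5.0 / 9.0
    return value

assert to_celsius(17.7, "deg C") == 17.7
assert abs(to_celsius(212.0, "deg F") - 100.0) < 1e-9
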
diff --git a/cmk/base/legacy_checks/logins.py b/cmk/base/legacy_checks/logins.py
index 5c78b3feaf8..90d2900b887 100644
--- a/cmk/base/legacy_checks/logins.py
+++ b/cmk/base/legacy_checks/logins.py
@@ -12,7 +12,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
DiscoveryResult = Iterable[tuple[None, dict]]
CheckResult = Iterable[tuple[int, str, list]]
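
This one-line import change recurs throughout this diff: `StringTable` (and friends) now come straight from `cmk.agent_based.v2` instead of the former `cmk.agent_based.v2.type_defs` module. Usage is unchanged; `parse_example` below is a hypothetical illustration:

from cmk.agent_based.v2 import StringTable

def parse_example(string_table: StringTable) -> StringTable | None:
    # same annotations as before; only the import location moved
    return string_table or None
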
diff --git a/cmk/base/legacy_checks/lvm_vgs.py b/cmk/base/legacy_checks/lvm_vgs.py
index 0ec7d2a7827..2e8af31f96a 100644
--- a/cmk/base/legacy_checks/lvm_vgs.py
+++ b/cmk/base/legacy_checks/lvm_vgs.py
@@ -8,7 +8,7 @@
from cmk.base.check_legacy_includes.df import df_check_filesystem_list, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_lvm_vgs(info):
diff --git a/cmk/base/legacy_checks/mailman_lists.py b/cmk/base/legacy_checks/mailman_lists.py
index fe6c701e583..bc29582aef1 100644
--- a/cmk/base/legacy_checks/mailman_lists.py
+++ b/cmk/base/legacy_checks/mailman_lists.py
@@ -7,7 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_mailman_lists(info):
diff --git a/cmk/base/legacy_checks/mbg_lantime_ng_power.py b/cmk/base/legacy_checks/mbg_lantime_ng_power.py
index 85e5599b9db..c35e2f37807 100644
--- a/cmk/base/legacy_checks/mbg_lantime_ng_power.py
+++ b/cmk/base/legacy_checks/mbg_lantime_ng_power.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.mbg_lantime import DETECT_MBG_LANTIME_NG
diff --git a/cmk/base/legacy_checks/mbg_lantime_ng_refclock.py b/cmk/base/legacy_checks/mbg_lantime_ng_refclock.py
index 35989a2f46a..32d85a79412 100644
--- a/cmk/base/legacy_checks/mbg_lantime_ng_refclock.py
+++ b/cmk/base/legacy_checks/mbg_lantime_ng_refclock.py
@@ -6,8 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.mbg_lantime import DETECT_MBG_LANTIME_NG
# .--general-------------------------------------------------------------.
diff --git a/cmk/base/legacy_checks/mbg_lantime_ng_state.py b/cmk/base/legacy_checks/mbg_lantime_ng_state.py
index e1a57be96a0..86eeaebb9b6 100644
--- a/cmk/base/legacy_checks/mbg_lantime_ng_state.py
+++ b/cmk/base/legacy_checks/mbg_lantime_ng_state.py
@@ -11,8 +11,7 @@
)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.mbg_lantime import DETECT_MBG_LANTIME_NG
diff --git a/cmk/base/legacy_checks/mbg_lantime_ng_temp.py b/cmk/base/legacy_checks/mbg_lantime_ng_temp.py
index b6675a4f568..f83c048efe6 100644
--- a/cmk/base/legacy_checks/mbg_lantime_ng_temp.py
+++ b/cmk/base/legacy_checks/mbg_lantime_ng_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.mbg_lantime import DETECT_MBG_LANTIME_NG
diff --git a/cmk/base/legacy_checks/mbg_lantime_refclock.py b/cmk/base/legacy_checks/mbg_lantime_refclock.py
index 659e15bd74e..7cac952564b 100644
--- a/cmk/base/legacy_checks/mbg_lantime_refclock.py
+++ b/cmk/base/legacy_checks/mbg_lantime_refclock.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
mbg_lantime_refclock_refmode_map = {
"0": "notavailable",
diff --git a/cmk/base/legacy_checks/mbg_lantime_state.py b/cmk/base/legacy_checks/mbg_lantime_state.py
index 742cb60400f..518e46975fc 100644
--- a/cmk/base/legacy_checks/mbg_lantime_state.py
+++ b/cmk/base/legacy_checks/mbg_lantime_state.py
@@ -11,8 +11,7 @@
)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, equals, not_exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, equals, not_exists, SNMPTree, StringTable
def inventory_mbg_lantime_state(info):
diff --git a/cmk/base/legacy_checks/mcafee_emailgateway_agent.py b/cmk/base/legacy_checks/mcafee_emailgateway_agent.py
index 89bc5d2a87c..bbe470716de 100644
--- a/cmk/base/legacy_checks/mcafee_emailgateway_agent.py
+++ b/cmk/base/legacy_checks/mcafee_emailgateway_agent.py
@@ -5,23 +5,22 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.mcafee_gateway import inventory_mcafee_gateway_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.mcafee_gateway import DETECT_EMAIL_GATEWAY
-# TODO together with 'mcafee_emailgateway_av_mcafee'
-# only info check?
+def parse_mcafee_emailgateway_agent(string_table: StringTable) -> StringTable | None:
+ return string_table or None
-def check_mcafee_emailgateway_agent(item, params, info):
- return 0, "Version: %s, Hostname: %s, Last file update: %s" % tuple(info[0])
+def inventory_mcafee_gateway_generic(info):
+ return [(None, {})]
-def parse_mcafee_emailgateway_agent(string_table: StringTable) -> StringTable | None:
- return string_table or None
+
+def check_mcafee_emailgateway_agent(item, params, info):
+ return 0, "Version: %s, Hostname: %s, Last file update: %s" % tuple(info[0])
check_info["mcafee_emailgateway_agent"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/mcafee_emailgateway_av_authentium.py b/cmk/base/legacy_checks/mcafee_emailgateway_av_authentium.py
index 3b0445f4fca..aa11c0dacaf 100644
--- a/cmk/base/legacy_checks/mcafee_emailgateway_av_authentium.py
+++ b/cmk/base/legacy_checks/mcafee_emailgateway_av_authentium.py
@@ -4,15 +4,14 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.mcafee_gateway import DETECT_EMAIL_GATEWAY
-def inventory_mcafee_emailgateway_av_authentium(section: StringTable) -> DiscoveryResult:
+def discover_mcafee_emailgateway_av_authentium(section: StringTable) -> DiscoveryResult:
if section and section[0][0] == "1":
yield Service()
@@ -44,6 +43,6 @@ def parse_mcafee_emailgateway_av_authentium(string_table: StringTable) -> String
oids=["4", "5", "6"],
),
service_name="AV Authentium",
- discovery_function=inventory_mcafee_emailgateway_av_authentium,
+ discovery_function=discover_mcafee_emailgateway_av_authentium,
check_function=check_mcafee_emailgateway_av_authentium,
)
diff --git a/cmk/base/legacy_checks/mcafee_emailgateway_av_mcafee.py b/cmk/base/legacy_checks/mcafee_emailgateway_av_mcafee.py
index 8543d30a628..b041651d7bf 100644
--- a/cmk/base/legacy_checks/mcafee_emailgateway_av_mcafee.py
+++ b/cmk/base/legacy_checks/mcafee_emailgateway_av_mcafee.py
@@ -5,15 +5,18 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.mcafee_gateway import inventory_mcafee_gateway_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.mcafee_gateway import DETECT_EMAIL_GATEWAY
-# TODO together with 'mcafee_emailgateway_agent'
-# only info check?
+
+def parse_mcafee_emailgateway_av_mcafee(string_table: StringTable) -> StringTable | None:
+ return string_table or None
+
+
+def inventory_mcafee_gateway_generic(info):
+ return [(None, {})]
def check_mcafee_emailgateway_av_mcafee(item, params, info):
@@ -25,10 +28,6 @@ def check_mcafee_emailgateway_av_mcafee(item, params, info):
)
-def parse_mcafee_emailgateway_av_mcafee(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
check_info["mcafee_emailgateway_av_mcafee"] = LegacyCheckDefinition(
parse_function=parse_mcafee_emailgateway_av_mcafee,
detect=DETECT_EMAIL_GATEWAY,
diff --git a/cmk/base/legacy_checks/mcafee_emailgateway_bridge.py b/cmk/base/legacy_checks/mcafee_emailgateway_bridge.py
index e4687fc22ab..af355db7454 100644
--- a/cmk/base/legacy_checks/mcafee_emailgateway_bridge.py
+++ b/cmk/base/legacy_checks/mcafee_emailgateway_bridge.py
@@ -7,14 +7,18 @@
import time
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.mcafee_gateway import inventory_mcafee_gateway_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree, StringTable
from cmk.plugins.lib.mcafee_gateway import DETECT_EMAIL_GATEWAY
-# TODO states, traffic, params?
+
+def parse_mcafee_emailgateway_bridge(string_table: StringTable) -> StringTable | None:
+ return string_table or None
+
+
+def inventory_mcafee_gateway_generic(info):
+ return [(None, {})]
def check_mcafee_emailgateway_bridge(item, params, info):
@@ -61,10 +65,6 @@ def check_mcafee_emailgateway_bridge(item, params, info):
yield state, infotext, [tuple(perfdata)]
-def parse_mcafee_emailgateway_bridge(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
check_info["mcafee_emailgateway_bridge"] = LegacyCheckDefinition(
parse_function=parse_mcafee_emailgateway_bridge,
detect=DETECT_EMAIL_GATEWAY,
diff --git a/cmk/base/legacy_checks/mcafee_emailgateway_smtp.py b/cmk/base/legacy_checks/mcafee_emailgateway_smtp.py
index 7128cbbeeb8..ab0a19a0957 100644
--- a/cmk/base/legacy_checks/mcafee_emailgateway_smtp.py
+++ b/cmk/base/legacy_checks/mcafee_emailgateway_smtp.py
@@ -4,29 +4,31 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
-from cmk.base.check_legacy_includes.mcafee_gateway import inventory_mcafee_gateway_generic
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.mcafee_gateway import DETECT_EMAIL_GATEWAY
+def parse_mcafee_emailgateway_smtp(string_table: StringTable) -> StringTable | None:
+ return string_table or None
+
+
+def inventory_mcafee_gateway_generic(info):
+ return [(None, {})]
+
+
def check_mcafee_emailgateway_smtp(item, params, info):
total_connections, total_bytes, kernel_mode_blocked, kernel_mode_active = map(int, info[0])
return 0, "Total connections: {} ({}), Kernel blocked: {}, Kernel active: {}".format(
total_connections,
- get_bytes_human_readable(total_bytes),
+ render.bytes(total_bytes),
kernel_mode_blocked,
kernel_mode_active,
)
-def parse_mcafee_emailgateway_smtp(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
check_info["mcafee_emailgateway_smtp"] = LegacyCheckDefinition(
parse_function=parse_mcafee_emailgateway_smtp,
detect=DETECT_EMAIL_GATEWAY,
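
`render.bytes` here, and `render.timespan`/`render.datetime` in the mkbackup, mkeventd_status and mknotifyd hunks further below, replace the old `get_*_human_readable` helpers from `check_api`. A quick sketch of the new calls; the exact output strings are version-, locale- and timezone-dependent, so the shapes below are indicative only:

from cmk.agent_based.v2 import render

print(render.bytes(1024**2))  # e.g. "1.00 MiB" (IEC units)
print(render.timespan(3661))  # e.g. "1 hour 1 minute"
print(render.datetime(0))     # e.g. "Jan 01 1970 00:00:00", timezone-dependent
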
diff --git a/cmk/base/legacy_checks/mcafee_emailgateway_spam_mcafee.py b/cmk/base/legacy_checks/mcafee_emailgateway_spam_mcafee.py
index 18719ffaae9..942f5f7887f 100644
--- a/cmk/base/legacy_checks/mcafee_emailgateway_spam_mcafee.py
+++ b/cmk/base/legacy_checks/mcafee_emailgateway_spam_mcafee.py
@@ -5,14 +5,18 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.mcafee_gateway import inventory_mcafee_gateway_generic
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.mcafee_gateway import DETECT_EMAIL_GATEWAY
-# TODO together with other mcafee-emailgateway-info checks?
+
+def parse_mcafee_emailgateway_spam_mcafee(string_table: StringTable) -> StringTable | None:
+ return string_table or None
+
+
+def inventory_mcafee_gateway_generic(info):
+ return [(None, {})]
def check_mcafee_emailgateway_spam_mcafee(item, params, info):
@@ -20,10 +24,6 @@ def check_mcafee_emailgateway_spam_mcafee(item, params, info):
return 0, f"Engine version: {eng_version}, Rules version: {rules_version}"
-def parse_mcafee_emailgateway_spam_mcafee(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
check_info["mcafee_emailgateway_spam_mcafee"] = LegacyCheckDefinition(
parse_function=parse_mcafee_emailgateway_spam_mcafee,
detect=DETECT_EMAIL_GATEWAY,
diff --git a/cmk/base/legacy_checks/mcafee_webgateway_info.py b/cmk/base/legacy_checks/mcafee_webgateway_info.py
deleted file mode 100644
index 9f6fd544358..00000000000
--- a/cmk/base/legacy_checks/mcafee_webgateway_info.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib import mcafee_gateway
-
-
-def inventory_mcafee_webgateway_info(info):
- if info:
- return [(None, None)]
- return []
-
-
-def check_mcafee_webgateway_info(_no_item, _no_params, info):
- version, revision = info[0]
- return 0, f"Product version: {version}, Revision: {revision}"
-
-
-def parse_mcafee_webgateway_info(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["mcafee_webgateway_info"] = LegacyCheckDefinition(
- parse_function=parse_mcafee_webgateway_info,
- detect=mcafee_gateway.DETECT_WEB_GATEWAY,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.1230.2.7.1",
- oids=["3", "9"],
- ),
- service_name="Web gateway info",
- discovery_function=inventory_mcafee_webgateway_info,
- check_function=check_mcafee_webgateway_info,
-)
diff --git a/cmk/base/legacy_checks/mem.py b/cmk/base/legacy_checks/mem.py
deleted file mode 100644
index f5f176489d8..00000000000
--- a/cmk/base/legacy_checks/mem.py
+++ /dev/null
@@ -1,427 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-# mypy: disable-error-code="arg-type"
-
-import time
-
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
-from cmk.base.check_legacy_includes.mem import check_memory_dict, check_memory_element
-from cmk.base.config import check_info
-
-import cmk.plugins.lib.memory as memory
-from cmk.agent_based.v2 import get_average, get_value_store, render
-
-# .--mem.linux-----------------------------------------------------------.
-# | _ _ |
-# | _ __ ___ ___ _ __ ___ | (_)_ __ _ ___ __ |
-# | | '_ ` _ \ / _ \ '_ ` _ \ | | | '_ \| | | \ \/ / |
-# | | | | | | | __/ | | | | |_| | | | | | |_| |> < |
-# | |_| |_| |_|\___|_| |_| |_(_)_|_|_| |_|\__,_/_/\_\ |
-# | |
-# +----------------------------------------------------------------------+
-# | Specialized memory check for Linux that takes into account |
-# | all of its specific information in /proc/meminfo. |
-# '----------------------------------------------------------------------'
-
-
-def inventory_mem_linux(section):
- if memory.is_linux_section(section):
- yield None, {}
-
-
-def check_mem_linux(_no_item, params, section):
- if not section:
- return
-
- # quick fix: stop modifying parsed data in place!
- section = section.copy()
-
- # TODO: Currently some of these values are just set to generate the metrics later
- # See which ones we actually need.
-
- # SReclaimable is not available for older kernels
- # SwapCached may be missing if swap is disabled, see crash 9d22dcb4-5260-11eb-8458-0b95bfca1bb1
- # Compute memory used by caches, that can be considered "free"
- section["Caches"] = (
- section["Cached"]
- + section["Buffers"]
- + section.get("SwapCached", 0)
- + section.get("SReclaimable", 0)
- )
-
- section["MemUsed"] = section["MemTotal"] - section["MemFree"] - section["Caches"]
- section["SwapUsed"] = section["SwapTotal"] - section["SwapFree"]
- section["TotalTotal"] = section["MemTotal"] + section["SwapTotal"]
- section["TotalUsed"] = section["MemUsed"] + section["SwapUsed"]
-
- # Disk Writeback
- section["Pending"] = (
- section["Dirty"]
- + section.get("Writeback", 0)
- + section.get("NFS_Unstable", 0)
- + section.get("Bounce", 0)
- + section.get("WritebackTmp", 0)
- )
-
- results = check_memory_dict(section, params)
-
- # show this always:
- yield results.pop("virtual", (0, ""))
-
- details_results = []
- for state, text, metrics in results.values():
- if state:
- yield state, text, metrics
- else:
- details_results.append((state, text, metrics))
- MARK_AS_DETAILS = "\n"
- for state, text, perf in details_results:
- yield state, f"{MARK_AS_DETAILS}{text}", perf
-
- # Now send performance data. We simply output *all* fields of section
- # except for a few really useless values
- perfdata = []
- for name, value in sorted(section.items()):
- if name.startswith("DirectMap"):
- continue
- if (
- name.startswith("Vmalloc") and section["VmallocTotal"] > 2**40
- ): # useless on 64 Bit system
- continue
- if name.startswith("Huge"):
- if section["HugePages_Total"] == 0: # omit useless data
- continue
- if name == "Hugepagesize":
- continue # not needed
- value = value * section["Hugepagesize"] # convert number to actual memory size
- metric_name = camelcase_to_underscored(name.replace("(", "_").replace(")", ""))
- if metric_name not in {
- "mem_used",
- "mem_used_percent",
- "swap_used",
- "committed_as",
- "shmem",
- "page_tables",
- }:
- perfdata.append((metric_name, value))
- yield 0, "", perfdata
-
-
-# ThisIsACamel -> this_is_a_camel
-def camelcase_to_underscored(name):
- previous_lower = False
- previous_underscore = True
- result = ""
- for char in name:
- if char.isupper():
- if previous_lower and not previous_underscore:
- result += "_"
- previous_lower = False
- previous_underscore = False
- result += char.lower()
- elif char == "_":
- previous_lower = False
- previous_underscore = True
- result += char
- else:
- previous_lower = True
- previous_underscore = False
- result += char
- return result
-
-
-check_info["mem.linux"] = LegacyCheckDefinition(
- service_name="Memory",
- sections=["mem"],
- discovery_function=inventory_mem_linux,
- check_function=check_mem_linux,
- check_ruleset_name="memory_linux",
- check_default_parameters={
- "levels_virtual": ("perc_used", (80.0, 90.0)),
- "levels_total": ("perc_used", (120.0, 150.0)),
- "levels_shm": ("perc_used", (20.0, 30.0)),
- "levels_pagetables": ("perc_used", (8.0, 16.0)),
- "levels_committed": ("perc_used", (100.0, 150.0)),
- "levels_commitlimit": ("perc_free", (20.0, 10.0)),
- "levels_vmalloc": ("abs_free", (50 * 1024 * 1024, 30 * 1024 * 1024)),
- "levels_hardwarecorrupted": ("abs_used", (1, 1)),
- },
-)
-
-# .
-# .--mem.used------------------------------------------------------------.
-# | _ |
-# | _ __ ___ ___ _ __ ___ _ _ ___ ___ __| | |
-# | | '_ ` _ \ / _ \ '_ ` _ \ | | | / __|/ _ \/ _` | |
-# | | | | | | | __/ | | | | || |_| \__ \ __/ (_| | |
-# | |_| |_| |_|\___|_| |_| |_(_)__,_|___/\___|\__,_| |
-# | |
-# +----------------------------------------------------------------------+
-# | Memory check that takes into account the swap space. This check is |
-# | used for unixoide operating systems. |
-# '----------------------------------------------------------------------'
-
-
-# .
-# .--mem.win-------------------------------------------------------------.
-# | _ |
-# | _ __ ___ ___ _ __ ___ __ _(_)_ __ |
-# | | '_ ` _ \ / _ \ '_ ` _ \\ \ /\ / / | '_ \ |
-# | | | | | | | __/ | | | | |\ V V /| | | | | |
-# | |_| |_| |_|\___|_| |_| |_(_)_/\_/ |_|_| |_| |
-# | |
-# +----------------------------------------------------------------------+
-# | Windows now has a dedicated memory check that reflect the special |
-# | nature of the page file. |
-# '----------------------------------------------------------------------'
-
-_MB = 1024**2
-
-# Special memory and page file check for Windows
-
-
-def inventory_mem_win(section):
- if "MemTotal" in section and "PageTotal" in section:
- yield None, {}
-
-
-def _get_levels_type(levels):
- if levels is None:
- return None
- if not isinstance(levels, tuple):
- return "predictive"
- if isinstance(levels[0], float):
- return "perc_used"
- return "abs_free"
-
-
-def _get_levels_type_and_value(levels_value):
- levels_type = _get_levels_type(levels_value)
- if levels_type is None or levels_type == "predictive":
- return (
- levels_type,
- None,
- )
- return (
- levels_type,
- (
- levels_type,
- levels_value
- if levels_type == "perc_used"
- else (
- # absolute levels on free space come in MB, which cannot be changed easily
- levels_value[0] * _MB,
- levels_value[1] * _MB,
- ),
- ),
- )
-
-
-def _do_averaging(
- timestamp,
- average_horizon_min,
- paramname,
- used,
- total,
-):
- used_avg = (
- get_average(
- get_value_store(),
- "mem.win.%s" % paramname,
- timestamp,
- used / 1024.0, # use kB for compatibility
- average_horizon_min,
- )
- * 1024
- )
- return (
- used_avg,
- "%d min average: %s (%s)"
- % (
- average_horizon_min,
- render.percent(100.0 * used_avg / total),
- get_bytes_human_readable(used_avg),
- ),
- )
-
-
-def _apply_predictive_levels(
- params,
- paramname,
- title,
- used,
-):
- if "average" in params:
- titleinfo = title
- dsname = "%s_avg" % paramname
- else:
- titleinfo = title
- dsname = paramname
-
- return check_levels(
- used / _MB, # Current value stored in MB in RRDs
- dsname,
- params[paramname],
- unit="GiB", # Levels are specified in GiB...
- scale=1024, # ... in WATO ValueSpec
- infoname=titleinfo,
- )
-
-
-def check_mem_windows(_no_item, params, section):
- now = time.time()
-
- for title, prefix, paramname, metric_name in [
- ("RAM", "Mem", "memory", "mem_used"),
- ("Commit charge", "Page", "pagefile", "pagefile_used"),
- ]:
- total = section.get("%sTotal" % prefix)
- free = section.get("%sFree" % prefix)
- if None in (total, free):
- continue
- used = total - free
-
- levels_type, levels_memory_element = _get_levels_type_and_value(params.get(paramname))
- do_averaging = "average" in params
-
- state, infotext, perfdata = check_memory_element(
- title,
- used,
- total,
- None if do_averaging else levels_memory_element,
- metric_name=metric_name,
- create_percent_metric=title == "RAM",
- )
-
- # Metrics for total mem and pagefile are expected in MB
- if prefix == "Mem":
- perfdata.append(("mem_total", total / _MB))
- elif prefix == "Page":
- perfdata.append(("pagefile_total", total / _MB))
-
- # Do averaging, if configured, just for matching the levels
- if do_averaging:
- used, infoadd = _do_averaging(
- now,
- params["average"],
- paramname,
- used,
- total,
- )
- infotext += f", {infoadd}"
-
- if levels_type != "predictive":
- state, _infotext, perfadd = check_memory_element(
- title,
- used,
- total,
- levels_memory_element,
- metric_name=paramname + "_avg",
- )
-
- perfdata.append(
- (
- (averaged_metric := perfadd[0])[0],
- # the averaged metrics are expected to be in MB
- *(v / _MB if v is not None else None for v in averaged_metric[1:]),
- )
- )
-
- if levels_type == "predictive":
- state, infoadd, perfadd = _apply_predictive_levels(
- params,
- paramname,
- title,
- used,
- )
- if infoadd:
- infotext += ", " + infoadd
- perfdata += perfadd
-
- yield state, infotext, perfdata
-
-
-check_info["mem.win"] = LegacyCheckDefinition(
- service_name="Memory",
- sections=["mem"],
- discovery_function=inventory_mem_win,
- check_function=check_mem_windows,
- check_ruleset_name="memory_pagefile_win",
- check_default_parameters={
- "memory": (80.0, 90.0),
- "pagefile": (80.0, 90.0),
- },
-)
-
-# .
-# .--mem.vmalloc---------------------------------------------------------.
-# | _ _ |
-# | _ __ ___ ___ _ __ ___ __ ___ __ ___ __ _| | | ___ ___ |
-# | | '_ ` _ \ / _ \ '_ ` _ \\ \ / / '_ ` _ \ / _` | | |/ _ \ / __| |
-# | | | | | | | __/ | | | | |\ V /| | | | | | (_| | | | (_) | (__ |
-# | |_| |_| |_|\___|_| |_| |_(_)_/ |_| |_| |_|\__,_|_|_|\___/ \___| |
-# | |
-# +----------------------------------------------------------------------+
-# | This very specific check checks the usage and fragmentation of the |
-# | address space 'vmalloc' that can be problematic on 32-Bit systems. |
-# | It is superseded by the new check mem.linux and will be removed  |
-# | soon. |
-# '----------------------------------------------------------------------'
-
-
-def inventory_mem_vmalloc(section):
- if memory.is_linux_section(section):
- return # handled by new Linux memory check
-
- # newer kernel versions report wrong data,
- # i.e. both VmallocUsed and VmallocChunk equal zero
- if "VmallocTotal" in section and not (
- section["VmallocUsed"] == 0 and section["VmallocChunk"] == 0
- ):
- # Do not check this on 64-bit systems. They have almost
- # infinite vmalloc
- if section["VmallocTotal"] < 4 * 1024**2:
- yield None, {}
-
-
-def check_mem_vmalloc(_item, params, section):
- total_mb = section["VmallocTotal"] / 1024.0**2
- used_mb = section["VmallocUsed"] / 1024.0**2
- chunk_mb = section["VmallocChunk"] / 1024.0**2
- used_warn_perc, used_crit_perc = params["levels_used_perc"]
-
- yield 0, f"Total: {total_mb:.1f} MB"
- yield check_levels(
- used_mb,
- dsname="used",
- params=(total_mb * used_warn_perc / 100, total_mb * used_crit_perc / 100),
- human_readable_func=lambda v: f"{v:.1f}",
- unit="MB",
- infoname="Used",
- boundaries=(0, total_mb),
- )
- yield check_levels(
- chunk_mb,
- dsname="chunk",
- params=(None, None) + params["levels_lower_chunk_mb"],
- human_readable_func=lambda v: f"{v:.1f}",
- unit="MB",
- infoname="Largest chunk",
- boundaries=(0, total_mb),
- )
-
-
-check_info["mem.vmalloc"] = LegacyCheckDefinition(
- service_name="Vmalloc address space",
- sections=["mem"],
- discovery_function=inventory_mem_vmalloc,
- check_function=check_mem_vmalloc,
- check_default_parameters={
- "levels_used_perc": (80.0, 90.0),
- "levels_lower_chunk_mb": (64, 32),
- },
-)
diff --git a/cmk/base/legacy_checks/mem_linux.py b/cmk/base/legacy_checks/mem_linux.py
new file mode 100644
index 00000000000..e9c622df0c4
--- /dev/null
+++ b/cmk/base/legacy_checks/mem_linux.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+
+# mypy: disable-error-code="arg-type"
+
+from cmk.base.check_api import LegacyCheckDefinition
+from cmk.base.check_legacy_includes.mem import check_memory_dict
+from cmk.base.config import check_info
+
+from cmk.plugins.lib import memory
+
+
+def inventory_mem_linux(section):
+ if memory.is_linux_section(section):
+ yield None, {}
+
+
+def check_mem_linux(_no_item, params, section):
+ if not section:
+ return
+
+ # quick fix: stop modifying parsed data in place!
+ section = section.copy()
+
+ # TODO: Currently some of these values are just set to generate the metrics later.
+ # See which ones we actually need.
+
+ # SReclaimable is not available for older kernels
+ # SwapCached may be missing if swap is disabled, see crash 9d22dcb4-5260-11eb-8458-0b95bfca1bb1
+ # Compute memory used by caches, which can be considered "free"
+ section["Caches"] = (
+ section["Cached"]
+ + section["Buffers"]
+ + section.get("SwapCached", 0)
+ + section.get("SReclaimable", 0)
+ )
+
+ section["MemUsed"] = section["MemTotal"] - section["MemFree"] - section["Caches"]
+ section["SwapUsed"] = section["SwapTotal"] - section["SwapFree"]
+ section["TotalTotal"] = section["MemTotal"] + section["SwapTotal"]
+ section["TotalUsed"] = section["MemUsed"] + section["SwapUsed"]
+
+ # Disk Writeback
+ section["Pending"] = (
+ section["Dirty"]
+ + section.get("Writeback", 0)
+ + section.get("NFS_Unstable", 0)
+ + section.get("Bounce", 0)
+ + section.get("WritebackTmp", 0)
+ )
+
+ results = check_memory_dict(section, params)
+
+ # show this always:
+ yield results.pop("virtual", (0, ""))
+
+ details_results = []
+ for state, text, metrics in results.values():
+ if state:
+ yield state, text, metrics
+ else:
+ details_results.append((state, text, metrics))
+ MARK_AS_DETAILS = "\n"
+ for state, text, perf in details_results:
+ yield state, f"{MARK_AS_DETAILS}{text}", perf
+
+ # Now send performance data. We simply output *all* fields of section
+ # except for a few really useless values
+ perfdata = []
+ for name, value in sorted(section.items()):
+ if name.startswith("DirectMap"):
+ continue
+ if (
+ name.startswith("Vmalloc") and section["VmallocTotal"] > 2**40
+ ): # useless on 64-bit systems
+ continue
+ if name.startswith("Huge"):
+ if section["HugePages_Total"] == 0: # omit useless data
+ continue
+ if name == "Hugepagesize":
+ continue # not needed
+ value = value * section["Hugepagesize"] # convert number to actual memory size
+ metric_name = _camelcase_to_underscored(name.replace("(", "_").replace(")", ""))
+ if metric_name not in {
+ "mem_used",
+ "mem_used_percent",
+ "swap_used",
+ "committed_as",
+ "shmem",
+ "page_tables",
+ }:
+ perfdata.append((metric_name, value))
+ yield 0, "", perfdata
+
+
+# ThisIsACamel -> this_is_a_camel
+def _camelcase_to_underscored(name):
+ previous_lower = False
+ previous_underscore = True
+ result = ""
+ for char in name:
+ if char.isupper():
+ if previous_lower and not previous_underscore:
+ result += "_"
+ previous_lower = False
+ previous_underscore = False
+ result += char.lower()
+ elif char == "_":
+ previous_lower = False
+ previous_underscore = True
+ result += char
+ else:
+ previous_lower = True
+ previous_underscore = False
+ result += char
+ return result
+
+
+check_info["mem.linux"] = LegacyCheckDefinition(
+ service_name="Memory",
+ sections=["mem"],
+ discovery_function=inventory_mem_linux,
+ check_function=check_mem_linux,
+ check_ruleset_name="memory_linux",
+ check_default_parameters={
+ "levels_virtual": ("perc_used", (80.0, 90.0)),
+ "levels_total": ("perc_used", (120.0, 150.0)),
+ "levels_shm": ("perc_used", (20.0, 30.0)),
+ "levels_pagetables": ("perc_used", (8.0, 16.0)),
+ "levels_committed": ("perc_used", (100.0, 150.0)),
+ "levels_commitlimit": ("perc_free", (20.0, 10.0)),
+ "levels_vmalloc": ("abs_free", (50 * 1024 * 1024, 30 * 1024 * 1024)),
+ "levels_hardwarecorrupted": ("abs_used", (1, 1)),
+ },
+)
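
For reference, a few worked conversions of `_camelcase_to_underscored` above (the body is copied unchanged from the deleted mem.py, only renamed to be private; run these against the function as defined there). Note that consecutive capitals are collapsed rather than separated:

assert _camelcase_to_underscored("MemTotal") == "mem_total"
assert _camelcase_to_underscored("HugePages_Total") == "huge_pages_total"
assert _camelcase_to_underscored("Committed_AS") == "committed_as"
assert _camelcase_to_underscored("SReclaimable") == "sreclaimable"  # no "s_"
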
diff --git a/cmk/base/legacy_checks/mem_vmalloc.py b/cmk/base/legacy_checks/mem_vmalloc.py
new file mode 100644
index 00000000000..138579480e9
--- /dev/null
+++ b/cmk/base/legacy_checks/mem_vmalloc.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+
+# mypy: disable-error-code="arg-type"
+
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
+from cmk.base.config import check_info
+
+from cmk.plugins.lib import memory
+
+
+def inventory_mem_vmalloc(section):
+ if memory.is_linux_section(section):
+ return # handled by new Linux memory check
+
+ # newer kernel versions report wrong data,
+ # i.e. both VmallocUsed and VmallocChunk equal zero
+ if "VmallocTotal" in section and not (
+ section["VmallocUsed"] == 0 and section["VmallocChunk"] == 0
+ ):
+ # Do not check this on 64-bit systems. They have almost
+ # infinite vmalloc
+ if section["VmallocTotal"] < 4 * 1024**2:
+ yield None, {}
+
+
+def check_mem_vmalloc(_item, params, section):
+ total_mb = section["VmallocTotal"] / 1024.0**2
+ used_mb = section["VmallocUsed"] / 1024.0**2
+ chunk_mb = section["VmallocChunk"] / 1024.0**2
+ used_warn_perc, used_crit_perc = params["levels_used_perc"]
+
+ yield 0, f"Total: {total_mb:.1f} MB"
+ yield check_levels(
+ used_mb,
+ dsname="used",
+ params=(total_mb * used_warn_perc / 100, total_mb * used_crit_perc / 100),
+ human_readable_func=lambda v: f"{v:.1f}",
+ unit="MB",
+ infoname="Used",
+ boundaries=(0, total_mb),
+ )
+ yield check_levels(
+ chunk_mb,
+ dsname="chunk",
+ params=(None, None) + params["levels_lower_chunk_mb"],
+ human_readable_func=lambda v: f"{v:.1f}",
+ unit="MB",
+ infoname="Largest chunk",
+ boundaries=(0, total_mb),
+ )
+
+
+check_info["mem.vmalloc"] = LegacyCheckDefinition(
+ service_name="Vmalloc address space",
+ sections=["mem"],
+ discovery_function=inventory_mem_vmalloc,
+ check_function=check_mem_vmalloc,
+ check_default_parameters={
+ "levels_used_perc": (80.0, 90.0),
+ "levels_lower_chunk_mb": (64, 32),
+ },
+)
diff --git a/cmk/base/legacy_checks/mikrotik_signal.py b/cmk/base/legacy_checks/mikrotik_signal.py
index f58618b1bc2..a968e075917 100644
--- a/cmk/base/legacy_checks/mikrotik_signal.py
+++ b/cmk/base/legacy_checks/mikrotik_signal.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_mikrotik_signal(info):
@@ -52,6 +51,6 @@ def parse_mikrotik_signal(string_table: StringTable) -> StringTable:
check_function=check_mikrotik_signal,
check_ruleset_name="signal_quality",
check_default_parameters={
- "levels_lower": (80, 70),
+ "levels_lower": (80.0, 70.0),
},
)
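
The `(80, 70)` to `(80.0, 70.0)` change likely matters because legacy level handling often branches on the Python type of the levels tuple: see `_get_levels_type` in the deleted mem.py above, which reads floats as percentages and everything else as absolute values. A minimal sketch of that convention; the helper name is hypothetical, and whether the signal_quality ruleset does exactly this is not shown here:

def interpret_levels(levels):
    warn, _crit = levels
    return "percentage" if isinstance(warn, float) else "absolute"

assert interpret_levels((80.0, 70.0)) == "percentage"
assert interpret_levels((80, 70)) == "absolute"
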
diff --git a/cmk/base/legacy_checks/mkbackup.py b/cmk/base/legacy_checks/mkbackup.py
index 9631dfff039..105d5f3fd58 100644
--- a/cmk/base/legacy_checks/mkbackup.py
+++ b/cmk/base/legacy_checks/mkbackup.py
@@ -31,21 +31,15 @@
# }
-# TODO: Refactor this.
-
-
# mypy: disable-error-code="var-annotated"
import time
-from cmk.base.check_api import (
- get_age_human_readable,
- get_bytes_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_mkbackup(string_table):
import json
@@ -84,8 +78,8 @@ def check_mkbackup(job_state):
0,
"The job is running for %s since %s"
% (
- get_age_human_readable(duration),
- get_timestamp_human_readable(job_state["started"]),
+ render.timespan(duration),
+ render.datetime(job_state["started"]),
),
[("backup_duration", duration), ("backup_avgspeed", job_state["bytes_per_second"])],
)
@@ -101,9 +95,9 @@ def check_mkbackup(job_state):
0,
"it was running for %s from %s till %s"
% (
- get_age_human_readable(duration),
- get_timestamp_human_readable(job_state["started"]),
- get_timestamp_human_readable(job_state["finished"]),
+ render.timespan(duration),
+ render.datetime(job_state["started"]),
+ render.datetime(job_state["finished"]),
),
[("backup_duration", duration), ("backup_avgspeed", job_state["bytes_per_second"])],
)
@@ -111,7 +105,7 @@ def check_mkbackup(job_state):
if "size" in job_state:
yield (
0,
- "Size: %s" % get_bytes_human_readable(job_state["size"]),
+ "Size: %s" % render.bytes(job_state["size"]),
[("backup_size", job_state["size"])],
)
@@ -124,7 +118,7 @@ def check_mkbackup(job_state):
state = 2
else:
state = 0
- yield state, "Next run: %s" % get_timestamp_human_readable(next_run)
+ yield state, "Next run: %s" % render.datetime(next_run)
def inventory_mkbackup_system(parsed):
diff --git a/cmk/base/legacy_checks/mkeventd_status.py b/cmk/base/legacy_checks/mkeventd_status.py
index cca76b9e700..1c7a0e357c4 100644
--- a/cmk/base/legacy_checks/mkeventd_status.py
+++ b/cmk/base/legacy_checks/mkeventd_status.py
@@ -13,10 +13,10 @@
import time
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store
+from cmk.agent_based.v2 import get_rate, get_value_store, render
def parse_mkeventd_status(string_table):
@@ -27,7 +27,7 @@ def parse_mkeventd_status(string_table):
try:
data = json.loads(line[0])
except ValueError:
- # The agent plugin asks the event console for json OutputFormat, but
+ # The agent plug-in asks the event console for json OutputFormat, but
# older versions always provide python format - even when other format
# was requested. Skipping the site. Won't eval data from other systems.
continue
@@ -63,7 +63,7 @@ def check_mkeventd_status(item, params, parsed): # pylint: disable=too-many-bra
("num_open_events", status["num_open_events"])
]
- yield 0, "Virtual memory: %s" % get_bytes_human_readable(status["virtual_memory_size"]), [
+ yield 0, "Virtual memory: %s" % render.bytes(status["virtual_memory_size"]), [
("process_virtual_size", status["virtual_memory_size"])
]
diff --git a/cmk/base/legacy_checks/mknotifyd.py b/cmk/base/legacy_checks/mknotifyd.py
index 7b0bc91066e..9ad1f2a7759 100644
--- a/cmk/base/legacy_checks/mknotifyd.py
+++ b/cmk/base/legacy_checks/mknotifyd.py
@@ -53,10 +53,10 @@
import time
from typing import Any
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
+from cmk.agent_based.v2 import IgnoreResultsError, render
def parse_mknotifyd(string_table): # pylint: disable=too-many-branches
@@ -186,9 +186,8 @@ def check_mknotifyd(item, _no_params, parsed):
status_age = parsed["timestamp"] - stat["Updated"]
if status_age > 90:
state = 2
- infotext = (
- "Status last updated %s ago, spooler seems crashed or busy"
- % get_age_human_readable(status_age)
+ infotext = "Status last updated %s ago, spooler seems crashed or busy" % render.timespan(
+ status_age
)
else:
state = 0
@@ -205,7 +204,7 @@ def check_mknotifyd(item, _no_params, parsed):
perf_data = [("corrupted_files", corrupted["Count"])]
yield 1, "%d corrupted files: youngest %s ago" % (
corrupted["Count"],
- get_age_human_readable(age),
+ render.timespan(age),
), perf_data
# Are there deferred files that are too old?
@@ -222,7 +221,7 @@ def check_mknotifyd(item, _no_params, parsed):
state = 0
yield state, "%d deferred files: oldest %s ago" % (
count,
- get_age_human_readable(age),
+ render.timespan(age),
), perf_data
return
@@ -278,7 +277,7 @@ def check_mknotifyd_connection(item, _no_params, parsed):
# Show uptime
if connection["State"] == "established":
age = parsed["timestamp"] - connection["Since"]
- yield 0, "Uptime: %s" % get_age_human_readable(age)
+ yield 0, "Uptime: %s" % render.timespan(age)
if "Connect Time" in connection:
yield 0, "Connect time: %.3f sec" % connection["Connect Time"]
diff --git a/cmk/base/legacy_checks/mongodb_asserts.py b/cmk/base/legacy_checks/mongodb_asserts.py
deleted file mode 100644
index a972245d34e..00000000000
--- a/cmk/base/legacy_checks/mongodb_asserts.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# <<<mongodb_asserts>>>
-# msg 0
-# rollovers 0
-# regular 0
-# warning 0
-# user 85181
-
-
-import time
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import get_rate, get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def inventory_mongodb_asserts(info):
- return [(None, {})]
-
-
-def check_mongodb_asserts(_no_item, params, info):
- now = time.time()
- for line in info:
- what = line[0]
- value = int(line[1])
- warn, crit = None, None
- what_rate = get_rate(get_value_store(), what, now, value, raise_overflow=True)
-
- state = 0
- if "%s_assert_rate" % what in params:
- warn, crit = params["%s_assert_rate" % what]
- if what_rate >= crit:
- state = 2
- elif what_rate >= warn:
- state = 1
-
- yield state, f"{what_rate:.2f} {what.title()} Asserts/sec", [
- ("assert_%s" % what, what_rate)
- ]
-
-
-def parse_mongodb_asserts(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["mongodb_asserts"] = LegacyCheckDefinition(
- parse_function=parse_mongodb_asserts,
- service_name="MongoDB Asserts",
- discovery_function=inventory_mongodb_asserts,
- check_function=check_mongodb_asserts,
- check_ruleset_name="mongodb_asserts",
-)
diff --git a/cmk/base/legacy_checks/mongodb_cluster.py b/cmk/base/legacy_checks/mongodb_cluster.py
index b5b0fb6eff5..8338d86fdc6 100644
--- a/cmk/base/legacy_checks/mongodb_cluster.py
+++ b/cmk/base/legacy_checks/mongodb_cluster.py
@@ -12,9 +12,11 @@
import json
from collections.abc import Iterable, Mapping
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
Section = Mapping
@@ -337,11 +339,11 @@ def _generate_mongodb_cluster_long_output(
if has_chunks:
collections_info.append(
"- Chunks: %d (Default chunk size: %s)"
- % (total_number_of_chunks, get_bytes_human_readable(chunk_size))
+ % (total_number_of_chunks, render.bytes(chunk_size))
)
collections_info.append("- Docs: %d" % total_number_of_documents)
- collections_info.append("- Size: %s" % get_bytes_human_readable(total_collection_size))
- collections_info.append("- Storage: %s" % get_bytes_human_readable(storage_size))
+ collections_info.append("- Size: %s" % render.bytes(total_collection_size))
+ collections_info.append("- Storage: %s" % render.bytes(storage_size))
if is_sharded:
collections_info.append("- Balancer: %s" % balancer_status)
@@ -405,12 +407,12 @@ def _mongodb_cluster_get_shard_statistic_info(
output.append(
"- Size: %s%s"
% (
- get_bytes_human_readable(shard_size),
+ render.bytes(shard_size),
" (%1.2f%%)" % estDataPercent if is_sharded else "",
)
)
if is_sharded:
- output.append("--- per chunk: " + "\u2248" + " %s" % get_bytes_human_readable(estChunkData))
+ output.append("--- per chunk: " + "\u2248" + " %s" % render.bytes(estChunkData))
output.append("- Host: %s" % hostname)
return "\n".join(output)
diff --git a/cmk/base/legacy_checks/mongodb_collections.py b/cmk/base/legacy_checks/mongodb_collections.py
index 6fd263eb49d..47f3a5829d8 100644
--- a/cmk/base/legacy_checks/mongodb_collections.py
+++ b/cmk/base/legacy_checks/mongodb_collections.py
@@ -11,14 +11,10 @@
import json
-from cmk.base.check_api import (
- check_levels,
- get_bytes_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
from cmk.plugins.lib.mongodb import parse_date
@@ -78,7 +74,7 @@ def check_mongodb_collections(item, params, databases_dict):
perfdata = _mongodb_collections_get_perfdata_key(key)
yield check_levels(
- value, perfdata, levels, human_readable_func=get_bytes_human_readable, infoname=label
+ value, perfdata, levels, human_readable_func=render.bytes, infoname=label
)
# check number of indexes per collection (max is 64 indexes)
@@ -200,14 +196,14 @@ def _mongodb_collections_sort_second(tup):
def _mongodb_collections_bytes_human_readable(data, key):
try:
- return get_bytes_human_readable(int(data.get(key)))
+ return render.bytes(int(data.get(key)))
except (TypeError, ValueError):
return "n/a"
def _mongodb_collections_timestamp_human_readable(value):
try:
- return get_timestamp_human_readable(int(value))
+ return render.datetime(int(value))
except (TypeError, ValueError):
return "n/a"
diff --git a/cmk/base/legacy_checks/mongodb_connections.py b/cmk/base/legacy_checks/mongodb_connections.py
index f62145b3ccd..60dc2426c6d 100644
--- a/cmk/base/legacy_checks/mongodb_connections.py
+++ b/cmk/base/legacy_checks/mongodb_connections.py
@@ -16,8 +16,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, render, StringTable
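+# StringTable is available directly from the cmk.agent_based.v2 package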
def inventory_mongodb_connections(info):
diff --git a/cmk/base/legacy_checks/mongodb_flushing.py b/cmk/base/legacy_checks/mongodb_flushing.py
index 6ff0b8d0394..a0c9ff4034b 100644
--- a/cmk/base/legacy_checks/mongodb_flushing.py
+++ b/cmk/base/legacy_checks/mongodb_flushing.py
@@ -11,11 +11,10 @@
import time
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_average, get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_average, get_value_store, render, StringTable
def inventory_mongodb_flushing(info):
@@ -67,7 +66,7 @@ def check_mongodb_flushing(_no_item, params, info):
)
yield 0, "Flushes since restart: %s" % flushed, [("flushed", flushed)]
- yield 0, "Average flush time since restart: %s" % get_age_human_readable(avg_flush_time), [
+ yield 0, "Average flush time since restart: %s" % render.timespan(avg_flush_time), [
("avg_flush_time", avg_flush_time)
]
diff --git a/cmk/base/legacy_checks/mongodb_instance.py b/cmk/base/legacy_checks/mongodb_instance.py
index 7c0ed0ba888..98da8fcb74a 100644
--- a/cmk/base/legacy_checks/mongodb_instance.py
+++ b/cmk/base/legacy_checks/mongodb_instance.py
@@ -11,7 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_mongodb_instance(info):
diff --git a/cmk/base/legacy_checks/mongodb_locks.py b/cmk/base/legacy_checks/mongodb_locks.py
index dc15b596747..9786d7a5b5f 100644
--- a/cmk/base/legacy_checks/mongodb_locks.py
+++ b/cmk/base/legacy_checks/mongodb_locks.py
@@ -15,7 +15,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_mongodb_locks(info):
diff --git a/cmk/base/legacy_checks/mongodb_mem.py b/cmk/base/legacy_checks/mongodb_mem.py
index 871776f370c..c677debc3ac 100644
--- a/cmk/base/legacy_checks/mongodb_mem.py
+++ b/cmk/base/legacy_checks/mongodb_mem.py
@@ -17,9 +17,11 @@
from collections.abc import Iterable, Mapping
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
Section = Mapping[str, str | int]
@@ -48,7 +50,7 @@ def check_mongodb_mem(_no_item, params, parsed):
value_bytes,
"process_%s_size" % key,
levels,
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="%s usage" % key.title(),
)
diff --git a/cmk/base/legacy_checks/mongodb_replica_set.py b/cmk/base/legacy_checks/mongodb_replica_set.py
index a7a052ec282..a7eb7f4fa57 100644
--- a/cmk/base/legacy_checks/mongodb_replica_set.py
+++ b/cmk/base/legacy_checks/mongodb_replica_set.py
@@ -13,15 +13,10 @@
import time
from collections.abc import Iterable, Mapping
-from cmk.base.check_api import (
- check_levels,
- get_age_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_value_store
+from cmk.agent_based.v2 import get_value_store, render
from cmk.plugins.lib.mongodb import parse_date
# levels_mongdb_replication_lag: (lag threshold, time interval for warning, time interval for critical)
@@ -126,7 +121,7 @@ def _check_lag_over_time(new_timestamp, member_name, name, lag_in_sec, levels):
lag_duration,
None,
levels[1:],
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname=f"{member_name} is behind {name} for",
)
@@ -148,11 +143,7 @@ def _get_long_output(member_name, member_optime_date, replication_lag_sec, name)
log.append("source: %s" % member_name)
log.append(
"syncedTo: %s (UTC)"
- % (
- datetime.datetime.fromtimestamp(member_optime_date / 1000.0).strftime(
- "%Y-%m-%d %H:%M:%S"
- )
- )
+ % (datetime.datetime.fromtimestamp(member_optime_date).strftime("%Y-%m-%d %H:%M:%S"))
)
log.append(
"member (%s) is %ds (%dh) behind %s"
@@ -200,7 +191,7 @@ def _calculate_replication_lag(start_operation_time, secondary_operation_time):
:param secondary_operation_time:
:return: replication lag in seconds
"""
- return (start_operation_time - secondary_operation_time) / 1000.0
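+ # operation times are given in seconds, so the difference is the lag directly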
+ return start_operation_time - secondary_operation_time
check_info["mongodb_replica_set"] = LegacyCheckDefinition(
@@ -271,13 +262,13 @@ def check_mongodb_primary_election(_item, _params, status_dict):
if last_primary_dict and (primary_name_changed or election_date_changed):
yield 1, "New primary '{}' elected {} {}".format(
primary_name,
- get_timestamp_human_readable(primary_election_time),
+ render.datetime(primary_election_time),
"(%s)" % ("node changed" if primary_name_changed else "election date changed"),
)
else:
yield 0, "Primary '{}' elected {}".format(
primary_name,
- get_timestamp_human_readable(primary_election_time),
+ render.datetime(primary_election_time),
)
# update primary information
diff --git a/cmk/base/legacy_checks/mongodb_replication_info.py b/cmk/base/legacy_checks/mongodb_replication_info.py
index d0999773dab..e1e6a15962c 100644
--- a/cmk/base/legacy_checks/mongodb_replication_info.py
+++ b/cmk/base/legacy_checks/mongodb_replication_info.py
@@ -17,14 +17,11 @@
import json
from collections.abc import Iterable, Mapping
-from cmk.base.check_api import (
- get_age_human_readable,
- get_bytes_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
Section = Mapping
@@ -61,7 +58,7 @@ def check_mongodb_replication_info(item, params, info_dict):
timestamp_last_operation = info_dict.get("tLast", 0)
time_difference_sec = timestamp_last_operation - timestamp_first_operation
time_diff = "Time difference: %s between the first and last operation on oplog" % (
- get_age_human_readable(time_difference_sec)
+ render.timespan(time_difference_sec)
)
except TypeError:
time_diff = "Time difference: n/a"
@@ -117,21 +114,21 @@ def _long_output(info_dict):
def _bytes_human_readable(data, key):
try:
- return get_bytes_human_readable(int(data.get(key)))
+ return render.bytes(int(data.get(key)))
except (TypeError, ValueError):
return "n/a"
def _timestamp_human_readable(data, key):
try:
- return get_timestamp_human_readable(int(data.get(key)))
+ return render.datetime(int(data.get(key)))
except (TypeError, ValueError):
return "n/a"
def _calc_time_diff(value1, value2):
try:
- return get_age_human_readable(value1 - value2)
+ return render.timespan(value1 - value2)
except TypeError:
return "n/a"
diff --git a/cmk/base/legacy_checks/moxa_iologik_register.py b/cmk/base/legacy_checks/moxa_iologik_register.py
index cbaac7a066d..993e839a3fb 100644
--- a/cmk/base/legacy_checks/moxa_iologik_register.py
+++ b/cmk/base/legacy_checks/moxa_iologik_register.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, SNMPTree, startswith, StringTable
def inventory_iologik_register(info):
diff --git a/cmk/base/legacy_checks/mq_queues.py b/cmk/base/legacy_checks/mq_queues.py
index b0f1193fd17..c36ccd973a4 100644
--- a/cmk/base/legacy_checks/mq_queues.py
+++ b/cmk/base/legacy_checks/mq_queues.py
@@ -21,7 +21,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_mq_queues(info):
diff --git a/cmk/base/legacy_checks/msexch_dag.py b/cmk/base/legacy_checks/msexch_dag.py
index 9535b781858..823a726b896 100644
--- a/cmk/base/legacy_checks/msexch_dag.py
+++ b/cmk/base/legacy_checks/msexch_dag.py
@@ -102,7 +102,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def parse_msexch_dag(string_table: StringTable) -> StringTable:
diff --git a/cmk/base/legacy_checks/msexch_replhealth.py b/cmk/base/legacy_checks/msexch_replhealth.py
index 6294e64e624..ee993997a6e 100644
--- a/cmk/base/legacy_checks/msexch_replhealth.py
+++ b/cmk/base/legacy_checks/msexch_replhealth.py
@@ -28,7 +28,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_msexch_replhealth(info):
diff --git a/cmk/base/legacy_checks/msoffice_licenses.py b/cmk/base/legacy_checks/msoffice_licenses.py
index da7e0c151e3..45c8b5f9ef4 100644
--- a/cmk/base/legacy_checks/msoffice_licenses.py
+++ b/cmk/base/legacy_checks/msoffice_licenses.py
@@ -59,7 +59,7 @@ def check_msoffice_licenses(item, params, parsed):
else:
warn_abs, crit_abs = warn, crit
- # the agent plugin also gathers the last 3 unused licenses with no
+ # the agent plug-in also gathers the last 3 unused licenses with no
# active licenses. To handle this, we only output consumed licenses for
# licenses with active ones
yield check_levels(
diff --git a/cmk/base/legacy_checks/msoffice_serviceplans.py b/cmk/base/legacy_checks/msoffice_serviceplans.py
index 965b5f80a0e..3a550811fa5 100644
--- a/cmk/base/legacy_checks/msoffice_serviceplans.py
+++ b/cmk/base/legacy_checks/msoffice_serviceplans.py
@@ -18,7 +18,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_msoffice_serviceplans(info):
diff --git a/cmk/base/legacy_checks/mysql.py b/cmk/base/legacy_checks/mysql.py
index d2995d61bdd..2fd735dcd03 100644
--- a/cmk/base/legacy_checks/mysql.py
+++ b/cmk/base/legacy_checks/mysql.py
@@ -4,18 +4,15 @@
# conditions defined in the file COPYING, which is part of this source code package.
-# mypy: disable-error-code="no-untyped-def"
-
import time
from collections.abc import Iterable, Mapping
from typing import Any, Protocol
from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.check_legacy_includes.diskstat import check_diskstat_line
from cmk.base.check_legacy_includes.mysql import mysql_parse_per_item
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, render
+from cmk.agent_based.v2 import get_average, get_rate, get_value_store, render
# <<<mysql>>>
# [[mysql]]
@@ -48,8 +45,7 @@
class DiscoveryFunction(Protocol):
- def __call__(self, section: Section) -> Iterable[Service]:
- ...
+ def __call__(self, section: Section) -> Iterable[Service]: ...
@mysql_parse_per_item
@@ -164,8 +160,69 @@ def check_mysql_iostat(item, params, parsed):
if not ("Innodb_data_read" in data and "Innodb_data_written" in data):
return
- line = [None, None, data["Innodb_data_read"] // 512, data["Innodb_data_written"] // 512]
- yield check_diskstat_line(time.time(), "innodb_io" + item, params, line)
+ yield from check_diskstat_line(
+ time.time(),
+ "innodb_io" + item,
+ params,
+ read_value=int(data["Innodb_data_read"]),
+ write_value=int(data["Innodb_data_written"]),
+ )
+
+
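+# local replacement for the removed diskstat include: checks read/write
+# throughput against optional levels, averaged over params["average"] minutes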
+def check_diskstat_line(
+ this_time: float,
+ item: str,
+ params: Mapping[str, Any],
+ read_value: int,
+ write_value: int,
+) -> Iterable[tuple[int, str, list] | tuple[int, str]]:
+ average_range = params.get("average")
+ if average_range == 0:
+ average_range = None # disable averaging when 0 is set
+
+ value_store = get_value_store()
+
+ # collect perfdata; it is re-ordered before being emitted at the end
+ perfdata: list = []
+
+ for metric_name, value in (("read", read_value), ("write", write_value)):
+ # unpack levels now; they are also needed for the perfdata entries
+ levels = params.get(f"{metric_name}_bytes")
+ if isinstance(levels, tuple):
+ warn, crit = levels
+ else:
+ warn, crit = None, None
+
+ bytes_per_sec = get_rate(
+ get_value_store(), metric_name, this_time, value, raise_overflow=True
+ )
+
+ # compute the average of the rate over the configured number of minutes
+ if average_range is not None:
+ perfdata.append((metric_name, bytes_per_sec, warn, crit))
+ bytes_per_sec = get_average(
+ value_store, f"{metric_name}.avg", this_time, bytes_per_sec, average_range
+ )
+ metric_name_suffix = ".avg"
+ else:
+ metric_name_suffix = ""
+
+ # check levels (no predictive)
+ state, text, extraperf = check_levels(
+ bytes_per_sec,
+ metric_name + metric_name_suffix,
+ levels,
+ human_readable_func=render.iobandwidth,
+ infoname=metric_name.capitalize(),
+ )
+ yield state, text
+ perfdata += extraperf
+
+ # Add performance data for averaged IO
+ if average_range is not None:
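+ # perfdata holds [read, read_avg, write, write_avg]; re-order so the raw rates come first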
+ perfdata = [perfdata[0], perfdata[2], perfdata[1], perfdata[3]]
+
+ yield 0, "", perfdata
check_info["mysql.innodb_io"] = LegacyCheckDefinition(
@@ -275,7 +332,7 @@ def check_mysql_connections(item, params, parsed):
# +----------------------------------------------------------------------+
-def _has_wsrep_provider(data) -> bool:
+def _has_wsrep_provider(data: Mapping[str, object]) -> bool:
return data.get("wsrep_provider") not in (None, "none")
diff --git a/cmk/base/legacy_checks/mysql_ping.py b/cmk/base/legacy_checks/mysql_ping.py
index 6fb96b83e2e..94627fcb1a9 100644
--- a/cmk/base/legacy_checks/mysql_ping.py
+++ b/cmk/base/legacy_checks/mysql_ping.py
@@ -10,7 +10,7 @@
from cmk.base.check_legacy_includes.mysql import mysql_parse_per_item
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
# <<<mysql_ping>>>
# [[instance]]
diff --git a/cmk/base/legacy_checks/mysql_slave.py b/cmk/base/legacy_checks/mysql_slave.py
index e8330059894..f173171d0da 100644
--- a/cmk/base/legacy_checks/mysql_slave.py
+++ b/cmk/base/legacy_checks/mysql_slave.py
@@ -6,14 +6,12 @@
# mypy: disable-error-code="arg-type"
-from cmk.base.check_api import (
- get_age_human_readable,
- get_bytes_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.mysql import mysql_parse_per_item
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
@mysql_parse_per_item
def parse_mysql_slave(string_table):
@@ -49,7 +47,7 @@ def check_mysql_slave(item, params, parsed):
output.append("Slave-IO: running")
if data["Relay_Log_Space"]:
- output.append("Relay Log: %s" % get_bytes_human_readable(data["Relay_Log_Space"]))
+ output.append("Relay Log: %s" % render.bytes(data["Relay_Log_Space"]))
perfdata.append(("relay_log_space", data["Relay_Log_Space"]))
else:
@@ -64,7 +62,7 @@ def check_mysql_slave(item, params, parsed):
output.append("Time behind master: NULL (Lost connection?)(!!)")
state = 2
else:
- out = "Time behind Master: %s" % get_age_human_readable(data["Seconds_Behind_Master"])
+ out = "Time behind Master: %s" % render.timespan(data["Seconds_Behind_Master"])
warn, crit = params.get("seconds_behind_master", (None, None))
if crit is not None and data["Seconds_Behind_Master"] > crit:
state = 2
diff --git a/cmk/base/legacy_checks/netapp_api_cluster.py b/cmk/base/legacy_checks/netapp_api_cluster.py
index 02f3b2e45b3..fe11f509c12 100644
--- a/cmk/base/legacy_checks/netapp_api_cluster.py
+++ b/cmk/base/legacy_checks/netapp_api_cluster.py
@@ -17,7 +17,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_netapp_api_cluster(info):
diff --git a/cmk/base/legacy_checks/netapp_api_connection.py b/cmk/base/legacy_checks/netapp_api_connection.py
index 4fd19f0de5a..a69f01a4b50 100644
--- a/cmk/base/legacy_checks/netapp_api_connection.py
+++ b/cmk/base/legacy_checks/netapp_api_connection.py
@@ -7,7 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_netapp_api_connection(info):
diff --git a/cmk/base/legacy_checks/netapp_api_environment.py b/cmk/base/legacy_checks/netapp_api_environment.py
index 9d1309e15ba..bf89f7e3f6e 100644
--- a/cmk/base/legacy_checks/netapp_api_environment.py
+++ b/cmk/base/legacy_checks/netapp_api_environment.py
@@ -67,7 +67,7 @@ def check_netapp_api_environment_discrete(item, _no_params, parsed):
yield state, f"Sensor state: {sensor_state}, Sensor value: {sensor_value}"
-def check_netapp_api_environment_threshold(item, _no_params, parsed):
+def check_netapp_api_environment_threshold(item, params, parsed):
"""Check a service giving continuous values and boundaries of said"""
def _perf_key(_key):
@@ -107,7 +107,7 @@ def _scale_unit(_unit):
if sensor_type == "thermal":
yield check_temperature(
_scale(sensor_value, unit),
- _no_params,
+ params,
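+ # pass the user-configured temperature levels through to check_temperature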
_perf_key(f"netapp_environment_thermal_{sensor_name}"),
dev_unit=_scale_unit(unit),
dev_levels=levels[:2],
diff --git a/cmk/base/legacy_checks/netapp_api_fcp.py b/cmk/base/legacy_checks/netapp_api_fcp.py
index 1296d05c0d7..c13b040dcaf 100644
--- a/cmk/base/legacy_checks/netapp_api_fcp.py
+++ b/cmk/base/legacy_checks/netapp_api_fcp.py
@@ -71,6 +71,7 @@ def check_netapp_api_fcp(item, params, parsed):
yield _notice_only_fy(0, "Address %s" % fcp_if["address"], [])
+# this function is ready for migration; a counterpart exists in netapp_ontap_fcp.py
def _speed_result(params, fcp_if):
speed = fcp_if.get("speed")
speed_str = None if speed is None else render.nicspeed(float(speed) / 8.0)
@@ -91,6 +92,7 @@ def _speed_result(params, fcp_if):
yield 2, f"Speed: {speed_str} (expected: {expected_speed_str})"
+# this function is ready for migration; a counterpart exists in netapp_ontap_fcp.py
def _io_bytes_results(item, params, fcp_if):
bw_levels = bandwidth_levels(
params=params,
@@ -113,23 +115,25 @@ def _io_bytes_results(item, params, fcp_if):
yield check_levels(
value,
what,
- levels.config
- if isinstance(
- levels,
- PredictiveLevels,
- )
- else (
- levels.upper
- or (
- None,
- None,
+ (
+ levels.config
+ if isinstance(
+ levels,
+ PredictiveLevels,
)
- )
- + (
- levels.lower
- or (
- None,
- None,
+ else (
+ levels.upper
+ or (
+ None,
+ None,
+ )
+ )
+ + (
+ levels.lower
+ or (
+ None,
+ None,
+ )
)
),
human_readable_func=render.iobandwidth,
@@ -137,6 +141,7 @@ def _io_bytes_results(item, params, fcp_if):
)
+# this function is ready for migration; a counterpart exists in netapp_ontap_fcp.py
def _io_ops_results(item, params, fcp_if):
now = fcp_if["now"]
value_store = get_value_store()
@@ -159,6 +164,7 @@ def _io_ops_results(item, params, fcp_if):
)
+# this function is ready for migration; a counterpart exists in netapp_ontap_fcp.py
def _latency_results(item, params, fcp_if):
total_ops = fcp_if["total_ops"]
value_store = get_value_store()
diff --git a/cmk/base/legacy_checks/netapp_api_info.py b/cmk/base/legacy_checks/netapp_api_info.py
index 53cd36b460a..02c998980a0 100644
--- a/cmk/base/legacy_checks/netapp_api_info.py
+++ b/cmk/base/legacy_checks/netapp_api_info.py
@@ -20,7 +20,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_netapp_api_info(info):
diff --git a/cmk/base/legacy_checks/netapp_api_snapshots.py b/cmk/base/legacy_checks/netapp_api_snapshots.py
index 67fa8bf2593..b9ef768e8a3 100644
--- a/cmk/base/legacy_checks/netapp_api_snapshots.py
+++ b/cmk/base/legacy_checks/netapp_api_snapshots.py
@@ -6,10 +6,12 @@
# mypy: disable-error-code="arg-type"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.netapp_api import netapp_api_parse_lines
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
# Agent output:
# <<<netapp_api_snapshots>>>
# volume_snapshot volch150 percent-reserved 22 blocks-reserved 3322 size-total 12122 ...
@@ -40,7 +42,7 @@ def check_netapp_api_snapshots(item, params, parsed):
reserved_bytes = int(data[0]["snapshot-blocks-reserved"]) * 1024.0
if not reserved_bytes:
- yield 0, "Used snapshot space: %s" % get_bytes_human_readable(snapshot_total), [
+ yield 0, "Used snapshot space: %s" % render.bytes(snapshot_total), [
("bytes", snapshot_total)
]
yield params.get("state_noreserve", 1), "No snapshot reserve configured"
@@ -61,14 +63,14 @@ def check_netapp_api_snapshots(item, params, parsed):
yield state, "Reserve used: {:.1f}% ({}){}".format(
used_percent,
- get_bytes_human_readable(snapshot_total),
+ render.bytes(snapshot_total),
extra_info,
)
yield 0, "Total Reserve: {}% ({}) of {}".format(
data[0]["snapshot-percent-reserved"],
- get_bytes_human_readable(reserved_bytes),
- get_bytes_human_readable(volume_total),
+ render.bytes(reserved_bytes),
+ render.bytes(volume_total),
), [("bytes", snapshot_total, 0, 0, 0, reserved_bytes)]
diff --git a/cmk/base/legacy_checks/netapp_api_status.py b/cmk/base/legacy_checks/netapp_api_status.py
index 9f62b628c91..9a43333958f 100644
--- a/cmk/base/legacy_checks/netapp_api_status.py
+++ b/cmk/base/legacy_checks/netapp_api_status.py
@@ -10,7 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_netapp_api_status(info):
diff --git a/cmk/base/legacy_checks/netapp_api_systemtime.py b/cmk/base/legacy_checks/netapp_api_systemtime.py
index e9ca3e87daa..41ab0fcf0c8 100644
--- a/cmk/base/legacy_checks/netapp_api_systemtime.py
+++ b/cmk/base/legacy_checks/netapp_api_systemtime.py
@@ -15,14 +15,11 @@
import collections
-from cmk.base.check_api import (
- check_levels,
- get_age_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
NetappApiTimeEntry = collections.namedtuple( # pylint: disable=collections-namedtuple-call
"NetappApiTimeEntry",
[
@@ -51,14 +48,14 @@ def check_netapp_api_systemtime(item, params, parsed):
None,
None,
infoname="System time",
- human_readable_func=get_timestamp_human_readable,
+ human_readable_func=render.datetime,
)
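+ # render.time_offset preserves the sign of the agent/system time difference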
yield check_levels(
entry.agent_time - entry.system_time,
"time_difference",
params.get("levels", (None, None)),
infoname="Time difference",
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.time_offset,
)
diff --git a/cmk/base/legacy_checks/netapp_api_temp.py b/cmk/base/legacy_checks/netapp_api_temp.py
index 8e2f48229b1..0c0ee798c66 100644
--- a/cmk/base/legacy_checks/netapp_api_temp.py
+++ b/cmk/base/legacy_checks/netapp_api_temp.py
@@ -6,7 +6,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.netapp_api import netapp_api_parse_lines
-from cmk.base.check_legacy_includes.temperature import check_temperature_list
+from cmk.base.check_legacy_includes.temperature import check_temperature_list, CheckTempKwargs
from cmk.base.config import check_info
# <<<netapp_api_temp>>>
@@ -46,7 +46,7 @@ def check_netapp_api_temp(item, params, parsed):
if values.get("temp-sensor-is-ambient") == is_ambient
)
- sensorlist = [
+ sensorlist: list[tuple[str, int, CheckTempKwargs]] = [
(
f"{item_no}/{sensor['temp-sensor-element-no']}",
sensor["temp-sensor-current-temperature"],
@@ -65,9 +65,10 @@ def check_netapp_api_temp(item, params, parsed):
]
if not sensorlist:
- return 0, "No temperature sensors assigned to this filer"
+ yield 0, "No temperature sensors assigned to this filer"
+ return
- return check_temperature_list(sensorlist, params, "netapp_api_temp_%s" % item)
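+ # check_temperature_list yields one result per sensor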
+ yield from check_temperature_list(sensorlist, params)
check_info["netapp_api_temp"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/netapp_api_vf_status.py b/cmk/base/legacy_checks/netapp_api_vf_status.py
index 598cccda488..8c7c28b4564 100644
--- a/cmk/base/legacy_checks/netapp_api_vf_status.py
+++ b/cmk/base/legacy_checks/netapp_api_vf_status.py
@@ -14,7 +14,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_netapp_api_vf_status(info):
diff --git a/cmk/base/legacy_checks/netapp_api_vs_traffic.py b/cmk/base/legacy_checks/netapp_api_vs_traffic.py
index 612d1f4ef5f..7bc2b0c8fe0 100644
--- a/cmk/base/legacy_checks/netapp_api_vs_traffic.py
+++ b/cmk/base/legacy_checks/netapp_api_vs_traffic.py
@@ -8,7 +8,7 @@
import time
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.netapp_api import netapp_api_parse_lines
from cmk.base.config import check_info
@@ -35,13 +35,17 @@ def inventory_netapp_api_vs_traffic(parsed):
def check_netapp_api_vs_traffic(item, _no_params, parsed):
+ """
+ When migrating this check, a utility function is available here:
+ cmk/plugins/lib/netapp_api.py -> check_netapp_vs_traffic
+ """
protocol_map = {
"lif:vserver": (
"Ethernet",
# ( what perfname perftext scale format_func)
[
- ("recv_data", "if_in_octets", "received data", 1, get_bytes_human_readable),
- ("sent_data", "if_out_octets", "sent data", 1, get_bytes_human_readable),
+ ("recv_data", "if_in_octets", "received data", 1, render.bytes),
+ ("sent_data", "if_out_octets", "sent data", 1, render.bytes),
("recv_errors", "if_in_errors", "received errors", 1, int),
("sent_errors", "if_out_errors", "sent errors", 1, int),
("recv_packet", "if_in_pkts", "received packets", 1, int),
@@ -65,8 +69,8 @@ def check_netapp_api_vs_traffic(item, _no_params, parsed):
0.001,
lambda x: "%.2f ms" % (x * 1000),
),
- ("read_data", "fcp_read_data", "read data", 1, get_bytes_human_readable),
- ("write_data", "fcp_write_data", "write data", 1, get_bytes_human_readable),
+ ("read_data", "fcp_read_data", "read data", 1, render.bytes),
+ ("write_data", "fcp_write_data", "write data", 1, render.bytes),
],
),
"cifs:vserver": (
@@ -107,8 +111,8 @@ def check_netapp_api_vs_traffic(item, _no_params, parsed):
0.001,
lambda x: "%.2f ms" % (x * 1000),
),
- ("read_data", "iscsi_read_data", "read data", 1, get_bytes_human_readable),
- ("write_data", "iscsi_write_data", "write data", 1, get_bytes_human_readable),
+ ("read_data", "iscsi_read_data", "read data", 1, render.bytes),
+ ("write_data", "iscsi_write_data", "write data", 1, render.bytes),
],
),
"nfsv3": (
diff --git a/cmk/base/legacy_checks/netapp_cluster.py b/cmk/base/legacy_checks/netapp_cluster.py
index b2661060088..a246826a244 100644
--- a/cmk/base/legacy_checks/netapp_cluster.py
+++ b/cmk/base/legacy_checks/netapp_cluster.py
@@ -22,8 +22,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, SNMPTree, startswith, StringTable
def inventory_netapp_cluster(info):
diff --git a/cmk/base/legacy_checks/netapp_cpu.py b/cmk/base/legacy_checks/netapp_cpu.py
index c3834d600d7..2de2b6c78cc 100644
--- a/cmk/base/legacy_checks/netapp_cpu.py
+++ b/cmk/base/legacy_checks/netapp_cpu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, exists, SNMPTree, startswith, StringTable
def check_netapp_cpu(item, params, info):
diff --git a/cmk/base/legacy_checks/netapp_fcpio.py b/cmk/base/legacy_checks/netapp_fcpio.py
index b983a4aa75a..56950b0c96d 100644
--- a/cmk/base/legacy_checks/netapp_fcpio.py
+++ b/cmk/base/legacy_checks/netapp_fcpio.py
@@ -6,11 +6,19 @@
import time
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, exists, get_rate, get_value_store, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import (
+ all_of,
+ exists,
+ get_rate,
+ get_value_store,
+ render,
+ SNMPTree,
+ startswith,
+ StringTable,
+)
def check_netapp_fcpio(item, params, info):
@@ -27,7 +35,7 @@ def check_netapp_fcpio(item, params, info):
avg_read,
"read",
params.get("read"),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="Read",
)
@@ -35,7 +43,7 @@ def check_netapp_fcpio(item, params, info):
avg_write,
"write",
params.get("write"),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="Write",
)
diff --git a/cmk/base/legacy_checks/netapp_vfiler.py b/cmk/base/legacy_checks/netapp_vfiler.py
index c904d08cfab..291f01a6a03 100644
--- a/cmk/base/legacy_checks/netapp_vfiler.py
+++ b/cmk/base/legacy_checks/netapp_vfiler.py
@@ -10,8 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, contains, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, contains, SNMPTree, startswith, StringTable
def inventory_netapp_vfiler(info):
diff --git a/cmk/base/legacy_checks/netctr.py b/cmk/base/legacy_checks/netctr.py
index 9ba5bc5b393..dd45c8f5597 100644
--- a/cmk/base/legacy_checks/netctr.py
+++ b/cmk/base/legacy_checks/netctr.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, StringTable
linux_nic_check = "lnx_if"
diff --git a/cmk/base/legacy_checks/netextreme_cpu_util.py b/cmk/base/legacy_checks/netextreme_cpu_util.py
index 7eeb113645f..349f5c860e2 100644
--- a/cmk/base/legacy_checks/netextreme_cpu_util.py
+++ b/cmk/base/legacy_checks/netextreme_cpu_util.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.netextreme import DETECT_NETEXTREME
# .1.3.6.1.4.1.1916.1.32.1.2.0 59 --> EXTREME-SOFTWARE-MONITOR-MIB::extremeCpuMonitorTotalUtilization.0$
diff --git a/cmk/base/legacy_checks/netextreme_fan.py b/cmk/base/legacy_checks/netextreme_fan.py
index c4ef987573e..a08aea04d6c 100644
--- a/cmk/base/legacy_checks/netextreme_fan.py
+++ b/cmk/base/legacy_checks/netextreme_fan.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.fan import check_fan
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.netextreme import DETECT_NETEXTREME
# Just an assumption, levels as in other fan checks
diff --git a/cmk/base/legacy_checks/netextreme_temp.py b/cmk/base/legacy_checks/netextreme_temp.py
index b7c90f76ec0..9790a7f261a 100644
--- a/cmk/base/legacy_checks/netextreme_temp.py
+++ b/cmk/base/legacy_checks/netextreme_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.netextreme import DETECT_NETEXTREME
# .1.3.6.1.4.1.1916.1.1.1.8.0 31 --> EXTREME-SYSTEM-MIB::extremeCurrentTemperature.0
diff --git a/cmk/base/legacy_checks/netscaler_cpu.py b/cmk/base/legacy_checks/netscaler_cpu.py
index 6e60d0898d7..d293060409b 100644
--- a/cmk/base/legacy_checks/netscaler_cpu.py
+++ b/cmk/base/legacy_checks/netscaler_cpu.py
@@ -14,8 +14,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.netscaler import SNMP_DETECT
diff --git a/cmk/base/legacy_checks/netscaler_dnsrates.py b/cmk/base/legacy_checks/netscaler_dnsrates.py
index d9ce8977bc0..cbb596af64b 100644
--- a/cmk/base/legacy_checks/netscaler_dnsrates.py
+++ b/cmk/base/legacy_checks/netscaler_dnsrates.py
@@ -14,8 +14,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree, StringTable
from cmk.plugins.lib.netscaler import SNMP_DETECT
diff --git a/cmk/base/legacy_checks/netscaler_ha.py b/cmk/base/legacy_checks/netscaler_ha.py
index d3835405c3a..1374669c15b 100644
--- a/cmk/base/legacy_checks/netscaler_ha.py
+++ b/cmk/base/legacy_checks/netscaler_ha.py
@@ -13,8 +13,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.netscaler import SNMP_DETECT
netscaler_ha_cur_states = {
diff --git a/cmk/base/legacy_checks/netscaler_health.py b/cmk/base/legacy_checks/netscaler_health.py
index 57730ededb7..a0d89c1c8b4 100644
--- a/cmk/base/legacy_checks/netscaler_health.py
+++ b/cmk/base/legacy_checks/netscaler_health.py
@@ -9,8 +9,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.netscaler import SNMP_DETECT
#
diff --git a/cmk/base/legacy_checks/netscaler_mem.py b/cmk/base/legacy_checks/netscaler_mem.py
index 586f9b456bc..af131d1de1b 100644
--- a/cmk/base/legacy_checks/netscaler_mem.py
+++ b/cmk/base/legacy_checks/netscaler_mem.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.mem import check_memory_element
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.netscaler import SNMP_DETECT
#
diff --git a/cmk/base/legacy_checks/nfsexports.py b/cmk/base/legacy_checks/nfsexports.py
index 10b2d086ad9..cf1329dac8a 100644
--- a/cmk/base/legacy_checks/nfsexports.py
+++ b/cmk/base/legacy_checks/nfsexports.py
@@ -13,7 +13,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_nfsexports(info):
diff --git a/cmk/base/legacy_checks/nimble_volumes.py b/cmk/base/legacy_checks/nimble_volumes.py
index 42ca08b406d..57244cec0a3 100644
--- a/cmk/base/legacy_checks/nimble_volumes.py
+++ b/cmk/base/legacy_checks/nimble_volumes.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.df import df_check_filesystem_list, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
# example output
diff --git a/cmk/base/legacy_checks/nullmailer_mailq.py b/cmk/base/legacy_checks/nullmailer_mailq.py
index 4a5123b007e..75447e08f9b 100644
--- a/cmk/base/legacy_checks/nullmailer_mailq.py
+++ b/cmk/base/legacy_checks/nullmailer_mailq.py
@@ -23,16 +23,16 @@
# 8 1 failed
-def inventory_nullmailer_mailq(parsed):
+def discover_nullmailer_mailq(parsed):
if parsed:
- yield "", {}
+ yield None, {}
check_info["nullmailer_mailq"] = LegacyCheckDefinition(
parse_function=parse_nullmailer_mailq,
- service_name="Nullmailer Queue %s",
- discovery_function=inventory_nullmailer_mailq,
+ service_name="Nullmailer Queue",
+ discovery_function=discover_nullmailer_mailq,
check_function=check_nullmailer_mailq,
- check_ruleset_name="mail_queue_length",
+ check_ruleset_name="mail_queue_length_single",
check_default_parameters=NULLMAILER_MAILQ_DEFAULT_LEVELS,
)
diff --git a/cmk/base/legacy_checks/nvidia.py b/cmk/base/legacy_checks/nvidia.py
index 0eed9ee651e..ab7988dbea5 100644
--- a/cmk/base/legacy_checks/nvidia.py
+++ b/cmk/base/legacy_checks/nvidia.py
@@ -8,7 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def format_nvidia_name(identifier):
diff --git a/cmk/base/legacy_checks/openhardwaremonitor.py b/cmk/base/legacy_checks/openhardwaremonitor.py
index c9bcb48a404..8a80873fcba 100644
--- a/cmk/base/legacy_checks/openhardwaremonitor.py
+++ b/cmk/base/legacy_checks/openhardwaremonitor.py
@@ -6,9 +6,7 @@
import collections
from collections.abc import Mapping
-from typing import NotRequired
-
-from typing_extensions import TypedDict
+from typing import NotRequired, TypedDict
from cmk.base.check_api import LegacyCheckDefinition, regex
from cmk.base.check_legacy_includes.fan import check_fan
@@ -350,6 +348,6 @@ def check_openhardwaremonitor_smart(item, params, parsed):
check_function=check_openhardwaremonitor_smart,
check_ruleset_name="openhardwaremonitor_smart",
check_default_parameters={
- "remaining_life": (30, 10), # wild guess
+ "remaining_life": (30.0, 10.0), # wild guess
},
)
diff --git a/cmk/base/legacy_checks/openvpn_clients.py b/cmk/base/legacy_checks/openvpn_clients.py
index 6fb7f6bd611..2277c5a0880 100644
--- a/cmk/base/legacy_checks/openvpn_clients.py
+++ b/cmk/base/legacy_checks/openvpn_clients.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, render, StringTable
# Example output from agent:
# <<<openvpn_clients>>>
diff --git a/cmk/base/legacy_checks/oracle_crs_version.py b/cmk/base/legacy_checks/oracle_crs_version.py
index f97a2cb7868..eb2bf511764 100644
--- a/cmk/base/legacy_checks/oracle_crs_version.py
+++ b/cmk/base/legacy_checks/oracle_crs_version.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import IgnoreResultsError, StringTable
def inventory_oracle_crs_version(info):
diff --git a/cmk/base/legacy_checks/oracle_crs_voting.py b/cmk/base/legacy_checks/oracle_crs_voting.py
index 70730861d04..a6a83ba0a7c 100644
--- a/cmk/base/legacy_checks/oracle_crs_voting.py
+++ b/cmk/base/legacy_checks/oracle_crs_voting.py
@@ -12,8 +12,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import IgnoreResultsError, StringTable
def inventory_oracle_crs_voting(info):
diff --git a/cmk/base/legacy_checks/oracle_dataguard_stats.py b/cmk/base/legacy_checks/oracle_dataguard_stats.py
index c84aa6155b3..dfe01874f0a 100644
--- a/cmk/base/legacy_checks/oracle_dataguard_stats.py
+++ b/cmk/base/legacy_checks/oracle_dataguard_stats.py
@@ -6,10 +6,10 @@
# In cooperation with Thorsten Bruhns from OPITZ Consulting
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
+from cmk.agent_based.v2 import IgnoreResultsError, render
# <<<oracle_dataguard_stats:sep(124)>>>
# TESTDB|TESTDBU2|PHYSICAL STANDBY|apply finish time|+00 00:00:00.000|NOT ALLOWED|ENABLED|MAXIMUM PERFORMANCE|DISABLED||||APPLYING_LOG
@@ -136,7 +136,7 @@ def check_oracle_dataguard_stats(item, params, parsed): # pylint: disable=too-m
seconds,
pkey,
levels_upper + levels_lower,
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.time_offset,
infoname=label,
)
@@ -147,8 +147,8 @@ def check_oracle_dataguard_stats(item, params, parsed): # pylint: disable=too-m
and dgdata["dgstat"]["apply lag"] == ""
):
# old sql cannot detect a started standby database without running media recovery
- # => add an information for old plugin with possible wrong result
- yield 0, "old plugin data found, recovery active?"
+ # => emit a hint that old plug-in data was found, which may yield a wrong result
+ yield 0, "old plug-in data found, recovery active?"
check_info["oracle_dataguard_stats"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/oracle_diva_csm.py b/cmk/base/legacy_checks/oracle_diva_csm.py
index 0ee99edab44..30589926ef1 100644
--- a/cmk/base/legacy_checks/oracle_diva_csm.py
+++ b/cmk/base/legacy_checks/oracle_diva_csm.py
@@ -37,8 +37,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
def item_name_oracle_diva_csm(name, element_id):
diff --git a/cmk/base/legacy_checks/oracle_jobs.py b/cmk/base/legacy_checks/oracle_jobs.py
index d9c1711eba6..82a3c90cfef 100644
--- a/cmk/base/legacy_checks/oracle_jobs.py
+++ b/cmk/base/legacy_checks/oracle_jobs.py
@@ -31,11 +31,10 @@
# QS1|DBADMIN|DATENEXPORT-FUR|COMPLETED|0|3|FALSE|22-AUG-14 01.11.00.000000 AM EUROPE/BERLIN|-|
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import IgnoreResultsError, render, StringTable
def inventory_oracle_jobs(info):
@@ -195,7 +194,7 @@ def check_oracle_jobs(item, params, info): # pylint: disable=too-many-branches
else:
last_duration = int(job_runtime.replace(".", ",").split(",", 1)[0])
# bugfix for an error in mk_oracle agent with missing round over last_duration
- output.append("Last Duration: %s" % (get_age_human_readable(last_duration)))
+ output.append("Last Duration: %s" % (render.timespan(last_duration)))
if "run_duration" in params:
warn, crit = params["run_duration"]
diff --git a/cmk/base/legacy_checks/oracle_locks.py b/cmk/base/legacy_checks/oracle_locks.py
index 1f83b7bf7d7..6d6c7d8efbf 100644
--- a/cmk/base/legacy_checks/oracle_locks.py
+++ b/cmk/base/legacy_checks/oracle_locks.py
@@ -4,12 +4,11 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.oracle import oracle_handle_ora_errors
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import IgnoreResultsError, render, StringTable
# <<<oracle_locks:sep(124)>>>
# TUX12C|273|2985|ora12c.local|sqlplus@ora12c.local (TNS V1-V3)|46148|oracle|633|NULL|NULL
@@ -83,7 +82,7 @@ def check_oracle_locks(item, params, info): # pylint: disable=too-many-branches
state = 2
lockcount += 1
infotext += "locktime {} (!!) Session (sid,serial, proc) {},{},{} machine {} osuser {} object: {}.{} ; ".format(
- get_age_human_readable(ctime),
+ render.time_offset(ctime),
sidnr,
serial,
process,
@@ -97,7 +96,7 @@ def check_oracle_locks(item, params, info): # pylint: disable=too-many-branches
state = max(1, state)
lockcount += 1
infotext += "locktime {} (!) Session (sid,serial, proc) {},{},{} machine {} osuser {} object: {}.{} ; ".format(
- get_age_human_readable(ctime),
+ render.time_offset(ctime),
sidnr,
serial,
process,
diff --git a/cmk/base/legacy_checks/oracle_logswitches.py b/cmk/base/legacy_checks/oracle_logswitches.py
index 27c636a02d3..68f8de42578 100644
--- a/cmk/base/legacy_checks/oracle_logswitches.py
+++ b/cmk/base/legacy_checks/oracle_logswitches.py
@@ -11,8 +11,7 @@
)
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import IgnoreResultsError, StringTable
# <<<oracle_logswitches>>>
# pengt 15
diff --git a/cmk/base/legacy_checks/oracle_longactivesessions.py b/cmk/base/legacy_checks/oracle_longactivesessions.py
index e660db77824..5c9227e0350 100644
--- a/cmk/base/legacy_checks/oracle_longactivesessions.py
+++ b/cmk/base/legacy_checks/oracle_longactivesessions.py
@@ -10,11 +10,10 @@
# ORACLE_SID serial# machine process osuser program last_call_el sql_id
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import IgnoreResultsError, render, StringTable
def inventory_oracle_longactivesessions(info):
@@ -43,7 +42,7 @@ def check_oracle_longactivesessions(item, params, info):
sidnr,
serial,
process,
- get_age_human_readable(int(last_call_el)),
+ render.timespan(int(last_call_el)),
machine,
osuser,
program,
diff --git a/cmk/base/legacy_checks/oracle_recovery_area.py b/cmk/base/legacy_checks/oracle_recovery_area.py
index 8f64b849285..8a42a8d0f3e 100644
--- a/cmk/base/legacy_checks/oracle_recovery_area.py
+++ b/cmk/base/legacy_checks/oracle_recovery_area.py
@@ -10,11 +10,10 @@
# ORACLE_SID used_pct size used reclaimable
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import IgnoreResultsError, render, StringTable
def inventory_oracle_recovery_area(info):
@@ -23,6 +22,8 @@ def inventory_oracle_recovery_area(info):
def check_oracle_recovery_area(item, params, info):
for line in info:
+ if len(line) < 5:
+ continue
if line[0] == item:
size_mb, used_mb, reclaimable_mb = map(int, line[2:5])
if size_mb == 0:
@@ -46,12 +47,12 @@ def check_oracle_recovery_area(item, params, info):
state,
"%s out of %s used (%.1f%%, warn/crit at %s%%/%s%%), %s reclaimable"
% (
- get_bytes_human_readable(used_mb * mb),
- get_bytes_human_readable(size_mb * mb),
+ render.bytes(used_mb * mb),
+ render.bytes(size_mb * mb),
perc_used,
warn,
crit,
- get_bytes_human_readable(reclaimable_mb * mb),
+ render.bytes(reclaimable_mb * mb),
),
[("used", used_mb, warn_mb, crit_mb, 0, size_mb), ("reclaimable", reclaimable_mb)],
)
diff --git a/cmk/base/legacy_checks/oracle_recovery_status.py b/cmk/base/legacy_checks/oracle_recovery_status.py
index ba9040e9408..08ad45ca2c8 100644
--- a/cmk/base/legacy_checks/oracle_recovery_status.py
+++ b/cmk/base/legacy_checks/oracle_recovery_status.py
@@ -23,11 +23,10 @@
# mypy: disable-error-code="arg-type"
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import IgnoreResultsError, render, StringTable
def inventory_oracle_recovery_status(info):
@@ -119,14 +118,12 @@ def check_oracle_recovery_status(item, params, info): # pylint: disable=too-man
# we found a negative time for last checkpoint
infotext += (
", oldest checkpoint is in the future %s(!), check the time on the server"
- % get_age_human_readable(int(oldest_checkpoint_age) * -1)
+ % render.timespan(int(oldest_checkpoint_age) * -1)
)
state = max(state, 1)
else:
- infotext += ", oldest Checkpoint %s ago" % (
- get_age_human_readable(int(oldest_checkpoint_age))
- )
+ infotext += ", oldest Checkpoint %s ago" % (render.timespan(int(oldest_checkpoint_age)))
if (
(database_role == "PRIMARY" and db_name == "_MGMTDB" and db_unique_name == "_mgmtdb")
@@ -159,8 +156,8 @@ def check_oracle_recovery_status(item, params, info): # pylint: disable=too-man
state = max(1, state)
infotext += " (warn/crit at {}/{} )".format(
- get_age_human_readable(warn),
- get_age_human_readable(crit),
+ render.timespan(warn),
+ render.timespan(crit),
)
if offlinecount > 0:
@@ -174,14 +171,14 @@ def check_oracle_recovery_status(item, params, info): # pylint: disable=too-man
if oldest_backup_age > 0:
infotext += " %i datafiles in backup mode oldest is %s" % (
backup_count,
- get_age_human_readable(oldest_backup_age),
+ render.timespan(oldest_backup_age),
)
if params.get("backup_age"):
warn, crit = params["backup_age"]
infotext += " (warn/crit at {}/{})".format(
- get_age_human_readable(warn),
- get_age_human_readable(crit),
+ render.timespan(warn),
+ render.timespan(crit),
)
perfdata.append(("backup_age", oldest_backup_age, warn, crit))
@@ -195,7 +192,7 @@ def check_oracle_recovery_status(item, params, info): # pylint: disable=too-man
perfdata.append(("backup_age", oldest_backup_age))
else:
# create a 'dummy' performance data with 0
- # => The age from plugin is only valid when a datafile is in backup mode!
+ # => The age from plug-in is only valid when a datafile is in backup mode!
perfdata.append(("backup_age", 0))
return state, infotext, perfdata
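# Standalone sketch of the checkpoint-age branching above: a negative age
# means the server clock is ahead, so the value is negated before
# rendering and the state is raised to WARN.  _timespan() is only a
# stand-in for cmk.agent_based.v2.render.timespan, which (judging by the
# negation in the check) expects a non-negative number of seconds.

def _timespan(seconds: float) -> str:
    minutes, secs = divmod(int(seconds), 60)
    hours, minutes = divmod(minutes, 60)
    return f"{hours:d}:{minutes:02d}:{secs:02d}"

def describe_checkpoint_age(age: int) -> tuple[int, str]:
    if age < 0:
        return 1, "oldest checkpoint is in the future %s(!)" % _timespan(-age)
    return 0, "oldest Checkpoint %s ago" % _timespan(age)

print(describe_checkpoint_age(-90))   # (1, '... the future 0:01:30(!)')
print(describe_checkpoint_age(3661))  # (0, 'oldest Checkpoint 1:01:01 ago')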
diff --git a/cmk/base/legacy_checks/oracle_sql.py b/cmk/base/legacy_checks/oracle_sql.py
deleted file mode 100644
index 897af962463..00000000000
--- a/cmk/base/legacy_checks/oracle_sql.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# <<<oracle_sql:sep(58)>>>
-# [[[SID-1|SQL-A]]]
-# details:DETAILS
-# perfdata:NAME=VAL;WARN;CRIT;MIN;MAX NAME=VAL;WARN;CRIT;MIN;MAX ...
-# perfdata:NAME=VAL;WARN;CRIT;MIN;MAX ...
-# long:LONG
-# long:LONG
-# ...
-# exit:CODE
-# elapsed:TS
-# [[[SID-2|SQL-B]]]
-# details:DETAILS
-# perfdata:
-# long:LONG
-# long:LONG
-# ...
-# exit:CODE
-# elapsed:TS
-
-
-# mypy: disable-error-code="var-annotated"
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-
-def parse_oracle_sql(string_table):
- def parse_perfdata(line):
- perfdata = []
- for entry in line.split():
- if not entry:
- continue
- var_name, data_str = entry.split("=", 1)
- perf_entry = [var_name]
- for data_entry in data_str.split(";"):
- try:
- perf_entry.append(float(data_entry) if "." in data_entry else int(data_entry))
- except Exception:
- perf_entry.append(None)
- perfdata.append(tuple(perf_entry))
- return perfdata
-
- parsed = {}
- instance = None
- for line in string_table:
- if line[0].startswith("[[[") and line[0].endswith("]]]"):
- item_name = tuple(line[0][3:-3].split("|"))
- instance = parsed.setdefault(
- ("%s SQL %s" % item_name).upper(),
- {
- "details": [],
- "perfdata": [],
- "long": [],
- "exit": 0,
- "elapsed": None,
- "parsing_error": {},
- },
- )
- continue
-
- if instance is None:
- continue
-
- key = line[0]
- infotext = ":".join(line[1:]).strip()
- if key.endswith("ERROR") or key.startswith("ERROR at line") or "|FAILURE|" in key:
- instance["parsing_error"].setdefault(("instance", "PL/SQL failure", 2), []).append(
- "{}: {}".format(key.split("|")[-1], infotext)
- )
-
- elif key in ["details", "long"]:
- instance[key].append(infotext)
-
- elif key == "perfdata":
- try:
- instance[key] += parse_perfdata(line[1])
- except Exception:
- instance["parsing_error"].setdefault(("perfdata", "Perfdata error", 3), []).append(
- infotext
- )
-
- elif key == "exit":
- instance[key] = int(line[1])
-
- elif key == "elapsed":
- instance[key] = float(line[1])
-
- else:
- instance["parsing_error"].setdefault(("unknown", "Unknown error", 3), []).append(
- ":".join(line).strip()
- )
-
- return parsed
-
-
-def inventory_oracle_sql(parsed):
- for instance in parsed:
- yield instance, {}
-
-
-def check_oracle_sql(item, params, parsed):
- if item not in parsed:
- return
-
- data = parsed[item]
- for (error_key, error_title, error_state), error_lines in data["parsing_error"].items():
- error_state = params.get("%s_error_state" % error_key, error_state)
- yield error_state, "{}: {}".format(error_title, " ".join(error_lines))
-
- perfdata = data["perfdata"]
- elapsed_time = data["elapsed"]
- if elapsed_time is not None:
- perfdata.append(("elapsed_time", elapsed_time))
-
- yield data["exit"], ", ".join(data["details"]), perfdata
-
- if data["long"]:
- yield 0, "\n%s" % "\n".join(data["long"])
-
-
-check_info["oracle_sql"] = LegacyCheckDefinition(
- parse_function=parse_oracle_sql,
- service_name="ORA %s",
- discovery_function=inventory_oracle_sql,
- check_function=check_oracle_sql,
- check_ruleset_name="oracle_sql",
-)
diff --git a/cmk/base/legacy_checks/oracle_undostat.py b/cmk/base/legacy_checks/oracle_undostat.py
index 894ad01cdf4..20ffa535c57 100644
--- a/cmk/base/legacy_checks/oracle_undostat.py
+++ b/cmk/base/legacy_checks/oracle_undostat.py
@@ -9,10 +9,10 @@
# TUX2 160 0 1081 300 0
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import IgnoreResultsError
+from cmk.agent_based.v2 import IgnoreResultsError, render
def parse_oracle_undostat(string_table):
@@ -39,7 +39,7 @@ def check_oracle_undostat(item, params, parsed):
tuned_undoretention,
None,
params=None if tuned_undoretention == -1 else (None, None, warn, crit),
- human_readable_func=get_age_human_readable,
+ human_readable_func=str if tuned_undoretention == -1 else render.timespan,
infoname="Undo retention",
)
@@ -47,7 +47,7 @@ def check_oracle_undostat(item, params, parsed):
yield 0, "Active undo blocks: %d" % activeblks
yield 0, "Max concurrent transactions: %d" % maxconcurrency
- yield 0, "Max querylen: %s" % get_age_human_readable(maxquerylen)
+ yield 0, "Max querylen: %s" % render.timespan(maxquerylen)
state_errcnt = params["nospaceerrcnt_state"] if nospaceerrcnt else 0
yield state_errcnt, "Space errors: %d" % nospaceerrcnt
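# Standalone sketch of the sentinel handling above: Oracle reports a
# tuned undo retention of -1 when no value is available, so the check
# now disables the levels (params=None) and renders the raw value via
# str() instead of as a timespan.  The tuple (None, None, warn, crit)
# follows the legacy check_levels convention: upper levels first, then
# lower levels.  The timespan stand-in here is illustrative only.

def pick_levels(tuned_undoretention: int, warn: int, crit: int):
    if tuned_undoretention == -1:
        return None, str  # no levels, raw rendering of the sentinel
    return (None, None, warn, crit), lambda s: f"{s} s"  # timespan stand-in

print(pick_levels(-1, 600, 300))    # (None, <class 'str'>)
print(pick_levels(1081, 600, 300))  # ((None, None, 600, 300), <function ...>)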
diff --git a/cmk/base/legacy_checks/oracle_version.py b/cmk/base/legacy_checks/oracle_version.py
index ded01471952..e2afebd0232 100644
--- a/cmk/base/legacy_checks/oracle_version.py
+++ b/cmk/base/legacy_checks/oracle_version.py
@@ -11,7 +11,7 @@
)
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
# <<<oracle_version>>>
# XE Oracle Database 11g Express Edition Release 11.2.0.2.0 - 64bit Production
diff --git a/cmk/base/legacy_checks/orion_backup.py b/cmk/base/legacy_checks/orion_backup.py
index fdceedfd518..1264119155a 100644
--- a/cmk/base/legacy_checks/orion_backup.py
+++ b/cmk/base/legacy_checks/orion_backup.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_orion_backup(info):
diff --git a/cmk/base/legacy_checks/orion_batterytest.py b/cmk/base/legacy_checks/orion_batterytest.py
index 7ea8bcf4bb2..7428c04c41f 100644
--- a/cmk/base/legacy_checks/orion_batterytest.py
+++ b/cmk/base/legacy_checks/orion_batterytest.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_orion_batterytest(info):
diff --git a/cmk/base/legacy_checks/packeteer_fan_status.py b/cmk/base/legacy_checks/packeteer_fan_status.py
index acd1a9a3e6b..495499199b7 100644
--- a/cmk/base/legacy_checks/packeteer_fan_status.py
+++ b/cmk/base/legacy_checks/packeteer_fan_status.py
@@ -4,14 +4,13 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, startswith, StringTable
-def inventory_packeteer_fan_status(section: StringTable) -> DiscoveryResult:
+def discover_packeteer_fan_status(section: StringTable) -> DiscoveryResult:
for nr, fan_status in enumerate(section[0]):
if fan_status in ["1", "2"]:
yield Service(item=f"{nr}")
@@ -40,6 +39,6 @@ def parse_packeteer_fan_status(string_table: StringTable) -> StringTable | None:
oids=["12", "14", "22", "24"],
),
service_name="Fan Status",
- discovery_function=inventory_packeteer_fan_status,
+ discovery_function=discover_packeteer_fan_status,
check_function=check_packeteer_fan_status,
)
diff --git a/cmk/base/legacy_checks/packeteer_ps_status.py b/cmk/base/legacy_checks/packeteer_ps_status.py
index db7cca5d388..4d0d5fa83b4 100644
--- a/cmk/base/legacy_checks/packeteer_ps_status.py
+++ b/cmk/base/legacy_checks/packeteer_ps_status.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, startswith, StringTable
def inventory_packeteer_ps_status(info):
diff --git a/cmk/base/legacy_checks/palo_alto_sessions.py b/cmk/base/legacy_checks/palo_alto_sessions.py
index 7868373ac45..eb555b0d52e 100644
--- a/cmk/base/legacy_checks/palo_alto_sessions.py
+++ b/cmk/base/legacy_checks/palo_alto_sessions.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.palo_alto import DETECT_PALO_ALTO
diff --git a/cmk/base/legacy_checks/pandacom_10gm_temp.py b/cmk/base/legacy_checks/pandacom_10gm_temp.py
deleted file mode 100644
index e0622ba5547..00000000000
--- a/cmk/base/legacy_checks/pandacom_10gm_temp.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.pandacom_temp import (
- check_pandacom_module_temp,
- inventory_pandacom_module_temp,
- PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS,
-)
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.pandacom import DETECT_PANDACOM
-
-# .1.3.6.1.4.1.3652.3.3.4.1.1.2.4 4 --> SPEED-DUALLINE-10G::speedDualline10GMSlot.4
-# .1.3.6.1.4.1.3652.3.3.4.1.1.2.5 5 --> SPEED-DUALLINE-10G::speedDualline10GMSlot.5
-# .1.3.6.1.4.1.3652.3.3.4.1.1.7.4 30 --> SPEED-DUALLINE-10G::speedDualline10GMTemperature.4
-# .1.3.6.1.4.1.3652.3.3.4.1.1.7.5 32 --> SPEED-DUALLINE-10G::speedDualline10GMTemperature.5
-# .1.3.6.1.4.1.3652.3.3.4.2.1.13.4 45 --> SPEED-DUALLINE-10G::speedDualline10GMTempWarningLevel.4
-# .1.3.6.1.4.1.3652.3.3.4.2.1.13.5 45 --> SPEED-DUALLINE-10G::speedDualline10GMTempWarningLevel.5
-# .1.3.6.1.4.1.3652.3.3.4.2.1.14.4 60 --> SPEED-DUALLINE-10G::speedDualline10GMTempAlarmLevel.4
-# .1.3.6.1.4.1.3652.3.3.4.2.1.14.5 60 --> SPEED-DUALLINE-10G::speedDualline10GMTempAlarmLevel.5
-
-
-def parse_pandacom_10gm_temp(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["pandacom_10gm_temp"] = LegacyCheckDefinition(
- parse_function=parse_pandacom_10gm_temp,
- detect=DETECT_PANDACOM,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.3652.3.3.4",
- oids=["1.1.2", "1.1.7", "2.1.13", "2.1.14"],
- ),
- service_name="Temperature 10GM Module %s",
- discovery_function=inventory_pandacom_module_temp,
- check_function=check_pandacom_module_temp,
- check_ruleset_name="temperature",
- check_default_parameters=PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS,
-)
diff --git a/cmk/base/legacy_checks/pandacom_fan.py b/cmk/base/legacy_checks/pandacom_fan.py
index 51132fb93f2..29571ebd2f8 100644
--- a/cmk/base/legacy_checks/pandacom_fan.py
+++ b/cmk/base/legacy_checks/pandacom_fan.py
@@ -18,8 +18,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.pandacom import DETECT_PANDACOM
diff --git a/cmk/base/legacy_checks/pandacom_fc_temp.py b/cmk/base/legacy_checks/pandacom_fc_temp.py
deleted file mode 100644
index 5387c5136ea..00000000000
--- a/cmk/base/legacy_checks/pandacom_fc_temp.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.pandacom_temp import (
- check_pandacom_module_temp,
- inventory_pandacom_module_temp,
- PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS,
-)
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.pandacom import DETECT_PANDACOM
-
-# .1.3.6.1.4.1.3652.3.3.3.1.1.2.2 2 --> SPEED-DUALLINE-FC::speedDuallineFCMSlot.2
-# .1.3.6.1.4.1.3652.3.3.3.1.1.2.3 3 --> SPEED-DUALLINE-FC::speedDuallineFCMSlot.3
-# .1.3.6.1.4.1.3652.3.3.3.1.1.7.2 31 --> SPEED-DUALLINE-FC::speedDuallineFCMTemperature.2
-# .1.3.6.1.4.1.3652.3.3.3.1.1.7.3 29 --> SPEED-DUALLINE-FC::speedDuallineFCMTemperature.3
-# .1.3.6.1.4.1.3652.3.3.3.2.1.13.2 45 --> SPEED-DUALLINE-FC::speedDuallineFCMTempWarningLevel.2
-# .1.3.6.1.4.1.3652.3.3.3.2.1.13.3 45 --> SPEED-DUALLINE-FC::speedDuallineFCMTempWarningLevel.3
-# .1.3.6.1.4.1.3652.3.3.3.2.1.14.2 60 --> SPEED-DUALLINE-FC::speedDuallineFCMTempAlarmLevel.2
-# .1.3.6.1.4.1.3652.3.3.3.2.1.14.3 60 --> SPEED-DUALLINE-FC::speedDuallineFCMTempAlarmLevel.3
-
-
-def parse_pandacom_fc_temp(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["pandacom_fc_temp"] = LegacyCheckDefinition(
- parse_function=parse_pandacom_fc_temp,
- detect=DETECT_PANDACOM,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.3652.3.3.3",
- oids=["1.1.2", "1.1.7", "2.1.13", "2.1.14"],
- ),
- service_name="Temperature FC Module %s",
- discovery_function=inventory_pandacom_module_temp,
- check_function=check_pandacom_module_temp,
- check_ruleset_name="temperature",
- check_default_parameters=PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS,
-)
diff --git a/cmk/base/legacy_checks/pandacom_psu.py b/cmk/base/legacy_checks/pandacom_psu.py
index de433efb526..7ba1bf75ea7 100644
--- a/cmk/base/legacy_checks/pandacom_psu.py
+++ b/cmk/base/legacy_checks/pandacom_psu.py
@@ -38,6 +38,15 @@ def parse_pandacom_psu(string_table):
"9": "48 V DC 1100 W",
"10": "230 V AC 1100 W",
"255": "type not available",
+ "65025": "48 V DC 60 W",
+ "65026": "230 V AC 60 W",
+ "65027": "48 V DC 250 W",
+ "65028": "230 V AC 250 W",
+ "65029": "48 V DC 1100 W",
+ "65030": "230 V AC 1100 W",
+ "65031": "48 V DC 1100 W 1 UH",
+ "65032": "230 V AC 1100 W 1 UH",
+ "65033": "230 V AC 1200W 1 UH",
}
map_psu_state = {
"0": (3, "not installed"),
diff --git a/cmk/base/legacy_checks/pandacom_sys_temp.py b/cmk/base/legacy_checks/pandacom_sys_temp.py
index 2373551c34c..acee6c80472 100644
--- a/cmk/base/legacy_checks/pandacom_sys_temp.py
+++ b/cmk/base/legacy_checks/pandacom_sys_temp.py
@@ -4,15 +4,15 @@
# conditions defined in the file COPYING, which is part of this source code package.
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.pandacom_temp import PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.pandacom import DETECT_PANDACOM
-# .1.3.6.1.4.1.3652.3.1.1.6.0 27
+
+def parse_pandacom_sys_temp(string_table: StringTable) -> StringTable | None:
+ return string_table or None
def inventory_pandacom_sys_temp(info):
@@ -23,10 +23,6 @@ def check_pandacom_sys_temp(item, params, info):
return check_temperature(int(info[0][0]), params, "pandacom_sys_%s" % item)
-def parse_pandacom_sys_temp(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
check_info["pandacom_sys_temp"] = LegacyCheckDefinition(
parse_function=parse_pandacom_sys_temp,
detect=DETECT_PANDACOM,
@@ -38,5 +34,5 @@ def parse_pandacom_sys_temp(string_table: StringTable) -> StringTable | None:
discovery_function=inventory_pandacom_sys_temp,
check_function=check_pandacom_sys_temp,
check_ruleset_name="temperature",
- check_default_parameters=PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS,
+ check_default_parameters={"levels": (35.0, 40.0)},
)
diff --git a/cmk/base/legacy_checks/pandacom_temp.py b/cmk/base/legacy_checks/pandacom_temp.py
new file mode 100644
index 00000000000..e5865581e30
--- /dev/null
+++ b/cmk/base/legacy_checks/pandacom_temp.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+from cmk.base.check_api import LegacyCheckDefinition
+from cmk.base.check_legacy_includes.temperature import check_temperature
+from cmk.base.config import check_info
+
+from cmk.agent_based.v2 import SNMPTree, StringTable
+from cmk.plugins.lib.pandacom import DETECT_PANDACOM
+
+PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS = {"levels": (35.0, 40.0)}
+
+
+def parse_pandacom_temp(string_table: StringTable) -> StringTable:
+ return string_table
+
+
+def inventory_pandacom_module_temp(info):
+ return [(line[0], {}) for line in info]
+
+
+def check_pandacom_module_temp(item, params, info):
+ for slot, temp_str, warn_str, crit_str in info:
+ if slot == item:
+ return check_temperature(
+ int(temp_str),
+ params,
+ "pandacom_%s" % item,
+ dev_levels=(int(warn_str), int(crit_str)),
+ )
+ return None
+
+
+check_info["pandacom_10gm_temp"] = LegacyCheckDefinition(
+ parse_function=parse_pandacom_temp,
+ detect=DETECT_PANDACOM,
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.3652.3.3.4",
+ # .1.3.6.1.4.1.3652.3.3.4.1.1.2.4 4 --> SPEED-DUALLINE-10G::speedDualline10GMSlot.4
+ # .1.3.6.1.4.1.3652.3.3.4.1.1.2.5 5 --> SPEED-DUALLINE-10G::speedDualline10GMSlot.5
+ # .1.3.6.1.4.1.3652.3.3.4.1.1.7.4 30 --> SPEED-DUALLINE-10G::speedDualline10GMTemperature.4
+ # .1.3.6.1.4.1.3652.3.3.4.1.1.7.5 32 --> SPEED-DUALLINE-10G::speedDualline10GMTemperature.5
+ # .1.3.6.1.4.1.3652.3.3.4.2.1.13.4 45 --> SPEED-DUALLINE-10G::speedDualline10GMTempWarningLevel.4
+ # .1.3.6.1.4.1.3652.3.3.4.2.1.13.5 45 --> SPEED-DUALLINE-10G::speedDualline10GMTempWarningLevel.5
+ # .1.3.6.1.4.1.3652.3.3.4.2.1.14.4 60 --> SPEED-DUALLINE-10G::speedDualline10GMTempAlarmLevel.4
+ # .1.3.6.1.4.1.3652.3.3.4.2.1.14.5 60 --> SPEED-DUALLINE-10G::speedDualline10GMTempAlarmLevel.5
+ oids=["1.1.2", "1.1.7", "2.1.13", "2.1.14"],
+ ),
+ service_name="Temperature 10GM Module %s",
+ discovery_function=inventory_pandacom_module_temp,
+ check_function=check_pandacom_module_temp,
+ check_ruleset_name="temperature",
+ check_default_parameters=PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS,
+)
+
+
+check_info["pandacom_fc_temp"] = LegacyCheckDefinition(
+ parse_function=parse_pandacom_temp,
+ detect=DETECT_PANDACOM,
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.3652.3.3.3",
+ # .1.3.6.1.4.1.3652.3.3.3.1.1.2.2 2 --> SPEED-DUALLINE-FC::speedDuallineFCMSlot.2
+ # .1.3.6.1.4.1.3652.3.3.3.1.1.2.3 3 --> SPEED-DUALLINE-FC::speedDuallineFCMSlot.3
+ # .1.3.6.1.4.1.3652.3.3.3.1.1.7.2 31 --> SPEED-DUALLINE-FC::speedDuallineFCMTemperature.2
+ # .1.3.6.1.4.1.3652.3.3.3.1.1.7.3 29 --> SPEED-DUALLINE-FC::speedDuallineFCMTemperature.3
+ # .1.3.6.1.4.1.3652.3.3.3.2.1.13.2 45 --> SPEED-DUALLINE-FC::speedDuallineFCMTempWarningLevel.2
+ # .1.3.6.1.4.1.3652.3.3.3.2.1.13.3 45 --> SPEED-DUALLINE-FC::speedDuallineFCMTempWarningLevel.3
+ # .1.3.6.1.4.1.3652.3.3.3.2.1.14.2 60 --> SPEED-DUALLINE-FC::speedDuallineFCMTempAlarmLevel.2
+ # .1.3.6.1.4.1.3652.3.3.3.2.1.14.3 60 --> SPEED-DUALLINE-FC::speedDuallineFCMTempAlarmLevel.3
+ oids=["1.1.2", "1.1.7", "2.1.13", "2.1.14"],
+ ),
+ service_name="Temperature FC Module %s",
+ discovery_function=inventory_pandacom_module_temp,
+ check_function=check_pandacom_module_temp,
+ check_ruleset_name="temperature",
+ check_default_parameters=PANDACOM_TEMP_CHECK_DEFAULT_PARAMETERS,
+)
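# Standalone walk-through of the merged check above: the four SNMP
# columns per row are (slot, temperature, warn level, crit level), and
# the device-supplied levels are handed to check_temperature as
# dev_levels.  _check_temperature is a stub for the real include, just
# to make the row matching runnable here.

def _check_temperature(reading, params, unique_name, dev_levels):
    warn, crit = dev_levels
    state = 2 if reading >= crit else 1 if reading >= warn else 0
    return state, f"{reading} °C (device levels at {warn}/{crit} °C)"

def check_module_temp(item, params, info):
    for slot, temp_str, warn_str, crit_str in info:
        if slot == item:
            return _check_temperature(
                int(temp_str),
                params,
                "pandacom_%s" % item,
                dev_levels=(int(warn_str), int(crit_str)),
            )
    return None

info = [["4", "30", "45", "60"], ["5", "47", "45", "60"]]
print(check_module_temp("5", {}, info))  # (1, '47 °C (device levels at 45/60 °C)')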
diff --git a/cmk/base/legacy_checks/perle_modules_cm1000.py b/cmk/base/legacy_checks/perle_modules_cm1000.py
index ed80fe6e548..d285ed10bf8 100644
--- a/cmk/base/legacy_checks/perle_modules_cm1000.py
+++ b/cmk/base/legacy_checks/perle_modules_cm1000.py
@@ -4,21 +4,133 @@
# conditions defined in the file COPYING, which is part of this source code package.
+from collections.abc import Mapping
+
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.perle import check_perle_cm_modules, inventory_perle_cm_modules
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.perle import DETECT_PERLE
-def parse_perle_modules_cm1000(string_table: StringTable) -> StringTable:
+def parse_perle_modules(string_table: StringTable) -> StringTable:
return string_table
+def inventory_perle_cm_modules(info):
+ yield from ((index, {}) for _name, _led, index, *_rest, in info)
+
+
+MAP_SPEED: Mapping[str, str] = {
+ "0": "10 Mbs",
+ "1": "100 Mbps",
+ "2": "1000 Mbps",
+}
+
+MAP_POWER_LED: Mapping[str, tuple[int, str]] = {
+ "0": (2, "no power"),
+ "1": (0, "power to the module"),
+ "2": (0, "loopback enabled"),
+}
+
+MAP_FIBER_LPRF: Mapping[str, tuple[int, str]] = {
+ "0": (0, "ok"),
+ "1": (2, "offline"),
+ "2": (2, "link fault"),
+ "3": (2, "auto neg error"),
+ # available for cm1110 modules
+ "99": (2, "not applicable"),
+}
+MAP_FIBER_LINK: Mapping[str, tuple[int, str]] = {
+ "0": (1, "down"),
+ "1": (0, "up"),
+}
+
+MAP_FIBER_CONNECTOR: Mapping[str, str] = {
+ "0": "sc",
+ "1": "lc",
+ "2": "st",
+ "3": "sfp",
+ "5": "fc",
+ "6": "mtrj",
+}
+MAP_COPPER_LPRF: Mapping[str, tuple[int, str]] = {
+ "0": (0, "ok"),
+ "1": (2, "remote fault"),
+}
+
+MAP_COPPER_LINK: Mapping[str, tuple[int, str]] = {
+ "0": (1, "down"),
+ "1": (0, "ok"),
+}
+
+MAP_COPPER_CONNECTOR: Mapping[str, str] = {
+ "0": "rj45",
+}
+
+
+def check_perle_cm_modules(item, _no_params, info):
+ for (
+ _name,
+ power_led,
+ index,
+ fiber_lprf,
+ fiber_link,
+ fiber_connector,
+ fiber_speed,
+ copper_lprf,
+ copper_link,
+ copper_connector,
+ copper_speed,
+ ) in info:
+ if item != index:
+ continue
+
+ state, state_readable = MAP_POWER_LED[power_led]
+ yield state, "Power status: %s" % state_readable
+
+ yield 0, f"Fiber speed: {MAP_SPEED[fiber_speed]}"
+ state, state_readable = MAP_FIBER_LPRF[fiber_lprf]
+ yield state, f"LPRF: {state_readable}"
+ state, state_readable = MAP_FIBER_LINK[fiber_link]
+ yield state, f"Link: {state_readable}"
+ yield 0, f"Connector: {MAP_FIBER_CONNECTOR[fiber_connector]}"
+
+ yield 0, f"Copper speed: {MAP_SPEED[copper_speed]}"
+ state, state_readable = MAP_COPPER_LPRF[copper_lprf]
+ yield state, f"LPRF: {state_readable}"
+ state, state_readable = MAP_COPPER_LINK[copper_link]
+ yield state, f"Link: {state_readable}"
+ yield 0, f"Connector: {MAP_COPPER_CONNECTOR[copper_connector]}"
+
+
+check_info["perle_modules_cm1110"] = LegacyCheckDefinition(
+ parse_function=parse_perle_modules,
+ detect=DETECT_PERLE,
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.1966.21.1.1.1.1.4.3",
+ oids=[
+ "1.1.3",
+ "3.1.3",
+ "1.1.2",
+ "1.1.21",
+ "1.1.15",
+ "1.1.16",
+ "1.1.18",
+ "1.1.32",
+ "1.1.25",
+ "1.1.26",
+ "1.1.28",
+ ],
+ ),
+ service_name="Chassis slot %s CM1110",
+ discovery_function=inventory_perle_cm_modules,
+ check_function=check_perle_cm_modules,
+)
+
+
check_info["perle_modules_cm1000"] = LegacyCheckDefinition(
- parse_function=parse_perle_modules_cm1000,
+ parse_function=parse_perle_modules,
detect=DETECT_PERLE,
fetch=SNMPTree(
base=".1.3.6.1.4.1.1966.21.1.1.1.1.4.1",
diff --git a/cmk/base/legacy_checks/perle_modules_cm1110.py b/cmk/base/legacy_checks/perle_modules_cm1110.py
deleted file mode 100644
index 7dc52ef690e..00000000000
--- a/cmk/base/legacy_checks/perle_modules_cm1110.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.perle import check_perle_cm_modules, inventory_perle_cm_modules
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-from cmk.plugins.lib.perle import DETECT_PERLE
-
-
-def parse_perle_modules_cm1110(string_table: StringTable) -> StringTable:
- return string_table
-
-
-check_info["perle_modules_cm1110"] = LegacyCheckDefinition(
- parse_function=parse_perle_modules_cm1110,
- detect=DETECT_PERLE,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.1966.21.1.1.1.1.4.3",
- oids=[
- "1.1.3",
- "3.1.3",
- "1.1.2",
- "1.1.21",
- "1.1.15",
- "1.1.16",
- "1.1.18",
- "1.1.32",
- "1.1.25",
- "1.1.26",
- "1.1.28",
- ],
- ),
- service_name="Chassis slot %s CM1110",
- discovery_function=inventory_perle_cm_modules,
- check_function=check_perle_cm_modules,
-)
diff --git a/cmk/base/legacy_checks/perle_modules_mgt.py b/cmk/base/legacy_checks/perle_modules_mgt.py
index 3d65db2497d..613801534d9 100644
--- a/cmk/base/legacy_checks/perle_modules_mgt.py
+++ b/cmk/base/legacy_checks/perle_modules_mgt.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.perle import DETECT_PERLE
# .1.3.6.1.4.1.1966.21.1.1.1.1.4.5.1.1.2.1.1 1 --> PERLE-MCR-MGT-MIB::mcrMgtSlotIndex.1.1
diff --git a/cmk/base/legacy_checks/pfsense_if.py b/cmk/base/legacy_checks/pfsense_if.py
deleted file mode 100644
index edddc68c4ca..00000000000
--- a/cmk/base/legacy_checks/pfsense_if.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.firewall_if import check_firewall_if
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import contains, SNMPTree
-
-
-def parse_pfsense_if(string_table):
- parsed = {}
- for line in string_table:
- parsed[line[0]] = {"ip4_in_blocked": int(line[1])}
- return parsed
-
-
-def inventory_pfsense_if(parsed):
- for item in parsed:
- yield item, {}
-
-
-check_info["pfsense_if"] = LegacyCheckDefinition(
- detect=contains(".1.3.6.1.2.1.1.1.0", "pfsense"),
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.12325.1.200.1.8.2.1",
- oids=["2", "12"],
- ),
- parse_function=parse_pfsense_if,
- service_name="Firewall Interface %s",
- discovery_function=inventory_pfsense_if,
- check_function=check_firewall_if,
- check_ruleset_name="firewall_if",
- check_default_parameters={
- "ipv4_in_blocked": (100.0, 10000.0),
- "average": 3,
- },
-)
diff --git a/cmk/base/legacy_checks/pfsense_status.py b/cmk/base/legacy_checks/pfsense_status.py
index d98f75c261a..68c73eb5b64 100644
--- a/cmk/base/legacy_checks/pfsense_status.py
+++ b/cmk/base/legacy_checks/pfsense_status.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_pfsense_status(info):
diff --git a/cmk/base/legacy_checks/plesk_backups.py b/cmk/base/legacy_checks/plesk_backups.py
index c5f4c598b0f..021630c4432 100644
--- a/cmk/base/legacy_checks/plesk_backups.py
+++ b/cmk/base/legacy_checks/plesk_backups.py
@@ -8,10 +8,10 @@
import time
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition, saveint
+from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
def inventory_plesk_backups(info):
@@ -54,7 +54,7 @@ def check_plesk_backups(item, params, info): # pylint: disable=too-many-branche
if size == 0:
status = 2
status_txt = " (!!)"
- output.append(f"Last Backup - Size: {get_bytes_human_readable(size)}{status_txt}")
+ output.append(f"Last Backup - Size: {render.disksize(size)}{status_txt}")
perfdata.append(("last_backup_size", size))
age_seconds = int(time.time()) - timestamp
@@ -94,7 +94,7 @@ def check_plesk_backups(item, params, info): # pylint: disable=too-many-branche
elif total_size > params["total_size"][0]:
status = max(status, 1)
status_txt = " (!)"
- output.append(f"Total Size: {get_bytes_human_readable(total_size)}{status_txt}")
+ output.append(f"Total Size: {render.disksize(total_size)}{status_txt}")
perfdata.append(("total_size", total_size))
return (status, ", ".join(output), perfdata)
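# Note that plesk_backups renders sizes with render.disksize, while most
# other conversions in this change use render.bytes.  The stand-ins
# below illustrate the usual distinction (decimal SI prefixes for disk
# sizes, binary IEC prefixes for byte counts); the real renderers in
# cmk.agent_based.v2 may format the strings slightly differently.

def disksize(n: float) -> str:   # decimal prefixes, SI style ("GB")
    for unit in ("B", "kB", "MB", "GB", "TB"):
        if n < 1000:
            return f"{n:.2f} {unit}"
        n /= 1000
    return f"{n:.2f} PB"

def bytes_iec(n: float) -> str:  # binary prefixes, IEC style ("GiB")
    for unit in ("B", "KiB", "MiB", "GiB", "TiB"):
        if n < 1024:
            return f"{n:.2f} {unit}"
        n /= 1024
    return f"{n:.2f} PiB"

print(disksize(5 * 10**9))   # 5.00 GB
print(bytes_iec(5 * 2**30))  # 5.00 GiB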
diff --git a/cmk/base/legacy_checks/plesk_domains.py b/cmk/base/legacy_checks/plesk_domains.py
index e8034091cab..8cc3d6b5272 100644
--- a/cmk/base/legacy_checks/plesk_domains.py
+++ b/cmk/base/legacy_checks/plesk_domains.py
@@ -7,7 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_plesk_domains(info):
diff --git a/cmk/base/legacy_checks/postfix_mailq.py b/cmk/base/legacy_checks/postfix_mailq.py
index 267d7944e7a..f9d2191ccd6 100644
--- a/cmk/base/legacy_checks/postfix_mailq.py
+++ b/cmk/base/legacy_checks/postfix_mailq.py
@@ -79,7 +79,9 @@ def parse_postfix_mailq(string_table):
instance_name = ""
for line in string_table:
if line[0].startswith("[[[") and line[0].endswith("]]]"):
- instance_name = line[0][3:-3]
+ # Deal with pre-2.3 agent output, which sends an empty instance
+ # name for the "default" queue.
+ instance_name = line[0][3:-3] or "default"
queueinfo = None
# single and old output formats
@@ -121,9 +123,6 @@ def inventory_postfix_mailq(parsed):
def check_postfix_mailq(item, params, parsed):
- if item is None:
- item = ""
-
if item not in parsed:
yield 3, "Item not found"
return
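# Standalone sketch of the new instance-name fallback in
# parse_postfix_mailq: pre-2.3 agents emit "[[[]]]" for the default
# queue, which now maps to the literal item "default", so the check no
# longer needs the item-is-None special case removed above.

def instance_name(header: str) -> str:
    assert header.startswith("[[[") and header.endswith("]]]")
    return header[3:-3] or "default"

print(instance_name("[[[]]]"))          # default   (pre-2.3 agent)
print(instance_name("[[[postfix2]]]"))  # postfix2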
diff --git a/cmk/base/legacy_checks/postgres_bloat.py b/cmk/base/legacy_checks/postgres_bloat.py
index c91d2c567fc..ec7ec857173 100644
--- a/cmk/base/legacy_checks/postgres_bloat.py
+++ b/cmk/base/legacy_checks/postgres_bloat.py
@@ -6,7 +6,7 @@
# mypy: disable-error-code="index"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
from cmk.agent_based.v2 import IgnoreResultsError, render
@@ -89,14 +89,14 @@ def check_postgres_bloat(item, params, parsed): # pylint: disable=too-many-bran
yield 2, "{} wasted {} bytes: {} (too high)".format(
line["tablename"],
what,
- get_bytes_human_readable(wasted),
+ render.bytes(wasted),
)
show_levels = True
elif wasted >= warn:
yield 1, "{} wasted {} bytes: {} (too high)".format(
line["tablename"],
what,
- get_bytes_human_readable(wasted),
+ render.bytes(wasted),
)
show_levels = True
@@ -112,9 +112,7 @@ def check_postgres_bloat(item, params, parsed): # pylint: disable=too-many-bran
"%s Abs (%s/%s)"
% (
(what.title(),)
- + tuple(
- get_bytes_human_readable(int(x)) for x in params["%s_bloat_abs" % what]
- )
+ + tuple(render.bytes(int(x)) for x in params["%s_bloat_abs" % what])
)
)
yield 0, " ".join(levels_info)
@@ -132,9 +130,7 @@ def check_postgres_bloat(item, params, parsed): # pylint: disable=too-many-bran
yield 0, "Maximum wasted {}space at {}: {}".format(
what,
abs_max["tablename"],
- get_bytes_human_readable(
- int(abs_max["wasted%sbytes" % (what == "index" and "i" or "")])
- ),
+ render.bytes(int(abs_max["wasted%sbytes" % (what == "index" and "i" or "")])),
)
# Summary information
@@ -142,7 +138,7 @@ def check_postgres_bloat(item, params, parsed): # pylint: disable=too-many-bran
yield (
0,
"Summary of top %d wasted %sspace: %s"
- % (len(database), what, get_bytes_human_readable(total_value)),
+ % (len(database), what, render.bytes(total_value)),
[("%sspace_wasted" % what, total_value)],
)
diff --git a/cmk/base/legacy_checks/postgres_connections.py b/cmk/base/legacy_checks/postgres_connections.py
index 37ff5e98f53..e3ccb580951 100644
--- a/cmk/base/legacy_checks/postgres_connections.py
+++ b/cmk/base/legacy_checks/postgres_connections.py
@@ -88,9 +88,11 @@ def check_postgres_connections(item, params, parsed):
maximum = float(database_connections["mc"])
connections = {
- "active": database_connections["active"]
- if has_active_and_idle
- else database_connections["current"],
+ "active": (
+ database_connections["active"]
+ if has_active_and_idle
+ else database_connections["current"]
+ ),
"idle": database_connections["idle"] if has_active_and_idle else None,
}
diff --git a/cmk/base/legacy_checks/postgres_stat_database.py b/cmk/base/legacy_checks/postgres_stat_database.py
index 3ed316db837..ec11b7a1f2e 100644
--- a/cmk/base/legacy_checks/postgres_stat_database.py
+++ b/cmk/base/legacy_checks/postgres_stat_database.py
@@ -13,10 +13,10 @@
import time
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, IgnoreResultsError
+from cmk.agent_based.v2 import get_rate, get_value_store, IgnoreResultsError, render
def parse_postgres_stat_database(string_table):
@@ -57,6 +57,13 @@ def inventory_postgres_stat_database(parsed):
return [(k, {}) for k in parsed if parsed[k]["xact_commit"] > 0]
+def inventory_postgres_stat_database_size(parsed):
+ # https://www.postgresql.org/docs/current/monitoring-stats.html#MONITORING-PG-STAT-DATABASE-VIEW
+ # > datid: OID of this database, or 0 for objects belonging to a shared relation
+ # shared relations don't have a size, so we don't want to discover them.
+ return [(k, {}) for k in parsed if parsed[k]["xact_commit"] > 0 and parsed[k]["datid"] != "0"]
+
+
def check_postgres_stat_database(item, params, parsed):
if item not in parsed:
return (3, "Database not found")
@@ -123,7 +130,7 @@ def check_postgres_stat_database_size(item, params, parsed):
size,
"size",
levels,
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="Size",
)
@@ -131,7 +138,7 @@ def check_postgres_stat_database_size(item, params, parsed):
check_info["postgres_stat_database.size"] = LegacyCheckDefinition(
service_name="PostgreSQL DB %s Size",
sections=["postgres_stat_database"],
- discovery_function=inventory_postgres_stat_database,
+ discovery_function=inventory_postgres_stat_database_size,
check_function=check_postgres_stat_database_size,
check_ruleset_name="postgres_stat_database",
)
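# Standalone sketch of the new size discovery above: a database is
# discovered for the size service only if it committed transactions AND
# has a real OID (datid != "0"), since datid 0 marks shared relations,
# which have no size.  The parsed structure is a hypothetical minimal
# excerpt of what parse_postgres_stat_database produces.

parsed = {
    "mydb":   {"xact_commit": 4711, "datid": "16384"},
    "shared": {"xact_commit": 23,   "datid": "0"},
    "idle":   {"xact_commit": 0,    "datid": "16385"},
}

def discover_size(parsed):
    return [
        (name, {})
        for name, data in parsed.items()
        if data["xact_commit"] > 0 and data["datid"] != "0"
    ]

print(discover_size(parsed))  # [('mydb', {})]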
diff --git a/cmk/base/legacy_checks/pulse_secure_cpu_util.py b/cmk/base/legacy_checks/pulse_secure_cpu_util.py
index afb4fb7a696..ece3f4711d7 100644
--- a/cmk/base/legacy_checks/pulse_secure_cpu_util.py
+++ b/cmk/base/legacy_checks/pulse_secure_cpu_util.py
@@ -12,8 +12,7 @@
from cmk.base.config import check_info
import cmk.plugins.lib.pulse_secure as pulse_secure
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
Section = Mapping[str, int]
diff --git a/cmk/base/legacy_checks/pulse_secure_disk_util.py b/cmk/base/legacy_checks/pulse_secure_disk_util.py
index c92c5685413..ffec682e680 100644
--- a/cmk/base/legacy_checks/pulse_secure_disk_util.py
+++ b/cmk/base/legacy_checks/pulse_secure_disk_util.py
@@ -9,8 +9,7 @@
from cmk.base.config import check_info
import cmk.plugins.lib.pulse_secure as pulse_secure
-from cmk.agent_based.v2 import render, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
Section = Mapping[str, int]
diff --git a/cmk/base/legacy_checks/pulse_secure_log_util.py b/cmk/base/legacy_checks/pulse_secure_log_util.py
index 453ca81662c..b7636cc3511 100644
--- a/cmk/base/legacy_checks/pulse_secure_log_util.py
+++ b/cmk/base/legacy_checks/pulse_secure_log_util.py
@@ -9,8 +9,7 @@
from cmk.base.config import check_info
import cmk.plugins.lib.pulse_secure as pulse_secure
-from cmk.agent_based.v2 import render, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
Section = Mapping[str, int]
@@ -26,14 +25,14 @@ def discover_pulse_secure_log_util(section: Section) -> Iterable[tuple[None, dic
yield None, {}
-def check_pulse_secure_log_util(item, params, parsed):
+def check_pulse_secure_log_util(_no_item, _no_params, parsed):
if not parsed:
return
yield check_levels(
parsed[METRIC_PULSE_SECURE_LOG],
METRIC_PULSE_SECURE_LOG,
- params,
+ None,
infoname="Percentage of log file used",
human_readable_func=render.percent,
)
diff --git a/cmk/base/legacy_checks/pulse_secure_mem_util.py b/cmk/base/legacy_checks/pulse_secure_mem_util.py
index f24dbcade10..e50a0be72b6 100644
--- a/cmk/base/legacy_checks/pulse_secure_mem_util.py
+++ b/cmk/base/legacy_checks/pulse_secure_mem_util.py
@@ -9,8 +9,7 @@
from cmk.base.config import check_info
import cmk.plugins.lib.pulse_secure as pulse_secure
-from cmk.agent_based.v2 import render, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
Section = Mapping[str, int]
diff --git a/cmk/base/legacy_checks/pulse_secure_temp.py b/cmk/base/legacy_checks/pulse_secure_temp.py
index e4795b4539c..cf8b01e0789 100644
--- a/cmk/base/legacy_checks/pulse_secure_temp.py
+++ b/cmk/base/legacy_checks/pulse_secure_temp.py
@@ -10,8 +10,7 @@
from cmk.base.config import check_info
import cmk.plugins.lib.pulse_secure as pulse_secure
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
def parse_pulse_secure_temp(string_table: StringTable) -> Mapping[str, int] | None:
diff --git a/cmk/base/legacy_checks/qlogic_fcport.py b/cmk/base/legacy_checks/qlogic_fcport.py
index ccd6bc8ec7b..288dc9abffc 100644
--- a/cmk/base/legacy_checks/qlogic_fcport.py
+++ b/cmk/base/legacy_checks/qlogic_fcport.py
@@ -17,8 +17,8 @@
render,
SNMPTree,
startswith,
+ StringTable,
)
-from cmk.agent_based.v2.type_defs import StringTable
# settings for inventory: which ports should be inventorized
qlogic_fcport_inventory_opstates = ["1", "3"]
diff --git a/cmk/base/legacy_checks/qlogic_sanbox.py b/cmk/base/legacy_checks/qlogic_sanbox.py
index f6762e3f396..46c37c4abe0 100644
--- a/cmk/base/legacy_checks/qlogic_sanbox.py
+++ b/cmk/base/legacy_checks/qlogic_sanbox.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, OIDEnd, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, OIDEnd, SNMPTree, startswith, StringTable
qlogic_sanbox_status_map = [
"undefined", # 0
diff --git a/cmk/base/legacy_checks/qlogic_sanbox_fabric_element.py b/cmk/base/legacy_checks/qlogic_sanbox_fabric_element.py
index 4a4b4afa8ae..e3a70751eea 100644
--- a/cmk/base/legacy_checks/qlogic_sanbox_fabric_element.py
+++ b/cmk/base/legacy_checks/qlogic_sanbox_fabric_element.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, OIDEnd, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, OIDEnd, SNMPTree, startswith, StringTable
def inventory_qlogic_sanbox_fabric_element(info):
diff --git a/cmk/base/legacy_checks/qmail_stats.py b/cmk/base/legacy_checks/qmail_stats.py
index 548ef3db0b3..909eaeb3077 100644
--- a/cmk/base/legacy_checks/qmail_stats.py
+++ b/cmk/base/legacy_checks/qmail_stats.py
@@ -7,13 +7,12 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
-def inventory_qmail_stats(info):
- if len(info) > 0:
- return [("", {})]
- return []
+def discover_qmail_stats(info):
+ if info:
+ yield None, {}
def check_qmail_stats(_no_item, params, info):
@@ -43,10 +42,10 @@ def parse_qmail_stats(string_table: StringTable) -> StringTable:
check_info["qmail_stats"] = LegacyCheckDefinition(
parse_function=parse_qmail_stats,
- service_name="Qmail Queue %s",
- discovery_function=inventory_qmail_stats,
+ service_name="Qmail Queue",
+ discovery_function=discover_qmail_stats,
check_function=check_qmail_stats,
- check_ruleset_name="mail_queue_length",
+ check_ruleset_name="mail_queue_length_single",
check_default_parameters={
"deferred": (10, 20),
},
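# Standalone sketch of the discovery change above: qmail_stats now
# yields one item-less service (item None, fixed service name
# "Qmail Queue") instead of an empty-string item formatted into
# "Qmail Queue %s", which is what the switch to the
# mail_queue_length_single ruleset expects.  The sample agent line is
# hypothetical.

def discover_qmail(info):
    if info:
        yield None, {}

print(list(discover_qmail([["messages", "in", "queue:", "0"]])))  # [(None, {})]
print(list(discover_qmail([])))                                   # []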
diff --git a/cmk/base/legacy_checks/qnap_disks.py b/cmk/base/legacy_checks/qnap_disks.py
index 2ddd7dacf84..3f8d7a37854 100644
--- a/cmk/base/legacy_checks/qnap_disks.py
+++ b/cmk/base/legacy_checks/qnap_disks.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.qnap import DETECT_QNAP
diff --git a/cmk/base/legacy_checks/quantum_libsmall_door.py b/cmk/base/legacy_checks/quantum_libsmall_door.py
deleted file mode 100644
index efedaa8d067..00000000000
--- a/cmk/base/legacy_checks/quantum_libsmall_door.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import all_of, contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def inventory_quantum_libsmall_door(info):
- return [(None, None)]
-
-
-def check_quantum_libsmall_door(_no_item, _no_params, info):
- if info[0][0] == "1":
- return 2, "Library door open"
- if info[0][0] == "2":
- return 0, "Library door closed"
- return 3, "Library door status unknown"
-
-
-def parse_quantum_libsmall_door(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
-check_info["quantum_libsmall_door"] = LegacyCheckDefinition(
- parse_function=parse_quantum_libsmall_door,
- detect=all_of(
- contains(".1.3.6.1.2.1.1.1.0", "linux"), contains(".1.3.6.1.2.1.1.6.0", "library")
- ),
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.3697.1.10.10.1.15.2",
- oids=["0"],
- ),
- service_name="Tape library door",
- discovery_function=inventory_quantum_libsmall_door,
- check_function=check_quantum_libsmall_door,
-)
diff --git a/cmk/base/legacy_checks/quantum_libsmall_status.py b/cmk/base/legacy_checks/quantum_libsmall_status.py
deleted file mode 100644
index 98c09600bab..00000000000
--- a/cmk/base/legacy_checks/quantum_libsmall_status.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import all_of, contains, OIDEnd, SNMPTree
-
-DEVICE_TYPE_MAP = {
- "1": "Power",
- "2": "Cooling",
- "3": "Control",
- "4": "Connectivity",
- "5": "Robotics",
- "6": "Media",
- "7": "Drive",
- "8": "Operator action request",
-}
-
-RAS_STATUS_MAP = {
- "1": (0, "good"),
- "2": (2, "failed"),
- "3": (2, "degraded"),
- "4": (1, "warning"),
- "5": (0, "informational"),
- "6": (3, "unknown"),
- "7": (3, "invalid"),
-}
-
-OPNEED_STATUS_MAP = {
- "0": (0, "no"),
- "1": (2, "yes"),
- "2": (0, "no"),
-}
-
-
-def parse_quantum_libsmall_status(string_table):
- parsed = []
- for line in string_table:
- for oidend, dev_state in line:
- dev_type = DEVICE_TYPE_MAP.get(oidend.split(".")[0])
- if not (dev_type or dev_state):
- continue
- parsed.append((dev_type, dev_state))
- return parsed
-
-
-def inventory_quantum_libsmall_status(parsed):
- if parsed:
- return [(None, None)]
- return []
-
-
-def check_quantum_libsmall_status(_no_item, _no_params, parsed):
- for dev_type, dev_state in parsed:
- if dev_type == "Operator action request":
- state, state_readable = OPNEED_STATUS_MAP.get(dev_state, (3, "unknown[%s]" % dev_state))
- else:
- state, state_readable = RAS_STATUS_MAP.get(dev_state, (3, "unknown[%s]" % dev_state))
- yield state, f"{dev_type}: {state_readable}"
-
-
-check_info["quantum_libsmall_status"] = LegacyCheckDefinition(
- detect=all_of(
- contains(".1.3.6.1.2.1.1.1.0", "linux"), contains(".1.3.6.1.2.1.1.6.0", "library")
- ),
- fetch=[
- SNMPTree(
- base=".1.3.6.1.4.1.3697.1.10.10.1.15",
- oids=[OIDEnd(), "10"],
- ),
- SNMPTree(
- base=".1.3.6.1.4.1.3764.1.10.10",
- oids=[OIDEnd(), "12"],
- ),
- ],
- parse_function=parse_quantum_libsmall_status,
- service_name="Tape library status",
- discovery_function=inventory_quantum_libsmall_status,
- check_function=check_quantum_libsmall_status,
-)
diff --git a/cmk/base/legacy_checks/ra32e_power.py b/cmk/base/legacy_checks/ra32e_power.py
index b38f433bd18..502b5c802e5 100644
--- a/cmk/base/legacy_checks/ra32e_power.py
+++ b/cmk/base/legacy_checks/ra32e_power.py
@@ -4,15 +4,14 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.ra32e import DETECT_RA32E
-def inventory_ra32e_power(section: StringTable) -> DiscoveryResult:
+def discover_ra32e_power(section: StringTable) -> DiscoveryResult:
if section and section[0][0]:
yield Service()
@@ -39,6 +38,6 @@ def parse_ra32e_power(string_table: StringTable) -> StringTable:
oids=["1"],
),
service_name="Power Supply",
- discovery_function=inventory_ra32e_power,
+ discovery_function=discover_ra32e_power,
check_function=check_ra32e_power,
)
diff --git a/cmk/base/legacy_checks/ra32e_switch.py b/cmk/base/legacy_checks/ra32e_switch.py
index 47381fa57cc..f769ae52c59 100644
--- a/cmk/base/legacy_checks/ra32e_switch.py
+++ b/cmk/base/legacy_checks/ra32e_switch.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ra32e import DETECT_RA32E
diff --git a/cmk/base/legacy_checks/rabbitmq_nodes.py b/cmk/base/legacy_checks/rabbitmq_nodes.py
index 8e4d708ddd5..b946840359a 100644
--- a/cmk/base/legacy_checks/rabbitmq_nodes.py
+++ b/cmk/base/legacy_checks/rabbitmq_nodes.py
@@ -7,7 +7,7 @@
import json
from collections.abc import Callable, Iterable, Mapping, Sequence
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.mem import check_memory_element
from cmk.base.check_legacy_includes.uptime import check_uptime_seconds
from cmk.base.config import check_info
@@ -235,31 +235,6 @@ def check_rabbitmq_nodes_sockets(item, params, parsed):
)
-def check_rabbitmq_nodes_proc(item, params, parsed):
- proc_data = parsed.get(item, {}).get("proc")
- if not proc_data:
- return None
-
- used = proc_data.get("proc_used")
- if used is None:
- return None
-
- total = proc_data.get("proc_total")
- if total is None:
- return None
-
- return _handle_output(params, used, total, "Erlang processes used", "processes")
-
-
-check_info["rabbitmq_nodes.proc"] = LegacyCheckDefinition(
- service_name="RabbitMQ Node %s Processes",
- sections=["rabbitmq_nodes"],
- discovery_function=discover_key("proc"),
- check_function=check_rabbitmq_nodes_proc,
- check_ruleset_name="rabbitmq_nodes_proc",
-)
-
-
def check_rabbitmq_nodes_mem(item, params, parsed):
mem_data = parsed.get(item, {}).get("mem")
if not mem_data:
@@ -300,17 +275,17 @@ def check_rabbitmq_nodes_mem(item, params, parsed):
_METRIC_SPECS: Sequence[tuple[str, str, Callable, str]] = [
("gc_num", "GC runs", int, "gc_runs"),
("gc_num_rate", "Rate", float, "gc_runs_rate"),
- ("gc_bytes_reclaimed", "Bytes reclaimed by GC", get_bytes_human_readable, "gc_bytes"),
+ ("gc_bytes_reclaimed", "Bytes reclaimed by GC", render.bytes, "gc_bytes"),
("gc_bytes_reclaimed_rate", "Rate", render.iobandwidth, "gc_bytes_rate"),
("run_queue", "Runtime run queue", int, "runtime_run_queue"),
]
-def _get_levels(params, key, level_dir):
- if key not in params or level_dir not in params:
+def _get_levels(params, key):
+ if key not in params:
return None, None
- level_type, levels = params[key][level_dir]
+ level_type, levels = params[key]
if level_type == "no_levels":
return None, None
return levels
@@ -326,8 +301,8 @@ def check_rabbitmq_nodes_gc(item, params, parsed):
if value is None:
continue
- levels_upper = _get_levels(params, key, "levels_upper")
- levels_lower = _get_levels(params, key, "levels_lower")
+ levels_upper = _get_levels(params, f"{key}_upper")
+ levels_lower = _get_levels(params, f"{key}_lower")
yield check_levels(
value,
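# Standalone sketch of the flattened GC level lookup above: params are
# now keyed "<metric>_upper" / "<metric>_lower" and hold
# ("no_levels", None) or (<level type>, (warn, crit)) tuples, so
# _get_levels no longer digs through nested "levels_upper" /
# "levels_lower" dicts.  The param values here are hypothetical.

def get_levels(params, key):
    if key not in params:
        return None, None
    level_type, levels = params[key]
    if level_type == "no_levels":
        return None, None
    return levels

params = {
    "gc_num_upper": ("fixed", (1000, 2000)),
    "gc_num_lower": ("no_levels", None),
}
print(get_levels(params, "gc_num_upper"))     # (1000, 2000)
print(get_levels(params, "gc_num_lower"))     # (None, None)
print(get_levels(params, "run_queue_upper"))  # (None, None)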
diff --git a/cmk/base/legacy_checks/rabbitmq_queues.py b/cmk/base/legacy_checks/rabbitmq_queues.py
index de470a93b3d..a9a6fdb72da 100644
--- a/cmk/base/legacy_checks/rabbitmq_queues.py
+++ b/cmk/base/legacy_checks/rabbitmq_queues.py
@@ -17,9 +17,11 @@
import json
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_rabbitmq_queues(string_table):
parsed = {}
@@ -104,7 +106,7 @@ def check_rabbitmq_queues(item, params, parsed):
queue_memory,
"mem_lnx_total_used",
params.get("abs_memory"),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname="Memory used",
)
diff --git a/cmk/base/legacy_checks/raritan_pdu_outletcount.py b/cmk/base/legacy_checks/raritan_pdu_outletcount.py
index 1d687edcf93..50ea59327f0 100644
--- a/cmk/base/legacy_checks/raritan_pdu_outletcount.py
+++ b/cmk/base/legacy_checks/raritan_pdu_outletcount.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, SNMPTree, startswith, StringTable
def inventory_raritan_pdu_outletcount(info):
diff --git a/cmk/base/legacy_checks/rds_licenses.py b/cmk/base/legacy_checks/rds_licenses.py
index 1ebc9326efa..6be0d705277 100644
--- a/cmk/base/legacy_checks/rds_licenses.py
+++ b/cmk/base/legacy_checks/rds_licenses.py
@@ -31,7 +31,7 @@
"5": "Windows Server 2016",
"4": "Windows Server 2012",
"3": "Windows Server 2008 R2",
- "2": "Windows Server 2008"
+ "2": "Windows Server 2008",
# 1 Not supported.
# 0 Not supported.
}
diff --git a/cmk/base/legacy_checks/redis_info.py b/cmk/base/legacy_checks/redis_info.py
index 2193f509f67..afc9c248db3 100644
--- a/cmk/base/legacy_checks/redis_info.py
+++ b/cmk/base/legacy_checks/redis_info.py
@@ -4,16 +4,13 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import (
- check_levels,
- get_age_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.redis import parse_redis_info
from cmk.base.check_legacy_includes.uptime import check_uptime_seconds
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
# <<<redis_info>>>
# [[[MY_FIRST_REDIS|127.0.0.1|6380]]]
# ...
@@ -200,12 +197,12 @@ def check_redis_info_persistence(item, params, item_data):
duration_val = persistence_data.get("%s_time_sec" % duration)
if duration_val is not None and duration_val != -1:
- infotext += " (Duration: %s)" % get_age_human_readable(duration_val)
+ infotext += " (Duration: %s)" % render.timespan(duration_val)
yield state, infotext
rdb_save_time = persistence_data.get("rdb_last_save_time")
if rdb_save_time is not None:
- yield 0, "Last successful RDB save: %s" % get_timestamp_human_readable(rdb_save_time)
+ yield 0, "Last successful RDB save: %s" % render.datetime(rdb_save_time)
rdb_changes = persistence_data.get("rdb_changes_since_last_save")
if rdb_changes is not None:
diff --git a/cmk/base/legacy_checks/rms200_temp.py b/cmk/base/legacy_checks/rms200_temp.py
index d6901ae1d12..6e4aff2955d 100644
--- a/cmk/base/legacy_checks/rms200_temp.py
+++ b/cmk/base/legacy_checks/rms200_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, SNMPTree, StringTable
def inventory_rms200_temp(info):
diff --git a/cmk/base/legacy_checks/sansymphony_alerts.py b/cmk/base/legacy_checks/sansymphony_alerts.py
index 6fb22c13c8f..8ff387bb646 100644
--- a/cmk/base/legacy_checks/sansymphony_alerts.py
+++ b/cmk/base/legacy_checks/sansymphony_alerts.py
@@ -7,7 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_sansymphony_alerts(info):
diff --git a/cmk/base/legacy_checks/sansymphony_ports.py b/cmk/base/legacy_checks/sansymphony_ports.py
index b55c65c4f49..00955a57cd8 100644
--- a/cmk/base/legacy_checks/sansymphony_ports.py
+++ b/cmk/base/legacy_checks/sansymphony_ports.py
@@ -15,7 +15,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_sansymphony_ports(info):
diff --git a/cmk/base/legacy_checks/sansymphony_serverstatus.py b/cmk/base/legacy_checks/sansymphony_serverstatus.py
index 474c440687d..e9e8a6e3001 100644
--- a/cmk/base/legacy_checks/sansymphony_serverstatus.py
+++ b/cmk/base/legacy_checks/sansymphony_serverstatus.py
@@ -10,7 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_sansymphony_serverstatus(info):
diff --git a/cmk/base/legacy_checks/sap_state.py b/cmk/base/legacy_checks/sap_state.py
index fd6c27a9ead..e62edb79601 100644
--- a/cmk/base/legacy_checks/sap_state.py
+++ b/cmk/base/legacy_checks/sap_state.py
@@ -7,7 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_sap_state(info):
diff --git a/cmk/base/legacy_checks/saprouter_cert.py b/cmk/base/legacy_checks/saprouter_cert.py
index ccb51a53b96..bf45860a66a 100644
--- a/cmk/base/legacy_checks/saprouter_cert.py
+++ b/cmk/base/legacy_checks/saprouter_cert.py
@@ -29,9 +29,11 @@
import time
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_saprouter_cert(string_table):
def parse_date(list_):
@@ -80,7 +82,7 @@ def check_saprouter_cert(_no_item, params, parsed):
infotext = "Valid from {} to {}, {} to go".format(
not_before_readable,
not_after_readable,
- get_age_human_readable(validity_age),
+ render.timespan(validity_age),
)
state = 0
@@ -91,8 +93,8 @@ def check_saprouter_cert(_no_item, params, parsed):
if state:
infotext += " (warn/crit below {}/{})".format(
- get_age_human_readable(warn),
- get_age_human_readable(crit),
+ render.timespan(warn),
+ render.timespan(crit),
)
return state, infotext
diff --git a/cmk/base/legacy_checks/scaleio_pd.py b/cmk/base/legacy_checks/scaleio_pd.py
index e0faa838060..d38f8f849d5 100644
--- a/cmk/base/legacy_checks/scaleio_pd.py
+++ b/cmk/base/legacy_checks/scaleio_pd.py
@@ -9,7 +9,7 @@
from cmk.base.check_legacy_includes.scaleio import convert_scaleio_space
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
from cmk.plugins.lib.scaleio import parse_scaleio, ScaleioSection
# <<>>
diff --git a/cmk/base/legacy_checks/scaleio_sds.py b/cmk/base/legacy_checks/scaleio_sds.py
index 18bac0eb750..5cd3134c309 100644
--- a/cmk/base/legacy_checks/scaleio_sds.py
+++ b/cmk/base/legacy_checks/scaleio_sds.py
@@ -9,7 +9,7 @@
from cmk.base.check_legacy_includes.scaleio import convert_scaleio_space
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
from cmk.plugins.lib.scaleio import parse_scaleio, ScaleioSection
# example output
diff --git a/cmk/base/legacy_checks/scaleio_system.py b/cmk/base/legacy_checks/scaleio_system.py
index 283d1a2ba8a..0b399107bb0 100644
--- a/cmk/base/legacy_checks/scaleio_system.py
+++ b/cmk/base/legacy_checks/scaleio_system.py
@@ -8,7 +8,7 @@
from cmk.base.check_legacy_includes.df import df_check_filesystem_list, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
from cmk.plugins.lib.scaleio import parse_scaleio, ScaleioSection
# <<>>
diff --git a/cmk/base/legacy_checks/siemens_plc.py b/cmk/base/legacy_checks/siemens_plc.py
index 2e658e28f68..d9cbf238ca7 100644
--- a/cmk/base/legacy_checks/siemens_plc.py
+++ b/cmk/base/legacy_checks/siemens_plc.py
@@ -4,12 +4,11 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_value_store, render, StringTable
# <<>>
# PFT01 temp Gesamt 279183569715
@@ -155,7 +154,7 @@ def check_siemens_plc_duration(item, params, info):
if old_seconds is not None and old_seconds > seconds:
return (
2,
- f"Reduced from {get_age_human_readable(old_seconds)} to {get_age_human_readable(seconds)}",
+ f"Reduced from {render.time_offset(old_seconds)} to {render.time_offset(seconds)}",
perfdata,
)
@@ -168,7 +167,7 @@ def check_siemens_plc_duration(item, params, info):
elif warn is not None and seconds >= warn:
state = 1
- return state, get_age_human_readable(seconds), perfdata
+ return state, render.time_offset(seconds), perfdata
return None
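Note the choice of render.time_offset rather than render.timespan here: the Siemens PLC duration (and the systemtime offset further down) can meaningfully be signed, and time_offset renders negative values, while timespan is presumably restricted to non-negative durations. A sketch of the distinction:

    from cmk.agent_based.v2 import render

    render.time_offset(-42.0)  # signed offset, e.g. a clock running behind
    render.timespan(42.0)      # plain non-negative duration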
diff --git a/cmk/base/legacy_checks/skype.py b/cmk/base/legacy_checks/skype.py
index 1641ec6cceb..4f004d44bc4 100644
--- a/cmk/base/legacy_checks/skype.py
+++ b/cmk/base/legacy_checks/skype.py
@@ -18,7 +18,7 @@
)
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
from cmk.plugins.lib.wmi import WMISection
diff --git a/cmk/base/legacy_checks/sni_octopuse_cpu.py b/cmk/base/legacy_checks/sni_octopuse_cpu.py
index 16a138fbcf0..a975895d437 100644
--- a/cmk/base/legacy_checks/sni_octopuse_cpu.py
+++ b/cmk/base/legacy_checks/sni_octopuse_cpu.py
@@ -4,15 +4,14 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.sni_octopuse import DETECT_SNI_OCTOPUSE
-def inventory_octopus_cpu(section: StringTable) -> DiscoveryResult:
+def discover_octopus_cpu(section: StringTable) -> DiscoveryResult:
if len(section[0]) == 1:
yield Service()
@@ -35,6 +34,6 @@ def parse_sni_octopuse_cpu(string_table: StringTable) -> StringTable | None:
oids=["7"],
),
service_name="CPU utilization",
- discovery_function=inventory_octopus_cpu,
+ discovery_function=discover_octopus_cpu,
check_function=check_octopus_cpu,
)
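Besides moving DiscoveryResult and Service into the consolidated cmk.agent_based.v2 import, this hunk renames inventory_* to discover_*, matching the naming the new API uses for discovery functions. The shape of such a typed discovery function, as a sketch (discover_example is hypothetical):

    from cmk.agent_based.v2 import DiscoveryResult, Service, StringTable

    def discover_example(section: StringTable) -> DiscoveryResult:
        if section:
            yield Service()  # one item-less service when the section has data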
diff --git a/cmk/base/legacy_checks/sni_octopuse_status.py b/cmk/base/legacy_checks/sni_octopuse_status.py
index e202c5b6f82..728a3673278 100644
--- a/cmk/base/legacy_checks/sni_octopuse_status.py
+++ b/cmk/base/legacy_checks/sni_octopuse_status.py
@@ -9,15 +9,14 @@
# { normal(1), warning(2), minor(3), major(4), critical(5) }
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.sni_octopuse import DETECT_SNI_OCTOPUSE
-def inventory_octopus_status(section: StringTable) -> DiscoveryResult:
+def discover_octopus_status(section: StringTable) -> DiscoveryResult:
if len(section[0]) == 1:
yield Service()
@@ -53,6 +52,6 @@ def parse_sni_octopuse_status(string_table: StringTable) -> StringTable | None:
oids=["0"],
),
service_name="Global status",
- discovery_function=inventory_octopus_status,
+ discovery_function=discover_octopus_status,
check_function=check_octopus_status,
)
diff --git a/cmk/base/legacy_checks/sni_octopuse_trunks.py b/cmk/base/legacy_checks/sni_octopuse_trunks.py
index 76269e1fd9d..b8450b0d8e7 100644
--- a/cmk/base/legacy_checks/sni_octopuse_trunks.py
+++ b/cmk/base/legacy_checks/sni_octopuse_trunks.py
@@ -19,8 +19,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.sni_octopuse import DETECT_SNI_OCTOPUSE
diff --git a/cmk/base/legacy_checks/solaris_multipath.py b/cmk/base/legacy_checks/solaris_multipath.py
index dad08c14739..29ac88f0a0d 100644
--- a/cmk/base/legacy_checks/solaris_multipath.py
+++ b/cmk/base/legacy_checks/solaris_multipath.py
@@ -17,7 +17,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_solaris_multipath(info):
diff --git a/cmk/base/legacy_checks/solaris_prtdiag_status.py b/cmk/base/legacy_checks/solaris_prtdiag_status.py
index 937f6a1ca38..be53246df2c 100644
--- a/cmk/base/legacy_checks/solaris_prtdiag_status.py
+++ b/cmk/base/legacy_checks/solaris_prtdiag_status.py
@@ -11,7 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_solaris_prtdiag_status(info):
diff --git a/cmk/base/legacy_checks/sophos.py b/cmk/base/legacy_checks/sophos.py
index 712a978eb42..815e06744c1 100644
--- a/cmk/base/legacy_checks/sophos.py
+++ b/cmk/base/legacy_checks/sophos.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, OIDEnd, SNMPTree, StringTable
# .1.3.6.1.4.1.2604.3.4 2 --> SOPHOS::sophosHwMemoryConsumption Indicates whether the appliance is consuming excessive memory
# .1.3.6.1.4.1.2604.3.5 2 --> SOPHOS::sophosHwMemoryStatus Indicates whether the appliance detects less memory than expected
diff --git a/cmk/base/legacy_checks/sophos_cpu.py b/cmk/base/legacy_checks/sophos_cpu.py
index b1e760f704a..18c21ae9fdf 100644
--- a/cmk/base/legacy_checks/sophos_cpu.py
+++ b/cmk/base/legacy_checks/sophos_cpu.py
@@ -23,7 +23,7 @@ def check_sophos_cpu(item, params, parsed):
def discover_sophos_cpu(parsed):
- return [(None, {})] if parsed is not None else None
+ yield None, {}
check_info["sophos_cpu"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/sophos_disk.py b/cmk/base/legacy_checks/sophos_disk.py
index c880d803e53..9125d22125b 100644
--- a/cmk/base/legacy_checks/sophos_disk.py
+++ b/cmk/base/legacy_checks/sophos_disk.py
@@ -30,7 +30,7 @@ def check_sophos_disk(item, params, parsed):
def discover_sophos_disk(parsed):
- return [(None, {})] if parsed is not None else None
+ yield None, {}
check_info["sophos_disk"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/sophos_memory.py b/cmk/base/legacy_checks/sophos_memory.py
index 74addc53f1f..b0d8eb260a1 100644
--- a/cmk/base/legacy_checks/sophos_memory.py
+++ b/cmk/base/legacy_checks/sophos_memory.py
@@ -29,7 +29,7 @@ def check_sophos_memory(_item, params, parsed):
def discover_sophos_memory(parsed):
- return [(None, {})] if parsed is not None else None
+ yield None, {}
check_info["sophos_memory"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/sophos_messages.py b/cmk/base/legacy_checks/sophos_messages.py
index ea195a21bfe..cc4505f503a 100644
--- a/cmk/base/legacy_checks/sophos_messages.py
+++ b/cmk/base/legacy_checks/sophos_messages.py
@@ -23,8 +23,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import equals, get_rate, get_value_store, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import equals, get_rate, get_value_store, SNMPTree, StringTable
def inventory_sophos_messages(info):
diff --git a/cmk/base/legacy_checks/splunk_alerts.py b/cmk/base/legacy_checks/splunk_alerts.py
index 0ef76774b30..435186348e2 100644
--- a/cmk/base/legacy_checks/splunk_alerts.py
+++ b/cmk/base/legacy_checks/splunk_alerts.py
@@ -10,7 +10,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_splunk_alerts(info):
diff --git a/cmk/base/legacy_checks/splunk_license_state.py b/cmk/base/legacy_checks/splunk_license_state.py
index e6edf28283e..435a4a34ed2 100644
--- a/cmk/base/legacy_checks/splunk_license_state.py
+++ b/cmk/base/legacy_checks/splunk_license_state.py
@@ -16,14 +16,11 @@
import collections
import time
-from cmk.base.check_api import (
- get_age_human_readable,
- get_bytes_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
SplunkLicenseState = collections.namedtuple( # pylint: disable=collections-namedtuple-call
"SplunkLicenseState",
[
@@ -49,8 +46,8 @@ def parse_splunk_license_state(string_table):
SplunkLicenseState(
max_violations,
window_period,
- get_bytes_human_readable(int(quota)),
- get_timestamp_human_readable(int(expiration_time)),
+ render.bytes(int(quota)),
+ render.datetime(int(expiration_time)),
time_to_expiration,
status,
)
@@ -86,9 +83,9 @@ def check_splunk_license_state(item, params, parsed):
if state != 0:
infotext += " (expires in {} - Warn/Crit at {}/{})".format(
- get_age_human_readable(data.time_to_expiration),
- get_age_human_readable(warn),
- get_age_human_readable(crit),
+ render.timespan(data.time_to_expiration),
+ render.timespan(warn),
+ render.timespan(crit),
)
yield state, infotext
diff --git a/cmk/base/legacy_checks/splunk_license_usage.py b/cmk/base/legacy_checks/splunk_license_usage.py
index 879aa02e156..54bd58ef565 100644
--- a/cmk/base/legacy_checks/splunk_license_usage.py
+++ b/cmk/base/legacy_checks/splunk_license_usage.py
@@ -11,9 +11,11 @@
import collections
-from cmk.base.check_api import check_levels, get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
SplunkLicenseUsage = collections.namedtuple( # pylint: disable=collections-namedtuple-call
"SplunkLicenseUsage", ["quota", "slaves_usage_bytes"]
)
@@ -43,7 +45,7 @@ def inventory_splunk_license_usage(parsed):
def check_splunk_license_usage(item, params, parsed):
data = parsed["License Usage"][0]
- yield 0, "Quota: %s" % get_bytes_human_readable(data.quota)
+ yield 0, "Quota: %s" % render.bytes(data.quota)
warn, crit = params["usage_bytes"]
@@ -56,7 +58,7 @@ def check_splunk_license_usage(item, params, parsed):
value,
"splunk_slave_usage_bytes",
(warn, crit),
- human_readable_func=get_bytes_human_readable,
+ human_readable_func=render.bytes,
infoname=infotext,
)
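Since check_levels accepts a human_readable_func, the render functions slot directly into any threshold message, as the hunk above does. A self-contained sketch with made-up values:

    from cmk.base.check_api import check_levels
    from cmk.agent_based.v2 import render

    def check_example(usage_bytes, levels):
        yield check_levels(
            usage_bytes,                 # e.g. 123_456_789
            "splunk_slave_usage_bytes",  # metric name as in the hunk above
            levels,                      # (warn, crit) in bytes
            human_readable_func=render.bytes,
            infoname="Slave usage",
        )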
diff --git a/cmk/base/legacy_checks/statgrab_cpu.py b/cmk/base/legacy_checks/statgrab_cpu.py
index f74cf06a97a..4e2ccdfe699 100644
--- a/cmk/base/legacy_checks/statgrab_cpu.py
+++ b/cmk/base/legacy_checks/statgrab_cpu.py
@@ -8,7 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util_unix, CPUInfo
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_statgrab_cpu(info):
diff --git a/cmk/base/legacy_checks/steelhead_connections.py b/cmk/base/legacy_checks/steelhead_connections.py
index de4486d565a..8a4f57c97c1 100644
--- a/cmk/base/legacy_checks/steelhead_connections.py
+++ b/cmk/base/legacy_checks/steelhead_connections.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.steelhead import DETECT_STEELHEAD
diff --git a/cmk/base/legacy_checks/steelhead_peers.py b/cmk/base/legacy_checks/steelhead_peers.py
index 3e3e2f2a423..0d40bbfd980 100644
--- a/cmk/base/legacy_checks/steelhead_peers.py
+++ b/cmk/base/legacy_checks/steelhead_peers.py
@@ -12,8 +12,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.steelhead import DETECT_STEELHEAD
diff --git a/cmk/base/legacy_checks/steelhead_status.py b/cmk/base/legacy_checks/steelhead_status.py
index a7a269e4200..438e0224d8f 100644
--- a/cmk/base/legacy_checks/steelhead_status.py
+++ b/cmk/base/legacy_checks/steelhead_status.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.steelhead import DETECT_STEELHEAD
diff --git a/cmk/base/legacy_checks/stormshield_cluster.py b/cmk/base/legacy_checks/stormshield_cluster.py
index 41c5ed5b7cf..b540b7f44f2 100644
--- a/cmk/base/legacy_checks/stormshield_cluster.py
+++ b/cmk/base/legacy_checks/stormshield_cluster.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree, startswith, StringTable
sync_name_mapping = {
"1": "Synced",
diff --git a/cmk/base/legacy_checks/stormshield_cluster_node.py b/cmk/base/legacy_checks/stormshield_cluster_node.py
index e5af06f5bf7..0b069a28b00 100644
--- a/cmk/base/legacy_checks/stormshield_cluster_node.py
+++ b/cmk/base/legacy_checks/stormshield_cluster_node.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree, startswith
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, equals, exists, SNMPTree, startswith, StringTable
online_mapping = {"1": "online", "0": "offline"}
diff --git a/cmk/base/legacy_checks/stormshield_cpu_temp.py b/cmk/base/legacy_checks/stormshield_cpu_temp.py
index 3159ce106ee..f7555f0e230 100644
--- a/cmk/base/legacy_checks/stormshield_cpu_temp.py
+++ b/cmk/base/legacy_checks/stormshield_cpu_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.stormshield import DETECT_STORMSHIELD
diff --git a/cmk/base/legacy_checks/stormshield_info.py b/cmk/base/legacy_checks/stormshield_info.py
index c4824482b69..051de27c23f 100644
--- a/cmk/base/legacy_checks/stormshield_info.py
+++ b/cmk/base/legacy_checks/stormshield_info.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.stormshield import DETECT_STORMSHIELD
diff --git a/cmk/base/legacy_checks/stormshield_packets.py b/cmk/base/legacy_checks/stormshield_packets.py
index b5cf0702a41..c76a37fdd10 100644
--- a/cmk/base/legacy_checks/stormshield_packets.py
+++ b/cmk/base/legacy_checks/stormshield_packets.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, SNMPTree, StringTable
from cmk.plugins.lib.stormshield import DETECT_STORMSHIELD
# Unfortunately we cannot use the normal interface names here, because

diff --git a/cmk/base/legacy_checks/stormshield_policy.py b/cmk/base/legacy_checks/stormshield_policy.py
index bf663b125a9..09f4cece486 100644
--- a/cmk/base/legacy_checks/stormshield_policy.py
+++ b/cmk/base/legacy_checks/stormshield_policy.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.stormshield import DETECT_STORMSHIELD
diff --git a/cmk/base/legacy_checks/stormshield_route.py b/cmk/base/legacy_checks/stormshield_route.py
index a5995fb0667..c770782a259 100644
--- a/cmk/base/legacy_checks/stormshield_route.py
+++ b/cmk/base/legacy_checks/stormshield_route.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.stormshield import DETECT_STORMSHIELD
route_type_mapping = {
diff --git a/cmk/base/legacy_checks/stormshield_updates.py b/cmk/base/legacy_checks/stormshield_updates.py
index c7884f38a8a..75df9019d24 100644
--- a/cmk/base/legacy_checks/stormshield_updates.py
+++ b/cmk/base/legacy_checks/stormshield_updates.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.stormshield import DETECT_STORMSHIELD
diff --git a/cmk/base/legacy_checks/strem1_sensors.py b/cmk/base/legacy_checks/strem1_sensors.py
index d40e916b82a..e6a31eac0e8 100644
--- a/cmk/base/legacy_checks/strem1_sensors.py
+++ b/cmk/base/legacy_checks/strem1_sensors.py
@@ -11,8 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def strem1_sensors_parse_info(info):
diff --git a/cmk/base/legacy_checks/stulz_alerts.py b/cmk/base/legacy_checks/stulz_alerts.py
index bf839f217ff..36873013426 100644
--- a/cmk/base/legacy_checks/stulz_alerts.py
+++ b/cmk/base/legacy_checks/stulz_alerts.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.stulz import DETECT_STULZ
diff --git a/cmk/base/legacy_checks/stulz_humidity.py b/cmk/base/legacy_checks/stulz_humidity.py
index 25a0dbe669d..fbf5e64eeae 100644
--- a/cmk/base/legacy_checks/stulz_humidity.py
+++ b/cmk/base/legacy_checks/stulz_humidity.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.humidity import check_humidity
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.stulz import DETECT_STULZ
diff --git a/cmk/base/legacy_checks/stulz_powerstate.py b/cmk/base/legacy_checks/stulz_powerstate.py
index 02ca4b997ee..2afac273716 100644
--- a/cmk/base/legacy_checks/stulz_powerstate.py
+++ b/cmk/base/legacy_checks/stulz_powerstate.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.stulz import DETECT_STULZ
diff --git a/cmk/base/legacy_checks/stulz_pump.py b/cmk/base/legacy_checks/stulz_pump.py
index 4e51adaece0..ab36005ca68 100644
--- a/cmk/base/legacy_checks/stulz_pump.py
+++ b/cmk/base/legacy_checks/stulz_pump.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.stulz import DETECT_STULZ
diff --git a/cmk/base/legacy_checks/supermicro.py b/cmk/base/legacy_checks/supermicro.py
index 6fb015c4588..c600318d896 100644
--- a/cmk/base/legacy_checks/supermicro.py
+++ b/cmk/base/legacy_checks/supermicro.py
@@ -51,8 +51,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import all_of, any_of, contains, equals, exists, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import all_of, any_of, contains, equals, exists, SNMPTree, StringTable
DETECT_SUPERMICRO = any_of(
equals(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.311.1.1.3.1.2"),
diff --git a/cmk/base/legacy_checks/superstack3_sensors.py b/cmk/base/legacy_checks/superstack3_sensors.py
index 8404472245e..169ef3cbb79 100644
--- a/cmk/base/legacy_checks/superstack3_sensors.py
+++ b/cmk/base/legacy_checks/superstack3_sensors.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import contains, SNMPTree, StringTable
def inventory_superstack3_sensors(info):
diff --git a/cmk/base/legacy_checks/suseconnect.py b/cmk/base/legacy_checks/suseconnect.py
index 9a7de13a67d..bb7c8988a4c 100644
--- a/cmk/base/legacy_checks/suseconnect.py
+++ b/cmk/base/legacy_checks/suseconnect.py
@@ -48,15 +48,14 @@
# mypy: disable-error-code="no-untyped-def"
import time
-
-# Registered
-##.
from collections.abc import Iterable
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
from cmk.base.plugins.agent_based.suseconnect import get_data, Section
+from cmk.agent_based.v2 import render
+
def inventory_suseconnect(section: Section) -> Iterable[tuple[None, dict]]:
if get_data(section) is not None:
@@ -101,13 +100,13 @@ def check_suseconnect(_no_item, params, section: Section):
else:
state = 0
- infotext = "Expires in: %s" % get_age_human_readable(expiration_time)
+ infotext = "Expires in: %s" % render.timespan(expiration_time)
if state:
infotext += " (warn/crit at %d/%d days)" % (warn, crit)
yield state, infotext
else:
- yield 2, "Expired since: %s" % get_age_human_readable(-1.0 * expiration_time)
+ yield 2, "Expired since: %s" % render.timespan(-1.0 * expiration_time)
check_info["suseconnect"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/sylo.py b/cmk/base/legacy_checks/sylo.py
index 13e84c9daf0..94a59e143ee 100644
--- a/cmk/base/legacy_checks/sylo.py
+++ b/cmk/base/legacy_checks/sylo.py
@@ -31,8 +31,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, StringTable
def inventory_sylo(info):
diff --git a/cmk/base/legacy_checks/symantec_av_progstate.py b/cmk/base/legacy_checks/symantec_av_progstate.py
index 4aad48c511f..c615903742c 100644
--- a/cmk/base/legacy_checks/symantec_av_progstate.py
+++ b/cmk/base/legacy_checks/symantec_av_progstate.py
@@ -9,7 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_symantec_av_progstate(info):
diff --git a/cmk/base/legacy_checks/symantec_av_quarantine.py b/cmk/base/legacy_checks/symantec_av_quarantine.py
index f0205a9f4cd..5f4544f2bfb 100644
--- a/cmk/base/legacy_checks/symantec_av_quarantine.py
+++ b/cmk/base/legacy_checks/symantec_av_quarantine.py
@@ -10,7 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_symantec_av_quarantine(info):
diff --git a/cmk/base/legacy_checks/symantec_av_updates.py b/cmk/base/legacy_checks/symantec_av_updates.py
index 0b20b48ba40..8f537517cd0 100644
--- a/cmk/base/legacy_checks/symantec_av_updates.py
+++ b/cmk/base/legacy_checks/symantec_av_updates.py
@@ -18,8 +18,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, StringTable
def inventory_symantec_av_updates(info):
diff --git a/cmk/base/legacy_checks/systemtime.py b/cmk/base/legacy_checks/systemtime.py
index 5e1d918bf4a..f8129df04e8 100644
--- a/cmk/base/legacy_checks/systemtime.py
+++ b/cmk/base/legacy_checks/systemtime.py
@@ -7,10 +7,12 @@
import time
from collections.abc import Iterable
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
from cmk.base.plugins.agent_based.systemtime import Section
+from cmk.agent_based.v2 import render
+
def discover_systemtime(section: Section) -> Iterable[tuple[None, dict]]:
if section:
@@ -32,7 +34,7 @@ def check_systemtime(item, params, parsed):
offset,
"offset",
(warn, crit, -warn, -crit),
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.time_offset,
infoname="Offset",
)
diff --git a/cmk/base/legacy_checks/tplink_cpu.py b/cmk/base/legacy_checks/tplink_cpu.py
index 7f82993ff09..96084c8e382 100644
--- a/cmk/base/legacy_checks/tplink_cpu.py
+++ b/cmk/base/legacy_checks/tplink_cpu.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.cpu_util import check_cpu_util
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.tplink import DETECT_TPLINK
@@ -35,7 +34,7 @@ def check_tplink_cpu(_no_item, params, info):
return check_cpu_util(util, params, cores=cores)
-# Migration NOTE: Create a separate section, but a common check plugin for
+# Migration NOTE: Create a separate section, but a common check plug-in for
# tplink_cpu, hr_cpu, cisco_nexus_cpu, bintec_cpu, winperf_processor,
# lxc_container_cpu, docker_container_cpu.
# Migration via cmk/update_config.py!
diff --git a/cmk/base/legacy_checks/tplink_mem.py b/cmk/base/legacy_checks/tplink_mem.py
index 3799829045f..895b3f36558 100644
--- a/cmk/base/legacy_checks/tplink_mem.py
+++ b/cmk/base/legacy_checks/tplink_mem.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import render, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.tplink import DETECT_TPLINK
diff --git a/cmk/base/legacy_checks/tplink_poe_summary.py b/cmk/base/legacy_checks/tplink_poe_summary.py
index 1ca35e72332..8d3085036f9 100644
--- a/cmk/base/legacy_checks/tplink_poe_summary.py
+++ b/cmk/base/legacy_checks/tplink_poe_summary.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.tplink import DETECT_TPLINK
diff --git a/cmk/base/legacy_checks/tsm_drives.py b/cmk/base/legacy_checks/tsm_drives.py
index 2ac89a1a495..30b88306e4c 100644
--- a/cmk/base/legacy_checks/tsm_drives.py
+++ b/cmk/base/legacy_checks/tsm_drives.py
@@ -35,7 +35,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_tsm_drives(info):
diff --git a/cmk/base/legacy_checks/tsm_paths.py b/cmk/base/legacy_checks/tsm_paths.py
index c5f55b218fb..f33aee73be0 100644
--- a/cmk/base/legacy_checks/tsm_paths.py
+++ b/cmk/base/legacy_checks/tsm_paths.py
@@ -7,7 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_tsm_paths(info):
diff --git a/cmk/base/legacy_checks/tsm_sessions.py b/cmk/base/legacy_checks/tsm_sessions.py
index 14854bfc8b4..6dcc168ee2e 100644
--- a/cmk/base/legacy_checks/tsm_sessions.py
+++ b/cmk/base/legacy_checks/tsm_sessions.py
@@ -11,7 +11,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_tsm_sessions(info):
diff --git a/cmk/base/legacy_checks/tsm_storagepools.py b/cmk/base/legacy_checks/tsm_storagepools.py
index cc279b9fe25..58a6813734c 100644
--- a/cmk/base/legacy_checks/tsm_storagepools.py
+++ b/cmk/base/legacy_checks/tsm_storagepools.py
@@ -21,9 +21,11 @@
# mypy: disable-error-code="var-annotated"
-from cmk.base.check_api import get_bytes_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_tsm_storagepools(string_table):
parsed = {}
@@ -55,7 +57,7 @@ def check_tsm_storagepools(item, _no_params, parsed):
size = int(float(data["size"]) * 1024**2)
return (
0,
- f"Used size: {get_bytes_human_readable(size)}, Type: {stype}",
+ f"Used size: {render.disksize(size)}, Type: {stype}",
[("used_space", size)],
)
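Here the replacement is render.disksize rather than render.bytes; the assumption is that disksize uses decimal (SI) prefixes as customary for storage capacities, whereas render.bytes uses binary (IEC) prefixes. A sketch with sample values:

    from cmk.agent_based.v2 import render

    size = int(float("512.0") * 1024**2)  # agent reports MB, as in the hunk above
    summary = f"Used size: {render.disksize(size)}"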
diff --git a/cmk/base/legacy_checks/ucd_disk.py b/cmk/base/legacy_checks/ucd_disk.py
index 4964bdc4d6a..5b51be72190 100644
--- a/cmk/base/legacy_checks/ucd_disk.py
+++ b/cmk/base/legacy_checks/ucd_disk.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.df import df_check_filesystem_single, FILESYSTEM_DEFAULT_PARAMS
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib import ucd_hr_detection
# .1.3.6.1.4.1.2021.9.1.2.1 / --> UCD-SNMP-MIB::dskPath.1
@@ -22,12 +21,22 @@ def inventory_ucd_disk(info):
def check_ucd_disk(item, params, info):
- for disk_path, disk_total_str, disk_avail_str in info:
+ """Provided elements are
+ 2: dskPath
+ 6: dskTotal (kb)
+ 7: dskAvail (kb)
+ see https://oidref.com/1.3.6.1.4.1.2021.9.1
+ """
+ for disk_path, disk_total_kb_str, disk_avail_kb_str in info:
if disk_path == item:
- disk_total_mb = float(disk_total_str) / 1024
- disk_avail_mb = float(disk_avail_str) / 1024
return df_check_filesystem_single(
- item, disk_total_mb, disk_avail_mb, 0, None, None, params
+ mountpoint=item,
+ size_mb=float(disk_total_kb_str) / 1024,
+ avail_mb=float(disk_avail_kb_str) / 1024,
+ reserved_mb=0,
+ inodes_total=None,
+ inodes_avail=None,
+ params=params,
)
return None
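The ucd_disk rewrite is cosmetic plus documentation: the positional call becomes a keyword call, and the renamed locals record that UCD-SNMP-MIB reports dskTotal/dskAvail in kilobytes while df_check_filesystem_single works in megabytes. The unit arithmetic in isolation (sample value):

    dsk_total_kb = 52_428_800      # dskTotal from the walk, in kB
    size_mb = dsk_total_kb / 1024  # -> 51200.0 MB, the unit the df helper expects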
diff --git a/cmk/base/legacy_checks/ucd_mem.py b/cmk/base/legacy_checks/ucd_mem.py
index 59a61c38807..3b6d70cddeb 100644
--- a/cmk/base/legacy_checks/ucd_mem.py
+++ b/cmk/base/legacy_checks/ucd_mem.py
@@ -26,14 +26,13 @@
# .1.3.6.1.4.1.2021.4.100.0 0 --> UCD-SNMP-MIB::memSwapError.0
# .1.3.6.1.4.1.2021.4.101.0 --> UCD-SNMP-MIB::memSwapErrorMsg.0
-# suggested by customer
-
def inventory_ucd_mem(parsed):
- return [("", {})] if parsed else []
+ if parsed:
+ yield None, {}
-def check_ucd_mem(item, params, parsed):
+def check_ucd_mem(_no_item, params, parsed):
if not parsed:
return
@@ -55,12 +54,12 @@ def check_ucd_mem(item, params, parsed):
yield params.get("swap_errors", 0), "Swap error: %s" % parsed["error_swap_msg"]
-# This check plugin uses the migrated section in cmk/base/plugins/agent_based/ucd_mem.py!
+# This check plug-in uses the migrated section in cmk/base/plugins/agent_based/ucd_mem.py!
check_info["ucd_mem"] = LegacyCheckDefinition(
service_name="Memory",
discovery_function=inventory_ucd_mem,
check_function=check_ucd_mem,
- check_ruleset_name="memory_simple",
+ check_ruleset_name="memory_simple_single",
check_default_parameters={
"levels": ("perc_used", (80.0, 90.0)),
"swap_errors": 0,
diff --git a/cmk/base/legacy_checks/ucd_processes.py b/cmk/base/legacy_checks/ucd_processes.py
index 6672756e134..341bbcb6cf3 100644
--- a/cmk/base/legacy_checks/ucd_processes.py
+++ b/cmk/base/legacy_checks/ucd_processes.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib import ucd_hr_detection
# .1.3.6.1.4.1.2021.2.1.2.1 Web-Processes --> UCD-SNMP-MIB::prNames.1
diff --git a/cmk/base/legacy_checks/ucs_bladecenter_fans.py b/cmk/base/legacy_checks/ucs_bladecenter_fans.py
index ea22550bc28..e2745dccf73 100644
--- a/cmk/base/legacy_checks/ucs_bladecenter_fans.py
+++ b/cmk/base/legacy_checks/ucs_bladecenter_fans.py
@@ -5,7 +5,7 @@
from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.check_legacy_includes.temperature import check_temperature_list
+from cmk.base.check_legacy_includes.temperature import check_temperature_list, CheckTempKwargs
from cmk.base.config import check_info
import cmk.plugins.lib.ucs_bladecenter as ucs_bladecenter
@@ -107,7 +107,7 @@ def inventory_ucs_bladecenter_fans_temp(parsed):
def check_ucs_bladecenter_fans_temp(item, params, parsed):
sensor_item = item[8:-4] # drop "Ambient " and " FAN"
- sensor_list = []
+ sensor_list: list[tuple[str, int | float, CheckTempKwargs]] = []
for key, values in parsed.items():
if key.startswith(sensor_item) and "AmbientTemp" in values:
loc = key.split()[-1].split(".")
@@ -115,9 +115,10 @@ def check_ucs_bladecenter_fans_temp(item, params, parsed):
(
f"Module {loc[0]} Fan {loc[1]}",
float(values.get("AmbientTemp")),
+ {},
)
)
- return check_temperature_list(sensor_list, params, "ucs_bladecenter_fans_%s" % item)
+ yield from check_temperature_list(sensor_list, params)
check_info["ucs_bladecenter_fans.temp"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/ucs_bladecenter_psu.py b/cmk/base/legacy_checks/ucs_bladecenter_psu.py
index b7492de2de1..77414c011fc 100644
--- a/cmk/base/legacy_checks/ucs_bladecenter_psu.py
+++ b/cmk/base/legacy_checks/ucs_bladecenter_psu.py
@@ -6,7 +6,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.elphase import check_elphase
-from cmk.base.check_legacy_includes.temperature import check_temperature_list
+from cmk.base.check_legacy_includes.temperature import check_temperature_list, CheckTempKwargs
from cmk.base.config import check_info
import cmk.plugins.lib.ucs_bladecenter as ucs_bladecenter
@@ -158,17 +158,12 @@ def inventory_ucs_bladecenter_psu_chassis_temp(parsed):
def check_ucs_bladecenter_psu_chassis_temp(item, params, parsed):
sensor_item = item[8:] # drop "Ambient "
- sensor_list = []
-
- for key, values in sorted(parsed.items()):
- if key.startswith(sensor_item) and "AmbientTemp" in values:
- sensor_list.append(
- (
- "Module %s" % key.split()[-1],
- float(values.get("AmbientTemp")),
- )
- )
- return check_temperature_list(sensor_list, params, "ucs_bladecenter_psu_chassis_temp_%s" % item)
+ sensor_list: list[tuple[str, float, CheckTempKwargs]] = [
+ ("Module %s" % key.split()[-1], float(values.get("AmbientTemp")), {})
+ for key, values in sorted(parsed.items())
+ if key.startswith(sensor_item) and "AmbientTemp" in values
+ ]
+ yield from check_temperature_list(sensor_list, params)
check_info["ucs_bladecenter_psu.chassis_temp"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/ucs_bladecenter_topsystem.py b/cmk/base/legacy_checks/ucs_bladecenter_topsystem.py
index 98e632800c7..105b7333f67 100644
--- a/cmk/base/legacy_checks/ucs_bladecenter_topsystem.py
+++ b/cmk/base/legacy_checks/ucs_bladecenter_topsystem.py
@@ -10,7 +10,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_ucs_bladecenter_topsystem(info):
diff --git a/cmk/base/legacy_checks/unitrends_backup.py b/cmk/base/legacy_checks/unitrends_backup.py
index 3a83d6dce1b..84d8a41e706 100644
--- a/cmk/base/legacy_checks/unitrends_backup.py
+++ b/cmk/base/legacy_checks/unitrends_backup.py
@@ -14,7 +14,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_unitrends_backup(info):
diff --git a/cmk/base/legacy_checks/unitrends_replication.py b/cmk/base/legacy_checks/unitrends_replication.py
index 5af64d197df..d5b67d6f9db 100644
--- a/cmk/base/legacy_checks/unitrends_replication.py
+++ b/cmk/base/legacy_checks/unitrends_replication.py
@@ -9,7 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_unitrends_replication(info):
diff --git a/cmk/base/legacy_checks/ups_bat_temp.py b/cmk/base/legacy_checks/ups_bat_temp.py
index 52a1ae3ad16..31ea8807a60 100644
--- a/cmk/base/legacy_checks/ups_bat_temp.py
+++ b/cmk/base/legacy_checks/ups_bat_temp.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ups import DETECT_UPS_GENERIC
diff --git a/cmk/base/legacy_checks/ups_cps_outphase.py b/cmk/base/legacy_checks/ups_cps_outphase.py
index 28b624f6b09..840c38ecc3a 100644
--- a/cmk/base/legacy_checks/ups_cps_outphase.py
+++ b/cmk/base/legacy_checks/ups_cps_outphase.py
@@ -5,9 +5,7 @@
from collections.abc import Iterable, Mapping
-from typing import Literal
-
-from typing_extensions import TypedDict
+from typing import Literal, TypedDict
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.elphase import check_elphase
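TypedDict has lived in the standard library's typing module since Python 3.8, so the typing_extensions import can be dropped here. Minimal illustration (OutPhase is a hypothetical shape, not the plugin's actual type):

    from typing import Literal, TypedDict  # stdlib since Python 3.8

    class OutPhase(TypedDict):
        voltage: float
        output_load: int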
diff --git a/cmk/base/legacy_checks/ups_eaton_enviroment.py b/cmk/base/legacy_checks/ups_eaton_enviroment.py
index 284fead7f21..0aaac559995 100644
--- a/cmk/base/legacy_checks/ups_eaton_enviroment.py
+++ b/cmk/base/legacy_checks/ups_eaton_enviroment.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, equals, SNMPTree, StringTable
def inventory_ups_eaton_enviroment(info):
diff --git a/cmk/base/legacy_checks/ups_in_voltage.py b/cmk/base/legacy_checks/ups_in_voltage.py
index f50e3257ae5..8be4a1a30db 100644
--- a/cmk/base/legacy_checks/ups_in_voltage.py
+++ b/cmk/base/legacy_checks/ups_in_voltage.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.ups_in_voltage import check_ups_in_voltage
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.ups import DETECT_UPS_GENERIC
diff --git a/cmk/base/legacy_checks/ups_modulys_alarms.py b/cmk/base/legacy_checks/ups_modulys_alarms.py
index 6a8c0952cd3..9884438ce68 100644
--- a/cmk/base/legacy_checks/ups_modulys_alarms.py
+++ b/cmk/base/legacy_checks/ups_modulys_alarms.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.ups_modulys import DETECT_UPS_MODULYS
diff --git a/cmk/base/legacy_checks/ups_out_voltage.py b/cmk/base/legacy_checks/ups_out_voltage.py
index 8086beba994..bc42ac4e131 100644
--- a/cmk/base/legacy_checks/ups_out_voltage.py
+++ b/cmk/base/legacy_checks/ups_out_voltage.py
@@ -10,8 +10,7 @@
from cmk.base.check_legacy_includes.ups_out_voltage import check_ups_out_voltage
from cmk.base.config import check_info
-from cmk.agent_based.v2 import OIDEnd, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import OIDEnd, SNMPTree, StringTable
from cmk.plugins.lib.ups import DETECT_UPS_GENERIC
diff --git a/cmk/base/legacy_checks/ups_socomec_capacity.py b/cmk/base/legacy_checks/ups_socomec_capacity.py
index 662c5d99661..208e3488bff 100644
--- a/cmk/base/legacy_checks/ups_socomec_capacity.py
+++ b/cmk/base/legacy_checks/ups_socomec_capacity.py
@@ -13,8 +13,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ups_socomec import DETECT_SOCOMEC
diff --git a/cmk/base/legacy_checks/ups_socomec_in_voltage.py b/cmk/base/legacy_checks/ups_socomec_in_voltage.py
index deb4eeb1aa7..fc2784e4c6b 100644
--- a/cmk/base/legacy_checks/ups_socomec_in_voltage.py
+++ b/cmk/base/legacy_checks/ups_socomec_in_voltage.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.ups_in_voltage import check_ups_in_voltage
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ups_socomec import DETECT_SOCOMEC
diff --git a/cmk/base/legacy_checks/ups_socomec_out_source.py b/cmk/base/legacy_checks/ups_socomec_out_source.py
index e080f50fcab..2de2ce34f89 100644
--- a/cmk/base/legacy_checks/ups_socomec_out_source.py
+++ b/cmk/base/legacy_checks/ups_socomec_out_source.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ups_socomec import DETECT_SOCOMEC
diff --git a/cmk/base/legacy_checks/ups_socomec_out_voltage.py b/cmk/base/legacy_checks/ups_socomec_out_voltage.py
index b37d61ced56..7cbe3afd1a1 100644
--- a/cmk/base/legacy_checks/ups_socomec_out_voltage.py
+++ b/cmk/base/legacy_checks/ups_socomec_out_voltage.py
@@ -8,8 +8,7 @@
from cmk.base.check_legacy_includes.ups_out_voltage import check_ups_out_voltage
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.ups_socomec import DETECT_SOCOMEC
diff --git a/cmk/base/legacy_checks/ups_test.py b/cmk/base/legacy_checks/ups_test.py
index c89664cbd5f..b9d2efb5a86 100644
--- a/cmk/base/legacy_checks/ups_test.py
+++ b/cmk/base/legacy_checks/ups_test.py
@@ -6,12 +6,11 @@
from collections.abc import Sequence
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.uptime import parse_snmp_uptime
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import render, SNMPTree, StringTable
from cmk.plugins.lib.ups import DETECT_UPS_GENERIC
# Description of OIDs used from RFC 1628
@@ -121,7 +120,7 @@ def check_ups_test(_no_item, params, info):
uptime - start_time,
None,
params.get("levels_elapsed_time"),
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname=label,
)
diff --git a/cmk/base/legacy_checks/vbox_guest.py b/cmk/base/legacy_checks/vbox_guest.py
index c1a0e5dfc17..de3cb6cc86c 100644
--- a/cmk/base/legacy_checks/vbox_guest.py
+++ b/cmk/base/legacy_checks/vbox_guest.py
@@ -7,7 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def vbox_guest_make_dict(info):
diff --git a/cmk/base/legacy_checks/veeam_client.py b/cmk/base/legacy_checks/veeam_client.py
index ed63ae9184f..53b5836de84 100644
--- a/cmk/base/legacy_checks/veeam_client.py
+++ b/cmk/base/legacy_checks/veeam_client.py
@@ -8,11 +8,7 @@
import time
-from cmk.base.check_api import (
- get_age_human_readable,
- get_bytes_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
from cmk.agent_based.v2 import render
@@ -74,20 +70,20 @@ def check_veeam_client(item, params, parsed): # pylint: disable=too-many-branch
TotalSizeByte = int(data["TotalSizeByte"])
perfdata.append(("totalsize", TotalSizeByte))
- size_info.append(get_bytes_human_readable(TotalSizeByte))
+ size_info.append(render.bytes(TotalSizeByte))
size_legend.append("total")
# Output ReadSize and TransferedSize if available
if "ReadSizeByte" in data:
ReadSizeByte = int(data["ReadSizeByte"])
perfdata.append(("readsize", ReadSizeByte))
- size_info.append(get_bytes_human_readable(ReadSizeByte))
+ size_info.append(render.bytes(ReadSizeByte))
size_legend.append("read")
if "TransferedSizeByte" in data:
TransferedSizeByte = int(data["TransferedSizeByte"])
perfdata.append(("transferredsize", TransferedSizeByte))
- size_info.append(get_bytes_human_readable(TransferedSizeByte))
+ size_info.append(render.bytes(TransferedSizeByte))
size_legend.append("transferred")
infotexts.append("Size ({}): {}".format("/".join(size_legend), "/ ".join(size_info)))
@@ -116,17 +112,17 @@ def check_veeam_client(item, params, parsed): # pylint: disable=too-many-branch
state = 2
label = "(!!)"
levels = " (Warn/Crit: {}/{})".format(
- get_age_human_readable(warn),
- get_age_human_readable(crit),
+ render.timespan(warn),
+ render.timespan(crit),
)
elif age >= warn:
state = max(state, 1)
label = "(!)"
levels = " (Warn/Crit: {}/{})".format(
- get_age_human_readable(warn),
- get_age_human_readable(crit),
+ render.timespan(warn),
+ render.timespan(crit),
)
- infotexts.append(f"Last backup: {get_age_human_readable(age)} ago{label}{levels}")
+ infotexts.append(f"Last backup: {render.timespan(age)} ago{label}{levels}")
# Check duration only if currently not running
if data["Status"] not in ["InProgress", "Pending"]:
@@ -138,7 +134,7 @@ def check_veeam_client(item, params, parsed): # pylint: disable=too-many-branch
duration += minutes * 60
duration += hours * 60 * 60
duration += days * 60 * 60 * 24
- infotexts.append("Duration: %s" % get_age_human_readable(duration))
+ infotexts.append("Duration: %s" % render.timespan(duration))
perfdata.append(("duration", duration))
if "AvgSpeedBps" in data:
diff --git a/cmk/base/legacy_checks/veeam_jobs.py b/cmk/base/legacy_checks/veeam_jobs.py
index 7831352e712..85d70273d72 100644
--- a/cmk/base/legacy_checks/veeam_jobs.py
+++ b/cmk/base/legacy_checks/veeam_jobs.py
@@ -18,7 +18,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_veeam_jobs(info):
diff --git a/cmk/base/legacy_checks/veeam_tapejobs.py b/cmk/base/legacy_checks/veeam_tapejobs.py
index 7d9d973cc2e..24f1d217f17 100644
--- a/cmk/base/legacy_checks/veeam_tapejobs.py
+++ b/cmk/base/legacy_checks/veeam_tapejobs.py
@@ -6,15 +6,10 @@
import time
-from cmk.base.check_api import (
- check_levels,
- get_age_human_readable,
- get_timestamp_human_readable,
- LegacyCheckDefinition,
-)
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_value_store
+from cmk.agent_based.v2 import get_value_store, render
BACKUP_STATE = {"Success": 0, "Warning": 1, "Failed": 2}
@@ -60,22 +55,22 @@ def check_veeam_tapejobs(item, params, parsed):
value_store[f"{job_id}.running_since"] = None
return
- running_since = value_store.get("%s.running_since" % job_id)
+ running_since = value_store.get(f"{job_id}.running_since")
now = time.time()
if not running_since:
running_since = now
- value_store[f"{job_id}.running_since" % job_id] = now
+ value_store[f"{job_id}.running_since"] = now
running_time = now - running_since
yield 0, "Backup in progress since {} (currently {})".format(
- get_timestamp_human_readable(running_since),
+ render.datetime(running_since),
last_state.lower(),
)
yield check_levels(
running_time,
None,
params["levels_upper"],
- human_readable_func=get_age_human_readable,
+ human_readable_func=render.timespan,
infoname="Running time",
)
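Beyond the render migration, this hunk fixes a latent bug: the removed line mixed f-string interpolation with %-formatting, and because the already-interpolated string contains no conversion specifiers, applying % to it raises at runtime. Demonstration:

    job_id = "42"
    f"{job_id}.running_since"              # OK: "42.running_since"
    # f"{job_id}.running_since" % job_id   # TypeError: not all arguments
    #                                      # converted during string formatting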
diff --git a/cmk/base/legacy_checks/viprinet_firmware.py b/cmk/base/legacy_checks/viprinet_firmware.py
index fafe0d0c7c4..73d802dacc6 100644
--- a/cmk/base/legacy_checks/viprinet_firmware.py
+++ b/cmk/base/legacy_checks/viprinet_firmware.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.viprinet import DETECT_VIPRINET
diff --git a/cmk/base/legacy_checks/viprinet_mem.py b/cmk/base/legacy_checks/viprinet_mem.py
index 515cc7a3b7c..6a3d7d7814e 100644
--- a/cmk/base/legacy_checks/viprinet_mem.py
+++ b/cmk/base/legacy_checks/viprinet_mem.py
@@ -4,17 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import (
- DiscoveryResult,
- get_bytes_human_readable,
- LegacyCheckDefinition,
- saveint,
- Service,
-)
+from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, render, Service, SNMPTree, StringTable
from cmk.plugins.lib.viprinet import DETECT_VIPRINET
@@ -30,7 +23,7 @@ def discover_viprinet_mem(section: StringTable) -> DiscoveryResult:
def check_viprinet_mem(_no_item, _no_params, info):
return (
0,
- "Memory used: %s" % get_bytes_human_readable(saveint(info[0][0])),
+ "Memory used: %s" % render.bytes(saveint(info[0][0])),
)
diff --git a/cmk/base/legacy_checks/viprinet_power.py b/cmk/base/legacy_checks/viprinet_power.py
index 46b6ba8e148..b46a9ce33a3 100644
--- a/cmk/base/legacy_checks/viprinet_power.py
+++ b/cmk/base/legacy_checks/viprinet_power.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.viprinet import DETECT_VIPRINET
diff --git a/cmk/base/legacy_checks/viprinet_router.py b/cmk/base/legacy_checks/viprinet_router.py
index 4112f6cd4f6..8c339f5f0ed 100644
--- a/cmk/base/legacy_checks/viprinet_router.py
+++ b/cmk/base/legacy_checks/viprinet_router.py
@@ -4,11 +4,10 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.viprinet import DETECT_VIPRINET
diff --git a/cmk/base/legacy_checks/viprinet_serial.py b/cmk/base/legacy_checks/viprinet_serial.py
index 249dddf986a..6ba1ae71690 100644
--- a/cmk/base/legacy_checks/viprinet_serial.py
+++ b/cmk/base/legacy_checks/viprinet_serial.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import SNMPTree, StringTable
from cmk.plugins.lib.viprinet import DETECT_VIPRINET
diff --git a/cmk/base/legacy_checks/viprinet_temp.py b/cmk/base/legacy_checks/viprinet_temp.py
index 8471c85a38d..335b5309419 100644
--- a/cmk/base/legacy_checks/viprinet_temp.py
+++ b/cmk/base/legacy_checks/viprinet_temp.py
@@ -4,12 +4,11 @@
# conditions defined in the file COPYING, which is part of this source code package.
-from cmk.base.check_api import DiscoveryResult, LegacyCheckDefinition, Service
+from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
from cmk.plugins.lib.viprinet import DETECT_VIPRINET
diff --git a/cmk/base/legacy_checks/vms_queuejobs.py b/cmk/base/legacy_checks/vms_queuejobs.py
index 5008acd8864..56efa521e85 100644
--- a/cmk/base/legacy_checks/vms_queuejobs.py
+++ b/cmk/base/legacy_checks/vms_queuejobs.py
@@ -12,7 +12,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_vms_queuejobs(info):
diff --git a/cmk/base/legacy_checks/vms_system.py b/cmk/base/legacy_checks/vms_system.py
index 09cbbe81e25..a93602cb5a1 100644
--- a/cmk/base/legacy_checks/vms_system.py
+++ b/cmk/base/legacy_checks/vms_system.py
@@ -16,7 +16,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def parse_vms_system(string_table: StringTable) -> StringTable:
diff --git a/cmk/base/legacy_checks/vms_users.py b/cmk/base/legacy_checks/vms_users.py
index 62d901b814c..d26b058ffb7 100644
--- a/cmk/base/legacy_checks/vms_users.py
+++ b/cmk/base/legacy_checks/vms_users.py
@@ -13,7 +13,7 @@
from cmk.base.check_api import LegacyCheckDefinition, saveint
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_vms_users(info):
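
The vms_* and following hunks repeat the other mechanical pattern of this diff: `StringTable` and friends move from the `cmk.agent_based.v2.type_defs` submodule to the package root. Before/after, exactly as the hunks show:

```python
# Before: types were imported from the type_defs submodule.
# from cmk.agent_based.v2.type_defs import StringTable

# After: everything is re-exported from the v2 package root.
from cmk.agent_based.v2 import DiscoveryResult, Service, SNMPTree, StringTable
```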
diff --git a/cmk/base/legacy_checks/vnx_version.py b/cmk/base/legacy_checks/vnx_version.py
index d209ecfe7b8..e6e3ecc0708 100644
--- a/cmk/base/legacy_checks/vnx_version.py
+++ b/cmk/base/legacy_checks/vnx_version.py
@@ -7,7 +7,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_vnx_version(info):
diff --git a/cmk/base/legacy_checks/vxvm_objstatus.py b/cmk/base/legacy_checks/vxvm_objstatus.py
index c9c39c5260d..8724ef374f6 100644
--- a/cmk/base/legacy_checks/vxvm_objstatus.py
+++ b/cmk/base/legacy_checks/vxvm_objstatus.py
@@ -15,7 +15,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def vxvm_objstatus_disks(info):
diff --git a/cmk/base/legacy_checks/wagner_titanus_topsense.py b/cmk/base/legacy_checks/wagner_titanus_topsense.py
index 435a4c670d3..39a7783a81a 100644
--- a/cmk/base/legacy_checks/wagner_titanus_topsense.py
+++ b/cmk/base/legacy_checks/wagner_titanus_topsense.py
@@ -6,12 +6,11 @@
from collections.abc import Sequence
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
+from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.check_legacy_includes.temperature import check_temperature
from cmk.base.config import check_info
-from cmk.agent_based.v2 import any_of, equals, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import any_of, equals, render, SNMPTree, StringTable
def parse_wagner_titanus_topsense(
@@ -88,13 +87,13 @@ def parse_wagner_titanus_topsens(info):
def inventory_wagner_titanus_topsense_info(info):
- return [(None, None)]
+ yield None, {}
def check_wagner_titanus_topsense_info(item, _no_params, info):
parsed = parse_wagner_titanus_topsens(info)
message = "System: " + parsed[0][0][0]
- message += ", Uptime: " + get_age_human_readable(int(parsed[0][0][1]) // 100)
+ message += ", Uptime: " + render.timespan(int(parsed[0][0][1]) // 100)
message += ", System Name: " + parsed[0][0][3]
message += ", System Contact: " + parsed[0][0][2]
message += ", System Location: " + parsed[0][0][4]
@@ -133,7 +132,7 @@ def check_wagner_titanus_topsense_info(item, _no_params, info):
def inventory_wagner_titanus_topsense_overall_status(info):
- return [(None, None)]
+ yield None, {}
def check_wagner_titanus_topsense_overall_status(item, _no_params, info):
@@ -168,7 +167,8 @@ def check_wagner_titanus_topsense_overall_status(item, _no_params, info):
def inventory_wagner_titanus_topsense_alarm(info):
- return [("1", None), ("2", None)]
+ yield "1", {}
+ yield "2", {}
def check_wagner_titanus_topsense_alarm(item, _no_params, info):
@@ -218,7 +218,8 @@ def check_wagner_titanus_topsense_alarm(item, _no_params, info):
def inventory_wagner_titanus_topsense_smoke(info):
- return [("1", None), ("2", None)]
+ yield "1", {}
+ yield "2", {}
def check_wagner_titanus_topsense_smoke(item, _no_params, info):
@@ -260,7 +261,8 @@ def check_wagner_titanus_topsense_smoke(item, _no_params, info):
def inventory_wagner_titanus_topsense_chamber_deviation(info):
- return [("1", None), ("2", None)]
+ yield "1", {}
+ yield "2", {}
def check_wagner_titanus_topsense_chamber_deviation(item, _no_params, info):
@@ -296,10 +298,8 @@ def check_wagner_titanus_topsense_chamber_deviation(item, _no_params, info):
def inventory_wagner_titanus_topsense_airflow_deviation(info):
- return [
- ("1", {}),
- ("2", {}),
- ]
+ yield "1", {}
+ yield "2", {}
def check_wagner_titanus_topsense_airflow_deviation(item, params, info):
@@ -344,7 +344,8 @@ def check_wagner_titanus_topsense_airflow_deviation(item, params, info):
def inventory_wagner_titanus_topsense_temp(info):
- return [("Ambient 1", {}), ("Ambient 2", {})]
+ yield "Ambient 1", {}
+ yield "Ambient 2", {}
def check_wagner_titanus_topsense_temp(item, params, info):
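
The wagner_titanus_topsense hunks also convert list-returning discovery functions into generators and replace `None` default parameters with empty dicts. A minimal sketch of the target pattern:

```python
from collections.abc import Iterator

# Before: return [("1", None), ("2", None)]
# After: yield one (item, default_parameters) pair per service; {} instead
# of None keeps the parameters a mapping in every code path.
def inventory_sketch(info: list[list[str]]) -> Iterator[tuple[str, dict]]:
    yield "1", {}
    yield "2", {}
```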
diff --git a/cmk/base/legacy_checks/watchdog_sensors.py b/cmk/base/legacy_checks/watchdog_sensors.py
index 3ae2971ba8a..a56257b98a2 100644
--- a/cmk/base/legacy_checks/watchdog_sensors.py
+++ b/cmk/base/legacy_checks/watchdog_sensors.py
@@ -243,8 +243,8 @@ def check_watchdog_sensors_humidity(item, params, parsed):
check_function=check_watchdog_sensors_humidity,
check_ruleset_name="humidity",
check_default_parameters={
- "levels": (50, 55),
- "levels_lower": (10, 15),
+ "levels": (50.0, 55.0),
+ "levels_lower": (10.0, 15.0),
},
)
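
The watchdog_sensors defaults become float pairs because the humidity thresholds are percentages; float literals keep them consistent with the `tuple[float, float]` shape that percent levels use elsewhere. Illustrative sketch only:

```python
# Illustrative only: float literals make the percent thresholds
# type-consistent with tuple[float, float] levels.
check_default_parameters = {
    "levels": (50.0, 55.0),        # upper warn/crit, percent
    "levels_lower": (10.0, 15.0),  # lower warn/crit, percent
}
```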
diff --git a/cmk/base/legacy_checks/websphere_mq_channels.py b/cmk/base/legacy_checks/websphere_mq_channels.py
deleted file mode 100644
index f26a85868be..00000000000
--- a/cmk/base/legacy_checks/websphere_mq_channels.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# Anzahl_Message Channelname MaxMessages_Moeglich Status" (German: message count, channel name, max possible messages, status)
-# <<<websphere_mq_channels>>>
-# 0 CHANNEL(C000052.C000051) 5000 Unknown
-# 0 CHANNEL(C000052.CATSOS.03) 5000 RUNNING
-# 0 CHANNEL(C000052.DXUZ001) 5000 RUNNING
-# 0 CHANNEL(C000052.N000011) 5000 RUNNING
-# 0 CHANNEL(C000052.SI0227450.T1) 10000 RUNNING
-# 0 CHANNEL(C000052.SOX10.T1) 10000 STOPPED
-# 0 CHANNEL(C000052.SV1348520.T1) 5000 RUNNING
-# 0 CHANNEL(C000052.SV2098742.T1) 5000 Unknown
-
-
-# mypy: disable-error-code="var-annotated"
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import render
-
-
-def parse_websphere_mq_channels(string_table):
- parsed = {}
- for line in string_table:
- if len(line) == 2:
- messages, max_messages = 0, 0
- channel_name = line[0]
- channel_status = line[1]
- elif len(line) == 4:
- messages = int(line[0])
- channel_name = line[1]
- max_messages = int(line[2])
- channel_status = line[3]
- else:
- continue
-
- parsed.setdefault(
- channel_name,
- {
- "messages": messages,
- "max_messages": max_messages,
- "channel_status": channel_status,
- },
- )
- return parsed
-
-
-def inventory_websphere_mq_channels(parsed):
- for channel_name in parsed:
- yield channel_name, {}
-
-
-def check_websphere_mq_channels(item, params, parsed):
- if isinstance(params, tuple):
- params = {
- "message_count": params,
- "status": {
- "RUNNING": 0,
- "STOPPED": 1,
- },
- }
-
- if item in parsed:
- data = parsed[item]
- messages = data["messages"]
- max_messages = data["max_messages"]
- channel_status = data["channel_status"]
-
- state = params["status"].get(channel_status, params["status"].get("other", 2))
- yield state, "Channel status: %s" % channel_status, []
-
- infotext = "%d/%d messages" % (messages, max_messages)
- state = 0
- if params["message_count"]:
- warn, crit = params["message_count"]
- if messages >= crit:
- state = 2
- elif messages >= warn:
- state = 1
- if state > 0:
- infotext += " (warn crit at %d/%d messages)" % (warn, crit)
- else:
- warn, crit = None, None
-
- yield state, infotext, [("messages", messages, warn, crit, 0, max_messages)]
-
- if params.get("message_count_perc") and max_messages > 0:
- warn, crit = params["message_count_perc"]
- messages_perc = 1.0 * messages / max_messages
- infotext = render.percent(messages_perc)
- state = 0
-
- if messages_perc >= crit:
- state = 2
- elif messages_perc >= warn:
- state = 1
- if state > 0:
- infotext += " (warn/crit at {}/{})".format(
- render.percent(warn),
- render.percent(crit),
- )
-
- yield state, infotext
-
-
-check_info["websphere_mq_channels"] = LegacyCheckDefinition(
- parse_function=parse_websphere_mq_channels,
- service_name="MQ Channel %s",
- discovery_function=inventory_websphere_mq_channels,
- check_function=check_websphere_mq_channels,
- check_ruleset_name="websphere_mq_channels",
- check_default_parameters={
- "message_count": (900, 1000),
- "status": {"RUNNING": 0, "STOPPED": 1},
- },
-)
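
The websphere_mq_* files are deleted outright and this diff shows no replacement, so any migration to the new plugin API is an assumption. For orientation only, a hypothetical skeleton of what a `cmk.agent_based.v2` port of the discovery/check pair could look like; names and structure are illustrative, not the actual replacement:

```python
# Hypothetical sketch, not the real replacement plugin.
from cmk.agent_based.v2 import CheckPlugin, CheckResult, DiscoveryResult, Result, Service, State

def discover_mq_channels(section: dict) -> DiscoveryResult:
    for channel_name in section:
        yield Service(item=channel_name)

def check_mq_channels(item: str, section: dict) -> CheckResult:
    if (data := section.get(item)) is None:
        return
    yield Result(state=State.OK, summary=f"Channel status: {data['channel_status']}")

check_plugin_websphere_mq_channels = CheckPlugin(
    name="websphere_mq_channels",
    service_name="MQ Channel %s",
    discovery_function=discover_mq_channels,
    check_function=check_mq_channels,
)
```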
diff --git a/cmk/base/legacy_checks/websphere_mq_instance.py b/cmk/base/legacy_checks/websphere_mq_instance.py
deleted file mode 100644
index d5392870be9..00000000000
--- a/cmk/base/legacy_checks/websphere_mq_instance.py
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# <<<websphere_mq_instance>>>
-# QMNAME(QMIMIQ11) STATUS(Running)
-# INSTANCE(iasv0001) MODE(Active)
-# INSTANCE(tasv0397) MODE(Standby)
-# MQv8.0.0.5,p800-005-160516.2,64-bit;
-# QMNAME(QMIMIQ11) STATUS(Running) DEFAULT(no) STANDBY(Permitted) INSTNAME(Installation1) INSTPATH(/usr/mqm) INSTVER(8.0.0.5)$
-
-# <<<websphere_mq_instance>>>
-# QMNAME(QMTEMQS02A) STATUS(Ended immediately)
-# QMNAME(QMTEMQS02) STATUS(Running)
-# INSTANCE(tasv0065) MODE(Active)
-# MQv8.0.0.4,p800-004-151017,64-bit;
-# QMNAME(QMTEMQS02A) STATUS(Ended immediately) DEFAULT(no) STANDBY(Not applicable) INSTNAME(Installation1) INSTPATH(/usr/mqm) INSTVER(8.0.0.4)
-# QMNAME(QMTEMQS02) STATUS(Running) DEFAULT(yes) STANDBY(Not permitted) INSTNAME(Installation1) INSTPATH(/usr/mqm) INSTVER(8.0.0.4)
-
-# <<<websphere_mq_instance>>>
-# QMNAME(QMIMIQ11) STATUS(Running as standby)
-# INSTANCE(iasv0001) MODE(Active)
-# INSTANCE(tasv0397) MODE(Standby)
-# MQv8.0.0.5,p800-005-160516.2,64-bit;
-# QMNAME(QMIMIQ11) STATUS(Running as standby) DEFAULT(yes) STANDBY(Permitted) INSTNAME(Installation1) INSTPATH(/usr/mqm) INSTVER(8.0.0.5)
-
-# .--helpers-------------------------------------------------------------.
-# | _ _ |
-# | | |__ ___| |_ __ ___ _ __ ___ |
-# | | '_ \ / _ \ | '_ \ / _ \ '__/ __| |
-# | | | | | __/ | |_) | __/ | \__ \ |
-# | |_| |_|\___|_| .__/ \___|_| |___/ |
-# | |_| |
-# '----------------------------------------------------------------------'
-
-
-# mypy: disable-error-code="var-annotated"
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-
-def get_websphere_mq_status(what, status, params):
- state, statekey = {
- "manager": {
- "Starting": (0, "starting"),
- "Running": (0, "running"),
- "Running as standby": (0, "running_as_standby"),
- "Running elsewhere": (0, "running_elsewhere"),
- "Quiescing": (0, "quiescing"),
- "Ending immediately": (0, "ending_immediately"),
- "Ending pre-emptively": (0, "ending_pre_emptively"),
- "Ended normally": (0, "ended_normally"),
- "Ended immediately": (0, "ended_immediately"),
- "Ended unexpectedly": (2, "ended_unexpectedly"),
- "Ended pre-emptively": (1, "ended_pre_emptively"),
- "Status not available": (0, "status_not_available"),
- },
- "standby": {
- "Permitted": (0, "permitted"),
- "Not permitted": (0, "not_permitted"),
- "Not applicable": (0, "not_applicable"),
- },
- "instance": {
- "Active": (0, "active"),
- "Standby": (0, "standby"),
- },
- }[what].get(status, (3, "unknown"))
-
- if statekey in dict(params.get("map_%s_states" % what, [])):
- return dict(params["map_%s_states" % what])[statekey]
- return state
-
-
-def parse_websphere_mq_instance(string_table):
- def get_data_of_line(line):
- data = {}
- for elem in line:
- if "(" in elem:
- key, exp = elem.split("(", 1)
- data.setdefault(key.strip(), exp.strip())
- return data
-
- parsed = {"manager": {}, "instances": {}}
- for line in string_table:
- data = get_data_of_line(line)
- if data:
- if "QMNAME" in data:
- this_qm_name = data["QMNAME"]
- parsed["manager"].setdefault(this_qm_name, {})
- parsed["manager"][this_qm_name].update(data)
-
- elif "INSTANCE" in data:
- this_inst_name = data["INSTANCE"]
- parsed["instances"].setdefault(this_inst_name, {})
- parsed["instances"][this_inst_name].update(data)
- if this_qm_name is not None:
- parsed["instances"][this_inst_name].setdefault("QMNAME", this_qm_name)
-
- return parsed
-
-
-# .
-# .--instances-----------------------------------------------------------.
-# | _ _ |
-# | (_)_ __ ___| |_ __ _ _ __ ___ ___ ___ |
-# | | | '_ \/ __| __/ _` | '_ \ / __/ _ \/ __| |
-# | | | | | \__ \ || (_| | | | | (_| __/\__ \ |
-# | |_|_| |_|___/\__\__,_|_| |_|\___\___||___/ |
-# | |
-# '----------------------------------------------------------------------'
-
-
-def inventory_websphere_mq_instance(parsed):
- return [(item, {}) for item in parsed["instances"]]
-
-
-def check_websphere_mq_instance(item, params, parsed):
- if item in parsed["instances"]:
- data = parsed["instances"][item]
- mode = data["MODE"]
- qm_name = data["QMNAME"]
- return get_websphere_mq_status("instance", mode, params), "Status: {}, Manager: {}".format(
- mode.lower(),
- qm_name,
- )
- return None
-
-
-check_info["websphere_mq_instance"] = LegacyCheckDefinition(
- parse_function=parse_websphere_mq_instance,
- service_name="MQ Instance %s",
- discovery_function=inventory_websphere_mq_instance,
- check_function=check_websphere_mq_instance,
- check_ruleset_name="websphere_mq_instance",
-)
-
-# .
-# .--manager-------------------------------------------------------------.
-# | |
-# | _ __ ___ __ _ _ __ __ _ __ _ ___ _ __ |
-# | | '_ ` _ \ / _` | '_ \ / _` |/ _` |/ _ \ '__| |
-# | | | | | | | (_| | | | | (_| | (_| | __/ | |
-# | |_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|_| |
-# | |___/ |
-# '----------------------------------------------------------------------'
-
-
-def inventory_websphere_mq_manager(parsed):
- for item in parsed["manager"]:
- yield item, {}
-
-
-def check_websphere_mq_manager(item, params, parsed): # pylint: disable=too-many-branches
- if item in parsed["manager"]:
- data = parsed["manager"][item]
- status = data["STATUS"]
- standby = data.get("STANDBY", "")
- installation_name = data.get("INSTNAME")
- installation_path = data.get("INSTPATH")
- installation_version = data.get("INSTVER")
-
- instances_modes = []
- for _instance, instance_info in parsed["instances"].items():
- if instance_info["QMNAME"] == item:
- instances_modes.append(instance_info["MODE"])
-
- yield get_websphere_mq_status("manager", status, params), "Status: %s" % status.lower()
-
- standby_info = standby.lower()
- if standby.startswith("Not"):
- if len(instances_modes) == 1:
- state = 0
- standby_info += " (standalone)"
- else:
- state = 1
- standby_info += " (standalone but %d instances)" % len(instances_modes)
- elif standby == "Permitted":
- if instances_modes in [["Active", "Standby"], ["Standby", "Active"]]:
- state = 0
- else:
- state = 1
- standby_info += " (Missing partner)"
- else:
- state = 1
- standby_info += " (unknown)"
-
- if "map_instance_states" in params:
- state = get_websphere_mq_status("standby", standby, params)
-
- yield state, "Standby: %s" % standby_info.strip()
- yield 0, "Default: %s" % data["DEFAULT"]
-
- for what, title in [
- (installation_name, "Name"),
- (installation_path, "Path"),
- (installation_version, "Version"),
- ]:
- if what:
- yield 0, f"{title}: {what}"
-
-
-check_info["websphere_mq_instance.manager"] = LegacyCheckDefinition(
- service_name="MQ Manager %s",
- sections=["websphere_mq_instance"],
- discovery_function=inventory_websphere_mq_manager,
- check_function=check_websphere_mq_manager,
- check_ruleset_name="websphere_mq_manager",
-)
diff --git a/cmk/base/legacy_checks/websphere_mq_queues.py b/cmk/base/legacy_checks/websphere_mq_queues.py
deleted file mode 100644
index 588ed03d4d4..00000000000
--- a/cmk/base/legacy_checks/websphere_mq_queues.py
+++ /dev/null
@@ -1,161 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# <<<websphere_mq_queues>>>
-# 0 CD.ISS.CATSOS.REPLY.C000052 5000
-# 0 CD.ISS.COBA.REPLY.C000052 5000
-# 0 CD.ISS.DEUBA.REPLY.C000052 5000
-# 0 CD.ISS.TIQS.REPLY.C000052 5000
-# 0 CD.ISS.VWD.REPLY.C000052 5000
-
-# Old output
-# <<<websphere_mq_queues>>>
-# 0 CD.ISS.CATSOS.REPLY.C000052
-# 0 CD.ISS.COBA.REPLY.C000052
-# 0 CD.ISS.DEUBA.REPLY.C000052
-# 0 CD.ISS.TIQS.REPLY.C000052
-# 0 CD.ISS.VWD.REPLY.C000052
-
-# Very new output
-# <<<websphere_mq_queues>>>
-# 0 BRK.REPLY.CONVERTQ 2016_04_08-15_31_43
-# 0 BRK.REPLY.CONVERTQ 5000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 BRK.REPLY.FAILUREQ 5000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 BRK.REPLY.INQ 5000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 BRK.REPLY.OUTQ 5000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 BRK.REPLYQ.IMS.MILES 5000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 BRK.REPLYQ.MILES 5000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 BRK.REQUEST.FAILUREQ 5000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 BRK.REQUEST.INQ 5000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 BRK.REQUESTQ.MILES 5000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 DEAD.QUEUE.IGNORE 100000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-# 0 DEAD.QUEUE.SECURITY 100000 CURDEPTH(0)LGETDATE()LGETTIME() 2016_04_08-15_31_43
-
-
-# mypy: disable-error-code="var-annotated"
-
-import time
-
-from cmk.base.check_api import check_levels, get_age_human_readable, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import IgnoreResultsError, render
-
-websphere_mq_queues_default_levels = {
- "message_count": (1000, 1200),
- "message_count_perc": (80.0, 90.0),
-}
-
-
-def parse_websphere_mq_queues(string_table):
- parsed = {}
- for line in string_table:
- if len(line) < 2:
- continue
-
- try:
- cur_depth = int(line[0])
- except ValueError:
- continue
-
- inst = parsed.setdefault(line[1], {})
- inst.setdefault("cur_depth", cur_depth)
-
- if len(line) >= 3:
- if line[2].isdigit():
- inst.setdefault("max_depth", int(line[2]))
-
- if len(line) > 3:
- for what in "".join(line[3:-1]).replace(" ", "").split(")"):
- if "(" in what:
- key, val = what.split("(")
- inst.setdefault(key, val)
-
- try:
- inst.setdefault(
- "time_on_client", time.mktime(time.strptime(line[-1], "%Y_%m_%d-%H_%M_%S"))
- )
- except ValueError:
- pass
-
- return parsed
-
-
-def inventory_websphere_mq_queues(parsed):
- return [(queue_name, websphere_mq_queues_default_levels) for queue_name in parsed]
-
-
-def check_websphere_mq_queues(item, params, parsed):
- data = parsed.get(item)
- if data is None:
- raise IgnoreResultsError("Login into database failed")
-
- if isinstance(params, tuple):
- params = {
- "message_count": params,
- "message_count_perc": websphere_mq_queues_default_levels["message_count_perc"],
- }
-
- cur_depth = data["cur_depth"]
- yield check_levels(
- cur_depth,
- "queue",
- params.get("message_count", (None, None)),
- human_readable_func=lambda x: "%d" % x,
- infoname="Messages in queue",
- )
-
- max_depth = data.get("max_depth")
- if max_depth:
- # Just for ordering:
- # 1. message count
- # 2. message count percent
- used_perc = float(cur_depth) / max_depth * 100
- yield check_levels(
- used_perc,
- None,
- params.get("message_count_perc", (None, None)),
- human_readable_func=render.percent,
- infoname="Of max. %d messages" % max_depth,
- )
-
- if data.get("time_on_client") and "LGETDATE" in data and "LGETTIME" in data:
- lgetdate = data["LGETDATE"]
- lgettime = data["LGETTIME"]
-
- params = params.get("messages_not_processed", {})
-
- if cur_depth and lgetdate and lgettime:
- time_str = f"{lgetdate} {lgettime}"
- time_diff = data["time_on_client"] - time.mktime(
- time.strptime(time_str, "%Y-%m-%d %H.%M.%S")
- )
-
- diff_state, diff_info, _diff_perf = check_levels(
- time_diff,
- None,
- params.get("age", (None, None)),
- human_readable_func=get_age_human_readable,
- )
-
- yield diff_state, "Messages not processed since %s" % diff_info
-
- elif cur_depth:
- yield params.get("state", 0), "No age of %d message%s not processed" % (
- cur_depth,
- cur_depth > 1 and "s" or "",
- )
-
- else:
- yield 0, "Messages processed"
-
-
-check_info["websphere_mq_queues"] = LegacyCheckDefinition(
- parse_function=parse_websphere_mq_queues,
- service_name="MQ Queue %s",
- discovery_function=inventory_websphere_mq_queues,
- check_function=check_websphere_mq_queues,
- check_ruleset_name="websphere_mq",
-)
diff --git a/cmk/base/legacy_checks/win_license.py b/cmk/base/legacy_checks/win_license.py
index 70271859403..afbd64a1359 100644
--- a/cmk/base/legacy_checks/win_license.py
+++ b/cmk/base/legacy_checks/win_license.py
@@ -12,9 +12,11 @@
# Time remaining: 11820 minute(s) (8 day(s))
-from cmk.base.check_api import get_age_human_readable, LegacyCheckDefinition, regex
+from cmk.base.check_api import check_levels, LegacyCheckDefinition, regex
from cmk.base.config import check_info
+from cmk.agent_based.v2 import render
+
def parse_win_license(string_table):
parsed: dict[str, str | int] = {}
@@ -45,9 +47,7 @@ def inventory_win_license(parsed):
def check_win_license(_item, params, parsed):
- sw_license = parsed.get("License", None)
-
- if not sw_license:
+ if (sw_license := parsed.get("License")) is None:
return
message = "Software is %s" % sw_license
@@ -59,26 +59,20 @@ def check_win_license(_item, params, parsed):
yield license_state, message
- time_left = parsed.get("expiration_time", None)
-
- if not time_left:
+ if (time_left := parsed.get("expiration_time")) is None:
return
- time_message = "License will expire in %s" % get_age_human_readable(time_left)
-
- warn, crit = params["expiration_time"]
-
- time_state = 0
-
- if time_left < crit:
- time_state = 2
- elif time_left < warn:
- time_state = 1
-
- if time_state:
- time_message += " (warn/crit at %s/%s)" % tuple(map(get_age_human_readable, (warn, crit)))
+ if time_left < 0:
+ yield 2, f"Licence expired {render.timespan(-time_left)} ago"
+ return
- yield time_state, time_message
+ yield check_levels(
+ time_left,
+ None,
+ (None, None) + params["expiration_time"],
+ human_readable_func=render.timespan,
+ infoname="Time until license expires",
+ )
check_info["win_license"] = LegacyCheckDefinition(
diff --git a/cmk/base/legacy_checks/win_printers.py b/cmk/base/legacy_checks/win_printers.py
index 3c83e8eaefe..4c5b3e5985a 100644
--- a/cmk/base/legacy_checks/win_printers.py
+++ b/cmk/base/legacy_checks/win_printers.py
@@ -16,7 +16,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
class PrinterQueue(NamedTuple):
diff --git a/cmk/base/legacy_checks/windows_broadcom_bonding.py b/cmk/base/legacy_checks/windows_broadcom_bonding.py
index 5488100a9c0..f669c7b8d2b 100644
--- a/cmk/base/legacy_checks/windows_broadcom_bonding.py
+++ b/cmk/base/legacy_checks/windows_broadcom_bonding.py
@@ -13,7 +13,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_windows_broadcom_bonding(info):
diff --git a/cmk/base/legacy_checks/windows_multipath.py b/cmk/base/legacy_checks/windows_multipath.py
index 983972020a4..2fdb0116309 100644
--- a/cmk/base/legacy_checks/windows_multipath.py
+++ b/cmk/base/legacy_checks/windows_multipath.py
@@ -12,7 +12,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_windows_multipath(info):
diff --git a/cmk/base/legacy_checks/windows_tasks.py b/cmk/base/legacy_checks/windows_tasks.py
deleted file mode 100644
index fc0b287671d..00000000000
--- a/cmk/base/legacy_checks/windows_tasks.py
+++ /dev/null
@@ -1,219 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# Example output from agent:
-# <<<windows_tasks>>>
-# TaskName : \WebShopPictureUpload
-# Last Run Time : 17.10.2013 23:00:00
-# Next Run Time : 18.10.2013 23:00:00
-# Last Result : 0
-# Scheduled Task State : Enabled
-#
-# TaskName : \OfficeSoftwareProtectionPlatform\SvcRestartTask
-# Last Run Time : N/A
-# Next Run Time : Disabled
-# Last Result : 1
-# Scheduled Task State : Disabled
-
-# A list of all task state can be found here:
-# http://msdn.microsoft.com/en-us/library/aa383604%28VS.85%29.aspx
-
-
-# mypy: disable-error-code="var-annotated"
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-
-def parse_windows_tasks(string_table):
- data = {}
- last_task: bool | str = False
- for line in string_table:
- name = line[0].strip()
- value = ":".join(line[1:]).strip()
- if value and last_task and name != "TaskName":
- data[last_task][name] = value
-
- elif name == "TaskName":
- last_task = value
- data[last_task] = {}
-
- # this is a line continuation from TaskName
- elif not value and not data[last_task]:
- data.pop(last_task)
- last_task += " " + name
- data[last_task] = {}
-
- return data
-
-
-def inventory_windows_tasks(parsed):
- return [(n, None) for n, v in parsed.items() if v.get("Scheduled Task State") != "Disabled"]
-
-
-# Code duplication with cmk/gui/plugins/wato/check_parameters/windows_tasks.py
-# because of base/gui import restrictions
-# This is protected via unit test
-_MAP_EXIT_CODES = {
- "0x00000000": (0, "The task exited successfully"),
- "0x00041300": (0, "The task is ready to run at its next scheduled time."),
- "0x00041301": (0, "The task is currently running."),
- "0x00041302": (0, "The task will not run at the scheduled times because it has been disabled."),
- "0x00041303": (0, "The task has not yet run."),
- "0x00041304": (0, "There are no more runs scheduled for this task."),
- "0x00041305": (
- 1,
- "One or more of the properties that are needed to run this task on a schedule have not been set.",
- ),
- "0x00041306": (0, "The last run of the task was terminated by the user."),
- "0x00041307": (
- 1,
- "Either the task has no triggers or the existing triggers are disabled or not set.",
- ),
- "0x00041308": (1, "Event triggers do not have set run times."),
- "0x80041309": (1, "A task's trigger is not found."),
- "0x8004130a": (1, "One or more of the properties required to run this task have not been set."),
- "0x8004130b": (0, "There is no running instance of the task."),
- "0x8004130c": (2, "The Task Scheduler service is not installed on this computer."),
- "0x8004130d": (1, "The task object could not be opened."),
- "0x8004130e": (1, "The object is either an invalid task object or is not a task object."),
- "0x8004130f": (
- 1,
- "No account information could be found in the Task Scheduler security database for the task indicated.",
- ),
- "0x80041310": (1, "Unable to establish existence of the account specified."),
- "0x80041311": (
- 2,
- "Corruption was detected in the Task Scheduler security database; the database has been reset.",
- ),
- "0x80041312": (1, "Task Scheduler security services are available only on Windows NT."),
- "0x80041313": (1, "The task object version is either unsupported or invalid."),
- "0x80041314": (
- 1,
- "The task has been configured with an unsupported combination of account settings and run time options.",
- ),
- "0x80041315": (1, "The Task Scheduler Service is not running."),
- "0x80041316": (1, "The task XML contains an unexpected node."),
- "0x80041317": (
- 1,
- "The task XML contains an element or attribute from an unexpected namespace.",
- ),
- "0x80041318": (
- 1,
- "The task XML contains a value which is incorrectly formatted or out of range.",
- ),
- "0x80041319": (1, "The task XML is missing a required element or attribute."),
- "0x8004131a": (1, "The task XML is malformed."),
- "0x0004131b": (
- 1,
- "The task is registered, but not all specified triggers will start the task.",
- ),
- "0x0004131c": (
- 1,
- "The task is registered, but may fail to start. Batch logon privilege needs to be enabled for the task principal.",
- ),
- "0x8004131d": (1, "The task XML contains too many nodes of the same type."),
- "0x8004131e": (1, "The task cannot be started after the trigger end boundary."),
- "0x8004131f": (0, "An instance of this task is already running."),
- "0x80041320": (1, "The task will not run because the user is not logged on."),
- "0x80041321": (1, "The task image is corrupt or has been tampered with."),
- "0x80041322": (1, "The Task Scheduler service is not available."),
- "0x80041323": (
- 1,
- "The Task Scheduler service is too busy to handle your request. Please try again later.",
- ),
- "0x80041324": (
- 1,
- "The Task Scheduler service attempted to run the task, but the task did not run due to one of the constraints in the task definition.",
- ),
- "0x00041325": (0, "The Task Scheduler service has asked the task to run."),
- "0x80041326": (0, "The task is disabled."),
- "0x80041327": (
- 1,
- "The task has properties that are not compatible with earlier versions of Windows.",
- ),
- "0x80041328": (1, "The task settings do not allow the task to start on demand."),
-}
-
-
-def check_windows_tasks(item, params, parsed):
- if item not in parsed:
- yield 3, "Task not found on server"
- return
-
- state_not_enabled = params.get("state_not_enabled", 1)
-
- custom_map_exit_codes = {
- exit_code: (
- user_defined_mapping["monitoring_state"],
- user_defined_mapping.get(
- "info_text",
- # in case info_text was not specified, we use the default one if available
- _MAP_EXIT_CODES.get(exit_code, (None, None))[1],
- ),
- )
- for user_defined_mapping in params.get("exit_code_to_state", [])
- for exit_code in [user_defined_mapping["exit_code"]]
- }
- map_exit_codes = {
- **_MAP_EXIT_CODES,
- **custom_map_exit_codes,
- }
-
- data = parsed[item]
- last_result = data["Last Result"]
-
- # schtasks.exe (used by the check plugin) returns a signed integer
- # e.g. -2147024629. However, error codes are unsigned integers.
- # To make it easier for the user to lookup the error code (e.g. on
- # MSDN) we convert the negative numbers to the hexadecimal
- # representation.
- last_result_unsigned = int(last_result) & 0xFFFFFFFF
- last_result_hex = f"{last_result_unsigned:#010x}" # padding with zeros
-
- state, state_txt = map_exit_codes.get(
- last_result_hex,
- (2, None),
- )
- yield (
- state,
- f"{state_txt} ({last_result_hex})" if state_txt else f"Got exit code {last_result_hex}",
- )
-
- if data.get("Scheduled Task State", None) != "Enabled":
- yield state_not_enabled, "Task not enabled"
-
- additional_infos = []
- for key, title in [
- ("Last Run Time", "Last run time"),
- ("Next Run Time", "Next run time"),
- ]:
- if key in data:
- additional_infos.append(f"{title}: {data[key]}")
-
- if additional_infos:
- yield 0, ", ".join(additional_infos)
-
-
-check_info["windows_tasks"] = LegacyCheckDefinition(
- parse_function=parse_windows_tasks,
- service_name="Task %s",
- discovery_function=inventory_windows_tasks,
- check_function=check_windows_tasks,
- check_ruleset_name="windows_tasks",
- check_default_parameters={
- # This list is overruled by a ruleset, if configured.
- # The defaults are brought back individually below.
- # Put them here anyway to make them available in the checks man page.
- "exit_code_to_state": [
- {
- "exit_code": key,
- "monitoring_state": state,
- "info_text": text,
- }
- for key, (state, text) in _MAP_EXIT_CODES.items()
- ],
- },
-)
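
The deleted windows_tasks check contains the one subtle bit in this batch of removals: schtasks.exe reports a signed 32-bit exit code, while MSDN documents the codes as unsigned hex. Worked example of the conversion from the removed code:

```python
# Worked example of the signed-to-unsigned conversion from the removed check.
last_result = "-2147024629"                            # as printed by schtasks.exe
last_result_unsigned = int(last_result) & 0xFFFFFFFF   # two's-complement view
last_result_hex = f"{last_result_unsigned:#010x}"      # zero-padded hex
assert last_result_hex == "0x8007010b"                 # the documented form
```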
diff --git a/cmk/base/legacy_checks/winperf.py b/cmk/base/legacy_checks/winperf.py
index 8c1775f199c..419a45a9b9f 100644
--- a/cmk/base/legacy_checks/winperf.py
+++ b/cmk/base/legacy_checks/winperf.py
@@ -7,8 +7,7 @@
from cmk.base.check_api import check_levels, LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store, IgnoreResultsError, render
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, IgnoreResultsError, render, StringTable
def inventory_win_cpuusage(info):
diff --git a/cmk/base/legacy_checks/winperf_mem.py b/cmk/base/legacy_checks/winperf_mem.py
index bee21c3b115..831a91a1127 100644
--- a/cmk/base/legacy_checks/winperf_mem.py
+++ b/cmk/base/legacy_checks/winperf_mem.py
@@ -43,8 +43,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, StringTable
def inventory_winperf_mem(info):
diff --git a/cmk/base/legacy_checks/winperf_ts_sessions.py b/cmk/base/legacy_checks/winperf_ts_sessions.py
index 1f51a192923..3cea686291b 100644
--- a/cmk/base/legacy_checks/winperf_ts_sessions.py
+++ b/cmk/base/legacy_checks/winperf_ts_sessions.py
@@ -19,7 +19,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import StringTable
def inventory_winperf_ts_sessions(info):
diff --git a/cmk/base/legacy_checks/wmi_webservices.py b/cmk/base/legacy_checks/wmi_webservices.py
index df5fd656fb2..e8a8d1d630c 100644
--- a/cmk/base/legacy_checks/wmi_webservices.py
+++ b/cmk/base/legacy_checks/wmi_webservices.py
@@ -8,13 +8,13 @@
from cmk.base.check_legacy_includes.wmi import (
inventory_wmi_table_instances,
parse_wmi_table,
- wmi_yield_raw_persec,
+ wmi_yield_raw_counter,
)
from cmk.base.config import check_info
def check_wmi_webservices(item, params, parsed):
- yield from wmi_yield_raw_persec(
+ yield from wmi_yield_raw_counter(
parsed[""], item, "CurrentConnections", infoname="Connections", perfvar="connections"
)
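
Switching from `wmi_yield_raw_persec` to `wmi_yield_raw_counter` changes semantics, not just naming: `CurrentConnections` is a point-in-time gauge, so dividing it by elapsed time produced a meaningless per-second figure. A generic illustration of the distinction (not the actual wmi_* helpers):

```python
# Generic illustration: rates belong to monotonically increasing counters;
# gauges like "current connections" are reported as-is.
def per_second(last: tuple[float, float], now: tuple[float, float]) -> float:
    (t0, v0), (t1, v1) = last, now
    return (v1 - v0) / (t1 - t0)  # sensible for e.g. a total-requests counter

current_connections = 42  # gauge: use the raw value directly
```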
diff --git a/cmk/base/legacy_checks/wmic_process.py b/cmk/base/legacy_checks/wmic_process.py
index f5bb84f91d1..da4f46a47fe 100644
--- a/cmk/base/legacy_checks/wmic_process.py
+++ b/cmk/base/legacy_checks/wmic_process.py
@@ -9,8 +9,7 @@
from cmk.base.check_api import LegacyCheckDefinition
from cmk.base.config import check_info
-from cmk.agent_based.v2 import get_rate, get_value_store
-from cmk.agent_based.v2.type_defs import StringTable
+from cmk.agent_based.v2 import get_rate, get_value_store, StringTable
def check_wmic_process(item, params, info):
diff --git a/cmk/base/legacy_checks/zebra_model.py b/cmk/base/legacy_checks/zebra_model.py
deleted file mode 100644
index 22380a0ed05..00000000000
--- a/cmk/base/legacy_checks/zebra_model.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from collections.abc import Sequence
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def inventory_zebra_model(info):
- if info[0] != [] or info[1] != [] or info[2] != []:
- return [(None, {})]
- return []
-
-
-def check_zebra_model(_no_item, _no_params, info):
- model, serial, release = None, None, None
-
- if info[0]:
- model, serial, release, serial_maybe = info[0][0]
- if not serial:
- serial = serial_maybe
-
- if not model:
- model = info[2][0][0]
-
- if not release:
- release = info[1][0][0]
-
- yield 0, "Zebra model: %s" % model
-
- if serial:
- yield 0, "Serial number: %s" % serial
-
- if release:
- yield 0, "Firmware release: %s" % release
-
-
-def parse_zebra_model(string_table: Sequence[StringTable]) -> Sequence[StringTable]:
- return string_table
-
-
-check_info["zebra_model"] = LegacyCheckDefinition(
- parse_function=parse_zebra_model,
- detect=contains(".1.3.6.1.2.1.1.1.0", "zebra"),
- fetch=[
- SNMPTree(
- base=".1.3.6.1.4.1.10642",
- oids=["1.1.0", "200.19.5.0", "1.2.0", "1.9.0"],
- ),
- SNMPTree(
- base=".1.3.6.1.4.1.683.1.9",
- oids=["0"],
- ),
- SNMPTree(
- base=".1.3.6.1.4.1.683.6.2.3.2.1.15",
- oids=["1"],
- ),
- ],
- service_name="Zebra Printer Model",
- discovery_function=inventory_zebra_model,
- check_function=check_zebra_model,
-)
diff --git a/cmk/base/legacy_checks/zebra_printer_status.py b/cmk/base/legacy_checks/zebra_printer_status.py
deleted file mode 100644
index c71abf7d3e7..00000000000
--- a/cmk/base/legacy_checks/zebra_printer_status.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-
-from cmk.base.check_api import LegacyCheckDefinition, saveint
-from cmk.base.config import check_info
-
-from cmk.agent_based.v2 import contains, SNMPTree
-from cmk.agent_based.v2.type_defs import StringTable
-
-
-def inventory_zebra_printer_status(info):
- if info[0][0]:
- return [(None, {})]
- return []
-
-
-def check_zebra_printer_status(item, params, info):
- zebra_status = saveint(info[0][0])
-
- if zebra_status == 3:
- return 0, "Printer is online and ready for the next print job"
- if zebra_status == 4:
- return 0, "Printer is printing"
- if zebra_status == 5:
- return 0, "Printer is warming up"
- if zebra_status == 1:
- return 2, "Printer is offline"
- return 3, "Unknown printer status"
-
-
-def parse_zebra_printer_status(string_table: StringTable) -> StringTable | None:
- return string_table or None
-
-
-check_info["zebra_printer_status"] = LegacyCheckDefinition(
- parse_function=parse_zebra_printer_status,
- detect=contains(".1.3.6.1.2.1.1.1.0", "zebra"),
- fetch=SNMPTree(
- base=".1.3.6.1.2.1.25.3.5.1.1",
- oids=["1"],
- ),
- service_name="Zebra Printer Status",
- discovery_function=inventory_zebra_printer_status,
- check_function=check_zebra_printer_status,
-)
diff --git a/cmk/base/legacy_checks/zerto_vpg_rpo.py b/cmk/base/legacy_checks/zerto_vpg_rpo.py
deleted file mode 100644
index 97f5a25eec2..00000000000
--- a/cmk/base/legacy_checks/zerto_vpg_rpo.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-# 2019-01-07, comNET GmbH, Fabian Binder
-
-
-# mypy: disable-error-code="var-annotated"
-
-from cmk.base.check_api import LegacyCheckDefinition
-from cmk.base.config import check_info
-
-MAP_RPO_STATES = {
- "0": (1, "VPG is initializing"),
- "1": (0, "Meeting SLA specification"),
- "2": (2, "Not meeting SLA specification for RPO SLA and journal history"),
- "3": (2, "Not meeting SLA specification for RPO SLA"),
- "4": (2, "Not meeting SLA specification for journal history"),
- "5": (1, "VPG is in a failover operation"),
- "6": (1, "VPG is in a move operation"),
- "7": (1, "VPG is being deleted"),
- "8": (1, "VPG has been recovered"),
-}
-
-
-def parse_zerto_vpg(string_table):
- parsed = {}
- for line in string_table:
- if len(line) < 3:
- continue
- vpgname = line[0]
- vpg = parsed.setdefault(vpgname, {})
- vpg["state"] = line[1]
- vpg["actual_rpo"] = line[2]
- return parsed
-
-
-def check_zerto_vpg_rpo(item, _params, parsed):
- if not (data := parsed.get(item)):
- return
- state, vpg_info = MAP_RPO_STATES.get(data.get("state"), (3, "Unknown"))
- yield state, "VPG Status: %s" % vpg_info
-
-
-def discover_zerto_vpg_rpo(section):
- yield from ((item, {}) for item in section)
-
-
-check_info["zerto_vpg_rpo"] = LegacyCheckDefinition(
- parse_function=parse_zerto_vpg,
- service_name="Zerto VPG RPO %s",
- discovery_function=discover_zerto_vpg_rpo,
- check_function=check_zerto_vpg_rpo,
- check_ruleset_name="zerto_vpg_rpo",
-)
diff --git a/cmk/base/legacy_checks/zorp_connections.py b/cmk/base/legacy_checks/zorp_connections.py
deleted file mode 100644
index eb55be7f69a..00000000000
--- a/cmk/base/legacy_checks/zorp_connections.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-"""Zorp FW - connections
-This check displays individual connections returned by
- zorpctl szig -r zorp.stats.active_connections
-It sums up all connections and checks against configurable maximum values.
-"""
-
-
-from cmk.base.check_api import check_levels, LegacyCheckDefinition
-from cmk.base.config import check_info
-
-
-def parse_zorp_connections(string_table):
- """Creates dict name -> connections
- from string_table =
- [["Instance :", "walking"], ["zorp.stats.active_connections:", ""],
- ["Instance :", "walking"], ["zorp.stats.active_connections:", ""],
- ...]
- """
- return {
- instance[1].rstrip(":"): int(state[1]) if state[1] != "None" else 0
- for instance, state in zip(string_table[::2], string_table[1::2])
- }
-
-
-def check_zorp_connections(item, params, parsed):
- """List number of connections for each connection type and check against
- total number of connections"""
- if not parsed:
- return
-
- yield from ((0, "%s: %d" % elem) for elem in parsed.items())
-
- yield check_levels(
- sum(parsed.values()),
- "connections",
- params.get("levels"),
- infoname="Total connections",
- human_readable_func=int,
- )
-
-
-def discover_zorp_connections(parsed):
- return [(None, {})]
-
-
-check_info["zorp_connections"] = LegacyCheckDefinition(
- parse_function=parse_zorp_connections,
- service_name="Zorp Connections",
- discovery_function=discover_zorp_connections,
- check_function=check_zorp_connections,
- check_ruleset_name="zorp_connections",
- check_default_parameters={
- "levels": (15, 20),
- },
-)
diff --git a/cmk/base/modes/__init__.py b/cmk/base/modes/__init__.py
index 7b3ffd9e3f4..b395976a870 100644
--- a/cmk/base/modes/__init__.py
+++ b/cmk/base/modes/__init__.py
@@ -12,9 +12,9 @@
from cmk.utils.hostaddress import HostName, Hosts
from cmk.utils.log import console
from cmk.utils.plugin_loader import import_plugins
+from cmk.utils.rulesets.tuple_rulesets import hosttags_match_taglist
from cmk.utils.tags import TagID
-import cmk.base.config as config
from cmk.base.config import ConfigCache
OptionSpec = str
@@ -165,7 +165,7 @@ def parse_hostname_list(
num_found = 0
for hostname in valid_hosts:
- if config.hosttags_match_taglist(
+ if hosttags_match_taglist(
config_cache.tag_list(hostname), (TagID(_) for _ in tagspec)
):
hostlist.append(hostname)
@@ -403,7 +403,11 @@ def get_sub_options(self, all_opts: Options) -> dict[OptionName, Argument | int
continue
if option.is_deprecated_option(o):
- console.warning("%r is deprecated in favour of option %r", o, option.name())
+ console.warning(
+ console.format_warning(
+ f"{o!r} is deprecated in favour of option {option.name()!r}\n"
+ )
+ )
if a and not option.takes_argument():
raise MKGeneralException("No argument to %s expected." % o)
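
The `console.warning` call above no longer uses printf-style interpolation; the caller formats the complete message and wraps it in `console.format_warning`, which is assumed to add the warning decoration before output. Sketch of the new call shape:

```python
# Sketch of the new call shape; format_warning is assumed to decorate the
# message (prefix/color) rather than interpolate arguments.
o, name = "--old-flag", "--new-flag"  # illustrative option names
message = f"{o!r} is deprecated in favour of option {name!r}\n"
# console.warning(console.format_warning(message))
```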
diff --git a/cmk/base/modes/check_mk.py b/cmk/base/modes/check_mk.py
index 42be3c0e90e..58fd1514b4a 100644
--- a/cmk/base/modes/check_mk.py
+++ b/cmk/base/modes/check_mk.py
@@ -14,15 +14,15 @@
from contextlib import suppress
from functools import partial
from pathlib import Path
-from typing import Final, Literal, NamedTuple, overload, Protocol, TypeVar
-
-from typing_extensions import TypedDict
+from typing import Final, Literal, NamedTuple, overload, Protocol, TypedDict, TypeVar
import livestatus
import cmk.utils.cleanup
+import cmk.utils.config_warnings as config_warnings
import cmk.utils.debug
import cmk.utils.log as log
+import cmk.utils.password_store
import cmk.utils.paths
import cmk.utils.piggyback as piggyback
import cmk.utils.store as store
@@ -31,6 +31,7 @@
from cmk.utils.agentdatatype import AgentRawData
from cmk.utils.auto_queue import AutoQueue
from cmk.utils.check_utils import maincheckify
+from cmk.utils.config_path import LATEST_CONFIG
from cmk.utils.cpu_tracking import CPUTracker, Snapshot
from cmk.utils.diagnostics import (
DiagnosticsModesParameters,
@@ -47,6 +48,7 @@
from cmk.utils.log import console, section
from cmk.utils.resulttype import Result
from cmk.utils.rulesets.ruleset_matcher import RulesetMatcher
+from cmk.utils.rulesets.tuple_rulesets import hosttags_match_taglist
from cmk.utils.sectionname import SectionMap, SectionName
from cmk.utils.structured_data import (
ImmutableTree,
@@ -70,8 +72,10 @@
)
import cmk.fetchers.snmp as snmp_factory
-from cmk.fetchers import FetcherType, get_raw_data
+from cmk.fetchers import get_raw_data
from cmk.fetchers import Mode as FetchMode
+from cmk.fetchers import SNMPScanConfig, TLSConfig
+from cmk.fetchers.config import make_persisted_section_dir
from cmk.fetchers.filecache import FileCacheOptions, MaxAge
import cmk.checkengine.inventory as inventory
@@ -82,7 +86,7 @@
execute_check_discovery,
remove_autochecks_of_host,
)
-from cmk.checkengine.fetcher import FetcherFunction, SourceInfo, SourceType
+from cmk.checkengine.fetcher import FetcherFunction, FetcherType, SourceInfo, SourceType
from cmk.checkengine.inventory import HWSWInventoryParameters, InventoryPlugin, InventoryPluginName
from cmk.checkengine.parser import (
NO_SELECTION,
@@ -118,12 +122,18 @@
InventoryPluginMapper,
SectionPluginMapper,
)
-from cmk.base.config import ConfigCache
+from cmk.base.config import (
+ ConfigCache,
+ ConfiguredIPLookup,
+ handle_ip_lookup_failure,
+ lookup_mgmt_board_ip_address,
+)
from cmk.base.core_factory import create_core, get_licensing_handler_type
from cmk.base.errorhandling import CheckResultErrorHandler, create_section_crash_dump
from cmk.base.modes import keepalive_option, Mode, modes, Option
-from cmk.base.plugins.server_side_calls import load_active_checks
-from cmk.base.sources import make_parser
+from cmk.base.parent_scan import ScanConfig
+from cmk.base.server_side_calls import load_active_checks
+from cmk.base.sources import make_parser, SNMPFetcherConfig
from cmk.agent_based.v1.value_store import set_value_store_manager
from cmk.discover_plugins import discover_families, PluginGroup
@@ -419,7 +429,7 @@ def _list_all_hosts_with_tags(tags: Sequence[TagID]) -> Sequence[HostName]:
hosts = []
for h in set(hostnames):
- if config.hosttags_match_taglist(config_cache.tag_list(h), tags):
+ if hosttags_match_taglist(config_cache.tag_list(h), tags):
hosts.append(h)
return hosts
@@ -464,7 +474,9 @@ def mode_list_checks() -> None:
# active checks using both new and old API have to be collected
all_checks += [
"check_" + name
- for name in itertools.chain(config.active_check_info, load_active_checks()[1])
+ for name in itertools.chain(
+ config.active_check_info, (p.name for p in load_active_checks()[1].values())
+ )
]
for plugin_name in sorted(all_checks, key=str):
@@ -535,17 +547,45 @@ def mode_dump_agent(options: Mapping[str, object], hostname: HostName) -> None:
if hostname in hosts_config.clusters:
raise MKBailOut("Can not be used with cluster hosts")
- ipaddress = config.lookup_ip_address(config_cache, hostname)
+ ip_stack_config = ConfigCache.ip_stack_config(hostname)
+ ipaddress = (
+ None
+ if ip_stack_config is ip_lookup.IPStackConfig.NO_IP
+ else config.lookup_ip_address(config_cache, hostname)
+ )
check_interval = config_cache.check_mk_check_interval(hostname)
+ stored_walk_path = Path(cmk.utils.paths.snmpwalks_dir)
+ walk_cache_path = Path(cmk.utils.paths.var_dir) / "snmp_cache"
+ section_cache_path = Path(cmk.utils.paths.var_dir)
+ file_cache_path = Path(cmk.utils.paths.data_source_cache_dir)
+ tcp_cache_path = Path(cmk.utils.paths.tcp_cache_dir)
+ tls_config = TLSConfig(
+ cas_dir=Path(cmk.utils.paths.agent_cas_dir),
+ ca_store=Path(cmk.utils.paths.agent_cert_store),
+ site_crt=Path(cmk.utils.paths.site_cert_file),
+ )
+ snmp_scan_config = SNMPScanConfig(
+ on_error=OnError.RAISE,
+ missing_sys_description=config_cache.missing_sys_description(hostname),
+ oid_cache_dir=Path(cmk.utils.paths.snmp_scan_cache_dir),
+ )
output = []
# Show errors of problematic data sources
has_errors = False
+ pending_passwords_file = cmk.utils.password_store.pending_password_store_path()
for source in sources.make_sources(
hostname,
ipaddress,
- ConfigCache.address_family(hostname),
- config_cache=config_cache,
+ ip_stack_config,
+ fetcher_factory=config_cache.fetcher_factory(),
+ snmp_fetcher_config=SNMPFetcherConfig(
+ scan_config=snmp_scan_config,
+ selected_sections=NO_SELECTION,
+ backend_override=snmp_backend_override,
+ stored_walk_path=stored_walk_path,
+ walk_cache_path=walk_cache_path,
+ ),
is_cluster=False,
simulation_mode=config.simulation_mode,
file_cache_options=file_cache_options,
@@ -554,7 +594,26 @@ def mode_dump_agent(options: Mapping[str, object], hostname: HostName) -> None:
discovery=1.5 * check_interval,
inventory=1.5 * check_interval,
),
- snmp_backend_override=snmp_backend_override,
+ snmp_backend=config_cache.get_snmp_backend(hostname),
+ file_cache_path=file_cache_path,
+ tcp_cache_path=tcp_cache_path,
+ tls_config=tls_config,
+ computed_datasources=config_cache.computed_datasources(hostname),
+ datasource_programs=config_cache.datasource_programs(hostname),
+ tag_list=config_cache.tag_list(hostname),
+ management_ip=lookup_mgmt_board_ip_address(config_cache, hostname),
+ management_protocol=config_cache.management_protocol(hostname),
+ special_agent_command_lines=config_cache.special_agent_command_lines(
+ hostname,
+ ipaddress,
+ passwords=cmk.utils.password_store.load(pending_passwords_file),
+ password_store_file=pending_passwords_file,
+ ip_address_of=ConfiguredIPLookup(
+ config_cache, error_handler=handle_ip_lookup_failure
+ ),
+ ),
+ agent_connection_mode=config_cache.agent_connection_mode(hostname),
+ check_mk_check_interval=config_cache.check_mk_check_interval(hostname),
):
source_info = source.source_info()
if source_info.fetcher_type is FetcherType.SNMP:
@@ -570,11 +629,18 @@ def mode_dump_agent(options: Mapping[str, object], hostname: HostName) -> None:
)
host_sections = parse_raw_data(
make_parser(
- config_cache,
- source_info,
+ config_cache.parser_factory(),
+ source_info.hostname,
+ source_info.fetcher_type,
checking_sections=config_cache.make_checking_sections(
hostname, selected_sections=NO_SELECTION
),
+ persisted_section_dir=make_persisted_section_dir(
+ source_info.hostname,
+ fetcher_type=source_info.fetcher_type,
+ ident=source_info.ident,
+ section_cache_path=section_cache_path,
+ ),
keep_outdated=file_cache_options.keep_outdated,
logger=log.logger,
),
@@ -585,19 +651,12 @@ def mode_dump_agent(options: Mapping[str, object], hostname: HostName) -> None:
hostname,
ipaddress,
host_sections,
- exit_spec=config_cache.exit_code_spec(hostname, source_info.ident),
- time_settings=config.get_config_cache().get_piggybacked_hosts_time_settings(
- piggybacked_hostname=hostname,
- ),
- is_piggyback=config_cache.is_piggyback_host(hostname),
+ config_cache.summary_config(hostname, source_info.ident),
fetcher_type=source_info.fetcher_type,
)
if any(r.state != 0 for r in source_results):
- console.error(
- "ERROR [%s]: %s\n",
- source_info.ident,
- ", ".join(r.summary for r in source_results),
- )
+ summaries = ", ".join(r.summary for r in source_results)
+ console.error(f"ERROR [{source_info.ident}]: {summaries}\n")
has_errors = True
if raw_data.is_ok():
assert raw_data.ok is not None
@@ -656,7 +715,7 @@ def mode_dump_hosts(hostlist: Iterable[HostName]) -> None:
for hostname in sorted(hosts - all_hosts):
sys.stderr.write(f"unknown host: {hostname}\n")
for hostname in sorted(hosts & all_hosts):
- cmk.base.dump_host.dump_host(config_cache, hostname)
+ cmk.base.dump_host.dump_host(config_cache, hostname, simulation_mode=config.simulation_mode)
modes.register(
@@ -819,14 +878,26 @@ def mode_scan_parents(options: dict, args: list[str]) -> None:
config_cache = config.get_config_cache()
hosts_config = config.make_hosts_config()
- if "procs" in options:
- config.max_num_processes = options["procs"]
+ max_num_processes = max(options.get("procs", config.max_num_processes), 1)
+ hosts = [HostName(hn) for hn in args]
+
+ def make_scan_config() -> Mapping[HostName, ScanConfig]:
+ return {
+ host: config_cache.make_parent_scan_config(host)
+ for host in itertools.chain(
+ hosts,
+ hosts_config.hosts,
+ ([HostName(config.monitoring_host)] if config.monitoring_host else ()),
+ )
+ }
cmk.base.parent_scan.do_scan_parents(
- config_cache,
+ make_scan_config(),
hosts_config,
HostName(config.monitoring_host) if config.monitoring_host is not None else None,
- [HostName(hn) for hn in args],
+ hosts,
+ max_num_processes=max_num_processes,
+ lookup_ip_address=partial(config.lookup_ip_address, config_cache),
)
@@ -1010,16 +1081,25 @@ def mode_snmpwalk(options: dict, hostnames: list[str]) -> None:
raise MKBailOut("Please specify host names to walk on.")
config_cache = config.get_config_cache()
+ stored_walk_path = Path(cmk.utils.paths.snmpwalks_dir)
for hostname in (HostName(hn) for hn in hostnames):
+ if ConfigCache.ip_stack_config(hostname) is ip_lookup.IPStackConfig.NO_IP:
+ raise MKGeneralException(f"Host is configured as No-IP host: {hostname}")
+
ipaddress = config.lookup_ip_address(config_cache, hostname)
if not ipaddress:
raise MKGeneralException("Failed to gather IP address of %s" % hostname)
- snmp_config = config_cache.make_snmp_config(hostname, ipaddress, SourceType.HOST)
- if snmp_backend_override is not None:
- snmp_config = dataclasses.replace(snmp_config, snmp_backend=snmp_backend_override)
- _do_snmpwalk(options, backend=snmp_factory.make_backend(snmp_config, log.logger))
+ snmp_config = config_cache.make_snmp_config(
+ hostname, ipaddress, SourceType.HOST, backend_override=snmp_backend_override
+ )
+ _do_snmpwalk(
+ options,
+ backend=snmp_factory.make_backend(
+ snmp_config, log.logger, stored_walk_path=stored_walk_path
+ ),
+ )
modes.register(
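
In the snmpwalk hunk above (and the snmpget hunk below), the backend override moves from a post-hoc `dataclasses.replace` on the finished config into a `backend_override=` argument of `make_snmp_config`, and `make_backend` now receives the stored-walk directory explicitly instead of reading a global path. Before/after, condensed from the hunks:

```python
# Before:
#   snmp_config = config_cache.make_snmp_config(hostname, ipaddress, SourceType.HOST)
#   if snmp_backend_override is not None:
#       snmp_config = dataclasses.replace(snmp_config, snmp_backend=snmp_backend_override)
#   backend = snmp_factory.make_backend(snmp_config, log.logger)

# After: override and stored-walk location are explicit arguments.
#   snmp_config = config_cache.make_snmp_config(
#       hostname, ipaddress, SourceType.HOST, backend_override=snmp_backend_override
#   )
#   backend = snmp_factory.make_backend(
#       snmp_config, log.logger, stored_walk_path=stored_walk_path
#   )
```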
@@ -1091,20 +1171,27 @@ def mode_snmpget(options: Mapping[str, object], args: Sequence[str]) -> None:
for host in frozenset(hosts_config.hosts)
if config_cache.is_active(host)
and config_cache.is_online(host)
- and config_cache.is_snmp_host(host)
+ and config_cache.computed_datasources(host).is_snmp
)
assert hostnames
+ stored_walk_path = Path(cmk.utils.paths.snmpwalks_dir)
for hostname in (HostName(hn) for hn in hostnames):
+ if ConfigCache.ip_stack_config(hostname) is ip_lookup.IPStackConfig.NO_IP:
+ raise MKGeneralException(f"Host is configured as No-IP host: {hostname}")
ipaddress = config.lookup_ip_address(config_cache, hostname)
if not ipaddress:
raise MKGeneralException("Failed to gather IP address of %s" % hostname)
- snmp_config = config_cache.make_snmp_config(hostname, ipaddress, SourceType.HOST)
- if snmp_backend_override is not None:
- snmp_config = dataclasses.replace(snmp_config, snmp_backend=snmp_backend_override)
-
- backend = snmp_factory.make_backend(snmp_config, log.logger)
+ snmp_config = config_cache.make_snmp_config(
+ hostname,
+ ipaddress,
+ SourceType.HOST,
+ backend_override=snmp_backend_override,
+ )
+ backend = snmp_factory.make_backend(
+ snmp_config, log.logger, stored_walk_path=stored_walk_path
+ )
value = get_single_oid(oid, single_oid_cache={}, backend=backend)
sys.stdout.write(f"{backend.hostname} ({backend.address}): {value!r}\n")
@@ -1207,7 +1294,7 @@ def mode_flush(hosts: list[HostName]) -> None: # pylint: disable=too-many-branc
config_cache.effective_host,
partial(config.service_description, ruleset_matcher),
)
- for node in config_cache.nodes_of(host) or [host]
+ for node in config_cache.nodes(host) or [host]
)
# config_cache.remove_autochecks(host)
if count:
@@ -1255,16 +1342,55 @@ def mode_flush(hosts: list[HostName]) -> None: # pylint: disable=too-many-branc
# '----------------------------------------------------------------------'
-def mode_dump_nagios_config(args: list[HostName]) -> None:
+def mode_dump_nagios_config(args: Sequence[HostName]) -> None:
from cmk.utils.config_path import VersionedConfigPath
from cmk.base.core_nagios import create_config # pylint: disable=import-outside-toplevel
+ hostnames = args if args else None
+
+ if config.host_notification_periods:
+ config_warnings.warn(
+ "host_notification_periods is not longer supported. Please use extra_host_conf['notification_period'] instead."
+ )
+
+ if config.service_notification_periods:
+ config_warnings.warn(
+ "service_notification_periods is not longer supported. Please use extra_service_conf['notification_period'] instead."
+ )
+
+ # Map service_period to _SERVICE_PERIOD. This field does not exist in Nagios.
+ # The CMC has this field natively.
+ if "service_period" in config.extra_host_conf:
+ config.extra_host_conf["_SERVICE_PERIOD"] = config.extra_host_conf["service_period"]
+ del config.extra_host_conf["service_period"]
+ if "service_period" in config.extra_service_conf:
+ config.extra_service_conf["_SERVICE_PERIOD"] = config.extra_service_conf["service_period"]
+ del config.extra_service_conf["service_period"]
+
+ config_cache = config.get_config_cache()
+
+ if hostnames is None:
+ hosts_config = config_cache.hosts_config
+ hostnames = sorted(
+ {
+ hn
+ for hn in itertools.chain(hosts_config.hosts, hosts_config.clusters)
+ if config_cache.is_active(hn) and config_cache.is_online(hn)
+ }
+ )
+ else:
+ hostnames = sorted(hostnames)
+
create_config(
sys.stdout,
next(VersionedConfigPath.current()),
- args if len(args) else None,
- get_licensing_handler_type().make(),
+ config_cache,
+ hostnames=hostnames,
+ licensing_handler=get_licensing_handler_type().make(),
+ passwords=cmk.utils.password_store.load(
+ cmk.utils.password_store.pending_password_store_path()
+ ),
)
@@ -1301,10 +1427,14 @@ def mode_update() -> None:
config_cache = config.get_config_cache()
hosts_config = config_cache.hosts_config
+ ip_address_of = config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ )
try:
with cmk.base.core.activation_lock(mode=config.restart_locking):
do_create_config(
core=create_core(config.monitoring_core),
+ ip_address_of=ip_address_of,
config_cache=config_cache,
all_hosts=hosts_config.hosts,
duplicates=sorted(
@@ -1330,9 +1460,9 @@ def mode_update() -> None:
"Updates the core configuration based on the current Checkmk "
"configuration. When using the Nagios core, the precompiled host "
"checks are created and the nagios configuration is updated. "
- "When using the CheckMK Microcore, the core configuration is created "
+ "When using the CheckMK Micro Core, the core configuration is created "
"and the configuration for the Core helper processes is being created.",
- "The agent bakery is updating the agents.",
+ "The Agent Bakery is updating the agents.",
],
)
)
@@ -1351,8 +1481,12 @@ def mode_update() -> None:
def mode_restart(args: Sequence[HostName]) -> None:
config_cache = config.get_config_cache()
hosts_config = config_cache.hosts_config
+ ip_address_of = config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ )
cmk.base.core.do_restart(
config_cache,
+ ip_address_of,
create_core(config.monitoring_core),
hosts_to_update=set(args) if args else None,
locking_mode=config.restart_locking,
@@ -1375,7 +1509,7 @@ def mode_restart(args: Sequence[HostName]) -> None:
long_help=[
"You may add hostnames as additional arguments. This enables the incremental "
"activate mechanism, only compiling these hostnames and using cached data for all "
- "other hosts. Only supported with Checkmk Microcore."
+ "other hosts. Only supported with Checkmk Micro Core."
],
handler_function=mode_restart,
short_help="Create core config + core restart",
@@ -1396,8 +1530,12 @@ def mode_restart(args: Sequence[HostName]) -> None:
def mode_reload(args: Sequence[HostName]) -> None:
config_cache = config.get_config_cache()
hosts_config = config_cache.hosts_config
+ ip_address_of = config.ConfiguredIPLookup(
+ config_cache, error_handler=config.handle_ip_lookup_failure
+ )
cmk.base.core.do_reload(
config_cache,
+ ip_address_of,
create_core(config.monitoring_core),
hosts_to_update=set(args) if args else None,
locking_mode=config.restart_locking,
@@ -1420,7 +1558,7 @@ def mode_reload(args: Sequence[HostName]) -> None:
long_help=[
"You may add hostnames as additional arguments. This enables the incremental "
"activate mechanism, only compiling these hostnames and using cached data for all "
- "other hosts. Only supported with Checkmk Microcore."
+ "other hosts. Only supported with Checkmk Micro Core."
],
handler_function=mode_reload,
short_help="Create core config + core reload",
@@ -1591,7 +1729,20 @@ def mode_notify(options: dict, args: list[str]) -> int | None:
with store.lock_checkmk_configuration():
config.load(with_conf_d=True, validate_hosts=False)
- return notify.do_notify(options, args)
+
+ def ensure_nagios(msg: str) -> None:
+ if config.is_cmc():
+ raise RuntimeError(msg)
+
+ return notify.do_notify(
+ options,
+ args,
+ host_parameters_cb=lambda hostname, plugin: config.get_config_cache().notification_plugin_parameters(
+ hostname, plugin
+ ),
+ get_http_proxy=config.get_http_proxy,
+ ensure_nagios=ensure_nagios,
+ )
modes.register(
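
A minimal sketch of the new injection seams of do_notify (the stub names are illustrative; the keyword-only signature comes from cmk/base/notify.py below):

from collections.abc import Mapping

import cmk.base.config as config
import cmk.base.notify as notify
from cmk.utils.hostaddress import HostName

def stub_host_parameters(hostname: HostName, plugin: str) -> Mapping[str, object]:
    # Real callers delegate to ConfigCache.notification_plugin_parameters().
    return {}

def stub_ensure_nagios(msg: str) -> None:
    # Real callers raise RuntimeError when the Micro Core is active.
    pass

notify.do_notify(
    {},  # options
    [],  # args: empty, so the notification context is read from the environment
    host_parameters_cb=stub_host_parameters,
    get_http_proxy=config.get_http_proxy,
    ensure_nagios=stub_ensure_nagios,
)
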
@@ -1647,6 +1798,7 @@ def mode_check_discovery(
discovery_file_cache_max_age = 1.5 * check_interval if file_cache_options.use_outdated else 0
fetcher = CMKFetcher(
config_cache,
+ config_cache.fetcher_factory(),
file_cache_options=file_cache_options,
force_snmp_cache_refresh=False,
mode=FetchMode.DISCOVERY,
@@ -1659,16 +1811,20 @@ def mode_check_discovery(
inventory=1.5 * check_interval,
),
snmp_backend_override=snmp_backend_override,
+ password_store_file=cmk.utils.password_store.core_password_store_path(LATEST_CONFIG),
)
parser = CMKParser(
- config_cache,
+ config_cache.parser_factory(),
+ checking_sections=lambda hostname: config_cache.make_checking_sections(
+ hostname, selected_sections=NO_SELECTION
+ ),
selected_sections=NO_SELECTION,
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.discovery"),
)
summarizer = CMKSummarizer(
- config_cache,
hostname,
+ config_cache.summary_config,
override_non_ok_state=None,
)
error_handler = CheckResultErrorHandler(
@@ -1680,14 +1836,14 @@ def mode_check_discovery(
snmp_backend=config_cache.get_snmp_backend(hostname),
keepalive=keepalive,
)
- check_result = ActiveCheckResult(3, "unknown error")
+ checks_result: Sequence[ActiveCheckResult] = [ActiveCheckResult(3, "unknown error")]
with error_handler:
fetched = fetcher(hostname, ip_address=None)
with plugin_contexts.current_host(hostname):
- check_result = execute_check_discovery(
+ checks_result = execute_check_discovery(
hostname,
is_cluster=hostname in config_cache.hosts_config.clusters,
- cluster_nodes=config_cache.nodes_of(hostname) or (),
+ cluster_nodes=config_cache.nodes(hostname),
params=config_cache.discovery_check_parameters(hostname),
fetched=((f[0], f[1]) for f in fetched),
parser=parser,
@@ -1710,7 +1866,9 @@ def mode_check_discovery(
)
if error_handler.result is not None:
- check_result = error_handler.result
+ checks_result = [error_handler.result]
+
+ check_result = ActiveCheckResult.from_subresults(*checks_result)
active_check_handler(hostname, check_result.as_text())
if keepalive:
@@ -1881,7 +2039,7 @@ def _extract_plugin_selection(
inventory_plugin_names,
)
- raise NotImplementedError(f"unknown plugin name {type_}")
+ raise NotImplementedError(f"unknown plug-in name {type_}")
_DiscoveryOptions = TypedDict(
@@ -1968,13 +2126,17 @@ def mode_discover(options: _DiscoveryOptions, args: list[str]) -> None:
selected_sections, run_plugin_names = _extract_plugin_selection(options, CheckPluginName)
config_cache = config.get_config_cache()
parser = CMKParser(
- config_cache,
+ config_cache.parser_factory(),
+ checking_sections=lambda hostname: config_cache.make_checking_sections(
+ hostname, selected_sections=NO_SELECTION
+ ),
selected_sections=selected_sections,
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.discovery"),
)
fetcher = CMKFetcher(
config_cache,
+ config_cache.fetcher_factory(),
file_cache_options=file_cache_options,
force_snmp_cache_refresh=False,
mode=FetchMode.DISCOVERY if selected_sections is NO_SELECTION else FetchMode.FORCE_SECTIONS,
@@ -1982,12 +2144,13 @@ def mode_discover(options: _DiscoveryOptions, args: list[str]) -> None:
selected_sections=selected_sections,
simulation_mode=config.simulation_mode,
snmp_backend_override=snmp_backend_override,
+ password_store_file=cmk.utils.password_store.pending_password_store_path(),
)
for hostname in sorted(
_preprocess_hostnames(
frozenset(hostnames),
is_cluster=lambda hn: hn in config_cache.hosts_config.clusters,
- resolve_nodes=lambda hn: config_cache.nodes_of(hn) or (),
+ resolve_nodes=config_cache.nodes,
config_cache=config_cache,
only_host_labels="only-host-labels" in options,
)
@@ -2104,8 +2267,7 @@ def __call__(
dry_run: bool,
perfdata_format: Literal["pnp", "standard"],
show_perfdata: bool,
- ) -> Submitter:
- ...
+ ) -> Submitter: ...
def mode_check(
@@ -2115,6 +2277,7 @@ def mode_check(
*,
active_check_handler: Callable[[HostName, str], object],
keepalive: bool,
+ precompiled_host_check: bool = False,
) -> ServiceState:
file_cache_options = _handle_fetcher_options(options)
try:
@@ -2134,6 +2297,7 @@ def mode_check(
selected_sections, run_plugin_names = _extract_plugin_selection(options, CheckPluginName)
fetcher = CMKFetcher(
config_cache,
+ config_cache.fetcher_factory(),
file_cache_options=file_cache_options,
force_snmp_cache_refresh=False,
mode=FetchMode.CHECKING if selected_sections is NO_SELECTION else FetchMode.FORCE_SECTIONS,
@@ -2141,16 +2305,24 @@ def mode_check(
selected_sections=selected_sections,
simulation_mode=config.simulation_mode,
snmp_backend_override=snmp_backend_override,
+ password_store_file=(
+ cmk.utils.password_store.core_password_store_path(LATEST_CONFIG)
+ if precompiled_host_check
+ else cmk.utils.password_store.pending_password_store_path()
+ ),
)
parser = CMKParser(
- config_cache,
+ config_cache.parser_factory(),
+ checking_sections=lambda hostname: config_cache.make_checking_sections(
+ hostname, selected_sections=NO_SELECTION
+ ),
selected_sections=selected_sections,
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.checking"),
)
summarizer = CMKSummarizer(
- config_cache,
hostname,
+ config_cache.summary_config,
override_non_ok_state=None,
)
dry_run = options.get("no-submit", False)
@@ -2163,7 +2335,7 @@ def mode_check(
snmp_backend=config_cache.get_snmp_backend(hostname),
keepalive=keepalive,
)
- check_result = ActiveCheckResult(3, "unknown error")
+ checks_result: Sequence[ActiveCheckResult] = [ActiveCheckResult(3, "unknown error")]
fetched: Sequence[
tuple[
SourceInfo,
@@ -2171,10 +2343,14 @@ def mode_check(
Snapshot,
]
] = ()
- with error_handler, plugin_contexts.current_host(hostname), set_value_store_manager(
- ValueStoreManager(hostname), store_changes=not dry_run
- ) as value_store_manager:
- console.vverbose("Checkmk version %s\n", cmk_version.__version__)
+ with (
+ error_handler,
+ plugin_contexts.current_host(hostname),
+ set_value_store_manager(
+ ValueStoreManager(hostname), store_changes=not dry_run
+ ) as value_store_manager,
+ ):
+ console.debug(f"Checkmk version {cmk_version.__version__}\n")
fetched = fetcher(hostname, ip_address=ipaddress)
check_plugins = CheckPluginMapper(
config_cache,
@@ -2183,7 +2359,7 @@ def mode_check(
rtc_package=None,
)
with CPUTracker() as tracker:
- check_result = execute_checkmk_checks(
+ checks_result = execute_checkmk_checks(
hostname=hostname,
fetched=((f[0], f[1]) for f in fetched),
parser=parser,
@@ -2214,17 +2390,19 @@ def mode_check(
exit_spec=config_cache.exit_code_spec(hostname),
)
- check_result = ActiveCheckResult.from_subresults(
- check_result,
+ checks_result = [
+ *checks_result,
make_timing_results(
tracker.duration,
tuple((f[0], f[2]) for f in fetched),
perfdata_with_times=config.check_mk_perfdata_with_times,
),
- )
+ ]
if error_handler.result is not None:
- check_result = error_handler.result
+ checks_result = [error_handler.result]
+
+ check_result = ActiveCheckResult.from_subresults(*checks_result)
active_check_handler(hostname, check_result.as_text())
if keepalive:
@@ -2350,6 +2528,7 @@ def mode_inventory(options: _InventoryOptions, args: list[str]) -> None:
selected_sections, run_plugin_names = _extract_plugin_selection(options, InventoryPluginName)
fetcher = CMKFetcher(
config_cache,
+ config_cache.fetcher_factory(),
file_cache_options=file_cache_options,
force_snmp_cache_refresh=False,
mode=FetchMode.INVENTORY if selected_sections is NO_SELECTION else FetchMode.FORCE_SECTIONS,
@@ -2357,9 +2536,13 @@ def mode_inventory(options: _InventoryOptions, args: list[str]) -> None:
selected_sections=selected_sections,
simulation_mode=config.simulation_mode,
snmp_backend_override=snmp_backend_override,
+ password_store_file=cmk.utils.password_store.pending_password_store_path(),
)
parser = CMKParser(
- config_cache,
+ config_cache.parser_factory(),
+ checking_sections=lambda hostname: config_cache.make_checking_sections(
+ hostname, selected_sections=NO_SELECTION
+ ),
selected_sections=selected_sections,
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.inventory"),
@@ -2389,8 +2572,8 @@ def section_error_handling(
parameters = config_cache.hwsw_inventory_parameters(hostname)
raw_intervals_from_config = config_cache.inv_retention_intervals(hostname)
summarizer = CMKSummarizer(
- config_cache,
hostname,
+ config_cache.summary_config,
override_non_ok_state=parameters.fail_status,
)
@@ -2400,7 +2583,7 @@ def section_error_handling(
previous_tree = load_tree(Path(cmk.utils.paths.inventory_output_dir, hostname))
if hostname in hosts_config.clusters:
check_result = inventory.inventorize_cluster(
- config_cache.nodes_of(hostname) or (),
+ config_cache.nodes(hostname),
parameters=parameters,
previous_tree=previous_tree,
).check_result
@@ -2494,7 +2677,7 @@ def _execute_active_check_inventory(
if host_name in hosts_config.clusters:
result = inventory.inventorize_cluster(
- config_cache.nodes_of(host_name) or (),
+ config_cache.nodes(host_name),
parameters=parameters,
previous_tree=previous_tree,
)
@@ -2522,6 +2705,8 @@ def _execute_active_check_inventory(
if result.no_data_or_files:
AutoQueue(cmk.utils.paths.autoinventory_dir).add(host_name)
+ else:
+ (AutoQueue(cmk.utils.paths.autoinventory_dir).path / str(host_name)).unlink(missing_ok=True)
if not (result.processing_failed or result.no_data_or_files):
save_tree_actions = _get_save_tree_actions(
@@ -2531,8 +2716,10 @@ def _execute_active_check_inventory(
)
# The order of archive or save is important:
if save_tree_actions.do_archive:
+ console.verbose("Archive current inventory tree.\n")
tree_or_archive_store.archive(host_name=host_name)
if save_tree_actions.do_save:
+ console.verbose("Save new inventory tree.\n")
tree_or_archive_store.save(host_name=host_name, tree=result.inventory_tree)
return result.check_result
@@ -2559,7 +2746,7 @@ def _get_save_tree_actions(
return _SaveTreeActions(do_archive=False, do_save=True)
if has_changed := previous_tree != inventory_tree:
- console.verbose("Inventory tree has changed. Add history entry.\n")
+ console.verbose("Inventory tree has changed.\n")
if update_result.save_tree:
console.verbose(str(update_result))
@@ -2590,6 +2777,7 @@ def mode_inventory_as_check(
fetcher = CMKFetcher(
config_cache,
+ config_cache.fetcher_factory(),
file_cache_options=file_cache_options,
force_snmp_cache_refresh=False,
mode=FetchMode.INVENTORY,
@@ -2597,16 +2785,20 @@ def mode_inventory_as_check(
selected_sections=NO_SELECTION,
simulation_mode=config.simulation_mode,
snmp_backend_override=snmp_backend_override,
+ password_store_file=cmk.utils.password_store.core_password_store_path(LATEST_CONFIG),
)
parser = CMKParser(
- config_cache,
+ config_cache.parser_factory(),
+ checking_sections=lambda hostname: config_cache.make_checking_sections(
+ hostname, selected_sections=NO_SELECTION
+ ),
selected_sections=NO_SELECTION,
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.inventory"),
)
summarizer = CMKSummarizer(
- config_cache,
hostname,
+ config_cache.summary_config,
override_non_ok_state=parameters.fail_status,
)
error_handler = CheckResultErrorHandler(
@@ -2662,7 +2854,7 @@ def register_mode_inventory_as_check(
),
argument=True,
argument_descr="HOST",
- short_help="Do HW/SW-Inventory, behave like check plugin",
+ short_help="Do HW/SW-Inventory, behave like check plug-in",
sub_options=[
*_FETCHER_OPTIONS,
_SNMP_BACKEND_OPTION,
@@ -2736,13 +2928,17 @@ def mode_inventorize_marked_hosts(options: Mapping[str, object]) -> None:
config.load()
config_cache = config.get_config_cache()
parser = CMKParser(
- config_cache,
+ config_cache.parser_factory(),
+ checking_sections=lambda hostname: config_cache.make_checking_sections(
+ hostname, selected_sections=NO_SELECTION
+ ),
selected_sections=NO_SELECTION,
keep_outdated=file_cache_options.keep_outdated,
logger=logging.getLogger("cmk.base.inventory"),
)
fetcher = CMKFetcher(
config_cache,
+ config_cache.fetcher_factory(),
file_cache_options=file_cache_options,
force_snmp_cache_refresh=False,
mode=FetchMode.INVENTORY,
@@ -2750,12 +2946,13 @@ def mode_inventorize_marked_hosts(options: Mapping[str, object]) -> None:
selected_sections=NO_SELECTION,
simulation_mode=config.simulation_mode,
snmp_backend_override=snmp_backend_override,
+ password_store_file=cmk.utils.password_store.core_password_store_path(LATEST_CONFIG),
)
def summarizer(host_name: HostName) -> CMKSummarizer:
return CMKSummarizer(
- config_cache,
host_name,
+ config_cache.summary_config,
override_non_ok_state=config_cache.hwsw_inventory_parameters(host_name).fail_status,
)
diff --git a/cmk/base/notify.py b/cmk/base/notify.py
index 6bb94a0e8a2..17e38f6ffc6 100644
--- a/cmk/base/notify.py
+++ b/cmk/base/notify.py
@@ -15,6 +15,7 @@
# => These already bear all information about the contact, the plugin
# to call and its parameters.
+import datetime
import io
import logging
import os
@@ -25,15 +26,16 @@
import traceback
import uuid
from collections.abc import Mapping, Sequence
-from functools import cache
+from functools import cache, partial
from pathlib import Path
-from typing import Any, cast, Literal, overload
+from typing import Any, Callable, cast, Literal, overload, TypeAlias
import cmk.utils.debug
import cmk.utils.log as log
import cmk.utils.paths
import cmk.utils.store as store
from cmk.utils.exceptions import MKGeneralException
+from cmk.utils.http_proxy_config import HTTPProxyConfig
from cmk.utils.log import console
from cmk.utils.macros import replace_macros_in_str
from cmk.utils.notify import (
@@ -50,6 +52,7 @@
from cmk.utils.notify_types import (
Contact,
ContactName,
+ EnrichedEventContext,
EventContext,
EventRule,
HostEventType,
@@ -70,10 +73,9 @@
from cmk.utils.regex import regex
from cmk.utils.store.host_storage import ContactgroupName
from cmk.utils.timeout import MKTimeout, Timeout
-from cmk.utils.timeperiod import timeperiod_active
+from cmk.utils.timeperiod import is_timeperiod_active, load_timeperiods, timeperiod_active
import cmk.base.config as config
-import cmk.base.core
import cmk.base.events as events
import cmk.base.obsolete_output as out
import cmk.base.utils
@@ -81,7 +83,8 @@
try:
import cmk.base.cee.keepalive as keepalive
except ImportError:
- keepalive = None # type: ignore[assignment]
+ # Edition layering...
+ keepalive: TypeAlias = None # type: ignore[no-redef]
from cmk.utils.hostaddress import HostName
@@ -163,7 +166,7 @@
def _initialize_logging() -> None:
log.logger.setLevel(config.notification_logging)
- log.open_log(notification_log)
+ log.setup_watched_file_logging_handler(notification_log)
# .
@@ -203,29 +206,34 @@ def notify_usage() -> None:
# Main function called by cmk --notify. It either starts the
# keepalive mode (used by CMC), sends out one notification from
# several possible sources or sends out all ripe bulk notifications.
-def do_notify( # pylint: disable=too-many-branches
+def do_notify(
options: dict[str, bool],
args: list[str],
+ *,
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ ensure_nagios: Callable[[str], object],
) -> int | None:
+ # pylint: disable=too-many-branches
global _log_to_stdout, notify_mode
_log_to_stdout = options.get("log-to-stdout", _log_to_stdout)
+ if not os.path.exists(notification_logdir):
+ os.makedirs(notification_logdir)
+ if not os.path.exists(notification_spooldir):
+ os.makedirs(notification_spooldir)
+ _initialize_logging()
+
if keepalive and "keepalive" in options:
keepalive.enable()
convert_legacy_configuration()
try:
- if not os.path.exists(notification_logdir):
- os.makedirs(notification_logdir)
- if not os.path.exists(notification_spooldir):
- os.makedirs(notification_spooldir)
- _initialize_logging()
-
notify_mode = "notify"
if args:
notify_mode = args[0]
- if notify_mode not in ["stdin", "spoolfile", "replay", "send-bulks"]:
+ if notify_mode not in ["stdin", "spoolfile", "replay", "test", "send-bulks"]:
console.error("ERROR: Invalid call to check_mk --notify.\n\n")
notify_usage()
sys.exit(1)
@@ -241,22 +249,40 @@ def do_notify( # pylint: disable=too-many-branches
# -> mknotifyd deletes this file
if notify_mode == "spoolfile":
filename = args[1]
- return handle_spoolfile(filename)
+ return handle_spoolfile(filename, host_parameters_cb, get_http_proxy)
if keepalive and keepalive.enabled():
- notify_keepalive()
+ notify_keepalive(host_parameters_cb, get_http_proxy, ensure_nagios)
elif notify_mode == "replay":
try:
replay_nr = int(args[1])
except (IndexError, ValueError):
replay_nr = 0
- notify_notify(raw_context_from_backlog(replay_nr))
+ notify_notify(
+ raw_context_from_backlog(replay_nr),
+ host_parameters_cb,
+ get_http_proxy,
+ ensure_nagios,
+ )
+ elif notify_mode == "test":
+ assert isinstance(args[0], dict)
+ notify_notify(EventContext(args[0]), host_parameters_cb, get_http_proxy, ensure_nagios)
elif notify_mode == "stdin":
- notify_notify(events.raw_context_from_string(sys.stdin.read()))
+ notify_notify(
+ events.raw_context_from_string(sys.stdin.read()),
+ host_parameters_cb,
+ get_http_proxy,
+ ensure_nagios,
+ )
elif notify_mode == "send-bulks":
- send_ripe_bulks()
+ send_ripe_bulks(get_http_proxy)
else:
- notify_notify(raw_context_from_env(os.environ))
+ notify_notify(
+ raw_context_from_env(os.environ),
+ host_parameters_cb,
+ get_http_proxy,
+ ensure_nagios,
+ )
except Exception:
crash_dir = Path(cmk.utils.paths.var_dir) / "notify"
@@ -292,7 +318,15 @@ def convert_legacy_configuration() -> None:
config.notification_logging = 10
-def notify_notify(raw_context: EventContext, analyse: bool = False) -> NotifyAnalysisInfo | None:
+def notify_notify(
+ raw_context: EventContext,
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+ ensure_nagios: Callable[[str], object],
+ *,
+ analyse: bool = False,
+ dispatch: bool = False,
+) -> NotifyAnalysisInfo | None:
"""
This function processes one raw notification and decides whether it should be spooled or not.
In the latter case, local delivery is performed.
@@ -303,6 +337,13 @@ def notify_notify(raw_context: EventContext, analyse: bool = False) -> NotifyAna
notification rule.
:param analyse:
"""
+ enriched_context = events.complete_raw_context(
+ raw_context,
+ ensure_nagios,
+ with_dump=config.notification_logging <= 10,
+ contacts_needed=True,
+ )
+
if not analyse:
store_notification_backlog(raw_context)
@@ -310,8 +351,8 @@ def notify_notify(raw_context: EventContext, analyse: bool = False) -> NotifyAna
if analyse:
logger.info(
"Analysing notification (%s) context with %s variables",
- events.find_host_service_in_context(raw_context),
- len(raw_context),
+ events.find_host_service_in_context(enriched_context),
+ len(enriched_context),
)
else:
logger.info(
@@ -322,34 +363,38 @@ def notify_notify(raw_context: EventContext, analyse: bool = False) -> NotifyAna
# Add some further variables for the convenience of the plugins
- logger.debug(events.render_context_dump(raw_context))
+ logger.debug(events.render_context_dump(enriched_context))
- raw_context["LOGDIR"] = notification_logdir
- events.complete_raw_context(
- raw_context,
- with_dump=config.notification_logging <= 10,
- contacts_needed=True,
- )
+ enriched_context["LOGDIR"] = notification_logdir
# Spool notification to remote host, if this is enabled
if config.notification_spooling in ("remote", "both"):
create_spoolfile(
logger,
Path(notification_spooldir),
- NotificationForward({"context": raw_context, "forward": True}),
+ NotificationForward({"context": enriched_context, "forward": True}),
)
if config.notification_spooling != "remote":
- return locally_deliver_raw_context(raw_context, analyse=analyse)
+ return locally_deliver_raw_context(
+ enriched_context, host_parameters_cb, get_http_proxy, analyse=analyse, dispatch=dispatch
+ )
return None
def locally_deliver_raw_context(
- raw_context: EventContext, analyse: bool = False
+ enriched_context: EnrichedEventContext,
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+ *,
+ analyse: bool = False,
+ dispatch: bool = False,
) -> NotifyAnalysisInfo | None:
try:
logger.debug("Preparing rule based notifications")
- return notify_rulebased(raw_context, analyse=analyse)
+ return notify_rulebased(
+ enriched_context, host_parameters_cb, get_http_proxy, analyse=analyse, dispatch=dispatch
+ )
except Exception:
if cmk.utils.debug.enabled():
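
The spool-or-deliver branching in notify_notify() compresses to a small decision table; a self-contained sketch (the function name is illustrative, and "local" stands for any non-remote spooling value):

def spooling_decision(notification_spooling: str) -> tuple[bool, bool]:
    # Returns (write_spoolfile_for_remote_site, deliver_locally),
    # mirroring the two branches in notify_notify() above.
    return (
        notification_spooling in ("remote", "both"),
        notification_spooling != "remote",
    )

assert spooling_decision("local") == (False, True)
assert spooling_decision("remote") == (True, False)
assert spooling_decision("both") == (True, True)
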
@@ -359,20 +404,63 @@ def locally_deliver_raw_context(
return None
-def notification_replay_backlog(nr: int) -> None:
+def notification_replay_backlog(
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+ ensure_nagios: Callable[[str], object],
+ nr: int,
+) -> None:
global notify_mode
notify_mode = "replay"
_initialize_logging()
raw_context = raw_context_from_backlog(nr)
- notify_notify(raw_context)
+ notify_notify(raw_context, host_parameters_cb, get_http_proxy, ensure_nagios)
-def notification_analyse_backlog(nr: int) -> NotifyAnalysisInfo | None:
+def notification_analyse_backlog(
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+ ensure_nagios: Callable[[str], object],
+ nr: int,
+) -> NotifyAnalysisInfo | None:
global notify_mode
notify_mode = "replay"
_initialize_logging()
raw_context = raw_context_from_backlog(nr)
- return notify_notify(raw_context, analyse=True)
+ return notify_notify(
+ raw_context,
+ host_parameters_cb,
+ get_http_proxy,
+ ensure_nagios,
+ analyse=True,
+ )
+
+
+def notification_test(
+ raw_context: NotificationContext,
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+ ensure_nagios: Callable[[str], object],
+ *,
+ dispatch: bool,
+) -> NotifyAnalysisInfo | None:
+ global notify_mode
+ notify_mode = "test"
+ _initialize_logging()
+ contacts = events.livestatus_fetch_contacts(
+ HostName(raw_context["HOSTNAME"]), raw_context.get("SERVICEDESC")
+ )
+ raw_context["CONTACTS"] = ",".join(contacts) if contacts else "?"
+ plugin_context = EventContext({})
+ plugin_context.update(cast(EventContext, raw_context))
+ return notify_notify(
+ plugin_context,
+ host_parameters_cb,
+ get_http_proxy,
+ ensure_nagios,
+ analyse=True,
+ dispatch=dispatch,
+ )
# .
@@ -390,11 +478,20 @@ def notification_analyse_backlog(nr: int) -> NotifyAnalysisInfo | None:
# TODO: Make use of the generic do_keepalive() mechanism?
-def notify_keepalive() -> None:
+def notify_keepalive(
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+ ensure_nagios: Callable[[str], object],
+) -> None:
cmk.base.utils.register_sigint_handler()
events.event_keepalive(
- event_function=notify_notify,
- call_every_loop=send_ripe_bulks,
+ event_function=partial(
+ notify_notify,
+ host_parameters_cb=host_parameters_cb,
+ get_http_proxy=get_http_proxy,
+ ensure_nagios=ensure_nagios,
+ ),
+ call_every_loop=partial(send_ripe_bulks, get_http_proxy),
loop_interval=config.notification_bulk_interval,
)
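
The partial() calls above exist because event_keepalive() drives event_function with just the raw context; a minimal illustration of that shape (all names here are illustrative):

from functools import partial

def event_function(raw_context: dict, *, flavour: str) -> None:
    print(flavour, raw_context)

# Bind the keyword-only dependencies up front; the keepalive loop then
# only ever calls a one-positional-argument callable.
bound = partial(event_function, flavour="demo")
bound({"HOSTNAME": "myhost"})
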
@@ -412,7 +509,14 @@ def notify_keepalive() -> None:
# '----------------------------------------------------------------------'
-def notify_rulebased(raw_context: EventContext, analyse: bool = False) -> NotifyAnalysisInfo:
+def notify_rulebased(
+ enriched_context: EnrichedEventContext,
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+ *,
+ analyse: bool = False,
+ dispatch: bool = False,
+) -> NotifyAnalysisInfo:
# First step: go through all rules and construct our table of
# notification plugins to call. This is a dict from (users, plugin) to
# a triple of (locked, parameters, bulk). If locked is True, then a user
@@ -431,7 +535,7 @@ def notify_rulebased(raw_context: EventContext, analyse: bool = False) -> Notify
for rule in config.notification_rules + user_notification_rules():
contact_info = _get_contact_info_text(rule)
- why_not = rbn_match_rule(rule, raw_context)
+ why_not = rbn_match_rule(rule, enriched_context, analyse)
if why_not:
logger.log(log.VERBOSE, contact_info)
logger.log(log.VERBOSE, " -> does not match: %s", why_not)
@@ -442,10 +546,22 @@ def notify_rulebased(raw_context: EventContext, analyse: bool = False) -> Notify
num_rule_matches += 1
notifications, rule_info = _create_notifications(
- raw_context, rule, notifications, rule_info
+ enriched_context,
+ rule,
+ notifications,
+ rule_info,
+ host_parameters_cb,
)
- plugin_info = _process_notifications(raw_context, notifications, num_rule_matches, analyse)
+ plugin_info = _process_notifications(
+ enriched_context,
+ notifications,
+ num_rule_matches,
+ host_parameters_cb,
+ get_http_proxy,
+ analyse=analyse,
+ dispatch=dispatch,
+ )
return rule_info, plugin_info
@@ -457,12 +573,13 @@ def _get_contact_info_text(rule: EventRule) -> str:
def _create_notifications(
- raw_context: EventContext,
+ enriched_context: EnrichedEventContext,
rule: EventRule,
notifications: Notifications,
rule_info: list[NotifyRuleInfo],
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
) -> tuple[Notifications, list[NotifyRuleInfo]]:
- contacts = rbn_rule_contacts(rule, raw_context)
+ contacts = rbn_rule_contacts(rule, enriched_context)
contactstxt = ", ".join(contacts)
plugin_name, plugin_parameters = rule["notify_plugin"]
@@ -519,7 +636,7 @@ def _create_notifications(
bulk = rbn_get_bulk_params(rule)
final_parameters = rbn_finalize_plugin_parameters(
- raw_context["HOSTNAME"], plugin_name, plugin_parameters
+ enriched_context["HOSTNAME"], plugin_name, host_parameters_cb, plugin_parameters
)
notifications[key] = (not rule.get("allow_disable"), final_parameters, bulk)
@@ -527,9 +644,17 @@ def _create_notifications(
return notifications, rule_info
-def _process_notifications( # pylint: disable=too-many-branches
- raw_context: EventContext, notifications: Notifications, num_rule_matches: int, analyse: bool
+def _process_notifications(
+ enriched_context: EnrichedEventContext,
+ notifications: Notifications,
+ num_rule_matches: int,
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+ *,
+ analyse: bool,
+ dispatch: bool = False,
) -> list[NotifyPluginInfo]:
+ # pylint: disable=too-many-branches
plugin_info: list[NotifyPluginInfo] = []
if not notifications:
@@ -544,9 +669,11 @@ def _process_notifications( # pylint: disable=too-many-branches
plugin_name, fallback_params = config.notification_fallback_format
fallback_params = rbn_finalize_plugin_parameters(
- raw_context["HOSTNAME"], plugin_name, fallback_params
+ enriched_context["HOSTNAME"], plugin_name, host_parameters_cb, fallback_params
+ )
+ plugin_context = create_plugin_context(
+ enriched_context, fallback_params, get_http_proxy
)
- plugin_context = create_plugin_context(raw_context, fallback_params)
rbn_add_contact_information(plugin_context, fallback_contacts)
plugin_contexts = (
[plugin_context]
@@ -561,7 +688,7 @@ def _process_notifications( # pylint: disable=too-many-branches
# Now do the actual notifications
logger.info("Executing %d notifications:", len(notifications))
for (contacts, plugin_name), (_locked, params, bulk) in sorted(notifications.items()):
- verb = "would notify" if analyse else "notifying"
+ verb = "would notify" if analyse and not dispatch else "notifying"
contactstxt = ", ".join(contacts)
plugintxt = plugin_name
paramtxt = ", ".join(params) if params else "(no parameters)"
@@ -576,7 +703,7 @@ def _process_notifications( # pylint: disable=too-many-branches
)
try:
- plugin_context = create_plugin_context(raw_context, params)
+ plugin_context = create_plugin_context(enriched_context, params, get_http_proxy)
rbn_add_contact_information(plugin_context, contacts)
split_contexts = (
@@ -594,7 +721,7 @@ def _process_notifications( # pylint: disable=too-many-branches
for context in plugin_contexts:
plugin_info.append((context["CONTACTNAME"], plugin_name, params, bulk))
- if analyse:
+ if analyse and not dispatch:
continue
if bulk:
do_bulk_notify(plugin_name, params, context, bulk)
@@ -644,29 +771,31 @@ def rbn_fallback_contacts() -> Contacts:
def rbn_finalize_plugin_parameters(
hostname: HostName,
plugin_name: NotificationPluginNameStr,
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
rule_parameters: NotifyPluginParamsList,
-) -> NotifyPluginParamsList:
- ...
+) -> NotifyPluginParamsList: ...
@overload
def rbn_finalize_plugin_parameters(
hostname: HostName,
plugin_name: NotificationPluginNameStr,
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
rule_parameters: NotifyPluginParamsDict,
-) -> NotifyPluginParamsDict:
- ...
+) -> NotifyPluginParamsDict: ...
def rbn_finalize_plugin_parameters(
- hostname: HostName, plugin_name: NotificationPluginNameStr, rule_parameters: NotifyPluginParams
+ hostname: HostName,
+ plugin_name: NotificationPluginNameStr,
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ rule_parameters: NotifyPluginParams,
) -> NotifyPluginParams:
# Right now we are only able to finalize notification plugins with dict parameters..
if not isinstance(rule_parameters, dict):
return rule_parameters
- config_cache = config.get_config_cache()
- parameters = dict(config_cache.notification_plugin_parameters(hostname, plugin_name)).copy()
+ parameters = dict(host_parameters_cb(hostname, plugin_name)).copy()
parameters.update(rule_parameters)
# Added in 2.0.0b8. Applies if no value is set either in the notification rule
@@ -806,7 +935,11 @@ def rbn_get_bulk_params(rule: EventRule) -> NotifyBulkParameters | None:
return None
-def rbn_match_rule(rule: EventRule, context: EventContext) -> str | None:
+def rbn_match_rule(
+ rule: EventRule,
+ enriched_context: EnrichedEventContext,
+ analyse: bool = False,
+) -> str | None:
return events.apply_matchers(
[
rbn_match_rule_disabled,
@@ -819,17 +952,57 @@ def rbn_match_rule(rule: EventRule, context: EventContext) -> str | None:
rbn_match_hostlabels,
rbn_match_servicelabels,
rbn_match_event_console,
+ rbn_match_timeperiod,
],
rule,
- context,
+ enriched_context,
+ analyse,
)
-def rbn_match_rule_disabled(rule: EventRule, _context: EventContext) -> str | None:
+def rbn_match_timeperiod(rule: EventRule, context: EventContext, analyse: bool) -> str | None:
+ # This test is only done on notification tests, otherwise
+ # events.event_match_timeperiod() is used
+ if not analyse:
+ return None
+
+ if (timeperiod_name := rule.get("match_timeperiod")) is None:
+ return None
+
+ if timeperiod_name == "24X7":
+ return None
+
+ all_timeperiods = load_timeperiods()
+ if "MICROTIME" in context:
+ timestamp = float(context["MICROTIME"]) / 1000000.0
+ else:
+ timestamp = datetime.datetime.strptime(
+ context["SHORTDATETIME"], "%Y-%m-%d %H:%M:%S"
+ ).timestamp()
+
+ if not is_timeperiod_active(
+ timestamp=timestamp,
+ timeperiod_name=timeperiod_name,
+ all_timeperiods=all_timeperiods,
+ ):
+ return f"The notification does not match the timeperiod '{timeperiod_name}'"
+
+ return None
+
+
+def rbn_match_rule_disabled(
+ rule: EventRule,
+ _context: EventContext,
+ _analyse: bool,
+) -> str | None:
return "This rule is disabled" if rule.get("disabled") else None
-def rbn_match_escalation(rule: EventRule, context: EventContext) -> str | None:
+def rbn_match_escalation(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_escalation" in rule:
from_number, to_number = rule["match_escalation"]
if context["WHAT"] == "HOST":
@@ -845,7 +1018,11 @@ def rbn_match_escalation(rule: EventRule, context: EventContext) -> str | None:
return None
-def rbn_match_escalation_throtte(rule: EventRule, context: EventContext) -> str | None:
+def rbn_match_escalation_throtte(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_escalation_throttle" in rule:
# We do not want to suppress recovery notifications.
if (context["WHAT"] == "HOST" and context.get("HOSTSTATE", "UP") == "UP") or (
@@ -867,7 +1044,11 @@ def rbn_match_escalation_throtte(rule: EventRule, context: EventContext) -> str
return None
-def rbn_match_host_event(rule: EventRule, context: EventContext) -> str | None:
+def rbn_match_host_event(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_host_event" in rule:
if context["WHAT"] != "HOST":
if "match_service_event" not in rule:
@@ -882,7 +1063,11 @@ def rbn_match_host_event(rule: EventRule, context: EventContext) -> str | None:
return None
-def rbn_match_service_event(rule: EventRule, context: EventContext) -> str | None:
+def rbn_match_service_event(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_service_event" in rule:
if context["WHAT"] != "SERVICE":
if "match_host_event" not in rule:
@@ -1047,7 +1232,11 @@ def rbn_match_contact_groups(
return None
-def rbn_match_notification_comment(rule: EventRule, context: EventContext) -> str | None:
+def rbn_match_notification_comment(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_notification_comment" in rule:
r = regex(rule["match_notification_comment"])
notification_comment = context.get("NOTIFICATIONCOMMENT", "")
@@ -1058,14 +1247,22 @@ def rbn_match_notification_comment(rule: EventRule, context: EventContext) -> st
return None
-def rbn_match_hostlabels(rule: EventRule, context: EventContext) -> str | None:
+def rbn_match_hostlabels(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_hostlabels" in rule:
return _rbn_handle_labels(rule, context, "host")
return None
-def rbn_match_servicelabels(rule: EventRule, context: EventContext) -> str | None:
+def rbn_match_servicelabels(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_servicelabels" in rule:
return _rbn_handle_labels(rule, context, "service")
@@ -1092,7 +1289,11 @@ def _rbn_handle_labels(
return None
-def rbn_match_event_console(rule: EventRule, context: EventContext) -> str | None:
+def rbn_match_event_console(
+ rule: EventRule,
+ context: EventContext,
+ _analyse: bool,
+) -> str | None:
if "match_ec" in rule:
match_ec = rule["match_ec"]
is_ec_notification = "EC_ID" in context
@@ -1173,7 +1374,7 @@ def _contactgroup_members() -> Mapping[ContactgroupName, set[ContactName]]:
"""Get the members of all contact groups
Is computed once for the process lifetime since it's either a short lived process or in case of
- the Microcore notify helper, it is restarted once a new configuration is applied to the core.
+ the Micro Core notify helper, which is restarted once a new configuration is applied to the core.
"""
members: dict[ContactgroupName, set[ContactName]] = {}
for name, contact in config.contacts.items():
@@ -1213,21 +1414,26 @@ def rbn_emails_contacts(emails: list[str]) -> list[str]:
# 2: Cannot send, retry does not make sense
-# Add the plugin parameters to the envinroment. We have two types of parameters:
+# Add the plug-in parameters to the environment. We have two types of parameters:
# - list, the legacy style. This will lead to PARAMETERS_1, ...
# - dict, the new style for scripts with WATO rule. This will lead to
# PARAMETER_FOO_BAR for a dict key named "foo_bar".
def create_plugin_context(
- raw_context: EventContext, params: list | NotifyPluginParams
+ enriched_context: EnrichedEventContext,
+ params: list | NotifyPluginParams,
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
) -> NotificationContext:
plugin_context = NotificationContext({})
- plugin_context.update(cast(Mapping[str, str], raw_context)) # Make a real copy
- events.add_to_event_context(plugin_context, "PARAMETER", params)
+ plugin_context.update(cast(Mapping[str, str], enriched_context)) # Make a real copy
+ events.add_to_event_context(plugin_context, "PARAMETER", params, get_http_proxy)
return plugin_context
-def create_bulk_parameter_context(params: NotifyPluginParams) -> list[str]:
- dict_context = create_plugin_context({}, params)
+def create_bulk_parameter_context(
+ params: NotifyPluginParams,
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+) -> list[str]:
+ dict_context = create_plugin_context({}, params, get_http_proxy)
return [
"{}={}\n".format(varname, value.replace("\r", "").replace("\n", "\1"))
for (varname, value) in dict_context.items()
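
The comment above describes the dict-style flattening only abstractly; a minimal sketch of the mapping (the helper name is hypothetical — the real work happens in events.add_to_event_context(), which also handles non-string and nested values):

def dict_params_to_plugin_env(params: dict[str, str]) -> dict[str, str]:
    # Dict key "foo_bar" becomes PARAMETER_FOO_BAR, as described above.
    return {f"PARAMETER_{key.upper()}": value for key, value in params.items()}

assert dict_params_to_plugin_env({"foo_bar": "42"}) == {"PARAMETER_FOO_BAR": "42"}
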
@@ -1247,7 +1453,7 @@ def path_to_notification_script(plugin_name: NotificationPluginNameStr) -> str |
path = cmk.utils.paths.notifications_dir / plugin_name
if not path.exists():
- logger.info("Notification plugin '%s' not found", plugin_name)
+ logger.info("Notification plug-in '%s' not found", plugin_name)
logger.info(" not in %s", cmk.utils.paths.notifications_dir)
logger.info(" and not in %s", cmk.utils.paths.local_notifications_dir)
return None
@@ -1298,7 +1504,7 @@ def plugin_log(s: str) -> None:
with Timeout(
config.notification_plugin_timeout,
- message="Notification plugin timed out",
+ message="Notification plug-in timed out",
) as timeout_guard:
try:
while True:
@@ -1313,7 +1519,7 @@ def plugin_log(s: str) -> None:
out.output(line)
except MKTimeout:
plugin_log(
- "Notification plugin did not finish within %d seconds. Terminating."
+ "Notification plug-in did not finish within %d seconds. Terminating."
% config.notification_plugin_timeout
)
p.kill()
@@ -1383,7 +1589,11 @@ def format_(value: str) -> str:
# 2. Notifications for async local delivery. Contain key "plugin"
# 3. Notifications that *were* forwarded (e.g. received from a slave). Contain neither of the two.
# Spool files of type 1 are not handled here!
-def handle_spoolfile(spoolfile: str) -> int:
+def handle_spoolfile(
+ spoolfile: str,
+ host_parameters_cb: Callable[[HostName, NotificationPluginNameStr], Mapping[str, object]],
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+) -> int:
notif_uuid = spoolfile.rsplit("/", 1)[-1]
logger.info("----------------------------------------------------------------------")
data = None
@@ -1418,8 +1628,8 @@ def handle_spoolfile(spoolfile: str) -> int:
events.find_host_service_in_context(raw_context),
)
- store_notification_backlog(data["context"])
- locally_deliver_raw_context(data["context"])
+ store_notification_backlog(raw_context)
+ locally_deliver_raw_context(raw_context, host_parameters_cb, get_http_proxy)
# TODO: It is a bug that we don't transport result information and monitoring history
# entries back to the origin site. The intermediate or final results should be sent back to
# the origin site. Also log_to_history calls should not log the entries to the local
@@ -1728,20 +1938,25 @@ def listdir_visible(path: str) -> list[str]:
return bulks
-def send_ripe_bulks() -> None:
+def send_ripe_bulks(get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig]) -> None:
ripe = find_bulks(True)
if ripe:
logger.info("Sending out %d ripe bulk notifications", len(ripe))
for bulk in ripe:
try:
- notify_bulk(bulk[0], bulk[-1])
+ notify_bulk(bulk[0], bulk[-1], get_http_proxy)
except Exception:
if cmk.utils.debug.enabled():
raise
logger.exception("Error sending bulk %s:", bulk[0])
-def notify_bulk(dirname: str, uuids: UUIDs) -> None: # pylint: disable=too-many-branches
+def notify_bulk(
+ dirname: str,
+ uuids: UUIDs,
+ get_http_proxy: Callable[[tuple[str, str]], HTTPProxyConfig],
+) -> None:
+ # pylint: disable=too-many-branches
parts = dirname.split("/")
contact = parts[-3]
plugin_name = cast(NotificationPluginNameStr, parts[-2])
@@ -1777,14 +1992,14 @@ def notify_bulk(dirname: str, uuids: UUIDs) -> None: # pylint: disable=too-many
if bulk_context: # otherwise: only corrupted files
# Per default the uuids are sorted chronologically from oldest to newest
- # Therefore the notification plugin also shows the oldest entry first
+ # Therefore the notification plug-in also shows the oldest entry first
# The following configuration option allows reversing the sorting
if isinstance(old_params, dict) and old_params.get("bulk_sort_order") == "newest_first":
bulk_context.reverse()
assert old_params is not None
plugin_text = NotificationPluginName("bulk " + (plugin_name))
- context_lines = create_bulk_parameter_context(old_params)
+ context_lines = create_bulk_parameter_context(old_params, get_http_proxy)
for context in bulk_context:
# Do not forget to add this to the monitoring log. We create
# a single entry for each notification contained in the bulk.
@@ -1816,7 +2031,7 @@ def notify_bulk(dirname: str, uuids: UUIDs) -> None: # pylint: disable=too-many
# Repeat with unhandled uuids (due to different parameters)
if unhandled_uuids:
- notify_bulk(dirname, unhandled_uuids)
+ notify_bulk(dirname, unhandled_uuids, get_http_proxy)
# Remove directory. Not necessary if empty
try:
@@ -1831,7 +2046,7 @@ def call_bulk_notification_script(
) -> tuple[NotificationResultCode, list[str]]:
path = path_to_notification_script(plugin_name)
if not path:
- raise MKGeneralException("Notification plugin %s not found" % plugin_name)
+ raise MKGeneralException("Notification plug-in %s not found" % plugin_name)
timed_out = False
# Protocol: The script gets the context on standard input and
@@ -1852,7 +2067,7 @@ def call_bulk_notification_script(
)
except subprocess.TimeoutExpired:
logger.info(
- "Notification plugin did not finish within %d seconds. Terminating.",
+ "Notification plug-in did not finish within %d seconds. Terminating.",
config.notification_plugin_timeout,
)
p.kill()
diff --git a/cmk/base/parent_scan.py b/cmk/base/parent_scan.py
index 2565da913d0..dfd953afc91 100644
--- a/cmk/base/parent_scan.py
+++ b/cmk/base/parent_scan.py
@@ -8,38 +8,52 @@
import subprocess
import sys
import time
-from collections.abc import Iterable, Sequence
+from collections.abc import Iterable, Mapping, Sequence
+from dataclasses import dataclass
from pathlib import Path
+from typing import Protocol
import cmk.utils.debug
import cmk.utils.paths
import cmk.utils.tty as tty
from cmk.utils.caching import cache_manager, DictCache
-from cmk.utils.exceptions import MKGeneralException
+from cmk.utils.exceptions import MKGeneralException, MKIPAddressLookupError
from cmk.utils.hostaddress import HostAddress, HostName, Hosts
from cmk.utils.log import console
-from cmk.automations.results import Gateway
+from cmk.automations.results import Gateway, GatewayResult
-import cmk.base.config as config
import cmk.base.obsolete_output as out
-from cmk.base.config import ConfigCache
+from cmk.base.ip_lookup import IPStackConfig
+
+
+@dataclass(frozen=True, kw_only=True)
+class ScanConfig:
+ active: bool
+ online: bool
+ ip_stack_config: IPStackConfig
+ parents: Sequence[HostName]
+
+
+class _IpAddressLookup(Protocol):
+ def __call__(
+ self, hostname: HostName, *, family: socket.AddressFamily
+ ) -> HostAddress | None: ...
def do_scan_parents(
- config_cache: ConfigCache,
+ scan_config: Mapping[HostName, ScanConfig],
hosts_config: Hosts,
monitoring_host: HostName | None,
hosts: list[HostName],
+ *,
+ max_num_processes: int,
+ lookup_ip_address: _IpAddressLookup,
) -> None:
# pylint: disable=too-many-branches
if not hosts:
hosts = sorted(
- {
- hn
- for hn in hosts_config.hosts
- if config_cache.is_active(hn) and config_cache.is_online(hn)
- }
+ {hn for hn in hosts_config.hosts if scan_config[hn].active and scan_config[hn].online}
)
parent_hosts = []
@@ -47,9 +61,6 @@ def do_scan_parents(
parent_rules = []
gateway_hosts: set[HostName] = set()
- # TODO: Sneakily changing a global variable looks like a bad idea!
- config.max_num_processes = max(config.max_num_processes, 1)
-
outfilename = Path(cmk.utils.paths.check_mk_config_dir) / "parents.mk"
if not traceroute_available():
@@ -71,32 +82,37 @@ def do_scan_parents(
"the file and try again."
)
- out.output("Scanning for parents (%d processes)..." % config.max_num_processes)
+ out.output("Scanning for parents (%d processes)..." % max_num_processes)
while hosts:
chunk: list[HostName] = []
- while len(chunk) < config.max_num_processes and hosts:
+ while len(chunk) < max_num_processes and hosts:
host = hosts.pop()
# skip hosts that already have a parent
- if config_cache.parents(host):
+ if scan_config[host].parents:
console.verbose("(manual parent) ")
continue
chunk.append(host)
- gws = scan_parents_of(config_cache, hosts_config, monitoring_host, chunk)
+ results = scan_parents_of(
+ scan_config,
+ hosts_config,
+ monitoring_host,
+ chunk,
+ lookup_ip_address=lookup_ip_address,
+ )
- for host, (gw, _unused_state, _unused_ping_fails, _unused_message) in zip(chunk, gws):
- if gw:
- gateway, gateway_ip, dns_name = gw
- if not gateway: # create artificial host
- if dns_name:
- gateway = dns_name
+ for host, result in zip(chunk, results):
+ if gw := result.gateway:
+ if not gw.existing_gw_host_name: # create artificial host
+ if gw.dns_name:
+ gateway = gw.dns_name
else:
- gateway = HostName("gw-%s" % (gateway_ip.replace(".", "-")))
+ gateway = HostName("gw-%s" % (gw.ip.replace(".", "-")))
if gateway not in gateway_hosts:
gateway_hosts.add(gateway)
parent_hosts.append("%s|parent|ping" % gateway)
- parent_ips[gateway] = gateway_ip
+ parent_ips[gateway] = gw.ip
if monitoring_host:
parent_rules.append(
(monitoring_host, [gateway])
@@ -132,21 +148,27 @@ def traceroute_available() -> str | None:
def scan_parents_of(
- config_cache: ConfigCache,
+ scan_config: Mapping[HostName, ScanConfig],
hosts_config: Hosts,
monitoring_host: HostName | None,
hosts: Iterable[HostName],
silent: bool = False,
settings: dict[str, int] | None = None,
-) -> Sequence[Gateway]:
+ *,
+ lookup_ip_address: _IpAddressLookup,
+) -> Sequence[GatewayResult]:
# pylint: disable=too-many-branches
if settings is None:
settings = {}
- if monitoring_host:
- nagios_ip = config.lookup_ip_address(config_cache, monitoring_host, family=socket.AF_INET)
- else:
- nagios_ip = None
+ nagios_ip = (
+ None
+ if (
+ monitoring_host is None
+ or scan_config[monitoring_host].ip_stack_config is IPStackConfig.NO_IP
+ )
+ else lookup_ip_address(monitoring_host, family=socket.AddressFamily.AF_INET)
+ )
os.putenv("LANG", "")
os.putenv("LC_ALL", "")
@@ -155,8 +177,16 @@ def scan_parents_of(
procs: list[tuple[HostName, HostAddress | None, str | subprocess.Popen]] = []
for host in hosts:
console.verbose("%s " % host)
+ if scan_config[host].ip_stack_config is IPStackConfig.NO_IP:
+ procs.append((host, None, "ERROR: Configured to be a No-IP host"))
+ continue
+
try:
- ip = config.lookup_ip_address(config_cache, host, family=socket.AF_INET)
+ ip = lookup_ip_address(
+ host,
+ # [IPv6] -- what about it?
+ family=socket.AddressFamily.AF_INET,
+ )
if ip is None:
raise RuntimeError()
command = [
@@ -170,7 +200,7 @@ def scan_parents_of(
"-n",
ip,
]
- console.vverbose("Running '%s'\n" % subprocess.list2cmdline(command))
+ console.debug("Running '%s'\n" % subprocess.list2cmdline(command))
procs.append(
(
@@ -197,7 +227,7 @@ def dot(color: str, dot: str = "o") -> None:
# Now all run and we begin to read the answers. For each host
# we add a triple to gateways: the gateway, a scan state and a diagnostic output
- gateways: list[Gateway] = []
+ gateways: list[GatewayResult] = []
for host, ip, proc_or_error in procs:
if isinstance(proc_or_error, str):
lines = [proc_or_error]
@@ -211,15 +241,17 @@ def dot(color: str, dot: str = "o") -> None:
if exitstatus:
dot(tty.red, "*")
gateways.append(
- (None, "failed", 0, "Traceroute failed with exit code %d" % (exitstatus & 255))
+ GatewayResult(
+ None, "failed", 0, "Traceroute failed with exit code %d" % (exitstatus & 255)
+ )
)
continue
if len(lines) == 1 and lines[0].startswith("ERROR:"):
message = lines[0][6:].strip()
- console.verbose("%s: %s\n", host, message, stream=sys.stderr)
+ console.verbose(f"{host}: {message}\n", stream=sys.stderr)
dot(tty.red, "D")
- gateways.append((None, "dnserror", 0, message))
+ gateways.append(GatewayResult(None, "dnserror", 0, message))
continue
if len(lines) == 0:
@@ -234,7 +266,7 @@ def dot(color: str, dot: str = "o") -> None:
if not silent:
console.error("{}: {}\n".format(host, " ".join(lines)))
gateways.append(
- (
+ GatewayResult(
None,
"garbled",
0,
@@ -273,7 +305,7 @@ def dot(color: str, dot: str = "o") -> None:
if len(routes) == 0:
error = "incomplete output from traceroute. No routes found."
console.error(f"{host}: {error}\n")
- gateways.append((None, "garbled", 0, error))
+ gateways.append(GatewayResult(None, "garbled", 0, error))
dot(tty.red)
continue
@@ -283,13 +315,17 @@ def dot(color: str, dot: str = "o") -> None:
# this in monitoring_host.
if len(routes) == 1:
if ip == nagios_ip:
- gateways.append((None, "root", 0, "")) # We are the root-monitoring host
+ gateways.append(
+ GatewayResult(None, "root", 0, "")
+ ) # We are the root-monitoring host
dot(tty.white, "N")
elif monitoring_host and nagios_ip:
- gateways.append(((monitoring_host, nagios_ip, None), "direct", 0, ""))
+ gateways.append(
+ GatewayResult(Gateway(monitoring_host, nagios_ip, None), "direct", 0, "")
+ )
dot(tty.cyan, "L")
else:
- gateways.append((None, "direct", 0, ""))
+ gateways.append(GatewayResult(None, "direct", 0, ""))
continue
# Try far most route which is not identical with host itself
@@ -303,7 +339,7 @@ def dot(color: str, dot: str = "o") -> None:
# gateway can be monitored via the standard host check
if ping_probes:
if not gateway_reachable_via_ping(r, ping_probes):
- console.verbose("(not using %s, not reachable)\n", r, stream=sys.stderr)
+ console.verbose(f"(not using {r}, not reachable)\n", stream=sys.stderr)
skipped_gateways += 1
continue
this_route = r
@@ -312,21 +348,25 @@ def dot(color: str, dot: str = "o") -> None:
error = "No usable routing information"
if not silent:
console.error(f"{host}: {error}\n")
- gateways.append((None, "notfound", 0, error))
+ gateways.append(GatewayResult(None, "notfound", 0, error))
dot(tty.blue)
continue
# TTLs already have been filtered out)
gateway_ip = this_route
- gateway = _ip_to_hostname(config_cache, hosts_config, this_route)
+ gateway = _ip_to_hostname(
+ scan_config, hosts_config, this_route, lookup_ip_address=lookup_ip_address
+ )
if gateway:
- console.verbose("%s(%s) ", gateway, gateway_ip)
+ console.verbose(f"{gateway}({gateway_ip}) ")
else:
- console.verbose("%s ", gateway_ip)
+ console.verbose(f"{gateway_ip} ")
# Try to find DNS name of host via reverse DNS lookup
dns_name = _ip_to_dnsname(gateway_ip)
- gateways.append(((gateway, gateway_ip, dns_name), "gateway", skipped_gateways, ""))
+ gateways.append(
+ GatewayResult(Gateway(gateway, gateway_ip, dns_name), "gateway", skipped_gateways, "")
+ )
dot(tty.green, "G")
return gateways
@@ -344,19 +384,28 @@ def gateway_reachable_via_ping(ip: HostAddress, probes: int) -> bool:
def _ip_to_hostname(
- config_cache: ConfigCache, hosts_config: Hosts, ip: HostAddress | None
+ scan_config: Mapping[HostName, ScanConfig],
+ hosts_config: Hosts,
+ ip: HostAddress | None,
+ lookup_ip_address: _IpAddressLookup,
) -> HostName | None:
"""Find hostname belonging to an ip address."""
absent = "ip_to_hostname" not in cache_manager
cache = cache_manager.obtain_cache("ip_to_hostname")
if absent:
- _fill_ip_to_hostname_cache(cache, config_cache, hosts_config)
+ _fill_ip_to_hostname_cache(
+ cache, scan_config, hosts_config, lookup_ip_address=lookup_ip_address
+ )
return cache.get(ip)
def _fill_ip_to_hostname_cache(
- cache: DictCache, config_cache: ConfigCache, hosts_config: Hosts
+ cache: DictCache,
+ scan_config: Mapping[HostName, ScanConfig],
+ hosts_config: Hosts,
+ *,
+ lookup_ip_address: _IpAddressLookup,
) -> None:
"""We must not use reverse DNS but the Checkmk mechanisms, since we do not
want to find the DNS name but the name of a matching host from all_hosts"""
@@ -364,11 +413,13 @@ def _fill_ip_to_hostname_cache(
# inconsistent with do_scan_parents where a list of hosts could be passed as an argument
hn
for hn in hosts_config.hosts
- if config_cache.is_active(hn) and config_cache.is_online(hn)
+ if scan_config[hn].active and scan_config[hn].online
}:
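+        # Hosts configured without an IP stack cannot be looked up at all.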
+ if scan_config[host].ip_stack_config is IPStackConfig.NO_IP:
+ continue
try:
- cache[config.lookup_ip_address(config_cache, host, family=socket.AF_INET)] = host
- except Exception:
+ cache[lookup_ip_address(host, family=socket.AddressFamily.AF_INET)] = host
+ except MKIPAddressLookupError:
pass
diff --git a/cmk/base/plugins/__init__.py b/cmk/base/plugins/__init__.py
index 67165df1c35..c0d5ce1ee83 100644
--- a/cmk/base/plugins/__init__.py
+++ b/cmk/base/plugins/__init__.py
@@ -12,6 +12,6 @@
# This folder is part of a namespace package, that can be shadowed/extended
# using the local/ hierarchy.
#
-# Do not change the following line, is is picked up by the build process:
+# Do not change the following line, it is picked up by the build process:
# check_mk.make: do-not-deploy
#
diff --git a/cmk/base/plugins/agent_based/__init__.py b/cmk/base/plugins/agent_based/__init__.py
index 67165df1c35..c0d5ce1ee83 100644
--- a/cmk/base/plugins/agent_based/__init__.py
+++ b/cmk/base/plugins/agent_based/__init__.py
@@ -12,6 +12,6 @@
# This folder is part of a namespace package, that can be shadowed/extended
# using the local/ hierarchy.
#
-# Do not change the following line, is is picked up by the build process:
+# Do not change the following line, it is picked up by the build process:
# check_mk.make: do-not-deploy
#
diff --git a/cmk/base/plugins/agent_based/alertmanager.py b/cmk/base/plugins/agent_based/alertmanager.py
index a14f7535311..fd02fa7ec12 100644
--- a/cmk/base/plugins/agent_based/alertmanager.py
+++ b/cmk/base/plugins/agent_based/alertmanager.py
@@ -6,9 +6,7 @@
import json
from enum import Enum
-from typing import NamedTuple
-
-from typing_extensions import TypedDict
+from typing import NamedTuple, TypedDict
from .agent_based_api.v1 import register, Result, Service, State, type_defs
from .agent_based_api.v1.type_defs import CheckResult, DiscoveryResult
diff --git a/cmk/base/plugins/agent_based/apc_inrow_system_events.py b/cmk/base/plugins/agent_based/apc_inrow_system_events.py
index 11eb09d6468..dd01e7a13e5 100644
--- a/cmk/base/plugins/agent_based/apc_inrow_system_events.py
+++ b/cmk/base/plugins/agent_based/apc_inrow_system_events.py
@@ -4,8 +4,7 @@
# conditions defined in the file COPYING, which is part of this source code package.
from collections.abc import Mapping
-
-from typing_extensions import TypedDict
+from typing import TypedDict
from cmk.plugins.lib.apc import DETECT
diff --git a/cmk/base/plugins/agent_based/apc_netbotz_other_sensors.py b/cmk/base/plugins/agent_based/apc_netbotz_other_sensors.py
new file mode 100644
index 00000000000..f36502ab281
--- /dev/null
+++ b/cmk/base/plugins/agent_based/apc_netbotz_other_sensors.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+# Max. eight sensors
+# .1.3.6.1.4.1.5528.100.4.2.10.1.4.399845582 Wasserstand_FG1
+# .1.3.6.1.4.1.5528.100.4.2.10.1.4.3502248167 Ethernet Link Status
+# .1.3.6.1.4.1.5528.100.4.2.10.1.4.3823829717 A-Link Bus Power
+# .1.3.6.1.4.1.5528.100.4.2.10.1.3.399845582 0
+# .1.3.6.1.4.1.5528.100.4.2.10.1.3.3502248167 0
+# .1.3.6.1.4.1.5528.100.4.2.10.1.3.3823829717 0
+# .1.3.6.1.4.1.5528.100.4.2.10.1.7.399845582 No Leak
+# .1.3.6.1.4.1.5528.100.4.2.10.1.7.3502248167 Up
+# .1.3.6.1.4.1.5528.100.4.2.10.1.7.3823829717 OK
+
+from collections.abc import Sequence
+from dataclasses import dataclass
+
+from .agent_based_api.v1 import register, Result, Service, SNMPTree, startswith, State
+from .agent_based_api.v1.type_defs import CheckResult, DiscoveryResult, StringTable
+
+
+@dataclass(frozen=True)
+class Sensor:
+ label: str
+ error_state: str
+ state_readable: str
+
+
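+# Each row carries the sensor label, its error status ("0" meaning no error)
+# and its human readable value string.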
+def parse_apc_netbotz_other_sensors(string_table: StringTable) -> Sequence[Sensor]:
+ return [
+ Sensor(label=label, error_state=error_state, state_readable=state_readable)
+ for label, error_state, state_readable in string_table
+ ]
+
+
+register.snmp_section(
+ name="apc_netbotz_v2_other_sensors",
+ parse_function=parse_apc_netbotz_other_sensors,
+ parsed_section_name="apc_netbotz_other_sensors",
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.5528.100.4.2.10.1",
+ oids=[
+ "4", # NETBOTZV2-MIB::otherNumericSensorLabel
+ "3", # NETBOTZV2-MIB::otherNumericSensorErrorStatus
+ "7", # NETBOTZV2-MIB::otherNumericSensorValueStr
+ ],
+ ),
+ detect=startswith(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.5528.100.20.10"),
+)
+
+register.snmp_section(
+ name="apc_netbotz_50_other_sensors",
+ parse_function=parse_apc_netbotz_other_sensors,
+ parsed_section_name="apc_netbotz_other_sensors",
+ fetch=SNMPTree(
+ base=".1.3.6.1.4.1.52674.500.4.2.10.1",
+ oids=[
+ "4", # NETBOTZ50-MIB::otherNumericSensorLabel
+ "3", # NETBOTZ50-MIB::otherNumericSensorErrorStatus
+ "7", # NETBOTZ50-MIB::otherNumericSensorValueStr
+ ],
+ ),
+ detect=startswith(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.52674.500"),
+)
+
+
+# MIB: The sensor reading shown as a string (or empty string
+# if it is not plugged into a port).
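+# A single summary service is discovered as soon as at least one sensor
+# reports a non-empty value string.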
+def discover_apc_netbotz_other_sensors(section: Sequence[Sensor]) -> DiscoveryResult:
+ for sensor in section:
+ if sensor.state_readable != "":
+ yield Service()
+ return
+
+
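+# Summarize all plugged-in sensors: sensors with error status "0" are only
+# counted, any other status is reported individually as CRIT.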
+def check_apc_netbotz_other_sensors(section: Sequence[Sensor]) -> CheckResult:
+ count_ok_sensors = 0
+ for sensor in section:
+        if sensor.state_readable != "":
+            if sensor.error_state == "0":
+                count_ok_sensors += 1
+            else:
+                yield Result(
+                    state=State.CRIT,
+                    summary=f"{sensor.label}: {sensor.state_readable.lower()}",
+                )
+
+ if count_ok_sensors > 0:
+ yield Result(state=State.OK, summary=f"{count_ok_sensors} sensors are OK")
+
+
+register.check_plugin(
+ name="apc_netbotz_other_sensors",
+ service_name="Numeric sensors summary",
+ discovery_function=discover_apc_netbotz_other_sensors,
+ check_function=check_apc_netbotz_other_sensors,
+)
diff --git a/cmk/base/plugins/agent_based/apc_netbotz_sensors.py b/cmk/base/plugins/agent_based/apc_netbotz_sensors.py
new file mode 100644
index 00000000000..ccebf03f28d
--- /dev/null
+++ b/cmk/base/plugins/agent_based/apc_netbotz_sensors.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python3
+# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
+# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
+# conditions defined in the file COPYING, which is part of this source code package.
+
+from collections.abc import Callable, Mapping
+from dataclasses import dataclass
+
+from cmk.plugins.lib.humidity import check_humidity
+from cmk.plugins.lib.temperature import check_temperature, TempParamType
+
+from .agent_based_api.v1 import (
+ get_value_store,
+ register,
+ Result,
+ Service,
+ SNMPTree,
+ startswith,
+ State,
+)
+from .agent_based_api.v1.type_defs import CheckResult, DiscoveryResult, StringTable
+
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.636159851 nbAlinkEnc_0_4_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.882181375 nbAlinkEnc_2_1_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.1619732064 nbAlinkEnc_0_2_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.1665932156 nbAlinkEnc_1_4_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.1751899818 nbAlinkEnc_2_2_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.1857547767 nbAlinkEnc_1_5_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.2370211927 nbAlinkEnc_1_6_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.2618588815 nbAlinkEnc_2_3_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.2628357572 nbAlinkEnc_0_1_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3031356659 nbAlinkEnc_0_5_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3056253200 nbAlinkEnc_0_6_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3103062985 nbAlinkEnc_2_4_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3328914949 nbAlinkEnc_1_3_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.1.3406802758 nbAlinkEnc_0_3_TEMP
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.636159851 252
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.882181375 222
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.1619732064 222
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.1665932156 216
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.1751899818 245
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.1857547767 234
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.2370211927 240
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.2618588815 220
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.2628357572 229
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3031356659 0
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3056253200 0
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3103062985 215
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3328914949 234
+# .1.3.6.1.4.1.5528.100.4.1.1.1.2.3406802758 238
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.636159851 25.200000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.882181375 22.200000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.1619732064 22.200000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.1665932156 21.600000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.1751899818 24.500000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.1857547767 23.400000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.2370211927 24.000000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.2618588815 22.000000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.2628357572 22.900000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3031356659
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3056253200
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3103062985 21.500000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3328914949 23.400000
+# .1.3.6.1.4.1.5528.100.4.1.1.1.7.3406802758 23.800000
+
+# .1.3.6.1.4.1.5528.100.4.1.2.1.1.421607638 nbAlinkEnc_1_5_HUMI
+# .1.3.6.1.4.1.5528.100.4.1.2.1.1.581338442 nbAlinkEnc_1_3_HUMI
+# .1.3.6.1.4.1.5528.100.4.1.2.1.1.1121716336 nbAlinkEnc_0_6_HUMI
+# .1.3.6.1.4.1.5528.100.4.1.2.1.1.3273299739 nbAlinkEnc_0_3_HUMI
+# .1.3.6.1.4.1.5528.100.4.1.2.1.1.4181308384 nbAlinkEnc_0_5_HUMI
+# .1.3.6.1.4.1.5528.100.4.1.2.1.2.421607638 370
+# .1.3.6.1.4.1.5528.100.4.1.2.1.2.581338442 320
+# .1.3.6.1.4.1.5528.100.4.1.2.1.2.1121716336 0
+# .1.3.6.1.4.1.5528.100.4.1.2.1.2.3273299739 320
+# .1.3.6.1.4.1.5528.100.4.1.2.1.2.4181308384 0
+# .1.3.6.1.4.1.5528.100.4.1.2.1.7.421607638 37.000000
+# .1.3.6.1.4.1.5528.100.4.1.2.1.7.581338442 32.000000
+# .1.3.6.1.4.1.5528.100.4.1.2.1.7.1121716336
+# .1.3.6.1.4.1.5528.100.4.1.2.1.7.3273299739 32.000000
+# .1.3.6.1.4.1.5528.100.4.1.2.1.7.4181308384
+
+# .1.3.6.1.4.1.5528.100.4.1.3.1.1.1000015730 nbAlinkEnc_0_5_DWPT
+# .1.3.6.1.4.1.5528.100.4.1.3.1.1.1490079962 nbAlinkEnc_0_3_DWPT
+# .1.3.6.1.4.1.5528.100.4.1.3.1.1.2228353183 nbAlinkEnc_0_6_DWPT
+# .1.3.6.1.4.1.5528.100.4.1.3.1.1.2428087247 nbAlinkEnc_1_3_DWPT
+# .1.3.6.1.4.1.5528.100.4.1.3.1.1.3329736831 nbAlinkEnc_1_5_DWPT
+# .1.3.6.1.4.1.5528.100.4.1.3.1.2.1000015730 0
+# .1.3.6.1.4.1.5528.100.4.1.3.1.2.1490079962 61
+# .1.3.6.1.4.1.5528.100.4.1.3.1.2.2228353183 0
+# .1.3.6.1.4.1.5528.100.4.1.3.1.2.2428087247 57
+# .1.3.6.1.4.1.5528.100.4.1.3.1.2.3329736831 78
+# .1.3.6.1.4.1.5528.100.4.1.3.1.7.1000015730
+# .1.3.6.1.4.1.5528.100.4.1.3.1.7.1490079962 6.100000
+# .1.3.6.1.4.1.5528.100.4.1.3.1.7.2228353183
+# .1.3.6.1.4.1.5528.100.4.1.3.1.7.2428087247 5.700000
+# .1.3.6.1.4.1.5528.100.4.1.3.1.7.3329736831 7.800000
+
+
+@dataclass(frozen=True)
+class SensorData:
+ reading: float
+ label: str
+
+
+Section = Mapping[str, Mapping[str, SensorData]]
+
+
+def parse_apc_netbotz_sensors(
+ string_table: list[StringTable], parse_reading: Callable[[str], float]
+) -> Section:
+ parsed: dict[str, dict[str, SensorData]] = {}
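+    # string_table contains one block per fetched SNMPTree, in fetch order:
+    # temperature, humidity, dew point. An empty value string means the
+    # sensor is not plugged in, so it is skipped.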
+ for item_type, block in zip(("temp", "humidity", "dewpoint"), string_table):
+ for item_name, reading_str, label, plugged_in_state in block:
+ if not plugged_in_state:
+ continue
+ parsed.setdefault(item_type, {}).setdefault(
+ item_name, SensorData(reading=parse_reading(reading_str), label=label)
+ )
+ return parsed
+
+
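+# Discovery and the temperature based check are shared across sensor types;
+# sensor_type selects the respective sub-mapping of the parsed section.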
+def discover_apc_netbotz_sensors(section: Section, sensor_type: str) -> DiscoveryResult:
+ for item in section.get(sensor_type, []):
+ yield Service(item=item)
+
+
+def check_apc_netbotz_sensors(
+ item: str, params: TempParamType, section: Section, sensor_type: str
+) -> CheckResult:
+ if item in section.get(sensor_type, []):
+ data = section[sensor_type][item]
+ yield Result(state=State.OK, summary=f"[{data.label}]")
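+        # check_temperature keeps per-item state (e.g. for trend computation),
+        # hence the unique name and the value store.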
+ yield from check_temperature(
+ data.reading,
+ params,
+ unique_name=f"apc_netbotz_sensors_{sensor_type}_{item}",
+ value_store=get_value_store(),
+ )
+
+
+# APC Netbotz v2 sensors deliver readings in tenths of a degree or tenths of a percent
+def parse_apc_netbotz_v2_sensors(string_table: list[StringTable]) -> Section:
+ def parse_reading(reading: str) -> float:
+ return float(reading) / 10.0
+
+ return parse_apc_netbotz_sensors(string_table, parse_reading)
+
+
+register.snmp_section(
+ name="apc_netbotz_v2_sensors",
+ parse_function=parse_apc_netbotz_v2_sensors,
+ parsed_section_name="apc_netbotz_sensors",
+ fetch=[
+ SNMPTree(
+ base=".1.3.6.1.4.1.5528.100.4.1.1.1",
+ oids=["1", "2", "4", "7"],
+ ),
+ SNMPTree(
+ base=".1.3.6.1.4.1.5528.100.4.1.2.1",
+ oids=["1", "2", "4", "7"],
+ ),
+ SNMPTree(
+ base=".1.3.6.1.4.1.5528.100.4.1.3.1",
+ oids=["1", "2", "4", "7"],
+ ),
+ ],
+ detect=startswith(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.5528.100.20.10"),
+)
+
+
+# APC Netbotz 50 sensors deliver readings in degrees or percent
+def parse_apc_netbotz_50_sensors(string_table: list[StringTable]) -> Section:
+ return parse_apc_netbotz_sensors(string_table, float)
+
+
+register.snmp_section(
+ name="apc_netbotz_50_sensors",
+ parse_function=parse_apc_netbotz_50_sensors,
+ parsed_section_name="apc_netbotz_sensors",
+ fetch=[
+ SNMPTree(
+ base=".1.3.6.1.4.1.52674.500.4.1.1.1",
+ oids=["1", "2", "4", "7"],
+ ),
+ SNMPTree(
+ base=".1.3.6.1.4.1.52674.500.4.1.2.1",
+ oids=["1", "2", "4", "7"],
+ ),
+ SNMPTree(
+ base=".1.3.6.1.4.1.52674.500.4.1.3.1",
+ oids=["1", "2", "4", "7"],
+ ),
+ ],
+ detect=startswith(".1.3.6.1.2.1.1.2.0", ".1.3.6.1.4.1.52674.500"),
+)
+
+# .--temperature---------------------------------------------------------.
+# | _ _ |
+# | | |_ ___ _ __ ___ _ __ ___ _ __ __ _| |_ _ _ _ __ ___ |
+# | | __/ _ \ '_ ` _ \| '_ \ / _ \ '__/ _` | __| | | | '__/ _ \ |
+# | | || __/ | | | | | |_) | __/ | | (_| | |_| |_| | | | __/ |
+# | \__\___|_| |_| |_| .__/ \___|_| \__,_|\__|\__,_|_| \___| |
+# | |_| |
+# '----------------------------------------------------------------------'
+
+
+def discover_apc_netbotz_sensors_temp(section: Section) -> DiscoveryResult:
+ yield from discover_apc_netbotz_sensors(section, "temp")
+
+
+def check_apc_netbotz_sensors_temp(
+ item: str, params: TempParamType, section: Section
+) -> CheckResult:
+ yield from check_apc_netbotz_sensors(item, params, section, "temp")
+
+
+register.check_plugin(
+ name="apc_netbotz_sensors",
+ sections=["apc_netbotz_sensors"],
+ service_name="Temperature %s",
+ discovery_function=discover_apc_netbotz_sensors_temp,
+ check_function=check_apc_netbotz_sensors_temp,
+ check_ruleset_name="temperature",
+ check_default_parameters={ # suggested by customer
+ "levels": (30.0, 35.0),
+ "levels_lower": (25.0, 20.0),
+ },
+)
+
+
+# .
+# .--dewpoint------------------------------------------------------------.
+# | _ _ _ |
+# | __| | _____ ___ __ ___ (_)_ __ | |_ |
+# | / _` |/ _ \ \ /\ / / '_ \ / _ \| | '_ \| __| |
+# | | (_| | __/\ V V /| |_) | (_) | | | | | |_ |
+# | \__,_|\___| \_/\_/ | .__/ \___/|_|_| |_|\__| |
+# | |_| |
+# '----------------------------------------------------------------------'
+
+
+def discover_apc_netbotz_sensors_dewpoint(section: Section) -> DiscoveryResult:
+ yield from discover_apc_netbotz_sensors(section, "dewpoint")
+
+
+def check_apc_netbotz_sensors_dewpoint(
+ item: str, params: TempParamType, section: Section
+) -> CheckResult:
+ yield from check_apc_netbotz_sensors(item, params, section, "dewpoint")
+
+
+register.check_plugin(
+ name="apc_netbotz_sensors_dewpoint",
+ sections=["apc_netbotz_sensors"],
+ service_name="Dew point %s",
+ discovery_function=discover_apc_netbotz_sensors_dewpoint,
+ check_function=check_apc_netbotz_sensors_dewpoint,
+ check_ruleset_name="temperature",
+ check_default_parameters={ # suggested by customer
+ "levels": (18.0, 25.0),
+ "levels_lower": (-4.0, -6.0),
+ },
+)
+
+# .
+# .--humidity------------------------------------------------------------.
+# | _ _ _ _ _ |
+# | | |__ _ _ _ __ ___ (_) __| (_) |_ _ _ |
+# | | '_ \| | | | '_ ` _ \| |/ _` | | __| | | | |
+# | | | | | |_| | | | | | | | (_| | | |_| |_| | |
+# | |_| |_|\__,_|_| |_| |_|_|\__,_|_|\__|\__, | |
+# | |___/ |
+# '----------------------------------------------------------------------'
+
+
+def discover_apc_netbotz_sensors_humidity(section: Section) -> DiscoveryResult:
+ yield from discover_apc_netbotz_sensors(section, "humidity")
+
+
+def check_apc_netbotz_sensors_humidity(
+ item: str, params: Mapping[str, object], section: Section
+) -> CheckResult:
+ if item in section.get("humidity", []):
+ data = section["humidity"][item]
+ yield Result(state=State.OK, summary=f"[{data.label}]")
+ yield from check_humidity(data.reading, params)
+
+
+register.check_plugin(
+ name="apc_netbotz_sensors_humidity",
+ sections=["apc_netbotz_sensors"],
+ service_name="Humidity %s",
+ discovery_function=discover_apc_netbotz_sensors_humidity,
+ check_function=check_apc_netbotz_sensors_humidity,
+ check_ruleset_name="humidity",
+ check_default_parameters={ # suggested by customer
+ "levels": (60.0, 65.0),
+ "levels_lower": (35.0, 30.0),
+ },
+)
diff --git a/cmk/base/plugins/agent_based/arbor_peakflow_sp.py b/cmk/base/plugins/agent_based/arbor_peakflow_sp.py
deleted file mode 100644
index a61ab81bff8..00000000000
--- a/cmk/base/plugins/agent_based/arbor_peakflow_sp.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.plugins.lib.arbor import DETECT_PEAKFLOW_SP, parse_arbor_cpu_load
-
-from .agent_based_api.v1 import register, SNMPTree
-
-register.snmp_section(
- name="arbor_peakflow_sp_cpu_load",
- parsed_section_name="cpu",
- parse_function=parse_arbor_cpu_load,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.9694.1.4.2.1",
- oids=[
- "1.0", # deviceCpuLoadAvg1min
- "2.0", # deviceCpuLoadAvg5min
- "3.0", # deviceCpuLoadAvg15min
- ],
- ),
- detect=DETECT_PEAKFLOW_SP,
-)
diff --git a/cmk/base/plugins/agent_based/arbor_peakflow_tms.py b/cmk/base/plugins/agent_based/arbor_peakflow_tms.py
deleted file mode 100644
index 6732831feaa..00000000000
--- a/cmk/base/plugins/agent_based/arbor_peakflow_tms.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.plugins.lib.arbor import DETECT_PEAKFLOW_TMS, parse_arbor_cpu_load
-
-from .agent_based_api.v1 import register, SNMPTree
-
-register.snmp_section(
- name="arbor_peakflow_tms_cpu_load",
- parsed_section_name="cpu",
- parse_function=parse_arbor_cpu_load,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.9694.1.5.2",
- oids=[
- "3.0", # deviceCpuLoadAvg1min
- "4.0", # deviceCpuLoadAvg5min
- "5.0", # deviceCpuLoadAvg15min
- ],
- ),
- detect=DETECT_PEAKFLOW_TMS,
-)
diff --git a/cmk/base/plugins/agent_based/arbor_pravail.py b/cmk/base/plugins/agent_based/arbor_pravail.py
deleted file mode 100644
index fb32b768aef..00000000000
--- a/cmk/base/plugins/agent_based/arbor_pravail.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019 Checkmk GmbH - License: GNU General Public License v2
-# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
-# conditions defined in the file COPYING, which is part of this source code package.
-
-from cmk.plugins.lib.arbor import DETECT_PRAVAIL, parse_arbor_cpu_load
-
-from .agent_based_api.v1 import register, SNMPTree
-
-register.snmp_section(
- name="arbor_pravail_cpu_load",
- parsed_section_name="cpu",
- parse_function=parse_arbor_cpu_load,
- fetch=SNMPTree(
- base=".1.3.6.1.4.1.9694.1.6.2",
- oids=[
- "3.0", # deviceCpuLoadAvg1min
- "4.0", # deviceCpuLoadAvg5min
- "5.0", # deviceCpuLoadAvg15min
- ],
- ),
- detect=DETECT_PRAVAIL,
-)
diff --git a/cmk/base/plugins/agent_based/aruba_psu.py b/cmk/base/plugins/agent_based/aruba_psu.py
index 04cf4ffea16..13d3188b3fa 100644
--- a/cmk/base/plugins/agent_based/aruba_psu.py
+++ b/cmk/base/plugins/agent_based/aruba_psu.py
@@ -5,9 +5,7 @@
from collections.abc import Mapping
from enum import Enum
-from typing import NamedTuple
-
-from typing_extensions import TypedDict
+from typing import NamedTuple, TypedDict
from cmk.plugins.lib.aruba import DETECT_2930M
from cmk.plugins.lib.temperature import check_temperature, TempParamDict, TempParamType
diff --git a/cmk/base/plugins/agent_based/bgp_peer.py b/cmk/base/plugins/agent_based/bgp_peer.py
index 946e97c323b..84eba9a2884 100644
--- a/cmk/base/plugins/agent_based/bgp_peer.py
+++ b/cmk/base/plugins/agent_based/bgp_peer.py
@@ -94,9 +94,7 @@
"""
from collections.abc import Mapping, Sequence
-from typing import NamedTuple
-
-from typing_extensions import TypedDict
+from typing import NamedTuple, TypedDict
from cmk.plugins.lib.ip_format import clean_v4_address, clean_v6_address
@@ -149,6 +147,7 @@ class BGPPeerParams(TypedDict):
DEFAULT_PEER_STATE_MAPPING: PeerStateMapping = {
"idle": 0,
+ "connect": 0,
"active": 0,
"opensent": 0,
"openconfirm": 0,
@@ -190,9 +189,11 @@ def _create_item_data(entry: list[str | list[int]]) -> BGPData:
}.get(entry[5] if isinstance(entry[5], str) else "0", "unknown(%r)" % entry[5]),
last_received_error=entry[6] if isinstance(entry[6], str) else "unknown(%r)" % entry[6],
established_time=int(entry[7]) if isinstance(entry[7], str) else 0,
- description=(entry[-2] if isinstance(entry[-2], str) else "unknown(%r)" % entry[-2])
- if len(entry) > len(BGPData.__annotations__) - 1
- else "n/a",
+ description=(
+ (entry[-2] if isinstance(entry[-2], str) else "unknown(%r)" % entry[-2])
+ if len(entry) > len(BGPData.__annotations__) - 1
+ else "n/a"
+ ),
bgp_version=4,
)
diff --git a/cmk/base/plugins/agent_based/bi_aggregation.py b/cmk/base/plugins/agent_based/bi_aggregation.py
index 9b8966cf12b..bbad415915b 100644
--- a/cmk/base/plugins/agent_based/bi_aggregation.py
+++ b/cmk/base/plugins/agent_based/bi_aggregation.py
@@ -3,8 +3,9 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
import ast
-from collections.abc import Mapping
-from typing import Any
+from collections.abc import Iterator, Mapping, Sequence
+from dataclasses import dataclass, field
+from typing import Any, TypedDict
from cmk.checkengine.checkresults import state_markers # pylint: disable=cmk-module-layer-violation
@@ -14,6 +15,37 @@
Section = Mapping[str, Any]
+class ErrorInfo(TypedDict, total=False):
+ state: int
+ output: str
+
+
+class CustomInfo(TypedDict, total=False):
+ output: str
+
+
+class AggrInfos(TypedDict, total=False):
+ error: ErrorInfo
+ custom: CustomInfo
+
+
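+# Node infos as produced by the BI agent: the node's own infos paired with
+# the infos of its nested children.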
+Infos = tuple[AggrInfos, Sequence["Infos"]]
+
+
+@dataclass
+class Aggregation:
+ error_state: int | None = None
+ error_output: str | None = None
+ custom_output: str | None = None
+ children: list["Aggregation"] = field(default_factory=list)
+
+
+@dataclass
+class AggregationError:
+ output: str
+ affects_state: bool
+
+
def parse_bi_aggregation(string_table: StringTable) -> Section:
parsed = {}
for line in string_table:
@@ -32,29 +64,41 @@ def discover_bi_aggregation(section: Section) -> DiscoveryResult:
yield Service(item=aggr_name)
-def render_bi_infos(infos) -> None | list[str]: # type: ignore[no-untyped-def]
- if not infos:
- return None
-
+def get_aggregations(infos: Infos) -> Aggregation:
own_infos, nested_infos = infos
- lines = []
- if "error" in own_infos:
- lines.append(
- "{} {}".format(state_markers[own_infos["error"]["state"]], own_infos["error"]["output"])
+
+ return Aggregation(
+ error_state=own_infos.get("error", {}).get("state"),
+ error_output=own_infos.get("error", {}).get("output"),
+ custom_output=own_infos.get("custom", {}).get("output"),
+ children=[get_aggregations(nested_info) for nested_info in nested_infos],
+ )
+
+
+def get_aggregation_errors(
+ aggr: Aggregation, parent_affects_state: bool
+) -> Iterator[AggregationError]:
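+    # An error can affect the overall state only if this node carries an error
+    # state itself and its parent's error already affects the state.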
+ affects_state = aggr.error_state is not None and parent_affects_state
+
+ if aggr.error_state is not None:
+ yield AggregationError(
+ output=f"{state_markers[aggr.error_state]} {aggr.error_output}",
+ affects_state=affects_state,
)
- if "custom" in own_infos:
- lines.append(own_infos["custom"]["output"])
- for nested_info in nested_infos:
- nested_lines = render_bi_infos(nested_info)
- assert nested_lines is not None
- for idx, line in enumerate(nested_lines):
- if idx == 0:
- lines.append("+-- %s" % line)
- else:
- lines.append("| %s" % line)
+ if aggr.custom_output is not None:
+ yield AggregationError(output=aggr.custom_output, affects_state=affects_state)
- return lines
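+    # Render nested errors as a tree: the first line of each child gets a
+    # "+--" branch marker, its remaining lines are indented with a "|" marker.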
+ for child in aggr.children:
+ if errors := list(get_aggregation_errors(child, affects_state)):
+ yield AggregationError(
+ output=f"+-- {errors[0].output}", affects_state=errors[0].affects_state
+ )
+
+ for error in errors[1:]:
+ yield AggregationError(
+ output=f"| {error.output}", affects_state=error.affects_state
+ )
def check_bi_aggregation(item: str, section: Section) -> CheckResult:
@@ -62,9 +106,17 @@ def check_bi_aggregation(item: str, section: Section) -> CheckResult:
return
overall_state = bi_data["state_computed_by_agent"]
+ # The state of an aggregation may be PENDING (-1). Map it to OK.
+ bi_state_map = {
+ 0: "Ok",
+ 1: "Warning",
+ 2: "Critical",
+ 3: "Unknown",
+ -1: "Pending",
+ }
yield Result(
- state=State(overall_state),
- summary="Aggregation state: %s" % ["Ok", "Warning", "Critical", "Unknown"][overall_state],
+ state=State(0 if overall_state == -1 else overall_state),
+ summary="Aggregation state: %s" % bi_state_map[overall_state],
)
yield Result(
@@ -77,10 +129,21 @@ def check_bi_aggregation(item: str, section: Section) -> CheckResult:
)
if bi_data["infos"]:
- infos = ["", "Aggregation Errors"]
- info = render_bi_infos(bi_data["infos"])
- assert info is not None
- infos.extend(info)
+ aggregations = get_aggregations(bi_data["infos"])
+ errors = list(get_aggregation_errors(aggregations, bool(overall_state)))
+
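+        # Split the collected errors into those that contributed to the overall
+        # state and purely informational ones.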
+ errors_affecting_state = [error.output for error in errors if error.affects_state]
+ other_errors = [error.output for error in errors if not error.affects_state]
+
+ infos = []
+ if errors_affecting_state:
+ infos.extend(["", "Aggregation problems affecting the state:"])
+ infos.extend(errors_affecting_state)
+
+ if other_errors:
+ infos.extend(["", "Aggregation problems not affecting the state:"])
+ infos.extend(other_errors)
+
yield Result(state=State.OK, notice="\n".join(infos))
diff --git a/cmk/base/plugins/agent_based/bluecat_dhcp.py b/cmk/base/plugins/agent_based/bluecat_dhcp.py
index fd36087aa07..42d51a0a522 100644
--- a/cmk/base/plugins/agent_based/bluecat_dhcp.py
+++ b/cmk/base/plugins/agent_based/bluecat_dhcp.py
@@ -3,7 +3,6 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
from collections.abc import Mapping
-from typing import Any
from cmk.plugins.lib.bluecat import (
check_bluecat_operational_state,
@@ -43,7 +42,7 @@ def discover_bluecat_dhcp(section: Section) -> type_defs.DiscoveryResult:
def check_bluecat_dhcp(
- params: Mapping[str, Any],
+ params: Mapping[str, object],
section: Section,
) -> type_defs.CheckResult:
yield from check_bluecat_operational_state(
@@ -53,7 +52,7 @@ def check_bluecat_dhcp(
def cluster_check_bluecat_dhcp(
- params: Mapping[str, Any],
+ params: Mapping[str, object],
section: ClusterSection,
) -> type_defs.CheckResult:
yield from cluster_check_bluecat_operational_state(
diff --git a/cmk/base/plugins/agent_based/bluecat_dns.py b/cmk/base/plugins/agent_based/bluecat_dns.py
index 1e981938c11..3b0f596527d 100644
--- a/cmk/base/plugins/agent_based/bluecat_dns.py
+++ b/cmk/base/plugins/agent_based/bluecat_dns.py
@@ -3,7 +3,6 @@
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.
from collections.abc import Mapping
-from typing import Any
from cmk.plugins.lib.bluecat import (
check_bluecat_operational_state,
@@ -39,7 +38,7 @@ def discover_bluecat_dns(section: Section) -> type_defs.DiscoveryResult:
def check_bluecat_dns(
- params: Mapping[str, Any],
+ params: Mapping[str, object],
section: Section,
) -> type_defs.CheckResult:
yield from check_bluecat_operational_state(
@@ -49,7 +48,7 @@ def check_bluecat_dns(
def cluster_check_bluecat_dns(
- params: Mapping[str, Any],
+ params: Mapping[str, object],
section: ClusterSection,
) -> type_defs.CheckResult:
yield from cluster_check_bluecat_operational_state(
diff --git a/cmk/base/plugins/agent_based/bluecoat_sensors.py b/cmk/base/plugins/agent_based/bluecoat_sensors.py
index 598ccc50c5c..8d555564327 100644
--- a/cmk/base/plugins/agent_based/bluecoat_sensors.py
+++ b/cmk/base/plugins/agent_based/bluecoat_sensors.py
@@ -36,8 +36,7 @@ class Sensor:
@dataclasses.dataclass(frozen=True)
-class VoltageSensor(Sensor):
- ...
+class VoltageSensor(Sensor): ...
@dataclasses.dataclass(frozen=True)
diff --git a/cmk/base/plugins/agent_based/bonding.py b/cmk/base/plugins/agent_based/bonding.py
index 607230d6486..91714235148 100644
--- a/cmk/base/plugins/agent_based/bonding.py
+++ b/cmk/base/plugins/agent_based/bonding.py
@@ -4,7 +4,7 @@
# conditions defined in the file COPYING, which is part of this source code package.
from collections.abc import Mapping
-from typing import Any, get_args, Literal
+from typing import Any, TypedDict
from cmk.plugins.lib import bonding
@@ -60,33 +60,39 @@ def _check_ieee_302_3ad_specific(params: Mapping[str, Any], status: bonding.Bond
)
-MODE_OPTION = Literal[
- "balance-rr",
- "active-backup",
- "balance-xor",
- "broadcast",
- "802.3ad",
- "balance-tlb",
- "balance-alb",
-]
-BONDING_MODE_CONFIG = tuple[MODE_OPTION, State]
-
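+# Maps the keys of BondingModeConfig to the substrings used to detect the
+# bonding mode in the status output.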
+mode_map = {
+ "mode_0": "round-robin",
+ "mode_1": "active-backup",
+ "mode_2": "xor",
+ "mode_3": "broadcast",
+ "mode_4": "802.3ad",
+ "mode_5": "transmit",
+ "mode_6": "adaptive",
+}
-def _check_bonding_mode(current_mode: str, config: BONDING_MODE_CONFIG) -> CheckResult:
- expected_mode, state_if_not_expected = config
- for mode in get_args(MODE_OPTION):
- if mode in current_mode.lower():
- current_mode = mode
+class BondingModeConfig(TypedDict):
+ mode_0: int
+ mode_1: int
+ mode_2: int
+ mode_3: int
+ mode_4: int
+ mode_5: int
+ mode_6: int
+
+
+def _check_bonding_mode(current_mode: str, config: BondingModeConfig) -> CheckResult:
+ state = State.OK
+ summary = f"Mode: {current_mode}"
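+    # Look for a known mode in the reported mode string and apply the state
+    # configured for it.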
+ for mode, mode_str in mode_map.items():
+ if mode_str in current_mode.lower():
+ summary = f"Mode: {mode_str}"
+ state = State(config[mode]) # type: ignore[literal-required]
+ if state != State.OK:
+ summary += " (not allowed)"
break
- if expected_mode not in current_mode.lower():
- yield Result(
- state=State(state_if_not_expected),
- summary=f"Mode: {current_mode} (expected mode: {expected_mode})",
- )
- else:
- yield Result(state=State.OK, summary=f"Mode: {current_mode}")
+ yield Result(state=state, summary=summary)
def check_bonding( # pylint: disable=too-many-branches
@@ -113,6 +119,7 @@ def check_bonding( # pylint: disable=too-many-branches
Result(state=