From 7a76059f219710d74d29ce9624215e7f20acbccd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Huss?= Date: Sun, 5 Jan 2025 14:25:38 +0100 Subject: [PATCH] Adding backup feature --- Cargo.lock | 101 +++---- Cargo.toml | 3 +- agent/Dockerfile | 4 +- agent/parent.toml | 2 +- agent/scripts/lib/backup_context.rhai | 247 ++++++++++++++++++ agent/scripts/lib/build_context.rhai | 2 +- agent/scripts/lib/install_from_dir.rhai | 16 +- agent/scripts/lib/wait.rhai | 10 + agent/scripts/packages/build.rhai | 2 + agent/scripts/tenant/backup.rhai | 52 ++++ agent/scripts/tenant/backup_check.rhai | 9 + agent/scripts/tenant/backup_init.rhai | 10 + .../tenant/backup_prepare_postgresql.rhai | 16 ++ .../scripts/tenant/backup_prepare_secret.rhai | 14 + agent/scripts/tenant/backup_prune.rhai | 9 + agent/scripts/tenant/backup_run.rhai | 9 + agent/scripts/tenant/context.rhai | 7 +- agent/scripts/tenant/context_tenant.rhai | 23 ++ agent/scripts/tenant/init_from.rhai | 39 +++ agent/scripts/tenant/install.rhai | 9 + agent/scripts/tenant/maintenance_start.rhai | 5 + agent/scripts/tenant/maintenance_stop.rhai | 5 + agent/scripts/tenant/restore.rhai | 40 +++ agent/scripts/tenant/restore_postgresql.rhai | 20 ++ agent/scripts/tenant/restore_run.rhai | 9 + agent/scripts/tenant/restore_secret.rhai | 24 ++ agent/scripts/tenant/schedule_backup.rhai | 36 +++ agent/src/boxes/mod.rs | 2 +- agent/src/main.rs | 2 +- agent/src/tenant/backup.rs | 30 ++- agent/src/tenant/delete.rs | 17 ++ agent/src/tenant/install.rs | 17 ++ agent/src/tenant/mod.rs | 12 +- agent/src/tenant/reconfigure.rs | 17 ++ agent/src/tenant/restore.rs | 30 ++- agent/templates/backup.yaml.hbs | 37 +++ agent/templates/cronbackup.yaml.hbs | 45 ++++ agent/templates/init_job.yaml.hbs | 39 +++ agent/templates/restore.yaml.hbs | 37 +++ box/vynil/crds/crd.yaml | 30 ++- box/vynil/package.yaml | 2 +- box/vynil/systems/rbac.yaml.hbs | 23 ++ common/src/handlebarshandler.rs | 14 +- common/src/instancesystem.rs | 4 +- common/src/instancetenant.rs | 127 ++++++++- common/src/jukebox.rs | 2 +- common/src/k8sgeneric.rs | 55 +++- common/src/k8sworkload.rs | 1 - common/src/passwordhandler.rs | 2 +- common/src/rhaihandler.rs | 36 ++- deploy/crd/crd.yaml | 30 ++- operator/parent.toml | 2 +- operator/src/manager.rs | 2 +- operator/templates/package.yaml.hbs | 7 + 54 files changed, 1230 insertions(+), 115 deletions(-) create mode 100644 agent/scripts/lib/backup_context.rhai create mode 100644 agent/scripts/tenant/backup.rhai create mode 100644 agent/scripts/tenant/backup_check.rhai create mode 100644 agent/scripts/tenant/backup_init.rhai create mode 100644 agent/scripts/tenant/backup_prepare_postgresql.rhai create mode 100644 agent/scripts/tenant/backup_prepare_secret.rhai create mode 100644 agent/scripts/tenant/backup_prune.rhai create mode 100644 agent/scripts/tenant/backup_run.rhai create mode 100644 agent/scripts/tenant/init_from.rhai create mode 100644 agent/scripts/tenant/maintenance_start.rhai create mode 100644 agent/scripts/tenant/maintenance_stop.rhai create mode 100644 agent/scripts/tenant/restore.rhai create mode 100644 agent/scripts/tenant/restore_postgresql.rhai create mode 100644 agent/scripts/tenant/restore_run.rhai create mode 100644 agent/scripts/tenant/restore_secret.rhai create mode 100644 agent/scripts/tenant/schedule_backup.rhai create mode 100644 agent/templates/backup.yaml.hbs create mode 100644 agent/templates/cronbackup.yaml.hbs create mode 100644 agent/templates/init_job.yaml.hbs create mode 100644 agent/templates/restore.yaml.hbs diff --git 
a/Cargo.lock b/Cargo.lock index 7116a3d..481f270 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -202,7 +202,7 @@ checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "agent" -version = "0.3.2" +version = "0.3.3" dependencies = [ "clap", "common", @@ -326,9 +326,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.94" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" [[package]] name = "argon2" @@ -344,9 +344,9 @@ dependencies = [ [[package]] name = "async-broadcast" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20cd0e2e25ea8e5f7e9df04578dc6cf5c83577fd09b1a46aaf5c85e1c33f2a7e" +checksum = "435a87a52755b8f27fcf321ac4f04b2802e337c8c4872923137471ec39c37532" dependencies = [ "event-listener", "event-listener-strategy", @@ -378,9 +378,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.83" +version = "0.1.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +checksum = "1b1244b10dcd56c92219da4e14caa97e312079e185f04ba3eea25061561dc0a0" dependencies = [ "proc-macro2", "quote", @@ -613,9 +613,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.5" +version = "1.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e" +checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7" dependencies = [ "jobserver", "libc", @@ -707,7 +707,7 @@ checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "common" -version = "0.3.2" +version = "0.3.3" dependencies = [ "actix-web", "argon2", @@ -733,7 +733,7 @@ dependencies = [ "serde_yaml", "sha256", "tar", - "thiserror 2.0.8", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -1297,9 +1297,9 @@ checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "h2" @@ -1341,10 +1341,11 @@ dependencies = [ [[package]] name = "handlebars" -version = "6.2.0" +version = "6.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4ccde012831f9a071a637b0d4e31df31c0f6c525784b35ae76a9ac6bc1e315" +checksum = "3d6b224b95c1e668ac0270325ad563b2eef1469fbbb8959bc7c692c844b813d9" dependencies = [ + "derive_builder", "heck", "log", "num-order", @@ -1353,7 +1354,7 @@ dependencies = [ "rhai", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.9", ] [[package]] @@ -1805,9 +1806,9 @@ dependencies = [ [[package]] name = "impl-more" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aae21c3177a27788957044151cc2800043d127acaa460a47ebb9b84dfa2c6aa0" +checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" [[package]] name = "indexmap" @@ -2360,7 +2361,7 @@ dependencies = [ "serde_json", "strum", "strum_macros", - "thiserror 2.0.8", + "thiserror 2.0.9", ] [[package]] @@ -2505,7 
+2506,7 @@ dependencies = [ [[package]] name = "operator" -version = "0.3.2" +version = "0.3.3" dependencies = [ "actix-web", "async-trait", @@ -2616,7 +2617,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror 2.0.8", + "thiserror 2.0.9", "ucd-trie", ] @@ -2801,7 +2802,7 @@ dependencies = [ "rustc-hash", "rustls", "socket2", - "thiserror 2.0.8", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -2820,7 +2821,7 @@ dependencies = [ "rustls", "rustls-pki-types", "slab", - "thiserror 2.0.8", + "thiserror 2.0.9", "tinyvec", "tracing", "web-time", @@ -2842,9 +2843,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] @@ -2940,9 +2941,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "reqwest" -version = "0.12.9" +version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" dependencies = [ "base64 0.22.1", "bytes", @@ -2978,6 +2979,7 @@ dependencies = [ "tokio-native-tls", "tokio-rustls", "tokio-util", + "tower 0.5.2", "tower-service", "url", "wasm-bindgen", @@ -3138,9 +3140,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "ryu" @@ -3244,9 +3246,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.216" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] @@ -3263,9 +3265,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.216" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", @@ -3285,9 +3287,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.133" +version = "1.0.134" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" dependencies = [ "itoa", "memchr", @@ -3495,9 +3497,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.90" +version = "2.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" +checksum = "46f71c0377baf4ef1cc3e3402ded576dccc315800fbc62dfc7fe04b009773b4a" dependencies = [ "proc-macro2", "quote", @@ -3558,12 +3560,13 @@ 
dependencies = [ [[package]] name = "tempfile" -version = "3.14.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" dependencies = [ "cfg-if", "fastrand", + "getrandom", "once_cell", "rustix", "windows-sys 0.59.0", @@ -3589,11 +3592,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.8" +version = "2.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f5383f3e0071702bf93ab5ee99b52d26936be9dedd9413067cbdcddcb6141a" +checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc" dependencies = [ - "thiserror-impl 2.0.8", + "thiserror-impl 2.0.9", ] [[package]] @@ -3609,9 +3612,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.8" +version = "2.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f357fcec90b3caef6623a099691be676d033b40a058ac95d2a6ade6fa0c943" +checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4" dependencies = [ "proc-macro2", "quote", @@ -4012,9 +4015,9 @@ checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "unicase" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" @@ -4101,7 +4104,7 @@ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "vynil" -version = "0.3.2" +version = "0.3.3" [[package]] name = "want" @@ -4372,9 +4375,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.20" +version = "0.6.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" +checksum = "39281189af81c07ec09db316b302a3e67bf9bd7cbf6c820b50e35fee9c2fa980" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index 9c3191a..9f2024e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,7 +28,7 @@ version = "0.96.0" [workspace.package] -version = "0.3.2" +version = "0.3.3" authors = ["Sébastien Huss "] edition = "2021" license = "BSD-3-Clause" @@ -77,6 +77,7 @@ operator = { cmd=[ "podman build . 
-f operator/Dockerfile -t docker.io/sebt3/vynil-operator:$(cargo run --bin agent -- version) && podman push docker.io/sebt3/vynil-operator:$(cargo run --bin agent -- version)", ]} box = { cmd=[ + "cargo cmd generate_crd", "cargo run --bin agent -- package update --source ./box/vynil/", "cargo run --bin agent -- package build -o ./box/vynil/ --tag $(cargo run --bin agent -- version) -r docker.io -n sebt3/vynil -u $(jq -r '.auths[\"docker.io\"].auth' "] edition = "2021" license = "BSD-3-Clause" diff --git a/agent/scripts/lib/backup_context.rhai b/agent/scripts/lib/backup_context.rhai new file mode 100644 index 0000000..c5e8194 --- /dev/null +++ b/agent/scripts/lib/backup_context.rhai @@ -0,0 +1,247 @@ +// Create a run context for the backup/restore pod from the preparation done by the "run" function below +fn from_args(context) { + context["deployment_list"] = get_env("DEPLOYMENT_LIST").split(" ").filter(|x| x!=""); + context["statefulset_list"] = get_env("STATEFULSET_LIST").split(" ").filter(|x| x!=""); + context["secret_list"] = get_env("SECRET_LIST").split(" ").filter(|x| x!=""); + context["pg_list"] = get_env("PG_LIST").split(" ").filter(|x| x!=""); + context["restic_tags"] = get_env("RESTIC_TAGS"); + context["snapshot"] = get_env("RESTIC_SNAPSHOT"); + context["max_dayly"] = get_env("RESTIC_MAX_DAYLY"); + if context["max_dayly"] == "" {context["max_dayly"] = "7";} + context["max_weekly"] = get_env("RESTIC_MAX_WEEKLY"); + if context["max_weekly"] == "" {context["max_weekly"] = "5";} + context["max_monthly"] = get_env("RESTIC_MAX_MONTHLY"); + if context["max_monthly"] == "" {context["max_monthly"] = "12";} + context["max_yearly"] = get_env("RESTIC_MAX_YEARLY"); + if context["max_yearly"] == "" {context["max_yearly"] = "4";} + context["namespace"] = get_env("NAMESPACE"); + try { + context["scale_target"] = parse_int(get_env("SCALE_TARGET")); + } catch { + context["scale_target"] = 1; + } + let sub_path = get_env("SUB_PATH"); + let base_path = get_env("BASE_REPO_URL"); + sub_path.replace("/"," "); + sub_path.trim(); + sub_path.replace(" ","/"); + base_path.replace("/"," "); + base_path.trim(); + base_path.replace(" ","/"); + context["sub_path"] = sub_path; + context["base_path"] = base_path; + context["s3_url"] = `s3:${base_path}/${sub_path}`; + context["restic_args"] = ""; + if get_env("INSECURE_TLS") == "true" { + context["restic_args"] += " --insecure-tls"; + } + context +} + +fn run(instance, context, use_init_from) { + // TODO: Prepare MongoDB backup + // TODO: Prepare Redis backup + let secret_name = "backup-settings"; + let sub_path = `${context.instance.namespace}/${context.instance.appslug}`; + if use_init_from { + if instance.spec.initFrom.secretName != () { + secret_name = instance.spec.initFrom.secretName; + } + if instance.spec.initFrom.subPath != () { + sub_path = instance.spec.initFrom.subPath; + } + } + context["volumes"] = [#{ + name: "empty-dir", + emptyDir: #{} + }]; + context["mounts"] = [#{ + name: "empty-dir", + mountPath: `/backup` + }]; + context["envs_from"] = [#{ + secretRef: #{ + name: secret_name + } + }]; + context["envs"] = [#{ + name: "RESTIC_TAGS", + value: `tenant:${context.tenant.name},namespace:${context.instance.namespace},instance:${context.instance.name},component:${context.instance["package"].name},version:${context.instance.requested}` + }, #{ + name: "RESTIC_HOST", + value: context.instance.appslug + }, #{ + name: "AGENT_IMAGE", + value: context.agent_image + }, #{ + name: "SCALE_TARGET", + value: if context.namespace.ha { "2" } else { "1" } + }, #{
name: "VYNIL_NAMESPACE", + value: context.cluster.vynil_namespace + }, #{ + name: "INSTANCE", + value: context.instance.name + }, #{ + name: "TAG", + value: context.instance.requested + }, #{ + name: "SUB_PATH", + value: sub_path + }, #{ + name: "APPSLUG", + value: context.instance.appslug + }, #{ + name: "NAMESPACE", + valueFrom: #{ + fieldRef: #{ + apiVersion: "v1", + fieldPath: "metadata.namespace" + } + } + }, #{ + name: "POD_NAME", + valueFrom: #{ + fieldRef: #{ + apiVersion: "v1", + fieldPath: "metadata.name" + } + } + }]; + if use_init_from { + context["envs"] += #{ + name: "RESTIC_SNAPSHOT", + value: instance.spec.initFrom.snapshot + }; + } + context["files"] = []; + if is_dir(`${context.package_dir}/scripts`) { + for f in read_dir(`${context.package_dir}/scripts`) { + let base = basename(f); + if base.starts_with("restore") || base.starts_with("backup") || base.starts_with("maintenance") { + context["files"] += #{ + name: base, + content: file_read(f) + }; + } + } + } + context["has_files"] = context["files"].len() > 0; + if context["has_files"] { + context["volumes"] += #{ + name: "backups-scripts", + configMap: #{ + defaultMode: 493, + name: `${context.instance.appslug}-backups-scripts` + } + }; + context["mounts"] += #{ + name: "backups-scripts", + mountPath: "/package/scripts" + }; + } + context["schedule"] = `${context.namespace.maintenance_start_minut} ${context.namespace.maintenance_start_hour} * * *`; + context["service_account"] = `${context.instance.appslug}-backup`; + let pgs = []; + let secrets = []; + for v in instance.status.vitals { + let name = v.name; + name.replace(context.instance.appslug,""); + name.replace("-"," "); + name.trim(); + name.replace(" ","-"); + if v.kind == "PersistentVolumeClaim" { + if name.is_empty() { + name = "data"; + } + context["volumes"] += #{ + name: name, + persistentVolumeClaim: #{ + claimName: v.name + } + }; + context["mounts"] += #{ + name: name, + mountPath: `/backup/${name}` + }; + } else if v.kind == "Secret" { + if name.is_empty() { + name = "secret"; + } + secrets += name; + if ! 
use_init_from { + context["volumes"] += #{ + name: name, + secret: #{ + secretName: v.name + } + }; + context["mounts"] += #{ + name: name, + mountPath: `/secrets/${name}` + }; + } + context["envs"] += #{ + name: `secret_${name}_target`, + value: v.name + }; + } else if v.kind == "Cluster" { + if name.is_empty() { + name = "postgres"; + } + pgs += name; + let lst = k8s_resource("Secrets", context.instance.namespace).list_meta().items; + let secret = `${v.name}-app`; + if lst.filter(|s| s.metadata.name == `${v.name}-superuser`).len() > 0 { + secret = `${v.name}-superuser`; + } + for i in ["host", "username", "password", "dbname"] { + context["envs"] += #{ + name: `${name}_${i}`, + valueFrom: #{ + secretKeyRef: #{ + name: secret, + key: i + } + } + }; + } + } + } + let deploy = []; + let sts = []; + if instance.status.scalables != () { + for s in instance.status.scalables { + if s.kind == "Deployment" { + deploy += s.name; + } else if s.kind == "StatefulSet" { + sts += s.name; + } + } + } + if deploy.len() > 0 { + context["envs"] += #{ + name: `DEPLOYMENT_LIST`, + value: deploy.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` }) + }; + } + if sts.len() > 0 { + context["envs"] += #{ + name: `STATEFULSET_LIST`, + value: sts.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` }) + }; + } + if pgs.len() > 0 { + context["envs"] += #{ + name: `PG_LIST`, + value: pgs.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` }) + }; + } + if secrets.len() > 0 { + context["envs"] += #{ + name: `SECRET_LIST`, + value: secrets.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` }) + }; + } + context +} diff --git a/agent/scripts/lib/build_context.rhai b/agent/scripts/lib/build_context.rhai index 14b65ab..7ff68ca 100644 --- a/agent/scripts/lib/build_context.rhai +++ b/agent/scripts/lib/build_context.rhai @@ -94,6 +94,6 @@ fn run(instance, args) { }, values: get_values(instance.spec.options, defaults), defaults: defaults, - package_dir: args.package_dir + package_dir: args.package_dir, } } \ No newline at end of file diff --git a/agent/scripts/lib/install_from_dir.rhai b/agent/scripts/lib/install_from_dir.rhai index 64b6804..686b7b9 100644 --- a/agent/scripts/lib/install_from_dir.rhai +++ b/agent/scripts/lib/install_from_dir.rhai @@ -34,11 +34,17 @@ fn get_objects(context, dir) { hbs.register_helper_dir(`${context.package_dir}/handlebars/helpers`); let ret = []; for file in read_dir(dir) { - let objects = if file.ends_with(".yaml") || file.ends_with(".yml") { - yaml_decode_multi(file_read(file)) - } else if file.ends_with(".yaml.hbs") || file.ends_with(".yml.hbs") { - yaml_decode_multi(hbs.render_from(file_read(file), context)) - } else {[]}; + let objects = []; + try { + objects = if file.ends_with(".yaml") || file.ends_with(".yml") { + yaml_decode_multi(file_read(file)) + } else if file.ends_with(".yaml.hbs") || file.ends_with(".yml.hbs") { + yaml_decode_multi(hbs.render_from(file_read(file), context)) + } else {[]}; + } catch(e) { + log_error(`While parsing ${file} :`); + throw e; + } for obj in objects.filter(|obj| type_of(obj) != "map" || ! obj.keys().contains("kind") || ! 
obj.keys().contains("metadata") || type_of(obj.metadata) != "map") { log_warn(`No kind for an object in file ${file}`); log_debug(yaml_encode(obj)); diff --git a/agent/scripts/lib/wait.rhai b/agent/scripts/lib/wait.rhai index e2dbf07..c3cd214 100644 --- a/agent/scripts/lib/wait.rhai +++ b/agent/scripts/lib/wait.rhai @@ -52,6 +52,16 @@ fn vital(lst, duration) { log_info(`Waiting for ${v.kind} ${v.namespace}/${v.name} to be available`); let sts = get_statefulset(v.namespace, v.name); sts.wait_available(duration); + } else if ["BucketClaim"].contains(v.kind) { + log_info(`Waiting for ${v.kind} ${v.namespace}/${v.name} to be available`); + let api = k8s_resource(v.kind, v.namespace); + let obj = api.get_obj(v.name); + obj.wait_status("bucketReady", duration); + } else if ["BucketAccess"].contains(v.kind) { + log_info(`Waiting for ${v.kind} ${v.namespace}/${v.name} to be available`); + let api = k8s_resource(v.kind, v.namespace); + let obj = api.get_obj(v.name); + obj.wait_status("accessGranted", duration); } } } diff --git a/agent/scripts/packages/build.rhai b/agent/scripts/packages/build.rhai index 41ab4b9..97cca10 100644 --- a/agent/scripts/packages/build.rhai +++ b/agent/scripts/packages/build.rhai @@ -21,6 +21,8 @@ fn build(args) { copy::dir_exts(item, args.temp+"/"+base, [".hbs", ".yaml", ".yml"]); } else if base in ["vitals", "scalables", "others"] && is_dir(item) && valid.metadata.type == "tenant" { copy::dir_exts(item, args.temp+"/"+base, [".hbs", ".yaml", ".yml"]); + } else if base == "pods" && is_dir(item) && valid.metadata.type == "tenant" { + copy::dir_exts(item, args.temp+"/"+base, [".hbs"]); } else if base == "handlebars" && is_dir(item) { for sub in read_dir(item) { let subbase = basename(sub); diff --git a/agent/scripts/tenant/backup.rhai b/agent/scripts/tenant/backup.rhai new file mode 100644 index 0000000..883fbf1 --- /dev/null +++ b/agent/scripts/tenant/backup.rhai @@ -0,0 +1,52 @@ +import "backup_context" as ctx; +fn run(args) { + let context = ctx::from_args(args); + log_info(`Starting backup using target: ${context.s3_url}`); + + import_run("backup_pre", context); + if is_file(`${args.package_dir}/scripts/backup.sh`) { + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup.sh`); + if rc != 0 { + throw `${context.package_dir}/scripts/backup.sh FAILED returning ${rc}`; + } else { + import_run("backup_post", context); + return rc; + } + } + import_run("backup_init", context); + import_run("maintenance_start", context); + if is_file(`${args.package_dir}/scripts/backup_prepare.sh`) { + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_prepare.sh`); + if rc != 0 { + throw `${context.package_dir}/scripts/backup_prepare.sh FAILED returning ${rc}`; + } + } else { + if is_file(`${args.package_dir}/scripts/backup_prepare_secret.sh`) { + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_prepare_secret.sh`); + if rc != 0 { + throw `${context.package_dir}/scripts/backup_prepare_secret.sh FAILED returning ${rc}`; + } + } else { + import_run("backup_prepare_secret", context); + } + if is_file(`${args.package_dir}/scripts/backup_prepare_postgresql.sh`) { + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_prepare_postgresql.sh`); + if rc != 0 { + throw `${context.package_dir}/scripts/backup_prepare_postgresql.sh FAILED returning ${rc}`; + } + } else { + 
import_run("backup_prepare_postgresql", context); + } + } + if is_file(`${args.package_dir}/scripts/backup_before.sh`) { + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_before.sh`); + if rc != 0 { + throw `${context.package_dir}/scripts/backup_before.sh FAILED returning ${rc}`; + } + } + import_run("backup_run", context); + import_run("maintenance_stop", context); + import_run("backup_check", context); + import_run("backup_prune", context); + import_run("backup_post", context); +} diff --git a/agent/scripts/tenant/backup_check.rhai b/agent/scripts/tenant/backup_check.rhai new file mode 100644 index 0000000..a32e88b --- /dev/null +++ b/agent/scripts/tenant/backup_check.rhai @@ -0,0 +1,9 @@ +fn run(context) { + import_run("backup_check_pre", context); + log_info("Checking restic repository"); + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";restic check`); + if rc != 0 { + throw `restic check FAILED returning ${rc}`; + } + import_run("backup_check_post", context); +} diff --git a/agent/scripts/tenant/backup_init.rhai b/agent/scripts/tenant/backup_init.rhai new file mode 100644 index 0000000..832e3ba --- /dev/null +++ b/agent/scripts/tenant/backup_init.rhai @@ -0,0 +1,10 @@ +fn run(context) { + import_run("backup_init_pre", context); + log_info("Checking for restic repository existance"); + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";restic ${context.restic_args} cat config`); + if rc == 10 { + log_info("Create restic repository"); + shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";restic init ${context.restic_args}`); + } + import_run("backup_init_post", context); +} diff --git a/agent/scripts/tenant/backup_prepare_postgresql.rhai b/agent/scripts/tenant/backup_prepare_postgresql.rhai new file mode 100644 index 0000000..388abb3 --- /dev/null +++ b/agent/scripts/tenant/backup_prepare_postgresql.rhai @@ -0,0 +1,16 @@ +fn run(context) { + import_run("backup_prepare_postgresql_pre", context); + for pg in context.pg_list { + log_info(`Dumping postgresql target: ${pg}`); + let dbname = get_env(`${pg}_dbname`); + let rc = if dbname == "*" { + shell_run("export PGPASSWORD=\"${"+pg+"_password}\";pg_dumpall -h \"${"+pg+"_host}\" -U \"${"+pg+"_username}\" --clean -f /backup/postgresql_"+pg+".sql") + } else { + shell_run("export PGPASSWORD=\"${"+pg+"_password}\";pg_dump -h \"${"+pg+"_host}\" -d \"${"+pg+"_dbname}\" -U \"${"+pg+"_username}\" --clean -f /backup/postgresql_"+pg+".sql") + }; + if rc != 0 { + throw `pg_dump failed for ${pg}`; + } + } + import_run("backup_prepare_postgresql_post", context); +} diff --git a/agent/scripts/tenant/backup_prepare_secret.rhai b/agent/scripts/tenant/backup_prepare_secret.rhai new file mode 100644 index 0000000..f66a325 --- /dev/null +++ b/agent/scripts/tenant/backup_prepare_secret.rhai @@ -0,0 +1,14 @@ +fn run(context) { + import_run("backup_prepare_secret_pre", context); + for secret in context.secret_list { + log_info(`Saving secret: ${secret}`); + create_dir(`/backup/${secret}`); + for file in read_dir(`/secrets/${secret}`) { + let base = basename(file); + if ! 
base.starts_with(".") { + file_copy(file, `/backup/${secret}/${base}`); + } + } + } + import_run("backup_prepare_secret_post", context); +} diff --git a/agent/scripts/tenant/backup_prune.rhai b/agent/scripts/tenant/backup_prune.rhai new file mode 100644 index 0000000..fcbd441 --- /dev/null +++ b/agent/scripts/tenant/backup_prune.rhai @@ -0,0 +1,9 @@ +fn run(context) { + import_run("backup_prune_pre", context); + log_info("Pruning restic backup"); + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";restic forget --keep-daily "${context.max_dayly}" --keep-weekly "${context.max_weekly}" --keep-monthly "${context.max_monthly}" --keep-yearly "${context.max_yearly}" --prune`); + if rc != 0 { + throw `restic prune FAILED returning ${rc}`; + } + import_run("backup_prune_post", context); +} diff --git a/agent/scripts/tenant/backup_run.rhai b/agent/scripts/tenant/backup_run.rhai new file mode 100644 index 0000000..500c72d --- /dev/null +++ b/agent/scripts/tenant/backup_run.rhai @@ -0,0 +1,9 @@ +fn run(context) { + import_run("backup_run_pre", context); + log_info("Starting restic backup"); + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";restic backup /backup ${context.restic_args} --tag "${context.restic_tags}"`); + if rc != 0 { + throw `restic backup FAILED returning ${rc}`; + } + import_run("backup_run_post", context); +} diff --git a/agent/scripts/tenant/context.rhai b/agent/scripts/tenant/context.rhai index b0a20c0..c4faa58 100644 --- a/agent/scripts/tenant/context.rhai +++ b/agent/scripts/tenant/context.rhai @@ -1,6 +1,8 @@ import "build_context" as build; fn run(instance, args) { let context = build::run(instance, args); + context["template_dir"] = args.template_dir; + context["agent_image"] = args.agent_image; let ctx = import_run("context_pre", instance, context); if type_of(ctx) == "map" { context = ctx; @@ -10,13 +12,14 @@ fn run(instance, args) { context["tenant"] = #{ name: name, namespaces: nss, + maintenance_start_hour: "1", + maintenance_start_minut: "0", }; ctx = import_run("context_tenant", instance, context); if type_of(ctx) == "map" { context = ctx; } - let extra = import_run("context_extra", instance, context); - context["extra"] = extra; + context["extra"] = import_run("context_extra", instance, context); ctx = import_run("context_post", instance, context); if type_of(ctx) == "map" { context = ctx; diff --git a/agent/scripts/tenant/context_tenant.rhai b/agent/scripts/tenant/context_tenant.rhai index 1492f3d..e68e94b 100644 --- a/agent/scripts/tenant/context_tenant.rhai +++ b/agent/scripts/tenant/context_tenant.rhai @@ -1,3 +1,26 @@ fn run(instance, context) { + context.tenant["use_backup"] = false; + context.tenant["ha"] = context.cluster.ha; + context["namespace"] = context.tenant; + context.namespace["name"] = instance.metadata.namespace; + let ns_api = k8s_resource("Namespace"); + try { + let prefix = "vynil.solidite.fr/"; + let ns = ns_api.get(instance.metadata.namespace); + let annotations = ns.metadata.annotations; + if type_of(annotations) == "map" { + for k in annotations.keys().filter(|k| k.starts_with(prefix)) { + let opt = k; + opt.remove(prefix); + try { + context.namespace[opt] = json_decode(annotations[k]); + } catch {} + } + if type_of(context.namespace.use_backup) != "bool" { + context.namespace["use_backup"] = context.tenant.use_backup; + } + } + + } catch {} context } \ No newline at end of file diff --git a/agent/scripts/tenant/init_from.rhai b/agent/scripts/tenant/init_from.rhai new file mode 100644 index 0000000..9b7a713 --- 
/dev/null +++ b/agent/scripts/tenant/init_from.rhai @@ -0,0 +1,39 @@ +import "backup_context" as ctx; +import "install_from_dir" as dir; +import "wait" as wait; + +fn run(instance, context) { + let ctx = import_run("init_from_pre", instance, context); + if type_of(ctx) == "map" { + context = ctx; + } + try { + context = ctx::run(instance, context, true); + let ctx = import_run("init_from_prepare", instance, context); + if type_of(ctx) == "map" { + context = ctx; + } + let hbs = new_hbs(); + hbs.register_partial_dir(context.template_dir); + if is_dir(`${context.package_dir}/pods`) { + hbs.register_partial_dir(`${context.package_dir}/pods`); + } + let all = yaml_decode_multi(hbs.render_from("{{> init_job.yaml }}", context)); + let applied_objs = []; + for obj in all { + applied_objs += dir::install_allowed_obj(obj, context.tenant.namespaces, instance.metadata.namespace); + } + wait::all(applied_objs); + } catch (e) { + switch type_of(e) { + "string" => instance.set_status_init_failed(e), + _ => instance.set_status_init_failed(json_encode(e)) + } + throw e; + } + ctx = import_run("init_from_post", instance, context); + if type_of(ctx) == "map" { + context = ctx; + } + context +} diff --git a/agent/scripts/tenant/install.rhai b/agent/scripts/tenant/install.rhai index a2ea976..80f88a7 100644 --- a/agent/scripts/tenant/install.rhai +++ b/agent/scripts/tenant/install.rhai @@ -17,6 +17,10 @@ fn run(instance, context) { } instance = get_tenant_instance(instance.metadata.namespace, instance.metadata.name); } + // there is some vitals, currently running initial installation and an initFrom is set + if instance.status != () && instance.status.vitals != () && type_of(instance.status.tag) == "()" && instance.spec.initFrom != () { + import_run("init_from", instance, context); + } if is_dir(`${context.package_dir}/others`) { ctx = import_run("install_others", instance, context); if type_of(ctx) == "map" { @@ -31,6 +35,11 @@ fn run(instance, context) { } instance = get_tenant_instance(instance.metadata.namespace, instance.metadata.name); } + // there is some vitals to backup and backup flag is set + if instance.status != () && instance.status.vitals != () && context.namespace.use_backup { + // this need the scalables objects to be set + import_run("schedule_backup", instance, context); + } ctx = import_run("install_post", instance, context); if type_of(ctx) == "map" { context = ctx; diff --git a/agent/scripts/tenant/maintenance_start.rhai b/agent/scripts/tenant/maintenance_start.rhai new file mode 100644 index 0000000..f29c14f --- /dev/null +++ b/agent/scripts/tenant/maintenance_start.rhai @@ -0,0 +1,5 @@ +fn run(context) { + import_run("maintenance_start_pre", context); + // TODO: scale "scalables" (aka : context.deployment_list and context.statefulset_list) to 0 + import_run("maintenance_start_post", context); +} diff --git a/agent/scripts/tenant/maintenance_stop.rhai b/agent/scripts/tenant/maintenance_stop.rhai new file mode 100644 index 0000000..01149c8 --- /dev/null +++ b/agent/scripts/tenant/maintenance_stop.rhai @@ -0,0 +1,5 @@ +fn run(context) { + import_run("maintenance_stop_pre", context); + // TODO: scale "scalables" (aka : context.deployment_list and context.statefulset_list) to context.scale_target + import_run("maintenance_stop_post", context); +} diff --git a/agent/scripts/tenant/restore.rhai b/agent/scripts/tenant/restore.rhai new file mode 100644 index 0000000..675d615 --- /dev/null +++ b/agent/scripts/tenant/restore.rhai @@ -0,0 +1,40 @@ +import "backup_context" as ctx; +fn run(args) { + 
let context = ctx::from_args(args); + import_run("restore_pre", context); + if is_file(`${args.package_dir}/scripts/restore.sh`) { + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/restore.sh`); + if rc != 0 { + throw `${context.package_dir}/scripts/restore.sh FAILED returning ${rc}`; + } else { + import_run("restore_post", context); + return rc; + } + } + import_run("maintenance_start", context); + import_run("restore_run", context); + if is_file(`${args.package_dir}/scripts/restore_prepare.sh`) { + let rc = shell_run(`${context.package_dir}/scripts/restore_prepare.sh`); + if rc != 0 { + throw `${context.package_dir}/scripts/restore_prepare.sh FAILED returning ${rc}`; + } + } + if is_file(`${args.package_dir}/scripts/restore_secret.sh`) { + let rc = shell_run(`${context.package_dir}/scripts/restore_secret.sh`); + if rc != 0 { + throw `${context.package_dir}/scripts/restore_secret.sh FAILED returning ${rc}`; + } + } else { + import_run("restore_secret", context); + } + if is_file(`${args.package_dir}/scripts/restore_postgresql.sh`) { + let rc = shell_run(`${context.package_dir}/scripts/restore_postgresql.sh`); + if rc != 0 { + throw `${context.package_dir}/scripts/restore_postgresql.sh FAILED returning ${rc}`; + } + } else { + import_run("restore_postgresql", context); + } + import_run("maintenance_stop", context); + import_run("restore_post", context); +} diff --git a/agent/scripts/tenant/restore_postgresql.rhai b/agent/scripts/tenant/restore_postgresql.rhai new file mode 100644 index 0000000..1fe43b1 --- /dev/null +++ b/agent/scripts/tenant/restore_postgresql.rhai @@ -0,0 +1,20 @@ +fn run(context) { + import_run("restore_postgresql_pre", context); + for pg in context.pg_list { + log_info(`Starting PostgreSQL restore for ${pg}`); + let dbname = get_env(`${pg}_dbname`); + let args = "-h \"${"+pg+"_host}\" -U \"${"+pg+"_username}\""; + let file = `/backup/postgresql_${pg}.sql`; + if dbname != "*" { + args += " -d \"${"+pg+"_dbname}\""; + } + if is_file(`/backup/postgresql_${pg}_cleaned.sql`) { + file = `/backup/postgresql_${pg}_cleaned.sql`; + } + let rc = shell_run("export PGPASSWORD=\"${"+pg+"_password}\";psql "+args+" < "+file); + if rc != 0 { + throw `psql failed for ${pg}`; + } + } + import_run("restore_postgresql_post", context); +} diff --git a/agent/scripts/tenant/restore_run.rhai b/agent/scripts/tenant/restore_run.rhai new file mode 100644 index 0000000..8488613 --- /dev/null +++ b/agent/scripts/tenant/restore_run.rhai @@ -0,0 +1,9 @@ +fn run(context) { + import_run("restore_run_pre", context); + log_info(`Starting restic restore for ${context.snapshot}`); + let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";restic restore ${context.snapshot} --target / ${context.restic_args}`); + if rc != 0 { + throw `restic restore FAILED returning ${rc}`; + } + import_run("restore_run_post", context); +} diff --git a/agent/scripts/tenant/restore_secret.rhai b/agent/scripts/tenant/restore_secret.rhai new file mode 100644 index 0000000..c496a16 --- /dev/null +++ b/agent/scripts/tenant/restore_secret.rhai @@ -0,0 +1,24 @@ +fn run(context) { + import_run("restore_secret_pre", context); + let api = k8s_resource("Secrets", context.namespace); + let hbs = new_hbs(); + hbs.register_partial_dir(context.template_dir); + for secret in context.secret_list { + let name = get_env(`secret_${secret}_target`); + log_info(`Restoring secret ${secret} (${name})`); + let data = #{}; + for file in read_dir(`/backup/${secret}`) { + let base = basename(file);
+ data[base] = file_read(file); + } + api.apply(name, #{ + apiVersion: "v1", + kind: "Secret", + metadata: #{ + name: name + }, + stringData: data + }); + } + import_run("restore_secret_post", context); +} diff --git a/agent/scripts/tenant/schedule_backup.rhai b/agent/scripts/tenant/schedule_backup.rhai new file mode 100644 index 0000000..e917cc3 --- /dev/null +++ b/agent/scripts/tenant/schedule_backup.rhai @@ -0,0 +1,36 @@ +import "backup_context" as ctx; +import "install_from_dir" as dir; + +fn run(instance, context) { + let ctx = import_run("schedule_backup_pre", instance, context); + if type_of(ctx) == "map" { + context = ctx; + } + try { + context = ctx::run(instance, context, false); + let ctx = import_run("schedule_backup_prepare", instance, context); + if type_of(ctx) == "map" { + context = ctx; + } + let hbs = new_hbs(); + hbs.register_partial_dir(context.template_dir); + if is_dir(`${context.package_dir}/pods`) { + hbs.register_partial_dir(`${context.package_dir}/pods`); + } + let all = yaml_decode_multi(hbs.render_from("{{> cronbackup.yaml }}", context)); + for obj in all { + dir::install_allowed_obj(obj, context.tenant.namespaces, instance.metadata.namespace); + } + } catch (e) { + switch type_of(e) { + "string" => instance.set_status_schedule_backup_failed(e), + _ => instance.set_status_schedule_backup_failed(json_encode(e)) + } + throw e; + } + ctx = import_run("schedule_backup_post", instance, context); + if type_of(ctx) == "map" { + context = ctx; + } + context +} diff --git a/agent/src/boxes/mod.rs b/agent/src/boxes/mod.rs index 1531557..198d45a 100644 --- a/agent/src/boxes/mod.rs +++ b/agent/src/boxes/mod.rs @@ -10,7 +10,7 @@ pub struct Parameters { #[derive(Subcommand, Debug)] pub enum Commands { - /// Update a jukeboxution + /// Update a jukebox Scan(scan::Parameters), } diff --git a/agent/src/main.rs b/agent/src/main.rs index f12a401..f0629d1 100644 --- a/agent/src/main.rs +++ b/agent/src/main.rs @@ -18,7 +18,7 @@ pub struct Parameters { #[derive(Subcommand, Debug)] pub enum Commands { - /// Run given git repo as a jukeboxution source + /// Run given git repo as a jukebox source Run(run::Parameters), /// generate CRDs Crdgen(crdgen::Parameters), diff --git a/agent/src/tenant/backup.rs b/agent/src/tenant/backup.rs index 9234437..9088a8f 100644 --- a/agent/src/tenant/backup.rs +++ b/agent/src/tenant/backup.rs @@ -1,6 +1,6 @@ -use common::{context::set_tenant, instancetenant::TenantInstance, rhaihandler::Script, Result}; -use serde::{Deserialize, Serialize}; use clap::Args; +use common::{rhaihandler::Script, Result}; +use serde::{Deserialize, Serialize}; #[derive(Args, Debug, Serialize, Deserialize)] pub struct Parameters { @@ -36,6 +36,23 @@ pub struct Parameters { default_value = "./agent/scripts" )] script_dir: String, + /// Agent template directory + #[arg( + short = 't', + long = "template-dir", + env = "TEMPLATE_DIRECTORY", + value_name = "TEMPLATE_DIRECTORY", + default_value = "./agent/templates" + )] + template_dir: String, + /// Agent image + #[arg( + long = "agent-image", + env = "AGENT_IMAGE", + value_name = "AGENT_IMAGE", + default_value = "docker.io/sebt3/vynil-agent:0.3.3" + )] + agent_image: String, /// version #[arg(long = "tag", env = "TAG", value_name = "TAG")] tag: String, @@ -65,15 +82,10 @@ pub async fn run(args: &Parameters) -> Result<()> { format!("{}/tenant", args.script_dir), format!("{}/lib", args.script_dir), ]); - let context = TenantInstance::get(args.namespace.clone(), args.instance.clone()).await?; - set_tenant(context.clone()); - 
rhai.ctx.set_value("instance", context); rhai.set_dynamic("args", &serde_json::to_value(args).unwrap()); let _ = rhai.eval( - "import(\"context\") as ctx;\n\ - let context = ctx::run(instance, args);\n\ - import(\"backup\") as backup;\n\ - backup::run(instance, context);", + "import(\"backup\") as backup;\n\ + backup::run(args);", )?; Ok(()) } diff --git a/agent/src/tenant/delete.rs b/agent/src/tenant/delete.rs index 9b2d76d..66cab10 100644 --- a/agent/src/tenant/delete.rs +++ b/agent/src/tenant/delete.rs @@ -36,6 +36,23 @@ pub struct Parameters { default_value = "./agent/scripts" )] script_dir: String, + /// Agent template directory + #[arg( + short = 't', + long = "template-dir", + env = "TEMPLATE_DIRECTORY", + value_name = "TEMPLATE_DIRECTORY", + default_value = "./agent/templates" + )] + template_dir: String, + /// Agent image + #[arg( + long = "agent-image", + env = "AGENT_IMAGE", + value_name = "AGENT_IMAGE", + default_value = "docker.io/sebt3/vynil-agent:0.3.3" + )] + agent_image: String, /// Configuration directory #[arg( short = 'c', diff --git a/agent/src/tenant/install.rs b/agent/src/tenant/install.rs index 9717233..9e53f0a 100644 --- a/agent/src/tenant/install.rs +++ b/agent/src/tenant/install.rs @@ -36,6 +36,23 @@ pub struct Parameters { default_value = "./agent/scripts" )] script_dir: String, + /// Agent template directory + #[arg( + short = 't', + long = "template-dir", + env = "TEMPLATE_DIRECTORY", + value_name = "TEMPLATE_DIRECTORY", + default_value = "./agent/templates" + )] + template_dir: String, + /// Agent image + #[arg( + long = "agent-image", + env = "AGENT_IMAGE", + value_name = "AGENT_IMAGE", + default_value = "docker.io/sebt3/vynil-agent:0.3.3" + )] + agent_image: String, /// version #[arg(long = "tag", env = "TAG", value_name = "TAG")] tag: String, diff --git a/agent/src/tenant/mod.rs b/agent/src/tenant/mod.rs index a30ed69..452a435 100644 --- a/agent/src/tenant/mod.rs +++ b/agent/src/tenant/mod.rs @@ -1,8 +1,8 @@ -//mod backup; +mod backup; mod delete; mod install; //mod reconfigure; -//mod restore; +mod restore; use clap::{Parser, Subcommand}; use std::process; @@ -19,9 +19,9 @@ pub enum Commands { /// Delete an instance Delete(delete::Parameters), // Backup an instance - // Backup(backup::Parameters), + Backup(backup::Parameters), // Restore an instance - // Restore(restore::Parameters), + Restore(restore::Parameters), // Reconfigure an instance // Reconfigure(reconfigure::Parameters), } @@ -36,7 +36,7 @@ pub async fn run(cmd: &Parameters) { tracing::error!("Deleting a package failed with: {e:}"); process::exit(3) }), - /*Commands::Backup(args) => backup::run(args).await.unwrap_or_else(|e| { + Commands::Backup(args) => backup::run(args).await.unwrap_or_else(|e| { tracing::error!("Backup of a package failed with: {e:}"); process::exit(4) }), @@ -44,7 +44,7 @@ pub async fn run(cmd: &Parameters) { tracing::error!("Restore of a package failed with: {e:}"); process::exit(5) }), - Commands::Reconfigure(args) => reconfigure::run(args).await.unwrap_or_else(|e| { + /*Commands::Reconfigure(args) => reconfigure::run(args).await.unwrap_or_else(|e| { tracing::error!("Reconfiguring of a package failed with: {e:}"); process::exit(6) }),*/ diff --git a/agent/src/tenant/reconfigure.rs b/agent/src/tenant/reconfigure.rs index fb4b10e..8816714 100644 --- a/agent/src/tenant/reconfigure.rs +++ b/agent/src/tenant/reconfigure.rs @@ -36,6 +36,23 @@ pub struct Parameters { default_value = "./agent/scripts" )] script_dir: String, + /// Agent template directory + #[arg( + short = 't', + 
long = "template-dir", + env = "TEMPLATE_DIRECTORY", + value_name = "TEMPLATE_DIRECTORY", + default_value = "./agent/templates" + )] + template_dir: String, + /// Agent image + #[arg( + long = "agent-image", + env = "AGENT_IMAGE", + value_name = "AGENT_IMAGE", + default_value = "docker.io/sebt3/vynil-agent:0.3.3" + )] + agent_image: String, /// version #[arg(long = "tag", env = "TAG", value_name = "TAG")] tag: String, diff --git a/agent/src/tenant/restore.rs b/agent/src/tenant/restore.rs index efed071..2b23bbd 100644 --- a/agent/src/tenant/restore.rs +++ b/agent/src/tenant/restore.rs @@ -1,6 +1,6 @@ -use common::{context::set_tenant, instancetenant::TenantInstance, rhaihandler::Script, Result}; -use serde::{Deserialize, Serialize}; use clap::Args; +use common::{rhaihandler::Script, Result}; +use serde::{Deserialize, Serialize}; #[derive(Args, Debug, Serialize, Deserialize)] pub struct Parameters { @@ -36,6 +36,23 @@ pub struct Parameters { default_value = "./agent/scripts" )] script_dir: String, + /// Agent template directory + #[arg( + short = 't', + long = "template-dir", + env = "TEMPLATE_DIRECTORY", + value_name = "TEMPLATE_DIRECTORY", + default_value = "./agent/templates" + )] + template_dir: String, + /// Agent image + #[arg( + long = "agent-image", + env = "AGENT_IMAGE", + value_name = "AGENT_IMAGE", + default_value = "docker.io/sebt3/vynil-agent:0.3.3" + )] + agent_image: String, /// version #[arg(long = "tag", env = "TAG", value_name = "TAG")] tag: String, @@ -65,15 +82,10 @@ pub async fn run(args: &Parameters) -> Result<()> { format!("{}/tenant", args.script_dir), format!("{}/lib", args.script_dir), ]); - let context = TenantInstance::get(args.namespace.clone(), args.instance.clone()).await?; - set_tenant(context.clone()); - rhai.ctx.set_value("instance", context); rhai.set_dynamic("args", &serde_json::to_value(args).unwrap()); let _ = rhai.eval( - "import(\"context\") as ctx;\n\ - let context = ctx::run(instance, args);\n\ - import(\"restore\") as restore;\n\ - restore::run(instance, context);", + "import(\"restore\") as restore;\n\ + restore::run(args);", )?; Ok(()) } diff --git a/agent/templates/backup.yaml.hbs b/agent/templates/backup.yaml.hbs new file mode 100644 index 0000000..7fbe498 --- /dev/null +++ b/agent/templates/backup.yaml.hbs @@ -0,0 +1,37 @@ +{{#to_json format="yaml"}} +--- +metadata: + annotations: + mayfly.cloud.namecheap.com/expire: 120h +spec: + containers: + - name: backup + image: {{ agent_image }} + imagePullPolicy: IfNotPresent + args: + - tenant + - backup + env: {{json_to_str envs}} + envFrom: {{json_to_str envs_from}} + volumeMounts: {{json_to_str mounts}} + securityContext: + runAsUser: 0 + runAsGroup: 0 + privileged: false + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + add: + - CHOWN + - FOWNER + - DAC_OVERRIDE + - DAC_READ_SEARCH + volumes: {{json_to_str volumes}} + serviceAccountName: {{ service_account }} + restartPolicy: Never + securityContext: + runAsGroup: 0 + runAsUser: 0 + runAsNonRoot: false +{{/to_json}} diff --git a/agent/templates/cronbackup.yaml.hbs b/agent/templates/cronbackup.yaml.hbs new file mode 100644 index 0000000..bac2fad --- /dev/null +++ b/agent/templates/cronbackup.yaml.hbs @@ -0,0 +1,45 @@ +{{#if has_files}} +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: "{{ instance.appslug }}-backups-scripts" +data: +{{#each files}} + {{name}}: {{json_to_str content}} +{{/each}} +{{/if}} +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ instance.appslug }}-backup +--- +apiVersion: 
rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: {{ instance.appslug }}-backup +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: {{cluster.vynil_namespace}}:backups +subjects: +- kind: ServiceAccount + name: {{ instance.appslug }}-backup + namespace: {{ instance.namespace }} +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: '{{ instance.appslug }}-backups' +spec: + concurrencyPolicy: Forbid + failedJobsHistoryLimit: 1 + schedule: {{ schedule }} + successfulJobsHistoryLimit: 3 + suspend: false + jobTemplate: + metadata: + creationTimestamp: null + spec: + template: {{> backup.yaml }} diff --git a/agent/templates/init_job.yaml.hbs b/agent/templates/init_job.yaml.hbs new file mode 100644 index 0000000..f190263 --- /dev/null +++ b/agent/templates/init_job.yaml.hbs @@ -0,0 +1,39 @@ +{{#if has_files}} +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: "{{ instance.appslug }}-backups-scripts" +data: +{{#each files}} + {{name}}: {{json_to_str content}} +{{/each}} +{{/if}} +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ instance.appslug }}-backup +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: {{ instance.appslug }}-backup +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: {{cluster.vynil_namespace}}:backups +subjects: +- kind: ServiceAccount + name: {{ instance.appslug }}-backup + namespace: {{ instance.namespace }} +--- +apiVersion: batch/v1 +kind: Job +metadata: + name: '{{ instance.appslug }}-init-restore' +spec: + backoffLimit: 3 + parallelism: 1 + suspend: false + template: {{> restore.yaml }} diff --git a/agent/templates/restore.yaml.hbs b/agent/templates/restore.yaml.hbs new file mode 100644 index 0000000..0b6d65f --- /dev/null +++ b/agent/templates/restore.yaml.hbs @@ -0,0 +1,37 @@ +{{#to_json format="yaml"}} +--- +metadata: + annotations: + mayfly.cloud.namecheap.com/expire: 120h +spec: + containers: + - name: restore + image: {{ agent_image }} + imagePullPolicy: IfNotPresent + args: + - tenant + - restore + env: {{json_to_str envs}} + envFrom: {{json_to_str envs_from}} + volumeMounts: {{json_to_str mounts}} + securityContext: + runAsUser: 0 + runAsGroup: 0 + privileged: false + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + add: + - CHOWN + - FOWNER + - DAC_OVERRIDE + - DAC_READ_SEARCH + volumes: {{json_to_str volumes}} + serviceAccountName: {{ service_account }} + restartPolicy: Never + securityContext: + runAsGroup: 0 + runAsUser: 0 + runAsNonRoot: false +{{/to_json}} diff --git a/box/vynil/crds/crd.yaml b/box/vynil/crds/crd.yaml index 661653a..addbd4a 100644 --- a/box/vynil/crds/crd.yaml +++ b/box/vynil/crds/crd.yaml @@ -34,7 +34,7 @@ spec: description: Custom resource representing a JukeBox, source of vynil packages properties: spec: - description: Describe a source of vynil packages jukeboxution + description: Describe a source of vynil packages jukebox properties: maturity: description: Jukebox maturity (stable/beta/alpha) @@ -345,13 +345,31 @@ spec: description: Custom resource representing an Vynil tenant package installation properties: spec: - description: Describe a source of vynil packages jukeboxution + description: Describe a source of vynil packages jukebox properties: category: description: The category name type: string + initFrom: + description: Init from a previous backup + nullable: true + properties: + secretName: + description: 'Name of the secret containing: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, 
BASE_REPO_URL and RESTIC_PASSWORD. Default to "backup-settings"' + nullable: true + type: string + snapshot: + description: Snapshot id for restoration + type: string + subPath: + description: Path within the bucket containing the backup to use for recovery. Default to "/" + nullable: true + type: string + required: + - snapshot + type: object jukebox: - description: The jukeboxution source name + description: The jukebox source name type: string options: description: Parameters @@ -408,6 +426,8 @@ spec: - TofuInstalled - VitalApplied - ScalableApplied + - InitFrom + - ScheduleBackup - OtherApplied - RhaiApplied type: string @@ -556,13 +576,13 @@ spec: description: Custom resource representing an Vynil cluster package installation properties: spec: - description: Describe a source of vynil packages jukeboxution + description: Describe a source of vynil packages jukebox properties: category: description: The category name type: string jukebox: - description: The jukeboxution source name + description: The jukebox source name type: string options: description: Parameters diff --git a/box/vynil/package.yaml b/box/vynil/package.yaml index c4cd114..6792d9f 100644 --- a/box/vynil/package.yaml +++ b/box/vynil/package.yaml @@ -8,7 +8,7 @@ images: repository: sebt3/vynil-operator kind: Package metadata: - app_version: 0.3.2 + app_version: 0.3.3 category: core description: Vynil controller to manage vynil packages installations features: diff --git a/box/vynil/systems/rbac.yaml.hbs b/box/vynil/systems/rbac.yaml.hbs index 7ebd8bf..536064c 100644 --- a/box/vynil/systems/rbac.yaml.hbs +++ b/box/vynil/systems/rbac.yaml.hbs @@ -143,3 +143,26 @@ rules: - create - delete - deletecollection +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: {{instance.namespace}}:backups +rules: +- apiGroups: ["apps"] + resources: + - deployments/scale + - statefulsets/scale + - replicasets/scale + verbs: + - update + - patch +- apiGroups: [""] + resources: + - secrets + verbs: + - get + - list + - create + - update + - patch diff --git a/common/src/handlebarshandler.rs b/common/src/handlebarshandler.rs index 2eb29c6..60446e8 100644 --- a/common/src/handlebarshandler.rs +++ b/common/src/handlebarshandler.rs @@ -28,7 +28,7 @@ handlebars_helper!(to_decimal: |arg:Value| format!("{}", u32::from_str_radix(arg warn!("handlebars::to_decimal received a non-string parameter: {:?}",arg); "" }), 8).unwrap_or_else(|_| { - warn!("handlebars::base64_encode received a non-string parameter: {:?}",arg); + warn!("handlebars::to_decimal received a non-string parameter: {:?}",arg); 0 }))); handlebars_helper!(header_basic: |username:Value, password:Value| format!("Basic {}",STANDARD.encode(format!("{}:{}",username.as_str().unwrap_or_else(|| { @@ -52,8 +52,8 @@ handlebars_helper!(bcrypt_hash: |password:Value| crate::hasheshandlers::bcrypt_h warn!("handlebars::bcrypt_hash failed to convert to string with: {e:?}"); String::new() })); -handlebars_helper!(gen_password: |len:u32| Passwords::new().generate(len, 6, 2, 2)); -handlebars_helper!(gen_password_alphanum: |len:u32| Passwords::new().generate(len, 8, 2, 0)); +handlebars_helper!(gen_password: |len:u32| Passwords::new().generate(len.into(), 6, 2, 2)); +handlebars_helper!(gen_password_alphanum: |len:u32| Passwords::new().generate(len.into(), 8, 2, 0)); handlebars_helper!(selector: |ctx: Value, {comp:str=""}| { let mut sel = ctx.as_object().unwrap()["instance"].as_object().unwrap()["selector"].as_object().unwrap().clone(); if !comp.is_empty() { @@ -61,8 +61,12 @@ 
handlebars_helper!(selector: |ctx: Value, {comp:str=""}| { } sel }); -handlebars_helper!(labels: |ctx: Value| { - ctx.as_object().unwrap()["instance"].as_object().unwrap()["labels"].clone() +handlebars_helper!(labels: |ctx: Value, {comp:str=""}| { + let mut sel = ctx.as_object().unwrap()["instance"].as_object().unwrap()["labels"].as_object().unwrap().clone(); + if !comp.is_empty() { + sel.insert("app.kubernetes.io/component".into(), Value::from(comp)); + } + sel }); handlebars_helper!(have_crd: |ctx: Value, name: String| { ctx.as_object().unwrap()["cluster"].as_object().unwrap()["crds"].as_array().unwrap().iter().any(|crd| *crd==name) diff --git a/common/src/instancesystem.rs b/common/src/instancesystem.rs index d21b4b0..5cf848f 100644 --- a/common/src/instancesystem.rs +++ b/common/src/instancesystem.rs @@ -17,7 +17,7 @@ use serde_json::json; use tokio::{runtime::Handle, task::block_in_place}; use tracing::field::debug; -/// Describe a source of vynil packages jukeboxution +/// Describe a source of vynil packages jukebox #[derive(CustomResource, Deserialize, Serialize, Clone, Debug, JsonSchema)] #[kube( kind = "SystemInstance", @@ -38,7 +38,7 @@ use tracing::field::debug; {"name":"errors", "type":"string", "description":"Errors", "jsonPath":".status.conditions[?(@.status == 'False')].message"}"# )] pub struct SystemInstanceSpec { - /// The jukeboxution source name + /// The jukebox source name pub jukebox: String, /// The category name pub category: String, diff --git a/common/src/instancetenant.rs b/common/src/instancetenant.rs index f0a96d2..67a7401 100644 --- a/common/src/instancetenant.rs +++ b/common/src/instancetenant.rs @@ -17,7 +17,19 @@ use serde::{Deserialize, Serialize}; use serde_json::json; use tokio::{runtime::Handle, task::block_in_place}; -/// Describe a source of vynil packages jukeboxution +/// InitFrom contains the information for the backup to use to initialize the installation +#[derive(Serialize, Deserialize, Eq, PartialEq, Clone, Debug, JsonSchema)] +#[serde(rename_all = "camelCase")] +pub struct InitFrom { + /// Name of the secret containing: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BASE_REPO_URL and RESTIC_PASSWORD. Default to "backup-settings" + pub secret_name: Option<String>, + /// Path within the bucket containing the backup to use for recovery.
Default to "/" + pub sub_path: Option, + /// Snapshot id for restoration + pub snapshot: String, +} + +/// Describe a source of vynil packages jukebox #[derive(CustomResource, Deserialize, Serialize, Clone, Debug, JsonSchema)] #[kube( kind = "TenantInstance", @@ -37,8 +49,9 @@ use tokio::{runtime::Handle, task::block_in_place}; {"name":"last_updated", "type":"date", "description":"Last update date", "format": "date-time", "jsonPath":".status.conditions[?(@.type == 'Ready')].lastTransitionTime"}, {"name":"errors", "type":"string", "description":"Errors", "jsonPath":".status.conditions[?(@.status == 'False')].message"}"# )] +#[serde(rename_all = "camelCase")] pub struct TenantInstanceSpec { - /// The jukeboxution source name + /// The jukebox source name pub jukebox: String, /// The category name pub category: String, @@ -46,6 +59,8 @@ pub struct TenantInstanceSpec { pub package: String, /// The package version pub version: Option, + /// Init from a previous backup + pub init_from: Option, /// Parameters pub options: Option>, } @@ -61,6 +76,8 @@ pub enum ConditionsType { TofuInstalled, VitalApplied, ScalableApplied, + InitFrom, + ScheduleBackup, OtherApplied, RhaiApplied, } @@ -266,6 +283,24 @@ impl ApplicationCondition { generation, ) } + + pub fn init_ko(message: &str, generation: i64) -> ApplicationCondition { + ApplicationCondition::new( + message, + ConditionsStatus::False, + ConditionsType::InitFrom, + generation, + ) + } + + pub fn schedule_ko(message: &str, generation: i64) -> ApplicationCondition { + ApplicationCondition::new( + message, + ConditionsStatus::False, + ConditionsType::ScheduleBackup, + generation, + ) + } } @@ -467,6 +502,8 @@ impl TenantInstance { ConditionsType::AgentStarted, ConditionsType::Ready, ConditionsType::Installed, + ConditionsType::InitFrom, + ConditionsType::ScheduleBackup, ]); conditions.push(ApplicationCondition::ready_ok(generation)); conditions.push(ApplicationCondition::installed_ok(generation)); @@ -621,6 +658,78 @@ impl TenantInstance { Ok(result) } + pub async fn set_status_init_failed(&mut self, reason: String) -> Result { + let client = get_client(); + let generation = self.metadata.generation.unwrap_or(1); + let mut conditions: Vec = self.get_conditions_excluding(vec![ + ConditionsType::AgentStarted, + ConditionsType::InitFrom, + ConditionsType::Installed, + ]); + conditions.push(ApplicationCondition::init_ko(&reason, generation)); + conditions.push(ApplicationCondition::installed_ko(&reason, generation)); + if !conditions + .clone() + .into_iter() + .any(|c| c.condition_type == ConditionsType::Ready) + { + conditions.push(ApplicationCondition::ready_ko(generation)); + } + let result: TenantInstance = self + .patch_status( + client.clone(), + json!({ + "conditions": conditions + }), + ) + .await?; + self.send_event(client, Event { + type_: EventType::Warning, + reason: "InitFromFail".to_string(), + note: Some(reason), + action: "InitFrom".to_string(), + secondary: None, + }) + .await?; + Ok(result) + } + + pub async fn set_status_schedule_backup_failed(&mut self, reason: String) -> Result { + let client = get_client(); + let generation = self.metadata.generation.unwrap_or(1); + let mut conditions: Vec = self.get_conditions_excluding(vec![ + ConditionsType::AgentStarted, + ConditionsType::ScheduleBackup, + ConditionsType::Installed, + ]); + conditions.push(ApplicationCondition::schedule_ko(&reason, generation)); + conditions.push(ApplicationCondition::installed_ko(&reason, generation)); + if !conditions + .clone() + .into_iter() + .any(|c| 
c.condition_type == ConditionsType::Ready) + { + conditions.push(ApplicationCondition::ready_ko(generation)); + } + let result: TenantInstance = self + .patch_status( + client.clone(), + json!({ + "conditions": conditions + }), + ) + .await?; + self.send_event(client, Event { + type_: EventType::Warning, + reason: "ScheduleBackupFailed".to_string(), + note: Some(reason), + action: "ScheduleBackup".to_string(), + secondary: None, + }) + .await?; + Ok(result) + } + pub async fn set_status_vitals(&mut self, vitals: Vec) -> Result { let count = vitals.len(); let client = get_client(); @@ -991,6 +1100,20 @@ impl TenantInstance { .map_err(rhai_err) } + pub fn rhai_set_status_schedule_backup_failed(&mut self, reason: String) -> RhaiRes { + block_in_place(|| { + Handle::current().block_on(async move { self.set_status_schedule_backup_failed(reason).await }) + }) + .map_err(rhai_err) + } + + pub fn rhai_set_status_init_failed(&mut self, reason: String) -> RhaiRes { + block_in_place(|| { + Handle::current().block_on(async move { self.set_status_init_failed(reason).await }) + }) + .map_err(rhai_err) + } + pub fn rhai_set_rhaistate(&mut self, rhaistate: String) -> RhaiRes { block_in_place(|| Handle::current().block_on(async move { self.set_rhaistate(rhaistate).await })) .map_err(rhai_err) diff --git a/common/src/jukebox.rs b/common/src/jukebox.rs index cc6ec45..0e2ebd0 100644 --- a/common/src/jukebox.rs +++ b/common/src/jukebox.rs @@ -43,7 +43,7 @@ pub enum JukeBoxMaturity { Alpha, } -/// Describe a source of vynil packages jukeboxution +/// Describe a source of vynil packages jukebox #[derive(CustomResource, Deserialize, Serialize, Clone, Debug, JsonSchema)] #[kube( kind = "JukeBox", diff --git a/common/src/k8sgeneric.rs b/common/src/k8sgeneric.rs index d63f846..8faa491 100644 --- a/common/src/k8sgeneric.rs +++ b/common/src/k8sgeneric.rs @@ -41,6 +41,7 @@ pub fn update_cache() { pub struct K8sObject { pub api: Api, pub obj: PartialObjectMeta, + pub kind: String, } impl K8sObject { pub fn rhai_delete(&mut self) -> RhaiRes<()> { @@ -138,7 +139,6 @@ impl K8sObject { pub fn wait_condition(&mut self, condition: String, timeout: i64) -> RhaiRes<()> { let name = self.obj.name_any(); - tracing::debug!("wait_condition({}) for {}", &condition, name); let cond = await_condition(self.api.clone(), &name, Self::is_condition(condition)); tokio::task::block_in_place(|| { tokio::runtime::Handle::current().block_on(async move { @@ -153,6 +153,55 @@ impl K8sObject { Ok(()) } + pub fn is_status(prop: String) -> impl Condition { + move |obj: Option<&DynamicObject>| { + if let Some(dynobj) = &obj { + if dynobj.data.is_object() + && dynobj + .data + .as_object() + .unwrap() + .keys() + .collect::>() + .contains(&&"status".to_string()) + { + let status = dynobj.data.as_object().unwrap()["status"].clone(); + if status.is_object() + && status + .as_object() + .unwrap() + .keys() + .collect::>() + .contains(&&prop) + { + let conditions = status.as_object().unwrap()[&prop].clone(); + if conditions.is_boolean() && conditions.as_bool().unwrap() { + return true; + } + } + } + } + false + } + } + + pub fn wait_status(&mut self, prop: String, timeout: i64) -> RhaiRes<()> { + let name = self.obj.name_any(); + tracing::debug!("wait_status({}) for {} {}", &prop, self.kind, name); + let cond = await_condition(self.api.clone(), &name, Self::is_status(prop)); + tokio::task::block_in_place(|| { + tokio::runtime::Handle::current().block_on(async move { + tokio::time::timeout(std::time::Duration::from_secs(timeout as u64), cond) + .await + 
.map_err(Error::Elapsed) + }) + }) + .map_err(rhai_err)? + .map_err(Error::KubeWaitError) + .map_err(rhai_err)?; + Ok(()) + } + pub fn is_for( cond: Box Result>>, ) -> impl Condition { @@ -195,6 +244,7 @@ pub struct K8sGeneric { pub api: Option>, pub ns: Option, pub scope: Scope, + pub kind: String, } // TODO: scale et exec @@ -229,12 +279,14 @@ impl K8sGeneric { api: Some(api), ns, scope: cap.scope, + kind: res.kind, } } else { K8sGeneric { api: None, ns: None, scope: Scope::Cluster, + kind: String::new(), } } } @@ -339,6 +391,7 @@ impl K8sGeneric { Ok(K8sObject { api: self.api.clone().unwrap(), obj: res, + kind: self.kind.clone(), }) } diff --git a/common/src/k8sworkload.rs b/common/src/k8sworkload.rs index 34bf8a7..e12a8b9 100644 --- a/common/src/k8sworkload.rs +++ b/common/src/k8sworkload.rs @@ -85,7 +85,6 @@ pub struct K8sStatefulSet { impl K8sStatefulSet { pub fn is_sts_available() -> impl Condition { |obj: Option<&StatefulSet>| { - tracing::warn!("Testing conditions"); if let Some(sts) = &obj { if let Some(spec) = &sts.spec { if let Some(s) = &sts.status { diff --git a/common/src/passwordhandler.rs b/common/src/passwordhandler.rs index 145fc69..8b278d9 100644 --- a/common/src/passwordhandler.rs +++ b/common/src/passwordhandler.rs @@ -33,7 +33,7 @@ impl Passwords { } } - pub fn generate(&mut self, length: u32, alpha: u32, numbers: u32, symbols: u32) -> String { + pub fn generate(&mut self, length: i64, alpha: u32, numbers: u32, symbols: u32) -> String { let mut character_sets = vec![ALPHA]; if numbers > 0 { character_sets.push(NUMBERS); diff --git a/common/src/rhaihandler.rs b/common/src/rhaihandler.rs index efbb332..a356187 100644 --- a/common/src/rhaihandler.rs +++ b/common/src/rhaihandler.rs @@ -44,7 +44,6 @@ pub struct Script { pub engine: Engine, pub ctx: Scope<'static>, } - impl Script { #[must_use] pub fn new(resolver_path: Vec) -> Script { @@ -76,15 +75,21 @@ impl Script { .register_fn("bcrypt_hash", |s: ImmutableString| { crate::hasheshandlers::bcrypt_hash(s.to_string()).map_err(rhai_err) }) - .register_fn("gen_password", |len: u32| -> String { + .register_fn("gen_password", |len: i64| -> String { Passwords::new().generate(len, 6, 2, 2) }) - .register_fn("gen_password_alphanum", |len: u32| -> String { + .register_fn("gen_password_alphanum", |len: i64| -> String { Passwords::new().generate(len, 8, 2, 0) }) .register_fn("get_env", |var: ImmutableString| -> String { std::env::var(var.to_string()).unwrap_or("".into()) }) + .register_fn("to_decimal", |val: ImmutableString| -> RhaiRes { + Ok(u32::from_str_radix(val.as_str(), 8).unwrap_or_else(|_| { + tracing::warn!("to_decimal received a non-valid parameter: {:?}", val); + 0 + })) + }) .register_fn( "base64_decode", |val: ImmutableString| -> RhaiRes { @@ -249,6 +254,7 @@ impl Script { .register_get("metadata", K8sObject::get_metadata) .register_fn("delete", K8sObject::rhai_delete) .register_fn("wait_condition", K8sObject::wait_condition) + .register_fn("wait_status", K8sObject::wait_status) .register_fn("wait_deleted", K8sObject::rhai_wait_deleted) /*.register_fn("wait_for", |context: NativeCallContext, k8sobj: &mut K8sObject, fnp: FnPtr, timeout: i64| { let condition = Box::new(move |obj: &DynamicObject| -> RhaiRes { @@ -354,6 +360,14 @@ impl Script { "set_status_rhai_failed", TenantInstance::rhai_set_status_rhai_failed, ) + .register_fn( + "set_status_schedule_backup_failed", + TenantInstance::rhai_set_status_schedule_backup_failed, + ) + .register_fn( + "set_status_init_failed", + TenantInstance::rhai_set_status_init_failed, + 
) .register_get("metadata", TenantInstance::get_metadata) .register_get("spec", TenantInstance::get_spec) .register_get("status", TenantInstance::get_status); @@ -418,6 +432,22 @@ impl Script { .register_get("images", VynilPackageSource::get_images) .register_get("resources", VynilPackageSource::get_resources); script.add_code("fn assert(cond, mess) {if (!cond){throw mess}}"); + script.add_code( + "fn import_run(name, instance, context, args) {\n\ + try {\n\ + import name as imp;\n\ + return imp::run(instance, context, args);\n\ + } catch(e) {\n\ + if type_of(e) == \"map\" && \"error\" in e && e.error == \"ErrorModuleNotFound\" {\n\ + log_debug(`No ${name} module, skipping.`);\n\ + } else if type_of(e) == \"map\" && \"error\" in e && e.error == \"ErrorFunctionNotFound\" {\n\ + log_debug(`No ${name}::run function, skipping.`);\n\ + } else {\n\ + throw e;\n\ + }\n\ + }\n\ + }", + ); script.add_code( "fn import_run(name, instance, context) {\n\ try {\n\ diff --git a/deploy/crd/crd.yaml b/deploy/crd/crd.yaml index 661653a..addbd4a 100644 --- a/deploy/crd/crd.yaml +++ b/deploy/crd/crd.yaml @@ -34,7 +34,7 @@ spec: description: Custom resource representing a JukeBox, source of vynil packages properties: spec: - description: Describe a source of vynil packages jukeboxution + description: Describe a source of vynil packages jukebox properties: maturity: description: Jukebox maturity (stable/beta/alpha) @@ -345,13 +345,31 @@ spec: description: Custom resource representing an Vynil tenant package installation properties: spec: - description: Describe a source of vynil packages jukeboxution + description: Describe a source of vynil packages jukebox properties: category: description: The category name type: string + initFrom: + description: Init from a previous backup + nullable: true + properties: + secretName: + description: 'Name of the secret containing: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BASE_REPO_URL and RESTIC_PASSWORD. Default to "backup-settings"' + nullable: true + type: string + snapshot: + description: Snapshot id for restoration + type: string + subPath: + description: Path within the bucket containing the backup to use for recovery. 
Default to "/" + nullable: true + type: string + required: + - snapshot + type: object jukebox: - description: The jukeboxution source name + description: The jukebox source name type: string options: description: Parameters @@ -408,6 +426,8 @@ spec: - TofuInstalled - VitalApplied - ScalableApplied + - InitFrom + - ScheduleBackup - OtherApplied - RhaiApplied type: string @@ -556,13 +576,13 @@ spec: description: Custom resource representing an Vynil cluster package installation properties: spec: - description: Describe a source of vynil packages jukeboxution + description: Describe a source of vynil packages jukebox properties: category: description: The category name type: string jukebox: - description: The jukeboxution source name + description: The jukebox source name type: string options: description: Parameters diff --git a/operator/parent.toml b/operator/parent.toml index 6729002..2dc75df 100644 --- a/operator/parent.toml +++ b/operator/parent.toml @@ -26,7 +26,7 @@ version = "0.96.0" [workspace.package] -version = "0.3.2" +version = "0.3.3" authors = ["Sébastien Huss "] edition = "2021" license = "BSD-3-Clause" diff --git a/operator/src/manager.rs b/operator/src/manager.rs index f5b524e..f9c4b11 100644 --- a/operator/src/manager.rs +++ b/operator/src/manager.rs @@ -12,7 +12,7 @@ use serde::Serialize; use serde_json::{json, Value}; use std::{collections::BTreeMap, path::PathBuf, sync::Arc}; use tokio::sync::RwLock; -static DEFAULT_AGENT_IMAGE: &str = "docker.io/sebt3/vynil-agent:0.3.0"; +static DEFAULT_AGENT_IMAGE: &str = "docker.io/sebt3/vynil-agent:0.3.3"; pub struct JukeCacheItem { pub pull_secret: Option, diff --git a/operator/templates/package.yaml.hbs b/operator/templates/package.yaml.hbs index f3e310d..3cfd313 100644 --- a/operator/templates/package.yaml.hbs +++ b/operator/templates/package.yaml.hbs @@ -34,6 +34,8 @@ spec: value: "1" - name: RUST_LOG value: {{ log_level }},agent={{ log_level }} + - name: AGENT_IMAGE + value: {{ agent_image }} - name: VYNIL_NAMESPACE valueFrom: fieldRef: @@ -84,6 +86,8 @@ spec: volumeMounts: - name: package mountPath: /package + - name: cache + mountPath: /var/cache/restic {{#if use_secret }} - name: pullsecret mountPath: /secret @@ -99,6 +103,9 @@ spec: - name: package emptyDir: sizeLimit: 500Mi + - name: cache + emptyDir: + sizeLimit: 500Mi - name: config configMap: name: vynil
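
For context, a minimal sketch of a TenantInstance using the new initFrom block. Only the field layout is taken from the CRD added above (snapshot is required, secretName defaults to "backup-settings", subPath defaults to "/"); the apiVersion, names and snapshot id are illustrative assumptions, not part of this patch.

apiVersion: vynil.solidite.fr/v1    # assumed group/version, not shown in this patch
kind: TenantInstance
metadata:
  name: my-app                      # hypothetical
  namespace: my-tenant              # hypothetical
spec:
  jukebox: demo                     # hypothetical jukebox source name
  category: apps                    # hypothetical category
  package: my-app                   # hypothetical package name
  initFrom:
    secretName: backup-settings     # optional, documented default
    subPath: my-app                 # optional, defaults to "/"
    snapshot: 1a2b3c4d              # required: snapshot id to restore (placeholder)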
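
Likewise, a hedged sketch of the secret the backup and restore jobs expect; the key names come from the secretName description above, and every value is a placeholder.

apiVersion: v1
kind: Secret
metadata:
  name: backup-settings             # documented default secret name
  namespace: my-tenant              # hypothetical
type: Opaque
stringData:
  AWS_ACCESS_KEY_ID: "AKIAEXAMPLE"                            # placeholder
  AWS_SECRET_ACCESS_KEY: "changeme"                           # placeholder
  BASE_REPO_URL: "s3:https://s3.example.com/tenant-backups"   # placeholder restic repository base
  RESTIC_PASSWORD: "changeme"                                 # placeholder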