Commit: 57 changed files with 1,369 additions and 205 deletions.
@@ -28,7 +28,7 @@ version = "0.96.0"
 [workspace.package]
-version = "0.3.2"
+version = "0.3.3"
 authors = ["Sébastien Huss <[email protected]>"]
 edition = "2021"
 license = "BSD-3-Clause"
@@ -77,6 +77,7 @@ operator = { cmd=[
     "podman build . -f operator/Dockerfile -t docker.io/sebt3/vynil-operator:$(cargo run --bin agent -- version) && podman push docker.io/sebt3/vynil-operator:$(cargo run --bin agent -- version)",
 ]}
 box = { cmd=[
+    "cargo cmd generate_crd",
     "cargo run --bin agent -- package update --source ./box/vynil/",
     "cargo run --bin agent -- package build -o ./box/vynil/ --tag $(cargo run --bin agent -- version) -r docker.io -n sebt3/vynil -u $(jq -r '.auths[\"docker.io\"].auth' </run/user/$(id -u)/containers/auth.json |base64 -d|awk -F: '{print $1}') -p $(jq -r '.auths[\"docker.io\"].auth' </run/user/$(id -u)/containers/auth.json |base64 -d|awk -F: '{print $2}')",
 ]}
@@ -26,7 +26,7 @@ version = "0.96.0"
 [workspace.package]
-version = "0.3.2"
+version = "0.3.3"
 authors = ["Sébastien Huss <[email protected]>"]
 edition = "2021"
 license = "BSD-3-Clause"
@@ -0,0 +1,247 @@
// Create the run context for a backup/restore pod from the environment prepared by the "run" function below.
fn from_args(context) {
    context["deployment_list"] = get_env("DEPLOYMENT_LIST").split(" ").filter(|x| x!="");
    context["statefulset_list"] = get_env("STATEFULSET_LIST").split(" ").filter(|x| x!="");
    context["secret_list"] = get_env("SECRET_LIST").split(" ").filter(|x| x!="");
    context["pg_list"] = get_env("PG_LIST").split(" ").filter(|x| x!="");
    context["restic_tags"] = get_env("RESTIC_TAGS");
    context["snapshot"] = get_env("RESTIC_SNAPSHOT");
    // Retention defaults used when the RESTIC_MAX_* variables are unset
    context["max_dayly"] = get_env("RESTIC_MAX_DAYLY");
    if context["max_dayly"] == "" {context["max_dayly"] = "7";}
    context["max_weekly"] = get_env("RESTIC_MAX_WEEKLY");
    if context["max_weekly"] == "" {context["max_weekly"] = "5";}
    context["max_monthly"] = get_env("RESTIC_MAX_MONTHLY");
    if context["max_monthly"] == "" {context["max_monthly"] = "12";}
    context["max_yearly"] = get_env("RESTIC_MAX_YEARLY");
    if context["max_yearly"] == "" {context["max_yearly"] = "4";}
    context["namespace"] = get_env("NAMESPACE");
    try {
        context["scale_target"] = parse_int(get_env("SCALE_TARGET"));
    } catch {
        context["scale_target"] = 1;
    }
    // Normalize the paths: turn "/" into spaces, trim, then restore "/" to drop leading/trailing slashes
    let sub_path = get_env("SUB_PATH");
    let base_path = get_env("BASE_REPO_URL");
    sub_path.replace("/"," ");
    sub_path.trim();
    sub_path.replace(" ","/");
    base_path.replace("/"," ");
    base_path.trim();
    base_path.replace(" ","/");
    context["sub_path"] = sub_path;
    context["base_path"] = base_path;
    context["s3_url"] = `s3:${base_path}/${sub_path}`;
    context["restic_args"] = "";
    if get_env("INSECURE_TLS") == "true" {
        context["restic_args"] += " --insecure-tls";
    }
    context
}

fn run(instance, context, use_init_from) {
    // TODO: Prepare MongoDB backup
    // TODO: Prepare Redis backup
    let secret_name = "backup-settings";
    let sub_path = `${context.instance.namespace}/${context.instance.appslug}`;
    if use_init_from {
        if instance.spec.initFrom.secretName != () {
            secret_name = instance.spec.initFrom.secretName;
        }
        if instance.spec.initFrom.subPath != () {
            sub_path = instance.spec.initFrom.subPath;
        }
    }
    context["volumes"] = [#{
        name: "empty-dir",
        emptyDir: #{}
    }];
    context["mounts"] = [#{
        name: "empty-dir",
        mountPath: `/backup`
    }];
    context["envs_from"] = [#{
        secretRef: #{
            name: secret_name
        }
    }];
    context["envs"] = [#{
        name: "RESTIC_TAGS",
        value: `tenant:${context.tenant.name},namespace:${context.instance.namespace},instance:${context.instance.name},component:${context.instance["package"].name},version:${context.instance.requested}`
    }, #{
        name: "RESTIC_HOST",
        value: context.instance.appslug
    }, #{
        name: "AGENT_IMAGE",
        value: context.agent_image
    }, #{
        name: "SCALE_TARGET",
        value: if context.namespace.ha { "2" } else { "1" }
    }, #{
        name: "VYNIL_NAMESPACE",
        value: context.cluster.vynil_namespace
    }, #{
        name: "INSTANCE",
        value: context.instance.name
    }, #{
        name: "TAG",
        value: context.instance.requested
    }, #{
        name: "SUB_PATH",
        value: sub_path
    }, #{
        name: "APPSLUG",
        value: context.instance.appslug
    }, #{
        name: "NAMESPACE",
        valueFrom: #{
            fieldRef: #{
                apiVersion: "v1",
                fieldPath: "metadata.namespace"
            }
        }
    }, #{
        name: "POD_NAME",
        valueFrom: #{
            fieldRef: #{
                apiVersion: "v1",
                fieldPath: "metadata.name"
            }
        }
    }];
    if use_init_from {
        context["envs"] += #{
            name: "RESTIC_SNAPSHOT",
            value: instance.spec.initFrom.snapshot
        };
    }
    context["files"] = [];
    if is_dir(`${context.package_dir}/scripts`) {
        for f in read_dir(`${context.package_dir}/scripts`) {
            let base = basename(f);
            if base.starts_with("restore") || base.starts_with("backup") || base.starts_with("maintenance") {
                context["files"] += #{
                    name: base,
                    content: file_read(f)
                };
            }
        }
    }
    context["has_files"] = context["files"].len() > 0;
    if context["has_files"] {
        context["volumes"] += #{
            name: "backups-scripts",
            configMap: #{
                defaultMode: 493, // 493 = 0o755, so the mounted scripts are executable
                name: `${context.instance.appslug}-backups-scripts`
            }
        };
        context["mounts"] += #{
            name: "backups-scripts",
            mountPath: "/package/scripts"
        };
    }
    context["schedule"] = `${context.namespace.maintenance_start_minut} ${context.namespace.maintenance_start_hour} * * *`;
    context["service_account"] = `${context.instance.appslug}-backup`;
    let pgs = [];
    let secrets = [];
    for v in instance.status.vitals {
        // Derive a short mount/env name: drop the appslug and any leading/trailing dashes
        let name = v.name;
        name.replace(context.instance.appslug,"");
        name.replace("-"," ");
        name.trim();
        name.replace(" ","-");
        if v.kind == "PersistentVolumeClaim" {
            if name.is_empty() {
                name = "data";
            }
            context["volumes"] += #{
                name: name,
                persistentVolumeClaim: #{
                    claimName: v.name
                }
            };
            context["mounts"] += #{
                name: name,
                mountPath: `/backup/${name}`
            };
        } else if v.kind == "Secret" {
            if name.is_empty() {
                name = "secret";
            }
            secrets += name;
            if ! use_init_from {
                context["volumes"] += #{
                    name: name,
                    secret: #{
                        secretName: v.name
                    }
                };
                context["mounts"] += #{
                    name: name,
                    mountPath: `/secrets/${name}`
                };
            }
            context["envs"] += #{
                name: `secret_${name}_target`,
                value: v.name
            };
        } else if v.kind == "Cluster" {
            if name.is_empty() {
                name = "postgres";
            }
            pgs += name;
            // Use the cluster's superuser secret when it exists, otherwise its "-app" secret
            let lst = k8s_resource("Secrets", context.instance.namespace).list_meta().items;
            let secret = `${v.name}-app`;
            if lst.filter(|s| s.metadata.name == `${v.name}-superuser`).len() > 0 {
                secret = `${v.name}-superuser`;
            }
            for i in ["host", "username", "password", "dbname"] {
                context["envs"] += #{
                    name: `${name}_${i}`,
                    valueFrom: #{
                        secretKeyRef: #{
                            name: secret,
                            key: i
                        }
                    }
                };
            }
        }
    }
    let deploy = [];
    let sts = [];
    if instance.status.scalables != () {
        for s in instance.status.scalables {
            if s.kind == "Deployment" {
                deploy += s.name;
            } else if s.kind == "StatefulSet" {
                sts += s.name;
            }
        }
    }
    // Space-joined lists; from_args() splits them back on " "
    if deploy.len() > 0 {
        context["envs"] += #{
            name: `DEPLOYMENT_LIST`,
            value: deploy.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
        };
    }
    if sts.len() > 0 {
        context["envs"] += #{
            name: `STATEFULSET_LIST`,
            value: sts.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
        };
    }
    if pgs.len() > 0 {
        context["envs"] += #{
            name: `PG_LIST`,
            value: pgs.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
        };
    }
    if secrets.len() > 0 {
        context["envs"] += #{
            name: `SECRET_LIST`,
            value: secrets.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
        };
    }
    context
}
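
The DEPLOYMENT_LIST, STATEFULSET_LIST, PG_LIST and SECRET_LIST values built above are space-joined strings, and from_args() at the top of this file parses them back with split(" ").filter(|x| x != ""). A minimal sketch of that round trip, using only the reduce idiom already present in the code (the names are invented for illustration):

// Sketch, not part of the commit: join an array into the space-separated
// form stored in the env var, then split it back as from_args() does.
let names = ["front", "worker"];   // hypothetical scalable names
let joined = names.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` });
// joined == "front worker", the form stored in DEPLOYMENT_LIST
let parsed = joined.split(" ").filter(|x| x != "");   // back to ["front", "worker"]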
@@ -0,0 +1,52 @@
import "backup_context" as ctx; | ||
fn run(args) { | ||
let context = ctx::from_args(args); | ||
log_info(`Starting backup using target: ${context.s3_url}`); | ||
|
||
import_run("backup_pre", context); | ||
if is_file(`${args.package_dir}/scripts/backup.sh`) { | ||
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup.sh`); | ||
if rc != 0 { | ||
throw `${context.package_dir}/scripts/backup.sh FAILED returning ${rc}`; | ||
} else { | ||
import_run("backup_post", context); | ||
return rc; | ||
} | ||
} | ||
import_run("backup_init", context); | ||
import_run("maintenance_start", context); | ||
if is_file(`${args.package_dir}/scripts/backup_prepare.sh`) { | ||
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_prepare.sh`); | ||
if rc != 0 { | ||
throw `${context.package_dir}/scripts/backup_prepare.sh FAILED returning ${rc}`; | ||
} | ||
} else { | ||
if is_file(`${args.package_dir}/scripts/backup_prepare_secret.sh`) { | ||
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_prepare_secret.sh`); | ||
if rc != 0 { | ||
throw `${context.package_dir}/scripts/backup_prepare_secret.sh FAILED returning ${rc}`; | ||
} | ||
} else { | ||
import_run("backup_prepare_secret", context); | ||
} | ||
if is_file(`${args.package_dir}/scripts/backup_prepare_postgresql.sh`) { | ||
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_prepare_postgresql.sh`); | ||
if rc != 0 { | ||
throw `${context.package_dir}/scripts/backup_prepare_postgresql.sh FAILED returning ${rc}`; | ||
} | ||
} else { | ||
import_run("backup_prepare_postgresql", context); | ||
} | ||
} | ||
if is_file(`${args.package_dir}/scripts/backup_before.sh`) { | ||
let rc = shell_run(`export RESTIC_REPOSITORY="${context.s3_url}";${context.package_dir}/scripts/backup_before.sh`); | ||
if rc != 0 { | ||
throw `${context.package_dir}/scripts/backup_before.sh FAILED returning ${rc}`; | ||
} | ||
} | ||
import_run("backup_run", context); | ||
import_run("maintenance_stop", context); | ||
import_run("backup_check", context); | ||
import_run("backup_prune", context); | ||
import_run("backup_post", context); | ||
} |