Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore(e2e): rhidp-5296 e2e - main nightly job failing on runtime configuration change step #2120

Open
wants to merge 7 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions .ibm/pipelines/jobs/main.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,15 @@ handle_main() {
check_and_test "${RELEASE_NAME}" "${NAME_SPACE}" "${url}"
local rbac_url="https://${RELEASE_NAME_RBAC}-backstage-${NAME_SPACE_RBAC}.${K8S_CLUSTER_ROUTER_BASE}"
check_and_test "${RELEASE_NAME_RBAC}" "${NAME_SPACE_RBAC}" "${rbac_url}"

# Deploy `showcase-runtime` to run tests that require configuration changes at runtime
configure_namespace "${NAME_SPACE_RUNTIME}"
uninstall_helmchart "${NAME_SPACE_RUNTIME}" "${RELEASE_NAME}"
oc apply -f "$DIR/resources/redis-cache/redis-deployment.yaml" --namespace="${NAME_SPACE_RUNTIME}"

local runtime_url="https://${RELEASE_NAME}-backstage-${NAME_SPACE_RUNTIME}.${K8S_CLUSTER_ROUTER_BASE}"

apply_yaml_files "${DIR}" "${NAME_SPACE_RUNTIME}" "${runtime_url}"
helm upgrade -i "${RELEASE_NAME}" -n "${NAME_SPACE_RUNTIME}" "${HELM_REPO_NAME}/${HELM_IMAGE_NAME}" --version "${CHART_VERSION}" -f "${DIR}/value_files/${HELM_CHART_VALUE_FILE_NAME}" --set global.clusterRouterBase="${K8S_CLUSTER_ROUTER_BASE}" --set upstream.backstage.image.repository="${QUAY_REPO}" --set upstream.backstage.image.tag="${TAG_NAME}"
check_and_test "${RELEASE_NAME}" "${NAME_SPACE_RUNTIME}" "${runtime_url}"
}
20 changes: 10 additions & 10 deletions .ibm/pipelines/jobs/periodic.sh
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,14 @@ handle_nightly() {
local rds_url="https://${RELEASE_NAME}-backstage-${NAME_SPACE_RDS}.${K8S_CLUSTER_ROUTER_BASE}"
check_and_test "${RELEASE_NAME}" "${NAME_SPACE_RDS}" "${rds_url}"

# Deploy `showcase-runtime` to run tests that require configuration changes at runtime
configure_namespace "${NAME_SPACE_RUNTIME}"
uninstall_helmchart "${NAME_SPACE_RUNTIME}" "${RELEASE_NAME}"
oc apply -f "$DIR/resources/redis-cache/redis-deployment.yaml" --namespace="${NAME_SPACE_RUNTIME}"

local runtime_url="https://${RELEASE_NAME}-backstage-${NAME_SPACE_RUNTIME}.${K8S_CLUSTER_ROUTER_BASE}"

apply_yaml_files "${DIR}" "${NAME_SPACE_RUNTIME}" "${runtime_url}"
helm upgrade -i "${RELEASE_NAME}" -n "${NAME_SPACE_RUNTIME}" "${HELM_REPO_NAME}/${HELM_IMAGE_NAME}" --version "${CHART_VERSION}" -f "${DIR}/value_files/${HELM_CHART_VALUE_FILE_NAME}" --set global.clusterRouterBase="${K8S_CLUSTER_ROUTER_BASE}" --set upstream.backstage.image.repository="${QUAY_REPO}" --set upstream.backstage.image.tag="${TAG_NAME}"
check_and_test "${RELEASE_NAME}" "${NAME_SPACE_RUNTIME}" "${runtime_url}"
# # Deploy `showcase-runtime` to run tests that require configuration changes at runtime
# configure_namespace "${NAME_SPACE_RUNTIME}"
# uninstall_helmchart "${NAME_SPACE_RUNTIME}" "${RELEASE_NAME}"
# oc apply -f "$DIR/resources/redis-cache/redis-deployment.yaml" --namespace="${NAME_SPACE_RUNTIME}"
#
# local runtime_url="https://${RELEASE_NAME}-backstage-${NAME_SPACE_RUNTIME}.${K8S_CLUSTER_ROUTER_BASE}"
#
# apply_yaml_files "${DIR}" "${NAME_SPACE_RUNTIME}" "${runtime_url}"
# helm upgrade -i "${RELEASE_NAME}" -n "${NAME_SPACE_RUNTIME}" "${HELM_REPO_NAME}/${HELM_IMAGE_NAME}" --version "${CHART_VERSION}" -f "${DIR}/value_files/${HELM_CHART_VALUE_FILE_NAME}" --set global.clusterRouterBase="${K8S_CLUSTER_ROUTER_BASE}" --set upstream.backstage.image.repository="${QUAY_REPO}" --set upstream.backstage.image.tag="${TAG_NAME}"
# check_and_test "${RELEASE_NAME}" "${NAME_SPACE_RUNTIME}" "${runtime_url}"
}
39 changes: 31 additions & 8 deletions e2e-tests/playwright/e2e/configuration-test/config-map.spec.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import { test, expect } from "@playwright/test";
import { KubeClient } from "../../utils/kube-client";
import { LOGGER } from "../../utils/logger";
import { Common } from "../../utils/common";
import { UIhelper } from "../../utils/ui-helper";
import * as yaml from "js-yaml";

test.describe("Change app-config at e2e test runtime", () => {
test("Verify title change after ConfigMap modification", async ({ page }) => {
Expand All @@ -16,17 +16,28 @@ test.describe("Change app-config at e2e test runtime", () => {
const dynamicTitle = generateDynamicTitle();
const uiHelper = new UIhelper(page);
try {
LOGGER.info(`Updating ConfigMap '${configMapName}' with new title.`);
console.log(`Updating ConfigMap '${configMapName}' with new title.`);
await kubeUtils.updateConfigMapTitle(
configMapName,
namespace,
dynamicTitle,
);

LOGGER.info(
console.log(
`Restarting deployment '${deploymentName}' to apply ConfigMap changes.`,
);
await kubeUtils.restartDeployment(deploymentName, namespace);
// await kubeUtils.restartDeployment(deploymentName, namespace);
await kubeUtils.restartDeploymentWithAnnotation(deploymentName, namespace);

console.log(`Verifying ConfigMap '${configMapName}' contains the new title.`);
const updatedConfigMap = await kubeUtils.getConfigMap(configMapName, namespace);
const appConfigYaml = updatedConfigMap.body.data[`${configMapName}.yaml`];

const appConfig = yaml.load(appConfigYaml) as any;
const updatedTitle = appConfig?.app?.title;

console.log(`Updated title in ConfigMap: ${updatedTitle}`);
expect(updatedTitle).toBe(dynamicTitle);

const common = new Common(page);
await page.context().clearCookies();
Expand All @@ -37,11 +48,23 @@ test.describe("Change app-config at e2e test runtime", () => {
await uiHelper.verifyHeading("Welcome back!");
await uiHelper.verifyText("Quick Access");
await expect(page.locator("#search-bar-text-field")).toBeVisible();
LOGGER.info("Verifying new title in the UI...");
expect(await page.title()).toContain(dynamicTitle);
LOGGER.info("Title successfully verified in the UI.");
console.log("Verifying new title in the UI...");

const title = await page.evaluate(() => document.title);
console.log(title);
console.log(page.title());
const title2 = await page.locator("title").textContent();
console.log(title2);

expect(title2).toContain(dynamicTitle);

await expect(page.locator("title")).toHaveText(new RegExp(dynamicTitle), {
timeout: 60000,
});

console.log("Title successfully verified in the UI.");
} catch (error) {
LOGGER.error(
console.error(
`Test failed during ConfigMap update or deployment restart:`,
error,
);
Expand Down
35 changes: 35 additions & 0 deletions e2e-tests/playwright/utils/kube-client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -350,6 +350,41 @@ export class KubeClient {
}
}

/**
 * Triggers a rolling restart of a Deployment by stamping a restart
 * annotation onto its pod template (the same mechanism used by
 * `kubectl rollout restart`).
 *
 * Uses a strategic merge patch rather than a JSON Patch `add` op:
 * RFC 6902 `add` on `/spec/template/metadata/annotations/<key>` fails
 * when the template has no existing `annotations` map, whereas a
 * strategic merge patch creates the parent map if it is missing.
 *
 * @param deploymentName - name of the Deployment to restart
 * @param namespace - namespace containing the Deployment
 * @throws rethrows any Kubernetes API error after logging it
 */
async restartDeploymentWithAnnotation(deploymentName: string, namespace: string) {
  try {
    console.log(`Adding restart annotation to deployment '${deploymentName}' for redeploy.`);

    // Standard annotation key written by `kubectl rollout restart`;
    // changing its value forces a new ReplicaSet rollout.
    const patch = {
      spec: {
        template: {
          metadata: {
            annotations: {
              "kubectl.kubernetes.io/restartedAt": new Date().toISOString(),
            },
          },
        },
      },
    };

    const options = {
      headers: {
        "Content-Type": k8s.PatchUtils.PATCH_FORMAT_STRATEGIC_MERGE_PATCH,
      },
    };

    await this.appsApi.patchNamespacedDeployment(
      deploymentName,
      namespace,
      patch,
      undefined,
      undefined,
      undefined,
      undefined,
      undefined,
      options,
    );

    console.log(`Restart annotation added to deployment '${deploymentName}'.`);
  } catch (error) {
    console.error(`Error adding restart annotation to deployment '${deploymentName}':`, error);
    throw error;
  }
}

async logPodConditions(namespace: string, labelSelector?: string) {
const selector =
labelSelector ||
Expand Down
Loading