85 changes: 70 additions & 15 deletions .ibm/pipelines/utils.sh
@@ -82,6 +82,17 @@ yq_merge_value_files() {
fi
}

is_pr_e2e_ocp_helm_job() {
[[ "${JOB_NAME}" == *pull* ]] && [[ "${JOB_NAME}" == *e2e-ocp-helm* ]]
}

# Post-process merged Helm values to disable all orchestrator plugins
# This avoids hardcoding plugin versions in PR diff files
disable_orchestrator_plugins_in_values() {
local values_file=$1
yq eval -i '(.global.dynamic.plugins[] | select(.package | contains("orchestrator")) | .disabled) = true' "${values_file}"
}

# Waits for a Kubernetes/OpenShift deployment to become ready within a specified timeout period
wait_for_deployment() {
local namespace=$1
@@ -945,11 +956,12 @@ cluster_setup_ocp_helm() {
install_pipelines_operator
install_crunchy_postgres_ocp_operator

# Skip orchestrator infra installation on OSD-GCP due to infrastructure limitations
if [[ ! "${JOB_NAME}" =~ osd-gcp ]]; then
install_orchestrator_infra_chart
# Skip orchestrator infra installation on OSD-GCP due to infrastructure limitations.
# Also skip it for the mandatory PR job (e2e-ocp-helm) to speed up presubmits.
if [[ "${JOB_NAME}" =~ osd-gcp ]] || is_pr_e2e_ocp_helm_job; then
echo "Skipping orchestrator-infra installation on this job: ${JOB_NAME}"
else
echo "Skipping orchestrator-infra installation on OSD-GCP environment"
install_orchestrator_infra_chart
fi

# then wait for the right status one by one
@@ -1056,9 +1068,29 @@ base_deployment() {
local rhdh_base_url="https://${RELEASE_NAME}-developer-hub-${NAME_SPACE}.${K8S_CLUSTER_ROUTER_BASE}"
apply_yaml_files "${DIR}" "${NAME_SPACE}" "${rhdh_base_url}"
log::info "Deploying image from repository: ${QUAY_REPO}, TAG_NAME: ${TAG_NAME}, in NAME_SPACE: ${NAME_SPACE}"
perform_helm_install "${RELEASE_NAME}" "${NAME_SPACE}" "${HELM_CHART_VALUE_FILE_NAME}"

deploy_orchestrator_workflows "${NAME_SPACE}"
if is_pr_e2e_ocp_helm_job; then
local merged_pr_value_file="/tmp/merged-values_showcase_PR.yaml"
yq_merge_value_files "merge" "${DIR}/value_files/${HELM_CHART_VALUE_FILE_NAME}" "${DIR}/value_files/diff-values_showcase_PR.yaml" "${merged_pr_value_file}"
disable_orchestrator_plugins_in_values "${merged_pr_value_file}"

mkdir -p "${ARTIFACT_DIR}/${NAME_SPACE}"
cp -a "${merged_pr_value_file}" "${ARTIFACT_DIR}/${NAME_SPACE}/" || true
# shellcheck disable=SC2046
helm upgrade -i "${RELEASE_NAME}" -n "${NAME_SPACE}" \
"${HELM_CHART_URL}" --version "${CHART_VERSION}" \
-f "${merged_pr_value_file}" \
--set global.clusterRouterBase="${K8S_CLUSTER_ROUTER_BASE}" \
$(get_image_helm_set_params)
else
perform_helm_install "${RELEASE_NAME}" "${NAME_SPACE}" "${HELM_CHART_VALUE_FILE_NAME}"
fi

if is_pr_e2e_ocp_helm_job; then
log::warn "Skipping orchestrator workflows deployment on PR job: ${JOB_NAME}"
else
deploy_orchestrator_workflows "${NAME_SPACE}"
fi
}

rbac_deployment() {
@@ -1070,20 +1102,43 @@ rbac_deployment() {
local rbac_rhdh_base_url="https://${RELEASE_NAME_RBAC}-developer-hub-${NAME_SPACE_RBAC}.${K8S_CLUSTER_ROUTER_BASE}"
apply_yaml_files "${DIR}" "${NAME_SPACE_RBAC}" "${rbac_rhdh_base_url}"
log::info "Deploying image from repository: ${QUAY_REPO}, TAG_NAME: ${TAG_NAME}, in NAME_SPACE: ${RELEASE_NAME_RBAC}"
perform_helm_install "${RELEASE_NAME_RBAC}" "${NAME_SPACE_RBAC}" "${HELM_CHART_RBAC_VALUE_FILE_NAME}"
if is_pr_e2e_ocp_helm_job; then
local merged_pr_rbac_value_file="/tmp/merged-values_showcase-rbac_PR.yaml"
yq_merge_value_files "merge" "${DIR}/value_files/${HELM_CHART_RBAC_VALUE_FILE_NAME}" "${DIR}/value_files/diff-values_showcase-rbac_PR.yaml" "${merged_pr_rbac_value_file}"
disable_orchestrator_plugins_in_values "${merged_pr_rbac_value_file}"

mkdir -p "${ARTIFACT_DIR}/${NAME_SPACE_RBAC}"
cp -a "${merged_pr_rbac_value_file}" "${ARTIFACT_DIR}/${NAME_SPACE_RBAC}/" || true
# shellcheck disable=SC2046
helm upgrade -i "${RELEASE_NAME_RBAC}" -n "${NAME_SPACE_RBAC}" \
"${HELM_CHART_URL}" --version "${CHART_VERSION}" \
-f "${merged_pr_rbac_value_file}" \
--set global.clusterRouterBase="${K8S_CLUSTER_ROUTER_BASE}" \
$(get_image_helm_set_params)
else
perform_helm_install "${RELEASE_NAME_RBAC}" "${NAME_SPACE_RBAC}" "${HELM_CHART_RBAC_VALUE_FILE_NAME}"
fi

# NOTE: This is a workaround to allow the sonataflow platform to connect to the external postgres db using ssl.
# Wait for the sonataflow database creation job to complete with robust error handling
if ! wait_for_job_completion "${NAME_SPACE_RBAC}" "${RELEASE_NAME_RBAC}-create-sonataflow-database" 10 10; then
echo "❌ Failed to create sonataflow database. Aborting RBAC deployment."
return 1
if is_pr_e2e_ocp_helm_job; then
log::warn "Skipping sonataflow (orchestrator) external DB SSL workaround on PR job: ${JOB_NAME}"
else
# Wait for the sonataflow database creation job to complete with robust error handling
if ! wait_for_job_completion "${NAME_SPACE_RBAC}" "${RELEASE_NAME_RBAC}-create-sonataflow-database" 10 10; then
echo "❌ Failed to create sonataflow database. Aborting RBAC deployment."
return 1
fi
oc -n "${NAME_SPACE_RBAC}" patch sfp sonataflow-platform --type=merge \
-p '{"spec":{"services":{"jobService":{"podTemplate":{"container":{"env":[{"name":"QUARKUS_DATASOURCE_REACTIVE_URL","value":"postgresql://postgress-external-db-primary.postgress-external-db.svc.cluster.local:5432/sonataflow?search_path=jobs-service&sslmode=require&ssl=true&trustAll=true"},{"name":"QUARKUS_DATASOURCE_REACTIVE_SSL_MODE","value":"require"},{"name":"QUARKUS_DATASOURCE_REACTIVE_TRUST_ALL","value":"true"}]}}}}}}'
oc rollout restart deployment/sonataflow-platform-jobs-service -n "${NAME_SPACE_RBAC}"
fi
oc -n "${NAME_SPACE_RBAC}" patch sfp sonataflow-platform --type=merge \
-p '{"spec":{"services":{"jobService":{"podTemplate":{"container":{"env":[{"name":"QUARKUS_DATASOURCE_REACTIVE_URL","value":"postgresql://postgress-external-db-primary.postgress-external-db.svc.cluster.local:5432/sonataflow?search_path=jobs-service&sslmode=require&ssl=true&trustAll=true"},{"name":"QUARKUS_DATASOURCE_REACTIVE_SSL_MODE","value":"require"},{"name":"QUARKUS_DATASOURCE_REACTIVE_TRUST_ALL","value":"true"}]}}}}}}'
oc rollout restart deployment/sonataflow-platform-jobs-service -n "${NAME_SPACE_RBAC}"

# initiate orchestrator workflows deployment
deploy_orchestrator_workflows "${NAME_SPACE_RBAC}"
if is_pr_e2e_ocp_helm_job; then
log::warn "Skipping orchestrator workflows deployment on PR job: ${JOB_NAME}"
else
deploy_orchestrator_workflows "${NAME_SPACE_RBAC}"
fi
}

initiate_deployments() {
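For reference, a minimal sketch of what the new `disable_orchestrator_plugins_in_values` helper does to a merged values file. It reuses the exact `yq` expression from the diff above; the sample package names are invented for illustration, and mikefarah `yq` v4 is assumed to be on the PATH.

```bash
# Illustrative only: shows the effect of the post-processing step on a tiny values file.
cat > /tmp/demo-values.yaml <<'EOF'
global:
  dynamic:
    plugins:
      - package: "oci://quay.io/example/orchestrator-plugin:0.1"   # invented name
        disabled: false
      - package: "oci://quay.io/example/some-other-plugin:0.1"     # invented name
        disabled: false
EOF

# Same expression as disable_orchestrator_plugins_in_values in utils.sh:
yq eval -i \
  '(.global.dynamic.plugins[] | select(.package | contains("orchestrator")) | .disabled) = true' \
  /tmp/demo-values.yaml

# Only the entry whose package contains "orchestrator" ends up with disabled: true.
yq eval '.global.dynamic.plugins' /tmp/demo-values.yaml
```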
8 changes: 8 additions & 0 deletions .ibm/pipelines/value_files/diff-values_showcase-rbac_PR.yaml
@@ -0,0 +1,8 @@
# This file is for the mandatory PR job only (e2e-ocp-helm).
# It is merged on top of the base `values_showcase-rbac.yaml` before `helm upgrade` runs and contains only the PR-specific differences.
# Note that each key present in this file overwrites the whole corresponding key from the base file.
# The only exception is global.dynamic.plugins, which is merged with the base file.

# Disable orchestrator for PRs to speed up the mandatory presubmit job and avoid extra infra installs.
# Orchestrator dynamic plugins are disabled via post-processing (see rbac_deployment in utils.sh)
orchestrator: null
8 changes: 8 additions & 0 deletions .ibm/pipelines/value_files/diff-values_showcase_PR.yaml
@@ -0,0 +1,8 @@
# This file is for the mandatory PR job only (e2e-ocp-helm).
# It is merged on top of the base `values_showcase.yaml` before `helm upgrade` runs and contains only the PR-specific differences.
# Note that each key present in this file overwrites the whole corresponding key from the base file.
# The only exception is global.dynamic.plugins, which is merged with the base file.

# Disable orchestrator for PRs to speed up the mandatory presubmit job and avoid extra infra installs.
# Orchestrator dynamic plugins are disabled via post-processing (see base_deployment in utils.sh)
orchestrator: null
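Assuming the merge behaves as described in the comments above, a quick way to sanity-check the merged file that `base_deployment` writes (and copies into `ARTIFACT_DIR`) is a couple of `yq` queries. The expectations below follow those comments rather than the implementation of `yq_merge_value_files`, which is not shown in this diff.

```bash
# Illustrative sanity check on the merged PR values file produced by base_deployment.
merged="/tmp/merged-values_showcase_PR.yaml"

# The whole orchestrator key should have been overwritten by the PR overlay.
yq eval '.orchestrator' "${merged}"     # expected: null

# Every orchestrator dynamic plugin should have been disabled by the post-processing step.
yq eval \
  '[.global.dynamic.plugins[] | select(.package | contains("orchestrator")) | .disabled] | all' \
  "${merged}"                           # expected: true
```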
28 changes: 13 additions & 15 deletions docs/e2e-tests/CI.md
@@ -16,7 +16,6 @@ When a new Pull Request (PR) is opened at [rhdh](https://github.com/redhat-devel
For scenarios where tests are not automatically triggered, or when you need to manually initiate tests (e.g., for draft PRs or external contributions), you can use the following commands:

1. **Commenting `/ok-to-test`:**

- **Purpose:** This command is used to validate a PR for testing, especially important for external contributors or when tests are not automatically triggered.
- **Who Can Use It:** Only members of the [janus-idp](https://github.com/janus-idp) GitHub organization can mark the PR with this comment.
- **Use Cases:**
@@ -29,15 +28,15 @@ For scenarios where tests are not automatically triggered, or when you need to m
- `/test e2e-ocp-helm` for mandatory PR checks
- **Note:** Avoid using `/test all` as it may trigger unnecessary jobs and consume CI resources. Instead, use `/test ?` to see available options and trigger only the specific tests you need.
3. **Triggering Optional Nightly Job Execution on Pull Requests:**
The following optional nightly jobs can be manually triggered on PRs targeting the `main` branch and `release-*` branches. These jobs help validate changes across various deployment environments by commenting the trigger command on PR.
The following optional nightly jobs can be manually triggered on PRs targeting the `main` branch and `release-*` branches. These jobs help validate changes across various deployment environments by commenting the trigger command on PR.

**Job Name Format:** Jobs follow the naming scheme `redhat-developer-rhdh-PLATFORM-[VERSION]-INSTALL_METHOD-[SPECIAL_TEST]-nightly` where:
- `PLATFORM`: The target platform (e.g., `ocp`, `aks`, `gke`)
- `VERSION`: The platform version (e.g., `v4-17`, `v4-18`, `v4-19`)
- `INSTALL_METHOD`: The deployment method (e.g., `helm`, `operator`)
- `SPECIAL_TEST`: Optional special test type (e.g., `auth-providers`, `upgrade`)
**Job Name Format:** Jobs follow the naming scheme `redhat-developer-rhdh-PLATFORM-[VERSION]-INSTALL_METHOD-[SPECIAL_TEST]-nightly` where:
- `PLATFORM`: The target platform (e.g., `ocp`, `aks`, `gke`)
- `VERSION`: The platform version (e.g., `v4-17`, `v4-18`, `v4-19`)
- `INSTALL_METHOD`: The deployment method (e.g., `helm`, `operator`)
- `SPECIAL_TEST`: Optional special test type (e.g., `auth-providers`, `upgrade`)

Use `/test ?` to see the complete list of available jobs for your specific branch and PR context.
Use `/test ?` to see the complete list of available jobs for your specific branch and PR context.

These interactions are picked up by the OpenShift-CI service, which sets up a test environment. The configurations and steps for setting up this environment are defined in the `openshift-ci-tests.sh` script. For more details, see the [High-Level Overview of `openshift-ci-tests.sh`](#high-level-overview-of-openshift-ci-testssh).

@@ -58,14 +57,14 @@ If the initial automatically triggered tests fail, OpenShift-CI will add a comme
- Tests are executed on both **RBAC** (Role-Based Access Control) and **non-RBAC** namespaces. Different sets of tests are executed for both the **non-RBAC RHDH instance** and the **RBAC RHDH instance**, each deployed in separate namespaces.
- **Access:** To access the environment, run the script at `.ibm/pipelines/ocp-cluster-claim-login.sh`. You will be prompted for the Prow URL (the URL from the OpenShift agent, which looks like https://prow.ci.openshift.org/...). Once your test has claimed a cluster, the script will provide the cluster web console URL along with the credentials.
- **Steps:**

1. **Detection:** OpenShift-CI detects the PR event.
2. **Environment Setup:** The test environment is set up using the `openshift-ci-tests.sh` script (see the [High-Level Overview](#high-level-overview-of-openshift-ci-testssh)).
- **Cluster Configuration:** Sets up the required namespaces and applies necessary configurations and secrets.
- **Application Deployment:** Deploys the RHDH instances using Helm charts, tailored to the specific test scenarios.
3. **Test Execution:**
- **Running Tests:** Executes test suites using `yarn playwright test --project=<project-name>` directly.
- **Retry Logic:** Individual tests are retried up to 2 times as specified in the Playwright configuration.
- **Note:** Orchestrator infra setup and the associated tests are excluded from the mandatory PR job (`/test e2e-ocp-helm`) and run in the nightly jobs instead.
4. **Artifact Collection:**
- Collects test artifacts (logs, screenshots, recordings).
- Stores artifacts in the designated `ARTIFACT_DIR` for a retention period of **6 months**.
@@ -136,7 +135,6 @@ The nightly job for the `main` branch also runs against three OpenShift Containe
### Automation Processes

- **Script Used:**

- **`openshift-ci-tests.sh`**: Automates the setup of the test environment, deployment of RHDH instances, and execution of tests. For more details, refer to the [High-Level Overview of `openshift-ci-tests.sh`](#high-level-overview-of-openshift-ci-testssh).

### High-Level Overview of `openshift-ci-tests.sh`
@@ -167,12 +165,12 @@ All Playwright project names are defined in a single JSON file: [`e2e-tests/play

When adding or modifying Playwright projects, update `projects.json` first. The project names are automatically available as:

| JSON Key | Shell Variable | Value |
|----------|----------------|-------|
| `SHOWCASE` | `$PW_PROJECT_SHOWCASE` | `showcase` |
| JSON Key | Shell Variable | Value |
| --------------- | --------------------------- | --------------- |
| `SHOWCASE` | `$PW_PROJECT_SHOWCASE` | `showcase` |
| `SHOWCASE_RBAC` | `$PW_PROJECT_SHOWCASE_RBAC` | `showcase-rbac` |
| `SHOWCASE_K8S` | `$PW_PROJECT_SHOWCASE_K8S` | `showcase-k8s` |
| ... | ... | ... |
| `SHOWCASE_K8S` | `$PW_PROJECT_SHOWCASE_K8S` | `showcase-k8s` |
| ... | ... | ... |

When the test run is complete, the status will be reported under your PR checks.

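The table above only documents the mapping; this diff does not show how the `PW_PROJECT_*` variables are actually exported. Below is a hypothetical sketch of that mapping, assuming `projects.json` is a flat `{"KEY": "project-name"}` object and `jq` is available; the real pipeline logic may differ.

```bash
# Hypothetical sketch: export each JSON key as a PW_PROJECT_* shell variable.
# The file path and JSON layout are assumptions; see the pipeline scripts for the real logic.
while IFS='=' read -r key value; do
  export "PW_PROJECT_${key}=${value}"
done < <(jq -r 'to_entries[] | "\(.key)=\(.value)"' e2e-tests/playwright/projects.json)

echo "${PW_PROJECT_SHOWCASE}"        # -> showcase
echo "${PW_PROJECT_SHOWCASE_RBAC}"   # -> showcase-rbac
```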
7 changes: 7 additions & 0 deletions e2e-tests/playwright.config.ts
@@ -4,6 +4,10 @@ import { PW_PROJECT } from "./playwright/projects";
process.env.JOB_NAME = process.env.JOB_NAME || "";
process.env.IS_OPENSHIFT = process.env.IS_OPENSHIFT || "";

const isPrE2eOcpHelmJob =
process.env.JOB_NAME.includes("pull") &&
process.env.JOB_NAME.includes("e2e-ocp-helm");

// Set LOCALE based on which project is being run
const args = process.argv;

@@ -80,6 +84,9 @@ export default defineConfig({
"**/playwright/e2e/plugins/tekton/tekton.spec.ts",
"**/playwright/e2e/dynamic-home-page-customization.spec.ts",
"**/playwright/e2e/plugins/scorecard/scorecard.spec.ts",
...(isPrE2eOcpHelmJob
? ["**/playwright/e2e/plugins/orchestrator/**/*.spec.ts"]
: []),
],
},
{
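To see the new `testIgnore` entry take effect locally, one option is to set `JOB_NAME` to something that contains both "pull" and "e2e-ocp-helm" (the pattern checked by `isPrE2eOcpHelmJob`) and list the tests. The job name and project name below are assumed examples based on the docs above.

```bash
# Orchestrator specs should drop out of the list when JOB_NAME matches the PR-job pattern.
# The JOB_NAME value is an assumed example; real Prow job names may differ.
cd e2e-tests
JOB_NAME="pull-ci-redhat-developer-rhdh-main-e2e-ocp-helm" \
  yarn playwright test --project=showcase --list
```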
7 changes: 5 additions & 2 deletions e2e-tests/playwright/e2e/plugins/rbac/rbac.spec.ts
@@ -402,9 +402,11 @@ test.describe("Test RBAC", () => {
await rbacPo.deleteRole("role:default/test-role");
});

test("Edit users and groups and update policies of a role from the overview page", async ({
// FIXME: https://issues.redhat.com/browse/RHDHBUGS-2483
test.skip("Edit users and groups and update policies of a role from the overview page", async ({
page,
}) => {
// TODO: Test needs to be updated for new wizard flow with additional step
const uiHelper = new UIhelper(page);
const rbacPo = new RbacPo(page);
await rbacPo.createRole(
@@ -747,7 +749,8 @@ test.describe("Test RBAC", () => {
await uiHelper.verifyHeading("All roles (1)");
});

test("Test that user with `IsOwner` condition can access the RBAC page, create a role, edit a role, and delete the role", async ({
// FIXME: https://issues.redhat.com/browse/RHDHBUGS-2483
test.skip("Test that user with `IsOwner` condition can access the RBAC page, create a role, edit a role, and delete the role", async ({
page,
}) => {
const common = new Common(page);