diff --git a/IaC/spot-price-auto-updater.yml b/IaC/spot-price-auto-updater.yml new file mode 100644 index 0000000000..0c95fddc07 --- /dev/null +++ b/IaC/spot-price-auto-updater.yml @@ -0,0 +1,200 @@ +- job: + name: spot-price-auto-updater + project-type: pipeline + description: | + Checks current AWS spot prices for each AZ + and updates maxBidPrice in all templates: currentAWSSpotPrice + 0.07$ + Do not edit this job through the web! + disabled: false + concurrent: false + properties: + - build-discarder: + days-to-keep: -1 + num-to-keep: 1000 + artifact-days-to-keep: -1 + artifact-num-to-keep: 1000 + triggers: + - timed: 'H/15 * * * *' + dsl: | + import jenkins.model.Jenkins + import hudson.plugins.ec2.AmazonEC2Cloud + import hudson.plugins.ec2.SlaveTemplate + import hudson.plugins.ec2.SpotConfiguration + + import com.amazonaws.auth.InstanceProfileCredentialsProvider + import com.amazonaws.services.ec2.AmazonEC2ClientBuilder + import com.amazonaws.services.ec2.model.DescribeSpotPriceHistoryRequest + + // Get the Jenkins instance + def jenkins = Jenkins.instance + + // Find all AmazonEC2Cloud instances in Jenkins + def ec2Clouds = jenkins.clouds.findAll { it instanceof AmazonEC2Cloud } + + // Create a map to store data + def cloudData = [:] + + // Flag to check if there were any price changes + def priceChanges = false + + // Multiline output + def dbgOutput = "" + + // Iterate through each AmazonEC2Cloud instance + ec2Clouds.each { cloud -> + // Get the region and CloudName + def region = cloud.getRegion() + def CloudName = cloud.getDisplayName() + + // Get availability zone + def AvailabilityZone = "${region}${CloudName[-1..-1]}" + + // Get all templates for the current cloud + def templates = cloud.getTemplates() + + // Initialize a set to track unique instance types per Availability Zone + def uniqueInstanceTypes = new HashSet() + + // Iterate through each template + templates.each { template -> + if (template instanceof SlaveTemplate) { + def instanceType = 
template.type.toString() + + // Check if this instance type is already processed for this Availability Zone + if (!uniqueInstanceTypes.contains(instanceType)) { + // Create an entry for the instance type in the AvailabilityZone data + def instanceData = [:] + instanceData.instanceType = instanceType + + // Get the SpotConfiguration for the template + def spotConfig = template.spotConfig + if (spotConfig instanceof SpotConfiguration) { + def maxBidPrice = spotConfig.getSpotMaxBidPrice() + + // Create an entry for the instance type in the AvailabilityZone data + instanceData.maxBidPrice = maxBidPrice + instanceData.az = null + instanceData.timestamp = null + instanceData.awsCurrentPrice = null + instanceData.newMaxBidPrice = null + + // Add the instance data to the AvailabilityZone + if (!cloudData[AvailabilityZone]) { + cloudData[AvailabilityZone] = [:] + cloudData[AvailabilityZone].availabilityZone = AvailabilityZone + cloudData[AvailabilityZone].instanceTypes = [] + } + + cloudData[AvailabilityZone].instanceTypes << instanceData + + // Add the instance type to the set to mark it as processed + uniqueInstanceTypes.add(instanceType) + + // Create the EC2 client using the instance profile credentials provider + def ec2Client = AmazonEC2ClientBuilder.standard() + .withRegion(region) + .build() + + // Set the product description + def productDescription = "Linux/UNIX" + + // Make the necessary API call to AWS to retrieve the spot price history + def request = new DescribeSpotPriceHistoryRequest() + .withInstanceTypes(instanceType) + .withProductDescriptions(productDescription.toString()) + .withAvailabilityZone(AvailabilityZone) // Filter by Availability Zone + .withMaxResults(1) + + def response = ec2Client.describeSpotPriceHistory(request) + if (response.getSpotPriceHistory().size() > 0) { + def spotPriceHistory = response.getSpotPriceHistory().get(0) + instanceData.az = spotPriceHistory.getAvailabilityZone() + instanceData.timestamp = spotPriceHistory.getTimestamp() 
+ instanceData.awsCurrentPrice = spotPriceHistory.getSpotPrice() + instanceData.newMaxBidPrice = ((instanceData.awsCurrentPrice as Float) + 0.07).toString().take(8) + } + } + } + } + } + + } + + // Print the debug data + cloudData.each { availabilityZone, data -> + dbgOutput += "Availability Zone: ${availabilityZone}\n" + data.instanceTypes.each { instanceData -> + dbgOutput += " Availability Zone: ${instanceData.az}\n" + dbgOutput += " Instance Type: ${instanceData.instanceType}\n" + dbgOutput += " Max Bid Price: ${instanceData.maxBidPrice}\n" + dbgOutput += " Current AWS Spot Price: ${instanceData.awsCurrentPrice ?: 'N/A'}\n" + dbgOutput += " New Max Bid Price: ${instanceData.newMaxBidPrice ?: 'N/A'}\n" + dbgOutput += " Timestamp: ${instanceData.timestamp}\n" + dbgOutput += "\n" + } + } + + // Iterate through each cloud + ec2Clouds.each { cloud -> + // Get the region and CloudName for the current cloud + def region = cloud.getRegion() + def CloudName = cloud.getDisplayName() + def cloudAvailabilityZone = "${region}${CloudName[-1..-1]}" + + // Get all templates for the current cloud + def templates = cloud.getTemplates() + + // Iterate through each template + templates.each { template -> + if (template instanceof SlaveTemplate) { + def instanceType = template.type.toString() + def maxBidPrice = template.spotConfig.getSpotMaxBidPrice() + def templateName = template.getDisplayName() + + // Iterate through each entry in cloudData to find the corresponding instanceType and AZ + cloudData.each { _, data -> + data.instanceTypes.each { instanceData -> + if (instanceData.instanceType == instanceType && data.availabilityZone == cloudAvailabilityZone) { + // Update only if the newMaxBidPrice is different + if (instanceData.newMaxBidPrice != maxBidPrice) { + template.spotConfig.setSpotMaxBidPrice(instanceData.newMaxBidPrice) + priceChanges = true + dbgOutput += " Price change for ${templateName}(${data.availabilityZone}, ${instanceData.instanceType}):\n" + dbgOutput += " 
${maxBidPrice} -> ${instanceData.newMaxBidPrice}\n" + } + } + } + } + } + } + // Remove the existing cloud with the same name + def cloudsToRemove = [] + jenkins.clouds.each { + if (it.hasProperty('cloudName') && it['cloudName'] == cloud.getDisplayName()) { + cloudsToRemove.add(it) + } + } + // Remove the clouds outside prev iteration: fixes java.util.NoSuchElementException + // if only 1 cloud is configured + cloudsToRemove.each { + jenkins.clouds.remove(it) + } + + // Add the updated cloud configuration + jenkins.clouds.add(cloud) + } + jenkins.save() + + // Print output + println dbgOutput + + // Set the build result and description based on the priceChanges flag + if (priceChanges) { + currentBuild.result = 'UNSTABLE' + currentBuild.description = 'The prices were changed' + } else { + currentBuild.result = 'SUCCESS' + currentBuild.description = 'No price changes' + } + + return diff --git a/cloud/jenkins/pg-operator-latest-scheduler.groovy b/cloud/jenkins/pg-operator-latest-scheduler.groovy new file mode 100644 index 0000000000..af16580339 --- /dev/null +++ b/cloud/jenkins/pg-operator-latest-scheduler.groovy @@ -0,0 +1,126 @@ +library changelog: false, identifier: 'lib@master', retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) _ + + +pipeline { + parameters { + choice( + choices: ['run-release.csv', 'run-distro.csv'], + description: 'Choose test suite from file (e2e-tests/run-*), used only if TEST_LIST not specified.', + name: 'TEST_SUITE') + text( + defaultValue: '', + description: 'List of tests to run separated by new line', + name: 'TEST_LIST') + choice( + choices: 'NO\nYES', + description: 'Ignore passed tests in previous run (run all)', + name: 'IGNORE_PREVIOUS_RUN' + ) + choice( + choices: 'YES\nNO', + description: 'Run tests with cluster wide', + name: 'CLUSTER_WIDE') + string( + defaultValue: 'latest', + description: 'Kubernetes target version', + name: 'PLATFORM_VER') + string( + 
defaultValue: 'main', + description: 'Tag/Branch for percona/percona-postgresql-operator repository', + name: 'GIT_BRANCH') + string( + defaultValue: 'https://github.com/percona/percona-postgresql-operator', + description: 'percona-postgresql-operator repository', + name: 'GIT_REPO') + string( + defaultValue: '', + description: 'PG version', + name: 'PG_VERSION') + string( + defaultValue: '', + description: 'Operator image: perconalab/percona-postgresql-operator:main', + name: 'OPERATOR_IMAGE') + string( + defaultValue: '', + description: 'Operators pgBouncer image: perconalab/percona-postgresql-operator:main-ppg15-pgbouncer', + name: 'PGO_PGBOUNCER_IMAGE') + string( + defaultValue: '', + description: 'For EKS Operators postgres image: perconalab/percona-postgresql-operator:main-ppg15-postgres', + name: 'PGO_POSTGRES_IMAGE') + string( + defaultValue: '', + description: 'For GKE/OPENSHIFT) Operators postgres image: perconalab/percona-postgresql-operator:main-ppg15-postgres', + name: 'PGO_POSTGRES_HA_IMAGE') + string( + defaultValue: '', + description: 'Operators backrest utility image: perconalab/percona-postgresql-operator:main-ppg15-pgbackrest', + name: 'PGO_BACKREST_IMAGE') + string( + defaultValue: '', + description: 'PMM client image: perconalab/pmm-client:dev-latest', + name: 'IMAGE_PMM_CLIENT') + string( + defaultValue: '', + description: 'PMM server image: perconalab/pmm-server:dev-latest', + name: 'IMAGE_PMM_SERVER') + } + agent { + label 'docker' + } + options { + skipDefaultCheckout() + disableConcurrentBuilds() + buildDiscarder(logRotator(numToKeepStr: '10', artifactNumToKeepStr: '10')) + timestamps () + } + triggers { + cron('0 15 * * 0') + } + stages { + stage("Run parallel") { + parallel{ + + stage('Trigger pgo-operator-gke-latest job 3 times') { + steps { + build job: 'pgo-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 
'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PG_VERSION', value: "${PG_VERSION}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'PGO_PGBOUNCER_IMAGE', value: "${PGO_PGBOUNCER_IMAGE}"),string(name: 'PGO_POSTGRES_HA_IMAGE', value: "${PGO_POSTGRES_HA_IMAGE}"),string(name: 'PGO_BACKREST_IMAGE', value: "${PGO_BACKREST_IMAGE}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pgo-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PG_VERSION', value: "${PG_VERSION}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'PGO_PGBOUNCER_IMAGE', value: "${PGO_PGBOUNCER_IMAGE}"),string(name: 'PGO_POSTGRES_HA_IMAGE', value: "${PGO_POSTGRES_HA_IMAGE}"),string(name: 'PGO_BACKREST_IMAGE', value: "${PGO_BACKREST_IMAGE}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pgo-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 
'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PG_VERSION', value: "${PG_VERSION}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'PGO_PGBOUNCER_IMAGE', value: "${PGO_PGBOUNCER_IMAGE}"),string(name: 'PGO_POSTGRES_HA_IMAGE', value: "${PGO_POSTGRES_HA_IMAGE}"),string(name: 'PGO_BACKREST_IMAGE', value: "${PGO_BACKREST_IMAGE}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + + stage('Trigger pgo-operator-eks-latest job 3 times') { + steps { + build job: 'pgo-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PG_VERSION', value: "${PG_VERSION}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'PGO_PGBOUNCER_IMAGE', value: "${PGO_PGBOUNCER_IMAGE}"),string(name: 'PGO_POSTGRES_IMAGE', value: "${PGO_POSTGRES_IMAGE}"),string(name: 'PGO_BACKREST_IMAGE', value: "${PGO_BACKREST_IMAGE}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pgo-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PG_VERSION', 
value: "${PG_VERSION}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'PGO_PGBOUNCER_IMAGE', value: "${PGO_PGBOUNCER_IMAGE}"),string(name: 'PGO_POSTGRES_IMAGE', value: "${PGO_POSTGRES_IMAGE}"),string(name: 'PGO_BACKREST_IMAGE', value: "${PGO_BACKREST_IMAGE}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pgo-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PG_VERSION', value: "${PG_VERSION}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'PGO_PGBOUNCER_IMAGE', value: "${PGO_PGBOUNCER_IMAGE}"),string(name: 'PGO_POSTGRES_IMAGE', value: "${PGO_POSTGRES_IMAGE}"),string(name: 'PGO_BACKREST_IMAGE', value: "${PGO_BACKREST_IMAGE}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + stage('Trigger pgo-operator-aws-openshift-latest job 3 times') { + steps { + build job: 'pgo-operator-aws-openshift-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PG_VERSION', value: "${PG_VERSION}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 
'PGO_PGBOUNCER_IMAGE', value: "${PGO_PGBOUNCER_IMAGE}"),string(name: 'PGO_POSTGRES_HA_IMAGE', value: "${PGO_POSTGRES_HA_IMAGE}"),string(name: 'PGO_BACKREST_IMAGE', value: "${PGO_BACKREST_IMAGE}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pgo-operator-aws-openshift-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PG_VERSION', value: "${PG_VERSION}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'PGO_PGBOUNCER_IMAGE', value: "${PGO_PGBOUNCER_IMAGE}"),string(name: 'PGO_POSTGRES_HA_IMAGE', value: "${PGO_POSTGRES_HA_IMAGE}"),string(name: 'PGO_BACKREST_IMAGE', value: "${PGO_BACKREST_IMAGE}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pgo-operator-aws-openshift-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PG_VERSION', value: "${PG_VERSION}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'PGO_PGBOUNCER_IMAGE', value: "${PGO_PGBOUNCER_IMAGE}"),string(name: 'PGO_POSTGRES_HA_IMAGE', value: "${PGO_POSTGRES_HA_IMAGE}"),string(name: 'PGO_BACKREST_IMAGE', 
value: "${PGO_BACKREST_IMAGE}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + } + } + } + post { + always { + + copyArtifacts(projectName: 'pgo-operator-gke-latest', selector: lastCompleted(), target: 'pgo-operator-gke-latest') + + copyArtifacts(projectName: 'pgo-operator-eks-latest', selector: lastCompleted(), target: 'pgo-operator-eks-latest') + + copyArtifacts(projectName: 'pgo-operator-aws-openshift-latest', selector: lastCompleted(), target: 'pgo-operator-aws-openshift-latest') + + archiveArtifacts '*/*.xml' + step([$class: 'JUnitResultArchiver', testResults: '*/*.xml', healthScaleFactor: 1.0]) + + } + } +} diff --git a/cloud/jenkins/pg-operator-latest-scheduler.yml b/cloud/jenkins/pg-operator-latest-scheduler.yml new file mode 100644 index 0000000000..da2c9de99f --- /dev/null +++ b/cloud/jenkins/pg-operator-latest-scheduler.yml @@ -0,0 +1,17 @@ +- job: + name: pg-operator-latest-scheduler + project-type: pipeline + description: | + Do not edit this job through the web! 
+ triggers: + - timed: "0 6 * * 3" + pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: cloud/jenkins/pg-operator-latest-scheduler.groovy + diff --git a/cloud/jenkins/pg_containers_docker_build.groovy b/cloud/jenkins/pg_containers_docker_build.groovy index 8f2aefe3d7..f71fc9d183 100644 --- a/cloud/jenkins/pg_containers_docker_build.groovy +++ b/cloud/jenkins/pg_containers_docker_build.groovy @@ -119,7 +119,7 @@ pipeline { build('postgres') } retry(3) { - build('postgres-postgis') + build('postgres-gis') } retry(3) { build('pgbadger') @@ -133,7 +133,7 @@ pipeline { pushImageToDocker('pgbouncer') pushImageToDocker('postgres-ha') pushImageToDocker('postgres') - pushImageToDocker('postgres-postgis') + pushImageToDocker('postgres-gis') pushImageToDocker('pgbadger') } } @@ -189,13 +189,13 @@ pipeline { } } } - stage('postgres-postgis'){ + stage('postgres-gis'){ steps { - checkImageForDocker('postgres-postgis') + checkImageForDocker('postgres-gis') } post { always { - junit allowEmptyResults: true, skipPublishingChecks: true, testResults: "*-postgres-postgis.xml" + junit allowEmptyResults: true, skipPublishingChecks: true, testResults: "*-postgres-gis.xml" } } } diff --git a/cloud/jenkins/pgo-operator-aws-openshift-latest.yml b/cloud/jenkins/pgo-operator-aws-openshift-latest.yml index e6bbfa8a5f..b72deed4d1 100644 --- a/cloud/jenkins/pgo-operator-aws-openshift-latest.yml +++ b/cloud/jenkins/pgo-operator-aws-openshift-latest.yml @@ -3,8 +3,6 @@ project-type: pipeline description: | Do not edit this job through the web! 
- triggers: - - timed: "0 6 * * 3" pipeline-scm: scm: - git: @@ -13,4 +11,4 @@ - 'master' wipe-workspace: false lightweight-checkout: true - script-path: cloud/jenkins/pgo_operator_aws_openshift-latest.groovy + script-path: cloud/jenkins/pgo_operator_aws_openshift_latest.groovy diff --git a/cloud/jenkins/pgo-operator-eks-latest.yml b/cloud/jenkins/pgo-operator-eks-latest.yml index 4765baa67d..c5a26f24ae 100644 --- a/cloud/jenkins/pgo-operator-eks-latest.yml +++ b/cloud/jenkins/pgo-operator-eks-latest.yml @@ -1,10 +1,8 @@ - job: - name: pgo-operator-eks + name: pgo-operator-eks-latest project-type: pipeline description: | Do not edit this job through the web! - triggers: - - timed: "0 15 * * 0" pipeline-scm: scm: - git: @@ -13,4 +11,4 @@ - 'master' wipe-workspace: false lightweight-checkout: true - script-path: cloud/jenkins/pgo_operator_eks.groovy + script-path: cloud/jenkins/pgo_operator_eks_latest.groovy diff --git a/cloud/jenkins/pgo-operator-gke-latest.yml b/cloud/jenkins/pgo-operator-gke-latest.yml index d0c2063388..bb480c0582 100644 --- a/cloud/jenkins/pgo-operator-gke-latest.yml +++ b/cloud/jenkins/pgo-operator-gke-latest.yml @@ -10,8 +10,6 @@ num-to-keep: 10 artifact-days-to-keep: -1 artifact-num-to-keep: 10 - triggers: - - timed: "0 15 * * 0" pipeline-scm: scm: - git: diff --git a/cloud/jenkins/pgo_operator_aws_openshift_latest.groovy b/cloud/jenkins/pgo_operator_aws_openshift_latest.groovy index d948a3552b..6163beace5 100644 --- a/cloud/jenkins/pgo_operator_aws_openshift_latest.groovy +++ b/cloud/jenkins/pgo_operator_aws_openshift_latest.groovy @@ -399,6 +399,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() + copyArtifactPermission('pg-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/pgo_operator_eks_latest.groovy b/cloud/jenkins/pgo_operator_eks_latest.groovy index 
07aa717d06..7367966f02 100644 --- a/cloud/jenkins/pgo_operator_eks_latest.groovy +++ b/cloud/jenkins/pgo_operator_eks_latest.groovy @@ -422,9 +422,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 15 * * 0') + copyArtifactPermission('pg-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/pgo_operator_gke_latest.groovy b/cloud/jenkins/pgo_operator_gke_latest.groovy index a33f75556d..ace5d06493 100644 --- a/cloud/jenkins/pgo_operator_gke_latest.groovy +++ b/cloud/jenkins/pgo_operator_gke_latest.groovy @@ -371,9 +371,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 15 * * 0') + copyArtifactPermission('pg-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/pgo_v1_operator_aws_openshift-4.groovy b/cloud/jenkins/pgo_v1_operator_aws_openshift-4.groovy index 98fcf6cd4d..19c68a6003 100644 --- a/cloud/jenkins/pgo_v1_operator_aws_openshift-4.groovy +++ b/cloud/jenkins/pgo_v1_operator_aws_openshift-4.groovy @@ -402,6 +402,7 @@ pipeline { runTest('tls-check', 'sandbox') runTest('users', 'sandbox') runTest('ns-mode', 'sandbox') + runTest('data-migration-gcs', 'sandbox') ShutdownCluster('sandbox') } } diff --git a/cloud/jenkins/pgo_v1_operator_eks.groovy b/cloud/jenkins/pgo_v1_operator_eks.groovy index f2f8d551ea..ca4b406f5d 100644 --- a/cloud/jenkins/pgo_v1_operator_eks.groovy +++ b/cloud/jenkins/pgo_v1_operator_eks.groovy @@ -361,6 +361,9 @@ EOF runTest('version-service') runTest('users') runTest('ns-mode') + runTest('data-migration-gcs') + runTest('clone-cluster') + runTest('tls-check') } } stage('Make report') { diff --git a/cloud/jenkins/pgo_v1_operator_gke_version.groovy 
b/cloud/jenkins/pgo_v1_operator_gke_version.groovy index af572d962a..7ea5e9c934 100644 --- a/cloud/jenkins/pgo_v1_operator_gke_version.groovy +++ b/cloud/jenkins/pgo_v1_operator_gke_version.groovy @@ -406,6 +406,7 @@ pipeline { runTest('tls-check', 'sandbox') runTest('users', 'sandbox') runTest('ns-mode', 'sandbox') + runTest('data-migration-gcs', 'sandbox') ShutdownCluster('sandbox') } } diff --git a/cloud/jenkins/ps-operator-eks-latest.yml b/cloud/jenkins/ps-operator-eks-latest.yml index 301a4e4e57..6533d7cafc 100644 --- a/cloud/jenkins/ps-operator-eks-latest.yml +++ b/cloud/jenkins/ps-operator-eks-latest.yml @@ -3,8 +3,6 @@ project-type: pipeline description: | Do not edit this job through the web! - triggers: - - timed: "0 8 * * 0" pipeline-scm: scm: - git: diff --git a/cloud/jenkins/ps-operator-gke-latest.yml b/cloud/jenkins/ps-operator-gke-latest.yml index 2859a056bd..702116970c 100644 --- a/cloud/jenkins/ps-operator-gke-latest.yml +++ b/cloud/jenkins/ps-operator-gke-latest.yml @@ -10,8 +10,6 @@ num-to-keep: 10 artifact-days-to-keep: -1 artifact-num-to-keep: 10 - triggers: - - timed: "0 8 * * 0" pipeline-scm: scm: - git: diff --git a/cloud/jenkins/ps-operator-latest-scheduler.groovy b/cloud/jenkins/ps-operator-latest-scheduler.groovy new file mode 100644 index 0000000000..0eb55d4221 --- /dev/null +++ b/cloud/jenkins/ps-operator-latest-scheduler.groovy @@ -0,0 +1,119 @@ +library changelog: false, identifier: 'lib@master', retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) _ + + +pipeline { + parameters { + choice( + choices: ['run-release.csv', 'run-distro.csv'], + description: 'Choose test suite from file (e2e-tests/run-*), used only if TEST_LIST not specified.', + name: 'TEST_SUITE') + text( + defaultValue: '', + description: 'List of tests to run separated by new line', + name: 'TEST_LIST') + choice( + choices: 'NO\nYES', + description: 'Ignore passed tests in previous run (run all)', + 
name: 'IGNORE_PREVIOUS_RUN' + ) + string( + defaultValue: 'main', + description: 'Tag/Branch for percona/percona-server-mysql-operator repository', + name: 'GIT_BRANCH') + string( + defaultValue: 'https://github.com/percona/percona-server-mysql-operator', + description: 'percona-server-mysql-operator repository', + name: 'GIT_REPO') + string( + defaultValue: 'latest', + description: 'GKE version', + name: 'PLATFORM_VER') + choice( + choices: 'NO\nYES', + description: 'GKE alpha/stable', + name: 'IS_GKE_ALPHA') + string( + defaultValue: '', + description: 'Operator image: perconalab/percona-server-mysql-operator:main', + name: 'OPERATOR_IMAGE') + string( + defaultValue: '', + description: 'PS for MySQL image: perconalab/percona-server-mysql-operator:main-ps8.0', + name: 'IMAGE_MYSQL') + string( + defaultValue: '', + description: 'Orchestrator image: perconalab/percona-server-mysql-operator:main-orchestrator', + name: 'IMAGE_ORCHESTRATOR') + string( + defaultValue: '', + description: 'MySQL Router image: perconalab/percona-server-mysql-operator:main-router', + name: 'IMAGE_ROUTER') + string( + defaultValue: '', + description: 'XtraBackup image: perconalab/percona-server-mysql-operator:main-backup', + name: 'IMAGE_BACKUP') + string( + defaultValue: '', + description: 'Toolkit image: perconalab/percona-server-mysql-operator:main-toolkit', + name: 'IMAGE_TOOLKIT') + string( + defaultValue: '', + description: 'HAProxy image: perconalab/percona-server-mysql-operator:main-haproxy', + name: 'IMAGE_HAPROXY') + string( + defaultValue: '', + description: 'PMM client image: perconalab/pmm-client:dev-latest', + name: 'IMAGE_PMM_CLIENT') + string( + defaultValue: '', + description: 'PMM server image: perconalab/pmm-server:dev-latest', + name: 'IMAGE_PMM_SERVER') + } + agent { + label 'docker' + } + options { + skipDefaultCheckout() + disableConcurrentBuilds() + buildDiscarder(logRotator(numToKeepStr: '10', artifactNumToKeepStr: '10')) + timestamps () + } + triggers { + cron('0 8 
* * 0') + } + stages { + stage("Run parallel") { + parallel{ + + stage('Trigger ps-operator-gke-latest job 3 times') { + steps { + build job: 'ps-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'IS_GKE_ALPHA', value: "${IS_GKE_ALPHA}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MYSQL', value: "${IMAGE_MYSQL}"),string(name: 'IMAGE_ORCHESTRATOR', value: "${IMAGE_ORCHESTRATOR}"),string(name: 'IMAGE_ROUTER', value: "${IMAGE_ROUTER}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_TOOLKIT', value: "${IMAGE_TOOLKIT}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'ps-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'IS_GKE_ALPHA', value: "${IS_GKE_ALPHA}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MYSQL', value: "${IMAGE_MYSQL}"),string(name: 'IMAGE_ORCHESTRATOR', value: "${IMAGE_ORCHESTRATOR}"),string(name: 'IMAGE_ROUTER', value: "${IMAGE_ROUTER}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_TOOLKIT', value: "${IMAGE_TOOLKIT}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 
'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'ps-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'IS_GKE_ALPHA', value: "${IS_GKE_ALPHA}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MYSQL', value: "${IMAGE_MYSQL}"),string(name: 'IMAGE_ORCHESTRATOR', value: "${IMAGE_ORCHESTRATOR}"),string(name: 'IMAGE_ROUTER', value: "${IMAGE_ROUTER}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_TOOLKIT', value: "${IMAGE_TOOLKIT}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + + stage('Trigger ps-operator-eks-latest job 3 times') { + steps { + build job: 'ps-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MYSQL', value: "${IMAGE_MYSQL}"),string(name: 'IMAGE_ORCHESTRATOR', value: "${IMAGE_ORCHESTRATOR}"),string(name: 'IMAGE_ROUTER', value: "${IMAGE_ROUTER}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_TOOLKIT', value: "${IMAGE_TOOLKIT}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 
'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'ps-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MYSQL', value: "${IMAGE_MYSQL}"),string(name: 'IMAGE_ORCHESTRATOR', value: "${IMAGE_ORCHESTRATOR}"),string(name: 'IMAGE_ROUTER', value: "${IMAGE_ROUTER}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_TOOLKIT', value: "${IMAGE_TOOLKIT}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'ps-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),string(name: 'TEST_LIST', value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MYSQL', value: "${IMAGE_MYSQL}"),string(name: 'IMAGE_ORCHESTRATOR', value: "${IMAGE_ORCHESTRATOR}"),string(name: 'IMAGE_ROUTER', value: "${IMAGE_ROUTER}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_TOOLKIT', value: "${IMAGE_TOOLKIT}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + } + } + 
} + post { + always { + + copyArtifacts(projectName: 'ps-operator-gke-latest', selector: lastCompleted(), target: 'ps-operator-gke-latest') + copyArtifacts(projectName: 'ps-operator-eks-latest', selector: lastCompleted(), target: 'ps-operator-eks-latest') + archiveArtifacts '*/*.xml' + step([$class: 'JUnitResultArchiver', testResults: '*/*.xml', healthScaleFactor: 1.0]) + + } + } +} diff --git a/cloud/jenkins/ps-operator-latest-scheduler.yml b/cloud/jenkins/ps-operator-latest-scheduler.yml new file mode 100644 index 0000000000..e13de9639d --- /dev/null +++ b/cloud/jenkins/ps-operator-latest-scheduler.yml @@ -0,0 +1,17 @@ +- job: + name: ps-operator-latest-scheduler + project-type: pipeline + description: | + Do not edit this job through the web! + triggers: + - timed: "0 8 * * 0" + pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: cloud/jenkins/ps-operator-latest-scheduler.groovy + diff --git a/cloud/jenkins/ps_operator_eks_latest.groovy b/cloud/jenkins/ps_operator_eks_latest.groovy index 9417ff7845..a226b15d4a 100644 --- a/cloud/jenkins/ps_operator_eks_latest.groovy +++ b/cloud/jenkins/ps_operator_eks_latest.groovy @@ -413,9 +413,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 8 * * 0') + copyArtifactPermission('ps-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/ps_operator_eks_version.groovy b/cloud/jenkins/ps_operator_eks_version.groovy index 04798ba933..d3552dca4b 100644 --- a/cloud/jenkins/ps_operator_eks_version.groovy +++ b/cloud/jenkins/ps_operator_eks_version.groovy @@ -55,7 +55,6 @@ EOF sh """ export KUBECONFIG=/tmp/${CLUSTER_NAME}-${CLUSTER_SUFFIX} export PATH=/home/ec2-user/.local/bin:$PATH - source 
$HOME/google-cloud-sdk/path.bash.inc eksctl create cluster -f cluster-${CLUSTER_SUFFIX}.yaml """ } @@ -68,7 +67,6 @@ void shutdownCluster(String CLUSTER_SUFFIX) { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'eks-cicd', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-$CLUSTER_SUFFIX - source $HOME/google-cloud-sdk/path.bash.inc eksctl delete addon --name aws-ebs-csi-driver --cluster $CLUSTER_NAME-$CLUSTER_SUFFIX --region $AWSRegion || true for namespace in \$(kubectl get namespaces --no-headers | awk '{print \$1}' | grep -vE "^kube-|^openshift" | sed '/-operator/ s/^/1-/' | sort | sed 's/^1-//'); do kubectl delete deployments --all -n \$namespace --force --grace-period=0 || true @@ -125,13 +123,8 @@ void pushArtifactFile(String FILE_NAME) { void prepareNode() { sh ''' - if [ ! -d $HOME/google-cloud-sdk/bin ]; then - rm -rf $HOME/google-cloud-sdk - curl https://sdk.cloud.google.com | bash - fi - - source $HOME/google-cloud-sdk/path.bash.inc - gcloud components install kubectl + sudo curl -s -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/\$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl && sudo chmod +x /usr/local/bin/kubectl + kubectl version --client --output=yaml curl -s https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz \ | sudo tar -C /usr/local/bin --strip-components 1 -zvxpf - @@ -270,7 +263,6 @@ void runTest(Integer TEST_ID) { export IMAGE_PMM_SERVER=$IMAGE_PMM_SERVER export PATH="/home/ec2-user/.local/bin:${HOME}/.krew/bin:$PATH" - source $HOME/google-cloud-sdk/path.bash.inc export KUBECONFIG=/tmp/$CLUSTER_NAME-$clusterSuffix kubectl kuttl test --config ./e2e-tests/kuttl.yaml --test "^$testName\$" @@ -495,7 +487,6 @@ pipeline { sh """ sudo docker system prune -fa sudo rm -rf ./* - sudo rm -rf $HOME/google-cloud-sdk """ deleteDir() } diff --git a/cloud/jenkins/ps_operator_gke_latest.groovy 
b/cloud/jenkins/ps_operator_gke_latest.groovy index 0fa5d5f414..d4ac191931 100644 --- a/cloud/jenkins/ps_operator_gke_latest.groovy +++ b/cloud/jenkins/ps_operator_gke_latest.groovy @@ -410,9 +410,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 8 * * 0') + copyArtifactPermission('ps-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/ps_operator_gke_version.groovy b/cloud/jenkins/ps_operator_gke_version.groovy index 37e65232d5..2598591154 100644 --- a/cloud/jenkins/ps_operator_gke_version.groovy +++ b/cloud/jenkins/ps_operator_gke_version.groovy @@ -17,7 +17,6 @@ void runGKEcluster(String CLUSTER_SUFFIX) { NODES_NUM=3 export KUBECONFIG=/tmp/$CLUSTER_NAME-${CLUSTER_SUFFIX} export USE_GKE_GCLOUD_AUTH_PLUGIN=True - source $HOME/google-cloud-sdk/path.bash.inc ret_num=0 while [ \${ret_num} -lt 15 ]; do ret_val=0 @@ -38,7 +37,6 @@ void runGKEclusterAlpha(String CLUSTER_SUFFIX) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-${CLUSTER_SUFFIX} export USE_GKE_GCLOUD_AUTH_PLUGIN=True - source $HOME/google-cloud-sdk/path.bash.inc ret_num=0 while [ \${ret_num} -lt 15 ]; do ret_val=0 @@ -66,7 +64,6 @@ void shutdownCluster(String CLUSTER_SUFFIX) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-$CLUSTER_SUFFIX export USE_GKE_GCLOUD_AUTH_PLUGIN=True - source $HOME/google-cloud-sdk/path.bash.inc gcloud auth activate-service-account $ACCOUNT@"$GCP_PROJECT".iam.gserviceaccount.com --key-file=$CLIENT_SECRET_FILE gcloud config set project $GCP_PROJECT for namespace in \$(kubectl get namespaces --no-headers | awk '{print \$1}' | grep -vE "^kube-|^openshift" | sed '/-operator/ s/^/1-/' | sort | sed 's/^1-//'); do @@ -98,14 +95,20 @@ void pushArtifactFile(String FILE_NAME) { void prepareNode() { sh ''' - if [ ! 
-d $HOME/google-cloud-sdk/bin ]; then - rm -rf $HOME/google-cloud-sdk - curl https://sdk.cloud.google.com | bash - fi - - source $HOME/google-cloud-sdk/path.bash.inc + sudo tee /etc/yum.repos.d/google-cloud-sdk.repo << EOF +[google-cloud-cli] +name=Google Cloud CLI +baseurl=https://packages.cloud.google.com/yum/repos/cloud-sdk-el7-x86_64 +enabled=1 +gpgcheck=1 +repo_gpgcheck=0 +gpgkey=https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg +EOF + sudo yum install -y google-cloud-cli google-cloud-cli-gke-gcloud-auth-plugin gcloud components install alpha - gcloud components install kubectl + + sudo curl -s -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/\$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl && sudo chmod +x /usr/local/bin/kubectl + kubectl version --client --output=yaml curl -s https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz \ | sudo tar -C /usr/local/bin --strip-components 1 -zvxpf - @@ -241,7 +244,6 @@ void runTest(Integer TEST_ID){ export KUBECONFIG=/tmp/$CLUSTER_NAME-$clusterSuffix export PATH="${HOME}/.krew/bin:$PATH" - source $HOME/google-cloud-sdk/path.bash.inc kubectl kuttl test --config ./e2e-tests/kuttl.yaml --test "^$testName\$" """ @@ -475,7 +477,6 @@ pipeline { sh """ sudo docker system prune -fa sudo rm -rf ./* - sudo rm -rf $HOME/google-cloud-sdk """ deleteDir() } diff --git a/cloud/jenkins/psmdb-operator-aks-latest.yml b/cloud/jenkins/psmdb-operator-aks-latest.yml index a0bf697244..1ccff61152 100644 --- a/cloud/jenkins/psmdb-operator-aks-latest.yml +++ b/cloud/jenkins/psmdb-operator-aks-latest.yml @@ -10,8 +10,6 @@ num-to-keep: 10 artifact-days-to-keep: -1 artifact-num-to-keep: 10 - triggers: - - timed: "0 15 * * 6" pipeline-scm: scm: - git: diff --git a/cloud/jenkins/psmdb-operator-aws-openshift-latest.yml b/cloud/jenkins/psmdb-operator-aws-openshift-latest.yml index f33b0333de..b07ab98333 100644 --- a/cloud/jenkins/psmdb-operator-aws-openshift-latest.yml +++ 
b/cloud/jenkins/psmdb-operator-aws-openshift-latest.yml @@ -3,8 +3,6 @@ project-type: pipeline description: | Do not edit this job through the web! - triggers: - - timed: "0 15 * * 6" pipeline-scm: scm: - git: @@ -13,4 +11,4 @@ - 'master' wipe-workspace: false lightweight-checkout: true - script-path: cloud/jenkins/psmdb_operator_aws_openshift-latest.groovy + script-path: cloud/jenkins/psmdb_operator_aws_openshift_latest.groovy diff --git a/cloud/jenkins/psmdb-operator-gke-latest.yml b/cloud/jenkins/psmdb-operator-gke-latest.yml index f3b68c6858..bb5bd5a774 100644 --- a/cloud/jenkins/psmdb-operator-gke-latest.yml +++ b/cloud/jenkins/psmdb-operator-gke-latest.yml @@ -10,8 +10,6 @@ num-to-keep: 10 artifact-days-to-keep: -1 artifact-num-to-keep: 10 - triggers: - - timed: "0 15 * * 6" pipeline-scm: scm: - git: diff --git a/cloud/jenkins/psmdb-operator-latest-scheduler.groovy b/cloud/jenkins/psmdb-operator-latest-scheduler.groovy new file mode 100644 index 0000000000..be5c5dc004 --- /dev/null +++ b/cloud/jenkins/psmdb-operator-latest-scheduler.groovy @@ -0,0 +1,126 @@ +library changelog: false, identifier: 'lib@master', retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) _ + + +pipeline { + parameters { + choice( + choices: ['run-release.csv', 'run-distro.csv'], + description: 'Choose test suite from file (e2e-tests/run-*), used only if TEST_LIST not specified.', + name: 'TEST_SUITE') + text( + defaultValue: '', + description: 'List of tests to run separated by new line', + name: 'TEST_LIST') + choice( + choices: 'NO\nYES', + description: 'Ignore passed tests in previous run (run all)', + name: 'IGNORE_PREVIOUS_RUN' + ) + string( + defaultValue: 'main', + description: 'Tag/Branch for percona/percona-server-mongodb-operator repository', + name: 'GIT_BRANCH') + string( + defaultValue: 'https://github.com/percona/percona-server-mongodb-operator', + description: 'percona-server-mongodb-operator repository', + 
name: 'GIT_REPO') + string( + defaultValue: 'latest', + description: 'GKE kubernetes version', + name: 'PLATFORM_VER') + choice( + choices: 'YES\nNO', + description: 'Run tests in cluster wide mode', + name: 'CLUSTER_WIDE') + string( + defaultValue: '', + description: 'Operator image: perconalab/percona-server-mongodb-operator:main', + name: 'OPERATOR_IMAGE') + string( + defaultValue: '', + description: 'MONGOD image: perconalab/percona-server-mongodb-operator:main-mongod5.0', + name: 'IMAGE_MONGOD') + string( + defaultValue: '', + description: 'Backup image: perconalab/percona-server-mongodb-operator:main-backup', + name: 'IMAGE_BACKUP') + string( + defaultValue: '', + description: 'PMM client image: perconalab/pmm-client:dev-latest', + name: 'IMAGE_PMM_CLIENT') + string( + defaultValue: '', + description: 'PMM server image: perconalab/pmm-server:dev-latest', + name: 'IMAGE_PMM_SERVER') + } + agent { + label 'docker' + } + options { + skipDefaultCheckout() + disableConcurrentBuilds() + buildDiscarder(logRotator(numToKeepStr: '10', artifactNumToKeepStr: '10')) + timestamps () + } + triggers { + cron('0 15 * * 6') + } + stages { + stage("Run parallel") { + parallel{ + + stage('Trigger psmdb-operator-aks-latest job 3 times') { + steps { + build job: 'psmdb-operator-aks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: 
"${IMAGE_PMM_SERVER}")] + build job: 'psmdb-operator-aks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'psmdb-operator-aks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + + stage('Trigger psmdb-operator-gke-latest job 3 times') { + steps { + build job: 'psmdb-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: 
"${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'psmdb-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'psmdb-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + + stage('Trigger psmdb-operator-eks-latest job 3 times') { + steps { + build job: 
'psmdb-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'psmdb-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'psmdb-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: 
"${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + + stage('Trigger psmdb-operator-aws-openshift-latest job 3 times') { + steps { + build job: 'psmdb-operator-aws-openshift-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'psmdb-operator-aws-openshift-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'psmdb-operator-aws-openshift-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', 
value: "${TEST_SUITE}"),text(name: 'TEST_LIST',value: "${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_MONGOD', value: "${IMAGE_MONGOD}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + + } + } + } + post { + always { + + copyArtifacts(projectName: 'psmdb-operator-aks-latest', selector: lastCompleted(), target: 'psmdb-operator-aks-latest') + + copyArtifacts(projectName: 'psmdb-operator-gke-latest', selector: lastCompleted(), target: 'psmdb-operator-gke-latest') + + copyArtifacts(projectName: 'psmdb-operator-eks-latest', selector: lastCompleted(), target: 'psmdb-operator-eks-latest') + + copyArtifacts(projectName: 'psmdb-operator-aws-openshift-latest', selector: lastCompleted(), target: 'psmdb-operator-aws-openshift-latest') + + archiveArtifacts '*/*.xml' + step([$class: 'JUnitResultArchiver', testResults: '*/*.xml', healthScaleFactor: 1.0]) + + } + } +} diff --git a/cloud/jenkins/psmdb-operator-latest-scheduler.yml b/cloud/jenkins/psmdb-operator-latest-scheduler.yml new file mode 100644 index 0000000000..6e3093f5a3 --- /dev/null +++ b/cloud/jenkins/psmdb-operator-latest-scheduler.yml @@ -0,0 +1,16 @@ +- job: + name: psmdb-operator-latest-scheduler + project-type: pipeline + description: | + Do not edit this job through the web! 
+ triggers: + - timed: "0 15 * * 6" + pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: cloud/jenkins/psmdb-operator-latest-scheduler.groovy diff --git a/cloud/jenkins/psmdb_operator_aks_latest.groovy b/cloud/jenkins/psmdb_operator_aks_latest.groovy index 0cc2669fec..6cf81a0744 100644 --- a/cloud/jenkins/psmdb_operator_aks_latest.groovy +++ b/cloud/jenkins/psmdb_operator_aks_latest.groovy @@ -335,9 +335,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 15 * * 6') + copyArtifactPermission('psmdb-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/psmdb_operator_aws_openshift-4.groovy b/cloud/jenkins/psmdb_operator_aws_openshift-4.groovy index a023d834fe..782a3c86d7 100644 --- a/cloud/jenkins/psmdb_operator_aws_openshift-4.groovy +++ b/cloud/jenkins/psmdb_operator_aws_openshift-4.groovy @@ -80,7 +80,6 @@ void shutdownCluster(String CLUSTER_SUFFIX) { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'openshift-cicd'], file(credentialsId: 'aws-openshift-41-key-pub', variable: 'AWS_NODES_KEY_PUB'), file(credentialsId: 'openshift-secret-file', variable: 'OPENSHIFT-CONF-FILE')]) { sshagent(['aws-openshift-41-key']) { sh """ - source $HOME/google-cloud-sdk/path.bash.inc export KUBECONFIG=$WORKSPACE/openshift/$CLUSTER_SUFFIX/auth/kubeconfig for namespace in \$(kubectl get namespaces --no-headers | awk '{print \$1}' | grep -vE "^kube-|^openshift" | sed '/-operator/ s/^/1-/' | sort | sed 's/^1-//'); do kubectl delete deployments --all -n \$namespace --force --grace-period=0 || true @@ -218,7 +217,6 @@ void runTest(Integer TEST_ID) { export IMAGE_PMM_CLIENT=$IMAGE_PMM_CLIENT 
export IMAGE_PMM_SERVER=$IMAGE_PMM_SERVER - source $HOME/google-cloud-sdk/path.bash.inc export KUBECONFIG=$WORKSPACE/openshift/$clusterSuffix/auth/kubeconfig oc whoami @@ -341,14 +339,8 @@ pipeline { sudo mv terraform /usr/local/bin/ && rm terraform_0.11.14_linux_amd64.zip """ sh ''' - if [ ! -d $HOME/google-cloud-sdk/bin ]; then - rm -rf $HOME/google-cloud-sdk - curl https://sdk.cloud.google.com | bash - fi - - source $HOME/google-cloud-sdk/path.bash.inc - gcloud components update kubectl - gcloud version + sudo curl -s -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/\$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl && sudo chmod +x /usr/local/bin/kubectl + kubectl version --client --output=yaml curl -s https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz \ | sudo tar -C /usr/local/bin --strip-components 1 -zvxpf - @@ -458,7 +450,6 @@ pipeline { sh """ sudo docker system prune -fa - sudo rm -rf $HOME/google-cloud-sdk sudo rm -rf ./* """ deleteDir() diff --git a/cloud/jenkins/psmdb_operator_aws_openshift_latest.groovy b/cloud/jenkins/psmdb_operator_aws_openshift_latest.groovy index 780323760a..30c26e54aa 100644 --- a/cloud/jenkins/psmdb_operator_aws_openshift_latest.groovy +++ b/cloud/jenkins/psmdb_operator_aws_openshift_latest.groovy @@ -361,9 +361,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 15 * * 6') + copyArtifactPermission('psmdb-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/psmdb_operator_eks_latest.groovy b/cloud/jenkins/psmdb_operator_eks_latest.groovy index 805b952f1d..c77e8fb806 100644 --- a/cloud/jenkins/psmdb_operator_eks_latest.groovy +++ b/cloud/jenkins/psmdb_operator_eks_latest.groovy @@ -383,9 +383,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: 
'30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 15 * * 6') + copyArtifactPermission('psmdb-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/psmdb_operator_eks_version.groovy b/cloud/jenkins/psmdb_operator_eks_version.groovy index 4cc4c109e8..3b95d874c7 100644 --- a/cloud/jenkins/psmdb_operator_eks_version.groovy +++ b/cloud/jenkins/psmdb_operator_eks_version.groovy @@ -55,7 +55,6 @@ EOF sh """ export KUBECONFIG=/tmp/${CLUSTER_NAME}-${CLUSTER_SUFFIX} export PATH=/home/ec2-user/.local/bin:$PATH - source $HOME/google-cloud-sdk/path.bash.inc eksctl create cluster -f cluster-${CLUSTER_SUFFIX}.yaml """ } @@ -65,7 +64,6 @@ void shutdownCluster(String CLUSTER_SUFFIX) { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'eks-cicd', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-$CLUSTER_SUFFIX - source $HOME/google-cloud-sdk/path.bash.inc eksctl delete addon --name aws-ebs-csi-driver --cluster $CLUSTER_NAME-$CLUSTER_SUFFIX --region $AWSRegion || true for namespace in \$(kubectl get namespaces --no-headers | awk '{print \$1}' | grep -vE "^kube-|^openshift" | sed '/-operator/ s/^/1-/' | sort | sed 's/^1-//'); do kubectl delete deployments --all -n \$namespace --force --grace-period=0 || true @@ -234,7 +232,6 @@ void runTest(Integer TEST_ID) { export IMAGE_PMM_SERVER=$IMAGE_PMM_SERVER export PATH=/home/ec2-user/.local/bin:$PATH - source $HOME/google-cloud-sdk/path.bash.inc export KUBECONFIG=/tmp/$CLUSTER_NAME-$clusterSuffix e2e-tests/$testName/run """ @@ -348,14 +345,8 @@ pipeline { initTests() sh ''' - if [ ! 
-d $HOME/google-cloud-sdk/bin ]; then - rm -rf $HOME/google-cloud-sdk - curl https://sdk.cloud.google.com | bash - fi - - source $HOME/google-cloud-sdk/path.bash.inc - gcloud components update kubectl - gcloud version + sudo curl -s -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/\$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl && sudo chmod +x /usr/local/bin/kubectl + kubectl version --client --output=yaml curl -s https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz \ | sudo tar -C /usr/local/bin --strip-components 1 -zvxpf - @@ -455,7 +446,6 @@ pipeline { sh """ sudo docker system prune -fa - sudo rm -rf $HOME/google-cloud-sdk sudo rm -rf ./* """ deleteDir() diff --git a/cloud/jenkins/psmdb_operator_gke_latest.groovy b/cloud/jenkins/psmdb_operator_gke_latest.groovy index 64cb0d8506..5c60f338ce 100644 --- a/cloud/jenkins/psmdb_operator_gke_latest.groovy +++ b/cloud/jenkins/psmdb_operator_gke_latest.groovy @@ -349,9 +349,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 15 * * 6') + copyArtifactPermission('psmdb-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/psmdb_operator_gke_version.groovy b/cloud/jenkins/psmdb_operator_gke_version.groovy index 8ff46168f5..9626044e83 100644 --- a/cloud/jenkins/psmdb_operator_gke_version.groovy +++ b/cloud/jenkins/psmdb_operator_gke_version.groovy @@ -20,7 +20,6 @@ void runGKEcluster(String CLUSTER_SUFFIX) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-${CLUSTER_SUFFIX} export USE_GKE_GCLOUD_AUTH_PLUGIN=True - source $HOME/google-cloud-sdk/path.bash.inc ret_num=0 while [ \${ret_num} -lt 15 ]; do ret_val=0 @@ -42,7 +41,6 @@ void runGKEclusterAlpha(String CLUSTER_SUFFIX) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-${CLUSTER_SUFFIX} export USE_GKE_GCLOUD_AUTH_PLUGIN=True - source 
$HOME/google-cloud-sdk/path.bash.inc ret_num=0 while [ \${ret_num} -lt 15 ]; do ret_val=0 @@ -71,7 +69,6 @@ void shutdownCluster(String CLUSTER_SUFFIX) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-$CLUSTER_SUFFIX export USE_GKE_GCLOUD_AUTH_PLUGIN=True - source $HOME/google-cloud-sdk/path.bash.inc gcloud auth activate-service-account $ACCOUNT@"$GCP_PROJECT".iam.gserviceaccount.com --key-file=$CLIENT_SECRET_FILE gcloud config set project $GCP_PROJECT for namespace in \$(kubectl get namespaces --no-headers | awk '{print \$1}' | grep -vE "^kube-|^openshift" | sed '/-operator/ s/^/1-/' | sort | sed 's/^1-//'); do @@ -208,7 +205,6 @@ void runTest(Integer TEST_ID) { export IMAGE_PMM_SERVER=$IMAGE_PMM_SERVER export KUBECONFIG=/tmp/$CLUSTER_NAME-$clusterSuffix - source $HOME/google-cloud-sdk/path.bash.inc e2e-tests/$testName/run """ } @@ -325,14 +321,20 @@ pipeline { initTests() sh """ - if [ ! -d $HOME/google-cloud-sdk/bin ]; then - rm -rf $HOME/google-cloud-sdk - curl https://sdk.cloud.google.com | bash - fi - - source $HOME/google-cloud-sdk/path.bash.inc + sudo curl -s -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/\$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl && sudo chmod +x /usr/local/bin/kubectl + kubectl version --client --output=yaml + + sudo tee /etc/yum.repos.d/google-cloud-sdk.repo << EOF +[google-cloud-cli] +name=Google Cloud CLI +baseurl=https://packages.cloud.google.com/yum/repos/cloud-sdk-el7-x86_64 +enabled=1 +gpgcheck=1 +repo_gpgcheck=0 +gpgkey=https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg +EOF + sudo yum install -y google-cloud-cli google-cloud-cli-gke-gcloud-auth-plugin gcloud components install alpha - gcloud components install kubectl curl -s https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz \ | sudo tar -C /usr/local/bin --strip-components 1 -zvxpf - diff --git a/cloud/jenkins/pxc-operator-aks-latest.yml b/cloud/jenkins/pxc-operator-aks-latest.yml index bde83e015c..c3ff73667a 100644 --- 
a/cloud/jenkins/pxc-operator-aks-latest.yml +++ b/cloud/jenkins/pxc-operator-aks-latest.yml @@ -10,8 +10,6 @@ num-to-keep: 10 artifact-days-to-keep: -1 artifact-num-to-keep: 10 - triggers: - - timed: "0 8 * * 6" pipeline-scm: scm: - git: diff --git a/cloud/jenkins/pxc-operator-aws-openshift-latest.yml b/cloud/jenkins/pxc-operator-aws-openshift-latest.yml index 081a269726..ca9ff57354 100644 --- a/cloud/jenkins/pxc-operator-aws-openshift-latest.yml +++ b/cloud/jenkins/pxc-operator-aws-openshift-latest.yml @@ -3,8 +3,6 @@ project-type: pipeline description: | Do not edit this job through the web! - triggers: - - timed: "0 8 * * 6" pipeline-scm: scm: - git: @@ -13,4 +11,4 @@ - 'master' wipe-workspace: false lightweight-checkout: true - script-path: cloud/jenkins/pxc_operator_aws_openshift-latest.groovy + script-path: cloud/jenkins/pxc_operator_aws_openshift_latest.groovy diff --git a/cloud/jenkins/pxc-operator-eks-latest.yml b/cloud/jenkins/pxc-operator-eks-latest.yml index b3f586f8e1..d0401675bd 100644 --- a/cloud/jenkins/pxc-operator-eks-latest.yml +++ b/cloud/jenkins/pxc-operator-eks-latest.yml @@ -3,8 +3,6 @@ project-type: pipeline description: | Do not edit this job through the web! 
- triggers: - - timed: "0 8 * * 6" pipeline-scm: scm: - git: diff --git a/cloud/jenkins/pxc-operator-gke-latest.yml b/cloud/jenkins/pxc-operator-gke-latest.yml index 75d8594566..c99c3df79a 100644 --- a/cloud/jenkins/pxc-operator-gke-latest.yml +++ b/cloud/jenkins/pxc-operator-gke-latest.yml @@ -10,8 +10,6 @@ num-to-keep: 10 artifact-days-to-keep: -1 artifact-num-to-keep: 10 - triggers: - - timed: "0 8 * * 6" pipeline-scm: scm: - git: diff --git a/cloud/jenkins/pxc-operator-latest-scheduler.groovy b/cloud/jenkins/pxc-operator-latest-scheduler.groovy new file mode 100644 index 0000000000..a9836389b4 --- /dev/null +++ b/cloud/jenkins/pxc-operator-latest-scheduler.groovy @@ -0,0 +1,136 @@ +library changelog: false, identifier: 'lib@master', retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) _ + + +pipeline { + parameters { + choice( + choices: ['run-release.csv', 'run-distro.csv'], + description: 'Choose test suite from file (e2e-tests/run-*), used only if TEST_LIST not specified.', + name: 'TEST_SUITE') + text( + defaultValue: '', + description: 'List of tests to run separated by new line', + name: 'TEST_LIST') + choice( + choices: 'NO\nYES', + description: 'Ignore passed tests in previous run (run all)', + name: 'IGNORE_PREVIOUS_RUN' + ) + string( + defaultValue: 'main', + description: 'Tag/Branch for percona/percona-xtradb-cluster-operator repository', + name: 'GIT_BRANCH') + string( + defaultValue: 'https://github.com/percona/percona-xtradb-cluster-operator', + description: 'percona-xtradb-cluster-operator repository', + name: 'GIT_REPO') + string( + defaultValue: 'latest', + description: 'EKS kubernetes version', + name: 'PLATFORM_VER') + choice( + choices: 'YES\nNO', + description: 'Run tests in cluster wide mode', + name: 'CLUSTER_WIDE') + string( + defaultValue: '', + description: 'Operator image: perconalab/percona-xtradb-cluster-operator:main', + name: 'OPERATOR_IMAGE') + string( + 
defaultValue: '', + description: 'PXC image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0', + name: 'IMAGE_PXC') + string( + defaultValue: '', + description: 'PXC proxy image: perconalab/percona-xtradb-cluster-operator:main-proxysql', + name: 'IMAGE_PROXY') + string( + defaultValue: '', + description: 'PXC haproxy image: perconalab/percona-xtradb-cluster-operator:main-haproxy', + name: 'IMAGE_HAPROXY') + string( + defaultValue: '', + description: 'Backup image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup', + name: 'IMAGE_BACKUP') + string( + defaultValue: '', + description: 'PXC logcollector image: perconalab/percona-xtradb-cluster-operator:main-logcollector', + name: 'IMAGE_LOGCOLLECTOR') + string( + defaultValue: '', + description: 'PMM client image: perconalab/pmm-client:dev-latest', + name: 'IMAGE_PMM_CLIENT') + string( + defaultValue: '', + description: 'PMM server image: perconalab/pmm-server:dev-latest', + name: 'IMAGE_PMM_SERVER') + } + agent { + label 'docker' + } + options { + skipDefaultCheckout() + disableConcurrentBuilds() + buildDiscarder(logRotator(numToKeepStr: '10', artifactNumToKeepStr: '10')) + timestamps () + } + triggers { + cron('0 8 * * 6') + } + stages { + stage("Run parallel") { + parallel{ + + stage('Trigger pxc-operator-aks-latest job 3 times') { + steps { + build job: 'pxc-operator-aks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: 
"${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pxc-operator-aks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pxc-operator-aks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: 
"${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + + stage('Trigger pxc-operator-gke-latest job 3 times') { + steps { + build job: 'pxc-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pxc-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: 
"${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pxc-operator-gke-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + + stage('Trigger pxc-operator-eks-latest job 3 times') { + steps { + build job: 'pxc-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: 
"${IMAGE_PMM_SERVER}")] + build job: 'pxc-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pxc-operator-eks-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + stage('Trigger pxc-operator-aws-openshift-latest job 3 times') { + steps { + build job: 
'pxc-operator-aws-openshift-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pxc-operator-aws-openshift-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: "${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + build job: 'pxc-operator-aws-openshift-latest', propagate: false, wait: true, parameters: [string(name: 'TEST_SUITE', value: 
"${TEST_SUITE}"),text(name: 'TEST_LIST', value:"${TEST_LIST}"),string(name: 'IGNORE_PREVIOUS_RUN', value: "${IGNORE_PREVIOUS_RUN}"),string(name: 'GIT_REPO', value: "${GIT_REPO}"),string(name: 'GIT_BRANCH', value: "${GIT_BRANCH}"),string(name: 'PLATFORM_VER', value: "${PLATFORM_VER}"),string(name: 'CLUSTER_WIDE', value: "${CLUSTER_WIDE}"),string(name: 'OPERATOR_IMAGE', value: "${OPERATOR_IMAGE}"),string(name: 'IMAGE_PXC', value: "${IMAGE_PXC}"),string(name: 'IMAGE_PROXY', value: "${IMAGE_PROXY}"),string(name: 'IMAGE_HAPROXY', value: "${IMAGE_HAPROXY}"),string(name: 'IMAGE_BACKUP', value: "${IMAGE_BACKUP}"),string(name: 'IMAGE_LOGCOLLECTOR', value: "${IMAGE_LOGCOLLECTOR}"),string(name: 'IMAGE_PMM_CLIENT', value: "${IMAGE_PMM_CLIENT}"),string(name: 'IMAGE_PMM_SERVER', value: "${IMAGE_PMM_SERVER}")] + } + } + } + } + } + post { + always { + + copyArtifacts(projectName: 'pxc-operator-aks-latest', selector: lastCompleted(), target: 'pxc-operator-aks-latest') + + copyArtifacts(projectName: 'pxc-operator-gke-latest', selector: lastCompleted(), target: 'pxc-operator-gke-latest') + + copyArtifacts(projectName: 'pxc-operator-eks-latest', selector: lastCompleted(), target: 'pxc-operator-eks-latest') + + copyArtifacts(projectName: 'pxc-operator-aws-openshift-latest', selector: lastCompleted(), target: 'pxc-operator-aws-openshift-latest') + + archiveArtifacts '*/*.xml' + step([$class: 'JUnitResultArchiver', testResults: '*/*.xml', healthScaleFactor: 1.0]) + + } + } +} diff --git a/cloud/jenkins/pxc-operator-latest-scheduler.yml b/cloud/jenkins/pxc-operator-latest-scheduler.yml new file mode 100644 index 0000000000..9f5a7286c5 --- /dev/null +++ b/cloud/jenkins/pxc-operator-latest-scheduler.yml @@ -0,0 +1,17 @@ +- job: + name: pxc-operator-latest-scheduler + project-type: pipeline + description: | + Do not edit this job through the web! 
+ triggers: + - timed: "0 8 * * 6" + pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: cloud/jenkins/pxc-operator-latest-scheduler.groovy + diff --git a/cloud/jenkins/pxc_operator_aks_latest.groovy b/cloud/jenkins/pxc_operator_aks_latest.groovy index f07e0757ca..8f3c1a7e67 100644 --- a/cloud/jenkins/pxc_operator_aks_latest.groovy +++ b/cloud/jenkins/pxc_operator_aks_latest.groovy @@ -355,9 +355,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 8 * * 6') + copyArtifactPermission('pxc-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/pxc_operator_aws_openshift-4.groovy b/cloud/jenkins/pxc_operator_aws_openshift-4.groovy index a0c58cd560..cc4a706ad1 100644 --- a/cloud/jenkins/pxc_operator_aws_openshift-4.groovy +++ b/cloud/jenkins/pxc_operator_aws_openshift-4.groovy @@ -80,7 +80,6 @@ void shutdownCluster(String CLUSTER_SUFFIX) { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'openshift-cicd'], file(credentialsId: 'aws-openshift-41-key-pub', variable: 'AWS_NODES_KEY_PUB'), file(credentialsId: 'openshift-secret-file', variable: 'OPENSHIFT-CONF-FILE')]) { sshagent(['aws-openshift-41-key']) { sh """ - source $HOME/google-cloud-sdk/path.bash.inc export KUBECONFIG=$WORKSPACE/openshift/$CLUSTER_SUFFIX/auth/kubeconfig for namespace in \$(kubectl get namespaces --no-headers | awk '{print \$1}' | grep -vE "^kube-|^openshift" | sed '/-operator/ s/^/1-/' | sort | sed 's/^1-//'); do kubectl delete deployments --all -n \$namespace --force --grace-period=0 || true @@ -221,7 +220,6 @@ void runTest(Integer TEST_ID) { export IMAGE_PMM_CLIENT=$IMAGE_PMM_CLIENT export 
IMAGE_PMM_SERVER=$IMAGE_PMM_SERVER - source $HOME/google-cloud-sdk/path.bash.inc export KUBECONFIG=$WORKSPACE/openshift/$clusterSuffix/auth/kubeconfig oc whoami @@ -359,14 +357,8 @@ pipeline { sudo mv terraform /usr/local/bin/ && rm terraform_0.11.14_linux_amd64.zip """ sh ''' - if [ ! -d $HOME/google-cloud-sdk/bin ]; then - rm -rf $HOME/google-cloud-sdk - curl https://sdk.cloud.google.com | bash - fi - - source $HOME/google-cloud-sdk/path.bash.inc - gcloud components update kubectl - gcloud version + sudo curl -s -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/\$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl && sudo chmod +x /usr/local/bin/kubectl + kubectl version --client --output=yaml curl -s https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz \ | sudo tar -C /usr/local/bin --strip-components 1 -zvxpf - @@ -492,7 +484,6 @@ pipeline { sh """ sudo docker system prune -fa - sudo rm -rf $HOME/google-cloud-sdk sudo rm -rf ./* """ deleteDir() diff --git a/cloud/jenkins/pxc_operator_aws_openshift_latest.groovy b/cloud/jenkins/pxc_operator_aws_openshift_latest.groovy index e4bb62bdb1..e1524fb4eb 100644 --- a/cloud/jenkins/pxc_operator_aws_openshift_latest.groovy +++ b/cloud/jenkins/pxc_operator_aws_openshift_latest.groovy @@ -383,9 +383,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 8 * * 6') + copyArtifactPermission('pxc-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/pxc_operator_eks_latest.groovy b/cloud/jenkins/pxc_operator_eks_latest.groovy index 83260f6da7..a05cb27b26 100644 --- a/cloud/jenkins/pxc_operator_eks_latest.groovy +++ b/cloud/jenkins/pxc_operator_eks_latest.groovy @@ -403,9 +403,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: 
'30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 8 * * 6') + copyArtifactPermission('pxc-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/pxc_operator_eks_version.groovy b/cloud/jenkins/pxc_operator_eks_version.groovy index 8060709bc2..7481d95871 100644 --- a/cloud/jenkins/pxc_operator_eks_version.groovy +++ b/cloud/jenkins/pxc_operator_eks_version.groovy @@ -55,7 +55,6 @@ EOF sh """ export KUBECONFIG=/tmp/${CLUSTER_NAME}-${CLUSTER_SUFFIX} export PATH=/home/ec2-user/.local/bin:$PATH - source $HOME/google-cloud-sdk/path.bash.inc eksctl create cluster -f cluster-${CLUSTER_SUFFIX}.yaml """ } @@ -65,7 +64,6 @@ void shutdownCluster(String CLUSTER_SUFFIX) { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'eks-cicd', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-$CLUSTER_SUFFIX - source $HOME/google-cloud-sdk/path.bash.inc eksctl delete addon --name aws-ebs-csi-driver --cluster $CLUSTER_NAME-$CLUSTER_SUFFIX --region $AWSRegion || true for namespace in \$(kubectl get namespaces --no-headers | awk '{print \$1}' | grep -vE "^kube-|^openshift" | sed '/-operator/ s/^/1-/' | sort | sed 's/^1-//'); do kubectl delete deployments --all -n \$namespace --force --grace-period=0 || true @@ -237,7 +235,6 @@ void runTest(Integer TEST_ID) { export IMAGE_PMM_SERVER=$IMAGE_PMM_SERVER export PATH=/home/ec2-user/.local/bin:$PATH - source $HOME/google-cloud-sdk/path.bash.inc export KUBECONFIG=/tmp/$CLUSTER_NAME-$clusterSuffix ./e2e-tests/$testName/run """ @@ -367,14 +364,8 @@ pipeline { sudo percona-release enable-only tools sudo yum install -y percona-xtrabackup-80 | true - if [ ! 
-d $HOME/google-cloud-sdk/bin ]; then - rm -rf $HOME/google-cloud-sdk - curl https://sdk.cloud.google.com | bash - fi - - source $HOME/google-cloud-sdk/path.bash.inc - gcloud components update kubectl - gcloud version + sudo curl -s -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/\$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl && sudo chmod +x /usr/local/bin/kubectl + kubectl version --client --output=yaml curl -s https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz \ | sudo tar -C /usr/local/bin --strip-components 1 -zvxpf - @@ -491,7 +482,6 @@ pipeline { sh """ sudo docker system prune -fa - sudo rm -rf $HOME/google-cloud-sdk sudo rm -rf ./* """ deleteDir() diff --git a/cloud/jenkins/pxc_operator_gke_latest.groovy b/cloud/jenkins/pxc_operator_gke_latest.groovy index 33c04be582..b5647720bc 100644 --- a/cloud/jenkins/pxc_operator_gke_latest.groovy +++ b/cloud/jenkins/pxc_operator_gke_latest.groovy @@ -367,9 +367,7 @@ pipeline { buildDiscarder(logRotator(daysToKeepStr: '-1', artifactDaysToKeepStr: '-1', numToKeepStr: '30', artifactNumToKeepStr: '30')) skipDefaultCheckout() disableConcurrentBuilds() - } - triggers { - cron('0 8 * * 6') + copyArtifactPermission('pxc-operator-latest-scheduler'); } stages { stage('Prepare node') { diff --git a/cloud/jenkins/pxc_operator_gke_version.groovy b/cloud/jenkins/pxc_operator_gke_version.groovy index a9b4e6f7b9..dfae1d99e2 100644 --- a/cloud/jenkins/pxc_operator_gke_version.groovy +++ b/cloud/jenkins/pxc_operator_gke_version.groovy @@ -22,7 +22,6 @@ void runGKEcluster(String CLUSTER_SUFFIX) { NODES_NUM=3 export KUBECONFIG=/tmp/$CLUSTER_NAME-${CLUSTER_SUFFIX} export USE_GKE_GCLOUD_AUTH_PLUGIN=True - source $HOME/google-cloud-sdk/path.bash.inc ret_num=0 while [ \${ret_num} -lt 15 ]; do ret_val=0 @@ -44,7 +43,6 @@ void runGKEclusterAlpha(String CLUSTER_SUFFIX) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-${CLUSTER_SUFFIX} export USE_GKE_GCLOUD_AUTH_PLUGIN=True - source 
$HOME/google-cloud-sdk/path.bash.inc ret_num=0 while [ \${ret_num} -lt 15 ]; do ret_val=0 @@ -73,7 +71,6 @@ void shutdownCluster(String CLUSTER_SUFFIX) { sh """ export KUBECONFIG=/tmp/$CLUSTER_NAME-$CLUSTER_SUFFIX export USE_GKE_GCLOUD_AUTH_PLUGIN=True - source $HOME/google-cloud-sdk/path.bash.inc gcloud auth activate-service-account $ACCOUNT@"$GCP_PROJECT".iam.gserviceaccount.com --key-file=$CLIENT_SECRET_FILE gcloud config set project $GCP_PROJECT for namespace in \$(kubectl get namespaces --no-headers | awk '{print \$1}' | grep -vE "^kube-|^openshift" | sed '/-operator/ s/^/1-/' | sort | sed 's/^1-//'); do @@ -214,7 +211,6 @@ void runTest(Integer TEST_ID) { export IMAGE_PMM_SERVER=$IMAGE_PMM_SERVER export KUBECONFIG=/tmp/$CLUSTER_NAME-$clusterSuffix - source $HOME/google-cloud-sdk/path.bash.inc ./e2e-tests/$testName/run """ } @@ -348,14 +344,19 @@ pipeline { sudo percona-release enable-only tools sudo yum install -y percona-xtrabackup-80 | true - if [ ! -d $HOME/google-cloud-sdk/bin ]; then - rm -rf $HOME/google-cloud-sdk - curl https://sdk.cloud.google.com | bash - fi - - source $HOME/google-cloud-sdk/path.bash.inc - gcloud components install alpha - gcloud components install kubectl + sudo curl -s -L -o /usr/local/bin/kubectl https://dl.k8s.io/release/\$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl && sudo chmod +x /usr/local/bin/kubectl + kubectl version --client --output=yaml + + sudo tee /etc/yum.repos.d/google-cloud-sdk.repo << EOF +[google-cloud-cli] +name=Google Cloud CLI +baseurl=https://packages.cloud.google.com/yum/repos/cloud-sdk-el7-x86_64 +enabled=1 +gpgcheck=1 +repo_gpgcheck=0 +gpgkey=https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg +EOF + sudo yum install -y google-cloud-cli google-cloud-cli-gke-gcloud-auth-plugin curl -s https://get.helm.sh/helm-v3.9.4-linux-amd64.tar.gz \ | sudo tar -C /usr/local/bin --strip-components 1 -zvxpf - diff --git a/pbm/pbm-e2e-tests.groovy b/pbm/pbm-e2e-tests.groovy index 
eb98b7eb52..2acb691c0a 100644 --- a/pbm/pbm-e2e-tests.groovy +++ b/pbm/pbm-e2e-tests.groovy @@ -7,7 +7,7 @@ void runTest(String TEST_SCRIPT, String MONGO_VERSION, String BCP_TYPE) { """ } -void prepareCluster() { +void prepareCluster(String TEST_TYPE) { sh """ docker kill \$(docker ps -a -q) || true docker rm \$(docker ps -a -q) || true @@ -15,13 +15,36 @@ void prepareCluster() { sudo rm -rf ./* """ + sh """ + sudo curl -L "https://github.com/docker/compose/releases/download/v2.23.0/docker-compose-linux-x86_64" -o /usr/local/bin/docker-compose + sudo chmod +x /usr/local/bin/docker-compose + + wget https://download.docker.com/linux/static/stable/x86_64/docker-24.0.7.tgz + tar -xvf docker-24.0.7.tgz + sudo systemctl stop docker containerd + sudo cp docker/* /usr/bin/ + sudo systemctl start docker containerd + """ + git poll: false, branch: params.PBM_BRANCH, url: 'https://github.com/percona/percona-backup-mongodb.git' + withCredentials([file(credentialsId: 'PBM-AWS-S3', variable: 'PBM_AWS_S3_YML'), file(credentialsId: 'PBM-GCS-S3', variable: 'PBM_GCS_S3_YML'), file(credentialsId: 'PBM-AZURE', variable: 'PBM_AZURE_YML')]) { sh """ - sudo curl -L "https://github.com/docker/compose/releases/download/v2.16.0/docker-compose-linux-x86_64" -o /usr/local/bin/docker-compose - sudo chmod +x /usr/local/bin/docker-compose + cp $PBM_AWS_S3_YML ./e2e-tests/docker/conf/aws.yaml + cp $PBM_GCS_S3_YML ./e2e-tests/docker/conf/gcs.yaml +# cp $PBM_AZURE_YML ./e2e-tests/docker/conf/azure.yaml + sed -i s:pbme2etest:pbme2etest-${TEST_TYPE}:g ./e2e-tests/docker/conf/aws.yaml + sed -i s:pbme2etest:pbme2etest-${TEST_TYPE}:g ./e2e-tests/docker/conf/gcs.yaml +# sed -i s:pbme2etest:pbme2etest-${TEST_TYPE}:g ./e2e-tests/docker/conf/azure.yaml + + chmod 664 ./e2e-tests/docker/conf/aws.yaml + chmod 664 ./e2e-tests/docker/conf/gcs.yaml +# chmod 664 ./e2e-tests/docker/conf/azure.yaml + + openssl rand -base64 756 > ./e2e-tests/docker/keyFile """ + } } library changelog: false, identifier: "lib@master", 
retriever: modernSCM([ @@ -40,6 +63,9 @@ pipeline { parameters { string(name: 'PBM_BRANCH', defaultValue: 'main', description: 'PBM branch') } + triggers { + cron('0 3 * * 1') + } stages { stage('Set build name'){ steps { @@ -55,7 +81,7 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('44-newc-logic') runTest('run-new-cluster', '4.4', 'logical') } } @@ -64,7 +90,7 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('50-newc-logic') runTest('run-new-cluster', '5.0', 'logical') } } @@ -73,17 +99,25 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('60-newc-logic') runTest('run-new-cluster', '6.0', 'logical') } } - + stage('New cluster 7.0 logical') { + agent { + label 'docker' + } + steps { + prepareCluster('70-newc-logic') + runTest('run-new-cluster', '7.0', 'logical') + } + } stage('Sharded 4.4 logical') { agent { label 'docker-32gb' } steps { - prepareCluster() + prepareCluster('44-shrd-logic') runTest('run-sharded', '4.4', 'logical') } } @@ -92,7 +126,7 @@ pipeline { label 'docker-32gb' } steps { - prepareCluster() + prepareCluster('50-shrd-logic') runTest('run-sharded', '5.0', 'logical') } } @@ -101,17 +135,25 @@ pipeline { label 'docker-32gb' } steps { - prepareCluster() + prepareCluster('60-shrd-logic') runTest('run-sharded', '6.0', 'logical') } } - + stage('Sharded 7.0 logical') { + agent { + label 'docker-32gb' + } + steps { + prepareCluster('70-shrd-logic') + runTest('run-sharded', '7.0', 'logical') + } + } stage('Non-sharded 4.4 logical') { agent { label 'docker' } steps { - prepareCluster() + prepareCluster('44-rs-logic') runTest('run-rs', '4.4', 'logical') } } @@ -120,7 +162,7 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('50-rs-logic') runTest('run-rs', '5.0', 'logical') } } @@ -129,17 +171,25 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('60-rs-logic') runTest('run-rs', '6.0', 'logical') } } - + stage('Non-sharded 7.0 
logical') { + agent { + label 'docker' + } + steps { + prepareCluster('70-rs-logic') + runTest('run-rs', '7.0', 'logical') + } + } stage('Single-node 4.4 logical') { agent { label 'docker' } steps { - prepareCluster() + prepareCluster('44-single-logic') runTest('run-single', '4.4', 'logical') } } @@ -148,7 +198,7 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('50-single-logic') runTest('run-single', '5.0', 'logical') } } @@ -157,17 +207,25 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('60-single-logic') runTest('run-single', '6.0', 'logical') } } - + stage('Single-node 7.0 logical') { + agent { + label 'docker' + } + steps { + prepareCluster('70-single-logic') + runTest('run-single', '7.0', 'logical') + } + } stage('Sharded 4.4 physical') { agent { label 'docker-32gb' } steps { - prepareCluster() + prepareCluster('44-shrd-phys') runTest('run-sharded', '4.4', 'physical') } } @@ -176,7 +234,7 @@ pipeline { label 'docker-32gb' } steps { - prepareCluster() + prepareCluster('50-shrd-phys') runTest('run-sharded', '5.0', 'physical') } } @@ -185,17 +243,25 @@ pipeline { label 'docker-32gb' } steps { - prepareCluster() + prepareCluster('60-shrd-phys') runTest('run-sharded', '6.0', 'physical') } } - + stage('Sharded 7.0 physical') { + agent { + label 'docker-32gb' + } + steps { + prepareCluster('70-shrd-phys') + runTest('run-sharded', '7.0', 'physical') + } + } stage('Non-sharded 4.4 physical') { agent { label 'docker' } steps { - prepareCluster() + prepareCluster('44-rs-phys') runTest('run-rs', '4.4', 'physical') } } @@ -204,7 +270,7 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('50-rs-phys') runTest('run-rs', '5.0', 'physical') } } @@ -213,17 +279,25 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('60-rs-phys') runTest('run-rs', '6.0', 'physical') } } - + stage('Non-sharded 7.0 physical') { + agent { + label 'docker' + } + steps { + prepareCluster('70-rs-phys') 
+ runTest('run-rs', '7.0', 'physical') + } + } stage('Single-node 4.4 physical') { agent { label 'docker' } steps { - prepareCluster() + prepareCluster('44-single-phys') runTest('run-single', '4.4', 'physical') } } @@ -232,7 +306,7 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('50-single-phys') runTest('run-single', '5.0', 'physical') } } @@ -241,10 +315,19 @@ pipeline { label 'docker' } steps { - prepareCluster() + prepareCluster('60-single-phys') runTest('run-single', '6.0', 'physical') } } + stage('Single-node 7.0 physical') { + agent { + label 'docker' + } + steps { + prepareCluster('70-single-phys') + runTest('run-single', '7.0', 'physical') + } + } } } } diff --git a/pbm/pbm-functional-aws-rs.groovy b/pbm/pbm-functional-aws-rs.groovy index 5a81e6ef06..967a056c34 100644 --- a/pbm/pbm-functional-aws-rs.groovy +++ b/pbm/pbm-functional-aws-rs.groovy @@ -83,7 +83,7 @@ pipeline { } stage ('Create infrastructure') { steps { - withCredentials([aws(accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: '67d10a9b-d873-450b-bf0f-95d32477501c', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY')]) { + withCredentials([aws(accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: '8468e4e0-5371-4741-a9bb-7c143140acea', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY')]) { script{ moleculeExecuteActionWithScenario(moleculeDir, "converge", env.SCENARIO) } diff --git a/pbm/pbm-functional-aws-sharded.groovy b/pbm/pbm-functional-aws-sharded.groovy index 6824bae9d6..9b7783b5c6 100644 --- a/pbm/pbm-functional-aws-sharded.groovy +++ b/pbm/pbm-functional-aws-sharded.groovy @@ -83,7 +83,7 @@ pipeline { } stage ('Create infrastructure') { steps { - withCredentials([aws(accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: '67d10a9b-d873-450b-bf0f-95d32477501c', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY')]) { + withCredentials([aws(accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: '8468e4e0-5371-4741-a9bb-7c143140acea', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY')]) 
{ script{ moleculeExecuteActionWithScenario(moleculeDir, "converge", env.SCENARIO) } diff --git a/pbm/pbm-manual.groovy b/pbm/pbm-manual.groovy index bf5f9fa8bb..2e41ac7df7 100644 --- a/pbm/pbm-manual.groovy +++ b/pbm/pbm-manual.groovy @@ -76,7 +76,7 @@ pipeline { } stage ('Create infrastructure') { steps { - withCredentials([aws(accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: '67d10a9b-d873-450b-bf0f-95d32477501c', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY')]) { + withCredentials([aws(accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: '8468e4e0-5371-4741-a9bb-7c143140acea', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY')]) { script{ moleculeExecuteActionWithScenario(moleculeDir, "converge", params.LAYOUT) } diff --git a/pdmdb/pdmdb-parallel.groovy b/pdmdb/pdmdb-parallel.groovy index 24a9ebc453..9417f4c29c 100644 --- a/pdmdb/pdmdb-parallel.groovy +++ b/pdmdb/pdmdb-parallel.groovy @@ -25,7 +25,8 @@ pipeline { string( defaultValue: 'pdmdb-4.2.8', description: 'PDMDB Version for tests', - name: 'PDMDB_VERSION') + name: 'PDMDB_VERSION' + ) string( defaultValue: '1.6.0', description: 'PBM Version for tests', @@ -40,6 +41,13 @@ pipeline { disableConcurrentBuilds() } stages { + stage('Set build name'){ + steps { + script { + currentBuild.displayName = "${params.REPO}-${params.PDMDB_VERSION}" + } + } + } stage('Checkout') { steps { deleteDir() diff --git a/pdmdb/pdmdb-setup.groovy b/pdmdb/pdmdb-setup.groovy index d08c10ab6d..75395abd8e 100644 --- a/pdmdb/pdmdb-setup.groovy +++ b/pdmdb/pdmdb-setup.groovy @@ -21,15 +21,18 @@ pipeline { string( defaultValue: 'pdmdb-4.2.8', description: 'PDMDB Version for tests', - name: 'PDMDB_VERSION') + name: 'PDMDB_VERSION' + ) string( defaultValue: '1.6.0', description: 'PBM Version for tests', - name: 'VERSION') + name: 'VERSION' + ) string( defaultValue: 'main', description: 'Branch for testing repository', - name: 'TESTING_BRANCH') + name: 'TESTING_BRANCH' + ) } options { withCredentials(moleculePbmJenkinsCreds()) diff --git 
a/pdps/orchestrator.groovy b/pdps/orchestrator.groovy index 33c10e0408..6c8f32ae83 100644 --- a/pdps/orchestrator.groovy +++ b/pdps/orchestrator.groovy @@ -58,6 +58,10 @@ pipeline { stages { stage('Checkout') { steps { + script { + currentBuild.displayName = "#${BUILD_NUMBER}-${REPO}-${VERSION}" + currentBuild.description = "${TESTING_BRANCH}-${TESTING_GIT_ACCOUNT}-${DESTROY_ENV}" + } deleteDir() git poll: false, branch: TESTING_BRANCH, url: 'https://github.com/${TESTING_GIT_ACCOUNT}/package-testing.git' } @@ -72,28 +76,28 @@ pipeline { stage ('Create virtual machines') { steps { script{ - moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "create", "ubuntu-bionic") + moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "create", "ubuntu-jammy") } } } stage ('Run playbook for test') { steps { script{ - moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "converge", "ubuntu-bionic") + moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "converge", "ubuntu-jammy") } } } stage ('Start testinfra tests') { steps { script{ - moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "verify", "ubuntu-bionic") + moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "verify", "ubuntu-jammy") } } } stage ('Start Cleanup ') { steps { script { - moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "cleanup", "ubuntu-bionic") + moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "cleanup", "ubuntu-jammy") } } } @@ -102,7 +106,7 @@ pipeline { always { script { if (env.DESTROY_ENV == "yes") { - moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "destroy", "ubuntu-bionic") + moleculeExecuteActionWithScenario(env.MOLECULE_DIR, "destroy", "ubuntu-jammy") junit "${MOLECULE_DIR}/report.xml" } } diff --git a/pdps/pdps-site-check.groovy b/pdps/pdps-site-check.groovy new file mode 100644 index 0000000000..2316e01a1b --- /dev/null +++ b/pdps/pdps-site-check.groovy @@ -0,0 +1,82 @@ +library changelog: false, identifier: "lib@master", retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 
'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) + +pipeline { + agent { + label 'docker' + } + environment { + PATH = '/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/ec2-user/.local/bin' + } + parameters { + string( + defaultValue: '8.0.34-26.1', + description: 'Full PS Version for tests. Examples: 8.0.34-26.1; 8.1.0-1.1', + name: 'PS_VER_FULL') + string( + defaultValue: '8.0.34-29.1', + description: 'Full PXB Version for tests. Examples: 8.0.34-29.1; 8.1.0-1.1', + name: 'PXB_VER_FULL') + string( + defaultValue: '3.2.6-10', + description: 'Full Orchestrator Version for tests. Example: 3.2.6-10', + name: 'ORCH_VER_FULL') + string( + defaultValue: '3.5.4', + description: 'PT Version for tests. Example: 3.5.4', + name: 'PT_VER') + string( + defaultValue: '2.5.5', + description: 'Proxysql Version for tests. Example: 2.5.5', + name: 'PROXYSQL_VER') + string( + defaultValue: 'master', + description: 'Branch for testing repository', + name: 'TESTING_BRANCH') + string( + defaultValue: 'Percona-QA', + description: 'Branch for testing repository', + name: 'TESTING_GIT_ACCOUNT') + } + stages { + stage('Set build name'){ + steps { + script { + currentBuild.displayName = "#${BUILD_NUMBER}-${params.PS_VER_FULL}-${params.TESTING_BRANCH}" + } + } + } + stage('Checkout') { + steps { + deleteDir() + git poll: false, branch: TESTING_BRANCH, url: "https://github.com/${TESTING_GIT_ACCOUNT}/package-testing.git" + } + } + stage('Test') { + steps { + script { + sh """ + cd site_checks + docker run --env PS_VER_FULL=${params.PS_VER_FULL} --env PXB_VER_FULL=${params.PXB_VER_FULL} --env ORCH_VER_FULL=${params.ORCH_VER_FULL} \ + --env PT_VER=${params.PT_VER} --env PROXYSQL_VER=${params.PROXYSQL_VER} \ + --rm -v `pwd`:/tmp -w /tmp python bash -c \ + 'pip3 install requests pytest setuptools && pytest -s --junitxml=junit.xml test_pdps.py || [ \$? 
= 1 ] ' + """ + } + } + } + } + post { + always { + script { + junit testResults: "**/junit.xml", keepLongStdio: true, allowEmptyResults: true, skipPublishingChecks: true + sh ''' + sudo rm -rf ./* + ''' + deleteDir() + } + } + } +} diff --git a/pdps/pdps-site-check.yml b/pdps/pdps-site-check.yml new file mode 100644 index 0000000000..bc3fba91bd --- /dev/null +++ b/pdps/pdps-site-check.yml @@ -0,0 +1,14 @@ +- job: + name: pdps-site-check + project-type: pipeline + description: | + Do not edit this job through the web! + pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: pdps/pdps-site-check.groovy diff --git a/pdpxc/pdpxc-site-check.groovy b/pdpxc/pdpxc-site-check.groovy new file mode 100644 index 0000000000..3d72148fdb --- /dev/null +++ b/pdpxc/pdpxc-site-check.groovy @@ -0,0 +1,86 @@ +library changelog: false, identifier: "lib@master", retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) + +pipeline { + agent { + label 'docker' + } + environment { + PATH = '/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/ec2-user/.local/bin' + } + parameters { + string( + defaultValue: '8.0.34-26.1', + description: 'Full PXC version for tests. Examples: 8.0.34-26.1; 8.1.0-1.1', + name: 'PXC_VER_FULL') + string( + defaultValue: '8.0.34-29.1', + description: 'Full PXB version for tests. Examples: 8.0.34-29.1; 8.1.0-1.1', + name: 'PXB_VER_FULL') + string( + defaultValue: '3.5.5', + description: 'PT version for tests. Example: 3.5.5', + name: 'PT_VER') + string( + defaultValue: '2.5.5', + description: 'Proxysql version for tests. Example: 2.5.5', + name: 'PROXYSQL_VER') + string( + defaultValue: '2.8.1', + description: 'HAproxy version for tests. Example: 2.8.1', + name: 'HAPROXY_VER') + string( + defaultValue: '1.0', + description: 'Replication manager version for tests. 
Example: 1.0', + name: 'REPL_MAN_VER') + string( + defaultValue: 'master', + description: 'Branch for testing repository', + name: 'TESTING_BRANCH') + string( + defaultValue: 'Percona-QA', + description: 'Branch for testing repository', + name: 'TESTING_GIT_ACCOUNT') + } + stages { + stage('Set build name'){ + steps { + script { + currentBuild.displayName = "#${BUILD_NUMBER}-${params.PXC_VER_FULL}-${params.TESTING_BRANCH}" + } + } + } + stage('Checkout') { + steps { + deleteDir() + git poll: false, branch: TESTING_BRANCH, url: "https://github.com/${TESTING_GIT_ACCOUNT}/package-testing.git" + } + } + stage('Test') { + steps { + script { + sh """ + cd site_checks + docker run --env PXC_VER_FULL=${params.PXC_VER_FULL} --env PXB_VER_FULL=${params.PXB_VER_FULL} --env PT_VER=${params.PT_VER} \ + --env PROXYSQL_VER=${params.PROXYSQL_VER} --env HAPROXY_VER=${params.HAPROXY_VER} --env REPL_MAN_VER=${params.REPL_MAN_VER} \ + --rm -v `pwd`:/tmp -w /tmp python bash -c \ + 'pip3 install requests pytest setuptools && pytest -s --junitxml=junit.xml test_pdpxc.py || [ \$? = 1 ] ' + """ + } + } + } + } + post { + always { + script { + junit testResults: "**/junit.xml", keepLongStdio: true, allowEmptyResults: true, skipPublishingChecks: true + sh ''' + sudo rm -rf ./* + ''' + deleteDir() + } + } + } +} diff --git a/pdpxc/pdpxc-site-check.yml b/pdpxc/pdpxc-site-check.yml new file mode 100644 index 0000000000..6ae8183fb3 --- /dev/null +++ b/pdpxc/pdpxc-site-check.yml @@ -0,0 +1,14 @@ +- job: + name: pdpxc-site-check + project-type: pipeline + description: | + Do not edit this job through the web! 
+ pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: pdpxc/pdpxc-site-check.groovy diff --git a/pdpxc/percona-replication-manager.groovy b/pdpxc/percona-replication-manager.groovy index 1c3a9e2443..8f65941b5e 100644 --- a/pdpxc/percona-replication-manager.groovy +++ b/pdpxc/percona-replication-manager.groovy @@ -160,19 +160,6 @@ pipeline { uploadRPMfromAWS("rpm/", AWS_STASH_PATH) } } - stage('Ubuntu Bionic(18.04)') { - agent { - label 'docker' - } - steps { - cleanUpWS() - popArtifactFolder("source_deb/", AWS_STASH_PATH) - buildStage("ubuntu:bionic", "--build_deb=1") - - pushArtifactFolder("deb/", AWS_STASH_PATH) - uploadDEBfromAWS("deb/", AWS_STASH_PATH) - } - } stage('Ubuntu Focal(20.04)') { agent { label 'docker' diff --git a/pmm/aws-staging-start.groovy b/pmm/aws-staging-start.groovy index aece58cac7..d9e7cfa026 100644 --- a/pmm/aws-staging-start.groovy +++ b/pmm/aws-staging-start.groovy @@ -242,7 +242,7 @@ pipeline { pushd /srv/pmm-qa sudo git clone --single-branch --branch ${PMM_QA_GIT_BRANCH} https://github.com/percona/pmm-qa.git . 
sudo git checkout ${PMM_QA_GIT_COMMIT_HASH} - sudo svn export https://github.com/Percona-QA/percona-qa.git/trunk/get_download_link.sh + sudo wget https://raw.githubusercontent.com/Percona-QA/percona-qa/master/get_download_link.sh sudo chmod 755 get_download_link.sh popd """ @@ -288,8 +288,8 @@ pipeline { docker network create pmm-qa || true docker run -d \ - -p 80:80 \ - -p 443:443 \ + -p 80:8080 \ + -p 443:8443 \ -p 9000:9000 \ --volumes-from ${VM_NAME}-data \ --name ${VM_NAME}-server \ diff --git a/pmm/pmm2-ami-staging-start.groovy b/pmm/pmm2-ami-staging-start.groovy index 6fd4dfe809..6c3b924585 100644 --- a/pmm/pmm2-ami-staging-start.groovy +++ b/pmm/pmm2-ami-staging-start.groovy @@ -163,7 +163,7 @@ pipeline { --output text \ --query 'Reservations[].Instances[].PrivateIpAddress' \ | tee PRIVATE_IP - + # wait for the instance to get ready aws ec2 wait instance-running \ --instance-ids $INSTANCE_ID @@ -196,11 +196,11 @@ pipeline { sudo dnf remove -y podman buildah sudo dnf -y install 'dnf-command(config-manager)' sudo dnf config-manager --add-repo=https://download.docker.com/linux/centos/docker-ce.repo - sudo dnf install -y git svn docker-ce docker-ce-cli containerd.io docker-compose-plugin + sudo dnf install -y git wget docker-ce docker-ce-cli containerd.io docker-compose-plugin else echo "exclude=mirror.es.its.nyu.edu" | sudo tee -a /etc/yum/pluginconf.d/fastestmirror.conf sudo yum makecache - sudo yum -y install git svn docker + sudo yum -y install git wget docker fi sudo systemctl start docker @@ -211,7 +211,7 @@ pipeline { pushd /srv/pmm-qa sudo git clone --single-branch --branch ${PMM_QA_GIT_BRANCH} https://github.com/percona/pmm-qa.git . 
sudo git checkout ${PMM_QA_GIT_COMMIT_HASH} - sudo svn export https://github.com/Percona-QA/percona-qa.git/trunk/get_download_link.sh + sudo wget https://raw.githubusercontent.com/Percona-QA/percona-qa/master/get_download_link.sh sudo chmod 755 get_download_link.sh popd " @@ -281,14 +281,14 @@ pipeline { success { script { if (params.NOTIFY == "true") { - slackSend botUser: true, - channel: '#pmm-ci', - color: '#00FF00', + slackSend botUser: true, + channel: '#pmm-ci', + color: '#00FF00', message: "[${JOB_NAME}]: build ${BUILD_URL} finished, owner: @${OWNER} - https://${PUBLIC_IP}, Instance ID: ${INSTANCE_ID}" if (OWNER_SLACK) { - slackSend botUser: true, - channel: "@${OWNER_SLACK}", - color: '#00FF00', + slackSend botUser: true, + channel: "@${OWNER_SLACK}", + color: '#00FF00', message: "[${JOB_NAME}]: build ${BUILD_URL} finished - https://${PUBLIC_IP}, Instance ID: ${INSTANCE_ID}" } } diff --git a/pmm/pmm2-ami-upgrade-tests.groovy b/pmm/pmm2-ami-upgrade-tests.groovy index 79553e981e..75c0a52164 100644 --- a/pmm/pmm2-ami-upgrade-tests.groovy +++ b/pmm/pmm2-ami-upgrade-tests.groovy @@ -222,7 +222,8 @@ pipeline { pushd /srv/pmm-qa sudo git clone --single-branch --branch \${PMM_QA_GIT_BRANCH} https://github.com/percona/pmm-qa.git . 
sudo git checkout \${PMM_QA_GIT_COMMIT_HASH} - sudo svn export https://github.com/Percona-QA/percona-qa.git/trunk/get_download_link.sh + sudo yum -y install wget + sudo wget https://raw.githubusercontent.com/Percona-QA/percona-qa/master/get_download_link.sh sudo chmod 755 get_download_link.sh popd sudo ln -s /usr/bin/chromium-browser /usr/bin/chromium @@ -248,12 +249,12 @@ pipeline { # we only want to see the http code to improve troubleshooting HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" --connect-timeout 5 ${PMM_URL}/ping) set +x - + if [[ $HTTP_CODE == "200" ]]; then RET_VAL=0 break fi - + # 000 means the host is unreachable # curl is set to timeout in 5 secs if the host is unreachable, so we only sleep if otherwise [ $HTTP_CODE != "000" ] && sleep 5 diff --git a/pmm/pmm2-api-tests.groovy b/pmm/pmm2-api-tests.groovy index 0c949c2c77..30ce07ccd6 100644 --- a/pmm/pmm2-api-tests.groovy +++ b/pmm/pmm2-api-tests.groovy @@ -24,10 +24,6 @@ pipeline { defaultValue: 'perconalab/pmm-server:dev-latest', description: 'PMM Server docker container version (image-name:version-tag)', name: 'DOCKER_VERSION') - string( - defaultValue: 'dev-latest', - description: 'PMM Client version', - name: 'CLIENT_VERSION') string( defaultValue: 'percona:5.7', description: 'Percona Server Docker Container Image', @@ -37,17 +33,13 @@ pipeline { description: 'Postgresql Docker Container Image', name: 'POSTGRES_IMAGE') string( - defaultValue: 'percona/percona-server-mongodb:4.2', + defaultValue: 'percona/percona-server-mongodb:4.4', description: 'Percona Server MongoDb Docker Container Image', name: 'MONGO_IMAGE') string( defaultValue: '', description: 'Author of recent Commit to pmm', name: 'OWNER') - string ( - defaultValue: 'master', - description: 'Branch for pmm-agent Repo, used for docker-compose setup', - name: 'GIT_BRANCH_PMM_AGENT') } options { skipDefaultCheckout() @@ -71,43 +63,41 @@ pipeline { } stage('Checkout Commit') { when { - expression { env.GIT_COMMIT_HASH.length()>0 } + 
expression { env.GIT_COMMIT_HASH.length() > 0 } } steps { sh 'git checkout ' + env.GIT_COMMIT_HASH } } - stage('API Tests Setup') - { - steps{ + stage('API Tests Setup') { + steps { withCredentials([usernamePassword(credentialsId: 'hub.docker.com', passwordVariable: 'PASS', usernameVariable: 'USER')]) { - sh """ + sh ''' echo "${PASS}" | docker login -u "${USER}" --password-stdin - """ + ''' } sh ''' docker run -d \ - -e ENABLE_ALERTING=1 \ - -e PMM_DEBUG=1 \ - -e PERCONA_TEST_CHECKS_INTERVAL=10s \ - -e ENABLE_BACKUP_MANAGEMENT=1 \ - -e PERCONA_TEST_DBAAS=0 \ - -e PERCONA_TEST_PLATFORM_ADDRESS=https://check-dev.percona.com \ - -e PERCONA_TEST_PLATFORM_PUBLIC_KEY=RWTg+ZmCCjt7O8eWeAmTLAqW+1ozUbpRSKSwNTmO+exlS5KEIPYWuYdX \ - -p 80:80 \ - -p 443:443 \ - -v \${PWD}/managed/testdata/checks:/srv/checks \ - \${DOCKER_VERSION} + -e PMM_DEBUG=1 \ + -e PERCONA_TEST_CHECKS_INTERVAL=10s \ + -e PERCONA_TEST_PLATFORM_ADDRESS=https://check-dev.percona.com \ + -e PERCONA_TEST_PLATFORM_PUBLIC_KEY=RWTg+ZmCCjt7O8eWeAmTLAqW+1ozUbpRSKSwNTmO+exlS5KEIPYWuYdX \ + -p 80:80 \ + -p 443:443 \ + -v ${PWD}/managed/testdata/checks:/srv/checks \ + ${DOCKER_VERSION} docker build -t pmm-api-tests . 
cd api-tests docker-compose up test_db - MYSQL_IMAGE=\${MYSQL_IMAGE} docker-compose up -d mysql - MONGO_IMAGE=\${MONGO_IMAGE} docker-compose up -d mongo - POSTGRES_IMAGE=\${POSTGRES_IMAGE} docker-compose up -d postgres - docker-compose up -d sysbench - cd ../ + + # None of these services is required to run API tests + # MYSQL_IMAGE=${MYSQL_IMAGE} docker-compose up -d mysql + # MONGO_IMAGE=${MONGO_IMAGE} docker-compose up -d mongo + # POSTGRES_IMAGE=${POSTGRES_IMAGE} docker-compose up -d postgres + # docker-compose up -d sysbench + cd - ''' script { env.VM_IP = "127.0.0.1" @@ -115,8 +105,7 @@ pipeline { } } } - stage('Sanity Check') - { + stage('Sanity Check') { steps { sh 'timeout 100 bash -c \'while [[ "$(curl -s -o /dev/null -w \'\'%{http_code}\'\' \${PMM_URL}/ping)" != "200" ]]; do sleep 5; done\' || false' } @@ -124,7 +113,7 @@ pipeline { stage('Run API Test') { steps { sh ''' - docker run -e PMM_SERVER_URL=\${PMM_URL} \ + docker run -e PMM_SERVER_URL=${PMM_URL} \ -e PMM_RUN_UPDATE_TEST=0 \ -e PMM_RUN_STT_TESTS=0 \ --name ${BUILD_TAG} \ @@ -144,12 +133,7 @@ pipeline { junit '${BUILD_TAG}.xml' script { archiveArtifacts artifacts: 'logs.zip' - if (currentBuild.result == 'SUCCESS') { - slackSend botUser: true, - channel: '#pmm-ci', - color: '#00FF00', - message: "[${JOB_NAME}]: build finished - ${BUILD_URL}" - } else { + if (currentBuild.result != 'SUCCESS') { slackSend botUser: true, channel: '#pmm-ci', color: '#FF0000', @@ -157,5 +141,14 @@ pipeline { } } } + success { + script { + slackSend botUser: true, + channel: '#pmm-ci', + color: '#00FF00', + message: "[${JOB_NAME}]: build finished - ${BUILD_URL}" + + } + } } } diff --git a/pmm/pmm2-client-autobuild.groovy b/pmm/pmm2-client-autobuild.groovy index 21e0edb31a..a85007fe25 100644 --- a/pmm/pmm2-client-autobuild.groovy +++ b/pmm/pmm2-client-autobuild.groovy @@ -178,6 +178,11 @@ pipeline { sh "${PATH_TO_SCRIPTS}/build-client-deb debian:bullseye" } } + stage('Build client binary deb Bookworm') { + steps { + sh 
"${PATH_TO_SCRIPTS}/build-client-deb debian:bookworm" + } + } stage('Build client binary deb Jammy') { steps { sh "${PATH_TO_SCRIPTS}/build-client-deb ubuntu:jammy" diff --git a/pmm/pmm2-dbaas-upgrade-tests.groovy b/pmm/pmm2-dbaas-upgrade-tests.groovy index d014a6ca72..715bb5a59b 100644 --- a/pmm/pmm2-dbaas-upgrade-tests.groovy +++ b/pmm/pmm2-dbaas-upgrade-tests.groovy @@ -258,7 +258,7 @@ pipeline { ssh -i "${KEY_PATH}" -o ConnectTimeout=1 -o StrictHostKeyChecking=no ${USER}@${VM_IP} ' docker stop ${VM_NAME}-server docker rename ${VM_NAME}-server ${VM_NAME}-server-old - docker run -d -p 80:80 -p 443:443 --volumes-from ${VM_NAME}-data --name pmm-server-upgraded --restart always -e ENABLE_DBAAS=1 -e PERCONA_TEST_VERSION_SERVICE_URL=https://check-dev.percona.com/versions/v1 -e PERCONA_TEST_DBAAS_PMM_CLIENT=perconalab/pmm-client:dev-latest ${PMM_SERVER_TAG} + docker run -d -p 80:8080 -p 443:8443 --volumes-from ${VM_NAME}-data --name pmm-server-upgraded --restart always -e ENABLE_DBAAS=1 -e PERCONA_TEST_VERSION_SERVICE_URL=https://check-dev.percona.com/versions/v1 -e PERCONA_TEST_DBAAS_PMM_CLIENT=perconalab/pmm-client:dev-latest ${PMM_SERVER_TAG} ' """ } diff --git a/pmm/pmm2-ovf-image-test.groovy b/pmm/pmm2-ovf-image-test.groovy index 120a24d5cc..fc8a74513b 100644 --- a/pmm/pmm2-ovf-image-test.groovy +++ b/pmm/pmm2-ovf-image-test.groovy @@ -102,7 +102,7 @@ pipeline { ''' script { env.PUBLIC_IP = sh( - returnStdout: true, + returnStdout: true, script: 'curl -s http://169.254.169.254/metadata/v1/interfaces/public/0/ipv4/address' ).trim() } @@ -138,7 +138,7 @@ pipeline { sudo mkdir -p /srv/pmm-qa || : pushd /srv/pmm-qa sudo git clone https://github.com/percona/pmm-qa.git . 
- sudo svn export https://github.com/Percona-QA/percona-qa.git/trunk/get_download_link.sh + sudo wget https://raw.githubusercontent.com/Percona-QA/percona-qa/master/get_download_link.sh sudo chmod 755 get_download_link.sh popd sudo git clone --single-branch --branch ${GIT_BRANCH} https://github.com/percona/pmm-ui-tests.git @@ -172,10 +172,10 @@ pipeline { VBoxManage modifyvm ${VM_NAME} \ --memory $VM_MEMORY \ --audio none \ - --natpf1 "guestssh,tcp,,80,,80" \ + --natpf1 "guestssh,tcp,,80,,8080" \ --uart1 0x3F8 4 --uartmode1 file /tmp/${VM_NAME}-console.log \ --groups "/${OWNER},/${JOB_NAME}" - VBoxManage modifyvm ${VM_NAME} --natpf1 "guesthttps,tcp,,443,,443" + VBoxManage modifyvm ${VM_NAME} --natpf1 "guesthttps,tcp,,443,,8443" for p in $(seq 0 15); do VBoxManage modifyvm ${VM_NAME} --natpf1 "guestexporters$p,tcp,,4200$p,,4200$p" diff --git a/pmm/pmm2-ovf-staging-start.groovy b/pmm/pmm2-ovf-staging-start.groovy index c889237280..89bfd2492d 100644 --- a/pmm/pmm2-ovf-staging-start.groovy +++ b/pmm/pmm2-ovf-staging-start.groovy @@ -126,10 +126,10 @@ pipeline { --memory ${VM_MEMORY} \ --audio none \ --cpus 6 \ - --natpf1 "guestweb,tcp,,80,,80" \ + --natpf1 "guestweb,tcp,,80,,8080" \ --uart1 0x3F8 4 --uartmode1 file /tmp/${VM_NAME}-console.log \ --groups "/pmm" - VBoxManage modifyvm ${VM_NAME} --natpf1 "guesthttps,tcp,,443,,443" + VBoxManage modifyvm ${VM_NAME} --natpf1 "guesthttps,tcp,,443,,8443" VBoxManage modifyvm ${VM_NAME} --natpf1 "guestssh,tcp,,3022,,22" for p in \$(seq 0 30); do VBoxManage modifyvm ${VM_NAME} --natpf1 "guestexporters\$p,tcp,,4200\$p,,4200\$p" diff --git a/pmm/pmm2-ovf-upgrade-tests.groovy b/pmm/pmm2-ovf-upgrade-tests.groovy index 2d6cebbd1f..41e5afa1a2 100644 --- a/pmm/pmm2-ovf-upgrade-tests.groovy +++ b/pmm/pmm2-ovf-upgrade-tests.groovy @@ -223,7 +223,7 @@ pipeline { pushd /srv/pmm-qa sudo git clone --single-branch --branch \${PMM_QA_GIT_BRANCH} https://github.com/percona/pmm-qa.git . 
sudo git checkout \${PMM_QA_GIT_COMMIT_HASH} - sudo svn export https://github.com/Percona-QA/percona-qa.git/trunk/get_download_link.sh + sudo wget https://raw.githubusercontent.com/Percona-QA/percona-qa/master/get_download_link.sh sudo chmod 755 get_download_link.sh popd sudo ln -s /usr/bin/chromium-browser /usr/bin/chromium diff --git a/pmm/pmm2-release-candidate.groovy b/pmm/pmm2-release-candidate.groovy index 197c25fb71..6c42229e0d 100644 --- a/pmm/pmm2-release-candidate.groovy +++ b/pmm/pmm2-release-candidate.groovy @@ -47,7 +47,6 @@ def pmm_submodules() { "pmm-qa", "mysqld_exporter", "grafana", - "dbaas-controller", "node_exporter", "postgres_exporter", "clickhouse_exporter", diff --git a/ppg/component.groovy b/ppg/component.groovy index e0c4b869da..213485c092 100644 --- a/ppg/component.groovy +++ b/ppg/component.groovy @@ -102,7 +102,7 @@ pipeline { stage ('Prepare') { steps { script { - installMolecule() + installMoleculePPG() } } } diff --git a/ppg/docker-parallel.groovy b/ppg/docker-parallel.groovy index ee0db811d1..33ba4e230f 100644 --- a/ppg/docker-parallel.groovy +++ b/ppg/docker-parallel.groovy @@ -69,7 +69,7 @@ pipeline { stage ('Prepare') { steps { script { - installMolecule() + installMoleculePPG() } } } diff --git a/ppg/docker.groovy b/ppg/docker.groovy index 0ed5506362..05131cec63 100644 --- a/ppg/docker.groovy +++ b/ppg/docker.groovy @@ -66,7 +66,7 @@ pipeline { stage ('Prepare') { steps { script { - installMolecule() + installMoleculePPG() } } } diff --git a/ppg/docker.yml b/ppg/docker.yml index d86db8f26d..d872d68382 100644 --- a/ppg/docker.yml +++ b/ppg/docker.yml @@ -8,7 +8,7 @@ - git: url: https://github.com/Percona-Lab/jenkins-pipelines.git branches: - - 'ppg-docker' + - 'master' wipe-workspace: false lightweight-checkout: true script-path: ppg/docker.groovy diff --git a/ppg/ppg-12-full-major-upgrade-parallel.groovy b/ppg/ppg-12-full-major-upgrade-parallel.groovy index b919f5096f..e7737d459c 100644 --- 
a/ppg/ppg-12-full-major-upgrade-parallel.groovy +++ b/ppg/ppg-12-full-major-upgrade-parallel.groovy @@ -46,7 +46,7 @@ pipeline { stage ('Prepare') { steps { script { - installMolecule() + installMoleculePPG() } } } diff --git a/ppg/ppg-12-full-major-upgrade.groovy b/ppg/ppg-12-full-major-upgrade.groovy index 22250d0a84..212a89041d 100644 --- a/ppg/ppg-12-full-major-upgrade.groovy +++ b/ppg/ppg-12-full-major-upgrade.groovy @@ -58,7 +58,7 @@ pipeline { stage ('Prepare') { steps { script { - installMolecule() + installMoleculePPG() } } } diff --git a/ppg/ppg-parallel.groovy b/ppg/ppg-parallel.groovy index 8a7b92756d..83815eeb43 100644 --- a/ppg/ppg-parallel.groovy +++ b/ppg/ppg-parallel.groovy @@ -74,7 +74,7 @@ pipeline { stage ('Prepare') { steps { script { - installMolecule() + installMoleculePPG() } } } diff --git a/ppg/ppg-upgrade-parallel.groovy b/ppg/ppg-upgrade-parallel.groovy index f942525a29..6f80180af1 100644 --- a/ppg/ppg-upgrade-parallel.groovy +++ b/ppg/ppg-upgrade-parallel.groovy @@ -83,7 +83,7 @@ pipeline { stage ('Prepare') { steps { script { - installMolecule() + installMoleculePPG() } } } diff --git a/ppg/ppg-upgrade.groovy b/ppg/ppg-upgrade.groovy index 1dd6fa32c5..5d5b548da6 100644 --- a/ppg/ppg-upgrade.groovy +++ b/ppg/ppg-upgrade.groovy @@ -76,7 +76,7 @@ pipeline { stage ('Prepare') { steps { script { - installMolecule() + installMoleculePPG() } } } diff --git a/ppg/ppg.groovy b/ppg/ppg.groovy index b3c9e9106f..ca1c5b6e70 100644 --- a/ppg/ppg.groovy +++ b/ppg/ppg.groovy @@ -71,7 +71,7 @@ pipeline { stage ('Prepare') { steps { script { - installMolecule() + installMoleculePPG() } } } diff --git a/proxysql/proxysql.groovy b/proxysql/proxysql.groovy index dd2a2ffe3a..e4060f47ec 100644 --- a/proxysql/proxysql.groovy +++ b/proxysql/proxysql.groovy @@ -174,19 +174,6 @@ pipeline { uploadRPMfromAWS("rpm/", AWS_STASH_PATH) } } - stage('Ubuntu Bionic(18.04)') { - agent { - label 'docker' - } - steps { - cleanUpWS() - popArtifactFolder("source_deb/", 
AWS_STASH_PATH) - buildStage("ubuntu:bionic", "--build_deb=1") - - pushArtifactFolder("deb/", AWS_STASH_PATH) - uploadDEBfromAWS("deb/", AWS_STASH_PATH) - } - } stage('Ubuntu Focal(20.04)') { agent { label 'docker' @@ -265,19 +252,6 @@ pipeline { uploadTarballfromAWS("test/tarball/", AWS_STASH_PATH, 'binary') } } - stage('Ubuntu Xenial(16.04) tarball') { - agent { - label 'docker-32gb' - } - steps { - cleanUpWS() - popArtifactFolder("source_tarball/", AWS_STASH_PATH) - buildStage("ubuntu:xenial", "--build_tarball=1") - - pushArtifactFolder("test/tarball/", AWS_STASH_PATH) - uploadTarballfromAWS("test/tarball/", AWS_STASH_PATH, 'binary') - } - } stage('Ubuntu Bionic(18.04) tarball') { agent { label 'docker-32gb' diff --git a/ps/jenkins/mysql-shell.groovy b/ps/jenkins/mysql-shell.groovy index 46033c7d11..e737aa1b65 100644 --- a/ps/jenkins/mysql-shell.groovy +++ b/ps/jenkins/mysql-shell.groovy @@ -78,7 +78,7 @@ pipeline { steps { // slackNotify("", "#00FF00", "[${JOB_NAME}]: starting build for ${GIT_BRANCH} - [${BUILD_URL}]") cleanUpWS() - buildStage("ubuntu:bionic", "--get_sources=1") + buildStage("debian:buster", "--get_sources=1") sh ''' REPO_UPLOAD_PATH=$(grep "UPLOAD" test/mysql-shell.properties | cut -d = -f 2 | sed "s:$:${BUILD_NUMBER}:") AWS_STASH_PATH=$(echo ${REPO_UPLOAD_PATH} | sed "s:UPLOAD/experimental/::") @@ -117,7 +117,7 @@ pipeline { steps { cleanUpWS() popArtifactFolder("source_tarball/", AWS_STASH_PATH) - buildStage("ubuntu:bionic", "--build_source_deb=1") + buildStage("debian:buster", "--build_source_deb=1") pushArtifactFolder("source_deb/", AWS_STASH_PATH) uploadDEBfromAWS("source_deb/", AWS_STASH_PATH) @@ -192,19 +192,6 @@ pipeline { uploadRPMfromAWS("rpm/", AWS_STASH_PATH) } } - stage('Ubuntu Bionic (18.04)') { - agent { - label 'docker' - } - steps { - cleanUpWS() - popArtifactFolder("source_deb/", AWS_STASH_PATH) - buildStage("ubuntu:bionic", "--build_deb=1") - - pushArtifactFolder("deb/", AWS_STASH_PATH) - uploadDEBfromAWS("deb/", 
AWS_STASH_PATH) - } - } stage('Ubuntu Focal (20.04)') { agent { label 'docker' @@ -296,19 +283,6 @@ pipeline { uploadTarballfromAWS("test/tarball/", AWS_STASH_PATH, 'binary') } } - stage('Ubuntu Bionic (18.04) tarball') { - agent { - label 'docker-32gb' - } - steps { - cleanUpWS() - popArtifactFolder("source_tarball/", AWS_STASH_PATH) - buildStage("ubuntu:bionic", "--build_tarball=1") - - pushArtifactFolder("test/tarball/", AWS_STASH_PATH) - uploadTarballfromAWS("test/tarball/", AWS_STASH_PATH, 'binary') - } - } stage('Debian Buster (10) tarball') { agent { label 'docker-32gb' diff --git a/ps/jenkins/percona-server-for-mysql-8.0.groovy b/ps/jenkins/percona-server-for-mysql-8.0.groovy index 551b426ae7..df62ad345d 100644 --- a/ps/jenkins/percona-server-for-mysql-8.0.groovy +++ b/ps/jenkins/percona-server-for-mysql-8.0.groovy @@ -66,7 +66,7 @@ void cleanUpWS() { } def installDependencies(def nodeName) { - def aptNodes = ['min-buster-x64', 'min-bullseye-x64', 'min-bookworm-x64', 'min-bionic-x64', 'min-focal-x64', 'min-jammy-x64'] + def aptNodes = ['min-buster-x64', 'min-bullseye-x64', 'min-bookworm-x64', 'min-focal-x64', 'min-jammy-x64'] def yumNodes = ['min-ol-8-x64', 'min-centos-7-x64', 'min-ol-9-x64', 'min-amazon-2-x64'] try{ if (aptNodes.contains(nodeName)) { @@ -75,7 +75,7 @@ def installDependencies(def nodeName) { sudo apt-get update sudo apt-get install -y ansible git wget ''' - }else if(nodeName == "min-bionic-x64" || nodeName == "min-focal-x64" || nodeName == "min-jammy-x64"){ + }else if(nodeName == "min-focal-x64" || nodeName == "min-jammy-x64"){ sh ''' sudo apt-get update sudo apt-get install -y software-properties-common @@ -157,7 +157,6 @@ def minitestNodes = [ "min-buster-x64", "min-bookworm-x64", "min-centos-7-x64", "min-ol-8-x64", - "min-bionic-x64", "min-focal-x64", "min-amazon-2-x64", "min-jammy-x64", @@ -222,7 +221,7 @@ parameters { stage('Create PS source tarball') { agent { - label 'min-bionic-x64' + label 'min-buster-x64' } steps { 
slackNotify("${SLACKNOTIFY}", "#00FF00", "[${JOB_NAME}]: starting build for ${BRANCH} - [${BUILD_URL}]") @@ -266,7 +265,7 @@ parameters { } stage('Build PS generic source deb') { agent { - label 'min-bionic-x64' + label 'min-buster-x64' } steps { cleanUpWS() @@ -353,20 +352,6 @@ parameters { pushArtifactFolder("rpm/", AWS_STASH_PATH) } } - stage('Ubuntu Bionic(18.04)') { - agent { - label 'min-bionic-x64' - } - steps { - cleanUpWS() - installCli("deb") - unstash 'properties' - popArtifactFolder("source_deb/", AWS_STASH_PATH) - buildStage("none", "--build_deb=1") - - pushArtifactFolder("deb/", AWS_STASH_PATH) - } - } stage('Ubuntu Focal(20.04)') { agent { label 'min-focal-x64' @@ -535,34 +520,6 @@ parameters { pushArtifactFolder("tarball/", AWS_STASH_PATH) } } - stage('Bionic(18.04) binary tarball') { - agent { - label 'min-bionic-x64' - } - steps { - cleanUpWS() - installCli("deb") - unstash 'properties' - popArtifactFolder("source_tarball/", AWS_STASH_PATH) - buildStage("none", "--build_tarball=1 ") - - pushArtifactFolder("tarball/", AWS_STASH_PATH) - } - } - stage('Bionic(18.04) debug tarball') { - agent { - label 'min-bionic-x64' - } - steps { - cleanUpWS() - installCli("deb") - unstash 'properties' - popArtifactFolder("source_tarball/", AWS_STASH_PATH) - buildStage("none", "--debug=1 --build_tarball=1 ") - - pushArtifactFolder("tarball/", AWS_STASH_PATH) - } - } stage('Ubuntu Focal(20.04) tarball') { agent { label 'min-focal-x64' @@ -662,7 +619,11 @@ parameters { script { PS_MAJOR_RELEASE = sh(returnStdout: true, script: ''' echo ${BRANCH} | sed "s/release-//g" | sed "s/\\.//g" | awk '{print substr($0, 0, 2)}' ''').trim() // sync packages - sync2ProdAutoBuild("ps-"+PS_MAJOR_RELEASE, COMPONENT) + if ("${PS_MAJOR_RELEASE}" == "80") { + sync2ProdAutoBuild("ps-80", COMPONENT) + } else { + sync2ProdAutoBuild("ps-8x-innovation", COMPONENT) + } } } } @@ -681,7 +642,7 @@ parameters { } stage('Build docker containers') { agent { - label 'min-bionic-x64' + label 
'min-buster-x64' } steps { echo "====> Build docker container" @@ -693,9 +654,9 @@ parameters { unstash 'properties' sh ''' PS_RELEASE=$(echo ${BRANCH} | sed 's/release-//g') - MYSQL_SHELL_RELEASE=$(echo ${BRANCH} | sed 's/release-//g' | awk '{print substr($0, 0, 6)}' | sed 's/-//g') - MYSQL_ROUTER_RELEASE=$(echo ${BRANCH} | sed 's/release-//g' | awk '{print substr($0, 0, 6)}' | sed 's/-//g') - PS_MAJOR_RELEASE=$(echo ${BRANCH} | sed "s/release-//g" | sed "s/\\.//g" | awk '{print substr($0, 0, 2)}') + MYSQL_SHELL_RELEASE=$(echo ${BRANCH} | sed 's/release-//g' | awk '{print substr($0, 0, 7)}' | sed 's/-//g') + MYSQL_ROUTER_RELEASE=$(echo ${BRANCH} | sed 's/release-//g' | awk '{print substr($0, 0, 7)}' | sed 's/-//g') + PS_MAJOR_RELEASE=$(echo ${BRANCH} | sed "s/release-//g" | sed "s/\\.//g" | awk '{print substr($0, 0, 3)}') sudo apt-get install -y apt-transport-https ca-certificates curl gnupg-agent software-properties-common sudo apt-get install -y docker.io sudo systemctl status docker @@ -737,7 +698,7 @@ parameters { sh ''' echo "${PASS}" | sudo docker login -u "${USER}" --password-stdin PS_RELEASE=$(echo ${BRANCH} | sed 's/release-//g') - MYSQL_ROUTER_RELEASE=$(echo ${BRANCH} | sed 's/release-//g' | awk '{print substr($0, 0, 6)}' | sed 's/-//g') + MYSQL_ROUTER_RELEASE=$(echo ${BRANCH} | sed 's/release-//g' | awk '{print substr($0, 0, 7)}' | sed 's/-//g') PS_MAJOR_RELEASE=$(echo ${BRANCH} | sed "s/release-//g" | awk '{print substr($0, 0, 3)}') sudo docker tag perconalab/percona-server:${PS_RELEASE}.${RPM_RELEASE} perconalab/percona-server:${PS_RELEASE} sudo docker push perconalab/percona-server:${PS_RELEASE}.${RPM_RELEASE} diff --git a/ps/ps-site-check.groovy b/ps/ps-site-check.groovy new file mode 100644 index 0000000000..219b60505b --- /dev/null +++ b/ps/ps-site-check.groovy @@ -0,0 +1,65 @@ +library changelog: false, identifier: "lib@master", retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) + 
+pipeline { + agent { + label 'docker' + } + environment { + PATH = '/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/ec2-user/.local/bin' + } + parameters { + string( + defaultValue: '8.0.34-26.1', + description: 'Full PS Version for tests. Examples: 5.7.44-48.1; 8.0.34-26.1; 8.1.0-1.1', + name: 'PS_VER_FULL') + string( + defaultValue: 'master', + description: 'Branch for testing repository', + name: 'TESTING_BRANCH') + string( + defaultValue: 'Percona-QA', + description: 'Branch for testing repository', + name: 'TESTING_GIT_ACCOUNT') + } + stages { + stage('Set build name'){ + steps { + script { + currentBuild.displayName = "#${BUILD_NUMBER}-${params.PS_VER_FULL}-${params.TESTING_BRANCH}" + } + } + } + stage('Checkout') { + steps { + deleteDir() + git poll: false, branch: TESTING_BRANCH, url: "https://github.com/${TESTING_GIT_ACCOUNT}/package-testing.git" + } + } + stage('Test') { + steps { + script { + sh """ + cd site_checks + docker run --env PS_VER_FULL=${params.PS_VER_FULL} \ + --rm -v `pwd`:/tmp -w /tmp python bash -c \ + 'pip3 install requests pytest setuptools && pytest -s --junitxml=junit.xml test_ps.py || [ \$? = 1 ] ' + """ + } + } + } + } + post { + always { + script { + junit testResults: "**/junit.xml", keepLongStdio: true, allowEmptyResults: true, skipPublishingChecks: true + sh ''' + sudo rm -rf ./* + ''' + deleteDir() + } + } + } +} diff --git a/ps/ps-site-check.yml b/ps/ps-site-check.yml new file mode 100644 index 0000000000..97dfb7e2ce --- /dev/null +++ b/ps/ps-site-check.yml @@ -0,0 +1,14 @@ +- job: + name: ps-site-check + project-type: pipeline + description: | + Do not edit this job through the web! 
+ pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: ps/ps-site-check.groovy diff --git a/psmdb/psmdb-fips.groovy b/psmdb/psmdb-fips.groovy new file mode 100644 index 0000000000..002097ed28 --- /dev/null +++ b/psmdb/psmdb-fips.groovy @@ -0,0 +1,98 @@ +library changelog: false, identifier: "lib@master", retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) + +def moleculeDir = "psmdb/psmdb" +def fipsOS = ['rhel7-fips','rhel8-fips','ubuntu-focal-pro'] + +pipeline { + agent { + label 'min-centos-7-x64' + } + environment { + PATH = '/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/ec2-user/.local/bin' + ANSIBLE_DISPLAY_SKIPPED_HOSTS = false + } + parameters { + choice( + name: 'REPO', + description: 'Repo for testing', + choices: [ + 'testing', + 'release', + 'experimental' + ] + ) + string( + defaultValue: '5.0.22', + description: 'PSMDB Version for tests', + name: 'PSMDB_VERSION' + ) + choice( + name: 'GATED_BUILD', + description: 'Test private repo?', + choices: [ + 'false', + 'true' + ] + ) + string( + defaultValue: 'main', + description: 'Branch for testing repository', + name: 'TESTING_BRANCH') + } + options { + withCredentials(moleculePbmJenkinsCreds()) + disableConcurrentBuilds() + } + stages { + stage('Set build name'){ + steps { + script { + currentBuild.displayName = "${params.PSMDB_REPO}-${params.PSMDB_VERSION}" + } + } + } + stage('Checkout') { + steps { + deleteDir() + git poll: false, branch: TESTING_BRANCH, url: 'https://github.com/Percona-QA/psmdb-testing.git' + } + } + stage ('Prepare') { + steps { + script { + installMolecule() + } + } + } + stage('Test') { + steps { + withCredentials([usernamePassword(credentialsId: 'PSMDB_PRIVATE_REPO_ACCESS', passwordVariable: 'PASSWORD', usernameVariable: 'USERNAME')]) { + script { + moleculeParallelTest(fipsOS, 
moleculeDir) + } + } + } + post { + always { + junit testResults: "**/*-report.xml", keepLongStdio: true, allowEmptyResults: true, skipPublishingChecks: true + } + } + } + } + post { + success { + slackNotify("#mongodb_autofeed", "#00FF00", "[${JOB_NAME}]: Package tests for PSMDB ${PSMDB_VERSION} on FIPS-enabled OSs, repo ${REPO}, pro repo - ${GATED_BUILD} finished succesfully - [${BUILD_URL}]") + } + failure { + slackNotify("#mongodb_autofeed", "#FF0000", "[${JOB_NAME}]: Package tests for PSMDB ${PSMDB_VERSION} on FIPS-enabled OSs, repo ${REPO}, pro repo - ${GATED_BUILD} failed - [${BUILD_URL}]") + } + always { + script { + moleculeParallelPostDestroy(fipsOS, moleculeDir) + } + } + } +} diff --git a/psmdb/psmdb-fips.yml b/psmdb/psmdb-fips.yml new file mode 100644 index 0000000000..1045cef978 --- /dev/null +++ b/psmdb/psmdb-fips.yml @@ -0,0 +1,14 @@ +- job: + name: psmdb-fips + project-type: pipeline + description: | + Do not edit this job through the web! + pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: psmdb/psmdb-fips.groovy diff --git a/psmdb/psmdb-multi-parallel.groovy b/psmdb/psmdb-multi-parallel.groovy index 20e07577cb..aedec28720 100644 --- a/psmdb/psmdb-multi-parallel.groovy +++ b/psmdb/psmdb-multi-parallel.groovy @@ -35,11 +35,23 @@ pipeline { description: 'previous major PSMDB version for upgrade tests (leave blank to skip)', name: 'PREV_MAJ_PSMDB_VERSION' ) + string( + defaultValue: '6.0.4', + description: 'next major PSMDB version for upgrade tests (leave blank to skip)', + name: 'NEXT_MAJ_PSMDB_VERSION' + ) string( defaultValue: 'main', description: 'base Branch for upgrade test', name: 'TESTING_BRANCH') - + choice( + name: 'functionaltests', + choices: ['no','yes'], + description: 'run functional tests') + choice( + name: 'no_encryption', + choices: ['no','yes'], + description: 'check upgrade without encryption') } 
options { withCredentials(moleculePbmJenkinsCreds()) @@ -50,17 +62,25 @@ pipeline { parallel { stage ('functional tests') { steps { - build job: 'psmdb-parallel', parameters: [ - string(name: 'REPO', value: "${env.REPO}"), - string(name: 'PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), - string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") - ] + script { + if (params.integrationtests == "yes") { + build job: 'psmdb-parallel', parameters: [ + string(name: 'REPO', value: "${env.REPO}"), + string(name: 'PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped functional tests' + } + } + } } stage('upgrade from minor version without encryption') { steps { script { - if (env.PREV_MIN_PSMDB_VERSION != '') { + if ((env.PREV_MIN_PSMDB_VERSION != '') && (params.no_encryption == "yes")) { build job: "psmdb-upgrade-parallel", parameters: [ string(name: 'TO_REPO', value: "${env.REPO}"), string(name: 'FROM_REPO', value: "release"), @@ -77,7 +97,7 @@ pipeline { } } } - stage('upgrade from minor vesrsion with vault encryption') { + stage('upgrade from minor version with vault encryption') { steps { script { if (env.PREV_MIN_PSMDB_VERSION != '') { @@ -97,10 +117,30 @@ pipeline { } } } - stage('upgrade from major version without encryption') { + stage('upgrade from minor version with KMIP encryption') { steps { script { - if (env.PREV_MAJ_PSMDB_VERSION != '') { + if (env.PREV_MIN_PSMDB_VERSION != '') { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "${env.REPO}"), + string(name: 'FROM_REPO', value: "release"), + string(name: 'TO_PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.PREV_MIN_PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "KMIP"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped upgrade 
from minor version' + } + } + } + } + stage('upgrade from prev major version without encryption') { + steps { + script { + if ((env.PREV_MIN_PSMDB_VERSION != '') && (params.no_encryption == "yes")) { build job: "psmdb-upgrade-parallel", parameters: [ string(name: 'TO_REPO', value: "${env.REPO}"), string(name: 'FROM_REPO', value: "release"), @@ -117,7 +157,7 @@ pipeline { } } } - stage('upgrade from major version with vault encryption') { + stage('upgrade from prev major version with vault encryption') { steps { script { if (env.PREV_MAJ_PSMDB_VERSION != '') { @@ -137,10 +177,90 @@ pipeline { } } } + stage('upgrade from prev major version with KMIP encryption') { + steps { + script { + if (env.PREV_MAJ_PSMDB_VERSION != '') { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "${env.REPO}"), + string(name: 'FROM_REPO', value: "release"), + string(name: 'TO_PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.PREV_MAJ_PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "KMIP"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped upgrade from major version' + } + } + } + } + stage('upgrade to next major version without encryption') { + steps { + script { + if ((env.PREV_MIN_PSMDB_VERSION != '') && (params.no_encryption == "yes")) { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "release"), + string(name: 'FROM_REPO', value: "${env.REPO}"), + string(name: 'TO_PSMDB_VERSION', value: "${env.NEXT_MAJ_PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "NONE"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped upgrade to major version' + } + } + } + } + stage('upgrade to next major version with vault 
encryption') { + steps { + script { + if (env.NEXT_MAJ_PSMDB_VERSION != '') { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "release"), + string(name: 'FROM_REPO', value: "${env.REPO}"), + string(name: 'TO_PSMDB_VERSION', value: "${env.NEXT_MAJ_PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "VAULT"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped upgrade to major version' + } + } + } + } + stage('upgrade to next major version with KMIP encryption') { + steps { + script { + if (env.NEXT_MAJ_PSMDB_VERSION != '') { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "release"), + string(name: 'FROM_REPO', value: "${env.REPO}"), + string(name: 'TO_PSMDB_VERSION', value: "${env.NEXT_MAJ_PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "KMIP"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped upgrade to major version' + } + } + } + } stage('downgrade to minor version without encryption') { steps { script { - if (env.PREV_MIN_PSMDB_VERSION != '') { + if ((env.PREV_MIN_PSMDB_VERSION != '') && (params.no_encryption == "yes")) { build job: "psmdb-upgrade-parallel", parameters: [ string(name: 'TO_REPO', value: "release"), string(name: 'FROM_REPO', value: "${env.REPO}"), @@ -177,10 +297,30 @@ pipeline { } } } - stage('downgrade to major version without encryption') { + stage('downgrade to minor version with KMIP encryption') { steps { script { - if (env.PREV_MAJ_PSMDB_VERSION != '') { + if (env.PREV_MIN_PSMDB_VERSION != '') { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "release"), + string(name: 'FROM_REPO', value: 
"${env.REPO}"), + string(name: 'TO_PSMDB_VERSION', value: "${env.PREV_MIN_PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "KMIP"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped downgrade to minor version' + } + } + } + } + stage('downgrade to prev major version without encryption') { + steps { + script { + if ((env.PREV_MIN_PSMDB_VERSION != '') && (params.no_encryption == "yes")) { build job: "psmdb-upgrade-parallel", parameters: [ string(name: 'TO_REPO', value: "release"), string(name: 'FROM_REPO', value: "${env.REPO}"), @@ -197,7 +337,7 @@ pipeline { } } } - stage('downgrade to major version with vault encryption') { + stage('downgrade to prev major version with vault encryption') { steps { script { if (env.PREV_MAJ_PSMDB_VERSION != '') { @@ -217,6 +357,86 @@ pipeline { } } } + stage('downgrade to prev major version with KMIP encryption') { + steps { + script { + if (env.PREV_MAJ_PSMDB_VERSION != '') { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "release"), + string(name: 'FROM_REPO', value: "${env.REPO}"), + string(name: 'TO_PSMDB_VERSION', value: "${env.PREV_MAJ_PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "KMIP"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped downgrade to major version' + } + } + } + } + stage('downgrade from next major version without encryption') { + steps { + script { + if ((env.PREV_MIN_PSMDB_VERSION != '') && (params.no_encryption == "yes")) { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "${env.REPO}"), + string(name: 'FROM_REPO', value: "release"), + string(name: 'TO_PSMDB_VERSION', value: 
"${env.PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.NEXT_MAJ_PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "NONE"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped downgrade from major version' + } + } + } + } + stage('downgrade from next major version with vault encryption') { + steps { + script { + if (env.NEXT_MAJ_PSMDB_VERSION != '') { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "${env.REPO}"), + string(name: 'FROM_REPO', value: "release"), + string(name: 'TO_PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.NEXT_MAJ_PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "VAULT"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped downgrade from major version' + } + } + } + } + stage('downgrade from next major version with KMIP encryption') { + steps { + script { + if (env.NEXT_MAJ_PSMDB_VERSION != '') { + build job: "psmdb-upgrade-parallel", parameters: [ + string(name: 'TO_REPO', value: "${env.REPO}"), + string(name: 'FROM_REPO', value: "release"), + string(name: 'TO_PSMDB_VERSION', value: "${env.PSMDB_VERSION}"), + string(name: 'FROM_PSMDB_VERSION', value: "${env.NEXT_MAJ_PSMDB_VERSION}"), + string(name: 'ENCRYPTION', value: "KMIP"), + string(name: 'CIPHER', value: "AES256-CBC"), + string(name: 'TESTING_BRANCH', value: "${env.TESTING_BRANCH}") + ] + } + else { + echo 'skipped downgrade from major version' + } + } + } + } } } } diff --git a/psmdb/psmdb-parallel.groovy b/psmdb/psmdb-parallel.groovy index be85f4bd06..c22e937ab2 100644 --- a/psmdb/psmdb-parallel.groovy +++ b/psmdb/psmdb-parallel.groovy @@ -26,7 +26,8 @@ pipeline { string( defaultValue: '4.4.8', description: 'PSMDB Version for tests', - name: 'PSMDB_VERSION') + name: 'PSMDB_VERSION' + ) 
choice( name: 'ENABLE_TOOLKIT', description: 'Enable or disable percona toolkit check', @@ -53,6 +54,13 @@ pipeline { disableConcurrentBuilds() } stages { + stage('Set build name'){ + steps { + script { + currentBuild.displayName = "${params.REPO}-${params.PSMDB_VERSION}" + } + } + } stage('Checkout') { steps { deleteDir() @@ -74,6 +82,11 @@ pipeline { } } } + post { + always { + junit testResults: "**/*-report.xml", keepLongStdio: true, allowEmptyResults: true, skipPublishingChecks: true + } + } } } post { diff --git a/psmdb/psmdb-upgrade-parallel.groovy b/psmdb/psmdb-upgrade-parallel.groovy index a4c9e41a91..58182a19c3 100644 --- a/psmdb/psmdb-upgrade-parallel.groovy +++ b/psmdb/psmdb-upgrade-parallel.groovy @@ -24,6 +24,14 @@ pipeline { 'release' ] ) + choice( + name: 'FROM_REPO_PRO', + description: 'USE PRO repo for base install', + choices: [ + 'false', + 'true' + ] + ) string( defaultValue: '4.4.8', description: 'From this version PSMDB will be updated', @@ -38,6 +46,14 @@ pipeline { 'release' ] ) + choice( + name: 'TO_REPO_PRO', + description: 'USE PRO repo for update', + choices: [ + 'false', + 'true' + ] + ) string( defaultValue: '5.0.2', description: 'To this version PSMDB will be updated', @@ -49,7 +65,8 @@ pipeline { choices: [ 'NONE', 'VAULT', - 'KEYFILE' + 'KEYFILE', + 'KMIP' ] ) choice( @@ -100,9 +117,11 @@ pipeline { } stage('Install old version') { steps { + withCredentials([usernamePassword(credentialsId: 'PSMDB_PRIVATE_REPO_ACCESS', passwordVariable: 'PASSWORD', usernameVariable: 'USERNAME')]) { script { runMoleculeCommandParallelWithVariableList(pdmdbOperatingSystems(), moleculeDir, "converge", env.OLDVERSIONS) } + } } } stage('Test old version') { @@ -114,9 +133,11 @@ pipeline { } stage('Install new version') { steps { + withCredentials([usernamePassword(credentialsId: 'PSMDB_PRIVATE_REPO_ACCESS', passwordVariable: 'PASSWORD', usernameVariable: 'USERNAME')]) { script { runMoleculeCommandParallelWithVariableList(pdmdbOperatingSystems(), 
moleculeDir, "side-effect", env.NEWVERSIONS) } + } } } stage('Test new version') { diff --git a/psmdb/psmdb-upgrade.groovy b/psmdb/psmdb-upgrade.groovy index 6f85a93051..46f461b2cc 100644 --- a/psmdb/psmdb-upgrade.groovy +++ b/psmdb/psmdb-upgrade.groovy @@ -29,6 +29,14 @@ pipeline { 'release' ] ) + choice( + name: 'FROM_REPO_PRO', + description: 'USE PRO repo for base install', + choices: [ + 'false', + 'true' + ] + ) string( defaultValue: '4.4.8', description: 'From this version PSMDB will be updated', @@ -43,6 +51,14 @@ pipeline { 'release' ] ) + choice( + name: 'TO_REPO_PRO', + description: 'Use PRO repo for update', + choices: [ + 'false', + 'true' + ] + ) string( defaultValue: '5.0.2', description: 'To this version PDMDB will be updated', @@ -54,7 +70,8 @@ pipeline { choices: [ 'NONE', 'VAULT', - 'KEYFILE' + 'KEYFILE', + 'KMIP' ] ) choice( @@ -112,9 +129,11 @@ pipeline { } stage ('Run playbook for test with old version') { steps { + withCredentials([usernamePassword(credentialsId: 'PSMDB_PRIVATE_REPO_ACCESS', passwordVariable: 'PASSWORD', usernameVariable: 'USERNAME')]){ script{ moleculeExecuteActionWithVariableListAndScenario(moleculeDir, "converge", env.PLATFORM, env.OLDVERSIONS) } + } } } stage ('Start testinfra tests for old version') { @@ -126,9 +145,11 @@ pipeline { } stage ('Run playbook for test with new version') { steps { + withCredentials([usernamePassword(credentialsId: 'PSMDB_PRIVATE_REPO_ACCESS', passwordVariable: 'PASSWORD', usernameVariable: 'USERNAME')]) { script{ moleculeExecuteActionWithVariableListAndScenario(moleculeDir, "side-effect", env.PLATFORM, env.NEWVERSIONS) } + } } } stage ('Start testinfra tests for new version') { diff --git a/psmdb/psmdb.groovy b/psmdb/psmdb.groovy index 2cb09dce3c..f19d3f29fb 100644 --- a/psmdb/psmdb.groovy +++ b/psmdb/psmdb.groovy @@ -40,6 +40,14 @@ pipeline { 'true' ] ) + choice( + name: 'GATED_BUILD', + description: 'Test private repo?', + choices: [ + 'false', + 'true' + ] + ) string( defaultValue: 'main', 
description: 'Branch for testing repository', @@ -79,9 +87,11 @@ pipeline { } stage ('Run playbook for test') { steps { + withCredentials([usernamePassword(credentialsId: 'PSMDB_PRIVATE_REPO_ACCESS', passwordVariable: 'PASSWORD', usernameVariable: 'USERNAME')]) { script{ moleculeExecuteActionWithScenario(moleculeDir, "converge", env.PLATFORM) } + } } } stage ('Start testinfra tests') { diff --git a/pt/jenkins/percona-toolkit.groovy b/pt/jenkins/percona-toolkit.groovy index e3080f16d6..a64d1449b1 100644 --- a/pt/jenkins/percona-toolkit.groovy +++ b/pt/jenkins/percona-toolkit.groovy @@ -158,19 +158,6 @@ pipeline { uploadRPMfromAWS("rpm/", AWS_STASH_PATH) } } - stage('Ubuntu Bionic(18.04)') { - agent { - label 'docker' - } - steps { - cleanUpWS() - popArtifactFolder("source_deb/", AWS_STASH_PATH) - buildStage("ubuntu:bionic", "--build_deb=1") - - pushArtifactFolder("deb/", AWS_STASH_PATH) - uploadDEBfromAWS("deb/", AWS_STASH_PATH) - } - } stage('Ubuntu Focal(20.04)') { agent { label 'docker' diff --git a/pxb/jenkins/pxb-80.groovy b/pxb/jenkins/pxb-80.groovy index af291f231c..3f185534f4 100644 --- a/pxb/jenkins/pxb-80.groovy +++ b/pxb/jenkins/pxb-80.groovy @@ -47,8 +47,8 @@ pipeline { defaultValue: '1', description: 'DEB release value', name: 'DEB_RELEASE') - string( - defaultValue: 'pxb-80', + choice( + choices: 'pxb-80\npxb-8x-innovation\npxb-8x-lts\npxb-9x-innovation\npxb-9x-lts', description: 'PXB repo name', name: 'PXB_REPO') choice( @@ -66,7 +66,7 @@ pipeline { steps { // slackNotify("", "#00FF00", "[${JOB_NAME}]: starting build for ${BRANCH} - [${BUILD_URL}]") cleanUpWS() - buildStage("ubuntu:bionic", "--get_sources=1") + buildStage("ubuntu:focal", "--get_sources=1") sh ''' REPO_UPLOAD_PATH=$(grep "UPLOAD" test/percona-xtrabackup-8.0.properties | cut -d = -f 2 | sed "s:$:${BUILD_NUMBER}:") AWS_STASH_PATH=$(echo ${REPO_UPLOAD_PATH} | sed "s:UPLOAD/experimental/::") @@ -158,19 +158,6 @@ pipeline { uploadRPMfromAWS("rpm/", AWS_STASH_PATH) } } - stage('Ubuntu 
Bionic(18.04)') { - agent { - label 'docker-32gb' - } - steps { - cleanUpWS() - popArtifactFolder("source_deb/", AWS_STASH_PATH) - buildStage("ubuntu:bionic", "--build_deb=1") - - pushArtifactFolder("deb/", AWS_STASH_PATH) - uploadDEBfromAWS("deb/", AWS_STASH_PATH) - } - } stage('Ubuntu Focal(20.04)') { agent { label 'docker-32gb' @@ -202,12 +189,19 @@ pipeline { label 'docker-32gb' } steps { - cleanUpWS() - popArtifactFolder("source_deb/", AWS_STASH_PATH) - buildStage("debian:buster", "--build_deb=1") + script { + PXB_MAJOR_RELEASE = sh(returnStdout: true, script: ''' echo ${BRANCH} | sed "s/release-//g" | sed "s/\\.//g" | awk '{print substr($0, 0, 2)}' ''').trim() + if ("${PXB_MAJOR_RELEASE}" == "80") { + cleanUpWS() + popArtifactFolder("source_deb/", AWS_STASH_PATH) + buildStage("debian:buster", "--build_deb=1") - pushArtifactFolder("deb/", AWS_STASH_PATH) - uploadDEBfromAWS("deb/", AWS_STASH_PATH) + pushArtifactFolder("deb/", AWS_STASH_PATH) + uploadDEBfromAWS("deb/", AWS_STASH_PATH) + } else { + echo "The step is skiped" + } + } } } stage('Debian Bullseye(11)') { diff --git a/pxb/pxb-site-check.groovy b/pxb/pxb-site-check.groovy new file mode 100644 index 0000000000..4be7239495 --- /dev/null +++ b/pxb/pxb-site-check.groovy @@ -0,0 +1,65 @@ +library changelog: false, identifier: "lib@master", retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) + +pipeline { + agent { + label 'docker' + } + environment { + PATH = '/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/ec2-user/.local/bin' + } + parameters { + string( + defaultValue: '8.0.34-29.1', + description: 'Full PXB version for tests. 
Examples: 2.4.28-1; 8.0.34-29.1; 8.1.0-1.1', + name: 'PXB_VER_FULL') + string( + defaultValue: 'master', + description: 'Branch for testing repository', + name: 'TESTING_BRANCH') + string( + defaultValue: 'Percona-QA', + description: 'Branch for testing repository', + name: 'TESTING_GIT_ACCOUNT') + } + stages { + stage('Set build name'){ + steps { + script { + currentBuild.displayName = "#${BUILD_NUMBER}-${params.PXB_VER_FULL}-${params.TESTING_BRANCH}" + } + } + } + stage('Checkout') { + steps { + deleteDir() + git poll: false, branch: TESTING_BRANCH, url: "https://github.com/${TESTING_GIT_ACCOUNT}/package-testing.git" + } + } + stage('Test') { + steps { + script { + sh """ + cd site_checks + docker run --env PXB_VER_FULL=${params.PXB_VER_FULL} \ + --rm -v `pwd`:/tmp -w /tmp python bash -c \ + 'pip3 install requests pytest setuptools && pytest -s --junitxml=junit.xml test_pxb.py || [ \$? = 1 ] ' + """ + } + } + } + } + post { + always { + script { + junit testResults: "**/junit.xml", keepLongStdio: true, allowEmptyResults: true, skipPublishingChecks: true + sh ''' + sudo rm -rf ./* + ''' + deleteDir() + } + } + } +} diff --git a/pxb/pxb-site-check.yml b/pxb/pxb-site-check.yml new file mode 100644 index 0000000000..7ea07f153e --- /dev/null +++ b/pxb/pxb-site-check.yml @@ -0,0 +1,14 @@ +- job: + name: pxb-site-check + project-type: pipeline + description: | + Do not edit this job through the web! 
+ pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: pxb/pxb-site-check.groovy diff --git a/pxc/jenkins/percona-xtradb-cluster-8.0.groovy b/pxc/jenkins/percona-xtradb-cluster-8.0.groovy index 6d765700e4..a6194184dc 100644 --- a/pxc/jenkins/percona-xtradb-cluster-8.0.groovy +++ b/pxc/jenkins/percona-xtradb-cluster-8.0.groovy @@ -176,21 +176,6 @@ pipeline { uploadRPMfromAWS("rpm/", AWS_STASH_PATH) } } - stage('Ubuntu Bionic(18.04)') { - agent { - label 'docker-32gb' - } - steps { - cleanUpWS() - unstash 'pxc-80.properties' - popArtifactFolder("source_deb/", AWS_STASH_PATH) - buildStage("ubuntu:bionic", "--build_deb=1") - - stash includes: 'test/pxc-80.properties', name: 'pxc-80.properties' - pushArtifactFolder("deb/", AWS_STASH_PATH) - uploadDEBfromAWS("deb/", AWS_STASH_PATH) - } - } stage('Ubuntu Focal(20.04)') { agent { label 'docker-32gb' @@ -355,7 +340,7 @@ pipeline { } stage('Build docker containers') { agent { - label 'min-bionic-x64' + label 'min-buster-x64' } steps { echo "====> Build docker containers" diff --git a/pxc/pxc-site-check.groovy b/pxc/pxc-site-check.groovy new file mode 100644 index 0000000000..087fb9f145 --- /dev/null +++ b/pxc/pxc-site-check.groovy @@ -0,0 +1,69 @@ +library changelog: false, identifier: "lib@master", retriever: modernSCM([ + $class: 'GitSCMSource', + remote: 'https://github.com/Percona-Lab/jenkins-pipelines.git' +]) + +pipeline { + agent { + label 'docker' + } + environment { + PATH = '/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/ec2-user/.local/bin' + } + parameters { + string( + defaultValue: '8.0.34-26.1', + description: 'Full PXC version for tests. Examples: 5.7.43-31.65.1; 8.0.34-26.1; 8.1.0-1.1', + name: 'PXC_VER_FULL') + string( + defaultValue: '47', + description: 'PXC 5.7 INNODB version for tests. Only for 5.7. 
Leave default for PXC 8.0 +', + name: 'PXC57_INNODB') + string( + defaultValue: 'master', + description: 'Branch for testing repository', + name: 'TESTING_BRANCH') + string( + defaultValue: 'Percona-QA', + description: 'Branch for testing repository', + name: 'TESTING_GIT_ACCOUNT') + } + stages { + stage('Set build name'){ + steps { + script { + currentBuild.displayName = "#${BUILD_NUMBER}-${params.PXC_VER_FULL}-${params.TESTING_BRANCH}" + } + } + } + stage('Checkout') { + steps { + deleteDir() + git poll: false, branch: TESTING_BRANCH, url: "https://github.com/${TESTING_GIT_ACCOUNT}/package-testing.git" + } + } + stage('Test') { + steps { + script { + sh """ + cd site_checks + docker run --env PXC_VER_FULL=${params.PXC_VER_FULL} --env PXC57_INNODB=${params.PXC57_INNODB} \ + --rm -v `pwd`:/tmp -w /tmp python bash -c \ + 'pip3 install requests pytest setuptools && pytest -s --junitxml=junit.xml test_pxc.py || [ \$? = 1 ] ' + """ + } + } + } + } + post { + always { + script { + junit testResults: "**/junit.xml", keepLongStdio: true, allowEmptyResults: true, skipPublishingChecks: true + sh ''' + sudo rm -rf ./* + ''' + deleteDir() + } + } + } +} diff --git a/pxc/pxc-site-check.yml b/pxc/pxc-site-check.yml new file mode 100644 index 0000000000..dd3b4fe634 --- /dev/null +++ b/pxc/pxc-site-check.yml @@ -0,0 +1,14 @@ +- job: + name: pxc-site-check + project-type: pipeline + description: | + Do not edit this job through the web! 
+ pipeline-scm: + scm: + - git: + url: https://github.com/Percona-Lab/jenkins-pipelines.git + branches: + - 'master' + wipe-workspace: false + lightweight-checkout: true + script-path: pxc/pxc-site-check.groovy diff --git a/vars/installMoleculePPG.groovy b/vars/installMoleculePPG.groovy new file mode 100644 index 0000000000..240b17a595 --- /dev/null +++ b/vars/installMoleculePPG.groovy @@ -0,0 +1,19 @@ +def call() { + sh """ + sudo dnf module install -y python38 + sudo alternatives --set python3 /usr/bin/python3.8 + sudo dnf install -y gcc python38-pip python38-devel libselinux-python3 python3-libselinux + sudo yum remove ansible -y + python3 -m venv virtenv --system-site-packages + . virtenv/bin/activate + python3 --version + python3 -m pip install --upgrade pip + python3 -m pip install --upgrade setuptools + python3 -m pip install --upgrade setuptools-rust + python3 -m pip install --upgrade molecule==3.3.0 molecule[ansible] molecule-ec2==0.3 pytest-testinfra pytest "ansible-lint>=5.1.1,<6.0.0" boto3 boto selinux + sudo cp -r /usr/lib64/python3.6/site-packages/selinux /usr/lib64/python3.8/site-packages + echo $PATH + pip list + ansible --version && molecule --version + """ +}