diff --git a/.github/pykmip/Dockerfile b/.github/pykmip/Dockerfile
index d77d029735..6ceaea8ee2 100644
--- a/.github/pykmip/Dockerfile
+++ b/.github/pykmip/Dockerfile
@@ -17,9 +17,10 @@ RUN apk add --no-cache \
     pip3 install --upgrade typing-extensions>=4.13.2 && \
     git clone https://github.com/openkmip/pykmip.git && \
     cd pykmip && \
-    python3 setup.py install && \
+    git checkout 6cd44b572b0ca55adf01a8a12078b2284602e64c && \
+    pip3 install . && \
     apk del .build-deps && \
-    rm -rf /pykmip && \
+    rm -rf /var/cache/apk/* /pykmip && \
     mkdir /pykmip
 
 ADD ./bin /usr/local/bin
diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
index 9e1a02a23b..d930b04643 100644
--- a/.github/workflows/tests.yaml
+++ b/.github/workflows/tests.yaml
@@ -816,98 +816,6 @@ jobs:
           source: /tmp/artifacts
         if: always()
 
-  ceph-backend-test:
-    runs-on: ubuntu-24.04
-    needs: build
-    env:
-      S3BACKEND: mem
-      S3DATA: multiple
-      S3KMS: file
-      CI_CEPH: 'true'
-      MPU_TESTING: "yes"
-      S3_LOCATION_FILE: /usr/src/app/tests/locationConfig/locationConfigCeph.json
-      MONGODB_IMAGE: ghcr.io/${{ github.repository }}/ci-mongodb:${{ github.sha }}
-      CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}:${{ github.sha }}-testcoverage
-      JOB_NAME: ${{ github.job }}
-      ENABLE_NULL_VERSION_COMPAT_MODE: true # needed with mongodb backend
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Login to GitHub Registry
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ github.token }}
-      - name: Setup CI environment
-        uses: ./.github/actions/setup-ci
-      - uses: ruby/setup-ruby@v1
-        with:
-          ruby-version: '3.2'
-      - name: Install Ruby dependencies
-        run: |
-          gem install nokogiri:1.15.5 excon:0.111.0 fog-aws:3.19.0 json:2.7.6 mime-types:3.5.2 rspec:3.12.0
-      - name: Install Java dependencies
-        run: |
-          sudo apt-get update && sudo apt-get install -y --fix-missing default-jdk maven
-      - name: Setup CI services
-        run: docker compose --profile ceph up -d
-        working-directory: .github/docker
-        env:
-          S3METADATA: mongodb
-      - name: Run Ceph multiple backend tests
-        run: |-
-          set -ex -o pipefail;
-          bash .github/ceph/wait_for_ceph.sh
-          bash wait_for_local_port.bash 27018 40
-          bash wait_for_local_port.bash 8000 40
-          yarn run multiple_backend_test | tee /tmp/artifacts/${{ github.job }}/multibackend-tests.log
-        env:
-          S3_LOCATION_FILE: tests/locationConfig/locationConfigTests.json
-          S3METADATA: mem
-      - name: Run Java tests
-        run: |-
-          set -ex -o pipefail;
-          mvn test | tee /tmp/artifacts/${{ github.job }}/java-tests.log
-        working-directory: tests/functional/jaws
-      - name: Run Ruby tests
-        run: |-
-          set -ex -o pipefail;
-          rspec -fd --backtrace tests.rb | tee /tmp/artifacts/${{ github.job }}/ruby-tests.log
-        working-directory: tests/functional/fog
-      - name: Run Javascript AWS SDK tests
-        run: |-
-          set -ex -o pipefail;
-          yarn run ft_awssdk | tee /tmp/artifacts/${{ github.job }}/js-awssdk-tests.log;
-          yarn run ft_s3cmd | tee /tmp/artifacts/${{ github.job }}/js-s3cmd-tests.log;
-        env:
-          S3_LOCATION_FILE: tests/locationConfig/locationConfigCeph.json
-          S3BACKEND: file
-          S3VAULT: mem
-          S3METADATA: mongodb
-      - name: Cleanup and upload coverage
-        uses: ./.github/actions/cleanup-and-coverage
-        with:
-          profiles: ceph
-          codecov-token: ${{ secrets.CODECOV_TOKEN }}
-        if: always()
-      - name: Upload test results to Codecov
-        uses: codecov/test-results-action@v1
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          files: '**/junit/*junit*.xml'
-          flags: ceph-backend-test
-        if: always() && !cancelled()
-      - name: Upload logs to artifacts
-        uses: scality/action-artifacts@v4
-        with:
-          method: upload
-          url: https://artifacts.scality.net
-          user: ${{ secrets.ARTIFACTS_USER }}
-          password: ${{ secrets.ARTIFACTS_PASSWORD }}
-          source: /tmp/artifacts
-        if: always()
-
   # This test with the final yarn run ft_sse_arn covers more code than the kmip tests
   sse-kms-migration-tests:
     strategy:
diff --git a/lib/routes/routeVeeam.js b/lib/routes/routeVeeam.js
index 972eddc62f..bfd83ae1e5 100644
--- a/lib/routes/routeVeeam.js
+++ b/lib/routes/routeVeeam.js
@@ -29,6 +29,24 @@ const apiToAction = {
     LIST: 'ListObjects',
 };
 
+const allowedSdkQueryKeys = new Set([
+    'x-id',
+    'x-amz-user-agent',
+]);
+
+// Allowed query parameters for SigV4 presigned URLs (lower-cased).
+const allowedPresignQueryKeys = new Set([
+    'x-amz-algorithm',
+    'x-amz-credential',
+    'x-amz-date',
+    'x-amz-expires',
+    'x-amz-signedheaders',
+    'x-amz-signature',
+    'x-amz-security-token',
+    // Used by Veeam UI for delete operations.
+    'tagging',
+]);
+
 const routeMap = {
     GET: getVeeamFile,
     PUT: putVeeamFile,
@@ -64,13 +82,27 @@ function checkBucketAndKey(bucketName, objectKey, requestQueryParams, method, lo
     }
     if (method !== 'LIST') {
         // Reject any unsupported request, but allow downloads and deletes from UI
-        // Download relies on GETs calls with auth in query parameters, that can be
-        // checked if 'X-Amz-Credential' is included.
-        // Deletion requires that the tags of the object are returned.
-        if (requestQueryParams && Object.keys(requestQueryParams).length > 0
-            && !(method === 'GET' && (requestQueryParams['X-Amz-Credential'] || ('tagging' in requestQueryParams)))) {
-            return errorInstances.InvalidRequest
-                .customizeDescription('The Veeam SOSAPI folder does not support this action.');
+        // Download relies on GETs calls with auth in query parameters, and delete
+        // requires that the tags of the object are returned.
+        const originalQuery = requestQueryParams || {};
+
+        for (const [key, value] of Object.entries(originalQuery)) {
+            const normalizedKey = key.toLowerCase();
+
+            // Ensure x-id, when present, matches the expected action for the method.
+            if (normalizedKey === 'x-id' && value !== apiToAction[method]) {
+                return errorInstances.InvalidRequest
+                    .customizeDescription('The Veeam SOSAPI folder does not support this action.');
+            }
+
+            const isAllowedSdkKey = allowedSdkQueryKeys.has(normalizedKey)
+                || normalizedKey.startsWith('x-amz-sdk-');
+            const isAllowedPresignKey = allowedPresignQueryKeys.has(normalizedKey);
+
+            if (!isAllowedSdkKey && !isAllowedPresignKey) {
+                return errorInstances.InvalidRequest
+                    .customizeDescription('The Veeam SOSAPI folder does not support this action.');
+            }
         }
         if (typeof objectKey !== 'string' || !validObjectKeys.includes(objectKey)) {
             log.debug('invalid object name', { objectKey });
diff --git a/package.json b/package.json
index 2e7ebd72d1..0bed10b108 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@zenko/cloudserver",
-  "version": "9.0.33",
+  "version": "9.0.34",
   "description": "Zenko CloudServer, an open-source Node.js implementation of a server handling the Amazon S3 protocol",
   "main": "index.js",
   "engines": {
diff --git a/tests/unit/internal/routeVeeam.js b/tests/unit/internal/routeVeeam.js
index 7b2b49b9c8..49f2d3ba4b 100644
--- a/tests/unit/internal/routeVeeam.js
+++ b/tests/unit/internal/routeVeeam.js
@@ -77,6 +77,117 @@ describe('RouteVeeam: checkBucketAndKey', () => {
             assert.strictEqual(routeVeeam.checkBucketAndKey(...test), undefined);
         });
     });
+
+    it('should allow SigV4 presigned GET query parameters in mixed case', () => {
+        const err = routeVeeam.checkBucketAndKey(
+            'test',
+            '.system-d26a9498-cb7c-4a87-a44a-8ae204f5ba6c/system.xml',
+            {
+                'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
+                'X-Amz-Credential': 'cred',
+                'X-Amz-Date': '20240101T000000Z',
+                'X-Amz-Expires': '900',
+                'X-Amz-SignedHeaders': 'host',
+                'X-Amz-Signature': 'signature',
+                'X-Amz-Security-Token': 'token',
+            },
+            'GET',
+            log,
+        );
+        assert.strictEqual(err, undefined);
+    });
+
+    it('should allow SigV4-style query parameters on non-GET when they are presigned', () => {
+        const err = routeVeeam.checkBucketAndKey(
+            'test',
+            '.system-d26a9498-cb7c-4a87-a44a-8ae204f5ba6c/system.xml',
+            {
+                'x-amz-algorithm': 'AWS4-HMAC-SHA256',
+                'x-amz-credential': 'cred',
+                'x-amz-date': '20240101T000000Z',
+                'x-amz-expires': '900',
+                'x-amz-signedheaders': 'host',
+                'x-amz-signature': 'signature',
+            },
+            'DELETE',
+            log,
+        );
+        assert.strictEqual(err, undefined);
+    });
+
+    it('should reject unexpected query parameters even when presigned GET keys are present', () => {
+        const err = routeVeeam.checkBucketAndKey(
+            'test',
+            '.system-d26a9498-cb7c-4a87-a44a-8ae204f5ba6c/system.xml',
+            {
+                'X-Amz-Credential': 'a',
+                extra: 'not-allowed',
+            },
+            'GET',
+            log,
+        );
+        assert.strictEqual(err.is.InvalidRequest, true);
+    });
+
+    it('should allow AWS SDK x-id=PutObject query on PUT for system.xml', () => {
+        const err = routeVeeam.checkBucketAndKey(
+            'test',
+            '.system-d26a9498-cb7c-4a87-a44a-8ae204f5ba6c/system.xml',
+            { 'x-id': 'PutObject' },
+            'PUT',
+            log,
+        );
+        assert.strictEqual(err, undefined);
+    });
+
+    it('should allow AWS SDK auxiliary x-amz-sdk-* query params on PUT for system.xml', () => {
+        const err = routeVeeam.checkBucketAndKey(
+            'test',
+            '.system-d26a9498-cb7c-4a87-a44a-8ae204f5ba6c/system.xml',
+            {
+                'x-id': 'PutObject',
+                'x-amz-sdk-request': 'attempt=1',
+                'x-amz-sdk-invocation-id': 'abc-123',
+                'x-amz-user-agent': 'aws-sdk-js-v3',
+            },
+            'PUT',
+            log,
+        );
+        assert.strictEqual(err, undefined);
+    });
+
+    it('should reject mismatched x-id value on PUT for system.xml', () => {
+        const err = routeVeeam.checkBucketAndKey(
+            'test',
+            '.system-d26a9498-cb7c-4a87-a44a-8ae204f5ba6c/system.xml',
+            { 'x-id': 'GetObject' },
+            'PUT',
+            log,
+        );
+        assert.strictEqual(err.is.InvalidRequest, true);
+    });
+
+    it('should reject mismatched x-id value on GET for system.xml', () => {
+        const err = routeVeeam.checkBucketAndKey(
+            'test',
+            '.system-d26a9498-cb7c-4a87-a44a-8ae204f5ba6c/system.xml',
+            { 'x-id': 'PutObject' },
+            'GET',
+            log,
+        );
+        assert.strictEqual(err.is.InvalidRequest, true);
+    });
+
+    it('should accept x-id with different casing when value matches action', () => {
+        const err = routeVeeam.checkBucketAndKey(
+            'test',
+            '.system-d26a9498-cb7c-4a87-a44a-8ae204f5ba6c/system.xml',
+            { 'X-Id': 'GetObject' },
+            'GET',
+            log,
+        );
+        assert.strictEqual(err, undefined);
+    });
 });
 
 describe('RouteVeeam: checkBucketAndKey', () => {