diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml
index f5b83a39549..a737f8e178d 100644
--- a/.github/workflows/appsec.yml
+++ b/.github/workflows/appsec.yml
@@ -302,3 +302,35 @@ jobs:
       - uses: ./.github/actions/node/active-lts
       - run: yarn test:appsec:plugins:ci
       - uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
+
+  kafka:
+    runs-on: ubuntu-latest
+    services:
+      kafka:
+        image: apache/kafka-native:3.8.0-rc2
+        env:
+          KAFKA_PROCESS_ROLES: broker,controller
+          KAFKA_NODE_ID: '1'
+          KAFKA_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:9093
+          KAFKA_CONTROLLER_QUORUM_VOTERS: 1@127.0.0.1:9093
+          KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
+          KAFKA_CLUSTER_ID: r4zt_wrqTRuT7W2NJsB_GA
+          KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092
+          KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+          KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
+          KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: '1'
+          KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: '0'
+        ports:
+          - 9092:9092
+          - 9093:9093
+    env:
+      PLUGINS: kafkajs
+      SERVICES: kafka
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: ./.github/actions/node/oldest-maintenance-lts
+      - uses: ./.github/actions/install
+      - run: yarn test:appsec:plugins:ci
+      - uses: ./.github/actions/node/active-lts
+      - run: yarn test:appsec:plugins:ci
+      - uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
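The new job boots a single-node KRaft broker (apache/kafka-native, no ZooKeeper) and advertises it at 127.0.0.1:9092, which is where the kafkajs appsec plugin tests connect. For reference, a minimal client check against that service container could look like the sketch below; the file, client id, and topic name are hypothetical and not part of this diff.

'use strict'

// Hypothetical smoke test against the CI service container above.
// Assumes the broker advertised at PLAINTEXT://127.0.0.1:9092.
const { Kafka } = require('kafkajs')

async function main () {
  const kafka = new Kafka({ clientId: 'ci-smoke', brokers: ['127.0.0.1:9092'] })
  const producer = kafka.producer()

  await producer.connect()
  await producer.send({
    topic: 'ci-smoke-topic', // created on demand if topic auto-creation is enabled
    messages: [{ key: 'k', value: 'v' }]
  })
  await producer.disconnect()
}

main().catch((err) => {
  console.error(err)
  process.exit(1)
})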
diff --git a/packages/datadog-instrumentations/src/confluentinc-kafka-javascript.js b/packages/datadog-instrumentations/src/confluentinc-kafka-javascript.js
index 7e92a20bb04..cd7c4321118 100644
--- a/packages/datadog-instrumentations/src/confluentinc-kafka-javascript.js
+++ b/packages/datadog-instrumentations/src/confluentinc-kafka-javascript.js
@@ -2,8 +2,7 @@
 
 const {
   addHook,
-  channel,
-  AsyncResource
+  channel
 } = require('./helpers/instrument')
 
 const shimmer = require('../../datadog-shimmer')
@@ -32,7 +31,6 @@ const disabledHeaderWeakSet = new WeakSet()
 // we need to store the offset per partition per topic for the consumer to track offsets for DSM
 const latestConsumerOffsets = new Map()
 
-// Customize the instrumentation for Confluent Kafka JavaScript
 addHook({ name: '@confluentinc/kafka-javascript', versions: ['>=1.0.0'] }, (module) => {
   // Hook native module classes first
   instrumentBaseModule(module)
@@ -55,30 +53,31 @@ function instrumentBaseModule (module) {
     // Hook the produce method
     if (typeof producer?.produce === 'function') {
       shimmer.wrap(producer, 'produce', function wrapProduce (produce) {
-        return function wrappedProduce (topic, partition, message, key, timestamp, opaque) {
+        return function wrappedProduce (topic, partition, message, key, timestamp, opaque, headers) {
           if (!channels.producerStart.hasSubscribers) {
             return produce.apply(this, arguments)
           }
 
           const brokers = this.globalConfig?.['bootstrap.servers']
 
-          const asyncResource = new AsyncResource('bound-anonymous-fn')
-          return asyncResource.runInAsyncScope(() => {
-            try {
-              channels.producerStart.publish({
-                topic,
-                messages: [{ key, value: message }],
-                bootstrapServers: brokers
-              })
+          const ctx = {}
+          ctx.topic = topic
+          ctx.messages = [{ key, value: message }]
+          ctx.bootstrapServers = brokers
 
-              const result = produce.apply(this, arguments)
+          return channels.producerStart.runStores(ctx, () => {
+            try {
+              headers = convertHeaders(ctx.messages[0].headers)
+              const result = produce.apply(this, [topic, partition, message, key, timestamp, opaque, headers])
 
-              channels.producerCommit.publish(undefined)
-              channels.producerFinish.publish(undefined)
+              ctx.res = result
+              channels.producerCommit.publish(ctx)
+              channels.producerFinish.runStores(ctx, () => {})
               return result
             } catch (error) {
-              channels.producerError.publish(error)
-              channels.producerFinish.publish(undefined)
+              ctx.err = error
+              channels.producerError.publish(ctx)
+              channels.producerFinish.runStores(ctx, () => {})
               throw error
             }
           })
@@ -110,32 +109,37 @@ function instrumentBaseModule (module) {
             callback = numMessages
           }
 
+          const ctx = {}
           // Handle callback-based consumption
           if (typeof callback === 'function') {
             return consume.call(this, numMessages, function wrappedCallback (err, messages) {
               if (messages && messages.length > 0) {
                 messages.forEach(message => {
-                  channels.consumerStart.publish({
-                    topic: message?.topic,
-                    partition: message?.partition,
-                    message,
-                    groupId
-                  })
+                  ctx.topic = message?.topic
+                  ctx.partition = message?.partition
+                  ctx.message = message
+                  ctx.groupId = groupId
+
+                  channels.consumerStart.runStores(ctx, () => {})
                   updateLatestOffset(message?.topic, message?.partition, message?.offset, groupId)
                 })
               }
 
               if (err) {
-                channels.consumerError.publish(err)
+                ctx.err = err
+                channels.consumerError.publish(ctx)
               }
 
               try {
                 const result = callback.apply(this, arguments)
-                channels.consumerFinish.publish(undefined)
+                if (messages && messages.length > 0) {
+                  channels.consumerFinish.runStores(ctx, () => {})
+                }
                 return result
               } catch (error) {
-                channels.consumerError.publish(error)
-                channels.consumerFinish.publish(undefined)
+                ctx.err = error
+                channels.consumerError.publish(ctx)
+                channels.consumerFinish.runStores(ctx, () => {})
                 throw error
              }
            })
@@ -204,44 +208,42 @@ function instrumentKafkaJS (kafkaJS) {
           return send.apply(this, arguments)
         }
 
-        const asyncResource = new AsyncResource('bound-anonymous-fn')
-        return asyncResource.runInAsyncScope(() => {
-          try {
-            channels.producerStart.publish({
-              topic: payload?.topic,
-              messages: payload?.messages || [],
-              bootstrapServers: kafka._ddBrokers,
-              disableHeaderInjection: disabledHeaderWeakSet.has(producer)
-            })
+        const ctx = {}
+        ctx.topic = payload?.topic
+        ctx.messages = payload?.messages || []
+        ctx.bootstrapServers = kafka._ddBrokers
+        ctx.disableHeaderInjection = disabledHeaderWeakSet.has(producer)
 
+        return channels.producerStart.runStores(ctx, () => {
+          try {
             const result = send.apply(this, arguments)
 
-            result.then(
-              asyncResource.bind(res => {
-                channels.producerCommit.publish(res)
-                channels.producerFinish.publish(undefined)
-              }),
-              asyncResource.bind(err => {
-                if (err) {
-                  // Fixes bug where we would inject message headers for kafka brokers
-                  // that don't support headers (version <0.11). On the error, we disable
-                  // header injection. Tnfortunately the error name / type is not more specific.
-                  // This approach is implemented by other tracers as well.
-                  if (err.name === 'KafkaJSError' && err.type === 'ERR_UNKNOWN') {
-                    disabledHeaderWeakSet.add(producer)
-                    log.error('Kafka Broker responded with UNKNOWN_SERVER_ERROR (-1). ' +
-                      'Please look at broker logs for more information. ' +
-                      'Tracer message header injection for Kafka is disabled.')
-                  }
-                  channels.producerError.publish(err)
+            result.then((res) => {
+              ctx.res = res
+              channels.producerCommit.publish(ctx)
+              channels.producerFinish.publish(undefined)
+            }, (err) => {
+              if (err) {
+                // Fixes bug where we would inject message headers for kafka brokers
+                // that don't support headers (version <0.11). On the error, we disable
+                // header injection. Unfortunately the error name / type is not more specific.
+                // This approach is implemented by other tracers as well.
+                if (err.name === 'KafkaJSError' && err.type === 'ERR_UNKNOWN') {
+                  disabledHeaderWeakSet.add(producer)
+                  log.error('Kafka Broker responded with UNKNOWN_SERVER_ERROR (-1). ' +
+                    'Please look at broker logs for more information. ' +
+                    'Tracer message header injection for Kafka is disabled.')
                 }
-                channels.producerFinish.publish(undefined)
-              })
-            )
+                ctx.err = err
+                channels.producerError.publish(ctx)
              }
+              channels.producerFinish.publish(undefined)
+            })
 
             return result
           } catch (e) {
-            channels.producerError.publish(e)
+            ctx.err = e
+            channels.producerError.publish(ctx)
             channels.producerFinish.publish(undefined)
             throw e
           }
@@ -350,10 +352,10 @@ function wrapKafkaCallback (callback, { startCh, commitCh, finishCh, errorCh },
   return function wrappedKafkaCallback (payload) {
     const commitPayload = getPayload(payload)
 
-    const asyncResource = new AsyncResource('bound-anonymous-fn')
-    return asyncResource.runInAsyncScope(() => {
-      startCh.publish(commitPayload)
+    const ctx = {}
+    ctx.extractedArgs = commitPayload
 
+    return startCh.runStores(ctx, () => {
       updateLatestOffset(commitPayload?.topic, commitPayload?.partition, commitPayload?.offset, commitPayload?.groupId)
 
       try {
@@ -361,22 +363,25 @@ function wrapKafkaCallback (callback, { startCh, commitCh, finishCh, errorCh },
 
         if (result && typeof result.then === 'function') {
           return result
-            .then(asyncResource.bind(res => {
-              finishCh.publish(undefined)
+            .then((res) => {
+              ctx.res = res
+              finishCh.runStores(ctx, () => {})
               return res
-            }))
-            .catch(asyncResource.bind(err => {
-              errorCh.publish(err)
-              finishCh.publish(undefined)
+            })
+            .catch((err) => {
+              ctx.err = err
+              errorCh.publish(ctx)
+              finishCh.runStores(ctx, () => {})
               throw err
-            }))
+            })
         } else {
-          finishCh.publish(undefined)
+          finishCh.runStores(ctx, () => {})
           return result
         }
       } catch (error) {
-        errorCh.publish(error)
-        finishCh.publish(undefined)
+        ctx.err = error
+        errorCh.publish(ctx)
+        finishCh.runStores(ctx, () => {})
        throw error
      }
    })
@@ -404,3 +409,8 @@ function updateLatestOffset (topic, partition, offset, groupId) {
 function getLatestOffsets () {
   return Array.from(latestConsumerOffsets.values())
 }
+
+function convertHeaders (headers) {
+  // convert headers from an object to an array of objects, each holding a single key/value pair
+  return Object.entries(headers).map(([key, value]) => ({ [key.toString()]: value.toString() }))
+}
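Throughout this file, the AsyncResource('bound-anonymous-fn') pattern is replaced by diagnostics_channel store propagation: the wrapper builds a mutable ctx object, publishes it via channel.runStores(ctx, cb), and then mutates the same object (ctx.res, ctx.err) for the commit, error, and finish events. The sketch below illustrates the bindStore/runStores contract this relies on, per the diagnostics_channel API exposed by dc-polyfill and recent Node versions; the channel name and store shape here are illustrative, not dd-trace's actual ones.

'use strict'

// Illustration of the runStores mechanics used above (assumed semantics).
const { AsyncLocalStorage } = require('async_hooks')
const dc = require('dc-polyfill')

const storage = new AsyncLocalStorage()
const startCh = dc.channel('example:producer:start')

// A subscriber binds a store: the transform maps the published ctx to the
// value that becomes the active store inside runStores callbacks.
startCh.bindStore(storage, (ctx) => ({ span: `produce ${ctx.topic}`, ctx }))

const ctx = { topic: 'my-topic', messages: [{ key: 'k', value: 'v' }] }

startCh.runStores(ctx, () => {
  // Everything in this scope (including awaited continuations) sees the bound
  // store, which is how the tracer keeps a span active without AsyncResource.
  console.log(storage.getStore()) // { span: 'produce my-topic', ctx: {...} }
})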
diff --git a/packages/datadog-instrumentations/src/kafkajs.js b/packages/datadog-instrumentations/src/kafkajs.js
index 483a30e069b..2e3cab3dfdc 100644
--- a/packages/datadog-instrumentations/src/kafkajs.js
+++ b/packages/datadog-instrumentations/src/kafkajs.js
@@ -2,8 +2,7 @@
 
 const {
   channel,
-  addHook,
-  AsyncResource
+  addHook
 } = require('./helpers/instrument')
 
 const shimmer = require('../../datadog-shimmer')
@@ -60,29 +59,31 @@ addHook({ name: 'kafkajs', file: 'src/index.js', versions: ['>=1.4'] }, (BaseKaf
 
     producer.send = function () {
       const wrappedSend = (clusterId) => {
-        const innerAsyncResource = new AsyncResource('bound-anonymous-fn')
-
-        return innerAsyncResource.runInAsyncScope(() => {
-          if (!producerStartCh.hasSubscribers) {
-            return send.apply(this, arguments)
+        const ctx = {}
+        ctx.bootstrapServers = bootstrapServers
+        ctx.clusterId = clusterId
+        ctx.disableHeaderInjection = disabledHeaderWeakSet.has(producer)
+
+        const { topic, messages = [] } = arguments[0]
+        for (const message of messages) {
+          if (message !== null && typeof message === 'object' && !ctx.disableHeaderInjection) {
+            message.headers = message.headers || {}
           }
+        }
+        ctx.topic = topic
+        ctx.messages = messages
 
+        return producerStartCh.runStores(ctx, () => {
           try {
-            const { topic, messages = [] } = arguments[0]
-            producerStartCh.publish({
-              topic,
-              messages,
-              bootstrapServers,
-              clusterId,
-              disableHeaderInjection: disabledHeaderWeakSet.has(producer)
-            })
             const result = send.apply(this, arguments)
 
             result.then(
-              innerAsyncResource.bind(res => {
-                producerFinishCh.publish(undefined)
-                producerCommitCh.publish(res)
-              }),
-              innerAsyncResource.bind(err => {
+              (res) => {
+                ctx.res = res
+                producerFinishCh.runStores(ctx, () => {})
+                producerCommitCh.publish(ctx)
+              },
+              (err) => {
+                ctx.err = err
                 if (err) {
                   // Fixes bug where we would inject message headers for kafka brokers that don't support headers
                   // (version <0.11). On the error, we disable header injection.
@@ -96,14 +97,14 @@ addHook({ name: 'kafkajs', file: 'src/index.js', versions: ['>=1.4'] }, (BaseKaf
                   }
                   producerErrorCh.publish(err)
                 }
-                producerFinishCh.publish(undefined)
+                producerFinishCh.runStores(ctx, () => {})
               })
-            )
 
             return result
           } catch (e) {
-            producerErrorCh.publish(e)
-            producerFinishCh.publish(undefined)
+            ctx.err = e
+            producerErrorCh.publish(ctx)
+            producerFinishCh.runStores(ctx, () => {})
             throw e
           }
         })
@@ -188,29 +189,33 @@ addHook({ name: 'kafkajs', file: 'src/index.js', versions: ['>=1.4'] }, (BaseKaf
 
   const wrappedCallback = (fn, startCh, finishCh, errorCh, extractArgs, clusterId) => {
     return typeof fn === 'function'
       ? function (...args) {
-        const innerAsyncResource = new AsyncResource('bound-anonymous-fn')
-        return innerAsyncResource.runInAsyncScope(() => {
-          const extractedArgs = extractArgs(args, clusterId)
+        const ctx = {}
+        const extractedArgs = extractArgs(args, clusterId)
+        ctx.extractedArgs = extractedArgs
 
-          startCh.publish(extractedArgs)
+        return startCh.runStores(ctx, () => {
           try {
             const result = fn.apply(this, args)
             if (result && typeof result.then === 'function') {
               result.then(
-                innerAsyncResource.bind(() => finishCh.publish(undefined)),
-                innerAsyncResource.bind(err => {
+                (res) => {
+                  ctx.res = res
+                  finishCh.runStores(ctx, () => {})
+                },
+                (err) => {
+                  ctx.err = err
                   if (err) {
-                    errorCh.publish(err)
+                    errorCh.publish(ctx)
                   }
-                  finishCh.publish(undefined)
+                  finishCh.runStores(ctx, () => {})
                 })
-              )
             } else {
-              finishCh.publish(undefined)
+              finishCh.runStores(ctx, () => {})
             }
             return result
           } catch (e) {
-            errorCh.publish(e)
+            ctx.err = e
+            errorCh.publish(ctx)
             finishCh.publish(undefined)
             throw e
           }
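One behavioural detail worth noting in wrappedSend: message.headers is now normalized to an empty object before the start channel fires, so any producer-start subscriber can inject tracing headers in place rather than the instrumentation doing it afterwards. A subscriber taking advantage of that might look like this hedged sketch; the channel name and header key are assumptions, not dd-trace's actual identifiers.

'use strict'

// Hypothetical producer-start subscriber, showing why the wrapper above
// pre-creates message.headers for object-shaped messages.
const dc = require('dc-polyfill')

const producerStartCh = dc.channel('apm:kafkajs:produce:start')

producerStartCh.subscribe((ctx) => {
  if (ctx.disableHeaderInjection) return
  for (const message of ctx.messages) {
    // Safe: the instrumentation initialized headers to {} before publishing.
    message.headers['x-example-trace-id'] = 'example-trace-id'
  }
})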
diff --git a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js
index 75168ebfb26..3ad6b675ffe 100644
--- a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js
+++ b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js
@@ -491,7 +491,7 @@ describe('Plugin', () => {
           tracer.use('@confluentinc/kafka-javascript', { dsmEnabled: true })
           messages = [{ key: 'key1', value: 'test2' }]
           consumer = kafka.consumer({
-            kafkaJS: { groupId: 'test-group', autoCommit: false }
+            kafkaJS: { groupId: 'test-group', fromBeginning: false }
           })
           await consumer.connect()
           await consumer.subscribe({ topic: testTopic })
@@ -515,7 +515,6 @@ describe('Plugin', () => {
 
         afterEach(async () => {
           setDataStreamsContextSpy.restore()
-          await consumer.disconnect()
         })
 
         it('Should set a checkpoint on produce', async () => {
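In this spec, the DSM consumer now opts out of fromBeginning instead of autoCommit, and teardown no longer disconnects the consumer in afterEach. For context, @confluentinc/kafka-javascript nests its kafkajs-compatible options under the kafkaJS key, so the fixture construction reads roughly as below (illustrative, assuming the kafka client defined earlier in the spec file).

// kafkajs-style options live under the kafkaJS key of the config object:
const consumer = kafka.consumer({
  kafkaJS: {
    groupId: 'test-group',
    fromBeginning: false // start from the latest offsets rather than offset 0
  }
})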
diff --git a/packages/datadog-plugin-kafkajs/src/batch-consumer.js b/packages/datadog-plugin-kafkajs/src/batch-consumer.js
index 4c701fac01e..74c3caa1451 100644
--- a/packages/datadog-plugin-kafkajs/src/batch-consumer.js
+++ b/packages/datadog-plugin-kafkajs/src/batch-consumer.js
@@ -6,7 +6,9 @@ class KafkajsBatchConsumerPlugin extends ConsumerPlugin {
   static get id () { return 'kafkajs' }
   static get operation () { return 'consume-batch' }
 
-  start ({ topic, partition, messages, groupId, clusterId }) {
+  start (ctx) {
+    const { topic, messages, groupId, clusterId } = ctx.extractedArgs || ctx
+
     if (!this.config.dsmEnabled) return
     for (const message of messages) {
       if (!message || !message.headers) continue
diff --git a/packages/datadog-plugin-kafkajs/src/consumer.js b/packages/datadog-plugin-kafkajs/src/consumer.js
index 836e0edcfd9..e0fbbeb2fac 100644
--- a/packages/datadog-plugin-kafkajs/src/consumer.js
+++ b/packages/datadog-plugin-kafkajs/src/consumer.js
@@ -64,7 +64,9 @@ class KafkajsConsumerPlugin extends ConsumerPlugin {
     }
   }
 
-  start ({ topic, partition, message, groupId, clusterId }) {
+  bindStart (ctx) {
+    const { topic, partition, message, groupId, clusterId } = ctx.extractedArgs || ctx
+
     let childOf
     const headers = convertToTextMap(message?.headers)
     if (headers) {
@@ -83,7 +85,7 @@ class KafkajsConsumerPlugin extends ConsumerPlugin {
       metrics: {
         'kafka.partition': partition
       }
-    })
+    }, ctx)
     if (message?.offset) span.setTag('kafka.message.offset', message?.offset)
 
     if (this.config.dsmEnabled && headers) {
@@ -97,16 +99,20 @@ class KafkajsConsumerPlugin extends ConsumerPlugin {
     }
 
     if (afterStartCh.hasSubscribers) {
-      afterStartCh.publish({ topic, partition, message, groupId })
+      afterStartCh.publish({ topic, partition, message, groupId, currentStore: ctx.currentStore })
     }
+
+    return ctx.currentStore
   }
 
-  finish () {
+  bindFinish (ctx) {
     if (beforeFinishCh.hasSubscribers) {
       beforeFinishCh.publish()
     }
 
     super.finish()
+
+    return ctx.parentStore
   }
 }
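The plugin side mirrors the instrumentation change: start/finish become bindStart/bindFinish, which the tracing-plugin framework invokes through runStores, and whatever store bindStart returns becomes the active context for the traced scope. A simplified schematic of that contract, assumed from the framework rather than quoted from it:

'use strict'

// Simplified sketch of the bind* contract: bindStart returns the store to
// activate inside runStores, bindFinish the store to restore afterwards.
class ExampleConsumerPlugin {
  bindStart (ctx) {
    // The batch path publishes { extractedArgs }, while the direct path puts
    // the fields on ctx itself, hence the fallback used by the kafkajs plugins.
    const { topic } = ctx.extractedArgs || ctx
    ctx.currentStore = { span: `consume ${topic}` }
    return ctx.currentStore
  }

  bindFinish (ctx) {
    return ctx.parentStore
  }
}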
diff --git a/packages/datadog-plugin-kafkajs/src/producer.js b/packages/datadog-plugin-kafkajs/src/producer.js
index 912886b0f1b..03edd2059f1 100644
--- a/packages/datadog-plugin-kafkajs/src/producer.js
+++ b/packages/datadog-plugin-kafkajs/src/producer.js
@@ -52,7 +52,9 @@ class KafkajsProducerPlugin extends ProducerPlugin {
    * @param {ProducerResponseItem[]} commitList
    * @returns {void}
    */
-  commit (commitList) {
+  commit (ctx) {
+    const commitList = ctx.res
+
     if (!this.config.dsmEnabled) return
     if (!commitList || !Array.isArray(commitList)) return
     const keys = [
@@ -67,7 +69,8 @@ class KafkajsProducerPlugin extends ProducerPlugin {
     }
   }
 
-  start ({ topic, messages, bootstrapServers, clusterId, disableHeaderInjection }) {
+  bindStart (ctx) {
+    const { topic, messages, bootstrapServers, clusterId, disableHeaderInjection } = ctx
     const span = this.startSpan({
       resource: topic,
       meta: {
@@ -79,7 +82,7 @@ class KafkajsProducerPlugin extends ProducerPlugin {
       metrics: {
         'kafka.batch_size': messages.length
       }
-    })
+    }, ctx)
     if (bootstrapServers) {
       span.setTag(BOOTSTRAP_SERVERS_KEY, bootstrapServers)
     }
@@ -105,6 +108,8 @@ class KafkajsProducerPlugin extends ProducerPlugin {
       }
     }
+
+    return ctx.currentStore
   }
 }
diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js
index 826f6204252..ce2efc5ab04 100644
--- a/packages/datadog-plugin-kafkajs/test/index.spec.js
+++ b/packages/datadog-plugin-kafkajs/test/index.spec.js
@@ -2,6 +2,7 @@
 
 const { expect } = require('chai')
 const semver = require('semver')
+const { storage } = require('../../datadog-core')
 const dc = require('dc-polyfill')
 const agent = require('../../dd-trace/test/plugins/agent')
 const { expectSomeSpan, withDefaults } = require('../../dd-trace/test/plugins/helpers')
@@ -353,7 +354,9 @@ describe('Plugin', () => {
             const afterStart = dc.channel('dd-trace:kafkajs:consumer:afterStart')
 
             const spy = sinon.spy(() => {
-              expect(tracer.scope().active()).to.not.be.null
+              const store = storage('legacy').getStore()
+              expect(store).to.not.be.null
+              expect(store).to.not.be.undefined
               afterStart.unsubscribe(spy)
             })
             afterStart.subscribe(spy)
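The producer plugin's commit hook now receives the whole ctx and unwraps the broker response from ctx.res, which for kafkajs is the array of per-partition record metadata that producer.send() resolves with. Roughly, for illustration (field names follow kafkajs' documented RecordMetadata shape):

// What ctx.res typically looks like when commit(ctx) runs: kafkajs resolves
// producer.send() with one metadata entry per topic/partition written.
const ctx = {
  res: [
    { topicName: 'my-topic', partition: 0, errorCode: 0, baseOffset: '42' }
  ]
}
// commit(ctx) iterates ctx.res and forwards each entry to DSM offset tracking.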
diff --git a/packages/dd-trace/src/appsec/iast/context/context-plugin.js b/packages/dd-trace/src/appsec/iast/context/context-plugin.js
index d65b68258ae..de0c7672af0 100644
--- a/packages/dd-trace/src/appsec/iast/context/context-plugin.js
+++ b/packages/dd-trace/src/appsec/iast/context/context-plugin.js
@@ -11,7 +11,7 @@ const { TagKey } = require('../telemetry/iast-metric')
 
 class IastContextPlugin extends IastPlugin {
   startCtxOn (channelName, tag) {
-    super.addSub(channelName, (message) => this.startContext())
+    super.addSub(channelName, (message) => this.startContext(message?.currentStore))
 
     this._getAndRegisterSubscription({
       channelName,
@@ -44,11 +44,10 @@ class IastContextPlugin extends IastPlugin {
     }
   }
 
-  startContext () {
+  startContext (store = storage('legacy').getStore()) {
     let isRequestAcquired = false
     let iastContext
 
-    const store = storage('legacy').getStore()
     if (store) {
       const topContext = this.getTopContext()
       const rootSpan = this.getRootSpan(store)
diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/plugins/kafka.js b/packages/dd-trace/src/appsec/iast/taint-tracking/plugins/kafka.js
index ac95722a996..033eb42ebcb 100644
--- a/packages/dd-trace/src/appsec/iast/taint-tracking/plugins/kafka.js
+++ b/packages/dd-trace/src/appsec/iast/taint-tracking/plugins/kafka.js
@@ -1,7 +1,6 @@
 'use strict'
 
 const shimmer = require('../../../../../../datadog-shimmer')
-const { storage } = require('../../../../../../datadog-core')
 const { getIastContext } = require('../../iast-context')
 const { KAFKA_MESSAGE_KEY, KAFKA_MESSAGE_VALUE } = require('../source-types')
 const { newTaintedObject, newTaintedString } = require('../operations')
@@ -10,7 +9,7 @@ const { SourceIastPlugin } = require('../../iast-plugin')
 class KafkaConsumerIastPlugin extends SourceIastPlugin {
   onConfigure () {
     this.addSub({ channelName: 'dd-trace:kafkajs:consumer:afterStart', tag: [KAFKA_MESSAGE_KEY, KAFKA_MESSAGE_VALUE] },
-      ({ message }) => this.taintKafkaMessage(message)
+      ({ message, currentStore }) => this.taintKafkaMessage(message, currentStore)
     )
   }
 
@@ -21,8 +20,8 @@ class KafkaConsumerIastPlugin extends SourceIastPlugin {
     }
   }
 
-  taintKafkaMessage (message) {
-    const iastContext = getIastContext(storage('legacy').getStore())
+  taintKafkaMessage (message, currentStore) {
+    const iastContext = getIastContext(currentStore)
 
     if (iastContext && message) {
       const { key, value } = message
diff --git a/packages/dd-trace/src/plugins/consumer.js b/packages/dd-trace/src/plugins/consumer.js
index 0a1f223b89a..775eea7c3bb 100644
--- a/packages/dd-trace/src/plugins/consumer.js
+++ b/packages/dd-trace/src/plugins/consumer.js
@@ -7,14 +7,14 @@ class ConsumerPlugin extends InboundPlugin {
   static get kind () { return 'consumer' }
   static get type () { return 'messaging' }
 
-  startSpan (options) {
+  startSpan (options, ctx) {
     if (!options.service) {
       options.service = this.config.service || this.serviceName()
     }
     if (!options.kind) {
       options.kind = this.constructor.kind
     }
-    return super.startSpan(this.operationName(), options)
+    return super.startSpan(this.operationName(), options, ctx)
   }
 }
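Rather than reading the global storage('legacy') store, the IAST plugins now receive the consumer's store explicitly: bindStart puts it on the afterStart message as currentStore, startContext accepts it as a parameter (defaulting to the legacy storage for other callers), and taintKafkaMessage resolves the IAST context from it. Schematically, with a hypothetical stand-in for the plugin callbacks:

'use strict'

// Illustrative subscriber showing the store handoff: the kafkajs consumer
// plugin publishes its currentStore on afterStart, so IAST code no longer
// needs storage('legacy').getStore() to find the active consumer span.
const dc = require('dc-polyfill')

const afterStart = dc.channel('dd-trace:kafkajs:consumer:afterStart')

afterStart.subscribe(({ topic, message, currentStore }) => {
  // currentStore is the store returned by the consumer plugin's bindStart;
  // handleMessage is a hypothetical stand-in for the IAST plugin callbacks.
  handleMessage(topic, message, currentStore)
})

function handleMessage (topic, message, store) {
  console.log('active store for', topic, store)
}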
diff --git a/packages/dd-trace/src/plugins/producer.js b/packages/dd-trace/src/plugins/producer.js
index 13b9bd84d20..3fc277dbc64 100644
--- a/packages/dd-trace/src/plugins/producer.js
+++ b/packages/dd-trace/src/plugins/producer.js
@@ -7,7 +7,7 @@ class ProducerPlugin extends OutboundPlugin {
   static get kind () { return 'producer' }
   static get type () { return 'messaging' }
 
-  startSpan (options) {
+  startSpan (options, ctx) {
     const spanDefaults = {
       kind: this.constructor.kind
     }
@@ -19,7 +19,7 @@ class ProducerPlugin extends OutboundPlugin {
         if (!options[key]) options[key] = spanDefaults[key]
       }
     )
-    return super.startSpan(this.operationName(), options)
+    return super.startSpan(this.operationName(), options, ctx)
   }
 }
diff --git a/packages/dd-trace/src/plugins/tracing.js b/packages/dd-trace/src/plugins/tracing.js
index e9823fd5d3e..03d29769bc2 100644
--- a/packages/dd-trace/src/plugins/tracing.js
+++ b/packages/dd-trace/src/plugins/tracing.js
@@ -60,7 +60,7 @@ class TracingPlugin extends Plugin {
 
   error (ctxOrError) {
     if (ctxOrError?.currentStore) {
-      ctxOrError.currentStore?.span.setTag('error', ctxOrError?.error)
+      ctxOrError.currentStore?.span.setTag('error', ctxOrError?.error || ctxOrError?.err)
       return
     }
     this.addError(ctxOrError)
diff --git a/packages/dd-trace/test/appsec/iast/context/context-plugin.spec.js b/packages/dd-trace/test/appsec/iast/context/context-plugin.spec.js
index db5f76987e3..4e00128328f 100644
--- a/packages/dd-trace/test/appsec/iast/context/context-plugin.spec.js
+++ b/packages/dd-trace/test/appsec/iast/context/context-plugin.spec.js
@@ -125,26 +125,24 @@ describe('IastContextPlugin', () => {
     })
 
     it('should obtain needed info from data before starting iast context', () => {
-      const data = {}
-
       sinon.stub(plugin, 'getTopContext').returns(topContext)
       sinon.stub(plugin, 'getRootSpan').returns(rootSpan)
 
-      plugin.startContext(data)
+      plugin.startContext()
 
      expect(plugin.getTopContext).to.be.calledOnce
       expect(plugin.getRootSpan).to.be.calledWith(store)
     })
 
     it('should call overheadController before starting iast context', () => {
-      plugin.startContext({})
+      plugin.startContext()
 
       expect(acquireRequest).to.be.calledOnceWith(rootSpan)
     })
 
     it('should add _dd.iast.enabled:0 tag in the rootSpan', () => {
       const addTags = sinon.stub(rootSpan, 'addTags')
 
-      plugin.startContext({})
+      plugin.startContext()
 
       expect(addTags).to.be.calledOnceWith({ [IAST_ENABLED_TAG_KEY]: 0 })
     })
@@ -152,7 +150,7 @@ describe('IastContextPlugin', () => {
     it('should not fail if store does not contain span', () => {
       getStore.returns({})
 
-      plugin.startContext({})
+      plugin.startContext()
 
       expect(acquireRequest).to.be.calledOnceWith(undefined)
     })
@@ -171,28 +169,26 @@ describe('IastContextPlugin', () => {
     it('should add _dd.iast.enabled: 1 tag in the rootSpan', () => {
       const addTags = sinon.stub(rootSpan, 'addTags')
 
-      plugin.startContext({})
+      plugin.startContext()
 
       expect(addTags).to.be.calledOnceWith({ [IAST_ENABLED_TAG_KEY]: 1 })
     })
 
     it('should create and save new IAST context and store it', () => {
-      const data = {}
-      plugin.startContext(data)
+      plugin.startContext()
 
       expect(newIastContext).to.be.calledOnceWith(rootSpan)
       expect(saveIastContext).to.be.calledOnceWith(store, topContext, context)
     })
 
     it('should create new taint-tracking transaction', () => {
-      const data = {}
-      plugin.startContext(data)
+      plugin.startContext()
 
       expect(createTransaction).to.be.calledOnceWith('span-id', context)
     })
 
     it('should obtain needed info from data before starting iast context', () => {
-      plugin.startContext({})
+      plugin.startContext()
 
       expect(initializeRequestContext).to.be.calledOnceWith(context)
    })
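Finally, TracingPlugin#error accepts both spellings of the error field: older publishers set ctx.error, while the contexts introduced in this PR set ctx.err, so the span tag falls back from one to the other. A condensed restatement of the behavior, for illustration only:

// Condensed view of the fallback added to TracingPlugin#error above.
function errorTag (ctxOrError) {
  if (ctxOrError?.currentStore) {
    ctxOrError.currentStore.span.setTag('error', ctxOrError.error || ctxOrError.err)
    return true // tagged the active span directly
  }
  return false // plain Error objects keep flowing through addError()
}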