From 4f895c71ffef18cbd12e382dca8c7536ad653fde Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Thu, 10 Jul 2025 15:25:41 -0500 Subject: [PATCH 01/29] DSM optimizations - major refactoring to get rid of LinkedHashMap --- .../DefaultDataStreamsMonitoring.java | 53 +-- .../datastreams/DefaultPathwayContext.java | 103 ++---- .../MsgPackDatastreamsPayloadWriter.java | 46 ++- .../trace/core/datastreams/StatsBucket.java | 10 +- .../trace/core/datastreams/StatsGroup.java | 16 +- .../trace/core/datastreams/TagsProcessor.java | 142 -------- .../AgentDataStreamsMonitoring.java | 3 +- .../trace/api/datastreams/Backlog.java | 12 +- .../api/datastreams/DataStreamsContext.java | 47 +-- .../api/datastreams/DataStreamsTags.java | 333 ++++++++++++++++++ .../datastreams/DataStreamsTagsProcessor.java | 5 + .../NoopDataStreamsMonitoring.java | 3 +- .../trace/api/datastreams/StatsPoint.java | 14 +- 13 files changed, 448 insertions(+), 339 deletions(-) delete mode 100644 dd-trace-core/src/main/java/datadog/trace/core/datastreams/TagsProcessor.java create mode 100644 internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java create mode 100644 internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsProcessor.java diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java index d92fac69e87..4bf30213a2b 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java @@ -3,12 +3,6 @@ import static datadog.communication.ddagent.DDAgentFeaturesDiscovery.V01_DATASTREAMS_ENDPOINT; import static datadog.trace.api.datastreams.DataStreamsContext.fromTags; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.MANUAL_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.util.AgentThreadFactory.AgentThread.DATA_STREAMS_MONITORING; import static datadog.trace.util.AgentThreadFactory.THREAD_JOIN_TIMOUT_MS; import static datadog.trace.util.AgentThreadFactory.newAgentThread; @@ -19,12 +13,7 @@ import datadog.trace.api.Config; import datadog.trace.api.TraceConfig; import datadog.trace.api.WellKnownTags; -import datadog.trace.api.datastreams.Backlog; -import datadog.trace.api.datastreams.DataStreamsContext; -import datadog.trace.api.datastreams.InboxItem; -import datadog.trace.api.datastreams.NoopPathwayContext; -import datadog.trace.api.datastreams.PathwayContext; -import datadog.trace.api.datastreams.StatsPoint; +import datadog.trace.api.datastreams.*; import datadog.trace.api.experimental.DataStreamsContextCarrier; import datadog.trace.api.time.TimeSource; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -36,11 +25,9 @@ import datadog.trace.core.DDSpan; import datadog.trace.core.DDTraceCoreInfo; import datadog.trace.util.AgentTaskScheduler; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; -import 
java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -57,9 +44,9 @@ public class DefaultDataStreamsMonitoring implements DataStreamsMonitoring, Even static final long FEATURE_CHECK_INTERVAL_NANOS = TimeUnit.MINUTES.toNanos(5); private static final StatsPoint REPORT = - new StatsPoint(Collections.emptyList(), 0, 0, 0, 0, 0, 0, 0, null); + new StatsPoint(DataStreamsTags.EMPTY, 0, 0, 0, 0, 0, 0, 0, null); private static final StatsPoint POISON_PILL = - new StatsPoint(Collections.emptyList(), 0, 0, 0, 0, 0, 0, 0, null); + new StatsPoint(DataStreamsTags.EMPTY, 0, 0, 0, 0, 0, 0, 0, null); private final Map> timeToBucket = new HashMap<>(); private final MpscArrayQueue inbox = new MpscArrayQueue<>(1024); @@ -223,15 +210,7 @@ public void mergePathwayContextIntoSpan(AgentSpan span, DataStreamsContextCarrie } } - public void trackBacklog(LinkedHashMap sortedTags, long value) { - List tags = new ArrayList<>(sortedTags.size()); - for (Map.Entry entry : sortedTags.entrySet()) { - String tag = TagsProcessor.createTag(entry.getKey(), entry.getValue()); - if (tag == null) { - continue; - } - tags.add(tag); - } + public void trackBacklog(DataStreamsTags tags, long value) { inbox.offer(new Backlog(tags, value, timeSource.getCurrentTimeNanos(), getThreadServiceName())); } @@ -256,14 +235,9 @@ public void setConsumeCheckpoint(String type, String source, DataStreamsContextC return; } mergePathwayContextIntoSpan(span, carrier); - - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); - sortedTags.put(MANUAL_TAG, "true"); - sortedTags.put(TOPIC_TAG, source); - sortedTags.put(TYPE_TAG, type); - - setCheckpoint(span, fromTags(sortedTags)); + setCheckpoint( + span, + fromTags(DataStreamsTags.Create(type, DataStreamsTags.Direction.Inbound, source, true))); } public void setProduceCheckpoint( @@ -279,15 +253,10 @@ public void setProduceCheckpoint( return; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_OUT); - if (manualCheckpoint) { - sortedTags.put(MANUAL_TAG, "true"); - } - sortedTags.put(TOPIC_TAG, target); - sortedTags.put(TYPE_TAG, type); - - DataStreamsContext dsmContext = fromTags(sortedTags); + DataStreamsContext dsmContext = + fromTags( + DataStreamsTags.Create( + type, DataStreamsTags.Direction.Outbound, target, manualCheckpoint)); this.propagator.inject( span.with(dsmContext), carrier, DataStreamsContextCarrierAdapter.INSTANCE); } diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java index 3d5a5266bf6..6420863a67a 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java @@ -12,19 +12,13 @@ import datadog.trace.api.ProcessTags; import datadog.trace.api.WellKnownTags; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; import datadog.trace.api.datastreams.PathwayContext; import datadog.trace.api.datastreams.StatsPoint; import datadog.trace.api.time.TimeSource; import datadog.trace.util.FNV64Hash; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; import java.util.Base64; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; import 
java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -51,23 +45,7 @@ public class DefaultPathwayContext implements PathwayContext { // state variables used to memoize the pathway hash with // direction != current direction private long closestOppositeDirectionHash; - private String previousDirection; - - private static final Set hashableTagKeys = - new HashSet( - Arrays.asList( - TagsProcessor.GROUP_TAG, - TagsProcessor.TYPE_TAG, - TagsProcessor.DIRECTION_TAG, - TagsProcessor.TOPIC_TAG, - TagsProcessor.EXCHANGE_TAG)); - - private static final Set extraAggregationTagKeys = - new HashSet( - Arrays.asList( - TagsProcessor.DATASET_NAME_TAG, - TagsProcessor.DATASET_NAMESPACE_TAG, - TagsProcessor.MANUAL_TAG)); + private DataStreamsTags.Direction previousDirection; public DefaultPathwayContext( TimeSource timeSource, long hashOfKnownTags, String serviceNameOverride) { @@ -109,13 +87,8 @@ public synchronized void setCheckpoint( long startNanos = timeSource.getCurrentTimeNanos(); long nanoTicks = timeSource.getNanoTicks(); - // So far, each tag key has only one tag value, so we're initializing the capacity to match - // the number of tag keys for now. We should revisit this later if it's no longer the case. - LinkedHashMap sortedTags = context.sortedTags(); - List allTags = new ArrayList<>(sortedTags.size()); PathwayHashBuilder pathwayHashBuilder = new PathwayHashBuilder(hashOfKnownTags, serviceNameOverride); - DataSetHashBuilder aggregationHashBuilder = new DataSetHashBuilder(); if (!started) { long defaultTimestamp = context.defaultTimestamp(); @@ -135,43 +108,39 @@ public synchronized void setCheckpoint( log.debug("Started {}", this); } - for (Map.Entry entry : sortedTags.entrySet()) { - String tag = TagsProcessor.createTag(entry.getKey(), entry.getValue()); - if (tag == null) { - continue; - } - if (hashableTagKeys.contains(entry.getKey())) { - pathwayHashBuilder.addTag(tag); - } - if (extraAggregationTagKeys.contains(entry.getKey())) { - aggregationHashBuilder.addValue(tag); - } - allTags.add(tag); + // generate node hash + long nodeHash = hashOfKnownTags; + if (serviceNameOverride != null) { + nodeHash = FNV64Hash.continueHash(nodeHash, serviceNameOverride, FNV64Hash.Version.v1); } + nodeHash = + FNV64Hash.continueHash( + nodeHash, DataStreamsTags.longToBytes(context.tags().getHash()), FNV64Hash.Version.v1); - long nodeHash = generateNodeHash(pathwayHashBuilder); // loop protection - a node should not be chosen as parent // for a sequential node with the same direction, as this // will cause a `cardinality explosion` for hash / parentHash tag values - if (sortedTags.containsKey(TagsProcessor.DIRECTION_TAG)) { - String direction = sortedTags.get(TagsProcessor.DIRECTION_TAG); - if (direction.equals(previousDirection)) { - hash = closestOppositeDirectionHash; - } else { - previousDirection = direction; - closestOppositeDirectionHash = hash; - } + DataStreamsTags.Direction direction = context.tags().getDirection(); + if (direction == previousDirection) { + hash = closestOppositeDirectionHash; + } else { + previousDirection = direction; + closestOppositeDirectionHash = hash; } long newHash = generatePathwayHash(nodeHash, hash); - long aggregationHash = aggregationHashBuilder.addValue(newHash); + long aggregationHash = + FNV64Hash.continueHash( + context.tags().getAggregationHash(), + DataStreamsTags.longToBytes(newHash), + FNV64Hash.Version.v1); long pathwayLatencyNano = nanoTicks - pathwayStartNanoTicks; long edgeLatencyNano = nanoTicks - 
edgeStartNanoTicks; StatsPoint point = new StatsPoint( - allTags, + context.tags(), newHash, hash, aggregationHash, @@ -310,32 +279,6 @@ private static DefaultPathwayContext decode( serviceNameOverride); } - static class DataSetHashBuilder { - private long currentHash = 0L; - - public long addValue(String val) { - currentHash = FNV64Hash.generateHash(currentHash + val, FNV64Hash.Version.v1); - return currentHash; - } - - public long addValue(long val) { - byte[] b = - new byte[] { - (byte) val, - (byte) (val >> 8), - (byte) (val >> 16), - (byte) (val >> 24), - (byte) (val >> 32), - (byte) (val >> 40), - (byte) (val >> 48), - (byte) (val >> 56) - }; - - currentHash = FNV64Hash.continueHash(currentHash, b, FNV64Hash.Version.v1); - return currentHash; - } - } - private static class PathwayHashBuilder { private long hash; @@ -350,6 +293,10 @@ public void addTag(String tag) { hash = FNV64Hash.continueHash(hash, tag, FNV64Hash.Version.v1); } + public void addValue(long val) { + hash = FNV64Hash.continueHash(hash, DataStreamsTags.longToBytes(val), FNV64Hash.Version.v1); + } + public long getHash() { return hash; } diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java index 6dbf342b27b..37f5602680e 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java @@ -9,6 +9,7 @@ import datadog.trace.api.Config; import datadog.trace.api.ProcessTags; import datadog.trace.api.WellKnownTags; +import datadog.trace.api.datastreams.DataStreamsTags; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; import datadog.trace.common.metrics.Sink; import java.util.Collection; @@ -160,9 +161,7 @@ private void writeBucket(StatsBucket bucket, Writable packer) { Collection groups = bucket.getGroups(); packer.startArray(groups.size()); for (StatsGroup group : groups) { - boolean firstNode = group.getEdgeTags().isEmpty(); - - packer.startMap(firstNode ? 
5 : 6); + packer.startMap(6); /* 1 */ packer.writeUTF8(PATHWAY_LATENCY); @@ -184,29 +183,42 @@ private void writeBucket(StatsBucket bucket, Writable packer) { packer.writeUTF8(PARENT_HASH); packer.writeUnsignedLong(group.getParentHash()); - if (!firstNode) { - /* 6 */ - packer.writeUTF8(EDGE_TAGS); - packer.startArray(group.getEdgeTags().size()); - for (String tag : group.getEdgeTags()) { - packer.writeString(tag, null); - } - } + /* 6 */ + packer.writeUTF8(EDGE_TAGS); + writeDataStreamsTags(group.getTags(), packer); } } - private void writeBacklogs(Collection, Long>> backlogs, Writable packer) { + private void writeBacklogs( + Collection> backlogs, Writable packer) { packer.writeUTF8(BACKLOGS); packer.startArray(backlogs.size()); - for (Map.Entry, Long> entry : backlogs) { + for (Map.Entry entry : backlogs) { packer.startMap(2); + packer.writeUTF8(BACKLOG_TAGS); - packer.startArray(entry.getKey().size()); - for (String tag : entry.getKey()) { - packer.writeString(tag, null); - } + writeDataStreamsTags(entry.getKey(), packer); + packer.writeUTF8(BACKLOG_VALUE); packer.writeLong(entry.getValue()); } } + + private void writeStringIfNotEmpty(String name, String value, Writable packer) { + if (value == null || value.isEmpty()) { + return; + } + + packer.writeString(name + ":" + value, null); + } + + private void writeDataStreamsTags(DataStreamsTags tags, Writable packer) { + packer.startArray(tags.getSize()); + + tags.forEachTag( + (name, value) -> { + packer.writeString(name + ":" + value, null); + }, + DataStreamsTags.TagTraverseMode.All); + } } diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/StatsBucket.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/StatsBucket.java index 26bdd9ba105..c61550d0e3e 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/StatsBucket.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/StatsBucket.java @@ -1,17 +1,17 @@ package datadog.trace.core.datastreams; import datadog.trace.api.datastreams.Backlog; +import datadog.trace.api.datastreams.DataStreamsTags; import datadog.trace.api.datastreams.StatsPoint; import java.util.Collection; import java.util.HashMap; -import java.util.List; import java.util.Map; public class StatsBucket { private final long startTimeNanos; private final long bucketDurationNanos; private final Map hashToGroup = new HashMap<>(); - private final Map, Long> backlogs = new HashMap<>(); + private final Map backlogs = new HashMap<>(); public StatsBucket(long startTimeNanos, long bucketDurationNanos) { this.startTimeNanos = startTimeNanos; @@ -27,7 +27,7 @@ public void addPoint(StatsPoint statsPoint) { statsPoint.getAggregationHash(), hash -> new StatsGroup( - statsPoint.getEdgeTags(), statsPoint.getHash(), statsPoint.getParentHash())) + statsPoint.getTags(), statsPoint.getHash(), statsPoint.getParentHash())) .add( statsPoint.getPathwayLatencyNano(), statsPoint.getEdgeLatencyNano(), @@ -36,7 +36,7 @@ public void addPoint(StatsPoint statsPoint) { public void addBacklog(Backlog backlog) { backlogs.compute( - backlog.getSortedTags(), + backlog.getTags(), (k, v) -> (v == null) ? 
backlog.getValue() : Math.max(v, backlog.getValue())); } @@ -52,7 +52,7 @@ public Collection getGroups() { return hashToGroup.values(); } - public Collection, Long>> getBacklogs() { + public Collection> getBacklogs() { return backlogs.entrySet(); } } diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/StatsGroup.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/StatsGroup.java index b5923c36e6d..7831b58e365 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/StatsGroup.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/StatsGroup.java @@ -1,21 +1,21 @@ package datadog.trace.core.datastreams; +import datadog.trace.api.datastreams.DataStreamsTags; import datadog.trace.core.histogram.Histogram; import datadog.trace.core.histogram.Histograms; -import java.util.List; public class StatsGroup { private static final double NANOSECONDS_TO_SECOND = 1_000_000_000d; - private final List edgeTags; + private final DataStreamsTags tags; private final long hash; private final long parentHash; private final Histogram pathwayLatency; private final Histogram edgeLatency; private final Histogram payloadSize; - public StatsGroup(List edgeTags, long hash, long parentHash) { - this.edgeTags = edgeTags; + public StatsGroup(DataStreamsTags tags, long hash, long parentHash) { + this.tags = tags; this.hash = hash; this.parentHash = parentHash; pathwayLatency = Histograms.newLogHistogram(); @@ -31,8 +31,8 @@ public void add(long pathwayLatencyNano, long edgeLatencyNano, long payloadSizeB if (payloadSizeBytes != 0) payloadSize.accept((double) payloadSizeBytes); } - public List getEdgeTags() { - return edgeTags; + public DataStreamsTags getTags() { + return tags; } public long getHash() { @@ -58,8 +58,8 @@ public Histogram getPayloadSize() { @Override public String toString() { return "StatsGroup{" - + "edgeTags='" - + edgeTags + + "tags='" + + tags + '\'' + ", hash=" + hash diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/TagsProcessor.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/TagsProcessor.java deleted file mode 100644 index 1838b47239b..00000000000 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/TagsProcessor.java +++ /dev/null @@ -1,142 +0,0 @@ -package datadog.trace.core.datastreams; - -import datadog.trace.api.cache.DDCache; -import datadog.trace.api.cache.DDCaches; -import java.util.HashMap; -import java.util.Map; -import java.util.function.Function; - -public class TagsProcessor { - public static final class StringPrefix implements Function { - private final String prefix; - - public StringPrefix(String prefix) { - this.prefix = prefix; - } - - @Override - public String apply(String key) { - return prefix + key; - } - } - - public static final String MANUAL_TAG = "manual_checkpoint"; - public static final String TYPE_TAG = "type"; - private static final DDCache TYPE_TAG_CACHE = DDCaches.newFixedSizeCache(32); - private static final Function TYPE_TAG_PREFIX = new StringPrefix("type:"); - - public static final String DIRECTION_TAG = "direction"; - // service centric direction - data flowing into the service - public static final String DIRECTION_IN = "in"; - // service centric direction - data flowing out of the service - public static final String DIRECTION_OUT = "out"; - private static final DDCache DIRECTION_TAG_CACHE = DDCaches.newFixedSizeCache(32); - private static final Function DIRECTION_TAG_PREFIX = - new StringPrefix("direction:"); - // SNS Topic - public static 
final String TOPIC_TAG = "topic"; - private static final DDCache TOPIC_TAG_CACHE = DDCaches.newFixedSizeCache(32); - private static final Function TOPIC_TAG_PREFIX = new StringPrefix("topic:"); - // EventBridge Bus - public static final String BUS_TAG = "bus"; - private static final DDCache BUS_TAG_CACHE = DDCaches.newFixedSizeCache(32); - private static final Function BUS_TAG_PREFIX = new StringPrefix("bus:"); - - public static final String PARTITION_TAG = "partition"; - private static final DDCache PARTITION_TAG_CACHE = DDCaches.newFixedSizeCache(32); - private static final Function PARTITION_TAG_PREFIX = - new StringPrefix("partition:"); - public static final String GROUP_TAG = "group"; - public static final String CONSUMER_GROUP_TAG = "consumer_group"; - private static final DDCache GROUP_TAG_CACHE = DDCaches.newFixedSizeCache(32); - private static final DDCache CONSUMER_GROUP_TAG_CACHE = - DDCaches.newFixedSizeCache(32); - private static final Function GROUP_TAG_PREFIX = new StringPrefix("group:"); - private static final Function CONSUMER_GROUP_TAG_PREFIX = - new StringPrefix("consumer_group:"); - public static final String SUBSCRIPTION_TAG = "subscription"; - private static final DDCache SUBSCRIPTION_TAG_CACHE = - DDCaches.newFixedSizeCache(32); - private static final Function SUBSCRIPTION_TAG_PREFIX = - new StringPrefix("subscription:"); - public static final String EXCHANGE_TAG = "exchange"; - private static final DDCache EXCHANGE_TAG_CACHE = DDCaches.newFixedSizeCache(32); - private static final Function EXCHANGE_TAG_PREFIX = new StringPrefix("exchange:"); - - public static final String DATASET_NAME_TAG = "ds.name"; - private static final DDCache DATASET_NAME_TAG_CACHE = - DDCaches.newFixedSizeCache(32); - private static final Function DATASET_NAME_TAG_PREFIX = - new StringPrefix("ds.name:"); - - public static final String DATASET_NAMESPACE_TAG = "ds.namespace"; - private static final DDCache DATASET_NAMESPACE_TAG_CACHE = - DDCaches.newFixedSizeCache(32); - private static final Function DATASET_NAMESPACE_TAG_PREFIX = - new StringPrefix("ds.namespace:"); - - public static final String HAS_ROUTING_KEY_TAG = "has_routing_key"; - private static final DDCache HAS_ROUTING_KEY_TAG_CACHE = - DDCaches.newFixedSizeCache(2); // true or false - private static final Function HAS_ROUTING_KEY_TAG_PREFIX = - new StringPrefix("has_routing_key:"); - - public static final String KAFKA_CLUSTER_ID_TAG = "kafka_cluster_id"; - private static final DDCache KAFKA_CLUSTER_ID_TAG_CACHE = - DDCaches.newFixedSizeCache(32); - private static final Function KAFKA_CLUSTER_ID_TAG_PREFIX = - new StringPrefix("kafka_cluster_id:"); - - private static final Map> TAG_TO_CACHE = createTagToCacheMap(); - private static final Map> TAG_TO_PREFIX = createTagToPrefixMap(); - - private static Map> createTagToCacheMap() { - Map> result = new HashMap<>(); - result.put(TYPE_TAG, TYPE_TAG_CACHE); - result.put(DIRECTION_TAG, DIRECTION_TAG_CACHE); - result.put(TOPIC_TAG, TOPIC_TAG_CACHE); - result.put(BUS_TAG, BUS_TAG_CACHE); - result.put(PARTITION_TAG, PARTITION_TAG_CACHE); - result.put(GROUP_TAG, GROUP_TAG_CACHE); - result.put(CONSUMER_GROUP_TAG, CONSUMER_GROUP_TAG_CACHE); - result.put(SUBSCRIPTION_TAG, SUBSCRIPTION_TAG_CACHE); - result.put(EXCHANGE_TAG, EXCHANGE_TAG_CACHE); - result.put(HAS_ROUTING_KEY_TAG, HAS_ROUTING_KEY_TAG_CACHE); - result.put(KAFKA_CLUSTER_ID_TAG, KAFKA_CLUSTER_ID_TAG_CACHE); - result.put(DATASET_NAME_TAG, DATASET_NAME_TAG_CACHE); - result.put(DATASET_NAMESPACE_TAG, DATASET_NAMESPACE_TAG_CACHE); - return 
result; - } - - private static Map> createTagToPrefixMap() { - Map> result = new HashMap<>(); - result.put(TYPE_TAG, TYPE_TAG_PREFIX); - result.put(DIRECTION_TAG, DIRECTION_TAG_PREFIX); - result.put(TOPIC_TAG, TOPIC_TAG_PREFIX); - result.put(BUS_TAG, BUS_TAG_PREFIX); - result.put(PARTITION_TAG, PARTITION_TAG_PREFIX); - result.put(GROUP_TAG, GROUP_TAG_PREFIX); - result.put(CONSUMER_GROUP_TAG, CONSUMER_GROUP_TAG_PREFIX); - result.put(SUBSCRIPTION_TAG, SUBSCRIPTION_TAG_PREFIX); - result.put(EXCHANGE_TAG, EXCHANGE_TAG_PREFIX); - result.put(HAS_ROUTING_KEY_TAG, HAS_ROUTING_KEY_TAG_PREFIX); - result.put(KAFKA_CLUSTER_ID_TAG, KAFKA_CLUSTER_ID_TAG_PREFIX); - result.put(DATASET_NAME_TAG, DATASET_NAME_TAG_PREFIX); - result.put(DATASET_NAMESPACE_TAG, DATASET_NAMESPACE_TAG_PREFIX); - return result; - } - - // Creates the tag string using the provided tagKey and tagValue. - // Returns null if either tagKey or tagValue is null. - public static String createTag(String tagKey, String tagValue) { - if (tagKey == null || tagValue == null) { - return null; - } - DDCache cache = TAG_TO_CACHE.get(tagKey); - Function prefix = TAG_TO_PREFIX.get(tagKey); - if (cache != null && prefix != null) { - return cache.computeIfAbsent(tagValue, prefix); - } - return tagKey + ":" + tagValue; - } -} diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/AgentDataStreamsMonitoring.java b/internal-api/src/main/java/datadog/trace/api/datastreams/AgentDataStreamsMonitoring.java index e6ddac36bac..b7c51bd36ec 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/AgentDataStreamsMonitoring.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/AgentDataStreamsMonitoring.java @@ -4,10 +4,9 @@ import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.Schema; import datadog.trace.bootstrap.instrumentation.api.SchemaIterator; -import java.util.LinkedHashMap; public interface AgentDataStreamsMonitoring extends DataStreamsCheckpointer { - void trackBacklog(LinkedHashMap sortedTags, long value); + void trackBacklog(DataStreamsTags tags, long value); /** * Sets data streams checkpoint, used for both produce and consume operations. diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/Backlog.java b/internal-api/src/main/java/datadog/trace/api/datastreams/Backlog.java index 378ddf4b95c..8920e844d07 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/Backlog.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/Backlog.java @@ -1,13 +1,11 @@ package datadog.trace.api.datastreams; -import java.util.List; - // Backlog allows us to track the size of a queue in data streams. For example, by monitoring both // the consumer and the producer, // we can get the size in bytes of a Kafka queue. 
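// Illustrative usage sketch only (not part of this patch; "orders" and committedOffset are
// hypothetical names): a consumer integration could report a backlog through the new
// value-class API introduced here, e.g.
//   DataStreamsTags tags = DataStreamsTags.Create("kafka", DataStreamsTags.Direction.Inbound, "orders");
//   AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, committedOffset);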
public class Backlog implements InboxItem { - public List getSortedTags() { - return sortedTags; + public DataStreamsTags getTags() { + return tags; } public long getValue() { @@ -22,14 +20,14 @@ public String getServiceNameOverride() { return serviceNameOverride; } - private final List sortedTags; + private final DataStreamsTags tags; private final long value; private final long timestampNanos; private final String serviceNameOverride; public Backlog( - List sortedTags, long value, long timestampNanos, String serviceNameOverride) { - this.sortedTags = sortedTags; + DataStreamsTags tags, long value, long timestampNanos, String serviceNameOverride) { + this.tags = tags; this.value = value; this.timestampNanos = timestampNanos; this.serviceNameOverride = serviceNameOverride; diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java index 92d8c8a3b38..4809d985cde 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java @@ -3,28 +3,23 @@ import datadog.context.Context; import datadog.context.ContextKey; import datadog.context.ImplicitContextKeyed; -import java.util.LinkedHashMap; public class DataStreamsContext implements ImplicitContextKeyed { private static final ContextKey CONTEXT_KEY = ContextKey.named("dsm-context-key"); - private static final LinkedHashMap CLIENT_PATHWAY_EDGE_TAGS; - private static final LinkedHashMap SERVER_PATHWAY_EDGE_TAGS; + private static final DataStreamsTags CLIENT_PATHWAY_EDGE_TAGS; + private static final DataStreamsTags SERVER_PATHWAY_EDGE_TAGS; - final LinkedHashMap sortedTags; + final DataStreamsTags tags; final long defaultTimestamp; final long payloadSizeBytes; final boolean sendCheckpoint; static { - CLIENT_PATHWAY_EDGE_TAGS = new LinkedHashMap<>(2); - // TODO: Refactor TagsProcessor to move it into a package that we can link the constants for. - CLIENT_PATHWAY_EDGE_TAGS.put("direction", "out"); - CLIENT_PATHWAY_EDGE_TAGS.put("type", "http"); - SERVER_PATHWAY_EDGE_TAGS = new LinkedHashMap<>(2); - // TODO: Refactor TagsProcessor to move it into a package that we can link the constants for. - SERVER_PATHWAY_EDGE_TAGS.put("direction", "in"); - SERVER_PATHWAY_EDGE_TAGS.put("type", "http"); + CLIENT_PATHWAY_EDGE_TAGS = + DataStreamsTags.Create("http", DataStreamsTags.Direction.Outbound, null); + SERVER_PATHWAY_EDGE_TAGS = + DataStreamsTags.Create("http", DataStreamsTags.Direction.Inbound, null); } public static DataStreamsContext fromContext(Context context) { @@ -52,17 +47,17 @@ public static DataStreamsContext forHttpServer() { /** * Creates a DSM context. * - * @param sortedTags alphabetically sorted tags for the checkpoint (direction, queue type etc) + * @param tags DataStreamsTags object * @return the created context. */ - public static DataStreamsContext fromTags(LinkedHashMap sortedTags) { - return new DataStreamsContext(sortedTags, 0, 0, true); + public static DataStreamsContext fromTags(DataStreamsTags tags) { + return new DataStreamsContext(tags, 0, 0, true); } /** * Creates a DSM context. * - * @param sortedTags alphabetically sorted tags for the checkpoint (direction, queue type etc) + * @param tags object * @param defaultTimestamp unix timestamp to use as a start of the pathway if this is the first * checkpoint in the chain. 
   *     Zero should be passed if we can't extract the timestamp from the
   *     message / payload itself (for instance: produce operations; http produce / consume etc).
   * @param payloadSizeBytes size of the message (body + headers) in bytes; zero if unknown.
   * @return the created context.
   */
  public static DataStreamsContext create(
-      LinkedHashMap<String, String> sortedTags, long defaultTimestamp, long payloadSizeBytes) {
-    return new DataStreamsContext(sortedTags, defaultTimestamp, payloadSizeBytes, true);
+      DataStreamsTags tags, long defaultTimestamp, long payloadSizeBytes) {
+    return new DataStreamsContext(tags, defaultTimestamp, payloadSizeBytes, true);
  }

-  public static DataStreamsContext fromTagsWithoutCheckpoint(
-      LinkedHashMap<String, String> sortedTags) {
-    return new DataStreamsContext(sortedTags, 0, 0, false);
+  public static DataStreamsContext fromTagsWithoutCheckpoint(DataStreamsTags tags) {
+    return new DataStreamsContext(tags, 0, 0, false);
  }

  // That's basically a record for now
  private DataStreamsContext(
-      LinkedHashMap<String, String> sortedTags,
-      long defaultTimestamp,
-      long payloadSizeBytes,
-      boolean sendCheckpoint) {
-    this.sortedTags = sortedTags;
+      DataStreamsTags tags, long defaultTimestamp, long payloadSizeBytes, boolean sendCheckpoint) {
+    this.tags = tags;
    this.defaultTimestamp = defaultTimestamp;
    this.payloadSizeBytes = payloadSizeBytes;
    this.sendCheckpoint = sendCheckpoint;
  }

-  public LinkedHashMap<String, String> sortedTags() {
-    return this.sortedTags;
+  public DataStreamsTags tags() {
+    return this.tags;
  }

  public long defaultTimestamp() {
diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java
new file mode 100644
index 00000000000..c4eb78cb497
--- /dev/null
+++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java
@@ -0,0 +1,333 @@
+package datadog.trace.api.datastreams;
+
+import datadog.trace.util.FNV64Hash;
+
+public class DataStreamsTags {
+  public enum Direction {
+    Inbound,
+    Outbound,
+  }
+
+  public enum TagTraverseMode {
+    HashOnly,
+    GroupOnly,
+    ValueOnly,
+    All
+  }
+
+  public static final DataStreamsTags EMPTY = DataStreamsTags.Create(null, null, null);
+
+  // hash tags
+  private final String bus;
+  private final Direction direction;
+  private final String exchange;
+  private final String topic;
+  private final String type;
+  private final String subscription;
+  // additional grouping tags
+  private final String datasetName;
+  private final String datasetNamespace;
+  private final Boolean isManual;
+  // informational tags
+  private final String group;
+  private final Boolean hasRoutingKey;
+  private final String kafkaClusterId;
+  private final String partition;
+
+  private long hash;
+  private long aggregationHash;
+  private long completeHash;
+  private int size;
+
+  public static final String MANUAL_TAG = "manual_checkpoint";
+  public static final String TYPE_TAG = "type";
+  public static final String DIRECTION_TAG = "direction";
+  public static final String DIRECTION_IN = "in";
+  public static final String DIRECTION_OUT = "out";
+  public static final String TOPIC_TAG = "topic";
+  public static final String BUS_TAG = "bus";
+  public static final String PARTITION_TAG = "partition";
+  public static final String GROUP_TAG = "group";
+  public static final String CONSUMER_GROUP_TAG = "consumer_group";
+  public static final String SUBSCRIPTION_TAG = "subscription";
+  public static final String EXCHANGE_TAG = "exchange";
+  public static final String DATASET_NAME_TAG = "ds.name";
+  public static final String DATASET_NAMESPACE_TAG = "ds.namespace";
+  public static final String HAS_ROUTING_KEY_TAG = "has_routing_key";
+  public static final String KAFKA_CLUSTER_ID_TAG = "kafka_cluster_id";
+
+  public static DataStreamsTags Create(String type, Direction direction, String topic) {
+    return DataStreamsTags.Create(type, direction, topic, false);
+  }
+
+  public static DataStreamsTags Create(
+      String type, Direction direction, String topic, Boolean isManual) {
+    return new DataStreamsTags(
+        type, direction, topic, isManual, null, null, null, null, null, null, false, null, null);
+  }
+
+  public static byte[] longToBytes(long val) {
+    return new byte[] {
+      (byte) val,
+      (byte) (val >> 8),
+      (byte) (val >> 16),
+      (byte) (val >> 24),
+      (byte) (val >> 32),
+      (byte) (val >> 40),
+      (byte) (val >> 48),
+      (byte) (val >> 56)
+    };
+  }
+
+  private DataStreamsTags(
+      String type,
+      Direction direction,
+      String topic,
+      Boolean isManual,
+      String bus,
+      String exchange,
+      String subscription,
+      String datasetName,
+      String datasetNamespace,
+      String group,
+      Boolean hasRoutingKey,
+      String kafkaClusterId,
+      String partition) {
+    this.bus = bus;
+    this.direction = direction;
+    this.exchange = exchange;
+    this.topic = topic;
+    this.type = type;
+    this.subscription = subscription;
+    this.isManual = isManual;
+    this.datasetName = datasetName;
+    this.datasetNamespace = datasetNamespace;
+    this.group = group;
+    this.hasRoutingKey = hasRoutingKey;
+    this.kafkaClusterId = kafkaClusterId;
+    this.partition = partition;
+
+    this.size =
+        this.forEachTag(
+            (name, value) -> {
+              this.hash =
+                  FNV64Hash.continueHash(this.hash, name + ":" + value, FNV64Hash.Version.v1);
+            },
+            TagTraverseMode.HashOnly);
+
+    this.aggregationHash = this.hash;
+    this.size +=
+        this.forEachTag(
+            (name, value) -> {
+              this.aggregationHash =
+                  FNV64Hash.continueHash(
+                      this.aggregationHash, name + ":" + value, FNV64Hash.Version.v1);
+            },
+            TagTraverseMode.GroupOnly);
+
+    this.completeHash = aggregationHash;
+    this.size +=
+        this.forEachTag(
+            (name, value) -> {
+              this.completeHash =
+                  FNV64Hash.continueHash(
+                      this.completeHash, name + ":" + value, FNV64Hash.Version.v1);
+            },
+            TagTraverseMode.ValueOnly);
+  }
+
+  public int forEachTag(DataStreamsTagsProcessor processor, TagTraverseMode mode) {
+    int count = 0;
+
+    if (mode == TagTraverseMode.HashOnly || mode == TagTraverseMode.All) {
+      if (this.bus != null) {
+        processor.process(BUS_TAG, this.bus);
+        count += 1;
+      }
+
+      count += 1;
+      if (this.direction == Direction.Inbound) {
+        processor.process(DIRECTION_TAG, DIRECTION_IN);
+      } else {
+        processor.process(DIRECTION_TAG, DIRECTION_OUT);
+      }
+
+      if (this.exchange != null) {
+        count += 1;
+        processor.process(EXCHANGE_TAG, this.exchange);
+      }
+
+      // topic and type are always required, no need to check for null
+      count += 2;
+      processor.process(TOPIC_TAG, this.topic);
+      processor.process(TYPE_TAG, this.type);
+
+      if (this.subscription != null) {
+        count += 1;
+        processor.process(SUBSCRIPTION_TAG, this.subscription);
+      }
+    }
+
+    if (mode == TagTraverseMode.GroupOnly || mode == TagTraverseMode.All) {
+      count += 1;
+      processor.process(MANUAL_TAG, this.isManual.toString());
+
+      if (this.datasetName != null) {
+        count += 1;
+        processor.process(DATASET_NAME_TAG, this.datasetName);
+      }
+
+      if (this.datasetNamespace != null) {
+        count += 1;
+        processor.process(DATASET_NAMESPACE_TAG, this.datasetNamespace);
+      }
+    }
+
+    if (mode == TagTraverseMode.ValueOnly || mode == TagTraverseMode.All) {
+      if (this.hasRoutingKey != null) {
+        count += 1;
+        processor.process(HAS_ROUTING_KEY_TAG, this.hasRoutingKey.toString());
+      }
+      if (this.kafkaClusterId != null) {
+        count += 1;
+        processor.process(KAFKA_CLUSTER_ID_TAG, this.kafkaClusterId);
+      }
+      if (this.partition != null) {
+        count += 1;
+        processor.process(PARTITION_TAG, this.partition);
+      }
+    }
+
+    return count;
+  }
+
+  public Direction getDirection() {
+    return direction;
+  }
+
+  public String getTopic() {
+    return topic;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public Boolean isManual() {
+    return isManual;
+  }
+
+  public String getBus() {
+    return bus;
+  }
+
+  public String getExchange() {
+    return exchange;
+  }
+
+  public String getSubscription() {
+    return subscription;
+  }
+
+  public String getDatasetName() {
+    return datasetName;
+  }
+
+  public String getDatasetNamespace() {
+    return datasetNamespace;
+  }
+
+  public String getGroup() {
+    return group;
+  }
+
+  public String getPartition() {
+    return partition;
+  }
+
+  public String getKafkaClusterId() {
+    return kafkaClusterId;
+  }
+
+  public boolean getHasRoutingKey() {
+    return hasRoutingKey;
+  }
+
+  public long getHash() {
+    return hash;
+  }
+
+  public long getAggregationHash() {
+    return aggregationHash;
+  }
+
+  public long getCompleteHash() {
+    return completeHash;
+  }
+
+  public int getSize() {
+    return size;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    DataStreamsTags that = (DataStreamsTags) o;
+    return this.completeHash == that.completeHash;
+  }
+
+  @Override
+  public int hashCode() {
+    // must stay consistent with equals(): instances are used as HashMap keys
+    // (e.g. the backlogs map in StatsBucket), so hash the same value equals() compares
+    return Long.hashCode(completeHash);
+  }
+
+  @Override
+  public String toString() {
+    return "DataStreamsTags{"
+        + "bus='" + bus + '\''
+        + ", direction=" + direction
+        + ", exchange='" + exchange + '\''
+        + ", topic='" + topic + '\''
+        + ", type='" + type + '\''
+        + ", subscription='" + subscription + '\''
+        + ", datasetName='" + datasetName + '\''
+        + ", datasetNamespace='" + datasetNamespace + '\''
+        + ", isManual=" + isManual
+        + ", group='" + group + '\''
+        + ", hasRoutingKey=" + hasRoutingKey
+        + ", kafkaClusterId='" + kafkaClusterId + '\''
+        + ", partition='" + partition + '\''
+        + ", hash=" + hash
+        + ", aggregationHash=" + aggregationHash
+        + ", completeHash=" + completeHash
+        + ", size=" + size
+        + '}';
+  }
+}
diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsProcessor.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsProcessor.java
new file mode 100644
index 00000000000..ce147374b2a
--- /dev/null
+++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsProcessor.java
@@ -0,0 +1,5 @@
+package datadog.trace.api.datastreams;
+
+public interface DataStreamsTagsProcessor {
+  void process(String name, String value);
+}
diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/NoopDataStreamsMonitoring.java b/internal-api/src/main/java/datadog/trace/api/datastreams/NoopDataStreamsMonitoring.java
index 020b492639d..f5cdcb0c82f 100644
--- a/internal-api/src/main/java/datadog/trace/api/datastreams/NoopDataStreamsMonitoring.java
+++ b/internal-api/src/main/java/datadog/trace/api/datastreams/NoopDataStreamsMonitoring.java
@@ -4,13 +4,12 @@
 import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
 import datadog.trace.bootstrap.instrumentation.api.Schema;
 import datadog.trace.bootstrap.instrumentation.api.SchemaIterator;
-import java.util.LinkedHashMap;

 public class NoopDataStreamsMonitoring implements AgentDataStreamsMonitoring
{ public static final NoopDataStreamsMonitoring INSTANCE = new NoopDataStreamsMonitoring(); @Override - public void trackBacklog(LinkedHashMap sortedTags, long value) {} + public void trackBacklog(DataStreamsTags tags, long value) {} @Override public void setCheckpoint(AgentSpan span, DataStreamsContext context) {} diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/StatsPoint.java b/internal-api/src/main/java/datadog/trace/api/datastreams/StatsPoint.java index 9379267f46c..3e8f8aeab01 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/StatsPoint.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/StatsPoint.java @@ -1,9 +1,7 @@ package datadog.trace.api.datastreams; -import java.util.List; - public class StatsPoint implements InboxItem { - private final List edgeTags; + private final DataStreamsTags tags; private final long hash; private final long parentHash; private final long aggregationHash; @@ -14,7 +12,7 @@ public class StatsPoint implements InboxItem { private final String serviceNameOverride; public StatsPoint( - List edgeTags, + DataStreamsTags tags, long hash, long parentHash, long aggregationHash, @@ -23,7 +21,7 @@ public StatsPoint( long edgeLatencyNano, long payloadSizeBytes, String serviceNameOverride) { - this.edgeTags = edgeTags; + this.tags = tags; this.hash = hash; this.parentHash = parentHash; this.aggregationHash = aggregationHash; @@ -34,8 +32,8 @@ public StatsPoint( this.serviceNameOverride = serviceNameOverride; } - public List getEdgeTags() { - return edgeTags; + public DataStreamsTags getTags() { + return tags; } public long getHash() { @@ -74,7 +72,7 @@ public String getServiceNameOverride() { public String toString() { return "StatsPoint{" + "tags='" - + edgeTags + + tags + '\'' + ", hash=" + hash From 1a1e77ebba79751ee53dc1676a1e51c6af95dc44 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Fri, 11 Jul 2025 15:48:33 -0500 Subject: [PATCH 02/29] Refactored DSM tags for all integrations --- .../grpc/client/GrpcClientDecorator.java | 15 +- .../grpc/server/GrpcServerDecorator.java | 20 +- .../eventbridge/EventBridgeInterceptor.java | 24 +-- .../aws/v0/AwsSdkClientDecorator.java | 40 ++-- .../aws/v0/TracingRequestHandler.java | 22 +-- .../aws/v2/AwsSdkClientDecorator.java | 56 +++--- .../aws/v1/sns/SnsInterceptor.java | 20 +- .../aws/v2/sns/SnsInterceptor.java | 20 +- .../aws/v1/sqs/SqsInterceptor.java | 19 +- .../aws/v1/sqs/TracingIterator.java | 19 +- .../aws/v2/sqs/SqsInterceptor.java | 25 ++- .../aws/v2/sqs/TracingIterator.java | 20 +- .../googlepubsub/PubSubDecorator.java | 19 +- .../PublisherInstrumentation.java | 19 +- .../grpc/client/GrpcClientDecorator.java | 15 +- .../grpc/server/GrpcServerDecorator.java | 20 +- .../ConsumerCoordinatorInstrumentation.java | 29 ++- .../kafka_clients/KafkaProducerCallback.java | 25 ++- .../KafkaProducerInstrumentation.java | 28 ++- .../kafka_clients/TracingIterator.java | 29 ++- .../ConsumerCoordinatorAdvice.java | 29 ++- .../DDOffsetCommitCallback.java | 36 ++-- .../KafkaProducerCallback.java | 23 +-- .../kafka_clients38/PayloadSizeAdvice.java | 2 +- .../kafka_clients38/ProducerAdvice.java | 26 ++- .../kafka_clients38/TracingIterator.java | 29 ++- .../KafkaStreamTaskInstrumentation.java | 53 +++--- .../amqp/RabbitChannelInstrumentation.java | 24 ++- .../rabbitmq/amqp/RabbitDecorator.java | 20 +- .../spark/AbstractDatadogSparkListener.java | 25 +-- .../DefaultDataStreamsMonitoring.java | 26 ++- .../api/datastreams/DataStreamsContext.java | 10 +- 
.../api/datastreams/DataStreamsTags.java | 175 +++++++----------- .../datastreams/DataStreamsTagsBuilder.java | 95 ++++++++++ 34 files changed, 520 insertions(+), 537 deletions(-) create mode 100644 internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsBuilder.java diff --git a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java index 9fa7c98f0af..aa13260467c 100644 --- a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java +++ b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java @@ -3,9 +3,6 @@ import static datadog.context.propagation.Propagators.defaultPropagator; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import datadog.context.Context; import datadog.context.propagation.CarrierSetter; @@ -14,6 +11,8 @@ import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; @@ -23,7 +22,6 @@ import io.grpc.MethodDescriptor; import io.grpc.Status; import java.util.BitSet; -import java.util.LinkedHashMap; import java.util.Set; import java.util.function.Function; @@ -35,10 +33,11 @@ public class GrpcClientDecorator extends ClientDecorator { public static final CharSequence GRPC_MESSAGE = UTF8BytesString.create("grpc.message"); private static DataStreamsContext createDsmContext() { - LinkedHashMap result = new LinkedHashMap<>(); - result.put(DIRECTION_TAG, DIRECTION_OUT); - result.put(TYPE_TAG, "grpc"); - return DataStreamsContext.fromTags(result); + return DataStreamsContext.fromTags( + new DataStreamsTagsBuilder() + .withDirection(DataStreamsTags.Direction.Outbound) + .withType("grpc") + .build()); } public static final GrpcClientDecorator DECORATE = new GrpcClientDecorator(); diff --git a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java index 40c1ec23736..fc364131869 100644 --- a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java +++ b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java @@ -1,12 +1,10 @@ package datadog.trace.instrumentation.armeria.grpc.server; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import 
static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG;
-import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG;
-
 import datadog.trace.api.Config;
 import datadog.trace.api.cache.DDCache;
 import datadog.trace.api.cache.DDCaches;
+import datadog.trace.api.datastreams.DataStreamsTags;
+import datadog.trace.api.datastreams.DataStreamsTagsBuilder;
 import datadog.trace.api.naming.SpanNaming;
 import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
 import datadog.trace.bootstrap.instrumentation.api.ErrorPriorities;
@@ -18,7 +16,6 @@
 import io.grpc.StatusException;
 import io.grpc.StatusRuntimeException;
 import java.util.BitSet;
-import java.util.LinkedHashMap;
 import java.util.function.Function;

 public class GrpcServerDecorator extends ServerDecorator {
@@ -33,15 +30,14 @@ public class GrpcServerDecorator extends ServerDecorator {
   public static final CharSequence COMPONENT_NAME = UTF8BytesString.create("armeria-grpc-server");
   public static final CharSequence GRPC_MESSAGE = UTF8BytesString.create("grpc.message");

-  private static final LinkedHashMap<String, String> createServerPathwaySortedTags() {
-    LinkedHashMap<String, String> result = new LinkedHashMap<>();
-    result.put(DIRECTION_TAG, DIRECTION_IN);
-    result.put(TYPE_TAG, "grpc");
-    return result;
+  private static final DataStreamsTags createServerPathwaySortedTags() {
+    return new DataStreamsTagsBuilder()
+        .withDirection(DataStreamsTags.Direction.Inbound)
+        .withType("grpc")
+        .build();
   }

-  public static final LinkedHashMap<String, String> SERVER_PATHWAY_EDGE_TAGS =
-      createServerPathwaySortedTags();
+  public static final DataStreamsTags SERVER_PATHWAY_EDGE_TAGS = createServerPathwaySortedTags();

   public static final GrpcServerDecorator DECORATE = new GrpcServerDecorator();
   private static final Function<String, String> NORMALIZE =
diff --git a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java
index a7ca3e02bb7..a6229f9d153 100644
--- a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java
+++ b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java
@@ -2,18 +2,15 @@
 import static datadog.context.propagation.Propagators.defaultPropagator;
 import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig;
-import static datadog.trace.core.datastreams.TagsProcessor.BUS_TAG;
-import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT;
-import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG;
-import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG;
 import static datadog.trace.instrumentation.aws.v2.eventbridge.TextMapInjectAdapter.SETTER;

 import datadog.trace.api.datastreams.DataStreamsContext;
+import datadog.trace.api.datastreams.DataStreamsTags;
+import datadog.trace.api.datastreams.DataStreamsTagsBuilder;
 import datadog.trace.api.datastreams.PathwayContext;
 import datadog.trace.bootstrap.InstanceStore;
 import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.List;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -89,7 +86,13 @@ private String getTraceContextToInject(
     // Inject context
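    // (The active span serves as the base context; when Data Streams Monitoring is enabled the
    // pathway tags are attached to it, so the single inject() call below serializes both the
    // trace context and the DSM pathway context into the JSON trace-context payload built here.)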
datadog.context.Context context = span; if (traceConfig().isDataStreamsEnabled()) { - DataStreamsContext dsmContext = DataStreamsContext.fromTags(getTags(eventBusName)); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withDirection(DataStreamsTags.Direction.Outbound) + .withType("bus") + .withBus(eventBusName) + .build(); + DataStreamsContext dsmContext = DataStreamsContext.fromTags(tags); context = context.with(dsmContext); } defaultPropagator().inject(context, jsonBuilder, SETTER); @@ -111,13 +114,4 @@ private String getTraceContextToInject( jsonBuilder.append('}'); return jsonBuilder.toString(); } - - private LinkedHashMap getTags(String eventBusName) { - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_OUT); - sortedTags.put(BUS_TAG, eventBusName); - sortedTags.put(TYPE_TAG, "bus"); - - return sortedTags; - } } diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java index 9d73e248534..5e22b911182 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java @@ -3,7 +3,6 @@ import static datadog.trace.api.datastreams.DataStreamsContext.create; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; import static datadog.trace.bootstrap.instrumentation.api.ResourceNamePriorities.RPC_COMMAND_NAME; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.AmazonWebServiceResponse; @@ -15,6 +14,8 @@ import datadog.trace.api.DDTags; import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -23,9 +24,7 @@ import datadog.trace.bootstrap.instrumentation.api.Tags; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; import datadog.trace.bootstrap.instrumentation.decorator.HttpClientDecorator; -import datadog.trace.core.datastreams.TagsProcessor; import java.net.URI; -import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.regex.Matcher; @@ -255,17 +254,17 @@ && traceConfig().isDataStreamsEnabled()) { if (HttpMethodName.GET.name().equals(span.getTag(Tags.HTTP_METHOD)) && ("GetObjectMetadataRequest".equalsIgnoreCase(awsOperation) || "GetObjectRequest".equalsIgnoreCase(awsOperation))) { - LinkedHashMap sortedTags = new LinkedHashMap<>(); - - sortedTags.put(TagsProcessor.DIRECTION_TAG, TagsProcessor.DIRECTION_IN); - sortedTags.put(TagsProcessor.DATASET_NAME_TAG, key); - sortedTags.put(TagsProcessor.DATASET_NAMESPACE_TAG, bucket); - sortedTags.put(TagsProcessor.TOPIC_TAG, bucket); - sortedTags.put(TagsProcessor.TYPE_TAG, "s3"); - + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("s3") + .withDirection(DataStreamsTags.Direction.Inbound) + .withTopic(bucket) + .withDatasetNamespace(bucket) + .withDatasetName(key) + .build(); AgentTracer.get() 
.getDataStreamsMonitoring() - .setCheckpoint(span, create(sortedTags, 0, responseSize)); + .setCheckpoint(span, create(tags, 0, responseSize)); } if ("PutObjectRequest".equalsIgnoreCase(awsOperation) @@ -276,17 +275,18 @@ && traceConfig().isDataStreamsEnabled()) { payloadSize = (long) requestSize; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - - sortedTags.put(TagsProcessor.DIRECTION_TAG, DIRECTION_OUT); - sortedTags.put(TagsProcessor.DATASET_NAME_TAG, key); - sortedTags.put(TagsProcessor.DATASET_NAMESPACE_TAG, bucket); - sortedTags.put(TagsProcessor.TOPIC_TAG, bucket); - sortedTags.put(TagsProcessor.TYPE_TAG, "s3"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("s3") + .withDirection(DataStreamsTags.Direction.Outbound) + .withTopic(bucket) + .withDatasetNamespace(bucket) + .withDatasetName(key) + .build(); AgentTracer.get() .getDataStreamsMonitoring() - .setCheckpoint(span, create(sortedTags, 0, payloadSize)); + .setCheckpoint(span, create(tags, 0, payloadSize)); } } } diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java index 1e1db786bba..b0774cd0a93 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java @@ -6,10 +6,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.blackholeSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.aws.v0.AwsSdkClientDecorator.AWS_LEGACY_TRACING; import static datadog.trace.instrumentation.aws.v0.AwsSdkClientDecorator.DECORATE; @@ -20,14 +16,11 @@ import com.amazonaws.handlers.RequestHandler2; import datadog.context.propagation.Propagators; import datadog.trace.api.Config; -import datadog.trace.api.datastreams.AgentDataStreamsMonitoring; -import datadog.trace.api.datastreams.DataStreamsContext; -import datadog.trace.api.datastreams.PathwayContext; +import datadog.trace.api.datastreams.*; import datadog.trace.bootstrap.ContextStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.util.Date; -import java.util.LinkedHashMap; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -116,16 +109,19 @@ && traceConfig().isDataStreamsEnabled() List records = GetterAccess.of(response.getAwsResponse()).getRecords(response.getAwsResponse()); if (null != records) { - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); - sortedTags.put(TOPIC_TAG, streamArn); - sortedTags.put(TYPE_TAG, "kinesis"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("kinesis") + .withDirection(DataStreamsTags.Direction.Inbound) + .withTopic(streamArn) + .build(); + for (Object record : 
records) { Date arrivalTime = GetterAccess.of(record).getApproximateArrivalTimestamp(record); AgentDataStreamsMonitoring dataStreamsMonitoring = AgentTracer.get().getDataStreamsMonitoring(); PathwayContext pathwayContext = dataStreamsMonitoring.newPathwayContext(); - DataStreamsContext context = create(sortedTags, arrivalTime.getTime(), 0); + DataStreamsContext context = create(tags, arrivalTime.getTime(), 0); pathwayContext.setCheckpoint(context, dataStreamsMonitoring::add); if (!span.context().getPathwayContext().isStarted()) { span.context().mergePathwayContext(pathwayContext); diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java index 5ac706df645..54a509ad8a4 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java @@ -2,11 +2,6 @@ import static datadog.trace.api.datastreams.DataStreamsContext.create; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import datadog.context.propagation.CarrierSetter; import datadog.trace.api.Config; @@ -15,6 +10,8 @@ import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.AgentDataStreamsMonitoring; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.datastreams.PathwayContext; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.InstanceStore; @@ -25,7 +22,6 @@ import datadog.trace.bootstrap.instrumentation.api.Tags; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; import datadog.trace.bootstrap.instrumentation.decorator.HttpClientDecorator; -import datadog.trace.core.datastreams.TagsProcessor; import datadog.trace.payloadtags.PayloadTagsData; import java.net.URI; import java.time.Instant; @@ -33,7 +29,6 @@ import java.util.Collection; import java.util.Collections; import java.util.HashSet; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -338,10 +333,13 @@ public AgentSpan onSdkResponse( //noinspection unchecked List records = (List) recordsRaw; if (!records.isEmpty()) { - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); - sortedTags.put(TOPIC_TAG, streamArn); - sortedTags.put(TYPE_TAG, "kinesis"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("kinesis") + .withDirection(DataStreamsTags.Direction.Inbound) + .withTopic(streamArn) + .build(); + if (null == kinesisApproximateArrivalTimestampField) { Optional> maybeField = records.get(0).sdkFields().stream() @@ -363,7 +361,7 @@ public AgentSpan onSdkResponse( AgentTracer.get().getDataStreamsMonitoring(); PathwayContext pathwayContext = dataStreamsMonitoring.newPathwayContext(); 
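// setCheckpoint takes the context plus a consumer that receives the resulting
// StatsPoint; the method reference below feeds each point straight into the
// monitor. A sketch of the same call with an explicit lambda (assuming add()
// accepts a StatsPoint, which is what the method reference implies):
//   pathwayContext.setCheckpoint(
//       create(tags, arrivalTime.toEpochMilli(), 0),
//       point -> dataStreamsMonitoring.add(point));
// Each record gets its own pathway context; it is merged into the span only
// while the span's own pathway context has not yet started.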
pathwayContext.setCheckpoint( - create(sortedTags, arrivalTime.toEpochMilli(), 0), + create(tags, arrivalTime.toEpochMilli(), 0), dataStreamsMonitoring::add); if (!span.context().getPathwayContext().isStarted()) { span.context().mergePathwayContext(pathwayContext); @@ -384,17 +382,18 @@ public AgentSpan onSdkResponse( if (key != null && bucket != null && awsOperation != null) { if ("GetObject".equalsIgnoreCase(awsOperation)) { - LinkedHashMap sortedTags = new LinkedHashMap<>(); - - sortedTags.put(TagsProcessor.DIRECTION_TAG, TagsProcessor.DIRECTION_IN); - sortedTags.put(TagsProcessor.DATASET_NAME_TAG, key); - sortedTags.put(TagsProcessor.DATASET_NAMESPACE_TAG, bucket); - sortedTags.put(TagsProcessor.TOPIC_TAG, bucket); - sortedTags.put(TagsProcessor.TYPE_TAG, "s3"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withDirection(DataStreamsTags.Direction.Inbound) + .withType("s3") + .withDatasetName(key) + .withDatasetNamespace(bucket) + .withTopic(bucket) + .build(); AgentTracer.get() .getDataStreamsMonitoring() - .setCheckpoint(span, create(sortedTags, 0, responseSize)); + .setCheckpoint(span, create(tags, 0, responseSize)); } if ("PutObject".equalsIgnoreCase(awsOperation)) { @@ -404,17 +403,18 @@ public AgentSpan onSdkResponse( payloadSize = (long) requestSize; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - - sortedTags.put(TagsProcessor.DIRECTION_TAG, DIRECTION_OUT); - sortedTags.put(TagsProcessor.DATASET_NAME_TAG, key); - sortedTags.put(TagsProcessor.DATASET_NAMESPACE_TAG, bucket); - sortedTags.put(TagsProcessor.TOPIC_TAG, bucket); - sortedTags.put(TagsProcessor.TYPE_TAG, "s3"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("s3") + .withDirection(DataStreamsTags.Direction.Outbound) + .withDatasetName(key) + .withDatasetNamespace(bucket) + .withTopic(bucket) + .build(); AgentTracer.get() .getDataStreamsMonitoring() - .setCheckpoint(span, create(sortedTags, 0, payloadSize)); + .setCheckpoint(span, create(tags, 0, payloadSize)); } } } diff --git a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sns/SnsInterceptor.java b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sns/SnsInterceptor.java index 3d1333a777f..be56c9b864c 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sns/SnsInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sns/SnsInterceptor.java @@ -2,10 +2,6 @@ import static datadog.context.propagation.Propagators.defaultPropagator; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.aws.v1.sns.TextMapInjectAdapter.SETTER; import com.amazonaws.AmazonWebServiceRequest; @@ -16,13 +12,14 @@ import com.amazonaws.services.sns.model.PublishRequest; import datadog.context.Context; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.ContextStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import 
datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.Map; public class SnsInterceptor extends RequestHandler2 { @@ -114,12 +111,11 @@ private AgentSpan newSpan(AmazonWebServiceRequest request) { return span; } - private LinkedHashMap<String, String> getTags(String snsTopicName) { - LinkedHashMap<String, String> sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_OUT); - sortedTags.put(TOPIC_TAG, snsTopicName); - sortedTags.put(TYPE_TAG, "sns"); - - return sortedTags; + private DataStreamsTags getTags(String snsTopicName) { + return new DataStreamsTagsBuilder() + .withType("sns") + .withDirection(DataStreamsTags.Direction.Outbound) + .withTopic(snsTopicName) + .build(); } } diff --git a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sns/SnsInterceptor.java b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sns/SnsInterceptor.java index 20143055b2a..7a6ee008cf7 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sns/SnsInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sns/SnsInterceptor.java @@ -2,19 +2,16 @@ import static datadog.context.propagation.Propagators.defaultPropagator; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.aws.v2.sns.TextMapInjectAdapter.SETTER; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstanceStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.Map; import software.amazon.awssdk.core.SdkBytes; import software.amazon.awssdk.core.SdkRequest; @@ -106,12 +103,11 @@ public SdkRequest modifyRequest( return context.request(); } - private LinkedHashMap<String, String> getTags(String snsTopicName) { - LinkedHashMap<String, String> sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_OUT); - sortedTags.put(TOPIC_TAG, snsTopicName); - sortedTags.put(TYPE_TAG, "sns"); - - return sortedTags; + private DataStreamsTags getTags(String snsTopicName) { + return new DataStreamsTagsBuilder() + .withType("sns") + .withDirection(DataStreamsTags.Direction.Outbound) + .withTopic(snsTopicName) + .build(); } } diff --git a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsInterceptor.java b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsInterceptor.java index 4b353f12591..18ef0f5dcbd 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsInterceptor.java
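The SQS interceptors below get the same mechanical rewrite as the SNS ones above: the insertion-ordered tag map is replaced by a fixed-field value object. A before/after sketch of the recurring pattern, using the SQS names from the hunks that follow (DataStreamsTagsBuilder is the fluent API this patch introduces; its internals are not shown here):

    // before: tags accumulated in an insertion-ordered map, one entry per tag
    LinkedHashMap<String, String> sortedTags = new LinkedHashMap<>();
    sortedTags.put(DIRECTION_TAG, DIRECTION_OUT);
    sortedTags.put(TOPIC_TAG, urlFileName(queueUrl));
    sortedTags.put(TYPE_TAG, "sqs");

    // after: one immutable DataStreamsTags value built per checkpoint
    DataStreamsTags tags =
        new DataStreamsTagsBuilder()
            .withType("sqs")
            .withDirection(DataStreamsTags.Direction.Outbound)
            .withTopic(urlFileName(queueUrl))
            .build();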
@@ -4,10 +4,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentPropagation.DSM_CONCERN; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.api.URIUtils.urlFileName; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.aws.v1.sqs.MessageAttributeInjector.SETTER; import com.amazonaws.AmazonWebServiceRequest; @@ -21,11 +17,12 @@ import datadog.context.propagation.Propagator; import datadog.context.propagation.Propagators; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.ContextStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import java.util.ArrayList; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -95,11 +92,11 @@ private AgentSpan newSpan(AmazonWebServiceRequest request) { return span; } - private static LinkedHashMap getTags(String queueUrl) { - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_OUT); - sortedTags.put(TOPIC_TAG, urlFileName(queueUrl)); - sortedTags.put(TYPE_TAG, "sqs"); - return sortedTags; + private static DataStreamsTags getTags(String queueUrl) { + return new DataStreamsTagsBuilder() + .withType("sqs") + .withDirection(DataStreamsTags.Direction.Outbound) + .withTopic(urlFileName(queueUrl)) + .build(); } } diff --git a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/TracingIterator.java b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/TracingIterator.java index 8eb154800c9..8cf74530733 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/TracingIterator.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/TracingIterator.java @@ -6,10 +6,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.closePrevious; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.api.URIUtils.urlFileName; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.aws.v1.sqs.MessageExtractAdapter.GETTER; import static datadog.trace.instrumentation.aws.v1.sqs.SqsDecorator.BROKER_DECORATE; import static datadog.trace.instrumentation.aws.v1.sqs.SqsDecorator.CONSUMER_DECORATE; @@ -20,11 +16,12 @@ import com.amazonaws.services.sqs.model.Message; import datadog.trace.api.Config; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; import 
datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.util.Iterator; -import java.util.LinkedHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -89,11 +86,13 @@ protected void startNewMessageSpan(Message message) { } AgentSpan span = startSpan(SQS_INBOUND_OPERATION, batchContext); - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); - sortedTags.put(TOPIC_TAG, urlFileName(queueUrl)); - sortedTags.put(TYPE_TAG, "sqs"); - AgentTracer.get().getDataStreamsMonitoring().setCheckpoint(span, create(sortedTags, 0, 0)); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("sqs") + .withDirection(DataStreamsTags.Direction.Inbound) + .withTopic(urlFileName(queueUrl)) + .build(); + AgentTracer.get().getDataStreamsMonitoring().setCheckpoint(span, create(tags, 0, 0)); CONSUMER_DECORATE.afterStart(span); CONSUMER_DECORATE.onConsume(span, queueUrl); diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsInterceptor.java b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsInterceptor.java index bc5729e49c1..4ed8e6a81a8 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsInterceptor.java @@ -3,20 +3,17 @@ import static datadog.trace.api.datastreams.PathwayContext.DATADOG_KEY; import static datadog.trace.bootstrap.instrumentation.api.AgentPropagation.DSM_CONCERN; import static datadog.trace.bootstrap.instrumentation.api.URIUtils.urlFileName; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.aws.v2.sqs.MessageAttributeInjector.SETTER; import datadog.context.propagation.Propagator; import datadog.context.propagation.Propagators; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstanceStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import java.util.ArrayList; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -95,15 +92,15 @@ public SdkRequest modifyRequest( private datadog.context.Context getContext( ExecutionAttributes executionAttributes, String queueUrl) { AgentSpan span = executionAttributes.getAttribute(SPAN_ATTRIBUTE); - DataStreamsContext dsmContext = DataStreamsContext.fromTags(getTags(queueUrl)); - return span.with(dsmContext); - } - private LinkedHashMap getTags(String queueUrl) { - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_OUT); - sortedTags.put(TOPIC_TAG, urlFileName(queueUrl)); - sortedTags.put(TYPE_TAG, "sqs"); - return sortedTags; + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withDirection(DataStreamsTags.Direction.Outbound) + .withTopic(urlFileName(queueUrl)) + .withType("sqs") + .build(); + + DataStreamsContext dsmContext = DataStreamsContext.fromTags(tags); + return span.with(dsmContext); } } diff 
--git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java index 2213ac64d6e..a3aafedadc7 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java @@ -6,10 +6,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.closePrevious; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.api.URIUtils.urlFileName; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.aws.v2.sqs.MessageExtractAdapter.GETTER; import static datadog.trace.instrumentation.aws.v2.sqs.SqsDecorator.BROKER_DECORATE; import static datadog.trace.instrumentation.aws.v2.sqs.SqsDecorator.CONSUMER_DECORATE; @@ -19,11 +15,12 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS; import datadog.trace.api.Config; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.util.Iterator; -import java.util.LinkedHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.awssdk.services.sqs.model.Message; @@ -91,11 +88,14 @@ protected void startNewMessageSpan(Message message) { } AgentSpan span = startSpan(SQS_INBOUND_OPERATION, batchContext); - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); - sortedTags.put(TOPIC_TAG, urlFileName(queueUrl)); - sortedTags.put(TYPE_TAG, "sqs"); - AgentTracer.get().getDataStreamsMonitoring().setCheckpoint(span, create(sortedTags, 0, 0)); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("sqs") + .withDirection(DataStreamsTags.Direction.Inbound) + .withTopic(urlFileName(queueUrl)) + .build(); + + AgentTracer.get().getDataStreamsMonitoring().setCheckpoint(span, create(tags, 0, 0)); CONSUMER_DECORATE.afterStart(span); CONSUMER_DECORATE.onConsume(span, queueUrl, requestId); diff --git a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java index 3e61cd28753..49614e5b6cb 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java +++ b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java @@ -2,10 +2,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentPropagation.extractContextAndGetSpanContext; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; -import static 
datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.SUBSCRIPTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import com.google.protobuf.Timestamp; import com.google.pubsub.v1.PubsubMessage; @@ -14,6 +10,8 @@ import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; @@ -22,7 +20,6 @@ import datadog.trace.bootstrap.instrumentation.api.Tags; import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; import datadog.trace.bootstrap.instrumentation.decorator.MessagingClientDecorator; -import java.util.LinkedHashMap; import java.util.function.Function; import java.util.function.Supplier; import java.util.regex.Matcher; @@ -133,10 +130,12 @@ public AgentSpan onConsume(final PubsubMessage message, final String subscriptio extractContextAndGetSpanContext(message, TextMapExtractAdapter.GETTER); final AgentSpan span = startSpan(PUBSUB_CONSUME, spanContext); final CharSequence parsedSubscription = extractSubscription(subscription); - final LinkedHashMap sortedTags = new LinkedHashMap<>(3); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); - sortedTags.put(SUBSCRIPTION_TAG, parsedSubscription.toString()); - sortedTags.put(TYPE_TAG, "google-pubsub"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("google-pubsub") + .withDirection(DataStreamsTags.Direction.Inbound) + .withTopic(parsedSubscription.toString()) + .build(); final Timestamp publishTime = message.getPublishTime(); // FIXME: use full nanosecond resolution when this method will accept nanos AgentTracer.get() @@ -144,7 +143,7 @@ public AgentSpan onConsume(final PubsubMessage message, final String subscriptio .setCheckpoint( span, DataStreamsContext.create( - sortedTags, + tags, publishTime.getSeconds() * 1_000 + publishTime.getNanos() / (int) 1e6, message.getSerializedSize())); afterStart(span); diff --git a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PublisherInstrumentation.java b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PublisherInstrumentation.java index 87f8413d80b..e1131e1a67a 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PublisherInstrumentation.java +++ b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PublisherInstrumentation.java @@ -5,10 +5,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.java.concurrent.ExcludeFilter.ExcludeType.RUNNABLE; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static 
datadog.trace.instrumentation.googlepubsub.PubSubDecorator.PRODUCER_DECORATE; import static datadog.trace.instrumentation.googlepubsub.PubSubDecorator.PUBSUB_PRODUCE; import static datadog.trace.instrumentation.googlepubsub.TextMapInjectAdapter.SETTER; @@ -23,11 +19,12 @@ import datadog.trace.agent.tooling.Instrumenter; import datadog.trace.agent.tooling.InstrumenterModule; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.java.concurrent.ExcludeFilter; import java.util.Collection; -import java.util.LinkedHashMap; import java.util.Map; import net.bytebuddy.asm.Advice; @@ -75,13 +72,15 @@ public static AgentScope before( PRODUCER_DECORATE.afterStart(span); PRODUCER_DECORATE.onProduce(span, topicName); - LinkedHashMap sortedTags = new LinkedHashMap<>(3); - sortedTags.put(DIRECTION_TAG, DIRECTION_OUT); - sortedTags.put(TOPIC_TAG, topicName.toString()); - sortedTags.put(TYPE_TAG, "google-pubsub"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("google-pubsub") + .withDirection(DataStreamsTags.Direction.Outbound) + .withTopic(topicName.toString()) + .build(); PubsubMessage.Builder builder = msg.toBuilder(); - DataStreamsContext dsmContext = DataStreamsContext.fromTags(sortedTags); + DataStreamsContext dsmContext = DataStreamsContext.fromTags(tags); defaultPropagator().inject(span.with(dsmContext), builder, SETTER); msg = builder.build(); return activateSpan(span); diff --git a/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/client/GrpcClientDecorator.java b/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/client/GrpcClientDecorator.java index 3e70978df16..f9846ea2321 100644 --- a/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/client/GrpcClientDecorator.java +++ b/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/client/GrpcClientDecorator.java @@ -3,9 +3,6 @@ import static datadog.context.propagation.Propagators.defaultPropagator; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import datadog.context.Context; import datadog.context.propagation.CarrierSetter; @@ -14,6 +11,8 @@ import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; @@ -23,7 +22,6 @@ import io.grpc.MethodDescriptor; import io.grpc.Status; import java.util.BitSet; -import java.util.LinkedHashMap; import java.util.Set; import java.util.function.Function; @@ -35,10 +33,11 @@ public class GrpcClientDecorator extends ClientDecorator { public static final CharSequence 
GRPC_MESSAGE = UTF8BytesString.create("grpc.message"); private static DataStreamsContext createDsmContext() { - LinkedHashMap<String, String> result = new LinkedHashMap<>(); - result.put(DIRECTION_TAG, DIRECTION_OUT); - result.put(TYPE_TAG, "grpc"); - return DataStreamsContext.fromTags(result); + return DataStreamsContext.fromTags( + new DataStreamsTagsBuilder() + .withDirection(DataStreamsTags.Direction.Outbound) + .withType("grpc") + .build()); } public static final GrpcClientDecorator DECORATE = new GrpcClientDecorator(); diff --git a/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/server/GrpcServerDecorator.java b/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/server/GrpcServerDecorator.java index 905ba71ca40..094bee7e3e2 100644 --- a/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/server/GrpcServerDecorator.java +++ b/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/server/GrpcServerDecorator.java @@ -1,12 +1,10 @@ package datadog.trace.instrumentation.grpc.server; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; - import datadog.trace.api.Config; import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.ErrorPriorities; @@ -18,7 +16,6 @@ import io.grpc.StatusException; import io.grpc.StatusRuntimeException; import java.util.BitSet; -import java.util.LinkedHashMap; import java.util.function.Function; public class GrpcServerDecorator extends ServerDecorator { @@ -33,15 +30,14 @@ public class GrpcServerDecorator extends ServerDecorator { public static final CharSequence COMPONENT_NAME = UTF8BytesString.create("grpc-server"); public static final CharSequence GRPC_MESSAGE = UTF8BytesString.create("grpc.message"); - private static final LinkedHashMap<String, String> createServerPathwaySortedTags() { - LinkedHashMap<String, String> result = new LinkedHashMap<>(); - result.put(DIRECTION_TAG, DIRECTION_IN); - result.put(TYPE_TAG, "grpc"); - return result; + private static DataStreamsTags createServerPathwaySortedTags() { + return new DataStreamsTagsBuilder() + .withDirection(DataStreamsTags.Direction.Inbound) + .withType("grpc") + .build(); } - public static final LinkedHashMap<String, String> SERVER_PATHWAY_EDGE_TAGS = - createServerPathwaySortedTags(); + public static final DataStreamsTags SERVER_PATHWAY_EDGE_TAGS = createServerPathwaySortedTags(); public static final GrpcServerDecorator DECORATE = new GrpcServerDecorator(); private static final Function NORMALIZE = diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/ConsumerCoordinatorInstrumentation.java b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/ConsumerCoordinatorInstrumentation.java index 2479889d88c..8f09f5c240f 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/ConsumerCoordinatorInstrumentation.java +++
b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/ConsumerCoordinatorInstrumentation.java @@ -2,20 +2,16 @@ import static datadog.trace.agent.tooling.bytebuddy.matcher.ClassLoaderMatchers.hasClassNamed; import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; -import static datadog.trace.core.datastreams.TagsProcessor.CONSUMER_GROUP_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.KAFKA_CLUSTER_ID_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.PARTITION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static net.bytebuddy.matcher.ElementMatchers.*; import com.google.auto.service.AutoService; import datadog.trace.agent.tooling.Instrumenter; import datadog.trace.agent.tooling.InstrumenterModule; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.Map; import net.bytebuddy.asm.Advice; import net.bytebuddy.matcher.ElementMatcher; @@ -105,17 +101,16 @@ public static void trackCommitOffset( if (entry.getKey() == null || entry.getValue() == null) { continue; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(CONSUMER_GROUP_TAG, consumerGroup); - if (clusterId != null) { - sortedTags.put(KAFKA_CLUSTER_ID_TAG, clusterId); - } - sortedTags.put(PARTITION_TAG, String.valueOf(entry.getKey().partition())); - sortedTags.put(TOPIC_TAG, entry.getKey().topic()); - sortedTags.put(TYPE_TAG, "kafka_commit"); - AgentTracer.get() - .getDataStreamsMonitoring() - .trackBacklog(sortedTags, entry.getValue().offset()); + + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withConsumerGroup(consumerGroup) + .withKafkaClusterId(clusterId) + .withPartition(String.valueOf(entry.getKey().partition())) + .withTopic(entry.getKey().topic()) + .withType("kafka_commit") + .build(); + AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, entry.getValue().offset()); } } diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerCallback.java b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerCallback.java index d1aa1cd9ec1..453e855b2b6 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerCallback.java +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerCallback.java @@ -1,16 +1,13 @@ package datadog.trace.instrumentation.kafka_clients; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; -import static datadog.trace.core.datastreams.TagsProcessor.KAFKA_CLUSTER_ID_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.PARTITION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.kafka_clients.KafkaDecorator.PRODUCER_DECORATE; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import 
datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; -import java.util.LinkedHashMap; import javax.annotation.Nullable; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.RecordMetadata; @@ -49,13 +46,15 @@ public void onCompletion(final RecordMetadata metadata, final Exception exceptio if (metadata == null) { return; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - if (clusterId != null) { - sortedTags.put(KAFKA_CLUSTER_ID_TAG, clusterId); - } - sortedTags.put(PARTITION_TAG, String.valueOf(metadata.partition())); - sortedTags.put(TOPIC_TAG, metadata.topic()); - sortedTags.put(TYPE_TAG, "kafka_produce"); - AgentTracer.get().getDataStreamsMonitoring().trackBacklog(sortedTags, metadata.offset()); + + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("kafka_produce") + .withKafkaClusterId(clusterId) + .withTopic(metadata.topic()) + .withPartition(String.valueOf(metadata.partition())) + .build(); + + AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, metadata.offset()); } } diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerInstrumentation.java b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerInstrumentation.java index a85ee9ce95d..0dfd2576c2f 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerInstrumentation.java +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerInstrumentation.java @@ -8,11 +8,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.KAFKA_CLUSTER_ID_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.kafka_clients.KafkaDecorator.KAFKA_PRODUCE; import static datadog.trace.instrumentation.kafka_clients.KafkaDecorator.PRODUCER_DECORATE; import static datadog.trace.instrumentation.kafka_clients.KafkaDecorator.TIME_IN_QUEUE_ENABLED; @@ -31,13 +26,14 @@ import datadog.trace.agent.tooling.InstrumenterModule; import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.datastreams.StatsPoint; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags; -import java.util.LinkedHashMap; import java.util.Map; import net.bytebuddy.asm.Advice; import net.bytebuddy.matcher.ElementMatcher; @@ -145,13 +141,13 @@ public static 
AgentScope onEnter( && !Config.get().isKafkaClientPropagationDisabledForTopic(record.topic())) { setter = TextMapInjectAdapter.SETTER; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_OUT); - if (clusterId != null) { - sortedTags.put(KAFKA_CLUSTER_ID_TAG, clusterId); - } - sortedTags.put(TOPIC_TAG, record.topic()); - sortedTags.put(TYPE_TAG, "kafka"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("kafka") + .withDirection(DataStreamsTags.Direction.Outbound) + .withKafkaClusterId(clusterId) + .withTopic(record.topic()) + .build(); try { defaultPropagator().inject(span, record.headers(), setter); if (STREAMING_CONTEXT.isDisabledForTopic(record.topic()) @@ -160,7 +156,7 @@ public static AgentScope onEnter( // message size. // The stats are saved in the pathway context and sent in PayloadSizeAdvice. Propagator dsmPropagator = Propagators.forConcern(DSM_CONCERN); - DataStreamsContext dsmContext = fromTagsWithoutCheckpoint(sortedTags); + DataStreamsContext dsmContext = fromTagsWithoutCheckpoint(tags); dsmPropagator.inject(span.with(dsmContext), record.headers(), setter); AvroSchemaExtractor.tryExtractProducer(record, span); } @@ -179,7 +175,7 @@ record = if (STREAMING_CONTEXT.isDisabledForTopic(record.topic()) || STREAMING_CONTEXT.isSinkTopic(record.topic())) { Propagator dsmPropagator = Propagators.forConcern(DSM_CONCERN); - DataStreamsContext dsmContext = fromTagsWithoutCheckpoint(sortedTags); + DataStreamsContext dsmContext = fromTagsWithoutCheckpoint(tags); dsmPropagator.inject(span.with(dsmContext), record.headers(), setter); AvroSchemaExtractor.tryExtractProducer(record, span); } @@ -213,7 +209,7 @@ public static void onEnter(@Advice.Argument(value = 0) int estimatedPayloadSize) // create new stats including the payload size StatsPoint updated = new StatsPoint( - saved.getEdgeTags(), + saved.getTags(), saved.getHash(), saved.getParentHash(), saved.getAggregationHash(), diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/TracingIterator.java b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/TracingIterator.java index 30e40b40f79..cd33f23ab38 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/TracingIterator.java +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/TracingIterator.java @@ -7,12 +7,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.closePrevious; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.GROUP_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.KAFKA_CLUSTER_ID_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.kafka_clients.KafkaDecorator.BROKER_DECORATE; import static datadog.trace.instrumentation.kafka_clients.KafkaDecorator.KAFKA_DELIVER; import static datadog.trace.instrumentation.kafka_clients.KafkaDecorator.TIME_IN_QUEUE_ENABLED; @@ -26,12 +20,13 @@ 
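The getEdgeTags() -> getTags() rename in the PayloadSizeAdvice hunk above follows from StatsPoint now carrying a single DataStreamsTags value instead of the per-edge tag collection it previously exposed. A minimal sketch of how the renamed accessor presumably reads after this patch (the field name is an assumption; only the getter name appears in the diff):

    public DataStreamsTags getTags() {
      // immutable tag set captured when this stats point was recorded
      return this.tags;
    }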
import datadog.context.propagation.Propagators; import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags; import java.util.Iterator; -import java.util.LinkedHashMap; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -101,21 +96,21 @@ protected void startNewRecordSpan(ConsumerRecord val) { // spans are written out together by TraceStructureWriter when running in strict mode } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); - sortedTags.put(GROUP_TAG, group); - if (clusterId != null) { - sortedTags.put(KAFKA_CLUSTER_ID_TAG, clusterId); - } - sortedTags.put(TOPIC_TAG, val.topic()); - sortedTags.put(TYPE_TAG, "kafka"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withDirection(DataStreamsTags.Direction.Inbound) + .withGroup(group) + .withKafkaClusterId(clusterId) + .withTopic(val.topic()) + .withType("kafka") + .build(); final long payloadSize = traceConfig().isDataStreamsEnabled() ? computePayloadSizeBytes(val) : 0; if (STREAMING_CONTEXT.isDisabledForTopic(val.topic())) { AgentTracer.get() .getDataStreamsMonitoring() - .setCheckpoint(span, create(sortedTags, val.timestamp(), payloadSize)); + .setCheckpoint(span, create(tags, val.timestamp(), payloadSize)); } else { // when we're in a streaming context we want to consume only from source topics if (STREAMING_CONTEXT.isSourceTopic(val.topic())) { @@ -124,7 +119,7 @@ protected void startNewRecordSpan(ConsumerRecord val) { // some other instance of the application, breaking the context propagation // for DSM users Propagator dsmPropagator = Propagators.forConcern(DSM_CONCERN); - DataStreamsContext dsmContext = create(sortedTags, val.timestamp(), payloadSize); + DataStreamsContext dsmContext = create(tags, val.timestamp(), payloadSize); dsmPropagator.inject(span.with(dsmContext), val.headers(), SETTER); } } diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ConsumerCoordinatorAdvice.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ConsumerCoordinatorAdvice.java index ecec8bd1e77..304167fb093 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ConsumerCoordinatorAdvice.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ConsumerCoordinatorAdvice.java @@ -1,14 +1,9 @@ package datadog.trace.instrumentation.kafka_clients38; -import static datadog.trace.core.datastreams.TagsProcessor.CONSUMER_GROUP_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.KAFKA_CLUSTER_ID_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.PARTITION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; - +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import 
datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; -import java.util.LinkedHashMap; import java.util.Map; import net.bytebuddy.asm.Advice; import org.apache.kafka.clients.Metadata; @@ -52,17 +47,15 @@ public static void trackCommitOffset( if (entry.getKey() == null || entry.getValue() == null) { continue; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(CONSUMER_GROUP_TAG, consumerGroup); - if (clusterId != null) { - sortedTags.put(KAFKA_CLUSTER_ID_TAG, clusterId); - } - sortedTags.put(PARTITION_TAG, String.valueOf(entry.getKey().partition())); - sortedTags.put(TOPIC_TAG, entry.getKey().topic()); - sortedTags.put(TYPE_TAG, "kafka_commit"); - AgentTracer.get() - .getDataStreamsMonitoring() - .trackBacklog(sortedTags, entry.getValue().offset()); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withConsumerGroup(consumerGroup) + .withKafkaClusterId(clusterId) + .withPartition(String.valueOf(entry.getKey().partition())) + .withTopic(entry.getKey().topic()) + .withType("kafka_commit") + .build(); + AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, entry.getValue().offset()); } } diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/DDOffsetCommitCallback.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/DDOffsetCommitCallback.java index 65b2b94b26b..727a7e4b197 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/DDOffsetCommitCallback.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/DDOffsetCommitCallback.java @@ -1,14 +1,9 @@ package datadog.trace.instrumentation.kafka_clients38; -import static datadog.trace.core.datastreams.TagsProcessor.CONSUMER_GROUP_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.KAFKA_CLUSTER_ID_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.PARTITION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; - +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; -import java.util.LinkedHashMap; import java.util.Map; import org.apache.kafka.clients.Metadata; import org.apache.kafka.clients.consumer.OffsetAndMetadata; @@ -34,27 +29,28 @@ public void onComplete(Map map, Exception e) if (entry.getKey() == null || entry.getValue() == null) { continue; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); + String consumerGroup = null; + String clusterId = null; + if (kafkaConsumerInfo != null) { - String consumerGroup = kafkaConsumerInfo.getConsumerGroup().get(); + consumerGroup = kafkaConsumerInfo.getConsumerGroup().get(); Metadata consumerMetadata = kafkaConsumerInfo.getmetadata().get(); - String clusterId = null; if (consumerMetadata != null) { clusterId = InstrumentationContext.get(Metadata.class, String.class).get(consumerMetadata); } - sortedTags.put(CONSUMER_GROUP_TAG, consumerGroup); - if (clusterId != null) { - sortedTags.put(KAFKA_CLUSTER_ID_TAG, clusterId); - } } - sortedTags.put(PARTITION_TAG, String.valueOf(entry.getKey().partition())); - sortedTags.put(TOPIC_TAG, 
entry.getKey().topic()); - sortedTags.put(TYPE_TAG, "kafka_commit"); - AgentTracer.get() - .getDataStreamsMonitoring() - .trackBacklog(sortedTags, entry.getValue().offset()); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withConsumerGroup(consumerGroup) + .withKafkaClusterId(clusterId) + .withPartition(String.valueOf(entry.getKey().partition())) + .withTopic(entry.getKey().topic()) + .withType("kafka_commit") + .build(); + + AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, entry.getValue().offset()); } } } diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/KafkaProducerCallback.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/KafkaProducerCallback.java index c6252206ab2..de57044962b 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/KafkaProducerCallback.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/KafkaProducerCallback.java @@ -1,16 +1,13 @@ package datadog.trace.instrumentation.kafka_clients38; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; -import static datadog.trace.core.datastreams.TagsProcessor.KAFKA_CLUSTER_ID_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.PARTITION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.kafka_clients38.KafkaDecorator.PRODUCER_DECORATE; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; -import java.util.LinkedHashMap; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.RecordMetadata; @@ -48,13 +45,13 @@ public void onCompletion(final RecordMetadata metadata, final Exception exceptio if (metadata == null) { return; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - if (clusterId != null) { - sortedTags.put(KAFKA_CLUSTER_ID_TAG, clusterId); - } - sortedTags.put(PARTITION_TAG, String.valueOf(metadata.partition())); - sortedTags.put(TOPIC_TAG, metadata.topic()); - sortedTags.put(TYPE_TAG, "kafka_produce"); - AgentTracer.get().getDataStreamsMonitoring().trackBacklog(sortedTags, metadata.offset()); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withKafkaClusterId(clusterId) + .withPartition(String.valueOf(metadata.partition())) + .withTopic(metadata.topic()) + .withType("kafka_produce") + .build(); + AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, metadata.offset()); } } diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/PayloadSizeAdvice.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/PayloadSizeAdvice.java index 8d0a8b6a7b0..7db4e95e711 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/PayloadSizeAdvice.java +++ 
b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/PayloadSizeAdvice.java @@ -20,7 +20,7 @@ public static void onEnter(@Advice.Argument(value = 0) int estimatedPayloadSize) // create new stats including the payload size StatsPoint updated = new StatsPoint( - saved.getEdgeTags(), + saved.getTags(), saved.getHash(), saved.getParentHash(), saved.getAggregationHash(), diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ProducerAdvice.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ProducerAdvice.java index 0756f585b52..1afd56fcecc 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ProducerAdvice.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ProducerAdvice.java @@ -6,11 +6,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.KAFKA_CLUSTER_ID_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.kafka_clients38.KafkaDecorator.KAFKA_PRODUCE; import static datadog.trace.instrumentation.kafka_clients38.KafkaDecorator.PRODUCER_DECORATE; import static datadog.trace.instrumentation.kafka_clients38.KafkaDecorator.TIME_IN_QUEUE_ENABLED; @@ -20,11 +15,12 @@ import datadog.context.propagation.Propagators; import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags; -import java.util.LinkedHashMap; import net.bytebuddy.asm.Advice; import org.apache.kafka.clients.Metadata; import org.apache.kafka.clients.producer.Callback; @@ -67,13 +63,13 @@ public static AgentScope onEnter( && !Config.get().isKafkaClientPropagationDisabledForTopic(record.topic())) { setter = TextMapInjectAdapter.SETTER; } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_OUT); - if (clusterId != null) { - sortedTags.put(KAFKA_CLUSTER_ID_TAG, clusterId); - } - sortedTags.put(TOPIC_TAG, record.topic()); - sortedTags.put(TYPE_TAG, "kafka"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("kafka") + .withDirection(DataStreamsTags.Direction.Outbound) + .withKafkaClusterId(clusterId) + .withTopic(record.topic()) + .build(); try { defaultPropagator().inject(span, record.headers(), setter); if (STREAMING_CONTEXT.isDisabledForTopic(record.topic()) @@ -82,7 +78,7 @@ public static AgentScope onEnter( // message size. // The stats are saved in the pathway context and sent in PayloadSizeAdvice. 
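// Note that the `if (clusterId != null)` guard from the removed code has no
// replacement: withKafkaClusterId(clusterId) is now called unconditionally, so
// the builder is evidently expected to treat null as "tag unset". A
// hypothetical sketch of such a with-method (internals are assumptions; only
// the method name appears in this patch):
//   public DataStreamsTagsBuilder withKafkaClusterId(String clusterId) {
//     this.kafkaClusterId = clusterId; // null simply leaves the tag out
//     return this;
//   }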
Propagator dsmPropagator = Propagators.forConcern(DSM_CONCERN); - DataStreamsContext dsmContext = fromTagsWithoutCheckpoint(sortedTags); + DataStreamsContext dsmContext = fromTagsWithoutCheckpoint(tags); dsmPropagator.inject(span.with(dsmContext), record.headers(), setter); AvroSchemaExtractor.tryExtractProducer(record, span); } @@ -101,7 +97,7 @@ record = if (STREAMING_CONTEXT.isDisabledForTopic(record.topic()) || STREAMING_CONTEXT.isSinkTopic(record.topic())) { Propagator dsmPropagator = Propagators.forConcern(DSM_CONCERN); - DataStreamsContext dsmContext = fromTagsWithoutCheckpoint(sortedTags); + DataStreamsContext dsmContext = fromTagsWithoutCheckpoint(tags); dsmPropagator.inject(span.with(dsmContext), record.headers(), setter); AvroSchemaExtractor.tryExtractProducer(record, span); } diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/TracingIterator.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/TracingIterator.java index b43534af550..724adbd76a5 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/TracingIterator.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/TracingIterator.java @@ -7,12 +7,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.closePrevious; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.GROUP_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.KAFKA_CLUSTER_ID_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.kafka_clients38.TextMapExtractAdapter.GETTER; import static datadog.trace.instrumentation.kafka_clients38.TextMapInjectAdapter.SETTER; import static java.util.concurrent.TimeUnit.MILLISECONDS; @@ -21,6 +15,8 @@ import datadog.context.propagation.Propagators; import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; @@ -28,7 +24,6 @@ import datadog.trace.instrumentation.kafka_common.StreamingContext; import datadog.trace.instrumentation.kafka_common.Utils; import java.util.Iterator; -import java.util.LinkedHashMap; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -101,21 +96,21 @@ protected void startNewRecordSpan(ConsumerRecord val) { // spans are written out together by TraceStructureWriter when running in strict mode } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); - sortedTags.put(GROUP_TAG, group); - if (clusterId != null) { - sortedTags.put(KAFKA_CLUSTER_ID_TAG, clusterId); - } - sortedTags.put(TOPIC_TAG, val.topic()); - 
sortedTags.put(TYPE_TAG, "kafka"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withDirection(DataStreamsTags.Direction.Inbound) + .withGroup(group) + .withKafkaClusterId(clusterId) + .withTopic(val.topic()) + .withType("kafka") + .build(); final long payloadSize = traceConfig().isDataStreamsEnabled() ? Utils.computePayloadSizeBytes(val) : 0; if (StreamingContext.STREAMING_CONTEXT.isDisabledForTopic(val.topic())) { AgentTracer.get() .getDataStreamsMonitoring() - .setCheckpoint(span, create(sortedTags, val.timestamp(), payloadSize)); + .setCheckpoint(span, create(tags, val.timestamp(), payloadSize)); } else { // when we're in a streaming context we want to consume only from source topics if (StreamingContext.STREAMING_CONTEXT.isSourceTopic(val.topic())) { @@ -124,7 +119,7 @@ protected void startNewRecordSpan(ConsumerRecord val) { // some other instance of the application, breaking the context propagation // for DSM users Propagator dsmPropagator = Propagators.forConcern(DSM_CONCERN); - DataStreamsContext dsmContext = create(sortedTags, val.timestamp(), payloadSize); + DataStreamsContext dsmContext = create(tags, val.timestamp(), payloadSize); dsmPropagator.inject(span.with(dsmContext), val.headers(), SETTER); } } diff --git a/dd-java-agent/instrumentation/kafka-streams-0.11/src/main/java/datadog/trace/instrumentation/kafka_streams/KafkaStreamTaskInstrumentation.java b/dd-java-agent/instrumentation/kafka-streams-0.11/src/main/java/datadog/trace/instrumentation/kafka_streams/KafkaStreamTaskInstrumentation.java index f477f40a14b..8674622182c 100644 --- a/dd-java-agent/instrumentation/kafka-streams-0.11/src/main/java/datadog/trace/instrumentation/kafka_streams/KafkaStreamTaskInstrumentation.java +++ b/dd-java-agent/instrumentation/kafka-streams-0.11/src/main/java/datadog/trace/instrumentation/kafka_streams/KafkaStreamTaskInstrumentation.java @@ -7,11 +7,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.GROUP_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.kafka_common.StreamingContext.STREAMING_CONTEXT; import static datadog.trace.instrumentation.kafka_common.Utils.computePayloadSizeBytes; import static datadog.trace.instrumentation.kafka_streams.KafkaStreamsDecorator.BROKER_DECORATE; @@ -38,13 +33,14 @@ import datadog.trace.agent.tooling.InstrumenterModule; import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import datadog.trace.instrumentation.kafka_clients.TracingIterableDelegator; -import java.util.LinkedHashMap; import java.util.Map; import net.bytebuddy.asm.Advice; import 
org.apache.kafka.clients.consumer.ConsumerRecord; @@ -250,28 +246,29 @@ public static void start( // The queueSpan will be finished after inner span has been activated to ensure that // spans are written out together by TraceStructureWriter when running in strict mode } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); + + String applicationId = null; if (streamTaskContext != null) { - String applicationId = streamTaskContext.getApplicationId(); - if (applicationId != null) { - // Kafka Streams uses the application ID as the consumer group.id. - sortedTags.put(GROUP_TAG, applicationId); - } + applicationId = streamTaskContext.getApplicationId(); } - sortedTags.put(TOPIC_TAG, record.topic()); - sortedTags.put(TYPE_TAG, "kafka"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("kafka") + .withDirection(DataStreamsTags.Direction.Inbound) + .withGroup(applicationId) + .withTopic(record.topic()) + .build(); final long payloadSize = traceConfig().isDataStreamsEnabled() ? computePayloadSizeBytes(record.value) : 0; if (STREAMING_CONTEXT.isDisabledForTopic(record.topic())) { AgentTracer.get() .getDataStreamsMonitoring() - .setCheckpoint(span, create(sortedTags, record.timestamp, payloadSize)); + .setCheckpoint(span, create(tags, record.timestamp, payloadSize)); } else { if (STREAMING_CONTEXT.isSourceTopic(record.topic())) { Propagator dsmPropagator = Propagators.forConcern(DSM_CONCERN); - DataStreamsContext dsmContext = create(sortedTags, record.timestamp, payloadSize); + DataStreamsContext dsmContext = create(tags, record.timestamp, payloadSize); dsmPropagator.inject(span.with(dsmContext), record, SR_SETTER); } } @@ -327,17 +324,17 @@ public static void start( // spans are written out together by TraceStructureWriter when running in strict mode } - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(DIRECTION_TAG, DIRECTION_IN); + String applicationId = null; if (streamTaskContext != null) { - String applicationId = streamTaskContext.getApplicationId(); - if (applicationId != null) { - // Kafka Streams uses the application ID as the consumer group.id. 
- sortedTags.put(GROUP_TAG, applicationId); - } + applicationId = streamTaskContext.getApplicationId(); } - sortedTags.put(TOPIC_TAG, record.topic()); - sortedTags.put(TYPE_TAG, "kafka"); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("kafka") + .withDirection(DataStreamsTags.Direction.Inbound) + .withGroup(applicationId) + .withTopic(record.topic()) + .build(); long payloadSize = 0; // we have to go through Object to get the RecordMetadata here because the class of `record` @@ -350,11 +347,11 @@ public static void start( if (STREAMING_CONTEXT.isDisabledForTopic(record.topic())) { AgentTracer.get() .getDataStreamsMonitoring() - .setCheckpoint(span, create(sortedTags, record.timestamp(), payloadSize)); + .setCheckpoint(span, create(tags, record.timestamp(), payloadSize)); } else { if (STREAMING_CONTEXT.isSourceTopic(record.topic())) { Propagator dsmPropagator = Propagators.forConcern(DSM_CONCERN); - DataStreamsContext dsmContext = create(sortedTags, record.timestamp(), payloadSize); + DataStreamsContext dsmContext = create(tags, record.timestamp(), payloadSize); dsmPropagator.inject(span.with(dsmContext), record, PR_SETTER); } } diff --git a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitChannelInstrumentation.java b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitChannelInstrumentation.java index 1c762039653..166b9c98a7e 100644 --- a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitChannelInstrumentation.java +++ b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitChannelInstrumentation.java @@ -9,11 +9,6 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.noopSpan; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_OUT; -import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.EXCHANGE_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.HAS_ROUTING_KEY_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import static datadog.trace.instrumentation.rabbitmq.amqp.RabbitDecorator.CLIENT_DECORATE; import static datadog.trace.instrumentation.rabbitmq.amqp.RabbitDecorator.CONSUMER_DECORATE; import static datadog.trace.instrumentation.rabbitmq.amqp.RabbitDecorator.OPERATION_AMQP_COMMAND; @@ -41,12 +36,13 @@ import datadog.trace.agent.tooling.InstrumenterModule; import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.CallDepthThreadLocalMap; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import java.io.IOException; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.Map; import net.bytebuddy.asm.Advice; import net.bytebuddy.description.type.TypeDescription; @@ -190,13 +186,15 @@ public static AgentScope setResourceNameAddHeaders( if (TIME_IN_QUEUE_ENABLED) { RabbitDecorator.injectTimeInQueueStart(headers); } - LinkedHashMap sortedTags = 
new LinkedHashMap<>();
-      sortedTags.put(DIRECTION_TAG, DIRECTION_OUT);
-      sortedTags.put(EXCHANGE_TAG, exchange);
-      sortedTags.put(
-          HAS_ROUTING_KEY_TAG, routingKey == null || routingKey.isEmpty() ? "false" : "true");
-      sortedTags.put(TYPE_TAG, "rabbitmq");
-      DataStreamsContext dsmContext = DataStreamsContext.fromTags(sortedTags);
+      DataStreamsTags tags =
+          new DataStreamsTagsBuilder()
+              .withDirection(DataStreamsTags.Direction.Outbound)
+              .withExchange(exchange)
+              .withHasRoutingKey(routingKey != null && !routingKey.isEmpty())
+              .withType("rabbitmq")
+              .build();
+
+      DataStreamsContext dsmContext = DataStreamsContext.fromTags(tags);
       defaultPropagator().inject(span.with(dsmContext), headers, SETTER);
       props =
           new AMQP.BasicProperties(
diff --git a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitDecorator.java b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitDecorator.java
index c3d1797c4fc..9f4f227aa13 100644
--- a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitDecorator.java
+++ b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitDecorator.java
@@ -9,16 +9,14 @@
 import static datadog.trace.bootstrap.instrumentation.api.InstrumentationTags.AMQP_QUEUE;
 import static datadog.trace.bootstrap.instrumentation.api.InstrumentationTags.AMQP_ROUTING_KEY;
 import static datadog.trace.bootstrap.instrumentation.api.InstrumentationTags.RECORD_QUEUE_TIME_MS;
-import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_IN;
-import static datadog.trace.core.datastreams.TagsProcessor.DIRECTION_TAG;
-import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG;
-import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG;
 
 import com.rabbitmq.client.AMQP;
 import com.rabbitmq.client.Command;
 import com.rabbitmq.client.Consumer;
 import com.rabbitmq.client.Envelope;
 import datadog.trace.api.Config;
+import datadog.trace.api.datastreams.DataStreamsTags;
+import datadog.trace.api.datastreams.DataStreamsTagsBuilder;
 import datadog.trace.api.naming.SpanNaming;
 import datadog.trace.bootstrap.instrumentation.api.AgentScope;
 import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
@@ -29,7 +27,6 @@
 import datadog.trace.bootstrap.instrumentation.api.Tags;
 import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString;
 import datadog.trace.bootstrap.instrumentation.decorator.MessagingClientDecorator;
-import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Supplier;
@@ -250,13 +247,15 @@ public static AgentScope startReceivingSpan(
     }
 
     if (null != headers) {
-      LinkedHashMap sortedTags = new LinkedHashMap<>();
-      sortedTags.put(DIRECTION_TAG, DIRECTION_IN);
-      sortedTags.put(TOPIC_TAG, queue);
-      sortedTags.put(TYPE_TAG, "rabbitmq");
+      DataStreamsTags tags =
+          new DataStreamsTagsBuilder()
+              .withDirection(DataStreamsTags.Direction.Inbound)
+              .withTopic(queue)
+              .withType("rabbitmq")
+              .build();
       AgentTracer.get()
           .getDataStreamsMonitoring()
-          .setCheckpoint(span, create(sortedTags, produceMillis, 0));
+          .setCheckpoint(span, create(tags, produceMillis, 0));
     }
     CONSUMER_DECORATE.afterStart(span);
diff --git a/dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractDatadogSparkListener.java 
b/dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractDatadogSparkListener.java index a4a340792e7..19730951a4a 100644 --- a/dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractDatadogSparkListener.java +++ b/dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractDatadogSparkListener.java @@ -1,16 +1,14 @@ package datadog.trace.instrumentation.spark; import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.traceConfig; -import static datadog.trace.core.datastreams.TagsProcessor.CONSUMER_GROUP_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.PARTITION_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TOPIC_TAG; -import static datadog.trace.core.datastreams.TagsProcessor.TYPE_TAG; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import datadog.trace.api.Config; import datadog.trace.api.DDTags; import datadog.trace.api.DDTraceId; +import datadog.trace.api.datastreams.DataStreamsTags; +import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.sampling.PrioritySampling; import datadog.trace.api.sampling.SamplingMechanism; import datadog.trace.bootstrap.InstanceStore; @@ -32,7 +30,6 @@ import java.util.Collection; import java.util.HashMap; import java.util.Iterator; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -1317,20 +1314,18 @@ private static void reportKafkaOffsets( JsonNode topicNode = jsonNode.get(topic); // iterate thought reported partitions Iterator allPartitions = topicNode.fieldNames(); - // dsm tags - LinkedHashMap sortedTags = new LinkedHashMap<>(); - sortedTags.put(CONSUMER_GROUP_TAG, appName); - // will be overwritten - sortedTags.put(PARTITION_TAG, ""); - sortedTags.put(TOPIC_TAG, topic); - sortedTags.put(TYPE_TAG, "kafka_commit"); - while (allPartitions.hasNext()) { String partition = allPartitions.next(); - sortedTags.put(PARTITION_TAG, partition); + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType("kafka_commit") + .withConsumerGroup(appName) + .withTopic(topic) + .withPartition(partition) + .build(); AgentTracer.get() .getDataStreamsMonitoring() - .trackBacklog(sortedTags, topicNode.get(partition).asLong()); + .trackBacklog(tags, topicNode.get(partition).asLong()); } } } catch (Throwable e) { diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java index 4bf30213a2b..be1502833ee 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java @@ -235,9 +235,16 @@ public void setConsumeCheckpoint(String type, String source, DataStreamsContextC return; } mergePathwayContextIntoSpan(span, carrier); - setCheckpoint( - span, - fromTags(DataStreamsTags.Create(type, DataStreamsTags.Direction.Inbound, source, true))); + + DataStreamsTags tags = + new DataStreamsTagsBuilder() + .withType(type) + .withDirection(DataStreamsTags.Direction.Inbound) + .withTopic(source) + .withManual(true) + .build(); + + setCheckpoint(span, fromTags(tags)); } public void setProduceCheckpoint( @@ -253,10 +260,15 @@ public void setProduceCheckpoint( return; } - DataStreamsContext dsmContext = - fromTags( - 
DataStreamsTags.Create(
-                type, DataStreamsTags.Direction.Outbound, target, manualCheckpoint));
+    DataStreamsTags tags =
+        new DataStreamsTagsBuilder()
+            .withType(type)
+            .withDirection(DataStreamsTags.Direction.Outbound)
+            .withTopic(target)
+            .withManual(manualCheckpoint)
+            .build();
+
+    DataStreamsContext dsmContext = fromTags(tags);
     this.propagator.inject(
         span.with(dsmContext), carrier, DataStreamsContextCarrierAdapter.INSTANCE);
   }
diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java
index 4809d985cde..53ea6491d5e 100644
--- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java
+++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java
@@ -17,9 +17,15 @@ public class DataStreamsContext implements ImplicitContextKeyed {
 
   static {
     CLIENT_PATHWAY_EDGE_TAGS =
-        DataStreamsTags.Create("http", DataStreamsTags.Direction.Outbound, null);
+        new DataStreamsTagsBuilder()
+            .withType("http")
+            .withDirection(DataStreamsTags.Direction.Outbound)
+            .build();
     SERVER_PATHWAY_EDGE_TAGS =
-        DataStreamsTags.Create("http", DataStreamsTags.Direction.Inbound, null);
+        new DataStreamsTagsBuilder()
+            .withType("http")
+            .withDirection(DataStreamsTags.Direction.Inbound)
+            .build();
   }
 
   public static DataStreamsContext fromContext(Context context) {
diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java
index c4eb78cb497..5b94b5be12b 100644
--- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java
+++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java
@@ -2,10 +2,9 @@
 
 import datadog.trace.util.FNV64Hash;
 
-import javax.xml.crypto.Data;
-
 public class DataStreamsTags {
   public enum Direction {
+    Unknown,
     Inbound,
     Outbound,
   }
@@ -17,25 +16,9 @@ public enum TagTraverseMode {
     All
   }
 
-  public static DataStreamsTags EMPTY = DataStreamsTags.Create(null, null, null);
-
-  // hash tags
-  private final String bus;
-  private final Direction direction;
-  private final String exchange;
-  private final String topic;
-  private final String type;
-  private final String subscription;
-  // additional grouping tags
-  private final String datasetName;
-  private final String datasetNamespace;
-  private final Boolean isManual;
-  // informational tags
-  private final String group;
-  private final Boolean hasRoutingKey;
-  private final String kafkaClusterId;
-  private final String partition;
+  public static DataStreamsTags EMPTY = new DataStreamsTagsBuilder().build();
 
+  private final DataStreamsTagsBuilder builder;
   private long hash;
   private long aggregationHash;
   private long completeHash;
@@ -58,16 +41,6 @@ public enum TagTraverseMode {
   public static final String HAS_ROUTING_KEY_TAG = "has_routing_key";
   public static final String KAFKA_CLUSTER_ID_TAG = "kafka_cluster_id";
 
-  public static DataStreamsTags Create(String type, Direction direction, String topic) {
-    return DataStreamsTags.Create(type, direction, topic, false);
-  }
-
-  public static DataStreamsTags Create(
-      String type, Direction direction, String topic, Boolean isManual) {
-    return new DataStreamsTags(
-        type, direction, topic, isManual, null, null, null, null, null, null, false, null, null);
-  }
-
   public static byte[] longToBytes(long val) {
     return new byte[] {
       (byte) val,
@@ -81,34 +54,8 @@ public static byte[] longToBytes(long val) {
     };
   }
 
-  
private DataStreamsTags( - String type, - Direction direction, - String topic, - Boolean isManual, - String bus, - String exchange, - String subscription, - String datasetName, - String datasetNamespace, - String group, - Boolean hasRoutingKey, - String kafkaClusterId, - String partition) { - this.bus = bus; - this.direction = direction; - this.exchange = exchange; - this.topic = topic; - this.type = type; - this.subscription = subscription; - this.isManual = isManual; - this.datasetName = datasetName; - this.datasetNamespace = datasetNamespace; - this.group = group; - this.hasRoutingKey = hasRoutingKey; - this.kafkaClusterId = kafkaClusterId; - this.partition = partition; - + public DataStreamsTags(DataStreamsTagsBuilder builder) { + this.builder = builder; this.size = this.forEachTag( (name, value) -> { @@ -142,61 +89,71 @@ public int forEachTag(DataStreamsTagsProcessor processor, TagTraverseMode mode) int count = 0; if (mode == TagTraverseMode.HashOnly || mode == TagTraverseMode.All) { - if (this.bus != null) { - processor.process(BUS_TAG, this.bus); + if (this.builder.bus != null) { + processor.process(BUS_TAG, this.builder.bus); count += 1; } - count += 1; - if (this.direction == Direction.Inbound) { + if (this.builder.direction == Direction.Inbound) { + count += 1; processor.process(DIRECTION_TAG, DIRECTION_IN); - } else { + } else if (this.builder.direction == Direction.Outbound) { + count += 1; processor.process(DIRECTION_TAG, DIRECTION_OUT); } - if (this.exchange != null) { + if (this.builder.exchange != null) { count += 1; - processor.process(EXCHANGE_TAG, this.exchange); + processor.process(EXCHANGE_TAG, this.builder.exchange); } // topic and type are always required, no need to check for null count += 2; - processor.process(TOPIC_TAG, this.topic); - processor.process(TYPE_TAG, this.type); + processor.process(TOPIC_TAG, this.builder.topic); + processor.process(TYPE_TAG, this.builder.type); - if (this.subscription != null) { + if (this.builder.subscription != null) { count += 1; - processor.process(SUBSCRIPTION_TAG, this.subscription); + processor.process(SUBSCRIPTION_TAG, this.builder.subscription); } } if (mode == TagTraverseMode.GroupOnly || mode == TagTraverseMode.All) { count += 1; - processor.process(MANUAL_TAG, this.isManual.toString()); + processor.process(MANUAL_TAG, this.builder.isManual.toString()); - if (this.datasetName != null) { + if (this.builder.datasetName != null) { count += 1; - processor.process(DATASET_NAME_TAG, this.datasetName); + processor.process(DATASET_NAME_TAG, this.builder.datasetName); } - if (this.datasetNamespace != null) { + if (this.builder.datasetNamespace != null) { count += 1; - processor.process(DATASET_NAMESPACE_TAG, this.datasetNamespace); + processor.process(DATASET_NAMESPACE_TAG, this.builder.datasetNamespace); } } if (mode == TagTraverseMode.ValueOnly || mode == TagTraverseMode.All) { - if (this.hasRoutingKey != null) { + count += 1; + processor.process(HAS_ROUTING_KEY_TAG, this.builder.hasRoutingKey.toString()); + + if (this.builder.consumerGroup != null) { + count += 1; + processor.process(CONSUMER_GROUP_TAG, this.builder.consumerGroup); + } + + if (this.builder.group != null) { count += 1; - processor.process(HAS_ROUTING_KEY_TAG, this.hasRoutingKey.toString()); + processor.process(GROUP_TAG, this.builder.group); } - if (this.kafkaClusterId != null) { + + if (this.builder.kafkaClusterId != null) { count += 1; - processor.process(KAFKA_CLUSTER_ID_TAG, this.kafkaClusterId); + processor.process(KAFKA_CLUSTER_ID_TAG, 
this.builder.kafkaClusterId); } - if (this.partition != null) { + if (this.builder.partition != null) { count += 1; - processor.process(PARTITION_TAG, this.partition); + processor.process(PARTITION_TAG, this.builder.partition); } } @@ -204,55 +161,55 @@ public int forEachTag(DataStreamsTagsProcessor processor, TagTraverseMode mode) } public Direction getDirection() { - return direction; + return this.builder.direction; } public String getTopic() { - return topic; + return this.builder.topic; } public String getType() { - return type; + return this.builder.type; } public Boolean isManual() { - return isManual; + return this.builder.isManual; } public String getBus() { - return bus; + return this.builder.bus; } public String getExchange() { - return exchange; + return this.builder.exchange; } public String getSubscription() { - return subscription; + return this.builder.subscription; } public String getDatasetName() { - return datasetName; + return this.builder.datasetName; } public String getDatasetNamespace() { - return datasetNamespace; + return this.builder.datasetNamespace; } public String getGroup() { - return group; + return this.builder.group; } public String getPartition() { - return partition; + return this.builder.partition; } public String getKafkaClusterId() { - return kafkaClusterId; + return this.builder.kafkaClusterId; } public boolean getHasRoutingKey() { - return hasRoutingKey; + return this.builder.hasRoutingKey; } public long getHash() { @@ -263,10 +220,6 @@ public long getAggregationHash() { return aggregationHash; } - public long getCompleteHash() { - return completeHash; - } - public int getSize() { return size; } @@ -284,42 +237,44 @@ public boolean equals(Object o) { public String toString() { return "DataStreamsTags{" + "bus='" - + bus + + this.builder.bus + "," + ", direction=" - + direction + + this.builder.direction + "," + ", exchange='" - + exchange + + this.builder.exchange + "," + ", topic='" - + topic + + this.builder.topic + "," + ", type='" - + type + + this.builder.type + "," + ", subscription='" - + subscription + + this.builder.subscription + "," + ", datasetName='" - + datasetName + + this.builder.datasetName + "," + ", datasetNamespace='" - + datasetNamespace + + this.builder.datasetNamespace + "," + ", isManual=" - + isManual + + this.builder.isManual + ", group='" - + group + + this.builder.group + + ", consumerGroup='" + + this.builder.consumerGroup + "," + ", hasRoutingKey='" - + hasRoutingKey + + this.builder.hasRoutingKey + "," + ", kafkaClusterId='" - + kafkaClusterId + + this.builder.kafkaClusterId + "," + ", partition='" - + partition + + this.builder.partition + "," + ", hash=" + hash diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsBuilder.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsBuilder.java new file mode 100644 index 00000000000..8732f928d87 --- /dev/null +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsBuilder.java @@ -0,0 +1,95 @@ +package datadog.trace.api.datastreams; + +public class DataStreamsTagsBuilder { + // hash tags + protected String bus; + protected DataStreamsTags.Direction direction; + protected String exchange; + protected String topic; + protected String type; + protected String subscription; + // additional grouping tags + protected String datasetName; + protected String datasetNamespace; + protected Boolean isManual; + // informational tags + protected String group; + protected String consumerGroup; + protected Boolean 
hasRoutingKey;
+  protected String kafkaClusterId;
+  protected String partition;
+
+  public DataStreamsTagsBuilder withBus(String bus) {
+    this.bus = bus;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withDirection(DataStreamsTags.Direction direction) {
+    this.direction = direction;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withExchange(String exchange) {
+    this.exchange = exchange;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withTopic(String topic) {
+    this.topic = topic;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withType(String type) {
+    this.type = type;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withSubscription(String subscription) {
+    this.subscription = subscription;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withDatasetName(String datasetName) {
+    this.datasetName = datasetName;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withDatasetNamespace(String datasetNamespace) {
+    this.datasetNamespace = datasetNamespace;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withManual(Boolean isManual) {
+    this.isManual = isManual;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withGroup(String group) {
+    this.group = group;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withConsumerGroup(String consumerGroup) {
+    this.consumerGroup = consumerGroup;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withHasRoutingKey(Boolean hasRoutingKey) {
+    this.hasRoutingKey = hasRoutingKey;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withKafkaClusterId(String kafkaClusterId) {
+    this.kafkaClusterId = kafkaClusterId;
+    return this;
+  }
+
+  public DataStreamsTagsBuilder withPartition(String partition) {
+    this.partition = partition;
+    return this;
+  }
+
+  public DataStreamsTags build() {
+    return new DataStreamsTags(this);
+  }
+}

From 018b9d5203a9eec15dba256ebb81ed6730b0c5a5 Mon Sep 17 00:00:00 2001
From: Igor Kravchenko 
Date: Fri, 11 Jul 2025 16:17:43 -0500
Subject: [PATCH 03/29] Removed tests which are no longer needed

---
 .../datastreams/DataSetHashBuilderTest.groovy | 22 -------------------
 1 file changed, 22 deletions(-)
 delete mode 100644 dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataSetHashBuilderTest.groovy

diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataSetHashBuilderTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataSetHashBuilderTest.groovy
deleted file mode 100644
index 8ae9cee4405..00000000000
--- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataSetHashBuilderTest.groovy
+++ /dev/null
@@ -1,22 +0,0 @@
-package datadog.trace.core.datastreams
-
-import datadog.trace.core.test.DDCoreSpecification
-
-class DataSetHashBuilderTest extends DDCoreSpecification {
-
-  def "Dataset hash generation"() {
-    given:
-    var tag = "ds.namespace=s3://my_bucket"
-    var builderOne = new DefaultPathwayContext.DataSetHashBuilder()
-    builderOne.addValue(tag)
-
-    var builderTwo = new DefaultPathwayContext.DataSetHashBuilder()
-    builderTwo.addValue(tag)
-
-    expect:
-    // hashing should be consistent
-    assert builderOne.addValue("0") == builderTwo.addValue("0")
-    // different parent hashes should produce different results
-    assert builderOne.addValue("1") != builderTwo.addValue("0")
-  }
-}
From dba7d11ed907b48d0f6fd829ffcf8f474980f0dd Mon Sep 17 00:00:00 2001
From: Igor Kravchenko 
Date: Fri, 11 Jul 2025 16:34:32 -0500
Subject: [PATCH 04/29] Fixed some more tests

---
 .../datastreams/DataStreamsWritingTest.groovy | 20 ++++++++++---------
.../groovy/DataStreamsIntegrationTest.groovy | 8 +++++++- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy index e9bf3803e1e..906b3825b53 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy @@ -7,6 +7,7 @@ import datadog.trace.api.Config import datadog.trace.api.ProcessTags import datadog.trace.api.TraceConfig import datadog.trace.api.WellKnownTags +import datadog.trace.api.datastreams.DataStreamsTagsBuilder import datadog.trace.api.time.ControllableTimeSource import datadog.trace.api.datastreams.StatsPoint import datadog.trace.core.DDTraceCoreInfo @@ -82,8 +83,9 @@ class DataStreamsWritingTest extends DDCoreSpecification { def dataStreams = new DefaultDataStreamsMonitoring(fakeConfig, sharedCommObjects, timeSource, { traceConfig }) dataStreams.start() dataStreams.setThreadServiceName(serviceNameOverride) - dataStreams.add(new StatsPoint([], 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, serviceNameOverride)) - dataStreams.trackBacklog(new LinkedHashMap<>(["partition": "1", "topic": "testTopic", "type": "kafka_produce"]), 130) + dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().build(), 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, serviceNameOverride)) + def tags = new DataStreamsTagsBuilder().withPartition("1").withTopic("testTopic").withType("kafka_produce").build() + dataStreams.trackBacklog(tags, 130) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) // force flush dataStreams.report() @@ -140,15 +142,15 @@ class DataStreamsWritingTest extends DDCoreSpecification { when: def dataStreams = new DefaultDataStreamsMonitoring(fakeConfig, sharedCommObjects, timeSource, { traceConfig }) dataStreams.start() - dataStreams.add(new StatsPoint([], 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, null)) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) - dataStreams.trackBacklog(new LinkedHashMap<>(["partition": "1", "topic": "testTopic", "type": "kafka_produce"]), 100) - dataStreams.trackBacklog(new LinkedHashMap<>(["partition": "1", "topic": "testTopic", "type": "kafka_produce"]), 130) + dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().build(), 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().withType("testType").withGroup("testGroup").withTopic("testTopic").build(), 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.trackBacklog(new DataStreamsTagsBuilder().withPartition("1").withTopic("testTopic").withType("kafka_produce").build(), 100) + dataStreams.trackBacklog(new DataStreamsTagsBuilder().withPartition("1").withTopic("testTopic").withType("kafka_produce").build(), 130) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS - 100l) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(10), SECONDS.toNanos(10), 10, null)) + dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().withGroup("testGroup").withTopic("testTopic").withType("testType").build(), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(10), SECONDS.toNanos(10), 10, null)) 
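+    // same tags as the 'testGroup' point above, added 100ns before the first bucket duration elapses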
timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(5), SECONDS.toNanos(5), 5, null)) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic2"], 3, 4, 6, timeSource.currentTimeNanos, SECONDS.toNanos(2), 0, 2, null)) + dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().withGroup("testGroup").withTopic("testTopic").withType("testType").build(), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(5), SECONDS.toNanos(5), 5, null)) + dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().withGroup("testGroup").withTopic("testTopic2").withType("testType").build(), 3, 4, 6, timeSource.currentTimeNanos, SECONDS.toNanos(2), 0, 2, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.close() diff --git a/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy b/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy index 526fa6b785f..7a41bd1d11e 100644 --- a/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy +++ b/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy @@ -3,6 +3,7 @@ import datadog.communication.ddagent.SharedCommunicationObjects import datadog.communication.http.OkHttpUtils import datadog.trace.api.Config import datadog.trace.api.TraceConfig +import datadog.trace.api.datastreams.DataStreamsTagsBuilder import datadog.trace.api.time.ControllableTimeSource import datadog.trace.api.datastreams.StatsPoint import datadog.trace.common.metrics.EventListener @@ -46,7 +47,12 @@ class DataStreamsIntegrationTest extends AbstractTraceAgentTest { when: def dataStreams = new DefaultDataStreamsMonitoring(sink, sharedCommunicationObjects.featuresDiscovery(Config.get()), timeSource, { traceConfig }, Config.get()) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tags = new DataStreamsTagsBuilder() + .withTopic("testTopic") + .withGroup("testGroup") + .withType("testType") + .build() + dataStreams.add(new StatsPoint(tags, 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(Config.get().getDataStreamsBucketDurationNanoseconds()) dataStreams.report() From 0b96dd0824d0272f8cb6ff66644a15aee9bb9953 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Fri, 11 Jul 2025 17:03:10 -0500 Subject: [PATCH 05/29] Fixed payload writer tests --- .../MsgPackDatastreamsPayloadWriter.java | 19 +++++------- .../datastreams/DataStreamsWritingTest.groovy | 6 ++-- .../api/datastreams/DataStreamsTags.java | 30 ++++++++++++++----- 3 files changed, 32 insertions(+), 23 deletions(-) diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java index 37f5602680e..7bcffb4a289 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java @@ -161,7 +161,8 @@ private void writeBucket(StatsBucket bucket, Writable packer) { Collection groups = bucket.getGroups(); packer.startArray(groups.size()); for (StatsGroup group : groups) { - packer.startMap(6); + boolean firstNode = group.getTags().getSize() == 0; + 
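+      // a group with no tags (the first node of a pathway) is serialized without the EdgeTags field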
packer.startMap(firstNode ? 5 : 6); /* 1 */ packer.writeUTF8(PATHWAY_LATENCY); @@ -183,9 +184,11 @@ private void writeBucket(StatsBucket bucket, Writable packer) { packer.writeUTF8(PARENT_HASH); packer.writeUnsignedLong(group.getParentHash()); - /* 6 */ - packer.writeUTF8(EDGE_TAGS); - writeDataStreamsTags(group.getTags(), packer); + if (!firstNode) { + /* 6 */ + packer.writeUTF8(EDGE_TAGS); + writeDataStreamsTags(group.getTags(), packer); + } } } @@ -204,14 +207,6 @@ private void writeBacklogs( } } - private void writeStringIfNotEmpty(String name, String value, Writable packer) { - if (value == null || value.isEmpty()) { - return; - } - - packer.writeString(name + ":" + value, null); - } - private void writeDataStreamsTags(DataStreamsTags tags, Writable packer) { packer.startArray(tags.getSize()); diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy index 906b3825b53..e333d25725b 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy @@ -228,9 +228,9 @@ class DataStreamsWritingTest extends DDCoreSpecification { assert unpacker.unpackLong() == 2 assert unpacker.unpackString() == "EdgeTags" assert unpacker.unpackArrayHeader() == 3 + assert unpacker.unpackString() == "topic:testTopic" assert unpacker.unpackString() == "type:testType" assert unpacker.unpackString() == "group:testGroup" - assert unpacker.unpackString() == "topic:testTopic" } } @@ -240,9 +240,9 @@ class DataStreamsWritingTest extends DDCoreSpecification { assert unpacker.unpackMapHeader() == 2 assert unpacker.unpackString() == "Tags" assert unpacker.unpackArrayHeader() == 3 - assert unpacker.unpackString() == "partition:1" assert unpacker.unpackString() == "topic:testTopic" assert unpacker.unpackString() == "type:kafka_produce" + assert unpacker.unpackString() == "partition:1" assert unpacker.unpackString() == "Value" assert unpacker.unpackLong() == 130 @@ -271,9 +271,9 @@ class DataStreamsWritingTest extends DDCoreSpecification { assert unpacker.unpackLong() == (hash == 1 ? 2 : 4) assert unpacker.unpackString() == "EdgeTags" assert unpacker.unpackArrayHeader() == 3 + assert unpacker.unpackString() == (hash == 1 ? "topic:testTopic" : "topic:testTopic2") assert unpacker.unpackString() == "type:testType" assert unpacker.unpackString() == "group:testGroup" - assert unpacker.unpackString() == (hash == 1 ? 
"topic:testTopic" : "topic:testTopic2") } assert unpacker.unpackString() == "ProductMask" diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 5b94b5be12b..e95bbacb3b1 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -107,10 +107,15 @@ public int forEachTag(DataStreamsTagsProcessor processor, TagTraverseMode mode) processor.process(EXCHANGE_TAG, this.builder.exchange); } - // topic and type are always required, no need to check for null - count += 2; - processor.process(TOPIC_TAG, this.builder.topic); - processor.process(TYPE_TAG, this.builder.type); + if (this.builder.topic != null) { + count += 1; + processor.process(TOPIC_TAG, this.builder.topic); + } + + if (this.builder.type != null) { + count += 1; + processor.process(TYPE_TAG, this.builder.type); + } if (this.builder.subscription != null) { count += 1; @@ -119,8 +124,10 @@ public int forEachTag(DataStreamsTagsProcessor processor, TagTraverseMode mode) } if (mode == TagTraverseMode.GroupOnly || mode == TagTraverseMode.All) { - count += 1; - processor.process(MANUAL_TAG, this.builder.isManual.toString()); + if (this.builder.isManual != null) { + count += 1; + processor.process(MANUAL_TAG, this.builder.isManual.toString()); + } if (this.builder.datasetName != null) { count += 1; @@ -134,8 +141,10 @@ public int forEachTag(DataStreamsTagsProcessor processor, TagTraverseMode mode) } if (mode == TagTraverseMode.ValueOnly || mode == TagTraverseMode.All) { - count += 1; - processor.process(HAS_ROUTING_KEY_TAG, this.builder.hasRoutingKey.toString()); + if (this.builder.hasRoutingKey != null) { + count += 1; + processor.process(HAS_ROUTING_KEY_TAG, this.builder.hasRoutingKey.toString()); + } if (this.builder.consumerGroup != null) { count += 1; @@ -233,6 +242,11 @@ public boolean equals(Object o) { return this.completeHash == that.completeHash; } + @Override + public int hashCode() { + return Long.hashCode(this.completeHash); + } + @Override public String toString() { return "DataStreamsTags{" From 4c28cbb64005a4d7481fd3eca66e7b7aedbb8c5e Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Fri, 11 Jul 2025 17:46:01 -0500 Subject: [PATCH 06/29] Fixed http tests --- .../agent/test/base/HttpServerTest.groovy | 98 +++++++------------ 1 file changed, 34 insertions(+), 64 deletions(-) diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy index 452373a6d91..0e7bb07e1b1 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy @@ -106,10 +106,7 @@ import static org.junit.Assume.assumeTrue abstract class HttpServerTest extends WithHttpServer { public static final Logger SERVER_LOGGER = LoggerFactory.getLogger("http-server") - protected static final DSM_EDGE_TAGS = DataStreamsContext.forHttpServer().sortedTags().collect { - key, value -> - return key + ":" + value - } + protected static final DSM_EDGE_TAGS = DataStreamsContext.forHttpServer().tags() static { try { ((ch.qos.logback.classic.Logger) SERVER_LOGGER).setLevel(Level.DEBUG) @@ -640,8 +637,8 @@ abstract class HttpServerTest extends WithHttpServer { if 
(isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
+        tags.getSize() == DSM_EDGE_TAGS.getSize()
       }
     }
@@ -684,8 +681,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
@@ -730,8 +726,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
@@ -759,7 +754,7 @@ abstract class HttpServerTest extends WithHttpServer {
     assertTraces(1) {
       trace(spanCount(SUCCESS)) {
        sortSpansByStart()
-        serverSpan(it, null, null, method, SUCCESS, tags)
+        serverSpan(it, null, null, method, SUCCESS, spanTags)
        if (hasHandlerSpan()) {
          handlerSpan(it)
        }
@@ -774,13 +769,12 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
 
     where:
-    method | body | header                          | value | tags
+    method | body | header                          | value | spanTags
     'GET'  | null | 'x-datadog-test-both-header'    | 'foo' | ['both_header_tag': 'foo']
     'GET'  | null | 'x-datadog-test-request-header' | 'bar' | ['request_header_tag': 'bar']
   }
@@ -794,7 +788,7 @@ abstract class HttpServerTest extends WithHttpServer {
     def body = null
     def header = IG_RESPONSE_HEADER
     def mapping = 'mapped_response_header_tag'
-    def tags = ['mapped_response_header_tag': "$IG_RESPONSE_HEADER_VALUE"]
+    def spanTags = ['mapped_response_header_tag': "$IG_RESPONSE_HEADER_VALUE"]
 
     injectSysConfig(HTTP_SERVER_TAG_QUERY_STRING, "true")
     injectSysConfig(RESPONSE_HEADER_TAGS, "$header:$mapping")
@@ -813,7 +807,7 @@ abstract class HttpServerTest extends WithHttpServer {
     assertTraces(1) {
       trace(spanCount(endpoint)) {
        sortSpansByStart()
-        serverSpan(it, null, null, method, endpoint, tags)
+        serverSpan(it, null, null, method, endpoint, spanTags)
        if (hasHandlerSpan()) {
          handlerSpan(it, endpoint)
        }
@@ -828,8 +822,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
   }
@@ -871,8 +864,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
@@ -926,8 +918,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
@@ -974,8 +965,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
      StatsGroup first = 
TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } } @@ -1003,8 +993,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } } @@ -1047,8 +1036,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } @@ -1094,8 +1082,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } } @@ -1135,8 +1122,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } } @@ -1178,8 +1164,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } } @@ -1220,8 +1205,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } } @@ -1262,8 +1246,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } } @@ -1303,8 +1286,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } } @@ -1358,8 +1340,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } @@ -1405,8 +1386,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + tags == DSM_EDGE_TAGS } } } @@ -1438,8 +1418,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - 
edgeTags.containsAll(DSM_EDGE_TAGS)
-          edgeTags.size() == DSM_EDGE_TAGS.size()
+          tags == DSM_EDGE_TAGS
        }
      }
    }
@@ -1471,8 +1450,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
   }
@@ -1506,8 +1484,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
   }
@@ -1539,8 +1516,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
   }
@@ -1588,8 +1564,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
@@ -1632,8 +1607,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
   }
@@ -1671,8 +1645,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
   }
@@ -1708,8 +1681,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
   }
@@ -1765,8 +1737,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
   }
@@ -1818,8 +1789,7 @@ abstract class HttpServerTest extends WithHttpServer {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(DSM_EDGE_TAGS)
-        edgeTags.size() == DSM_EDGE_TAGS.size()
+        tags == DSM_EDGE_TAGS
       }
     }
From 6836f499d80409716978e98da4b913a1e00f20cc Mon Sep 17 00:00:00 2001
From: Igor Kravchenko 
Date: Mon, 14 Jul 2025 17:41:06 -0500
Subject: [PATCH 07/29] Removed DataStreamsTagsBuilder, updated all integrations and tests

---
 .../grpc/client/GrpcClientDecorator.java | 6 +-
 .../grpc/server/GrpcServerDecorator.java | 8 +-
 .../eventbridge/EventBridgeInterceptor.java | 7 +-
 .../aws/v0/AwsSdkClientDecorator.java | 21 +-
 .../aws/v0/TracingRequestHandler.java | 7 +-
 .../aws/v2/AwsSdkClientDecorator.java | 29 +-
 .../aws/v1/sns/SnsInterceptor.java | 7 +-
 .../aws/v2/sns/SnsInterceptor.java | 7 +-
.../aws/v1/sqs/SqsInterceptor.java | 7 +- .../aws/v1/sqs/TracingIterator.java | 7 +- .../aws/v2/sqs/SqsInterceptor.java | 8 +- .../aws/v2/sqs/TracingIterator.java | 8 +- .../googlepubsub/PubSubDecorator.java | 8 +- .../PublisherInstrumentation.java | 9 +- .../grpc/client/GrpcClientDecorator.java | 6 +- .../grpc/server/GrpcServerDecorator.java | 6 +- .../ConsumerCoordinatorInstrumentation.java | 14 +- .../kafka_clients/KafkaProducerCallback.java | 14 +- .../KafkaProducerInstrumentation.java | 9 +- .../kafka_clients/TracingIterator.java | 11 +- .../ConsumerCoordinatorAdvice.java | 14 +- .../DDOffsetCommitCallback.java | 15 +- .../KafkaProducerCallback.java | 13 +- .../kafka_clients38/ProducerAdvice.java | 9 +- .../kafka_clients38/TracingIterator.java | 11 +- .../KafkaStreamTaskInstrumentation.java | 17 +- .../amqp/RabbitChannelInstrumentation.java | 13 +- .../rabbitmq/amqp/RabbitDecorator.java | 8 +- .../spark/AbstractDatadogSparkListener.java | 9 +- .../DefaultDataStreamsMonitoring.java | 16 +- .../datastreams/DefaultPathwayContext.java | 16 +- .../MsgPackDatastreamsPayloadWriter.java | 15 +- .../datastreams/DataStreamsWritingTest.groovy | 26 +- .../DefaultDataStreamsMonitoringTest.groovy | 178 +++++--- .../DefaultPathwayContextTest.groovy | 15 +- .../groovy/DataStreamsIntegrationTest.groovy | 3 +- .../api/datastreams/DataStreamsContext.java | 12 +- .../api/datastreams/DataStreamsTags.java | 411 +++++++++++------- .../datastreams/DataStreamsTagsBuilder.java | 95 ---- .../datastreams/DataStreamsTagsProcessor.java | 5 - 40 files changed, 481 insertions(+), 619 deletions(-) delete mode 100644 internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsBuilder.java delete mode 100644 internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsProcessor.java diff --git a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java index aa13260467c..dc4c794efc6 100644 --- a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java +++ b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/client/GrpcClientDecorator.java @@ -12,7 +12,6 @@ import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; @@ -34,10 +33,7 @@ public class GrpcClientDecorator extends ClientDecorator { private static DataStreamsContext createDsmContext() { return DataStreamsContext.fromTags( - new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Outbound) - .withType("grpc") - .build()); + DataStreamsTags.create("grpc", DataStreamsTags.Direction.Outbound)); } public static final GrpcClientDecorator DECORATE = new GrpcClientDecorator(); diff --git a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java 
b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java index fc364131869..944324f3b4d 100644 --- a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java +++ b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/main/java/datadog/trace/instrumentation/armeria/grpc/server/GrpcServerDecorator.java @@ -4,7 +4,6 @@ import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.ErrorPriorities; @@ -30,11 +29,8 @@ public class GrpcServerDecorator extends ServerDecorator { public static final CharSequence COMPONENT_NAME = UTF8BytesString.create("armeria-grpc-server"); public static final CharSequence GRPC_MESSAGE = UTF8BytesString.create("grpc.message"); - private static final DataStreamsTags createServerPathwaySortedTags() { - return new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Inbound) - .withGroup("grpc") - .build(); + private static DataStreamsTags createServerPathwaySortedTags() { + return DataStreamsTags.create("grpc", DataStreamsTags.Direction.Inbound); } public static final DataStreamsTags SERVER_PATHWAY_EDGE_TAGS = createServerPathwaySortedTags(); diff --git a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java index a6229f9d153..41b556e5764 100644 --- a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java @@ -6,7 +6,6 @@ import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.datastreams.PathwayContext; import datadog.trace.bootstrap.InstanceStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -87,11 +86,7 @@ private String getTraceContextToInject( datadog.context.Context context = span; if (traceConfig().isDataStreamsEnabled()) { DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Outbound) - .withType("bus") - .withBus(eventBusName) - .build(); + DataStreamsTags.createWithBus("bus", DataStreamsTags.Direction.Outbound, eventBusName); DataStreamsContext dsmContext = DataStreamsContext.fromTags(tags); context = context.with(dsmContext); } diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java index 5e22b911182..fd6ddb35600 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java +++ 
b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java @@ -15,7 +15,6 @@ import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -255,13 +254,8 @@ && traceConfig().isDataStreamsEnabled()) { && ("GetObjectMetadataRequest".equalsIgnoreCase(awsOperation) || "GetObjectRequest".equalsIgnoreCase(awsOperation))) { DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("s3") - .withDirection(DataStreamsTags.Direction.Inbound) - .withTopic(bucket) - .withDatasetNamespace(bucket) - .withDatasetName(key) - .build(); + DataStreamsTags.createWithDataset( + "s3", DataStreamsTags.Direction.Inbound, bucket, key, bucket); AgentTracer.get() .getDataStreamsMonitoring() .setCheckpoint(span, create(tags, 0, responseSize)); @@ -274,16 +268,7 @@ && traceConfig().isDataStreamsEnabled()) { if (requestSize != null) { payloadSize = (long) requestSize; } - - DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("s3") - .withDirection(DataStreamsTags.Direction.Outbound) - .withTopic(bucket) - .withDatasetNamespace(bucket) - .withDatasetName(key) - .build(); - + DataStreamsTags tags = DataStreamsTags.createWithDataset("s3", DataStreamsTags.Direction.Outbound, bucket, key, bucket); AgentTracer.get() .getDataStreamsMonitoring() .setCheckpoint(span, create(tags, 0, payloadSize)); diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java index b0774cd0a93..07b872a1551 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/TracingRequestHandler.java @@ -110,12 +110,7 @@ && traceConfig().isDataStreamsEnabled() GetterAccess.of(response.getAwsResponse()).getRecords(response.getAwsResponse()); if (null != records) { DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("kinesis") - .withDirection(DataStreamsTags.Direction.Inbound) - .withTopic(streamArn) - .build(); - + DataStreamsTags.create("kinesis", DataStreamsTags.Direction.Inbound, streamArn); for (Object record : records) { Date arrivalTime = GetterAccess.of(record).getApproximateArrivalTimestamp(record); AgentDataStreamsMonitoring dataStreamsMonitoring = diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java index 54a509ad8a4..c5aa03bf64f 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/main/java/datadog/trace/instrumentation/aws/v2/AwsSdkClientDecorator.java @@ -11,7 +11,6 @@ import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.AgentDataStreamsMonitoring; import 
datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.datastreams.PathwayContext; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.InstanceStore; @@ -334,12 +333,8 @@ public AgentSpan onSdkResponse( List records = (List) recordsRaw; if (!records.isEmpty()) { DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("kinesis") - .withDirection(DataStreamsTags.Direction.Inbound) - .withTopic(streamArn) - .build(); - + DataStreamsTags.create( + "kinesis", DataStreamsTags.Direction.Inbound, streamArn); if (null == kinesisApproximateArrivalTimestampField) { Optional> maybeField = records.get(0).sdkFields().stream() @@ -383,14 +378,8 @@ public AgentSpan onSdkResponse( if (key != null && bucket != null && awsOperation != null) { if ("GetObject".equalsIgnoreCase(awsOperation)) { DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Inbound) - .withType("s3") - .withDatasetName(key) - .withDatasetNamespace(bucket) - .withTopic(bucket) - .build(); - + DataStreamsTags.createWithDataset( + "s3", DataStreamsTags.Direction.Inbound, bucket, key, bucket); AgentTracer.get() .getDataStreamsMonitoring() .setCheckpoint(span, create(tags, 0, responseSize)); @@ -404,14 +393,8 @@ public AgentSpan onSdkResponse( } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("s3") - .withDirection(DataStreamsTags.Direction.Outbound) - .withDatasetName(key) - .withDatasetNamespace(bucket) - .withTopic(bucket) - .build(); - + DataStreamsTags.createWithDataset( + "s3", DataStreamsTags.Direction.Outbound, bucket, key, bucket); AgentTracer.get() .getDataStreamsMonitoring() .setCheckpoint(span, create(tags, 0, payloadSize)); diff --git a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sns/SnsInterceptor.java b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sns/SnsInterceptor.java index be56c9b864c..0b991bdef8e 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sns/SnsInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sns/SnsInterceptor.java @@ -13,7 +13,6 @@ import datadog.context.Context; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.ContextStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; @@ -112,10 +111,6 @@ private AgentSpan newSpan(AmazonWebServiceRequest request) { } private DataStreamsTags getTags(String snsTopicName) { - return new DataStreamsTagsBuilder() - .withType("sns") - .withDirection(DataStreamsTags.Direction.Outbound) - .withTopic(snsTopicName) - .build(); + return DataStreamsTags.create("sns", DataStreamsTags.Direction.Outbound, snsTopicName); } } diff --git a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sns/SnsInterceptor.java b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sns/SnsInterceptor.java index 7a6ee008cf7..3a10aa085e4 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sns/SnsInterceptor.java +++ 
b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sns/SnsInterceptor.java @@ -6,7 +6,6 @@ import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstanceStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import java.nio.charset.StandardCharsets; @@ -104,10 +103,6 @@ public SdkRequest modifyRequest( } private DataStreamsTags getTags(String snsTopicName) { - return new DataStreamsTagsBuilder() - .withType("sns") - .withDirection(DataStreamsTags.Direction.Outbound) - .withTopic(snsTopicName) - .build(); + return DataStreamsTags.create("sns", DataStreamsTags.Direction.Outbound, snsTopicName); } } diff --git a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsInterceptor.java b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsInterceptor.java index 18ef0f5dcbd..ef6c1e516a1 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/SqsInterceptor.java @@ -18,7 +18,6 @@ import datadog.context.propagation.Propagators; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.ContextStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import java.util.ArrayList; @@ -93,10 +92,6 @@ private AgentSpan newSpan(AmazonWebServiceRequest request) { } private static DataStreamsTags getTags(String queueUrl) { - return new DataStreamsTagsBuilder() - .withType("sqs") - .withDirection(DataStreamsTags.Direction.Outbound) - .withTopic(urlFileName(queueUrl)) - .build(); + return DataStreamsTags.create("sqs", DataStreamsTags.Direction.Outbound, urlFileName(queueUrl)); } } diff --git a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/TracingIterator.java b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/TracingIterator.java index 8cf74530733..25a82dbac65 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/TracingIterator.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/main/java/datadog/trace/instrumentation/aws/v1/sqs/TracingIterator.java @@ -17,7 +17,6 @@ import com.amazonaws.services.sqs.model.Message; import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; @@ -87,11 +86,7 @@ protected void startNewMessageSpan(Message message) { AgentSpan span = startSpan(SQS_INBOUND_OPERATION, batchContext); DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("sqs") - .withDirection(DataStreamsTags.Direction.Inbound) - .withTopic(urlFileName(queueUrl)) - .build(); + DataStreamsTags.create("sqs", DataStreamsTags.Direction.Inbound, urlFileName(queueUrl)); 
AgentTracer.get().getDataStreamsMonitoring().setCheckpoint(span, create(tags, 0, 0)); CONSUMER_DECORATE.afterStart(span); diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsInterceptor.java b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsInterceptor.java index 4ed8e6a81a8..399cd889a31 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/SqsInterceptor.java @@ -9,7 +9,6 @@ import datadog.context.propagation.Propagators; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstanceStore; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import java.util.ArrayList; @@ -94,12 +93,7 @@ private datadog.context.Context getContext( AgentSpan span = executionAttributes.getAttribute(SPAN_ATTRIBUTE); DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Outbound) - .withTopic(urlFileName(queueUrl)) - .withType("sqs") - .build(); - + DataStreamsTags.create("sqs", DataStreamsTags.Direction.Outbound, urlFileName(queueUrl)); DataStreamsContext dsmContext = DataStreamsContext.fromTags(tags); return span.with(dsmContext); } diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java index a3aafedadc7..3a00836e44d 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/sqs/TracingIterator.java @@ -16,7 +16,6 @@ import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; @@ -89,12 +88,7 @@ protected void startNewMessageSpan(Message message) { AgentSpan span = startSpan(SQS_INBOUND_OPERATION, batchContext); DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("sqs") - .withDirection(DataStreamsTags.Direction.Inbound) - .withTopic(urlFileName(queueUrl)) - .build(); - + DataStreamsTags.create("sqs", DataStreamsTags.Direction.Inbound, urlFileName(queueUrl)); AgentTracer.get().getDataStreamsMonitoring().setCheckpoint(span, create(tags, 0, 0)); CONSUMER_DECORATE.afterStart(span); diff --git a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java index 49614e5b6cb..07085db704c 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java +++ b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java @@ -11,7 
+11,6 @@ import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; @@ -131,11 +130,8 @@ public AgentSpan onConsume(final PubsubMessage message, final String subscriptio final AgentSpan span = startSpan(PUBSUB_CONSUME, spanContext); final CharSequence parsedSubscription = extractSubscription(subscription); DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("google-pubsub") - .withDirection(DataStreamsTags.Direction.Inbound) - .withTopic(parsedSubscription.toString()) - .build(); + DataStreamsTags.create( + "google-pubsub", DataStreamsTags.Direction.Inbound, parsedSubscription.toString()); final Timestamp publishTime = message.getPublishTime(); // FIXME: use full nanosecond resolution when this method will accept nanos AgentTracer.get() diff --git a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PublisherInstrumentation.java b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PublisherInstrumentation.java index e1131e1a67a..7c51970ffc8 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PublisherInstrumentation.java +++ b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PublisherInstrumentation.java @@ -20,7 +20,6 @@ import datadog.trace.agent.tooling.InstrumenterModule; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.java.concurrent.ExcludeFilter; @@ -73,12 +72,8 @@ public static AgentScope before( PRODUCER_DECORATE.onProduce(span, topicName); DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("google-pubsub") - .withDirection(DataStreamsTags.Direction.Outbound) - .withTopic(topicName.toString()) - .build(); - + DataStreamsTags.create( + "google-pubsub", DataStreamsTags.Direction.Outbound, topicName.toString()); PubsubMessage.Builder builder = msg.toBuilder(); DataStreamsContext dsmContext = DataStreamsContext.fromTags(tags); defaultPropagator().inject(span.with(dsmContext), builder, SETTER); diff --git a/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/client/GrpcClientDecorator.java b/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/client/GrpcClientDecorator.java index f9846ea2321..c35da6a0e34 100644 --- a/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/client/GrpcClientDecorator.java +++ b/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/client/GrpcClientDecorator.java @@ -12,7 +12,6 @@ import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import 
datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; @@ -34,10 +33,7 @@ public class GrpcClientDecorator extends ClientDecorator { private static DataStreamsContext createDsmContext() { return DataStreamsContext.fromTags( - new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Outbound) - .withType("grpc") - .build()); + DataStreamsTags.create("grpc", DataStreamsTags.Direction.Outbound)); } public static final GrpcClientDecorator DECORATE = new GrpcClientDecorator(); diff --git a/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/server/GrpcServerDecorator.java b/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/server/GrpcServerDecorator.java index 094bee7e3e2..cafd0d64ad7 100644 --- a/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/server/GrpcServerDecorator.java +++ b/dd-java-agent/instrumentation/grpc-1.5/src/main/java/datadog/trace/instrumentation/grpc/server/GrpcServerDecorator.java @@ -4,7 +4,6 @@ import datadog.trace.api.cache.DDCache; import datadog.trace.api.cache.DDCaches; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.ErrorPriorities; @@ -31,10 +30,7 @@ public class GrpcServerDecorator extends ServerDecorator { public static final CharSequence GRPC_MESSAGE = UTF8BytesString.create("grpc.message"); private static DataStreamsTags createServerPathwaySortedTags() { - return new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Inbound) - .withType("grpc") - .build(); + return DataStreamsTags.create("grpc", DataStreamsTags.Direction.Inbound); } public static final DataStreamsTags SERVER_PATHWAY_EDGE_TAGS = createServerPathwaySortedTags(); diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/ConsumerCoordinatorInstrumentation.java b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/ConsumerCoordinatorInstrumentation.java index 8f09f5c240f..67712ff706f 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/ConsumerCoordinatorInstrumentation.java +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/ConsumerCoordinatorInstrumentation.java @@ -8,7 +8,6 @@ import datadog.trace.agent.tooling.Instrumenter; import datadog.trace.agent.tooling.InstrumenterModule; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.util.HashMap; @@ -103,13 +102,12 @@ public static void trackCommitOffset( } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withConsumerGroup(consumerGroup) - .withKafkaClusterId(clusterId) - .withPartition(String.valueOf(entry.getKey().partition())) - .withTopic(entry.getKey().topic()) - .withType("kafka_commit") - .build(); + DataStreamsTags.createWithPartition( + "kafka_commit", + entry.getKey().topic(), + String.valueOf(entry.getKey().partition()), + clusterId, + consumerGroup); 
AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, entry.getValue().offset());
     }
   }
diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerCallback.java b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerCallback.java
index 453e855b2b6..83962b9c56e 100644
--- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerCallback.java
+++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerCallback.java
@@ -4,7 +4,6 @@
 import static datadog.trace.instrumentation.kafka_clients.KafkaDecorator.PRODUCER_DECORATE;

 import datadog.trace.api.datastreams.DataStreamsTags;
-import datadog.trace.api.datastreams.DataStreamsTagsBuilder;
 import datadog.trace.bootstrap.instrumentation.api.AgentScope;
 import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
 import datadog.trace.bootstrap.instrumentation.api.AgentTracer;
@@ -48,13 +47,12 @@ public void onCompletion(final RecordMetadata metadata, final Exception exceptio
     }

     DataStreamsTags tags =
-        new DataStreamsTagsBuilder()
-            .withType("kafka_produce")
-            .withKafkaClusterId(clusterId)
-            .withTopic(metadata.topic())
-            .withPartition(String.valueOf(metadata.partition()))
-            .build();
-
+        DataStreamsTags.createWithPartition(
+            "kafka_produce",
+            metadata.topic(),
+            String.valueOf(metadata.partition()),
+            clusterId,
+            null);
     AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, metadata.offset());
   }
 }
diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerInstrumentation.java b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerInstrumentation.java
index 0dfd2576c2f..7bb21a7e893 100644
--- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerInstrumentation.java
+++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/KafkaProducerInstrumentation.java
@@ -27,7 +27,6 @@
 import datadog.trace.api.Config;
 import datadog.trace.api.datastreams.DataStreamsContext;
 import datadog.trace.api.datastreams.DataStreamsTags;
-import datadog.trace.api.datastreams.DataStreamsTagsBuilder;
 import datadog.trace.api.datastreams.StatsPoint;
 import datadog.trace.bootstrap.InstrumentationContext;
 import datadog.trace.bootstrap.instrumentation.api.AgentScope;
@@ -142,12 +141,8 @@ public static AgentScope onEnter(
       setter = TextMapInjectAdapter.SETTER;
     }
     DataStreamsTags tags =
-        new DataStreamsTagsBuilder()
-            .withType("kafka")
-            .withDirection(DataStreamsTags.Direction.Outbound)
-            .withKafkaClusterId(clusterId)
-            .withTopic(record.topic())
-            .build();
+        DataStreamsTags.createWithClusterId(
+            "kafka", DataStreamsTags.Direction.Outbound, record.topic(), clusterId);
     try {
       defaultPropagator().inject(span, record.headers(), setter);
       if (STREAMING_CONTEXT.isDisabledForTopic(record.topic())
diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/TracingIterator.java b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/TracingIterator.java
index cd33f23ab38..832a5097bb5 100644
---
a/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/TracingIterator.java +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/main/java/datadog/trace/instrumentation/kafka_clients/TracingIterator.java @@ -21,7 +21,6 @@ import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; @@ -97,14 +96,8 @@ protected void startNewRecordSpan(ConsumerRecord val) { } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Inbound) - .withGroup(group) - .withKafkaClusterId(clusterId) - .withTopic(val.topic()) - .withType("kafka") - .build(); - + DataStreamsTags.create( + "kafka", DataStreamsTags.Direction.Inbound, val.topic(), group, clusterId); final long payloadSize = traceConfig().isDataStreamsEnabled() ? computePayloadSizeBytes(val) : 0; if (STREAMING_CONTEXT.isDisabledForTopic(val.topic())) { diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ConsumerCoordinatorAdvice.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ConsumerCoordinatorAdvice.java index 304167fb093..0592aa22855 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ConsumerCoordinatorAdvice.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ConsumerCoordinatorAdvice.java @@ -1,7 +1,6 @@ package datadog.trace.instrumentation.kafka_clients38; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.util.Map; @@ -48,13 +47,12 @@ public static void trackCommitOffset( continue; } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withConsumerGroup(consumerGroup) - .withKafkaClusterId(clusterId) - .withPartition(String.valueOf(entry.getKey().partition())) - .withTopic(entry.getKey().topic()) - .withType("kafka_commit") - .build(); + DataStreamsTags.createWithPartition( + "kafka_commit", + entry.getKey().topic(), + String.valueOf(entry.getKey().partition()), + clusterId, + consumerGroup); AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, entry.getValue().offset()); } } diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/DDOffsetCommitCallback.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/DDOffsetCommitCallback.java index 727a7e4b197..59b4677b7b3 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/DDOffsetCommitCallback.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/DDOffsetCommitCallback.java @@ -1,7 +1,6 @@ package datadog.trace.instrumentation.kafka_clients38; import datadog.trace.api.datastreams.DataStreamsTags; -import 
datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; import java.util.Map; @@ -42,14 +41,12 @@ public void onComplete(Map map, Exception e) } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withConsumerGroup(consumerGroup) - .withKafkaClusterId(clusterId) - .withPartition(String.valueOf(entry.getKey().partition())) - .withTopic(entry.getKey().topic()) - .withType("kafka_commit") - .build(); - + DataStreamsTags.createWithPartition( + "kafka_commit", + entry.getKey().topic(), + String.valueOf(entry.getKey().partition()), + clusterId, + consumerGroup); AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, entry.getValue().offset()); } } diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/KafkaProducerCallback.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/KafkaProducerCallback.java index de57044962b..58c6bbbb8a1 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/KafkaProducerCallback.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/KafkaProducerCallback.java @@ -4,7 +4,6 @@ import static datadog.trace.instrumentation.kafka_clients38.KafkaDecorator.PRODUCER_DECORATE; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; @@ -46,12 +45,12 @@ public void onCompletion(final RecordMetadata metadata, final Exception exceptio return; } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withKafkaClusterId(clusterId) - .withPartition(String.valueOf(metadata.partition())) - .withTopic(metadata.topic()) - .withType("kafka_produce") - .build(); + DataStreamsTags.createWithPartition( + "kafka_produce", + metadata.topic(), + String.valueOf(metadata.partition()), + clusterId, + null); AgentTracer.get().getDataStreamsMonitoring().trackBacklog(tags, metadata.offset()); } } diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ProducerAdvice.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ProducerAdvice.java index 1afd56fcecc..c37d99796da 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ProducerAdvice.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/ProducerAdvice.java @@ -16,7 +16,6 @@ import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -64,12 +63,8 @@ public static AgentScope onEnter( setter = TextMapInjectAdapter.SETTER; } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("kafka") - 
.withDirection(DataStreamsTags.Direction.Outbound) - .withKafkaClusterId(clusterId) - .withTopic(record.topic()) - .build(); + DataStreamsTags.create( + "kafka", DataStreamsTags.Direction.Outbound, record.topic(), null, clusterId); try { defaultPropagator().inject(span, record.headers(), setter); if (STREAMING_CONTEXT.isDisabledForTopic(record.topic()) diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/TracingIterator.java b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/TracingIterator.java index 724adbd76a5..75ef16c865a 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/TracingIterator.java +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/main/java17/datadog/trace/instrumentation/kafka_clients38/TracingIterator.java @@ -16,7 +16,6 @@ import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; import datadog.trace.bootstrap.instrumentation.api.AgentTracer; @@ -97,14 +96,8 @@ protected void startNewRecordSpan(ConsumerRecord val) { } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Inbound) - .withGroup(group) - .withKafkaClusterId(clusterId) - .withTopic(val.topic()) - .withType("kafka") - .build(); - + DataStreamsTags.create( + "kafka", DataStreamsTags.Direction.Inbound, val.topic(), group, clusterId); final long payloadSize = traceConfig().isDataStreamsEnabled() ? Utils.computePayloadSizeBytes(val) : 0; if (StreamingContext.STREAMING_CONTEXT.isDisabledForTopic(val.topic())) { diff --git a/dd-java-agent/instrumentation/kafka-streams-0.11/src/main/java/datadog/trace/instrumentation/kafka_streams/KafkaStreamTaskInstrumentation.java b/dd-java-agent/instrumentation/kafka-streams-0.11/src/main/java/datadog/trace/instrumentation/kafka_streams/KafkaStreamTaskInstrumentation.java index 8674622182c..ffd6a4df88e 100644 --- a/dd-java-agent/instrumentation/kafka-streams-0.11/src/main/java/datadog/trace/instrumentation/kafka_streams/KafkaStreamTaskInstrumentation.java +++ b/dd-java-agent/instrumentation/kafka-streams-0.11/src/main/java/datadog/trace/instrumentation/kafka_streams/KafkaStreamTaskInstrumentation.java @@ -34,7 +34,6 @@ import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.InstrumentationContext; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -252,12 +251,8 @@ public static void start( applicationId = streamTaskContext.getApplicationId(); } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("kafka") - .withDirection(DataStreamsTags.Direction.Inbound) - .withGroup(applicationId) - .withTopic(record.topic()) - .build(); + DataStreamsTags.createWithGroup( + "kafka", DataStreamsTags.Direction.Inbound, applicationId, record.topic()); final long payloadSize = traceConfig().isDataStreamsEnabled() ? 
computePayloadSizeBytes(record.value) : 0; @@ -329,12 +324,8 @@ public static void start( applicationId = streamTaskContext.getApplicationId(); } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("kafka") - .withDirection(DataStreamsTags.Direction.Inbound) - .withGroup(applicationId) - .withTopic(record.topic()) - .build(); + DataStreamsTags.createWithGroup( + "kafka", DataStreamsTags.Direction.Inbound, applicationId, record.topic()); long payloadSize = 0; // we have to go through Object to get the RecordMetadata here because the class of `record` diff --git a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitChannelInstrumentation.java b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitChannelInstrumentation.java index 166b9c98a7e..0308399aee9 100644 --- a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitChannelInstrumentation.java +++ b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitChannelInstrumentation.java @@ -37,7 +37,6 @@ import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.bootstrap.CallDepthThreadLocalMap; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -187,13 +186,11 @@ public static AgentScope setResourceNameAddHeaders( RabbitDecorator.injectTimeInQueueStart(headers); } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Outbound) - .withExchange(exchange) - .withHasRoutingKey(routingKey != null && !routingKey.isEmpty()) - .withType("rabbitmq") - .build(); - + DataStreamsTags.createWithExchange( + "rabbitmq", + DataStreamsTags.Direction.Outbound, + exchange, + routingKey != null && !routingKey.isEmpty()); DataStreamsContext dsmContext = DataStreamsContext.fromTags(tags); defaultPropagator().inject(span.with(dsmContext), headers, SETTER); props = diff --git a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitDecorator.java b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitDecorator.java index 9f4f227aa13..fb4ac5e72dc 100644 --- a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitDecorator.java +++ b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/main/java/datadog/trace/instrumentation/rabbitmq/amqp/RabbitDecorator.java @@ -16,7 +16,6 @@ import com.rabbitmq.client.Envelope; import datadog.trace.api.Config; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.naming.SpanNaming; import datadog.trace.bootstrap.instrumentation.api.AgentScope; import datadog.trace.bootstrap.instrumentation.api.AgentSpan; @@ -248,12 +247,7 @@ public static AgentScope startReceivingSpan( if (null != headers) { DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withDirection(DataStreamsTags.Direction.Inbound) - .withTopic(queue) - .withType("rabbitmq") - .build(); - ; + DataStreamsTags.create("rabbitmq", DataStreamsTags.Direction.Inbound, queue); AgentTracer.get() 
.getDataStreamsMonitoring() .setCheckpoint(span, create(tags, produceMillis, 0)); diff --git a/dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractDatadogSparkListener.java b/dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractDatadogSparkListener.java index 19730951a4a..5ac7f524ce7 100644 --- a/dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractDatadogSparkListener.java +++ b/dd-java-agent/instrumentation/spark/src/main/java/datadog/trace/instrumentation/spark/AbstractDatadogSparkListener.java @@ -8,7 +8,6 @@ import datadog.trace.api.DDTags; import datadog.trace.api.DDTraceId; import datadog.trace.api.datastreams.DataStreamsTags; -import datadog.trace.api.datastreams.DataStreamsTagsBuilder; import datadog.trace.api.sampling.PrioritySampling; import datadog.trace.api.sampling.SamplingMechanism; import datadog.trace.bootstrap.InstanceStore; @@ -1317,12 +1316,8 @@ private static void reportKafkaOffsets( while (allPartitions.hasNext()) { String partition = allPartitions.next(); DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType("kafka_commit") - .withConsumerGroup(appName) - .withTopic(topic) - .withPartition(partition) - .build(); + DataStreamsTags.createWithPartition( + "kafka_commit", topic, partition, null, appName); AgentTracer.get() .getDataStreamsMonitoring() .trackBacklog(tags, topicNode.get(partition).asLong()); diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java index be1502833ee..64ab5f027d2 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java @@ -237,13 +237,7 @@ public void setConsumeCheckpoint(String type, String source, DataStreamsContextC mergePathwayContextIntoSpan(span, carrier); DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType(type) - .withDirection(DataStreamsTags.Direction.Inbound) - .withTopic(source) - .withManual(true) - .build(); - + DataStreamsTags.createManual(type, DataStreamsTags.Direction.Inbound, source); setCheckpoint(span, fromTags(tags)); } @@ -261,13 +255,7 @@ public void setProduceCheckpoint( } DataStreamsTags tags = - new DataStreamsTagsBuilder() - .withType(type) - .withDirection(DataStreamsTags.Direction.Outbound) - .withTopic(target) - .withManual(true) - .build(); - + DataStreamsTags.createManual(type, DataStreamsTags.Direction.Outbound, target); DataStreamsContext dsmContext = fromTags(tags); this.propagator.inject( span.with(dsmContext), carrier, DataStreamsContextCarrierAdapter.INSTANCE); diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java index 6420863a67a..8af0d77d420 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java @@ -120,7 +120,7 @@ public synchronized void setCheckpoint( // loop protection - a node should not be chosen as parent // for a sequential node with the same direction, as this // will cause a `cardinality explosion` for hash / parentHash tag values - DataStreamsTags.Direction direction = 
context.tags().getDirection(); + DataStreamsTags.Direction direction = context.tags().getDirectionValue(); if (direction == previousDirection) { hash = closestOppositeDirectionHash; } else { @@ -259,7 +259,7 @@ private static DefaultPathwayContext decode( long pathwayStartMillis = VarEncodingHelper.decodeSignedVarLong(input); long pathwayStartNanos = TimeUnit.MILLISECONDS.toNanos(pathwayStartMillis); - // Convert the start time to the current JVM's nanoclock + // Convert the start time to the current JVM's nano clock long nowNanos = timeSource.getCurrentTimeNanos(); long nanosSinceStart = nowNanos - pathwayStartNanos; long nowNanoTicks = timeSource.getNanoTicks(); @@ -292,14 +292,6 @@ public PathwayHashBuilder(long baseHash, String serviceNameOverride) { public void addTag(String tag) { hash = FNV64Hash.continueHash(hash, tag, FNV64Hash.Version.v1); } - - public void addValue(long val) { - hash = FNV64Hash.continueHash(hash, DataStreamsTags.longToBytes(val), FNV64Hash.Version.v1); - } - - public long getHash() { - return hash; - } } public static long getBaseHash(WellKnownTags wellKnownTags) { @@ -318,10 +310,6 @@ public static long getBaseHash(WellKnownTags wellKnownTags) { return FNV64Hash.generateHash(builder.toString(), FNV64Hash.Version.v1); } - private long generateNodeHash(PathwayHashBuilder pathwayHashBuilder) { - return pathwayHashBuilder.getHash(); - } - private long generatePathwayHash(long nodeHash, long parentHash) { outputBuffer.clear(); outputBuffer.writeLongLE(nodeHash); diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java index 7bcffb4a289..6ffa166cc1c 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java @@ -161,7 +161,7 @@ private void writeBucket(StatsBucket bucket, Writable packer) { Collection groups = bucket.getGroups(); packer.startArray(groups.size()); for (StatsGroup group : groups) { - boolean firstNode = group.getTags().getSize() == 0; + boolean firstNode = group.getTags().size() == 0; packer.startMap(firstNode ? 
5 : 6); /* 1 */ @@ -208,12 +208,13 @@ private void writeBacklogs( } private void writeDataStreamsTags(DataStreamsTags tags, Writable packer) { - packer.startArray(tags.getSize()); + packer.startArray(tags.size()); - tags.forEachTag( - (name, value) -> { - packer.writeString(name + ":" + value, null); - }, - DataStreamsTags.TagTraverseMode.All); + for (int i = 0; i < tags.size(); i++) { + String val = tags.tagByIndex(i); + if (val != null) { + packer.writeString(val, null); + } + } } } diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy index e333d25725b..400cd91e39b 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy @@ -7,7 +7,7 @@ import datadog.trace.api.Config import datadog.trace.api.ProcessTags import datadog.trace.api.TraceConfig import datadog.trace.api.WellKnownTags -import datadog.trace.api.datastreams.DataStreamsTagsBuilder +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.api.time.ControllableTimeSource import datadog.trace.api.datastreams.StatsPoint import datadog.trace.core.DDTraceCoreInfo @@ -83,9 +83,8 @@ class DataStreamsWritingTest extends DDCoreSpecification { def dataStreams = new DefaultDataStreamsMonitoring(fakeConfig, sharedCommObjects, timeSource, { traceConfig }) dataStreams.start() dataStreams.setThreadServiceName(serviceNameOverride) - dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().build(), 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, serviceNameOverride)) - def tags = new DataStreamsTagsBuilder().withPartition("1").withTopic("testTopic").withType("kafka_produce").build() - dataStreams.trackBacklog(tags, 130) + dataStreams.add(new StatsPoint(DataStreamsTags.create(null, null), 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, serviceNameOverride)) + dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_produce", "testTopic", "1"), 130) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) // force flush dataStreams.report() @@ -107,6 +106,11 @@ class DataStreamsWritingTest extends DDCoreSpecification { assert unpacker.unpackString() == serviceNameOverride } + def getTags(String type, String topic, String partition, String group) { + return DataStreamsTags.createWithPartition(type, topic, partition, null, group) + } + + def "Write bucket to mock server with process tags enabled #processTagsEnabled"() { setup: injectSysConfig(EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED, "$processTagsEnabled") @@ -142,15 +146,15 @@ class DataStreamsWritingTest extends DDCoreSpecification { when: def dataStreams = new DefaultDataStreamsMonitoring(fakeConfig, sharedCommObjects, timeSource, { traceConfig }) dataStreams.start() - dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().build(), 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, null)) - dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().withType("testType").withGroup("testGroup").withTopic("testTopic").build(), 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) - dataStreams.trackBacklog(new DataStreamsTagsBuilder().withPartition("1").withTopic("testTopic").withType("kafka_produce").build(), 100) - dataStreams.trackBacklog(new DataStreamsTagsBuilder().withPartition("1").withTopic("testTopic").withType("kafka_produce").build(), 130) + dataStreams.add(new 
StatsPoint(DataStreamsTags.create(null, null), 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, null))
+    dataStreams.add(new StatsPoint(getTags("testType", "testTopic", null, null), 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null))
+    dataStreams.trackBacklog(getTags("kafka_produce", "testTopic", "1", null), 100)
+    dataStreams.trackBacklog(getTags("kafka_produce", "testTopic", "1", null), 130)
     timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS - 100l)
-    dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().withGroup("testGroup").withTopic("testTopic").withType("testType").build(), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(10), SECONDS.toNanos(10), 10, null))
+    dataStreams.add(new StatsPoint(getTags("testType", "testTopic", null, "testGroup"), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(10), SECONDS.toNanos(10), 10, null))
     timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS)
-    dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().withGroup("testGroup").withTopic("testTopic").withType("testType").build(), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(5), SECONDS.toNanos(5), 5, null))
-    dataStreams.add(new StatsPoint(new DataStreamsTagsBuilder().withGroup("testGroup").withTopic("testTopic2").withType("testType").build(), 3, 4, 6, timeSource.currentTimeNanos, SECONDS.toNanos(2), 0, 2, null))
+    dataStreams.add(new StatsPoint(getTags("testType", "testTopic", null, "testGroup"), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(5), SECONDS.toNanos(5), 5, null))
+    dataStreams.add(new StatsPoint(getTags("testType", "testTopic2", null, "testGroup"), 3, 4, 6, timeSource.currentTimeNanos, SECONDS.toNanos(2), 0, 2, null))
     timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS)
     dataStreams.close()
diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultDataStreamsMonitoringTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultDataStreamsMonitoringTest.groovy
index ffcd15cd0c3..9a048c5a010 100644
--- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultDataStreamsMonitoringTest.groovy
+++ b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultDataStreamsMonitoringTest.groovy
@@ -4,6 +4,7 @@ import datadog.communication.ddagent.DDAgentFeaturesDiscovery
 import datadog.trace.api.Config
 import datadog.trace.api.TraceConfig
 import datadog.trace.api.WellKnownTags
+import datadog.trace.api.datastreams.DataStreamsTags
 import datadog.trace.api.datastreams.StatsPoint
 import datadog.trace.api.experimental.DataStreamsContextCarrier
 import datadog.trace.api.time.ControllableTimeSource
@@ -40,7 +42,7 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification {
     when:
     def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS)
     dataStreams.start()
-    dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 0, 0, 0, timeSource.currentTimeNanos, 0, 0, 0, null))
+    dataStreams.add(new StatsPoint(DataStreamsTags.create("testType", null, "testTopic", "testGroup", null), 0, 0, 0, timeSource.currentTimeNanos, 0, 0, 0, null))
     dataStreams.report()

     then:
@@ -130,7 +132,8 @@ class DefaultDataStreamsMonitoringTest extends
DDCoreSpecification { when: def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -145,8 +148,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 hash == 1 parentHash == 2 } @@ -176,7 +181,8 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { when: def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, bucketDuration) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(bucketDuration) then: @@ -190,8 +196,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 hash == 1 parentHash == 2 } @@ -219,9 +227,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { when: def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 3, 4, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 3, 4, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS - 100l) dataStreams.report() @@ -236,8 +245,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 hash == 1 parentHash == 2 } @@ -265,9 +276,11 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { when: def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, 
wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + def tg2 = DataStreamsTags.create("testType", null, "testTopic2", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic2"], 3, 4, 6, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg2, 3, 4, 6, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS - 100l) dataStreams.close() @@ -282,8 +295,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 hash == 1 parentHash == 2 } @@ -293,8 +308,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic2"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic2" + tags.nonNullSize() == 3 hash == 3 parentHash == 4 } @@ -321,11 +338,11 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { when: def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.trackBacklog(new LinkedHashMap<>(["consumer_group": "testGroup", "partition": "2", "topic": "testTopic", "type": "kafka_commit"]), 23) - dataStreams.trackBacklog(new LinkedHashMap<>(["consumer_group": "testGroup", "partition": "2", "topic": "testTopic", "type": "kafka_commit"]), 24) - dataStreams.trackBacklog(new LinkedHashMap<>(["partition": "2", "topic": "testTopic", "type": "kafka_produce"]), 23) - dataStreams.trackBacklog(new LinkedHashMap<>(["partition": "2", "topic": "testTopic2", "type": "kafka_produce"]), 23) - dataStreams.trackBacklog(new LinkedHashMap<>(["partition": "2", "topic": "testTopic", "type": "kafka_produce"]), 45) + dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_commit", "testTopic", "2", null, "testGroup"), 23) + dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_commit", "testTopic", "2", null, "testGroup"), 24) + dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_produce", "testTopic", "2", null, null), 23) + dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_produce", "testTopic2", "2", null, null), 23) + dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_produce", "testTopic", "2", null, null), 45) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -338,18 +355,17 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { with(payloadWriter.buckets.get(0)) { backlogs.size() == 3 - List<Map.Entry<List<String>, Long>> sortedBacklogs = new ArrayList<>(backlogs) - sortedBacklogs.sort({ it.key.toString() }) - with(sortedBacklogs[0]) { - it.key ==
["consumer_group:testGroup", "partition:2", "topic:testTopic", "type:kafka_commit"] + def list = backlogs.sort({ it.key.toString() }) + with(list[0]) { + it.key == DataStreamsTags.createWithPartition("kafka_commit", "testTopic", "2", null, "testGroup") it.value == 24 } - with(sortedBacklogs[1]) { - it.key == ["partition:2", "topic:testTopic", "type:kafka_produce"] + with(list[1]) { + it.key == DataStreamsTags.createWithPartition("kafka_produce", "testTopic", "2", null, null) it.value == 45 } - with(sortedBacklogs[2]) { - it.key == ["partition:2", "topic:testTopic2", "type:kafka_produce"] + with(list[2]) { + it.key == DataStreamsTags.createWithPartition("kafka_produce", "testTopic2", "2", null, null) it.value == 23 } } @@ -376,9 +392,11 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { when: def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) - timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic2"], 3, 4, 6, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) + timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS*10) + def tg2 = DataStreamsTags.create("testType", null, "testTopic2", "testGroup", null) + dataStreams.add(new StatsPoint(tg2, 3, 4, 6, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -391,10 +409,12 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { with(payloadWriter.buckets.get(0)) { groups.size() == 1 - + groups with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.nonNullSize() == 3 + tags.getType() == "type:testType" + tags.getGroup() == "group:testGroup" + tags.getTopic() == "topic:testTopic" hash == 1 parentHash == 2 } @@ -404,8 +424,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic2"]) - edgeTags.size() == 3 + tags.getType() == "type:testType" + tags.getGroup() == "group:testGroup" + tags.getTopic() == "topic:testTopic2" + tags.nonNullSize() == 3 hash == 3 parentHash == 4 } @@ -431,14 +453,15 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { } when: + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 1, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2, 1, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS - 100l) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 1, timeSource.currentTimeNanos, SECONDS.toNanos(10), SECONDS.toNanos(10), 10, null)) + dataStreams.add(new 
StatsPoint(tg, 1, 2, 1, timeSource.currentTimeNanos, SECONDS.toNanos(10), SECONDS.toNanos(10), 10, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2,1, timeSource.currentTimeNanos, SECONDS.toNanos(5), SECONDS.toNanos(5), 5, null)) - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic2"], 3, 4, 5, timeSource.currentTimeNanos, SECONDS.toNanos(2), 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2,1, timeSource.currentTimeNanos, SECONDS.toNanos(5), SECONDS.toNanos(5), 5, null)) + dataStreams.add(new StatsPoint(tg, 3, 4, 5, timeSource.currentTimeNanos, SECONDS.toNanos(2), 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -453,8 +476,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 hash == 1 parentHash == 2 Math.abs((pathwayLatency.getMaxValue()-10)/10) < 0.01 @@ -470,16 +495,20 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { with(sortedGroups[0]) { hash == 1 parentHash == 2 - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 Math.abs((pathwayLatency.getMaxValue()-5)/5) < 0.01 } with(sortedGroups[1]) { hash == 3 parentHash == 4 - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic2"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 Math.abs((pathwayLatency.getMaxValue()-2)/2) < 0.01 } } @@ -507,7 +536,8 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { when: "reporting points when data streams is not supported" def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -537,7 +567,7 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { timeSource.advance(FEATURE_CHECK_INTERVAL_NANOS) dataStreams.report() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -551,8 +581,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 
3 hash == 1 parentHash == 2 } @@ -583,7 +615,8 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { dataStreams.start() supportsDataStreaming = false dataStreams.onEvent(EventListener.EventType.DOWNGRADED, "") - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -600,7 +633,7 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { timeSource.advance(FEATURE_CHECK_INTERVAL_NANOS) dataStreams.report() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -614,8 +647,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 hash == 1 parentHash == 2 } @@ -643,9 +678,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { } when: "reporting points when data streams is not enabled" + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -661,7 +697,7 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { dsmEnabled = true dataStreams.report() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -675,8 +711,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 hash == 1 parentHash == 2 } @@ -694,7 +732,7 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { when: "submitting points after being disabled" payloadWriter.buckets.clear() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -728,7 +766,8 @@ class 
DefaultDataStreamsMonitoringTest extends DDCoreSpecification { when: "reporting points when data streams is not supported" def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -745,7 +784,7 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { timeSource.advance(FEATURE_CHECK_INTERVAL_NANOS) dataStreams.report() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -759,7 +798,7 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { dsmEnabled = true dataStreams.report() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -773,8 +812,10 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { groups.size() == 1 with(groups.iterator().next()) { - edgeTags.containsAll(["type:testType", "group:testGroup", "topic:testTopic"]) - edgeTags.size() == 3 + tags.type == "type:testType" + tags.group == "group:testGroup" + tags.topic == "topic:testTopic" + tags.nonNullSize() == 3 hash == 1 parentHash == 2 } @@ -804,7 +845,8 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { when: "reporting points when data streams is not supported" def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) dataStreams.start() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() @@ -820,7 +862,7 @@ class DefaultDataStreamsMonitoringTest extends DDCoreSpecification { dsmEnabled = true dataStreams.report() - dataStreams.add(new StatsPoint(["type:testType", "group:testGroup", "topic:testTopic"], 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.add(new StatsPoint(tg, 1, 2, 3, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.report() diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy index 3fcf8b48b78..17464c8cc0b 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy +++ 
b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy @@ -6,6 +6,7 @@ import datadog.trace.api.DDTraceId import datadog.trace.api.ProcessTags import datadog.trace.api.TraceConfig import datadog.trace.api.WellKnownTags +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.api.datastreams.StatsPoint import datadog.trace.api.time.ControllableTimeSource import datadog.trace.bootstrap.instrumentation.api.AgentPropagation @@ -54,7 +55,7 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(50) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", null)), pointConsumer) then: context.isStarted() @@ -69,18 +70,20 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(50) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", null)), pointConsumer) timeSource.advance(25) - context.setCheckpoint( - fromTags(new LinkedHashMap<>(["group": "group", "topic": "topic", "type": "kafka"])), pointConsumer) + def tags = DataStreamsTags.create("kafka", null, "topic", "group", null) + context.setCheckpoint(fromTags(tags), pointConsumer) then: context.isStarted() pointConsumer.points.size() == 2 verifyFirstPoint(pointConsumer.points[0]) with(pointConsumer.points[1]) { - edgeTags == ["group:group", "topic:topic", "type:kafka"] - edgeTags.size() == 3 + tags.group == "group" + tags.topic == "topic" + tags.type == "kafka" + tags.size() == 3 parentHash == pointConsumer.points[0].hash hash != 0 pathwayLatencyNano == 25 diff --git a/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy b/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy index 7a41bd1d11e..f2730c4ffc8 100644 --- a/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy +++ b/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy @@ -3,7 +3,6 @@ import datadog.communication.ddagent.SharedCommunicationObjects import datadog.communication.http.OkHttpUtils import datadog.trace.api.Config import datadog.trace.api.TraceConfig -import datadog.trace.api.datastreams.DataStreamsTagsBuilder import datadog.trace.api.time.ControllableTimeSource import datadog.trace.api.datastreams.StatsPoint import datadog.trace.common.metrics.EventListener @@ -47,7 +46,7 @@ class DataStreamsIntegrationTest extends AbstractTraceAgentTest { when: def dataStreams = new DefaultDataStreamsMonitoring(sink, sharedCommunicationObjects.featuresDiscovery(Config.get()), timeSource, { traceConfig }, Config.get()) dataStreams.start() - def tags = new DataStreamsTagsBuilder() + def tags = DataStreamsTags .withTopic("testTopic") .withGroup("testGroup") .withType("testType") diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java index 53ea6491d5e..22cc02c74fe 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsContext.java @@ -16,16 +16,8 @@ public class DataStreamsContext implements ImplicitContextKeyed { final boolean sendCheckpoint; static { - CLIENT_PATHWAY_EDGE_TAGS = - new DataStreamsTagsBuilder() - .withType("http") - 
.withDirection(DataStreamsTags.Direction.Outbound) - .build(); - SERVER_PATHWAY_EDGE_TAGS = - new DataStreamsTagsBuilder() - .withType("http") - .withDirection(DataStreamsTags.Direction.Inbound) - .build(); + CLIENT_PATHWAY_EDGE_TAGS = DataStreamsTags.create("http", DataStreamsTags.Direction.Outbound); + SERVER_PATHWAY_EDGE_TAGS = DataStreamsTags.create("http", DataStreamsTags.Direction.Inbound); } public static DataStreamsContext fromContext(Context context) { diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index e95bbacb3b1..378d3f068aa 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -9,26 +9,35 @@ public enum Direction { Outbound, } - public enum TagTraverseMode { - HashOnly, - GroupOnly, - ValueOnly, - All - } - - public static DataStreamsTags EMPTY = new DataStreamsTagsBuilder().build(); + public static DataStreamsTags EMPTY = DataStreamsTags.create(null, null); - private final DataStreamsTagsBuilder builder; private long hash; private long aggregationHash; private long completeHash; - private int size; + private int nonNullSize; + + // hash tags + protected String bus; + protected String direction; + protected Direction directionValue; + protected String exchange; + protected String topic; + protected String type; + protected String subscription; + // additional grouping tags + protected String datasetName; + protected String datasetNamespace; + protected String isManual; + // informational tags + protected String group; + protected String consumerGroup; + protected String hasRoutingKey; + protected String kafkaClusterId; + protected String partition; public static final String MANUAL_TAG = "manual_checkpoint"; public static final String TYPE_TAG = "type"; public static final String DIRECTION_TAG = "direction"; - public static final String DIRECTION_IN = "in"; - public static final String DIRECTION_OUT = "out"; public static final String TOPIC_TAG = "topic"; public static final String BUS_TAG = "bus"; public static final String PARTITION_TAG = "partition"; @@ -54,171 +63,281 @@ public static byte[] longToBytes(long val) { }; } - public DataStreamsTags(DataStreamsTagsBuilder builder) { - this.builder = builder; - this.size = - this.forEachTag( - (name, value) -> { - this.hash = - FNV64Hash.continueHash(this.hash, name + ":" + value, FNV64Hash.Version.v1); - }, - TagTraverseMode.HashOnly); + public static DataStreamsTags create(String type, Direction direction) { + return DataStreamsTags.create(type, direction, null); + } - this.aggregationHash = this.hash; - this.size += - this.forEachTag( - (name, value) -> { - this.aggregationHash = - FNV64Hash.continueHash( - this.aggregationHash, name + ":" + value, FNV64Hash.Version.v1); - }, - TagTraverseMode.GroupOnly); + public static DataStreamsTags create(String type, Direction direction, String topic) { + return DataStreamsTags.createWithGroup(type, direction, topic, null); + } - this.completeHash = aggregationHash; - this.size += - this.forEachTag( - (name, value) -> { - this.completeHash = - FNV64Hash.continueHash( - this.completeHash, name + ":" + value, FNV64Hash.Version.v1); - }, - TagTraverseMode.ValueOnly); - } - - public int forEachTag(DataStreamsTagsProcessor processor, TagTraverseMode mode) { - int count = 0; - - if (mode == TagTraverseMode.HashOnly || mode == TagTraverseMode.All) { 
- if (this.builder.bus != null) { - processor.process(BUS_TAG, this.builder.bus); - count += 1; - } + public static DataStreamsTags create( + String type, Direction direction, String topic, String group, String kafkaClusterId) { + return new DataStreamsTags( + null, + direction, + null, + topic, + type, + null, + null, + null, + null, + group, + null, + null, + kafkaClusterId, + null); + } - if (this.builder.direction == Direction.Inbound) { - count += 1; - processor.process(DIRECTION_TAG, DIRECTION_IN); - } else if (this.builder.direction == Direction.Outbound) { - count += 1; - processor.process(DIRECTION_TAG, DIRECTION_OUT); - } + public static DataStreamsTags createManual(String type, Direction direction, String topic) { + return new DataStreamsTags( + null, direction, null, topic, type, null, null, null, true, null, null, null, null, null); + } - if (this.builder.exchange != null) { - count += 1; - processor.process(EXCHANGE_TAG, this.builder.exchange); - } + public static DataStreamsTags createWithBus(String type, Direction direction, String bus) { + return new DataStreamsTags( + bus, direction, null, null, type, null, null, null, null, null, null, null, null, null); + } - if (this.builder.topic != null) { - count += 1; - processor.process(TOPIC_TAG, this.builder.topic); - } + public static DataStreamsTags createWithPartition( + String type, String topic, String partition, String kafkaClusterId, String consumerGroup) { + return new DataStreamsTags( + null, + null, + null, + topic, + type, + null, + null, + null, + null, + null, + consumerGroup, + null, + kafkaClusterId, + partition); + } - if (this.builder.type != null) { - count += 1; - processor.process(TYPE_TAG, this.builder.type); - } + public static DataStreamsTags createWithGroup( + String type, Direction direction, String topic, String group) { + return new DataStreamsTags( + null, direction, null, topic, type, null, null, null, null, group, null, null, null, null); + } - if (this.builder.subscription != null) { - count += 1; - processor.process(SUBSCRIPTION_TAG, this.builder.subscription); - } - } + public static DataStreamsTags createWithDataset( + String type, Direction direction, String topic, String datasetName, String datasetNamespace) { + return new DataStreamsTags( + null, + direction, + null, + topic, + type, + null, + datasetName, + datasetNamespace, + null, + null, + null, + null, + null, + null); + } - if (mode == TagTraverseMode.GroupOnly || mode == TagTraverseMode.All) { - if (this.builder.isManual != null) { - count += 1; - processor.process(MANUAL_TAG, this.builder.isManual.toString()); - } + public static DataStreamsTags createWithClusterId( + String type, Direction direction, String topic, String clusterId) { + return new DataStreamsTags( + null, direction, null, topic, type, null, null, null, false, null, null, null, clusterId, + null); + } - if (this.builder.datasetName != null) { - count += 1; - processor.process(DATASET_NAME_TAG, this.builder.datasetName); - } + public static DataStreamsTags createWithExchange( + String type, Direction direction, String exchange, Boolean hasRoutingKey) { + return new DataStreamsTags( + null, + direction, + exchange, + null, + type, + null, + null, + null, + false, + null, + null, + hasRoutingKey, + null, + null); + } - if (this.builder.datasetNamespace != null) { - count += 1; - processor.process(DATASET_NAMESPACE_TAG, this.builder.datasetNamespace); + public DataStreamsTags( + String bus, + Direction direction, + String exchange, + String topic, + String type, + String 
subscription, + String datasetName, + String datasetNamespace, + Boolean isManual, + String group, + String consumerGroup, + Boolean hasRoutingKey, + String kafkaClusterId, + String partition) { + this.bus = bus != null ? BUS_TAG + ":" + bus : null; + this.directionValue = direction; + if (direction == Direction.Inbound) { + this.direction = DIRECTION_TAG + ":in"; + } else if (direction == Direction.Outbound) { + this.direction = DIRECTION_TAG + ":out"; + } + this.exchange = exchange != null ? EXCHANGE_TAG + ":" + exchange : null; + this.topic = topic != null ? TOPIC_TAG + ":" + topic : null; + this.type = type != null ? TYPE_TAG + ":" + type : null; + this.subscription = subscription != null ? SUBSCRIPTION_TAG + ":" + subscription : null; + this.datasetName = datasetName != null ? DATASET_NAME_TAG + ":" + datasetName : null; + this.datasetNamespace = + datasetNamespace != null ? DATASET_NAMESPACE_TAG + ":" + datasetNamespace : null; + this.isManual = isManual != null ? MANUAL_TAG + ":" + isManual : null; + this.group = group != null ? GROUP_TAG + ":" + group : null; + this.consumerGroup = consumerGroup != null ? CONSUMER_GROUP_TAG + ":" + consumerGroup : null; + this.hasRoutingKey = hasRoutingKey != null ? HAS_ROUTING_KEY_TAG + ":" + hasRoutingKey : null; + this.kafkaClusterId = + kafkaClusterId != null ? KAFKA_CLUSTER_ID_TAG + ":" + kafkaClusterId : null; + this.partition = partition != null ? PARTITION_TAG + ":" + partition : null; + + + // hashable tags are 0-6 + for (int i = 0; i < 7; i++) { + String tag = this.tagByIndex(i); + if (tag != null) { + this.nonNullSize++; + this.hash = FNV64Hash.continueHash(this.hash, tag, FNV64Hash.Version.v1); } } - if (mode == TagTraverseMode.ValueOnly || mode == TagTraverseMode.All) { - if (this.builder.hasRoutingKey != null) { - count += 1; - processor.process(HAS_ROUTING_KEY_TAG, this.builder.hasRoutingKey.toString()); + // aggregation tags are 7-9 + this.aggregationHash = this.hash; + for (int i = 7; i < 10; i++) { + String tag = this.tagByIndex(i); + if (tag != null) { + this.nonNullSize++; + this.aggregationHash = + FNV64Hash.continueHash(this.aggregationHash, tag, FNV64Hash.Version.v1); } + } - if (this.builder.consumerGroup != null) { - count += 1; - processor.process(CONSUMER_GROUP_TAG, this.builder.consumerGroup); + // the rest are values + this.completeHash = aggregationHash; + for (int i = 10; i < this.size(); i++) { + String tag = this.tagByIndex(i); + if (tag != null) { + this.nonNullSize++; + this.completeHash = + FNV64Hash.continueHash(this.completeHash, tag, FNV64Hash.Version.v1); } + } } - if (this.builder.group != null) { - count += 1; - processor.process(GROUP_TAG, this.builder.group); - } + public int size() { + // make sure it's in sync with tagByIndex logic + return 14; + } - if (this.builder.kafkaClusterId != null) { - count += 1; - processor.process(KAFKA_CLUSTER_ID_TAG, this.builder.kafkaClusterId); - } - if (this.builder.partition != null) { - count += 1; - processor.process(PARTITION_TAG, this.builder.partition); - } + public String tagByIndex(int index) { + switch (index) { + case 0: + return this.bus; + case 1: + return this.direction; + case 2: + return this.exchange; + case 3: + return this.topic; + case 4: + return this.type; + case 5: + return this.subscription; + case 6: + return this.datasetName; + case 7: + return this.datasetNamespace; + case 8: + return this.isManual; + case 9: + return this.group; + case 10: + return this.consumerGroup; + case 11: + return this.hasRoutingKey; + case 12: + return
this.kafkaClusterId; + case 13: + return this.partition; + default: + return null; } - - return count; } - public Direction getDirection() { - return this.builder.direction; + public String getDirection() { + return this.direction; } public String getTopic() { - return this.builder.topic; + return this.topic; } public String getType() { - return this.builder.type; + return this.type; } - public Boolean isManual() { - return this.builder.isManual; + public String isManual() { + return this.isManual; } public String getBus() { - return this.builder.bus; + return this.bus; } public String getExchange() { - return this.builder.exchange; + return this.exchange; + } + + public Direction getDirectionValue() { + return this.directionValue; } public String getSubscription() { - return this.builder.subscription; + return this.subscription; } public String getDatasetName() { - return this.builder.datasetName; + return this.datasetName; } public String getDatasetNamespace() { - return this.builder.datasetNamespace; + return this.datasetNamespace; } public String getGroup() { - return this.builder.group; + return this.group; } public String getPartition() { - return this.builder.partition; + return this.partition; } public String getKafkaClusterId() { - return this.builder.kafkaClusterId; + return this.kafkaClusterId; + } + + public String getHasRoutingKey() { + return this.hasRoutingKey; } - public boolean getHasRoutingKey() { - return this.builder.hasRoutingKey; + public int nonNullSize() { + return this.nonNullSize; } public long getHash() { @@ -229,10 +348,6 @@ public long getAggregationHash() { return aggregationHash; } - public int getSize() { - return size; - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -251,52 +366,38 @@ public int hashCode() { public String toString() { return "DataStreamsTags{" + "bus='" - + this.builder.bus - + "," + + this.bus + ", direction=" - + this.builder.direction - + "," + + this.direction + ", exchange='" - + this.builder.exchange - + "," + + this.exchange + ", topic='" - + this.builder.topic - + "," + + this.topic + ", type='" - + this.builder.type - + "," + + this.type + ", subscription='" - + this.builder.subscription - + "," + + this.subscription + ", datasetName='" - + this.builder.datasetName - + "," + + this.datasetName + ", datasetNamespace='" - + this.builder.datasetNamespace - + "," + + this.datasetNamespace + ", isManual=" - + this.builder.isManual + + this.isManual + ", group='" - + this.builder.group + + this.group + ", consumerGroup='" - + this.builder.consumerGroup - + "," + + this.consumerGroup + ", hasRoutingKey='" - + this.builder.hasRoutingKey - + "," + + this.hasRoutingKey + ", kafkaClusterId='" - + this.builder.kafkaClusterId - + "," + + this.kafkaClusterId + ", partition='" - + this.builder.partition - + "," + + this.partition + ", hash=" + hash - + "," + ", aggregationHash=" + aggregationHash - + "," + ", size=" - + size; + + size(); } } diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsBuilder.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsBuilder.java deleted file mode 100644 index 8732f928d87..00000000000 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsBuilder.java +++ /dev/null @@ -1,95 +0,0 @@ -package datadog.trace.api.datastreams; - -public class DataStreamsTagsBuilder { - // hash tags - protected String bus; - protected DataStreamsTags.Direction direction; - protected String exchange; - protected String 
topic; - protected String type; - protected String subscription; - // additional grouping tags - protected String datasetName; - protected String datasetNamespace; - protected Boolean isManual; - // informational tags - protected String group; - protected String consumerGroup; - protected Boolean hasRoutingKey; - protected String kafkaClusterId; - protected String partition; - - public DataStreamsTagsBuilder withBus(String bus) { - this.bus = bus; - return this; - } - - public DataStreamsTagsBuilder withDirection(DataStreamsTags.Direction direction) { - this.direction = direction; - return this; - } - - public DataStreamsTagsBuilder withExchange(String exchange) { - this.exchange = exchange; - return this; - } - - public DataStreamsTagsBuilder withTopic(String topic) { - this.topic = topic; - return this; - } - - public DataStreamsTagsBuilder withType(String type) { - this.type = type; - return this; - } - - public DataStreamsTagsBuilder withSubscription(String subscription) { - this.subscription = subscription; - return this; - } - - public DataStreamsTagsBuilder withDatasetName(String datasetName) { - this.datasetName = datasetName; - return this; - } - - public DataStreamsTagsBuilder withDatasetNamespace(String datasetNamespace) { - this.datasetNamespace = datasetNamespace; - return this; - } - - public DataStreamsTagsBuilder withManual(Boolean isManual) { - this.isManual = isManual; - return this; - } - - public DataStreamsTagsBuilder withGroup(String group) { - this.group = group; - return this; - } - - public DataStreamsTagsBuilder withConsumerGroup(String consumerGroup) { - this.consumerGroup = consumerGroup; - return this; - } - - public DataStreamsTagsBuilder withHasRoutingKey(Boolean hasRoutingKey) { - this.hasRoutingKey = hasRoutingKey; - return this; - } - - public DataStreamsTagsBuilder withKafkaClusterId(String kafkaClusterId) { - this.kafkaClusterId = kafkaClusterId; - return this; - } - - public DataStreamsTagsBuilder withPartition(String partition) { - this.partition = partition; - return this; - } - - public DataStreamsTags build() { - return new DataStreamsTags(this); - } -} diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsProcessor.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsProcessor.java deleted file mode 100644 index ce147374b2a..00000000000 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTagsProcessor.java +++ /dev/null @@ -1,5 +0,0 @@ -package datadog.trace.api.datastreams; - -public interface DataStreamsTagsProcessor { - void process(String name, String value); -} From ea3add7d9e6b48ef7e9ba4771cc219600e91d3e5 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Mon, 14 Jul 2025 17:42:17 -0500 Subject: [PATCH 08/29] Spotless apply --- .../trace/instrumentation/aws/v0/AwsSdkClientDecorator.java | 4 +++- .../java/datadog/trace/api/datastreams/DataStreamsTags.java | 4 +--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java index fd6ddb35600..b164cf54349 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java +++ 
b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/main/java/datadog/trace/instrumentation/aws/v0/AwsSdkClientDecorator.java @@ -268,7 +268,9 @@ && traceConfig().isDataStreamsEnabled()) { if (requestSize != null) { payloadSize = (long) requestSize; } - DataStreamsTags tags = DataStreamsTags.createWithDataset("s3", DataStreamsTags.Direction.Outbound, bucket, key, bucket); + DataStreamsTags tags = + DataStreamsTags.createWithDataset( + "s3", DataStreamsTags.Direction.Outbound, bucket, key, bucket); AgentTracer.get() .getDataStreamsMonitoring() .setCheckpoint(span, create(tags, 0, payloadSize)); diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 378d3f068aa..a97f097d5be 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -207,7 +207,6 @@ public DataStreamsTags( kafkaClusterId != null ? KAFKA_CLUSTER_ID_TAG + ":" + kafkaClusterId : null; this.partition = partition != null ? PARTITION_TAG + ":" + partition : null; - // hashable tags are 0-6 for (int i = 0; i < 7; i++) { @@ -234,8 +233,7 @@ public DataStreamsTags( String tag = this.tagByIndex(i); if (tag != null) { this.nonNullSize++; - this.completeHash = - FNV64Hash.continueHash(this.completeHash, tag, FNV64Hash.Version.v1); + this.completeHash = FNV64Hash.continueHash(this.completeHash, tag, FNV64Hash.Version.v1); } } From 5e197d11f3dd5dbdb6111030591575db78687b33 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Tue, 15 Jul 2025 14:21:41 -0500 Subject: [PATCH 09/29] Fixed more tests --- .../DefaultPathwayContextTest.groovy | 148 +++++++++++------- .../api/datastreams/DataStreamsTags.java | 32 ++-- .../datastreams/DataStreamsContextTest.groovy | 10 +- 3 files changed, 111 insertions(+), 79 deletions(-) diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy index 10ac09e5cec..14f8a491558 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy @@ -71,9 +71,9 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(50) - context.setCheckpoint(fromTags(DataStreamsTags.create("internal", null)), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) timeSource.advance(25) - def tags = DataStreamsTags.create("kafka", null, "topic", "group", null) + def tags = DataStreamsTags.create("kafka", DataStreamsTags.Direction.Outbound, "topic", "group", null) context.setCheckpoint(fromTags(tags), pointConsumer) then: context.isStarted() pointConsumer.points.size() == 2 verifyFirstPoint(pointConsumer.points[0]) with(pointConsumer.points[1]) { - tags.group == "group" - tags.topic == "topic" - tags.type == "kafka" - tags.size() == 3 + tags.group == "group:group" + tags.topic == "topic:topic" + tags.type == "type:kafka" + tags.getDirection() == "direction:out" + tags.nonNullSize() == 4 parentHash == pointConsumer.points[0].hash hash != 0 pathwayLatencyNano == 25 edgeLatencyNano == 25 @@ -100,15 +101,17 @@ class
DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(25) context.setCheckpoint( - create(new LinkedHashMap<>(["group": "group", "topic": "topic", "type": "kafka"]), 0, 72), + create(DataStreamsTags.create("kafka", null, "topic", "group", null), 0, 72), pointConsumer) then: context.isStarted() pointConsumer.points.size() == 1 with(pointConsumer.points[0]) { - edgeTags == ["group:group", "topic:topic", "type:kafka"] - edgeTags.size() == 3 + tags.getGroup() == "group:group" + tags.getTopic() == "topic:topic" + tags.getType() == "type:kafka" + tags.nonNullSize() == 3 hash != 0 payloadSizeBytes == 72 } @@ -121,29 +124,34 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(50) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["direction": "out", "type": "kafka"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("kafka", DataStreamsTags.Direction.Outbound)), pointConsumer) timeSource.advance(25) - context.setCheckpoint( - fromTags(new LinkedHashMap<>(["direction": "in", "group": "group", "topic": "topic", "type": "kafka"])), pointConsumer) + def tg = DataStreamsTags.create("kafka", DataStreamsTags.Direction.Inbound, "topic", "group", null) + context.setCheckpoint(fromTags(tg), pointConsumer) timeSource.advance(30) - context.setCheckpoint( - fromTags(new LinkedHashMap<>(["direction": "in", "group": "group", "topic": "topic", "type": "kafka"])), pointConsumer) + context.setCheckpoint(fromTags(tg), pointConsumer) then: context.isStarted() pointConsumer.points.size() == 3 verifyFirstPoint(pointConsumer.points[0]) with(pointConsumer.points[1]) { - edgeTags == ["direction:in", "group:group", "topic:topic", "type:kafka"] - edgeTags.size() == 4 + tags.nonNullSize() == 4 + tags.direction == "direction:in" + tags.group == "group:group" + tags.topic == "topic:topic" + tags.type == "type:kafka" parentHash == pointConsumer.points[0].hash hash != 0 pathwayLatencyNano == 25 edgeLatencyNano == 25 } with(pointConsumer.points[2]) { - edgeTags == ["direction:in", "group:group", "topic:topic", "type:kafka"] - edgeTags.size() == 4 + tags.nonNullSize() == 4 + tags.direction == "direction:in" + tags.group == "group:group" + tags.topic == "topic:topic" + tags.type == "type:kafka" // this point should have the first point as parent, // as the loop protection will reset the parent if two identical // points (same hash for tag values) are about to form a hierarchy @@ -173,19 +181,20 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "s3", "ds.namespace": "my_bucket", "ds.name": "my_object.csv", "direction": "in"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.createWithDataset("s3", DataStreamsTags.Direction.Inbound, null, "my_object.csv", "my_bucket")), pointConsumer) def encoded = context.encode() timeSource.advance(MILLISECONDS.toNanos(2)) def decodedContext = DefaultPathwayContext.decode(timeSource, baseHash, null, encoded) timeSource.advance(MILLISECONDS.toNanos(25)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "s3", "ds.namespace": "my_bucket", "ds.name": "my_object.csv", "direction": "out"])), pointConsumer) + def tg = DataStreamsTags.createWithDataset("s3", DataStreamsTags.Direction.Outbound, null, "my_object.csv", "my_bucket") + context.setCheckpoint(fromTags(tg), pointConsumer) then: decodedContext.isStarted() pointConsumer.points.size() == 2 // all points 
should have datasetHash, which is not equal to hash or 0 - for (var i = 0; i < pointConsumer.points.size(); i++){ + for (def i = 0; i < pointConsumer.points.size(); i++){ pointConsumer.points[i].aggregationHash != pointConsumer.points[i].hash pointConsumer.points[i].aggregationHash != 0 } @@ -199,20 +208,22 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) def encoded = context.encode() timeSource.advance(MILLISECONDS.toNanos(2)) def decodedContext = DefaultPathwayContext.decode(timeSource, baseHash, null, encoded) timeSource.advance(MILLISECONDS.toNanos(25)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["group": "group", "topic": "topic", "type": "kafka"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("kafka", null, "topic", "group", null)), pointConsumer) then: decodedContext.isStarted() pointConsumer.points.size() == 2 with(pointConsumer.points[1]) { - edgeTags == ["group:group", "topic:topic", "type:kafka"] - edgeTags.size() == 3 + tags.nonNullSize() == 3 + tags.getGroup() == "group:group" + tags.getType() == "type:kafka" + tags.getTopic() == "topic:topic" parentHash == pointConsumer.points[0].hash hash != 0 pathwayLatencyNano == MILLISECONDS.toNanos(27) @@ -226,13 +237,13 @@ class DefaultPathwayContextTest extends DDCoreSpecification { def context = new DefaultPathwayContext(timeSource, baseHash, null) def timeFromQueue = timeSource.getCurrentTimeMillis() - 200 when: - context.setCheckpoint(create(["type": "internal"], timeFromQueue, 0), pointConsumer) + context.setCheckpoint(create(DataStreamsTags.create("internal", null), timeFromQueue, 0), pointConsumer) then: context.isStarted() pointConsumer.points.size() == 1 with(pointConsumer.points[0]) { - edgeTags == ["type:internal"] - edgeTags.size() == 1 + tags.getType() == "type:internal" + tags.nonNullSize() == 1 parentHash == 0 hash != 0 pathwayLatencyNano == MILLISECONDS.toNanos(200) @@ -248,20 +259,23 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) def encoded = context.encode() timeSource.advance(MILLISECONDS.toNanos(1)) def decodedContext = DefaultPathwayContext.decode(timeSource, baseHash, null, encoded) timeSource.advance(MILLISECONDS.toNanos(25)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["group": "group", "topic": "topic", "type": "kafka"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("kafka", DataStreamsTags.Direction.Outbound, "topic", "group", null)), pointConsumer) then: decodedContext.isStarted() pointConsumer.points.size() == 2 with(pointConsumer.points[1]) { - edgeTags == ["group:group", "topic:topic", "type:kafka"] - edgeTags.size() == 3 + tags.group == "group:group" + tags.topic == "topic:topic" + tags.type == "type:kafka" + tags.direction == "direction:out" + tags.nonNullSize() == 4 parentHash == pointConsumer.points[0].hash hash != 0 pathwayLatencyNano == MILLISECONDS.toNanos(26) @@ -273,14 +287,17 @@ class DefaultPathwayContextTest extends DDCoreSpecification { 
timeSource.advance(MILLISECONDS.toNanos(2)) def secondDecode = DefaultPathwayContext.decode(timeSource, baseHash, null, secondEncode) timeSource.advance(MILLISECONDS.toNanos(30)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["group": "group", "topic": "topicB", "type": "kafka"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("kafka", DataStreamsTags.Direction.Inbound, "topicB", "group", null)), pointConsumer) then: secondDecode.isStarted() pointConsumer.points.size() == 3 with(pointConsumer.points[2]) { - edgeTags == ["group:group", "topic:topicB", "type:kafka"] - edgeTags.size() == 3 + tags.group == "group:group" + tags.topic == "topic:topicB" + tags.type == "type:kafka" + tags.direction == "direction:in" + tags.nonNullSize() == 4 parentHash == pointConsumer.points[1].hash hash != 0 pathwayLatencyNano == MILLISECONDS.toNanos(58) @@ -297,21 +314,24 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) def encoded = context.encode() Map carrier = [(PROPAGATION_KEY_BASE64): encoded, "someotherkey": "someothervalue"] timeSource.advance(MILLISECONDS.toNanos(1)) def decodedContext = DefaultPathwayContext.extract(carrier, contextVisitor, timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(25)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["group": "group", "topic": "topic", "type": "kafka"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("kafka", DataStreamsTags.Direction.Outbound, "topic", "group", null)), pointConsumer) then: decodedContext.isStarted() pointConsumer.points.size() == 2 with(pointConsumer.points[1]) { - edgeTags == ["group:group", "topic:topic", "type:kafka"] - edgeTags.size() == 3 + tags.nonNullSize() == 4 + tags.group == "group:group" + tags.topic == "topic:topic" + tags.type == "type:kafka" + tags.direction == "direction:out" parentHash == pointConsumer.points[0].hash hash != 0 pathwayLatencyNano == MILLISECONDS.toNanos(26) @@ -324,14 +344,17 @@ class DefaultPathwayContextTest extends DDCoreSpecification { timeSource.advance(MILLISECONDS.toNanos(2)) def secondDecode = DefaultPathwayContext.extract(carrier, contextVisitor, timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(30)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["group": "group", "topic": "topicB", "type": "kafka"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("kafka", DataStreamsTags.Direction.Inbound, "topicB", "group", null)), pointConsumer) then: secondDecode.isStarted() pointConsumer.points.size() == 3 with(pointConsumer.points[2]) { - edgeTags == ["group:group", "topic:topicB", "type:kafka"] - edgeTags.size() == 3 + tags.nonNullSize() == 4 + tags.group == "group:group" + tags.topic == "topic:topicB" + tags.type == "type:kafka" + tags.direction == "direction:in" parentHash == pointConsumer.points[1].hash hash != 0 pathwayLatencyNano == MILLISECONDS.toNanos(58) @@ -348,21 +371,23 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) def 
encoded = context.encode() Map carrier = [(PROPAGATION_KEY_BASE64): encoded, "someotherkey": "someothervalue"] timeSource.advance(MILLISECONDS.toNanos(1)) def decodedContext = DefaultPathwayContext.extract(carrier, contextVisitor, timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(25)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["topic": "topic", "type": "sqs"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("sqs", DataStreamsTags.Direction.Outbound, "topic", null, null)), pointConsumer) then: decodedContext.isStarted() pointConsumer.points.size() == 2 with(pointConsumer.points[1]) { - edgeTags == ["topic:topic", "type:sqs"] - edgeTags.size() == 2 + tags.direction == "direction:out" + tags.topic == "topic:topic" + tags.type == "type:sqs" + tags.nonNullSize() == 3 parentHash == pointConsumer.points[0].hash hash != 0 pathwayLatencyNano == MILLISECONDS.toNanos(26) @@ -375,14 +400,15 @@ class DefaultPathwayContextTest extends DDCoreSpecification { timeSource.advance(MILLISECONDS.toNanos(2)) def secondDecode = DefaultPathwayContext.extract(carrier, contextVisitor, timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(30)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["topic": "topicB", "type": "sqs"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("sqs", DataStreamsTags.Direction.Inbound, "topicB", null, null)), pointConsumer) then: secondDecode.isStarted() pointConsumer.points.size() == 3 with(pointConsumer.points[2]) { - edgeTags == ["topic:topicB", "type:sqs"] - edgeTags.size() == 2 + tags.type == "type:sqs" + tags.topic == "topic:topicB" + tags.nonNullSize() == 3 parentHash == pointConsumer.points[1].hash hash != 0 pathwayLatencyNano == MILLISECONDS.toNanos(58) @@ -397,26 +423,29 @@ class DefaultPathwayContextTest extends DDCoreSpecification { when: timeSource.advance(50) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) timeSource.advance(25) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["group": "group", "topic": "topic", "type": "type"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("type", DataStreamsTags.Direction.Outbound, "topic", "group", null)), pointConsumer) timeSource.advance(25) - context.setCheckpoint(fromTags(new LinkedHashMap<>()), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create(null, null)), pointConsumer) then: context.isStarted() pointConsumer.points.size() == 3 verifyFirstPoint(pointConsumer.points[0]) with(pointConsumer.points[1]) { - edgeTags == ["group:group", "topic:topic", "type:type"] - edgeTags.size() == 3 + tags.type == "type:type" + tags.topic == "topic:topic" + tags.group == "group:group" + tags.direction == "direction:out" + tags.nonNullSize() == 4 parentHash == pointConsumer.points[0].hash hash != 0 pathwayLatencyNano == 25 edgeLatencyNano == 25 } with(pointConsumer.points[2]) { - edgeTags.size() == 0 + tags.nonNullSize() == 0 parentHash == pointConsumer.points[1].hash hash != 0 pathwayLatencyNano == 50 @@ -475,7 +504,7 @@ class DefaultPathwayContextTest extends DDCoreSpecification { def context = new DefaultPathwayContext(timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + 
context.setCheckpoint(fromTags(DataStreamsTags.create("itnernal", DataStreamsTags.Direction.Inbound)), pointConsumer) def encoded = context.encode() Map carrier = [ (PROPAGATION_KEY_BASE64): encoded, @@ -521,7 +550,7 @@ class DefaultPathwayContextTest extends DDCoreSpecification { def context = new DefaultPathwayContext(timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) def encoded = context.encode() Map carrier = [(PROPAGATION_KEY_BASE64): encoded, "someotherkey": "someothervalue"] @@ -564,7 +593,7 @@ class DefaultPathwayContextTest extends DDCoreSpecification { def context = new DefaultPathwayContext(timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(new LinkedHashMap<>(["type": "internal"])), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", null)), pointConsumer) def encoded = context.encode() Map carrier = [(PROPAGATION_KEY_BASE64): encoded, "someotherkey": "someothervalue"] def contextVisitor = new Base64MapContextVisitor() @@ -609,7 +638,8 @@ class DefaultPathwayContextTest extends DDCoreSpecification { isDataStreamsEnabled() >> true } - def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) + def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, + wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) Map carrier = ["someotherkey": "someothervalue"] def contextVisitor = new Base64MapContextVisitor() diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index a97f097d5be..3cd1810a4e7 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -17,23 +17,23 @@ public enum Direction { private int nonNullSize; // hash tags - protected String bus; - protected String direction; - protected Direction directionValue; - protected String exchange; - protected String topic; - protected String type; - protected String subscription; + protected final String bus; + protected final String direction; + protected final Direction directionValue; + protected final String exchange; + protected final String topic; + protected final String type; + protected final String subscription; // additional grouping tags - protected String datasetName; - protected String datasetNamespace; - protected String isManual; + protected final String datasetName; + protected final String datasetNamespace; + protected final String isManual; // informational tags - protected String group; - protected String consumerGroup; - protected String hasRoutingKey; - protected String kafkaClusterId; - protected String partition; + protected final String group; + protected final String consumerGroup; + protected final String hasRoutingKey; + protected final String kafkaClusterId; + protected final String partition; public static final String MANUAL_TAG = "manual_checkpoint"; public static final String TYPE_TAG = "type"; @@ -191,6 +191,8 @@ public DataStreamsTags( this.direction = DIRECTION_TAG + ":in"; } else if (direction == 
Direction.Outbound) { this.direction = DIRECTION_TAG + ":out"; + } else { + this.direction = null; } this.exchange = exchange != null ? EXCHANGE_TAG + ":" + exchange : null; this.topic = topic != null ? TOPIC_TAG + ":" + topic : null; diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsContextTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsContextTest.groovy index a565e0ed5e0..babc061fa2a 100644 --- a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsContextTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsContextTest.groovy @@ -6,13 +6,13 @@ import spock.lang.Specification class DataStreamsContextTest extends Specification { def 'test constructor'() { setup: - def tags = new LinkedHashMap() + def tags = DataStreamsTags.EMPTY when: def dsmContext = DataStreamsContext.fromTags(tags) then: - dsmContext.sortedTags() == tags + dsmContext.tags() == tags dsmContext.defaultTimestamp() == 0 dsmContext.payloadSizeBytes() == 0 dsmContext.sendCheckpoint() @@ -21,7 +21,7 @@ class DataStreamsContextTest extends Specification { dsmContext = DataStreamsContext.fromTagsWithoutCheckpoint(tags) then: - dsmContext.sortedTags() == tags + dsmContext.tags() == tags dsmContext.defaultTimestamp() == 0 dsmContext.payloadSizeBytes() == 0 !dsmContext.sendCheckpoint() @@ -32,7 +32,7 @@ class DataStreamsContextTest extends Specification { dsmContext = DataStreamsContext.create(tags, timestamp, payloadSize) then: - dsmContext.sortedTags() == tags + dsmContext.tags() == tags dsmContext.defaultTimestamp() == timestamp dsmContext.payloadSizeBytes() == payloadSize dsmContext.sendCheckpoint() @@ -40,7 +40,7 @@ class DataStreamsContextTest extends Specification { def 'test context store'() { setup: - def tags = new LinkedHashMap() + def tags = DataStreamsTags.EMPTY when: def dsmContext = DataStreamsContext.fromTags(tags) From 1c32c8d6df1b2164b4139ce945f0e8c06c4087ad Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Tue, 15 Jul 2025 15:26:27 -0500 Subject: [PATCH 10/29] Add base hash support and service name overrides --- .../DefaultDataStreamsMonitoring.java | 3 +++ .../api/datastreams/DataStreamsTags.java | 24 +++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java index 64ab5f027d2..c35ecf2c722 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java @@ -123,6 +123,9 @@ public DefaultDataStreamsMonitoring( this.propagator = new DataStreamsPropagator(this, this.timeSource, this.hashOfKnownTags, serviceNameOverride); + // configure global tags behavior + DataStreamsTags.setGlobalBaseHash(this.hashOfKnownTags); + DataStreamsTags.setServiceNameOverride(serviceNameOverride); } @Override diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 3cd1810a4e7..873443d08d1 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -2,6 +2,8 @@ import datadog.trace.util.FNV64Hash; +import javax.xml.crypto.Data; + public class 
DataStreamsTags {
   public enum Direction {
     Unknown,
@@ -50,6 +52,9 @@ public enum Direction {
   public static final String HAS_ROUTING_KEY_TAG = "has_routing_key";
   public static final String KAFKA_CLUSTER_ID_TAG = "kafka_cluster_id";
 
+  private static volatile ThreadLocal<String> serviceNameOverride;
+  private static volatile long baseHash;
+
   public static byte[] longToBytes(long val) {
     return new byte[] {
       (byte) val,
@@ -144,6 +149,14 @@ public static DataStreamsTags createWithDataset(
         null);
   }
 
+  public static void setServiceNameOverride(ThreadLocal<String> serviceNameOverride) {
+    DataStreamsTags.serviceNameOverride = serviceNameOverride;
+  }
+
+  public static void setGlobalBaseHash(long hash) {
+    DataStreamsTags.baseHash = hash;
+  }
+
   public static DataStreamsTags createWithClusterId(
       String type, Direction direction, String topic, String clusterId) {
     return new DataStreamsTags(
@@ -209,6 +222,17 @@ public DataStreamsTags(
         kafkaClusterId != null ? KAFKA_CLUSTER_ID_TAG + ":" + kafkaClusterId : null;
     this.partition = partition != null ? PARTITION_TAG + ":" + partition : null;
 
+    if (DataStreamsTags.baseHash != 0) {
+      this.hash = DataStreamsTags.baseHash;
+    }
+
+    if (DataStreamsTags.serviceNameOverride != null) {
+      String val = DataStreamsTags.serviceNameOverride.get();
+      if (val != null) {
+        this.hash = FNV64Hash.continueHash(this.hash, val, FNV64Hash.Version.v1);
+      }
+    }
+
     // hashable tags are indices 0-6
     for (int i = 0; i < 7; i++) {
       String tag = this.tagByIndex(i);

From 5ec63df2998e67890358eab27e0c1283982f84db Mon Sep 17 00:00:00 2001
From: Igor Kravchenko
Date: Tue, 15 Jul 2025 16:17:44 -0500
Subject: [PATCH 11/29] Fixed more tests

---
 .../datastreams/DefaultPathwayContext.java    | 11 +-----
 .../DefaultPathwayContextTest.groovy          | 39 ++++++++++++++++---
 .../groovy/DataStreamsIntegrationTest.groovy  |  9 ++---
 3 files changed, 38 insertions(+), 21 deletions(-)

diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java
index 8af0d77d420..f77a74a328f 100644
--- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java
+++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java
@@ -109,19 +109,12 @@ public synchronized void setCheckpoint(
     }
 
     // generate node hash
-    long nodeHash = hashOfKnownTags;
-    if (serviceNameOverride != null) {
-      nodeHash = FNV64Hash.continueHash(nodeHash, serviceNameOverride, FNV64Hash.Version.v1);
-    }
-    nodeHash =
-        FNV64Hash.continueHash(
-            nodeHash, DataStreamsTags.longToBytes(context.tags().getHash()), FNV64Hash.Version.v1);
-
+    long nodeHash = context.tags().getHash();
     // loop protection - a node should not be chosen as parent
     // for a sequential node with the same direction, as this
     // will cause a `cardinality explosion` for hash / parentHash tag values
     DataStreamsTags.Direction direction = context.tags().getDirectionValue();
-    if (direction == previousDirection) {
+    if (direction == previousDirection && previousDirection != null) {
       hash = closestOppositeDirectionHash;
     } else {
       previousDirection = direction;
diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy
index 14f8a491558..30ce35ad88c 100644
--- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy
+++
b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy @@ -7,6 +7,7 @@ import datadog.trace.api.ProcessTags import datadog.trace.api.TagMap import datadog.trace.api.TraceConfig import datadog.trace.api.WellKnownTags +import datadog.trace.api.config.GeneralConfig import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.api.datastreams.StatsPoint import datadog.trace.api.time.ControllableTimeSource @@ -14,6 +15,7 @@ import datadog.trace.bootstrap.instrumentation.api.AgentPropagation import datadog.trace.bootstrap.instrumentation.api.AgentSpan import datadog.trace.bootstrap.instrumentation.api.AgentTracer import datadog.trace.common.metrics.Sink +import datadog.trace.core.CoreTracer import datadog.trace.core.propagation.ExtractedContext import datadog.trace.core.test.DDCoreSpecification @@ -502,9 +504,10 @@ class DefaultPathwayContextTest extends DDCoreSpecification { def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { globalTraceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) + DataStreamsTags.setGlobalBaseHash(baseHash) def context = new DefaultPathwayContext(timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(DataStreamsTags.create("itnernal", DataStreamsTags.Direction.Inbound)), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) def encoded = context.encode() Map carrier = [ (PROPAGATION_KEY_BASE64): encoded, @@ -518,6 +521,7 @@ class DefaultPathwayContextTest extends DDCoreSpecification { def extractedSpan = AgentSpan.fromContext(extractedContext) then: + encoded == "L+lDG/Pa9hRkZA==" !dynamicConfigEnabled || extractedSpan != null if (dynamicConfigEnabled) { def extracted = extractedSpan.context() @@ -546,8 +550,15 @@ class DefaultPathwayContextTest extends DDCoreSpecification { isDataStreamsEnabled() >> { return globalDsmEnabled } } + def tracerApi = Mock(AgentTracer.TracerAPI) { + captureTraceConfig() >> globalTraceConfig + } + AgentTracer.TracerAPI originalTracer = AgentTracer.get() + AgentTracer.forceRegister(tracerApi) + def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { globalTraceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) + DataStreamsTags.setGlobalBaseHash(baseHash) def context = new DefaultPathwayContext(timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(50)) context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) @@ -562,6 +573,7 @@ class DefaultPathwayContextTest extends DDCoreSpecification { def extractedSpan = AgentSpan.fromContext(extractedContext) then: + encoded == "L+lDG/Pa9hRkZA==" if (globalDsmEnabled) { extractedSpan != null def extracted = extractedSpan.context() @@ -572,6 +584,9 @@ class DefaultPathwayContextTest extends DDCoreSpecification { extractedSpan == null } + cleanup: + AgentTracer.forceRegister(originalTracer) + where: globalDsmEnabled << [true, false] } @@ -589,19 +604,27 @@ class DefaultPathwayContextTest extends DDCoreSpecification { isDataStreamsEnabled() >> { return globalDsmEnabled } } - def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { globalTraceConfig }, wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) + def tracerApi = Mock(AgentTracer.TracerAPI) { + captureTraceConfig() >> globalTraceConfig + } + AgentTracer.TracerAPI 
originalTracer = AgentTracer.get() + AgentTracer.forceRegister(tracerApi) + + def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { globalTraceConfig }, + wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) + DataStreamsTags.setGlobalBaseHash(baseHash) def context = new DefaultPathwayContext(timeSource, baseHash, null) timeSource.advance(MILLISECONDS.toNanos(50)) - context.setCheckpoint(fromTags(DataStreamsTags.create("internal", null)), pointConsumer) + context.setCheckpoint(fromTags(DataStreamsTags.create("internal", DataStreamsTags.Direction.Inbound)), pointConsumer) def encoded = context.encode() Map carrier = [(PROPAGATION_KEY_BASE64): encoded, "someotherkey": "someothervalue"] def contextVisitor = new Base64MapContextVisitor() - def spanContext = new ExtractedContext(DDTraceId.ONE, 1, 0, null, 0, null, (TagMap)null, null, null, null, DATADOG) + def spanContext = new ExtractedContext(DDTraceId.ONE, 1, 0, null, 0, + null, (TagMap)null, null, null, globalTraceConfig, DATADOG) def baseContext = AgentSpan.fromSpanContext(spanContext).storeInto(root()) def propagator = dataStreams.propagator() - when: def extractedContext = propagator.extract(baseContext, carrier, contextVisitor) def extractedSpan = AgentSpan.fromContext(extractedContext) @@ -614,6 +637,7 @@ class DefaultPathwayContextTest extends DDCoreSpecification { then: extracted != null + encoded == "L+lDG/Pa9hRkZA==" if (globalDsmEnabled) { extracted.pathwayContext != null extracted.pathwayContext.isStarted() @@ -621,6 +645,9 @@ class DefaultPathwayContextTest extends DDCoreSpecification { extracted.pathwayContext == null } + cleanup: + AgentTracer.forceRegister(originalTracer) + where: globalDsmEnabled << [true, false] } @@ -661,4 +688,4 @@ class DefaultPathwayContextTest extends DDCoreSpecification { } } } -} \ No newline at end of file +} diff --git a/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy b/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy index f2730c4ffc8..15f71c0e45c 100644 --- a/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy +++ b/dd-trace-core/src/traceAgentTest/groovy/DataStreamsIntegrationTest.groovy @@ -3,6 +3,7 @@ import datadog.communication.ddagent.SharedCommunicationObjects import datadog.communication.http.OkHttpUtils import datadog.trace.api.Config import datadog.trace.api.TraceConfig +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.api.time.ControllableTimeSource import datadog.trace.api.datastreams.StatsPoint import datadog.trace.common.metrics.EventListener @@ -46,12 +47,8 @@ class DataStreamsIntegrationTest extends AbstractTraceAgentTest { when: def dataStreams = new DefaultDataStreamsMonitoring(sink, sharedCommunicationObjects.featuresDiscovery(Config.get()), timeSource, { traceConfig }, Config.get()) dataStreams.start() - def tags = DataStreamsTags - .withTopic("testTopic") - .withGroup("testGroup") - .withType("testType") - .build() - dataStreams.add(new StatsPoint(tags, 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) + def tg = DataStreamsTags.create("testType", null, "testTopic", "testGroup", null) + dataStreams.add(new StatsPoint(tg, 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) timeSource.advance(Config.get().getDataStreamsBucketDurationNanoseconds()) dataStreams.report() From 35a21568cb9aaf387418579034c7682a8fe0fa50 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Tue, 15 Jul 2025 16:34:32 -0500 Subject: [PATCH 12/29] Spotless apply --- 
.../aws/v2/eventbridge/EventBridgeInterceptor.java | 2 +- .../trace/core/datastreams/DefaultPathwayContextTest.groovy | 4 ++-- .../java/datadog/trace/api/datastreams/DataStreamsTags.java | 6 ++---- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java index 41b556e5764..30d8d02d356 100644 --- a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java +++ b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/main/java/datadog/trace/instrumentation/aws/v2/eventbridge/EventBridgeInterceptor.java @@ -86,7 +86,7 @@ private String getTraceContextToInject( datadog.context.Context context = span; if (traceConfig().isDataStreamsEnabled()) { DataStreamsTags tags = - DataStreamsTags.createWithBus("bus", DataStreamsTags.Direction.Outbound, eventBusName); + DataStreamsTags.createWithBus(DataStreamsTags.Direction.Outbound, eventBusName); DataStreamsContext dsmContext = DataStreamsContext.fromTags(tags); context = context.with(dsmContext); } diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy index 30ce35ad88c..be4cf237d40 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy @@ -611,7 +611,7 @@ class DefaultPathwayContextTest extends DDCoreSpecification { AgentTracer.forceRegister(tracerApi) def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { globalTraceConfig }, - wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) + wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) DataStreamsTags.setGlobalBaseHash(baseHash) def context = new DefaultPathwayContext(timeSource, baseHash, null) @@ -666,7 +666,7 @@ class DefaultPathwayContextTest extends DDCoreSpecification { } def dataStreams = new DefaultDataStreamsMonitoring(sink, features, timeSource, { traceConfig }, - wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) + wellKnownTags, payloadWriter, DEFAULT_BUCKET_DURATION_NANOS) Map carrier = ["someotherkey": "someothervalue"] def contextVisitor = new Base64MapContextVisitor() diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 873443d08d1..2435abba19c 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -2,8 +2,6 @@ import datadog.trace.util.FNV64Hash; -import javax.xml.crypto.Data; - public class DataStreamsTags { public enum Direction { Unknown, @@ -100,9 +98,9 @@ public static DataStreamsTags createManual(String type, Direction direction, Str null, direction, null, topic, type, null, null, null, true, null, null, null, null, null); } - public static DataStreamsTags createWithBus(String type, Direction direction, String bus) { + public static DataStreamsTags createWithBus(Direction direction, String bus) { return new 
DataStreamsTags( - bus, direction, null, null, type, null, null, null, null, null, null, null, null, null); + bus, direction, null, null, "bus", null, null, null, null, null, null, null, null, null); } public static DataStreamsTags createWithPartition( From 9ca0f51d084e92db74bcf5a6b52eb47df61dc9ea Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 12:27:58 -0500 Subject: [PATCH 13/29] Added tests for tags --- .../api/datastreams/DataStreamsTags.java | 6 +++- .../datastreams/DataStreamsTagsTest.groovy | 33 +++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 2435abba19c..87dc8e2b184 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -314,7 +314,7 @@ public String getType() { return this.type; } - public String isManual() { + public String getIsManual() { return this.isManual; } @@ -346,6 +346,10 @@ public String getGroup() { return this.group; } + public String getConsumerGroup() { + return this.consumerGroup; + } + public String getPartition() { return this.partition; } diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy new file mode 100644 index 00000000000..438a6fb66bc --- /dev/null +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -0,0 +1,33 @@ +package datadog.trace.api.datastreams + +import spock.lang.Specification + + +class DataStreamsTagsTest extends Specification { + def getTags(int idx) { + return new DataStreamsTags("bus" + idx, DataStreamsTags.Direction.Outbound, "exchange" + idx, "topic" + idx, "type" + idx, "subscription" + idx, + "dataset_name" + idx, "dataset_namespace" + idx, true, "group" + idx, "consumer_group" + idx, true, + "kafka_cluster_id" + idx, "partition" + idx) + } + + def 'test tags are properly set'() { + setup: + def tg = getTags(0) + + expect: + tg.getBus() == DataStreamsTags.BUS_TAG + ":bus0" + tg.getDirection() == DataStreamsTags.DIRECTION_TAG + ":out" + tg.getExchange() == DataStreamsTags.EXCHANGE_TAG + ":exchange0" + tg.getTopic() == DataStreamsTags.TOPIC_TAG + ":topic0" + tg.getType() == DataStreamsTags.TYPE_TAG + ":type0" + tg.getSubscription() == DataStreamsTags.SUBSCRIPTION_TAG + ":subscription0" + tg.getDatasetName() == DataStreamsTags.DATASET_NAME_TAG + ":dataset_name0" + tg.getDatasetNamespace() == DataStreamsTags.DATASET_NAMESPACE_TAG + ":dataset_namespace0" + tg.getIsManual() == DataStreamsTags.MANUAL_TAG + ":true" + tg.getGroup() == DataStreamsTags.GROUP_TAG + ":group0" + tg.getConsumerGroup() == DataStreamsTags.CONSUMER_GROUP_TAG + ":consumer_group0" + tg.getHasRoutingKey() == DataStreamsTags.HAS_ROUTING_KEY_TAG + ":true" + tg.getKafkaClusterId() == DataStreamsTags.KAFKA_CLUSTER_ID_TAG + ":kafka_cluster_id0" + tg.getPartition() == DataStreamsTags.PARTITION_TAG + ":partition0" + } +} From 6faef842853a7206d972fbaffb35d0b8a21c153e Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 13:14:43 -0500 Subject: [PATCH 14/29] Improved coverage --- .../api/datastreams/DataStreamsTags.java | 2 +- 
.../datastreams/DataStreamsTagsTest.groovy | 34 +++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 87dc8e2b184..30cead9d425 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -264,7 +264,7 @@ public DataStreamsTags( public int size() { // make sure it's in sync with tagByIndex logic - return 13; + return 14; } public String tagByIndex(int index) { diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy index 438a6fb66bc..ff34b373552 100644 --- a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -29,5 +29,39 @@ class DataStreamsTagsTest extends Specification { tg.getHasRoutingKey() == DataStreamsTags.HAS_ROUTING_KEY_TAG + ":true" tg.getKafkaClusterId() == DataStreamsTags.KAFKA_CLUSTER_ID_TAG + ":kafka_cluster_id0" tg.getPartition() == DataStreamsTags.PARTITION_TAG + ":partition0" + tg.getDirectionValue() == DataStreamsTags.Direction.Outbound + tg.toString() == "DataStreamsTags{bus='bus:bus0, direction=direction:out, exchange='exchange:exchange0, topic='topic:topic0, type='type:type0, subscription='subscription:subscription0, datasetName='ds.name:dataset_name0, datasetNamespace='ds.namespace:dataset_namespace0, isManual=manual_checkpoint:true, group='group:group0, consumerGroup='consumer_group:consumer_group0, hasRoutingKey='has_routing_key:true, kafkaClusterId='kafka_cluster_id:kafka_cluster_id0, partition='partition:partition0, hash=8349314675200082083, aggregationHash=1264721246230085006, size=14" + } + + def 'test service name override and global hash'() { + setup: + def one = getTags(0) + + def serviceName = new ThreadLocal() + serviceName.set("test") + DataStreamsTags.setServiceNameOverride(serviceName) + def two = getTags(0) + + DataStreamsTags.setGlobalBaseHash(12) + def three = getTags(0) + + expect: + one.getHash() != two.getHash() + one.getAggregationHash() != two.getAggregationHash() + one.getHash() != three.getHash() + one.getAggregationHash() != three.getAggregationHash() + two.getHash() != three.getHash() + two.getAggregationHash() != three.getAggregationHash() + } + + def 'test compare'() { + setup: + def one = getTags(0) + def two = getTags(0) + def three = getTags(1) + expect: + one == two + one != three + two != three } } From 85846c72ed15302e1a155940550883b5a1b91571 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 13:33:14 -0500 Subject: [PATCH 15/29] Removed unused imports --- .../core/datastreams/DefaultDataStreamsMonitoringTest.groovy | 1 - .../trace/core/datastreams/DefaultPathwayContextTest.groovy | 2 -- 2 files changed, 3 deletions(-) diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultDataStreamsMonitoringTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultDataStreamsMonitoringTest.groovy index 9a048c5a010..bf7de275af9 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultDataStreamsMonitoringTest.groovy +++ 
b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultDataStreamsMonitoringTest.groovy
@@ -13,7 +13,6 @@ import datadog.trace.common.metrics.Sink
 import datadog.trace.core.test.DDCoreSpecification
 import spock.util.concurrent.PollingConditions
 
-import javax.xml.crypto.Data
 import java.util.concurrent.TimeUnit
 import java.util.function.BiConsumer
 
diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy
index be4cf237d40..802670b6fe5 100644
--- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy
+++ b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DefaultPathwayContextTest.groovy
@@ -7,7 +7,6 @@ import datadog.trace.api.ProcessTags
 import datadog.trace.api.TagMap
 import datadog.trace.api.TraceConfig
 import datadog.trace.api.WellKnownTags
-import datadog.trace.api.config.GeneralConfig
 import datadog.trace.api.datastreams.DataStreamsTags
 import datadog.trace.api.datastreams.StatsPoint
 import datadog.trace.api.time.ControllableTimeSource
@@ -15,7 +14,6 @@ import datadog.trace.bootstrap.instrumentation.api.AgentPropagation
 import datadog.trace.bootstrap.instrumentation.api.AgentSpan
 import datadog.trace.bootstrap.instrumentation.api.AgentTracer
 import datadog.trace.common.metrics.Sink
-import datadog.trace.core.CoreTracer
 import datadog.trace.core.propagation.ExtractedContext
 import datadog.trace.core.test.DDCoreSpecification
 
From ddebf44596163441ca849b11ec87042f00458a59 Mon Sep 17 00:00:00 2001
From: Igor Kravchenko
Date: Wed, 16 Jul 2025 14:33:58 -0500
Subject: [PATCH 16/29] Fixed even more tests

---
 .../MsgPackDatastreamsPayloadWriter.java      |  4 +--
 .../datastreams/DataStreamsWritingTest.groovy | 25 ++++++++-----------
 2 files changed, 13 insertions(+), 16 deletions(-)

diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java
index 6ffa166cc1c..0df8e7291d7 100644
--- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java
+++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/MsgPackDatastreamsPayloadWriter.java
@@ -161,7 +161,7 @@ private void writeBucket(StatsBucket bucket, Writable packer) {
     Collection<StatsGroup> groups = bucket.getGroups();
     packer.startArray(groups.size());
     for (StatsGroup group : groups) {
-      boolean firstNode = group.getTags().size() == 0;
+      boolean firstNode = group.getTags().nonNullSize() == 0;
       packer.startMap(firstNode ?
5 : 6); /* 1 */ @@ -208,7 +208,7 @@ private void writeBacklogs( } private void writeDataStreamsTags(DataStreamsTags tags, Writable packer) { - packer.startArray(tags.size()); + packer.startArray(tags.nonNullSize()); for (int i = 0; i < tags.size(); i++) { String val = tags.tagByIndex(i); diff --git a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy index 400cd91e39b..c37033e23d9 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams/DataStreamsWritingTest.groovy @@ -84,7 +84,7 @@ class DataStreamsWritingTest extends DDCoreSpecification { dataStreams.start() dataStreams.setThreadServiceName(serviceNameOverride) dataStreams.add(new StatsPoint(DataStreamsTags.create(null, null), 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, serviceNameOverride)) - dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_produce", "testTopic", "1"), 130) + dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_produce", "testTopic", "1", null, null), 130) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) // force flush dataStreams.report() @@ -106,11 +106,6 @@ class DataStreamsWritingTest extends DDCoreSpecification { assert unpacker.unpackString() == serviceNameOverride } - def getTags(String type, String topic, String partition, String group) { - return DataStreamsTags.createWithPartition(type, topic, partition, null, group) - } - - def "Write bucket to mock server with process tags enabled #processTagsEnabled"() { setup: injectSysConfig(EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED, "$processTagsEnabled") @@ -147,14 +142,14 @@ class DataStreamsWritingTest extends DDCoreSpecification { def dataStreams = new DefaultDataStreamsMonitoring(fakeConfig, sharedCommObjects, timeSource, { traceConfig }) dataStreams.start() dataStreams.add(new StatsPoint(DataStreamsTags.create(null, null), 9, 0, 10, timeSource.currentTimeNanos, 0, 0, 0, null)) - dataStreams.add(new StatsPoint(getTags("testType", "testTopic", null, null), 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) - dataStreams.trackBacklog(getTags("kafka_produce", "testTopic", "1", null), 100) - dataStreams.trackBacklog(getTags("kafka_produce", "testTopic", "1", null), 130) + dataStreams.add(new StatsPoint(DataStreamsTags.create("testType", DataStreamsTags.Direction.Inbound, "testTopic", "testGroup", null), 1, 2, 5, timeSource.currentTimeNanos, 0, 0, 0, null)) + dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_produce", "testTopic", "1", null, null), 100) + dataStreams.trackBacklog(DataStreamsTags.createWithPartition("kafka_produce", "testTopic", "1", null, null), 130) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS - 100l) - dataStreams.add(new StatsPoint(getTags("testType", "testTopic", null, "testGroup"), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(10), SECONDS.toNanos(10), 10, null)) + dataStreams.add(new StatsPoint(DataStreamsTags.create("testType", DataStreamsTags.Direction.Inbound, "testTopic", "testGroup", null), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(10), SECONDS.toNanos(10), 10, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) - dataStreams.add(new StatsPoint(getTags("testType", "testTopic", null, "testGroup"), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(5), SECONDS.toNanos(5), 5, null)) - dataStreams.add(new 
StatsPoint(getTags("testType", "testTopic2", null, "testGroup"), 3, 4, 6, timeSource.currentTimeNanos, SECONDS.toNanos(2), 0, 2, null)) + dataStreams.add(new StatsPoint(DataStreamsTags.create("testType", DataStreamsTags.Direction.Inbound, "testTopic", "testGroup", null), 1, 2, 5, timeSource.currentTimeNanos, SECONDS.toNanos(5), SECONDS.toNanos(5), 5, null)) + dataStreams.add(new StatsPoint(DataStreamsTags.create("testType", DataStreamsTags.Direction.Inbound, "testTopic2", "testGroup", null), 3, 4, 6, timeSource.currentTimeNanos, SECONDS.toNanos(2), 0, 2, null)) timeSource.advance(DEFAULT_BUCKET_DURATION_NANOS) dataStreams.close() @@ -231,7 +226,8 @@ class DataStreamsWritingTest extends DDCoreSpecification { assert unpacker.unpackString() == "ParentHash" assert unpacker.unpackLong() == 2 assert unpacker.unpackString() == "EdgeTags" - assert unpacker.unpackArrayHeader() == 3 + assert unpacker.unpackArrayHeader() == 4 + assert unpacker.unpackString() == "direction:in" assert unpacker.unpackString() == "topic:testTopic" assert unpacker.unpackString() == "type:testType" assert unpacker.unpackString() == "group:testGroup" @@ -274,7 +270,8 @@ class DataStreamsWritingTest extends DDCoreSpecification { assert unpacker.unpackString() == "ParentHash" assert unpacker.unpackLong() == (hash == 1 ? 2 : 4) assert unpacker.unpackString() == "EdgeTags" - assert unpacker.unpackArrayHeader() == 3 + assert unpacker.unpackArrayHeader() == 4 + assert unpacker.unpackString() == "direction:in" assert unpacker.unpackString() == (hash == 1 ? "topic:testTopic" : "topic:testTopic2") assert unpacker.unpackString() == "type:testType" assert unpacker.unpackString() == "group:testGroup" From 94661134b4aa515ad7efdc82af505bbc3c8d05e8 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 17:10:45 -0500 Subject: [PATCH 17/29] Updated multiple tests --- .../src/test/groovy/ArmeriaGrpcTest.groovy | 8 +- .../groovy/AWS1KinesisClientTest.groovy | 4 +- .../dsmTest/groovy/AWS1SnsClientTest.groovy | 4 +- .../groovy/Aws2KinesisDataStreamsTest.groovy | 7 +- .../groovy/Aws2SnsDataStreamsTest.groovy | 7 +- .../src/test/groovy/SnsClientTest.groovy | 6 +- .../src/test/groovy/SnsClientTest.groovy | 6 +- .../src/test/groovy/SqsClientTest.groovy | 19 ++-- .../src/test/groovy/SqsClientTest.groovy | 7 +- .../src/test/groovy/PubSubTest.groovy | 8 +- .../grpc-1.5/src/test/groovy/GrpcTest.groovy | 8 +- .../groovy/KafkaClientTestBase.groovy | 11 +-- .../test/groovy/KafkaClientTestBase.groovy | 7 +- .../test/groovy/KafkaClientTestBase.groovy | 19 ++-- .../ConnectWorkerInstrumentationTest.groovy | 45 +++++----- .../groovy/KafkaStreamsTest.groovy | 17 ++-- .../test/groovy/KafkaStreamsTestBase.groovy | 17 ++-- .../src/test/groovy/RabbitMQTest.groovy | 43 ++++----- .../agent/test/base/HttpClientTest.groovy | 53 ++++------- .../DefaultDataStreamsMonitoring.java | 22 ++++- .../api/datastreams/DataStreamsTags.java | 88 +++++++++++++++++++ 21 files changed, 229 insertions(+), 177 deletions(-) diff --git a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy index c8237c1d812..1effce3d8b2 100644 --- a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy +++ b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy @@ -1,3 +1,5 @@ +import datadog.trace.api.datastreams.DataStreamsTags + import static 
datadog.trace.api.config.TraceInstrumentationConfig.GRPC_SERVER_ERROR_STATUSES
 
 import com.google.common.util.concurrent.ListenableFuture
@@ -254,14 +256,12 @@ abstract class ArmeriaGrpcTest extends VersionedNamingTestBase {
     if (isDataStreamsEnabled()) {
       StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 }
       verifyAll(first) {
-        edgeTags.containsAll(["direction:out", "type:grpc"])
-        edgeTags.size() == 2
+        tags == DataStreamsTags.fromTags("direction:out", "type:grpc")
       }
 
       StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash }
       verifyAll(second) {
-        edgeTags.containsAll(["direction:in", "type:grpc"])
-        edgeTags.size() == 2
+        tags == DataStreamsTags.fromTags("direction:in", "type:grpc")
       }
     }
 
diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy
index 047f8cc2ec0..53120a85c75 100644
--- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy
+++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy
@@ -11,6 +11,7 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase
 import datadog.trace.api.Config
 import datadog.trace.api.DDSpanTypes
 import datadog.trace.api.DDTags
+import datadog.trace.api.datastreams.DataStreamsTags
 import datadog.trace.bootstrap.instrumentation.api.Tags
 import datadog.trace.core.datastreams.StatsGroup
 import spock.lang.AutoCleanup
@@ -115,8 +116,7 @@ abstract class AWS1KinesisClientTest extends VersionedNamingTestBase {
         pathwayLatencyCount += group.pathwayLatency.count
         edgeLatencyCount += group.edgeLatency.count
         verifyAll(group) {
-          edgeTags.containsAll(["direction:" + dsmDirection, "topic:" + streamArn, "type:kinesis"])
-          edgeTags.size() == 3
+          tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:" + streamArn, "type:kinesis")
         }
       }
       verifyAll {
diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy
index 28bd6dd4741..5f01c7af612 100644
--- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy
+++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy
@@ -10,6 +10,7 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase
 import datadog.trace.api.Config
 import datadog.trace.api.DDSpanTypes
 import datadog.trace.api.DDTags
+import datadog.trace.api.datastreams.DataStreamsTags
 import datadog.trace.bootstrap.instrumentation.api.Tags
 import datadog.trace.core.datastreams.StatsGroup
 import spock.lang.AutoCleanup
@@ -96,8 +97,7 @@ abstract class AWS1SnsClientTest extends VersionedNamingTestBase {
         pathwayLatencyCount += group.pathwayLatency.count
         edgeLatencyCount += group.edgeLatency.count
         verifyAll(group) {
-          edgeTags.containsAll(["direction:" + dsmDirection, "topic:" + topicName, "type:sns"])
-          edgeTags.size() == 3
+          tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:" + topicName, "type:sns")
         }
       }
       verifyAll {
diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy
index 1e649cbb25e..bf8e99fc13d 100644
---
a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy @@ -2,6 +2,7 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup import org.eclipse.jetty.http2.server.HTTP2CServerConnectionFactory @@ -156,8 +157,7 @@ abstract class Aws2KinesisDataStreamsTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - edgeTags.containsAll(["direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis"]) - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis") } } verifyAll { @@ -278,8 +278,7 @@ abstract class Aws2KinesisDataStreamsTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - edgeTags.containsAll(["direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis"]) - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis") } } verifyAll { diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy index 9147636660d..0e81c49835f 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy @@ -2,6 +2,7 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup import datadog.trace.instrumentation.aws.ExpectedQueryParams @@ -144,8 +145,7 @@ abstract class Aws2SnsDataStreamsTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - edgeTags.containsAll(["direction:" + dsmDirection, "topic:mytopic", "type:sns"]) - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:mytopic", "type:sns") } } verifyAll { @@ -243,8 +243,7 @@ abstract class Aws2SnsDataStreamsTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - edgeTags.containsAll(["direction:" + dsmDirection, "topic:mytopic", "type:sns"]) - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:mytopic", "type:sns") } } verifyAll { diff --git a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy index 1653ecfa586..11727ce3882 100644 --- 
a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy @@ -10,6 +10,7 @@ import datadog.trace.agent.test.utils.TraceUtils import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.GeneralConfig +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup import groovy.json.JsonSlurper @@ -194,10 +195,7 @@ abstract class SnsClientTest extends VersionedNamingTestBase { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.contains("direction:out") - edgeTags.contains("topic:testtopic") - edgeTags.contains("type:sns") - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:out", "topic:testtopic", "type:sns") } } diff --git a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy index 3e40aa138dc..fe2a3674da6 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy @@ -3,6 +3,7 @@ import datadog.trace.agent.test.utils.TraceUtils import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.GeneralConfig +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup import datadog.trace.instrumentation.aws.ExpectedQueryParams @@ -164,10 +165,7 @@ abstract class SnsClientTest extends VersionedNamingTestBase { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.contains("direction:out") - edgeTags.contains("topic:testtopic") - edgeTags.contains("type:sns") - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:out", "topic:testtopic", "type:sns") } } diff --git a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy index 9ae956e5068..8eb1a79abfe 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy @@ -17,6 +17,7 @@ import datadog.trace.api.DDSpanId import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.GeneralConfig +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.api.naming.SpanNaming import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags @@ -172,14 +173,12 @@ abstract class SqsClientTest extends VersionedNamingTestBase { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "topic:somequeue", "type:sqs"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:out", "topic:somequeue", "type:sqs") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == ["direction:in", "topic:somequeue", "type:sqs"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", 
"topic:somequeue", "type:sqs") } } @@ -629,8 +628,7 @@ class SqsClientV1DataStreamsForkedTest extends SqsClientTest { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == -2734507826469073289 } verifyAll(first) { - edgeTags == ["direction:in", "topic:somequeue", "type:sqs"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:somequeue", "type:sqs") } cleanup: @@ -659,8 +657,7 @@ class SqsClientV1DataStreamsForkedTest extends SqsClientTest { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:in", "topic:somequeue", "type:sqs"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:somequeue", "type:sqs") } cleanup: @@ -690,8 +687,10 @@ class SqsClientV1DataStreamsForkedTest extends SqsClientTest { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:in", "topic:somequeue", "type:sqs"] - edgeTags.size() == 3 + tags.direction == DataStreamsTags.DIRECTION_TAG + ":in" + tags.topic == DataStreamsTags.TOPIC_TAG + ":somequeue" + tags.type == DataStreamsTags.TYPE_TAG + ":sqs" + tags.nonNullSize() == 3 } cleanup: diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy index c0a1085d6ad..073aebb9a25 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy @@ -7,6 +7,7 @@ import datadog.trace.api.DDSpanId import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.GeneralConfig +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.api.naming.SpanNaming import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags @@ -173,14 +174,12 @@ abstract class SqsClientTest extends VersionedNamingTestBase { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "topic:somequeue", "type:sqs"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:out", "topic:somequeue", "type:sqs") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == ["direction:in", "topic:somequeue", "type:sqs"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:somequeue", "type:sqs") } } diff --git a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy index cf24e0115bc..65ce8c85732 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy +++ b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy @@ -1,3 +1,5 @@ +import datadog.trace.api.datastreams.DataStreamsTags + import static datadog.trace.agent.test.utils.TraceUtils.basicSpan import com.google.api.gax.core.NoCredentialsProvider @@ -236,13 +238,11 @@ abstract class PubSubTest extends VersionedNamingTestBase { StatsGroup sendStat = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0} verifyAll (sendStat) { - edgeTags.containsAll(["direction:out" , "topic:test-topic", "type:google-pubsub"]) - edgeTags.size() == 3 + tags == 
DataStreamsTags.fromTags("direction:out" , "topic:test-topic", "type:google-pubsub") } StatsGroup receiveStat = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == sendStat.hash} verifyAll(receiveStat) { - edgeTags.containsAll(["direction:in" , "subscription:my-subscription", "type:google-pubsub"]) - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in" , "subscription:my-subscription", "type:google-pubsub") pathwayLatency.count == 1 pathwayLatency.minValue > 0.0 edgeLatency.count == 1 diff --git a/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy b/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy index 0b4d174b4dc..c3f31367a9f 100644 --- a/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy +++ b/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy @@ -1,3 +1,5 @@ +import datadog.trace.api.datastreams.DataStreamsTags + import static datadog.trace.agent.test.asserts.TagsAssert.codeOriginTags import static datadog.trace.api.config.TraceInstrumentationConfig.GRPC_SERVER_ERROR_STATUSES @@ -243,14 +245,12 @@ abstract class GrpcTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(["direction:out", "type:grpc"]) - edgeTags.size() == 2 + tags == DataStreamsTags.fromTags("direction:out", "type:grpc") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags.containsAll(["direction:in", "type:grpc"]) - edgeTags.size() == 2 + tags == DataStreamsTags.fromTags("direction:in", "type:grpc") } } diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy index 493e1af0967..01cbe2cc784 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy @@ -1,3 +1,5 @@ +import datadog.trace.api.datastreams.DataStreamsTags + import static datadog.trace.agent.test.utils.TraceUtils.basicSpan import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.isAsyncPropagationEnabled @@ -231,22 +233,21 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { new String(headers.headers("x-datadog-parent-id").iterator().next().value()) == "${traces[produceTraceIdx][2].spanId}" if (isDataStreamsEnabled()) { + def val = DataStreamsTags.fromTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka"] - edgeTags.size() == 4 + tags.toString() == val.toString() } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == [ + tags == DataStreamsTags.fromTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka" - ] - edgeTags.size() == 5 + ) } List produce = [ "kafka_cluster_id:$clusterId", diff --git 
a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy index 569538197d4..adc4814b616 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy @@ -1,3 +1,5 @@ +import datadog.trace.api.datastreams.DataStreamsTags + import static datadog.trace.agent.test.utils.TraceUtils.basicSpan import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan @@ -927,14 +929,13 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == [ + tags == DataStreamsTags.fromTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka" - ] - edgeTags.size() == 5 + ) } } diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy index 9e71a218b7a..99fe7673bd4 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy @@ -2,6 +2,7 @@ import datadog.trace.agent.test.asserts.TraceAssert import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDTags +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.common.writer.ListWriter @@ -260,19 +261,17 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == [ + tags == DataStreamsTags.fromTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka" - ] - edgeTags.size() == 5 + ) } List produce = [ "kafka_cluster_id:$clusterId", @@ -412,25 +411,23 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == [ + tags == DataStreamsTags.fromTags( "direction:out", "kafka_cluster_id:$clusterId".toString(), "topic:$SHARED_TOPIC".toString(), "type:kafka" - ] - edgeTags.size() == 4 + ) } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == [ + tags == DataStreamsTags.fromTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId".toString(), "topic:$SHARED_TOPIC".toString(), "type:kafka" - ] - edgeTags.size() == 5 + ) } List produce = [ 
"kafka_cluster_id:$clusterId".toString(), diff --git a/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy b/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy index 244a5213ff2..aad552c2764 100644 --- a/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy +++ b/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy @@ -1,4 +1,5 @@ import datadog.trace.agent.test.AgentTestRunner +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.core.datastreams.StatsGroup import org.apache.kafka.clients.admin.AdminClient import org.apache.kafka.clients.admin.AdminClientConfig @@ -153,21 +154,21 @@ class ConnectWorkerInstrumentationTest extends AgentTestRunner { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - assert [ - "direction:out", - "topic:test-topic", - "type:kafka" - ].every( tag -> edgeTags.contains(tag) ) + tags == DataStreamsTags.fromTags( + "direction:out", + "topic:test-topic", + "type:kafka" + ) } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - assert [ - "direction:in", - "group:test-consumer-group", - "topic:test-topic", - "type:kafka" - ].every( tag -> edgeTags.contains(tag) ) + tags == DataStreamsTags.fromTags( + "direction:in", + "group:test-consumer-group", + "topic:test-topic", + "type:kafka" + ) } TEST_DATA_STREAMS_WRITER.getServices().contains('file-source-connector') @@ -285,21 +286,21 @@ class ConnectWorkerInstrumentationTest extends AgentTestRunner { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - assert [ - "direction:out", - "topic:test-topic", - "type:kafka" - ].every( tag -> edgeTags.contains(tag) ) + tags == DataStreamsTags.fromTags( + "direction:out", + "topic:test-topic", + "type:kafka" + ) } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - assert [ - "direction:in", - "group:connect-file-sink-connector", - "topic:test-topic", - "type:kafka" - ].every( tag -> edgeTags.contains(tag) ) + tags == DataStreamsTags.fromTags( + "direction:in", + "group:connect-file-sink-connector", + "topic:test-topic", + "type:kafka" + ) } TEST_DATA_STREAMS_WRITER.getServices().contains('file-sink-connector') diff --git a/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy b/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy index 095d479e2d9..bc4a2e83f39 100644 --- a/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy +++ b/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy @@ -1,5 +1,6 @@ import datadog.trace.agent.test.AgentTestRunner import datadog.trace.api.DDTags +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup @@ -226,31 +227,25 @@ class KafkaStreamsTest extends AgentTestRunner { if (isDataStreamsEnabled()) { StatsGroup originProducerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(originProducerPoint) { - edgeTags == ["direction:out", 
"topic:$STREAM_PENDING", "type:kafka"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:out", "topic:$STREAM_PENDING", "type:kafka") } StatsGroup kafkaStreamsConsumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == originProducerPoint.hash } verifyAll(kafkaStreamsConsumerPoint) { - edgeTags == [ - "direction:in", + tags == DataStreamsTags.fromTags("direction:in", "group:test-application", "topic:$STREAM_PENDING".toString(), - "type:kafka" - ] - edgeTags.size() == 4 + "type:kafka") } StatsGroup kafkaStreamsProducerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == kafkaStreamsConsumerPoint.hash } verifyAll(kafkaStreamsProducerPoint) { - edgeTags == ["direction:out", "topic:$STREAM_PROCESSED", "type:kafka"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:out", "topic:$STREAM_PROCESSED", "type:kafka") } StatsGroup finalConsumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == kafkaStreamsProducerPoint.hash } verifyAll(finalConsumerPoint) { - edgeTags == ["direction:in", "group:sender", "topic:$STREAM_PROCESSED".toString(), "type:kafka"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:in", "group:sender", "topic:$STREAM_PROCESSED".toString(), "type:kafka") } } diff --git a/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy b/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy index 1267187681b..d56cc942b19 100644 --- a/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy @@ -1,5 +1,6 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.DDTags +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup @@ -289,31 +290,25 @@ abstract class KafkaStreamsTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup originProducerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(originProducerPoint) { - edgeTags == ["direction:out", "topic:$STREAM_PENDING", "type:kafka"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:out", "topic:$STREAM_PENDING", "type:kafka") } StatsGroup kafkaStreamsConsumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == originProducerPoint.hash } verifyAll(kafkaStreamsConsumerPoint) { - edgeTags == [ - "direction:in", + tags == DataStreamsTags.fromTags("direction:in", "group:test-application", "topic:$STREAM_PENDING".toString(), - "type:kafka" - ] - edgeTags.size() == 4 + "type:kafka") } StatsGroup kafkaStreamsProducerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == kafkaStreamsConsumerPoint.hash } verifyAll(kafkaStreamsProducerPoint) { - edgeTags == ["direction:out", "topic:$STREAM_PROCESSED", "type:kafka"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:out", "topic:$STREAM_PROCESSED", "type:kafka") } StatsGroup finalConsumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == kafkaStreamsProducerPoint.hash } verifyAll(finalConsumerPoint) { - edgeTags == ["direction:in", "group:sender", "topic:$STREAM_PROCESSED".toString(), "type:kafka"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:in", 
"group:sender", "topic:$STREAM_PROCESSED".toString(), "type:kafka") } } diff --git a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy index b47668d272a..e94ba171e07 100644 --- a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy +++ b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy @@ -12,6 +12,7 @@ import datadog.trace.agent.test.utils.PortUtils import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.DDSpan @@ -171,14 +172,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == ["direction:in", "topic:" + queueName, "type:rabbitmq"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } @@ -225,14 +224,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "exchange:", "has_routing_key:true", "type:rabbitmq"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", "exchange:", "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == ["direction:in", "topic:" + queueName, "type:rabbitmq"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } } @@ -322,15 +319,13 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { List producerPoints = TEST_DATA_STREAMS_WRITER.groups.findAll { it.parentHash == 0 } producerPoints.each { producerPoint -> verifyAll(producerPoint) { - edgeTags == ["direction:out", "exchange:" + exchangeName, "has_routing_key:false", "type:rabbitmq"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", "exchange:" + exchangeName, "has_routing_key:false", "type:rabbitmq") } } StatsGroup consumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == producerPoints.get(0).hash } verifyAll(consumerPoint) { - edgeTags == ["direction:in", "topic:" + queueName, "type:rabbitmq"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } @@ -414,14 +409,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "exchange:" + exchangeName, "has_routing_key:false", "type:rabbitmq"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", 
"exchange:" + exchangeName, "has_routing_key:false", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == ["direction:in", "topic:" + queueName, "type:rabbitmq"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } @@ -499,14 +492,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(["direction:out", "exchange:", "has_routing_key:true", "type:rabbitmq"]) - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", "exchange:", "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == ["direction:in", "topic:some-routing-queue", "type:rabbitmq"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:some-routing-queue", "type:rabbitmq") } } } @@ -583,14 +574,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled() && !noParent) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == ["direction:in", "topic:" + queueName, "type:rabbitmq"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } @@ -679,14 +668,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { // assert with retries in case DSM data is split in more groups that take a bit longer to arrive. 
StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == ["direction:in", "topic:" + queueName, "type:rabbitmq"] - edgeTags.size() == 3 + tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } } diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy index f08d8a556c0..bebd9833962 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy @@ -6,7 +6,7 @@ import datadog.trace.agent.test.server.http.HttpProxy import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.TracerConfig -import datadog.trace.api.datastreams.DataStreamsContext +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.bootstrap.instrumentation.api.URIUtils import datadog.trace.core.DDSpan @@ -36,9 +36,6 @@ abstract class HttpClientTest extends VersionedNamingTestBase { protected static final int READ_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(5) as int protected static final BASIC_AUTH_KEY = "custom_authorization_header" protected static final BASIC_AUTH_VAL = "plain text auth token" - protected static final DSM_EDGE_TAGS = DataStreamsContext.forHttpClient().sortedTags().collect { key, value -> - return key + ":" + value - } @AutoCleanup @Shared @@ -178,8 +175,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -221,8 +217,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -269,8 +264,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -304,8 +298,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -342,8 +335,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { 
StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -381,8 +373,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -415,8 +406,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -450,8 +440,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -495,8 +484,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } } @@ -543,8 +531,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -580,8 +567,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -615,8 +601,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -674,8 +659,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -759,8 +743,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -788,8 +771,7 
@@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } @@ -822,8 +804,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags.containsAll(DSM_EDGE_TAGS) - edgeTags.size() == DSM_EDGE_TAGS.size() + getTags() == DataStreamsTags.fromTags("type:http", "direction:in") } } diff --git a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java index c35ecf2c722..58093fae2d2 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultDataStreamsMonitoring.java @@ -227,6 +227,11 @@ public void setCheckpoint(AgentSpan span, DataStreamsContext context) { @Override public void setConsumeCheckpoint(String type, String source, DataStreamsContextCarrier carrier) { + setConsumeCheckpoint(type, source, carrier, true); + } + + public void setConsumeCheckpoint( + String type, String source, DataStreamsContextCarrier carrier, Boolean isManual) { if (type == null || type.isEmpty() || source == null || source.isEmpty()) { log.warn("setConsumeCheckpoint should be called with non-empty type and source"); return; @@ -239,8 +244,13 @@ public void setConsumeCheckpoint(String type, String source, DataStreamsContextC } mergePathwayContextIntoSpan(span, carrier); - DataStreamsTags tags = - DataStreamsTags.createManual(type, DataStreamsTags.Direction.Inbound, source); + DataStreamsTags tags; + if (isManual) { + tags = DataStreamsTags.createManual(type, DataStreamsTags.Direction.Inbound, source); + } else { + tags = DataStreamsTags.create(type, DataStreamsTags.Direction.Inbound, source); + } + setCheckpoint(span, fromTags(tags)); } @@ -256,9 +266,13 @@ public void setProduceCheckpoint( log.warn("SetProduceCheckpoint is called with no active span"); return; } + DataStreamsTags tags; + if (manualCheckpoint) { + tags = DataStreamsTags.createManual(type, DataStreamsTags.Direction.Outbound, target); + } else { + tags = DataStreamsTags.create(type, DataStreamsTags.Direction.Outbound, target); + } - DataStreamsTags tags = - DataStreamsTags.createManual(type, DataStreamsTags.Direction.Outbound, target); DataStreamsContext dsmContext = fromTags(tags); this.propagator.inject( span.with(dsmContext), carrier, DataStreamsContextCarrierAdapter.INSTANCE); diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 30cead9d425..35a6637eb0f 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -1,6 +1,7 @@ package datadog.trace.api.datastreams; import datadog.trace.util.FNV64Hash; +import java.util.Objects; public class DataStreamsTags { public enum Direction { @@ -122,6 +123,93 @@ public static DataStreamsTags createWithPartition( partition); } + /// For usage in tests *only* + public static DataStreamsTags 
fromTags(String[] tags) { + String bus = null; + Direction direction = null; + String exchange = null; + String topic = null; + String type = null; + String subscription = null; + // additional grouping tags + String datasetName = null; + String datasetNamespace = null; + Boolean isManual = null; + // informational tags + String group = null; + String consumerGroup = null; + Boolean hasRoutingKey = null; + String kafkaClusterId = null; + String partition = null; + + for (String tag : tags) { + String[] splitResult = tag.split(":"); + if (splitResult.length != 2) { + continue; + } + switch (splitResult[0]) { + case BUS_TAG: + bus = splitResult[1]; + break; + case DIRECTION_TAG: + direction = + Objects.equals(splitResult[1], "out") ? Direction.Outbound : Direction.Inbound; + break; + case EXCHANGE_TAG: + exchange = splitResult[1]; + break; + case TOPIC_TAG: + topic = splitResult[1]; + break; + case TYPE_TAG: + type = splitResult[1]; + break; + case SUBSCRIPTION_TAG: + subscription = splitResult[1]; + break; + case DATASET_NAME_TAG: + datasetName = splitResult[1]; + break; + case DATASET_NAMESPACE_TAG: + datasetNamespace = splitResult[1]; + break; + case MANUAL_TAG: + isManual = Objects.equals(splitResult[1], "true"); + break; + case GROUP_TAG: + group = splitResult[1]; + break; + case CONSUMER_GROUP_TAG: + consumerGroup = splitResult[1]; + break; + case HAS_ROUTING_KEY_TAG: + hasRoutingKey = Objects.equals(splitResult[1], "true"); + break; + case KAFKA_CLUSTER_ID_TAG: + kafkaClusterId = splitResult[1]; + break; + case PARTITION_TAG: + partition = splitResult[1]; + break; + } + } + + return new DataStreamsTags( + bus, + direction, + exchange, + topic, + type, + subscription, + datasetName, + datasetNamespace, + isManual, + group, + consumerGroup, + hasRoutingKey, + kafkaClusterId, + partition); + } + public static DataStreamsTags createWithGroup( String type, Direction direction, String topic, String group) { return new DataStreamsTags( From 0def2039da3e628302b932a8f54cabfa5290df28 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 17:20:17 -0500 Subject: [PATCH 18/29] Fixed more tests --- .../src/test/groovy/KafkaClientTestBase.groovy | 18 +++++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy index adc4814b616..7765deda863 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy @@ -285,20 +285,18 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == [ + tags == DataStreamsTags.fromTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka" - ] - edgeTags.size() == 5 + ) } List produce = [ "kafka_cluster_id:$clusterId", @@ -433,25 +431,23 @@
abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == [ + tags == DataStreamsTags.fromTags( "direction:out", "kafka_cluster_id:$clusterId".toString(), "topic:$SHARED_TOPIC".toString(), "type:kafka" - ] - edgeTags.size() == 4 + ) } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - edgeTags == [ + tags == DataStreamsTags.fromTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId".toString(), "topic:$SHARED_TOPIC".toString(), "type:kafka" - ] - edgeTags.size() == 5 + ) } List produce = [ "kafka_cluster_id:$clusterId".toString(), From 3433f481b57cf8b0dca7506df6766aeb2bf3454d Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 17:33:02 -0500 Subject: [PATCH 19/29] Spotless apply --- .../test/groovy/KafkaClientTestBase.groovy | 6 ++-- .../api/datastreams/DataStreamsTags.java | 36 +++++++++---------- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy index 7765deda863..9d1388256ec 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy @@ -296,7 +296,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka" - ) + ) } List produce = [ "kafka_cluster_id:$clusterId", @@ -436,7 +436,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { "kafka_cluster_id:$clusterId".toString(), "topic:$SHARED_TOPIC".toString(), "type:kafka" - ) + ) } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } @@ -447,7 +447,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { "kafka_cluster_id:$clusterId".toString(), "topic:$SHARED_TOPIC".toString(), "type:kafka" - ) + ) } List produce = [ "kafka_cluster_id:$clusterId".toString(), diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 35a6637eb0f..0028f10b972 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -143,52 +143,52 @@ public static DataStreamsTags fromTags(String[] tags) { String partition = null; for (String tag : tags) { - String[] splitResult = tag.split(":"); - if (splitResult.length != 2) { + if (tag.indexOf(':') == -1) { continue; } - switch (splitResult[0]) { + String key = tag.substring(0, tag.indexOf(':')); + String value = tag.substring(tag.indexOf(':') + 1); + switch (key) { case BUS_TAG: - bus = splitResult[1]; + bus = value; break; case DIRECTION_TAG: - direction = - Objects.equals(splitResult[1], "out") ? Direction.Outbound : Direction.Inbound; + direction = Objects.equals(value, "out") ? 
Direction.Outbound : Direction.Inbound; break; case EXCHANGE_TAG: - exchange = splitResult[1]; + exchange = value; break; case TOPIC_TAG: - topic = splitResult[1]; + topic = value; break; case TYPE_TAG: - type = splitResult[1]; + type = value; break; case SUBSCRIPTION_TAG: - subscription = splitResult[1]; + subscription = value; break; case DATASET_NAME_TAG: - datasetName = splitResult[1]; + datasetName = value; break; case DATASET_NAMESPACE_TAG: - datasetNamespace = splitResult[1]; + datasetNamespace = value; break; case MANUAL_TAG: - isManual = Objects.equals(splitResult[1], "true"); + isManual = Objects.equals(value, "true"); break; case GROUP_TAG: - group = splitResult[1]; + group = value; break; case CONSUMER_GROUP_TAG: - consumerGroup = splitResult[1]; + consumerGroup = value; break; case HAS_ROUTING_KEY_TAG: - hasRoutingKey = Objects.equals(splitResult[1], "true"); + hasRoutingKey = Objects.equals(value, "true"); break; case KAFKA_CLUSTER_ID_TAG: - kafkaClusterId = splitResult[1]; + kafkaClusterId = value; break; case PARTITION_TAG: - partition = splitResult[1]; + partition = value; break; } } From 8718b2299f2ace6bd29743bb91ec167055ba5add Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 18:36:13 -0500 Subject: [PATCH 20/29] Fixed even more tests --- .../test/groovy/KafkaClientTestBase.groovy | 3 +-- .../agent/test/base/HttpServerTest.groovy | 3 +-- .../datastreams/DefaultPathwayContext.java | 19 ------------------- .../datastreams/DataStreamsTagsTest.groovy | 20 ++++++++++++++++++++ 4 files changed, 22 insertions(+), 23 deletions(-) diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy index 9d1388256ec..102c9886ddb 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy @@ -919,8 +919,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - edgeTags == ["direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka"] - edgeTags.size() == 4 + tags == DataStreamsTags.fromTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy index 08843097449..ed077d10621 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy @@ -638,7 +638,6 @@ abstract class HttpServerTest extends WithHttpServer { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { tags == DSM_EDGE_TAGS - tags.getSize() == DSM_EDGE_TAGS.getSize() } } @@ -726,7 +725,7 @@ abstract class HttpServerTest extends WithHttpServer { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == edgeTags + tags == DSM_EDGE_TAGS } } diff --git
a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java index f77a74a328f..9fe9af7e9e6 100644 --- a/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java +++ b/dd-trace-core/src/main/java/datadog/trace/core/datastreams/DefaultPathwayContext.java @@ -87,9 +87,6 @@ public synchronized void setCheckpoint( long startNanos = timeSource.getCurrentTimeNanos(); long nanoTicks = timeSource.getNanoTicks(); - PathwayHashBuilder pathwayHashBuilder = - new PathwayHashBuilder(hashOfKnownTags, serviceNameOverride); - if (!started) { long defaultTimestamp = context.defaultTimestamp(); if (defaultTimestamp == 0) { @@ -146,7 +143,6 @@ public synchronized void setCheckpoint( hash = newHash; pointConsumer.accept(point); - log.debug("Checkpoint set {}, hash source: {}", this, pathwayHashBuilder); } @Override @@ -272,21 +268,6 @@ private static DefaultPathwayContext decode( serviceNameOverride); } - private static class PathwayHashBuilder { - private long hash; - - public PathwayHashBuilder(long baseHash, String serviceNameOverride) { - hash = baseHash; - if (serviceNameOverride != null) { - addTag(serviceNameOverride); - } - } - - public void addTag(String tag) { - hash = FNV64Hash.continueHash(hash, tag, FNV64Hash.Version.v1); - } - } - public static long getBaseHash(WellKnownTags wellKnownTags) { StringBuilder builder = new StringBuilder(); builder.append(wellKnownTags.getService()); diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy index ff34b373552..ece51044a5e 100644 --- a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -64,4 +64,24 @@ class DataStreamsTagsTest extends Specification { one != three two != three } + + def 'test from tags'() { + setup: + def one = DataStreamsTags.fromTags("direction:in", "topic:abc") + expect: + one.nonNullSize() == 2 + one.direction == "direction:in" + one.topic == "topic:abc" + } + + def 'test create'() { + setup: + def one = DataStreamsTags.create("type", DataStreamsTags.Direction.Outbound) + def two = DataStreamsTags.create("type", DataStreamsTags.Direction.Outbound, "topic") + def three = DataStreamsTags.create("type", DataStreamsTags.Direction.Outbound, "topic", "group", "cluster") + expect: + one == DataStreamsTags.fromTags("type:type", "direction:out") + two == DataStreamsTags.fromTags("type:type", "direction:out", "topic:topic") + three == DataStreamsTags.fromTags("type:type", "direction:out", "topic:topic", "group:group", "kafka_cluster_id:cluster") + } } From 6800ca8169df6474331d85904f1b124be65ea020 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 19:28:22 -0500 Subject: [PATCH 21/29] Fixed http tests, improved test coverage --- .../agent/test/base/HttpClientTest.groovy | 34 ++++++++++--------- .../datastreams/DataStreamsTagsTest.groovy | 32 +++++++++++++++-- 2 files changed, 48 insertions(+), 18 deletions(-) diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy index bebd9833962..1f7a15db86b 100644 --- 
a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy @@ -6,6 +6,7 @@ import datadog.trace.agent.test.server.http.HttpProxy import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.TracerConfig +import datadog.trace.api.datastreams.DataStreamsContext import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.bootstrap.instrumentation.api.URIUtils @@ -36,6 +37,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { protected static final int READ_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(5) as int protected static final BASIC_AUTH_KEY = "custom_authorization_header" protected static final BASIC_AUTH_VAL = "plain text auth token" + protected static final DSM_EDGE_TAGS = DataStreamsContext.forHttpClient().tags() @AutoCleanup @Shared @@ -175,7 +177,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -217,7 +219,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -264,7 +266,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -298,7 +300,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -335,7 +337,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -373,7 +375,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -406,7 +408,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -440,7 +442,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -484,7 +486,7 @@ abstract class 
HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } } @@ -531,7 +533,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -567,7 +569,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -601,7 +603,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -659,7 +661,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -743,7 +745,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -771,7 +773,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } @@ -804,7 +806,7 @@ abstract class HttpClientTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - getTags() == DataStreamsTags.fromTags("type:http", "direction:in") + getTags() == DSM_EDGE_TAGS } } diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy index ece51044a5e..abbea4507d6 100644 --- a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -67,11 +67,35 @@ class DataStreamsTagsTest extends Specification { def 'test from tags'() { setup: - def one = DataStreamsTags.fromTags("direction:in", "topic:abc") + def one = DataStreamsTags.fromTags( + "direction:in", + "topic:abc", + "exchange:exchange", + "partition:0", + "has_routing_key:true", + "ds.name:dataset", + "subscription:subscription", + "bus:bus", + "garbage", + "ds.namespace:namespace", + "manual_checkpoint:false", + "consumer_group:group", + "group:group" + ) expect: - one.nonNullSize() == 2 + one.nonNullSize() == 12 + one.bus == "bus:bus" one.direction == "direction:in" one.topic == "topic:abc" + one.exchange == "exchange:exchange" + 
one.partition == "partition:0" + one.hasRoutingKey == "has_routing_key:true" + one.datasetName == "ds.name:dataset" + one.subscription == "subscription:subscription" + one.datasetNamespace == "ds.namespace:namespace" + one.isManual == "manual_checkpoint:false" + one.consumerGroup == "consumer_group:group" + one.group == "group:group" } def 'test create'() { @@ -79,9 +103,13 @@ class DataStreamsTagsTest extends Specification { def one = DataStreamsTags.create("type", DataStreamsTags.Direction.Outbound) def two = DataStreamsTags.create("type", DataStreamsTags.Direction.Outbound, "topic") def three = DataStreamsTags.create("type", DataStreamsTags.Direction.Outbound, "topic", "group", "cluster") + def four = DataStreamsTags.createWithPartition("type", "topic", "partition", "cluster", "group") + def five = DataStreamsTags.createWithDataset("type", DataStreamsTags.Direction.Outbound, "topic", "dataset", "namespace") expect: one == DataStreamsTags.fromTags("type:type", "direction:out") two == DataStreamsTags.fromTags("type:type", "direction:out", "topic:topic") three == DataStreamsTags.fromTags("type:type", "direction:out", "topic:topic", "group:group", "kafka_cluster_id:cluster") + four == DataStreamsTags.fromTags("type:type", "topic:topic", "partition:partition", "kafka_cluster_id:cluster", "consumer_group:group") + five == DataStreamsTags.fromTags("type:type", "direction:out", "topic:topic", "ds.name:dataset", "ds.namespace:namespace") } } From 12059371828ad2ffee1da2906c22e7150afb09d9 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 19:42:45 -0500 Subject: [PATCH 22/29] Spotless apply --- .../datastreams/DataStreamsTagsTest.groovy | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy index abbea4507d6..dfd1ec34f2a 100644 --- a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -68,20 +68,20 @@ class DataStreamsTagsTest extends Specification { def 'test from tags'() { setup: def one = DataStreamsTags.fromTags( - "direction:in", - "topic:abc", - "exchange:exchange", - "partition:0", - "has_routing_key:true", - "ds.name:dataset", - "subscription:subscription", - "bus:bus", - "garbage", - "ds.namespace:namespace", - "manual_checkpoint:false", - "consumer_group:group", - "group:group" - ) + "direction:in", + "topic:abc", + "exchange:exchange", + "partition:0", + "has_routing_key:true", + "ds.name:dataset", + "subscription:subscription", + "bus:bus", + "garbage", + "ds.namespace:namespace", + "manual_checkpoint:false", + "consumer_group:group", + "group:group" + ) expect: one.nonNullSize() == 12 one.bus == "bus:bus" From 18258b08ec3a1e24a7bef89f231cbf886b153f6d Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 21:12:40 -0500 Subject: [PATCH 23/29] One more refactoring --- .../src/test/groovy/ArmeriaGrpcTest.groovy | 4 +- .../groovy/AWS1KinesisClientTest.groovy | 2 +- .../dsmTest/groovy/AWS1SnsClientTest.groovy | 2 +- .../groovy/Aws2KinesisDataStreamsTest.groovy | 4 +- .../groovy/Aws2SnsDataStreamsTest.groovy | 4 +- .../src/test/groovy/SnsClientTest.groovy | 2 +- .../src/test/groovy/SnsClientTest.groovy | 2 +- .../src/test/groovy/SqsClientTest.groovy | 8 +- .../src/test/groovy/SqsClientTest.groovy | 4 +- 
.../src/test/groovy/PubSubTest.groovy | 8 +- .../grpc-1.5/src/test/groovy/GrpcTest.groovy | 4 +- .../groovy/KafkaClientTestBase.groovy | 4 +- .../test/groovy/KafkaClientTestBase.groovy | 12 +- .../test/groovy/KafkaClientTestBase.groovy | 8 +- .../ConnectWorkerInstrumentationTest.groovy | 31 ++--- .../groovy/KafkaStreamsTest.groovy | 8 +- .../test/groovy/KafkaStreamsTestBase.groovy | 8 +- .../src/test/groovy/RabbitMQTest.groovy | 28 ++-- .../api/datastreams/DataStreamsTags.java | 131 +++++++++--------- .../datastreams/DataStreamsTagsTest.groovy | 12 +- 20 files changed, 139 insertions(+), 147 deletions(-) diff --git a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy index 1effce3d8b2..1ee14d1e395 100644 --- a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy +++ b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy @@ -256,12 +256,12 @@ abstract class ArmeriaGrpcTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:in", "topic:somequeue", "type:sqs") + tags.hasAllTags("direction:in", "topic:somequeue", "type:grpc") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "type:grpc") + tags.hasAllTags("direction:in", "type:grpc") } } diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy index 53120a85c75..51a6258b236 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy @@ -116,7 +116,7 @@ abstract class AWS1KinesisClientTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:" + streamArn, "type:kinesis") + tags.hasAllTags("direction:" + dsmDirection, "topic:" + streamArn, "type:kinesis") } } verifyAll { diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy index 5f01c7af612..0fbbb52d6ab 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy @@ -97,7 +97,7 @@ abstract class AWS1SnsClientTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:" + topicName, "type:sns") + tags.hasAllTags("direction:" + dsmDirection, "topic:" + topicName, "type:sns") } } verifyAll { diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy 
b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy index bf8e99fc13d..7bacf3c1aab 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy @@ -157,7 +157,7 @@ abstract class Aws2KinesisDataStreamsTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis") + tags.hasAllTags("direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis") } } verifyAll { @@ -278,7 +278,7 @@ abstract class Aws2KinesisDataStreamsTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis") + tags.hasAllTags("direction:" + dsmDirection, "topic:arnprefix:stream/somestream", "type:kinesis") } } verifyAll { diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy index 0e81c49835f..f691b1d1a98 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy @@ -145,7 +145,7 @@ abstract class Aws2SnsDataStreamsTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:mytopic", "type:sns") + tags.hasAllTags("direction:" + dsmDirection, "topic:mytopic", "type:sns") } } verifyAll { @@ -243,7 +243,7 @@ abstract class Aws2SnsDataStreamsTest extends VersionedNamingTestBase { pathwayLatencyCount += group.pathwayLatency.count edgeLatencyCount += group.edgeLatency.count verifyAll(group) { - tags == DataStreamsTags.fromTags("direction:" + dsmDirection, "topic:mytopic", "type:sns") + tags.hasAllTags("direction:" + dsmDirection, "topic:mytopic", "type:sns") } } verifyAll { diff --git a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy index 11727ce3882..2c5a6a3b4d4 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy @@ -195,7 +195,7 @@ abstract class SnsClientTest extends VersionedNamingTestBase { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "topic:testtopic", "type:sns") + tags.hasAllTags("direction:out", "topic:testtopic", "type:sns") } } diff --git a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy index fe2a3674da6..f85a12f449d 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy +++ 
b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy @@ -165,7 +165,7 @@ abstract class SnsClientTest extends VersionedNamingTestBase { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "topic:testtopic", "type:sns") + tags.hasAllTags("direction:out", "topic:testtopic", "type:sns") } } diff --git a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy index 8eb1a79abfe..bcf94da0e1c 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sqs-1.0/src/test/groovy/SqsClientTest.groovy @@ -173,12 +173,12 @@ abstract class SqsClientTest extends VersionedNamingTestBase { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "topic:somequeue", "type:sqs") + tags.hasAllTags("direction:out", "topic:somequeue", "type:sqs") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "topic:somequeue", "type:sqs") + tags.hasAllTags("direction:in", "topic:somequeue", "type:sqs") } } @@ -628,7 +628,7 @@ class SqsClientV1DataStreamsForkedTest extends SqsClientTest { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == -2734507826469073289 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:in", "topic:somequeue", "type:sqs") + tags.hasAllTags("direction:in", "topic:somequeue", "type:sqs") } cleanup: @@ -657,7 +657,7 @@ class SqsClientV1DataStreamsForkedTest extends SqsClientTest { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:in", "topic:somequeue", "type:sqs") + tags.hasAllTags("direction:in", "topic:somequeue", "type:sqs") } cleanup: diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy index 073aebb9a25..4549e35be97 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy @@ -174,12 +174,12 @@ abstract class SqsClientTest extends VersionedNamingTestBase { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "topic:somequeue", "type:sqs") + tags.hasAllTags("direction:out", "topic:somequeue", "type:sqs") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "topic:somequeue", "type:sqs") + tags.hasAllTags("direction:in", "topic:somequeue", "type:sqs") } } diff --git a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy index 65ce8c85732..7c8b57243f4 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy +++ b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy @@ -40,10 +40,6 @@ import spock.lang.Shared import 
java.nio.charset.StandardCharsets import java.util.concurrent.CountDownLatch -import java.util.function.Function -import java.util.function.ToDoubleFunction -import java.util.function.ToIntFunction -import java.util.function.ToLongFunction abstract class PubSubTest extends VersionedNamingTestBase { private static final String PROJECT_ID = "dd-trace-java" @@ -238,11 +234,11 @@ abstract class PubSubTest extends VersionedNamingTestBase { StatsGroup sendStat = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0} verifyAll (sendStat) { - tags == DataStreamsTags.fromTags("direction:out" , "topic:test-topic", "type:google-pubsub") + tags.hasAllTags("direction:out" , "topic:test-topic", "type:google-pubsub") } StatsGroup receiveStat = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == sendStat.hash} verifyAll(receiveStat) { - tags == DataStreamsTags.fromTags("direction:in" , "subscription:my-subscription", "type:google-pubsub") + tags.hasAllTags("direction:in" , "subscription:my-subscription", "type:google-pubsub") pathwayLatency.count == 1 pathwayLatency.minValue > 0.0 edgeLatency.count == 1 diff --git a/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy b/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy index c3f31367a9f..efa80190e92 100644 --- a/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy +++ b/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy @@ -245,12 +245,12 @@ abstract class GrpcTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "type:grpc") + tags.hasAllTags("direction:out", "type:grpc") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "type:grpc") + tags.hasAllTags("direction:in", "type:grpc") } } diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy index 01cbe2cc784..6d1e432c621 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy @@ -233,7 +233,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { new String(headers.headers("x-datadog-parent-id").iterator().next().value()) == "${traces[produceTraceIdx][2].spanId}" if (isDataStreamsEnabled()) { - def val = DataStreamsTags.fromTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") + def val = DataStreamsTags.fromTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { tags.toString() == val.toString() @@ -241,7 +241,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags( + tags.hasAllTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId", diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy
b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy index 102c9886ddb..5bf5d304e73 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy @@ -285,12 +285,12 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") + tags.hasAllTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags( + tags.hasAllTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId", @@ -431,7 +431,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags( + tags.hasAllTags( "direction:out", "kafka_cluster_id:$clusterId".toString(), "topic:$SHARED_TOPIC".toString(), @@ -441,7 +441,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags( + tags.hasAllTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId".toString(), @@ -919,12 +919,12 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") + tags.hasAllTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags( + tags.hasAllTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId", diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy index 99fe7673bd4..5960cd31830 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy @@ -261,11 +261,11 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") + tags.hasAllTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags( + tags.hasAllTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId", @@ -411,7 +411,7 @@ 
abstract class KafkaClientTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags( + tags.hasAllTags( "direction:out", "kafka_cluster_id:$clusterId".toString(), "topic:$SHARED_TOPIC".toString(), @@ -421,7 +421,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags( + tags.hasAllTags( "direction:in", "group:sender", "kafka_cluster_id:$clusterId".toString(), diff --git a/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy b/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy index aad552c2764..00a8cd219ce 100644 --- a/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy +++ b/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy @@ -154,7 +154,7 @@ class ConnectWorkerInstrumentationTest extends AgentTestRunner { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags( + tags.hasAllTags( "direction:out", "topic:test-topic", "type:kafka" @@ -162,13 +162,12 @@ class ConnectWorkerInstrumentationTest extends AgentTestRunner { } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } + def control = DataStreamsTags.hasAllTags("direction:in", "group:test-consumer-group", "topic:test-topic", "type:kafka") verifyAll(second) { - tags == DataStreamsTags.fromTags( - "direction:in", - "group:test-consumer-group", - "topic:test-topic", - "type:kafka" - ) + tags.direction == control.direction + tags.group == control.group + tags.topic == control.topic + tags.type == control.type } TEST_DATA_STREAMS_WRITER.getServices().contains('file-source-connector') @@ -286,21 +285,17 @@ class ConnectWorkerInstrumentationTest extends AgentTestRunner { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags( - "direction:out", - "topic:test-topic", - "type:kafka" - ) + tags.hasAllTags("direction:out", "topic:test-topic", "type:kafka", "kafka_cluster_id:" + clusterId) } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } + def control = DataStreamsTags.hasAllTags("direction:in", "group:connect-file-sink-connector", "topic:test-topic", "type:kafka", "kafka_cluster_id:" + clusterId) verifyAll(second) { - tags == DataStreamsTags.fromTags( - "direction:in", - "group:connect-file-sink-connector", - "topic:test-topic", - "type:kafka" - ) + tags.direction == control.direction + tags.group == control.group + tags.topic == control.topic + tags.type == control.type + tags.kafkaClusterId == control.kafkaClusterId } TEST_DATA_STREAMS_WRITER.getServices().contains('file-sink-connector') diff --git a/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy b/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy index bc4a2e83f39..e9ef3652cac 100644 --- a/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy +++ 
b/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy @@ -227,12 +227,12 @@ class KafkaStreamsTest extends AgentTestRunner { if (isDataStreamsEnabled()) { StatsGroup originProducerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(originProducerPoint) { - tags == DataStreamsTags.fromTags("direction:out", "topic:$STREAM_PENDING", "type:kafka") + tags.hasAllTags("direction:out", "topic:$STREAM_PENDING", "type:kafka") } StatsGroup kafkaStreamsConsumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == originProducerPoint.hash } verifyAll(kafkaStreamsConsumerPoint) { - tags == DataStreamsTags.fromTags("direction:in", + tags.hasAllTags("direction:in", "group:test-application", "topic:$STREAM_PENDING".toString(), "type:kafka") @@ -240,12 +240,12 @@ class KafkaStreamsTest extends AgentTestRunner { StatsGroup kafkaStreamsProducerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == kafkaStreamsConsumerPoint.hash } verifyAll(kafkaStreamsProducerPoint) { - tags == DataStreamsTags.fromTags("direction:out", "topic:$STREAM_PROCESSED", "type:kafka") + tags.hasAllTags("direction:out", "topic:$STREAM_PROCESSED", "type:kafka") } StatsGroup finalConsumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == kafkaStreamsProducerPoint.hash } verifyAll(finalConsumerPoint) { - tags == DataStreamsTags.fromTags("direction:in", "group:sender", "topic:$STREAM_PROCESSED".toString(), "type:kafka") + tags.hasAllTags("direction:in", "group:sender", "topic:$STREAM_PROCESSED".toString(), "type:kafka") } } diff --git a/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy b/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy index d56cc942b19..1dadaf5c477 100644 --- a/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy @@ -290,12 +290,12 @@ abstract class KafkaStreamsTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup originProducerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(originProducerPoint) { - tags == DataStreamsTags.fromTags("direction:out", "topic:$STREAM_PENDING", "type:kafka") + tags.hasAllTags("direction:out", "topic:$STREAM_PENDING", "type:kafka") } StatsGroup kafkaStreamsConsumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == originProducerPoint.hash } verifyAll(kafkaStreamsConsumerPoint) { - tags == DataStreamsTags.fromTags("direction:in", + tags.hasAllTags("direction:in", "group:test-application", "topic:$STREAM_PENDING".toString(), "type:kafka") @@ -303,12 +303,12 @@ abstract class KafkaStreamsTestBase extends VersionedNamingTestBase { StatsGroup kafkaStreamsProducerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == kafkaStreamsConsumerPoint.hash } verifyAll(kafkaStreamsProducerPoint) { - tags == DataStreamsTags.fromTags("direction:out", "topic:$STREAM_PROCESSED", "type:kafka") + tags.hasAllTags("direction:out", "topic:$STREAM_PROCESSED", "type:kafka") } StatsGroup finalConsumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == kafkaStreamsProducerPoint.hash } verifyAll(finalConsumerPoint) { - tags == DataStreamsTags.fromTags("direction:in", "group:sender", "topic:$STREAM_PROCESSED".toString(), "type:kafka") + tags.hasAllTags("direction:in", "group:sender", 
"topic:$STREAM_PROCESSED".toString(), "type:kafka") } } diff --git a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy index e94ba171e07..fe30457ee89 100644 --- a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy +++ b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy @@ -172,12 +172,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq") + tags.hasAllTags("direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") + tags.hasAllTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } @@ -224,12 +224,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "exchange:", "has_routing_key:true", "type:rabbitmq") + tags.hasAllTags("direction:out", "exchange:", "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") + tags.hasAllTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } } @@ -319,13 +319,13 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { List producerPoints = TEST_DATA_STREAMS_WRITER.groups.findAll { it.parentHash == 0 } producerPoints.each { producerPoint -> verifyAll(producerPoint) { - tags == DataStreamsTags.fromTags("direction:out", "exchange:" + exchangeName, "has_routing_key:false", "type:rabbitmq") + tags.hasAllTags("direction:out", "exchange:" + exchangeName, "has_routing_key:false", "type:rabbitmq") } } StatsGroup consumerPoint = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == producerPoints.get(0).hash } verifyAll(consumerPoint) { - tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") + tags.hasAllTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } @@ -409,12 +409,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "exchange:" + exchangeName, "has_routing_key:false", "type:rabbitmq") + tags.hasAllTags("direction:out", "exchange:" + exchangeName, "has_routing_key:false", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") + tags.hasAllTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } @@ -492,12 +492,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash 
== 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "exchange:", "has_routing_key:true", "type:rabbitmq") + tags.hasAllTags("direction:out", "exchange:", "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "topic:some-routing-queue", "type:rabbitmq") + tags.hasAllTags("direction:in", "topic:some-routing-queue", "type:rabbitmq") } } } @@ -574,12 +574,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { if (isDataStreamsEnabled() && !noParent) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq") + tags.hasAllTags("direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") + tags.hasAllTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } @@ -668,12 +668,12 @@ abstract class RabbitMQTestBase extends VersionedNamingTestBase { // assert with retries in case DSM data is split in more groups that take a bit longer to arrive. StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags == DataStreamsTags.fromTags("direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq") + tags.hasAllTags("direction:out", "exchange:" + exchangeName, "has_routing_key:true", "type:rabbitmq") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } verifyAll(second) { - tags == DataStreamsTags.fromTags("direction:in", "topic:" + queueName, "type:rabbitmq") + tags.hasAllTags("direction:in", "topic:" + queueName, "type:rabbitmq") } } } diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 0028f10b972..cb5ccc97a00 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -124,90 +124,91 @@ public static DataStreamsTags createWithPartition( } /// For usage in tests *only* - public static DataStreamsTags fromTags(String[] tags) { - String bus = null; - Direction direction = null; - String exchange = null; - String topic = null; - String type = null; - String subscription = null; - // additional grouping tags - String datasetName = null; - String datasetNamespace = null; - Boolean isManual = null; - // informational tags - String group = null; - String consumerGroup = null; - Boolean hasRoutingKey = null; - String kafkaClusterId = null; - String partition = null; - + public Boolean hasAllTags(String[] tags) { for (String tag : tags) { if (tag.indexOf(':') == -1) { - continue; + return false; } String key = tag.substring(0, tag.indexOf(':')); String value = tag.substring(tag.indexOf(':') + 1); switch (key) { case BUS_TAG: - bus = value; + if (!Objects.equals(this.bus, tag)) { + return false; + } break; case DIRECTION_TAG: - direction = Objects.equals(value, "out") ? Direction.Outbound : Direction.Inbound; + if (!Objects.equals( + this.directionValue, + Objects.equals(value, "out") ? 
Direction.Outbound : Direction.Inbound)) { + return false; + } break; case EXCHANGE_TAG: - exchange = value; + if (!Objects.equals(this.exchange, tag)) { + return false; + } break; case TOPIC_TAG: - topic = value; + if (!Objects.equals(this.topic, tag)) { + return false; + } break; case TYPE_TAG: - type = value; + if (!Objects.equals(this.type, tag)) { + return false; + } break; case SUBSCRIPTION_TAG: - subscription = value; + if (!Objects.equals(this.subscription, tag)) { + return false; + } break; case DATASET_NAME_TAG: - datasetName = value; + if (!Objects.equals(this.datasetName, tag)) { + return false; + } break; case DATASET_NAMESPACE_TAG: - datasetNamespace = value; + if (!Objects.equals(this.datasetNamespace, tag)) { + return false; + } break; case MANUAL_TAG: - isManual = Objects.equals(value, "true"); + if (!Objects.equals(this.isManual, tag)) { + return false; + } case GROUP_TAG: - group = value; + if (!Objects.equals(this.group, tag)) { + return false; + } break; case CONSUMER_GROUP_TAG: - consumerGroup = value; + if (!Objects.equals(this.consumerGroup, tag)) { + return false; + } break; case HAS_ROUTING_KEY_TAG: - hasRoutingKey = Objects.equals(value, "true"); + if (!Objects.equals(this.hasRoutingKey, tag)) { + return false; + } break; case KAFKA_CLUSTER_ID_TAG: - kafkaClusterId = value; + if (!Objects.equals(this.kafkaClusterId, tag)) { + return false; + } break; case PARTITION_TAG: - partition = value; + if (!Objects.equals(this.partition, tag)) { + return false; + } break; + default: + return false; } } - return new DataStreamsTags( - bus, - direction, - exchange, - topic, - type, - subscription, - datasetName, - datasetNamespace, - isManual, - group, - consumerGroup, - hasRoutingKey, - kafkaClusterId, - partition); + return true; } public static DataStreamsTags createWithGroup( @@ -246,7 +247,7 @@ public static void setGlobalBaseHash(long hash) { public static DataStreamsTags createWithClusterId( String type, Direction direction, String topic, String clusterId) { return new DataStreamsTags( - null, direction, null, topic, type, null, null, null, false, null, null, null, clusterId, + null, direction, null, topic, type, null, null, null, null, null, null, null, clusterId, null); } @@ -481,37 +482,37 @@ public String toString() { return "DataStreamsTags{" + "bus='" + this.bus - + ", direction=" + + "', direction='" + this.direction - + ", exchange='" + + "', exchange='" + this.exchange - + ", topic='" + + "', topic='" + this.topic - + ", type='" + + "', type='" + this.type - + ", subscription='" + + "', subscription='" + this.subscription - + ", datasetName='" + + "', datasetName='" + this.datasetName - + ", datasetNamespace='" + + "', datasetNamespace='" + this.datasetNamespace - + ", isManual=" + + "', isManual=" + this.isManual - + ", group='" + + "', group='" + this.group - + ", consumerGroup='" + + "', consumerGroup='" + this.consumerGroup - + ", hasRoutingKey='" + + "', hasRoutingKey='" + this.hasRoutingKey - + ", kafkaClusterId='" + + "', kafkaClusterId='" + this.kafkaClusterId - + ", partition='" + + "', partition='" + this.partition - + ", hash=" + + "', hash='" + hash - + ", aggregationHash=" + + "', aggregationHash='" + aggregationHash - + ", size=" + + "', size='" + size(); } } diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy index dfd1ec34f2a..32d2ed102e3 100644 --- 
a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -67,7 +67,7 @@ class DataStreamsTagsTest extends Specification { def 'test from tags'() { setup: - def one = DataStreamsTags.fromTags( + def one = DataStreamsTags.hasAllTags( "direction:in", "topic:abc", "exchange:exchange", @@ -106,10 +106,10 @@ class DataStreamsTagsTest extends Specification { def four = DataStreamsTags.createWithPartition("type", "topic", "partition", "cluster", "group") def five = DataStreamsTags.createWithDataset("type", DataStreamsTags.Direction.Outbound, "topic", "dataset", "namespace") expect: - one == DataStreamsTags.fromTags("type:type", "direction:out") - two == DataStreamsTags.fromTags("type:type", "direction:out", "topic:topic") - three == DataStreamsTags.fromTags("type:type", "direction:out", "topic:topic", "group:group", "kafka_cluster_id:cluster") - four == DataStreamsTags.fromTags("type:type", "topic:topic", "partition:partition", "kafka_cluster_id:cluster", "consumer_group:group") - five == DataStreamsTags.fromTags("type:type", "direction:out", "topic:topic", "ds.name:dataset", "ds.namespace:namespace") + one.hasAllTags("type:type", "direction:out") + two.hasAllTags("type:type", "direction:out", "topic:topic") + three.hasAllTags("type:type", "direction:out", "topic:topic", "group:group", "kafka_cluster_id:cluster") + four.hasAllTags("type:type", "topic:topic", "partition:partition", "kafka_cluster_id:cluster", "consumer_group:group") + five.hasAllTags("type:type", "direction:out", "topic:topic", "ds.name:dataset", "ds.namespace:namespace") } } From ddf3008a2e3fdee35496948876187594b5c6fe8c Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Wed, 16 Jul 2025 21:19:56 -0500 Subject: [PATCH 24/29] Some fixes in tests --- .../datastreams/DataStreamsTagsTest.groovy | 37 ++----------------- 1 file changed, 3 insertions(+), 34 deletions(-) diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy index 32d2ed102e3..27bb3dbc61c 100644 --- a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -14,6 +14,7 @@ class DataStreamsTagsTest extends Specification { setup: def tg = getTags(0) + expect: tg.getBus() == DataStreamsTags.BUS_TAG + ":bus0" tg.getDirection() == DataStreamsTags.DIRECTION_TAG + ":out" @@ -30,7 +31,8 @@ class DataStreamsTagsTest extends Specification { tg.getKafkaClusterId() == DataStreamsTags.KAFKA_CLUSTER_ID_TAG + ":kafka_cluster_id0" tg.getPartition() == DataStreamsTags.PARTITION_TAG + ":partition0" tg.getDirectionValue() == DataStreamsTags.Direction.Outbound - tg.toString() == "DataStreamsTags{bus='bus:bus0, direction=direction:out, exchange='exchange:exchange0, topic='topic:topic0, type='type:type0, subscription='subscription:subscription0, datasetName='ds.name:dataset_name0, datasetNamespace='ds.namespace:dataset_namespace0, isManual=manual_checkpoint:true, group='group:group0, consumerGroup='consumer_group:consumer_group0, hasRoutingKey='has_routing_key:true, kafkaClusterId='kafka_cluster_id:kafka_cluster_id0, partition='partition:partition0, hash=8349314675200082083, aggregationHash=1264721246230085006, size=14" + tg.toString() != null + tg.hasAllTags("123") == false } def 'test service name override and 
global hash'() { @@ -65,39 +67,6 @@ class DataStreamsTagsTest extends Specification { two != three } - def 'test from tags'() { - setup: - def one = DataStreamsTags.hasAllTags( - "direction:in", - "topic:abc", - "exchange:exchange", - "partition:0", - "has_routing_key:true", - "ds.name:dataset", - "subscription:subscription", - "bus:bus", - "garbage", - "ds.namespace:namespace", - "manual_checkpoint:false", - "consumer_group:group", - "group:group" - ) - expect: - one.nonNullSize() == 12 - one.bus == "bus:bus" - one.direction == "direction:in" - one.topic == "topic:abc" - one.exchange == "exchange:exchange" - one.partition == "partition:0" - one.hasRoutingKey == "has_routing_key:true" - one.datasetName == "ds.name:dataset" - one.subscription == "subscription:subscription" - one.datasetNamespace == "ds.namespace:namespace" - one.isManual == "manual_checkpoint:false" - one.consumerGroup == "consumer_group:group" - one.group == "group:group" - } - def 'test create'() { setup: def one = DataStreamsTags.create("type", DataStreamsTags.Direction.Outbound) From b559bdfcf3ab6d779986f554b5da0c477442a5fc Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Thu, 17 Jul 2025 08:45:02 -0500 Subject: [PATCH 25/29] Improved test coverage --- .../src/test/groovy/ArmeriaGrpcTest.groovy | 2 - .../groovy/AWS1KinesisClientTest.groovy | 1 - .../dsmTest/groovy/AWS1SnsClientTest.groovy | 1 - .../groovy/Aws2KinesisDataStreamsTest.groovy | 1 - .../groovy/Aws2SnsDataStreamsTest.groovy | 1 - .../src/test/groovy/SnsClientTest.groovy | 1 - .../src/test/groovy/SnsClientTest.groovy | 1 - .../src/test/groovy/SqsClientTest.groovy | 1 - .../src/test/groovy/PubSubTest.groovy | 1 - .../grpc-1.5/src/test/groovy/GrpcTest.groovy | 1 - .../test/groovy/KafkaClientTestBase.groovy | 2 - .../test/groovy/KafkaClientTestBase.groovy | 1 - .../groovy/KafkaStreamsTest.groovy | 1 - .../test/groovy/KafkaStreamsTestBase.groovy | 1 - .../src/test/groovy/RabbitMQTest.groovy | 1 - .../api/datastreams/DataStreamsTags.java | 17 +++++---- .../datastreams/DataStreamsTagsTest.groovy | 38 ++++++++++++++++++- 17 files changed, 46 insertions(+), 26 deletions(-) diff --git a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy index 1ee14d1e395..6a0885c22dd 100644 --- a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy +++ b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy @@ -1,5 +1,3 @@ -import datadog.trace.api.datastreams.DataStreamsTags - import static datadog.trace.api.config.TraceInstrumentationConfig.GRPC_SERVER_ERROR_STATUSES import com.google.common.util.concurrent.ListenableFuture diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy index 51a6258b236..5b6c8d782e7 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1KinesisClientTest.groovy @@ -11,7 +11,6 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags 
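// The DataStreamsTags import can go away here because assertions no longer build a
// DataStreamsTags via fromTags() for an equality check; they call the instance method
// on the recorded group's tags instead. A minimal sketch (tag values illustrative):
//   verifyAll(first) {
//     tags.hasAllTags("direction:out", "topic:somequeue", "type:sqs")
//   }
// hasAllTags() only checks that each listed "key:value" pair is present, so extra
// tags on the group no longer fail the assertion.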
import datadog.trace.core.datastreams.StatsGroup import spock.lang.AutoCleanup diff --git a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy index 0fbbb52d6ab..ca4ced2dbe0 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-1.11.0/src/dsmTest/groovy/AWS1SnsClientTest.groovy @@ -10,7 +10,6 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup import spock.lang.AutoCleanup diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy index 7bacf3c1aab..39aba2e2225 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2KinesisDataStreamsTest.groovy @@ -2,7 +2,6 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup import org.eclipse.jetty.http2.server.HTTP2CServerConnectionFactory diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy index f691b1d1a98..a312e4196fe 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/dsmTest/groovy/Aws2SnsDataStreamsTest.groovy @@ -2,7 +2,6 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup import datadog.trace.instrumentation.aws.ExpectedQueryParams diff --git a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy index 2c5a6a3b4d4..efa7dfe4c8c 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy @@ -10,7 +10,6 @@ import datadog.trace.agent.test.utils.TraceUtils import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.GeneralConfig -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup import groovy.json.JsonSlurper diff --git a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy 
b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy index f85a12f449d..4e4f83ccc84 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy @@ -3,7 +3,6 @@ import datadog.trace.agent.test.utils.TraceUtils import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.GeneralConfig -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup import datadog.trace.instrumentation.aws.ExpectedQueryParams diff --git a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy index 4549e35be97..fe196f12a6f 100644 --- a/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sqs-2.0/src/test/groovy/SqsClientTest.groovy @@ -7,7 +7,6 @@ import datadog.trace.api.DDSpanId import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.GeneralConfig -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.api.naming.SpanNaming import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags diff --git a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy index 7c8b57243f4..1564628cc86 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy +++ b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy @@ -1,4 +1,3 @@ -import datadog.trace.api.datastreams.DataStreamsTags import static datadog.trace.agent.test.utils.TraceUtils.basicSpan diff --git a/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy b/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy index efa80190e92..37d87ce30c5 100644 --- a/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy +++ b/dd-java-agent/instrumentation/grpc-1.5/src/test/groovy/GrpcTest.groovy @@ -1,4 +1,3 @@ -import datadog.trace.api.datastreams.DataStreamsTags import static datadog.trace.agent.test.asserts.TagsAssert.codeOriginTags import static datadog.trace.api.config.TraceInstrumentationConfig.GRPC_SERVER_ERROR_STATUSES diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy index 5bf5d304e73..cbac1e1203a 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy @@ -1,5 +1,3 @@ -import datadog.trace.api.datastreams.DataStreamsTags - import static datadog.trace.agent.test.utils.TraceUtils.basicSpan import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy index 5960cd31830..cfd8d4d5fd3 100644 --- 
a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy @@ -2,7 +2,6 @@ import datadog.trace.agent.test.asserts.TraceAssert import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDTags -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.common.writer.ListWriter diff --git a/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy b/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy index e9ef3652cac..436fdf6d68e 100644 --- a/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy +++ b/dd-java-agent/instrumentation/kafka-streams-0.11/src/latestDepTest/groovy/KafkaStreamsTest.groovy @@ -1,6 +1,5 @@ import datadog.trace.agent.test.AgentTestRunner import datadog.trace.api.DDTags -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup diff --git a/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy b/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy index 1dadaf5c477..c7f2b5d20c6 100644 --- a/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-streams-0.11/src/test/groovy/KafkaStreamsTestBase.groovy @@ -1,6 +1,5 @@ import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.DDTags -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.datastreams.StatsGroup diff --git a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy index fe30457ee89..c3c69a167fd 100644 --- a/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy +++ b/dd-java-agent/instrumentation/rabbitmq-amqp-2.7/src/test/groovy/RabbitMQTest.groovy @@ -12,7 +12,6 @@ import datadog.trace.agent.test.utils.PortUtils import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.core.DDSpan diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index cb5ccc97a00..79b06e3541f 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -56,14 +56,14 @@ public enum Direction { public static byte[] longToBytes(long val) { return new byte[] { - (byte) val, - (byte) (val >> 8), - (byte) (val >> 16), - (byte) (val >> 24), - (byte) (val >> 32), - (byte) 
(val >> 40), - (byte) (val >> 48), - (byte) (val >> 56) + (byte) (val >> 56), + (byte) (val >> 48), + (byte) (val >> 40), + (byte) (val >> 32), + (byte) (val >> 24), + (byte) (val >> 16), + (byte) (val >> 8), + (byte) val }; } @@ -178,6 +178,7 @@ public Boolean hasAllTags(String[] tags) { if (!Objects.equals(this.isManual, tag)) { return false; } + break; case GROUP_TAG: if (!Objects.equals(this.group, tag)) { return false; diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy index 27bb3dbc61c..6df555e9a46 100644 --- a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -1,6 +1,7 @@ package datadog.trace.api.datastreams import spock.lang.Specification +import java.nio.ByteBuffer class DataStreamsTagsTest extends Specification { @@ -32,7 +33,42 @@ class DataStreamsTagsTest extends Specification { tg.getPartition() == DataStreamsTags.PARTITION_TAG + ":partition0" tg.getDirectionValue() == DataStreamsTags.Direction.Outbound tg.toString() != null - tg.hasAllTags("123") == false + } + + def 'test has all tags'() { + setup: + def tags = new DataStreamsTags("bus", DataStreamsTags.Direction.Outbound, + "exchange", "topic", "type", "subscription", "dataset_name", "dataset_namespace", true, + "group", "consumer_group", true, "kafka_cluster_id", "partition") + expect: + tags.hasAllTags( + "bus:bus", + "direction:out", + "exchange:exchange", + "topic:topic", + "type:type", + "subscription:subscription", + "ds.name:dataset_name", + "ds.namespace:dataset_namespace", + "manual_checkpoint:true", + "group:group", + "consumer_group:consumer_group", + "has_routing_key:true", + "kafka_cluster_id:kafka_cluster_id", + "partition:partition" + ) + !tags.hasAllTags("garbage") + } + + def 'test long to bytes'() { + setup: + def value = 123444L + def bts = DataStreamsTags.longToBytes(value) + ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES); + buffer.putLong(value) + def ctrl = buffer.array() + expect: + bts == ctrl } def 'test service name override and global hash'() { From cd5073f5cca07bd67d497f84c74d31419c3ea8c3 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Thu, 17 Jul 2025 09:01:59 -0500 Subject: [PATCH 26/29] Spotless apply --- .../trace/api/datastreams/DataStreamsTags.java | 16 ++++++++-------- .../api/datastreams/DataStreamsTagsTest.groovy | 4 ++-- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 79b06e3541f..8cff602895d 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -56,14 +56,14 @@ public enum Direction { public static byte[] longToBytes(long val) { return new byte[] { - (byte) (val >> 56), - (byte) (val >> 48), - (byte) (val >> 40), - (byte) (val >> 32), - (byte) (val >> 24), - (byte) (val >> 16), - (byte) (val >> 8), - (byte) val + (byte) (val >> 56), + (byte) (val >> 48), + (byte) (val >> 40), + (byte) (val >> 32), + (byte) (val >> 24), + (byte) (val >> 16), + (byte) (val >> 8), + (byte) val }; } diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy 
b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy index 6df555e9a46..bbbb4857e76 100644 --- a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -56,7 +56,7 @@ class DataStreamsTagsTest extends Specification { "has_routing_key:true", "kafka_cluster_id:kafka_cluster_id", "partition:partition" - ) + ) !tags.hasAllTags("garbage") } @@ -64,7 +64,7 @@ class DataStreamsTagsTest extends Specification { setup: def value = 123444L def bts = DataStreamsTags.longToBytes(value) - ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES); + ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES) buffer.putLong(value) def ctrl = buffer.array() expect: From 30b796ea5eb953a447fe5ea2c4f1ac0e1f7c7b59 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Thu, 17 Jul 2025 09:30:13 -0500 Subject: [PATCH 27/29] Removed unused import --- .../groovy/datadog/trace/agent/test/base/HttpClientTest.groovy | 1 - 1 file changed, 1 deletion(-) diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy index 1f7a15db86b..19f87288b02 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpClientTest.groovy @@ -7,7 +7,6 @@ import datadog.trace.api.DDSpanTypes import datadog.trace.api.DDTags import datadog.trace.api.config.TracerConfig import datadog.trace.api.datastreams.DataStreamsContext -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.bootstrap.instrumentation.api.URIUtils import datadog.trace.core.DDSpan From 82431fed417675c85ce0a5616ea91709fcfc26d8 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Thu, 17 Jul 2025 11:21:36 -0500 Subject: [PATCH 28/29] Fixed all tags --- .../src/test/groovy/ArmeriaGrpcTest.groovy | 2 +- .../googlepubsub/PubSubDecorator.java | 9 +--- .../groovy/KafkaClientTestBase.groovy | 46 ++++++++----------- .../ConnectWorkerInstrumentationTest.groovy | 20 ++------ .../RecordingDatastreamsPayloadWriter.groovy | 9 ++-- .../api/datastreams/DataStreamsTags.java | 19 ++++++++ .../datastreams/DataStreamsTagsTest.groovy | 2 + 7 files changed, 54 insertions(+), 53 deletions(-) diff --git a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy index 6a0885c22dd..d7591a19bfe 100644 --- a/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy +++ b/dd-java-agent/instrumentation/armeria/armeria-grpc-0.84/src/test/groovy/ArmeriaGrpcTest.groovy @@ -254,7 +254,7 @@ abstract class ArmeriaGrpcTest extends VersionedNamingTestBase { if (isDataStreamsEnabled()) { StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags.hasAllTags("direction:in", "topic:somequeue", "type:grpc") + tags.hasAllTags("direction:out", "type:grpc") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } diff --git a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java 
b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java index 07085db704c..a6287c22cef 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java +++ b/dd-java-agent/instrumentation/google-pubsub/src/main/java/datadog/trace/instrumentation/googlepubsub/PubSubDecorator.java @@ -12,12 +12,7 @@ import datadog.trace.api.datastreams.DataStreamsContext; import datadog.trace.api.datastreams.DataStreamsTags; import datadog.trace.api.naming.SpanNaming; -import datadog.trace.bootstrap.instrumentation.api.AgentSpan; -import datadog.trace.bootstrap.instrumentation.api.AgentSpanContext; -import datadog.trace.bootstrap.instrumentation.api.AgentTracer; -import datadog.trace.bootstrap.instrumentation.api.InternalSpanTypes; -import datadog.trace.bootstrap.instrumentation.api.Tags; -import datadog.trace.bootstrap.instrumentation.api.UTF8BytesString; +import datadog.trace.bootstrap.instrumentation.api.*; import datadog.trace.bootstrap.instrumentation.decorator.MessagingClientDecorator; import java.util.function.Function; import java.util.function.Supplier; @@ -130,7 +125,7 @@ public AgentSpan onConsume(final PubsubMessage message, final String subscriptio final AgentSpan span = startSpan(PUBSUB_CONSUME, spanContext); final CharSequence parsedSubscription = extractSubscription(subscription); DataStreamsTags tags = - DataStreamsTags.create( + DataStreamsTags.createWithSubscription( "google-pubsub", DataStreamsTags.Direction.Inbound, parsedSubscription.toString()); final Timestamp publishTime = message.getPublishTime(); // FIXME: use full nanosecond resolution when this method will accept nanos diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy index 6d1e432c621..14e5029357d 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/latestDepTest/groovy/KafkaClientTestBase.groovy @@ -1,13 +1,8 @@ -import datadog.trace.api.datastreams.DataStreamsTags - -import static datadog.trace.agent.test.utils.TraceUtils.basicSpan -import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace -import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.isAsyncPropagationEnabled - import datadog.trace.agent.test.asserts.TraceAssert import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDTags +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags import datadog.trace.bootstrap.instrumentation.api.Tags import datadog.trace.common.writer.ListWriter @@ -23,15 +18,19 @@ import org.junit.Rule import org.springframework.kafka.core.DefaultKafkaConsumerFactory import org.springframework.kafka.listener.KafkaMessageListenerContainer import org.springframework.kafka.listener.MessageListener +import org.springframework.kafka.test.EmbeddedKafkaBroker +import org.springframework.kafka.test.rule.EmbeddedKafkaRule import org.springframework.kafka.test.utils.ContainerTestUtils import org.springframework.kafka.test.utils.KafkaTestUtils -import org.springframework.kafka.test.rule.EmbeddedKafkaRule -import org.springframework.kafka.test.EmbeddedKafkaBroker import 
spock.lang.Shared import java.util.concurrent.LinkedBlockingQueue import java.util.concurrent.TimeUnit +import static datadog.trace.agent.test.utils.TraceUtils.basicSpan +import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace +import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.isAsyncPropagationEnabled + abstract class KafkaClientTestBase extends VersionedNamingTestBase { static final SHARED_TOPIC = "shared.topic" @@ -233,10 +232,9 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { new String(headers.headers("x-datadog-parent-id").iterator().next().value()) == "${traces[produceTraceIdx][2].spanId}" if (isDataStreamsEnabled()) { - def val = DataStreamsTags.hasAllTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") StatsGroup first = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == 0 } verifyAll(first) { - tags.toString() == val.toString() + tags.hasAllTags("direction:out", "kafka_cluster_id:$clusterId", "topic:$SHARED_TOPIC".toString(), "type:kafka") } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } @@ -249,22 +247,18 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { "type:kafka" ) } - List produce = [ - "kafka_cluster_id:$clusterId", - "partition:" + received.partition(), - "topic:" + SHARED_TOPIC, - "type:kafka_produce" - ] - List commit = [ - "consumer_group:sender", - "kafka_cluster_id:$clusterId", - "partition:" + received.partition(), - "topic:" + SHARED_TOPIC, - "type:kafka_commit" - ] - verifyAll(TEST_DATA_STREAMS_WRITER.backlogs) { - contains(new AbstractMap.SimpleEntry, Long>(commit, 1).toString()) - contains(new AbstractMap.SimpleEntry, Long>(produce, 0).toString()) + def sorted = new ArrayList(TEST_DATA_STREAMS_WRITER.backlogs).sort() + verifyAll(sorted) { + size() == 2 + get(0).hasAllTags("consumer_group:sender", + "kafka_cluster_id:$clusterId", + "partition:" + received.partition(), + "topic:" + SHARED_TOPIC, + "type:kafka_commit") + get(1).hasAllTags("kafka_cluster_id:$clusterId", + "partition:" + received.partition(), + "topic:" + SHARED_TOPIC, + "type:kafka_produce") } } diff --git a/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy b/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy index 00a8cd219ce..031242c592e 100644 --- a/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy +++ b/dd-java-agent/instrumentation/kafka-connect-0.11/src/test/groovy/ConnectWorkerInstrumentationTest.groovy @@ -1,5 +1,4 @@ import datadog.trace.agent.test.AgentTestRunner -import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.core.datastreams.StatsGroup import org.apache.kafka.clients.admin.AdminClient import org.apache.kafka.clients.admin.AdminClientConfig @@ -14,12 +13,12 @@ import org.apache.kafka.common.utils.Time import org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy import org.apache.kafka.connect.connector.policy.ConnectorClientConfigOverridePolicy import org.apache.kafka.connect.runtime.Herder -import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo -import org.apache.kafka.connect.runtime.standalone.StandaloneConfig -import org.apache.kafka.connect.runtime.standalone.StandaloneHerder import org.apache.kafka.connect.runtime.Worker import org.apache.kafka.connect.runtime.WorkerConfig import 
org.apache.kafka.connect.runtime.isolation.Plugins +import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo +import org.apache.kafka.connect.runtime.standalone.StandaloneConfig +import org.apache.kafka.connect.runtime.standalone.StandaloneHerder import org.apache.kafka.connect.storage.FileOffsetBackingStore import org.apache.kafka.connect.util.Callback import org.springframework.kafka.test.EmbeddedKafkaBroker @@ -162,12 +161,8 @@ class ConnectWorkerInstrumentationTest extends AgentTestRunner { } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } - def control = DataStreamsTags.hasAllTags("direction:in", "group:test-consumer-group", "topic:test-topic", "type:kafka") verifyAll(second) { - tags.direction == control.direction - tags.group == control.group - tags.topic == control.topic - tags.type == control.type + tags.hasAllTags("direction:in", "group:test-consumer-group", "topic:test-topic", "type:kafka") } TEST_DATA_STREAMS_WRITER.getServices().contains('file-source-connector') @@ -289,13 +284,8 @@ class ConnectWorkerInstrumentationTest extends AgentTestRunner { } StatsGroup second = TEST_DATA_STREAMS_WRITER.groups.find { it.parentHash == first.hash } - def control = DataStreamsTags.hasAllTags("direction:in", "group:connect-file-sink-connector", "topic:test-topic", "type:kafka", "kafka_cluster_id:" + clusterId) verifyAll(second) { - tags.direction == control.direction - tags.group == control.group - tags.topic == control.topic - tags.type == control.type - tags.kafkaClusterId == control.kafkaClusterId + tags.hasAllTags("direction:in", "group:connect-file-sink-connector", "topic:test-topic", "type:kafka", "kafka_cluster_id:" + clusterId) } TEST_DATA_STREAMS_WRITER.getServices().contains('file-sink-connector') diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/datastreams/RecordingDatastreamsPayloadWriter.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/datastreams/RecordingDatastreamsPayloadWriter.groovy index cedcf14724b..b963a0a08bc 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/datastreams/RecordingDatastreamsPayloadWriter.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/datastreams/RecordingDatastreamsPayloadWriter.groovy @@ -1,5 +1,6 @@ package datadog.trace.agent.test.datastreams +import datadog.trace.api.datastreams.DataStreamsTags import datadog.trace.core.datastreams.DatastreamsPayloadWriter import datadog.trace.core.datastreams.StatsBucket import datadog.trace.core.datastreams.StatsGroup @@ -16,7 +17,7 @@ class RecordingDatastreamsPayloadWriter implements DatastreamsPayloadWriter { private final List groups = [] @SuppressWarnings('UnusedPrivateField') - private final Set backlogs = [] + private final Set backlogs = [] private final Set serviceNameOverrides = [] @@ -28,8 +29,8 @@ class RecordingDatastreamsPayloadWriter implements DatastreamsPayloadWriter { data.each { this.@groups.addAll(it.groups) } for (StatsBucket bucket : data) { if (bucket.backlogs != null) { - for (Map.Entry, Long> backlog : bucket.backlogs) { - this.@backlogs.add(backlog.toString()) + for (Map.Entry backlog : bucket.backlogs) { + this.@backlogs.add(backlog.getKey()) } } } @@ -47,7 +48,7 @@ class RecordingDatastreamsPayloadWriter implements DatastreamsPayloadWriter { Collections.unmodifiableList(new ArrayList<>(this.@groups)) } - synchronized List getBacklogs() { + synchronized List getBacklogs() { Collections.unmodifiableList(new ArrayList<>(this.@backlogs)) 
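// Backlogs are now recorded as DataStreamsTags keys (backlog.getKey()) instead of
// stringified Map.Entry values, so tests can assert on them directly. A sketch under
// that assumption (tag values illustrative):
//   def sorted = new ArrayList(TEST_DATA_STREAMS_WRITER.backlogs).sort { it.type + it.partition }
//   sorted[0].hasAllTags("type:kafka_commit", "partition:0")
// Since the Set dedupes by tag key alone, entries that previously differed only by
// their offset value now collapse into a single recorded backlog.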
} diff --git a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java index 8cff602895d..938d8d953f2 100644 --- a/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java +++ b/internal-api/src/main/java/datadog/trace/api/datastreams/DataStreamsTags.java @@ -75,6 +75,25 @@ public static DataStreamsTags create(String type, Direction direction, String to return DataStreamsTags.createWithGroup(type, direction, topic, null); } + public static DataStreamsTags createWithSubscription( + String type, Direction direction, String subscription) { + return new DataStreamsTags( + null, + direction, + null, + null, + type, + subscription, + null, + null, + null, + null, + null, + null, + null, + null); + } + public static DataStreamsTags create( String type, Direction direction, String topic, String group, String kafkaClusterId) { return new DataStreamsTags( diff --git a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy index bbbb4857e76..ef73cfaae4c 100644 --- a/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy +++ b/internal-api/src/test/groovy/datadog/trace/api/datastreams/DataStreamsTagsTest.groovy @@ -110,11 +110,13 @@ class DataStreamsTagsTest extends Specification { def three = DataStreamsTags.create("type", DataStreamsTags.Direction.Outbound, "topic", "group", "cluster") def four = DataStreamsTags.createWithPartition("type", "topic", "partition", "cluster", "group") def five = DataStreamsTags.createWithDataset("type", DataStreamsTags.Direction.Outbound, "topic", "dataset", "namespace") + def six = DataStreamsTags.createWithSubscription("type", DataStreamsTags.Direction.Inbound, "subscription") expect: one.hasAllTags("type:type", "direction:out") two.hasAllTags("type:type", "direction:out", "topic:topic") three.hasAllTags("type:type", "direction:out", "topic:topic", "group:group", "kafka_cluster_id:cluster") four.hasAllTags("type:type", "topic:topic", "partition:partition", "kafka_cluster_id:cluster", "consumer_group:group") five.hasAllTags("type:type", "direction:out", "topic:topic", "ds.name:dataset", "ds.namespace:namespace") + six.hasAllTags("type:type", "direction:in", "subscription:subscription") } } From 68373afa8bb80829382992965bac23f0951d0d57 Mon Sep 17 00:00:00 2001 From: Igor Kravchenko Date: Thu, 17 Jul 2025 14:29:15 -0500 Subject: [PATCH 29/29] Fixed kafka tests --- .../test/groovy/KafkaClientTestBase.groovy | 84 +++++++++++-------- .../test/groovy/KafkaClientTestBase.groovy | 77 ++++++++--------- 2 files changed, 86 insertions(+), 75 deletions(-) diff --git a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy index cbac1e1203a..9870627fda3 100644 --- a/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-0.11/src/test/groovy/KafkaClientTestBase.groovy @@ -1,3 +1,5 @@ +import datadog.trace.api.datastreams.DataStreamsTags + import static datadog.trace.agent.test.utils.TraceUtils.basicSpan import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan @@ -239,7 +241,7 @@ abstract class 
KafkaClientTestBase extends VersionedNamingTestBase {
     if (isDataStreamsEnabled()) {
       TEST_DATA_STREAMS_WRITER.waitForGroups(2)
       // wait for produce offset 0, commit offset 0 on partition 0 and 1, and commit offset 1 on 1 partition.
-      TEST_DATA_STREAMS_WRITER.waitForBacklogs(4)
+      TEST_DATA_STREAMS_WRITER.waitForBacklogs(3)
     }

     then:
@@ -296,22 +298,29 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
           "type:kafka"
         )
       }
-      List<String> produce = [
-        "kafka_cluster_id:$clusterId",
-        "partition:" + received.partition(),
-        "topic:" + SHARED_TOPIC,
-        "type:kafka_produce"
-      ]
-      List<String> commit = [
-        "consumer_group:sender",
-        "kafka_cluster_id:$clusterId",
-        "partition:" + received.partition(),
-        "topic:" + SHARED_TOPIC,
-        "type:kafka_commit"
-      ]
-      verifyAll(TEST_DATA_STREAMS_WRITER.backlogs) {
-        contains(new AbstractMap.SimpleEntry<List<String>, Long>(commit, 1).toString())
-        contains(new AbstractMap.SimpleEntry<List<String>, Long>(produce, 0).toString())
+      def items = new ArrayList(TEST_DATA_STREAMS_WRITER.backlogs).sort { it.type + it.partition}
+      verifyAll(items) {
+        size() == 3
+        get(0).hasAllTags(
+          "consumer_group:sender",
+          "kafka_cluster_id:$clusterId",
+          "partition:0",
+          "topic:" + SHARED_TOPIC,
+          "type:kafka_commit"
+        )
+        get(1).hasAllTags(
+          "consumer_group:sender",
+          "kafka_cluster_id:$clusterId",
+          "partition:1",
+          "topic:" + SHARED_TOPIC,
+          "type:kafka_commit"
+        )
+        get(2).hasAllTags(
+          "kafka_cluster_id:$clusterId",
+          "partition:" + received.partition(),
+          "topic:" + SHARED_TOPIC,
+          "type:kafka_produce"
+        )
       }
     }

@@ -394,7 +403,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
     if (isDataStreamsEnabled()) {
       TEST_DATA_STREAMS_WRITER.waitForGroups(2)
       // wait for produce offset 0, commit offset 0 on partition 0 and 1, and commit offset 1 on 1 partition.
-      TEST_DATA_STREAMS_WRITER.waitForBacklogs(4)
+      TEST_DATA_STREAMS_WRITER.waitForBacklogs(3)
     }

     then:
@@ -447,22 +456,29 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
           "type:kafka"
         )
       }
-      List<String> produce = [
-        "kafka_cluster_id:$clusterId".toString(),
-        "partition:" + received.partition(),
-        "topic:" + SHARED_TOPIC,
-        "type:kafka_produce"
-      ]
-      List<String> commit = [
-        "consumer_group:sender",
-        "kafka_cluster_id:$clusterId".toString(),
-        "partition:" + received.partition(),
-        "topic:" + SHARED_TOPIC,
-        "type:kafka_commit"
-      ]
-      verifyAll(TEST_DATA_STREAMS_WRITER.backlogs) {
-        contains(new AbstractMap.SimpleEntry<List<String>, Long>(commit, 1).toString())
-        contains(new AbstractMap.SimpleEntry<List<String>, Long>(produce, 0).toString())
+      def items = new ArrayList(TEST_DATA_STREAMS_WRITER.backlogs).sort {it.type + it.partition}
+      verifyAll(items) {
+        size() == 3
+        get(0).hasAllTags(
+          "consumer_group:sender",
+          "kafka_cluster_id:$clusterId".toString(),
+          "partition:0",
+          "topic:" + SHARED_TOPIC,
+          "type:kafka_commit"
+        )
+        get(1).hasAllTags(
+          "consumer_group:sender",
+          "kafka_cluster_id:$clusterId".toString(),
+          "partition:1",
+          "topic:" + SHARED_TOPIC,
+          "type:kafka_commit"
+        )
+        get(2).hasAllTags(
+          "kafka_cluster_id:$clusterId".toString(),
+          "partition:" + received.partition(),
+          "topic:" + SHARED_TOPIC,
+          "type:kafka_produce"
+        )
       }
     }
diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy
index cfd8d4d5fd3..665e2df70d8 100644
--- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy
+++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy
@@ -2,6 +2,7 @@ import datadog.trace.agent.test.asserts.TraceAssert
 import datadog.trace.agent.test.naming.VersionedNamingTestBase
 import datadog.trace.api.Config
 import datadog.trace.api.DDTags
+import datadog.trace.api.datastreams.DataStreamsTags
 import datadog.trace.bootstrap.instrumentation.api.InstrumentationTags
 import datadog.trace.bootstrap.instrumentation.api.Tags
 import datadog.trace.common.writer.ListWriter
@@ -10,11 +11,7 @@ import datadog.trace.core.datastreams.StatsGroup
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.clients.consumer.KafkaConsumer
-import org.apache.kafka.clients.producer.KafkaProducer
-import org.apache.kafka.clients.producer.Producer
-import org.apache.kafka.clients.producer.ProducerConfig
-import org.apache.kafka.clients.producer.ProducerRecord
-import org.apache.kafka.clients.producer.RecordMetadata
+import org.apache.kafka.clients.producer.*
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.serialization.StringSerializer
 import org.junit.Rule
@@ -29,15 +26,12 @@ import org.springframework.kafka.test.rule.EmbeddedKafkaRule
 import org.springframework.kafka.test.utils.ContainerTestUtils
 import org.springframework.kafka.test.utils.KafkaTestUtils
-
 import java.util.concurrent.ExecutionException
 import java.util.concurrent.Future
-
-import static datadog.trace.agent.test.asserts.TagsAssert.codeOriginTags
-
 import java.util.concurrent.LinkedBlockingQueue
 import java.util.concurrent.TimeUnit
+import static datadog.trace.agent.test.asserts.TagsAssert.codeOriginTags
 import static datadog.trace.agent.test.utils.TraceUtils.basicSpan
 import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace
 import 
static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan
@@ -272,22 +266,23 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
         "type:kafka"
       )
     }
-      List<String> produce = [
-        "kafka_cluster_id:$clusterId",
-        "partition:"+received.partition(),
-        "topic:"+SHARED_TOPIC,
-        "type:kafka_produce"
-      ]
-      List<String> commit = [
-        "consumer_group:sender",
-        "kafka_cluster_id:$clusterId",
-        "partition:"+received.partition(),
-        "topic:$SHARED_TOPIC",
-        "type:kafka_commit"
-      ]
-      verifyAll(TEST_DATA_STREAMS_WRITER.backlogs) {
-        contains(new AbstractMap.SimpleEntry<List<String>, Long>(commit, 1).toString())
-        contains(new AbstractMap.SimpleEntry<List<String>, Long>(produce, 0).toString())
+
+      def sorted = new ArrayList(TEST_DATA_STREAMS_WRITER.backlogs).sort{ it.type }
+      verifyAll(sorted) {
+        size() == 2
+        get(0).hasAllTags(
+          "consumer_group:sender",
+          "kafka_cluster_id:$clusterId",
+          "partition:"+received.partition(),
+          "topic:$SHARED_TOPIC",
+          "type:kafka_commit"
+        )
+        get(1).hasAllTags(
+          "kafka_cluster_id:$clusterId",
+          "partition:"+received.partition(),
+          "topic:"+SHARED_TOPIC,
+          "type:kafka_produce"
+        )
       }
     }

@@ -428,22 +423,22 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase {
         "type:kafka"
       )
     }
-      List<String> produce = [
-        "kafka_cluster_id:$clusterId".toString(),
-        "partition:"+received.partition(),
-        "topic:"+SHARED_TOPIC,
-        "type:kafka_produce"
-      ]
-      List<String> commit = [
-        "consumer_group:sender",
-        "kafka_cluster_id:$clusterId".toString(),
-        "partition:"+received.partition(),
-        "topic:"+SHARED_TOPIC,
-        "type:kafka_commit"
-      ]
-      verifyAll(TEST_DATA_STREAMS_WRITER.backlogs) {
-        contains(new AbstractMap.SimpleEntry<List<String>, Long>(commit, 1).toString())
-        contains(new AbstractMap.SimpleEntry<List<String>, Long>(produce, 0).toString())
+      def items = new ArrayList(TEST_DATA_STREAMS_WRITER.backlogs).sort {it.type}
+      verifyAll(items) {
+        size() == 2
+        get(0).hasAllTags(
+          "consumer_group:sender",
+          "kafka_cluster_id:$clusterId".toString(),
+          "partition:"+received.partition(),
+          "topic:"+SHARED_TOPIC,
+          "type:kafka_commit"
+        )
+        get(1).hasAllTags(
+          "kafka_cluster_id:$clusterId".toString(),
+          "partition:"+received.partition(),
+          "topic:"+SHARED_TOPIC,
+          "type:kafka_produce"
+        )
       }
     }
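
Note for reviewers: a minimal sketch of the DataStreamsTags usage that the rewritten
assertions above rely on. DataStreamsTags.create(type, direction, topic) and
hasAllTags(...) are taken from this patch series; the wrapper class and main method
are illustrative only, assuming the internal-api module is on the classpath.

    import datadog.trace.api.datastreams.DataStreamsTags;

    class DataStreamsTagsSketch {
      public static void main(String[] args) {
        // Tags are now a typed, immutable value object built in one call, replacing
        // the LinkedHashMap<String, String> that was previously populated entry by
        // entry and serialized to "key:value" strings.
        DataStreamsTags tags =
            DataStreamsTags.create("kafka", DataStreamsTags.Direction.Inbound, "test-topic");

        // hasAllTags matches each "key:value" pair against the typed fields, so the
        // tests assert directly on the tags object instead of comparing serialized
        // string lists.
        System.out.println(tags.hasAllTags("type:kafka", "direction:in", "topic:test-topic"));
        // expected: true
      }
    }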