diff --git a/lib-injection/build/docker/java/jetty-app/JettyServletMain.java b/lib-injection/build/docker/java/jetty-app/JettyServletMain.java
index e2491f8d406..3fb818e21b2 100644
--- a/lib-injection/build/docker/java/jetty-app/JettyServletMain.java
+++ b/lib-injection/build/docker/java/jetty-app/JettyServletMain.java
@@ -46,6 +46,7 @@ public static void main(String[] args) throws Exception {
webAppContext.addServlet(new ServletHolder(new CrashServlet()), "/fork_and_crash");
webAppContext.addServlet(new ServletHolder(new CrashServlet()), "/child_pids");
webAppContext.addServlet(new ServletHolder(new CrashServlet()), "/zombies");
+ webAppContext.addServlet(new ServletHolder(new MyServlet()), "/myservlet");
// Start the server!
server.start();
diff --git a/lib-injection/build/docker/java/jetty-app/MyServlet.java b/lib-injection/build/docker/java/jetty-app/MyServlet.java
new file mode 100644
index 00000000000..423858efb13
--- /dev/null
+++ b/lib-injection/build/docker/java/jetty-app/MyServlet.java
@@ -0,0 +1,81 @@
+import java.io.IOException;
+import java.io.PrintWriter;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.ServletException;
+
+public class MyServlet extends HttpServlet {
+ @Override
+ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
+ throws ServletException, IOException {
+
+ resp.setContentType("text/html");
+ resp.setCharacterEncoding("UTF-8");
+ resp.setStatus(HttpServletResponse.SC_OK);
+
+ PrintWriter out = resp.getWriter();
+
+        out.println("<!DOCTYPE html>");
+        out.println("<html>");
+        out.println("<head>");
+        out.println("    <meta charset=\"UTF-8\">");
+        out.println("    <title>My Servlet</title>");
+        out.println("</head>");
+        out.println("<body>");
+        out.println("    <h1>Welcome to MyServlet!</h1>");
+        out.println("    <p>This is a simple HTML view served by a Jetty servlet. The server is running and ready to handle your requests.</p>");
+        out.println("    <h2>Jetty Server</h2>");
+        out.println("</body>");
+        out.println("</html>");
+ }
+}
+
diff --git a/tests/appsec/iast/source/test_uri.py b/tests/appsec/iast/source/test_uri.py
index 0519b0bbbc9..6ab271da659 100644
--- a/tests/appsec/iast/source/test_uri.py
+++ b/tests/appsec/iast/source/test_uri.py
@@ -13,5 +13,5 @@ class TestURI(BaseSourceTest):
endpoint = "/iast/source/uri/test"
requests_kwargs = [{"method": "GET"}]
source_type = "http.request.uri"
- source_value = "http://localhost:7777/iast/source/uri/test"
+ source_value = "http://weblog:7777/iast/source/uri/test"
source_names = None
diff --git a/tests/appsec/test_traces.py b/tests/appsec/test_traces.py
index f7510a0b72c..bd45b67a9f7 100644
--- a/tests/appsec/test_traces.py
+++ b/tests/appsec/test_traces.py
@@ -18,6 +18,7 @@
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
+@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_RetainTraces:
"""Retain trace (manual keep & appsec.event = true)"""
@@ -64,6 +65,8 @@ def validate_appsec_event_span_tags(span: dict):
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
+@scenarios.default_antithesis
+@scenarios.default_antithesis_debug
@scenarios.appsec_lambda_default
class Test_AppSecEventSpanTags:
"""AppSec correctly fill span tags."""
@@ -152,6 +155,7 @@ def test_root_span_coherence(self):
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
+@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_AppSecObfuscator:
"""AppSec obfuscates sensitive data."""
@@ -308,6 +312,7 @@ def validate_appsec_span_tags(span: dict, appsec_data: dict): # noqa: ARG001
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
+@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_CollectRespondHeaders:
"""AppSec should collect some headers for http.response and store them in span tags."""
@@ -340,6 +345,7 @@ def validate_response_headers(span: dict):
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
+@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_CollectDefaultRequestHeader:
HEADERS = {
@@ -376,6 +382,7 @@ def test_collect_default_request_headers(self):
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
+@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_ExternalWafRequestsIdentification:
def setup_external_wafs_header_collection(self):
diff --git a/tests/docker_ssi/test_docker_ssi_profiling.py b/tests/docker_ssi/test_docker_ssi_profiling.py
new file mode 100644
index 00000000000..68ec5e5f8a4
--- /dev/null
+++ b/tests/docker_ssi/test_docker_ssi_profiling.py
@@ -0,0 +1,37 @@
+from urllib.parse import urlparse
+import requests
+import time
+from utils import scenarios, weblog, features
+from utils import logger
+
+
+@features.profiling
+@scenarios.docker_ssi_profiling
+class TestDockerSSIProfilingFeatures:
+ """Test the ssi in a simulated host injection environment (docker container + test agent)
+ We test that the injection is performed and profiling is enabled and telemetry is generated.
+ """
+
+ def setup_profiling(self):
+ parsed_url = urlparse(scenarios.docker_ssi_profiling.weblog_url)
+ self.r = weblog.request("GET", parsed_url.path, domain=parsed_url.hostname, port=parsed_url.port)
+ logger.info(f"Setup Docker SSI profiling installation {self.r}")
+
+ def test_profiling(self):
+ agent_port = scenarios.docker_ssi_profiling.agent_port
+ agent_host = scenarios.docker_ssi_profiling.agent_host
+ profiling_request_found = False
+ timeout = 90
+ mustend = time.time() + timeout
+        while time.time() < mustend and not profiling_request_found:
+ response = requests.get(
+ f"http://{agent_host}:{agent_port}/test/session/requests",
+ timeout=60,
+ )
+ logger.info(f"Profiling request response: {response.json()}")
+ for request in response.json():
+ logger.info(f"Profiling request: {request}")
+ if request["url"].endswith("/profiling/v1/input"):
+ profiling_request_found = True
+ time.sleep(1)
+ assert profiling_request_found, "No profiling request found"
diff --git a/tests/test_the_test/test_group_rules.py b/tests/test_the_test/test_group_rules.py
index a0f6ad54dd7..60224501488 100644
--- a/tests/test_the_test/test_group_rules.py
+++ b/tests/test_the_test/test_group_rules.py
@@ -39,6 +39,7 @@ def test_tracer_release():
scenarios.docker_ssi_appsec,
scenarios.docker_ssi_crashtracking,
scenarios.docker_ssi_servicenaming,
+ scenarios.docker_ssi_profiling,
scenarios.external_processing_blocking, # need to declare a white list of library in get-workflow-parameters
scenarios.external_processing, # need to declare a white list of library in get-workflow-parameters
scenarios.stream_processing_offload_blocking, # need to declare a white list of library in get-workflow-parameters
@@ -67,6 +68,7 @@ def test_tracer_release():
scenarios.multi_installer_auto_injection,
scenarios.demo_aws,
scenarios.otel_collector_e2e,
+ scenarios.default_antithesis,
]
for scenario in get_all_scenarios():
diff --git a/utils/_context/_scenarios/__init__.py b/utils/_context/_scenarios/__init__.py
index ee326e853ca..24e1995aaf6 100644
--- a/utils/_context/_scenarios/__init__.py
+++ b/utils/_context/_scenarios/__init__.py
@@ -7,6 +7,7 @@
from .aws_lambda import LambdaScenario
from .core import Scenario, scenario_groups
from .default import DefaultScenario
+from .default_antithesis import DefaultAntithesisScenario
from .endtoend import DockerScenario, EndToEndScenario
from .integrations import CrossedTracingLibraryScenario, IntegrationsScenario, AWSIntegrationsScenario
from .open_telemetry import OpenTelemetryScenario
@@ -37,6 +38,8 @@ class _Scenarios:
mock_the_test_2 = TestTheTestScenario("MOCK_THE_TEST_2", doc="Mock scenario that check system-tests internals")
default = DefaultScenario("DEFAULT")
+ default_antithesis = DefaultAntithesisScenario("DEFAULT_ANTITHESIS")
+ default_antithesis_debug = DefaultScenario("DEFAULT_ANTITHESIS_DEBUG")
# performance scenario just spawn an agent and a weblog, and spies the CPU and mem usage
performances = PerformanceScenario(
@@ -999,6 +1002,17 @@ class _Scenarios:
appsec_enabled="true",
scenario_groups=[scenario_groups.all, scenario_groups.docker_ssi],
)
+ docker_ssi_profiling = DockerSSIScenario(
+ "DOCKER_SSI_PROFILING",
+        doc="Validates profiling for ssi on a docker environment",
+ extra_env_vars={
+ "DD_PROFILING_UPLOAD_PERIOD": "2",
+ "DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD": "1000",
+ "DD_PROFILING_START_FORCE_FIRST": "true",
+ },
+ profiling_enabled="auto",
+ scenario_groups=[scenario_groups.all, scenario_groups.docker_ssi],
+ )
docker_ssi_crashtracking = DockerSSIScenario(
"DOCKER_SSI_CRASHTRACKING",
doc="Validates the crashtracking for ssi on a docker environment",
diff --git a/utils/_context/_scenarios/core.py b/utils/_context/_scenarios/core.py
index 360e6f16aa6..0e05d4a5939 100644
--- a/utils/_context/_scenarios/core.py
+++ b/utils/_context/_scenarios/core.py
@@ -62,6 +62,7 @@ class _ScenarioGroups:
parametric = ScenarioGroup()
appsec_low_waf_timeout = ScenarioGroup()
default = ScenarioGroup()
+ default_antithesis = ScenarioGroup()
feature_flag_exposure = ScenarioGroup()
def __getitem__(self, key: str) -> ScenarioGroup:
diff --git a/utils/_context/_scenarios/default_antithesis.py b/utils/_context/_scenarios/default_antithesis.py
new file mode 100644
index 00000000000..c5399db97e2
--- /dev/null
+++ b/utils/_context/_scenarios/default_antithesis.py
@@ -0,0 +1,192 @@
+"""Default Antithesis scenario - a minimal scenario that doesn't start any containers."""
+
+from logging import FileHandler
+import os
+import pytest
+
+from watchdog.observers.polling import PollingObserver
+from watchdog.events import FileSystemEventHandler, FileSystemEvent
+
+from utils import interfaces
+from utils.interfaces._core import ProxyBasedInterfaceValidator
+from utils._context.component_version import ComponentVersion
+from utils._logger import logger, get_log_formatter
+from .core import Scenario, scenario_groups
+
+
+class DefaultAntithesisScenario(Scenario):
+ """A minimal scenario that doesn't start containers.
+
+ This scenario is designed for Antithesis testing where containers
+ are managed externally and we only want to run the test logic.
+
+ This scenario will run all tests that are decorated with @scenarios.default
+ by checking for the "DEFAULT" scenario marker during test collection.
+ """
+
+ def __init__(self, name: str) -> None:
+ super().__init__(
+ name,
+ github_workflow=None,
+ doc="Antithesis scenario that doesn't start containers - for external container management",
+ # Include DEFAULT scenario groups for tests using @scenario_groups decorators
+ scenario_groups=[
+ scenario_groups.essentials,
+ scenario_groups.telemetry,
+ scenario_groups.default,
+ scenario_groups.default_antithesis,
+ ],
+ )
+ self._library: ComponentVersion | None = None
+
+ # Interface timeout properties (will be set based on library in configure)
+ self.library_interface_timeout = 35 # Default timeout
+ self.agent_interface_timeout = 30
+ self.backend_interface_timeout = 0
+
+ def pytest_configure(self, config: pytest.Config) -> None:
+ """Configure the scenario but don't delete the logs folder if it exists."""
+ # Store replay and worker status
+ self.replay = config.option.replay
+ self.is_main_worker = not hasattr(config, "workerinput")
+
+ # Create log folder WITHOUT removing it if it exists
+ if self.is_main_worker:
+ self._create_log_subfolder("", remove_if_exists=False)
+
+ # Set up logging handler
+ handler = FileHandler(f"{self.host_log_folder}/tests.log", encoding="utf-8")
+ handler.setFormatter(get_log_formatter())
+ logger.addHandler(handler)
+
+ # Call configure
+ self.configure(config)
+
+ def configure(self, config: pytest.Config) -> None:
+ """Configure the scenario but don't start any containers."""
+ # Get library information from command line or environment
+ library_name = config.option.library or os.environ.get("DD_LANG", "")
+ library_version = os.environ.get("DD_LIBRARY_VERSION", "unknown")
+
+ if library_name:
+ self._library = ComponentVersion(library_name, library_version)
+
+ # Configure interfaces like in endtoend.py
+ # interfaces.agent.configure(self.host_log_folder, replay=self.replay)
+ interfaces.library.configure(self.host_log_folder, replay=self.replay)
+ interfaces.backend.configure(self.host_log_folder, replay=self.replay)
+ interfaces.library_dotnet_managed.configure(self.host_log_folder, replay=self.replay)
+ interfaces.library_stdout.configure(self.host_log_folder, replay=self.replay)
+ # interfaces.agent_stdout.configure(self.host_log_folder, replay=self.replay)
+
+ # Set library-specific interface timeouts
+ if library_name == "java":
+ self.library_interface_timeout = 35
+ elif library_name in ("golang",):
+ self.library_interface_timeout = 10
+ elif library_name in ("nodejs", "ruby"):
+ self.library_interface_timeout = 0
+ elif library_name in ("php",):
+            # possibly something weird on obfuscator; let's increase the delay for now
+ self.library_interface_timeout = 10
+ elif library_name in ("python",):
+ self.library_interface_timeout = 5
+ else:
+ self.library_interface_timeout = 40
+        logger.debug(f"Library interface timeout set to: {self.library_interface_timeout}")
+
+ logger.debug("Getting warmups")
+ if not self.replay:
+ self.warmups.insert(1, self._start_interfaces_watchdog)
+
+ @property
+ def library(self) -> ComponentVersion:
+ """Return the library component version."""
+ if not self._library:
+ library_name = os.environ.get("DD_LANG", "")
+ library_version = os.environ.get("DD_LIBRARY_VERSION", "unknown")
+ self._library = ComponentVersion(library_name, library_version)
+ return self._library
+
+ @property
+ def host_log_folder(self) -> str:
+ """Override to use 'logs' folder instead of 'logs_default_antithesis'."""
+ return "logs"
+
+ @property
+ def weblog_variant(self):
+ return os.environ.get("SYSTEM_TESTS_WEBLOG_VARIANT", "")
+
+ def start_interfaces_watchdog(self, interfaces_list: list[ProxyBasedInterfaceValidator]) -> None:
+ """Start file system watchdog to automatically ingest interface files."""
+
+ class Event(FileSystemEventHandler):
+ def __init__(self, interface: ProxyBasedInterfaceValidator) -> None:
+ super().__init__()
+ self.interface = interface
+
+ def _ingest(self, event: FileSystemEvent) -> None:
+ if event.is_directory:
+ return
+ self.interface.ingest_file(event.src_path)
+
+ on_modified = _ingest
+ on_created = _ingest
+
+ # Using polling observer to avoid issues with OS-dependent notifiers
+ observer = PollingObserver()
+
+ for interface in interfaces_list:
+ logger.debug(f"Starting watchdog for {interface} at {interface.log_folder}")
+ observer.schedule(Event(interface), path=interface.log_folder)
+
+ observer.start()
+
+ def _start_interfaces_watchdog(self) -> None:
+ """Start the interfaces watchdog for library and agent interfaces."""
+ # self.start_interfaces_watchdog([interfaces.library, interfaces.agent])
+ self.start_interfaces_watchdog([interfaces.library])
+
+ def post_setup(self, session: pytest.Session) -> None: # noqa: ARG002
+ """Wait for all interfaces to finish collecting messages after test setup."""
+ if self.replay:
+ logger.terminal.write_sep("-", "Load all data from logs")
+ logger.terminal.flush()
+
+ interfaces.library.load_data_from_logs()
+ interfaces.library.check_deserialization_errors()
+
+            interfaces.agent.load_data_from_logs()  # NOTE(review): agent interface is never configured in configure() — confirm replay path works
+            interfaces.agent.check_deserialization_errors()
+
+ interfaces.backend.load_data_from_logs()
+ else:
+ # Wait for library interface to finish collecting traces
+ self._wait_interface(interfaces.library, self.library_interface_timeout)
+ interfaces.library.check_deserialization_errors()
+
+ # Wait for agent interface to finish collecting traces
+ # self._wait_interface(interfaces.agent, self.agent_interface_timeout)
+ # interfaces.agent.check_deserialization_errors()
+
+ # Wait for backend interface
+ self._wait_interface(interfaces.backend, self.backend_interface_timeout)
+
+ # Load .NET managed library data if applicable
+ interfaces.library_dotnet_managed.load_data()
+
+ def _wait_interface(self, interface: ProxyBasedInterfaceValidator, timeout: int) -> None:
+ """Wait for an interface to finish collecting messages.
+
+ Args:
+ interface: The interface validator to wait for
+ timeout: Timeout in seconds to wait for the interface
+
+ """
+ logger.terminal.write_sep("-", f"Wait for {interface} ({timeout}s)")
+ logger.terminal.flush()
+ interface.wait(timeout)
+
+ def pytest_sessionfinish(self, session: pytest.Session, exitstatus: int) -> None:
+ """Clean up after the test session."""
+ # No containers to clean up
diff --git a/utils/_context/_scenarios/docker_ssi.py b/utils/_context/_scenarios/docker_ssi.py
index 0d081bd2ef0..89d7359d437 100644
--- a/utils/_context/_scenarios/docker_ssi.py
+++ b/utils/_context/_scenarios/docker_ssi.py
@@ -37,11 +37,18 @@ class DockerSSIScenario(Scenario):
_network: Network = None
def __init__(
- self, name, doc, extra_env_vars: dict | None = None, scenario_groups=None, appsec_enabled=None
+ self,
+ name,
+ doc,
+ extra_env_vars: dict | None = None,
+ scenario_groups=None,
+ appsec_enabled=None,
+ profiling_enabled=None,
) -> None:
super().__init__(name, doc=doc, github_workflow="dockerssi", scenario_groups=scenario_groups)
self._appsec_enabled = appsec_enabled
+ self._profiling_enabled = profiling_enabled
self.agent_port = _get_free_port()
self.agent_host = "localhost"
self._weblog_injection = DockerSSIContainer(extra_env_vars=extra_env_vars)
@@ -111,6 +118,7 @@ def configure(self, config: pytest.Config):
self._custom_library_version,
self._custom_injector_version,
self._appsec_enabled,
+ self._profiling_enabled,
)
self.ssi_image_builder.configure()
self.ssi_image_builder.build_weblog()
@@ -300,6 +308,7 @@ def __init__(
custom_library_version,
custom_injector_version,
appsec_enabled=None,
+ profiling_enabled=None,
) -> None:
self.scenario_name = scenario_name
self.host_log_folder = host_log_folder
@@ -319,6 +328,7 @@ def __init__(
self._custom_library_version = custom_library_version
self._custom_injector_version = custom_injector_version
self._appsec_enabled = appsec_enabled
+ self._profiling_enabled = profiling_enabled
@property
def dd_lang(self) -> str:
@@ -484,6 +494,7 @@ def build_weblog_image(self, ssi_installer_docker_tag):
"DD_INSTALLER_LIBRARY_VERSION": self._custom_library_version,
"DD_INSTALLER_INJECTOR_VERSION": self._custom_injector_version,
"DD_APPSEC_ENABLED": self._appsec_enabled,
+ "DD_PROFILING_ENABLED": self._profiling_enabled,
},
)
self.print_docker_build_logs(self.ssi_all_docker_tag, build_logs)
diff --git a/utils/build/docker/dotnet/install_ddtrace.sh b/utils/build/docker/dotnet/install_ddtrace.sh
index f785cf445cd..86995a42829 100755
--- a/utils/build/docker/dotnet/install_ddtrace.sh
+++ b/utils/build/docker/dotnet/install_ddtrace.sh
@@ -46,4 +46,4 @@ else
fi
tar xzf $(ls datadog-dotnet-apm*.tar.gz) -C /opt/datadog
-fi
+fi
\ No newline at end of file
diff --git a/utils/build/docker/java/install_ddtrace.sh b/utils/build/docker/java/install_ddtrace.sh
index 2e705e1149e..c88be6c3f4f 100755
--- a/utils/build/docker/java/install_ddtrace.sh
+++ b/utils/build/docker/java/install_ddtrace.sh
@@ -39,5 +39,4 @@ echo "Installed $(cat /binaries/SYSTEM_TESTS_LIBRARY_VERSION) java library"
SYSTEM_TESTS_LIBRARY_VERSION=$(cat /binaries/SYSTEM_TESTS_LIBRARY_VERSION)
-echo "dd-trace version: $(cat /binaries/SYSTEM_TESTS_LIBRARY_VERSION)"
-
+echo "dd-trace version: $(cat /binaries/SYSTEM_TESTS_LIBRARY_VERSION)"
\ No newline at end of file
diff --git a/utils/build/docker/java/spring-boot-3-native.Dockerfile b/utils/build/docker/java/spring-boot-3-native.Dockerfile
index b904ab4d4d6..ff1cfc5120c 100644
--- a/utils/build/docker/java/spring-boot-3-native.Dockerfile
+++ b/utils/build/docker/java/spring-boot-3-native.Dockerfile
@@ -1,5 +1,8 @@
FROM ghcr.io/graalvm/native-image-community:22.0.0 as build
+# Install required utilities for install_ddtrace.sh (unzip, zip, wget)
+RUN microdnf install -y unzip zip wget && microdnf clean all
+
ENV JAVA_TOOL_OPTIONS="-Djava.net.preferIPv4Stack=true"
COPY --from=maven:3.9.9-eclipse-temurin-17 /usr/share/maven /usr/share/maven
diff --git a/utils/build/ssi/base/base_ssi.Dockerfile b/utils/build/ssi/base/base_ssi.Dockerfile
index df4ab20d6d1..56addde31ca 100644
--- a/utils/build/ssi/base/base_ssi.Dockerfile
+++ b/utils/build/ssi/base/base_ssi.Dockerfile
@@ -23,6 +23,9 @@ ENV DD_INSTALLER_INJECTOR_VERSION=${DD_INSTALLER_INJECTOR_VERSION}
ARG DD_APPSEC_ENABLED
ENV DD_APPSEC_ENABLED=${DD_APPSEC_ENABLED}
+ARG DD_PROFILING_ENABLED
+ENV DD_PROFILING_ENABLED=${DD_PROFILING_ENABLED}
+
RUN ./install_script_ssi.sh
ENV DD_APM_INSTRUMENTATION_DEBUG=true
diff --git a/utils/build/ssi/java/jetty-app.Dockerfile b/utils/build/ssi/java/jetty-app.Dockerfile
index 6449cca55ee..f3007ce25a4 100644
--- a/utils/build/ssi/java/jetty-app.Dockerfile
+++ b/utils/build/ssi/java/jetty-app.Dockerfile
@@ -13,6 +13,13 @@ RUN find jetty-distribution-9.4.56.v20240826/lib -iname '*.jar' -exec cp \{\} je
RUN rm jetty-classpath/jetty-jaspi*
COPY lib-injection/build/docker/java/jetty-app/ .
-RUN javac -cp "jetty-classpath/*" JettyServletMain.java CrashServlet.java
+RUN javac -cp "jetty-classpath/*" JettyServletMain.java CrashServlet.java MyServlet.java
+RUN mkdir -p /var/log/java
+
+RUN echo '#!/bin/bash' > app.sh && \
+ echo 'java -cp "jetty-classpath/*:." JettyServletMain' >> app.sh && \
+ chmod +x app.sh
+
+CMD [ "./app.sh" ]
+
-CMD [ "java", "-cp", "jetty-classpath/*:.", "JettyServletMain" ]
diff --git a/utils/build/ssi/java/spring-boot.Dockerfile b/utils/build/ssi/java/spring-boot.Dockerfile
new file mode 100644
index 00000000000..fba720d8767
--- /dev/null
+++ b/utils/build/ssi/java/spring-boot.Dockerfile
@@ -0,0 +1,30 @@
+ARG BASE_IMAGE
+
+#syntax=docker/dockerfile:1.4
+FROM maven:3.9-eclipse-temurin-11 as build
+
+ENV JAVA_TOOL_OPTIONS="-Djava.net.preferIPv4Stack=true"
+
+COPY ./utils/build/docker/java/iast-common/src /iast-common/src
+
+WORKDIR /app
+
+COPY ./utils/build/docker/java/spring-boot/pom.xml .
+RUN mkdir /maven && mvn -Dmaven.repo.local=/maven -B dependency:go-offline
+
+COPY ./utils/build/docker/java/spring-boot/src ./src
+RUN mvn -Dmaven.repo.local=/maven package
+
+FROM ${BASE_IMAGE}
+
+COPY --from=build /app/target/myproject-0.0.1-SNAPSHOT.jar /workdir/app.jar
+
+RUN mkdir -p /var/log/java
+
+RUN echo '#!/bin/bash' > app.sh && \
+ echo 'java -jar /workdir/app.jar --server.port=18080' >> app.sh && \
+ chmod +x app.sh
+
+CMD [ "./app.sh" ]
+
+
diff --git a/utils/docker_ssi/docker_ssi_definitions.py b/utils/docker_ssi/docker_ssi_definitions.py
index 602a4addcc9..837e08a7577 100644
--- a/utils/docker_ssi/docker_ssi_definitions.py
+++ b/utils/docker_ssi/docker_ssi_definitions.py
@@ -13,15 +13,22 @@ class JavaRuntimeInstallableVersions:
JAVA_24 = RuntimeInstallableVersion("JAVA_24", "24.0.1-zulu")
JAVA_21 = RuntimeInstallableVersion("JAVA_21", "21.0.7-zulu")
JAVA_17 = RuntimeInstallableVersion("JAVA_17", "17.0.15-zulu")
+ JAVA_21_AMZN = RuntimeInstallableVersion("JAVA_21_AMZN", "21.0.9-amzn")
+ JAVA_25_ORACLE = RuntimeInstallableVersion("JAVA_25_ORACLE", "25.0.1-oracle")
+ JAVA_26_EA_24_OPEN = RuntimeInstallableVersion("JAVA_26_EA_24_OPEN", "26.ea.24-open")
JAVA_11 = RuntimeInstallableVersion("JAVA_11", "11.0.27-zulu")
+
@staticmethod
def get_all_versions():
return [
JavaRuntimeInstallableVersions.JAVA_24,
JavaRuntimeInstallableVersions.JAVA_21,
JavaRuntimeInstallableVersions.JAVA_17,
- JavaRuntimeInstallableVersions.JAVA_11,
+ JavaRuntimeInstallableVersions.JAVA_21_AMZN,
+ JavaRuntimeInstallableVersions.JAVA_25_ORACLE,
+ JavaRuntimeInstallableVersions.JAVA_26_EA_24_OPEN,
+ JavaRuntimeInstallableVersions.JAVA_11
]
@staticmethod
@@ -29,7 +36,8 @@ def get_version_id(version):
for version_check in JavaRuntimeInstallableVersions.get_all_versions():
if version_check.version == version:
return version_check.version_id
- raise ValueError(f"Java version {version} not supported")
+        # Unknown Java versions fall back to a catch-all runtime id instead of raising
+        return "all_runtimes_in_one"
class PHPRuntimeInstallableVersions:
diff --git a/utils/docker_ssi/docker_ssi_runtimes.json b/utils/docker_ssi/docker_ssi_runtimes.json
index 78d34ea9376..ace4ce7c83e 100644
--- a/utils/docker_ssi/docker_ssi_runtimes.json
+++ b/utils/docker_ssi/docker_ssi_runtimes.json
@@ -13,6 +13,18 @@
"version_id": "JAVA_17",
"version": "17.0.15-zulu"
},
+ {
+ "version_id": "JAVA_21_AMZN",
+ "version": "21.0.9-amzn"
+ },
+ {
+ "version_id": "JAVA_25_ORACLE",
+ "version": "25.0.1-oracle"
+ },
+ {
+ "version_id": "JAVA_26_EA_24_OPEN",
+ "version": "26.ea.24-open"
+ },
{
"version_id": "JAVA_11",
"version": "11.0.27-zulu"
diff --git a/utils/scripts/ci_orchestrators/docker_ssi.json b/utils/scripts/ci_orchestrators/docker_ssi.json
index 4b61b37c855..0bb729ac2b7 100644
--- a/utils/scripts/ci_orchestrators/docker_ssi.json
+++ b/utils/scripts/ci_orchestrators/docker_ssi.json
@@ -2,13 +2,14 @@
"scenario_matrix": [
{
"scenarios": [
- "DOCKER_SSI", "DOCKER_SSI_APPSEC"
+ "DOCKER_SSI", "DOCKER_SSI_APPSEC", "DOCKER_SSI_PROFILING"
],
"weblogs": [
{
"java": [
"jetty-app",
- "java7-app"
+ "java7-app",
+ "spring-boot"
],
"nodejs": [
"js-app"
@@ -172,6 +173,17 @@
"name": "Wildfly_amd64"
}
]
+ },
+ {
+ "name": "spring-boot",
+ "supported_images": [
+ {
+ "name": "Ubuntu_22_amd64",
+ "allowed_runtime_versions": [
+ "*"
+ ]
+ }
+ ]
}
],
"python": [
diff --git a/utils/scripts/ci_orchestrators/workflow_data.py b/utils/scripts/ci_orchestrators/workflow_data.py
index 9ddf069cc3f..4de3d67d672 100644
--- a/utils/scripts/ci_orchestrators/workflow_data.py
+++ b/utils/scripts/ci_orchestrators/workflow_data.py
@@ -156,6 +156,8 @@ def get_docker_ssi_matrix(
runtime["version"]
for runtime in runtimes["docker_ssi_runtimes"].get(language, [])
)
+            # Also add a combined entry: all supported versions for the language, comma-separated
+ allowed_runtimes.append(",".join(allowed_runtimes))
else:
runtime_map = {
rt["version_id"]: rt["version"]