Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
121 changes: 121 additions & 0 deletions .github/scripts/filter_gradle_log.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
#!/usr/bin/env python3
"""
Streaming filter for Gradle test output.

Compresses verbose test logs:
  - PASSED tests: single summary line; all buffered output (including [TEST::INFO]) discarded
  - FAILED tests: full context emitted: STARTED line + buffered stdout/stderr + FAILED line
    followed by exception/stack trace that comes after the FAILED marker
  - SKIPPED tests: single summary line
  - CRASHED tests: if the stream ends mid-test (JVM kill, OOM, sanitizer abort), the full
    buffer is emitted with a warning header

Designed for inline use with `tee` so the unfiltered raw log is preserved:

    ./gradlew ... 2>&1 \\
        | tee -a "${RAW_LOG}" \\
        | python3 -u .github/scripts/filter_gradle_log.py

Exit code and PIPESTATUS:
    The filter always exits 0 regardless of test outcomes; use ${PIPESTATUS[0]} in bash
    to capture the Gradle exit code:

    ./gradlew ... 2>&1 | tee -a raw.log | python3 -u filter_gradle_log.py
    GRADLE_EXIT=${PIPESTATUS[0]}

Limitations:
  - [TEST::INFO] lines emitted from class-level lifecycle methods (@BeforeAll, static
    initializers) appear before any STARTED marker and are suppressed in OUTSIDE state.
    They remain visible in the raw log preserved by tee.
  - NOTE(review): the state machine assumes per-test event lines arrive serialized in
    the stream; if parallel workers interleave markers from different tests, output
    attribution will be wrong -- confirm Gradle groups per-test output before relying
    on the filtered log for interleaved runs.
"""

import re
import sys

# Matches Gradle per-test event lines emitted by the Test task:
#
#   com.example.FooTest > testBar STARTED
#   com.example.FooTest > testBar[1] PASSED (0.456s)
#   com.example.FooTest > testBar(int) FAILED
#   com.example.FooTest > testBar SKIPPED
#
# The class name starts with a word character (not '>'), which prevents matching
# "> Task :project:taskName FAILED" build-level lines.
_TEST_EVENT = re.compile(
    r'^([\w.$][\w.$ ]* > \S.*?) (STARTED|PASSED|FAILED|SKIPPED)(\s+\([^)]+\))?\s*$'
)


def emit(line: str) -> None:
    """Print one line and flush immediately so output survives abrupt pipeline death."""
    print(line, flush=True)


def main() -> None:
    """Run the line-oriented filter state machine over stdin until EOF.

    Reads raw Gradle output from sys.stdin and writes the compressed log to
    stdout via emit(). Always returns normally; the caller's exit code is 0.
    """
    # --- States ---
    OUTSIDE = 0    # between tests: pass lines through directly
    BUFFERING = 1  # inside a running test: accumulate output
    FAILING = 2    # after FAILED marker: pass lines through until next test

    state = OUTSIDE
    buf: list[str] = []  # lines collected since the current test's STARTED marker

    for raw in sys.stdin:
        # Strip CR as well as LF so CRLF input does not leak a trailing '\r'
        # into buffered/emitted lines (the event regex's trailing \s*$ already
        # tolerates either line ending, but buffered output would keep the CR).
        line = raw.rstrip('\r\n')
        m = _TEST_EVENT.match(line)

        if m:
            event = m.group(2)

            if event == 'STARTED':
                if state == BUFFERING:
                    # Previous test had no outcome line (shouldn't normally happen).
                    # Emit the buffer so we don't silently discard output.
                    for buffered_line in buf:
                        emit(buffered_line)
                elif state == FAILING:
                    emit('')  # blank line to visually separate failure blocks

                # Include the STARTED line in the buffer so it appears in failure output.
                buf = [line]
                state = BUFFERING

            elif event == 'PASSED':
                buf = []
                emit(line)
                state = OUTSIDE

            elif event == 'FAILED':
                # Emit everything collected since STARTED (includes [TEST::INFO] lines).
                for buffered_line in buf:
                    emit(buffered_line)
                buf = []
                emit(line)
                state = FAILING

            elif event == 'SKIPPED':
                buf = []
                emit(line)
                state = OUTSIDE

        elif state == BUFFERING:
            buf.append(line)

        else:
            # OUTSIDE or FAILING: pass through directly.
            # In FAILING state this captures exception lines, stack traces, etc.
            # In OUTSIDE state, suppress [TEST::INFO] lines: they originate from
            # class-level init (@BeforeAll, static blocks) and are noise when no
            # test has failed; the raw log still contains them for reference.
            if state == FAILING or not line.startswith('[TEST::INFO]'):
                emit(line)

    # EOF handling: if still inside a test the JVM likely crashed (SIGABRT from sanitizer,
    # OOM kill, etc.). Emit everything so the failure is visible in the filtered log.
    if state == BUFFERING and buf:
        emit('# WARNING: stream ended inside a test (crash / OOM / sanitizer abort?)')
        for buffered_line in buf:
            emit(buffered_line)


if __name__ == '__main__':
    main()
4 changes: 4 additions & 0 deletions .github/scripts/prepare_reports.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,11 @@
set -e
mkdir -p test-reports
mkdir -p unwinding-reports
cp build/test-raw.log test-reports/ || true
cp /tmp/hs_err* test-reports/ || true
cp /tmp/asan_*.log test-reports/ || true
cp /tmp/ubsan_*.log test-reports/ || true
cp /tmp/tsan_*.log test-reports/ || true
cp ddprof-test/javacore*.txt test-reports/ || true
cp ddprof-test/build/hs_err* test-reports/ || true
cp -r ddprof-lib/build/tmp test-reports/native_build || true
Expand Down
33 changes: 23 additions & 10 deletions .github/workflows/test_workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -101,9 +101,12 @@ jobs:
export TEST_CONFIGURATION=glibc/${{ matrix.java_version }}-${{ matrix.config }}-amd64
export LIBC=glibc
export SANITIZER=${{ matrix.config }}

./gradlew -PCI -PkeepJFRs :ddprof-test:test${{ matrix.config }} --no-daemon --parallel --build-cache --no-watch-fs
EXIT_CODE=$?

mkdir -p build/logs
./gradlew -PCI -PkeepJFRs :ddprof-test:test${{ matrix.config }} --no-daemon --parallel --build-cache --no-watch-fs 2>&1 \
| tee -a build/test-raw.log \
| python3 -u .github/scripts/filter_gradle_log.py
EXIT_CODE=${PIPESTATUS[0]}

if [ $EXIT_CODE -ne 0 ]; then
echo "glibc-${{ matrix.java_version }}-${{ matrix.config }}-amd64" >> failures_glibc-${{ matrix.java_version }}-${{ matrix.config }}-amd64.txt
Expand Down Expand Up @@ -163,7 +166,7 @@ jobs:
steps:
- name: Setup OS
run: |
apk update && apk add curl moreutils wget hexdump linux-headers bash make g++ clang git cppcheck jq cmake gtest-dev gmock tar binutils >/dev/null
apk update && apk add curl moreutils wget hexdump linux-headers bash make g++ clang git cppcheck jq cmake gtest-dev gmock tar binutils python3 >/dev/null
# Install debug symbols for musl libc
apk add musl-dbg
- uses: actions/checkout@v6
Expand All @@ -190,6 +193,7 @@ jobs:
- name: Extract Versions
uses: ./.github/actions/extract_versions
- name: Test
shell: bash
run: |
set +e
Expand Down Expand Up @@ -217,8 +221,11 @@ jobs:
export JAVA_VERSION
echo "JAVA_VERSION=${JAVA_VERSION}"
./gradlew -PCI -PkeepJFRs :ddprof-test:test${{ matrix.config }} --no-daemon --parallel --build-cache --no-watch-fs
EXIT_CODE=$?
mkdir -p build/logs
./gradlew -PCI -PkeepJFRs :ddprof-test:test${{ matrix.config }} --no-daemon --parallel --build-cache --no-watch-fs 2>&1 \
| tee -a build/test-raw.log \
| python3 -u .github/scripts/filter_gradle_log.py
EXIT_CODE=${PIPESTATUS[0]}
if [ $EXIT_CODE -ne 0 ]; then
echo "musl-${{ matrix.java_version }}-${{ matrix.config }}-amd64" >> failures_musl-${{ matrix.java_version }}-${{ matrix.config }}-amd64.txt
Expand Down Expand Up @@ -339,8 +346,11 @@ jobs:
export LIBC=glibc
export SANITIZER=${{ matrix.config }}
./gradlew -PCI -PkeepJFRs :ddprof-test:test${{ matrix.config }} --no-daemon --parallel --build-cache --no-watch-fs
EXIT_CODE=$?
mkdir -p build/logs
./gradlew -PCI -PkeepJFRs :ddprof-test:test${{ matrix.config }} --no-daemon --parallel --build-cache --no-watch-fs 2>&1 \
| tee -a build/test-raw.log \
| python3 -u .github/scripts/filter_gradle_log.py
EXIT_CODE=${PIPESTATUS[0]}
if [ $EXIT_CODE -ne 0 ]; then
echo "glibc-${{ matrix.java_version }}-${{ matrix.config }}-aarch64" >> failures_glibc-${{ matrix.java_version }}-${{ matrix.config }}-aarch64.txt
Expand Down Expand Up @@ -424,13 +434,16 @@ jobs:
run: |
set +e
# the effective JAVA_VERSION is computed in the test_alpine_aarch64.sh script
mkdir -p build/logs
docker run --cpus 4 --rm -v /tmp:/tmp -v "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}" -w "${GITHUB_WORKSPACE}" alpine:3.21 /bin/sh -c "
\"$GITHUB_WORKSPACE/.github/scripts/test_alpine_aarch64.sh\" \
\"${{ github.sha }}\" \"musl/${{ matrix.java_version }}-${{ matrix.config }}-aarch64\" \
\"${{ matrix.config }}\" \"${{ env.JAVA_HOME }}\" \"${{ env.JAVA_TEST_HOME }}\"
"
" 2>&1 \
| tee -a build/test-raw.log \
| python3 -u .github/scripts/filter_gradle_log.py
EXIT_CODE=$?
EXIT_CODE=${PIPESTATUS[0]}
if [ $EXIT_CODE -ne 0 ]; then
echo "musl-${{ matrix.java_version }}-${{ matrix.config }}-aarch64" >> failures_musl-${{ matrix.java_version }}-${{ matrix.config }}-aarch64.txt
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -205,8 +205,8 @@ object ConfigurationPresets {
if (libasan != null) {
config.testEnvironment.apply {
put("LD_PRELOAD", libasan)
put("ASAN_OPTIONS", "allocator_may_return_null=1:unwind_abort_on_malloc=1:use_sigaltstack=0:detect_stack_use_after_return=0:handle_segv=1:halt_on_error=0:abort_on_error=0:print_stacktrace=1:symbolize=1:suppressions=$rootDir/gradle/sanitizers/asan.supp")
put("UBSAN_OPTIONS", "halt_on_error=0:abort_on_error=0:print_stacktrace=1:suppressions=$rootDir/gradle/sanitizers/ubsan.supp")
put("ASAN_OPTIONS", "allocator_may_return_null=1:unwind_abort_on_malloc=1:use_sigaltstack=0:detect_stack_use_after_return=0:handle_segv=1:halt_on_error=0:abort_on_error=0:print_stacktrace=1:symbolize=1:log_path=/tmp/asan_%p.log:suppressions=$rootDir/gradle/sanitizers/asan.supp")
put("UBSAN_OPTIONS", "halt_on_error=0:abort_on_error=0:print_stacktrace=1:log_path=/tmp/ubsan_%p.log:suppressions=$rootDir/gradle/sanitizers/ubsan.supp")
put("LSAN_OPTIONS", "detect_leaks=0")
}
}
Expand Down Expand Up @@ -260,7 +260,7 @@ object ConfigurationPresets {
if (libtsan != null) {
config.testEnvironment.apply {
put("LD_PRELOAD", libtsan)
put("TSAN_OPTIONS", "suppressions=$rootDir/gradle/sanitizers/tsan.supp")
put("TSAN_OPTIONS", "suppressions=$rootDir/gradle/sanitizers/tsan.supp:log_path=/tmp/tsan_%p.log")
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ class ProfilerTestPlugin : Plugin<Project> {
// Test output
testTask.testLogging {
val logging = this
logging.events("passed", "skipped", "failed")
logging.events("started", "passed", "skipped", "failed")
logging.showStandardStreams = true
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,21 @@
package com.datadoghq.profiler.test;

import org.junit.platform.engine.TestExecutionResult;
import org.junit.platform.engine.TestSource;
import org.junit.platform.engine.discovery.ClassNameFilter;
import org.junit.platform.engine.discovery.DiscoverySelectors;
import org.junit.platform.engine.support.descriptor.MethodSource;
import org.junit.platform.launcher.Launcher;
import org.junit.platform.launcher.LauncherDiscoveryRequest;
import org.junit.platform.launcher.TestExecutionListener;
import org.junit.platform.launcher.TestIdentifier;
import org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder;
import org.junit.platform.launcher.core.LauncherFactory;
import org.junit.platform.launcher.listeners.SummaryGeneratingListener;

import java.io.PrintWriter;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

/**
* Custom test runner using JUnit Platform Launcher API.
Expand Down Expand Up @@ -101,10 +108,10 @@ private static void runTests() {

LauncherDiscoveryRequest request = requestBuilder.build();

// Create launcher and register listener
// Create launcher and register listeners
Launcher launcher = LauncherFactory.create();
SummaryGeneratingListener listener = new SummaryGeneratingListener();
launcher.registerTestExecutionListeners(listener);
launcher.registerTestExecutionListeners(new GradleStyleTestListener(), listener);

// Execute tests
launcher.execute(request);
Expand Down Expand Up @@ -149,4 +156,69 @@ private static boolean isMethodFilter(String filter) {
// Method names conventionally start with lowercase
return Character.isLowerCase(lastSegment.charAt(0));
}

/**
 * Emits per-test STARTED / PASSED / FAILED / SKIPPED markers to stdout in the same
 * format as Gradle's Test task, so that filter_gradle_log.py can compress the output
 * identically on both glibc and musl paths.
 *
 * Output format (matches Gradle's testLogging output):
 *   com.example.FooTest > testBar STARTED
 *   com.example.FooTest > testBar PASSED (42ms)
 *   com.example.FooTest > testBar FAILED
 *       java.lang.AssertionError: ...
 *   com.example.FooTest > testBar SKIPPED
 */
private static final class GradleStyleTestListener implements TestExecutionListener {
    // Wall-clock start time per test unique id; used only to render the
    // "(NNms)" duration suffix on the PASSED line.
    private final ConcurrentHashMap<String, Long> startTimes = new ConcurrentHashMap<>();

    @Override
    public void executionStarted(TestIdentifier testIdentifier) {
        if (!testIdentifier.isTest()) return;
        startTimes.put(testIdentifier.getUniqueId(), System.currentTimeMillis());
        String name = formatName(testIdentifier);
        if (name != null) {
            System.out.println(name + " STARTED");
            System.out.flush();
        }
    }

    @Override
    public void executionFinished(TestIdentifier testIdentifier, TestExecutionResult result) {
        if (!testIdentifier.isTest()) return;

        // Remove the start-time entry unconditionally: executionStarted() records it
        // for every test, including ones formatName() cannot label (non-MethodSource),
        // so removing only after the name check would leak map entries forever.
        Long start = startTimes.remove(testIdentifier.getUniqueId());

        String name = formatName(testIdentifier);
        if (name == null) return;

        long ms = start != null ? System.currentTimeMillis() - start : 0;

        switch (result.getStatus()) {
            case SUCCESSFUL:
                System.out.printf("%s PASSED (%dms)%n", name, ms);
                break;
            case FAILED:
            case ABORTED:
                // ABORTED is reported as FAILED so the downstream log filter keeps
                // the buffered context and the stack trace printed just below.
                System.out.printf("%s FAILED%n", name);
                result.getThrowable().ifPresent(t -> t.printStackTrace(System.out));
                break;
        }
        System.out.flush();
    }

    @Override
    public void executionSkipped(TestIdentifier testIdentifier, String reason) {
        if (!testIdentifier.isTest()) return;
        String name = formatName(testIdentifier);
        if (name != null) {
            System.out.println(name + " SKIPPED");
            System.out.flush();
        }
    }

    /**
     * Gradle-style "com.example.FooTest > testBar" label, or null when the
     * identifier is not backed by a MethodSource (e.g. dynamic containers).
     */
    private static String formatName(TestIdentifier testIdentifier) {
        Optional<TestSource> source = testIdentifier.getSource();
        if (!source.isPresent() || !(source.get() instanceof MethodSource)) return null;
        MethodSource ms = (MethodSource) source.get();
        return ms.getClassName() + " > " + ms.getMethodName();
    }
}
}
Loading