8 changes: 4 additions & 4 deletions Jenkinsfile
@@ -43,7 +43,7 @@ pipeline {

parameters {
choice(name: 'nodeLabel', choices: ['ubuntu', 's390x', 'arm', 'Windows'])
choice(name: 'jdkVersion', choices: ['jdk_17_latest', 'jdk_21_latest', 'jdk_24_latest', 'jdk_17_latest_windows', 'jdk_21_latest_windows', 'jdk_24_latest_windows'])
choice(name: 'jdkVersion', choices: ['jdk_17_latest', 'jdk_21_latest', 'jdk_25_latest', 'jdk_17_latest_windows', 'jdk_21_latest_windows', 'jdk_25_latest_windows'])
Member: I guess we should remove jdk_17 there already.

@mattrpav (Contributor, Author) commented Nov 13, 2025: JDK 17 should stay. The new change is that during release builds we need to ensure JDK 21 is used so that the multi-release jar works. The other PR has the enforcer plugin set to check for JDK 21 being used only during release.

Member: It does, but at present this PR should also update it to require at least 24 (I just went with requiring 25 for Artemis; that sets the 25 baseline now and saves changing it later, plus who really needs to use 24 for releasing now), since that's the compilation level being used.

booleanParam(name: 'deployEnabled', defaultValue: false)
booleanParam(name: 'sonarEnabled', defaultValue: false)
booleanParam(name: 'testsEnabled', defaultValue: true)
@@ -72,12 +72,12 @@ pipeline {
}
}

stage('Build JDK 24') {
stage('Build JDK 25') {
tools {
jdk "jdk_24_latest"
jdk "jdk_25_latest"
}
steps {
echo 'Building JDK 24'
echo 'Building JDK 25'
sh 'java -version'
sh 'mvn -version'
sh 'mvn -U -B -e clean install -DskipTests'
29 changes: 28 additions & 1 deletion activemq-broker/pom.xml
@@ -242,7 +242,7 @@
</plugins>
</build>
</profile>
<profile>
<profile>
<id>activemq.tests-autoTransport</id>
<activation>
<property>
@@ -263,5 +263,32 @@
</plugins>
</build>
</profile>
<profile>
<id>jdk24-plus</id>
<activation>
<jdk>[24,)</jdk>
</activation>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<executions>
<execution>
<id>java24-compile</id>
<phase>compile</phase>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<release>24</release> <!-- Specific Java version for alternative classes -->
<compileSourceRoots>${project.basedir}/src/main/java24</compileSourceRoots>
<multiReleaseOutput>true</multiReleaseOutput>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
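
With multiReleaseOutput enabled, the compiler execution above writes the classes built from src/main/java24 under META-INF/versions/24 of the build output, so the packaged activemq-broker artifact becomes a multi-release jar roughly as sketched below. The Multi-Release: true manifest attribute still has to be added by the jar plugin; that configuration is not visible in this diff, so treat that part as an assumption rather than something this PR shows.

activemq-broker-<version>.jar
    META-INF/MANIFEST.MF                                          (needs Multi-Release: true)
    org/apache/activemq/broker/jmx/SubjectShim.class              (JDK 17 baseline: Subject.getSubject(AccessController.getContext()))
    META-INF/versions/24/org/apache/activemq/broker/jmx/SubjectShim.class
                                                                  (selected automatically on JDK 24+: Subject.current())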
@@ -205,8 +205,15 @@ public Object invoke(String s, Object[] objects, String[] strings) throws MBeanE
objects = (objects == null) ? new Object[]{} : objects;
JMXAuditLogEntry entry = null;
if (audit != OFF) {
// [AMQ-9563] TODO: JDK 21 use Subject.current() instead
Subject subject = Subject.getSubject(AccessController.getContext());
/**
* [AMQ-9563] JDK JAAS API conversion assistance
*
* Use a shim together with a multi-release jar to
* support JDK 17 and JDK 24+ in a single build.
*
* see: src/main/java24 folder
*/
Subject subject = SubjectShim.lookupSubject();
String caller = "anonymous";
if (subject != null) {
caller = "";
@@ -0,0 +1,35 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.broker.jmx;

import javax.security.auth.Subject;
import java.security.AccessController;

/**
* [AMQ-9563] JDK JAAS API conversion assistance
*
* This variant of the class is used on JDK [17, 24)
*
*/
public class SubjectShim {

private SubjectShim() {}

public static Subject lookupSubject() {
return Subject.getSubject(AccessController.getContext());
}
}
@@ -0,0 +1,34 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.broker.jmx;

import javax.security.auth.Subject;

/**
* [AMQ-9563] JDK JAAS API conversion assistance
*
* This variant of the class is used on JDK 24+
*
*/
public class SubjectShim {

private SubjectShim() {}

public static Subject lookupSubject() {
return Subject.current();
}
}
@@ -44,6 +44,7 @@
import org.apache.activemq.store.kahadb.data.KahaLocalTransactionId;
import org.apache.activemq.store.kahadb.data.KahaTransactionInfo;
import org.apache.activemq.store.kahadb.data.KahaXATransactionId;
import org.apache.activemq.store.kahadb.disk.journal.DataFileFactory;
import org.apache.activemq.store.kahadb.disk.journal.Journal.JournalDiskSyncStrategy;
import org.apache.activemq.usage.SystemUsage;
import org.apache.activemq.util.ServiceStopper;
@@ -840,4 +841,8 @@ public void setCleanupOnStop(boolean cleanupOnStop) {
public boolean getCleanupOnStop() {
return this.letter.getCleanupOnStop();
}

public void setDataFileFactory(DataFileFactory dataFileFactory) {
this.letter.setDataFileFactory(dataFileFactory);
}
}
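
With the setter added above (KahaDBPersistenceAdapter delegates to the wrapped store, which hands the factory on to the Journal, as the later hunks show), a custom DataFileFactory can be wired in before the broker starts. Because DataFileFactory declares a single create method, a lambda is enough. A minimal sketch, assuming nothing beyond the types visible in this diff and the standard BrokerService API:

import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter;
import org.apache.activemq.store.kahadb.disk.journal.DefaultDataFileFactory;

public class CustomDataFileFactoryExample {
    public static void main(String[] args) throws Exception {
        BrokerService broker = new BrokerService();
        KahaDBPersistenceAdapter adapter = new KahaDBPersistenceAdapter();

        // Delegate to the default factory but log every journal data file that gets created.
        DefaultDataFileFactory delegate = new DefaultDataFileFactory();
        adapter.setDataFileFactory((file, number) -> {
            System.out.println("creating journal data file " + number + " at " + file);
            return delegate.create(file, number);
        });

        broker.setPersistenceAdapter(adapter);
        broker.start();
    }
}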
@@ -92,11 +92,8 @@
import org.apache.activemq.store.kahadb.disk.index.BTreeIndex;
import org.apache.activemq.store.kahadb.disk.index.BTreeVisitor;
import org.apache.activemq.store.kahadb.disk.index.ListIndex;
import org.apache.activemq.store.kahadb.disk.journal.DataFile;
import org.apache.activemq.store.kahadb.disk.journal.Journal;
import org.apache.activemq.store.kahadb.disk.journal.*;
import org.apache.activemq.store.kahadb.disk.journal.Journal.JournalDiskSyncStrategy;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.journal.TargetedDataFileAppender;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
@@ -266,6 +263,7 @@ public enum PurgeRecoveredXATransactionStrategy {

protected JournalDiskSyncStrategy journalDiskSyncStrategy = JournalDiskSyncStrategy.ALWAYS;
protected boolean archiveDataLogs;
protected DataFileFactory dataFileFactory;
protected File directoryArchive;
protected AtomicLong journalSize = new AtomicLong(0);
long journalDiskSyncInterval = 1000;
@@ -3421,6 +3419,9 @@ protected Journal createJournal() throws IOException {
IOHelper.mkdirs(getDirectoryArchive());
manager.setDirectoryArchive(getDirectoryArchive());
}
if (getDataFileFactory() != null) {
manager.setDataFileFactory(getDataFileFactory());
}
return manager;
}

@@ -4297,4 +4298,12 @@ private void handleIOException(String taskName, IOException ioe) {
LOG.debug(e.getMessage(), e);
}
}

public DataFileFactory getDataFileFactory() {
return this.dataFileFactory;
}

public void setDataFileFactory(DataFileFactory dataFileFactory) {
this.dataFileFactory = dataFileFactory;
}
}
@@ -0,0 +1,23 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.store.kahadb.disk.journal;

import java.io.File;

public interface DataFileFactory {
DataFile create(File file, int number);
}
@@ -0,0 +1,27 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.store.kahadb.disk.journal;

import java.io.File;

public class DefaultDataFileFactory implements DataFileFactory {

@Override
public DataFile create(File file, int number) {
return new DataFile(file, number);
}
}
@@ -241,6 +241,7 @@ private static byte[] createEofBatchAndLocationRecord() {
private long cleanupInterval = DEFAULT_CLEANUP_INTERVAL;

protected JournalDiskSyncStrategy journalDiskSyncStrategy = JournalDiskSyncStrategy.ALWAYS;
protected DataFileFactory dataFileFactory = new DefaultDataFileFactory();

public interface DataFileRemovedListener {
void fileRemoved(DataFile datafile);
@@ -272,7 +273,7 @@ public boolean accept(File dir, String n) {
String n = file.getName();
String numStr = n.substring(filePrefix.length(), n.length()-fileSuffix.length());
int num = Integer.parseInt(numStr);
DataFile dataFile = new DataFile(file, num);
DataFile dataFile = dataFileFactory.create(file, num);
fileMap.put(dataFile.getDataFileId(), dataFile);
totalLength.addAndGet(dataFile.getLength());
} catch (NumberFormatException e) {
@@ -687,7 +688,7 @@ public void run() {
private DataFile newDataFile() throws IOException {
int nextNum = nextDataFileId++;
File file = getFile(nextNum);
DataFile nextWriteFile = new DataFile(file, nextNum);
DataFile nextWriteFile = dataFileFactory.create(file, nextNum);
preallocateEntireJournalDataFile(nextWriteFile.appendRandomAccessFile());
return nextWriteFile;
}
@@ -697,7 +698,7 @@ public DataFile reserveDataFile() {
synchronized (dataFileIdLock) {
int nextNum = nextDataFileId++;
File file = getFile(nextNum);
DataFile reservedDataFile = new DataFile(file, nextNum);
DataFile reservedDataFile = dataFileFactory.create(file, nextNum);
synchronized (currentDataFile) {
fileMap.put(reservedDataFile.getDataFileId(), reservedDataFile);
fileByFileMap.put(file, reservedDataFile);
@@ -1164,6 +1165,14 @@ public void setDataFileRemovedListener(DataFileRemovedListener dataFileRemovedLi
this.dataFileRemovedListener = dataFileRemovedListener;
}

public void setDataFileFactory(DataFileFactory dataFileFactory) {
this.dataFileFactory = dataFileFactory;
}

public DataFileFactory getDataFileFactory() {
return this.dataFileFactory;
}

public static class WriteCommand extends LinkedNode<WriteCommand> {
public final Location location;
public final ByteSequence data;
@@ -22,6 +22,9 @@
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.store.kahadb.disk.journal.DataFile;
import org.apache.activemq.store.kahadb.disk.journal.DataFileFactory;
import org.apache.activemq.store.kahadb.disk.journal.DefaultDataFileFactory;
import org.apache.activemq.store.kahadb.disk.journal.ErrorDataFileFactory;
import org.junit.After;
import org.junit.Test;
import org.slf4j.Logger;
@@ -54,6 +57,7 @@ public class JournalArchiveTest {
private BrokerService broker = null;
private final Destination destination = new ActiveMQQueue("Test");
private KahaDBPersistenceAdapter adapter;
private DataFileFactory dataFileFactory;

protected void startBroker() throws Exception {
doStartBroker(true);
@@ -104,6 +108,7 @@ protected void configurePersistence(BrokerService brokerService) throws Exceptio
adapter.setCheckForCorruptJournalFiles(true);

adapter.setArchiveDataLogs(true);
adapter.setDataFileFactory(dataFileFactory);
}

@After
@@ -119,16 +124,8 @@ public void tearDown() throws Exception {
public void testRecoveryOnArchiveFailure() throws Exception {
final AtomicInteger atomicInteger = new AtomicInteger();

System.setSecurityManager(new SecurityManager() {
public void checkPermission(Permission perm) {}
public void checkPermission(Permission perm, Object context) {}
this.dataFileFactory = new ErrorDataFileFactory();

public void checkWrite(String file) {
if (file.contains(DEFAULT_ARCHIVE_DIRECTORY) && atomicInteger.incrementAndGet() > 4) {
throw new SecurityException("No Perms to write to archive times:" + atomicInteger.get());
}
}
});
startBroker();

int sent = produceMessagesToConsumeMultipleDataFiles(50);
@@ -151,7 +148,7 @@ public void run() {
assertTrue("broker got shutdown on page in error", gotShutdown.await(10, TimeUnit.SECONDS));

// no restrictions
System.setSecurityManager(null);
this.dataFileFactory = new DefaultDataFileFactory();

int numFilesAfterRestart = 0;
try {
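
The test change above replaces the old SecurityManager trick with an injected ErrorDataFileFactory: System.setSecurityManager can no longer be used to make archive writes fail because the Security Manager is permanently disabled as of JDK 24 (JEP 486). The ErrorDataFileFactory implementation itself is not part of the hunks shown here; a minimal sketch of what such a test-only factory could look like, assuming DataFile exposes an overridable move(File) that the journal calls when archiving (everything beyond the names visible in this diff is illustrative):

package org.apache.activemq.store.kahadb.disk.journal;

import java.io.File;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;

// Test-only factory: after a few successful archive moves, every further move fails,
// driving the broker down the same error-handling path the SecurityManager-based
// version of the test used to exercise.
public class ErrorDataFileFactory implements DataFileFactory {

    private final AtomicInteger archiveAttempts = new AtomicInteger();

    @Override
    public DataFile create(File file, int number) {
        return new DataFile(file, number) {
            @Override
            public void move(File targetDirectory) throws IOException {
                if (archiveAttempts.incrementAndGet() > 4) {
                    throw new IOException("No perms to write to archive, attempt: " + archiveAttempts.get());
                }
                super.move(targetDirectory);
            }
        };
    }
}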