
Commit e0bde3e

Kerberos now working in Phoenix
1 parent 9cd27ea commit e0bde3e

File tree

4 files changed (+238 additions, −46 deletions)


contrib/storage-phoenix/pom.xml

Lines changed: 91 additions & 16 deletions
@@ -193,28 +193,102 @@
           <groupId>log4j</groupId>
           <artifactId>log4j</artifactId>
         </exclusion>
-      </exclusions>
-    </dependency>
-
-    <!-- Helpful for JAAS/Kerberos bits used by secured tests -->
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-auth</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-      <exclusions>
+        <!-- Exclude old Kerby versions to avoid conflicts -->
         <exclusion>
-          <groupId>commons-logging</groupId>
-          <artifactId>commons-logging</artifactId>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-simplekdc</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>log4j</groupId>
-          <artifactId>log4j</artifactId>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-common</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-crypto</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.kerby</groupId>
+          <artifactId>kerb-util</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
 
+    <!-- Explicitly add compatible Kerby dependencies for Hadoop 3.4.1 -->
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-client</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-common</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-core</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-crypto</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-util</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-config</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-asn1</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-pkix</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-util</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
 
+    <dependency>
+      <groupId>org.bouncycastle</groupId>
+      <artifactId>bcprov-jdk18on</artifactId>
+      <version>1.78.1</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>

@@ -230,8 +304,9 @@
             -Xms2048m -Xmx2048m
             --add-opens=java.base/java.lang=ALL-UNNAMED
             --add-opens=java.base/java.util=ALL-UNNAMED
-            -Dhbase.security.authentication=simple
-            -Dhadoop.security.authentication=simple
+            -Djava.net.preferIPv4Stack=true
+            -Dsun.security.krb5.debug=true
+            -Dsun.security.krb5.allowUdp=false
           </argLine>
         </configuration>
       </plugin>
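Note (not part of this commit): the new test dependencies reference a `${kerby.version}` property that is assumed to be defined elsewhere in the pom or a parent pom. If the exclusions above ever stop matching what Hadoop pulls in transitively, a quick hypothetical sanity check is to print where the Kerby core classes are actually loaded from; the class name below is illustrative only.

import org.apache.kerby.kerberos.kerb.KrbException;

// Hypothetical sanity check, not part of the commit: report which jar provides
// the Kerby core classes on the test classpath and what version its manifest claims.
public class KerbyClasspathCheck {
  public static void main(String[] args) {
    // Jar (or directory) the Kerby classes were loaded from at runtime.
    System.out.println(KrbException.class.getProtectionDomain()
        .getCodeSource().getLocation());
    // Implementation-Version from the jar manifest; may be null if the manifest lacks it.
    System.out.println(KrbException.class.getPackage().getImplementationVersion());
  }
}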

contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/PhoenixEnvironment.java

Lines changed: 116 additions & 27 deletions
@@ -37,6 +37,8 @@
 import org.apache.phoenix.query.ConfigurationFactory;
 import org.apache.phoenix.util.InstanceResolver;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;

@@ -46,6 +48,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.UUID;
 
 import static org.apache.hadoop.hbase.HConstants.HBASE_DIR;

@@ -80,7 +83,7 @@ public class PhoenixEnvironment {
   static {
     try {
       // uncomment it for debugging purposes
-      // System.setProperty("sun.security.krb5.debug", "true");
+      System.setProperty("sun.security.krb5.debug", "true");
       LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME = InetAddress.getByName("127.0.0.1").getCanonicalHostName();
       String userName = System.getProperty("user.name");
       LOGIN_USER = userName != null ? userName : "securecluster";

@@ -100,6 +103,7 @@ public class PhoenixEnvironment {
   private int numCreatedUsers;
 
   private final String phoenixUrl;
+  private static final Logger logger = LoggerFactory.getLogger(PhoenixEnvironment.class);
 
   private static Configuration conf() {
     Configuration configuration = HBaseConfiguration.create();

@@ -206,49 +210,97 @@ public PhoenixEnvironment(final Configuration confIn, int numberOfUsers, boolean
     ensureIsEmptyDirectory(tempDir);
     ensureIsEmptyDirectory(keytabDir);
     keytab = new File(keytabDir, "test.keytab");
+
     // Start a MiniKDC
-    kdc = util.setupMiniKdc(keytab);
-    // Create a service principal and spnego principal in one keytab
-    // NB. Due to some apparent limitations between HDFS and HBase in the same JVM, trying to
-    // use separate identies for HBase and HDFS results in a GSS initiate error. The quick
-    // solution is to just use a single "service" principal instead of "hbase" and "hdfs"
-    // (or "dn" and "nn") per usual.
-    kdc.createPrincipal(keytab, SPNEGO_PRINCIPAL, PQS_PRINCIPAL, SERVICE_PRINCIPAL);
-    // Start ZK by hand
+    File kdcWorkDir = new File(new File(getTempDir()), "kdc-" + System.currentTimeMillis());
+    ensureIsEmptyDirectory(kdcWorkDir);
+
+    Properties kdcConf = org.apache.hadoop.minikdc.MiniKdc.createConf();
+    kdcConf.setProperty(org.apache.hadoop.minikdc.MiniKdc.KDC_BIND_ADDRESS, "127.0.0.1");
+    kdcConf.setProperty("kdc.tcp.port", "0");
+    kdcConf.setProperty("kdc.allow_udp", "false");
+    kdcConf.setProperty("kdc.encryption.types", "aes128-cts-hmac-sha1-96");
+    kdcConf.setProperty("kdc.fast.enabled", "false");
+    kdcConf.setProperty("kdc.preauth.required", "true");
+    kdcConf.setProperty("kdc.allowable.clockskew", "300000"); // 5m
+    kdcConf.setProperty(org.apache.hadoop.minikdc.MiniKdc.DEBUG, "true");
+
+    kdc = new org.apache.hadoop.minikdc.MiniKdc(kdcConf, kdcWorkDir);
+    kdc.start();
+
+    // Write krb5.conf that disables referrals/canonicalization
+    File krb5File = new File(kdcWorkDir, "krb5.conf");
+    writeKrb5Conf(krb5File.toPath(), kdc.getRealm(), "127.0.0.1", kdc.getPort());
+    System.setProperty("java.security.krb5.conf", krb5File.getAbsolutePath());
+    System.setProperty("sun.security.krb5.allowUdp", "false");
+    System.setProperty("sun.security.krb5.disableReferrals", "true");
+    System.setProperty("java.net.preferIPv4Stack", "true");
+    System.setProperty("sun.security.krb5.debug", "true");
+    System.clearProperty("java.security.krb5.realm"); // avoid env overrides
+    System.clearProperty("java.security.krb5.kdc");
+
+    // Fresh keytab every run; create principals in one shot
+    if (keytab.exists() && !keytab.delete()) {
+      throw new IOException("Couldn't delete old keytab: " + keytab);
+    }
+    keytab.getParentFile().mkdirs();
+
+    // Use a conventional service principal to avoid canonicalization surprises
+    final String SERVICE_PRINCIPAL_LOCAL = "hbase/localhost";
+    final String SPNEGO_PRINCIPAL_LOCAL = "HTTP/localhost";
+    final String PQS_PRINCIPAL_LOCAL = "phoenixqs/localhost";
+
+    kdc.createPrincipal(
+        keytab,
+        SPNEGO_PRINCIPAL_LOCAL,
+        PQS_PRINCIPAL_LOCAL,
+        SERVICE_PRINCIPAL_LOCAL
+    );
+    // --- End explicit MiniKDC setup ---
+
+    // Start ZK by hand
     util.startMiniZKCluster();
 
     // Create a number of unprivileged users
     createUsers(numberOfUsers);
 
-    // Set configuration for HBase
-    HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" + kdc.getRealm());
+    // HBase ↔ Kerberos wiring: set creds BEFORE setSecuredConfiguration
+    final String servicePrincipal = "hbase/localhost@" + kdc.getRealm();
+
+    conf.set("hadoop.security.authentication", "kerberos");
+    conf.set("hbase.security.authentication", "kerberos");
+
+    conf.set("hbase.master.keytab.file", keytab.getAbsolutePath());
+    conf.set("hbase.regionserver.keytab.file", keytab.getAbsolutePath());
+    conf.set("hbase.master.kerberos.principal", servicePrincipal);
+    conf.set("hbase.regionserver.kerberos.principal", servicePrincipal);
+
+    // Make HBase copy its secured defaults *after* we have principals/keytab in conf
+    HBaseKerberosUtils.setPrincipalForTesting(servicePrincipal);
+    HBaseKerberosUtils.setKeytabFileForTesting(keytab.getAbsolutePath());
     HBaseKerberosUtils.setSecuredConfiguration(conf);
+
+    // HDFS side
     setHdfsSecuredConfiguration(conf);
+
+    // UGI must see kerberos
+    UserGroupInformation.setConfiguration(conf);
+
+    // Preflight: prove the keytab/KDC works *before* we start HBase
+    UserGroupInformation.loginUserFromKeytab(servicePrincipal, keytab.getAbsolutePath());
+    logger.info("UGI login OK for {}", servicePrincipal);
+
     UserGroupInformation.setConfiguration(conf);
+
     conf.setInt(HConstants.MASTER_PORT, 0);
     conf.setInt(HConstants.MASTER_INFO_PORT, 0);
     conf.setInt(HConstants.REGIONSERVER_PORT, 0);
     conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0);
 
-    conf.set("hadoop.security.authentication", "kerberos");
-    conf.set("hbase.security.authentication", "kerberos");
-
-    File serviceKeytab = getServiceKeytab(); // already generated by your MiniKdc setup
-    String servicePrincipal = HBaseKerberosUtils.getPrincipalForTesting();
-    if (serviceKeytab == null || servicePrincipal == null) {
-      throw new IllegalStateException("MiniKdc did not provide service keytab/principal");
-    }
-
-    conf.set("hbase.master.keytab.file", serviceKeytab.getAbsolutePath());
-    conf.set("hbase.regionserver.keytab.file", serviceKeytab.getAbsolutePath());
-    conf.set("hbase.master.kerberos.principal", servicePrincipal);
-    conf.set("hbase.regionserver.kerberos.principal", servicePrincipal);
-
     // Coprocessors, proxy user configs, etc. (whatever you already have)
     conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
     conf.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
-    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
-        AccessController.class.getName(), TokenProvider.class.getName());
+    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName(), TokenProvider.class.getName());
 
     // Clear the cached singletons so we can inject our own.
     InstanceResolver.clearSingletons();

@@ -280,10 +332,47 @@ public Configuration getConfiguration(Configuration confToClone) {
     phoenixUrl = PhoenixRuntime.JDBC_PROTOCOL + ":localhost:" + getZookeeperPort();
   }
 
+  private static void writeKrb5Conf(java.nio.file.Path path, String realm, String host, int port) throws Exception {
+    String cfg =
+        "[libdefaults]\n" +
+        " default_realm = " + realm + "\n" +
+        " dns_lookup_kdc = false\n" +
+        " dns_lookup_realm = false\n" +
+        " dns_canonicalize_hostname = false\n" +
+        " rdns = false\n" +
+        " udp_preference_limit = 1\n" +
+        " default_tkt_enctypes = aes128-cts-hmac-sha1-96\n" +
+        " default_tgs_enctypes = aes128-cts-hmac-sha1-96\n" +
+        " permitted_enctypes = aes128-cts-hmac-sha1-96\n" +
+        "\n" +
+        "[realms]\n" +
+        " " + realm + " = {\n" +
+        "  kdc = " + host + ":" + port + "\n" +
+        "  admin_server = " + host + ":" + port + "\n" +
+        " }\n";
+    java.nio.file.Files.createDirectories(path.getParent());
+    java.nio.file.Files.write(path, cfg.getBytes(java.nio.charset.StandardCharsets.UTF_8));
+  }
+
+
   public int getZookeeperPort() {
     return util.getConfiguration().getInt(HConstants.ZOOKEEPER_CLIENT_PORT, 2181);
   }
 
+  private static void createPrincipalIfAbsent(MiniKdc kdc, File keytab, String principal) throws Exception {
+    try {
+      kdc.createPrincipal(keytab, principal);
+    } catch (org.apache.kerby.kerberos.kerb.KrbException e) {
+      String msg = e.getMessage();
+      if (msg != null && msg.contains("already exists")) {
+        // Principal is already in the KDC; fine to proceed.
+        // (Keys were generated when it was first created.)
+        return;
+      }
+      throw e;
+    }
+  }
+
   public void stop() throws Exception {
     // Remove our custom ConfigurationFactory for future tests
     InstanceResolver.clearSingletons();
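The key change above is the preflight login: the test proves the MiniKDC and keytab work via UGI before HBase starts. A minimal standalone sketch of that same pattern follows; the krb5.conf path, keytab path, and principal are placeholders, not values taken from this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

// Minimal sketch of the keytab preflight, under assumed paths and principal.
public class KerberosLoginPreflight {
  public static void main(String[] args) throws Exception {
    // Point the JVM at the krb5.conf written for the test KDC (placeholder path).
    System.setProperty("java.security.krb5.conf", "/tmp/kdc/krb5.conf");

    Configuration conf = new Configuration();
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);

    // Log in from the keytab and confirm we actually hold Kerberos credentials.
    UserGroupInformation.loginUserFromKeytab("hbase/localhost@EXAMPLE.COM", "/tmp/kdc/test.keytab");
    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
    System.out.println(ugi.getUserName() + " kerberos=" + ugi.hasKerberosCredentials());
  }
}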
