
Commit be6c801

Author: Inigo Goiri

HDFS-14418. Remove redundant super user privilege checks from namenode. Contributed by Ayush Saxena.

1 parent: 2364c7d

File tree: 3 files changed, 55 insertions(+), 4 deletions(-)
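The checks removed here are duplicates: each affected operation remains guarded by an equivalent superuser check elsewhere on the server-side call path, which the new test in TestDistributedFileSystem verifies by expecting "Superuser privilege is required" for every operation when run as a non-superuser. As a rough sketch of the pattern being cleaned up (hypothetical MiniRpcServer/MiniNamesystem classes, not Hadoop source), removing the outer duplicate leaves enforcement to the delegated method:

// Illustrative sketch only: "MiniRpcServer" and "MiniNamesystem" are made-up
// names, not Hadoop source. It shows the pattern this commit removes: the same
// superuser check performed both at the RPC entry point and inside the method
// it delegates to, so dropping the outer call does not loosen access control.
import java.security.AccessControlException;

class MiniNamesystem {
  private final boolean callerIsSuperuser;

  MiniNamesystem(boolean callerIsSuperuser) {
    this.callerIsSuperuser = callerIsSuperuser;
  }

  void checkSuperuserPrivilege() throws AccessControlException {
    if (!callerIsSuperuser) {
      throw new AccessControlException("Superuser privilege is required");
    }
  }

  long rollEditLog() throws AccessControlException {
    checkSuperuserPrivilege(); // the check that actually guards the operation
    return 0L;                 // placeholder for the real edit-log roll
  }
}

class MiniRpcServer {
  private final MiniNamesystem namesystem = new MiniNamesystem(false);

  long rollEditLog() throws AccessControlException {
    // namesystem.checkSuperuserPrivilege();  <- redundant duplicate, removed
    return namesystem.rollEditLog();          // still rejects non-superusers
  }
}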

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

Lines changed: 0 additions & 3 deletions
@@ -7397,7 +7397,6 @@ void createEncryptionZone(final String src, final String keyName,
     Metadata metadata = FSDirEncryptionZoneOp.ensureKeyIsInitialized(dir,
         keyName, src);
     final FSPermissionChecker pc = getPermissionChecker();
-    checkSuperuserPrivilege(pc);
     checkOperation(OperationCategory.WRITE);
     final FileStatus resultingStat;
     writeLock();
@@ -7459,7 +7458,6 @@ BatchedListEntries<EncryptionZone> listEncryptionZones(long prevId)
     boolean success = false;
     checkOperation(OperationCategory.READ);
     final FSPermissionChecker pc = getPermissionChecker();
-    checkSuperuserPrivilege(pc);
     readLock();
     try {
       checkOperation(OperationCategory.READ);
@@ -7497,7 +7495,6 @@ BatchedListEntries<ZoneReencryptionStatus> listReencryptionStatus(
     boolean success = false;
     checkOperation(OperationCategory.READ);
     final FSPermissionChecker pc = getPermissionChecker();
-    checkSuperuserPrivilege(pc);
     readLock();
     try {
       checkOperation(OperationCategory.READ);

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java

Lines changed: 0 additions & 1 deletion
@@ -1331,7 +1331,6 @@ public long getMostRecentCheckpointTxId() throws IOException {
   @Override // NamenodeProtocol
   public CheckpointSignature rollEditLog() throws IOException {
     checkNNStartup();
-    namesystem.checkSuperuserPrivilege();
     return namesystem.rollEditLog();
   }
 

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java

Lines changed: 55 additions & 0 deletions
@@ -97,13 +97,15 @@
 import org.apache.hadoop.hdfs.server.namenode.ErasureCodingPolicyManager;
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
 import org.apache.hadoop.io.erasurecode.ECSchema;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.ScriptBasedMapping;
 import org.apache.hadoop.net.StaticMapping;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.test.Whitebox;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Time;
@@ -1804,6 +1806,59 @@ public void testDFSDataOutputStreamBuilderForAppend() throws IOException {
     }
   }
 
+  @Test
+  public void testSuperUserPrivilege() throws Exception {
+    HdfsConfiguration conf = new HdfsConfiguration();
+    File tmpDir = GenericTestUtils.getTestDir(UUID.randomUUID().toString());
+    final Path jksPath = new Path(tmpDir.toString(), "test.jks");
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
+        JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri());
+
+    try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()) {
+      cluster.waitActive();
+      final DistributedFileSystem dfs = cluster.getFileSystem();
+      Path dir = new Path("/testPrivilege");
+      dfs.mkdirs(dir);
+
+      final KeyProvider provider =
+          cluster.getNameNode().getNamesystem().getProvider();
+      final KeyProvider.Options options = KeyProvider.options(conf);
+      provider.createKey("key", options);
+      provider.flush();
+
+      // Create a non-super user.
+      UserGroupInformation user = UserGroupInformation.createUserForTesting(
+          "Non_SuperUser", new String[] {"Non_SuperGroup"});
+
+      DistributedFileSystem userfs = (DistributedFileSystem) user.doAs(
+          (PrivilegedExceptionAction<FileSystem>) () -> FileSystem.get(conf));
+
+      LambdaTestUtils.intercept(AccessControlException.class,
+          "Superuser privilege is required",
+          () -> userfs.createEncryptionZone(dir, "key"));
+
+      RemoteException re = LambdaTestUtils.intercept(RemoteException.class,
+          "Superuser privilege is required",
+          () -> userfs.listEncryptionZones().hasNext());
+      assertTrue(re.unwrapRemoteException() instanceof AccessControlException);
+
+      re = LambdaTestUtils.intercept(RemoteException.class,
+          "Superuser privilege is required",
+          () -> userfs.listReencryptionStatus().hasNext());
+      assertTrue(re.unwrapRemoteException() instanceof AccessControlException);
+
+      LambdaTestUtils.intercept(AccessControlException.class,
+          "Superuser privilege is required",
+          () -> user.doAs(new PrivilegedExceptionAction<Void>() {
+            @Override
+            public Void run() throws Exception {
+              cluster.getNameNode().getRpcServer().rollEditLog();
+              return null;
+            }
+          }));
+    }
+  }
+
   @Test
   public void testRemoveErasureCodingPolicy() throws Exception {
     Configuration conf = getTestConfiguration();
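A note on the assertion style above: LambdaTestUtils.intercept evaluates the lambda, fails the test unless an exception of the given type whose text contains the given message is thrown, and returns that exception, which is why the test can call unwrapRemoteException() on the result to confirm the server-side cause was an AccessControlException. A small self-contained sketch of that pattern, with a hypothetical expectSuperuserFailure helper that is not part of this patch:

import java.util.concurrent.Callable;

import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.test.LambdaTestUtils;

import static org.junit.Assert.assertTrue;

// Hypothetical helper showing the assertion pattern used by the new test:
// run an operation as a non-superuser, expect a RemoteException whose text
// mentions the missing privilege, then unwrap it to verify the real cause.
public final class SuperuserAssert {
  private SuperuserAssert() {
  }

  public static <T> void expectSuperuserFailure(Callable<T> op)
      throws Exception {
    RemoteException re = LambdaTestUtils.intercept(RemoteException.class,
        "Superuser privilege is required", op);
    // RemoteException carries the server-side exception by class name;
    // unwrapRemoteException() rebuilds it so its type can be asserted.
    assertTrue(
        re.unwrapRemoteException() instanceof AccessControlException);
  }
}

For example, expectSuperuserFailure(() -> userfs.listEncryptionZones().hasNext()) would cover the same ground as the two RemoteException cases in the test.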
