 import org.apache.hadoop.hdfs.server.namenode.ErasureCodingPolicyManager;
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
 import org.apache.hadoop.io.erasurecode.ECSchema;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.ScriptBasedMapping;
 import org.apache.hadoop.net.StaticMapping;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.test.Whitebox;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Time;
@@ -1804,6 +1806,59 @@ public void testDFSDataOutputStreamBuilderForAppend() throws IOException {
     }
   }
 
+  @Test
+  public void testSuperUserPrivilege() throws Exception {
+    HdfsConfiguration conf = new HdfsConfiguration();
+    File tmpDir = GenericTestUtils.getTestDir(UUID.randomUUID().toString());
+    final Path jksPath = new Path(tmpDir.toString(), "test.jks");
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
+        JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri());
+
+    try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()) {
+      cluster.waitActive();
+      final DistributedFileSystem dfs = cluster.getFileSystem();
+      Path dir = new Path("/testPrivilege");
+      dfs.mkdirs(dir);
+
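+      // Create an encryption key in the cluster's KeyProvider so it can
+      // later back an encryption zone.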
+      final KeyProvider provider =
+          cluster.getNameNode().getNamesystem().getProvider();
+      final KeyProvider.Options options = KeyProvider.options(conf);
+      provider.createKey("key", options);
+      provider.flush();
+
+      // Create a non-super user.
+      UserGroupInformation user = UserGroupInformation.createUserForTesting(
+          "Non_SuperUser", new String[] {"Non_SuperGroup"});
+
+      DistributedFileSystem userfs = (DistributedFileSystem) user.doAs(
+          (PrivilegedExceptionAction<FileSystem>) () -> FileSystem.get(conf));
+
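+      // Each of the following operations requires superuser privilege and
+      // should therefore fail for the non-super user.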
+      LambdaTestUtils.intercept(AccessControlException.class,
+          "Superuser privilege is required",
+          () -> userfs.createEncryptionZone(dir, "key"));
+
+      RemoteException re = LambdaTestUtils.intercept(RemoteException.class,
+          "Superuser privilege is required",
+          () -> userfs.listEncryptionZones().hasNext());
+      assertTrue(re.unwrapRemoteException() instanceof AccessControlException);
+
+      re = LambdaTestUtils.intercept(RemoteException.class,
+          "Superuser privilege is required",
+          () -> userfs.listReencryptionStatus().hasNext());
+      assertTrue(re.unwrapRemoteException() instanceof AccessControlException);
+
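+      // Rolling the edit log through the NameNode RPC server is also a
+      // superuser-only operation.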
+      LambdaTestUtils.intercept(AccessControlException.class,
+          "Superuser privilege is required",
+          () -> user.doAs(new PrivilegedExceptionAction<Void>() {
+            @Override
+            public Void run() throws Exception {
+              cluster.getNameNode().getRpcServer().rollEditLog();
+              return null;
+            }
+          }));
+    }
+  }
+
   @Test
   public void testRemoveErasureCodingPolicy() throws Exception {
     Configuration conf = getTestConfiguration();