Commit 24f5f70

HADOOP-18778. Fixes failing tests when CSE is enabled. (#5763)
Contributed By: Ahmar Suhail <[email protected]>
1 parent 068d8c7 commit 24f5f70

10 files changed (+34, -31 lines)

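The recurring fix in the test diffs below is a call to `skipIfClientSideEncryption()` at the top of tests that cannot work against a CSE-enabled bucket: client-side encryption changes the bytes and object lengths stored in S3, so tests that assert raw S3 behaviour, prefetching internals, or unencrypted buckets must be skipped. The following is only a minimal sketch of what such a guard can look like; the class, config lookup, and implementation here are assumptions, not the actual hadoop-aws helper:

```java
import org.apache.hadoop.conf.Configuration;
import org.junit.Assume;

/**
 * Hypothetical sketch of a CSE skip guard; the real helper used by
 * these tests lives in the S3A test utilities and may differ.
 */
public final class CseSkipSketch {

  /** S3A encryption algorithm option; "CSE-KMS" selects client-side encryption. */
  private static final String S3_ENCRYPTION_ALGORITHM =
      "fs.s3a.encryption.algorithm";

  private CseSkipSketch() {
  }

  /** Skip the calling JUnit test when client-side encryption is configured. */
  public static void skipIfClientSideEncryption(Configuration conf) {
    String algorithm = conf.getTrimmed(S3_ENCRYPTION_ALGORITHM, "");
    // JUnit's Assume aborts the test as "skipped" rather than failed.
    Assume.assumeFalse("Skipping as client-side encryption is enabled",
        algorithm.startsWith("CSE"));
  }
}
```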

hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java

Lines changed: 2 additions & 2 deletions

@@ -213,7 +213,7 @@
 import static org.apache.hadoop.fs.s3a.S3AUtils.*;
 import static org.apache.hadoop.fs.s3a.Statistic.*;
 import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.INITIALIZE_SPAN;
-import static org.apache.hadoop.fs.s3a.auth.RolePolicies.STATEMENT_ALLOW_SSE_KMS_RW;
+import static org.apache.hadoop.fs.s3a.auth.RolePolicies.STATEMENT_ALLOW_KMS_RW;
 import static org.apache.hadoop.fs.s3a.auth.RolePolicies.allowS3Operations;
 import static org.apache.hadoop.fs.s3a.auth.delegation.S3ADelegationTokens.TokenIssuingPolicy.NoTokensAvailable;
 import static org.apache.hadoop.fs.s3a.auth.delegation.S3ADelegationTokens.hasDelegationTokenBinding;

@@ -4222,7 +4222,7 @@ public List<RoleModel.Statement> listAWSPolicyRules(
     // no attempt is made to qualify KMS access; there's no
     // way to predict read keys, and not worried about granting
     // too much encryption access.
-    statements.add(STATEMENT_ALLOW_SSE_KMS_RW);
+    statements.add(STATEMENT_ALLOW_KMS_RW);

     return statements;
   }

hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java

Lines changed: 1 addition & 1 deletion

@@ -80,7 +80,7 @@ private RolePolicies() {
    * Statement to allow KMS R/W access access, so full use of
    * SSE-KMS.
    */
-  public static final Statement STATEMENT_ALLOW_SSE_KMS_RW =
+  public static final Statement STATEMENT_ALLOW_KMS_RW =
       statement(true, KMS_ALL_KEYS, KMS_ALL_OPERATIONS);

   /**
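The rename reflects that full KMS read/write access is needed for client-side encryption (CSE-KMS) as well as server-side SSE-KMS, so the SSE-specific name no longer described the statement's use. As a usage sketch mirroring the call pattern in the test diffs below (the enclosing test-class setup is elided, so this is illustrative rather than compilable on its own):

```java
// Bind an assumed-role policy that pairs the KMS statement with broad
// S3 permissions, as the updated tests do; createAssumedRoleConfig(),
// bindRolePolicyStatements() and the statement constants are the S3A
// test/auth helpers visible elsewhere in this commit.
Configuration conf = createAssumedRoleConfig();
bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
    statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS));
S3AFileSystem roleFS = (S3AFileSystem) methodPath().getFileSystem(conf);
```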

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingCacheFiles.java

Lines changed: 1 addition & 0 deletions

@@ -105,6 +105,7 @@ public synchronized void teardown() throws Exception {
   @Test
   public void testCacheFileExistence() throws Throwable {
     describe("Verify that FS cache files exist on local FS");
+    skipIfClientSideEncryption();

     try (FSDataInputStream in = fs.open(testFile)) {
       byte[] buffer = new byte[prefetchBlockSize];

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingInputStream.java

Lines changed: 3 additions & 0 deletions

@@ -106,6 +106,7 @@ private static int calculateNumBlocks(long largeFileSize, int blockSize) {
   @Test
   public void testReadLargeFileFully() throws Throwable {
     describe("read a large file fully, uses S3ACachingInputStream");
+    skipIfClientSideEncryption();
     IOStatistics ioStats;
     createLargeFile();

@@ -139,6 +140,7 @@ public void testReadLargeFileFully() throws Throwable {
   public void testReadLargeFileFullyLazySeek() throws Throwable {
     describe("read a large file using readFully(position,buffer,offset,length),"
         + " uses S3ACachingInputStream");
+    skipIfClientSideEncryption();
     IOStatistics ioStats;
     createLargeFile();

@@ -170,6 +172,7 @@ public void testReadLargeFileFullyLazySeek() throws Throwable {
   @Test
   public void testRandomReadLargeFile() throws Throwable {
     describe("random read on a large file, uses S3ACachingInputStream");
+    skipIfClientSideEncryption();
     IOStatistics ioStats;
     createLargeFile();

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ARequesterPays.java

Lines changed: 1 addition & 1 deletion

@@ -59,7 +59,7 @@ protected Configuration createConfiguration() {
   @Test
   public void testRequesterPaysOptionSuccess() throws Throwable {
     describe("Test requester pays enabled case by reading last then first byte");
-
+    skipIfClientSideEncryption();
     Configuration conf = this.createConfiguration();
     conf.setBoolean(ALLOW_REQUESTER_PAYS, true);
     // Enable bucket exists check, the first failure point people may encounter

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java

Lines changed: 6 additions & 12 deletions

@@ -426,8 +426,7 @@ public void testAssumeRolePoliciesOverrideRolePerms() throws Throwable {
     bindRolePolicy(conf,
         policy(
             statement(false, S3_ALL_BUCKETS, S3_GET_OBJECT_TORRENT),
-            ALLOW_S3_GET_BUCKET_LOCATION,
-            STATEMENT_ALLOW_SSE_KMS_RW));
+            ALLOW_S3_GET_BUCKET_LOCATION, STATEMENT_ALLOW_KMS_RW));
     Path path = path("testAssumeRoleStillIncludesRolePerms");
     roleFS = (S3AFileSystem) path.getFileSystem(conf);
     assertTouchForbidden(roleFS, path);

@@ -447,8 +446,7 @@ public void testReadOnlyOperations() throws Throwable {
     bindRolePolicy(conf,
         policy(
             statement(false, S3_ALL_BUCKETS, S3_PATH_WRITE_OPERATIONS),
-            STATEMENT_ALL_S3,
-            STATEMENT_ALLOW_SSE_KMS_READ));
+            STATEMENT_ALL_S3, STATEMENT_ALLOW_KMS_RW));
     Path path = methodPath();
     roleFS = (S3AFileSystem) path.getFileSystem(conf);
     // list the root path, expect happy

@@ -495,8 +493,7 @@ public void testRestrictedWriteSubdir() throws Throwable {
     Configuration conf = createAssumedRoleConfig();

     bindRolePolicyStatements(conf,
-        STATEMENT_ALL_BUCKET_READ_ACCESS,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+        STATEMENT_ALL_BUCKET_READ_ACCESS, STATEMENT_ALLOW_KMS_RW,
         new Statement(Effects.Allow)
           .addActions(S3_ALL_OPERATIONS)
           .addResources(directory(restrictedDir)));

@@ -563,8 +560,7 @@ public void testRestrictedCommitActions() throws Throwable {
     fs.delete(basePath, true);
     fs.mkdirs(readOnlyDir);

-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,
         new Statement(Effects.Allow)
           .addActions(S3_PATH_RW_OPERATIONS)

@@ -714,8 +710,7 @@ public void executePartialDelete(final Configuration conf,
     S3AFileSystem fs = getFileSystem();
     fs.delete(destDir, true);

-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS),
         new Statement(Effects.Deny)
           .addActions(S3_PATH_WRITE_OPERATIONS)

@@ -746,8 +741,7 @@ public void testBucketLocationForbidden() throws Throwable {
     describe("Restrict role to read only");
     Configuration conf = createAssumedRoleConfig();

-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS),
         statement(false, S3_ALL_BUCKETS, S3_GET_BUCKET_LOCATION));
     Path path = methodPath();

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumedRoleCommitOperations.java

Lines changed: 1 addition & 2 deletions

@@ -61,8 +61,7 @@ public void setup() throws Exception {
     restrictedDir = super.path("restricted");
     Configuration conf = newAssumedRoleConfig(getConfiguration(),
         getAssumedRoleARN());
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_BUCKET_READ_OPERATIONS),
         new RoleModel.Statement(RoleModel.Effects.Allow)
           .addActions(S3_PATH_RW_OPERATIONS)

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestRestrictedReadAccess.java

Lines changed: 1 addition & 2 deletions

@@ -260,8 +260,7 @@ public void initNoReadAccess() throws Throwable {
     // it still has write access, which can be explored in the final
     // step to delete files and directories.
     roleConfig = createAssumedRoleConfig();
-    bindRolePolicyStatements(roleConfig,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(roleConfig, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS),
         new Statement(Effects.Deny)
           .addActions(S3_ALL_GET)

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestPartialRenamesDeletes.java

Lines changed: 12 additions & 7 deletions

@@ -56,6 +56,7 @@
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.Effects;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.Statement;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.directory;
+import static org.apache.hadoop.fs.s3a.auth.RoleModel.resource;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.statement;
 import static org.apache.hadoop.fs.s3a.auth.RolePolicies.*;
 import static org.apache.hadoop.fs.s3a.auth.RoleTestUtils.bindRolePolicyStatements;

@@ -144,6 +145,11 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
    */
   private Path writableDir;

+  /**
+   * Instruction file created when using CSE, required to be added to policies.
+   */
+  private Path writableDirInstructionFile;
+
   /**
    * A directory to which restricted roles have only read access.
    */

@@ -216,6 +222,7 @@ public void setup() throws Exception {
     basePath = uniquePath();
     readOnlyDir = new Path(basePath, "readonlyDir");
     writableDir = new Path(basePath, "writableDir");
+    writableDirInstructionFile = new Path(basePath, "writableDir.instruction");
     readOnlyChild = new Path(readOnlyDir, "child");
     noReadDir = new Path(basePath, "noReadDir");
     // the full FS

@@ -225,8 +232,7 @@ public void setup() throws Exception {

     // create the baseline assumed role
     assumedRoleConfig = createAssumedRoleConfig();
-    bindRolePolicyStatements(assumedRoleConfig,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(assumedRoleConfig, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS, // root: r-x
         new Statement(Effects.Allow) // dest: rwx
           .addActions(S3_PATH_RW_OPERATIONS)

@@ -365,13 +371,13 @@ public void testMultiDeleteOptionPropagated() throws Throwable {
   public void testRenameParentPathNotWriteable() throws Throwable {
     describe("rename with parent paths not writeable; multi=%s", multiDelete);
     final Configuration conf = createAssumedRoleConfig();
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,
         new Statement(Effects.Allow)
           .addActions(S3_PATH_RW_OPERATIONS)
           .addResources(directory(readOnlyDir))
-          .addResources(directory(writableDir)));
+          .addResources(directory(writableDir))
+          .addResources(resource(writableDirInstructionFile, false, false)));
     roleFS = (S3AFileSystem) readOnlyDir.getFileSystem(conf);

     S3AFileSystem fs = getFileSystem();

@@ -733,8 +739,7 @@ public void testRenamePermissionRequirements() throws Throwable {
     // s3:DeleteObjectVersion permission, and attempt rename
     // and then delete.
     Configuration roleConfig = createAssumedRoleConfig();
-    bindRolePolicyStatements(roleConfig,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(roleConfig, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS, // root: r-x
         new Statement(Effects.Allow) // dest: rwx
           .addActions(S3_PATH_RW_OPERATIONS)
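The CSE-specific change in this file is worth spelling out: per the new field's javadoc, client-side encryption can keep its encryption materials in a separate instruction file, so writing the object `writableDir` also produces a sibling object `writableDir.instruction`, and a policy scoped to `directory(writableDir)` does not cover a sibling. The test therefore names the instruction file explicitly, as in the diff above:

```java
// Excerpt of the policy construction from the diff: the instruction
// file sits beside the directory, so it needs its own resource entry.
// resource() comes from RoleModel (import added above); both boolean
// flags false presumably address the single object with no wildcard.
new Statement(Effects.Allow)
    .addActions(S3_PATH_RW_OPERATIONS)
    .addResources(directory(readOnlyDir))
    .addResources(directory(writableDir))
    .addResources(resource(writableDirInstructionFile, false, false));
```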

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardTool.java

Lines changed: 6 additions & 4 deletions

@@ -70,6 +70,7 @@ public void testLandsatBucketRequireGuarded() throws Throwable {

   @Test
   public void testLandsatBucketRequireUnencrypted() throws Throwable {
+    skipIfClientSideEncryption();
     run(BucketInfo.NAME,
         "-" + BucketInfo.ENCRYPTION_FLAG, "none",
         getLandsatCSVFile(getConfiguration()));

@@ -178,8 +179,9 @@ public void testUploadListByAge() throws Throwable {
     // least a second old
     describe("Sleeping 1 second then confirming upload still there");
     Thread.sleep(1000);
-    LambdaTestUtils.eventually(5000, 1000,
-        () -> { assertNumUploadsAge(path, 1, 1); });
+    LambdaTestUtils.eventually(5000, 1000, () -> {
+      assertNumUploadsAge(path, 1, 1);
+    });

     // 7. Assert deletion works when age filter matches
     describe("Doing aged deletion");

@@ -231,8 +233,8 @@ private void assertNumDeleted(S3AFileSystem fs, Path path, int numDeleted)
    * search all parts
    * @throws Exception on failure
    */
-  private void uploadCommandAssertCount(S3AFileSystem fs, String options[],
-      Path path, int numUploads, int ageSeconds)
+  private void uploadCommandAssertCount(S3AFileSystem fs, String[] options, Path path,
+      int numUploads, int ageSeconds)
       throws Exception {
     List<String> allOptions = new ArrayList<>();
     List<String> output = new ArrayList<>();
