
Commit 4964b5a

HADOOP-18980. S3A credential provider remapping: make extensible
1 parent 7a7db7f commit 4964b5a

7 files changed: +165 −2 lines changed

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

Lines changed: 19 additions & 1 deletion
@@ -2339,7 +2339,25 @@ public Collection<String> getTrimmedStringCollection(String name) {
     }
     return StringUtils.getTrimmedStringCollection(valueString);
   }
-
+
+  /**
+   * Get the equals-sign (=) delimited key-value pairs of the <code>name</code> property as
+   * a <code>Map</code> of <code>String</code>s, trimmed of leading and trailing whitespace,
+   * after splitting the value by comma and newline separators.
+   * If no such property is specified then an empty <code>Map</code> is returned.
+   *
+   * @param name property name.
+   * @return property value as a <code>Map</code> of <code>String</code>s, or an empty
+   *         <code>Map</code>.
+   */
+  public Map<String, String> getTrimmedStringCollectionSplitByEquals(String name) {
+    String valueString = get(name);
+    if (null == valueString) {
+      return new HashMap<>();
+    }
+    return StringUtils.getTrimmedStringCollectionSplitByEquals(valueString);
+  }
+
   /**
    * Get the comma delimited values of the <code>name</code> property as
    * an array of <code>String</code>s, trimmed of the leading and trailing whitespace.
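A minimal sketch of how the new accessor behaves; the property name `example.kv.pairs` is purely illustrative and not part of this change. The value is split on commas and newlines, each entry is then split on `=`, and anything that does not yield exactly one key and one value is silently dropped.

```java
import java.util.Map;

import org.apache.hadoop.conf.Configuration;

public class TrimmedPairsSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    // Whitespace and newlines around keys, values and separators are tolerated.
    conf.set("example.kv.pairs",
        " k1 = v1 ,\n k2=v2 , not-a-pair ");

    Map<String, String> pairs =
        conf.getTrimmedStringCollectionSplitByEquals("example.kv.pairs");

    // "not-a-pair" has no '=' so it is skipped; only k1 and k2 survive.
    // HashMap iteration order is not guaranteed.
    System.out.println(pairs);           // e.g. {k1=v1, k2=v2}
    System.out.println(pairs.get("k2")); // v2
  }
}
```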

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java

Lines changed: 39 additions & 1 deletion
@@ -25,6 +25,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
@@ -479,7 +480,28 @@ public static Collection<String> getTrimmedStringCollection(String str){
     set.remove("");
     return set;
   }
-
+
+  /**
+   * Splits an "=" separated key-value <code>String</code>, trimming leading and
+   * trailing whitespace on each value, after splitting by comma and newline separators.
+   *
+   * @param str a comma separated <code>String</code> of key-value pairs, may be null
+   * @return a <code>Map</code> of <code>String</code> keys and values, empty
+   * <code>Map</code> if null String input.
+   */
+  public static Map<String, String> getTrimmedStringCollectionSplitByEquals(
+      String str) {
+    String[] trimmedList = getTrimmedStrings(str);
+    Map<String, String> pairs = new HashMap<>();
+    for (String s : trimmedList) {
+      String[] splitByKeyVal = getTrimmedStringsSplitByEquals(s);
+      if (splitByKeyVal.length == 2) {
+        pairs.put(splitByKeyVal[0], splitByKeyVal[1]);
+      }
+    }
+    return pairs;
+  }
+
   /**
    * Splits a comma or newline separated value <code>String</code>, trimming
    * leading and trailing whitespace on each value.
@@ -497,6 +519,22 @@ public static String[] getTrimmedStrings(String str){
     return str.trim().split("\\s*[,\n]\\s*");
   }
 
+  /**
+   * Splits an "=" separated value <code>String</code>, trimming
+   * leading and trailing whitespace on each value.
+   *
+   * @param str an "=" separated <code>String</code> with values,
+   * may be null
+   * @return an array of <code>String</code> values, empty array if null String
+   * input
+   */
+  public static String[] getTrimmedStringsSplitByEquals(String str){
+    if (null == str || str.trim().isEmpty()) {
+      return emptyStringArray;
+    }
+    return str.trim().split("\\s*=\\s*");
+  }
+
   final public static String[] emptyStringArray = {};
   final public static char COMMA = ',';
   final public static String COMMA_STR = ",";
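A quick illustration of the two static helpers; the alias and class-name strings below are made-up values for the example. `getTrimmedStringsSplitByEquals` returns an empty array for null or blank input, and `getTrimmedStringCollectionSplitByEquals` keeps only entries that split into exactly one key and one value.

```java
import java.util.Arrays;
import java.util.Map;

import org.apache.hadoop.util.StringUtils;

public class StringUtilsSplitSketch {
  public static void main(String[] args) {
    // Null and blank inputs yield an empty array rather than throwing.
    System.out.println(
        Arrays.toString(StringUtils.getTrimmedStringsSplitByEquals(null)));  // []
    System.out.println(
        Arrays.toString(StringUtils.getTrimmedStringsSplitByEquals("  ")));  // []

    // A single pair is split and trimmed around the '='.
    System.out.println(
        Arrays.toString(StringUtils.getTrimmedStringsSplitByEquals(" a = b "))); // [a, b]

    // Comma/newline separated pairs become a Map; entries without exactly
    // one '=' (such as "oops") are dropped.
    Map<String, String> pairs = StringUtils.getTrimmedStringCollectionSplitByEquals(
        "alias1 = com.example.ProviderA ,\nalias2=com.example.ProviderB, oops");
    System.out.println(pairs.size()); // 2
  }
}
```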

hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java

Lines changed: 4 additions & 0 deletions
@@ -68,6 +68,10 @@ private Constants() {
   public static final String AWS_CREDENTIALS_PROVIDER =
       "fs.s3a.aws.credentials.provider";
 
+  // aws credentials providers mapping with key/value pairs
+  public static final String AWS_CREDENTIALS_PROVIDER_MAPPING =
+      "fs.s3a.aws.credentials.provider.mapping";
+
   /**
    * Extra set of security credentials which will be prepended to that
    * set in {@code "hadoop.security.credential.provider.path"}.

hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/CredentialProviderListFactory.java

Lines changed: 8 additions & 0 deletions
@@ -51,6 +51,7 @@
 import org.apache.hadoop.fs.store.LogExactlyOnce;
 
 import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER;
+import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER_MAPPING;
 import static org.apache.hadoop.fs.s3a.adapter.AwsV1BindingSupport.isAwsV1SdkAvailable;
 
 /**
@@ -216,6 +217,8 @@ public static AWSCredentialProviderList buildAWSProviderList(
         key,
         defaultValues.toArray(new Class[defaultValues.size()]));
 
+    Map<String, String> awsCredsMappedClasses =
+        conf.getTrimmedStringCollectionSplitByEquals(AWS_CREDENTIALS_PROVIDER_MAPPING);
     Map<String, String> v1v2CredentialProviderMap = V1_V2_CREDENTIAL_PROVIDER_MAP;
     final Set<String> forbiddenClassnames =
         forbidden.stream().map(c -> c.getName()).collect(Collectors.toSet());
@@ -233,6 +236,11 @@ public static AWSCredentialProviderList buildAWSProviderList(
             key, className, mapped);
         className = mapped;
       }
+      if (awsCredsMappedClasses != null && awsCredsMappedClasses.containsKey(className)) {
+        final String mapped = awsCredsMappedClasses.get(className);
+        LOG_REMAPPED_ENTRY.info("Credential entry {} is mapped to {}", className, mapped);
+        className = mapped;
+      }
       // now scan the forbidden list. doing this after any mappings ensures the v1 names
       // are also blocked
       if (forbiddenClassnames.contains(className)) {
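To see the remapping end to end, here is a hedged sketch of how a deployment might use the new property; the alias name and the choice of AnonymousAWSCredentialsProvider are illustrative assumptions, not part of the commit. When buildAWSProviderList walks the configured provider entries, any entry that matches a key in fs.s3a.aws.credentials.provider.mapping is replaced with the mapped class name before instantiation.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.Constants;

public class CredentialRemapSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // "my.company.creds" is a hypothetical alias, not a real class name.
    conf.set(Constants.AWS_CREDENTIALS_PROVIDER, "my.company.creds");

    // The alias is resolved through the mapping when S3A builds its
    // credential provider list, so the filesystem ends up instantiating
    // AnonymousAWSCredentialsProvider (illustrative choice only).
    conf.set(Constants.AWS_CREDENTIALS_PROVIDER_MAPPING,
        "my.company.creds=org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider");

    // Any FileSystem created from this configuration, e.g.
    //   FileSystem fs = FileSystem.get(new URI("s3a://example-bucket/"), conf);
    // would then authenticate through the remapped provider.
  }
}
```

Because this lookup happens after the built-in v1-to-v2 remapping and before the forbidden-class check, mapped aliases are subject to the same blocking rules as explicitly named classes.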

hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md

Lines changed: 22 additions & 0 deletions
@@ -275,6 +275,28 @@ For more information see [Upcoming upgrade to AWS Java SDK V2](./aws_sdk_upgrade
     credentials.
   </description>
 </property>
+
+<property>
+  <name>fs.s3a.aws.credentials.provider.mapping</name>
+  <description>
+    Comma-separated key-value pairs of mapped credential providers, with key and
+    value separated by the equals operator (=). A key can then be used in the
+    fs.s3a.aws.credentials.provider config, and it will be translated into the
+    credential provider class given as the value of that key-value pair.
+
+    Example:
+    com.amazonaws.auth.AnonymousAWSCredentials=org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider,
+    com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper=org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider,
+    com.amazonaws.auth.InstanceProfileCredentialsProvider=org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider
+
+    With the above key-value pairs, if fs.s3a.aws.credentials.provider specifies
+    com.amazonaws.auth.AnonymousAWSCredentials, it will be remapped to
+    org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider by S3A while
+    preparing the AWS credential provider list for any S3 access.
+    The same credential provider list can therefore be used with both v1 and v2
+    SDK clients.
+  </description>
+</property>
 ```
 
 ### <a name="auth_env_vars"></a> Authenticating via the AWS Environment Variables

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java

Lines changed: 23 additions & 0 deletions
@@ -108,6 +108,19 @@ public void testBadCredentialsConstructor() throws Exception {
     }
   }
 
+  @Test
+  public void testBadCredentialsConstructorWithRemap() throws Exception {
+    Configuration conf = createConf("aws.test.map1");
+    conf.set(AWS_CREDENTIALS_PROVIDER_MAPPING,
+        "aws.test.map1=" + BadCredentialsProviderConstructor.class.getName());
+    final InstantiationIOException ex =
+        intercept(InstantiationIOException.class, CONSTRUCTOR_EXCEPTION, () ->
+            createFailingFS(conf));
+    if (InstantiationIOException.Kind.UnsupportedConstructor != ex.getKind()) {
+      throw ex;
+    }
+  }
+
   /**
    * Create a configuration bonded to the given provider classname.
    * @param provider provider to bond to
@@ -169,6 +182,16 @@ public void testBadCredentials() throws Exception {
         createFailingFS(conf));
   }
 
+  @Test
+  public void testBadCredentialsWithRemap() throws Exception {
+    Configuration conf = createConf("aws.test.map.key");
+    conf.set(AWS_CREDENTIALS_PROVIDER_MAPPING,
+        "aws.test.map.key=" + BadCredentialsProvider.class.getName());
+    intercept(AccessDeniedException.class,
+        "",
+        () -> createFailingFS(conf));
+  }
+
   /**
    * Test using the anonymous credential provider with the public csv
    * test file; if the test file path is unset then it will be skipped.

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java

Lines changed: 50 additions & 0 deletions
@@ -25,6 +25,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -47,14 +48,17 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.s3a.auth.AbstractSessionCredentialsProvider;
 import org.apache.hadoop.fs.s3a.auth.AssumedRoleCredentialProvider;
+import org.apache.hadoop.fs.s3a.auth.CredentialProviderListFactory;
 import org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider;
 import org.apache.hadoop.fs.s3a.auth.NoAuthWithAWSException;
+import org.apache.hadoop.fs.s3a.auth.delegation.CountInvocationsProvider;
 import org.apache.hadoop.fs.s3a.impl.InstantiationIOException;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.util.Sets;
 
 import static org.apache.hadoop.fs.s3a.Constants.ASSUMED_ROLE_CREDENTIALS_PROVIDER;
 import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER;
+import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER_MAPPING;
 import static org.apache.hadoop.fs.s3a.S3ATestConstants.DEFAULT_CSVTEST_FILE;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.authenticationContains;
 import static org.apache.hadoop.fs.s3a.S3ATestUtils.buildClassListString;
@@ -206,6 +210,52 @@ public void testFallbackToDefaults() throws Throwable {
     assertTrue("empty credentials", credentials.size() > 0);
   }
 
+  @Test
+  public void testAssumedRoleWithRemap() throws Throwable {
+    Configuration conf = new Configuration(false);
+    conf.set(ASSUMED_ROLE_CREDENTIALS_PROVIDER,
+        "custom.assume.role.key1,custom.assume.role.key2,custom.assume.role.key3");
+    conf.set(AWS_CREDENTIALS_PROVIDER_MAPPING,
+        "custom.assume.role.key1="
+            + CredentialProviderListFactory.ENVIRONMENT_CREDENTIALS_V2
+            + " ,custom.assume.role.key2 ="
+            + CountInvocationsProvider.NAME
+            + ", custom.assume.role.key3= "
+            + CredentialProviderListFactory.PROFILE_CREDENTIALS_V1);
+    final AWSCredentialProviderList credentials =
+        buildAWSProviderList(
+            new URI("s3a://bucket1"),
+            conf,
+            ASSUMED_ROLE_CREDENTIALS_PROVIDER,
+            new ArrayList<>(),
+            new HashSet<>());
+    assertEquals("Credentials not matching", 3, credentials.size());
+  }
+
+  @Test
+  public void testAwsCredentialProvidersWithRemap() throws Throwable {
+    Configuration conf = new Configuration(false);
+    conf.set(AWS_CREDENTIALS_PROVIDER,
+        "custom.aws.creds.key1,custom.aws.creds.key2,custom.aws.creds.key3,custom.aws.creds.key4");
+    conf.set(AWS_CREDENTIALS_PROVIDER_MAPPING,
+        "custom.aws.creds.key1="
+            + CredentialProviderListFactory.ENVIRONMENT_CREDENTIALS_V2
+            + " ,\ncustom.aws.creds.key2="
+            + CountInvocationsProvider.NAME
+            + "\n, custom.aws.creds.key3="
+            + CredentialProviderListFactory.PROFILE_CREDENTIALS_V1
+            + ",custom.aws.creds.key4 = "
+            + CredentialProviderListFactory.PROFILE_CREDENTIALS_V2);
+    final AWSCredentialProviderList credentials =
+        buildAWSProviderList(
+            new URI("s3a://bucket1"),
+            conf,
+            AWS_CREDENTIALS_PROVIDER,
+            new ArrayList<>(),
+            new HashSet<>());
+    assertEquals("Credentials not matching", 4, credentials.size());
+  }
+
   @Test
   public void testProviderConstructor() throws Throwable {
     final AWSCredentialProviderList list = new AWSCredentialProviderList("name",
