Skip to content

Commit 3612f59

Browse files
Merge branch 'trunk' into HDDS-1884
2 parents 8c206eb + 3ac0f3a commit 3612f59

File tree

110 files changed

+2854
-498
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

110 files changed

+2854
-498
lines changed

LICENSE.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -667,7 +667,7 @@ spin.js v2.3.2
667667
Azure Data Lake Store - Java client SDK 2.0.11
668668
JCodings 1.0.8
669669
Joni 2.1.2
670-
Mockito 2.23.4
670+
Mockito 2.28.2
671671
JUL to SLF4J bridge 1.7.25
672672
SLF4J API Module 1.7.25
673673
SLF4J LOG4J-12 Binding 1.7.25

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,25 @@ public class CommonConfigurationKeysPublic {
7272
public static final String FS_DU_INTERVAL_KEY = "fs.du.interval";
7373
/** Default value for FS_DU_INTERVAL_KEY */
7474
public static final long FS_DU_INTERVAL_DEFAULT = 600000;
75+
76+
/**
77+
* @see
78+
* <a href="{@docRoot}/../hadoop-project-dist/hadoop-common/core-default.xml">
79+
* core-default.xml</a>
80+
*/
81+
public static final String FS_GETSPACEUSED_CLASSNAME =
82+
"fs.getspaceused.classname";
83+
84+
/**
85+
* @see
86+
* <a href="{@docRoot}/../hadoop-project-dist/hadoop-common/core-default.xml">
87+
* core-default.xml</a>
88+
*/
89+
public static final String FS_GETSPACEUSED_JITTER_KEY =
90+
"fs.getspaceused.jitterMillis";
91+
/** Default value for FS_GETSPACEUSED_JITTER_KEY */
92+
public static final long FS_GETSPACEUSED_JITTER_DEFAULT = 60000;
93+
7594
/**
7695
* @see
7796
* <a href="{@docRoot}/../hadoop-project-dist/hadoop-common/core-default.xml">

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GetSpaceUsed.java

Lines changed: 19 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
import java.io.IOException;
2727
import java.lang.reflect.Constructor;
2828
import java.lang.reflect.InvocationTargetException;
29-
import java.util.concurrent.TimeUnit;
3029

3130
public interface GetSpaceUsed {
3231

@@ -36,20 +35,16 @@ public interface GetSpaceUsed {
3635
/**
3736
* The builder class
3837
*/
39-
final class Builder {
38+
class Builder {
4039
static final Logger LOG = LoggerFactory.getLogger(Builder.class);
4140

42-
static final String CLASSNAME_KEY = "fs.getspaceused.classname";
43-
static final String JITTER_KEY = "fs.getspaceused.jitterMillis";
44-
static final long DEFAULT_JITTER = TimeUnit.MINUTES.toMillis(1);
45-
46-
4741
private Configuration conf;
4842
private Class<? extends GetSpaceUsed> klass = null;
4943
private File path = null;
5044
private Long interval = null;
5145
private Long jitter = null;
5246
private Long initialUsed = null;
47+
private Constructor<? extends GetSpaceUsed> cons;
5348

5449
public Configuration getConf() {
5550
return conf;
@@ -89,7 +84,8 @@ public Class<? extends GetSpaceUsed> getKlass() {
8984
if (conf == null) {
9085
return result;
9186
}
92-
return conf.getClass(CLASSNAME_KEY, result, GetSpaceUsed.class);
87+
return conf.getClass(CommonConfigurationKeys.FS_GETSPACEUSED_CLASSNAME,
88+
result, GetSpaceUsed.class);
9389
}
9490

9591
public Builder setKlass(Class<? extends GetSpaceUsed> klass) {
@@ -124,9 +120,10 @@ public long getJitter() {
124120
Configuration configuration = this.conf;
125121

126122
if (configuration == null) {
127-
return DEFAULT_JITTER;
123+
return CommonConfigurationKeys.FS_GETSPACEUSED_JITTER_DEFAULT;
128124
}
129-
return configuration.getLong(JITTER_KEY, DEFAULT_JITTER);
125+
return configuration.getLong(CommonConfigurationKeys.FS_GETSPACEUSED_JITTER_KEY,
126+
CommonConfigurationKeys.FS_GETSPACEUSED_JITTER_DEFAULT);
130127
}
131128
return jitter;
132129
}
@@ -136,11 +133,21 @@ public Builder setJitter(Long jit) {
136133
return this;
137134
}
138135

136+
public Constructor<? extends GetSpaceUsed> getCons() {
137+
return cons;
138+
}
139+
140+
public void setCons(Constructor<? extends GetSpaceUsed> cons) {
141+
this.cons = cons;
142+
}
143+
139144
public GetSpaceUsed build() throws IOException {
140145
GetSpaceUsed getSpaceUsed = null;
141146
try {
142-
Constructor<? extends GetSpaceUsed> cons =
143-
getKlass().getConstructor(Builder.class);
147+
if (cons == null) {
148+
cons = getKlass().getConstructor(Builder.class);
149+
}
150+
144151
getSpaceUsed = cons.newInstance(this);
145152
} catch (InstantiationException e) {
146153
LOG.warn("Error trying to create an instance of " + getKlass(), e);

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java

Lines changed: 17 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -97,21 +97,23 @@ public void authorize(UserGroupInformation user,
9797
throw new AuthorizationException("Protocol " + protocol +
9898
" is not known.");
9999
}
100-
101-
// get client principal key to verify (if available)
102-
KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
103-
String clientPrincipal = null;
104-
if (krbInfo != null) {
105-
String clientKey = krbInfo.clientPrincipal();
106-
if (clientKey != null && !clientKey.isEmpty()) {
107-
try {
108-
clientPrincipal = SecurityUtil.getServerPrincipal(
109-
conf.get(clientKey), addr);
110-
} catch (IOException e) {
111-
throw (AuthorizationException) new AuthorizationException(
112-
"Can't figure out Kerberos principal name for connection from "
113-
+ addr + " for user=" + user + " protocol=" + protocol)
114-
.initCause(e);
100+
101+
String clientPrincipal = null;
102+
if (UserGroupInformation.isSecurityEnabled()) {
103+
// get client principal key to verify (if available)
104+
KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol, conf);
105+
if (krbInfo != null) {
106+
String clientKey = krbInfo.clientPrincipal();
107+
if (clientKey != null && !clientKey.isEmpty()) {
108+
try {
109+
clientPrincipal = SecurityUtil.getServerPrincipal(
110+
conf.get(clientKey), addr);
111+
} catch (IOException e) {
112+
throw (AuthorizationException) new AuthorizationException(
113+
"Can't figure out Kerberos principal name for connection from "
114+
+ addr + " for user=" + user + " protocol=" + protocol)
115+
.initCause(e);
116+
}
115117
}
116118
}
117119
}

hadoop-common-project/hadoop-common/src/main/resources/core-default.xml

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2742,6 +2742,15 @@
27422742
</description>
27432743
</property>
27442744

2745+
<property>
2746+
<name>ha.health-monitor.rpc.connect.max.retries</name>
2747+
<value>1</value>
2748+
<description>
2749+
The number of retries on connect error when establishing RPC proxy
2750+
connection to NameNode, used for monitorHealth() calls.
2751+
</description>
2752+
</property>
2753+
27452754
<property>
27462755
<name>ha.health-monitor.rpc-timeout.ms</name>
27472756
<value>45000</value>
@@ -3504,4 +3513,24 @@
35043513
</description>
35053514
</property>
35063515

3516+
<property>
3517+
<name>fs.getspaceused.classname</name>
3518+
<value></value>
3519+
<description>
3520+
The class that can estimate how much space is used in a directory.
3521+
There are four impl classes that are supported:
3522+
org.apache.hadoop.fs.DU(default), org.apache.hadoop.fs.WindowsGetSpaceUsed
3523+
org.apache.hadoop.fs.DFCachingGetSpaceUsed and
3524+
org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.ReplicaCachingGetSpaceUsed.
3525+
The ReplicaCachingGetSpaceUsed impl class is only used in the HDFS module.
3526+
</description>
3527+
</property>
3528+
3529+
<property>
3530+
<name>fs.getspaceused.jitterMillis</name>
3531+
<value>60000</value>
3532+
<description>
3533+
The refresh jitter, in milliseconds, applied to fs space usage statistics.
3534+
</description>
3535+
</property>
35073536
</configuration>

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,13 +20,18 @@
2020
import static org.junit.Assert.assertEquals;
2121
import static org.junit.Assert.fail;
2222

23+
import java.lang.annotation.Annotation;
2324
import java.net.InetAddress;
2425
import java.net.UnknownHostException;
2526

2627
import org.apache.hadoop.conf.Configuration;
2728
import org.apache.hadoop.fs.CommonConfigurationKeys;
2829
import org.apache.hadoop.ipc.TestRPC.TestProtocol;
30+
import org.apache.hadoop.security.KerberosInfo;
31+
import org.apache.hadoop.security.SecurityInfo;
32+
import org.apache.hadoop.security.SecurityUtil;
2933
import org.apache.hadoop.security.UserGroupInformation;
34+
import org.apache.hadoop.security.token.TokenInfo;
3035
import org.junit.Test;
3136

3237
public class TestServiceAuthorization {
@@ -52,6 +57,53 @@ public Service[] getServices() {
5257
}
5358
}
5459

60+
private static class CustomSecurityInfo extends SecurityInfo {
61+
@Override
62+
public KerberosInfo getKerberosInfo(Class<?> protocol,
63+
Configuration conf) {
64+
return new KerberosInfo() {
65+
@Override
66+
public Class<? extends Annotation> annotationType() {
67+
return null;
68+
}
69+
@Override
70+
public String serverPrincipal() {
71+
return null;
72+
}
73+
@Override
74+
public String clientPrincipal() {
75+
return "dfs.datanode.kerberos.principal";
76+
}
77+
};
78+
}
79+
80+
@Override
81+
public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
82+
return null;
83+
}
84+
}
85+
86+
@Test
87+
public void testWithClientPrincipalOnUnsecureMode()
88+
throws UnknownHostException {
89+
UserGroupInformation hdfsUser = UserGroupInformation.createUserForTesting(
90+
"hdfs", new String[] {"hadoop"});
91+
ServiceAuthorizationManager serviceAuthorizationManager =
92+
new ServiceAuthorizationManager();
93+
SecurityUtil.setSecurityInfoProviders(new CustomSecurityInfo());
94+
95+
Configuration conf = new Configuration();
96+
conf.set("dfs.datanode.kerberos.principal", "dn/[email protected]");
97+
conf.set(ACL_CONFIG, "user1 hadoop");
98+
serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
99+
try {
100+
serviceAuthorizationManager.authorize(hdfsUser, TestProtocol.class, conf,
101+
InetAddress.getByName(ADDRESS));
102+
} catch (AuthorizationException e) {
103+
fail();
104+
}
105+
}
106+
55107
@Test
56108
public void testDefaultAcl() {
57109
ServiceAuthorizationManager serviceAuthorizationManager =
Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
/**
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.crypto.key.kms.server;
19+
20+
import static org.junit.Assert.assertEquals;
21+
import static org.junit.Assert.assertNull;
22+
import static org.mockito.Mockito.when;
23+
24+
import java.io.IOException;
25+
26+
import javax.servlet.FilterChain;
27+
import javax.servlet.ServletException;
28+
import javax.servlet.ServletRequest;
29+
import javax.servlet.ServletResponse;
30+
import javax.servlet.http.HttpServletRequest;
31+
import javax.servlet.http.HttpServletResponse;
32+
33+
import org.junit.Before;
34+
import org.junit.Test;
35+
import org.mockito.Mockito;
36+
37+
/**
38+
* Test for {@link KMSMDCFilter}.
39+
*
40+
*/
41+
public class TestKMSMDCFilter {
42+
43+
private static final String REMOTE_ADDRESS = "192.168.100.100";
44+
private static final String URL = "/admin";
45+
private static final String METHOD = "GET";
46+
47+
private KMSMDCFilter filter;
48+
private HttpServletRequest httpRequest;
49+
private HttpServletResponse httpResponse;
50+
51+
@Before
52+
public void setUp() throws IOException {
53+
filter = new KMSMDCFilter();
54+
httpRequest = Mockito.mock(HttpServletRequest.class);
55+
httpResponse = Mockito.mock(HttpServletResponse.class);
56+
KMSMDCFilter.setContext(null, null, null, null);
57+
}
58+
59+
@Test
60+
public void testFilter() throws IOException, ServletException {
61+
when(httpRequest.getMethod()).thenReturn(METHOD);
62+
when(httpRequest.getRequestURL()).thenReturn(new StringBuffer(URL));
63+
when(httpRequest.getRemoteAddr()).thenReturn(REMOTE_ADDRESS);
64+
65+
FilterChain filterChain = new FilterChain() {
66+
@Override
67+
public void doFilter(ServletRequest request, ServletResponse response)
68+
throws IOException, ServletException {
69+
assertEquals("filter.remoteClientAddress", REMOTE_ADDRESS,
70+
KMSMDCFilter.getRemoteClientAddress());
71+
assertEquals("filter.method", METHOD, KMSMDCFilter.getMethod());
72+
assertEquals("filter.url", URL, KMSMDCFilter.getURL());
73+
}
74+
};
75+
76+
checkMDCValuesAreEmpty();
77+
filter.doFilter(httpRequest, httpResponse, filterChain);
78+
checkMDCValuesAreEmpty();
79+
}
80+
81+
private void checkMDCValuesAreEmpty() {
82+
assertNull("getRemoteClientAddress", KMSMDCFilter.getRemoteClientAddress());
83+
assertNull("getMethod", KMSMDCFilter.getMethod());
84+
assertNull("getURL", KMSMDCFilter.getURL());
85+
assertNull("getUgi", KMSMDCFilter.getUgi());
86+
}
87+
88+
}

hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -99,9 +99,9 @@ public XceiverClientGrpc(Pipeline pipeline, Configuration config) {
9999
this.metrics = XceiverClientManager.getXceiverClientMetrics();
100100
this.channels = new HashMap<>();
101101
this.asyncStubs = new HashMap<>();
102-
this.topologyAwareRead = Boolean.parseBoolean(config.get(
103-
ScmConfigKeys.DFS_NETWORK_TOPOLOGY_AWARE_READ_ENABLED,
104-
ScmConfigKeys.DFS_NETWORK_TOPOLOGY_AWARE_READ_ENABLED_DEFAULT));
102+
this.topologyAwareRead = config.getBoolean(
103+
OzoneConfigKeys.OZONE_NETWORK_TOPOLOGY_AWARE_READ_KEY,
104+
OzoneConfigKeys.OZONE_NETWORK_TOPOLOGY_AWARE_READ_DEFAULT);
105105
}
106106

107107
/**

hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
import org.apache.hadoop.conf.Configuration;
2828
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
2929
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
30+
import org.apache.hadoop.ozone.OzoneConfigKeys;
3031
import org.apache.hadoop.ozone.OzoneSecurityUtil;
3132
import org.apache.hadoop.security.UserGroupInformation;
3233
import org.slf4j.Logger;
@@ -102,9 +103,9 @@ public void onRemoval(
102103
}
103104
}
104105
}).build();
105-
topologyAwareRead = Boolean.parseBoolean(conf.get(
106-
ScmConfigKeys.DFS_NETWORK_TOPOLOGY_AWARE_READ_ENABLED,
107-
ScmConfigKeys.DFS_NETWORK_TOPOLOGY_AWARE_READ_ENABLED_DEFAULT));
106+
topologyAwareRead = conf.getBoolean(
107+
OzoneConfigKeys.OZONE_NETWORK_TOPOLOGY_AWARE_READ_KEY,
108+
OzoneConfigKeys.OZONE_NETWORK_TOPOLOGY_AWARE_READ_DEFAULT);
108109
}
109110

110111
@VisibleForTesting

0 commit comments

Comments
 (0)