Commit 193fe06

Merge branch 'apache:trunk' into YARN-11158-V3
2 parents 829d87c + a48e8c9 commit 193fe06

30 files changed: +2112 -776 lines

LICENSE-binary

Lines changed: 2 additions & 2 deletions
@@ -241,8 +241,8 @@ com.google.guava:guava:27.0-jre
 com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
 com.microsoft.azure:azure-storage:7.0.0
 com.nimbusds:nimbus-jose-jwt:9.8.1
-com.squareup.okhttp3:okhttp:4.9.3
-com.squareup.okio:okio:1.6.0
+com.squareup.okhttp3:okhttp:4.10.0
+com.squareup.okio:okio:3.2.0
 com.zaxxer:HikariCP:4.0.3
 commons-beanutils:commons-beanutils:1.9.3
 commons-cli:commons-cli:1.2

hadoop-client-modules/hadoop-client-runtime/pom.xml

Lines changed: 3 additions & 0 deletions
@@ -148,6 +148,7 @@
             <!-- Leave javax APIs that are stable -->
             <!-- the jdk ships part of the javax.annotation namespace, so if we want to relocate this we'll have to care it out by class :( -->
             <exclude>com.google.code.findbugs:jsr305</exclude>
+            <exclude>io.netty:*</exclude>
             <exclude>io.dropwizard.metrics:metrics-core</exclude>
             <exclude>org.eclipse.jetty:jetty-servlet</exclude>
             <exclude>org.eclipse.jetty:jetty-security</exclude>
@@ -156,6 +157,8 @@
             <exclude>org.bouncycastle:*</exclude>
             <!-- Leave snappy that includes native methods which cannot be relocated. -->
             <exclude>org.xerial.snappy:*</exclude>
+            <!-- leave out kotlin classes -->
+            <exclude>org.jetbrains.kotlin:*</exclude>
           </excludes>
         </artifactSet>
         <filters>

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 5 additions & 0 deletions
@@ -383,6 +383,11 @@
       <artifactId>mockwebserver</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.squareup.okio</groupId>
+      <artifactId>okio-jvm</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>dnsjava</groupId>
       <artifactId>dnsjava</artifactId>

hadoop-common-project/hadoop-common/src/site/markdown/DeprecatedProperties.md

Lines changed: 2 additions & 1 deletion
@@ -208,7 +208,8 @@ The following table lists the configuration property names that are deprecated i
 | mapred.task.profile.params | mapreduce.task.profile.params |
 | mapred.task.profile.reduces | mapreduce.task.profile.reduces |
 | mapred.task.timeout | mapreduce.task.timeout |
-| mapred.tasktracker.indexcache.mb | mapreduce.tasktracker.indexcache.mb |
+| mapred.tasktracker.indexcache.mb | mapreduce.reduce.shuffle.indexcache.mb |
+| mapreduce.tasktracker.indexcache.mb | mapreduce.reduce.shuffle.indexcache.mb |
 | mapred.tasktracker.map.tasks.maximum | mapreduce.tasktracker.map.tasks.maximum |
 | mapred.tasktracker.memory\_calculator\_plugin | mapreduce.tasktracker.resourcecalculatorplugin |
 | mapred.tasktracker.memorycalculatorplugin | mapreduce.tasktracker.resourcecalculatorplugin |
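
The renamed key above goes through Hadoop's configuration deprecation table, so jobs that still set mapred.tasktracker.indexcache.mb resolve to mapreduce.reduce.shuffle.indexcache.mb. A minimal sketch of that behaviour, assuming the mapping listed in the table is registered at runtime; the class name and the value 20 are illustrative only, not part of this commit:

import org.apache.hadoop.mapred.JobConf;

public class IndexCacheKeyMigrationExample {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Set the deprecated key; the deprecation table above maps it to the new name.
    conf.setInt("mapred.tasktracker.indexcache.mb", 20);
    // Reading the replacement key should return the value set under the old name.
    System.out.println(conf.getInt("mapreduce.reduce.shuffle.indexcache.mb", 10));
  }
}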

hadoop-hdfs-project/hadoop-hdfs-client/pom.xml

Lines changed: 10 additions & 0 deletions
@@ -37,6 +37,16 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
     <dependency>
       <groupId>com.squareup.okhttp3</groupId>
       <artifactId>okhttp</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.squareup.okio</groupId>
+          <artifactId>okio-jvm</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>com.squareup.okio</groupId>
+      <artifactId>okio-jvm</artifactId>
     </dependency>
     <dependency>
       <groupId>org.jetbrains.kotlin</groupId>

hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/PoolAlignmentContext.java

Lines changed: 1 addition & 2 deletions
@@ -71,8 +71,7 @@ public void receiveResponseState(RpcHeaderProtos.RpcResponseHeaderProto header)
    */
   @Override
   public void updateRequestState(RpcHeaderProtos.RpcRequestHeaderProto.Builder header) {
-    long maxStateId = Long.max(poolLocalStateId.get(), sharedGlobalStateId.get());
-    header.setStateId(maxStateId);
+    header.setStateId(poolLocalStateId.get());
   }

  /**
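
With this change, a router pool stamps outgoing namenode requests with only its pool-local state id rather than the maximum of the pool-local and shared global ids. A standalone sketch of the numeric difference, using plain longs instead of the real PoolAlignmentContext fields; the values mirror the new test added below:

public class PoolStateIdSketch {
  public static void main(String[] args) {
    long sharedGlobalStateId = 20L;          // state observed from namenode responses
    long poolLocalStateId = Long.MIN_VALUE;  // this pool has not been advanced yet

    // Old behaviour: the request header carried the larger of the two ids.
    long oldHeaderStateId = Long.max(poolLocalStateId, sharedGlobalStateId);  // 20
    // New behaviour: the request header carries only the pool-local id.
    long newHeaderStateId = poolLocalStateId;  // Long.MIN_VALUE

    System.out.println("old header stateId = " + oldHeaderStateId);
    System.out.println("new header stateId = " + newHeaderStateId);
  }
}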

hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestPoolAlignmentContext.java

Lines changed: 53 additions & 0 deletions
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.federation.router;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+
+public class TestPoolAlignmentContext {
+  @Test
+  public void testNamenodeRequestsOnlyUsePoolLocalStateID() {
+    RouterStateIdContext routerStateIdContext = new RouterStateIdContext(new Configuration());
+    String namespaceId = "namespace1";
+    routerStateIdContext.getNamespaceStateId(namespaceId).accumulate(20L);
+    PoolAlignmentContext poolContext1 = new PoolAlignmentContext(routerStateIdContext, namespaceId);
+    PoolAlignmentContext poolContext2 = new PoolAlignmentContext(routerStateIdContext, namespaceId);
+
+    assertRequestHeaderStateId(poolContext1, Long.MIN_VALUE);
+    assertRequestHeaderStateId(poolContext2, Long.MIN_VALUE);
+    Assertions.assertEquals(20L, poolContext1.getLastSeenStateId());
+    Assertions.assertEquals(20L, poolContext2.getLastSeenStateId());
+
+    poolContext1.advanceClientStateId(30L);
+    assertRequestHeaderStateId(poolContext1, 30L);
+    assertRequestHeaderStateId(poolContext2, Long.MIN_VALUE);
+    Assertions.assertEquals(20L, poolContext1.getLastSeenStateId());
+    Assertions.assertEquals(20L, poolContext2.getLastSeenStateId());
+  }
+
+  private void assertRequestHeaderStateId(PoolAlignmentContext poolAlignmentContext,
+      Long expectedValue) {
+    RpcRequestHeaderProto.Builder builder = RpcRequestHeaderProto.newBuilder();
+    poolAlignmentContext.updateRequestState(builder);
+    Assertions.assertEquals(expectedValue, builder.getStateId());
+  }
+}

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java

Lines changed: 1 addition & 1 deletion
@@ -520,7 +520,7 @@ public void addVolume(final StorageLocation location,

     for (final NamespaceInfo nsInfo : nsInfos) {
       String bpid = nsInfo.getBlockPoolID();
-      try (AutoCloseDataSetLock l = lockManager.writeLock(LockLevel.BLOCK_POOl, bpid)) {
+      try {
         fsVolume.addBlockPool(bpid, this.conf, this.timer);
         fsVolume.getVolumeMap(bpid, tempVolumeMap, ramDiskReplicaTracker);
       } catch (IOException e) {

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexCache.java

Lines changed: 2 additions & 2 deletions
@@ -23,7 +23,7 @@
 import java.util.concurrent.atomic.AtomicInteger;

 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -43,7 +43,7 @@ class IndexCache {
   public IndexCache(JobConf conf) {
     this.conf = conf;
     totalMemoryAllowed =
-      conf.getInt(TTConfig.TT_INDEX_CACHE, 10) * 1024 * 1024;
+      conf.getInt(MRJobConfig.SHUFFLE_INDEX_CACHE, 10) * 1024 * 1024;
     LOG.info("IndexCache created with max memory = " + totalMemoryAllowed);
   }


hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java

Lines changed: 2 additions & 0 deletions
@@ -577,6 +577,8 @@ public interface MRJobConfig {
   public static final String MAX_SHUFFLE_FETCH_HOST_FAILURES = "mapreduce.reduce.shuffle.max-host-failures";
   public static final int DEFAULT_MAX_SHUFFLE_FETCH_HOST_FAILURES = 5;

+  public static final String SHUFFLE_INDEX_CACHE = "mapreduce.reduce.shuffle.indexcache.mb";
+
   public static final String REDUCE_SKIP_INCR_PROC_COUNT = "mapreduce.reduce.skip.proc-count.auto-incr";

   public static final String REDUCE_SKIP_MAXGROUPS = "mapreduce.reduce.skip.maxgroups";
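
Combined with the IndexCache change above, the shuffle index cache size is now read from this key with a 10 MB default. A short sketch of sizing it through a job configuration; the value 50 is arbitrary and only for illustration:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRJobConfig;

public class ShuffleIndexCacheSizingExample {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Raise the shuffle index cache above the 10 MB default used by IndexCache.
    conf.setInt(MRJobConfig.SHUFFLE_INDEX_CACHE, 50);
    // IndexCache turns the configured megabytes into a byte budget.
    long totalMemoryAllowed =
        conf.getInt(MRJobConfig.SHUFFLE_INDEX_CACHE, 10) * 1024L * 1024L;
    System.out.println("index cache budget = " + totalMemoryAllowed + " bytes");
  }
}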
