Skip to content

Commit 99473e3

Browse files
authored
Merge branch 'apache:trunk' into YARN-11424-V2
2 parents 3d3365c + e2ab350 commit 99473e3

File tree

127 files changed

+3758
-3359
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

127 files changed

+3758
-3359
lines changed

.asf.yaml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414
# limitations under the License.
1515

1616
github:
17+
ghp_path: /
18+
ghp_branch: gh-pages
1719
enabled_merge_buttons:
1820
squash: true
1921
merge: false

.github/workflows/website.yml

Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
# Licensed to the Apache Software Foundation (ASF) under one or more
2+
# contributor license agreements. See the NOTICE file distributed with
3+
# this work for additional information regarding copyright ownership.
4+
# The ASF licenses this file to You under the Apache License, Version 2.0
5+
# (the "License"); you may not use this file except in compliance with
6+
# the License. You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
16+
17+
name: website
18+
19+
# Controls when the action will run.
20+
on:
21+
push:
22+
branches: [ trunk ]
23+
24+
jobs:
25+
build:
26+
runs-on: ubuntu-latest
27+
steps:
28+
- name: Checkout Hadoop trunk
29+
uses: actions/checkout@v3
30+
with:
31+
repository: apache/hadoop
32+
- name: Set up JDK 8
33+
uses: actions/setup-java@v3
34+
with:
35+
java-version: '8'
36+
distribution: 'temurin'
37+
- name: Cache local Maven repository
38+
uses: actions/cache@v3
39+
with:
40+
path: ~/.m2/repository
41+
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
42+
restore-keys: |
43+
${{ runner.os }}-maven-
44+
- name: Build Hadoop maven plugins
45+
run: cd hadoop-maven-plugins && mvn --batch-mode install
46+
- name: Build Hadoop
47+
run: mvn clean install -DskipTests -DskipShade
48+
- name: Build document
49+
run: mvn clean site
50+
- name: Stage document
51+
run: mvn site:stage -DstagingDirectory=${GITHUB_WORKSPACE}/staging/
52+
- name: Deploy to GitHub Pages
53+
uses: peaceiris/actions-gh-pages@v3
54+
with:
55+
github_token: ${{ secrets.GITHUB_TOKEN }}
56+
publish_dir: ./staging/hadoop-project
57+
user_name: 'github-actions[bot]'
58+
user_email: 'github-actions[bot]@users.noreply.github.com'
59+

LICENSE-binary

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -250,7 +250,6 @@ commons-codec:commons-codec:1.11
250250
commons-collections:commons-collections:3.2.2
251251
commons-daemon:commons-daemon:1.0.13
252252
commons-io:commons-io:2.8.0
253-
commons-logging:commons-logging:1.1.3
254253
commons-net:commons-net:3.9.0
255254
de.ruedigermoeller:fst:2.50
256255
io.grpc:grpc-api:1.26.0

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -180,11 +180,6 @@
180180
<artifactId>jersey-server</artifactId>
181181
<scope>compile</scope>
182182
</dependency>
183-
<dependency>
184-
<groupId>commons-logging</groupId>
185-
<artifactId>commons-logging</artifactId>
186-
<scope>compile</scope>
187-
</dependency>
188183
<dependency>
189184
<groupId>log4j</groupId>
190185
<artifactId>log4j</artifactId>
@@ -200,11 +195,6 @@
200195
<artifactId>assertj-core</artifactId>
201196
<scope>test</scope>
202197
</dependency>
203-
<dependency>
204-
<groupId>org.glassfish.grizzly</groupId>
205-
<artifactId>grizzly-http-servlet</artifactId>
206-
<scope>test</scope>
207-
</dependency>
208198
<dependency>
209199
<groupId>commons-beanutils</groupId>
210200
<artifactId>commons-beanutils</artifactId>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -241,12 +241,15 @@ public synchronized void close() throws IOException {
241241
return;
242242
}
243243
try {
244-
flush();
245-
if (closeOutputStream) {
246-
super.close();
247-
codec.close();
244+
try {
245+
flush();
246+
} finally {
247+
if (closeOutputStream) {
248+
super.close();
249+
codec.close();
250+
}
251+
freeBuffers();
248252
}
249-
freeBuffers();
250253
} finally {
251254
closed = true;
252255
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java

Lines changed: 0 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,6 @@
3232
import java.util.ArrayList;
3333
import java.util.List;
3434

35-
import org.apache.commons.logging.Log;
3635
import org.apache.hadoop.classification.InterfaceAudience;
3736
import org.apache.hadoop.classification.InterfaceStability;
3837
import org.apache.hadoop.conf.Configuration;
@@ -246,30 +245,6 @@ public static void skipFully(InputStream in, long len) throws IOException {
246245
}
247246
}
248247

249-
/**
250-
* Close the Closeable objects and <b>ignore</b> any {@link Throwable} or
251-
* null pointers. Must only be used for cleanup in exception handlers.
252-
*
253-
* @param log the log to record problems to at debug level. Can be null.
254-
* @param closeables the objects to close
255-
* @deprecated use {@link #cleanupWithLogger(Logger, java.io.Closeable...)}
256-
* instead
257-
*/
258-
@Deprecated
259-
public static void cleanup(Log log, java.io.Closeable... closeables) {
260-
for (java.io.Closeable c : closeables) {
261-
if (c != null) {
262-
try {
263-
c.close();
264-
} catch(Throwable e) {
265-
if (log != null && log.isDebugEnabled()) {
266-
log.debug("Exception in closing " + c, e);
267-
}
268-
}
269-
}
270-
}
271-
}
272-
273248
/**
274249
* Close the Closeable objects and <b>ignore</b> any {@link Throwable} or
275250
* null pointers. Must only be used for cleanup in exception handlers.

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1181,7 +1181,14 @@ public void sendRpcRequest(final Call call)
11811181
final ResponseBuffer buf = new ResponseBuffer();
11821182
header.writeDelimitedTo(buf);
11831183
RpcWritable.wrap(call.rpcRequest).writeTo(buf);
1184-
rpcRequestQueue.put(Pair.of(call, buf));
1184+
// Wait for the message to be sent. We offer with timeout to
1185+
// prevent a race condition between checking the shouldCloseConnection
1186+
// and the stopping of the polling thread
1187+
while (!shouldCloseConnection.get()) {
1188+
if (rpcRequestQueue.offer(Pair.of(call, buf), 1, TimeUnit.SECONDS)) {
1189+
break;
1190+
}
1191+
}
11851192
}
11861193

11871194
/* Receive a response.

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -896,15 +896,29 @@ public Builder setNumHandlers(int numHandlers) {
896896
this.numHandlers = numHandlers;
897897
return this;
898898
}
899-
899+
900900
/**
901901
* @return Default: -1.
902902
* @param numReaders input numReaders.
903+
* @deprecated call {@link #setNumReaders(int value)} instead.
903904
*/
905+
@Deprecated
904906
public Builder setnumReaders(int numReaders) {
905907
this.numReaders = numReaders;
906908
return this;
907909
}
910+
911+
/**
912+
* Set the number of reader threads.
913+
*
914+
* @return this builder.
915+
* @param value input numReaders.
916+
* @since HADOOP-18625.
917+
*/
918+
public Builder setNumReaders(int value) {
919+
this.numReaders = value;
920+
return this;
921+
}
908922

909923
/**
910924
* @return Default: -1.

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

Lines changed: 30 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1985,11 +1985,26 @@ public class Connection {
19851985
private long lastContact;
19861986
private int dataLength;
19871987
private Socket socket;
1988+
19881989
// Cache the remote host & port info so that even if the socket is
19891990
// disconnected, we can say where it used to connect to.
1990-
private String hostAddress;
1991-
private int remotePort;
1992-
private InetAddress addr;
1991+
1992+
/**
1993+
* Client Host IP address from where the socket connection is being established to the Server.
1994+
*/
1995+
private final String hostAddress;
1996+
/**
1997+
* Client remote port used for the given socket connection.
1998+
*/
1999+
private final int remotePort;
2000+
/**
2001+
* Address to which the socket is connected to.
2002+
*/
2003+
private final InetAddress addr;
2004+
/**
2005+
* Client Host address from where the socket connection is being established to the Server.
2006+
*/
2007+
private final String hostName;
19932008

19942009
IpcConnectionContextProto connectionContext;
19952010
String protocolName;
@@ -2033,8 +2048,12 @@ public Connection(SocketChannel channel, long lastContact,
20332048
this.isOnAuxiliaryPort = isOnAuxiliaryPort;
20342049
if (addr == null) {
20352050
this.hostAddress = "*Unknown*";
2051+
this.hostName = this.hostAddress;
20362052
} else {
2053+
// host IP address
20372054
this.hostAddress = addr.getHostAddress();
2055+
// host name for the IP address
2056+
this.hostName = addr.getHostName();
20382057
}
20392058
this.remotePort = socket.getPort();
20402059
this.responseQueue = new LinkedList<RpcCall>();
@@ -2050,7 +2069,7 @@ public Connection(SocketChannel channel, long lastContact,
20502069

20512070
@Override
20522071
public String toString() {
2053-
return getHostAddress() + ":" + remotePort;
2072+
return hostName + ":" + remotePort + " / " + hostAddress + ":" + remotePort;
20542073
}
20552074

20562075
boolean setShouldClose() {
@@ -2463,19 +2482,18 @@ public int readAndProcess() throws IOException, InterruptedException {
24632482
return -1;
24642483
}
24652484

2466-
if(!RpcConstants.HEADER.equals(dataLengthBuffer)) {
2467-
LOG.warn("Incorrect RPC Header length from {}:{} "
2468-
+ "expected length: {} got length: {}",
2469-
hostAddress, remotePort, RpcConstants.HEADER, dataLengthBuffer);
2485+
if (!RpcConstants.HEADER.equals(dataLengthBuffer)) {
2486+
LOG.warn("Incorrect RPC Header length from {}:{} / {}:{}. Expected: {}. Actual: {}",
2487+
hostName, remotePort, hostAddress, remotePort, RpcConstants.HEADER,
2488+
dataLengthBuffer);
24702489
setupBadVersionResponse(version);
24712490
return -1;
24722491
}
24732492
if (version != CURRENT_VERSION) {
24742493
//Warning is ok since this is not supposed to happen.
2475-
LOG.warn("Version mismatch from " +
2476-
hostAddress + ":" + remotePort +
2477-
" got version " + version +
2478-
" expected version " + CURRENT_VERSION);
2494+
LOG.warn("Version mismatch from {}:{} / {}:{}. "
2495+
+ "Expected version: {}. Actual version: {} ", hostName,
2496+
remotePort, hostAddress, remotePort, CURRENT_VERSION, version);
24792497
setupBadVersionResponse(version);
24802498
return -1;
24812499
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java

Lines changed: 7 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -34,10 +34,6 @@
3434

3535
import org.apache.hadoop.classification.VisibleForTesting;
3636
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
37-
import org.apache.commons.logging.Log;
38-
import org.apache.commons.logging.LogFactory;
39-
import org.apache.commons.logging.impl.Jdk14Logger;
40-
import org.apache.commons.logging.impl.Log4JLogger;
4137
import org.apache.hadoop.HadoopIllegalArgumentException;
4238
import org.apache.hadoop.classification.InterfaceAudience;
4339
import org.apache.hadoop.classification.InterfaceStability;
@@ -51,6 +47,8 @@
5147
import org.apache.hadoop.util.ServletUtil;
5248
import org.apache.hadoop.util.Tool;
5349
import org.apache.hadoop.util.ToolRunner;
50+
import org.apache.log4j.Level;
51+
import org.apache.log4j.Logger;
5452

5553
/**
5654
* Change log level in runtime.
@@ -340,22 +338,14 @@ public void doGet(HttpServletRequest request, HttpServletResponse response
340338
out.println(MARKER
341339
+ "Submitted Class Name: <b>" + logName + "</b><br />");
342340

343-
Log log = LogFactory.getLog(logName);
341+
Logger log = Logger.getLogger(logName);
344342
out.println(MARKER
345343
+ "Log Class: <b>" + log.getClass().getName() +"</b><br />");
346344
if (level != null) {
347345
out.println(MARKER + "Submitted Level: <b>" + level + "</b><br />");
348346
}
349347

350-
if (log instanceof Log4JLogger) {
351-
process(((Log4JLogger)log).getLogger(), level, out);
352-
}
353-
else if (log instanceof Jdk14Logger) {
354-
process(((Jdk14Logger)log).getLogger(), level, out);
355-
}
356-
else {
357-
out.println("Sorry, " + log.getClass() + " not supported.<br />");
358-
}
348+
process(log, level, out);
359349
}
360350

361351
out.println(FORMS);
@@ -371,36 +361,20 @@ else if (log instanceof Jdk14Logger) {
371361
+ "<input type='submit' value='Set Log Level' />"
372362
+ "</form>";
373363

374-
private static void process(org.apache.log4j.Logger log, String level,
364+
private static void process(Logger log, String level,
375365
PrintWriter out) throws IOException {
376366
if (level != null) {
377-
if (!level.equalsIgnoreCase(org.apache.log4j.Level.toLevel(level)
367+
if (!level.equalsIgnoreCase(Level.toLevel(level)
378368
.toString())) {
379369
out.println(MARKER + "Bad Level : <b>" + level + "</b><br />");
380370
} else {
381-
log.setLevel(org.apache.log4j.Level.toLevel(level));
371+
log.setLevel(Level.toLevel(level));
382372
out.println(MARKER + "Setting Level to " + level + " ...<br />");
383373
}
384374
}
385375
out.println(MARKER
386376
+ "Effective Level: <b>" + log.getEffectiveLevel() + "</b><br />");
387377
}
388378

389-
private static void process(java.util.logging.Logger log, String level,
390-
PrintWriter out) throws IOException {
391-
if (level != null) {
392-
String levelToUpperCase = level.toUpperCase();
393-
try {
394-
log.setLevel(java.util.logging.Level.parse(levelToUpperCase));
395-
} catch (IllegalArgumentException e) {
396-
out.println(MARKER + "Bad Level : <b>" + level + "</b><br />");
397-
}
398-
out.println(MARKER + "Setting Level to " + level + " ...<br />");
399-
}
400-
401-
java.util.logging.Level lev;
402-
for(; (lev = log.getLevel()) == null; log = log.getParent());
403-
out.println(MARKER + "Effective Level: <b>" + lev + "</b><br />");
404-
}
405379
}
406380
}

0 commit comments

Comments (0)