Skip to content

Commit 562ccb5

Browse files
authored
Merge branch 'apache:trunk' into YARN-11239
2 parents 2e764a5 + a5f48ea commit 562ccb5

File tree

564 files changed

+20260
-7087
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

564 files changed

+20260
-7087
lines changed

.asf.yaml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414
# limitations under the License.
1515

1616
github:
17+
ghp_path: /
18+
ghp_branch: gh-pages
1719
enabled_merge_buttons:
1820
squash: true
1921
merge: false

.github/workflows/website.yml

Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
# Licensed to the Apache Software Foundation (ASF) under one or more
2+
# contributor license agreements. See the NOTICE file distributed with
3+
# this work for additional information regarding copyright ownership.
4+
# The ASF licenses this file to You under the Apache License, Version 2.0
5+
# (the "License"); you may not use this file except in compliance with
6+
# the License. You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
16+
17+
name: website
18+
19+
# Controls when the action will run.
20+
on:
21+
push:
22+
branches: [ trunk ]
23+
24+
jobs:
25+
build:
26+
runs-on: ubuntu-latest
27+
steps:
28+
- name: Checkout Hadoop trunk
29+
uses: actions/checkout@v3
30+
with:
31+
repository: apache/hadoop
32+
- name: Set up JDK 8
33+
uses: actions/setup-java@v3
34+
with:
35+
java-version: '8'
36+
distribution: 'temurin'
37+
- name: Cache local Maven repository
38+
uses: actions/cache@v3
39+
with:
40+
path: ~/.m2/repository
41+
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
42+
restore-keys: |
43+
${{ runner.os }}-maven-
44+
- name: Build Hadoop maven plugins
45+
run: cd hadoop-maven-plugins && mvn --batch-mode install
46+
- name: Build Hadoop
47+
run: mvn clean install -DskipTests -DskipShade
48+
- name: Build document
49+
run: mvn clean site
50+
- name: Stage document
51+
run: mvn site:stage -DstagingDirectory=${GITHUB_WORKSPACE}/staging/
52+
- name: Deploy to GitHub Pages
53+
uses: peaceiris/actions-gh-pages@v3
54+
with:
55+
github_token: ${{ secrets.GITHUB_TOKEN }}
56+
publish_dir: ./staging/hadoop-project
57+
user_name: 'github-actions[bot]'
58+
user_email: 'github-actions[bot]@users.noreply.github.com'
59+

LICENSE-binary

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -250,8 +250,7 @@ commons-codec:commons-codec:1.11
250250
commons-collections:commons-collections:3.2.2
251251
commons-daemon:commons-daemon:1.0.13
252252
commons-io:commons-io:2.8.0
253-
commons-logging:commons-logging:1.1.3
254-
commons-net:commons-net:3.8.0
253+
commons-net:commons-net:3.9.0
255254
de.ruedigermoeller:fst:2.50
256255
io.grpc:grpc-api:1.26.0
257256
io.grpc:grpc-context:1.26.0
@@ -260,7 +259,6 @@ io.grpc:grpc-netty:1.26.0
260259
io.grpc:grpc-protobuf:1.26.0
261260
io.grpc:grpc-protobuf-lite:1.26.0
262261
io.grpc:grpc-stub:1.26.0
263-
io.netty:netty:3.10.6.Final
264262
io.netty:netty-all:4.1.77.Final
265263
io.netty:netty-buffer:4.1.77.Final
266264
io.netty:netty-codec:4.1.77.Final
@@ -324,7 +322,7 @@ org.apache.htrace:htrace-core:3.1.0-incubating
324322
org.apache.htrace:htrace-core4:4.1.0-incubating
325323
org.apache.httpcomponents:httpclient:4.5.6
326324
org.apache.httpcomponents:httpcore:4.4.10
327-
org.apache.kafka:kafka-clients:2.8.1
325+
org.apache.kafka:kafka-clients:2.8.2
328326
org.apache.kerby:kerb-admin:2.0.2
329327
org.apache.kerby:kerb-client:2.0.2
330328
org.apache.kerby:kerb-common:2.0.2
@@ -343,7 +341,7 @@ org.apache.kerby:token-provider:2.0.2
343341
org.apache.solr:solr-solrj:8.8.2
344342
org.apache.yetus:audience-annotations:0.5.0
345343
org.apache.zookeeper:zookeeper:3.6.3
346-
org.codehaus.jettison:jettison:1.5.1
344+
org.codehaus.jettison:jettison:1.5.3
347345
org.eclipse.jetty:jetty-annotations:9.4.48.v20220622
348346
org.eclipse.jetty:jetty-http:9.4.48.v20220622
349347
org.eclipse.jetty:jetty-io:9.4.48.v20220622
@@ -363,7 +361,7 @@ org.lz4:lz4-java:1.7.1
363361
org.objenesis:objenesis:2.6
364362
org.xerial.snappy:snappy-java:1.0.5
365363
org.yaml:snakeyaml:1.33
366-
org.wildfly.openssl:wildfly-openssl:1.0.7.Final
364+
org.wildfly.openssl:wildfly-openssl:1.1.3.Final
367365

368366

369367
--------------------------------------------------------------------------------

hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/PlatformName.java

Lines changed: 60 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,10 @@
1818

1919
package org.apache.hadoop.util;
2020

21+
import java.security.AccessController;
22+
import java.security.PrivilegedAction;
23+
import java.util.Arrays;
24+
2125
import org.apache.hadoop.classification.InterfaceAudience;
2226
import org.apache.hadoop.classification.InterfaceStability;
2327

@@ -33,21 +37,71 @@ public class PlatformName {
3337
* per the java-vm.
3438
*/
3539
public static final String PLATFORM_NAME =
36-
(System.getProperty("os.name").startsWith("Windows")
37-
? System.getenv("os") : System.getProperty("os.name"))
38-
+ "-" + System.getProperty("os.arch")
39-
+ "-" + System.getProperty("sun.arch.data.model");
40+
(System.getProperty("os.name").startsWith("Windows") ?
41+
System.getenv("os") : System.getProperty("os.name"))
42+
+ "-" + System.getProperty("os.arch") + "-"
43+
+ System.getProperty("sun.arch.data.model");
4044

4145
/**
4246
* The java vendor name used in this platform.
4347
*/
4448
public static final String JAVA_VENDOR_NAME = System.getProperty("java.vendor");
4549

50+
/**
51+
* Define a system class accessor that is open to changes in underlying implementations
52+
* of the system class loader modules.
53+
*/
54+
private static final class SystemClassAccessor extends ClassLoader {
55+
public Class<?> getSystemClass(String className) throws ClassNotFoundException {
56+
return findSystemClass(className);
57+
}
58+
}
59+
4660
/**
4761
* A public static variable to indicate the current java vendor is
48-
* IBM java or not.
62+
* IBM and the type is Java Technology Edition which provides its
63+
* own implementations of many security packages and Cipher suites.
64+
* Note that these are not provided in Semeru runtimes:
65+
* See https://developer.ibm.com/languages/java/semeru-runtimes for details.
4966
*/
50-
public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
67+
public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM") &&
68+
hasIbmTechnologyEditionModules();
69+
70+
private static boolean hasIbmTechnologyEditionModules() {
71+
return Arrays.asList(
72+
"com.ibm.security.auth.module.JAASLoginModule",
73+
"com.ibm.security.auth.module.Win64LoginModule",
74+
"com.ibm.security.auth.module.NTLoginModule",
75+
"com.ibm.security.auth.module.AIX64LoginModule",
76+
"com.ibm.security.auth.module.LinuxLoginModule",
77+
"com.ibm.security.auth.module.Krb5LoginModule"
78+
).stream().anyMatch((module) -> isSystemClassAvailable(module));
79+
}
80+
81+
/**
82+
* In rare cases where different behaviour is performed based on the JVM vendor
83+
* this method should be used to test for a unique JVM class provided by the
84+
* vendor rather than using the vendor method. For example if one JVM provides a
85+
* different Kerberos login module testing for that login module being loadable
86+
* before configuring to use it is preferable to using the vendor data.
87+
*
88+
* @param className the name of a class in the JVM to test for
89+
* @return true if the class is available, false otherwise.
90+
*/
91+
private static boolean isSystemClassAvailable(String className) {
92+
return AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> {
93+
try {
94+
// Using ClassLoader.findSystemClass() instead of
95+
// Class.forName(className, false, null) because Class.forName with a null
96+
// ClassLoader only looks at the boot ClassLoader with Java 9 and above
97+
// which doesn't look at all the modules available to the findSystemClass.
98+
new SystemClassAccessor().getSystemClass(className);
99+
return true;
100+
} catch (Exception ignored) {
101+
return false;
102+
}
103+
});
104+
}
51105

52106
public static void main(String[] args) {
53107
System.out.println(PLATFORM_NAME);

hadoop-common-project/hadoop-auth/src/site/markdown/Configuration.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ This filter must be configured in front of all the web application resources tha
2424

2525
The Hadoop Auth and dependent JAR files must be in the web application classpath (commonly the `WEB-INF/lib` directory).
2626

27-
Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define the SLF4J API dependency but it does not define the dependency on a concrete logging implementation, this must be addded explicitly to the web application. For example, if the web applicationan uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part part of the web application classpath as well as the Log4j configuration file.
27+
Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define the SLF4J API dependency but it does not define the dependency on a concrete logging implementation, this must be added explicitly to the web application. For example, if the web application uses Log4j, the SLF4J-LOG4J12 and LOG4J jar files must be part of the web application classpath as well as the Log4j configuration file.
2828

2929
### Common Configuration parameters
3030

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -180,11 +180,6 @@
180180
<artifactId>jersey-server</artifactId>
181181
<scope>compile</scope>
182182
</dependency>
183-
<dependency>
184-
<groupId>commons-logging</groupId>
185-
<artifactId>commons-logging</artifactId>
186-
<scope>compile</scope>
187-
</dependency>
188183
<dependency>
189184
<groupId>log4j</groupId>
190185
<artifactId>log4j</artifactId>
@@ -200,11 +195,6 @@
200195
<artifactId>assertj-core</artifactId>
201196
<scope>test</scope>
202197
</dependency>
203-
<dependency>
204-
<groupId>org.glassfish.grizzly</groupId>
205-
<artifactId>grizzly-http-servlet</artifactId>
206-
<scope>test</scope>
207-
</dependency>
208198
<dependency>
209199
<groupId>commons-beanutils</groupId>
210200
<artifactId>commons-beanutils</artifactId>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -241,12 +241,15 @@ public synchronized void close() throws IOException {
241241
return;
242242
}
243243
try {
244-
flush();
245-
if (closeOutputStream) {
246-
super.close();
247-
codec.close();
244+
try {
245+
flush();
246+
} finally {
247+
if (closeOutputStream) {
248+
super.close();
249+
codec.close();
250+
}
251+
freeBuffers();
248252
}
249-
freeBuffers();
250253
} finally {
251254
closed = true;
252255
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,6 @@ public AvroFSInput(final FileContext fc, final Path p) throws IOException {
6060
FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL)
6161
.withFileStatus(status)
6262
.build());
63-
fc.open(p);
6463
}
6564

6665
@Override

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -402,7 +402,8 @@ public void setSymlink(final Path p) {
402402
}
403403

404404
/**
405-
* Compare this FileStatus to another FileStatus
405+
* Compare this FileStatus to another FileStatus based on lexicographical
406+
* order of path.
406407
* @param o the FileStatus to be compared.
407408
* @return a negative integer, zero, or a positive integer as this object
408409
* is less than, equal to, or greater than the specified object.
@@ -412,7 +413,8 @@ public int compareTo(FileStatus o) {
412413
}
413414

414415
/**
415-
* Compare this FileStatus to another FileStatus.
416+
* Compare this FileStatus to another FileStatus based on lexicographical
417+
* order of path.
416418
* This method was added back by HADOOP-14683 to keep binary compatibility.
417419
*
418420
* @param o the FileStatus to be compared.

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ public Trash(FileSystem fs, Configuration conf) throws IOException {
6969
* Hence we get the file system of the fully-qualified resolved-path and
7070
* then move the path p to the trashbin in that volume,
7171
* @param fs - the filesystem of path p
72-
* @param p - the path being deleted - to be moved to trasg
72+
* @param p - the path being deleted - to be moved to trash
7373
* @param conf - configuration
7474
* @return false if the item is already in the trash or trash is disabled
7575
* @throws IOException on error

0 commit comments

Comments (0)