Skip to content

Commit 2719bd2

Browse files
committed
Merge branch 'trunk' of github.com:susheel-gupta/hadoop into YARN-11079
Aim: to trigger untriggered Jenkins. Change-Id: Iea1ee17e02efd66c30b97a7e7d025a6eb6ae25d0
2 parents 0ae1f4d + 4067fac commit 2719bd2

File tree

381 files changed

+11759
-5845
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

381 files changed

+11759
-5845
lines changed

.asf.yaml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414
# limitations under the License.
1515

1616
github:
17+
ghp_path: /
18+
ghp_branch: gh-pages
1719
enabled_merge_buttons:
1820
squash: true
1921
merge: false

.github/workflows/website.yml

Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
# Licensed to the Apache Software Foundation (ASF) under one or more
2+
# contributor license agreements. See the NOTICE file distributed with
3+
# this work for additional information regarding copyright ownership.
4+
# The ASF licenses this file to You under the Apache License, Version 2.0
5+
# (the "License"); you may not use this file except in compliance with
6+
# the License. You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
16+
17+
name: website
18+
19+
# Controls when the action will run.
20+
on:
21+
push:
22+
branches: [ trunk ]
23+
24+
jobs:
25+
build:
26+
runs-on: ubuntu-latest
27+
steps:
28+
- name: Checkout Hadoop trunk
29+
uses: actions/checkout@v3
30+
with:
31+
repository: apache/hadoop
32+
- name: Set up JDK 8
33+
uses: actions/setup-java@v3
34+
with:
35+
java-version: '8'
36+
distribution: 'temurin'
37+
- name: Cache local Maven repository
38+
uses: actions/cache@v3
39+
with:
40+
path: ~/.m2/repository
41+
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
42+
restore-keys: |
43+
${{ runner.os }}-maven-
44+
- name: Build Hadoop maven plugins
45+
run: cd hadoop-maven-plugins && mvn --batch-mode install
46+
- name: Build Hadoop
47+
run: mvn clean install -DskipTests -DskipShade
48+
- name: Build document
49+
run: mvn clean site
50+
- name: Stage document
51+
run: mvn site:stage -DstagingDirectory=${GITHUB_WORKSPACE}/staging/
52+
- name: Deploy to GitHub Pages
53+
uses: peaceiris/actions-gh-pages@v3
54+
with:
55+
github_token: ${{ secrets.GITHUB_TOKEN }}
56+
publish_dir: ./staging/hadoop-project
57+
user_name: 'github-actions[bot]'
58+
user_email: 'github-actions[bot]@users.noreply.github.com'
59+

LICENSE-binary

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -250,7 +250,6 @@ commons-codec:commons-codec:1.11
250250
commons-collections:commons-collections:3.2.2
251251
commons-daemon:commons-daemon:1.0.13
252252
commons-io:commons-io:2.8.0
253-
commons-logging:commons-logging:1.1.3
254253
commons-net:commons-net:3.9.0
255254
de.ruedigermoeller:fst:2.50
256255
io.grpc:grpc-api:1.26.0

hadoop-common-project/hadoop-common/pom.xml

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -180,11 +180,6 @@
180180
<artifactId>jersey-server</artifactId>
181181
<scope>compile</scope>
182182
</dependency>
183-
<dependency>
184-
<groupId>commons-logging</groupId>
185-
<artifactId>commons-logging</artifactId>
186-
<scope>compile</scope>
187-
</dependency>
188183
<dependency>
189184
<groupId>log4j</groupId>
190185
<artifactId>log4j</artifactId>
@@ -200,11 +195,6 @@
200195
<artifactId>assertj-core</artifactId>
201196
<scope>test</scope>
202197
</dependency>
203-
<dependency>
204-
<groupId>org.glassfish.grizzly</groupId>
205-
<artifactId>grizzly-http-servlet</artifactId>
206-
<scope>test</scope>
207-
</dependency>
208198
<dependency>
209199
<groupId>commons-beanutils</groupId>
210200
<artifactId>commons-beanutils</artifactId>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -241,12 +241,15 @@ public synchronized void close() throws IOException {
241241
return;
242242
}
243243
try {
244-
flush();
245-
if (closeOutputStream) {
246-
super.close();
247-
codec.close();
244+
try {
245+
flush();
246+
} finally {
247+
if (closeOutputStream) {
248+
super.close();
249+
codec.close();
250+
}
251+
freeBuffers();
248252
}
249-
freeBuffers();
250253
} finally {
251254
closed = true;
252255
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -414,7 +414,14 @@ public Path getLocalPathForWrite(String pathStr, long size,
414414

415415
//build the "roulette wheel"
416416
for(int i =0; i < ctx.dirDF.length; ++i) {
417-
availableOnDisk[i] = ctx.dirDF[i].getAvailable();
417+
final DF target = ctx.dirDF[i];
418+
// attempt to recreate the dir so that getAvailable() is valid
419+
// if it fails, getAvailable() will return 0, so the dir will
420+
// be declared unavailable.
421+
// return value is logged at debug to keep spotbugs quiet.
422+
final boolean b = new File(target.getDirPath()).mkdirs();
423+
LOG.debug("mkdirs of {}={}", target, b);
424+
availableOnDisk[i] = target.getAvailable();
418425
totalAvailable += availableOnDisk[i];
419426
}
420427

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/prefetch/CachingBlockManager.java

Lines changed: 12 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -302,7 +302,12 @@ public void cancelPrefetches() {
302302

303303
private void read(BufferData data) throws IOException {
304304
synchronized (data) {
305-
readBlock(data, false, BufferData.State.BLANK);
305+
try {
306+
readBlock(data, false, BufferData.State.BLANK);
307+
} catch (IOException e) {
308+
LOG.error("error reading block {}", data.getBlockNumber(), e);
309+
throw e;
310+
}
306311
}
307312
}
308313

@@ -362,9 +367,6 @@ private void readBlock(BufferData data, boolean isPrefetch, BufferData.State...
362367
buffer.flip();
363368
data.setReady(expectedState);
364369
} catch (Exception e) {
365-
String message = String.format("error during readBlock(%s)", data.getBlockNumber());
366-
LOG.error(message, e);
367-
368370
if (isPrefetch && tracker != null) {
369371
tracker.failed();
370372
}
@@ -406,7 +408,8 @@ public Void get() {
406408
try {
407409
blockManager.prefetch(data, taskQueuedStartTime);
408410
} catch (Exception e) {
409-
LOG.error("error during prefetch", e);
411+
LOG.info("error prefetching block {}. {}", data.getBlockNumber(), e.getMessage());
412+
LOG.debug("error prefetching block {}", data.getBlockNumber(), e);
410413
}
411414
return null;
412415
}
@@ -493,7 +496,8 @@ private void addToCacheAndRelease(BufferData data, Future<Void> blockFuture,
493496
return;
494497
}
495498
} catch (Exception e) {
496-
LOG.error("error waiting on blockFuture: {}", data, e);
499+
LOG.info("error waiting on blockFuture: {}. {}", data, e.getMessage());
500+
LOG.debug("error waiting on blockFuture: {}", data, e);
497501
data.setDone();
498502
return;
499503
}
@@ -523,8 +527,8 @@ private void addToCacheAndRelease(BufferData data, Future<Void> blockFuture,
523527
data.setDone();
524528
} catch (Exception e) {
525529
numCachingErrors.incrementAndGet();
526-
String message = String.format("error adding block to cache after wait: %s", data);
527-
LOG.error(message, e);
530+
LOG.info("error adding block to cache after wait: {}. {}", data, e.getMessage());
531+
LOG.debug("error adding block to cache after wait: {}", data, e);
528532
data.setDone();
529533
}
530534

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/package-info.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,11 @@
1515
* See the License for the specific language governing permissions and
1616
* limitations under the License.
1717
*/
18+
19+
/**
20+
* Filesystem implementations that allow Hadoop to read directly from
21+
* the local file system.
22+
*/
1823
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
1924
@InterfaceStability.Unstable
2025
package org.apache.hadoop.fs.local;

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/package-info.java

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,10 @@
1515
* See the License for the specific language governing permissions and
1616
* limitations under the License.
1717
*/
18+
19+
/**
20+
* Support for the execution of a file system command.
21+
*/
1822
@InterfaceAudience.Private
1923
@InterfaceStability.Unstable
2024
package org.apache.hadoop.fs.shell;

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/package-info.java

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,10 @@
1515
* See the License for the specific language governing permissions and
1616
* limitations under the License.
1717
*/
18+
19+
/**
20+
* Support for embedded HTTP services.
21+
*/
1822
@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
1923
@InterfaceStability.Unstable
2024
package org.apache.hadoop.http;

0 commit comments

Comments
 (0)