Skip to content

Commit 5d112c6

Browse files
committed
fix checkstyle and SuppressFBWarnings
1 parent dab37c9 commit 5d112c6

File tree

3 files changed

+5
-3
lines changed

3 files changed

+5
-3
lines changed

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java

+3
Original file line numberDiff line numberDiff line change
@@ -264,6 +264,8 @@
264264
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
265265
import org.apache.hadoop.util.concurrent.HadoopExecutors;
266266
import org.apache.hadoop.tracing.Tracer;
267+
268+
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
267269
import org.eclipse.jetty.util.ajax.JSON;
268270

269271
import org.apache.hadoop.classification.VisibleForTesting;
@@ -2835,6 +2837,7 @@ private void reportBadBlock(final BPOfferService bpos,
28352837
LOG.warn(msg);
28362838
}
28372839

2840+
@SuppressFBWarnings()
28382841
@VisibleForTesting
28392842
void transferBlock(ExtendedBlock block, DatanodeInfo[] xferTargets,
28402843
StorageType[] xferTargetStorageTypes, String[] xferTargetStorageIDs)

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -139,7 +139,7 @@ public int compare(File f1, File f2) {
139139
private volatile GetSpaceUsed dfsUsage;
140140

141141
/**
142-
* Create a blook pool slice
142+
* Create a blook pool slice.
143143
* @param bpid Block pool Id
144144
* @param volume {@link FsVolumeImpl} to which this BlockPool belongs to
145145
* @param bpDir directory corresponding to the BlockPool

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java

+1-2
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,6 @@
6868
import org.apache.hadoop.hdfs.server.datanode.FileIoProvider;
6969
import org.apache.hadoop.hdfs.server.datanode.FinalizedReplica;
7070
import org.apache.hadoop.hdfs.server.datanode.LocalReplica;
71-
import org.apache.hadoop.hdfs.server.datanode.LocalReplicaInPipeline;
7271
import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
7372
import org.apache.hadoop.util.AutoCloseableLock;
7473
import org.apache.hadoop.hdfs.protocol.Block;
@@ -3875,7 +3874,7 @@ public void hardLinkOneBlock(ExtendedBlock srcBlock, ExtendedBlock dstBlock) thr
38753874
DatanodeUtil.getMetaName(dstBlock.getBlockName(), dstBlock.getGenerationStamp()));
38763875
File dstBlockFile = new File(dstTmpDir, dstBlock.getBlockName());
38773876

3878-
File files[] = hardLinkBlockFiles(srcReplicaInfo, dstMeta, dstBlockFile);
3877+
File[] files = hardLinkBlockFiles(srcReplicaInfo, dstMeta, dstBlockFile);
38793878

38803879
ReplicaInfo dstReplicaInfo = new ReplicaBuilder(ReplicaState.TEMPORARY)
38813880
.setBlockId(dstBlock.getBlockId())

0 commit comments

Comments (0)