Commit a703dae

Author: Sean Mackrory (committed)
HADOOP-16222. Fix new deprecations after guava 27.0 update in trunk. Contributed by Gabor Bota.
1 parent e1c5ddf · commit a703dae

11 files changed: +29 additions, -23 deletions

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java

Lines changed: 1 addition & 1 deletion
@@ -58,7 +58,7 @@ public ArrayWritable(Class<? extends Writable> valueClass, Writable[] values) {
   }
 
   public ArrayWritable(String[] strings) {
-    this(UTF8.class, new Writable[strings.length]);
+    this(Text.class, new Writable[strings.length]);
     for (int i = 0; i < strings.length; i++) {
       values[i] = new UTF8(strings[i]);
     }
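
A minimal usage sketch of the behaviour this hunk produces (the demo class is invented, not part of the commit): the String[] constructor of ArrayWritable now reports Text, rather than the deprecated UTF8, as its value class.

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Text;

// Hypothetical demo class, not part of the patch.
public class ArrayWritableValueClassDemo {
  public static void main(String[] args) {
    ArrayWritable writable = new ArrayWritable(new String[] {"a", "b", "c"});
    // With this commit the reported value class is Text instead of the
    // deprecated UTF8, which is what TestArrayWritable now asserts.
    System.out.println(writable.getValueClass().getSimpleName()); // "Text"
    System.out.println(String.join(",", writable.toStrings()));   // "a,b,c"
  }
}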

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SemaphoredDelegatingExecutor.java

Lines changed: 3 additions & 3 deletions
@@ -107,7 +107,7 @@ public <T> ListenableFuture<T> submit(Callable<T> task) {
       queueingPermits.acquire();
     } catch (InterruptedException e) {
       Thread.currentThread().interrupt();
-      return Futures.immediateFailedCheckedFuture(e);
+      return Futures.immediateFailedFuture(e);
     }
     return super.submit(new CallableWithPermitRelease<>(task));
   }
@@ -118,7 +118,7 @@ public <T> ListenableFuture<T> submit(Runnable task, T result) {
       queueingPermits.acquire();
     } catch (InterruptedException e) {
       Thread.currentThread().interrupt();
-      return Futures.immediateFailedCheckedFuture(e);
+      return Futures.immediateFailedFuture(e);
     }
     return super.submit(new RunnableWithPermitRelease(task), result);
   }
@@ -129,7 +129,7 @@ public ListenableFuture<?> submit(Runnable task) {
       queueingPermits.acquire();
     } catch (InterruptedException e) {
       Thread.currentThread().interrupt();
-      return Futures.immediateFailedCheckedFuture(e);
+      return Futures.immediateFailedFuture(e);
     }
     return super.submit(new RunnableWithPermitRelease(task));
   }
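
For context, a minimal sketch of the pattern applied in the three hunks above (class name, method name, and permit count are invented): CheckedFuture and Futures.immediateFailedCheckedFuture are deprecated in Guava 27, so the executor now returns a plain ListenableFuture that has already failed.

import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.concurrent.Semaphore;

// Hypothetical stand-alone class; names and the permit count are invented.
final class FailFastSubmitSketch {
  private final Semaphore queueingPermits = new Semaphore(4);

  <T> ListenableFuture<T> acquireThenComplete(T value) {
    try {
      queueingPermits.acquire();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      // immediateFailedFuture returns a ListenableFuture that is already
      // completed exceptionally with e, replacing the deprecated
      // immediateFailedCheckedFuture.
      return Futures.immediateFailedFuture(e);
    }
    return Futures.immediateFuture(value);
  }
}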

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java

Lines changed: 1 addition & 1 deletion
@@ -172,7 +172,7 @@ public static String resolveConfIndirection(String valInConf)
       return valInConf;
     }
     String path = valInConf.substring(1).trim();
-    return Files.toString(new File(path), Charsets.UTF_8).trim();
+    return Files.asCharSource(new File(path), Charsets.UTF_8).read().trim();
   }
 
   /**
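
A minimal sketch of the read-side migration used here and in TestDFSHAAdminMiniCluster below (the helper class and method names are invented): Files.toString(file, charset) is deprecated in Guava 27 in favour of Files.asCharSource(file, charset).read().

import com.google.common.base.Charsets;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;

// Hypothetical helper; the class and method names are invented.
final class ReadFileSketch {
  static String readTrimmed(File file) throws IOException {
    // asCharSource(...).read() reads the whole file into a String with the
    // same semantics as the deprecated Files.toString(file, charset).
    return Files.asCharSource(file, Charsets.UTF_8).read().trim();
  }
}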

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java

Lines changed: 1 addition & 1 deletion
@@ -106,7 +106,7 @@ public void testArrayWritableStringConstructor() {
     String[] original = { "test1", "test2", "test3" };
     ArrayWritable arrayWritable = new ArrayWritable(original);
     assertEquals("testArrayWritableStringConstructor class error!!!",
-        UTF8.class, arrayWritable.getValueClass());
+        Text.class, arrayWritable.getValueClass());
     assertArrayEquals("testArrayWritableStringConstructor toString error!!!",
         original, arrayWritable.toStrings());
   }

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestTableMapping.java

Lines changed: 8 additions & 8 deletions
@@ -41,8 +41,8 @@ public class TestTableMapping {
   public void testResolve() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testResolve", ".txt");
-    Files.write(hostName1 + " /rack1\n" +
-        hostName2 + "\t/rack2\n", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write(
+        hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
     mapFile.deleteOnExit();
     TableMapping mapping = new TableMapping();
 
@@ -64,8 +64,8 @@ public void testResolve() throws IOException {
   public void testTableCaching() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testTableCaching", ".txt");
-    Files.write(hostName1 + " /rack1\n" +
-        hostName2 + "\t/rack2\n", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write(
+        hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
     mapFile.deleteOnExit();
     TableMapping mapping = new TableMapping();
 
@@ -128,8 +128,8 @@ public void testFileDoesNotExist() {
   public void testClearingCachedMappings() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testClearingCachedMappings", ".txt");
-    Files.write(hostName1 + " /rack1\n" +
-        hostName2 + "\t/rack2\n", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write(
+        hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
     mapFile.deleteOnExit();
 
     TableMapping mapping = new TableMapping();
@@ -147,7 +147,7 @@ public void testClearingCachedMappings() throws IOException {
     assertEquals("/rack1", result.get(0));
     assertEquals("/rack2", result.get(1));
 
-    Files.write("", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write("");
 
     mapping.reloadCachedMappings();
 
@@ -166,7 +166,7 @@ public void testClearingCachedMappings() throws IOException {
   public void testBadFile() throws IOException {
     File mapFile = File.createTempFile(getClass().getSimpleName() +
         ".testBadFile", ".txt");
-    Files.write("bad contents", mapFile, Charsets.UTF_8);
+    Files.asCharSink(mapFile, Charsets.UTF_8).write("bad contents");
     mapFile.deleteOnExit();
     TableMapping mapping = new TableMapping();
 
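
A minimal sketch of the write-side migration repeated throughout this test and in TestSecurityUtil and TestZKUtil below (the helper class, method, and parameter names are invented): Files.write(CharSequence, File, Charset) is deprecated, and Files.asCharSink(file, charset).write(content) is its replacement.

import com.google.common.base.Charsets;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;

// Hypothetical helper; the class, method, and parameter names are invented.
final class WriteFileSketch {
  static void writeMapping(File mapFile, String host1, String host2)
      throws IOException {
    // asCharSink without a FileWriteMode truncates the file and writes the
    // content, matching the deprecated Files.write(CharSequence, File, Charset).
    Files.asCharSink(mapFile, Charsets.UTF_8).write(
        host1 + " /rack1\n" + host2 + "\t/rack2\n");
  }
}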

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java

Lines changed: 2 additions & 1 deletion
@@ -434,7 +434,8 @@ public void testAuthPlainTextFile() throws Exception {
     Configuration conf = new Configuration();
     File passwordTxtFile = File.createTempFile(
         getClass().getSimpleName() + ".testAuthAtPathNotation-", ".txt");
-    Files.write(ZK_AUTH_VALUE, passwordTxtFile, StandardCharsets.UTF_8);
+    Files.asCharSink(passwordTxtFile, StandardCharsets.UTF_8)
+        .write(ZK_AUTH_VALUE);
     try {
       conf.set(CommonConfigurationKeys.ZK_AUTH,
           "@" + passwordTxtFile.getAbsolutePath());

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestZKUtil.java

Lines changed: 1 addition & 1 deletion
@@ -131,7 +131,7 @@ public void testConfIndirection() throws IOException {
     assertEquals("x", ZKUtil.resolveConfIndirection("x"));
 
     TEST_FILE.getParentFile().mkdirs();
-    Files.write("hello world", TEST_FILE, Charsets.UTF_8);
+    Files.asCharSink(TEST_FILE, Charsets.UTF_8).write("hello world");
     assertEquals("hello world", ZKUtil.resolveConfIndirection(
         "@" + TEST_FILE.getAbsolutePath()));
 

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java

Lines changed: 3 additions & 3 deletions
@@ -208,16 +208,16 @@ public void testFencer() throws Exception {
     assertEquals(0, runTool("-ns", "minidfs-ns", "-failover", "nn2", "nn1"));
 
     // Fencer has not run yet, since none of the above required fencing
-    assertEquals("", Files.toString(tmpFile, Charsets.UTF_8));
+    assertEquals("", Files.asCharSource(tmpFile, Charsets.UTF_8).read());
 
     // Test failover with fencer and forcefence option
     assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence"));
 
     // The fence script should run with the configuration from the target
     // node, rather than the configuration from the fencing node. Strip
     // out any trailing spaces and CR/LFs which may be present on Windows.
-    String fenceCommandOutput =Files.toString(tmpFile, Charsets.UTF_8).
-        replaceAll(" *[\r\n]+", "");
+    String fenceCommandOutput = Files.asCharSource(tmpFile, Charsets.UTF_8)
+        .read().replaceAll(" *[\r\n]+", "");
     assertEquals("minidfs-ns.nn1 " + nn1Port + " nn1", fenceCommandOutput);
     tmpFile.delete();
 

hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java

Lines changed: 2 additions & 1 deletion
@@ -410,7 +410,8 @@ protected void starting(Description description) {
        fs = new SliderFileSystem(conf);
        fs.setAppDir(new Path(serviceBasePath.toString()));
      } catch (IOException e) {
-       Throwables.propagate(e);
+       Throwables.throwIfUnchecked(e);
+       throw new RuntimeException(e);
      }
    }
 
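
A minimal sketch of the Throwables migration above (the helper class and method names are invented): Throwables.propagate is deprecated, so the code now rethrows unchecked causes directly and wraps checked ones in a RuntimeException explicitly.

import com.google.common.base.Throwables;

// Hypothetical helper; the class and method names are invented.
final class RethrowSketch {
  static void rethrow(Exception e) {
    // throwIfUnchecked rethrows RuntimeException or Error as-is; anything
    // checked (such as the IOException in the hunk above) falls through and
    // is wrapped explicitly, replacing the deprecated Throwables.propagate(e).
    Throwables.throwIfUnchecked(e);
    throw new RuntimeException(e);
  }
}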

hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/fpga/TestFpgaResourceHandler.java

Lines changed: 5 additions & 2 deletions
@@ -74,6 +74,7 @@
 import org.junit.rules.ExpectedException;
 
 import com.google.common.io.Files;
+import com.google.common.io.FileWriteMode;
 
 public class TestFpgaResourceHandler {
   @Rule
@@ -133,7 +134,8 @@ public void setup() throws IOException, YarnException {
     dummyAocx = new File(aocxPath);
     Files.createParentDirs(dummyAocx);
     Files.touch(dummyAocx);
-    Files.append(HASHABLE_STRING, dummyAocx, StandardCharsets.UTF_8);
+    Files.asCharSink(dummyAocx, StandardCharsets.UTF_8, FileWriteMode.APPEND)
+        .write(HASHABLE_STRING);
   }
 
   @After
@@ -358,7 +360,8 @@ public void testsAllocationWithExistingIPIDDevices()
 
     // Case 2. id-2 container request preStart, with 1 plugin.configureIP called
     // Add some characters to the dummy file to have its hash changed
-    Files.append("12345", dummyAocx, StandardCharsets.UTF_8);
+    Files.asCharSink(dummyAocx, StandardCharsets.UTF_8, FileWriteMode.APPEND)
+        .write("12345");
     fpgaResourceHandler.preStart(mockContainer(1, 1, "GZIP"));
     // we should have 4 times invocation
     verify(mockVendorPlugin, times(4)).configureIP(anyString(),
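
A minimal sketch of the append migration above (the helper class, method, and parameter names are invented): Files.append(CharSequence, File, Charset) is deprecated, and passing FileWriteMode.APPEND to asCharSink preserves the append-to-end behaviour.

import com.google.common.io.FileWriteMode;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Hypothetical helper; the class and method names are invented.
final class AppendFileSketch {
  static void appendTo(File target, String content) throws IOException {
    // FileWriteMode.APPEND makes asCharSink append instead of truncating,
    // matching the deprecated Files.append(CharSequence, File, Charset).
    Files.asCharSink(target, StandardCharsets.UTF_8, FileWriteMode.APPEND)
        .write(content);
  }
}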
