Skip to content

Commit 068d8c7

Browse files
authored
HDFS-17115. HttpFS: add support for the getErasureCodeCodecs API (#5875). Contributed by Hualong Zhang.
Reviewed-by: Shilun Fan <[email protected]> Signed-off-by: Ayush Saxena <[email protected]>
1 parent ad001c9 commit 068d8c7

File tree

6 files changed

+107
-2
lines changed

6 files changed

+107
-2
lines changed

hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -286,6 +286,7 @@ public enum Operation {
286286
GETFILELINKSTATUS(HTTP_GET),
287287
GETSTATUS(HTTP_GET),
288288
GETECPOLICIES(HTTP_GET),
289+
GETECCODECS(HTTP_GET),
289290
GET_BLOCK_LOCATIONS(HTTP_GET);
290291

291292
private String httpMethod;
@@ -1786,6 +1787,17 @@ public Collection<ErasureCodingPolicyInfo> getAllErasureCodingPolicies() throws
17861787
return JsonUtilClient.getAllErasureCodingPolicies(json);
17871788
}
17881789

1790+
public Map<String, String> getAllErasureCodingCodecs() throws IOException {
1791+
Map<String, String> params = new HashMap<>();
1792+
params.put(OP_PARAM, Operation.GETECCODECS.toString());
1793+
Path path = new Path(getUri().toString(), "/");
1794+
HttpURLConnection conn =
1795+
getConnection(Operation.GETECCODECS.getMethod(), params, path, false);
1796+
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
1797+
JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
1798+
return JsonUtilClient.getErasureCodeCodecs(json);
1799+
}
1800+
17891801
@VisibleForTesting
17901802
static BlockLocation[] toBlockLocations(JSONObject json) throws IOException {
17911803
ObjectMapper mapper = new ObjectMapper();

hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,7 @@
6565
import java.io.OutputStream;
6666
import java.util.Collection;
6767
import java.util.EnumSet;
68+
import java.util.HashMap;
6869
import java.util.LinkedHashMap;
6970
import java.util.List;
7071
import java.util.Map;
@@ -2369,4 +2370,30 @@ public String execute(FileSystem fs) throws IOException {
23692370
return JsonUtil.toJsonString(ecPolicyInfos.stream().toArray(ErasureCodingPolicyInfo[]::new));
23702371
}
23712372
}
2373+
2374+
/**
2375+
* Executor that performs a FSGetErasureCodingCodecs operation.
2376+
*/
2377+
@InterfaceAudience.Private
2378+
public static class FSGetErasureCodingCodecs
2379+
implements FileSystemAccess.FileSystemExecutor<Map> {
2380+
2381+
public FSGetErasureCodingCodecs() {
2382+
}
2383+
2384+
@Override
2385+
public Map execute(FileSystem fs) throws IOException {
2386+
Map<String, Map<String, String>> ecCodecs = new HashMap<>();
2387+
if (fs instanceof DistributedFileSystem) {
2388+
DistributedFileSystem dfs = (DistributedFileSystem) fs;
2389+
ecCodecs.put("ErasureCodingCodecs", dfs.getAllErasureCodingCodecs());
2390+
} else {
2391+
throw new UnsupportedOperationException("getErasureCodeCodecs is " +
2392+
"not supported for HttpFs on " + fs.getClass() +
2393+
". Please check your fs.defaultFS configuration");
2394+
}
2395+
HttpFSServerWebApp.get().getMetrics().incrOpsECCodecs();
2396+
return ecCodecs;
2397+
}
2398+
}
23722399
}

hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -131,6 +131,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
131131
PARAMS_DEF.put(Operation.GETFILELINKSTATUS, new Class[]{});
132132
PARAMS_DEF.put(Operation.GETSTATUS, new Class[]{});
133133
PARAMS_DEF.put(Operation.GETECPOLICIES, new Class[]{});
134+
PARAMS_DEF.put(Operation.GETECCODECS, new Class[]{});
134135
PARAMS_DEF.put(Operation.GET_BLOCK_LOCATIONS, new Class[] {OffsetParam.class, LenParam.class});
135136
}
136137

hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -534,6 +534,14 @@ public InputStream run() throws Exception {
534534
response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
535535
break;
536536
}
537+
case GETECCODECS: {
538+
FSOperations.FSGetErasureCodingCodecs command =
539+
new FSOperations.FSGetErasureCodingCodecs();
540+
Map json = fsExecute(user, command);
541+
AUDIT_LOG.info("[{}]", path);
542+
response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
543+
break;
544+
}
537545
case GET_BLOCK_LOCATIONS: {
538546
long offset = 0;
539547
long len = Long.MAX_VALUE;

hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/metrics/HttpFSServerMetrics.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,7 @@ public class HttpFSServerMetrics {
6666
private @Metric MutableCounterLong opsCheckAccess;
6767
private @Metric MutableCounterLong opsStatus;
6868
private @Metric MutableCounterLong opsAllECPolicies;
69+
private @Metric MutableCounterLong opsECCodecs;
6970

7071
private final MetricsRegistry registry = new MetricsRegistry("httpfsserver");
7172
private final String name;
@@ -170,4 +171,8 @@ public void incrOpsStatus() {
170171
public void incrOpsAllECPolicies() {
171172
opsAllECPolicies.incr();
172173
}
174+
175+
public void incrOpsECCodecs() {
176+
opsECCodecs.incr();
177+
}
173178
}

hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java

Lines changed: 54 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -105,6 +105,7 @@
105105
import java.util.List;
106106
import java.util.Map;
107107
import java.util.regex.Pattern;
108+
import java.util.concurrent.atomic.AtomicReference;
108109

109110
import static org.junit.Assert.assertArrayEquals;
110111
import static org.junit.Assert.assertEquals;
@@ -1218,9 +1219,9 @@ protected enum Operation {
12181219
FILE_STATUS_ATTR, GET_SNAPSHOT_DIFF, GET_SNAPSHOTTABLE_DIRECTORY_LIST,
12191220
GET_SNAPSHOT_LIST, GET_SERVERDEFAULTS, CHECKACCESS, SETECPOLICY,
12201221
SATISFYSTORAGEPOLICY, GET_SNAPSHOT_DIFF_LISTING, GETFILEBLOCKLOCATIONS,
1221-
GETFILELINKSTATUS, GETSTATUS, GETECPOLICIES
1222+
GETFILELINKSTATUS, GETSTATUS, GETECPOLICIES, GETECCODECS
12221223
}
1223-
1224+
@SuppressWarnings("methodlength")
12241225
private void operation(Operation op) throws Exception {
12251226
switch (op) {
12261227
case GET:
@@ -1370,6 +1371,9 @@ private void operation(Operation op) throws Exception {
13701371
case GETECPOLICIES:
13711372
testGetAllEEPolicies();
13721373
break;
1374+
case GETECCODECS:
1375+
testGetECCodecs();
1376+
break;
13731377
}
13741378
}
13751379

@@ -2149,6 +2153,54 @@ private void testGetAllEEPolicies() throws Exception {
21492153
}
21502154
}
21512155

2156+
private void testGetECCodecs() throws Exception {
2157+
if (isLocalFS()) {
2158+
// do not test the testGetECCodecs for local FS.
2159+
return;
2160+
}
2161+
final Path path = new Path("/foo");
2162+
2163+
FileSystem fs = FileSystem.get(path.toUri(), this.getProxiedFSConf());
2164+
LambdaTestUtils.intercept(AssertionError.class, () -> {
2165+
if (!(fs instanceof DistributedFileSystem)) {
2166+
throw new AssertionError(fs.getClass().getSimpleName() +
2167+
" is not of type DistributedFileSystem.");
2168+
}
2169+
});
2170+
2171+
DistributedFileSystem dfs =
2172+
(DistributedFileSystem) FileSystem.get(path.toUri(), this.getProxiedFSConf());
2173+
FileSystem httpFs = this.getHttpFSFileSystem();
2174+
2175+
Map<String, String> dfsErasureCodingCodecs = dfs.getAllErasureCodingCodecs();
2176+
2177+
final AtomicReference<Map<String, String>> diffErasureCodingCodecsRef =
2178+
new AtomicReference<>();
2179+
LambdaTestUtils.intercept(AssertionError.class, () -> {
2180+
if (httpFs instanceof HttpFSFileSystem) {
2181+
HttpFSFileSystem httpFSFileSystem = (HttpFSFileSystem) httpFs;
2182+
diffErasureCodingCodecsRef.set(httpFSFileSystem.getAllErasureCodingCodecs());
2183+
} else if (httpFs instanceof WebHdfsFileSystem) {
2184+
WebHdfsFileSystem webHdfsFileSystem = (WebHdfsFileSystem) httpFs;
2185+
diffErasureCodingCodecsRef.set(webHdfsFileSystem.getAllErasureCodingCodecs());
2186+
} else {
2187+
throw new AssertionError(httpFs.getClass().getSimpleName() +
2188+
" is not of type HttpFSFileSystem or WebHdfsFileSystem");
2189+
}
2190+
});
2191+
Map<String, String> diffErasureCodingCodecs = diffErasureCodingCodecsRef.get();
2192+
2193+
//Validate testGetECCodecs are the same as DistributedFileSystem
2194+
Assert.assertEquals(dfsErasureCodingCodecs.size(), diffErasureCodingCodecs.size());
2195+
2196+
for (Map.Entry<String, String> entry : dfsErasureCodingCodecs.entrySet()) {
2197+
String key = entry.getKey();
2198+
String value = entry.getValue();
2199+
Assert.assertTrue(diffErasureCodingCodecs.containsKey(key));
2200+
Assert.assertEquals(value, diffErasureCodingCodecs.get(key));
2201+
}
2202+
}
2203+
21522204
private void assertHttpFsReportListingWithDfsClient(SnapshotDiffReportListing diffReportListing,
21532205
SnapshotDiffReportListing dfsDiffReportListing) {
21542206
Assert.assertEquals(diffReportListing.getCreateList().size(),

0 commit comments

Comments
 (0)