From 18eaf34734e7efad160b13cd224a9e97cf50eb11 Mon Sep 17 00:00:00 2001
From: PJ Fanning
Date: Tue, 19 Sep 2023 12:38:36 +0100
Subject: [PATCH 1/4] HADOOP-18890. Remove use of okhttp in runtime code (#6057)

Contributed by PJ Fanning
---
 LICENSE-binary                                |  4 -
 NOTICE-binary                                 | 13 ----
 hadoop-client-modules/hadoop-client/pom.xml   | 12 ---
 hadoop-common-project/hadoop-common/pom.xml   |  4 +-
 .../dev-support/findbugsExcludeFile.xml       | 13 ----
 .../hadoop-hdfs-client/pom.xml                | 22 ------
 ...fRefreshTokenBasedAccessTokenProvider.java | 76 ++++++++++--------
 .../CredentialBasedAccessTokenProvider.java   | 78 ++++++++++---------
 .../hdfs/web/oauth2/OAuth2Constants.java      |  5 +-
 .../hadoop-hdfs-httpfs/pom.xml                | 10 +++
 hadoop-project/pom.xml                        | 54 ++-----------
 11 files changed, 106 insertions(+), 185 deletions(-)

diff --git a/LICENSE-binary b/LICENSE-binary
index f2804d3c9ed3f..b5b212efcf526 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -243,8 +243,6 @@ com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
 com.google.j2objc:j2objc-annotations:1.3
 com.microsoft.azure:azure-storage:7.0.1
 com.nimbusds:nimbus-jose-jwt:9.8.1
-com.squareup.okhttp3:okhttp:4.10.0
-com.squareup.okio:okio:3.4.0
 com.yammer.metrics:metrics-core:2.2.0
 com.zaxxer:HikariCP-java7:2.4.12
 commons-beanutils:commons-beanutils:1.9.4
@@ -361,8 +359,6 @@ org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.51.v20230217
 org.apache.zookeeper:zookeeper:3.6.3
 org.ehcache:ehcache:3.3.1
 org.ini4j:ini4j:0.5.4
-org.jetbrains.kotlin:kotlin-stdlib:1.4.10
-org.jetbrains.kotlin:kotlin-stdlib-common:1.4.10
 org.lz4:lz4-java:1.7.1
 org.objenesis:objenesis:2.6
 org.xerial.snappy:snappy-java:1.1.10.1
diff --git a/NOTICE-binary b/NOTICE-binary
index 2189de34e37ed..1969f71ef674e 100644
--- a/NOTICE-binary
+++ b/NOTICE-binary
@@ -334,19 +334,6 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
------------------------------------------------------------------------ - -This product contains a modified portion of 'OkHttp', an open source -HTTP & SPDY client for Android and Java applications, which can be obtained -at: - - * LICENSE: - * okhttp/third_party/okhttp/LICENSE (Apache License 2.0) - * HOMEPAGE: - * https://github.com/square/okhttp - * LOCATION_IN_GRPC: - * okhttp/third_party/okhttp - This product contains a modified portion of 'Netty', an open source networking library, which can be obtained at: diff --git a/hadoop-client-modules/hadoop-client/pom.xml b/hadoop-client-modules/hadoop-client/pom.xml index 05326d493a106..cb78442e15f6a 100644 --- a/hadoop-client-modules/hadoop-client/pom.xml +++ b/hadoop-client-modules/hadoop-client/pom.xml @@ -114,18 +114,6 @@ org.eclipse.jetty jetty-server - - org.jetbrains.kotlin - kotlin-stdlib - - - org.jetbrains.kotlin - kotlin-stdlib-common - - - com.squareup.okhttp3 - okhttp - com.sun.jersey jersey-core diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 54f56dae2562a..c266357864a36 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -374,8 +374,8 @@ test - com.squareup.okio - okio-jvm + org.jetbrains.kotlin + kotlin-stdlib-jdk8 test diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml index 1cefa55baa1ea..278d01dc22d0f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml @@ -93,17 +93,4 @@ - - - - - - - - - - - - - diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml index e2b1a212b637c..b2740ce495efd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml @@ -34,28 +34,6 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd"> - - com.squareup.okhttp3 - okhttp - - - com.squareup.okio - okio-jvm - - - - - com.squareup.okio - okio-jvm - - - org.jetbrains.kotlin - kotlin-stdlib - - - org.jetbrains.kotlin - kotlin-stdlib-common - org.apache.hadoop hadoop-common diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java index e944e8c1c8d77..7b82cad215dde 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java @@ -19,13 +19,10 @@ package org.apache.hadoop.hdfs.web.oauth2; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; import java.util.Map; -import java.util.concurrent.TimeUnit; - -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.RequestBody; -import okhttp3.Response; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -33,7 +30,17 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory; import org.apache.hadoop.util.JsonSerialization; import org.apache.hadoop.util.Timer; +import 
org.apache.http.HttpHeaders;
 import org.apache.http.HttpStatus;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.message.BasicNameValuePair;
+import org.apache.http.util.EntityUtils;
 
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
@@ -103,34 +110,37 @@ public synchronized String getAccessToken() throws IOException {
   }
 
   void refresh() throws IOException {
-    OkHttpClient client =
-        new OkHttpClient.Builder().connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
-            TimeUnit.MILLISECONDS)
-            .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
-            .build();
-
-    String bodyString =
-        Utils.postBody(GRANT_TYPE, REFRESH_TOKEN, REFRESH_TOKEN, refreshToken, CLIENT_ID, clientId);
-
-    RequestBody body = RequestBody.create(bodyString, URLENCODED);
-
-    Request request = new Request.Builder().url(refreshURL).post(body).build();
-    try (Response response = client.newCall(request).execute()) {
-      if (!response.isSuccessful()) {
-        throw new IOException("Unexpected code " + response);
-      }
-      if (response.code() != HttpStatus.SC_OK) {
-        throw new IllegalArgumentException(
-            "Received invalid http response: " + response.code() + ", text = "
-                + response.toString());
+    final List<NameValuePair> pairs = new ArrayList<>();
+    pairs.add(new BasicNameValuePair(GRANT_TYPE, REFRESH_TOKEN));
+    pairs.add(new BasicNameValuePair(REFRESH_TOKEN, refreshToken));
+    pairs.add(new BasicNameValuePair(CLIENT_ID, clientId));
+    final RequestConfig config = RequestConfig.custom()
+        .setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT)
+        .setConnectionRequestTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT)
+        .setSocketTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT)
+        .build();
+    try (CloseableHttpClient client =
+        HttpClientBuilder.create().setDefaultRequestConfig(config).build()) {
+      final HttpPost httpPost = new HttpPost(refreshURL);
+      httpPost.setEntity(new UrlEncodedFormEntity(pairs, StandardCharsets.UTF_8));
+      httpPost.setHeader(HttpHeaders.CONTENT_TYPE, URLENCODED);
+      try (CloseableHttpResponse response = client.execute(httpPost)) {
+        final int statusCode = response.getStatusLine().getStatusCode();
+        if (statusCode != HttpStatus.SC_OK) {
+          throw new IllegalArgumentException(
+              "Received invalid http response: " + statusCode + ", text = "
+                  + EntityUtils.toString(response.getEntity()));
+        }
+        Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(
+            EntityUtils.toString(response.getEntity()));
+
+        String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
+        accessTokenTimer.setExpiresIn(newExpiresIn);
+
+        accessToken = responseBody.get(ACCESS_TOKEN).toString();
       }
-
-      Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(response.body().string());
-
-      String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
-      accessTokenTimer.setExpiresIn(newExpiresIn);
-
-      accessToken = responseBody.get(ACCESS_TOKEN).toString();
+    } catch (RuntimeException e) {
+      throw new IOException("Exception while refreshing access token", e);
     } catch (Exception e) {
       throw new IOException("Exception while refreshing access token", e);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
index 25ceb8846092b..1803e997adc64 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
@@ -19,13 +19,10 @@
 package org.apache.hadoop.hdfs.web.oauth2;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
-import okhttp3.RequestBody;
-import okhttp3.Response;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -33,7 +30,17 @@
 import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.Timer;
+import org.apache.http.HttpHeaders;
 import org.apache.http.HttpStatus;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.message.BasicNameValuePair;
+import org.apache.http.util.EntityUtils;
 
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
@@ -97,38 +104,37 @@ public synchronized String getAccessToken() throws IOException {
   }
 
   void refresh() throws IOException {
-    OkHttpClient client = new OkHttpClient.Builder()
-        .connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
-        .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
-        .build();
-
-    String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
-        GRANT_TYPE, CLIENT_CREDENTIALS,
-        CLIENT_ID, clientId);
-
-    RequestBody body = RequestBody.create(bodyString, URLENCODED);
-
-    Request request = new Request.Builder()
-        .url(refreshURL)
-        .post(body)
+    final List<NameValuePair> pairs = new ArrayList<>();
+    pairs.add(new BasicNameValuePair(CLIENT_SECRET, getCredential()));
+    pairs.add(new BasicNameValuePair(GRANT_TYPE, CLIENT_CREDENTIALS));
+    pairs.add(new BasicNameValuePair(CLIENT_ID, clientId));
+    final RequestConfig config = RequestConfig.custom()
+        .setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT)
+        .setConnectionRequestTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT)
+        .setSocketTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT)
         .build();
-    try (Response response = client.newCall(request).execute()) {
-      if (!response.isSuccessful()) {
-        throw new IOException("Unexpected code " + response);
-      }
-
-      if (response.code() != HttpStatus.SC_OK) {
-        throw new IllegalArgumentException("Received invalid http response: "
-            + response.code() + ", text = " + response.toString());
+    try (CloseableHttpClient client =
+        HttpClientBuilder.create().setDefaultRequestConfig(config).build()) {
+      final HttpPost httpPost = new HttpPost(refreshURL);
+      httpPost.setEntity(new UrlEncodedFormEntity(pairs, StandardCharsets.UTF_8));
+      httpPost.setHeader(HttpHeaders.CONTENT_TYPE, URLENCODED);
+      try (CloseableHttpResponse response = client.execute(httpPost)) {
+        final int statusCode = response.getStatusLine().getStatusCode();
+        if (statusCode != HttpStatus.SC_OK) {
+          throw new IllegalArgumentException(
+              "Received invalid http response: " + statusCode + ", text = "
+                  + EntityUtils.toString(response.getEntity()));
+        }
+        Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(
+            EntityUtils.toString(response.getEntity()));
+
+        String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
+        timer.setExpiresIn(newExpiresIn);
+
+        accessToken = responseBody.get(ACCESS_TOKEN).toString();
       }
-
-      Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(
-          response.body().string());
-
-      String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
-      timer.setExpiresIn(newExpiresIn);
-
-      accessToken = responseBody.get(ACCESS_TOKEN).toString();
+    } catch (RuntimeException e) {
+      throw new IOException("Unable to obtain access token from credential", e);
     } catch (Exception e) {
       throw new IOException("Unable to obtain access token from credential", e);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
index 2f28b65e40e92..dbe95aca31a0c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
 
-import okhttp3.MediaType;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
@@ -30,8 +29,8 @@ public final class OAuth2Constants {
   private OAuth2Constants() { /** Private constructor.
**/ } - public static final MediaType URLENCODED - = MediaType.parse("application/x-www-form-urlencoded; charset=utf-8"); + public static final String URLENCODED + = "application/x-www-form-urlencoded; charset=utf-8"; /* Constants for OAuth protocol */ public static final String ACCESS_TOKEN = "access_token"; diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml index 89a982207035b..4269baf497755 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml @@ -199,6 +199,16 @@ bcprov-jdk15on test + + com.squareup.okhttp3 + mockwebserver + test + + + org.jetbrains.kotlin + kotlin-stdlib-jdk8 + test + diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 8db7f5fba8d03..851dfa3ae0022 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -135,10 +135,8 @@ 2.4.12 10.14.2.0 6.2.1.jre7 - 4.10.0 - 3.4.0 - 1.6.20 - 1.6.20 + 4.11.0 + 1.6.20 1.1 5.2.0 2.9.0 @@ -224,59 +222,21 @@ com.squareup.okhttp3 - okhttp + mockwebserver ${okhttp3.version} + test org.jetbrains.kotlin - kotlin-stdlib - - - org.jetbrains.kotlin - kotlin-stdlib-common - - - com.squareup.okio - okio-jvm - - - - - com.squareup.okio - okio-jvm - ${okio.version} - - - org.jetbrains.kotlin - kotlin-stdlib - ${kotlin-stdlib.verion} - - - org.jetbrains - annotations + kotlin-stdlib-jdk8 org.jetbrains.kotlin - kotlin-stdlib-common - ${kotlin-stdlib-common.version} - - - com.squareup.okhttp3 - mockwebserver - ${okhttp3.version} + kotlin-stdlib-jdk8 + ${kotlin-stdlib.version} test - - - com.squareup.okio - okio-jvm - - - org.jetbrains.kotlin - kotlin-stdlib-jdk8 - - jdiff From bde85221d1ae31b5c4e5c316e20f7aab91044492 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Wed, 20 Sep 2023 12:09:55 +0100 Subject: [PATCH 2/4] keep okhttp at 4.10.0 --- hadoop-project/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 851dfa3ae0022..790e062f470c6 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -135,7 +135,7 @@ 2.4.12 10.14.2.0 6.2.1.jre7 - 4.11.0 + 4.10.0 1.6.20 1.1 5.2.0 From ca016d7d5b8f8a7e7f1372b3a4361a1808afb077 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Thu, 28 Sep 2023 18:12:25 +0100 Subject: [PATCH 3/4] kotlin-stdlib-common dependency convergence issue --- hadoop-project/pom.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 790e062f470c6..7ccfdff473dcd 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -226,12 +226,22 @@ ${okhttp3.version} test + + org.jetbrains.kotlin + kotlin-stdlib-common + org.jetbrains.kotlin kotlin-stdlib-jdk8 + + org.jetbrains.kotlin + kotlin-stdlib-common + ${kotlin-stdlib.version} + test + org.jetbrains.kotlin kotlin-stdlib-jdk8 From 8291dd47bec68bef8cae6e5b43b19352664cb362 Mon Sep 17 00:00:00 2001 From: PJ Fanning Date: Sat, 30 Sep 2023 12:56:45 +0100 Subject: [PATCH 4/4] better to have explicit httpclient dependency --- hadoop-hdfs-project/hadoop-hdfs-client/pom.xml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml index b2740ce495efd..c19393b6a9364 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml @@ -49,6 +49,14 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd"> + + org.apache.httpcomponents + httpclient + + + org.apache.httpcomponents + 
httpcore + junit junit
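
Note on the call pattern the patches adopt: both token providers now issue a url-encoded form POST with Apache HttpClient 4.x and parse the JSON reply, instead of going through okhttp. The stand-alone sketch below illustrates that same pattern outside Hadoop; it is not part of the patch, and the class name, endpoint URL, form-field literals, timeout value and the Jackson ObjectMapper used for parsing are placeholder assumptions (the providers themselves use JsonSerialization, URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT and the HdfsClientConfigKeys constants).

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;

/**
 * Illustrative sketch only: POST an OAuth2 refresh_token grant as a
 * url-encoded form with Apache HttpClient 4.x and return the access token.
 */
public class TokenRefreshSketch {

  private static final int TIMEOUT_MS = 60_000; // placeholder timeout

  public static String refreshAccessToken(String refreshUrl,
      String refreshToken, String clientId) throws IOException {
    // Form fields, mirroring the pairs built in the patched providers.
    final List<NameValuePair> pairs = new ArrayList<>();
    pairs.add(new BasicNameValuePair("grant_type", "refresh_token"));
    pairs.add(new BasicNameValuePair("refresh_token", refreshToken));
    pairs.add(new BasicNameValuePair("client_id", clientId));

    // Timeouts are configured through RequestConfig rather than okhttp's builder.
    final RequestConfig config = RequestConfig.custom()
        .setConnectTimeout(TIMEOUT_MS)
        .setConnectionRequestTimeout(TIMEOUT_MS)
        .setSocketTimeout(TIMEOUT_MS)
        .build();

    try (CloseableHttpClient client =
        HttpClientBuilder.create().setDefaultRequestConfig(config).build()) {
      final HttpPost post = new HttpPost(refreshUrl);
      post.setEntity(new UrlEncodedFormEntity(pairs, StandardCharsets.UTF_8));
      post.setHeader(HttpHeaders.CONTENT_TYPE,
          "application/x-www-form-urlencoded; charset=utf-8");

      try (CloseableHttpResponse response = client.execute(post)) {
        final String body = EntityUtils.toString(response.getEntity());
        final int status = response.getStatusLine().getStatusCode();
        if (status != HttpStatus.SC_OK) {
          throw new IOException("Unexpected HTTP status " + status + ": " + body);
        }
        // Jackson keeps the sketch self-contained; Hadoop uses JsonSerialization.mapReader().
        Map<?, ?> json = new ObjectMapper().readValue(body, Map.class);
        return String.valueOf(json.get("access_token"));
      }
    }
  }

  public static void main(String[] args) throws IOException {
    // Placeholder endpoint and credentials; point at a real token endpoint to try it.
    System.out.println(refreshAccessToken(
        "https://example.invalid/oauth2/token", "my-refresh-token", "my-client-id"));
  }
}

Dependency-wise, this is also why the final patch adds explicit httpclient and httpcore entries to hadoop-hdfs-client: the runtime path now relies only on artifacts Hadoop already ships, while okhttp (and the kotlin-stdlib jars it pulls in) is kept solely as the test-scoped mockwebserver dependency.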