Skip to content

Commit 72e7f49

Browse files
authored
Merge branch 'apache:trunk' into YARN-11394
2 parents 7b6f1ea + a3b500d commit 72e7f49

File tree

12 files changed

+188
-46
lines changed

12 files changed

+188
-46
lines changed

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ public static synchronized Class<?> getClass(String name, Configuration conf
9292
) throws IOException {
9393
Class<?> writableClass = NAME_TO_CLASS.get(name);
9494
if (writableClass != null)
95-
return writableClass.asSubclass(Writable.class);
95+
return writableClass;
9696
try {
9797
return conf.getClassByName(name);
9898
} catch (ClassNotFoundException e) {

hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,9 @@ Hadoop in Secure Mode
2020
Introduction
2121
------------
2222

23-
This document describes how to configure authentication for Hadoop in secure mode. When Hadoop is configured to run in secure mode, each Hadoop service and each user must be authenticated by Kerberos.
23+
In its default configuration, we expect you to make sure attackers don't have access to your Hadoop cluster by restricting all network access. If you want any restrictions on who can remotely access data or submit work, you MUST secure authentication and access for your Hadoop cluster as described in this document.
24+
25+
When Hadoop is configured to run in secure mode, each Hadoop service and each user must be authenticated by Kerberos.
2426

2527
Forward and reverse host lookup for all service hosts must be configured correctly to allow services to authenticate with each other. Host lookups may be configured using either DNS or `/etc/hosts` files. Working knowledge of Kerberos and DNS is recommended before attempting to configure Hadoop services in Secure Mode.
2628

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java

Lines changed: 119 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,9 @@
2626
import org.apache.hadoop.io.SequenceFile.Metadata;
2727
import org.apache.hadoop.io.compress.CompressionCodec;
2828
import org.apache.hadoop.io.compress.DefaultCodec;
29+
import org.apache.hadoop.io.serializer.Deserializer;
30+
import org.apache.hadoop.io.serializer.Serialization;
31+
import org.apache.hadoop.io.serializer.Serializer;
2932
import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization;
3033
import org.apache.hadoop.test.GenericTestUtils;
3134
import org.apache.hadoop.util.ReflectionUtils;
@@ -756,6 +759,122 @@ public void testSequenceFileWriter() throws Exception {
756759
}
757760
}
758761

762+
@Test
763+
public void testSerializationUsingWritableNameAlias() throws IOException {
764+
Configuration config = new Configuration();
765+
config.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, SimpleSerializer.class.getName());
766+
Path path = new Path(System.getProperty("test.build.data", "."),
767+
"SerializationUsingWritableNameAlias");
768+
769+
// write with the original serializable class
770+
SequenceFile.Writer writer = SequenceFile.createWriter(
771+
config,
772+
SequenceFile.Writer.file(path),
773+
SequenceFile.Writer.keyClass(SimpleSerializable.class),
774+
SequenceFile.Writer.valueClass(SimpleSerializable.class));
775+
776+
int max = 10;
777+
try {
778+
SimpleSerializable val = new SimpleSerializable();
779+
val.setId(-1);
780+
for (int i = 0; i < max; i++) {
781+
SimpleSerializable key = new SimpleSerializable();
782+
key.setId(i);
783+
writer.append(key, val);
784+
}
785+
} finally {
786+
writer.close();
787+
}
788+
789+
// override name so it gets forced to the new serializable
790+
WritableName.setName(AnotherSimpleSerializable.class, SimpleSerializable.class.getName());
791+
792+
// read and expect our new serializable, and all the correct values read
793+
SequenceFile.Reader reader = new SequenceFile.Reader(
794+
config,
795+
SequenceFile.Reader.file(path));
796+
797+
AnotherSimpleSerializable key = new AnotherSimpleSerializable();
798+
int count = 0;
799+
while (true) {
800+
key = (AnotherSimpleSerializable) reader.next(key);
801+
if (key == null) {
802+
// make sure we exhausted all the ints we wrote
803+
assertEquals(count, max);
804+
break;
805+
}
806+
assertEquals(count++, key.getId());
807+
}
808+
}
809+
810+
public static class SimpleSerializable implements Serializable {
811+
812+
private int id;
813+
814+
public int getId() {
815+
return id;
816+
}
817+
818+
public void setId(int id) {
819+
this.id = id;
820+
}
821+
}
822+
823+
public static class AnotherSimpleSerializable extends SimpleSerializable {
824+
}
825+
826+
public static class SimpleSerializer implements Serialization<SimpleSerializable> {
827+
828+
@Override
829+
public boolean accept(Class<?> c) {
830+
return SimpleSerializable.class.isAssignableFrom(c);
831+
}
832+
833+
@Override
834+
public Serializer<SimpleSerializable> getSerializer(Class<SimpleSerializable> c) {
835+
return new Serializer<SimpleSerializable>() {
836+
private DataOutputStream out;
837+
@Override
838+
public void open(OutputStream out) throws IOException {
839+
this.out = new DataOutputStream(out);
840+
}
841+
842+
@Override
843+
public void serialize(SimpleSerializable simpleSerializable) throws IOException {
844+
out.writeInt(simpleSerializable.getId());
845+
}
846+
847+
@Override
848+
public void close() throws IOException {
849+
out.close();
850+
}
851+
};
852+
}
853+
854+
@Override
855+
public Deserializer<SimpleSerializable> getDeserializer(Class<SimpleSerializable> c) {
856+
return new Deserializer<SimpleSerializable>() {
857+
private DataInputStream dis;
858+
@Override
859+
public void open(InputStream in) throws IOException {
860+
dis = new DataInputStream(in);
861+
}
862+
863+
@Override
864+
public SimpleSerializable deserialize(SimpleSerializable simpleSerializable)
865+
throws IOException {
866+
simpleSerializable.setId(dis.readInt());
867+
return simpleSerializable;
868+
}
869+
870+
@Override
871+
public void close() throws IOException {
872+
dis.close();
873+
}
874+
};
875+
}
876+
}
877+
759878
/** For debugging and testing. */
760879
public static void main(String[] args) throws Exception {
761880
int count = 1024 * 1024;

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java

Lines changed: 49 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,14 @@
2424
import java.util.Random;
2525

2626
import org.apache.hadoop.conf.Configuration;
27+
import org.apache.hadoop.fs.CommonConfigurationKeys;
28+
import org.apache.hadoop.io.serializer.Deserializer;
29+
import org.apache.hadoop.io.serializer.Serialization;
30+
import org.apache.hadoop.io.serializer.SerializationFactory;
31+
import org.apache.hadoop.io.serializer.Serializer;
2732
import org.junit.Test;
28-
33+
import static org.junit.Assert.assertEquals;
34+
import static org.junit.Assert.assertNotNull;
2935
import static org.junit.Assert.assertTrue;
3036

3137
/** Unit tests for WritableName. */
@@ -63,6 +69,28 @@ public boolean equals(Object o) {
6369
}
6470
}
6571

72+
private static class SimpleSerializable {
73+
74+
}
75+
76+
private static class SimpleSerializer implements Serialization<SimpleSerializable> {
77+
78+
@Override
79+
public boolean accept(Class<?> c) {
80+
return c.equals(SimpleSerializable.class);
81+
}
82+
83+
@Override
84+
public Serializer<SimpleSerializable> getSerializer(Class<SimpleSerializable> c) {
85+
return null;
86+
}
87+
88+
@Override
89+
public Deserializer<SimpleSerializable> getDeserializer(Class<SimpleSerializable> c) {
90+
return null;
91+
}
92+
}
93+
6694
private static final String testName = "mystring";
6795

6896
@Test
@@ -95,7 +123,27 @@ public void testAddName() throws Exception {
95123
// check original name still works
96124
test = WritableName.getClass(testName, conf);
97125
assertTrue(test.equals(SimpleWritable.class));
126+
}
127+
128+
@Test
129+
public void testAddNameSerializable() throws Exception {
130+
Configuration conf = new Configuration();
131+
conf.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, SimpleSerializer.class.getName());
132+
SerializationFactory serializationFactory =
133+
new SerializationFactory(conf);
98134

135+
String altName = testName + ".alt";
136+
137+
WritableName.addName(SimpleSerializable.class, altName);
138+
139+
Class<?> test = WritableName.getClass(altName, conf);
140+
assertEquals(test, SimpleSerializable.class);
141+
assertNotNull(serializationFactory.getSerialization(test));
142+
143+
// check original name still works
144+
test = WritableName.getClass(SimpleSerializable.class.getName(), conf);
145+
assertEquals(test, SimpleSerializable.class);
146+
assertNotNull(serializationFactory.getSerialization(test));
99147
}
100148

101149
@Test

hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/fs/TestXAttr.java

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,8 @@
1919
package org.apache.hadoop.fs;
2020

2121
import static org.junit.Assert.assertEquals;
22-
import static org.junit.Assert.assertFalse;
2322
import static org.junit.Assert.assertNotSame;
23+
import static org.junit.Assert.assertNotEquals;
2424

2525
import org.junit.BeforeClass;
2626
import org.junit.Test;
@@ -77,18 +77,18 @@ public void testXAttrEquals() {
7777
assertEquals(XATTR3, XATTR3);
7878
assertEquals(XATTR4, XATTR4);
7979
assertEquals(XATTR5, XATTR5);
80-
assertFalse(XATTR1.equals(XATTR2));
81-
assertFalse(XATTR2.equals(XATTR3));
82-
assertFalse(XATTR3.equals(XATTR4));
83-
assertFalse(XATTR4.equals(XATTR5));
80+
assertNotEquals(XATTR1, XATTR2);
81+
assertNotEquals(XATTR2, XATTR3);
82+
assertNotEquals(XATTR3, XATTR4);
83+
assertNotEquals(XATTR4, XATTR5);
8484
}
8585

8686
@Test
8787
public void testXAttrHashCode() {
8888
assertEquals(XATTR.hashCode(), XATTR1.hashCode());
89-
assertFalse(XATTR1.hashCode() == XATTR2.hashCode());
90-
assertFalse(XATTR2.hashCode() == XATTR3.hashCode());
91-
assertFalse(XATTR3.hashCode() == XATTR4.hashCode());
92-
assertFalse(XATTR4.hashCode() == XATTR5.hashCode());
89+
assertNotEquals(XATTR1.hashCode(), XATTR2.hashCode());
90+
assertNotEquals(XATTR2.hashCode(), XATTR3.hashCode());
91+
assertNotEquals(XATTR3.hashCode(), XATTR4.hashCode());
92+
assertNotEquals(XATTR4.hashCode(), XATTR5.hashCode());
9393
}
9494
}

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1557,7 +1557,8 @@ public ReplicaHandler recoverAppend(
15571557
@Override // FsDatasetSpi
15581558
public Replica recoverClose(ExtendedBlock b, long newGS,
15591559
long expectedBlockLen) throws IOException {
1560-
LOG.info("Recover failed close " + b);
1560+
LOG.info("Recover failed close {}, new GS:{}, expectedBlockLen:{}",
1561+
b, newGS, expectedBlockLen);
15611562
while (true) {
15621563
try {
15631564
try (AutoCloseableLock lock = lockManager.writeLock(LockLevel.VOLUME,

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5939,6 +5939,8 @@ LocatedBlock bumpBlockGenerationStamp(ExtendedBlock block,
59395939
}
59405940
// Ensure we record the new generation stamp
59415941
getEditLog().logSync();
5942+
LOG.info("bumpBlockGenerationStamp({}, client={}) success",
5943+
locatedBlock.getBlock(), clientName);
59425944
return locatedBlock;
59435945
}
59445946

hadoop-tools/hadoop-distcp/src/site/markdown/DistCp.md.vm

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -653,7 +653,7 @@ checksums if the checksum algorithm between the two stores is different.
653653
* `distcp.update.modification.time` would only be used if either of the two
654654
stores don't have checksum validation resulting in incompatible checksum
655655
comparison between the two. Even if the property is set to true, it won't
656-
be used if their is valid checksum comparison between the two stores.
656+
be used if there is valid checksum comparison between the two stores.
657657

658658
To turn off the modification time check, set this in your core-site.xml
659659
```xml

hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -899,8 +899,7 @@ public void testDistCpUpdateCheckFileSkip() throws Exception {
899899
// Creating a source file with certain dataset.
900900
byte[] sourceBlock = dataset(10, 'a', 'z');
901901

902-
// Write the dataset and as well create the target path.
903-
ContractTestUtils.createFile(localFS, dest, true, sourceBlock);
902+
// Write the dataset.
904903
ContractTestUtils
905904
.writeDataset(remoteFS, source, sourceBlock, sourceBlock.length,
906905
1024, true);

hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@
3030
<!-- Basedir needed for generating FindBugs warnings using parent pom -->
3131
<yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>
3232
<container-executor.conf.dir>../etc/hadoop</container-executor.conf.dir>
33-
<extra.libhadoop.rpath>../lib/native</extra.libhadoop.rpath>
3433
<container-executor.additional_cflags></container-executor.additional_cflags>
3534
</properties>
3635

@@ -224,7 +223,6 @@
224223
<source>${basedir}/src</source>
225224
<vars>
226225
<HADOOP_CONF_DIR>${container-executor.conf.dir}</HADOOP_CONF_DIR>
227-
<EXTRA_LIBHADOOP_RPATH>${extra.libhadoop.rpath}</EXTRA_LIBHADOOP_RPATH>
228226
<JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
229227
</vars>
230228
<env>

0 commit comments

Comments
 (0)