Pull request (Closed), changes from all commits: migrating Hadoop MapReduce test code from JUnit 4 to JUnit 5 (Jupiter).
pom.xml:
@@ -100,6 +100,39 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
+<dependency>
+<groupId>org.junit.jupiter</groupId>
+<artifactId>junit-jupiter-api</artifactId>
+<scope>test</scope>
+</dependency>
+<dependency>
+<groupId>org.junit.jupiter</groupId>
+<artifactId>junit-jupiter-engine</artifactId>
+<scope>test</scope>
+</dependency>
+<dependency>
+<groupId>org.junit.jupiter</groupId>
+<artifactId>junit-jupiter-params</artifactId>
+<scope>test</scope>
+</dependency>
+<dependency>
+<groupId>org.mockito</groupId>
+<artifactId>mockito-junit-jupiter</artifactId>
+<version>4.11.0</version>
Review comment on the mockito-junit-jupiter version (steveloughran, Contributor):

> Can we have these versioned imports pulled up into the hadoop-project pom, for
> (a) version maintenance and (b) ease of using an IDE to find where things are
> used? This is particularly important for Mockito, as it is so brittle.

Reply (Contributor):

> Hi @steveloughran,
> Fair point.
>
> @susheel-gupta Could you please open a follow-up JIRA for this?
>
> Thanks.

+<scope>test</scope>
+</dependency>
+<dependency>
+<groupId>uk.org.webcompere</groupId>
+<artifactId>system-stubs-core</artifactId>
+<version>1.1.0</version>
+<scope>test</scope>
+</dependency>
+<dependency>
+<groupId>uk.org.webcompere</groupId>
+<artifactId>system-stubs-jupiter</artifactId>
+<version>1.1.0</version>
+<scope>test</scope>
+</dependency>
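The two system-stubs artifacts just added work as a pair: system-stubs-core provides the stubbing classes and system-stubs-jupiter registers them with the JUnit 5 lifecycle. A minimal sketch of the intended usage (the test class and variable values here are hypothetical, not taken from this PR):

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.ExtendWith;
    import uk.org.webcompere.systemstubs.environment.EnvironmentVariables;
    import uk.org.webcompere.systemstubs.jupiter.SystemStub;
    import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension;

    @ExtendWith(SystemStubsExtension.class)
    class EnvironmentDependentTest {

      // The extension activates the stub before each test and restores the
      // real environment afterwards, replacing JUnit 4-era tricks for
      // mutating environment variables from test code.
      @SystemStub
      private EnvironmentVariables environment = new EnvironmentVariables();

      @Test
      void readsInjectedVariable() {
        environment.set("HADOOP_HOME", "/opt/hadoop");
        assertEquals("/opt/hadoop", System.getenv("HADOOP_HOME"));
      }
    }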
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
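A minimal sketch of what the reviewer is suggesting above, assuming hadoop-project/pom.xml is the shared parent POM (illustrative only, not part of this PR): pin the version once under dependencyManagement, and let consuming modules drop their <version> element.

    <!-- hadoop-project/pom.xml (sketch): declare the version once. -->
    <dependencyManagement>
      <dependencies>
        <dependency>
          <groupId>org.mockito</groupId>
          <artifactId>mockito-junit-jupiter</artifactId>
          <version>4.11.0</version>
        </dependency>
      </dependencies>
    </dependencyManagement>

    <!-- module pom (sketch): no <version>; it is inherited from the parent. -->
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-junit-jupiter</artifactId>
      <scope>test</scope>
    </dependency>

This keeps upgrades of brittle test dependencies such as Mockito in one place, which is the maintenance concern raised in the review thread.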
TestLocalContainerLauncher.java:
@@ -53,10 +53,11 @@
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.event.Event;
import org.apache.hadoop.yarn.event.EventHandler;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
@@ -75,7 +76,7 @@ private static void delete(File dir) throws IOException {
fs.delete(p, true);
}

-@BeforeClass
+@BeforeAll
public static void setupTestDirs() throws IOException {
testWorkDir = new File("target",
TestLocalContainerLauncher.class.getCanonicalName());
@@ -89,15 +90,16 @@ public static void setupTestDirs() throws IOException {
}
}

-@AfterClass
+@AfterAll
public static void cleanupTestDirs() throws IOException {
if (testWorkDir != null) {
delete(testWorkDir);
}
}

@SuppressWarnings("rawtypes")
-@Test(timeout=10000)
+@Test
+@Timeout(10000)
public void testKillJob() throws Exception {
JobConf conf = new JobConf();
AppContext context = mock(AppContext.class);
@@ -198,8 +200,8 @@ public void testRenameMapOutputForReduce() throws Exception {
final Path mapOut = mrOutputFiles.getOutputFileForWrite(1);
conf.set(MRConfig.LOCAL_DIR, localDirs[1].toString());
final Path mapOutIdx = mrOutputFiles.getOutputIndexFileForWrite(1);
-Assert.assertNotEquals("Paths must be different!",
-mapOut.getParent(), mapOutIdx.getParent());
+Assertions.assertNotEquals(mapOut.getParent(), mapOutIdx.getParent(),
+"Paths must be different!");

// make both dirs part of LOCAL_DIR
conf.setStrings(MRConfig.LOCAL_DIR, localDirs);
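One caveat that applies to every @Test(timeout=...) conversion in this PR: JUnit 4's timeout attribute is in milliseconds, while JUnit 5's @Timeout defaults to seconds, so carrying the number over unchanged loosens the limit by a factor of 1000. A sketch of the semantics-preserving form (hypothetical test class, for illustration only):

    import java.util.concurrent.TimeUnit;

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.Timeout;

    class TimeoutMigrationSketch {

      // JUnit 4:  @Test(timeout = 10000)  means 10000 ms, i.e. 10 seconds.
      // JUnit 5:  @Timeout(10000)         means 10000 s (default unit is SECONDS).

      @Test
      @Timeout(value = 10000, unit = TimeUnit.MILLISECONDS) // same 10-second cap
      void finishesWithinTenSeconds() throws InterruptedException {
        Thread.sleep(100); // stand-in for real work
      }
    }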
TestTaskAttemptFinishingMonitor.java:
@@ -37,8 +37,8 @@
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.SystemClock;

-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@@ -87,7 +87,7 @@ public void testFinshingAttemptTimeout()
}
taskAttemptFinishingMonitor.stop();

assertTrue("Finishing attempt didn't time out.", eventHandler.timedOut);
assertTrue(eventHandler.timedOut, "Finishing attempt didn't time out.");

}

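The assertion change above illustrates the general argument-order flip in this migration: JUnit 4's Assert takes the failure message first, while Jupiter's Assertions take it last. A minimal sketch (hypothetical test class; the Supplier variant builds the message only on failure):

    import static org.junit.jupiter.api.Assertions.assertTrue;

    import org.junit.jupiter.api.Test;

    class AssertionOrderSketch {

      @Test
      void messageMovesToTheLastArgument() {
        boolean timedOut = true; // stand-in for eventHandler.timedOut

        // JUnit 4:  assertTrue("Finishing attempt didn't time out.", timedOut);
        // JUnit 5:  the condition comes first, the message last.
        assertTrue(timedOut, "Finishing attempt didn't time out.");

        // Jupiter also accepts a Supplier, deferring message construction.
        assertTrue(timedOut, () -> "Finishing attempt didn't time out.");
      }
    }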
TestTaskAttemptListenerImpl.java:
@@ -19,19 +19,18 @@

import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

-import org.junit.After;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -67,14 +66,15 @@
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.util.ControlledClock;
import org.apache.hadoop.yarn.util.SystemClock;
+import org.mockito.junit.jupiter.MockitoExtension;

import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.eq;
@@ -87,7 +87,7 @@
/**
* Tests the behavior of TaskAttemptListenerImpl.
*/
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
public class TestTaskAttemptListenerImpl {
private static final String ATTEMPT1_ID =
"attempt_123456789012_0001_m_000001_0";
@@ -172,15 +172,16 @@ protected void stopRpcServer() {
}
}

-@After
+@AfterEach
public void after() throws IOException {
if (listener != null) {
listener.close();
listener = null;
}
}

-@Test (timeout=5000)
+@Test
+@Timeout(5000)
public void testGetTask() throws IOException {
configureMocks();
startListener(false);
@@ -189,12 +190,12 @@ public void testGetTask() throws IOException {
//The JVM ID has not been registered yet so we should kill it.
JvmContext context = new JvmContext();

-context.jvmId = id;
+context.jvmId = id;
JvmTask result = listener.getTask(context);
assertNotNull(result);
assertTrue(result.shouldDie);

-// Verify ask after registration but before launch.
+// Verify ask after registration but before launch.
// Don't kill, should be null.
//Now put a task with the ID
listener.registerPendingTask(task, wid);
@@ -238,7 +239,8 @@ public void testGetTask() throws IOException {

}

-@Test (timeout=5000)
+@Test
+@Timeout(5000)
public void testJVMId() {

JVMId jvmid = new JVMId("test", 1, true, 2);
@@ -247,7 +249,8 @@ public void testJVMId() {
assertEquals(0, jvmid.compareTo(jvmid1));
}

-@Test (timeout=10000)
+@Test
+@Timeout(10000)
public void testGetMapCompletionEvents() throws IOException {
TaskAttemptCompletionEvent[] empty = {};
TaskAttemptCompletionEvent[] taskEvents = {
@@ -257,12 +260,6 @@ public void testGetMapCompletionEvents() throws IOException {
createTce(3, false, TaskAttemptCompletionEventStatus.FAILED) };
TaskAttemptCompletionEvent[] mapEvents = { taskEvents[0], taskEvents[2] };
Job mockJob = mock(Job.class);
-when(mockJob.getTaskAttemptCompletionEvents(0, 100))
-.thenReturn(taskEvents);
-when(mockJob.getTaskAttemptCompletionEvents(0, 2))
-.thenReturn(Arrays.copyOfRange(taskEvents, 0, 2));
-when(mockJob.getTaskAttemptCompletionEvents(2, 100))
-.thenReturn(Arrays.copyOfRange(taskEvents, 2, 4));
when(mockJob.getMapAttemptCompletionEvents(0, 100)).thenReturn(
TypeConverter.fromYarn(mapEvents));
when(mockJob.getMapAttemptCompletionEvents(0, 2)).thenReturn(
@@ -312,7 +309,8 @@ private static TaskAttemptCompletionEvent createTce(int eventId,
return tce;
}

-@Test (timeout=10000)
+@Test
+@Timeout(10000)
public void testCommitWindow() throws IOException {
SystemClock clock = SystemClock.getInstance();

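The switch from @RunWith(MockitoJUnitRunner.class) to @ExtendWith(MockitoExtension.class) is the likely reason the unused getTaskAttemptCompletionEvents stubbings were deleted above: MockitoExtension defaults to STRICT_STUBS, which fails a test with UnnecessaryStubbingException if a when(...) is never exercised. A minimal sketch of the pattern (the CompletionEvents interface is hypothetical, standing in for the mocked Job):

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.mockito.Mockito.when;

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.ExtendWith;
    import org.mockito.Mock;
    import org.mockito.junit.jupiter.MockitoExtension;

    @ExtendWith(MockitoExtension.class)
    class StrictStubsSketch {

      // Hypothetical collaborator, for illustration only.
      interface CompletionEvents {
        String eventsFrom(int offset);
      }

      @Mock
      private CompletionEvents events;

      @Test
      void everyStubbingMustBeUsed() {
        // Under the default STRICT_STUBS mode, a stubbing the test never
        // calls fails the run with UnnecessaryStubbingException, so stale
        // stubbings have to be deleted rather than left in place.
        when(events.eventsFrom(0)).thenReturn("map events");
        assertEquals("map events", events.eventsFrom(0));
      }
    }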
TestYarnChild.java:
@@ -21,8 +21,8 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ClusterStorageCapacityExceededException;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;

import static org.mockito.Mockito.*;

@@ -36,7 +36,7 @@ public class TestYarnChild {
final static private String KILL_LIMIT_EXCEED_CONF_NAME =
"mapreduce.job.dfs.storage.capacity.kill-limit-exceed";

-@Before
+@BeforeEach
public void setUp() throws Exception {
task = mock(Task.class);
umbilical = mock(TaskUmbilicalProtocol.class);
TestEvents.java:
@@ -19,8 +19,8 @@
package org.apache.hadoop.mapreduce.jobhistory;

import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -40,7 +40,8 @@
import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;

public class TestEvents {

@@ -50,9 +51,9 @@ public class TestEvents {
*
* @throws Exception
*/
-@Test(timeout = 10000)
+@Test
+@Timeout(10000)
public void testTaskAttemptFinishedEvent() throws Exception {

JobID jid = new JobID("001", 1);
TaskID tid = new TaskID(jid, TaskType.REDUCE, 2);
TaskAttemptID taskAttemptId = new TaskAttemptID(tid, 3);
@@ -79,17 +80,18 @@
* @throws Exception
*/

-@Test(timeout = 10000)
+@Test
+@Timeout(10000)
public void testJobPriorityChange() throws Exception {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
JobPriority.LOW);
assertThat(test.getJobId().toString()).isEqualTo(jid.toString());
assertThat(test.getPriority()).isEqualTo(JobPriority.LOW);

}

-@Test(timeout = 10000)
+@Test
+@Timeout(10000)
public void testJobQueueChange() throws Exception {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobQueueChangeEvent test = new JobQueueChangeEvent(jid,
@@ -103,24 +105,24 @@ public void testJobQueueChange() throws Exception {
*
* @throws Exception
*/
-@Test(timeout = 10000)
+@Test
+@Timeout(10000)
public void testTaskUpdated() throws Exception {
JobID jid = new JobID("001", 1);
TaskID tid = new TaskID(jid, TaskType.REDUCE, 2);
TaskUpdatedEvent test = new TaskUpdatedEvent(tid, 1234L);
assertThat(test.getTaskId().toString()).isEqualTo(tid.toString());
assertThat(test.getFinishTime()).isEqualTo(1234L);

}

/*
* test EventReader EventReader should read the list of events and return
* instance of HistoryEvent Different HistoryEvent should have a different
* datum.
*/
-@Test(timeout = 10000)
+@Test
+@Timeout(10000)
public void testEvents() throws Exception {

EventReader reader = new EventReader(new DataInputStream(
new ByteArrayInputStream(getEvents())));
HistoryEvent e = reader.getNextEvent();