Skip to content

Commit 62010fd

Browse files
committed
More cleanup (renaming variables, updating comments, etc.)
1 parent ad2beff commit 62010fd

File tree

3 files changed

+8
-9
lines changed

3 files changed

+8
-9
lines changed

core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -240,7 +240,7 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter {
240240
// Verify file contains exactly the two events logged
241241
val eventLoggingInfo = EventLoggingListener.parseLoggingInfo(eventLogger.logDir, fileSystem)
242242
assert(eventLoggingInfo.logPaths.size > 0)
243-
val lines = getLines(eventLoggingInfo.logPaths.head, eventLoggingInfo.compressionCodec)
243+
val lines = readFileLines(eventLoggingInfo.logPaths.head, eventLoggingInfo.compressionCodec)
244244
assert(lines.size === 2)
245245
assert(lines(0).contains("SparkListenerApplicationStart"))
246246
assert(lines(1).contains("SparkListenerApplicationEnd"))
@@ -281,7 +281,7 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter {
281281
private def assertEventsExist(eventLogger: EventLoggingListener, events: Seq[String]) {
282282
val eventLoggingInfo = EventLoggingListener.parseLoggingInfo(eventLogger.logDir, fileSystem)
283283
assert(eventLoggingInfo.logPaths.size > 0)
284-
val lines = getLines(eventLoggingInfo.logPaths.head, eventLoggingInfo.compressionCodec)
284+
val lines = readFileLines(eventLoggingInfo.logPaths.head, eventLoggingInfo.compressionCodec)
285285
val eventSet = mutable.Set(events: _*)
286286
lines.foreach { line =>
287287
eventSet.foreach { event =>
@@ -301,7 +301,9 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter {
301301
* Read all lines from the file specified by the given path.
302302
* If a compression codec is specified, use it to read the file.
303303
*/
304-
private def getLines(filePath: Path, compressionCodec: Option[CompressionCodec]): Seq[String] = {
304+
private def readFileLines(
305+
filePath: Path,
306+
compressionCodec: Option[CompressionCodec]): Seq[String] = {
305307
val fstream = fileSystem.open(filePath)
306308
val cstream =
307309
compressionCodec.map { codec =>

core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -119,16 +119,16 @@ class ReplayListenerSuite extends FunSuite with BeforeAndAfter {
119119
val codec = codecName.map(getCompressionCodec)
120120
val applications = fileSystem.listStatus(new Path(logDir))
121121
assert(applications != null && applications.size > 0)
122-
val eventLogDir =
123-
applications.filter(_.getPath.getName.startsWith("test-replay")).sortBy(_.getAccessTime).last
122+
val eventLogDir = applications.sortBy(_.getAccessTime).last
124123
assert(eventLogDir.isDir)
125124
val logFiles = fileSystem.listStatus(eventLogDir.getPath)
126125
assert(logFiles != null && logFiles.size > 0)
127126
val logFile = logFiles.find(_.getPath.getName.startsWith("EVENT_LOG_"))
128127
assert(logFile.isDefined)
128+
val logFilePath = logFile.get.getPath
129129

130130
// Replay events
131-
val replayer = new ReplayListenerBus(Seq(logFile.get.getPath), fileSystem, codec)
131+
val replayer = new ReplayListenerBus(Seq(logFilePath), fileSystem, codec)
132132
val eventMonster = new EventMonster(conf)
133133
replayer.addListener(eventMonster)
134134
replayer.replay()

core/src/test/scala/org/apache/spark/util/FileLoggerSuite.scala

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -123,9 +123,6 @@ class FileLoggerSuite extends FunSuite with BeforeAndAfter {
123123
} else {
124124
new FileLogger(logDir, conf)
125125
}
126-
assert(fileSystem.exists(logDirPath))
127-
assert(fileSystem.getFileStatus(logDirPath).isDir)
128-
assert(fileSystem.listStatus(logDirPath).size === 0)
129126

130127
logger.newFile("Jean_Valjean")
131128
logger.logLine("Who am I?")

0 commit comments

Comments
 (0)