
Commit 71ddeac

Fix tests.

1 parent 2ae39eb

5 files changed: 13 additions, 47 deletions

core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
Lines changed: 3 additions & 1 deletion

@@ -150,7 +150,9 @@ object StorageLevel {
   val MEMORY_AND_DISK_2 = new StorageLevel(true, true, false, true, 2)
   val MEMORY_AND_DISK_SER = new StorageLevel(true, true, false, false)
   val MEMORY_AND_DISK_SER_2 = new StorageLevel(true, true, false, false, 2)
-  val OFF_HEAP = new StorageLevel(false, false, true, false)
+
+  // Redirect to MEMORY_ONLY_SER for now.
+  val OFF_HEAP = MEMORY_ONLY_SER

   /**
    * :: DeveloperApi ::
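Note on the StorageLevel change: OFF_HEAP is now bound to the existing MEMORY_ONLY_SER instance instead of being constructed with its own off-heap flag, so the two constants are the same object. A minimal sketch of what callers can rely on after this commit (the wrapper object is illustrative only):

    import org.apache.spark.storage.StorageLevel

    object OffHeapAliasCheck {
      def main(args: Array[String]): Unit = {
        // OFF_HEAP now references the MEMORY_ONLY_SER instance directly.
        assert(StorageLevel.OFF_HEAP eq StorageLevel.MEMORY_ONLY_SER)
        // So an OFF_HEAP persist request means: in memory, serialized,
        // no disk, single replica, until a real off-heap store returns.
        assert(StorageLevel.OFF_HEAP.useMemory && !StorageLevel.OFF_HEAP.useDisk)
        assert(!StorageLevel.OFF_HEAP.deserialized)
        assert(StorageLevel.OFF_HEAP.replication == 1)
      }
    }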

core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
Lines changed: 0 additions & 2 deletions

@@ -54,7 +54,6 @@ private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {
     "Cached Partitions",
     "Fraction Cached",
     "Size in Memory",
-    "Size in ExternalBlockStore",
     "Size on Disk")

   /** Render an HTML row representing an RDD */
@@ -103,7 +102,6 @@ private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {
     "Executor ID",
     "Address",
     "Total Size in Memory",
-    "Total Size in ExternalBlockStore",
     "Total Size on Disk",
     "Stream Blocks")
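Both storage-page tables lose their ExternalBlockStore column here, which is why every expected row in StoragePageSuite below shrinks by one cell. A simplified sketch of the invariant at play (a stand-in, not Spark's actual table helper): each rendered row must carry exactly one cell per header.

    import scala.xml.Elem

    // Stand-in for a UI listing-table helper: dropping a header forces the
    // matching cell out of every data row, or the arity check fails.
    def renderTable(headers: Seq[String], rows: Seq[Seq[String]]): Elem = {
      require(rows.forall(_.size == headers.size), "row arity must match headers")
      <table>
        <tr>{headers.map(h => <th>{h}</th>)}</tr>
        {rows.map(r => <tr>{r.map(c => <td>{c}</td>)}</tr>)}
      </table>
    }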

core/src/test/scala/org/apache/spark/rdd/LocalCheckpointSuite.scala
Lines changed: 0 additions & 6 deletions

@@ -17,8 +17,6 @@

 package org.apache.spark.rdd

-import org.mockito.Mockito.spy
-
 import org.apache.spark.{LocalSparkContext, SparkContext, SparkException, SparkFunSuite}
 import org.apache.spark.storage.{RDDBlockId, StorageLevel}
@@ -46,10 +44,6 @@ class LocalCheckpointSuite extends SparkFunSuite with LocalSparkContext {
     assert(transform(StorageLevel.MEMORY_AND_DISK_SER) === StorageLevel.MEMORY_AND_DISK_SER)
     assert(transform(StorageLevel.MEMORY_AND_DISK_2) === StorageLevel.MEMORY_AND_DISK_2)
     assert(transform(StorageLevel.MEMORY_AND_DISK_SER_2) === StorageLevel.MEMORY_AND_DISK_SER_2)
-    // Off-heap is not supported and Spark should fail fast
-    intercept[SparkException] {
-      transform(StorageLevel.OFF_HEAP)
-    }
   }

   test("basic lineage truncation") {

core/src/test/scala/org/apache/spark/ui/storage/StoragePageSuite.scala
Lines changed: 6 additions & 31 deletions

@@ -17,8 +17,6 @@

 package org.apache.spark.ui.storage

-import scala.xml.Utility
-
 import org.mockito.Mockito._

 import org.apache.spark.SparkFunSuite
@@ -64,26 +62,24 @@ class StoragePageSuite extends SparkFunSuite {
       "Cached Partitions",
       "Fraction Cached",
       "Size in Memory",
-      "Size in ExternalBlockStore",
       "Size on Disk")
     assert((xmlNodes \\ "th").map(_.text) === headers)

     assert((xmlNodes \\ "tr").size === 3)
     assert(((xmlNodes \\ "tr")(0) \\ "td").map(_.text.trim) ===
-      Seq("rdd1", "Memory Deserialized 1x Replicated", "10", "100%", "100.0 B", "0.0 B", "0.0 B"))
+      Seq("rdd1", "Memory Deserialized 1x Replicated", "10", "100%", "100.0 B", "0.0 B"))
     // Check the url
     assert(((xmlNodes \\ "tr")(0) \\ "td" \ "a")(0).attribute("href").map(_.text) ===
       Some("http://localhost:4040/storage/rdd?id=1"))

     assert(((xmlNodes \\ "tr")(1) \\ "td").map(_.text.trim) ===
-      Seq("rdd2", "Disk Serialized 1x Replicated", "5", "50%", "0.0 B", "0.0 B", "200.0 B"))
+      Seq("rdd2", "Disk Serialized 1x Replicated", "5", "50%", "0.0 B", "200.0 B"))
     // Check the url
     assert(((xmlNodes \\ "tr")(1) \\ "td" \ "a")(0).attribute("href").map(_.text) ===
       Some("http://localhost:4040/storage/rdd?id=2"))

     assert(((xmlNodes \\ "tr")(2) \\ "td").map(_.text.trim) ===
-      Seq("rdd3", "Disk Memory Serialized 1x Replicated", "10", "100%", "400.0 B", "0.0 B",
-        "500.0 B"))
+      Seq("rdd3", "Disk Memory Serialized 1x Replicated", "10", "100%", "400.0 B", "500.0 B"))
     // Check the url
     assert(((xmlNodes \\ "tr")(2) \\ "td" \ "a")(0).attribute("href").map(_.text) ===
       Some("http://localhost:4040/storage/rdd?id=3"))
@@ -115,14 +111,6 @@
       memSize = 0,
       diskSize = 100)
     assert(("Disk", 100) === storagePage.streamBlockStorageLevelDescriptionAndSize(diskBlock))
-
-    val externalBlock = BlockUIData(StreamBlockId(0, 0),
-      "localhost:1111",
-      StorageLevel.OFF_HEAP,
-      memSize = 0,
-      diskSize = 0)
-    assert(("External", 100) ===
-      storagePage.streamBlockStorageLevelDescriptionAndSize(externalBlock))
   }

   test("receiverBlockTables") {
@@ -146,11 +134,6 @@
        StorageLevel.MEMORY_ONLY,
        memSize = 100,
        diskSize = 0),
-      BlockUIData(StreamBlockId(2, 2),
-        "localhost:10001",
-        StorageLevel.OFF_HEAP,
-        memSize = 0,
-        diskSize = 0),
      BlockUIData(StreamBlockId(1, 1),
        "localhost:10001",
        StorageLevel.MEMORY_ONLY_SER,
@@ -165,16 +148,15 @@
       "Executor ID",
       "Address",
       "Total Size in Memory",
-      "Total Size in ExternalBlockStore",
       "Total Size on Disk",
       "Stream Blocks")
     assert((executorTable \\ "th").map(_.text) === executorHeaders)

     assert((executorTable \\ "tr").size === 2)
     assert(((executorTable \\ "tr")(0) \\ "td").map(_.text.trim) ===
-      Seq("0", "localhost:10000", "100.0 B", "0.0 B", "100.0 B", "2"))
+      Seq("0", "localhost:10000", "100.0 B", "100.0 B", "2"))
     assert(((executorTable \\ "tr")(1) \\ "td").map(_.text.trim) ===
-      Seq("1", "localhost:10001", "200.0 B", "200.0 B", "0.0 B", "3"))
+      Seq("1", "localhost:10001", "200.0 B", "0.0 B", "2"))

     val blockTable = (xmlNodes \\ "table")(1)
     val blockHeaders = Seq(
@@ -185,7 +167,7 @@
       "Size")
     assert((blockTable \\ "th").map(_.text) === blockHeaders)

-    assert((blockTable \\ "tr").size === 5)
+    assert((blockTable \\ "tr").size === 4)
     assert(((blockTable \\ "tr")(0) \\ "td").map(_.text.trim) ===
       Seq("input-0-0", "2", "localhost:10000", "Memory", "100.0 B"))
     // Check "rowspan=2" for the first 2 columns
@@ -203,17 +185,10 @@

     assert(((blockTable \\ "tr")(3) \\ "td").map(_.text.trim) ===
       Seq("localhost:10001", "Memory Serialized", "100.0 B"))
-
-    assert(((blockTable \\ "tr")(4) \\ "td").map(_.text.trim) ===
-      Seq("input-2-2", "1", "localhost:10001", "External", "200.0 B"))
-    // Check "rowspan=1" for the first 2 columns
-    assert(((blockTable \\ "tr")(4) \\ "td")(0).attribute("rowspan").map(_.text) === Some("1"))
-    assert(((blockTable \\ "tr")(4) \\ "td")(1).attribute("rowspan").map(_.text) === Some("1"))
   }

   test("empty receiverBlockTables") {
     assert(storagePage.receiverBlockTables(Seq.empty).isEmpty)
-
     val executor0 = ExecutorStreamBlockStatus("0", "localhost:10000", Seq.empty)
     val executor1 = ExecutorStreamBlockStatus("1", "localhost:10001", Seq.empty)
     assert(storagePage.receiverBlockTables(Seq(executor0, executor1)).isEmpty)
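With the "External" branch gone, the surviving assertions pin streamBlockStorageLevelDescriptionAndSize down to a two-way split. A sketch of the mapping they imply (inferred from the expected values above, not the exact Spark implementation):

    import org.apache.spark.storage.StorageLevel

    // Inferred: memory-backed blocks report "Memory" or "Memory Serialized"
    // with their in-memory size; anything else falls through to "Disk".
    def describe(level: StorageLevel, memSize: Long, diskSize: Long): (String, Long) =
      if (level.useMemory) {
        (if (level.deserialized) "Memory" else "Memory Serialized", memSize)
      } else {
        ("Disk", diskSize)
      }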

core/src/test/scala/org/apache/spark/ui/storage/StorageTabSuite.scala
Lines changed: 4 additions & 7 deletions

@@ -133,14 +133,12 @@ class StorageTabSuite extends SparkFunSuite with BeforeAndAfter {
       (RDDBlockId(1, 20), BlockStatus(memAndDisk, 0L, 240L))
     ))
     bus.postToAll(SparkListenerTaskEnd(1, 0, "obliteration", Success, taskInfo, metrics1))
-    assert(storageListener._rddInfoMap(0).memSize === 800L)
+    assert(storageListener._rddInfoMap(0).memSize === 400L)
     assert(storageListener._rddInfoMap(0).diskSize === 400L)
-    assert(storageListener._rddInfoMap(0).externalBlockStoreSize === 200L)
-    assert(storageListener._rddInfoMap(0).numCachedPartitions === 3)
+    assert(storageListener._rddInfoMap(0).numCachedPartitions === 2)
     assert(storageListener._rddInfoMap(0).isCached)
     assert(storageListener._rddInfoMap(1).memSize === 0L)
     assert(storageListener._rddInfoMap(1).diskSize === 240L)
-    assert(storageListener._rddInfoMap(1).externalBlockStoreSize === 0L)
     assert(storageListener._rddInfoMap(1).numCachedPartitions === 1)
     assert(storageListener._rddInfoMap(1).isCached)
     assert(!storageListener._rddInfoMap(2).isCached)
@@ -155,10 +153,9 @@
       (RDDBlockId(4, 80), BlockStatus(none, 0L, 0L)) // doesn't actually exist
     ))
     bus.postToAll(SparkListenerTaskEnd(2, 0, "obliteration", Success, taskInfo, metrics2))
-    assert(storageListener._rddInfoMap(0).memSize === 400L)
+    assert(storageListener._rddInfoMap(0).memSize === 0L)
     assert(storageListener._rddInfoMap(0).diskSize === 400L)
-    assert(storageListener._rddInfoMap(0).externalBlockStoreSize === 200L)
-    assert(storageListener._rddInfoMap(0).numCachedPartitions === 2)
+    assert(storageListener._rddInfoMap(0).numCachedPartitions === 1)
     assert(storageListener._rddInfoMap(0).isCached)
     assert(!storageListener._rddInfoMap(1).isCached)
     assert(storageListener._rddInfoMap(2).numCachedPartitions === 0)
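The revised totals are consistent with the listener no longer tracking an external block store: a block that was cached only off-heap now carries zero memory and disk size, so it drops out of both the size sums and the cached-partition count. A simplified sketch of that roll-up (hypothetical names; the real listener maintains this state incrementally as events arrive):

    // A block's status after a task; off-heap-only blocks now report
    // zero for both sizes and therefore no longer count as cached.
    final case class BlockStats(memSize: Long, diskSize: Long) {
      def isCached: Boolean = memSize > 0 || diskSize > 0
    }

    def rollUp(blocks: Seq[BlockStats]): (Long, Long, Int) = {
      val cached = blocks.filter(_.isCached)
      (cached.map(_.memSize).sum, cached.map(_.diskSize).sum, cached.size)
    }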
