import edu.berkeley.cs.rise.opaque.benchmark.TPCH
/** TPC-H benchmark queries run against both vanilla Spark and Opaque.
  *
  * Each test executes one of the 22 TPC-H queries at the configured scale
  * factor and compares the Opaque result with the plain-Spark result.
  * Queries whose output ordering is not deterministic are compared as sets
  * via `.collect.toSet`; queries not yet supported by Opaque are registered
  * with `ignore` so they are skipped but remain visible in the suite.
  */
trait TPCHTests extends OpaqueTestsBase { self =>

  // TPC-H scale factor of the test data set.
  def size = "sf_small"

  // Fresh TPCH runner per access; the runner carries the SQLContext, so
  // query() takes only (queryNumber, securityLevel, numPartitions).
  def tpch = new TPCH(spark.sqlContext, size)

  testAgainstSpark("TPC-H 1") { securityLevel =>
    tpch.query(1, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 2", ignore) { securityLevel =>
    tpch.query(2, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 3") { securityLevel =>
    tpch.query(3, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 4", ignore) { securityLevel =>
    tpch.query(4, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 5") { securityLevel =>
    tpch.query(5, securityLevel, numPartitions).collect
  }

  // Unordered result: compare as a set.
  testAgainstSpark("TPC-H 6") { securityLevel =>
    tpch.query(6, securityLevel, numPartitions).collect.toSet
  }

  testAgainstSpark("TPC-H 7") { securityLevel =>
    tpch.query(7, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 8") { securityLevel =>
    tpch.query(8, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 9") { securityLevel =>
    tpch.query(9, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 10") { securityLevel =>
    tpch.query(10, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 11", ignore) { securityLevel =>
    tpch.query(11, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 12") { securityLevel =>
    tpch.query(12, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 13", ignore) { securityLevel =>
    tpch.query(13, securityLevel, numPartitions).collect
  }

  // Unordered result: compare as a set.
  testAgainstSpark("TPC-H 14") { securityLevel =>
    tpch.query(14, securityLevel, numPartitions).collect.toSet
  }

  testAgainstSpark("TPC-H 15", ignore) { securityLevel =>
    tpch.query(15, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 16", ignore) { securityLevel =>
    tpch.query(16, securityLevel, numPartitions).collect
  }

  // Unordered result: compare as a set.
  testAgainstSpark("TPC-H 17") { securityLevel =>
    tpch.query(17, securityLevel, numPartitions).collect.toSet
  }

  testAgainstSpark("TPC-H 18", ignore) { securityLevel =>
    tpch.query(18, securityLevel, numPartitions).collect
  }

  // Unordered result: compare as a set.
  testAgainstSpark("TPC-H 19") { securityLevel =>
    tpch.query(19, securityLevel, numPartitions).collect.toSet
  }

  // Unordered result: compare as a set.
  testAgainstSpark("TPC-H 20") { securityLevel =>
    tpch.query(20, securityLevel, numPartitions).collect.toSet
  }

  testAgainstSpark("TPC-H 21", ignore) { securityLevel =>
    tpch.query(21, securityLevel, numPartitions).collect
  }

  testAgainstSpark("TPC-H 22", ignore) { securityLevel =>
    tpch.query(22, securityLevel, numPartitions).collect
  }
}