@@ -5,11 +5,10 @@ if (typeof describe === "undefined") {
 namespace Harness.Parallel.Host {

     interface ChildProcessPartial {
-        send(message: any, callback?: (error: Error) => void): boolean;
+        send(message: ParallelHostMessage, callback?: (error: Error) => void): boolean;
         on(event: "error", listener: (err: Error) => void): this;
         on(event: "exit", listener: (code: number, signal: string) => void): this;
-        on(event: "message", listener: (message: any) => void): this;
-        disconnect(): void;
+        on(event: "message", listener: (message: ParallelClientMessage) => void): this;
     }

     interface ProgressBarsOptions {
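The ParallelHostMessage and ParallelClientMessage unions are declared elsewhere in the harness; as a rough sketch, inferred only from the sends and handlers visible in this diff (shapes are assumptions, not the actual declarations):

    // Sketch only -- inferred from usage in this file, not the harness's real types.
    type Task = { runner: TestRunnerKind, file: string, size: number };
    type ParallelHostMessage =
        | { type: "test", payload: Task }       // run one test file
        | { type: "batch", payload: Task[] }    // run several test files
        | { type: "close" };                    // no more work; shut down
    type ParallelClientMessage =
        | { type: "error", payload: { error: string, stack: string } }
        | { type: "result", payload: { runner: TestRunnerKind, file: string, duration: number } };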
@@ -27,23 +26,54 @@ namespace Harness.Parallel.Host {
         text?: string;
     }

+    const perfdataFileName = ".parallelperf.json";
+    function readSavedPerfData(): { [testHash: string]: number } {
+        const perfDataContents = Harness.IO.readFile(perfdataFileName);
+        if (perfDataContents) {
+            return JSON.parse(perfDataContents);
+        }
+        return undefined;
+    }
+
+    function hashName(runner: TestRunnerKind, test: string) {
+        return `tsrunner-${runner}://${test}`;
+    }
+
     export function start() {
         initializeProgressBarsDependencies();
         console.log("Discovering tests...");
         const discoverStart = +(new Date());
         const { statSync }: { statSync(path: string): { size: number }; } = require("fs");
         const tasks: { runner: TestRunnerKind, file: string, size: number }[] = [];
-        let totalSize = 0;
+        const perfData = readSavedPerfData();
+        let totalCost = 0;
+        let unknownValue: string | undefined;
         for (const runner of runners) {
             const files = runner.enumerateTestFiles();
             for (const file of files) {
-                const size = statSync(file).size;
+                let size: number;
+                if (!perfData) {
+                    size = statSync(file).size;
+                }
+                else {
+                    const hashedName = hashName(runner.kind(), file);
+                    size = perfData[hashedName];
+                    if (size === undefined) {
+                        size = Number.MAX_SAFE_INTEGER;
+                        unknownValue = hashedName;
+                    }
+                }
                 tasks.push({ runner: runner.kind(), file, size });
-                totalSize += size;
+                totalCost += size;
             }
         }
         tasks.sort((a, b) => a.size - b.size);
-        const batchSize = (totalSize / workerCount) * 0.9;
+        // One fewer batch than threads, to account for unittests running on the final thread
+        const batchCount = runners.length === 1 ? workerCount : workerCount - 1;
+        const packfraction = 0.9;
+        const chunkSize = 1000; // ~1KB or 1s for sending batches near the end of a test run
+        const batchSize = (totalCost / workerCount) * packfraction; // Keep spare tests for the unittest thread in reserve
         console.log(`Discovered ${tasks.length} test files in ${+(new Date()) - discoverStart}ms.`);
         console.log(`Starting to run tests using ${workerCount} threads...`);
         const { fork }: { fork(modulePath: string, args?: string[], options?: {}): ChildProcessPartial; } = require("child_process");
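When a previous run has written .parallelperf.json, each task's size holds the test's last measured duration in milliseconds rather than its file size, so totalCost and batchSize are measured in milliseconds too. A made-up example of the file's shape (keys follow the hashName template; the paths and timings here are invented for illustration):

    {
        "tsrunner-compiler://tests/cases/compiler/2dArrays.ts": 85,
        "tsrunner-conformance://tests/cases/conformance/es6/templates/templateStringInArray.ts": 120
    }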
@@ -59,15 +89,17 @@ namespace Harness.Parallel.Host {
         const progressUpdateInterval = 1 / progressBars._options.width;
         let nextProgress = progressUpdateInterval;

+        const newPerfData: { [testHash: string]: number } = {};
+
         const workers: ChildProcessPartial[] = [];
+        let closedWorkers = 0;
         for (let i = 0; i < workerCount; i++) {
             // TODO: Just send the config over the IPC channel or in the command line arguments
             const config: TestConfig = { light: Harness.lightMode, listenForWork: true, runUnitTests: runners.length === 1 ? false : i === workerCount - 1 };
             const configPath = ts.combinePaths(taskConfigsFolder, `task-config${i}.json`);
             Harness.IO.writeFile(configPath, JSON.stringify(config));
             const child = fork(__filename, [`--config="${configPath}"`]);
             child.on("error", err => {
-                child.disconnect();
                 console.error("Unexpected error in child process:");
                 console.error(err);
                 return process.exit(2);
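Each forked worker reads its generated task-config file at startup. With, say, four workers and more than one runner active, the loop above writes four files, and only the last worker takes the unit tests; hypothetical contents for illustration:

    // task-config3.json (illustrative values)
    { "light": true, "listenForWork": true, "runUnitTests": true }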
@@ -81,7 +113,6 @@ namespace Harness.Parallel.Host {
             child.on("message", (data: ParallelClientMessage) => {
                 switch (data.type) {
                     case "error": {
-                        child.disconnect();
                         console.error(`Test worker encountered an unexpected error and was forced to close:
 Message: ${data.payload.error}
 Stack: ${data.payload.stack}`);
@@ -97,6 +128,7 @@ namespace Harness.Parallel.Host {
                     else {
                         passingFiles++;
                     }
+                    newPerfData[hashName(data.payload.runner, data.payload.file)] = data.payload.duration;

                     const progress = (failingFiles + passingFiles) / totalFiles;
                     if (progress >= nextProgress) {
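Each completed file's duration is recorded under its hashName key, so the next run can batch by measured time instead of file size. For a hypothetical compiler test:

    // hashName("compiler", "tests/cases/compiler/2dArrays.ts")
    // => "tsrunner-compiler://tests/cases/compiler/2dArrays.ts"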
@@ -106,20 +138,27 @@ namespace Harness.Parallel.Host {
                         updateProgress(progress, errorResults.length ? `${errorResults.length} failing` : `${totalPassing} passing`, errorResults.length ? "fail" : undefined);
                     }

-                    if (failingFiles + passingFiles === totalFiles) {
-                        // Done. Finished every task and collected results.
-                        child.send({ type: "close" });
-                        child.disconnect();
-                        return outputFinalResult();
-                    }
-                    if (tasks.length === 0) {
-                        // No more tasks to distribute
-                        child.send({ type: "close" });
-                        child.disconnect();
-                        return;
-                    }
                     if (data.type === "result") {
-                        child.send({ type: "test", payload: tasks.pop() });
+                        if (tasks.length === 0) {
+                            // No more tasks to distribute
+                            child.send({ type: "close" });
+                            closedWorkers++;
+                            if (closedWorkers === workerCount) {
+                                outputFinalResult();
+                            }
+                            return;
+                        }
+                        // Send tasks in blocks if the tasks are small
+                        const taskList = [tasks.pop()];
+                        while (tasks.length && taskList.reduce((p, c) => p + c.size, 0) > chunkSize) {
+                            taskList.push(tasks.pop());
+                        }
+                        if (taskList.length === 1) {
+                            child.send({ type: "test", payload: taskList[0] });
+                        }
+                        else {
+                            child.send({ type: "batch", payload: taskList });
+                        }
                     }
                 }
             }
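A short trace of the dispatch block above under assumed remaining sizes (the queue is sorted ascending, so tasks.pop() yields the largest item left):

    // Remaining sizes [5, 10, 900]: pop() -> 900; 900 > chunkSize (1000) is false,
    // so the worker receives a single { type: "test" } message.
    // Remaining sizes [5, 10, 1500]: pop() -> 1500; the while condition stays true
    // until the queue drains, so all three tasks go out as one { type: "batch" } message.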
@@ -130,12 +169,13 @@ namespace Harness.Parallel.Host {
         // It's only really worth doing an initial batching if there are a ton of files to go through
         if (totalFiles > 1000) {
             console.log("Batching initial test lists...");
-            const batches: { runner: TestRunnerKind, file: string, size: number }[][] = new Array(workerCount);
-            const doneBatching = new Array(workerCount);
+            const batches: { runner: TestRunnerKind, file: string, size: number }[][] = new Array(batchCount);
+            const doneBatching = new Array(batchCount);
+            let scheduledTotal = 0;
             batcher: while (true) {
-                for (let i = 0; i < workerCount; i++) {
+                for (let i = 0; i < batchCount; i++) {
                     if (tasks.length === 0) {
-                        // TODO: This indicates a particularly suboptimal packing
+                        console.log(`Suboptimal packing detected: no tests remain to be stolen. Reduce packing fraction from ${packfraction} to fix.`);
                         break batcher;
                     }
                     if (doneBatching[i]) {
@@ -145,26 +185,36 @@ namespace Harness.Parallel.Host {
                         batches[i] = [];
                     }
                     const total = batches[i].reduce((p, c) => p + c.size, 0);
-                    if (total >= batchSize && !doneBatching[i]) {
+                    if (total >= batchSize) {
                         doneBatching[i] = true;
                         continue;
                     }
-                    batches[i].push(tasks.pop());
+                    const task = tasks.pop();
+                    batches[i].push(task);
+                    scheduledTotal += task.size;
                 }
-                for (let j = 0; j < workerCount; j++) {
+                for (let j = 0; j < batchCount; j++) {
                     if (!doneBatching[j]) {
-                        continue;
+                        continue batcher;
                     }
                 }
                 break;
             }
-            console.log(`Batched into ${workerCount} groups with approximate total file sizes of ${Math.floor(batchSize)} bytes in each group.`);
+            const prefix = `Batched into ${batchCount} groups`;
+            if (unknownValue) {
+                console.log(`${prefix}. Unprofiled tests including ${unknownValue} will be run first.`);
+            }
+            else {
+                console.log(`${prefix} with approximate total ${perfData ? "time" : "file sizes"} of ${perfData ? ms(batchSize) : `${Math.floor(batchSize)} bytes`} in each group. (${(scheduledTotal / totalCost * 100).toFixed(1)}% of total tests batched)`);
+            }
             for (const worker of workers) {
-                const action: ParallelBatchMessage = { type: "batch", payload: batches.pop() };
-                if (!action.payload[0]) {
-                    throw new Error(`Tried to send invalid message ${action}`);
+                const payload = batches.pop();
+                if (payload) {
+                    worker.send({ type: "batch", payload });
+                }
+                else { // Unittest thread - send off just one test
+                    worker.send({ type: "test", payload: tasks.pop() });
                 }
-                worker.send(action);
             }
         }
         else {
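For concreteness, with assumed numbers: workerCount = 4 and more than one active runner gives batchCount = 3. If totalCost came to 400000 ms of profiled time, then batchSize = (400000 / 4) * 0.9 = 90000 ms per batch, so the three prepared batches cover roughly 270000 ms, or 67.5% of the total cost; the remainder stays in the queue for the unittest thread and for workers to pull as they finish.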
@@ -177,7 +227,6 @@ namespace Harness.Parallel.Host {
         updateProgress(0);
         let duration: number;

-        const ms = require("mocha/lib/ms");
         function completeBar() {
             const isPartitionFail = failingFiles !== 0;
             const summaryColor = isPartitionFail ? "fail" : "green";
@@ -235,6 +284,8 @@ namespace Harness.Parallel.Host {
             reporter.epilogue();
         }

+        Harness.IO.writeFile(perfdataFileName, JSON.stringify(newPerfData, null, 4)); // tslint:disable-line:no-null-keyword
+
         process.exit(errorResults.length);
     }
@@ -264,6 +315,38 @@ namespace Harness.Parallel.Host {
     let tty: { isatty(x: number): boolean };
     let isatty: boolean;

+    const s = 1000;
+    const m = s * 60;
+    const h = m * 60;
+    const d = h * 24;
+    function ms(ms: number) {
+        let result = "";
+        if (ms >= d) {
+            const count = Math.floor(ms / d);
+            result += count + "d";
+            ms -= count * d;
+        }
+        if (ms >= h) {
+            const count = Math.floor(ms / h);
+            result += count + "h";
+            ms -= count * h;
+        }
+        if (ms >= m) {
+            const count = Math.floor(ms / m);
+            result += count + "m";
+            ms -= count * m;
+        }
+        if (ms >= s) {
+            const count = Math.round(ms / s);
+            result += count + "s";
+            return result;
+        }
+        if (ms > 0) {
+            result += Math.round(ms) + "ms";
+        }
+        return result;
+    }
+
     function initializeProgressBarsDependencies() {
         Mocha = require("mocha");
         Base = Mocha.reporters.Base;
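The local ms helper added above stands in for the mocha/lib/ms require removed earlier in this diff. Sample outputs, computed directly from the definition:

    // ms(450)     => "450ms"
    // ms(90500)   => "1m31s"   (30.5s rounds up)
    // ms(3661000) => "1h1m1s"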
@@ -286,7 +369,7 @@ namespace Harness.Parallel.Host {
         const close = options.close || "]";
         const complete = options.complete || "▬";
         const incomplete = options.incomplete || Base.symbols.dot;
-        const maxWidth = Base.window.width - open.length - close.length - 30;
+        const maxWidth = Base.window.width - open.length - close.length - 34;
         const width = minMax(options.width || maxWidth, 10, maxWidth);
         this._options = {
             open,