@@ -65,11 +65,13 @@ type worklist struct {
65
65
defBlock map [* Value ][]* Block // use blocks of def
66
66
}
67
67
68
+ // possibleConst checks if Value can be folded to a constant. For those Values that can never
69
+ // become constants (e.g. StaticCall), we don't make futile efforts.
68
70
func possibleConst (val * Value ) bool {
71
+ if isConst (val ) {
72
+ return true
73
+ }
69
74
switch val .Op {
70
- case OpConst64 , OpConst32 , OpConst16 , OpConst8 ,
71
- OpConstBool , OpConst32F , OpConst64F :
72
- fallthrough
73
75
case OpCopy :
74
76
fallthrough
75
77
case OpPhi :
@@ -79,7 +81,7 @@ func possibleConst(val *Value) bool {
79
81
OpNeg8 , OpNeg16 , OpNeg32 , OpNeg64 , OpNeg32F , OpNeg64F ,
80
82
OpCom8 , OpCom16 , OpCom32 , OpCom64 ,
81
83
// math
82
- OpFloor , OpCeil , OpTrunc , OpRoundToEven ,
84
+ OpFloor , OpCeil , OpTrunc , OpRoundToEven , OpSqrt ,
83
85
// conversion
84
86
OpTrunc16to8 , OpTrunc32to8 , OpTrunc32to16 , OpTrunc64to8 ,
85
87
OpTrunc64to16 , OpTrunc64to32 , OpCvt32to32F , OpCvt32to64F ,
@@ -89,6 +91,12 @@ func possibleConst(val *Value) bool {
89
91
OpZeroExt8to16 , OpZeroExt8to32 , OpZeroExt8to64 , OpZeroExt16to32 ,
90
92
OpZeroExt16to64 , OpZeroExt32to64 , OpSignExt8to16 , OpSignExt8to32 ,
91
93
OpSignExt8to64 , OpSignExt16to32 , OpSignExt16to64 , OpSignExt32to64 ,
94
+ // bit
95
+ OpCtz8 , OpCtz16 , OpCtz32 , OpCtz64 ,
96
+ // mask
97
+ OpSlicemask ,
98
+ // safety check
99
+ OpIsNonNil ,
92
100
// not
93
101
OpNot :
94
102
fallthrough
@@ -122,7 +130,7 @@ func possibleConst(val *Value) bool {
122
130
OpLsh64x64 , OpRsh64x64 , OpRsh64Ux64 , OpLsh32x64 ,
123
131
OpRsh32x64 , OpRsh32Ux64 , OpLsh16x64 , OpRsh16x64 ,
124
132
OpRsh16Ux64 , OpLsh8x64 , OpRsh8x64 , OpRsh8Ux64 ,
125
- // inbound safety check
133
+ // safety check
126
134
OpIsInBounds , OpIsSliceInBounds ,
127
135
// bit
128
136
OpAnd8 , OpAnd16 , OpAnd32 , OpAnd64 ,
@@ -308,7 +316,7 @@ func (t *worklist) visitValue(val *Value) {
308
316
OpNeg8 , OpNeg16 , OpNeg32 , OpNeg64 , OpNeg32F , OpNeg64F ,
309
317
OpCom8 , OpCom16 , OpCom32 , OpCom64 ,
310
318
// math
311
- OpFloor , OpCeil , OpTrunc , OpRoundToEven ,
319
+ OpFloor , OpCeil , OpTrunc , OpRoundToEven , OpSqrt ,
312
320
// conversion
313
321
OpTrunc16to8 , OpTrunc32to8 , OpTrunc32to16 , OpTrunc64to8 ,
314
322
OpTrunc64to16 , OpTrunc64to32 , OpCvt32to32F , OpCvt32to64F ,
@@ -318,6 +326,12 @@ func (t *worklist) visitValue(val *Value) {
318
326
OpZeroExt8to16 , OpZeroExt8to32 , OpZeroExt8to64 , OpZeroExt16to32 ,
319
327
OpZeroExt16to64 , OpZeroExt32to64 , OpSignExt8to16 , OpSignExt8to32 ,
320
328
OpSignExt8to64 , OpSignExt16to32 , OpSignExt16to64 , OpSignExt32to64 ,
329
+ // bit
330
+ OpCtz8 , OpCtz16 , OpCtz32 , OpCtz64 ,
331
+ // mask
332
+ OpSlicemask ,
333
+ // safety check
334
+ OpIsNonNil ,
321
335
// not
322
336
OpNot :
323
337
var lt1 = t .getLatticeCell (val .Args [0 ])
@@ -359,7 +373,7 @@ func (t *worklist) visitValue(val *Value) {
359
373
OpLsh64x64 , OpRsh64x64 , OpRsh64Ux64 , OpLsh32x64 ,
360
374
OpRsh32x64 , OpRsh32Ux64 , OpLsh16x64 , OpRsh16x64 ,
361
375
OpRsh16Ux64 , OpLsh8x64 , OpRsh8x64 , OpRsh8Ux64 ,
362
- // inbound safety check
376
+ // safety check
363
377
OpIsInBounds , OpIsSliceInBounds ,
364
378
// bit
365
379
OpAnd8 , OpAnd16 , OpAnd32 , OpAnd64 ,
@@ -449,10 +463,22 @@ func (t *worklist) replaceConst() (int, int) {
449
463
block .ResetControls ()
450
464
rewireCnt ++
451
465
if t .f .pass .debug > 0 {
452
- fmt .Printf ("Rewire %v successors\n " , block )
466
+ fmt .Printf ("Rewire BlockIf %v successors\n " , block )
453
467
}
454
468
case BlockJumpTable :
455
- // TODO: optimize jump table
469
+ var idx = int (lt .val .AuxInt )
470
+ var targetBlock = block .Succs [idx ].b
471
+ for len (block .Succs ) > 0 {
472
+ block .removeEdge (0 )
473
+ }
474
+ block .AddEdgeTo (targetBlock )
475
+ block .Kind = BlockPlain
476
+ block .Likely = BranchUnknown
477
+ block .ResetControls ()
478
+ rewireCnt ++
479
+ if t .f .pass .debug > 0 {
480
+ fmt .Printf ("Rewire JumpTable %v successors\n " , block )
481
+ }
456
482
}
457
483
}
458
484
}
0 commit comments