@@ -109,6 +109,9 @@ never_optimize(
109
109
_PyExecutorObject * * exec ,
110
110
int Py_UNUSED (stack_entries ))
111
111
{
112
+ /* Although it should be benign for this to be called,
113
+ * it shouldn't happen, so fail in debug builds. */
114
+ assert (0 && "never optimize should never be called" );
112
115
return 0 ;
113
116
}
114
117
@@ -120,38 +123,53 @@ PyTypeObject _PyDefaultOptimizer_Type = {
120
123
.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION ,
121
124
};
122
125
123
- _PyOptimizerObject _PyOptimizer_Default = {
126
+ static _PyOptimizerObject _PyOptimizer_Default = {
124
127
PyObject_HEAD_INIT (& _PyDefaultOptimizer_Type )
125
128
.optimize = never_optimize ,
126
- .resume_threshold = INT16_MAX ,
127
- .backedge_threshold = INT16_MAX ,
129
+ .resume_threshold = OPTIMIZER_UNREACHABLE_THRESHOLD ,
130
+ .backedge_threshold = OPTIMIZER_UNREACHABLE_THRESHOLD ,
128
131
};
129
132
133
+ static uint32_t
134
+ shift_and_offset_threshold (uint16_t threshold )
135
+ {
136
+ return (threshold << OPTIMIZER_BITS_IN_COUNTER ) + (1 << 15 );
137
+ }
138
+
130
139
_PyOptimizerObject *
131
140
PyUnstable_GetOptimizer (void )
132
141
{
133
142
PyInterpreterState * interp = _PyInterpreterState_GET ();
134
143
if (interp -> optimizer == & _PyOptimizer_Default ) {
135
144
return NULL ;
136
145
}
137
- assert (interp -> optimizer_backedge_threshold == interp -> optimizer -> backedge_threshold );
138
- assert (interp -> optimizer_resume_threshold == interp -> optimizer -> resume_threshold );
146
+ assert (interp -> optimizer_backedge_threshold ==
147
+ shift_and_offset_threshold (interp -> optimizer -> backedge_threshold ));
148
+ assert (interp -> optimizer_resume_threshold ==
149
+ shift_and_offset_threshold (interp -> optimizer -> resume_threshold ));
139
150
Py_INCREF (interp -> optimizer );
140
151
return interp -> optimizer ;
141
152
}
142
153
143
- void
144
- PyUnstable_SetOptimizer ( _PyOptimizerObject * optimizer )
154
+ _PyOptimizerObject *
155
+ _Py_SetOptimizer ( PyInterpreterState * interp , _PyOptimizerObject * optimizer )
145
156
{
146
- PyInterpreterState * interp = _PyInterpreterState_GET ();
147
157
if (optimizer == NULL ) {
148
158
optimizer = & _PyOptimizer_Default ;
149
159
}
150
160
_PyOptimizerObject * old = interp -> optimizer ;
151
161
Py_INCREF (optimizer );
152
162
interp -> optimizer = optimizer ;
153
- interp -> optimizer_backedge_threshold = optimizer -> backedge_threshold ;
154
- interp -> optimizer_resume_threshold = optimizer -> resume_threshold ;
163
+ interp -> optimizer_backedge_threshold = shift_and_offset_threshold (optimizer -> backedge_threshold );
164
+ interp -> optimizer_resume_threshold = shift_and_offset_threshold (optimizer -> resume_threshold );
165
+ return old ;
166
+ }
167
+
168
+ void
169
+ PyUnstable_SetOptimizer (_PyOptimizerObject * optimizer )
170
+ {
171
+ PyInterpreterState * interp = _PyInterpreterState_GET ();
172
+ _PyOptimizerObject * old = _Py_SetOptimizer (interp , optimizer );
155
173
Py_DECREF (old );
156
174
}
157
175
@@ -860,10 +878,10 @@ PyUnstable_Optimizer_NewUOpOptimizer(void)
860
878
return NULL ;
861
879
}
862
880
opt -> optimize = uop_optimize ;
863
- opt -> resume_threshold = INT16_MAX ;
864
- // Need at least 3 iterations to settle specializations.
865
- // A few lower bits of the counter are reserved for other flags .
866
- opt -> backedge_threshold = 16 << OPTIMIZER_BITS_IN_COUNTER ;
881
+ opt -> resume_threshold = OPTIMIZER_UNREACHABLE_THRESHOLD ;
882
+ // Need a few iterations to settle specializations,
883
+ // and to amortize the cost of optimization.
884
+ opt -> backedge_threshold = 16 ;
867
885
return (PyObject * )opt ;
868
886
}
869
887
@@ -950,7 +968,7 @@ PyUnstable_Optimizer_NewCounter(void)
950
968
return NULL ;
951
969
}
952
970
opt -> base .optimize = counter_optimize ;
953
- opt -> base .resume_threshold = INT16_MAX ;
971
+ opt -> base .resume_threshold = OPTIMIZER_UNREACHABLE_THRESHOLD ;
954
972
opt -> base .backedge_threshold = 0 ;
955
973
opt -> count = 0 ;
956
974
return (PyObject * )opt ;
0 commit comments