 
 from pandas.core.construction import create_series_with_explicit_dtype
 
+from pandas.core.series import Series
+from pandas import DataFrame
+
 if TYPE_CHECKING:
     from pandas import DataFrame, Series, Index
 
@@ -220,14 +223,13 @@ def apply_empty_result(self):
 
     def apply_raw(self):
         """ apply to the values as a numpy array """
-        try:
-            result = libreduction.compute_reduction(self.values, self.f, axis=self.axis)
-        except ValueError as err:
-            if "Function does not reduce" not in str(err):
-                # catch only ValueError raised intentionally in libreduction
-                raise
-            # We expect np.apply_along_axis to give a two-dimensional result, or
-            # also raise.
+        result, partial_result = libreduction.compute_reduction(
+            self.values, self.f, axis=self.axis
+        )
+
+        # A non-None partial_result means that the reduction was unsuccessful.
+        # We expect np.apply_along_axis to give a two-dimensional result, or raise.
+        if partial_result is not None:
             result = np.apply_along_axis(self.f, self.axis, self.values)
 
         # TODO: mixed type case
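For orientation, a minimal pure-Python sketch of the contract apply_raw now relies on; the real libreduction.compute_reduction is a Cython routine, and the helper below (its name and internals) is only an illustrative assumption. The idea, as the new comment states, is that a non-None partial_result signals that the reduction failed and carries the value already computed for the first slice, so the caller can fall back to np.apply_along_axis without discarding that work.

import numpy as np

def compute_reduction_sketch(values, f, axis=0):
    # Illustrative stand-in only; not the actual pandas._libs.reduction API.
    first_slice = values[0] if axis == 1 else values[:, 0]
    partial = f(first_slice)
    if np.ndim(partial) != 0:
        # f did not reduce the slice to a scalar: report failure and hand the
        # already-computed first value back instead of throwing it away.
        return None, partial
    # f reduces: compute the full result and signal success with partial_result=None.
    return np.apply_along_axis(f, axis, values), None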
@@ -265,6 +267,7 @@ def apply_broadcast(self, target: "DataFrame") -> "DataFrame":
 
     def apply_standard(self):
 
+        partial_result = None
         # try to reduce first (by default)
         # this only matters if the reduction in values is of different dtype
         # e.g. if we want to apply to a SparseFrame, then can't directly reduce
@@ -292,13 +295,9 @@ def apply_standard(self):
             )
 
             try:
-                result = libreduction.compute_reduction(
+                result, partial_result = libreduction.compute_reduction(
                     values, self.f, axis=self.axis, dummy=dummy, labels=labels
                 )
-            except ValueError as err:
-                if "Function does not reduce" not in str(err):
-                    # catch only ValueError raised intentionally in libreduction
-                    raise
             except TypeError:
                 # e.g. test_apply_ignore_failures we just ignore
                 if not self.ignore_failures:
@@ -307,23 +306,36 @@ def apply_standard(self):
                 # reached via numexpr; fall back to python implementation
                 pass
             else:
-                return self.obj._constructor_sliced(result, index=labels)
+                # this means that the reduction was successful
+                if partial_result is None:
+                    return self.obj._constructor_sliced(result, index=labels)
+                else:
+                    if isinstance(partial_result, Series):
+                        partial_result = DataFrame.infer_objects(partial_result)
 
         # compute the result using the series generator
-        results, res_index = self.apply_series_generator()
+        results, res_index = self.apply_series_generator(partial_result)
 
         # wrap results
         return self.wrap_results(results, res_index)
 
-    def apply_series_generator(self) -> Tuple[ResType, "Index"]:
+    def apply_series_generator(self, partial_result=None) -> Tuple[ResType, "Index"]:
         series_gen = self.series_generator
         res_index = self.result_index
 
         keys = []
         results = {}
+
+        # If a partial result was already computed, use it instead of running on the first element again
+        series_gen_enumeration = enumerate(series_gen)
+        if partial_result is not None:
+            i, v = next(series_gen_enumeration)
+            results[i] = partial_result
+            keys.append(v.name)
+
         if self.ignore_failures:
             successes = []
-            for i, v in enumerate(series_gen):
+            for i, v in series_gen_enumeration:
                 try:
                     results[i] = self.f(v)
                 except Exception:
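The generator priming used above, shown in isolation (a standalone sketch, not pandas code): enumerate produces a single shared iterator, so consuming its first item with next() means every later loop over series_gen_enumeration starts at the second element and nothing is evaluated twice.

series_gen_enumeration = enumerate(["col_a", "col_b", "col_c"])

# Pretend the result for the first element was already computed elsewhere.
i, v = next(series_gen_enumeration)
results = {i: "precomputed"}

# This loop only sees the remaining elements.
for i, v in series_gen_enumeration:
    results[i] = v.upper()

print(results)  # {0: 'precomputed', 1: 'COL_B', 2: 'COL_C'}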
@@ -337,7 +349,8 @@ def apply_series_generator(self) -> Tuple[ResType, "Index"]:
             res_index = res_index.take(successes)
 
         else:
-            for i, v in enumerate(series_gen):
+            for i, v in series_gen_enumeration:
+
                 results[i] = self.f(v)
                 keys.append(v.name)
 
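Taken together, these hunks mean the fallback loop no longer re-applies the function to the first row or column whose result was already produced during the attempted reduction. A rough user-level illustration of that intent (assumed behavior inferred from the comments in this diff, not output captured from a test run):

import pandas as pd

call_count = 0

def double(row):
    global call_count
    call_count += 1
    return row * 2  # returns a Series, so the function does not reduce

df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
df.apply(double, axis=1)

# With partial_result reuse, the expectation is one call per row; previously the
# first row could be passed to `double` a second time when apply fell back from
# the reduction fast path to the series generator.
print(call_count)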