Skip to content

Commit a9a6354

Browse files
committed
update allow_large_result warning
1 parent e39ee3b commit a9a6354

13 files changed

+75
-32
lines changed

bigframes/_config/bigquery_options.py

Lines changed: 29 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -19,10 +19,8 @@
1919
from typing import Literal, Optional
2020
import warnings
2121

22-
import google.api_core.exceptions
2322
import google.auth.credentials
2423

25-
import bigframes.constants
2624
import bigframes.enums
2725
import bigframes.exceptions as bfe
2826

@@ -239,22 +237,43 @@ def skip_bq_connection_check(self, value: bool):
239237
@property
240238
def allow_large_results(self) -> bool:
241239
"""
242-
Sets the flag to allow or disallow query results larger than 10 GB.
240+
DEPRECATED: Checks the legacy global setting for allowing large results.
241+
Use ``bpd.options.compute.allow_large_results`` instead.
243242
244-
The default setting for this flag is True, which allows queries to return results
245-
exceeding 10 GB by creating an explicit destination table. If set to False, it
246-
restricts the result size to 10 GB, and BigQuery will raise an error if this limit
247-
is exceeded.
243+
Warning: Accessing ``bpd.options.bigquery.allow_large_results`` is deprecated
244+
and this property will be removed in a future version. The configuration for
245+
handling large results has moved.
248246
249247
Returns:
250-
bool: True if large results are allowed with an explicit destination table,
251-
False if results are limited to 10 GB and errors are raised when exceeded.
248+
bool: The value of the deprecated setting.
252249
"""
250+
warnings.warn(
251+
"`bpd.options.bigquery.allow_large_results` is deprecated and will be removed soon. "
252+
"Please use `bpd.options.compute.allow_large_results` instead.",
253+
DeprecationWarning,
254+
stacklevel=2,
255+
)
253256
return self._allow_large_results
254257

255258
@allow_large_results.setter
256259
def allow_large_results(self, value: bool):
257-
self._allow_large_results = value
260+
"""
261+
DEPRECATED: Setting ``allow_large_results`` via ``bpd.options.bigquery``
262+
is deprecated and has no effect. Use
263+
``bpd.options.compute.allow_large_results`` instead.
264+
265+
Warning: Setting this option here is deprecated, ignored, and this setter
266+
will be removed in a future version. The configuration for handling large
267+
results has moved.
268+
"""
269+
warnings.warn(
270+
"Setting `bpd.options.bigquery.allow_large_results` is deprecated, ignored, "
271+
"and will be removed soon. "
272+
"Please use `bpd.options.compute.allow_large_results = <value>` instead.",
273+
DeprecationWarning,
274+
stacklevel=2,
275+
)
276+
pass
258277

259278
@property
260279
def use_regional_endpoints(self) -> bool:

bigframes/_config/compute_options.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,12 @@ class ComputeOptions:
8686
ai_ops_threshold_autofail (bool):
8787
Guards against unexpected processing of large amount of rows by semantic operators.
8888
When set to True, the operation automatically fails without asking for user inputs.
89+
90+
allow_large_results (bool):
91+
Specifies whether query results can exceed 10 GB. Defaults to False. Setting this
92+
to False (the default) restricts results to 10 GB for potentially faster execution;
93+
BigQuery will raise an error if this limit is exceeded. Setting to True removes
94+
this result size limit.
8995
"""
9096

9197
maximum_bytes_billed: Optional[int] = None
@@ -97,7 +103,9 @@ class ComputeOptions:
97103
semantic_ops_threshold_autofail = False
98104

99105
ai_ops_confirmation_threshold: Optional[int] = 0
100-
ai_ops_threshold_autofail = False
106+
ai_ops_threshold_autofail: bool = False
107+
108+
allow_large_results: bool = False
101109

102110
def assign_extra_query_labels(self, **kwargs: Any) -> None:
103111
"""

bigframes/session/bq_caching_executor.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ def execute(
112112
max_results: Optional[int] = None,
113113
) -> executor.ExecuteResult:
114114
if use_explicit_destination is None:
115-
use_explicit_destination = bigframes.options.bigquery.allow_large_results
115+
use_explicit_destination = bigframes.options.compute.allow_large_results
116116

117117
if bigframes.options.compute.enable_multi_query_execution:
118118
self._simplify_with_caching(array_value)
@@ -231,7 +231,7 @@ def peek(
231231
msg = bfe.format_message("Peeking this value cannot be done efficiently.")
232232
warnings.warn(msg)
233233
if use_explicit_destination is None:
234-
use_explicit_destination = bigframes.options.bigquery.allow_large_results
234+
use_explicit_destination = bigframes.options.compute.allow_large_results
235235

236236
destination_table = (
237237
self.storage_manager.create_temp_table(
@@ -555,7 +555,7 @@ def iterator_supplier():
555555
"The query result size has exceeded 10 GB. In BigFrames 2.0 and "
556556
"later, you might need to manually set `allow_large_results=True` in "
557557
"the IO method or adjust the BigFrames option: "
558-
"`bigframes.options.bigquery.allow_large_results=True`."
558+
"`bigframes.options.compute.allow_large_results=True`."
559559
)
560560
warnings.warn(msg, FutureWarning)
561561
# Runs strict validations to ensure internal type predictions and ibis are completely in sync

tests/system/conftest.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@ def resourcemanager_client(
142142

143143
@pytest.fixture(scope="session")
144144
def session() -> Generator[bigframes.Session, None, None]:
145-
context = bigframes.BigQueryOptions(location="US", allow_large_results=False)
145+
context = bigframes.BigQueryOptions(location="US")
146146
session = bigframes.Session(context=context)
147147
yield session
148148
session.close() # close generated session at cleanup time
@@ -158,19 +158,15 @@ def session_load() -> Generator[bigframes.Session, None, None]:
158158

159159
@pytest.fixture(scope="session", params=["strict", "partial"])
160160
def maybe_ordered_session(request) -> Generator[bigframes.Session, None, None]:
161-
context = bigframes.BigQueryOptions(
162-
location="US", ordering_mode=request.param, allow_large_results=False
163-
)
161+
context = bigframes.BigQueryOptions(location="US", ordering_mode=request.param)
164162
session = bigframes.Session(context=context)
165163
yield session
166164
session.close()  # close generated session at cleanup time
167165

168166

169167
@pytest.fixture(scope="session")
170168
def unordered_session() -> Generator[bigframes.Session, None, None]:
171-
context = bigframes.BigQueryOptions(
172-
location="US", ordering_mode="partial", allow_large_results=False
173-
)
169+
context = bigframes.BigQueryOptions(location="US", ordering_mode="partial")
174170
session = bigframes.Session(context=context)
175171
yield session
176172
session.close()  # close generated session at cleanup time
@@ -1419,7 +1415,7 @@ def floats_product_bf(session, floats_product_pd):
14191415

14201416
@pytest.fixture(scope="session", autouse=True)
14211417
def use_fast_query_path():
1422-
with bpd.option_context("bigquery.allow_large_results", False):
1418+
with bpd.option_context("compute.allow_large_results", False):
14231419
yield
14241420

14251421

tests/system/large/test_dataframe_io.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
import bigframes
2222

2323
WIKIPEDIA_TABLE = "bigquery-public-data.samples.wikipedia"
24-
LARGE_TABLE_OPTION = "bigquery.allow_large_results"
24+
LARGE_TABLE_OPTION = "compute.allow_large_results"
2525

2626

2727
def test_to_pandas_batches_raise_when_large_result_not_allowed(session):

tests/system/small/test_dataframe.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5015,7 +5015,7 @@ def test_df_bool_interpretation_error(scalars_df_index):
50155015

50165016
def test_query_job_setters(scalars_df_default_index: dataframe.DataFrame):
50175017
# if allow_large_results=False, might not create query job
5018-
with bigframes.option_context("bigquery.allow_large_results", True):
5018+
with bigframes.option_context("compute.allow_large_results", True):
50195019
job_ids = set()
50205020
repr(scalars_df_default_index)
50215021
assert scalars_df_default_index.query_job is not None

tests/system/small/test_dataframe_io.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -254,7 +254,7 @@ def test_to_pandas_array_struct_correct_result(session):
254254
def test_to_pandas_override_global_option(scalars_df_index):
255255
# Direct call to_pandas uses global default setting (allow_large_results=True),
256256
# table has 'bqdf' prefix.
257-
with bigframes.option_context("bigquery.allow_large_results", True):
257+
with bigframes.option_context("compute.allow_large_results", True):
258258

259259
scalars_df_index.to_pandas()
260260
table_id = scalars_df_index._query_job.destination.table_id
@@ -324,7 +324,7 @@ def test_to_pandas_dry_run(session, scalars_pandas_df_multi_index):
324324

325325
def test_to_arrow_override_global_option(scalars_df_index):
326326
# Direct call to_arrow uses global default setting (allow_large_results=True),
327-
with bigframes.option_context("bigquery.allow_large_results", True):
327+
with bigframes.option_context("compute.allow_large_results", True):
328328

329329
scalars_df_index.to_arrow()
330330
table_id = scalars_df_index._query_job.destination.table_id

tests/system/small/test_index_io.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616

1717
def test_to_pandas_override_global_option(scalars_df_index):
18-
with bigframes.option_context("bigquery.allow_large_results", True):
18+
with bigframes.option_context("compute.allow_large_results", True):
1919

2020
bf_index = scalars_df_index.index
2121

@@ -39,7 +39,7 @@ def test_to_pandas_dry_run(scalars_df_index):
3939

4040

4141
def test_to_numpy_override_global_option(scalars_df_index):
42-
with bigframes.option_context("bigquery.allow_large_results", True):
42+
with bigframes.option_context("compute.allow_large_results", True):
4343

4444
bf_index = scalars_df_index.index
4545

tests/system/small/test_progress_bar.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ def test_progress_bar_scalar_allow_large_results(
6464
capsys.readouterr() # clear output
6565

6666
with bf.option_context(
67-
"display.progress_bar", "terminal", "bigquery.allow_large_results", "True"
67+
"display.progress_bar", "terminal", "compute.allow_large_results", "True"
6868
):
6969
penguins_df_default_index["body_mass_g"].head(10).mean()
7070

tests/system/small/test_series.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3925,7 +3925,7 @@ def test_series_bool_interpretation_error(scalars_df_index):
39253925

39263926
def test_query_job_setters(scalars_dfs):
39273927
# if allow_large_results=False, might not create query job
3928-
with bigframes.option_context("bigquery.allow_large_results", True):
3928+
with bigframes.option_context("compute.allow_large_results", True):
39293929
job_ids = set()
39303930
df, _ = scalars_dfs
39313931
series = df["int64_col"]

tests/system/small/test_series_io.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919

2020

2121
def test_to_pandas_override_global_option(scalars_df_index):
22-
with bigframes.option_context("bigquery.allow_large_results", True):
22+
with bigframes.option_context("compute.allow_large_results", True):
2323

2424
bf_series = scalars_df_index["int64_col"]
2525

tests/unit/_config/test_bigquery_options.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -188,5 +188,4 @@ def test_client_endpoints_override_set_shows_warning():
188188
def test_default_options():
189189
options = bigquery_options.BigQueryOptions()
190190

191-
assert options.allow_large_results is False
192191
assert options.ordering_mode == "strict"
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
# Copyright 2025 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
import bigframes._config.compute_options as compute_options
16+
17+
18+
def test_default_options():
19+
options = compute_options.ComputeOptions()
20+
21+
assert options.allow_large_results is False

0 commit comments

Comments
 (0)