Skip to content

Commit b1aa2e4

Browse files
committed
feat!: move allow_large_results from bigquery option to compute option
1 parent c5b3edf commit b1aa2e4

12 files changed

+25
-44
lines changed

bigframes/_config/bigquery_options.py

Lines changed: 0 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,6 @@ def __init__(
8989
kms_key_name: Optional[str] = None,
9090
skip_bq_connection_check: bool = False,
9191
*,
92-
allow_large_results: bool = False,
9392
ordering_mode: Literal["strict", "partial"] = "strict",
9493
client_endpoints_override: Optional[dict] = None,
9594
):
@@ -101,7 +100,6 @@ def __init__(
101100
self._application_name = application_name
102101
self._kms_key_name = kms_key_name
103102
self._skip_bq_connection_check = skip_bq_connection_check
104-
self._allow_large_results = allow_large_results
105103
self._session_started = False
106104
# Determines the ordering strictness for the session.
107105
self._ordering_mode = _validate_ordering_mode(ordering_mode)
@@ -236,26 +234,6 @@ def skip_bq_connection_check(self, value: bool):
236234
)
237235
self._skip_bq_connection_check = value
238236

239-
@property
240-
def allow_large_results(self) -> bool:
241-
"""
242-
Sets the flag to allow or disallow query results larger than 10 GB.
243-
244-
The default setting for this flag is True, which allows queries to return results
245-
exceeding 10 GB by creating an explicit destination table. If set to False, it
246-
restricts the result size to 10 GB, and BigQuery will raise an error if this limit
247-
is exceeded.
248-
249-
Returns:
250-
bool: True if large results are allowed with an explicit destination table,
251-
False if results are limited to 10 GB and errors are raised when exceeded.
252-
"""
253-
return self._allow_large_results
254-
255-
@allow_large_results.setter
256-
def allow_large_results(self, value: bool):
257-
self._allow_large_results = value
258-
259237
@property
260238
def use_regional_endpoints(self) -> bool:
261239
"""Flag to connect to regional API endpoints for BigQuery API and

bigframes/_config/compute_options.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,12 @@ class ComputeOptions:
8686
ai_ops_threshold_autofail (bool):
8787
Guards against unexpected processing of large amount of rows by semantic operators.
8888
When set to True, the operation automatically fails without asking for user inputs.
89+
90+
allow_large_results (bool):
91+
Specifies whether query results can exceed 10 GB. Defaults to False. Setting this
92+
to False (the default) restricts results to 10 GB for potentially faster execution;
93+
BigQuery will raise an error if this limit is exceeded. Setting to True removes
94+
this result size limit.
8995
"""
9096

9197
maximum_bytes_billed: Optional[int] = None
@@ -97,7 +103,9 @@ class ComputeOptions:
97103
semantic_ops_threshold_autofail = False
98104

99105
ai_ops_confirmation_threshold: Optional[int] = 0
100-
ai_ops_threshold_autofail = False
106+
ai_ops_threshold_autofail: bool = False
107+
108+
allow_large_results: bool = False
101109

102110
def assign_extra_query_labels(self, **kwargs: Any) -> None:
103111
"""

bigframes/session/executor.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -239,7 +239,7 @@ def execute(
239239
max_results: Optional[int] = None,
240240
):
241241
if use_explicit_destination is None:
242-
use_explicit_destination = bigframes.options.bigquery.allow_large_results
242+
use_explicit_destination = bigframes.options.compute.allow_large_results
243243

244244
if bigframes.options.compute.enable_multi_query_execution:
245245
self._simplify_with_caching(array_value)
@@ -282,7 +282,7 @@ def iterator_supplier():
282282
"The query result size has exceeded 10 GB. In BigFrames 2.0 and "
283283
"later, you might need to manually set `allow_large_results=True` in "
284284
"the IO method or adjust the BigFrames option: "
285-
"`bigframes.options.bigquery.allow_large_results=True`."
285+
"`bigframes.options.compute.allow_large_results=True`."
286286
)
287287
warnings.warn(msg, FutureWarning)
288288
# Runs strict validations to ensure internal type predictions and ibis are completely in sync
@@ -393,7 +393,7 @@ def peek(
393393
msg = bfe.format_message("Peeking this value cannot be done efficiently.")
394394
warnings.warn(msg)
395395
if use_explicit_destination is None:
396-
use_explicit_destination = bigframes.options.bigquery.allow_large_results
396+
use_explicit_destination = bigframes.options.compute.allow_large_results
397397

398398
job_config = bigquery.QueryJobConfig()
399399
# Use explicit destination to avoid 10GB limit of temporary table

tests/system/conftest.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@ def resourcemanager_client(
142142

143143
@pytest.fixture(scope="session")
144144
def session() -> Generator[bigframes.Session, None, None]:
145-
context = bigframes.BigQueryOptions(location="US", allow_large_results=False)
145+
context = bigframes.BigQueryOptions(location="US")
146146
session = bigframes.Session(context=context)
147147
yield session
148148
session.close() # close generated session at cleanup time
@@ -158,19 +158,15 @@ def session_load() -> Generator[bigframes.Session, None, None]:
158158

159159
@pytest.fixture(scope="session", params=["strict", "partial"])
160160
def maybe_ordered_session(request) -> Generator[bigframes.Session, None, None]:
161-
context = bigframes.BigQueryOptions(
162-
location="US", ordering_mode=request.param, allow_large_results=False
163-
)
161+
context = bigframes.BigQueryOptions(location="US", ordering_mode=request.param)
164162
session = bigframes.Session(context=context)
165163
yield session
166164
session.close() # close generated session at cleanup time
167165

168166

169167
@pytest.fixture(scope="session")
170168
def unordered_session() -> Generator[bigframes.Session, None, None]:
171-
context = bigframes.BigQueryOptions(
172-
location="US", ordering_mode="partial", allow_large_results=False
173-
)
169+
context = bigframes.BigQueryOptions(location="US", ordering_mode="partial")
174170
session = bigframes.Session(context=context)
175171
yield session
176172
session.close() # close generated session at cleanup time
@@ -1405,7 +1401,7 @@ def floats_product_bf(session, floats_product_pd):
14051401

14061402
@pytest.fixture(scope="session", autouse=True)
14071403
def use_fast_query_path():
1408-
with bpd.option_context("bigquery.allow_large_results", False):
1404+
with bpd.option_context("compute.allow_large_results", False):
14091405
yield
14101406

14111407

tests/system/large/test_dataframe_io.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
import bigframes
2222

2323
WIKIPEDIA_TABLE = "bigquery-public-data.samples.wikipedia"
24-
LARGE_TABLE_OPTION = "bigquery.allow_large_results"
24+
LARGE_TABLE_OPTION = "compute.allow_large_results"
2525

2626

2727
def test_to_pandas_batches_raise_when_large_result_not_allowed(session):

tests/system/small/test_dataframe.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4975,7 +4975,7 @@ def test_df_bool_interpretation_error(scalars_df_index):
49754975

49764976
def test_query_job_setters(scalars_df_default_index: dataframe.DataFrame):
49774977
# if allow_large_results=False, might not create query job
4978-
with bigframes.option_context("bigquery.allow_large_results", True):
4978+
with bigframes.option_context("compute.allow_large_results", True):
49794979
job_ids = set()
49804980
repr(scalars_df_default_index)
49814981
assert scalars_df_default_index.query_job is not None

tests/system/small/test_dataframe_io.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -254,7 +254,7 @@ def test_to_pandas_array_struct_correct_result(session):
254254
def test_to_pandas_override_global_option(scalars_df_index):
255255
# Direct call to_pandas uses global default setting (allow_large_results=True),
256256
# table has 'bqdf' prefix.
257-
with bigframes.option_context("bigquery.allow_large_results", True):
257+
with bigframes.option_context("compute.allow_large_results", True):
258258

259259
scalars_df_index.to_pandas()
260260
table_id = scalars_df_index._query_job.destination.table_id
@@ -268,7 +268,7 @@ def test_to_pandas_override_global_option(scalars_df_index):
268268

269269
def test_to_arrow_override_global_option(scalars_df_index):
270270
# Direct call to_arrow uses global default setting (allow_large_results=True),
271-
with bigframes.option_context("bigquery.allow_large_results", True):
271+
with bigframes.option_context("compute.allow_large_results", True):
272272

273273
scalars_df_index.to_arrow()
274274
table_id = scalars_df_index._query_job.destination.table_id

tests/system/small/test_index_io.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616

1717
def test_to_pandas_override_global_option(scalars_df_index):
18-
with bigframes.option_context("bigquery.allow_large_results", True):
18+
with bigframes.option_context("compute.allow_large_results", True):
1919

2020
bf_index = scalars_df_index.index
2121

@@ -31,7 +31,7 @@ def test_to_pandas_override_global_option(scalars_df_index):
3131

3232

3333
def test_to_numpy_override_global_option(scalars_df_index):
34-
with bigframes.option_context("bigquery.allow_large_results", True):
34+
with bigframes.option_context("compute.allow_large_results", True):
3535

3636
bf_index = scalars_df_index.index
3737

tests/system/small/test_progress_bar.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ def test_progress_bar_scalar_allow_large_results(
6464
capsys.readouterr() # clear output
6565

6666
with bf.option_context(
67-
"display.progress_bar", "terminal", "bigquery.allow_large_results", "True"
67+
"display.progress_bar", "terminal", "compute.allow_large_results", "True"
6868
):
6969
penguins_df_default_index["body_mass_g"].head(10).mean()
7070

tests/system/small/test_series.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3921,7 +3921,7 @@ def test_series_bool_interpretation_error(scalars_df_index):
39213921

39223922
def test_query_job_setters(scalars_dfs):
39233923
# if allow_large_results=False, might not create query job
3924-
with bigframes.option_context("bigquery.allow_large_results", True):
3924+
with bigframes.option_context("compute.allow_large_results", True):
39253925
job_ids = set()
39263926
df, _ = scalars_dfs
39273927
series = df["int64_col"]

tests/system/small/test_series_io.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616

1717
def test_to_pandas_override_global_option(scalars_df_index):
18-
with bigframes.option_context("bigquery.allow_large_results", True):
18+
with bigframes.option_context("compute.allow_large_results", True):
1919

2020
bf_series = scalars_df_index["int64_col"]
2121

tests/unit/_config/test_bigquery_options.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -188,5 +188,4 @@ def test_client_endpoints_override_set_shows_warning():
188188
def test_default_options():
189189
options = bigquery_options.BigQueryOptions()
190190

191-
assert options.allow_large_results is False
192191
assert options.ordering_mode == "strict"

0 commit comments

Comments
 (0)