Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion google/cloud/bigquery/external_config.py
Original file line number Diff line number Diff line change
def __init__(
    self,
    default_storage_location_uri: Optional[str] = None,
    parameters: Optional[Dict[str, Any]] = None,
):
    """Initialize the external catalog options.

    Args:
        default_storage_location_uri: Per its name, a default storage
            location URI — confirm semantics against the API docs.
        parameters: Optional mapping of user-defined parameters.
    """
    # Backing store for all properties, keyed by API field name.
    # (The paste contained both the old untyped and the new annotated
    # assignment; only the annotated one is kept.)
    self._properties: Dict[str, Any] = {}
    # Assign through the property setters so any validation they do runs.
    self.default_storage_location_uri = default_storage_location_uri
    self.parameters = parameters

def to_api_repr(self) -> dict:
    """Build an API representation of this object.

    Returns:
        Dict[str, Any]:
            A dictionary in the format used by the BigQuery API.
    """
    # Deep copy so callers cannot mutate the internal property store
    # through the returned dictionary.
    return copy.deepcopy(self._properties)

Expand All @@ -1186,3 +1187,6 @@ def from_api_repr(cls, resource: dict) -> ExternalCatalogTableOptions:
config = cls()
config._properties = copy.deepcopy(resource)
return config

def __eq__(self, value):
    """Equality is defined by identical API representations.

    Returns NotImplemented for unrelated types instead of raising
    AttributeError on the missing ``to_api_repr`` attribute
    (e.g. ``options == 5`` previously raised).
    """
    if not isinstance(value, type(self)):
        return NotImplemented
    # NOTE(review): defining __eq__ without __hash__ makes instances
    # unhashable; confirm that is acceptable for this class.
    return self.to_api_repr() == value.to_api_repr()
26 changes: 13 additions & 13 deletions google/cloud/bigquery/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -610,7 +610,7 @@ class TableSchema:
def __init__(
    self, fields: Optional[list] = None, foreign_type_info: Optional[str] = None
):
    """Initialize a TableSchema.

    Args:
        fields: Optional list of schema fields, or None.
        foreign_type_info: Optional foreign-type info string, or None.
    """
    # Backing store for all properties, keyed by API field name.
    # (The paste contained both the old untyped and the new annotated
    # assignment; only the annotated one is kept.)
    self._properties: Dict[str, Any] = {}
    # Assign through the property setters so their validation runs.
    self.fields = fields
    self.foreign_type_info = foreign_type_info

Expand All @@ -621,7 +621,7 @@ def fields(self) -> Any:
return self._properties.get("fields")

@fields.setter
def fields(self, value: list) -> None:
    """Set the schema fields.

    A property setter is invoked with exactly one value by attribute
    assignment, so the previous extra required ``dtype`` parameter made
    every ``schema.fields = ...`` raise TypeError; it is removed.

    Args:
        value: The list of fields, or None to clear them. Anything else
            is rejected by ``_isinstance_or_raise``.
    """
    value = _isinstance_or_raise(value, list, none_allowed=True)
    self._properties["fields"] = value

Expand All @@ -633,7 +633,7 @@ def foreign_type_info(self) -> Any:
return self._properties.get("foreignTypeInfo")

@foreign_type_info.setter
def foreign_type_info(self, value: str, dtype: str) -> str:
def foreign_type_info(self, value: str, dtype: str) -> None:
if not isinstance(value, str):
raise ValueError(
f"Pass {value} as a '{repr(dtype)}'." f"Got {type(value)}."
Expand Down Expand Up @@ -701,8 +701,8 @@ class StorageDescriptor:
"org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum
length is 128 characters.
locationUri (Optional[str]): The physical location of the table (e.g.
`gs://spark-dataproc-data/pangea-data/case_sensitive/` or
`gs://spark-dataproc-data/pangea-data/*`). The maximum length is
'gs://spark-dataproc-data/pangea-data/case_sensitive/' or
'gs://spark-dataproc-data/pangea-data/'). The maximum length is
2056 bytes.
outputFormat (Optional[str]): Specifies the fully qualified class name
of the OutputFormat (e.g.
Expand All @@ -718,7 +718,7 @@ def __init__(
output_format: Optional[str] = None,
serde_info: Optional[SerDeInfo] = None,
):
self._properties = {}
self._properties: Dict[str, Any] = {}
self.input_format = input_format
self.location_uri = location_uri
self.output_format = output_format
Expand All @@ -739,9 +739,9 @@ def input_format(self, value: Optional[str]):

@property
def location_uri(self) -> Any:
    """Optional. The physical location of the table (e.g. 'gs://spark-
    dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-
    data/pangea-data/'). The maximum length is 2056 bytes."""
    # The pasted span held the pre-change and post-change docstring
    # variants back to back (the second was a stray no-op string
    # expression); only the final variant is kept.
    return self._properties.get("locationUri")

def serde_info(self) -> Any:
    """Optional. Serializer and deserializer information."""
    prop = _get_sub_prop(self._properties, ["serDeInfo"])
    if prop is not None:
        # The "serDeInfo" sub-property is a SerDeInfo resource, so it must
        # deserialize to SerDeInfo — the change to
        # ``StorageDescriptor().from_api_repr(prop)`` wraps the payload in
        # the wrong type (and self-recursively constructs the enclosing
        # class). Calling the classmethod on the class also avoids
        # instantiating SerDeInfo with missing required arguments.
        prop = SerDeInfo.from_api_repr(prop)
    # Leftover DINOSAUR debug prints removed.
    return prop

Expand Down Expand Up @@ -829,7 +829,7 @@ def __init__(
name: Optional[str] = None,
parameters: Optional[dict[str, str]] = None,
):
self._properties = {}
self._properties: Dict[str, Any] = {}
self.serialization_library = serialization_library
self.name = name
self.parameters = parameters
def from_api_repr(cls, resource: dict) -> SerDeInfo:
    """Factory: construct an instance of the class (cls) from its API
    representation. (Docstring head was above the diff fold — confirm
    wording against the original.)

    Args:
        resource (Dict[str, Any]): API representation of the object to be
            instantiated.

    Returns:
        An instance of the class initialized with data from 'resource'.
    """
    # The paste contained both ``config = cls()`` (which raises TypeError:
    # __init__ requires serialization_library) and ``config = cls("")``;
    # only the working call is kept. The throwaway "" is immediately
    # discarded because _properties is replaced wholesale below.
    config = cls("")
    config._properties = copy.deepcopy(resource)
    return config
21 changes: 5 additions & 16 deletions tests/unit/test_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -5880,34 +5880,23 @@ def test_external_catalog_table_options_getter(
dataset = DatasetReference(self.PROJECT, self.DS_ID)
table_ref = dataset.table(self.TABLE_NAME)
table = self._make_one(table_ref)
expected = external_catalog_table_options

# Confirm that external catalog table options have not been set
assert table.external_catalog_table_options is None

# Add an ExternalCatalogTableOptions object to the table.
table._properties[
"externalCatalogTableOptions"
] = external_catalog_table_options
table_repr = table.to_api_repr()
] = external_catalog_table_options.to_api_repr()

# Extract the ecto object.
ecto_output = table_repr["externalCatalogTableOptions"]
result = table.external_catalog_table_options

# Confirm that external catalog table options are an
# ExternalCatalogTableOptions object
assert isinstance(ecto_output, ExternalCatalogTableOptions)

storage_descriptor = request.getfixturevalue("_make_storage_descriptor")

expected = {
"connectionId": "connection123",
"parameters": {"key": "value"},
"storageDescriptor": storage_descriptor.to_api_repr(),
}
result = ecto_output.to_api_repr()

# Confirm that the api_repr of the ecto_output matches the inputs
print(f"DINOSAUR : {result}\n\n{expected}")
assert isinstance(result, ExternalCatalogTableOptions)
assert isinstance(expected, ExternalCatalogTableOptions)
assert result == expected

def test_external_catalog_table_options_setter(
Expand Down