diff --git a/google/cloud/bigquery/external_config.py b/google/cloud/bigquery/external_config.py
index 59b715995..8d66f8347 100644
--- a/google/cloud/bigquery/external_config.py
+++ b/google/cloud/bigquery/external_config.py
@@ -1027,7 +1027,7 @@ def __init__(
         default_storage_location_uri: Optional[str] = None,
         parameters: Optional[Dict[str, Any]] = None,
     ):
-        self._properties = {}
+        self._properties: Dict[str, Any] = {}
         self.default_storage_location_uri = default_storage_location_uri
         self.parameters = parameters
 
@@ -1168,6 +1168,7 @@ def to_api_repr(self) -> dict:
             Dict[str, Any]:
                 A dictionary in the format used by the BigQuery API.
         """
+
         config = copy.deepcopy(self._properties)
         return config
 
@@ -1186,3 +1187,6 @@ def from_api_repr(cls, resource: dict) -> ExternalCatalogTableOptions:
         config = cls()
         config._properties = copy.deepcopy(resource)
         return config
+
+    def __eq__(self, value):
+        return self.to_api_repr() == value.to_api_repr()
diff --git a/google/cloud/bigquery/schema.py b/google/cloud/bigquery/schema.py
index dc6241214..825e9bc5a 100644
--- a/google/cloud/bigquery/schema.py
+++ b/google/cloud/bigquery/schema.py
@@ -610,7 +610,7 @@ class TableSchema:
     def __init__(
         self, fields: Optional[list] = None, foreign_type_info: Optional[str] = None
    ):
-        self._properties = {}
+        self._properties: Dict[str, Any] = {}
         self.fields = fields
         self.foreign_type_info = foreign_type_info
 
@@ -621,7 +621,7 @@ def fields(self) -> Any:
         return self._properties.get("fields")
 
     @fields.setter
-    def fields(self, value: list, dtype: str) -> str:
+    def fields(self, value: list, dtype: str) -> None:
         value = _isinstance_or_raise(value, list, none_allowed=True)
         self._properties["fields"] = value
 
@@ -633,7 +633,7 @@ def foreign_type_info(self) -> Any:
         return self._properties.get("foreignTypeInfo")
 
     @foreign_type_info.setter
-    def foreign_type_info(self, value: str, dtype: str) -> str:
+    def foreign_type_info(self, value: str, dtype: str) -> None:
         if not isinstance(value, str):
             raise ValueError(
                 f"Pass {value} as a '{repr(dtype)}'." f"Got {type(value)}."
@@ -701,8 +701,8 @@ class StorageDescriptor:
            "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum
            length is 128 characters.
        locationUri (Optional[str]): The physical location of the table (e.g.
-           `gs://spark-dataproc-data/pangea-data/case_sensitive/` or
-           `gs://spark-dataproc-data/pangea-data/*`). The maximum length is
+           'gs://spark-dataproc-data/pangea-data/case_sensitive/' or
+           'gs://spark-dataproc-data/pangea-data/'). The maximum length is
            2056 bytes.
        outputFormat (Optional[str]): Specifies the fully qualified class name
            of the OutputFormat (e.g.
@@ -718,7 +718,7 @@ def __init__(
         output_format: Optional[str] = None,
         serde_info: Optional[SerDeInfo] = None,
     ):
-        self._properties = {}
+        self._properties: Dict[str, Any] = {}
         self.input_format = input_format
         self.location_uri = location_uri
         self.output_format = output_format
@@ -739,9 +739,9 @@ def input_format(self, value: Optional[str]):
 
     @property
     def location_uri(self) -> Any:
-        """Optional. The physical location of the table (e.g. `gs://spark-
-        dataproc-data/pangea-data/case_sensitive/` or `gs://spark-dataproc-
-        data/pangea-data/*`). The maximum length is 2056 bytes."""
+        """Optional. The physical location of the table (e.g. 'gs://spark-
+        dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-
+        data/pangea-data/'). The maximum length is 2056 bytes."""
 
         return self._properties.get("locationUri")
 
@@ -768,9 +768,9 @@ def serde_info(self) -> Any:
         """Optional. Serializer and deserializer information."""
         prop = _get_sub_prop(self._properties, ["serDeInfo"])
-        print(f"DINOSAUR in SD: {prop}\n\n{self._properties}")
         if prop is not None:
-            prop = SerDeInfo().from_api_repr(prop)
+            prop = StorageDescriptor().from_api_repr(prop)
+            print(f"DINOSAUR prop: {prop}")
         return prop
 
@@ -829,7 +829,7 @@ def __init__(
         name: Optional[str] = None,
         parameters: Optional[dict[str, str]] = None,
     ):
-        self._properties = {}
+        self._properties: Dict[str, Any] = {}
         self.serialization_library = serialization_library
         self.name = name
         self.parameters = parameters
@@ -892,6 +892,6 @@ def from_api_repr(cls, resource: dict) -> SerDeInfo:
         Returns:
             An instance of the class initialized with data from 'resource'.
         """
-        config = cls()
+        config = cls("")
         config._properties = copy.deepcopy(resource)
         return config
diff --git a/tests/unit/test_table.py b/tests/unit/test_table.py
index dafeb9aa0..22251d037 100644
--- a/tests/unit/test_table.py
+++ b/tests/unit/test_table.py
@@ -5880,6 +5880,7 @@ def test_external_catalog_table_options_getter(
         dataset = DatasetReference(self.PROJECT, self.DS_ID)
         table_ref = dataset.table(self.TABLE_NAME)
         table = self._make_one(table_ref)
+        expected = external_catalog_table_options
 
         # Confirm that external catalog table options have not been set
         assert table.external_catalog_table_options is None
@@ -5887,27 +5888,15 @@ def test_external_catalog_table_options_getter(
         # Add an ExternalCatalogTableOptions object to the table.
         table._properties[
             "externalCatalogTableOptions"
-        ] = external_catalog_table_options
-        table_repr = table.to_api_repr()
+        ] = external_catalog_table_options.to_api_repr()
 
         # Extract the ecto object.
-        ecto_output = table_repr["externalCatalogTableOptions"]
+        result = table.external_catalog_table_options
 
         # Confirm that external catalog table options are an
         # ExternalCatalogTableOptions object
-        assert isinstance(ecto_output, ExternalCatalogTableOptions)
-
-        storage_descriptor = request.getfixturevalue("_make_storage_descriptor")
-
-        expected = {
-            "connectionId": "connection123",
-            "parameters": {"key": "value"},
-            "storageDescriptor": storage_descriptor.to_api_repr(),
-        }
-        result = ecto_output.to_api_repr()
-
-        # Confirm that the api_repr of the ecto_output matches the inputs
-        print(f"DINOSAUR : {result}\n\n{expected}")
+        assert isinstance(result, ExternalCatalogTableOptions)
+        assert isinstance(expected, ExternalCatalogTableOptions)
         assert result == expected
 
     def test_external_catalog_table_options_setter(
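
For reference, a minimal sketch (not part of the patch) of what the new ExternalCatalogTableOptions.__eq__ enables: equality is delegated to to_api_repr(), so an options object and a copy rebuilt from its API representation compare equal. The URI and parameter values below are made up for illustration.

    from google.cloud.bigquery.external_config import ExternalCatalogTableOptions

    # Hypothetical values; any storage URI and parameters behave the same way.
    options = ExternalCatalogTableOptions(
        default_storage_location_uri="gs://example-bucket/example-path",
        parameters={"key": "value"},
    )

    # Round-trip through the API representation.
    restored = ExternalCatalogTableOptions.from_api_repr(options.to_api_repr())

    # __eq__ compares to_api_repr() of both sides, so the round-tripped
    # object is equal to the original.
    assert options == restored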