diff --git a/.coveragerc b/.coveragerc index 23861a8eb..33ea00ba9 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,7 +2,6 @@ branch = True [report] -fail_under = 100 show_missing = True omit = google/cloud/bigquery/__init__.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8912e9b5d..1bbd78783 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.0 + rev: 3.9.1 hooks: - id: flake8 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index a0e330e44..20ba9e62e 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -160,21 +160,7 @@ Running System Tests auth settings and change some configuration in your project to run all the tests. -- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. ************* Test Coverage diff --git a/docs/conf.py b/docs/conf.py index fdea01aad..e833cb143 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -361,9 +361,13 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + } diff --git a/examples/feature_fragments b/examples/feature_fragments new file mode 100644 index 000000000..7d1b62477 --- /dev/null +++ b/examples/feature_fragments @@ -0,0 +1,14 @@ + + + + + + + + + + + + + +} diff --git a/google/cloud/bigquery_v2/__init__.py b/google/cloud/bigquery_v2/__init__.py index ebcc26bef..2691069fc 100644 --- a/google/cloud/bigquery_v2/__init__.py +++ b/google/cloud/bigquery_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,6 +15,8 @@ # +from .services.model_service import ModelServiceAsyncClient + from .types.encryption_config import EncryptionConfiguration from .types.model import DeleteModelRequest from .types.model import GetModelRequest @@ -29,18 +30,18 @@ from .types.standard_sql import StandardSqlStructType from .types.table_reference import TableReference - __all__ = ( - "DeleteModelRequest", - "EncryptionConfiguration", - "GetModelRequest", - "ListModelsRequest", - "ListModelsResponse", - "Model", - "ModelReference", - "PatchModelRequest", - "StandardSqlDataType", - "StandardSqlField", - "StandardSqlStructType", - "TableReference", +'DeleteModelRequest', +'EncryptionConfiguration', 
+'GetModelRequest', +'ListModelsRequest', +'ListModelsResponse', +'Model', +'ModelReference', + +'PatchModelRequest', +'StandardSqlDataType', +'StandardSqlField', +'StandardSqlStructType', +'TableReference', ) diff --git a/google/cloud/bigquery_v2/gapic_metadata.json b/google/cloud/bigquery_v2/gapic_metadata.json new file mode 100644 index 000000000..3251a2630 --- /dev/null +++ b/google/cloud/bigquery_v2/gapic_metadata.json @@ -0,0 +1,63 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery_v2", + "protoPackage": "google.cloud.bigquery.v2", + "schema": "1.0", + "services": { + "ModelService": { + "clients": { + "grpc": { + "libraryClient": "ModelServiceClient", + "rpcs": { + "DeleteModel": { + "methods": [ + "delete_model" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "PatchModel": { + "methods": [ + "patch_model" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ModelServiceAsyncClient", + "rpcs": { + "DeleteModel": { + "methods": [ + "delete_model" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "PatchModel": { + "methods": [ + "patch_model" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/bigquery_v2/types/__init__.py b/google/cloud/bigquery_v2/types/__init__.py index b76e65c65..3de848e23 100644 --- a/google/cloud/bigquery_v2/types/__init__.py +++ b/google/cloud/bigquery_v2/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,8 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -from .encryption_config import EncryptionConfiguration +from .encryption_config import ( + EncryptionConfiguration, +) from .model import ( DeleteModelRequest, GetModelRequest, @@ -24,25 +24,29 @@ Model, PatchModelRequest, ) -from .model_reference import ModelReference +from .model_reference import ( + ModelReference, +) from .standard_sql import ( StandardSqlDataType, StandardSqlField, StandardSqlStructType, ) -from .table_reference import TableReference +from .table_reference import ( + TableReference, +) __all__ = ( - "EncryptionConfiguration", - "DeleteModelRequest", - "GetModelRequest", - "ListModelsRequest", - "ListModelsResponse", - "Model", - "PatchModelRequest", - "ModelReference", - "StandardSqlDataType", - "StandardSqlField", - "StandardSqlStructType", - "TableReference", + 'EncryptionConfiguration', + 'DeleteModelRequest', + 'GetModelRequest', + 'ListModelsRequest', + 'ListModelsResponse', + 'Model', + 'PatchModelRequest', + 'ModelReference', + 'StandardSqlDataType', + 'StandardSqlField', + 'StandardSqlStructType', + 'TableReference', ) diff --git a/google/cloud/bigquery_v2/types/encryption_config.py b/google/cloud/bigquery_v2/types/encryption_config.py index 2d801bde3..a1f60c1b9 100644 --- a/google/cloud/bigquery_v2/types/encryption_config.py +++ b/google/cloud/bigquery_v2/types/encryption_config.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.protobuf import wrappers_pb2 as wrappers # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore __protobuf__ = proto.module( - package="google.cloud.bigquery.v2", manifest={"EncryptionConfiguration",}, + package='google.cloud.bigquery.v2', + manifest={ + 'EncryptionConfiguration', + }, ) class EncryptionConfiguration(proto.Message): r""" - Attributes: kms_key_name (google.protobuf.wrappers_pb2.StringValue): Optional. Describes the Cloud KMS encryption @@ -38,7 +37,11 @@ class EncryptionConfiguration(proto.Message): this encryption key. """ - kms_key_name = proto.Field(proto.MESSAGE, number=1, message=wrappers.StringValue,) + kms_key_name = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.StringValue, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigquery_v2/types/model.py b/google/cloud/bigquery_v2/types/model.py index 8ae158b64..652ce634c 100644 --- a/google/cloud/bigquery_v2/types/model.py +++ b/google/cloud/bigquery_v2/types/model.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,34 +13,31 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.bigquery_v2.types import encryption_config from google.cloud.bigquery_v2.types import model_reference as gcb_model_reference from google.cloud.bigquery_v2.types import standard_sql from google.cloud.bigquery_v2.types import table_reference -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.protobuf import wrappers_pb2 as wrappers # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore __protobuf__ = proto.module( - package="google.cloud.bigquery.v2", + package='google.cloud.bigquery.v2', manifest={ - "Model", - "GetModelRequest", - "PatchModelRequest", - "DeleteModelRequest", - "ListModelsRequest", - "ListModelsResponse", + 'Model', + 'GetModelRequest', + 'PatchModelRequest', + 'DeleteModelRequest', + 'ListModelsRequest', + 'ListModelsResponse', }, ) class Model(proto.Message): r""" - Attributes: etag (str): Output only. A hash of this resource. @@ -101,7 +97,6 @@ class Model(proto.Message): model. The output of the model will have a `predicted_` prefix to these columns. """ - class ModelType(proto.Enum): r"""Indicates the type of the Model.""" MODEL_TYPE_UNSPECIFIED = 0 @@ -251,8 +246,7 @@ class FeedbackType(proto.Enum): EXPLICIT = 2 class SeasonalPeriod(proto.Message): - r"""""" - + r""" """ class SeasonalPeriodType(proto.Enum): r"""""" SEASONAL_PERIOD_TYPE_UNSPECIFIED = 0 @@ -264,8 +258,7 @@ class SeasonalPeriodType(proto.Enum): YEARLY = 6 class KmeansEnums(proto.Message): - r"""""" - + r""" """ class KmeansInitializationMethod(proto.Enum): r"""Indicates the method used to initialize the centroids for KMeans clustering algorithm. 
@@ -293,22 +286,30 @@ class RegressionMetrics(proto.Message): """ mean_absolute_error = proto.Field( - proto.MESSAGE, number=1, message=wrappers.DoubleValue, + proto.MESSAGE, + number=1, + message=wrappers_pb2.DoubleValue, ) - mean_squared_error = proto.Field( - proto.MESSAGE, number=2, message=wrappers.DoubleValue, + proto.MESSAGE, + number=2, + message=wrappers_pb2.DoubleValue, ) - mean_squared_log_error = proto.Field( - proto.MESSAGE, number=3, message=wrappers.DoubleValue, + proto.MESSAGE, + number=3, + message=wrappers_pb2.DoubleValue, ) - median_absolute_error = proto.Field( - proto.MESSAGE, number=4, message=wrappers.DoubleValue, + proto.MESSAGE, + number=4, + message=wrappers_pb2.DoubleValue, + ) + r_squared = proto.Field( + proto.MESSAGE, + number=5, + message=wrappers_pb2.DoubleValue, ) - - r_squared = proto.Field(proto.MESSAGE, number=5, message=wrappers.DoubleValue,) class AggregateClassificationMetrics(proto.Message): r"""Aggregate metrics for classification/classifier models. For @@ -350,19 +351,41 @@ class AggregateClassificationMetrics(proto.Message): is a macro-averaged metric. """ - precision = proto.Field(proto.MESSAGE, number=1, message=wrappers.DoubleValue,) - - recall = proto.Field(proto.MESSAGE, number=2, message=wrappers.DoubleValue,) - - accuracy = proto.Field(proto.MESSAGE, number=3, message=wrappers.DoubleValue,) - - threshold = proto.Field(proto.MESSAGE, number=4, message=wrappers.DoubleValue,) - - f1_score = proto.Field(proto.MESSAGE, number=5, message=wrappers.DoubleValue,) - - log_loss = proto.Field(proto.MESSAGE, number=6, message=wrappers.DoubleValue,) - - roc_auc = proto.Field(proto.MESSAGE, number=7, message=wrappers.DoubleValue,) + precision = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.DoubleValue, + ) + recall = proto.Field( + proto.MESSAGE, + number=2, + message=wrappers_pb2.DoubleValue, + ) + accuracy = proto.Field( + proto.MESSAGE, + number=3, + message=wrappers_pb2.DoubleValue, + ) + threshold = proto.Field( + proto.MESSAGE, + number=4, + message=wrappers_pb2.DoubleValue, + ) + f1_score = proto.Field( + proto.MESSAGE, + number=5, + message=wrappers_pb2.DoubleValue, + ) + log_loss = proto.Field( + proto.MESSAGE, + number=6, + message=wrappers_pb2.DoubleValue, + ) + roc_auc = proto.Field( + proto.MESSAGE, + number=7, + message=wrappers_pb2.DoubleValue, + ) class BinaryClassificationMetrics(proto.Message): r"""Evaluation metrics for binary classification/classifier @@ -382,7 +405,6 @@ class BinaryClassificationMetrics(proto.Message): class BinaryConfusionMatrix(proto.Message): r"""Confusion matrix for binary classification models. 
- Attributes: positive_class_threshold (google.protobuf.wrappers_pb2.DoubleValue): Threshold value used when computing each of @@ -410,52 +432,69 @@ class BinaryConfusionMatrix(proto.Message): """ positive_class_threshold = proto.Field( - proto.MESSAGE, number=1, message=wrappers.DoubleValue, + proto.MESSAGE, + number=1, + message=wrappers_pb2.DoubleValue, ) - true_positives = proto.Field( - proto.MESSAGE, number=2, message=wrappers.Int64Value, + proto.MESSAGE, + number=2, + message=wrappers_pb2.Int64Value, ) - false_positives = proto.Field( - proto.MESSAGE, number=3, message=wrappers.Int64Value, + proto.MESSAGE, + number=3, + message=wrappers_pb2.Int64Value, ) - true_negatives = proto.Field( - proto.MESSAGE, number=4, message=wrappers.Int64Value, + proto.MESSAGE, + number=4, + message=wrappers_pb2.Int64Value, ) - false_negatives = proto.Field( - proto.MESSAGE, number=5, message=wrappers.Int64Value, + proto.MESSAGE, + number=5, + message=wrappers_pb2.Int64Value, ) - precision = proto.Field( - proto.MESSAGE, number=6, message=wrappers.DoubleValue, + proto.MESSAGE, + number=6, + message=wrappers_pb2.DoubleValue, + ) + recall = proto.Field( + proto.MESSAGE, + number=7, + message=wrappers_pb2.DoubleValue, ) - - recall = proto.Field(proto.MESSAGE, number=7, message=wrappers.DoubleValue,) - f1_score = proto.Field( - proto.MESSAGE, number=8, message=wrappers.DoubleValue, + proto.MESSAGE, + number=8, + message=wrappers_pb2.DoubleValue, ) - accuracy = proto.Field( - proto.MESSAGE, number=9, message=wrappers.DoubleValue, + proto.MESSAGE, + number=9, + message=wrappers_pb2.DoubleValue, ) aggregate_classification_metrics = proto.Field( - proto.MESSAGE, number=1, message="Model.AggregateClassificationMetrics", + proto.MESSAGE, + number=1, + message='Model.AggregateClassificationMetrics', ) - binary_confusion_matrix_list = proto.RepeatedField( proto.MESSAGE, number=2, - message="Model.BinaryClassificationMetrics.BinaryConfusionMatrix", + message='Model.BinaryClassificationMetrics.BinaryConfusionMatrix', + ) + positive_label = proto.Field( + proto.STRING, + number=3, + ) + negative_label = proto.Field( + proto.STRING, + number=4, ) - - positive_label = proto.Field(proto.STRING, number=3) - - negative_label = proto.Field(proto.STRING, number=4) class MultiClassClassificationMetrics(proto.Message): r"""Evaluation metrics for multi-class classification/classifier @@ -470,7 +509,6 @@ class MultiClassClassificationMetrics(proto.Message): class ConfusionMatrix(proto.Message): r"""Confusion matrix for multi-class classification models. - Attributes: confidence_threshold (google.protobuf.wrappers_pb2.DoubleValue): Confidence threshold used when computing the @@ -481,7 +519,6 @@ class ConfusionMatrix(proto.Message): class Entry(proto.Message): r"""A single entry in the confusion matrix. - Attributes: predicted_label (str): The predicted label. For confidence_threshold > 0, we will @@ -492,15 +529,18 @@ class Entry(proto.Message): label. """ - predicted_label = proto.Field(proto.STRING, number=1) - + predicted_label = proto.Field( + proto.STRING, + number=1, + ) item_count = proto.Field( - proto.MESSAGE, number=2, message=wrappers.Int64Value, + proto.MESSAGE, + number=2, + message=wrappers_pb2.Int64Value, ) class Row(proto.Message): r"""A single row in the confusion matrix. - Attributes: actual_label (str): The original label of this row. @@ -508,37 +548,40 @@ class Row(proto.Message): Info describing predicted label distribution. 
""" - actual_label = proto.Field(proto.STRING, number=1) - + actual_label = proto.Field( + proto.STRING, + number=1, + ) entries = proto.RepeatedField( proto.MESSAGE, number=2, - message="Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry", + message='Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry', ) confidence_threshold = proto.Field( - proto.MESSAGE, number=1, message=wrappers.DoubleValue, + proto.MESSAGE, + number=1, + message=wrappers_pb2.DoubleValue, ) - rows = proto.RepeatedField( proto.MESSAGE, number=2, - message="Model.MultiClassClassificationMetrics.ConfusionMatrix.Row", + message='Model.MultiClassClassificationMetrics.ConfusionMatrix.Row', ) aggregate_classification_metrics = proto.Field( - proto.MESSAGE, number=1, message="Model.AggregateClassificationMetrics", + proto.MESSAGE, + number=1, + message='Model.AggregateClassificationMetrics', ) - confusion_matrix_list = proto.RepeatedField( proto.MESSAGE, number=2, - message="Model.MultiClassClassificationMetrics.ConfusionMatrix", + message='Model.MultiClassClassificationMetrics.ConfusionMatrix', ) class ClusteringMetrics(proto.Message): r"""Evaluation metrics for clustering models. - Attributes: davies_bouldin_index (google.protobuf.wrappers_pb2.DoubleValue): Davies-Bouldin index. @@ -551,7 +594,6 @@ class ClusteringMetrics(proto.Message): class Cluster(proto.Message): r"""Message containing the information about one cluster. - Attributes: centroid_id (int): Centroid id. @@ -565,7 +607,6 @@ class Cluster(proto.Message): class FeatureValue(proto.Message): r"""Representative value of a single feature within the cluster. - Attributes: feature_column (str): The feature column name. @@ -578,7 +619,6 @@ class FeatureValue(proto.Message): class CategoricalValue(proto.Message): r"""Representative value of a categorical feature. - Attributes: category_counts (Sequence[google.cloud.bigquery_v2.types.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount]): Counts of all categories for the categorical feature. If @@ -590,7 +630,6 @@ class CategoricalValue(proto.Message): class CategoryCount(proto.Message): r"""Represents the count of a single category within the cluster. - Attributes: category (str): The name of category. @@ -599,54 +638,68 @@ class CategoryCount(proto.Message): category within the cluster. 
""" - category = proto.Field(proto.STRING, number=1) - + category = proto.Field( + proto.STRING, + number=1, + ) count = proto.Field( - proto.MESSAGE, number=2, message=wrappers.Int64Value, + proto.MESSAGE, + number=2, + message=wrappers_pb2.Int64Value, ) category_counts = proto.RepeatedField( proto.MESSAGE, number=1, - message="Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount", + message='Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount', ) - feature_column = proto.Field(proto.STRING, number=1) - + feature_column = proto.Field( + proto.STRING, + number=1, + ) numerical_value = proto.Field( proto.MESSAGE, number=2, - oneof="value", - message=wrappers.DoubleValue, + oneof='value', + message=wrappers_pb2.DoubleValue, ) - categorical_value = proto.Field( proto.MESSAGE, number=3, - oneof="value", - message="Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue", + oneof='value', + message='Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue', ) - centroid_id = proto.Field(proto.INT64, number=1) - + centroid_id = proto.Field( + proto.INT64, + number=1, + ) feature_values = proto.RepeatedField( proto.MESSAGE, number=2, - message="Model.ClusteringMetrics.Cluster.FeatureValue", + message='Model.ClusteringMetrics.Cluster.FeatureValue', + ) + count = proto.Field( + proto.MESSAGE, + number=3, + message=wrappers_pb2.Int64Value, ) - - count = proto.Field(proto.MESSAGE, number=3, message=wrappers.Int64Value,) davies_bouldin_index = proto.Field( - proto.MESSAGE, number=1, message=wrappers.DoubleValue, + proto.MESSAGE, + number=1, + message=wrappers_pb2.DoubleValue, ) - mean_squared_distance = proto.Field( - proto.MESSAGE, number=2, message=wrappers.DoubleValue, + proto.MESSAGE, + number=2, + message=wrappers_pb2.DoubleValue, ) - clusters = proto.RepeatedField( - proto.MESSAGE, number=3, message="Model.ClusteringMetrics.Cluster", + proto.MESSAGE, + number=3, + message='Model.ClusteringMetrics.Cluster', ) class RankingMetrics(proto.Message): @@ -677,24 +730,28 @@ class RankingMetrics(proto.Message): """ mean_average_precision = proto.Field( - proto.MESSAGE, number=1, message=wrappers.DoubleValue, + proto.MESSAGE, + number=1, + message=wrappers_pb2.DoubleValue, ) - mean_squared_error = proto.Field( - proto.MESSAGE, number=2, message=wrappers.DoubleValue, + proto.MESSAGE, + number=2, + message=wrappers_pb2.DoubleValue, ) - normalized_discounted_cumulative_gain = proto.Field( - proto.MESSAGE, number=3, message=wrappers.DoubleValue, + proto.MESSAGE, + number=3, + message=wrappers_pb2.DoubleValue, ) - average_rank = proto.Field( - proto.MESSAGE, number=4, message=wrappers.DoubleValue, + proto.MESSAGE, + number=4, + message=wrappers_pb2.DoubleValue, ) class ArimaForecastingMetrics(proto.Message): r"""Model evaluation metrics for ARIMA forecasting models. - Attributes: non_seasonal_order (Sequence[google.cloud.bigquery_v2.types.Model.ArimaOrder]): Non-seasonal order. 
@@ -735,41 +792,56 @@ class ArimaSingleModelForecastingMetrics(proto.Message): """ non_seasonal_order = proto.Field( - proto.MESSAGE, number=1, message="Model.ArimaOrder", + proto.MESSAGE, + number=1, + message='Model.ArimaOrder', ) - arima_fitting_metrics = proto.Field( - proto.MESSAGE, number=2, message="Model.ArimaFittingMetrics", + proto.MESSAGE, + number=2, + message='Model.ArimaFittingMetrics', + ) + has_drift = proto.Field( + proto.BOOL, + number=3, + ) + time_series_id = proto.Field( + proto.STRING, + number=4, ) - - has_drift = proto.Field(proto.BOOL, number=3) - - time_series_id = proto.Field(proto.STRING, number=4) - seasonal_periods = proto.RepeatedField( - proto.ENUM, number=5, enum="Model.SeasonalPeriod.SeasonalPeriodType", + proto.ENUM, + number=5, + enum='Model.SeasonalPeriod.SeasonalPeriodType', ) non_seasonal_order = proto.RepeatedField( - proto.MESSAGE, number=1, message="Model.ArimaOrder", + proto.MESSAGE, + number=1, + message='Model.ArimaOrder', ) - arima_fitting_metrics = proto.RepeatedField( - proto.MESSAGE, number=2, message="Model.ArimaFittingMetrics", + proto.MESSAGE, + number=2, + message='Model.ArimaFittingMetrics', ) - seasonal_periods = proto.RepeatedField( - proto.ENUM, number=3, enum="Model.SeasonalPeriod.SeasonalPeriodType", + proto.ENUM, + number=3, + enum='Model.SeasonalPeriod.SeasonalPeriodType', + ) + has_drift = proto.RepeatedField( + proto.BOOL, + number=4, + ) + time_series_id = proto.RepeatedField( + proto.STRING, + number=5, ) - - has_drift = proto.RepeatedField(proto.BOOL, number=4) - - time_series_id = proto.RepeatedField(proto.STRING, number=5) - arima_single_model_forecasting_metrics = proto.RepeatedField( proto.MESSAGE, number=6, - message="Model.ArimaForecastingMetrics.ArimaSingleModelForecastingMetrics", + message='Model.ArimaForecastingMetrics.ArimaSingleModelForecastingMetrics', ) class EvaluationMetrics(proto.Message): @@ -798,36 +870,40 @@ class EvaluationMetrics(proto.Message): """ regression_metrics = proto.Field( - proto.MESSAGE, number=1, oneof="metrics", message="Model.RegressionMetrics", + proto.MESSAGE, + number=1, + oneof='metrics', + message='Model.RegressionMetrics', ) - binary_classification_metrics = proto.Field( proto.MESSAGE, number=2, - oneof="metrics", - message="Model.BinaryClassificationMetrics", + oneof='metrics', + message='Model.BinaryClassificationMetrics', ) - multi_class_classification_metrics = proto.Field( proto.MESSAGE, number=3, - oneof="metrics", - message="Model.MultiClassClassificationMetrics", + oneof='metrics', + message='Model.MultiClassClassificationMetrics', ) - clustering_metrics = proto.Field( - proto.MESSAGE, number=4, oneof="metrics", message="Model.ClusteringMetrics", + proto.MESSAGE, + number=4, + oneof='metrics', + message='Model.ClusteringMetrics', ) - ranking_metrics = proto.Field( - proto.MESSAGE, number=5, oneof="metrics", message="Model.RankingMetrics", + proto.MESSAGE, + number=5, + oneof='metrics', + message='Model.RankingMetrics', ) - arima_forecasting_metrics = proto.Field( proto.MESSAGE, number=6, - oneof="metrics", - message="Model.ArimaForecastingMetrics", + oneof='metrics', + message='Model.ArimaForecastingMetrics', ) class DataSplitResult(proto.Message): @@ -844,11 +920,14 @@ class DataSplitResult(proto.Message): """ training_table = proto.Field( - proto.MESSAGE, number=1, message=table_reference.TableReference, + proto.MESSAGE, + number=1, + message=table_reference.TableReference, ) - evaluation_table = proto.Field( - proto.MESSAGE, number=2, 
message=table_reference.TableReference, + proto.MESSAGE, + number=2, + message=table_reference.TableReference, ) class ArimaOrder(proto.Message): @@ -864,15 +943,21 @@ class ArimaOrder(proto.Message): Order of the moving-average part. """ - p = proto.Field(proto.INT64, number=1) - - d = proto.Field(proto.INT64, number=2) - - q = proto.Field(proto.INT64, number=3) + p = proto.Field( + proto.INT64, + number=1, + ) + d = proto.Field( + proto.INT64, + number=2, + ) + q = proto.Field( + proto.INT64, + number=3, + ) class ArimaFittingMetrics(proto.Message): r"""ARIMA model fitting metrics. - Attributes: log_likelihood (float): Log-likelihood. @@ -882,11 +967,18 @@ class ArimaFittingMetrics(proto.Message): Variance. """ - log_likelihood = proto.Field(proto.DOUBLE, number=1) - - aic = proto.Field(proto.DOUBLE, number=2) - - variance = proto.Field(proto.DOUBLE, number=3) + log_likelihood = proto.Field( + proto.DOUBLE, + number=1, + ) + aic = proto.Field( + proto.DOUBLE, + number=2, + ) + variance = proto.Field( + proto.DOUBLE, + number=3, + ) class GlobalExplanation(proto.Message): r"""Global explanations containing the top most important @@ -906,7 +998,6 @@ class GlobalExplanation(proto.Message): class Explanation(proto.Message): r"""Explanation for a single feature. - Attributes: feature_name (str): Full name of the feature. For non-numerical features, will @@ -917,21 +1008,28 @@ class Explanation(proto.Message): Attribution of feature. """ - feature_name = proto.Field(proto.STRING, number=1) - + feature_name = proto.Field( + proto.STRING, + number=1, + ) attribution = proto.Field( - proto.MESSAGE, number=2, message=wrappers.DoubleValue, + proto.MESSAGE, + number=2, + message=wrappers_pb2.DoubleValue, ) explanations = proto.RepeatedField( - proto.MESSAGE, number=1, message="Model.GlobalExplanation.Explanation", + proto.MESSAGE, + number=1, + message='Model.GlobalExplanation.Explanation', + ) + class_label = proto.Field( + proto.STRING, + number=2, ) - - class_label = proto.Field(proto.STRING, number=2) class TrainingRun(proto.Message): r"""Information about a single training query run for the model. - Attributes: training_options (google.cloud.bigquery_v2.types.Model.TrainingRun.TrainingOptions): Options that were used for this training run, @@ -957,7 +1055,6 @@ class TrainingRun(proto.Message): class TrainingOptions(proto.Message): r""" - Attributes: max_iterations (int): The maximum number of iterations in training. @@ -1100,131 +1197,200 @@ class TrainingOptions(proto.Message): The max value of non-seasonal p and q. 
""" - max_iterations = proto.Field(proto.INT64, number=1) - - loss_type = proto.Field(proto.ENUM, number=2, enum="Model.LossType",) - - learn_rate = proto.Field(proto.DOUBLE, number=3) - + max_iterations = proto.Field( + proto.INT64, + number=1, + ) + loss_type = proto.Field( + proto.ENUM, + number=2, + enum='Model.LossType', + ) + learn_rate = proto.Field( + proto.DOUBLE, + number=3, + ) l1_regularization = proto.Field( - proto.MESSAGE, number=4, message=wrappers.DoubleValue, + proto.MESSAGE, + number=4, + message=wrappers_pb2.DoubleValue, ) - l2_regularization = proto.Field( - proto.MESSAGE, number=5, message=wrappers.DoubleValue, + proto.MESSAGE, + number=5, + message=wrappers_pb2.DoubleValue, ) - min_relative_progress = proto.Field( - proto.MESSAGE, number=6, message=wrappers.DoubleValue, + proto.MESSAGE, + number=6, + message=wrappers_pb2.DoubleValue, ) - warm_start = proto.Field( - proto.MESSAGE, number=7, message=wrappers.BoolValue, + proto.MESSAGE, + number=7, + message=wrappers_pb2.BoolValue, ) - early_stop = proto.Field( - proto.MESSAGE, number=8, message=wrappers.BoolValue, + proto.MESSAGE, + number=8, + message=wrappers_pb2.BoolValue, + ) + input_label_columns = proto.RepeatedField( + proto.STRING, + number=9, ) - - input_label_columns = proto.RepeatedField(proto.STRING, number=9) - data_split_method = proto.Field( - proto.ENUM, number=10, enum="Model.DataSplitMethod", + proto.ENUM, + number=10, + enum='Model.DataSplitMethod', + ) + data_split_eval_fraction = proto.Field( + proto.DOUBLE, + number=11, + ) + data_split_column = proto.Field( + proto.STRING, + number=12, ) - - data_split_eval_fraction = proto.Field(proto.DOUBLE, number=11) - - data_split_column = proto.Field(proto.STRING, number=12) - learn_rate_strategy = proto.Field( - proto.ENUM, number=13, enum="Model.LearnRateStrategy", + proto.ENUM, + number=13, + enum='Model.LearnRateStrategy', + ) + initial_learn_rate = proto.Field( + proto.DOUBLE, + number=16, + ) + label_class_weights = proto.MapField( + proto.STRING, + proto.DOUBLE, + number=17, + ) + user_column = proto.Field( + proto.STRING, + number=18, + ) + item_column = proto.Field( + proto.STRING, + number=19, ) - - initial_learn_rate = proto.Field(proto.DOUBLE, number=16) - - label_class_weights = proto.MapField(proto.STRING, proto.DOUBLE, number=17) - - user_column = proto.Field(proto.STRING, number=18) - - item_column = proto.Field(proto.STRING, number=19) - distance_type = proto.Field( - proto.ENUM, number=20, enum="Model.DistanceType", + proto.ENUM, + number=20, + enum='Model.DistanceType', + ) + num_clusters = proto.Field( + proto.INT64, + number=21, + ) + model_uri = proto.Field( + proto.STRING, + number=22, ) - - num_clusters = proto.Field(proto.INT64, number=21) - - model_uri = proto.Field(proto.STRING, number=22) - optimization_strategy = proto.Field( - proto.ENUM, number=23, enum="Model.OptimizationStrategy", + proto.ENUM, + number=23, + enum='Model.OptimizationStrategy', + ) + hidden_units = proto.RepeatedField( + proto.INT64, + number=24, + ) + batch_size = proto.Field( + proto.INT64, + number=25, ) - - hidden_units = proto.RepeatedField(proto.INT64, number=24) - - batch_size = proto.Field(proto.INT64, number=25) - dropout = proto.Field( - proto.MESSAGE, number=26, message=wrappers.DoubleValue, + proto.MESSAGE, + number=26, + message=wrappers_pb2.DoubleValue, + ) + max_tree_depth = proto.Field( + proto.INT64, + number=27, + ) + subsample = proto.Field( + proto.DOUBLE, + number=28, ) - - max_tree_depth = proto.Field(proto.INT64, number=27) - - 
subsample = proto.Field(proto.DOUBLE, number=28) - min_split_loss = proto.Field( - proto.MESSAGE, number=29, message=wrappers.DoubleValue, + proto.MESSAGE, + number=29, + message=wrappers_pb2.DoubleValue, + ) + num_factors = proto.Field( + proto.INT64, + number=30, ) - - num_factors = proto.Field(proto.INT64, number=30) - feedback_type = proto.Field( - proto.ENUM, number=31, enum="Model.FeedbackType", + proto.ENUM, + number=31, + enum='Model.FeedbackType', ) - wals_alpha = proto.Field( - proto.MESSAGE, number=32, message=wrappers.DoubleValue, + proto.MESSAGE, + number=32, + message=wrappers_pb2.DoubleValue, ) - kmeans_initialization_method = proto.Field( proto.ENUM, number=33, - enum="Model.KmeansEnums.KmeansInitializationMethod", + enum='Model.KmeansEnums.KmeansInitializationMethod', + ) + kmeans_initialization_column = proto.Field( + proto.STRING, + number=34, + ) + time_series_timestamp_column = proto.Field( + proto.STRING, + number=35, + ) + time_series_data_column = proto.Field( + proto.STRING, + number=36, + ) + auto_arima = proto.Field( + proto.BOOL, + number=37, ) - - kmeans_initialization_column = proto.Field(proto.STRING, number=34) - - time_series_timestamp_column = proto.Field(proto.STRING, number=35) - - time_series_data_column = proto.Field(proto.STRING, number=36) - - auto_arima = proto.Field(proto.BOOL, number=37) - non_seasonal_order = proto.Field( - proto.MESSAGE, number=38, message="Model.ArimaOrder", + proto.MESSAGE, + number=38, + message='Model.ArimaOrder', ) - data_frequency = proto.Field( - proto.ENUM, number=39, enum="Model.DataFrequency", + proto.ENUM, + number=39, + enum='Model.DataFrequency', + ) + include_drift = proto.Field( + proto.BOOL, + number=41, ) - - include_drift = proto.Field(proto.BOOL, number=41) - holiday_region = proto.Field( - proto.ENUM, number=42, enum="Model.HolidayRegion", + proto.ENUM, + number=42, + enum='Model.HolidayRegion', + ) + time_series_id_column = proto.Field( + proto.STRING, + number=43, + ) + horizon = proto.Field( + proto.INT64, + number=44, + ) + preserve_input_structs = proto.Field( + proto.BOOL, + number=45, + ) + auto_arima_max_order = proto.Field( + proto.INT64, + number=46, ) - - time_series_id_column = proto.Field(proto.STRING, number=43) - - horizon = proto.Field(proto.INT64, number=44) - - preserve_input_structs = proto.Field(proto.BOOL, number=45) - - auto_arima_max_order = proto.Field(proto.INT64, number=46) class IterationResult(proto.Message): r"""Information about a single iteration of the training run. - Attributes: index (google.protobuf.wrappers_pb2.Int32Value): Index of the iteration, 0 based. @@ -1248,7 +1414,6 @@ class IterationResult(proto.Message): class ClusterInfo(proto.Message): r"""Information about a single cluster for clustering model. - Attributes: centroid_id (int): Centroid id. @@ -1260,14 +1425,19 @@ class ClusterInfo(proto.Message): assigned to the cluster. """ - centroid_id = proto.Field(proto.INT64, number=1) - + centroid_id = proto.Field( + proto.INT64, + number=1, + ) cluster_radius = proto.Field( - proto.MESSAGE, number=2, message=wrappers.DoubleValue, + proto.MESSAGE, + number=2, + message=wrappers_pb2.DoubleValue, ) - cluster_size = proto.Field( - proto.MESSAGE, number=3, message=wrappers.Int64Value, + proto.MESSAGE, + number=3, + message=wrappers_pb2.Int64Value, ) class ArimaResult(proto.Message): @@ -1287,7 +1457,6 @@ class ArimaResult(proto.Message): class ArimaCoefficients(proto.Message): r"""Arima coefficients. 
- Attributes: auto_regressive_coefficients (Sequence[float]): Auto-regressive coefficients, an array of @@ -1301,18 +1470,20 @@ class ArimaCoefficients(proto.Message): """ auto_regressive_coefficients = proto.RepeatedField( - proto.DOUBLE, number=1 + proto.DOUBLE, + number=1, ) - moving_average_coefficients = proto.RepeatedField( - proto.DOUBLE, number=2 + proto.DOUBLE, + number=2, + ) + intercept_coefficient = proto.Field( + proto.DOUBLE, + number=3, ) - - intercept_coefficient = proto.Field(proto.DOUBLE, number=3) class ArimaModelInfo(proto.Message): r"""Arima model information. - Attributes: non_seasonal_order (google.cloud.bigquery_v2.types.Model.ArimaOrder): Non-seasonal order. @@ -1331,131 +1502,178 @@ class ArimaModelInfo(proto.Message): """ non_seasonal_order = proto.Field( - proto.MESSAGE, number=1, message="Model.ArimaOrder", + proto.MESSAGE, + number=1, + message='Model.ArimaOrder', ) - arima_coefficients = proto.Field( proto.MESSAGE, number=2, - message="Model.TrainingRun.IterationResult.ArimaResult.ArimaCoefficients", + message='Model.TrainingRun.IterationResult.ArimaResult.ArimaCoefficients', ) - arima_fitting_metrics = proto.Field( - proto.MESSAGE, number=3, message="Model.ArimaFittingMetrics", + proto.MESSAGE, + number=3, + message='Model.ArimaFittingMetrics', + ) + has_drift = proto.Field( + proto.BOOL, + number=4, + ) + time_series_id = proto.Field( + proto.STRING, + number=5, ) - - has_drift = proto.Field(proto.BOOL, number=4) - - time_series_id = proto.Field(proto.STRING, number=5) - seasonal_periods = proto.RepeatedField( proto.ENUM, number=6, - enum="Model.SeasonalPeriod.SeasonalPeriodType", + enum='Model.SeasonalPeriod.SeasonalPeriodType', ) arima_model_info = proto.RepeatedField( proto.MESSAGE, number=1, - message="Model.TrainingRun.IterationResult.ArimaResult.ArimaModelInfo", + message='Model.TrainingRun.IterationResult.ArimaResult.ArimaModelInfo', ) - seasonal_periods = proto.RepeatedField( proto.ENUM, number=2, - enum="Model.SeasonalPeriod.SeasonalPeriodType", + enum='Model.SeasonalPeriod.SeasonalPeriodType', ) - index = proto.Field(proto.MESSAGE, number=1, message=wrappers.Int32Value,) - + index = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.Int32Value, + ) duration_ms = proto.Field( - proto.MESSAGE, number=4, message=wrappers.Int64Value, + proto.MESSAGE, + number=4, + message=wrappers_pb2.Int64Value, ) - training_loss = proto.Field( - proto.MESSAGE, number=5, message=wrappers.DoubleValue, + proto.MESSAGE, + number=5, + message=wrappers_pb2.DoubleValue, ) - eval_loss = proto.Field( - proto.MESSAGE, number=6, message=wrappers.DoubleValue, + proto.MESSAGE, + number=6, + message=wrappers_pb2.DoubleValue, + ) + learn_rate = proto.Field( + proto.DOUBLE, + number=7, ) - - learn_rate = proto.Field(proto.DOUBLE, number=7) - cluster_infos = proto.RepeatedField( proto.MESSAGE, number=8, - message="Model.TrainingRun.IterationResult.ClusterInfo", + message='Model.TrainingRun.IterationResult.ClusterInfo', ) - arima_result = proto.Field( proto.MESSAGE, number=9, - message="Model.TrainingRun.IterationResult.ArimaResult", + message='Model.TrainingRun.IterationResult.ArimaResult', ) training_options = proto.Field( - proto.MESSAGE, number=1, message="Model.TrainingRun.TrainingOptions", + proto.MESSAGE, + number=1, + message='Model.TrainingRun.TrainingOptions', + ) + start_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, ) - - start_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) - results = 
proto.RepeatedField( - proto.MESSAGE, number=6, message="Model.TrainingRun.IterationResult", + proto.MESSAGE, + number=6, + message='Model.TrainingRun.IterationResult', ) - evaluation_metrics = proto.Field( - proto.MESSAGE, number=7, message="Model.EvaluationMetrics", + proto.MESSAGE, + number=7, + message='Model.EvaluationMetrics', ) - data_split_result = proto.Field( - proto.MESSAGE, number=9, message="Model.DataSplitResult", + proto.MESSAGE, + number=9, + message='Model.DataSplitResult', ) - global_explanations = proto.RepeatedField( - proto.MESSAGE, number=10, message="Model.GlobalExplanation", + proto.MESSAGE, + number=10, + message='Model.GlobalExplanation', ) - etag = proto.Field(proto.STRING, number=1) - + etag = proto.Field( + proto.STRING, + number=1, + ) model_reference = proto.Field( - proto.MESSAGE, number=2, message=gcb_model_reference.ModelReference, + proto.MESSAGE, + number=2, + message=gcb_model_reference.ModelReference, + ) + creation_time = proto.Field( + proto.INT64, + number=5, + ) + last_modified_time = proto.Field( + proto.INT64, + number=6, + ) + description = proto.Field( + proto.STRING, + number=12, + ) + friendly_name = proto.Field( + proto.STRING, + number=14, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=15, + ) + expiration_time = proto.Field( + proto.INT64, + number=16, + ) + location = proto.Field( + proto.STRING, + number=13, ) - - creation_time = proto.Field(proto.INT64, number=5) - - last_modified_time = proto.Field(proto.INT64, number=6) - - description = proto.Field(proto.STRING, number=12) - - friendly_name = proto.Field(proto.STRING, number=14) - - labels = proto.MapField(proto.STRING, proto.STRING, number=15) - - expiration_time = proto.Field(proto.INT64, number=16) - - location = proto.Field(proto.STRING, number=13) - encryption_configuration = proto.Field( - proto.MESSAGE, number=17, message=encryption_config.EncryptionConfiguration, + proto.MESSAGE, + number=17, + message=encryption_config.EncryptionConfiguration, + ) + model_type = proto.Field( + proto.ENUM, + number=7, + enum=ModelType, + ) + training_runs = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=TrainingRun, ) - - model_type = proto.Field(proto.ENUM, number=7, enum=ModelType,) - - training_runs = proto.RepeatedField(proto.MESSAGE, number=9, message=TrainingRun,) - feature_columns = proto.RepeatedField( - proto.MESSAGE, number=10, message=standard_sql.StandardSqlField, + proto.MESSAGE, + number=10, + message=standard_sql.StandardSqlField, ) - label_columns = proto.RepeatedField( - proto.MESSAGE, number=11, message=standard_sql.StandardSqlField, + proto.MESSAGE, + number=11, + message=standard_sql.StandardSqlField, ) class GetModelRequest(proto.Message): r""" - Attributes: project_id (str): Required. Project ID of the requested model. @@ -1465,16 +1683,22 @@ class GetModelRequest(proto.Message): Required. Model ID of the requested model. """ - project_id = proto.Field(proto.STRING, number=1) - - dataset_id = proto.Field(proto.STRING, number=2) - - model_id = proto.Field(proto.STRING, number=3) + project_id = proto.Field( + proto.STRING, + number=1, + ) + dataset_id = proto.Field( + proto.STRING, + number=2, + ) + model_id = proto.Field( + proto.STRING, + number=3, + ) class PatchModelRequest(proto.Message): r""" - Attributes: project_id (str): Required. Project ID of the model to patch. @@ -1489,18 +1713,27 @@ class PatchModelRequest(proto.Message): set to default value. 
""" - project_id = proto.Field(proto.STRING, number=1) - - dataset_id = proto.Field(proto.STRING, number=2) - - model_id = proto.Field(proto.STRING, number=3) - - model = proto.Field(proto.MESSAGE, number=4, message="Model",) + project_id = proto.Field( + proto.STRING, + number=1, + ) + dataset_id = proto.Field( + proto.STRING, + number=2, + ) + model_id = proto.Field( + proto.STRING, + number=3, + ) + model = proto.Field( + proto.MESSAGE, + number=4, + message='Model', + ) class DeleteModelRequest(proto.Message): r""" - Attributes: project_id (str): Required. Project ID of the model to delete. @@ -1510,16 +1743,22 @@ class DeleteModelRequest(proto.Message): Required. Model ID of the model to delete. """ - project_id = proto.Field(proto.STRING, number=1) - - dataset_id = proto.Field(proto.STRING, number=2) - - model_id = proto.Field(proto.STRING, number=3) + project_id = proto.Field( + proto.STRING, + number=1, + ) + dataset_id = proto.Field( + proto.STRING, + number=2, + ) + model_id = proto.Field( + proto.STRING, + number=3, + ) class ListModelsRequest(proto.Message): r""" - Attributes: project_id (str): Required. Project ID of the models to list. @@ -1534,18 +1773,27 @@ class ListModelsRequest(proto.Message): request the next page of results """ - project_id = proto.Field(proto.STRING, number=1) - - dataset_id = proto.Field(proto.STRING, number=2) - - max_results = proto.Field(proto.MESSAGE, number=3, message=wrappers.UInt32Value,) - - page_token = proto.Field(proto.STRING, number=4) + project_id = proto.Field( + proto.STRING, + number=1, + ) + dataset_id = proto.Field( + proto.STRING, + number=2, + ) + max_results = proto.Field( + proto.MESSAGE, + number=3, + message=wrappers_pb2.UInt32Value, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) class ListModelsResponse(proto.Message): r""" - Attributes: models (Sequence[google.cloud.bigquery_v2.types.Model]): Models in the requested dataset. Only the following fields @@ -1559,9 +1807,15 @@ class ListModelsResponse(proto.Message): def raw_page(self): return self - models = proto.RepeatedField(proto.MESSAGE, number=1, message="Model",) - - next_page_token = proto.Field(proto.STRING, number=2) + models = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Model', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigquery_v2/types/model_reference.py b/google/cloud/bigquery_v2/types/model_reference.py index e3891d6c1..7dfe7b30f 100644 --- a/google/cloud/bigquery_v2/types/model_reference.py +++ b/google/cloud/bigquery_v2/types/model_reference.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,18 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore __protobuf__ = proto.module( - package="google.cloud.bigquery.v2", manifest={"ModelReference",}, + package='google.cloud.bigquery.v2', + manifest={ + 'ModelReference', + }, ) class ModelReference(proto.Message): r"""Id path of a model. - Attributes: project_id (str): Required. The ID of the project containing @@ -39,11 +39,18 @@ class ModelReference(proto.Message): maximum length is 1,024 characters. 
""" - project_id = proto.Field(proto.STRING, number=1) - - dataset_id = proto.Field(proto.STRING, number=2) - - model_id = proto.Field(proto.STRING, number=3) + project_id = proto.Field( + proto.STRING, + number=1, + ) + dataset_id = proto.Field( + proto.STRING, + number=2, + ) + model_id = proto.Field( + proto.STRING, + number=3, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigquery_v2/types/standard_sql.py b/google/cloud/bigquery_v2/types/standard_sql.py index 3bc6afedc..8085223c0 100644 --- a/google/cloud/bigquery_v2/types/standard_sql.py +++ b/google/cloud/bigquery_v2/types/standard_sql.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore __protobuf__ = proto.module( - package="google.cloud.bigquery.v2", - manifest={"StandardSqlDataType", "StandardSqlField", "StandardSqlStructType",}, + package='google.cloud.bigquery.v2', + manifest={ + 'StandardSqlDataType', + 'StandardSqlField', + 'StandardSqlStructType', + }, ) @@ -43,7 +45,6 @@ class StandardSqlDataType(proto.Message): The fields of this struct, in order, if type_kind = "STRUCT". """ - class TypeKind(proto.Enum): r"""""" TYPE_KIND_UNSPECIFIED = 0 @@ -62,20 +63,27 @@ class TypeKind(proto.Enum): ARRAY = 16 STRUCT = 17 - type_kind = proto.Field(proto.ENUM, number=1, enum=TypeKind,) - + type_kind = proto.Field( + proto.ENUM, + number=1, + enum=TypeKind, + ) array_element_type = proto.Field( - proto.MESSAGE, number=2, oneof="sub_type", message="StandardSqlDataType", + proto.MESSAGE, + number=2, + oneof='sub_type', + message='StandardSqlDataType', ) - struct_type = proto.Field( - proto.MESSAGE, number=3, oneof="sub_type", message="StandardSqlStructType", + proto.MESSAGE, + number=3, + oneof='sub_type', + message='StandardSqlStructType', ) class StandardSqlField(proto.Message): r"""A field or a column. - Attributes: name (str): Optional. The name of this field. Can be @@ -88,20 +96,29 @@ class StandardSqlField(proto.Message): this "type" field). """ - name = proto.Field(proto.STRING, number=1) - - type = proto.Field(proto.MESSAGE, number=2, message="StandardSqlDataType",) + name = proto.Field( + proto.STRING, + number=1, + ) + type = proto.Field( + proto.MESSAGE, + number=2, + message='StandardSqlDataType', + ) class StandardSqlStructType(proto.Message): r""" - Attributes: fields (Sequence[google.cloud.bigquery_v2.types.StandardSqlField]): """ - fields = proto.RepeatedField(proto.MESSAGE, number=1, message="StandardSqlField",) + fields = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='StandardSqlField', + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigquery_v2/types/table_reference.py b/google/cloud/bigquery_v2/types/table_reference.py index d213e8bb6..2e6a37202 100644 --- a/google/cloud/bigquery_v2/types/table_reference.py +++ b/google/cloud/bigquery_v2/types/table_reference.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,18 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore __protobuf__ = proto.module( - package="google.cloud.bigquery.v2", manifest={"TableReference",}, + package='google.cloud.bigquery.v2', + manifest={ + 'TableReference', + }, ) class TableReference(proto.Message): r""" - Attributes: project_id (str): Required. The ID of the project containing @@ -41,11 +41,18 @@ class TableReference(proto.Message): as ``sample_table$20190123``. """ - project_id = proto.Field(proto.STRING, number=1) - - dataset_id = proto.Field(proto.STRING, number=2) - - table_id = proto.Field(proto.STRING, number=3) + project_id = proto.Field( + proto.STRING, + number=1, + ) + dataset_id = proto.Field( + proto.STRING, + number=2, + ) + table_id = proto.Field( + proto.STRING, + number=3, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/samples/geography/noxfile.py b/samples/geography/noxfile.py index be1a3f251..956cdf4f9 100644 --- a/samples/geography/noxfile.py +++ b/samples/geography/noxfile.py @@ -38,25 +38,28 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # Old samples are opted out of enforcing Python type hints # All new samples should feature them - "enforce_type_hints": False, + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -71,12 +74,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -85,7 +88,7 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -134,7 +137,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: + if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -143,11 +146,9 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." 
] session.run("flake8", *args) - - # # Black # @@ -160,7 +161,6 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) - # # Sample Tests # @@ -169,9 +169,7 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): if os.path.exists("constraints.txt"): @@ -208,9 +206,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index be1a3f251..956cdf4f9 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -38,25 +38,28 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # Old samples are opted out of enforcing Python type hints # All new samples should feature them - "enforce_type_hints": False, + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -71,12 +74,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -85,7 +88,7 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -134,7 +137,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: + if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -143,11 +146,9 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." 
] session.run("flake8", *args) - - # # Black # @@ -160,7 +161,6 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) - # # Sample Tests # @@ -169,9 +169,7 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): if os.path.exists("constraints.txt"): @@ -208,9 +206,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # diff --git a/synth.metadata b/synth.metadata index b031618b0..d047a740f 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,29 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery.git", - "sha": "f95f415d3441b3928f6cc705cb8a75603d790fd6" + "sha": "a6a4eeac8f832cf9e24b0a4391b9848587fb6d29" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "28a591963253d52ce3a25a918cafbdd9928de8cf", - "internalRef": "361662015" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "043cc620d6a6111816d9e09f2a97208565fde958" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "043cc620d6a6111816d9e09f2a97208565fde958" + "sha": "83a7e1c8c2f7421ded45ed323eb1fda99ef5ea46", + "internalRef": "372197450" } } ], @@ -102,7 +88,9 @@ "docs/_templates/layout.html", "docs/bigquery_v2/types.rst", "docs/conf.py", + "examples/feature_fragments", "google/cloud/bigquery_v2/__init__.py", + "google/cloud/bigquery_v2/gapic_metadata.json", "google/cloud/bigquery_v2/proto/encryption_config.proto", "google/cloud/bigquery_v2/proto/model.proto", "google/cloud/bigquery_v2/proto/model_reference.proto", @@ -129,6 +117,9 @@ "scripts/readme-gen/templates/install_deps.tmpl.rst", "scripts/readme-gen/templates/install_portaudio.tmpl.rst", "setup.cfg", - "testing/.gitignore" + "testing/.gitignore", + "tests/__init__.py", + "tests/unit/__init__.py", + "tests/unit/gapic/__init__.py" ] } \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py index e69de29bb..b54a5fcc4 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index df379f1e9..b54a5fcc4 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2016 Google LLC + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,3 +13,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 000000000..b54a5fcc4 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#
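
Below is a minimal usage sketch, not part of the patch above, illustrating the regenerated bigquery_v2 surface this diff produces. It assumes the package built from this diff is installed with proto-plus available; the project, dataset, and model IDs and the KMS key name are placeholder values, not anything defined in the diff.

# Hypothetical example against the regenerated google.cloud.bigquery_v2 package.
from google.cloud import bigquery_v2

# Request messages re-exported by the regenerated __init__.py can be built
# directly; proto-plus handles the underlying protobuf plumbing.
request = bigquery_v2.GetModelRequest(
    project_id="my-project",   # placeholder
    dataset_id="my_dataset",   # placeholder
    model_id="my_model",       # placeholder
)

# Wrapper-typed fields (declared with wrappers_pb2 in the regenerated types)
# accept plain Python values; proto-plus wraps them into StringValue and
# unwraps them again on read.
config = bigquery_v2.EncryptionConfiguration(
    kms_key_name="projects/my-project/locations/us/keyRings/ring/cryptoKeys/key",  # placeholder
)

print(request.model_id, config.kms_key_name)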