diff --git a/.gitignore b/.gitignore index 52b77d7f42..b9daa52f11 100644 --- a/.gitignore +++ b/.gitignore @@ -29,7 +29,6 @@ pip-log.txt .nox .cache .pytest_cache -.pytype # Mac @@ -47,6 +46,7 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated +docs.metadata # Virtual environment env/ @@ -58,4 +58,4 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 707c024405..f26796a0b9 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -39,4 +39,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile new file mode 100644 index 0000000000..412b0b56a9 --- /dev/null +++ b/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + + +COPY fetch_gpg_keys.sh /tmp +# Install the desired versions of Python. 
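A note on the `build.sh` change above before the Python build steps continue: the new guard lets a Kokoro job target a single nox session through the `NOX_SESSION` env var, and falls back to running every session. A minimal Python sketch of the same dispatch (the helper name is hypothetical; interpreter and module names are taken from the script):

```python
# Hypothetical Python rendering of the NOX_SESSION dispatch in build.sh.
import os
import subprocess

def run_nox() -> None:
    cmd = ["python3.6", "-m", "nox"]
    session = os.environ.get("NOX_SESSION")
    if session:
        # Run only the requested session, e.g. NOX_SESSION=unit-3.8.
        cmd += ["-s", session]
    subprocess.run(cmd, check=True)  # raise on failure, like `set -e`
```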
+RUN set -ex \ + && export GNUPGHOME="$(mktemp -d)" \ + && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ + && /tmp/fetch_gpg_keys.sh \ + && for PYTHON_VERSION in 3.7.8 3.8.5; do \ + wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ + && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ + && mkdir -p /usr/src/python-${PYTHON_VERSION} \ + && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ + && rm python-${PYTHON_VERSION}.tar.xz \ + && cd /usr/src/python-${PYTHON_VERSION} \ + && ./configure \ + --enable-shared \ + # This works only on Python 2.7 and throws a warning on every other + # version, but seems otherwise harmless. + --enable-unicode=ucs4 \ + --with-system-ffi \ + --without-ensurepip \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + ; done \ + && rm -rf "${GNUPGHOME}" \ + && rm -rf /usr/src/python* \ + && rm -rf ~/.cache/ + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.7 /tmp/get-pip.py \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.7"] diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh new file mode 100755 index 0000000000..d653dd868e --- /dev/null +++ b/.kokoro/docker/docs/fetch_gpg_keys.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A script to fetch gpg keys with retry. +# Avoid jinja parsing the file. +# + +function retry { + if [[ "${#}" -le 1 ]]; then + echo "Usage: ${0} retry_count commands.." + exit 1 + fi + local retries=${1} + local command="${@:2}" + until [[ "${retries}" -le 0 ]]; do + $command && return 0 + if [[ $? -ne 0 ]]; then + echo "command failed, retrying" + ((retries--)) + fi + done + return 1 +} + +# 3.6.9, 3.7.5 (Ned Deily) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + +# 3.8.0 (Ɓukasz Langa) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + E3FF2839C048B25C084DEBE9B26995E310250568 + +# diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index f8f29f5dbe..7869d4d7a5 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-firestore/.kokoro/trampoline.sh" +build_file: "python-firestore/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
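Returning to the `retry` helper introduced in `fetch_gpg_keys.sh` above: it reruns a command up to N times and returns success as soon as one attempt succeeds. A hedged Python equivalent of that loop (semantics inferred from the shell function):

```python
# Illustrative Python counterpart of the shell `retry` helper.
import subprocess

def retry(retries: int, *command: str) -> bool:
    """Run `command`, retrying up to `retries` times; True on success."""
    for attempt in range(retries):
        if subprocess.run(command).returncode == 0:
            return True
        print(f"command failed, retrying ({attempt + 1}/{retries})")
    return False

# Usage mirroring the script:
# retry(3, "gpg", "--keyserver", "ha.pool.sks-keyservers.net",
#       "--recv-keys", "0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D")
```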
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" @@ -28,6 +28,23 @@ env_vars: { value: "docs-staging" } +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2-staging" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. +env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + # Fetch the token needed for reporting release status to GitHub before_action { fetch_keystore { diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 0000000000..1118107829 --- /dev/null +++ b/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index f868be2a39..8acb14e802 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -18,26 +18,16 @@ set -eo pipefail # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 -cd github/python-firestore - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ @@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. 
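The `create-metadata` invocation that follows pulls each flag's value out of `.repo-metadata.json` with `jq`. A hedged Python sketch of those lookups (assumes the file sits in the working directory; `// empty` maps to an empty-string default):

```python
# Sketch: the .repo-metadata.json lookups behind the jq calls below.
import json

with open(".repo-metadata.json") as f:
    meta = json.load(f)

name = meta.get("name", "")              # jq '.name // empty'
language = meta.get("language", "")      # jq '.language // empty'
product_page = meta.get("product_documentation", "")
github_repository = meta.get("repo", "")
issue_tracker = meta.get("issue_tracker", "")
```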
+python3 -m docuploader create-metadata \
+  --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+  --version=$(python3 setup.py --version) \
+  --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+  --distribution-name=$(python3 setup.py --name) \
+  --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+  --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+  --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 0000000000..719bcd5ba8
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does three things:
+#
+# 1. Prepare the Docker image for the test
+# 2. Run Docker with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR.)
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+#   (true|false): Whether to upload the Docker image after a
+#   successful build.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+#   Defaults to /workspace.
+# There may also be repo-specific envvars in .trampolinerc in
+# the project root.
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+    readonly IO_COLOR_RED="$(tput setaf 1)"
+    readonly IO_COLOR_GREEN="$(tput setaf 2)"
+    readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+    readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+    readonly IO_COLOR_RED=""
+    readonly IO_COLOR_GREEN=""
+    readonly IO_COLOR_YELLOW=""
+    readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+    [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+    local color="$1"
+    shift
+    local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+    echo "================================================================"
+    echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+    echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+    log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+    log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+    log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+    log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+    rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+    # TRAMPOLINE_V2 variables.
+    # Tells scripts whether they are running as part of CI or not.
+    "RUNNING_IN_CI"
+    # Indicates which CI system we're in.
+    "TRAMPOLINE_CI"
+    # Indicates the version of the script.
+    "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI system we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container to tell it which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+    # Descriptive env var indicating we're on CI.
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="kokoro"
+    if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+        if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+            log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+            exit 1
+        fi
+        # This service account will be activated later.
+        TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+    else
+        if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+            gcloud auth list
+        fi
+        log_yellow "Configuring Container Registry access"
+        gcloud auth configure-docker --quiet
+    fi
+    pass_down_envvars+=(
+        # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For Build Cop Bot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. 
"github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exists. + "--rm" + + # Use the host network. + "--network=host" + + # Run in priviledged mode. We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. + "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. 
+ "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. + "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." + docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/.trampolinerc b/.trampolinerc new file mode 100644 index 0000000000..995ee29111 --- /dev/null +++ b/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. 
+required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/docs/conf.py b/docs/conf.py index 12129534a6..aee645c72d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,6 +20,10 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ @@ -90,7 +94,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py index 7e7dcc3f65..7cbbf204c9 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py @@ -28,8 +28,8 @@ from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation as ga_operation # type: ignore -from google.api_core import operation_async # type: ignore +from google.api_core import operation as ga_operation +from google.api_core import operation_async from google.cloud.firestore_admin_v1.services.firestore_admin import pagers from google.cloud.firestore_admin_v1.types import field from google.cloud.firestore_admin_v1.types import field as gfa_field @@ -39,7 +39,7 @@ from google.cloud.firestore_admin_v1.types import operation as gfa_operation from google.protobuf import empty_pb2 as empty # type: ignore -from .transports.base import FirestoreAdminTransport +from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport from .client import FirestoreAdminClient @@ -71,6 +71,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, FirestoreAdminTransport] = "grpc_asyncio", client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the firestore admin client. 
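The new `client_info` parameter threaded through the constructor above lets callers attach their own user-agent metadata to every request. A hedged usage sketch (the agent string is illustrative; credentials are still resolved the usual way):

```python
from google.api_core.gapic_v1.client_info import ClientInfo
from google.cloud.firestore_admin_v1.services.firestore_admin.async_client import (
    FirestoreAdminAsyncClient,
)

# Requires application default credentials at runtime.
client = FirestoreAdminAsyncClient(
    client_info=ClientInfo(user_agent="my-sample-app/1.2.3"),
)
```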
@@ -103,7 +104,10 @@ def __init__( """ self._client = FirestoreAdminClient( - credentials=credentials, transport=transport, client_options=client_options, + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, ) async def create_index( @@ -179,7 +183,7 @@ async def create_index( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_index, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -261,7 +265,7 @@ async def list_indexes( rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_indexes, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -339,7 +343,7 @@ async def get_index( rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_index, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -404,7 +408,7 @@ async def delete_index( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_index, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -477,7 +481,7 @@ async def get_field( rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_field, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -569,7 +573,7 @@ async def update_field( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_field, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -660,7 +664,7 @@ async def list_fields( rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_fields, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -751,7 +755,7 @@ async def export_documents( rpc = gapic_v1.method_async.wrap_method( self._client._transport.export_documents, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -851,7 +855,7 @@ async def import_documents( rpc = gapic_v1.method_async.wrap_method( self._client._transport.import_documents, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -876,11 +880,11 @@ async def import_documents( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-firestore",).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("FirestoreAdminAsyncClient",) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py index b88b18dfb4..9360f86fa2 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py @@ -30,9 +30,9 @@ from 
google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation as ga_operation # type: ignore -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore +from google.api_core import operation as ga_operation +from google.api_core import operation +from google.api_core import operation_async from google.cloud.firestore_admin_v1.services.firestore_admin import pagers from google.cloud.firestore_admin_v1.types import field from google.cloud.firestore_admin_v1.types import field as gfa_field @@ -42,7 +42,7 @@ from google.cloud.firestore_admin_v1.types import operation as gfa_operation from google.protobuf import empty_pb2 as empty # type: ignore -from .transports.base import FirestoreAdminTransport +from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc import FirestoreAdminGrpcTransport from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport @@ -177,6 +177,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, FirestoreAdminTransport] = None, client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the firestore admin client. @@ -202,6 +203,11 @@ def __init__( (2) The ``client_cert_source`` property is used to provide client SSL credentials for mutual TLS transport. If not provided, the default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -258,6 +264,8 @@ def __init__( scopes=client_options.scopes, api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + client_info=client_info, ) def create_index( @@ -312,29 +320,31 @@ def create_index( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, index]): + has_flattened_params = any([parent, index]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = firestore_admin.CreateIndexRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.CreateIndexRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.CreateIndexRequest): + request = firestore_admin.CreateIndexRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent - if index is not None: - request.index = index + if parent is not None: + request.parent = parent + if index is not None: + request.index = index # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
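The `has_flattened_params` guard and the `isinstance` coercion above spell out the calling convention: pass either a fully formed request object or flattened fields, never both, and a request that already has the right type is now used as-is instead of being copied. A short illustration (resource names are placeholders):

```python
from google.cloud.firestore_admin_v1.types import firestore_admin

# Request-object style: no copy is made, since the type already matches.
request = firestore_admin.CreateIndexRequest(
    parent="projects/my-project/databases/(default)/collectionGroups/my-group",
)
# client.create_index(request=request)              # OK
# client.create_index(parent=..., index=...)        # OK (flattened style)
# client.create_index(request=request, parent=...)  # ValueError: both set
```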
- rpc = gapic_v1.method.wrap_method( - self._transport.create_index, - default_timeout=None, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.create_index] # Certain fields should be provided within the metadata header; # add these here. @@ -396,27 +406,29 @@ def list_indexes( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = firestore_admin.ListIndexesRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ListIndexesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ListIndexesRequest): + request = firestore_admin.ListIndexesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_indexes, - default_timeout=None, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.list_indexes] # Certain fields should be provided within the metadata header; # add these here. @@ -474,25 +486,29 @@ def get_index( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = firestore_admin.GetIndexRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.GetIndexRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.GetIndexRequest): + request = firestore_admin.GetIndexRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_index, default_timeout=None, client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.get_index] # Certain fields should be provided within the metadata header; # add these here. @@ -537,27 +553,29 @@ def delete_index( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
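Stepping back from the hunk in progress: each `rpc = self._transport._wrapped_methods[...]` lookup above fetches a method that was wrapped exactly once when the transport was constructed, instead of being re-wrapped on every call. In miniature (assumed shape; the real table is built in `_prep_wrapped_messages`, shown later in this diff):

```python
from google.api_core import gapic_v1

def make_rpc(transport_method, client_info):
    # One-time wrap: attaches retry/timeout defaults and user-agent metadata.
    return gapic_v1.method.wrap_method(
        transport_method, default_timeout=None, client_info=client_info,
    )

# At construction: wrapped = {transport.create_index: make_rpc(...), ...}
# At call time:    rpc = wrapped[transport.create_index]  # dict lookup only
```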
- if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = firestore_admin.DeleteIndexRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.DeleteIndexRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.DeleteIndexRequest): + request = firestore_admin.DeleteIndexRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_index, - default_timeout=None, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_index] # Certain fields should be provided within the metadata header; # add these here. @@ -610,25 +628,29 @@ def get_field( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = firestore_admin.GetFieldRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.GetFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.GetFieldRequest): + request = firestore_admin.GetFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_field, default_timeout=None, client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.get_field] # Certain fields should be provided within the metadata header; # add these here. @@ -700,27 +722,29 @@ def update_field( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([field]): + has_flattened_params = any([field]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = firestore_admin.UpdateFieldRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.UpdateFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, firestore_admin.UpdateFieldRequest): + request = firestore_admin.UpdateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if field is not None: - request.field = field + if field is not None: + request.field = field # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.update_field, - default_timeout=None, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.update_field] # Certain fields should be provided within the metadata header; # add these here. @@ -791,25 +815,29 @@ def list_fields( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = firestore_admin.ListFieldsRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ListFieldsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ListFieldsRequest): + request = firestore_admin.ListFieldsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if parent is not None: - request.parent = parent + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_fields, default_timeout=None, client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.list_fields] # Certain fields should be provided within the metadata header; # add these here. @@ -880,27 +908,29 @@ def export_documents( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = firestore_admin.ExportDocumentsRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ExportDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ExportDocumentsRequest): + request = firestore_admin.ExportDocumentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.export_documents, - default_timeout=None, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.export_documents] # Certain fields should be provided within the metadata header; # add these here. @@ -980,27 +1010,29 @@ def import_documents( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = firestore_admin.ImportDocumentsRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a firestore_admin.ImportDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, firestore_admin.ImportDocumentsRequest): + request = firestore_admin.ImportDocumentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if name is not None: - request.name = name + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.import_documents, - default_timeout=None, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.import_documents] # Certain fields should be provided within the metadata header; # add these here. 
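Where these hunks end with "Certain fields should be provided within the metadata header", the surrounding unchanged code turns routing fields on the request into an `x-goog-request-params` header. The standard GAPIC helper behaves roughly like this (the resource name is a placeholder):

```python
from google.api_core.gapic_v1 import routing_header

metadata = routing_header.to_grpc_metadata(
    (("name", "projects/my-project/databases/my-db"),)
)
# Roughly: ("x-goog-request-params", "name=projects%2Fmy-project%2Fdatabases%2Fmy-db")
print(metadata)
```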
@@ -1024,11 +1056,11 @@ def import_documents( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-firestore",).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("FirestoreAdminClient",) diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py index ee9ce819e4..2e8b631069 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py @@ -17,9 +17,12 @@ import abc import typing +import pkg_resources -from google import auth # type: ignore +from google import auth from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials # type: ignore @@ -30,6 +33,14 @@ from google.protobuf import empty_pb2 as empty # type: ignore +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-firestore",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + class FirestoreAdminTransport(abc.ABC): """Abstract transport class for FirestoreAdmin.""" @@ -45,6 +56,8 @@ def __init__( credentials: credentials.Credentials = None, credentials_file: typing.Optional[str] = None, scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. @@ -60,6 +73,13 @@ def __init__( be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -75,14 +95,52 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes + credentials_file, scopes=scopes, quota_project_id=quota_project_id ) + elif credentials is None: - credentials, _ = auth.default(scopes=scopes) + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) # Save the credentials. self._credentials = credentials + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_index: gapic_v1.method.wrap_method( + self.create_index, default_timeout=None, client_info=client_info, + ), + self.list_indexes: gapic_v1.method.wrap_method( + self.list_indexes, default_timeout=None, client_info=client_info, + ), + self.get_index: gapic_v1.method.wrap_method( + self.get_index, default_timeout=None, client_info=client_info, + ), + self.delete_index: gapic_v1.method.wrap_method( + self.delete_index, default_timeout=None, client_info=client_info, + ), + self.get_field: gapic_v1.method.wrap_method( + self.get_field, default_timeout=None, client_info=client_info, + ), + self.update_field: gapic_v1.method.wrap_method( + self.update_field, default_timeout=None, client_info=client_info, + ), + self.list_fields: gapic_v1.method.wrap_method( + self.list_fields, default_timeout=None, client_info=client_info, + ), + self.export_documents: gapic_v1.method.wrap_method( + self.export_documents, default_timeout=None, client_info=client_info, + ), + self.import_documents: gapic_v1.method.wrap_method( + self.import_documents, default_timeout=None, client_info=client_info, + ), + } + @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py index 9143e3f9ee..46143eaf99 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py @@ -19,6 +19,7 @@ from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -32,7 +33,7 @@ from google.longrunning import operations_pb2 as operations # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore -from .base import FirestoreAdminTransport +from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO class FirestoreAdminGrpcTransport(FirestoreAdminTransport): @@ -60,7 +61,9 @@ def __init__( scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -87,6 +90,13 @@ def __init__( callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -109,7 +119,9 @@ def __init__( ) if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) # Create SSL credentials with client_cert_source or application # default SSL credentials. @@ -128,18 +140,21 @@ def __init__( credentials_file=credentials_file, ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) + self._stubs = {} # type: Dict[str, Callable] + # Run the base constructor. super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, ) - self._stubs = {} # type: Dict[str, Callable] - @classmethod def create_channel( cls, @@ -147,7 +162,8 @@ def create_channel( credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, - **kwargs + quota_project_id: Optional[str] = None, + **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -163,6 +179,8 @@ def create_channel( scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: @@ -178,7 +196,8 @@ def create_channel( credentials=credentials, credentials_file=credentials_file, scopes=scopes, - **kwargs + quota_project_id=quota_project_id, + **kwargs, ) @property diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py index 9fdccc5fd0..e521076215 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py @@ -17,6 +17,7 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials # type: ignore @@ -31,7 +32,7 @@ from google.longrunning import operations_pb2 as operations # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore -from .base import FirestoreAdminTransport +from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO from .grpc import FirestoreAdminGrpcTransport @@ -59,7 +60,8 @@ def create_channel( credentials: credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - **kwargs + quota_project_id: Optional[str] = None, + **kwargs, ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: @@ -75,6 +77,8 @@ def create_channel( scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. 
Returns: @@ -86,7 +90,8 @@ def create_channel( credentials=credentials, credentials_file=credentials_file, scopes=scopes, - **kwargs + quota_project_id=quota_project_id, + **kwargs, ) def __init__( @@ -98,7 +103,9 @@ def __init__( scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -126,6 +133,13 @@ def __init__( callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -164,6 +178,7 @@ def __init__( credentials_file=credentials_file, ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) # Run the base constructor. @@ -172,6 +187,8 @@ def __init__( credentials=credentials, credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, ) self._stubs = {} diff --git a/google/cloud/firestore_admin_v1/types/__init__.py b/google/cloud/firestore_admin_v1/types/__init__.py index 8838c5bb96..f5cbaa99c9 100644 --- a/google/cloud/firestore_admin_v1/types/__init__.py +++ b/google/cloud/firestore_admin_v1/types/__init__.py @@ -30,6 +30,7 @@ ExportDocumentsRequest, ImportDocumentsRequest, ) +from .location import LocationMetadata from .operation import ( IndexOperationMetadata, FieldOperationMetadata, @@ -38,7 +39,6 @@ ExportDocumentsResponse, Progress, ) -from .location import LocationMetadata __all__ = ( @@ -55,11 +55,11 @@ "ListFieldsResponse", "ExportDocumentsRequest", "ImportDocumentsRequest", + "LocationMetadata", "IndexOperationMetadata", "FieldOperationMetadata", "ExportDocumentsMetadata", "ImportDocumentsMetadata", "ExportDocumentsResponse", "Progress", - "LocationMetadata", ) diff --git a/google/cloud/firestore_v1/services/firestore/transports/base.py b/google/cloud/firestore_v1/services/firestore/transports/base.py index 857997f44a..87edcbcdad 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/base.py +++ b/google/cloud/firestore_v1/services/firestore/transports/base.py @@ -18,7 +18,7 @@ import abc import typing -from google import auth # type: ignore +from google import auth from google.api_core import exceptions # type: ignore from google.auth import credentials # type: ignore diff --git a/google/cloud/firestore_v1/types/__init__.py b/google/cloud/firestore_v1/types/__init__.py index 465a2d92e5..137c3130aa 100644 --- a/google/cloud/firestore_v1/types/__init__.py +++ b/google/cloud/firestore_v1/types/__init__.py @@ -68,54 +68,6 @@ BatchWriteRequest, BatchWriteResponse, ) -from typing import Tuple - - -__all__: Tuple[ - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - 
str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, - str, -] __all__ = ( diff --git a/google/cloud/firestore_v1/types/common.py b/google/cloud/firestore_v1/types/common.py index f7bd22a3d9..b03242a4a8 100644 --- a/google/cloud/firestore_v1/types/common.py +++ b/google/cloud/firestore_v1/types/common.py @@ -19,9 +19,6 @@ from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from typing import Any - -__protobuf__: Any __protobuf__ = proto.module( diff --git a/google/cloud/firestore_v1/types/document.py b/google/cloud/firestore_v1/types/document.py index b2111b34f2..7104bfc61a 100644 --- a/google/cloud/firestore_v1/types/document.py +++ b/google/cloud/firestore_v1/types/document.py @@ -21,9 +21,6 @@ from google.protobuf import struct_pb2 as struct # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore from google.type import latlng_pb2 as latlng # type: ignore -from typing import Any - -__protobuf__: Any __protobuf__ = proto.module( diff --git a/google/cloud/firestore_v1/types/firestore.py b/google/cloud/firestore_v1/types/firestore.py index 909a782c81..cb0fa75dcb 100644 --- a/google/cloud/firestore_v1/types/firestore.py +++ b/google/cloud/firestore_v1/types/firestore.py @@ -24,9 +24,6 @@ from google.cloud.firestore_v1.types import write from google.protobuf import timestamp_pb2 as timestamp # type: ignore from google.rpc import status_pb2 as gr_status # type: ignore -from typing import Any - -__protobuf__: Any __protobuf__ = proto.module( diff --git a/google/cloud/firestore_v1/types/query.py b/google/cloud/firestore_v1/types/query.py index bea9a10a50..a65b0191bb 100644 --- a/google/cloud/firestore_v1/types/query.py +++ b/google/cloud/firestore_v1/types/query.py @@ -20,9 +20,6 @@ from google.cloud.firestore_v1.types import document from google.protobuf import wrappers_pb2 as wrappers # type: ignore -from typing import Any - -__protobuf__: Any __protobuf__ = proto.module( diff --git a/google/cloud/firestore_v1/types/write.py b/google/cloud/firestore_v1/types/write.py index 12cdf99b62..6b3f49b530 100644 --- a/google/cloud/firestore_v1/types/write.py +++ b/google/cloud/firestore_v1/types/write.py @@ -21,9 +21,6 @@ from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document as gf_document from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from typing import Any - -__protobuf__: Any __protobuf__ = proto.module( diff --git a/noxfile.py b/noxfile.py index 82daad6af0..b430f74b21 100644 --- a/noxfile.py +++ b/noxfile.py @@ -22,7 +22,7 @@ import nox -PYTYPE_VERSION = "pytype==2020.7.24" + BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] @@ -61,14 +61,6 @@ def blacken(session): ) -@nox.session(python="3.7") -def pytype(session): - """Run pytype - """ - session.install(PYTYPE_VERSION) - session.run("pytype",) - - @nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" @@ -76,15 +68,16 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") -def default(session, test_dir, ignore_dir=None): +def default(session): # Install all test dependencies, then install this package in-place. - session.install("pytest-asyncio", "aiounittest") + session.install("asyncmock", "pytest-asyncio") session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") # Run py.test against the unit tests. 
- args = [ + session.run( + "py.test", "--quiet", "--cov=google.cloud.firestore", "--cov=google.cloud", @@ -93,22 +86,15 @@ def default(session, test_dir, ignore_dir=None): "--cov-config=.coveragerc", "--cov-report=", "--cov-fail-under=0", - test_dir, + os.path.join("tests", "unit"), *session.posargs, - ] - - if ignore_dir: - args.insert(0, f"--ignore={ignore_dir}") - - session.run("py.test", *args) + ) @nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): - """Run the unit test suite for sync tests.""" - default( - session, os.path.join("tests", "unit"), - ) + """Run the unit test suite.""" + default(session) @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) @@ -116,6 +102,10 @@ def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("FIRESTORE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -132,7 +122,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install( - "mock", "pytest", "pytest-asyncio", "google-cloud-testutils", + "mock", "pytest", "google-cloud-testutils", ) session.install("-e", ".") @@ -151,7 +141,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") @@ -176,3 +166,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/scripts/fixup_admin_v1_keywords.py b/scripts/fixup_admin_v1_keywords.py index b3cb9d1478..4666d89efc 100644 --- a/scripts/fixup_admin_v1_keywords.py +++ b/scripts/fixup_admin_v1_keywords.py @@ -49,6 +49,7 @@ class adminCallTransformer(cst.CSTTransformer): 'list_fields': ('parent', 'filter', 'page_size', 'page_token', ), 'list_indexes': ('parent', 'filter', 'page_size', 'page_token', ), 'update_field': ('field', 'update_mask', ), + } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/setup.cfg b/setup.cfg index f0c722b1ed..c3a2b39f65 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,14 +17,3 @@ # Generated by synthtool. DO NOT EDIT! 
[bdist_wheel] universal = 1 - -[pytype] -python_version = 3.8 -inputs = - google/cloud/ -exclude = - tests/ -output = .pytype/ -# Workaround for https://github.com/google/pytype/issues/150 -disable = pyi-error - diff --git a/synth.metadata b/synth.metadata index cdaf4ab812..47b17fec31 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,8 +3,8 @@ { "git": { "name": ".", - "remote": "git@github.com:crwilcox/python-firestore.git", - "sha": "cc25d5ebfb8cc39b63bff2383e81d16793d42b20" + "remote": "https://github.com/googleapis/python-firestore.git", + "sha": "a22ecddfa31865fcb02ffa0eed44a776d228b8a4" } }, { @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "3a89215abd0e66dfc4f21d07d552d0b543abf082" + "sha": "f8823dec98277a9516f2fb6fae9f58b3a59a23e1" } } ], diff --git a/tests/unit/gapic/admin_v1/__init__.py b/tests/unit/gapic/admin_v1/__init__.py index e69de29bb2..8b13789179 100644 --- a/tests/unit/gapic/admin_v1/__init__.py +++ b/tests/unit/gapic/admin_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/admin_v1/test_firestore_admin.py b/tests/unit/gapic/admin_v1/test_firestore_admin.py index 0e6e9c27cb..a52f68efd2 100644 --- a/tests/unit/gapic/admin_v1/test_firestore_admin.py +++ b/tests/unit/gapic/admin_v1/test_firestore_admin.py @@ -58,6 +58,17 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -124,6 +135,16 @@ def test_firestore_admin_client_get_transport_class(): ), ], ) +@mock.patch.object( + FirestoreAdminClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(FirestoreAdminClient), +) +@mock.patch.object( + FirestoreAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(FirestoreAdminAsyncClient), +) def test_firestore_admin_client_client_options( client_class, transport_class, transport_name ): @@ -150,64 +171,31 @@ def test_firestore_admin_client_client_options( scopes=None, api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "never". 
- os.environ["GOOGLE_API_USE_MTLS"] = "never" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - ) + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "always". - os.environ["GOOGLE_API_USE_MTLS"] = "always" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -217,34 +205,93 @@ def test_firestore_admin_client_client_options( scopes=None, api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): + # "auto", and client_cert_source is provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has # unsupported value. 
- os.environ["GOOGLE_API_USE_MTLS"] = "Unsupported" - with pytest.raises(MutualTLSChannelError): - client = client_class() + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() - del os.environ["GOOGLE_API_USE_MTLS"] + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) @pytest.mark.parametrize( @@ -273,6 +320,8 @@ def test_firestore_admin_client_client_options_scopes( scopes=["1", "2"], api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -302,6 +351,8 @@ def test_firestore_admin_client_client_options_credentials_file( scopes=None, api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -320,17 +371,21 @@ def test_firestore_admin_client_client_options_from_dict(): scopes=None, api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) -def test_create_index(transport: str = "grpc"): +def test_create_index( + transport: str = "grpc", request_type=firestore_admin.CreateIndexRequest +): client = FirestoreAdminClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = firestore_admin.CreateIndexRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.create_index), "__call__") as call: @@ -343,12 +398,16 @@ def test_create_index(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == firestore_admin.CreateIndexRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +def test_create_index_from_dict(): + test_create_index(request_type=dict) + + @pytest.mark.asyncio async def test_create_index_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( @@ -514,14 +573,16 @@ async def test_create_index_flattened_error_async(): ) -def test_list_indexes(transport: str = "grpc"): +def test_list_indexes( + transport: str = "grpc", request_type=firestore_admin.ListIndexesRequest +): client = FirestoreAdminClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = firestore_admin.ListIndexesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client._transport.list_indexes), "__call__") as call: @@ -536,7 +597,7 @@ def test_list_indexes(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == firestore_admin.ListIndexesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListIndexesPager) @@ -544,6 +605,10 @@ def test_list_indexes(transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" +def test_list_indexes_from_dict(): + test_list_indexes(request_type=dict) + + @pytest.mark.asyncio async def test_list_indexes_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( @@ -756,8 +821,8 @@ def test_list_indexes_pages(): RuntimeError, ) pages = list(client.list_indexes(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -821,20 +886,22 @@ async def test_list_indexes_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_indexes(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_indexes(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_get_index(transport: str = "grpc"): +def test_get_index( + transport: str = "grpc", request_type=firestore_admin.GetIndexRequest +): client = FirestoreAdminClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = firestore_admin.GetIndexRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.get_index), "__call__") as call: @@ -851,7 +918,7 @@ def test_get_index(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == firestore_admin.GetIndexRequest() # Establish that the response is the type that we expect. assert isinstance(response, index.Index) @@ -863,6 +930,10 @@ def test_get_index(transport: str = "grpc"): assert response.state == index.Index.State.CREATING +def test_get_index_from_dict(): + test_get_index(request_type=dict) + + @pytest.mark.asyncio async def test_get_index_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( @@ -1022,14 +1093,16 @@ async def test_get_index_flattened_error_async(): ) -def test_delete_index(transport: str = "grpc"): +def test_delete_index( + transport: str = "grpc", request_type=firestore_admin.DeleteIndexRequest +): client = FirestoreAdminClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = firestore_admin.DeleteIndexRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client._transport.delete_index), "__call__") as call: @@ -1042,12 +1115,16 @@ def test_delete_index(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == firestore_admin.DeleteIndexRequest() # Establish that the response is the type that we expect. assert response is None +def test_delete_index_from_dict(): + test_delete_index(request_type=dict) + + @pytest.mark.asyncio async def test_delete_index_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( @@ -1195,14 +1272,16 @@ async def test_delete_index_flattened_error_async(): ) -def test_get_field(transport: str = "grpc"): +def test_get_field( + transport: str = "grpc", request_type=firestore_admin.GetFieldRequest +): client = FirestoreAdminClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = firestore_admin.GetFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.get_field), "__call__") as call: @@ -1215,7 +1294,7 @@ def test_get_field(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == firestore_admin.GetFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, field.Field) @@ -1223,6 +1302,10 @@ def test_get_field(transport: str = "grpc"): assert response.name == "name_value" +def test_get_field_from_dict(): + test_get_field(request_type=dict) + + @pytest.mark.asyncio async def test_get_field_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( @@ -1374,14 +1457,16 @@ async def test_get_field_flattened_error_async(): ) -def test_update_field(transport: str = "grpc"): +def test_update_field( + transport: str = "grpc", request_type=firestore_admin.UpdateFieldRequest +): client = FirestoreAdminClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = firestore_admin.UpdateFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.update_field), "__call__") as call: @@ -1394,12 +1479,16 @@ def test_update_field(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == firestore_admin.UpdateFieldRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) +def test_update_field_from_dict(): + test_update_field(request_type=dict) + + @pytest.mark.asyncio async def test_update_field_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( @@ -1555,14 +1644,16 @@ async def test_update_field_flattened_error_async(): ) -def test_list_fields(transport: str = "grpc"): +def test_list_fields( + transport: str = "grpc", request_type=firestore_admin.ListFieldsRequest +): client = FirestoreAdminClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = firestore_admin.ListFieldsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.list_fields), "__call__") as call: @@ -1577,7 +1668,7 @@ def test_list_fields(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == firestore_admin.ListFieldsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListFieldsPager) @@ -1585,6 +1676,10 @@ def test_list_fields(transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" +def test_list_fields_from_dict(): + test_list_fields(request_type=dict) + + @pytest.mark.asyncio async def test_list_fields_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( @@ -1791,8 +1886,8 @@ def test_list_fields_pages(): RuntimeError, ) pages = list(client.list_fields(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -1852,20 +1947,22 @@ async def test_list_fields_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_fields(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_fields(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_export_documents(transport: str = "grpc"): +def test_export_documents( + transport: str = "grpc", request_type=firestore_admin.ExportDocumentsRequest +): client = FirestoreAdminClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = firestore_admin.ExportDocumentsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1880,12 +1977,16 @@ def test_export_documents(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == firestore_admin.ExportDocumentsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) +def test_export_documents_from_dict(): + test_export_documents(request_type=dict) + + @pytest.mark.asyncio async def test_export_documents_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( @@ -2043,14 +2144,16 @@ async def test_export_documents_flattened_error_async(): ) -def test_import_documents(transport: str = "grpc"): +def test_import_documents( + transport: str = "grpc", request_type=firestore_admin.ImportDocumentsRequest +): client = FirestoreAdminClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = firestore_admin.ImportDocumentsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2065,12 +2168,16 @@ def test_import_documents(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == firestore_admin.ImportDocumentsRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +def test_import_documents_from_dict(): + test_import_documents(request_type=dict) + + @pytest.mark.asyncio async def test_import_documents_async(transport: str = "grpc_asyncio"): client = FirestoreAdminAsyncClient( @@ -2299,9 +2406,13 @@ def test_firestore_admin_base_transport_error(): def test_firestore_admin_base_transport(): # Instantiate the base transport. - transport = transports.FirestoreAdminTransport( - credentials=credentials.AnonymousCredentials(), - ) + with mock.patch( + "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.FirestoreAdminTransport( + credentials=credentials.AnonymousCredentials(), + ) # Every method on the transport should just blindly # raise NotImplementedError. @@ -2328,10 +2439,15 @@ def test_firestore_admin_base_transport(): def test_firestore_admin_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(auth, "load_credentials_from_file") as load_creds: + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.firestore_admin_v1.services.firestore_admin.transports.FirestoreAdminTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.FirestoreAdminTransport( - credentials_file="credentials.json", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2339,6 +2455,7 @@ def test_firestore_admin_base_transport_with_credentials_file(): "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/datastore", ), + quota_project_id="octopus", ) @@ -2351,7 +2468,8 @@ def test_firestore_admin_auth_adc(): scopes=( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/datastore", - ) + ), + quota_project_id=None, ) @@ -2360,12 +2478,15 @@ def test_firestore_admin_transport_auth_adc(): # ADC credentials. 
with mock.patch.object(auth, "default") as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - transports.FirestoreAdminGrpcTransport(host="squid.clam.whelk") + transports.FirestoreAdminGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/datastore", - ) + ), + quota_project_id="octopus", ) @@ -2456,6 +2577,7 @@ def test_firestore_admin_grpc_transport_channel_mtls_with_client_cert_source( "https://www.googleapis.com/auth/datastore", ), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -2493,6 +2615,7 @@ def test_firestore_admin_grpc_asyncio_transport_channel_mtls_with_client_cert_so "https://www.googleapis.com/auth/datastore", ), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -2532,6 +2655,7 @@ def test_firestore_admin_grpc_transport_channel_mtls_with_adc( "https://www.googleapis.com/auth/datastore", ), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -2571,6 +2695,7 @@ def test_firestore_admin_grpc_asyncio_transport_channel_mtls_with_adc( "https://www.googleapis.com/auth/datastore", ), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -2653,3 +2778,24 @@ def test_parse_index_path(): # Check that the path construction is reversible. actual = FirestoreAdminClient.parse_index_path(path) assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.FirestoreAdminTransport, "_prep_wrapped_messages" + ) as prep: + client = FirestoreAdminClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.FirestoreAdminTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = FirestoreAdminClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info)
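The patch above introduces a handful of recurring patterns; the short sketches below illustrate them. Anything not named in the diff itself is a placeholder.

First, every RPC on the admin transport is now wrapped once, at construction time, with gapic_v1.method.wrap_method, so default timeouts, retries, and the x-goog-api-client user agent derived from client_info are applied uniformly instead of per call site. A minimal sketch of what such a wrapper does, using a hypothetical stand-in for a stub method:

    from google.api_core import gapic_v1

    def echo(request, metadata=()):
        # Stand-in for a raw gRPC stub method; only the signature matters here.
        return request

    wrapped = gapic_v1.method.wrap_method(
        echo,
        default_timeout=None,  # matches the diff: no default timeout configured
        client_info=gapic_v1.client_info.ClientInfo(),
    )
    # Calls through like echo, but with x-goog-api-client metadata attached.
    print(wrapped("hello"))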
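Second, quota_project_id is threaded from the client options through the transports into auth.default() and grpc_helpers.create_channel(), so API quota and billing can be attributed to a project other than the one that owns the credentials. Based on the options exercised in the tests, usage looks roughly like this (the project ID is a placeholder):

    from google.api_core.client_options import ClientOptions
    from google.cloud import firestore_admin_v1

    # Requires credentials that are permitted to bill against this project.
    options = ClientOptions(quota_project_id="my-quota-project")
    client = firestore_admin_v1.FirestoreAdminClient(client_options=options)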
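Third, the client_info parameter (defaulting to DEFAULT_CLIENT_INFO) lets a wrapping library identify itself in the user-agent string sent with API requests; the new test_client_withDEFAULT_CLIENT_INFO test verifies the value is handed to _prep_wrapped_messages. A sketch with a placeholder version string:

    from google.api_core import gapic_v1
    from google.cloud import firestore_admin_v1

    info = gapic_v1.client_info.ClientInfo(client_library_version="1.2.3")
    client = firestore_admin_v1.FirestoreAdminClient(client_info=info)

As the docstrings added above note, this is generally only needed when developing your own client library on top of the generated one.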
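Fourth, the system session can now be skipped wholesale from the environment: RUN_SYSTEM_TESTS defaults to "true", and the literal value "false" short-circuits the session before credentials are even checked. Condensed from the noxfile change above:

    import os

    import nox

    @nox.session(python="3.8")
    def system(session):
        # Opt out of credentialed tests, e.g. on continuous builds.
        if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
            session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
        session.install("mock", "pytest", "google-cloud-testutils")
        session.install("-e", ".")
        session.run("py.test", os.path.join("tests", "system"))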
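Fifth, the mTLS tests replace direct os.environ assignment (plus a manual del at the end of the test) with mock.patch.dict, which snapshots and restores the environment even when an assertion fails mid-test, so one test can no longer leak GOOGLE_API_USE_MTLS into the next. The behavior in isolation, assuming the variable is not already set:

    import os
    from unittest import mock

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
        assert os.environ["GOOGLE_API_USE_MTLS"] == "never"

    # The patch context restored the pre-existing environment on exit.
    assert "GOOGLE_API_USE_MTLS" not in os.environ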
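Sixth, each per-RPC unit test now takes a request_type parameter defaulting to the proto class, with a *_from_dict companion re-running it with a plain dict; this exercises proto-plus coercion of mapping requests, which is also why the assertions compare against a freshly constructed request instead of the original object. A self-contained toy of the pattern (ToyRequest and call_api are illustrative stand-ins, not part of the library):

    class ToyRequest:
        # Stand-in for a proto-plus message that also accepts a mapping.
        def __init__(self, mapping=None):
            self.parent = (mapping or {}).get("parent", "")

        def __eq__(self, other):
            return isinstance(other, ToyRequest) and self.parent == other.parent

    def call_api(request):
        # Stand-in for the client method: normalize dicts like proto-plus does.
        if isinstance(request, dict):
            request = ToyRequest(request)
        return request

    def test_create_index(request_type=ToyRequest):
        sent = call_api(request_type())
        # Whatever form the caller used, a canonical request reaches the stub.
        assert sent == ToyRequest()

    def test_create_index_from_dict():
        test_create_index(request_type=dict)

    test_create_index()
    test_create_index_from_dict()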
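Finally, because the base transport's constructor now does real work (wrapping every method via _prep_wrapped_messages, whose property accesses raise NotImplementedError on the abstract base), the base-transport tests patch that hook, or __init__ itself, before instantiating. The same technique outside the suite, mirroring the credentials-file test above:

    from unittest import mock

    from google.auth import credentials
    from google.cloud.firestore_admin_v1.services.firestore_admin import transports

    with mock.patch(
        "google.cloud.firestore_admin_v1.services.firestore_admin"
        ".transports.FirestoreAdminTransport._prep_wrapped_messages"
    ) as prep:
        transport = transports.FirestoreAdminTransport(
            credentials=credentials.AnonymousCredentials(),
        )
        prep.assert_called_once()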